Merge "Added limit to number of Loop Iterations in C2Fuzzer"
diff --git a/apex/manifest.json b/apex/manifest.json
index 5d72031..5b235cd 100644
--- a/apex/manifest.json
+++ b/apex/manifest.json
@@ -1,6 +1,10 @@
{
"name": "com.android.media",
- "version": 319999900,
+
+ // Placeholder module version to be replaced during build.
+ // Do not change!
+ "version": 0,
+
"requireNativeLibs": [
"libandroid.so",
"libbinder_ndk.so",
diff --git a/apex/manifest_codec.json b/apex/manifest_codec.json
index b0d962d..f2b8b36 100644
--- a/apex/manifest_codec.json
+++ b/apex/manifest_codec.json
@@ -1,6 +1,10 @@
{
"name": "com.android.media.swcodec",
- "version": 319999900,
+
+ // Placeholder module version to be replaced during build.
+ // Do not change!
+ "version": 0,
+
"requireNativeLibs": [
":sphal"
]
diff --git a/camera/VendorTagDescriptor.cpp b/camera/VendorTagDescriptor.cpp
index 24fa912..b37803a 100644
--- a/camera/VendorTagDescriptor.cpp
+++ b/camera/VendorTagDescriptor.cpp
@@ -760,7 +760,7 @@
Mutex::Autolock al(sLock);
if (sGlobalVendorTagDescriptorCache == NULL) {
ALOGE("%s: Vendor tag descriptor cache not initialized.", __FUNCTION__);
- return VENDOR_TAG_NAME_ERR;
+ return VENDOR_TAG_TYPE_ERR;
}
return sGlobalVendorTagDescriptorCache->getTagType(tag, id);
}
diff --git a/drm/libmediadrm/interface/mediadrm/DrmUtils.h b/drm/libmediadrm/interface/mediadrm/DrmUtils.h
index ec0b878..e219a91 100644
--- a/drm/libmediadrm/interface/mediadrm/DrmUtils.h
+++ b/drm/libmediadrm/interface/mediadrm/DrmUtils.h
@@ -31,6 +31,7 @@
#include <chrono>
#include <cstddef>
#include <cstdint>
+#include <cstring>
#include <ctime>
#include <deque>
#include <endian.h>
@@ -92,7 +93,8 @@
template <typename... Args>
void LogToBuffer(android_LogPriority level, const uint8_t uuid[16], const char *fmt, Args... args) {
- const uint64_t* uuid2 = reinterpret_cast<const uint64_t*>(uuid);
+ uint64_t uuid2[2] = {};
+ std::memcpy(uuid2, uuid, sizeof(uuid2));
std::string uuidFmt("uuid=[%lx %lx] ");
uuidFmt += fmt;
LogToBuffer(level, uuidFmt.c_str(), htobe64(uuid2[0]), htobe64(uuid2[1]), args...);
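
Note (illustrative, not part of the patch): the memcpy above replaces a reinterpret_cast of the 16-byte UUID to uint64_t*, which relied on the byte buffer being suitably aligned. A minimal standalone sketch of the same pattern, assuming only libc and endian.h; printUuid is a hypothetical helper, not an API in this tree:

    #include <cstdint>
    #include <cstdio>
    #include <cstring>
    #include <endian.h>

    // Copy the UUID bytes into properly aligned 64-bit words before use;
    // htobe64 puts the first UUID byte into the most significant hex digits.
    static void printUuid(const uint8_t uuid[16]) {
        uint64_t words[2] = {};
        std::memcpy(words, uuid, sizeof(words));
        std::printf("uuid=[%llx %llx]\n",
                    (unsigned long long)htobe64(words[0]),
                    (unsigned long long)htobe64(words[1]));
    }
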
diff --git a/include/private/media/AudioTrackShared.h b/include/private/media/AudioTrackShared.h
index bd6db55..a1e1702 100644
--- a/include/private/media/AudioTrackShared.h
+++ b/include/private/media/AudioTrackShared.h
@@ -55,6 +55,7 @@
// for audio_track_cblk_t::mState, to match TrackBase.h
static inline constexpr int CBLK_STATE_IDLE = 0;
+static inline constexpr int CBLK_STATE_ACTIVE = 6;
static inline constexpr int CBLK_STATE_PAUSING = 7;
/**
diff --git a/media/TEST_MAPPING b/media/TEST_MAPPING
index 41fe080..2793e76 100644
--- a/media/TEST_MAPPING
+++ b/media/TEST_MAPPING
@@ -1,24 +1,7 @@
// for frameworks/av/media
{
- "presubmit-large": [
- // runs whenever we change something in this tree
- {
- "name": "CtsMediaCodecTestCases",
- "options": [
- {
- "include-filter": "android.media.codec.cts.EncodeDecodeTest"
- }
- ]
- },
- {
- "name": "CtsMediaCodecTestCases",
- "options": [
- {
- "include-filter": "android.media.codec.cts.DecodeEditEncodeTest"
- }
- ]
- }
- ],
+ // TODO (b/229286407) Add EncodeDecodeTest and DecodeEditEncodeTest to
+ // presubmit-large once issues in cuttlefish are fixed
"presubmit": [
{
"name": "GtsMediaTestCases",
@@ -32,7 +15,8 @@
{
"include-filter": "com.google.android.media.gts.WidevineH264PlaybackTests"
}
- ]
+ ],
+ "file_patterns": ["(?i)drm|crypto"]
}
],
diff --git a/media/codec2/components/aom/C2SoftAomDec.cpp b/media/codec2/components/aom/C2SoftAomDec.cpp
index c7985ca..971b009 100644
--- a/media/codec2/components/aom/C2SoftAomDec.cpp
+++ b/media/codec2/components/aom/C2SoftAomDec.cpp
@@ -539,9 +539,10 @@
std::shared_ptr<C2GraphicBlock> block;
uint32_t format = HAL_PIXEL_FORMAT_YV12;
+ std::shared_ptr<C2StreamColorAspectsTuning::output> defaultColorAspects;
if (img->fmt == AOM_IMG_FMT_I42016) {
IntfImpl::Lock lock = mIntf->lock();
- std::shared_ptr<C2StreamColorAspectsTuning::output> defaultColorAspects = mIntf->getDefaultColorAspects_l();
+ defaultColorAspects = mIntf->getDefaultColorAspects_l();
if (defaultColorAspects->primaries == C2Color::PRIMARIES_BT2020 &&
defaultColorAspects->matrix == C2Color::MATRIX_BT2020 &&
@@ -588,10 +589,10 @@
const uint16_t *srcV = (const uint16_t *)img->planes[AOM_PLANE_V];
if (format == HAL_PIXEL_FORMAT_RGBA_1010102) {
- convertYUV420Planar16ToY410((uint32_t *)dstY, srcY, srcU, srcV, srcYStride / 2,
- srcUStride / 2, srcVStride / 2,
- dstYStride / sizeof(uint32_t),
- mWidth, mHeight);
+ convertYUV420Planar16ToY410OrRGBA1010102(
+ (uint32_t *)dstY, srcY, srcU, srcV, srcYStride / 2, srcUStride / 2,
+ srcVStride / 2, dstYStride / sizeof(uint32_t), mWidth, mHeight,
+ std::static_pointer_cast<const C2ColorAspectsStruct>(defaultColorAspects));
} else {
convertYUV420Planar16ToYV12(dstY, dstU, dstV, srcY, srcU, srcV, srcYStride / 2,
srcUStride / 2, srcVStride / 2, dstYStride, dstUVStride,
diff --git a/media/codec2/components/base/Android.bp b/media/codec2/components/base/Android.bp
index 160e250..809f942 100644
--- a/media/codec2/components/base/Android.bp
+++ b/media/codec2/components/base/Android.bp
@@ -27,6 +27,11 @@
"libsfplugin_ccodec_utils",
],
+ header_libs: [
+ "libarect_headers",
+ "libnativewindow_headers",
+ ],
+
shared_libs: [
"libcutils", // for properties
"liblog", // for ALOG
@@ -61,6 +66,11 @@
"libsfplugin_ccodec_utils",
],
+ header_libs: [
+ "libarect_headers",
+ "libnativewindow_headers",
+ ],
+
shared_libs: [
"libcodec2_soft_common",
"libcutils", // for properties
diff --git a/media/codec2/components/base/SimpleC2Component.cpp b/media/codec2/components/base/SimpleC2Component.cpp
index 434246f..581f40c 100644
--- a/media/codec2/components/base/SimpleC2Component.cpp
+++ b/media/codec2/components/base/SimpleC2Component.cpp
@@ -18,6 +18,7 @@
#define LOG_TAG "SimpleC2Component"
#include <log/log.h>
+#include <android/hardware_buffer.h>
#include <cutils/properties.h>
#include <media/stagefright/foundation/AMessage.h>
@@ -26,6 +27,8 @@
#include <C2Config.h>
#include <C2Debug.h>
#include <C2PlatformSupport.h>
+#include <Codec2BufferUtils.h>
+#include <Codec2CommonUtils.h>
#include <SimpleC2Component.h>
namespace android {
@@ -131,6 +134,215 @@
}
}
+namespace {
+
+static C2ColorAspectsStruct FillMissingColorAspects(
+ std::shared_ptr<const C2ColorAspectsStruct> aspects,
+ int32_t width, int32_t height) {
+ C2ColorAspectsStruct _aspects;
+ if (aspects) {
+ _aspects = *aspects;
+ }
+
+ // use matrix for conversion
+ if (_aspects.matrix == C2Color::MATRIX_UNSPECIFIED) {
+ // if not specified, deduce matrix from primaries
+ if (_aspects.primaries == C2Color::PRIMARIES_UNSPECIFIED) {
+ // if those are also not specified, deduce primaries first from transfer, then from
+ // width and height
+ if (_aspects.transfer == C2Color::TRANSFER_ST2084
+ || _aspects.transfer == C2Color::TRANSFER_HLG) {
+ _aspects.primaries = C2Color::PRIMARIES_BT2020;
+ } else if (width >= 3840 || height >= 3840 || width * (int64_t)height >= 3840 * 1634) {
+ // TODO: stagefright defaults to BT.2020 for UHD, but perhaps we should default to
+ // BT.709 for non-HDR 10-bit UHD content
+ // (see media/libstagefright/foundation/ColorUtils.cpp)
+ _aspects.primaries = C2Color::PRIMARIES_BT2020;
+ } else if ((width <= 720 && height <= 576)
+ || (height <= 720 && width <= 576)) {
+ // note: it does not actually matter whether to use 525 or 625 here as the
+ // conversion is the same
+ _aspects.primaries = C2Color::PRIMARIES_BT601_625;
+ } else {
+ _aspects.primaries = C2Color::PRIMARIES_BT709;
+ }
+ }
+
+ switch (_aspects.primaries) {
+ case C2Color::PRIMARIES_BT601_525:
+ case C2Color::PRIMARIES_BT601_625:
+ _aspects.matrix = C2Color::MATRIX_BT601;
+ break;
+
+ case C2Color::PRIMARIES_BT709:
+ _aspects.matrix = C2Color::MATRIX_BT709;
+ break;
+
+ case C2Color::PRIMARIES_BT2020:
+ default:
+ _aspects.matrix = C2Color::MATRIX_BT2020;
+ }
+ }
+
+ return _aspects;
+}
+
+// matrix conversion coefficients
+// (see media/libstagefright/colorconverter/ColorConverter.cpp for more details)
+struct Coeffs {
+ int32_t _y, _r_v, _g_u, _g_v, _b_u, _c16;
+};
+
+static const struct Coeffs GetCoeffsForAspects(const C2ColorAspectsStruct &aspects) {
+ bool isFullRange = aspects.range == C2Color::RANGE_FULL;
+
+ switch (aspects.matrix) {
+ case C2Color::MATRIX_BT601:
+ /**
+ * BT.601: K_R = 0.299; K_B = 0.114
+ */
+ if (isFullRange) {
+ return Coeffs { 1024, 1436, 352, 731, 1815, 0 };
+ } else {
+ return Coeffs { 1196, 1639, 402, 835, 2072, 64 };
+ }
+ break;
+
+ case C2Color::MATRIX_BT709:
+ /**
+ * BT.709: K_R = 0.2126; K_B = 0.0722
+ */
+ if (isFullRange) {
+ return Coeffs { 1024, 1613, 192, 479, 1900, 0 };
+ } else {
+ return Coeffs { 1196, 1841, 219, 547, 2169, 64 };
+ }
+ break;
+
+ case C2Color::MATRIX_BT2020:
+ default:
+ /**
+ * BT.2020: K_R = 0.2627; K_B = 0.0593
+ */
+ if (isFullRange) {
+ return Coeffs { 1024, 1510, 169, 585, 1927, 0 };
+ } else {
+ return Coeffs { 1196, 1724, 192, 668, 2200, 64 };
+ }
+ }
+}
+
+}
+
+#define CLIP3(min, v, max) (((v) < (min)) ? (min) : (((max) > (v)) ? (v) : (max)))
+void convertYUV420Planar16ToRGBA1010102(
+ uint32_t *dst, const uint16_t *srcY, const uint16_t *srcU,
+ const uint16_t *srcV, size_t srcYStride, size_t srcUStride,
+ size_t srcVStride, size_t dstStride, size_t width,
+ size_t height,
+ std::shared_ptr<const C2ColorAspectsStruct> aspects) {
+
+ C2ColorAspectsStruct _aspects = FillMissingColorAspects(aspects, width, height);
+
+ struct Coeffs coeffs = GetCoeffsForAspects(_aspects);
+
+ int32_t _y = coeffs._y;
+ int32_t _b_u = coeffs._b_u;
+ int32_t _neg_g_u = -coeffs._g_u;
+ int32_t _neg_g_v = -coeffs._g_v;
+ int32_t _r_v = coeffs._r_v;
+ int32_t _c16 = coeffs._c16;
+
+ // Converting two lines at a time, slightly faster
+ for (size_t y = 0; y < height; y += 2) {
+ uint32_t *dstTop = (uint32_t *)dst;
+ uint32_t *dstBot = (uint32_t *)(dst + dstStride);
+ uint16_t *ySrcTop = (uint16_t *)srcY;
+ uint16_t *ySrcBot = (uint16_t *)(srcY + srcYStride);
+ uint16_t *uSrc = (uint16_t *)srcU;
+ uint16_t *vSrc = (uint16_t *)srcV;
+
+ for (size_t x = 0; x < width; x += 2) {
+ int32_t u, v, y00, y01, y10, y11;
+ u = *uSrc - 512;
+ uSrc += 1;
+ v = *vSrc - 512;
+ vSrc += 1;
+
+ y00 = *ySrcTop - _c16;
+ ySrcTop += 1;
+ y01 = *ySrcTop - _c16;
+ ySrcTop += 1;
+ y10 = *ySrcBot - _c16;
+ ySrcBot += 1;
+ y11 = *ySrcBot - _c16;
+ ySrcBot += 1;
+
+ int32_t u_b = u * _b_u;
+ int32_t u_g = u * _neg_g_u;
+ int32_t v_g = v * _neg_g_v;
+ int32_t v_r = v * _r_v;
+
+ int32_t yMult, b, g, r;
+ yMult = y00 * _y + 512;
+ b = (yMult + u_b) / 1024;
+ g = (yMult + v_g + u_g) / 1024;
+ r = (yMult + v_r) / 1024;
+ b = CLIP3(0, b, 1023);
+ g = CLIP3(0, g, 1023);
+ r = CLIP3(0, r, 1023);
+ *dstTop++ = 3 << 30 | (b << 20) | (g << 10) | r;
+
+ yMult = y01 * _y + 512;
+ b = (yMult + u_b) / 1024;
+ g = (yMult + v_g + u_g) / 1024;
+ r = (yMult + v_r) / 1024;
+ b = CLIP3(0, b, 1023);
+ g = CLIP3(0, g, 1023);
+ r = CLIP3(0, r, 1023);
+ *dstTop++ = 3 << 30 | (b << 20) | (g << 10) | r;
+
+ yMult = y10 * _y + 512;
+ b = (yMult + u_b) / 1024;
+ g = (yMult + v_g + u_g) / 1024;
+ r = (yMult + v_r) / 1024;
+ b = CLIP3(0, b, 1023);
+ g = CLIP3(0, g, 1023);
+ r = CLIP3(0, r, 1023);
+ *dstBot++ = 3 << 30 | (b << 20) | (g << 10) | r;
+
+ yMult = y11 * _y + 512;
+ b = (yMult + u_b) / 1024;
+ g = (yMult + v_g + u_g) / 1024;
+ r = (yMult + v_r) / 1024;
+ b = CLIP3(0, b, 1023);
+ g = CLIP3(0, g, 1023);
+ r = CLIP3(0, r, 1023);
+ *dstBot++ = 3 << 30 | (b << 20) | (g << 10) | r;
+ }
+
+ srcY += srcYStride * 2;
+ srcU += srcUStride;
+ srcV += srcVStride;
+ dst += dstStride * 2;
+ }
+}
+
+void convertYUV420Planar16ToY410OrRGBA1010102(
+ uint32_t *dst, const uint16_t *srcY,
+ const uint16_t *srcU, const uint16_t *srcV,
+ size_t srcYStride, size_t srcUStride,
+ size_t srcVStride, size_t dstStride, size_t width, size_t height,
+ std::shared_ptr<const C2ColorAspectsStruct> aspects) {
+ if (isAtLeastT()) {
+ convertYUV420Planar16ToRGBA1010102(dst, srcY, srcU, srcV, srcYStride, srcUStride,
+ srcVStride, dstStride, width, height, aspects);
+ } else {
+ convertYUV420Planar16ToY410(dst, srcY, srcU, srcV, srcYStride, srcUStride, srcVStride,
+ dstStride, width, height);
+ }
+}
+
void convertYUV420Planar16ToYV12(uint8_t *dstY, uint8_t *dstU, uint8_t *dstV, const uint16_t *srcY,
const uint16_t *srcU, const uint16_t *srcV, size_t srcYStride,
size_t srcUStride, size_t srcVStride, size_t dstYStride,
@@ -767,35 +979,29 @@
// Save supported hal pixel formats for bit depth of 10, the first time this is called
if (!mBitDepth10HalPixelFormats.size()) {
std::vector<int> halPixelFormats;
- // TODO(b/178229371) Enable HAL_PIXEL_FORMAT_YCBCR_P010 once framework supports it
- // halPixelFormats.push_back(HAL_PIXEL_FORMAT_YCBCR_P010);
+ halPixelFormats.push_back(HAL_PIXEL_FORMAT_YCBCR_P010);
// since allowRGBA1010102 can change in each call, but mBitDepth10HalPixelFormats
// is populated only once, allowRGBA1010102 is not considered at this stage.
halPixelFormats.push_back(HAL_PIXEL_FORMAT_RGBA_1010102);
for (int halPixelFormat : halPixelFormats) {
- std::shared_ptr<C2GraphicBlock> block;
-
- uint32_t gpuConsumerFlags = halPixelFormat == HAL_PIXEL_FORMAT_RGBA_1010102
- ? C2AndroidMemoryUsage::HW_TEXTURE_READ
- : 0;
- C2MemoryUsage usage = {C2MemoryUsage::CPU_READ | gpuConsumerFlags,
- C2MemoryUsage::CPU_WRITE};
- // TODO(b/214411172) Use AHardwareBuffer_isSupported once it supports P010
- c2_status_t status =
- mOutputBlockPool->fetchGraphicBlock(320, 240, halPixelFormat, usage, &block);
- if (status == C2_OK) {
+ if (isHalPixelFormatSupported((AHardwareBuffer_Format)halPixelFormat)) {
mBitDepth10HalPixelFormats.push_back(halPixelFormat);
}
}
// Add YV12 in the end as a fall-back option
mBitDepth10HalPixelFormats.push_back(HAL_PIXEL_FORMAT_YV12);
}
- // When RGBA1010102 is not allowed and if the first supported hal pixel is format is
- // HAL_PIXEL_FORMAT_RGBA_1010102, then return HAL_PIXEL_FORMAT_YV12
- if (!allowRGBA1010102 && mBitDepth10HalPixelFormats[0] == HAL_PIXEL_FORMAT_RGBA_1010102) {
- return HAL_PIXEL_FORMAT_YV12;
+ // From Android T onwards, HAL_PIXEL_FORMAT_RGBA_1010102 corresponds to true
+ // RGBA 1010102 format unlike earlier versions where it was used to represent
+ // YUVA 1010102 data
+ if (!isAtLeastT()) {
+ // When RGBA1010102 is not allowed and the first supported hal pixel format is
+ // HAL_PIXEL_FORMAT_RGBA_1010102, then return HAL_PIXEL_FORMAT_YV12
+ if (!allowRGBA1010102 && mBitDepth10HalPixelFormats[0] == HAL_PIXEL_FORMAT_RGBA_1010102) {
+ return HAL_PIXEL_FORMAT_YV12;
+ }
}
// Return the first entry from supported formats
return mBitDepth10HalPixelFormats[0];
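
Note (illustrative, not part of the patch): the Coeffs tables above encode the standard YCbCr-to-RGB matrices in fixed point scaled by 1024, and convertYUV420Planar16ToRGBA1010102 applies them per pixel before packing four 10-bit channels plus a 2-bit alpha. A minimal single-pixel sketch of the same arithmetic for BT.709 limited range (coefficients {1196, 1841, 219, 547, 2169, 64}); yuv10ToRgba1010102 is a hypothetical helper name:

    #include <cstdint>

    // One 10-bit YUV sample -> one RGBA1010102 word, mirroring the loop body above.
    static uint32_t yuv10ToRgba1010102(int32_t y, int32_t u, int32_t v) {
        y -= 64;             // _c16: limited-range luma offset
        u -= 512; v -= 512;  // chroma is centered at 512 for 10-bit samples
        int32_t yMult = y * 1196 + 512;  // +512 rounds the /1024 divisions below
        int32_t r = (yMult + v * 1841) / 1024;
        int32_t g = (yMult - u * 219 - v * 547) / 1024;
        int32_t b = (yMult + u * 2169) / 1024;
        auto clip = [](int32_t x) { return x < 0 ? 0 : (x > 1023 ? 1023 : x); };
        return 3u << 30 | (uint32_t)clip(b) << 20 | (uint32_t)clip(g) << 10 | (uint32_t)clip(r);
    }
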
diff --git a/media/codec2/components/base/SimpleC2Interface.cpp b/media/codec2/components/base/SimpleC2Interface.cpp
index 29740d1..993e602 100644
--- a/media/codec2/components/base/SimpleC2Interface.cpp
+++ b/media/codec2/components/base/SimpleC2Interface.cpp
@@ -28,6 +28,14 @@
/* SimpleInterface */
+static C2R SubscribedParamIndicesSetter(
+ bool mayBlock, C2InterfaceHelper::C2P<C2SubscribedParamIndicesTuning> &me) {
+ (void)mayBlock;
+ (void)me;
+
+ return C2R::Ok();
+}
+
SimpleInterface<void>::BaseParams::BaseParams(
const std::shared_ptr<C2ReflectorHelper> &reflector,
C2String name,
@@ -186,7 +194,7 @@
.withDefault(C2SubscribedParamIndicesTuning::AllocShared(0u))
.withFields({ C2F(mSubscribedParamIndices, m.values[0]).any(),
C2F(mSubscribedParamIndices, m.values).any() })
- .withSetter(Setter<C2SubscribedParamIndicesTuning>::NonStrictValuesWithNoDeps)
+ .withSetter(SubscribedParamIndicesSetter)
.build());
/* TODO
diff --git a/media/codec2/components/base/include/SimpleC2Component.h b/media/codec2/components/base/include/SimpleC2Component.h
index d244f45..7600c5b 100644
--- a/media/codec2/components/base/include/SimpleC2Component.h
+++ b/media/codec2/components/base/include/SimpleC2Component.h
@@ -26,6 +26,8 @@
#include <media/stagefright/foundation/ALooper.h>
#include <media/stagefright/foundation/Mutexed.h>
+struct C2ColorAspectsStruct;
+
namespace android {
void convertYUV420Planar8ToYV12(uint8_t *dstY, uint8_t *dstU, uint8_t *dstV, const uint8_t *srcY,
@@ -33,19 +35,26 @@
size_t srcUStride, size_t srcVStride, size_t dstYStride,
size_t dstUVStride, uint32_t width, uint32_t height,
bool isMonochrome = false);
-void convertYUV420Planar16ToY410(uint32_t *dst, const uint16_t *srcY, const uint16_t *srcU,
- const uint16_t *srcV, size_t srcYStride, size_t srcUStride,
- size_t srcVStride, size_t dstStride, size_t width, size_t height);
+
+void convertYUV420Planar16ToY410OrRGBA1010102(
+ uint32_t *dst, const uint16_t *srcY,
+ const uint16_t *srcU, const uint16_t *srcV,
+ size_t srcYStride, size_t srcUStride,
+ size_t srcVStride, size_t dstStride, size_t width, size_t height,
+ std::shared_ptr<const C2ColorAspectsStruct> aspects = nullptr);
+
void convertYUV420Planar16ToYV12(uint8_t *dstY, uint8_t *dstU, uint8_t *dstV, const uint16_t *srcY,
const uint16_t *srcU, const uint16_t *srcV, size_t srcYStride,
size_t srcUStride, size_t srcVStride, size_t dstYStride,
size_t dstUVStride, size_t width, size_t height,
bool isMonochrome = false);
+
void convertYUV420Planar16ToP010(uint16_t *dstY, uint16_t *dstUV, const uint16_t *srcY,
const uint16_t *srcU, const uint16_t *srcV, size_t srcYStride,
size_t srcUStride, size_t srcVStride, size_t dstYStride,
size_t dstUVStride, size_t width, size_t height,
bool isMonochrome = false);
+
class SimpleC2Component
: public C2Component, public std::enable_shared_from_this<SimpleC2Component> {
public:
diff --git a/media/codec2/components/base/include/SimpleC2Interface.h b/media/codec2/components/base/include/SimpleC2Interface.h
index 2051d3d..916f392 100644
--- a/media/codec2/components/base/include/SimpleC2Interface.h
+++ b/media/codec2/components/base/include/SimpleC2Interface.h
@@ -209,6 +209,7 @@
return me.F(me.v.value).validatePossible(me.v.value);
}
+ // TODO(b/230146771): fix crash
static C2R NonStrictValuesWithNoDeps(
bool mayBlock, C2InterfaceHelper::C2P<type> &me) {
(void)mayBlock;
diff --git a/media/codec2/components/gav1/C2SoftGav1Dec.cpp b/media/codec2/components/gav1/C2SoftGav1Dec.cpp
index 0f59d76..ee37b06 100644
--- a/media/codec2/components/gav1/C2SoftGav1Dec.cpp
+++ b/media/codec2/components/gav1/C2SoftGav1Dec.cpp
@@ -21,6 +21,7 @@
#include <C2Debug.h>
#include <C2PlatformSupport.h>
#include <Codec2BufferUtils.h>
+#include <Codec2CommonUtils.h>
#include <Codec2Mapper.h>
#include <SimpleC2Interface.h>
#include <log/log.h>
@@ -189,11 +190,23 @@
.withSetter(ColorAspectsSetter, mDefaultColorAspects, mCodedColorAspects)
.build());
+ std::vector<uint32_t> pixelFormats = {HAL_PIXEL_FORMAT_YCBCR_420_888};
+ if (isHalPixelFormatSupported((AHardwareBuffer_Format)HAL_PIXEL_FORMAT_YCBCR_P010)) {
+ pixelFormats.push_back(HAL_PIXEL_FORMAT_YCBCR_P010);
+ }
+ // If the surface color format isn't added to the supported formats, there is no way
+ // to know when the color format is configured to surface. This is necessary to be
+ // able to choose a 10-bit format while decoding 10-bit clips in surface mode.
+ pixelFormats.push_back(HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED);
+
// TODO: support more formats?
- addParameter(DefineParam(mPixelFormat, C2_PARAMKEY_PIXEL_FORMAT)
- .withConstValue(new C2StreamPixelFormatInfo::output(
- 0u, HAL_PIXEL_FORMAT_YCBCR_420_888))
- .build());
+ addParameter(
+ DefineParam(mPixelFormat, C2_PARAMKEY_PIXEL_FORMAT)
+ .withDefault(new C2StreamPixelFormatInfo::output(
+ 0u, HAL_PIXEL_FORMAT_YCBCR_420_888))
+ .withFields({C2F(mPixelFormat, value).oneOf(pixelFormats)})
+ .withSetter((Setter<decltype(*mPixelFormat)>::StrictValueWithNoDeps))
+ .build());
}
static C2R SizeSetter(bool mayBlock,
@@ -314,6 +327,9 @@
return C2R::Ok();
}
+ // unsafe getters
+ std::shared_ptr<C2StreamPixelFormatInfo::output> getPixelFormat_l() const { return mPixelFormat; }
+
private:
std::shared_ptr<C2StreamProfileLevelInfo::input> mProfileLevel;
std::shared_ptr<C2StreamPictureSizeInfo::output> mSize;
@@ -402,6 +418,11 @@
bool C2SoftGav1Dec::initDecoder() {
mSignalledError = false;
mSignalledOutputEos = false;
+ mHalPixelFormat = HAL_PIXEL_FORMAT_YV12;
+ {
+ IntfImpl::Lock lock = mIntf->lock();
+ mPixelFormatInfo = mIntf->getPixelFormat_l();
+ }
mCodecCtx.reset(new libgav1::Decoder());
if (mCodecCtx == nullptr) {
@@ -627,10 +648,10 @@
std::shared_ptr<C2GraphicBlock> block;
uint32_t format = HAL_PIXEL_FORMAT_YV12;
- if (buffer->bitdepth == 10) {
+ std::shared_ptr<C2StreamColorAspectsInfo::output> codedColorAspects;
+ if (buffer->bitdepth == 10 && mPixelFormatInfo->value != HAL_PIXEL_FORMAT_YCBCR_420_888) {
IntfImpl::Lock lock = mIntf->lock();
- std::shared_ptr<C2StreamColorAspectsInfo::output> codedColorAspects =
- mIntf->getColorAspects_l();
+ codedColorAspects = mIntf->getColorAspects_l();
bool allowRGBA1010102 = false;
if (codedColorAspects->primaries == C2Color::PRIMARIES_BT2020 &&
codedColorAspects->matrix == C2Color::MATRIX_BT2020 &&
@@ -647,6 +668,24 @@
return false;
}
}
+
+ if (mHalPixelFormat != format) {
+ C2StreamPixelFormatInfo::output pixelFormat(0u, format);
+ std::vector<std::unique_ptr<C2SettingResult>> failures;
+ c2_status_t err = mIntf->config({&pixelFormat }, C2_MAY_BLOCK, &failures);
+ if (err == C2_OK) {
+ work->worklets.front()->output.configUpdate.push_back(
+ C2Param::Copy(pixelFormat));
+ } else {
+ ALOGE("Config update pixelFormat failed");
+ mSignalledError = true;
+ work->workletsProcessed = 1u;
+ work->result = C2_CORRUPTED;
+ return UNKNOWN_ERROR;
+ }
+ mHalPixelFormat = format;
+ }
+
C2MemoryUsage usage = {C2MemoryUsage::CPU_READ, C2MemoryUsage::CPU_WRITE};
c2_status_t err = pool->fetchGraphicBlock(align(mWidth, 16), mHeight, format,
@@ -686,9 +725,11 @@
const uint16_t *srcV = (const uint16_t *)buffer->plane[2];
if (format == HAL_PIXEL_FORMAT_RGBA_1010102) {
- convertYUV420Planar16ToY410((uint32_t *)dstY, srcY, srcU, srcV, srcYStride / 2,
- srcUStride / 2, srcVStride / 2, dstYStride / sizeof(uint32_t),
- mWidth, mHeight);
+ convertYUV420Planar16ToY410OrRGBA1010102(
+ (uint32_t *)dstY, srcY, srcU, srcV, srcYStride / 2,
+ srcUStride / 2, srcVStride / 2,
+ dstYStride / sizeof(uint32_t), mWidth, mHeight,
+ std::static_pointer_cast<const C2ColorAspectsStruct>(codedColorAspects));
} else if (format == HAL_PIXEL_FORMAT_YCBCR_P010) {
convertYUV420Planar16ToP010((uint16_t *)dstY, (uint16_t *)dstU, srcY, srcU, srcV,
srcYStride / 2, srcUStride / 2, srcVStride / 2, dstYStride / 2,
diff --git a/media/codec2/components/gav1/C2SoftGav1Dec.h b/media/codec2/components/gav1/C2SoftGav1Dec.h
index 134fa0d..e982c5b 100644
--- a/media/codec2/components/gav1/C2SoftGav1Dec.h
+++ b/media/codec2/components/gav1/C2SoftGav1Dec.h
@@ -54,6 +54,11 @@
std::shared_ptr<IntfImpl> mIntf;
std::unique_ptr<libgav1::Decoder> mCodecCtx;
+ // configurations used by component in process
+ // (TODO: keep this in intf but make them internal only)
+ std::shared_ptr<C2StreamPixelFormatInfo::output> mPixelFormatInfo;
+
+ uint32_t mHalPixelFormat;
uint32_t mWidth;
uint32_t mHeight;
bool mSignalledOutputEos;
diff --git a/media/codec2/components/vpx/C2SoftVpxDec.cpp b/media/codec2/components/vpx/C2SoftVpxDec.cpp
index 5fc89be..8087396 100644
--- a/media/codec2/components/vpx/C2SoftVpxDec.cpp
+++ b/media/codec2/components/vpx/C2SoftVpxDec.cpp
@@ -19,13 +19,13 @@
#include <log/log.h>
#include <algorithm>
-
#include <media/stagefright/foundation/AUtils.h>
#include <media/stagefright/foundation/MediaDefs.h>
#include <C2Debug.h>
#include <C2PlatformSupport.h>
#include <Codec2BufferUtils.h>
+#include <Codec2CommonUtils.h>
#include <SimpleC2Interface.h>
#include "C2SoftVpxDec.h"
@@ -218,11 +218,24 @@
.build());
// TODO: support more formats?
+ std::vector<uint32_t> pixelFormats = {HAL_PIXEL_FORMAT_YCBCR_420_888};
+#ifdef VP9
+ if (isHalPixelFormatSupported((AHardwareBuffer_Format)HAL_PIXEL_FORMAT_YCBCR_P010)) {
+ pixelFormats.push_back(HAL_PIXEL_FORMAT_YCBCR_P010);
+ }
+ // If the surface color format isn't added to the supported formats, there is no way
+ // to know when the color format is configured to surface. This is necessary to be
+ // able to choose a 10-bit format while decoding 10-bit clips in surface mode.
+ pixelFormats.push_back(HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED);
+#endif
addParameter(
DefineParam(mPixelFormat, C2_PARAMKEY_PIXEL_FORMAT)
- .withConstValue(new C2StreamPixelFormatInfo::output(
- 0u, HAL_PIXEL_FORMAT_YCBCR_420_888))
+ .withDefault(new C2StreamPixelFormatInfo::output(
+ 0u, HAL_PIXEL_FORMAT_YCBCR_420_888))
+ .withFields({C2F(mPixelFormat, value).oneOf(pixelFormats)})
+ .withSetter((Setter<decltype(*mPixelFormat)>::StrictValueWithNoDeps))
.build());
+
}
static C2R SizeSetter(bool mayBlock, const C2P<C2StreamPictureSizeInfo::output> &oldMe,
@@ -298,6 +311,11 @@
return C2R::Ok();
}
+ // unsafe getters
+ std::shared_ptr<C2StreamPixelFormatInfo::output> getPixelFormat_l() const {
+ return mPixelFormat;
+ }
+
private:
std::shared_ptr<C2StreamProfileLevelInfo::input> mProfileLevel;
std::shared_ptr<C2StreamPictureSizeInfo::output> mSize;
@@ -424,6 +442,11 @@
#else
mMode = MODE_VP8;
#endif
+ mHalPixelFormat = HAL_PIXEL_FORMAT_YV12;
+ {
+ IntfImpl::Lock lock = mIntf->lock();
+ mPixelFormatInfo = mIntf->getPixelFormat_l();
+ }
mWidth = 320;
mHeight = 240;
@@ -679,9 +702,11 @@
std::shared_ptr<C2GraphicBlock> block;
uint32_t format = HAL_PIXEL_FORMAT_YV12;
- if (img->fmt == VPX_IMG_FMT_I42016) {
+ std::shared_ptr<C2StreamColorAspectsTuning::output> defaultColorAspects;
+ if (img->fmt == VPX_IMG_FMT_I42016 &&
+ mPixelFormatInfo->value != HAL_PIXEL_FORMAT_YCBCR_420_888) {
IntfImpl::Lock lock = mIntf->lock();
- std::shared_ptr<C2StreamColorAspectsTuning::output> defaultColorAspects = mIntf->getDefaultColorAspects_l();
+ defaultColorAspects = mIntf->getDefaultColorAspects_l();
bool allowRGBA1010102 = false;
if (defaultColorAspects->primaries == C2Color::PRIMARIES_BT2020 &&
defaultColorAspects->matrix == C2Color::MATRIX_BT2020 &&
@@ -690,6 +715,24 @@
}
format = getHalPixelFormatForBitDepth10(allowRGBA1010102);
}
+
+ if (mHalPixelFormat != format) {
+ C2StreamPixelFormatInfo::output pixelFormat(0u, format);
+ std::vector<std::unique_ptr<C2SettingResult>> failures;
+ c2_status_t err = mIntf->config({&pixelFormat }, C2_MAY_BLOCK, &failures);
+ if (err == C2_OK) {
+ work->worklets.front()->output.configUpdate.push_back(
+ C2Param::Copy(pixelFormat));
+ } else {
+ ALOGE("Config update pixelFormat failed");
+ mSignalledError = true;
+ work->workletsProcessed = 1u;
+ work->result = C2_CORRUPTED;
+ return UNKNOWN_ERROR;
+ }
+ mHalPixelFormat = format;
+ }
+
C2MemoryUsage usage = { C2MemoryUsage::CPU_READ, C2MemoryUsage::CPU_WRITE };
c2_status_t err = pool->fetchGraphicBlock(align(mWidth, 16), mHeight, format, usage, &block);
if (err != C2_OK) {
@@ -733,11 +776,14 @@
queue->entries.push_back(
[dstY, srcY, srcU, srcV,
srcYStride, srcUStride, srcVStride, dstYStride,
- width = mWidth, height = std::min(mHeight - i, kHeight)] {
- convertYUV420Planar16ToY410(
+ width = mWidth, height = std::min(mHeight - i, kHeight),
+ defaultColorAspects] {
+ convertYUV420Planar16ToY410OrRGBA1010102(
(uint32_t *)dstY, srcY, srcU, srcV, srcYStride / 2,
srcUStride / 2, srcVStride / 2, dstYStride / sizeof(uint32_t),
- width, height);
+ width, height,
+ std::static_pointer_cast<const C2ColorAspectsStruct>(
+ defaultColorAspects));
});
srcY += srcYStride / 2 * kHeight;
srcU += srcUStride / 2 * (kHeight / 2);
diff --git a/media/codec2/components/vpx/C2SoftVpxDec.h b/media/codec2/components/vpx/C2SoftVpxDec.h
index 2065165..e9d6dc9 100644
--- a/media/codec2/components/vpx/C2SoftVpxDec.h
+++ b/media/codec2/components/vpx/C2SoftVpxDec.h
@@ -63,10 +63,15 @@
std::shared_ptr<Mutexed<ConversionQueue>> mQueue;
};
+ // configurations used by component in process
+ // (TODO: keep this in intf but make them internal only)
+ std::shared_ptr<C2StreamPixelFormatInfo::output> mPixelFormatInfo;
+
std::shared_ptr<IntfImpl> mIntf;
vpx_codec_ctx_t *mCodecCtx;
bool mFrameParallelMode; // Frame parallel is only supported by VP9 decoder.
+ uint32_t mHalPixelFormat;
uint32_t mWidth;
uint32_t mHeight;
bool mSignalledOutputEos;
diff --git a/media/codec2/core/include/C2Config.h b/media/codec2/core/include/C2Config.h
index 70e742c..fca8f4f 100644
--- a/media/codec2/core/include/C2Config.h
+++ b/media/codec2/core/include/C2Config.h
@@ -60,6 +60,7 @@
enum drc_effect_type_t : int32_t; ///< DRC effect type
enum drc_album_mode_t : int32_t; ///< DRC album mode
enum hdr_dynamic_metadata_type_t : uint32_t; ///< HDR dynamic metadata type
+ enum hdr_format_t : uint32_t; ///< HDR format
enum intra_refresh_mode_t : uint32_t; ///< intra refresh modes
enum level_t : uint32_t; ///< coding level
enum ordinal_key_t : uint32_t; ///< work ordering keys
@@ -192,10 +193,9 @@
kParamIndexPictureType,
// deprecated
kParamIndexHdr10PlusMetadata,
-
kParamIndexPictureQuantization,
-
kParamIndexHdrDynamicMetadata,
+ kParamIndexHdrFormat,
/* ------------------------------------ video components ------------------------------------ */
@@ -1664,6 +1664,34 @@
constexpr char C2_PARAMKEY_INPUT_HDR_DYNAMIC_INFO[] = "input.hdr-dynamic-info";
constexpr char C2_PARAMKEY_OUTPUT_HDR_DYNAMIC_INFO[] = "output.hdr-dynamic-info";
+/**
+ * HDR Format
+ */
+C2ENUM(C2Config::hdr_format_t, uint32_t,
+ UNKNOWN, ///< HDR format not known (default)
+ SDR, ///< not HDR (SDR)
+ HLG, ///< HLG
+ HDR10, ///< HDR10
+ HDR10_PLUS, ///< HDR10+
+);
+
+/**
+ * HDR Format Info
+ *
+ * This information may be present during configuration to allow encoders to
+ * prepare encoding certain HDR formats. When this information is not present
+ * before start, encoders should determine the HDR format based on the available
+ * HDR metadata on the first input frame.
+ *
+ * While this information is optional, it is not a hint. When present, encoders
+ * that do not support dynamic reconfiguration do not need to switch to the HDR
+ * format based on the metadata on the first input frame.
+ */
+typedef C2StreamParam<C2Info, C2SimpleValueStruct<C2EasyEnum<C2Config::hdr_format_t>>,
+ kParamIndexHdrFormat>
+ C2StreamHdrFormatInfo;
+constexpr char C2_PARAMKEY_HDR_FORMAT[] = "coded.hdr-format";
+
/* ------------------------------------ block-based coding ----------------------------------- */
/**
diff --git a/media/codec2/fuzzer/Android.bp b/media/codec2/fuzzer/Android.bp
index 3adc212..147a52e 100644
--- a/media/codec2/fuzzer/Android.bp
+++ b/media/codec2/fuzzer/Android.bp
@@ -38,6 +38,12 @@
"-Wall",
"-Werror",
],
+
+ fuzz_config: {
+ cc: [
+ "wonsik@google.com",
+ ],
+ },
}
cc_fuzz {
diff --git a/media/codec2/hidl/1.0/utils/types.cpp b/media/codec2/hidl/1.0/utils/types.cpp
index 72f7c43..5c24bd7 100644
--- a/media/codec2/hidl/1.0/utils/types.cpp
+++ b/media/codec2/hidl/1.0/utils/types.cpp
@@ -26,6 +26,7 @@
#include <C2BlockInternal.h>
#include <C2Buffer.h>
#include <C2Component.h>
+#include <C2FenceFactory.h>
#include <C2Param.h>
#include <C2ParamInternal.h>
#include <C2PlatformSupport.h>
@@ -759,17 +760,14 @@
// Note: File descriptors are not duplicated. The original file descriptor must
// not be closed before the transaction is complete.
bool objcpy(hidl_handle* d, const C2Fence& s) {
- (void)s; // TODO: implement s.fd()
- int fenceFd = -1;
d->setTo(nullptr);
- if (fenceFd >= 0) {
- native_handle_t *handle = native_handle_create(1, 0);
- if (!handle) {
- LOG(ERROR) << "Failed to create a native handle.";
- return false;
- }
- handle->data[0] = fenceFd;
+ native_handle_t* handle = _C2FenceFactory::CreateNativeHandle(s);
+ if (handle) {
d->setTo(handle, true /* owns */);
+// } else if (!s.ready()) {
+// // TODO: we should wait for unmarshallable fences but this may not be
+// // the best place for it. We can safely ignore here as at this time
+// // all fences used here are marshallable.
}
return true;
}
@@ -1184,9 +1182,8 @@
// Note: File descriptors are not duplicated. The original file descriptor must
// not be closed before the transaction is complete.
bool objcpy(C2Fence* d, const hidl_handle& s) {
- // TODO: Implement.
- (void)s;
- *d = C2Fence();
+ const native_handle_t* handle = s.getNativeHandle();
+ *d = _C2FenceFactory::CreateFromNativeHandle(handle);
return true;
}
diff --git a/media/codec2/hidl/1.0/vts/functional/audio/VtsHalMediaC2V1_0TargetAudioEncTest.cpp b/media/codec2/hidl/1.0/vts/functional/audio/VtsHalMediaC2V1_0TargetAudioEncTest.cpp
index bd7ec0d..327717b 100644
--- a/media/codec2/hidl/1.0/vts/functional/audio/VtsHalMediaC2V1_0TargetAudioEncTest.cpp
+++ b/media/codec2/hidl/1.0/vts/functional/audio/VtsHalMediaC2V1_0TargetAudioEncTest.cpp
@@ -372,9 +372,8 @@
ULock l(queueLock);
flushedIndices.emplace_back(frameID);
}
- char* data = (char*)malloc(bytesCount);
- ASSERT_NE(data, nullptr);
- eleStream.read(data, bytesCount);
+ std::vector<char> eleData(bytesCount);
+ eleStream.read(eleData.data(), bytesCount);
// if we have reached at the end of input stream, signal eos
if (eleStream.gcount() < bytesCount) {
bytesCount = eleStream.gcount();
@@ -396,12 +395,11 @@
ASSERT_EQ(0u, view.offset());
ASSERT_EQ((size_t)bytesCount, view.size());
- memcpy(view.base(), data, bytesCount);
+ memcpy(view.base(), eleData.data(), bytesCount);
work->input.buffers.clear();
work->input.buffers.emplace_back(new LinearBuffer(block));
work->worklets.clear();
work->worklets.emplace_back(new C2Worklet);
- free(data);
std::list<std::unique_ptr<C2Work>> items;
items.push_back(std::move(work));
diff --git a/media/codec2/hidl/1.0/vts/functional/video/VtsHalMediaC2V1_0TargetVideoDecTest.cpp b/media/codec2/hidl/1.0/vts/functional/video/VtsHalMediaC2V1_0TargetVideoDecTest.cpp
index 67873fa..117d9ca 100644
--- a/media/codec2/hidl/1.0/vts/functional/video/VtsHalMediaC2V1_0TargetVideoDecTest.cpp
+++ b/media/codec2/hidl/1.0/vts/functional/video/VtsHalMediaC2V1_0TargetVideoDecTest.cpp
@@ -214,10 +214,8 @@
calc_md5_cksum(uPlane, uvStride, cropWidth / 2, cropHeight / 2, au1_u_chksum);
calc_md5_cksum(vPlane, uvStride, cropWidth / 2, cropHeight / 2, au1_v_chksum);
} else if (bitDepth == 8 && layoutType == C2PlanarLayout::TYPE_YUV && colInc == 2) {
- uint8_t* cbPlane = (uint8_t*)malloc(cropWidth * cropHeight / 4);
- uint8_t* crPlane = (uint8_t*)malloc(cropWidth * cropHeight / 4);
- ASSERT_NE(cbPlane, nullptr);
- ASSERT_NE(crPlane, nullptr);
+ std::vector<uint8_t> cbPlane(cropWidth * cropHeight / 4);
+ std::vector<uint8_t> crPlane(cropWidth * cropHeight / 4);
size_t count = 0;
for (size_t k = 0; k < (cropHeight / 2); k++) {
for (size_t l = 0; l < (cropWidth); l = l + 2) {
@@ -227,10 +225,10 @@
}
}
calc_md5_cksum(yPlane, yStride, cropWidth, cropHeight, au1_y_chksum);
- calc_md5_cksum(cbPlane, cropWidth / 2, cropWidth / 2, cropHeight / 2, au1_u_chksum);
- calc_md5_cksum(crPlane, cropWidth / 2, cropWidth / 2, cropHeight / 2, au1_v_chksum);
- free(cbPlane);
- free(crPlane);
+ calc_md5_cksum(cbPlane.data(), cropWidth / 2, cropWidth / 2, cropHeight / 2,
+ au1_u_chksum);
+ calc_md5_cksum(crPlane.data(), cropWidth / 2, cropWidth / 2, cropHeight / 2,
+ au1_v_chksum);
} else {
mMd5Enable = false;
ALOGV("Disabling MD5 chksm flag");
diff --git a/media/codec2/hidl/plugin/samples/SampleFilterPlugin.cpp b/media/codec2/hidl/plugin/samples/SampleFilterPlugin.cpp
index b942be7..c8997bb 100644
--- a/media/codec2/hidl/plugin/samples/SampleFilterPlugin.cpp
+++ b/media/codec2/hidl/plugin/samples/SampleFilterPlugin.cpp
@@ -417,6 +417,7 @@
}
std::unique_lock lock(mQueueMutex);
mQueue.splice(mQueue.end(), *items);
+ mQueueCondition.notify_all();
return C2_OK;
}
@@ -665,6 +666,7 @@
grallocHandle, GraphicBuffer::CLONE_HANDLE,
width, height, format, 1, usage, stride);
+ native_handle_delete(grallocHandle);
std::shared_ptr<C2GraphicBlock> dstBlock;
C2BlockPool::local_id_t poolId = mIntf->getPoolId();
std::shared_ptr<C2BlockPool> pool;
@@ -683,6 +685,7 @@
grallocHandle, GraphicBuffer::CLONE_HANDLE,
width, height, format, 1, usage, stride);
+ native_handle_delete(grallocHandle);
Rect sourceCrop(0, 0, width, height);
renderengine::DisplaySettings clientCompositionDisplay;
@@ -807,7 +810,8 @@
// affectedParams
{
C2StreamHdrStaticInfo::output::PARAM_TYPE,
- C2StreamHdr10PlusInfo::output::PARAM_TYPE,
+ C2StreamHdr10PlusInfo::output::PARAM_TYPE, // will be deprecated
+ C2StreamHdrDynamicMetadataInfo::output::PARAM_TYPE,
C2StreamColorAspectsInfo::output::PARAM_TYPE,
},
};
diff --git a/media/codec2/sfplugin/Android.bp b/media/codec2/sfplugin/Android.bp
index feeddb5..134bc53 100644
--- a/media/codec2/sfplugin/Android.bp
+++ b/media/codec2/sfplugin/Android.bp
@@ -37,6 +37,10 @@
"media_ndk_headers",
],
+ static_libs: [
+ "SurfaceFlingerProperties",
+ ],
+
shared_libs: [
"android.hardware.cas.native@1.0",
"android.hardware.drm@1.0",
@@ -61,6 +65,7 @@
"libstagefright_codecbase",
"libstagefright_foundation",
"libstagefright_omx",
+ "libstagefright_surface_utils",
"libstagefright_xmlparser",
"libui",
"libutils",
diff --git a/media/codec2/sfplugin/CCodec.cpp b/media/codec2/sfplugin/CCodec.cpp
index 2b9ec7d..39e5bd8 100644
--- a/media/codec2/sfplugin/CCodec.cpp
+++ b/media/codec2/sfplugin/CCodec.cpp
@@ -871,6 +871,11 @@
}
config->mTunneled = true;
}
+
+ int32_t pushBlankBuffersOnStop = 0;
+ if (msg->findInt32(KEY_PUSH_BLANK_BUFFERS_ON_STOP, &pushBlankBuffersOnStop)) {
+ config->mPushBlankBuffersOnStop = pushBlankBuffersOnStop == 1;
+ }
}
}
setSurface(surface);
@@ -1473,8 +1478,12 @@
// with more enc stat kinds
// Future extended encoding statistics for the level 2 should be added here
case VIDEO_ENCODING_STATISTICS_LEVEL_1:
- config->subscribeToConfigUpdate(comp,
- {kParamIndexAverageBlockQuantization, kParamIndexPictureType});
+ config->subscribeToConfigUpdate(
+ comp,
+ {
+ C2AndroidStreamAverageBlockQuantizationInfo::output::PARAM_TYPE,
+ C2StreamPictureTypeInfo::output::PARAM_TYPE,
+ });
break;
case VIDEO_ENCODING_STATISTICS_LEVEL_NONE:
break;
@@ -1803,9 +1812,16 @@
if (tryAndReportOnError(setRunning) != OK) {
return;
}
+
+ err2 = mChannel->requestInitialInputBuffers();
+
+ if (err2 != OK) {
+ ALOGE("Initial request for Input Buffers failed");
+ mCallback->onError(err2, ACTION_CODE_FATAL);
+ return;
+ }
mCallback->onStartCompleted();
- (void)mChannel->requestInitialInputBuffers();
}
void CCodec::initiateShutdown(bool keepComponentAllocated) {
@@ -1831,7 +1847,13 @@
}
state->set(STOPPING);
}
-
+ {
+ Mutexed<std::unique_ptr<Config>>::Locked configLocked(mConfig);
+ const std::unique_ptr<Config> &config = *configLocked;
+ if (config->mPushBlankBuffersOnStop) {
+ mChannel->pushBlankBufferToOutputSurface();
+ }
+ }
mChannel->reset();
(new AMessage(kWhatStop, this))->post();
}
@@ -1919,6 +1941,13 @@
config->mInputSurfaceDataspace = HAL_DATASPACE_UNKNOWN;
}
}
+ {
+ Mutexed<std::unique_ptr<Config>>::Locked configLocked(mConfig);
+ const std::unique_ptr<Config> &config = *configLocked;
+ if (config->mPushBlankBuffersOnStop) {
+ mChannel->pushBlankBufferToOutputSurface();
+ }
+ }
mChannel->reset();
// thiz holds strong ref to this while the thread is running.
@@ -2371,7 +2400,8 @@
C2StreamColorAspectsInfo::output::PARAM_TYPE,
C2StreamDataSpaceInfo::output::PARAM_TYPE,
C2StreamHdrStaticInfo::output::PARAM_TYPE,
- C2StreamHdr10PlusInfo::output::PARAM_TYPE,
+ C2StreamHdr10PlusInfo::output::PARAM_TYPE, // will be deprecated
+ C2StreamHdrDynamicMetadataInfo::output::PARAM_TYPE,
C2StreamPixelAspectRatioInfo::output::PARAM_TYPE,
C2StreamSurfaceScalingInfo::output::PARAM_TYPE
};
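
Note (illustrative, not part of the patch): with the changes above, CCodec reads KEY_PUSH_BLANK_BUFFERS_ON_STOP ("push-blank-buffers-on-shutdown", see the key removed from the "still to do" list in CCodecConfig.cpp below) at configure time and, if set, pushes a blank buffer to the output surface during stop/release. A hedged sketch of how a client-side configure format might request this; makeDecoderFormat and the chosen mime/size are hypothetical:

    #include <media/stagefright/foundation/AMessage.h>

    using android::sp;
    using android::AMessage;

    // Build a configure-time format for a surface-mode video decoder that wants
    // the output surface cleared to a blank frame when the codec stops.
    static sp<AMessage> makeDecoderFormat() {
        sp<AMessage> format = new AMessage;
        format->setString("mime", "video/avc");
        format->setInt32("width", 1280);
        format->setInt32("height", 720);
        format->setInt32("push-blank-buffers-on-shutdown", 1);  // KEY_PUSH_BLANK_BUFFERS_ON_STOP
        return format;
    }
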
diff --git a/media/codec2/sfplugin/CCodecBufferChannel.cpp b/media/codec2/sfplugin/CCodecBufferChannel.cpp
index 99aa593..c605128 100644
--- a/media/codec2/sfplugin/CCodecBufferChannel.cpp
+++ b/media/codec2/sfplugin/CCodecBufferChannel.cpp
@@ -15,8 +15,11 @@
*/
//#define LOG_NDEBUG 0
+#include <utils/Errors.h>
#define LOG_TAG "CCodecBufferChannel"
+#define ATRACE_TAG ATRACE_TAG_VIDEO
#include <utils/Log.h>
+#include <utils/Trace.h>
#include <algorithm>
#include <atomic>
@@ -44,9 +47,9 @@
#include <media/stagefright/foundation/AMessage.h>
#include <media/stagefright/foundation/AUtils.h>
#include <media/stagefright/foundation/hexdump.h>
-#include <media/stagefright/MediaCodec.h>
#include <media/stagefright/MediaCodecConstants.h>
#include <media/stagefright/SkipCutBuffer.h>
+#include <media/stagefright/SurfaceUtils.h>
#include <media/MediaCodecBuffer.h>
#include <mediadrm/ICrypto.h>
#include <system/window.h>
@@ -327,6 +330,8 @@
}
c2_status_t err = C2_OK;
if (!items.empty()) {
+ ScopedTrace trace(ATRACE_TAG, android::base::StringPrintf(
+ "CCodecBufferChannel::queue(%s@ts=%lld)", mName, (long long)timeUs).c_str());
{
Mutexed<PipelineWatcher>::Locked watcher(mPipelineWatcher);
PipelineWatcher::Clock::time_point now = PipelineWatcher::Clock::now();
@@ -875,6 +880,19 @@
return UNKNOWN_ERROR;
}
const C2ConstGraphicBlock &block = blocks.front();
+ C2Fence c2fence = block.fence();
+ sp<Fence> fence = Fence::NO_FENCE;
+ // TODO: it's not sufficient to just check isHW() and then construct android::fence from it.
+ // Once C2Fence::type() is added, check the exact C2Fence type
+ if (c2fence.isHW()) {
+ int fenceFd = c2fence.fd();
+ fence = sp<Fence>::make(fenceFd);
+ if (!fence) {
+ ALOGE("[%s] Failed to allocate a fence", mName);
+ close(fenceFd);
+ return NO_MEMORY;
+ }
+ }
// TODO: revisit this after C2Fence implementation.
android::IGraphicBufferProducer::QueueBufferInput qbi(
@@ -887,7 +905,7 @@
blocks.front().crop().bottom()),
videoScalingMode,
transform,
- Fence::NO_FENCE, 0);
+ fence, 0);
if (hdrStaticInfo || hdrDynamicInfo) {
HdrMetadata hdr;
if (hdrStaticInfo) {
@@ -924,6 +942,11 @@
hdr.validTypes |= HdrMetadata::CTA861_3;
hdr.cta8613 = cta861_meta;
}
+
+ // does not have valid info
+ if (!(hdr.validTypes & (HdrMetadata::SMPTE2086 | HdrMetadata::CTA861_3))) {
+ hdrStaticInfo.reset();
+ }
}
if (hdrDynamicInfo
&& hdrDynamicInfo->m.type_ == C2Config::HDR_DYNAMIC_METADATA_TYPE_SMPTE_2094_40) {
@@ -933,9 +956,9 @@
hdrDynamicInfo->m.data + hdrDynamicInfo->flexCount());
}
qbi.setHdrMetadata(hdr);
-
- SetHdrMetadataToGralloc4Handle(hdrStaticInfo, hdrDynamicInfo, block.handle());
}
+ SetMetadataToGralloc4Handle(dataSpace, hdrStaticInfo, hdrDynamicInfo, block.handle());
+
// we don't have dirty regions
qbi.setSurfaceDamage(Region::INVALID_REGION);
android::IGraphicBufferProducer::QueueBufferOutput qbo;
@@ -1892,7 +1915,7 @@
int32_t flags = 0;
if (worklet->output.flags & C2FrameData::FLAG_END_OF_STREAM) {
- flags |= MediaCodec::BUFFER_FLAG_EOS;
+ flags |= BUFFER_FLAG_END_OF_STREAM;
ALOGV("[%s] onWorkDone: output EOS", mName);
}
@@ -1909,6 +1932,8 @@
// When using input surface we need to restore the original input timestamp.
timestamp = work->input.ordinal.customOrdinal;
}
+ ScopedTrace trace(ATRACE_TAG, android::base::StringPrintf(
+ "CCodecBufferChannel::onWorkDone(%s@ts=%lld)", mName, timestamp.peekll()).c_str());
ALOGV("[%s] onWorkDone: input %lld, codec %lld => output %lld => %lld",
mName,
work->input.ordinal.customOrdinal.peekll(),
@@ -1930,7 +1955,7 @@
sp<MediaCodecBuffer> outBuffer;
if (output->buffers && output->buffers->registerCsd(initData, &index, &outBuffer) == OK) {
outBuffer->meta()->setInt64("timeUs", timestamp.peek());
- outBuffer->meta()->setInt32("flags", MediaCodec::BUFFER_FLAG_CODECCONFIG);
+ outBuffer->meta()->setInt32("flags", BUFFER_FLAG_CODEC_CONFIG);
ALOGV("[%s] onWorkDone: csd index = %zu [%p]", mName, index, outBuffer.get());
output.unlock();
@@ -1966,7 +1991,7 @@
switch (info->coreIndex().coreIndex()) {
case C2StreamPictureTypeMaskInfo::CORE_INDEX:
if (((C2StreamPictureTypeMaskInfo *)info.get())->value & C2Config::SYNC_FRAME) {
- flags |= MediaCodec::BUFFER_FLAG_SYNCFRAME;
+ flags |= BUFFER_FLAG_KEY_FRAME;
}
break;
default:
@@ -2086,12 +2111,13 @@
}
PipelineWatcher::Clock::duration CCodecBufferChannel::elapsed() {
- // When client pushed EOS, we want all the work to be done quickly.
// Otherwise, component may have stalled work due to input starvation up to
// the sum of the delay in the pipeline.
+ // TODO(b/231253301): When client pushed EOS, the pipeline could have fewer
+ // frames.
size_t n = 0;
- if (!mInputMetEos) {
- size_t outputDelay = mOutput.lock()->outputDelay;
+ size_t outputDelay = mOutput.lock()->outputDelay;
+ {
Mutexed<Input>::Locked input(mInput);
n = input->inputDelay + input->pipelineDelay + outputDelay;
}
@@ -2170,4 +2196,13 @@
}
}
+status_t CCodecBufferChannel::pushBlankBufferToOutputSurface() {
+ Mutexed<OutputSurface>::Locked output(mOutputSurface);
+ sp<ANativeWindow> nativeWindow = static_cast<ANativeWindow *>(output->surface.get());
+ if (nativeWindow == nullptr) {
+ return INVALID_OPERATION;
+ }
+ return pushBlankBuffersToNativeWindow(nativeWindow.get());
+}
+
} // namespace android
diff --git a/media/codec2/sfplugin/CCodecBufferChannel.h b/media/codec2/sfplugin/CCodecBufferChannel.h
index 26eef30..b3a5f4b 100644
--- a/media/codec2/sfplugin/CCodecBufferChannel.h
+++ b/media/codec2/sfplugin/CCodecBufferChannel.h
@@ -181,6 +181,11 @@
void setMetaMode(MetaMode mode);
+ /**
+ * Push a blank buffer to the configured native output surface.
+ */
+ status_t pushBlankBufferToOutputSurface();
+
private:
class QueueGuard;
diff --git a/media/codec2/sfplugin/CCodecBuffers.cpp b/media/codec2/sfplugin/CCodecBuffers.cpp
index 97e1a01..8f61129 100644
--- a/media/codec2/sfplugin/CCodecBuffers.cpp
+++ b/media/codec2/sfplugin/CCodecBuffers.cpp
@@ -22,7 +22,6 @@
#include <media/stagefright/foundation/ADebug.h>
#include <media/stagefright/foundation/MediaDefs.h>
-#include <media/stagefright/MediaCodec.h>
#include <media/stagefright/MediaCodecConstants.h>
#include <media/stagefright/SkipCutBuffer.h>
#include <mediadrm/ICrypto.h>
@@ -34,6 +33,8 @@
namespace {
+constexpr uint32_t PIXEL_FORMAT_UNKNOWN = 0;
+
sp<GraphicBlockBuffer> AllocateGraphicBuffer(
const std::shared_ptr<C2BlockPool> &pool,
const sp<AMessage> &format,
@@ -203,7 +204,7 @@
bool OutputBuffers::convert(
const std::shared_ptr<C2Buffer> &src, sp<Codec2Buffer> *dst) {
- if (!src || src->data().type() != C2BufferData::LINEAR) {
+ if (src && src->data().type() != C2BufferData::LINEAR) {
return false;
}
int32_t configEncoding = kAudioEncodingPcm16bit;
@@ -232,7 +233,12 @@
if (!mDataConverter) {
return false;
}
- sp<MediaCodecBuffer> srcBuffer = ConstLinearBlockBuffer::Allocate(mFormat, src);
+ sp<MediaCodecBuffer> srcBuffer;
+ if (src) {
+ srcBuffer = ConstLinearBlockBuffer::Allocate(mFormat, src);
+ } else {
+ srcBuffer = new MediaCodecBuffer(mFormat, new ABuffer(0));
+ }
if (!srcBuffer) {
return false;
}
@@ -288,7 +294,7 @@
int32_t flags,
const sp<AMessage>& format,
const C2WorkOrdinalStruct& ordinal) {
- bool eos = flags & MediaCodec::BUFFER_FLAG_EOS;
+ bool eos = flags & BUFFER_FLAG_END_OF_STREAM;
if (!buffer && eos) {
// TRICKY: we may be violating ordering of the stash here. Because we
// don't expect any more emplace() calls after this, the ordering should
@@ -296,7 +302,7 @@
mReorderStash.emplace_back(
buffer, notify, timestamp, flags, format, ordinal);
} else {
- flags = flags & ~MediaCodec::BUFFER_FLAG_EOS;
+ flags = flags & ~BUFFER_FLAG_END_OF_STREAM;
auto it = mReorderStash.begin();
for (; it != mReorderStash.end(); ++it) {
if (less(ordinal, it->ordinal)) {
@@ -307,7 +313,7 @@
buffer, notify, timestamp, flags, format, ordinal);
if (eos) {
mReorderStash.back().flags =
- mReorderStash.back().flags | MediaCodec::BUFFER_FLAG_EOS;
+ mReorderStash.back().flags | BUFFER_FLAG_END_OF_STREAM;
}
}
while (!mReorderStash.empty() && mReorderStash.size() > mDepth) {
@@ -344,7 +350,7 @@
// Flushing mReorderStash because no other buffers should come after output
// EOS.
- if (entry.flags & MediaCodec::BUFFER_FLAG_EOS) {
+ if (entry.flags & BUFFER_FLAG_END_OF_STREAM) {
// Flush reorder stash
setReorderDepth(0);
}
@@ -1252,8 +1258,8 @@
if (newBuffer == nullptr) {
return NO_MEMORY;
}
+ newBuffer->setFormat(mFormat);
}
- newBuffer->setFormat(mFormat);
*index = mImpl.assignSlot(newBuffer);
handleImageData(newBuffer);
*clientBuffer = newBuffer;
diff --git a/media/codec2/sfplugin/CCodecConfig.cpp b/media/codec2/sfplugin/CCodecConfig.cpp
index 0899e99..c293d04 100644
--- a/media/codec2/sfplugin/CCodecConfig.cpp
+++ b/media/codec2/sfplugin/CCodecConfig.cpp
@@ -16,10 +16,15 @@
//#define LOG_NDEBUG 0
#define LOG_TAG "CCodecConfig"
+
+#include <initializer_list>
+
#include <cutils/properties.h>
#include <log/log.h>
#include <utils/NativeHandle.h>
+#include <android-base/properties.h>
+
#include <C2Component.h>
#include <C2Param.h>
#include <util/C2InterfaceHelper.h>
@@ -324,7 +329,8 @@
: mInputFormat(new AMessage),
mOutputFormat(new AMessage),
mUsingSurface(false),
- mTunneled(false) { }
+ mTunneled(false),
+ mPushBlankBuffersOnStop(false) { }
void CCodecConfig::initializeStandardParams() {
typedef Domain D;
@@ -397,10 +403,10 @@
// Rotation
// Note: SDK rotation is clock-wise, while C2 rotation is counter-clock-wise
add(ConfigMapper(KEY_ROTATION, C2_PARAMKEY_VUI_ROTATION, "value")
- .limitTo(D::VIDEO & D::CODED)
+ .limitTo((D::VIDEO | D::IMAGE) & D::CODED)
.withMappers(negate, negate));
add(ConfigMapper(KEY_ROTATION, C2_PARAMKEY_ROTATION, "value")
- .limitTo(D::VIDEO & D::RAW)
+ .limitTo((D::VIDEO | D::IMAGE) & D::RAW)
.withMappers(negate, negate));
// android 'video-scaling'
@@ -510,6 +516,9 @@
add(ConfigMapper("cta861.max-fall", C2_PARAMKEY_HDR_STATIC_INFO, "max-fall")
.limitTo((D::VIDEO | D::IMAGE) & D::RAW));
+ add(ConfigMapper(C2_PARAMKEY_HDR_FORMAT, C2_PARAMKEY_HDR_FORMAT, "value")
+ .limitTo((D::VIDEO | D::IMAGE) & D::CODED & D::CONFIG));
+
add(ConfigMapper(std::string(KEY_FEATURE_) + FEATURE_SecurePlayback,
C2_PARAMKEY_SECURE_MODE, "value"));
@@ -609,10 +618,30 @@
add(ConfigMapper("csd-0", C2_PARAMKEY_INIT_DATA, "value")
.limitTo(D::OUTPUT & D::READ));
- add(ConfigMapper(KEY_HDR10_PLUS_INFO, C2_PARAMKEY_INPUT_HDR10_PLUS_INFO, "value")
+ deprecated(ConfigMapper(KEY_HDR10_PLUS_INFO, C2_PARAMKEY_INPUT_HDR10_PLUS_INFO, "value")
.limitTo(D::VIDEO & D::PARAM & D::INPUT));
- add(ConfigMapper(KEY_HDR10_PLUS_INFO, C2_PARAMKEY_OUTPUT_HDR10_PLUS_INFO, "value")
+ deprecated(ConfigMapper(KEY_HDR10_PLUS_INFO, C2_PARAMKEY_OUTPUT_HDR10_PLUS_INFO, "value")
+ .limitTo(D::VIDEO & D::OUTPUT));
+
+ add(ConfigMapper(
+ std::string(C2_PARAMKEY_INPUT_HDR_DYNAMIC_INFO) + ".type",
+ C2_PARAMKEY_INPUT_HDR_DYNAMIC_INFO, "type")
+ .limitTo(D::VIDEO & D::PARAM & D::INPUT));
+
+ add(ConfigMapper(
+ std::string(C2_PARAMKEY_INPUT_HDR_DYNAMIC_INFO) + ".data",
+ C2_PARAMKEY_INPUT_HDR_DYNAMIC_INFO, "data")
+ .limitTo(D::VIDEO & D::PARAM & D::INPUT));
+
+ add(ConfigMapper(
+ std::string(C2_PARAMKEY_OUTPUT_HDR_DYNAMIC_INFO) + ".type",
+ C2_PARAMKEY_OUTPUT_HDR_DYNAMIC_INFO, "type")
+ .limitTo(D::VIDEO & D::OUTPUT));
+
+ add(ConfigMapper(
+ std::string(C2_PARAMKEY_OUTPUT_HDR_DYNAMIC_INFO) + ".data",
+ C2_PARAMKEY_OUTPUT_HDR_DYNAMIC_INFO, "data")
.limitTo(D::VIDEO & D::OUTPUT));
add(ConfigMapper(C2_PARAMKEY_TEMPORAL_LAYERING, C2_PARAMKEY_TEMPORAL_LAYERING, "")
@@ -899,6 +928,9 @@
add(ConfigMapper(KEY_AAC_MAX_OUTPUT_CHANNEL_COUNT, C2_PARAMKEY_MAX_CHANNEL_COUNT, "value")
.limitTo(D::AUDIO & (D::CONFIG | D::PARAM | D::READ)));
+ add(ConfigMapper(KEY_MAX_OUTPUT_CHANNEL_COUNT, C2_PARAMKEY_MAX_CHANNEL_COUNT, "value")
+ .limitTo(D::AUDIO & (D::CONFIG | D::PARAM | D::READ)));
+
add(ConfigMapper(KEY_AAC_SBR_MODE, C2_PARAMKEY_AAC_SBR_MODE, "value")
.limitTo(D::AUDIO & D::ENCODER & (D::CONFIG | D::PARAM | D::READ))
.withMapper([](C2Value v) -> C2Value {
@@ -957,11 +989,25 @@
.limitTo(D::ENCODER & D::VIDEO & D::READ));
add(ConfigMapper(KEY_PICTURE_TYPE, C2_PARAMKEY_PICTURE_TYPE, "value")
- .limitTo(D::ENCODER & D::VIDEO & D::READ));
+ .limitTo(D::ENCODER & D::VIDEO & D::READ)
+ .withMappers([](C2Value v) -> C2Value {
+ int32_t sdk;
+ C2Config::picture_type_t c2;
+ if (v.get(&sdk) && C2Mapper::map(sdk, &c2)) {
+ return C2Value(c2);
+ }
+ return C2Value();
+ }, [](C2Value v) -> C2Value {
+ C2Config::picture_type_t c2;
+ int32_t sdk = PICTURE_TYPE_UNKNOWN;
+ using C2ValueType=typename _c2_reduce_enum_to_underlying_type<decltype(c2)>::type;
+ if (v.get((C2ValueType*)&c2) && C2Mapper::map(c2, &sdk)) {
+ return sdk;
+ }
+ return C2Value();
+ }));
/* still to do
- constexpr char KEY_PUSH_BLANK_BUFFERS_ON_STOP[] = "push-blank-buffers-on-shutdown";
-
not yet used by MediaCodec, but defined as MediaFormat
KEY_AUDIO_SESSION_ID // we use "audio-hw-sync"
KEY_OUTPUT_REORDER_DEPTH
@@ -1068,6 +1114,13 @@
C2_PARAMKEY_SURFACE_SCALING_MODE);
} else {
addLocalParam(new C2StreamColorAspectsInfo::input(0u), C2_PARAMKEY_COLOR_ASPECTS);
+
+ if (domain.value == C2Component::DOMAIN_VIDEO) {
+ addLocalParam(new C2AndroidStreamAverageBlockQuantizationInfo::output(0u, 0),
+ C2_PARAMKEY_AVERAGE_QP);
+ addLocalParam(new C2StreamPictureTypeInfo::output(0u, 0),
+ C2_PARAMKEY_PICTURE_TYPE);
+ }
}
}
@@ -1102,6 +1155,17 @@
}
}
+ // Parameters that are not subscribed initially, but can be subscribed
+ // upon explicit request.
+ static const std::initializer_list<C2Param::Index> kOptionalParams = {
+ C2AndroidStreamAverageBlockQuantizationInfo::output::PARAM_TYPE,
+ C2StreamPictureTypeInfo::output::PARAM_TYPE,
+ };
+ for (const C2Param::Index &index : kOptionalParams) {
+ mSubscribedIndices.erase(index);
+ }
+ subscribeToConfigUpdate(configurable, {}, C2_MAY_BLOCK);
+
return OK;
}
@@ -1109,15 +1173,21 @@
const std::shared_ptr<Codec2Client::Configurable> &configurable,
const std::vector<C2Param::Index> &indices,
c2_blocking_t blocking) {
+ static const int32_t kProductFirstApiLevel =
+ base::GetIntProperty<int32_t>("ro.product.first_api_level", 0);
+ static const int32_t kBoardApiLevel =
+ base::GetIntProperty<int32_t>("ro.board.first_api_level", 0);
+ static const int32_t kFirstApiLevel =
+ (kBoardApiLevel != 0) ? kBoardApiLevel : kProductFirstApiLevel;
mSubscribedIndices.insert(indices.begin(), indices.end());
- // TODO: enable this when components no longer crash on this config
- if (mSubscribedIndices.size() != mSubscribedIndicesSize && false) {
- std::vector<uint32_t> indices;
+ if (mSubscribedIndices.size() != mSubscribedIndicesSize
+ && kFirstApiLevel >= __ANDROID_API_T__) {
+ std::vector<uint32_t> indicesVector;
for (C2Param::Index ix : mSubscribedIndices) {
- indices.push_back(ix);
+ indicesVector.push_back(ix);
}
std::unique_ptr<C2SubscribedParamIndicesTuning> subscribeTuning =
- C2SubscribedParamIndicesTuning::AllocUnique(indices);
+ C2SubscribedParamIndicesTuning::AllocUnique(indicesVector);
std::vector<std::unique_ptr<C2SettingResult>> results;
c2_status_t c2Err = configurable->config({ subscribeTuning.get() }, blocking, &results);
if (c2Err != C2_OK && c2Err != C2_BAD_INDEX) {
@@ -1127,6 +1197,20 @@
ALOGV("Subscribed to %zu params", mSubscribedIndices.size());
mSubscribedIndicesSize = mSubscribedIndices.size();
}
+#if defined(LOG_NDEBUG) && !LOG_NDEBUG
+ ALOGV("subscribed to %zu params:", mSubscribedIndices.size());
+ std::stringstream ss;
+ for (const C2Param::Index &index : mSubscribedIndices) {
+ ss << index << " ";
+ if (ss.str().length() > 70) {
+ ALOGV("%s", ss.str().c_str());
+ std::stringstream().swap(ss);
+ }
+ }
+ if (!ss.str().empty()) {
+ ALOGV("%s", ss.str().c_str());
+ }
+#endif
return OK;
}
@@ -1151,6 +1235,12 @@
bool changed = false;
for (std::unique_ptr<C2Param> &p : configUpdate) {
if (p && *p) {
+ // Allow unsubscribed vendor parameters to go through --- it may be
+ // later handled by the format shaper.
+ if (!p->isVendor() && mSubscribedIndices.count(p->index()) == 0) {
+ ALOGV("updateConfiguration: skipped unsubscribed param %08x", p->index());
+ continue;
+ }
auto insertion = mCurrentConfig.emplace(p->index(), nullptr);
if (insertion.second || *insertion.first->second != *p) {
if (mSupportedIndices.count(p->index()) || mLocalParams.count(p->index())) {
@@ -1521,6 +1611,22 @@
msg->removeEntryAt(msg->findEntryByName("cta861.max-cll"));
msg->removeEntryAt(msg->findEntryByName("cta861.max-fall"));
}
+
+ // HDR dynamic info
+ std::string keyPrefix = input ? C2_PARAMKEY_INPUT_HDR_DYNAMIC_INFO
+ : C2_PARAMKEY_OUTPUT_HDR_DYNAMIC_INFO;
+ std::string typeKey = keyPrefix + ".type";
+ std::string dataKey = keyPrefix + ".data";
+ int32_t type;
+ sp<ABuffer> data;
+ if (msg->findInt32(typeKey.c_str(), &type)
+ && msg->findBuffer(dataKey.c_str(), &data)) {
+ if (type == HDR_DYNAMIC_METADATA_TYPE_SMPTE_2094_40) {
+ msg->setBuffer(KEY_HDR10_PLUS_INFO, data);
+ msg->removeEntryAt(msg->findEntryByName(typeKey.c_str()));
+ msg->removeEntryAt(msg->findEntryByName(dataKey.c_str()));
+ }
+ }
}
ALOGV("converted to SDK values as %s", msg->debugString().c_str());
@@ -1604,6 +1710,27 @@
params->setFloat(C2_PARAMKEY_INPUT_TIME_STRETCH, captureRate / frameRate);
}
}
+
+ // add HDR format for video encoding
+ if (configDomain == IS_CONFIG) {
+ // don't assume here that transfer is set for HDR, only require it for HLG
+ int transfer = 0;
+ params->findInt32(KEY_COLOR_TRANSFER, &transfer);
+
+ int profile;
+ if (params->findInt32(KEY_PROFILE, &profile)) {
+ std::shared_ptr<C2Mapper::ProfileLevelMapper> mapper =
+ C2Mapper::GetProfileLevelMapper(mCodingMediaType);
+ C2Config::hdr_format_t c2 = C2Config::hdr_format_t::UNKNOWN;
+ if (mapper && mapper->mapHdrFormat(profile, &c2)) {
+ if (c2 == C2Config::hdr_format_t::HLG &&
+ transfer != COLOR_TRANSFER_HLG) {
+ c2 = C2Config::hdr_format_t::UNKNOWN;
+ }
+ params->setInt32(C2_PARAMKEY_HDR_FORMAT, c2);
+ }
+ }
+ }
}
{ // reflect temporal layering into a binary blob
@@ -1701,6 +1828,16 @@
params->setFloat("cta861.max-fall", meta->sType1.mMaxFrameAverageLightLevel);
}
}
+
+ sp<ABuffer> hdrDynamicInfo;
+ if (params->findBuffer(KEY_HDR10_PLUS_INFO, &hdrDynamicInfo)) {
+ for (const std::string &prefix : { C2_PARAMKEY_INPUT_HDR_DYNAMIC_INFO,
+ C2_PARAMKEY_OUTPUT_HDR_DYNAMIC_INFO }) {
+ params->setInt32((prefix + ".type").c_str(),
+ HDR_DYNAMIC_METADATA_TYPE_SMPTE_2094_40);
+ params->setBuffer((prefix + ".data").c_str(), hdrDynamicInfo);
+ }
+ }
}
// this is to verify that we set proper signedness for standard parameters
@@ -1893,7 +2030,9 @@
names->clear();
// TODO: expand to standard params
for (const auto &[key, desc] : mVendorParams) {
- names->push_back(key);
+ if (desc->isVisible()) {
+ names->push_back(key);
+ }
}
return OK;
}
diff --git a/media/codec2/sfplugin/CCodecConfig.h b/media/codec2/sfplugin/CCodecConfig.h
index 88e6239..2e7b866 100644
--- a/media/codec2/sfplugin/CCodecConfig.h
+++ b/media/codec2/sfplugin/CCodecConfig.h
@@ -148,6 +148,8 @@
bool mTunneled;
sp<NativeHandle> mSidebandHandle;
+ bool mPushBlankBuffersOnStop;
+
CCodecConfig();
/// initializes the members required to manage the format: descriptors, reflector,
@@ -396,4 +398,3 @@
} // namespace android
#endif // C_CODEC_H_
-
diff --git a/media/codec2/sfplugin/Codec2Buffer.cpp b/media/codec2/sfplugin/Codec2Buffer.cpp
index 2d3c70a..95b9e85 100644
--- a/media/codec2/sfplugin/Codec2Buffer.cpp
+++ b/media/codec2/sfplugin/Codec2Buffer.cpp
@@ -16,7 +16,9 @@
//#define LOG_NDEBUG 0
#define LOG_TAG "Codec2Buffer"
+#define ATRACE_TAG ATRACE_TAG_VIDEO
#include <utils/Log.h>
+#include <utils/Trace.h>
#include <aidl/android/hardware/graphics/common/Cta861_3.h>
#include <aidl/android/hardware/graphics/common/Smpte2086.h>
@@ -229,6 +231,7 @@
mAllocatedDepth(0),
mBackBufferSize(0),
mMediaImage(new ABuffer(sizeof(MediaImage2))) {
+ ATRACE_CALL();
if (!format->findInt32(KEY_COLOR_FORMAT, &mClientColorFormat)) {
mClientColorFormat = COLOR_FormatYUV420Flexible;
}
@@ -581,6 +584,7 @@
* Copy C2GraphicView to MediaImage2.
*/
status_t copyToMediaImage() {
+ ATRACE_CALL();
if (mInitCheck != OK) {
return mInitCheck;
}
@@ -619,7 +623,9 @@
const sp<AMessage> &format,
const std::shared_ptr<C2GraphicBlock> &block,
std::function<sp<ABuffer>(size_t)> alloc) {
+ ATRACE_BEGIN("GraphicBlockBuffer::Allocate block->map()");
C2GraphicView view(block->map().get());
+ ATRACE_END();
if (view.error() != C2_OK) {
ALOGD("C2GraphicBlock::map failed: %d", view.error());
return nullptr;
@@ -664,6 +670,7 @@
}
std::shared_ptr<C2Buffer> GraphicBlockBuffer::asC2Buffer() {
+ ATRACE_CALL();
uint32_t width = mView.width();
uint32_t height = mView.height();
if (!mWrapped) {
@@ -752,8 +759,10 @@
ALOGD("C2Buffer precond fail");
return nullptr;
}
+ ATRACE_BEGIN("ConstGraphicBlockBuffer::Allocate block->map()");
std::unique_ptr<const C2GraphicView> view(std::make_unique<const C2GraphicView>(
buffer->data().graphicBlocks()[0].map().get()));
+ ATRACE_END();
std::unique_ptr<const C2GraphicView> holder;
GraphicView2MediaImageConverter converter(*view, format, false /* copy */);
@@ -854,11 +863,13 @@
return false;
}
+ ATRACE_BEGIN("ConstGraphicBlockBuffer::canCopy block->map()");
GraphicView2MediaImageConverter converter(
buffer->data().graphicBlocks()[0].map().get(),
// FIXME: format() is not const, but we cannot change it, so do a const cast here
const_cast<ConstGraphicBlockBuffer *>(this)->format(),
true /* copy */);
+ ATRACE_END();
if (converter.initCheck() != OK) {
ALOGD("ConstGraphicBlockBuffer::canCopy: converter init failed: %d", converter.initCheck());
return false;
@@ -954,9 +965,11 @@
}
using ::aidl::android::hardware::graphics::common::Cta861_3;
+using ::aidl::android::hardware::graphics::common::Dataspace;
using ::aidl::android::hardware::graphics::common::Smpte2086;
using ::android::gralloc4::MetadataType_Cta861_3;
+using ::android::gralloc4::MetadataType_Dataspace;
using ::android::gralloc4::MetadataType_Smpte2086;
using ::android::gralloc4::MetadataType_Smpte2094_40;
@@ -973,16 +986,47 @@
return sMapper;
}
-class NativeHandleDeleter {
+class Gralloc4Buffer {
public:
- explicit NativeHandleDeleter(native_handle_t *handle) : mHandle(handle) {}
- ~NativeHandleDeleter() {
- if (mHandle) {
- native_handle_delete(mHandle);
+ Gralloc4Buffer(const C2Handle *const handle) : mBuffer(nullptr) {
+ sp<IMapper4> mapper = GetMapper4();
+ if (!mapper) {
+ return;
+ }
+ // Unwrap raw buffer handle from the C2Handle
+ native_handle_t *nh = UnwrapNativeCodec2GrallocHandle(handle);
+ if (!nh) {
+ return;
+ }
+ // Import the raw handle so IMapper can use the buffer. The imported
+ // handle must be freed when the client is done with the buffer.
+ mapper->importBuffer(
+ hardware::hidl_handle(nh),
+ [&](const Error4 &error, void *buffer) {
+ if (error == Error4::NONE) {
+ mBuffer = buffer;
+ }
+ });
+
+ // TRICKY: UnwrapNativeCodec2GrallocHandle creates a new handle but
+ // does not clone the fds. Thus we need to delete the handle
+ // without closing it.
+ native_handle_delete(nh);
+ }
+
+ ~Gralloc4Buffer() {
+ sp<IMapper4> mapper = GetMapper4();
+ if (mapper && mBuffer) {
+ // Free the imported buffer handle. This does not release the
+ // underlying buffer itself.
+ mapper->freeBuffer(mBuffer);
}
}
+
+ void *get() const { return mBuffer; }
+ operator bool() const { return (mBuffer != nullptr); }
private:
- native_handle_t *mHandle;
+ void *mBuffer;
};
} // namespace
@@ -992,24 +1036,15 @@
std::shared_ptr<C2StreamHdrStaticMetadataInfo::input> *staticInfo,
std::shared_ptr<C2StreamHdrDynamicMetadataInfo::input> *dynamicInfo) {
c2_status_t err = C2_OK;
- native_handle_t *nativeHandle = UnwrapNativeCodec2GrallocHandle(handle);
- if (nativeHandle == nullptr) {
- // Nothing to do
- return err;
- }
- // TRICKY: UnwrapNativeCodec2GrallocHandle creates a new handle but
- // does not clone the fds. Thus we need to delete the handle
- // without closing it when going out of scope.
- // NativeHandle cannot solve this problem, as it would close and
- // delete the handle, while we need delete only.
- NativeHandleDeleter nhd(nativeHandle);
sp<IMapper4> mapper = GetMapper4();
- if (!mapper) {
+ Gralloc4Buffer buffer(handle);
+ if (!mapper || !buffer) {
// Gralloc4 not supported; nothing to do
return err;
}
Error4 mapperErr = Error4::NONE;
if (staticInfo) {
+ ALOGV("Grabbing static HDR info from gralloc4 metadata");
staticInfo->reset(new C2StreamHdrStaticMetadataInfo::input(0u));
memset(&(*staticInfo)->mastering, 0, sizeof((*staticInfo)->mastering));
(*staticInfo)->maxCll = 0;
@@ -1038,7 +1073,7 @@
mapperErr = Error4::BAD_VALUE;
}
};
- Return<void> ret = mapper->get(nativeHandle, MetadataType_Smpte2086, cb);
+ Return<void> ret = mapper->get(buffer.get(), MetadataType_Smpte2086, cb);
if (!ret.isOk()) {
err = C2_REFUSED;
} else if (mapperErr != Error4::NONE) {
@@ -1059,7 +1094,7 @@
mapperErr = Error4::BAD_VALUE;
}
};
- ret = mapper->get(nativeHandle, MetadataType_Cta861_3, cb);
+ ret = mapper->get(buffer.get(), MetadataType_Cta861_3, cb);
if (!ret.isOk()) {
err = C2_REFUSED;
} else if (mapperErr != Error4::NONE) {
@@ -1067,6 +1102,7 @@
}
}
if (dynamicInfo) {
+ ALOGV("Grabbing dynamic HDR info from gralloc4 metadata");
dynamicInfo->reset();
IMapper4::get_cb cb = [&mapperErr, dynamicInfo](Error4 err, const hidl_vec<uint8_t> &vec) {
mapperErr = err;
@@ -1080,7 +1116,7 @@
vec.size(), 0u, C2Config::HDR_DYNAMIC_METADATA_TYPE_SMPTE_2094_40);
memcpy((*dynamicInfo)->m.data, vec.data(), vec.size());
};
- Return<void> ret = mapper->get(nativeHandle, MetadataType_Smpte2094_40, cb);
+ Return<void> ret = mapper->get(buffer.get(), MetadataType_Smpte2094_40, cb);
if (!ret.isOk() || mapperErr != Error4::NONE) {
dynamicInfo->reset();
}
@@ -1089,26 +1125,31 @@
return err;
}
-c2_status_t SetHdrMetadataToGralloc4Handle(
+c2_status_t SetMetadataToGralloc4Handle(
+ android_dataspace_t dataSpace,
const std::shared_ptr<const C2StreamHdrStaticMetadataInfo::output> &staticInfo,
const std::shared_ptr<const C2StreamHdrDynamicMetadataInfo::output> &dynamicInfo,
const C2Handle *const handle) {
c2_status_t err = C2_OK;
- native_handle_t *nativeHandle = UnwrapNativeCodec2GrallocHandle(handle);
- if (nativeHandle == nullptr) {
- // Nothing to do
- return err;
- }
- // TRICKY: UnwrapNativeCodec2GrallocHandle creates a new handle but
- // does not clone the fds. Thus we need to delete the handle
- // without closing it when going out of scope.
- NativeHandleDeleter nhd(nativeHandle);
sp<IMapper4> mapper = GetMapper4();
- if (!mapper) {
+ Gralloc4Buffer buffer(handle);
+ if (!mapper || !buffer) {
// Gralloc4 not supported; nothing to do
return err;
}
+ {
+ hidl_vec<uint8_t> metadata;
+ if (gralloc4::encodeDataspace(static_cast<Dataspace>(dataSpace), &metadata) == OK) {
+ Return<Error4> ret = mapper->set(buffer.get(), MetadataType_Dataspace, metadata);
+ if (!ret.isOk()) {
+ err = C2_REFUSED;
+ } else if (ret != Error4::NONE) {
+ err = C2_CORRUPTED;
+ }
+ }
+ }
if (staticInfo && *staticInfo) {
+ ALOGV("Setting static HDR info as gralloc4 metadata");
std::optional<Smpte2086> smpte2086 = Smpte2086{
{staticInfo->mastering.red.x, staticInfo->mastering.red.y},
{staticInfo->mastering.green.x, staticInfo->mastering.green.y},
@@ -1118,8 +1159,17 @@
staticInfo->mastering.minLuminance,
};
hidl_vec<uint8_t> vec;
- if (gralloc4::encodeSmpte2086(smpte2086, &vec) == OK) {
- Return<Error4> ret = mapper->set(nativeHandle, MetadataType_Smpte2086, vec);
+ if (0.0 <= smpte2086->primaryRed.x && smpte2086->primaryRed.x <= 1.0
+ && 0.0 <= smpte2086->primaryRed.y && smpte2086->primaryRed.y <= 1.0
+ && 0.0 <= smpte2086->primaryGreen.x && smpte2086->primaryGreen.x <= 1.0
+ && 0.0 <= smpte2086->primaryGreen.y && smpte2086->primaryGreen.y <= 1.0
+ && 0.0 <= smpte2086->primaryBlue.x && smpte2086->primaryBlue.x <= 1.0
+ && 0.0 <= smpte2086->primaryBlue.y && smpte2086->primaryBlue.y <= 1.0
+ && 0.0 <= smpte2086->whitePoint.x && smpte2086->whitePoint.x <= 1.0
+ && 0.0 <= smpte2086->whitePoint.y && smpte2086->whitePoint.y <= 1.0
+ && 0.0 <= smpte2086->maxLuminance && 0.0 <= smpte2086->minLuminance
+ && gralloc4::encodeSmpte2086(smpte2086, &vec) == OK) {
+ Return<Error4> ret = mapper->set(buffer.get(), MetadataType_Smpte2086, vec);
if (!ret.isOk()) {
err = C2_REFUSED;
} else if (ret != Error4::NONE) {
@@ -1130,8 +1180,9 @@
staticInfo->maxCll,
staticInfo->maxFall,
};
- if (gralloc4::encodeCta861_3(cta861_3, &vec) == OK) {
- Return<Error4> ret = mapper->set(nativeHandle, MetadataType_Cta861_3, vec);
+ if (0.0 <= cta861_3->maxContentLightLevel && 0.0 <= cta861_3->maxFrameAverageLightLevel
+ && gralloc4::encodeCta861_3(cta861_3, &vec) == OK) {
+ Return<Error4> ret = mapper->set(buffer.get(), MetadataType_Cta861_3, vec);
if (!ret.isOk()) {
err = C2_REFUSED;
} else if (ret != Error4::NONE) {
@@ -1139,10 +1190,8 @@
}
}
}
- if (dynamicInfo && *dynamicInfo) {
- hidl_vec<uint8_t> vec;
- vec.resize(dynamicInfo->flexCount());
- memcpy(vec.data(), dynamicInfo->m.data, dynamicInfo->flexCount());
+ if (dynamicInfo && *dynamicInfo && dynamicInfo->flexCount() > 0) {
+ ALOGV("Setting dynamic HDR info as gralloc4 metadata");
std::optional<IMapper4::MetadataType> metadataType;
switch (dynamicInfo->m.type_) {
case C2Config::HDR_DYNAMIC_METADATA_TYPE_SMPTE_2094_10:
@@ -1152,12 +1201,20 @@
metadataType = MetadataType_Smpte2094_40;
break;
}
+
if (metadataType) {
- Return<Error4> ret = mapper->set(nativeHandle, *metadataType, vec);
- if (!ret.isOk()) {
- err = C2_REFUSED;
- } else if (ret != Error4::NONE) {
- err = C2_CORRUPTED;
+ std::vector<uint8_t> smpte2094_40;
+ smpte2094_40.resize(dynamicInfo->flexCount());
+ memcpy(smpte2094_40.data(), dynamicInfo->m.data, dynamicInfo->flexCount());
+
+ hidl_vec<uint8_t> vec;
+ if (gralloc4::encodeSmpte2094_40({ smpte2094_40 }, &vec) == OK) {
+ Return<Error4> ret = mapper->set(buffer.get(), *metadataType, vec);
+ if (!ret.isOk()) {
+ err = C2_REFUSED;
+ } else if (ret != Error4::NONE) {
+ err = C2_CORRUPTED;
+ }
}
} else {
err = C2_BAD_VALUE;
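The Gralloc4Buffer class above packages importBuffer/freeBuffer into an RAII wrapper so every early return frees the imported handle. As a rough illustration only (not part of this change), a hypothetical reader for another metadata type could reuse it the same way, assuming the gralloc4::decodeDataspace helper:

    // Illustrative sketch; ReadDataspaceFromGralloc4Handle is a hypothetical name.
    c2_status_t ReadDataspaceFromGralloc4Handle(const C2Handle *const handle, int32_t *outDataspace) {
        sp<IMapper4> mapper = GetMapper4();
        Gralloc4Buffer buffer(handle);    // imports the raw handle; freed in the destructor
        if (!mapper || !buffer) {
            return C2_OK;                 // gralloc4 not supported; nothing to do
        }
        Error4 mapperErr = Error4::NONE;
        Return<void> ret = mapper->get(
                buffer.get(), MetadataType_Dataspace,
                [&](Error4 err, const hidl_vec<uint8_t> &vec) {
                    mapperErr = err;
                    Dataspace dataspace;
                    if (err == Error4::NONE && gralloc4::decodeDataspace(vec, &dataspace) == OK) {
                        *outDataspace = static_cast<int32_t>(dataspace);
                    }
                });
        if (!ret.isOk()) {
            return C2_REFUSED;
        }
        return (mapperErr == Error4::NONE) ? C2_OK : C2_CORRUPTED;
    }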
diff --git a/media/codec2/sfplugin/Codec2Buffer.h b/media/codec2/sfplugin/Codec2Buffer.h
index b02b042..b73acab 100644
--- a/media/codec2/sfplugin/Codec2Buffer.h
+++ b/media/codec2/sfplugin/Codec2Buffer.h
@@ -410,14 +410,16 @@
std::shared_ptr<C2StreamHdrDynamicMetadataInfo::input> *dynamicInfo);
/**
- * Set HDR metadata to Gralloc4 handle.
+ * Set metadata to Gralloc4 handle.
*
+ * \param[in] dataSpace Dataspace to set.
* \param[in] staticInfo HDR static info to set. Ignored if null or invalid.
* \param[in] dynamicInfo HDR dynamic info to set. Ignored if null or invalid.
* \param[out] handle handle of the allocation.
* \return C2_OK if successful
*/
-c2_status_t SetHdrMetadataToGralloc4Handle(
+c2_status_t SetMetadataToGralloc4Handle(
+ const android_dataspace_t dataSpace,
const std::shared_ptr<const C2StreamHdrStaticMetadataInfo::output> &staticInfo,
const std::shared_ptr<const C2StreamHdrDynamicMetadataInfo::output> &dynamicInfo,
const C2Handle *const handle);
diff --git a/media/codec2/sfplugin/Codec2InfoBuilder.cpp b/media/codec2/sfplugin/Codec2InfoBuilder.cpp
index 63bd64b..3f9a40d 100644
--- a/media/codec2/sfplugin/Codec2InfoBuilder.cpp
+++ b/media/codec2/sfplugin/Codec2InfoBuilder.cpp
@@ -33,6 +33,7 @@
#include <OMX_Video.h>
#include <OMX_VideoExt.h>
#include <OMX_AsString.h>
+#include <SurfaceFlingerProperties.sysprop.h>
#include <android/hardware/media/omx/1.0/IOmx.h>
#include <android/hardware/media/omx/1.0/IOmxObserver.h>
@@ -136,7 +137,9 @@
continue;
}
switch (type.coreIndex()) {
- case C2StreamHdr10PlusInfo::CORE_INDEX:
+ case C2StreamHdrDynamicMetadataInfo::CORE_INDEX:
+ [[fallthrough]];
+ case C2StreamHdr10PlusInfo::CORE_INDEX: // will be deprecated
supportsHdr10Plus = true;
break;
case C2StreamHdrStaticInfo::CORE_INDEX:
@@ -148,14 +151,21 @@
}
}
- // For VP9/AV1, the static info is always propagated by framework.
+ // VP9 does not support HDR metadata in the bitstream and static metadata
+ // can always be carried by the framework. (The framework does not propagate
+ // dynamic metadata as that needs to be frame accurate.)
supportsHdr |= (mediaType == MIMETYPE_VIDEO_VP9);
- supportsHdr |= (mediaType == MIMETYPE_VIDEO_AV1);
// HDR support implies 10-bit support.
// TODO: directly check this from the component interface
supports10Bit = (supportsHdr || supportsHdr10Plus);
+ // If the device doesn't support HDR display, then no codec on the device
+ // can advertise support for HDR profiles.
+ // Default to true to maintain backward compatibility
+ auto ret = sysprop::SurfaceFlingerProperties::has_HDR_display();
+ bool hasHDRDisplay = ret.has_value() ? *ret : true;
+
bool added = false;
for (C2Value::Primitive profile : profileQuery[0].values.values) {
@@ -181,8 +191,8 @@
if (mapper && mapper->mapProfile(pl.profile, &sdkProfile)
&& mapper->mapLevel(pl.level, &sdkLevel)) {
caps->addProfileLevel((uint32_t)sdkProfile, (uint32_t)sdkLevel);
- // also list HDR profiles if component supports HDR
- if (supportsHdr) {
+ // also list HDR profiles if component supports HDR and device has HDR display
+ if (supportsHdr && hasHDRDisplay) {
auto hdrMapper = C2Mapper::GetHdrProfileLevelMapper(trait.mediaType);
if (hdrMapper && hdrMapper->mapProfile(pl.profile, &sdkProfile)) {
caps->addProfileLevel((uint32_t)sdkProfile, (uint32_t)sdkLevel);
@@ -281,6 +291,11 @@
}
};
+ // The color format is ordered by preference. The intention here is to advertise:
+ // c2.android.* codecs: YUV420s, Surface, <the rest>
+ // all other codecs: Surface, YUV420s, <the rest>
+ // TODO: get this preference via Codec2 API
+
// vendor video codecs prefer opaque format
if (trait.name.find("android") == std::string::npos) {
addDefaultColorFormat(COLOR_FormatSurface);
@@ -290,9 +305,8 @@
addDefaultColorFormat(COLOR_FormatYUV420SemiPlanar);
addDefaultColorFormat(COLOR_FormatYUV420PackedPlanar);
addDefaultColorFormat(COLOR_FormatYUV420PackedSemiPlanar);
- // framework video encoders must support surface format, though it is unclear
- // that they will be able to map it if it is opaque
- if (encoder && trait.name.find("android") != std::string::npos) {
+ // Android video codecs prefer CPU-readable formats
+ if (trait.name.find("android") != std::string::npos) {
addDefaultColorFormat(COLOR_FormatSurface);
}
for (int32_t colorFormat : supportedColorFormats) {
diff --git a/media/codec2/sfplugin/tests/CCodecBuffers_test.cpp b/media/codec2/sfplugin/tests/CCodecBuffers_test.cpp
index 41e4fff..a471291 100644
--- a/media/codec2/sfplugin/tests/CCodecBuffers_test.cpp
+++ b/media/codec2/sfplugin/tests/CCodecBuffers_test.cpp
@@ -861,4 +861,57 @@
+ std::to_string(std::get<3>(info.param));
});
+TEST(LinearOutputBuffersTest, PcmConvertFormat) {
+ // Prepare LinearOutputBuffers
+ std::shared_ptr<LinearOutputBuffers> buffers =
+ std::make_shared<LinearOutputBuffers>("test");
+ sp<AMessage> format{new AMessage};
+ format->setInt32(KEY_CHANNEL_COUNT, 1);
+ format->setInt32(KEY_SAMPLE_RATE, 8000);
+ format->setInt32(KEY_PCM_ENCODING, kAudioEncodingPcmFloat);
+ format->setInt32("android._config-pcm-encoding", kAudioEncodingPcm16bit);
+ format->setInt32("android._codec-pcm-encoding", kAudioEncodingPcmFloat);
+ buffers->setFormat(format);
+
+ // Prepare a linear C2Buffer
+ std::shared_ptr<C2BlockPool> pool;
+ ASSERT_EQ(OK, GetCodec2BlockPool(C2BlockPool::BASIC_LINEAR, nullptr, &pool));
+
+ std::shared_ptr<C2LinearBlock> block;
+ ASSERT_EQ(OK, pool->fetchLinearBlock(
+ 1024, C2MemoryUsage{C2MemoryUsage::CPU_READ, C2MemoryUsage::CPU_WRITE}, &block));
+ std::shared_ptr<C2Buffer> c2Buffer =
+ C2Buffer::CreateLinearBuffer(block->share(0, 1024, C2Fence()));
+
+ // Test regular buffer convert
+ size_t index;
+ sp<MediaCodecBuffer> clientBuffer;
+ ASSERT_EQ(OK, buffers->registerBuffer(c2Buffer, &index, &clientBuffer));
+ int32_t pcmEncoding = 0;
+ ASSERT_TRUE(clientBuffer->format()->findInt32(KEY_PCM_ENCODING, &pcmEncoding));
+ EXPECT_EQ(kAudioEncodingPcm16bit, pcmEncoding);
+ ASSERT_TRUE(buffers->releaseBuffer(clientBuffer, &c2Buffer));
+
+ // Test null buffer convert
+ ASSERT_EQ(OK, buffers->registerBuffer(nullptr, &index, &clientBuffer));
+ ASSERT_TRUE(clientBuffer->format()->findInt32(KEY_PCM_ENCODING, &pcmEncoding));
+ EXPECT_EQ(kAudioEncodingPcm16bit, pcmEncoding);
+ ASSERT_TRUE(buffers->releaseBuffer(clientBuffer, &c2Buffer));
+
+ // Do the same test in the array mode
+ std::shared_ptr<OutputBuffersArray> array = buffers->toArrayMode(8);
+
+ // Test regular buffer convert
+ ASSERT_EQ(OK, buffers->registerBuffer(c2Buffer, &index, &clientBuffer));
+ ASSERT_TRUE(clientBuffer->format()->findInt32(KEY_PCM_ENCODING, &pcmEncoding));
+ EXPECT_EQ(kAudioEncodingPcm16bit, pcmEncoding);
+ ASSERT_TRUE(buffers->releaseBuffer(clientBuffer, &c2Buffer));
+
+ // Test null buffer convert
+ ASSERT_EQ(OK, buffers->registerBuffer(nullptr, &index, &clientBuffer));
+ ASSERT_TRUE(clientBuffer->format()->findInt32(KEY_PCM_ENCODING, &pcmEncoding));
+ EXPECT_EQ(kAudioEncodingPcm16bit, pcmEncoding);
+ ASSERT_TRUE(buffers->releaseBuffer(clientBuffer, &c2Buffer));
+}
+
} // namespace android
diff --git a/media/codec2/sfplugin/utils/Android.bp b/media/codec2/sfplugin/utils/Android.bp
index 2f4d6b1..5c2f110 100644
--- a/media/codec2/sfplugin/utils/Android.bp
+++ b/media/codec2/sfplugin/utils/Android.bp
@@ -15,6 +15,7 @@
srcs: [
"Codec2BufferUtils.cpp",
+ "Codec2CommonUtils.cpp",
"Codec2Mapper.cpp",
],
diff --git a/media/codec2/sfplugin/utils/Codec2BufferUtils.cpp b/media/codec2/sfplugin/utils/Codec2BufferUtils.cpp
index bff9db5..7fc4c27 100644
--- a/media/codec2/sfplugin/utils/Codec2BufferUtils.cpp
+++ b/media/codec2/sfplugin/utils/Codec2BufferUtils.cpp
@@ -16,7 +16,9 @@
//#define LOG_NDEBUG 0
#define LOG_TAG "Codec2BufferUtils"
+#define ATRACE_TAG ATRACE_TAG_VIDEO
#include <utils/Log.h>
+#include <utils/Trace.h>
#include <libyuv.h>
@@ -36,8 +38,8 @@
namespace {
/**
- * A flippable, optimizable memcpy. Constructs such as (from ? src : dst) do not work as the results are
- * always const.
+ * A flippable, optimizable memcpy. Constructs such as (from ? src : dst)
+ * do not work as the results are always const.
*/
template<bool ToA, size_t S>
struct MemCopier {
@@ -139,15 +141,18 @@
if (IsNV12(view)) {
if (IsNV12(img)) {
+ ScopedTrace trace(ATRACE_TAG, "ImageCopy: NV12->NV12");
libyuv::CopyPlane(src_y, src_stride_y, dst_y, dst_stride_y, width, height);
libyuv::CopyPlane(src_u, src_stride_u, dst_u, dst_stride_u, width, height / 2);
return OK;
} else if (IsNV21(img)) {
+ ScopedTrace trace(ATRACE_TAG, "ImageCopy: NV12->NV21");
if (!libyuv::NV21ToNV12(src_y, src_stride_y, src_u, src_stride_u,
dst_y, dst_stride_y, dst_v, dst_stride_v, width, height)) {
return OK;
}
} else if (IsI420(img)) {
+ ScopedTrace trace(ATRACE_TAG, "ImageCopy: NV12->I420");
if (!libyuv::NV12ToI420(src_y, src_stride_y, src_u, src_stride_u, dst_y, dst_stride_y,
dst_u, dst_stride_u, dst_v, dst_stride_v, width, height)) {
return OK;
@@ -155,15 +160,18 @@
}
} else if (IsNV21(view)) {
if (IsNV12(img)) {
+ ScopedTrace trace(ATRACE_TAG, "ImageCopy: NV21->NV12");
if (!libyuv::NV21ToNV12(src_y, src_stride_y, src_v, src_stride_v,
dst_y, dst_stride_y, dst_u, dst_stride_u, width, height)) {
return OK;
}
} else if (IsNV21(img)) {
+ ScopedTrace trace(ATRACE_TAG, "ImageCopy: NV21->NV21");
libyuv::CopyPlane(src_y, src_stride_y, dst_y, dst_stride_y, width, height);
libyuv::CopyPlane(src_v, src_stride_v, dst_v, dst_stride_v, width, height / 2);
return OK;
} else if (IsI420(img)) {
+ ScopedTrace trace(ATRACE_TAG, "ImageCopy: NV21->I420");
if (!libyuv::NV21ToI420(src_y, src_stride_y, src_v, src_stride_v, dst_y, dst_stride_y,
dst_u, dst_stride_u, dst_v, dst_stride_v, width, height)) {
return OK;
@@ -171,22 +179,26 @@
}
} else if (IsI420(view)) {
if (IsNV12(img)) {
+ ScopedTrace trace(ATRACE_TAG, "ImageCopy: I420->NV12");
if (!libyuv::I420ToNV12(src_y, src_stride_y, src_u, src_stride_u, src_v, src_stride_v,
dst_y, dst_stride_y, dst_u, dst_stride_u, width, height)) {
return OK;
}
} else if (IsNV21(img)) {
+ ScopedTrace trace(ATRACE_TAG, "ImageCopy: I420->NV21");
if (!libyuv::I420ToNV21(src_y, src_stride_y, src_u, src_stride_u, src_v, src_stride_v,
dst_y, dst_stride_y, dst_v, dst_stride_v, width, height)) {
return OK;
}
} else if (IsI420(img)) {
+ ScopedTrace trace(ATRACE_TAG, "ImageCopy: I420->I420");
libyuv::CopyPlane(src_y, src_stride_y, dst_y, dst_stride_y, width, height);
libyuv::CopyPlane(src_u, src_stride_u, dst_u, dst_stride_u, width / 2, height / 2);
libyuv::CopyPlane(src_v, src_stride_v, dst_v, dst_stride_v, width / 2, height / 2);
return OK;
}
}
+ ScopedTrace trace(ATRACE_TAG, "ImageCopy: generic");
return _ImageCopy<true>(view, img, imgBase);
}
@@ -210,15 +222,18 @@
int height = view.crop().height;
if (IsNV12(img)) {
if (IsNV12(view)) {
+ ScopedTrace trace(ATRACE_TAG, "ImageCopy: NV12->NV12");
libyuv::CopyPlane(src_y, src_stride_y, dst_y, dst_stride_y, width, height);
libyuv::CopyPlane(src_u, src_stride_u, dst_u, dst_stride_u, width, height / 2);
return OK;
} else if (IsNV21(view)) {
+ ScopedTrace trace(ATRACE_TAG, "ImageCopy: NV12->NV21");
if (!libyuv::NV21ToNV12(src_y, src_stride_y, src_u, src_stride_u,
dst_y, dst_stride_y, dst_v, dst_stride_v, width, height)) {
return OK;
}
} else if (IsI420(view)) {
+ ScopedTrace trace(ATRACE_TAG, "ImageCopy: NV12->I420");
if (!libyuv::NV12ToI420(src_y, src_stride_y, src_u, src_stride_u, dst_y, dst_stride_y,
dst_u, dst_stride_u, dst_v, dst_stride_v, width, height)) {
return OK;
@@ -226,15 +241,18 @@
}
} else if (IsNV21(img)) {
if (IsNV12(view)) {
+ ScopedTrace trace(ATRACE_TAG, "ImageCopy: NV21->NV12");
if (!libyuv::NV21ToNV12(src_y, src_stride_y, src_v, src_stride_v,
dst_y, dst_stride_y, dst_u, dst_stride_u, width, height)) {
return OK;
}
} else if (IsNV21(view)) {
+ ScopedTrace trace(ATRACE_TAG, "ImageCopy: NV21->NV21");
libyuv::CopyPlane(src_y, src_stride_y, dst_y, dst_stride_y, width, height);
libyuv::CopyPlane(src_v, src_stride_v, dst_v, dst_stride_v, width, height / 2);
return OK;
} else if (IsI420(view)) {
+ ScopedTrace trace(ATRACE_TAG, "ImageCopy: NV21->I420");
if (!libyuv::NV21ToI420(src_y, src_stride_y, src_v, src_stride_v, dst_y, dst_stride_y,
dst_u, dst_stride_u, dst_v, dst_stride_v, width, height)) {
return OK;
@@ -242,22 +260,26 @@
}
} else if (IsI420(img)) {
if (IsNV12(view)) {
+ ScopedTrace trace(ATRACE_TAG, "ImageCopy: I420->NV12");
if (!libyuv::I420ToNV12(src_y, src_stride_y, src_u, src_stride_u, src_v, src_stride_v,
dst_y, dst_stride_y, dst_u, dst_stride_u, width, height)) {
return OK;
}
} else if (IsNV21(view)) {
+ ScopedTrace trace(ATRACE_TAG, "ImageCopy: I420->NV21");
if (!libyuv::I420ToNV21(src_y, src_stride_y, src_u, src_stride_u, src_v, src_stride_v,
dst_y, dst_stride_y, dst_v, dst_stride_v, width, height)) {
return OK;
}
} else if (IsI420(view)) {
+ ScopedTrace trace(ATRACE_TAG, "ImageCopy: I420->I420");
libyuv::CopyPlane(src_y, src_stride_y, dst_y, dst_stride_y, width, height);
libyuv::CopyPlane(src_u, src_stride_u, dst_u, dst_stride_u, width / 2, height / 2);
libyuv::CopyPlane(src_v, src_stride_v, dst_v, dst_stride_v, width / 2, height / 2);
return OK;
}
}
+ ScopedTrace trace(ATRACE_TAG, "ImageCopy: generic");
return _ImageCopy<false>(view, img, imgBase);
}
diff --git a/media/codec2/sfplugin/utils/Codec2CommonUtils.cpp b/media/codec2/sfplugin/utils/Codec2CommonUtils.cpp
new file mode 100644
index 0000000..ef5800d
--- /dev/null
+++ b/media/codec2/sfplugin/utils/Codec2CommonUtils.cpp
@@ -0,0 +1,91 @@
+/*
+ * Copyright 2018, The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "Codec2BufferUtils"
+#define ATRACE_TAG ATRACE_TAG_VIDEO
+#include <utils/Log.h>
+
+#include <android/hardware_buffer.h>
+#include <android-base/properties.h>
+#include <cutils/properties.h>
+#include <media/hardware/HardwareAPI.h>
+#include <system/graphics.h>
+
+#include <C2Debug.h>
+
+#include "Codec2CommonUtils.h"
+
+namespace android {
+
+bool isAtLeastT() {
+ char deviceCodeName[PROP_VALUE_MAX];
+ __system_property_get("ro.build.version.codename", deviceCodeName);
+ return android_get_device_api_level() >= __ANDROID_API_T__ ||
+ !strcmp(deviceCodeName, "Tiramisu");
+}
+
+bool isVendorApiOrFirstApiAtLeastT() {
+ // The first SDK the device shipped with.
+ static const int32_t kProductFirstApiLevel =
+ base::GetIntProperty<int32_t>("ro.product.first_api_level", 0);
+
+ // GRF devices (introduced in Android 11) list the first and possibly the current api levels
+ // to signal which VSR requirements they conform to even if the first device SDK was higher.
+ static const int32_t kBoardFirstApiLevel =
+ base::GetIntProperty<int32_t>("ro.board.first_api_level", 0);
+ static const int32_t kBoardApiLevel =
+ base::GetIntProperty<int32_t>("ro.board.api_level", 0);
+
+ // For non-GRF devices, use the first SDK version of the product.
+ static const int32_t kFirstApiLevel =
+ kBoardApiLevel != 0 ? kBoardApiLevel :
+ kBoardFirstApiLevel != 0 ? kBoardFirstApiLevel :
+ kProductFirstApiLevel;
+
+ return kFirstApiLevel >= __ANDROID_API_T__;
+}
+
+bool isHalPixelFormatSupported(AHardwareBuffer_Format format) {
+ // HAL_PIXEL_FORMAT_YCBCR_P010 requirement was added in T VSR, although it could have been
+ // supported prior to this.
+ //
+ // Unfortunately, we cannot detect if P010 is properly supported using AHardwareBuffer
+ // API alone. For now limit P010 to devices that launched with Android T or known to conform
+ // to Android T VSR (as opposed to simply limiting to a T vendor image).
+ if (format == (AHardwareBuffer_Format)HAL_PIXEL_FORMAT_YCBCR_P010 &&
+ !isVendorApiOrFirstApiAtLeastT()) {
+ return false;
+ }
+
+ const AHardwareBuffer_Desc desc = {
+ .width = 320,
+ .height = 240,
+ .format = format,
+ .layers = 1,
+ .usage = AHARDWAREBUFFER_USAGE_CPU_READ_RARELY |
+ AHARDWAREBUFFER_USAGE_CPU_WRITE_OFTEN |
+ AHARDWAREBUFFER_USAGE_GPU_SAMPLED_IMAGE |
+ AHARDWAREBUFFER_USAGE_COMPOSER_OVERLAY,
+ .stride = 0,
+ .rfu0 = 0,
+ .rfu1 = 0,
+ };
+
+ return AHardwareBuffer_isSupported(&desc);
+}
+
+} // namespace android
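A hedged usage sketch of isHalPixelFormatSupported; the surrounding function and the YV12 fallback are illustrative assumptions, not taken from this change:

    // Sketch: pick an output pixel format for a 10-bit decode, falling back to 8-bit.
    uint32_t ChooseOutputPixelFormat(bool is10Bit) {
        if (is10Bit && isHalPixelFormatSupported(
                static_cast<AHardwareBuffer_Format>(HAL_PIXEL_FORMAT_YCBCR_P010))) {
            return HAL_PIXEL_FORMAT_YCBCR_P010;
        }
        return HAL_PIXEL_FORMAT_YV12;  // widely supported 8-bit fallback
    }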
diff --git a/media/codec2/sfplugin/utils/Codec2CommonUtils.h b/media/codec2/sfplugin/utils/Codec2CommonUtils.h
new file mode 100644
index 0000000..98dd65b
--- /dev/null
+++ b/media/codec2/sfplugin/utils/Codec2CommonUtils.h
@@ -0,0 +1,38 @@
+/*
+ * Copyright 2022, The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef CODEC2_COMMON_UTILS_H_
+#define CODEC2_COMMON_UTILS_H_
+
+#include <android/hardware_buffer.h>
+
+namespace android {
+
+bool isAtLeastT();
+
+bool isVendorApiOrFirstApiAtLeastT();
+
+/**
+ * Check if a given pixel format is supported.
+ * enums listed in android_pixel_format_t, android_pixel_format_v1_1_t
+ * and so on can be passed as these enums have an equivalent definition in
+ * AHardwareBuffer_Format as well.
+ */
+bool isHalPixelFormatSupported(AHardwareBuffer_Format format);
+
+} // namespace android
+
+#endif // CODEC2_COMMON_UTILS_H_
diff --git a/media/codec2/sfplugin/utils/Codec2Mapper.cpp b/media/codec2/sfplugin/utils/Codec2Mapper.cpp
index 93f29ca..c606d6f 100644
--- a/media/codec2/sfplugin/utils/Codec2Mapper.cpp
+++ b/media/codec2/sfplugin/utils/Codec2Mapper.cpp
@@ -276,6 +276,13 @@
{ C2Config::PROFILE_HEVC_MAIN_10, HEVCProfileMain10HDR10Plus },
};
+ALookup<C2Config::hdr_format_t, int32_t> sHevcHdrFormats = {
+ { C2Config::hdr_format_t::SDR, HEVCProfileMain },
+ { C2Config::hdr_format_t::HLG, HEVCProfileMain10 },
+ { C2Config::hdr_format_t::HDR10, HEVCProfileMain10HDR10 },
+ { C2Config::hdr_format_t::HDR10_PLUS, HEVCProfileMain10HDR10Plus },
+};
+
ALookup<C2Config::level_t, int32_t> sMpeg2Levels = {
{ C2Config::LEVEL_MP2V_LOW, MPEG2LevelLL },
{ C2Config::LEVEL_MP2V_MAIN, MPEG2LevelML },
@@ -365,6 +372,17 @@
{ C2Config::PROFILE_VP9_3, VP9Profile3HDR10Plus },
};
+ALookup<C2Config::hdr_format_t, int32_t> sVp9HdrFormats = {
+ { C2Config::hdr_format_t::SDR, VP9Profile0 },
+ { C2Config::hdr_format_t::SDR, VP9Profile1 },
+ { C2Config::hdr_format_t::HLG, VP9Profile2 },
+ { C2Config::hdr_format_t::HLG, VP9Profile3 },
+ { C2Config::hdr_format_t::HDR10, VP9Profile2HDR },
+ { C2Config::hdr_format_t::HDR10, VP9Profile3HDR },
+ { C2Config::hdr_format_t::HDR10_PLUS, VP9Profile2HDR10Plus },
+ { C2Config::hdr_format_t::HDR10_PLUS, VP9Profile3HDR10Plus },
+};
+
ALookup<C2Config::level_t, int32_t> sAv1Levels = {
{ C2Config::LEVEL_AV1_2, AV1Level2 },
{ C2Config::LEVEL_AV1_2_1, AV1Level21 },
@@ -411,6 +429,13 @@
{ C2Config::PROFILE_AV1_0, AV1ProfileMain10HDR10Plus },
};
+ALookup<C2Config::hdr_format_t, int32_t> sAv1HdrFormats = {
+ { C2Config::hdr_format_t::SDR, AV1ProfileMain8 },
+ { C2Config::hdr_format_t::HLG, AV1ProfileMain10 },
+ { C2Config::hdr_format_t::HDR10, AV1ProfileMain10HDR10 },
+ { C2Config::hdr_format_t::HDR10_PLUS, AV1ProfileMain10HDR10Plus },
+};
+
// HAL_PIXEL_FORMAT_* -> COLOR_Format*
ALookup<uint32_t, int32_t> sPixelFormats = {
{ HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED, COLOR_FormatSurface },
@@ -435,6 +460,13 @@
{ HAL_PIXEL_FORMAT_RGBA_FP16, COLOR_Format64bitABGRFloat },
};
+ALookup<C2Config::picture_type_t, int32_t> sPictureType = {
+ { C2Config::picture_type_t::SYNC_FRAME, PICTURE_TYPE_I },
+ { C2Config::picture_type_t::I_FRAME, PICTURE_TYPE_I },
+ { C2Config::picture_type_t::P_FRAME, PICTURE_TYPE_P },
+ { C2Config::picture_type_t::B_FRAME, PICTURE_TYPE_B },
+};
+
/**
* A helper that passes through vendor extension profile and level values.
*/
@@ -487,6 +519,10 @@
virtual bool simpleMap(int32_t from, C2Config::profile_t *to) {
return sAacProfiles.map(from, to);
}
+ // AAC does not have HDR format
+ virtual bool mapHdrFormat(int32_t, C2Config::hdr_format_t*) override {
+ return false;
+ }
};
struct AvcProfileLevelMapper : ProfileLevelMapperHelper {
@@ -517,6 +553,12 @@
virtual bool simpleMap(int32_t from, C2Config::profile_t *to) {
return sDolbyVisionProfiles.map(from, to);
}
+ // Dolby Vision is always HDR and the profile is fully expressive, so use the
+ // unknown HDR format
+ virtual bool mapHdrFormat(int32_t, C2Config::hdr_format_t *to) override {
+ *to = C2Config::hdr_format_t::UNKNOWN;
+ return true;
+ }
};
struct H263ProfileLevelMapper : ProfileLevelMapperHelper {
@@ -555,6 +597,9 @@
mIsHdr ? sHevcHdrProfiles.map(from, to) :
sHevcProfiles.map(from, to);
}
+ virtual bool mapHdrFormat(int32_t from, C2Config::hdr_format_t *to) override {
+ return sHevcHdrFormats.map(from, to);
+ }
private:
bool mIsHdr;
@@ -633,6 +678,9 @@
mIsHdr ? sVp9HdrProfiles.map(from, to) :
sVp9Profiles.map(from, to);
}
+ virtual bool mapHdrFormat(int32_t from, C2Config::hdr_format_t *to) override {
+ return sVp9HdrFormats.map(from, to);
+ }
private:
bool mIsHdr;
@@ -662,6 +710,9 @@
mIsHdr ? sAv1HdrProfiles.map(from, to) :
sAv1Profiles.map(from, to);
}
+ virtual bool mapHdrFormat(int32_t from, C2Config::hdr_format_t *to) override {
+ return sAv1HdrFormats.map(from, to);
+ }
private:
bool mIsHdr;
@@ -671,6 +722,13 @@
} // namespace
+// the default mapper is used for media types that do not support HDR
+bool C2Mapper::ProfileLevelMapper::mapHdrFormat(int32_t, C2Config::hdr_format_t *to) {
+ // by default map all (including vendor) profiles to SDR
+ *to = C2Config::hdr_format_t::SDR;
+ return true;
+}
+
// static
std::shared_ptr<C2Mapper::ProfileLevelMapper>
C2Mapper::GetProfileLevelMapper(std::string mediaType) {
@@ -1024,3 +1082,13 @@
}
return true;
}
+
+// static
+bool C2Mapper::map(C2Config::picture_type_t from, int32_t *to) {
+ return sPictureType.map(from, to);
+}
+
+// static
+bool C2Mapper::map(int32_t from, C2Config::picture_type_t *to) {
+ return sPictureType.map(from, to);
+}
diff --git a/media/codec2/sfplugin/utils/Codec2Mapper.h b/media/codec2/sfplugin/utils/Codec2Mapper.h
index 33d305e..c8e9e13 100644
--- a/media/codec2/sfplugin/utils/Codec2Mapper.h
+++ b/media/codec2/sfplugin/utils/Codec2Mapper.h
@@ -34,6 +34,16 @@
virtual bool mapProfile(int32_t, C2Config::profile_t*) = 0;
virtual bool mapLevel(C2Config::level_t, int32_t*) = 0;
virtual bool mapLevel(int32_t, C2Config::level_t*) = 0;
+
+ /**
+ * Mapper method that maps a MediaCodec profile to the supported
+ * HDR format for that profile. Since 10-bit profiles are used for
+ * HLG, this method will return HLG for all 10-bit profiles, but
+ * the caller should also verify that the transfer function is
+ * indeed HLG.
+ */
+ // not an abstract method as we have a default implementation for SDR
+ virtual bool mapHdrFormat(int32_t, C2Config::hdr_format_t *hdr);
virtual ~ProfileLevelMapper() = default;
};
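As the comment above notes, mapHdrFormat reports HLG for every 10-bit profile, so callers still have to check the transfer function. A minimal sketch under that assumption (transfer is a caller-provided value, and the HEVC constants are only an example):

    std::shared_ptr<C2Mapper::ProfileLevelMapper> mapper =
            C2Mapper::GetProfileLevelMapper(MIMETYPE_VIDEO_HEVC);
    C2Config::hdr_format_t hdrFormat = C2Config::hdr_format_t::UNKNOWN;
    if (mapper && mapper->mapHdrFormat(HEVCProfileMain10, &hdrFormat)) {
        if (hdrFormat == C2Config::hdr_format_t::HLG && transfer != COLOR_TRANSFER_HLG) {
            hdrFormat = C2Config::hdr_format_t::UNKNOWN;  // 10-bit, but not actually HLG
        }
    }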
diff --git a/media/codec2/tests/Android.bp b/media/codec2/tests/Android.bp
index b858fa5..68db7b2 100644
--- a/media/codec2/tests/Android.bp
+++ b/media/codec2/tests/Android.bp
@@ -39,6 +39,8 @@
cc_test {
name: "codec2_vndk_test",
test_suites: ["device-tests"],
+ // This test doesn't seem to support isolated execution with the current assumptions
+ isolated: false,
srcs: [
"C2_test.cpp",
diff --git a/media/codec2/vndk/C2AllocatorGralloc.cpp b/media/codec2/vndk/C2AllocatorGralloc.cpp
index bc4053d..f272499 100644
--- a/media/codec2/vndk/C2AllocatorGralloc.cpp
+++ b/media/codec2/vndk/C2AllocatorGralloc.cpp
@@ -54,6 +54,10 @@
static_assert((~C2MemoryUsage::PLATFORM_MASK & PASSTHROUGH_USAGE_MASK) == 0, "");
} // unnamed
+static bool isAtLeastT() {
+ return android_get_device_api_level() >= __ANDROID_API_T__;
+}
+
C2MemoryUsage C2AndroidMemoryUsage::FromGrallocUsage(uint64_t usage) {
// gralloc does not support WRITE_PROTECTED
return C2MemoryUsage(
@@ -702,6 +706,14 @@
}
case static_cast<uint32_t>(PixelFormat4::YCBCR_P010): {
+ // In Android T, P010 is relaxed to allow arbitrary stride for the Y and UV planes,
+ // so try locking with the gralloc4 mapper first.
+ c2_status_t status = Gralloc4Mapper_lock(
+ const_cast<native_handle_t*>(mBuffer), grallocUsage, rect, layout, addr);
+ if (status == C2_OK) {
+ break;
+ }
+
void *pointer = nullptr;
status_t err = GraphicBufferMapper::get().lock(
const_cast<native_handle_t *>(mBuffer), grallocUsage, rect, &pointer);
@@ -760,10 +772,12 @@
default: {
// We don't know what it is, let's try to lock it with gralloc4
android_ycbcr ycbcrLayout;
- c2_status_t status = Gralloc4Mapper_lock(
- const_cast<native_handle_t*>(mBuffer), grallocUsage, rect, layout, addr);
- if (status == C2_OK) {
- break;
+ if (isAtLeastT()) {
+ c2_status_t status = Gralloc4Mapper_lock(
+ const_cast<native_handle_t*>(mBuffer), grallocUsage, rect, layout, addr);
+ if (status == C2_OK) {
+ break;
+ }
}
// fallback to lockYCbCr
diff --git a/media/codec2/vndk/C2AllocatorIon.cpp b/media/codec2/vndk/C2AllocatorIon.cpp
index 77b265a..a6a733e 100644
--- a/media/codec2/vndk/C2AllocatorIon.cpp
+++ b/media/codec2/vndk/C2AllocatorIon.cpp
@@ -31,6 +31,7 @@
#include <C2HandleIonInternal.h>
#include <android-base/properties.h>
+#include <media/stagefright/foundation/Mutexed.h>
namespace android {
@@ -180,7 +181,7 @@
c2_status_t map(size_t offset, size_t size, C2MemoryUsage usage, C2Fence *fence, void **addr) {
(void)fence; // TODO: wait for fence
*addr = nullptr;
- if (!mMappings.empty()) {
+ if (!mMappings.lock()->empty()) {
ALOGV("multiple map");
// TODO: technically we should return DUPLICATE here, but our block views don't
// actually unmap, so we end up remapping an ion buffer multiple times.
@@ -207,17 +208,18 @@
c2_status_t err = mapInternal(mapSize, mapOffset, alignmentBytes, prot, flags, &(map.addr), addr);
if (map.addr) {
- mMappings.push_back(map);
+ mMappings.lock()->push_back(map);
}
return err;
}
c2_status_t unmap(void *addr, size_t size, C2Fence *fence) {
- if (mMappings.empty()) {
+ Mutexed<std::list<Mapping>>::Locked mappings(mMappings);
+ if (mappings->empty()) {
ALOGD("tried to unmap unmapped buffer");
return C2_NOT_FOUND;
}
- for (auto it = mMappings.begin(); it != mMappings.end(); ++it) {
+ for (auto it = mappings->begin(); it != mappings->end(); ++it) {
if (addr != (uint8_t *)it->addr + it->alignmentBytes ||
size + it->alignmentBytes != it->size) {
continue;
@@ -230,8 +232,9 @@
if (fence) {
*fence = C2Fence(); // not using fences
}
- (void)mMappings.erase(it);
- ALOGV("successfully unmapped: addr=%p size=%zu fd=%d", addr, size, mHandle.bufferFd());
+ (void)mappings->erase(it);
+ ALOGV("successfully unmapped: addr=%p size=%zu fd=%d", addr, size,
+ mHandle.bufferFd());
return C2_OK;
}
ALOGD("unmap failed to find specified map");
@@ -239,9 +242,10 @@
}
virtual ~Impl() {
- if (!mMappings.empty()) {
+ Mutexed<std::list<Mapping>>::Locked mappings(mMappings);
+ if (!mappings->empty()) {
ALOGD("Dangling mappings!");
- for (const Mapping &map : mMappings) {
+ for (const Mapping &map : *mappings) {
(void)munmap(map.addr, map.size);
}
}
@@ -319,7 +323,7 @@
size_t alignmentBytes;
size_t size;
};
- std::list<Mapping> mMappings;
+ Mutexed<std::list<Mapping>> mMappings;
};
class C2AllocationIon::ImplV2 : public C2AllocationIon::Impl {
diff --git a/media/codec2/vndk/C2DmaBufAllocator.cpp b/media/codec2/vndk/C2DmaBufAllocator.cpp
index 1aa3d69..c470171 100644
--- a/media/codec2/vndk/C2DmaBufAllocator.cpp
+++ b/media/codec2/vndk/C2DmaBufAllocator.cpp
@@ -31,6 +31,7 @@
#include <list>
#include <android-base/properties.h>
+#include <media/stagefright/foundation/Mutexed.h>
namespace android {
@@ -161,7 +162,7 @@
size_t alignmentBytes;
size_t size;
};
- std::list<Mapping> mMappings;
+ Mutexed<std::list<Mapping>> mMappings;
// TODO: we could make this encapsulate shared_ptr and copiable
C2_DO_NOT_COPY(C2DmaBufAllocation);
@@ -171,7 +172,7 @@
void** addr) {
(void)fence; // TODO: wait for fence
*addr = nullptr;
- if (!mMappings.empty()) {
+ if (!mMappings.lock()->empty()) {
ALOGV("multiple map");
// TODO: technically we should return DUPLICATE here, but our block views
// don't actually unmap, so we end up remapping the buffer multiple times.
@@ -199,17 +200,18 @@
c2_status_t err =
mapInternal(mapSize, mapOffset, alignmentBytes, prot, flags, &(map.addr), addr);
if (map.addr) {
- mMappings.push_back(map);
+ mMappings.lock()->push_back(map);
}
return err;
}
c2_status_t C2DmaBufAllocation::unmap(void* addr, size_t size, C2Fence* fence) {
- if (mMappings.empty()) {
+ Mutexed<std::list<Mapping>>::Locked mappings(mMappings);
+ if (mappings->empty()) {
ALOGD("tried to unmap unmapped buffer");
return C2_NOT_FOUND;
}
- for (auto it = mMappings.begin(); it != mMappings.end(); ++it) {
+ for (auto it = mappings->begin(); it != mappings->end(); ++it) {
if (addr != (uint8_t*)it->addr + it->alignmentBytes ||
size + it->alignmentBytes != it->size) {
continue;
@@ -222,7 +224,7 @@
if (fence) {
*fence = C2Fence(); // not using fences
}
- (void)mMappings.erase(it);
+ (void)mappings->erase(it);
ALOGV("successfully unmapped: %d", mHandle.bufferFd());
return C2_OK;
}
@@ -253,9 +255,10 @@
}
C2DmaBufAllocation::~C2DmaBufAllocation() {
- if (!mMappings.empty()) {
+ Mutexed<std::list<Mapping>>::Locked mappings(mMappings);
+ if (!mappings->empty()) {
ALOGD("Dangling mappings!");
- for (const Mapping& map : mMappings) {
+ for (const Mapping& map : *mappings) {
int err = munmap(map.addr, map.size);
if (err) ALOGD("munmap failed");
}
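Both allocators now keep their mapping lists inside Mutexed so every access goes through a scoped lock. A minimal sketch of the idiom, assuming only the stagefright Mutexed<T> wrapper used above:

    #include <list>
    #include <media/stagefright/foundation/Mutexed.h>

    using android::Mutexed;

    Mutexed<std::list<int>> sValues;

    void add(int v) {
        sValues.lock()->push_back(v);   // temporary Locked proxy; unlocked at the ';'
    }

    bool drain() {
        Mutexed<std::list<int>>::Locked values(sValues);  // lock held until end of scope
        if (values->empty()) {
            return false;
        }
        values->clear();
        return true;
    }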
diff --git a/media/codec2/vndk/C2Fence.cpp b/media/codec2/vndk/C2Fence.cpp
index 9c5183e..c1fb956 100644
--- a/media/codec2/vndk/C2Fence.cpp
+++ b/media/codec2/vndk/C2Fence.cpp
@@ -16,13 +16,24 @@
//#define LOG_NDEBUG 0
#define LOG_TAG "C2FenceFactory"
+#include <cutils/native_handle.h>
#include <utils/Log.h>
+#include <ui/Fence.h>
#include <C2FenceFactory.h>
#include <C2SurfaceSyncObj.h>
+#define MAX_FENCE_FDS 1
+
class C2Fence::Impl {
public:
+ enum type_t : uint32_t {
+ INVALID_FENCE,
+ NULL_FENCE,
+ SURFACE_FENCE,
+ SYNC_FENCE,
+ };
+
virtual c2_status_t wait(c2_nsecs_t timeoutNs) = 0;
virtual bool valid() const = 0;
@@ -33,9 +44,26 @@
virtual bool isHW() const = 0;
+ virtual type_t type() const = 0;
+
+ /**
+ * Create a native handle for the fence so it can be marshalled.
+ * The native handle must store the fence type in the first integer that follows the fds.
+ *
+ * \return a valid native handle if the fence can be marshalled, otherwise return null.
+ */
+ virtual native_handle_t *createNativeHandle() const = 0;
+
virtual ~Impl() = default;
Impl() = default;
+
+ static type_t GetTypeFromNativeHandle(const native_handle_t* nh) {
+ if (nh && nh->numFds >= 0 && nh->numFds <= MAX_FENCE_FDS && nh->numInts > 0) {
+ return static_cast<type_t>(nh->data[nh->numFds]);
+ }
+ return INVALID_FENCE;
+ }
};
c2_status_t C2Fence::wait(c2_nsecs_t timeoutNs) {
@@ -115,6 +143,15 @@
return false;
}
+ virtual type_t type() const {
+ return SURFACE_FENCE;
+ }
+
+ virtual native_handle_t *createNativeHandle() const {
+ ALOG_ASSERT(false, "Cannot create native handle from surface fence");
+ return nullptr;
+ }
+
virtual ~SurfaceFenceImpl() {};
SurfaceFenceImpl(std::shared_ptr<C2SurfaceSyncMemory> syncMem, uint32_t waitId) :
@@ -143,3 +180,119 @@
}
return C2Fence();
}
+
+using namespace android;
+
+class _C2FenceFactory::SyncFenceImpl : public C2Fence::Impl {
+public:
+ virtual c2_status_t wait(c2_nsecs_t timeoutNs) {
+ int64_t timeoutMs = timeoutNs / 1000000;
+ if (timeoutMs > INT_MAX) {
+ timeoutMs = INT_MAX;
+ }
+
+ switch (mFence->wait((int)timeoutMs)) {
+ case NO_ERROR:
+ return C2_OK;
+ case -ETIME:
+ return C2_TIMED_OUT;
+ default:
+ return C2_CORRUPTED;
+ }
+ }
+
+ virtual bool valid() const {
+ return mFence->getStatus() != Fence::Status::Invalid;
+ }
+
+ virtual bool ready() const {
+ return mFence->getStatus() == Fence::Status::Signaled;
+ }
+
+ virtual int fd() const {
+ return mFence->dup();
+ }
+
+ virtual bool isHW() const {
+ return true;
+ }
+
+ virtual type_t type() const {
+ return SYNC_FENCE;
+ }
+
+ virtual native_handle_t *createNativeHandle() const {
+ native_handle_t* nh = native_handle_create(1, 1);
+ if (!nh) {
+ ALOGE("Failed to allocate native handle for sync fence");
+ return nullptr;
+ }
+ nh->data[0] = fd();
+ nh->data[1] = type();
+ return nh;
+ }
+
+ virtual ~SyncFenceImpl() {};
+
+ SyncFenceImpl(int fenceFd) :
+ mFence(sp<Fence>::make(fenceFd)) {}
+
+ static std::shared_ptr<SyncFenceImpl> CreateFromNativeHandle(const native_handle_t* nh) {
+ if (!nh || nh->numFds != 1 || nh->numInts != 1) {
+ ALOGE("Invalid handle for sync fence");
+ return nullptr;
+ }
+ int fd = dup(nh->data[0]);
+ std::shared_ptr<SyncFenceImpl> p = std::make_shared<SyncFenceImpl>(fd);
+ if (!p) {
+ ALOGE("Failed to allocate sync fence impl");
+ close(fd);
+ }
+ return p;
+ }
+
+private:
+ const sp<Fence> mFence;
+};
+
+C2Fence _C2FenceFactory::CreateSyncFence(int fenceFd) {
+ std::shared_ptr<C2Fence::Impl> p;
+ if (fenceFd >= 0) {
+ p = std::make_shared<_C2FenceFactory::SyncFenceImpl>(fenceFd);
+ if (!p) {
+ ALOGE("Failed to allocate sync fence impl");
+ close(fenceFd);
+ }
+ if (!p->valid()) {
+ p.reset();
+ }
+ } else {
+ ALOGE("Create sync fence from invalid fd");
+ }
+ return C2Fence(p);
+}
+
+native_handle_t* _C2FenceFactory::CreateNativeHandle(const C2Fence& fence) {
+ return fence.mImpl? fence.mImpl->createNativeHandle() : nullptr;
+}
+
+C2Fence _C2FenceFactory::CreateFromNativeHandle(const native_handle_t* handle) {
+ if (!handle) {
+ return C2Fence();
+ }
+ C2Fence::Impl::type_t type = C2Fence::Impl::GetTypeFromNativeHandle(handle);
+ std::shared_ptr<C2Fence::Impl> p;
+ switch (type) {
+ case C2Fence::Impl::SYNC_FENCE:
+ p = SyncFenceImpl::CreateFromNativeHandle(handle);
+ break;
+ default:
+ ALOGW("Unsupported fence type %d", type);
+ break;
+ }
+ if (p && !p->valid()) {
+ p.reset();
+ }
+ return C2Fence(p);
+}
+
diff --git a/media/codec2/vndk/include/C2BqBufferPriv.h b/media/codec2/vndk/include/C2BqBufferPriv.h
index b2636e9..bec978a 100644
--- a/media/codec2/vndk/include/C2BqBufferPriv.h
+++ b/media/codec2/vndk/include/C2BqBufferPriv.h
@@ -101,6 +101,8 @@
uint32_t generationId,
uint64_t consumerUsage);
+ virtual void getConsumerUsage(uint64_t *consumerUsage);
+
private:
const std::shared_ptr<C2Allocator> mAllocator;
const local_id_t mLocalId;
@@ -138,7 +140,6 @@
uint32_t toGeneration, uint64_t toUsage, uint64_t toBqId,
android::sp<android::GraphicBuffer>& graphicBuffer, uint32_t oldGeneration,
std::shared_ptr<C2SurfaceSyncMemory> syncMem);
-
private:
friend struct _C2BlockFactory;
diff --git a/media/codec2/vndk/include/C2FenceFactory.h b/media/codec2/vndk/include/C2FenceFactory.h
index d4bed26..4944115 100644
--- a/media/codec2/vndk/include/C2FenceFactory.h
+++ b/media/codec2/vndk/include/C2FenceFactory.h
@@ -28,6 +28,7 @@
struct _C2FenceFactory {
class SurfaceFenceImpl;
+ class SyncFenceImpl;
/*
* Create C2Fence for BufferQueueBased blockpool.
@@ -38,6 +39,30 @@
static C2Fence CreateSurfaceFence(
std::shared_ptr<C2SurfaceSyncMemory> syncMem,
uint32_t waitId);
+
+ /*
+ * Create C2Fence from a fence file fd.
+ *
+ * \param fenceFd Fence file descriptor.
+ * It will be owned and closed by the returned fence object.
+ */
+ static C2Fence CreateSyncFence(int fenceFd);
+
+ /**
+ * Create a native handle from a fence for marshalling.
+ *
+ * \return a non-null pointer if the fence can be marshalled, otherwise nullptr.
+ */
+ static native_handle_t* CreateNativeHandle(const C2Fence& fence);
+
+ /*
+ * Create C2Fence from a native handle.
+ *
+ * \param handle A native handle representing a fence.
+ * The fd in the native handle will be duplicated, so the caller still
+ * owns the handle and must close it.
+ */
+ static C2Fence CreateFromNativeHandle(const native_handle_t* handle);
};
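Taken together, these factory methods let a sync fence cross a process boundary. A hedged round-trip sketch following the ownership rules in the comments above (error handling elided; receivedHandle is a hypothetical unmarshalled handle on the peer side):

    // Sending side: wrap an fd and marshal it.
    C2Fence fence = _C2FenceFactory::CreateSyncFence(fenceFd);         // takes ownership of fenceFd
    native_handle_t *nh = _C2FenceFactory::CreateNativeHandle(fence);  // holds a dup of the fd
    // ... transfer nh (e.g. over binder) ...
    native_handle_close(nh);     // sender still owns the handle and its fd
    native_handle_delete(nh);

    // Receiving side: rebuild the fence from the unmarshalled handle.
    C2Fence remote = _C2FenceFactory::CreateFromNativeHandle(receivedHandle);  // dups the fd
    // receivedHandle remains owned by the receiver and must be closed separately.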
diff --git a/media/codec2/vndk/platform/C2BqBuffer.cpp b/media/codec2/vndk/platform/C2BqBuffer.cpp
index 01995fd..63b0f39 100644
--- a/media/codec2/vndk/platform/C2BqBuffer.cpp
+++ b/media/codec2/vndk/platform/C2BqBuffer.cpp
@@ -571,19 +571,12 @@
public:
Impl(const std::shared_ptr<C2Allocator> &allocator)
: mInit(C2_OK), mProducerId(0), mGeneration(0),
- mDqFailure(0), mLastDqTs(0), mLastDqLogTs(0),
- mAllocator(allocator) {
+ mConsumerUsage(0), mDqFailure(0), mLastDqTs(0),
+ mLastDqLogTs(0), mAllocator(allocator) {
}
~Impl() {
- bool noInit = false;
for (int i = 0; i < NUM_BUFFER_SLOTS; ++i) {
- if (!noInit && mProducer) {
- Return<HStatus> transResult =
- mProducer->detachBuffer(static_cast<int32_t>(i));
- noInit = !transResult.isOk() ||
- static_cast<HStatus>(transResult) == HStatus::NO_INIT;
- }
mBuffers[i].clear();
}
}
@@ -692,15 +685,6 @@
{
sp<GraphicBuffer> buffers[NUM_BUFFER_SLOTS];
std::scoped_lock<std::mutex> lock(mMutex);
- bool noInit = false;
- for (int i = 0; i < NUM_BUFFER_SLOTS; ++i) {
- if (!noInit && mProducer) {
- Return<HStatus> transResult =
- mProducer->detachBuffer(static_cast<int32_t>(i));
- noInit = !transResult.isOk() ||
- static_cast<HStatus>(transResult) == HStatus::NO_INIT;
- }
- }
int32_t oldGeneration = mGeneration;
if (producer) {
mProducer = producer;
@@ -747,6 +731,11 @@
"bqId: %llu migrated buffers # %d",
generation, (unsigned long long)producerId, migrated);
}
+ mConsumerUsage = usage;
+ }
+
+ void getConsumerUsage(uint64_t *consumeUsage) {
+ *consumeUsage = mConsumerUsage;
}
private:
@@ -755,6 +744,7 @@
c2_status_t mInit;
uint64_t mProducerId;
uint32_t mGeneration;
+ uint64_t mConsumerUsage;
OnRenderCallback mRenderCallback;
size_t mDqFailure;
@@ -1086,3 +1076,10 @@
mImpl->setRenderCallback(renderCallback);
}
}
+
+void C2BufferQueueBlockPool::getConsumerUsage(uint64_t *consumeUsage) {
+ if (mImpl) {
+ mImpl->getConsumerUsage(consumeUsage);
+ }
+}
+
diff --git a/media/codecs/m4v_h263/dec/src/vop.cpp b/media/codecs/m4v_h263/dec/src/vop.cpp
index 7b32498..abc0861 100644
--- a/media/codecs/m4v_h263/dec/src/vop.cpp
+++ b/media/codecs/m4v_h263/dec/src/vop.cpp
@@ -107,26 +107,57 @@
#ifndef PV_TOLERATE_VOL_ERRORS
if (layer) /* */
{
- /* support SSPL0-2 */
- if (tmpvar != 0x10 && tmpvar != 0x11 && tmpvar != 0x12 &&
- tmpvar != 0xA1 && tmpvar != 0xA2 && tmpvar != 0xA3/* Core SP@L1-L3 */)
- return PV_FAIL;
+ switch (tmpvar)
+ {
+ /* Simple Scalable Profile Levels */
+ case 0x10:
+ case 0x11:
+ case 0x12:
+ /* Core Scalable Profile Levels */
+ case 0xA1:
+ case 0xA2:
+ case 0xA3:
+ // Do nothing; the cases listed above are supported values
+ break;
+ default:
+ // Unsupported profile level
+ return PV_FAIL;
+ }
}
else
{
- /* support SPL0-3 & SSPL0-2 */
- if (tmpvar != 0x01 && tmpvar != 0x02 && tmpvar != 0x03 && tmpvar != 0x08 &&
- /* While not technically supported, try to decode SPL4&SPL5 files as well. */
- /* We'll fail later if the size is too large. This is to allow playback of */
- /* some <=CIF files generated by other encoders. */
- tmpvar != 0x04 && tmpvar != 0x05 &&
- tmpvar != 0x10 && tmpvar != 0x11 && tmpvar != 0x12 &&
- tmpvar != 0x21 && tmpvar != 0x22 && /* Core Profile Levels */
- tmpvar != 0xA1 && tmpvar != 0xA2 && tmpvar != 0xA3 &&
- tmpvar != 0xF0 && tmpvar != 0xF1 && /* Advanced Simple Profile Levels*/
- tmpvar != 0xF2 && tmpvar != 0xF3 &&
- tmpvar != 0xF4 && tmpvar != 0xF5)
- return PV_FAIL;
+ switch (tmpvar)
+ {
+ /* Simple Profile Levels */
+ case 0x01:
+ case 0x02:
+ case 0x03:
+ case 0x04:
+ case 0x05:
+ case 0x06:
+ case 0x08:
+ case 0x10:
+ case 0x11:
+ case 0x12:
+ /* Core Profile Levels */
+ case 0x21:
+ case 0x22:
+ case 0xA1:
+ case 0xA2:
+ case 0xA3:
+ /* Advanced Simple Profile Levels */
+ case 0xF0:
+ case 0xF1:
+ case 0xF2:
+ case 0xF3:
+ case 0xF4:
+ case 0xF5:
+ // Do nothing; the cases listed above are supported values
+ break;
+ default:
+ // Unsupported profile level
+ return PV_FAIL;
+ }
}
#else
profile = tmpvar;
diff --git a/media/extractors/mp4/MPEG4Extractor.cpp b/media/extractors/mp4/MPEG4Extractor.cpp
index eccbf46..5ca874e 100644
--- a/media/extractors/mp4/MPEG4Extractor.cpp
+++ b/media/extractors/mp4/MPEG4Extractor.cpp
@@ -1969,26 +1969,8 @@
}
if (chunk_type == FOURCC("fLaC")) {
-
- // From https://github.com/xiph/flac/blob/master/doc/isoflac.txt
- // 4 for mime, 4 for blockType and BlockLen, 34 for metadata
- uint8_t flacInfo[4 + 4 + 34];
- // skipping dFla, version
- data_offset += sizeof(buffer) + 12;
- size_t flacOffset = 4;
- // Add flaC header mime type to CSD
- strncpy((char *)flacInfo, "fLaC", 4);
- if (mDataSource->readAt(
- data_offset, flacInfo + flacOffset, sizeof(flacInfo) - flacOffset) <
- (ssize_t)sizeof(flacInfo) - flacOffset) {
- return ERROR_IO;
- }
- data_offset += sizeof(flacInfo) - flacOffset;
-
- AMediaFormat_setBuffer(mLastTrack->meta, AMEDIAFORMAT_KEY_CSD_0, flacInfo,
- sizeof(flacInfo));
+ data_offset += sizeof(buffer);
*offset = data_offset;
- CHECK_EQ(*offset, stop_offset);
}
while (*offset < stop_offset) {
@@ -2521,6 +2503,35 @@
break;
}
+ case FOURCC("dfLa"):
+ {
+ *offset += chunk_size;
+
+ // From https://github.com/xiph/flac/blob/master/doc/isoflac.txt
+ // 4 for mediaType, 4 for blockType and BlockLen, 34 for metadata
+ uint8_t flacInfo[4 + 4 + 34];
+
+ if (chunk_data_size != sizeof(flacInfo)) {
+ return ERROR_MALFORMED;
+ }
+
+ data_offset += 4;
+ size_t flacOffset = 4;
+ // Add flaC header mediaType to CSD
+ strncpy((char *)flacInfo, "fLaC", 4);
+
+ ssize_t bytesToRead = sizeof(flacInfo) - flacOffset;
+ if (mDataSource->readAt(
+ data_offset, flacInfo + flacOffset, bytesToRead) < bytesToRead) {
+ return ERROR_IO;
+ }
+
+ data_offset += bytesToRead;
+ AMediaFormat_setBuffer(mLastTrack->meta, AMEDIAFORMAT_KEY_CSD_0, flacInfo,
+ sizeof(flacInfo));
+ break;
+ }
+
case FOURCC("avcC"):
{
*offset += chunk_size;
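For reference, the CSD_0 blob assembled in the dfLa case above is 42 bytes; a sketch of its layout as implied by the comments (offsets assume the standard 34-byte FLAC STREAMINFO block):

    // Sketch of the AMEDIAFORMAT_KEY_CSD_0 layout built above:
    //   bytes  0..3    "fLaC"                  stream marker written by the extractor
    //   bytes  4..7    metadata block header   blockType/isLast flag + 24-bit length
    //   bytes  8..41   STREAMINFO (34 bytes)   sample rate, channel count, bits per sample, totals
    uint8_t flacInfo[4 + 4 + 34];               // 42 bytes total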
diff --git a/media/libaaudio/include/aaudio/AAudio.h b/media/libaaudio/include/aaudio/AAudio.h
index 212a787..4ebb530 100644
--- a/media/libaaudio/include/aaudio/AAudio.h
+++ b/media/libaaudio/include/aaudio/AAudio.h
@@ -40,7 +40,7 @@
/**
* This is used to represent a value that has not been specified.
* For example, an application could use {@link #AAUDIO_UNSPECIFIED} to indicate
- * that is did not not care what the specific value of a parameter was
+ * that it did not care what the specific value of a parameter was
* and would accept whatever it was given.
*/
#define AAUDIO_UNSPECIFIED 0
diff --git a/media/libaaudio/src/core/AAudioAudio.cpp b/media/libaaudio/src/core/AAudioAudio.cpp
index f07e66e..938079b 100644
--- a/media/libaaudio/src/core/AAudioAudio.cpp
+++ b/media/libaaudio/src/core/AAudioAudio.cpp
@@ -352,7 +352,8 @@
{
AudioStream *audioStream = convertAAudioStreamToAudioStream(stream);
- return audioStream->waitForStateChange(inputState, nextState, timeoutNanoseconds);
+ android::sp<AudioStream> spAudioStream(audioStream);
+ return spAudioStream->waitForStateChange(inputState, nextState, timeoutNanoseconds);
}
// ============================================================
@@ -565,9 +566,7 @@
int64_t *timeNanoseconds)
{
AudioStream *audioStream = convertAAudioStreamToAudioStream(stream);
- if (framePosition == nullptr) {
- return AAUDIO_ERROR_NULL;
- } else if (timeNanoseconds == nullptr) {
+ if (framePosition == nullptr || timeNanoseconds == nullptr) {
return AAUDIO_ERROR_NULL;
} else if (clockid != CLOCK_MONOTONIC && clockid != CLOCK_BOOTTIME) {
return AAUDIO_ERROR_ILLEGAL_ARGUMENT;
diff --git a/media/libaaudio/src/core/AAudioStreamParameters.cpp b/media/libaaudio/src/core/AAudioStreamParameters.cpp
index dc242b8..a990850 100644
--- a/media/libaaudio/src/core/AAudioStreamParameters.cpp
+++ b/media/libaaudio/src/core/AAudioStreamParameters.cpp
@@ -86,7 +86,6 @@
switch (mSessionId) {
case AAUDIO_SESSION_ID_NONE:
case AAUDIO_SESSION_ID_ALLOCATE:
- break;
default:
break;
}
diff --git a/media/libaaudio/tests/test_various.cpp b/media/libaaudio/tests/test_various.cpp
index cbf863f..b68fc7b 100644
--- a/media/libaaudio/tests/test_various.cpp
+++ b/media/libaaudio/tests/test_various.cpp
@@ -25,6 +25,7 @@
#include <gtest/gtest.h>
#include <unistd.h>
+#include <thread>
// Callback function that does nothing.
aaudio_data_callback_result_t NoopDataCallbackProc(
@@ -51,6 +52,7 @@
}
constexpr int64_t NANOS_PER_MILLISECOND = 1000 * 1000;
+constexpr int64_t MICROS_PER_MILLISECOND = 1000;
void checkReleaseThenClose(aaudio_performance_mode_t perfMode,
aaudio_sharing_mode_t sharingMode,
@@ -762,6 +764,58 @@
checkCallbackOnce(AAUDIO_PERFORMANCE_MODE_LOW_LATENCY);
}
+void waitForStateChangeToClosingorClosed(AAudioStream **stream, std::atomic<bool>* isReady)
+{
+ *isReady = true;
+ aaudio_stream_state_t state = AAUDIO_STREAM_STATE_UNKNOWN;
+ EXPECT_EQ(AAUDIO_OK, AAudioStream_waitForStateChange(*stream,
+ AAUDIO_STREAM_STATE_OPEN, &state,
+ 10000 * NANOS_PER_MILLISECOND));
+ if ((state != AAUDIO_STREAM_STATE_CLOSING) && (state != AAUDIO_STREAM_STATE_CLOSED)){
+ FAIL() << "ERROR - State not closing or closed. Current state: " <<
+ AAudio_convertStreamStateToText(state);
+ }
+}
+
+void testWaitForStateChangeClose(aaudio_performance_mode_t perfMode) {
+ AAudioStreamBuilder *aaudioBuilder = nullptr;
+ AAudioStream *aaudioStream = nullptr;
+
+ ASSERT_EQ(AAUDIO_OK, AAudio_createStreamBuilder(&aaudioBuilder));
+ AAudioStreamBuilder_setPerformanceMode(aaudioBuilder, perfMode);
+ ASSERT_EQ(AAUDIO_OK, AAudioStreamBuilder_openStream(aaudioBuilder, &aaudioStream));
+
+ // Verify Open State
+ aaudio_stream_state_t state = AAUDIO_STREAM_STATE_UNKNOWN;
+ EXPECT_EQ(AAUDIO_OK, AAudioStream_waitForStateChange(aaudioStream,
+ AAUDIO_STREAM_STATE_UNKNOWN, &state,
+ 1000 * NANOS_PER_MILLISECOND));
+ EXPECT_EQ(AAUDIO_STREAM_STATE_OPEN, state);
+
+ std::atomic<bool> isWaitThreadReady{false};
+
+ // Spawn a new thread to wait for the state change
+ std::thread waitThread (waitForStateChangeToClosingorClosed, &aaudioStream,
+ &isWaitThreadReady);
+
+ // Wait for worker thread to be ready
+ while (!isWaitThreadReady) {
+ usleep(MICROS_PER_MILLISECOND);
+ }
+ // Sleep an additional millisecond to make sure waitForAudioThread is called
+ usleep(MICROS_PER_MILLISECOND);
+ EXPECT_EQ(AAUDIO_OK, AAudioStream_close(aaudioStream));
+ waitThread.join();
+}
+
+TEST(test_various, wait_for_state_change_close_none) {
+ testWaitForStateChangeClose(AAUDIO_PERFORMANCE_MODE_NONE);
+}
+
+TEST(test_various, wait_for_state_change_close_lowlat) {
+ testWaitForStateChangeClose(AAUDIO_PERFORMANCE_MODE_LOW_LATENCY);
+}
+
// ************************************************************
struct WakeUpCallbackData {
void wakeOther() {
diff --git a/media/libaudioclient/AudioEffect.cpp b/media/libaudioclient/AudioEffect.cpp
index 9091599..8c645c3 100644
--- a/media/libaudioclient/AudioEffect.cpp
+++ b/media/libaudioclient/AudioEffect.cpp
@@ -94,7 +94,7 @@
return NO_INIT;
}
- if (type == NULL && uuid == NULL) {
+ if (type == nullptr && uuid == nullptr) {
ALOGW("Must specify at least type or uuid");
return BAD_VALUE;
}
@@ -105,8 +105,8 @@
mSessionId = sessionId;
memset(&mDescriptor, 0, sizeof(effect_descriptor_t));
- mDescriptor.type = *(type != NULL ? type : EFFECT_UUID_NULL);
- mDescriptor.uuid = *(uuid != NULL ? uuid : EFFECT_UUID_NULL);
+ mDescriptor.type = *(type != nullptr ? type : EFFECT_UUID_NULL);
+ mDescriptor.uuid = *(uuid != nullptr ? uuid : EFFECT_UUID_NULL);
// TODO b/182392769: use attribution source util
mIEffectClient = new EffectClient(this);
@@ -228,7 +228,7 @@
AudioSystem::releaseAudioSessionId(mSessionId,
VALUE_OR_FATAL(aidl2legacy_int32_t_pid_t(mClientAttributionSource.pid)));
}
- if (mIEffect != NULL) {
+ if (mIEffect != nullptr) {
mIEffect->disconnect();
IInterface::asBinder(mIEffect)->unlinkToDeath(mIEffectClient);
}
@@ -306,7 +306,7 @@
if (mEnabled == (cmdCode == EFFECT_CMD_ENABLE)) {
return NO_ERROR;
}
- if (replySize == NULL || *replySize != sizeof(status_t) || replyData == NULL) {
+ if (replySize == nullptr || *replySize != sizeof(status_t) || replyData == nullptr) {
return BAD_VALUE;
}
mLock.lock();
@@ -349,7 +349,7 @@
return (mStatus == ALREADY_EXISTS) ? (status_t) INVALID_OPERATION : mStatus;
}
- if (param == NULL || param->psize == 0 || param->vsize == 0) {
+ if (param == nullptr || param->psize == 0 || param->vsize == 0) {
return BAD_VALUE;
}
@@ -384,8 +384,7 @@
if (mStatus != NO_ERROR) {
return (mStatus == ALREADY_EXISTS) ? (status_t) INVALID_OPERATION : mStatus;
}
-
- if (param == NULL || param->psize == 0 || param->vsize == 0) {
+ if (param == nullptr || param->psize == 0 || param->vsize == 0) {
return BAD_VALUE;
}
@@ -440,8 +439,7 @@
if (mStatus != NO_ERROR && mStatus != ALREADY_EXISTS) {
return mStatus;
}
-
- if (param == NULL || param->psize == 0 || param->vsize == 0) {
+ if (param == nullptr || param->psize == 0 || param->vsize == 0) {
return BAD_VALUE;
}
@@ -537,6 +535,9 @@
status_t AudioEffect::queryNumberEffects(uint32_t *numEffects)
{
+ if (numEffects == nullptr) {
+ return BAD_VALUE;
+ }
const sp<IAudioFlinger>& af = AudioSystem::get_audio_flinger();
if (af == 0) return PERMISSION_DENIED;
return af->queryNumberEffects(numEffects);
@@ -544,6 +545,9 @@
status_t AudioEffect::queryEffect(uint32_t index, effect_descriptor_t *descriptor)
{
+ if (descriptor == nullptr) {
+ return BAD_VALUE;
+ }
const sp<IAudioFlinger>& af = AudioSystem::get_audio_flinger();
if (af == 0) return PERMISSION_DENIED;
return af->queryEffect(index, descriptor);
@@ -554,6 +558,9 @@
uint32_t preferredTypeFlag,
effect_descriptor_t *descriptor)
{
+ if (uuid == nullptr || type == nullptr || descriptor == nullptr) {
+ return BAD_VALUE;
+ }
const sp<IAudioFlinger>& af = AudioSystem::get_audio_flinger();
if (af == 0) return PERMISSION_DENIED;
return af->getEffectDescriptor(uuid, type, preferredTypeFlag, descriptor);
@@ -584,6 +591,9 @@
status_t AudioEffect::newEffectUniqueId(audio_unique_id_t* id)
{
+ if (id == nullptr) {
+ return BAD_VALUE;
+ }
const sp<IAudioFlinger>& af = AudioSystem::get_audio_flinger();
if (af == 0) return PERMISSION_DENIED;
*id = af->newAudioUniqueId(AUDIO_UNIQUE_ID_USE_EFFECT);
@@ -597,14 +607,15 @@
audio_source_t source,
audio_unique_id_t *id)
{
+ if ((typeStr == nullptr && uuidStr == nullptr) || id == nullptr) {
+ return BAD_VALUE;
+ }
const sp<IAudioPolicyService>& aps = AudioSystem::get_audio_policy_service();
if (aps == 0) return PERMISSION_DENIED;
- if (typeStr == NULL && uuidStr == NULL) return BAD_VALUE;
-
// Convert type & uuid from string to effect_uuid_t.
effect_uuid_t type;
- if (typeStr != NULL) {
+ if (typeStr != nullptr) {
status_t res = stringToGuid(typeStr, &type);
if (res != OK) return res;
} else {
@@ -612,7 +623,7 @@
}
effect_uuid_t uuid;
- if (uuidStr != NULL) {
+ if (uuidStr != nullptr) {
status_t res = stringToGuid(uuidStr, &uuid);
if (res != OK) return res;
} else {
@@ -640,14 +651,15 @@
audio_usage_t usage,
audio_unique_id_t *id)
{
+ if ((typeStr == nullptr && uuidStr == nullptr) || id == nullptr) {
+ return BAD_VALUE;
+ }
const sp<IAudioPolicyService>& aps = AudioSystem::get_audio_policy_service();
if (aps == 0) return PERMISSION_DENIED;
- if (typeStr == NULL && uuidStr == NULL) return BAD_VALUE;
-
// Convert type & uuid from string to effect_uuid_t.
effect_uuid_t type;
- if (typeStr != NULL) {
+ if (typeStr != nullptr) {
status_t res = stringToGuid(typeStr, &type);
if (res != OK) return res;
} else {
@@ -655,7 +667,7 @@
}
effect_uuid_t uuid;
- if (uuidStr != NULL) {
+ if (uuidStr != nullptr) {
status_t res = stringToGuid(uuidStr, &uuid);
if (res != OK) return res;
} else {
@@ -698,7 +710,7 @@
status_t AudioEffect::stringToGuid(const char *str, effect_uuid_t *guid)
{
- if (str == NULL || guid == NULL) {
+ if (str == nullptr || guid == nullptr) {
return BAD_VALUE;
}
@@ -724,7 +736,7 @@
status_t AudioEffect::guidToString(const effect_uuid_t *guid, char *str, size_t maxLen)
{
- if (guid == NULL || str == NULL) {
+ if (guid == nullptr || str == nullptr) {
return BAD_VALUE;
}
diff --git a/media/libaudioclient/AudioTrack.cpp b/media/libaudioclient/AudioTrack.cpp
index ad00bdb..8ecb8e4 100644
--- a/media/libaudioclient/AudioTrack.cpp
+++ b/media/libaudioclient/AudioTrack.cpp
@@ -403,7 +403,7 @@
// Note mPortId is not valid until the track is created, so omit mPortId in ALOG for set.
ALOGV("%s(): streamType %d, sampleRate %u, format %#x, channelMask %#x, frameCount %zu, "
- "flags #%x, notificationFrames %d, sessionId %d, transferType %d, uid %d, pid %d",
+ "flags %#x, notificationFrames %d, sessionId %d, transferType %d, uid %d, pid %d",
__func__,
streamType, sampleRate, format, channelMask, frameCount, flags, notificationFrames,
sessionId, transferType, attributionSource.uid, attributionSource.pid);
@@ -572,11 +572,13 @@
// (b) we can support re-creation of offloaded tracks
if (offloadInfo != NULL) {
mOffloadInfoCopy = *offloadInfo;
- mOffloadInfo = &mOffloadInfoCopy;
} else {
- mOffloadInfo = NULL;
memset(&mOffloadInfoCopy, 0, sizeof(audio_offload_info_t));
mOffloadInfoCopy = AUDIO_INFO_INITIALIZER;
+ mOffloadInfoCopy.format = format;
+ mOffloadInfoCopy.sample_rate = sampleRate;
+ mOffloadInfoCopy.channel_mask = channelMask;
+ mOffloadInfoCopy.stream_type = streamType;
}
mVolume[AUDIO_INTERLEAVE_LEFT] = 1.0f;
@@ -973,8 +975,16 @@
{
using namespace std::chrono_literals;
+ // We use atomic access here for state variables - these are used as hints
+ // to ensure we have ramped down audio.
+ const int priorState = mProxy->getState();
+ const uint32_t priorPosition = mProxy->getPosition().unsignedValue();
+
pause();
+ // Only if we were previously active, do we wait to ramp down the audio.
+ if (priorState != CBLK_STATE_ACTIVE) return true;
+
AutoMutex lock(mLock);
// offload and direct tracks do not wait because pause volume ramp is handled by hardware.
if (isOffloadedOrDirect_l()) return true;
@@ -982,16 +992,25 @@
// Wait for the track state to be anything besides pausing.
// This ensures that the volume has ramped down.
constexpr auto SLEEP_INTERVAL_MS = 10ms;
+ constexpr auto POSITION_TIMEOUT_MS = 40ms; // don't wait longer than this for position change.
auto begin = std::chrono::steady_clock::now();
while (true) {
- // wait for state to change
+ // Wait for state and position to change.
+ // After pause() the server state should be PAUSING, but that may immediately
+ // be converted to PAUSED by prepareTracks before data is read into the mixer.
+ // Hence we check that the state is not PAUSING and that the server position
+ // has advanced, which is a more reliable indication that the volume ramp has completed.
const int state = mProxy->getState();
+ const uint32_t position = mProxy->getPosition().unsignedValue();
mLock.unlock(); // only local variables accessed until lock.
auto elapsed = std::chrono::duration_cast<std::chrono::milliseconds>(
std::chrono::steady_clock::now() - begin);
- if (state != CBLK_STATE_PAUSING) {
- ALOGV("%s: success state:%d after %lld ms", __func__, state, elapsed.count());
+ if (state != CBLK_STATE_PAUSING &&
+ (elapsed >= POSITION_TIMEOUT_MS || position != priorPosition)) {
+ ALOGV("%s: success state:%d, position:%u after %lld ms"
+ " (prior state:%d prior position:%u)",
+ __func__, state, position, elapsed.count(), priorState, priorPosition);
return true;
}
std::chrono::milliseconds remaining = timeout - elapsed;
@@ -1220,6 +1239,10 @@
legacy2aidl_audio_playback_rate_t_AudioPlaybackRate(playbackRate))));
if (status == NO_ERROR) {
mPlaybackRate = playbackRate;
+ } else if (status == INVALID_OPERATION
+ && playbackRate.mSpeed == 1.0f && mPlaybackRate.mPitch == 1.0f) {
+ mPlaybackRate = playbackRate;
+ return NO_ERROR;
}
return status;
}
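
The ramp-down wait added above is a bounded poll: after pause(), the client samples the shared-memory state and server position every 10 ms, succeeding once the state has left PAUSING and the position has advanced (or the 40 ms position timeout has elapsed), and giving up at the caller-supplied timeout. A generic sketch of that polling pattern, as a hypothetical helper rather than the AudioTrack code:

    #include <algorithm>
    #include <chrono>
    #include <functional>
    #include <thread>

    // Hypothetical helper: poll `done` every `interval` until it returns true or `timeout`
    // elapses. Returns true on success, false on timeout.
    bool pollUntil(const std::function<bool()>& done,
                   std::chrono::milliseconds interval,
                   std::chrono::milliseconds timeout) {
        const auto begin = std::chrono::steady_clock::now();
        while (!done()) {
            const auto elapsed = std::chrono::duration_cast<std::chrono::milliseconds>(
                    std::chrono::steady_clock::now() - begin);
            if (elapsed >= timeout) return false;
            std::this_thread::sleep_for(std::min(interval, timeout - elapsed));
        }
        return true;
    }
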
diff --git a/media/libaudioclient/include/media/AudioEffect.h b/media/libaudioclient/include/media/AudioEffect.h
index dd4d2da..02ff43f 100644
--- a/media/libaudioclient/include/media/AudioEffect.h
+++ b/media/libaudioclient/include/media/AudioEffect.h
@@ -136,7 +136,7 @@
* indicated by count.
* PERMISSION_DENIED could not get AudioFlinger interface
* NO_INIT effect library failed to initialize
- * BAD_VALUE invalid audio session or descriptor pointers
+ * BAD_VALUE invalid audio session, or invalid descriptor or count pointers
*
* Returned value
* *descriptor updated with descriptors of pre processings enabled by default
@@ -160,6 +160,7 @@
* NO_ERROR successful operation.
* PERMISSION_DENIED could not get AudioFlinger interface
* or caller lacks required permissions.
+ * BAD_VALUE invalid pointer to id
* Returned value
* *id: The new unique system-wide effect id.
*/
@@ -194,7 +195,7 @@
* PERMISSION_DENIED could not get AudioFlinger interface
* or caller lacks required permissions.
* NO_INIT effect library failed to initialize.
- * BAD_VALUE invalid source, type uuid or implementation uuid.
+ * BAD_VALUE invalid source, type uuid or implementation uuid, or id pointer
* NAME_NOT_FOUND no effect with this uuid or type found.
*
* Returned value
@@ -233,7 +234,7 @@
* PERMISSION_DENIED could not get AudioFlinger interface
* or caller lacks required permissions.
* NO_INIT effect library failed to initialize.
- * BAD_VALUE invalid type uuid or implementation uuid.
+ * BAD_VALUE invalid type uuid or implementation uuid, or id pointer
* NAME_NOT_FOUND no effect with this uuid or type found.
*
* Returned value
@@ -455,7 +456,7 @@
* Returned status (from utils/Errors.h) can be:
* - NO_ERROR: successful operation.
* - INVALID_OPERATION: the application does not have control of the effect engine.
- * - BAD_VALUE: invalid parameter identifier or value.
+ * - BAD_VALUE: invalid parameter structure pointer, or invalid identifier or value.
* - DEAD_OBJECT: the effect engine has been deleted.
*/
virtual status_t setParameter(effect_param_t *param);
@@ -500,7 +501,7 @@
* Returned status (from utils/Errors.h) can be:
* - NO_ERROR: successful operation.
* - INVALID_OPERATION: the AudioEffect was not successfully initialized.
- * - BAD_VALUE: invalid parameter identifier.
+ * - BAD_VALUE: invalid parameter structure pointer, or invalid parameter identifier.
* - DEAD_OBJECT: the effect engine has been deleted.
*/
virtual status_t getParameter(effect_param_t *param);
diff --git a/media/libaudioclient/include/media/AudioTrack.h b/media/libaudioclient/include/media/AudioTrack.h
index fa21265..285a28a 100644
--- a/media/libaudioclient/include/media/AudioTrack.h
+++ b/media/libaudioclient/include/media/AudioTrack.h
@@ -1182,7 +1182,6 @@
sp<IMemory> mSharedBuffer;
transfer_type mTransfer;
audio_offload_info_t mOffloadInfoCopy;
- const audio_offload_info_t* mOffloadInfo;
audio_attributes_t mAttributes;
size_t mFrameSize; // frame size in bytes
diff --git a/media/libeffects/downmix/Android.bp b/media/libeffects/downmix/Android.bp
index b26d028..abe622d 100644
--- a/media/libeffects/downmix/Android.bp
+++ b/media/libeffects/downmix/Android.bp
@@ -24,6 +24,10 @@
vendor: true,
srcs: ["EffectDownmix.cpp"],
+ export_include_dirs: [
+ ".",
+ ],
+
shared_libs: [
"libaudioutils",
"libcutils",
diff --git a/media/libeffects/downmix/tests/Android.bp b/media/libeffects/downmix/tests/Android.bp
index 4940117..392a6fa 100644
--- a/media/libeffects/downmix/tests/Android.bp
+++ b/media/libeffects/downmix/tests/Android.bp
@@ -18,9 +18,6 @@
gtest: true,
host_supported: true,
vendor: true,
- include_dirs: [
- "frameworks/av/media/libeffects/downmix",
- ],
header_libs: [
"libaudioeffects",
],
@@ -51,9 +48,6 @@
name:"downmixtest",
host_supported: false,
proprietary: true,
- include_dirs: [
- "frameworks/av/media/libeffects/downmix",
- ],
header_libs: [
"libaudioeffects",
diff --git a/media/libeffects/lvm/benchmarks/Android.bp b/media/libeffects/lvm/benchmarks/Android.bp
index 8a25b85..c21c5f2 100644
--- a/media/libeffects/lvm/benchmarks/Android.bp
+++ b/media/libeffects/lvm/benchmarks/Android.bp
@@ -29,9 +29,6 @@
name: "reverb_benchmark",
vendor: true,
host_supported: true,
- include_dirs: [
- "frameworks/av/media/libeffects/lvm/wrapper/Reverb",
- ],
srcs: ["reverb_benchmark.cpp"],
static_libs: [
"libreverb",
diff --git a/media/libeffects/lvm/tests/Android.bp b/media/libeffects/lvm/tests/Android.bp
index 9939ed1..0568fbd 100644
--- a/media/libeffects/lvm/tests/Android.bp
+++ b/media/libeffects/lvm/tests/Android.bp
@@ -11,52 +11,33 @@
cc_test {
name: "EffectReverbTest",
- vendor: true,
- gtest: true,
- host_supported: true,
+ defaults: [
+ "libeffects-test-defaults",
+ ],
srcs: [
"EffectReverbTest.cpp",
- "EffectTestHelper.cpp",
- ],
- include_dirs: [
- "frameworks/av/media/libeffects/lvm/lib/Common/lib",
- "frameworks/av/media/libeffects/lvm/wrapper/Reverb",
],
static_libs: [
- "libaudioutils",
"libreverb",
"libreverbwrapper",
],
- shared_libs: [
- "liblog",
- ],
header_libs: [
"libaudioeffects",
- "libhardware_headers",
],
}
cc_test {
name: "EffectBundleTest",
- vendor: true,
- gtest: true,
- host_supported: true,
- test_suites: ["device-tests"],
+ defaults: [
+ "libeffects-test-defaults",
+ ],
srcs: [
"EffectBundleTest.cpp",
- "EffectTestHelper.cpp",
],
static_libs: [
- "libaudioutils",
"libbundlewrapper",
"libmusicbundle",
],
- shared_libs: [
- "liblog",
- ],
- header_libs: [
- "libhardware_headers",
- ],
}
cc_test {
@@ -108,10 +89,6 @@
proprietary: true,
gtest: false,
- include_dirs: [
- "frameworks/av/media/libeffects/lvm/wrapper/Reverb",
- ],
-
header_libs: [
"libaudioeffects",
],
diff --git a/media/libeffects/lvm/wrapper/Android.bp b/media/libeffects/lvm/wrapper/Android.bp
index e169e3c..1287514 100644
--- a/media/libeffects/lvm/wrapper/Android.bp
+++ b/media/libeffects/lvm/wrapper/Android.bp
@@ -89,6 +89,8 @@
local_include_dirs: ["Reverb"],
+ export_include_dirs: ["Reverb"],
+
header_libs: [
"libhardware_headers",
"libaudioeffects",
diff --git a/media/libeffects/spatializer/benchmarks/Android.bp b/media/libeffects/spatializer/benchmarks/Android.bp
new file mode 100644
index 0000000..ab7e468
--- /dev/null
+++ b/media/libeffects/spatializer/benchmarks/Android.bp
@@ -0,0 +1,21 @@
+package {
+ // See: http://go/android-license-faq
+ // A large-scale-change added 'default_applicable_licenses' to import
+ // all of the 'license_kinds' from "frameworks_av_license"
+ // to get the below license kinds:
+ // SPDX-license-identifier-Apache-2.0
+ default_applicable_licenses: ["frameworks_av_license"],
+}
+
+cc_benchmark {
+ name: "spatializer_benchmark",
+ vendor: true,
+ srcs: ["spatializer_benchmark.cpp"],
+ shared_libs: [
+ "libaudioutils",
+ "liblog",
+ ],
+ header_libs: [
+ "libhardware_headers",
+ ],
+}
diff --git a/media/libeffects/spatializer/benchmarks/spatializer_benchmark.cpp b/media/libeffects/spatializer/benchmarks/spatializer_benchmark.cpp
new file mode 100644
index 0000000..e8ac480
--- /dev/null
+++ b/media/libeffects/spatializer/benchmarks/spatializer_benchmark.cpp
@@ -0,0 +1,164 @@
+/*
+ * Copyright 2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <array>
+#include <dlfcn.h>
+#include <random>
+#include <vector>
+
+#include <benchmark/benchmark.h>
+#include <hardware/audio_effect.h>
+#include <log/log.h>
+
+audio_effect_library_t AUDIO_EFFECT_LIBRARY_INFO_SYM = [] {
+ audio_effect_library_t symbol{};
+ void* effectLib = dlopen("libspatialaudio.so", RTLD_NOW);
+ if (effectLib) {
+ audio_effect_library_t* effectInterface =
+ (audio_effect_library_t*)dlsym(effectLib, AUDIO_EFFECT_LIBRARY_INFO_SYM_AS_STR);
+ if (effectInterface == nullptr) {
+ ALOGE("dlsym failed: %s", dlerror());
+ exit(-1);
+ }
+ symbol = (audio_effect_library_t)(*effectInterface);
+ } else {
+ ALOGE("dlopen failed: %s", dlerror());
+ exit(-1);
+ }
+ return symbol;
+}();
+
+// channel masks
+constexpr int kInputChMask = AUDIO_CHANNEL_OUT_5POINT1;
+
+// sampleRates
+constexpr size_t kSampleRates[] = {
+ 44100,
+ 48000,
+ 96000,
+};
+constexpr size_t kNumSampleRates = std::size(kSampleRates);
+
+// duration in ms
+constexpr size_t kDurations[] = {2, 5, 10};
+constexpr size_t kNumDurations = std::size(kDurations);
+
+// effect uuids
+constexpr effect_uuid_t kEffectUuid = {
+ 0xcc4677de, 0xff72, 0x11eb, 0x9a03, {0x02, 0x42, 0xac, 0x13, 0x00, 0x03}};
+
+constexpr float kMinAmplitude = -1.0f;
+constexpr float kMaxAmplitude = 1.0f;
+
+/*******************************************************************
+ * A test result running on Pixel 5 for comparison.
+ * The first parameter indicates the sample rate.
+ * 0: 44100, 1: 48000, 2: 96000
+ * The second parameter indicates the duration in ms.
+ * 0: 2, 1: 5, 2: 10
+ * -------------------------------------------------------------
+ * Benchmark Time CPU Iterations
+ * -------------------------------------------------------------
+ * BM_SPATIALIZER/0/0 739848 ns 738497 ns 934
+ * BM_SPATIALIZER/0/1 1250503 ns 1248337 ns 480
+ * BM_SPATIALIZER/0/2 2094092 ns 2090092 ns 310
+ * BM_SPATIALIZER/1/0 783114 ns 781626 ns 683
+ * BM_SPATIALIZER/1/1 1332951 ns 1330473 ns 452
+ * BM_SPATIALIZER/1/2 2258313 ns 2254022 ns 289
+ * BM_SPATIALIZER/2/0 1210332 ns 1207957 ns 477
+ * BM_SPATIALIZER/2/1 2356259 ns 2351764 ns 269
+ * BM_SPATIALIZER/2/2 4267814 ns 4259567 ns 155
+ *******************************************************************/
+
+static void BM_SPATIALIZER(benchmark::State& state) {
+ const size_t sampleRate = kSampleRates[state.range(0)];
+ const size_t durationMs = kDurations[state.range(1)];
+ const size_t frameCount = durationMs * sampleRate / 1000;
+ const size_t inputChannelCount = audio_channel_count_from_out_mask(kInputChMask);
+ const size_t outputChannelCount = audio_channel_count_from_out_mask(AUDIO_CHANNEL_OUT_STEREO);
+
+ // Initialize input buffer with deterministic pseudo-random values
+ std::minstd_rand gen(kInputChMask);
+ std::uniform_real_distribution<> dis(kMinAmplitude, kMaxAmplitude);
+ std::vector<float> input(frameCount * inputChannelCount);
+ for (auto& in : input) {
+ in = dis(gen);
+ }
+
+ effect_handle_t effectHandle = nullptr;
+ if (int status = AUDIO_EFFECT_LIBRARY_INFO_SYM.create_effect(&kEffectUuid, 1 /* sessionId */,
+ 1 /* ioId */, &effectHandle);
+ status != 0) {
+ ALOGE("create_effect returned an error = %d\n", status);
+ return;
+ }
+
+ effect_config_t config{};
+ config.inputCfg.samplingRate = config.outputCfg.samplingRate = sampleRate;
+ config.inputCfg.channels = kInputChMask;
+ config.outputCfg.channels = AUDIO_CHANNEL_OUT_STEREO;
+ config.inputCfg.format = config.outputCfg.format = AUDIO_FORMAT_PCM_FLOAT;
+
+ int reply = 0;
+ uint32_t replySize = sizeof(reply);
+ if (int status = (*effectHandle)
+ ->command(effectHandle, EFFECT_CMD_SET_CONFIG, sizeof(effect_config_t),
+ &config, &replySize, &reply);
+ status != 0) {
+ ALOGE("command returned an error = %d\n", status);
+ return;
+ }
+
+ if (int status = (*effectHandle)
+ ->command(effectHandle, EFFECT_CMD_ENABLE, sizeof(effect_config_t),
+ &config, &replySize, &reply);
+ status != 0) {
+ ALOGE("command returned an error = %d\n", status);
+ return;
+ }
+
+ // Run the test
+ std::vector<float> output(frameCount * outputChannelCount);
+ for (auto _ : state) {
+ benchmark::DoNotOptimize(input.data());
+ benchmark::DoNotOptimize(output.data());
+
+ audio_buffer_t inBuffer = {.frameCount = frameCount, .f32 = input.data()};
+ audio_buffer_t outBuffer = {.frameCount = frameCount, .f32 = output.data()};
+ (*effectHandle)->process(effectHandle, &inBuffer, &outBuffer);
+
+ benchmark::ClobberMemory();
+ }
+
+ state.SetComplexityN(frameCount);
+
+ if (int status = AUDIO_EFFECT_LIBRARY_INFO_SYM.release_effect(effectHandle); status != 0) {
+ ALOGE("release_effect returned an error = %d\n", status);
+ return;
+ }
+}
+
+static void SPATIALIZERArgs(benchmark::internal::Benchmark* b) {
+ for (int i = 0; i < kNumSampleRates; i++) {
+ for (int j = 0; j < kNumDurations; ++j) {
+ b->Args({i, j});
+ }
+ }
+}
+
+BENCHMARK(BM_SPATIALIZER)->Apply(SPATIALIZERArgs);
+
+BENCHMARK_MAIN();
diff --git a/media/libeffects/spatializer/tests/Android.bp b/media/libeffects/spatializer/tests/Android.bp
new file mode 100644
index 0000000..704e873
--- /dev/null
+++ b/media/libeffects/spatializer/tests/Android.bp
@@ -0,0 +1,21 @@
+// Build the unit tests for spatializer effect
+
+package {
+ // See: http://go/android-license-faq
+ // A large-scale-change added 'default_applicable_licenses' to import
+ // all of the 'license_kinds' from "frameworks_av_license"
+ // to get the below license kinds:
+ // SPDX-license-identifier-Apache-2.0
+ default_applicable_licenses: ["frameworks_av_license"],
+}
+
+cc_test {
+ name: "SpatializerTest",
+ defaults: [
+ "libeffects-test-defaults",
+ ],
+ host_supported: false,
+ srcs: [
+ "SpatializerTest.cpp",
+ ],
+}
diff --git a/media/libeffects/spatializer/tests/SpatializerTest.cpp b/media/libeffects/spatializer/tests/SpatializerTest.cpp
new file mode 100644
index 0000000..110fbb1
--- /dev/null
+++ b/media/libeffects/spatializer/tests/SpatializerTest.cpp
@@ -0,0 +1,300 @@
+/*
+ * Copyright 2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#define LOG_TAG "SpatializerTest"
+
+#include <system/audio_effects/effect_spatializer.h>
+#include "EffectTestHelper.h"
+
+using namespace android;
+
+// relying on dlsym to fill the interface context
+audio_effect_library_t AUDIO_EFFECT_LIBRARY_INFO_SYM = [] {
+ audio_effect_library_t symbol{};
+ void* effectLib = dlopen("libspatialaudio.so", RTLD_NOW);
+ if (effectLib) {
+ audio_effect_library_t* effectInterface =
+ (audio_effect_library_t*)dlsym(effectLib, AUDIO_EFFECT_LIBRARY_INFO_SYM_AS_STR);
+ if (effectInterface == nullptr) {
+ ALOGE("dlsym failed: %s", dlerror());
+ exit(-1);
+ }
+ symbol = (audio_effect_library_t)(*effectInterface);
+ } else {
+ ALOGE("dlopen failed: %s", dlerror());
+ exit(-1);
+ }
+ return symbol;
+}();
+
+// channel masks
+constexpr audio_channel_mask_t kSpatializerChMasks[] = {
+ AUDIO_CHANNEL_OUT_5POINT1,
+};
+constexpr size_t kNumSpatializerChMasks = std::size(kSpatializerChMasks);
+
+// sampleRates
+// TODO(b/234170025): Add all sampling rates once they are handled by spatializer
+constexpr int kSpatializerSampleRates[] = {44100, 48000, 96000};
+constexpr size_t kNumSpatializerSampleRates = std::size(kSpatializerSampleRates);
+
+// frame counts
+// TODO(b/234620538): Add sizes smaller than 80 once they are handled by spatializer
+constexpr size_t kSpatializerFrameCounts[] = {4800, 1920, 480, 80};
+constexpr size_t kNumSpatializerFrameCounts = std::size(kSpatializerFrameCounts);
+
+// effect uuids
+constexpr effect_uuid_t kSpatializerEffectUuids[] = {
+ {0xcc4677de, 0xff72, 0x11eb, 0x9a03, {0x02, 0x42, 0xac, 0x13, 0x00, 0x03}},
+};
+const size_t kNumSpatializerEffectUuids = std::size(kSpatializerEffectUuids);
+
+constexpr float kMinAmplitude = -1.0f;
+constexpr float kMaxAmplitude = 1.0f;
+constexpr float kSNRThreshold = 100.0f;
+constexpr size_t kNumBufferSplits = 2;
+
+using SingleEffectTestParam = std::tuple<int, int, int, int, int>;
+
+class SingleEffectTest : public ::testing::TestWithParam<SingleEffectTestParam> {
+ public:
+ SingleEffectTest()
+ : mInputChMask(kSpatializerChMasks[std::get<0>(GetParam())]),
+ mInputChannelCount(audio_channel_count_from_out_mask(mInputChMask)),
+ mOutputChMask(AUDIO_CHANNEL_OUT_STEREO),
+ mOutputChannelCount(audio_channel_count_from_out_mask(mOutputChMask)),
+ mSampleRate(kSpatializerSampleRates[std::get<1>(GetParam())]),
+ mFrameCount(kSpatializerFrameCounts[std::get<2>(GetParam())]),
+ mLoopCount(EffectTestHelper::kLoopCounts[std::get<3>(GetParam())]),
+ mTotalFrameCount(mFrameCount * mLoopCount),
+ mUuid(&kSpatializerEffectUuids[std::get<4>(GetParam())]) {}
+ void SetUp() override {
+ ASSERT_EQ(AUDIO_EFFECT_LIBRARY_TAG, AUDIO_EFFECT_LIBRARY_INFO_SYM.tag)
+ << "Invalid effect tag";
+ }
+ const size_t mInputChMask;
+ const size_t mInputChannelCount;
+ const size_t mOutputChMask;
+ const size_t mOutputChannelCount;
+ const size_t mSampleRate;
+ const size_t mFrameCount;
+ const size_t mLoopCount;
+ const size_t mTotalFrameCount;
+ const effect_uuid_t* mUuid;
+};
+
+// Test basic spatializer functionality (does not crash) for various combinations of sampling
+// rates, channel masks and frame counts.
+TEST_P(SingleEffectTest, SimpleProcess) {
+ SCOPED_TRACE(testing::Message()
+ << "chMask: " << mInputChMask << " sampleRate: " << mSampleRate);
+
+ EffectTestHelper effect(mUuid, mInputChMask, mOutputChMask, mSampleRate, mFrameCount,
+ mLoopCount);
+ ASSERT_NO_FATAL_FAILURE(effect.createEffect());
+ ASSERT_NO_FATAL_FAILURE(effect.setConfig());
+
+ // Initialize input buffer with deterministic pseudo-random values
+ std::vector<float> input(mTotalFrameCount * mInputChannelCount);
+ std::vector<float> output(mTotalFrameCount * mOutputChannelCount);
+ std::minstd_rand gen(mInputChMask);
+ std::uniform_real_distribution<> dis(kMinAmplitude, kMaxAmplitude);
+ for (auto& in : input) {
+ in = dis(gen);
+ }
+ ASSERT_NO_FATAL_FAILURE(effect.process(input.data(), output.data()));
+ ASSERT_NO_FATAL_FAILURE(effect.releaseEffect());
+}
+
+INSTANTIATE_TEST_SUITE_P(SpatializerTest, SingleEffectTest,
+ ::testing::Combine(::testing::Range(0, (int)kNumSpatializerChMasks),
+ ::testing::Range(0, (int)kNumSpatializerSampleRates),
+ ::testing::Range(0, (int)kNumSpatializerFrameCounts),
+ ::testing::Range(0,
+ (int)EffectTestHelper::kNumLoopCounts),
+ ::testing::Range(0, (int)kNumSpatializerEffectUuids)));
+
+using SingleEffectComparisonTestParam = std::tuple<int, int, int>;
+
+class SingleEffectComparisonTest
+ : public ::testing::TestWithParam<SingleEffectComparisonTestParam> {
+ public:
+ SingleEffectComparisonTest()
+ : mInputChMask(kSpatializerChMasks[std::get<0>(GetParam())]),
+ mInputChannelCount(audio_channel_count_from_out_mask(mInputChMask)),
+ mOutputChMask(AUDIO_CHANNEL_OUT_STEREO),
+ mOutputChannelCount(audio_channel_count_from_out_mask(mOutputChMask)),
+ mSampleRate(kSpatializerSampleRates[std::get<1>(GetParam())]),
+ mUuid(&kSpatializerEffectUuids[std::get<2>(GetParam())]) {}
+
+ const size_t mInputChMask;
+ const size_t mInputChannelCount;
+ const size_t mOutputChMask;
+ const size_t mOutputChannelCount;
+ const size_t mSampleRate;
+ const effect_uuid_t* mUuid;
+};
+
+// Ensure that the effect produces similar output whether an input is fed in a single call
+// or in multiple calls with the buffer split into smaller parts
+
+// TODO(b/234619903): This is currently disabled as output from the spatializer has
+// an algorithm delay that varies with frame count and hence makes it tricky to
+// compare output from two cases with different frame counts.
+// Feed valid input to the spatializer and dump the output to verify that the spatializer is
+// correctly initialized; once that is verified, enable the following test.
+TEST_P(SingleEffectComparisonTest, DISABLED_SimpleProcess) {
+ SCOPED_TRACE(testing::Message()
+ << "chMask: " << mInputChMask << " sampleRate: " << mSampleRate);
+ int testDurationMs = 20; // 20 ms
+ int testFrameCount = (mSampleRate * testDurationMs) / 1000;
+ int totalFrameCount = testFrameCount * kNumBufferSplits;
+ size_t totalInSamples = totalFrameCount * mInputChannelCount;
+ size_t totalOutSamples = totalFrameCount * mOutputChannelCount;
+ std::vector<float> input(totalInSamples);
+ std::vector<float> outRef(totalOutSamples);
+ std::vector<float> outTest(totalOutSamples);
+
+ // Initialize input buffer with deterministic pseudo-random values
+ std::minstd_rand gen(mInputChMask);
+ std::uniform_real_distribution<> dis(kMinAmplitude, kMaxAmplitude);
+ for (auto& in : input) {
+ in = dis(gen);
+ }
+
+ EffectTestHelper refEffect(mUuid, mInputChMask, mOutputChMask, mSampleRate, totalFrameCount, 1);
+ ASSERT_NO_FATAL_FAILURE(refEffect.createEffect());
+ ASSERT_NO_FATAL_FAILURE(refEffect.setConfig());
+ ASSERT_NO_FATAL_FAILURE(refEffect.process(input.data(), outRef.data()));
+ ASSERT_NO_FATAL_FAILURE(refEffect.releaseEffect());
+
+ EffectTestHelper testEffect(mUuid, mInputChMask, mOutputChMask, mSampleRate,
+ totalFrameCount / kNumBufferSplits, kNumBufferSplits);
+ ASSERT_NO_FATAL_FAILURE(testEffect.createEffect());
+ ASSERT_NO_FATAL_FAILURE(testEffect.setConfig());
+ ASSERT_NO_FATAL_FAILURE(testEffect.process(input.data(), outTest.data()));
+ ASSERT_NO_FATAL_FAILURE(testEffect.releaseEffect());
+
+ float snr = computeSnr(outTest.data(), outRef.data(), totalOutSamples);
+ ASSERT_GT(snr, kSNRThreshold) << "SNR between reference and test output " << snr
+ << " is lower than required " << kSNRThreshold;
+}
+
+INSTANTIATE_TEST_SUITE_P(SpatializerTest, SingleEffectComparisonTest,
+ ::testing::Combine(::testing::Range(0, (int)kNumSpatializerChMasks),
+ ::testing::Range(0, (int)kNumSpatializerSampleRates),
+ ::testing::Range(0, (int)kNumSpatializerEffectUuids)));
+
+// This test checks if get/set Spatializer effect params are in accordance with documentation. The
+// test doesn't validate the functionality of the params configured. It only checks the return
+// status of API calls.
+TEST(ParameterTests, CheckParameterSupport) {
+ EffectTestHelper effect(&kSpatializerEffectUuids[0], kSpatializerChMasks[0],
+ AUDIO_CHANNEL_OUT_STEREO, kSpatializerSampleRates[0],
+ kSpatializerFrameCounts[0], EffectTestHelper::kLoopCounts[0]);
+ ASSERT_NO_FATAL_FAILURE(effect.createEffect());
+
+ // capture list of channel masks supported
+ std::vector<audio_channel_mask_t> channelMasks;
+ int status = effect.getParam<true>(SPATIALIZER_PARAM_SUPPORTED_CHANNEL_MASKS, channelMasks);
+ EXPECT_EQ(status, 0) << "get Param returned an error " << status;
+ if (!status) {
+ EXPECT_EQ(1, channelMasks.size());
+ EXPECT_EQ(AUDIO_CHANNEL_OUT_5POINT1, channelMasks[0]);
+ }
+
+ // capture list of spatialization levels supported
+ std::vector<int8_t> spatializationLevels;
+ status = effect.getParam<true>(SPATIALIZER_PARAM_SUPPORTED_LEVELS, spatializationLevels);
+ EXPECT_EQ(status, 0) << "get Param returned an error " << status;
+ if (!status) {
+ EXPECT_EQ(1, spatializationLevels.size());
+ EXPECT_EQ(SPATIALIZATION_LEVEL_MULTICHANNEL, spatializationLevels[0]);
+ }
+
+ // capture list of spatialization modes supported
+ std::vector<int8_t> spatializationModes;
+ status = effect.getParam<true>(SPATIALIZER_PARAM_SUPPORTED_SPATIALIZATION_MODES,
+ spatializationModes);
+ EXPECT_EQ(status, 0) << "get Param returned an error " << status;
+ if (!status) {
+ EXPECT_EQ(1, spatializationModes.size());
+ EXPECT_EQ(SPATIALIZATION_MODE_BINAURAL, spatializationModes[0]);
+ }
+
+ // check if head tracking is supported
+ std::vector<int8_t> headTracking;
+ status = effect.getParam<false>(SPATIALIZER_PARAM_HEADTRACKING_SUPPORTED, headTracking);
+ EXPECT_EQ(status, 0) << "get Param returned an error " << status;
+ if (!status) {
+ EXPECT_EQ(1, headTracking.size());
+ EXPECT_EQ(true, headTracking[0]);
+ }
+
+ // verify spatialization level setting
+ std::vector<int8_t> level;
+ status = effect.getParam<false>(SPATIALIZER_PARAM_LEVEL, level);
+ EXPECT_EQ(status, 0) << "get Param returned an error " << status;
+ if (!status) {
+ EXPECT_EQ(1, level.size());
+ EXPECT_EQ(SPATIALIZATION_LEVEL_NONE, level[0]);
+ }
+
+ ASSERT_NO_FATAL_FAILURE(effect.setConfig());
+
+ status = effect.getParam<false>(SPATIALIZER_PARAM_LEVEL, level);
+ EXPECT_EQ(status, 0) << "get Param returned an error " << status;
+ if (!status) {
+ EXPECT_EQ(1, level.size());
+ EXPECT_EQ(SPATIALIZATION_LEVEL_MULTICHANNEL, level[0]);
+ }
+
+ // try setting unsupported parameters
+ level.clear();
+ level.push_back(SPATIALIZATION_LEVEL_MCHAN_BED_PLUS_OBJECTS);
+ ASSERT_EQ(1, level.size());
+ EXPECT_NE(0, effect.setParam(SPATIALIZER_PARAM_LEVEL, level));
+
+ // Ensure that unsupported level isn't set by above setParam
+ status = effect.getParam<false>(SPATIALIZER_PARAM_LEVEL, level);
+ EXPECT_EQ(status, 0) << "get Param returned an error " << status;
+ if (!status) {
+ EXPECT_EQ(1, level.size());
+ EXPECT_EQ(SPATIALIZATION_LEVEL_MULTICHANNEL, level[0]);
+ }
+
+ std::vector<float> hingeAngle = {3.1415f};
+ ASSERT_EQ(1, hingeAngle.size());
+ EXPECT_NE(0, effect.setParam(SPATIALIZER_PARAM_HINGE_ANGLE, hingeAngle));
+
+ std::vector<int8_t> headTrackingMode = {2}; // RELATIVE_WORLD
+ ASSERT_EQ(1, headTrackingMode.size());
+ EXPECT_NE(0, effect.setParam(SPATIALIZER_PARAM_HEADTRACKING_MODE, headTrackingMode));
+
+ // try setting supported parameters
+ std::vector<float> vectorFloat = {0.1, 0.2, 0.15, 0.04, 2.23, 3.14};
+ ASSERT_EQ(6, vectorFloat.size());
+ EXPECT_EQ(0, effect.setParam(SPATIALIZER_PARAM_HEAD_TO_STAGE, vectorFloat));
+
+ ASSERT_NO_FATAL_FAILURE(effect.releaseEffect());
+}
+
+int main(int argc, char** argv) {
+ ::testing::InitGoogleTest(&argc, argv);
+ int status = RUN_ALL_TESTS();
+ ALOGD("Test result = %d\n", status);
+ return status;
+}
diff --git a/media/libeffects/tests/common/Android.bp b/media/libeffects/tests/common/Android.bp
new file mode 100644
index 0000000..73179fb
--- /dev/null
+++ b/media/libeffects/tests/common/Android.bp
@@ -0,0 +1,45 @@
+package {
+ // See: http://go/android-license-faq
+ // A large-scale-change added 'default_applicable_licenses' to import
+ // all of the 'license_kinds' from "frameworks_av_license"
+ // to get the below license kinds:
+ // SPDX-license-identifier-Apache-2.0
+ default_applicable_licenses: ["frameworks_av_license"],
+}
+
+filegroup {
+ name: "libeffects-test-helper-srcs",
+ srcs: [
+ "EffectTestHelper.cpp",
+ ],
+}
+
+cc_library_headers {
+ name: "libeffects-test-helper-headers",
+ vendor: true,
+ host_supported: true,
+ export_include_dirs: [
+ ".",
+ ],
+}
+
+cc_defaults {
+ name: "libeffects-test-defaults",
+ vendor: true,
+ gtest: true,
+ host_supported: true,
+ test_suites: ["device-tests"],
+ static_libs: [
+ "libaudioutils",
+ ],
+ srcs: [
+ ":libeffects-test-helper-srcs",
+ ],
+ header_libs: [
+ "libeffects-test-helper-headers",
+ "libhardware_headers",
+ ],
+ shared_libs: [
+ "liblog",
+ ],
+}
diff --git a/media/libeffects/lvm/tests/EffectTestHelper.cpp b/media/libeffects/tests/common/EffectTestHelper.cpp
similarity index 97%
rename from media/libeffects/lvm/tests/EffectTestHelper.cpp
rename to media/libeffects/tests/common/EffectTestHelper.cpp
index ec727c7..db085ba 100644
--- a/media/libeffects/lvm/tests/EffectTestHelper.cpp
+++ b/media/libeffects/tests/common/EffectTestHelper.cpp
@@ -15,7 +15,6 @@
*/
#include "EffectTestHelper.h"
-extern audio_effect_library_t AUDIO_EFFECT_LIBRARY_INFO_SYM;
namespace android {
diff --git a/media/libeffects/lvm/tests/EffectTestHelper.h b/media/libeffects/tests/common/EffectTestHelper.h
similarity index 69%
rename from media/libeffects/lvm/tests/EffectTestHelper.h
rename to media/libeffects/tests/common/EffectTestHelper.h
index bcee84e..c99e27a 100644
--- a/media/libeffects/lvm/tests/EffectTestHelper.h
+++ b/media/libeffects/tests/common/EffectTestHelper.h
@@ -21,6 +21,7 @@
#include <audio_utils/primitives.h>
#include <climits>
#include <cstdlib>
+#include <dlfcn.h>
#include <gtest/gtest.h>
#include <hardware/audio_effect.h>
#include <log/log.h>
@@ -29,7 +30,9 @@
#include <system/audio.h>
#include <vector>
+extern audio_effect_library_t AUDIO_EFFECT_LIBRARY_INFO_SYM;
namespace android {
+
template <typename T>
static float computeSnr(const T* ref, const T* tst, size_t count) {
double signal{};
@@ -82,6 +85,7 @@
void createEffect();
void releaseEffect();
void setConfig();
+
template <typename VALUE_DTYPE>
void setParam(uint32_t type, VALUE_DTYPE const value) {
int reply = 0;
@@ -101,6 +105,76 @@
ASSERT_EQ(status, 0) << "set_param returned an error " << status;
ASSERT_EQ(reply, 0) << "set_param reply non zero " << reply;
};
+
+ template <bool MULTI_VALUES, typename T>
+ int32_t getParam(uint32_t type, std::vector<T>& values) {
+ const int kMaxEffectParamValues = 10;
+ uint32_t cmd[sizeof(effect_param_t) / sizeof(uint32_t) + 1];
+ uint32_t reply[sizeof(effect_param_t) / sizeof(uint32_t) + 1 + 1 + kMaxEffectParamValues];
+
+ effect_param_t* p = (effect_param_t*)cmd;
+ p->psize = sizeof(uint32_t);
+ if (MULTI_VALUES) {
+ p->vsize = (kMaxEffectParamValues + 1) * sizeof(T);
+ } else {
+ p->vsize = sizeof(T);
+ }
+ *(uint32_t*)p->data = type;
+ uint32_t replySize = sizeof(effect_param_t) + p->psize + p->vsize;
+
+ int32_t status = (*mEffectHandle)
+ ->command(mEffectHandle, EFFECT_CMD_GET_PARAM,
+ sizeof(effect_param_t) + sizeof(uint32_t), cmd,
+ &replySize, reply);
+ if (status) {
+ return status;
+ }
+ if (p->status) {
+ return p->status;
+ }
+ if (replySize <
+ sizeof(effect_param_t) + sizeof(uint32_t) + (MULTI_VALUES ? 2 : 1) * sizeof(T)) {
+ return -EINVAL;
+ }
+
+ T* params = (T*)((uint8_t*)reply + sizeof(effect_param_t) + sizeof(uint32_t));
+ int numParams = 1;
+ if (MULTI_VALUES) {
+ numParams = (int)*params++;
+ }
+ if (numParams > kMaxEffectParamValues) {
+ return -EINVAL;
+ }
+ values.clear();
+ std::copy(¶ms[0], ¶ms[numParams], back_inserter(values));
+ return 0;
+ }
+
+ template <typename T>
+ int setParam(uint32_t type, const std::vector<T>& values) {
+ int reply = 0;
+ uint32_t replySize = sizeof(reply);
+
+ uint32_t cmd[sizeof(effect_param_t) / sizeof(uint32_t) + 1 + values.size()];
+ effect_param_t* p = (effect_param_t*)cmd;
+ p->psize = sizeof(uint32_t);
+ p->vsize = sizeof(T) * values.size();
+ *(uint32_t*)p->data = type;
+ memcpy((uint32_t*)p->data + 1, values.data(), sizeof(T) * values.size());
+
+ int status = (*mEffectHandle)
+ ->command(mEffectHandle, EFFECT_CMD_SET_PARAM,
+ sizeof(effect_param_t) + p->psize + p->vsize, p, &replySize,
+ &reply);
+ if (status) {
+ return status;
+ }
+ if (reply) {
+ return reply;
+ }
+ return 0;
+ }
+
void process(float* input, float* output);
// Corresponds to SNR for 1 bit difference between two int16_t signals
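
The getParam()/setParam() helpers above speak the effect command protocol directly: both EFFECT_CMD_GET_PARAM and EFFECT_CMD_SET_PARAM carry an effect_param_t header whose psize/vsize fields give the sizes of the parameter id and the value payload, followed by the id and then the value bytes in data[]. A small illustrative sketch of that layout for a 32-bit id and a single int32_t value (so no padding between id and value); buffer sizing and error handling are left to the caller here:

    #include <hardware/audio_effect.h>
    #include <cstdint>
    #include <cstring>

    // Illustrative only: fill `buf` (at least sizeof(effect_param_t) + 8 bytes) with an
    // EFFECT_CMD_SET_PARAM payload carrying one int32_t value for `paramId`.
    void fillSetParamCommand(uint32_t paramId, int32_t value, uint8_t* buf) {
        effect_param_t* p = reinterpret_cast<effect_param_t*>(buf);
        p->status = 0;
        p->psize = sizeof(paramId);   // parameter id size
        p->vsize = sizeof(value);     // value payload size
        std::memcpy(p->data, &paramId, sizeof(paramId));
        std::memcpy(p->data + sizeof(paramId), &value, sizeof(value));
        // total command size = sizeof(effect_param_t) + p->psize + p->vsize
    }
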
diff --git a/media/libheif/HeifDecoderImpl.cpp b/media/libheif/HeifDecoderImpl.cpp
index fcac551..50f1bf2 100644
--- a/media/libheif/HeifDecoderImpl.cpp
+++ b/media/libheif/HeifDecoderImpl.cpp
@@ -26,6 +26,7 @@
#include <binder/IMemory.h>
#include <binder/MemoryDealer.h>
#include <drm/drm_framework_common.h>
+#include <log/log.h>
#include <media/mediametadataretriever.h>
#include <media/stagefright/MediaSource.h>
#include <media/stagefright/foundation/ADebug.h>
@@ -422,7 +423,13 @@
initFrameInfo(&mSequenceInfo, videoFrame);
- mSequenceLength = atoi(mRetriever->extractMetadata(METADATA_KEY_VIDEO_FRAME_COUNT));
+ const char* frameCount = mRetriever->extractMetadata(METADATA_KEY_VIDEO_FRAME_COUNT);
+ if (frameCount == nullptr) {
+ android_errorWriteWithInfoLog(0x534e4554, "215002587", -1, NULL, 0);
+ ALOGD("No valid sequence information in metadata");
+ return false;
+ }
+ mSequenceLength = atoi(frameCount);
if (defaultInfo == nullptr) {
defaultInfo = &mSequenceInfo;
diff --git a/media/libmediahelper/Android.bp b/media/libmediahelper/Android.bp
index b9d795d..165a8ad 100644
--- a/media/libmediahelper/Android.bp
+++ b/media/libmediahelper/Android.bp
@@ -44,7 +44,10 @@
"-Wextra",
"-Wall",
],
- shared_libs: ["libutils", "liblog"],
+ shared_libs: [
+ "libutils",
+ "liblog",
+ ],
header_libs: [
"libmedia_helper_headers",
"libaudio_system_headers",
@@ -52,7 +55,7 @@
export_header_lib_headers: [
"libmedia_helper_headers",
],
- clang: true,
+
host_supported: true,
target: {
darwin: {
diff --git a/media/libmediametrics/libmediametrics.map.txt b/media/libmediametrics/libmediametrics.map.txt
index c46281a..f37af64 100644
--- a/media/libmediametrics/libmediametrics.map.txt
+++ b/media/libmediametrics/libmediametrics.map.txt
@@ -1,29 +1,29 @@
LIBMEDIAMETRICS_1 {
global:
- mediametrics_addDouble; # apex
- mediametrics_addInt32; # apex
- mediametrics_addInt64; # apex
- mediametrics_addRate; # apex
- mediametrics_count; # apex
- mediametrics_create; # apex
- mediametrics_delete; # apex
- mediametrics_freeCString; # apex
- mediametrics_getAttributes; # apex
- mediametrics_getCString; # apex
- mediametrics_getDouble; # apex
- mediametrics_getInt32; # apex
- mediametrics_getInt64; # apex
- mediametrics_getKey; # apex
- mediametrics_getRate; # apex
- mediametrics_isEnabled; # apex
- mediametrics_readable; # apex
- mediametrics_selfRecord; # apex
- mediametrics_setCString; # apex
- mediametrics_setDouble; # apex
- mediametrics_setInt32; # apex
- mediametrics_setInt64; # apex
- mediametrics_setRate; # apex
- mediametrics_setUid; # apex
+ mediametrics_addDouble; # systemapi
+ mediametrics_addInt32; # systemapi
+ mediametrics_addInt64; # systemapi
+ mediametrics_addRate; # systemapi
+ mediametrics_count; # systemapi
+ mediametrics_create; # systemapi
+ mediametrics_delete; # systemapi
+ mediametrics_freeCString; # systemapi
+ mediametrics_getAttributes; # systemapi
+ mediametrics_getCString; # systemapi
+ mediametrics_getDouble; # systemapi
+ mediametrics_getInt32; # systemapi
+ mediametrics_getInt64; # systemapi
+ mediametrics_getKey; # systemapi
+ mediametrics_getRate; # systemapi
+ mediametrics_isEnabled; # systemapi
+ mediametrics_readable; # systemapi
+ mediametrics_selfRecord; # systemapi
+ mediametrics_setCString; # systemapi
+ mediametrics_setDouble; # systemapi
+ mediametrics_setInt32; # systemapi
+ mediametrics_setInt64; # systemapi
+ mediametrics_setRate; # systemapi
+ mediametrics_setUid; # systemapi
local:
*;
};
diff --git a/media/libmediaplayerservice/nuplayer/NuPlayerDrm.cpp b/media/libmediaplayerservice/nuplayer/NuPlayerDrm.cpp
index 6788b56..5e29b3f 100644
--- a/media/libmediaplayerservice/nuplayer/NuPlayerDrm.cpp
+++ b/media/libmediaplayerservice/nuplayer/NuPlayerDrm.cpp
@@ -191,8 +191,8 @@
uint8_t key[kBlockSize],
uint8_t iv[kBlockSize],
CryptoPlugin::Mode mode,
- size_t *clearbytes,
- size_t *encryptedbytes)
+ uint32_t *clearbytes,
+ uint32_t *encryptedbytes)
{
// size needed to store all the crypto data
size_t cryptosize;
@@ -236,7 +236,7 @@
if (!meta.findData(kKeyEncryptedSizes, &type, &crypteddata, &cryptedsize)) {
return NULL;
}
- size_t numSubSamples = cryptedsize / sizeof(size_t);
+ size_t numSubSamples = cryptedsize / sizeof(uint32_t);
if (numSubSamples <= 0) {
ALOGE("getSampleCryptoInfo INVALID numSubSamples: %zu", numSubSamples);
@@ -285,8 +285,8 @@
(uint8_t*) key,
(uint8_t*) iv,
(CryptoPlugin::Mode)mode,
- (size_t*) cleardata,
- (size_t*) crypteddata);
+ (uint32_t*) cleardata,
+ (uint32_t*) crypteddata);
}
} // namespace android
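
The size_t → uint32_t change above matters on 64-bit builds: the kKeyClearSizes/kKeyEncryptedSizes blobs store each subsample length as a 32-bit value, so dividing the blob size by sizeof(size_t) halves numSubSamples and reading through a size_t pointer fuses adjacent entries. A small self-contained illustration with made-up sizes:

    #include <cstdint>
    #include <cstdio>

    int main() {
        // Metadata blob as stored in the track metadata: four 32-bit subsample sizes.
        const uint32_t sizes[] = {16, 32, 48, 64};
        const size_t blobBytes = sizeof(sizes);            // 16 bytes

        size_t wrongCount = blobBytes / sizeof(size_t);    // 2 on a 64-bit build
        size_t rightCount = blobBytes / sizeof(uint32_t);  // 4, as intended
        std::printf("wrong=%zu right=%zu\n", wrongCount, rightCount);
        return 0;
    }
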
diff --git a/media/libmediaplayerservice/nuplayer/NuPlayerDrm.h b/media/libmediaplayerservice/nuplayer/NuPlayerDrm.h
index 4360656..232638c 100644
--- a/media/libmediaplayerservice/nuplayer/NuPlayerDrm.h
+++ b/media/libmediaplayerservice/nuplayer/NuPlayerDrm.h
@@ -106,8 +106,8 @@
uint8_t key[kBlockSize],
uint8_t iv[kBlockSize],
CryptoPlugin::Mode mode,
- size_t *clearbytes,
- size_t *encryptedbytes);
+ uint32_t *clearbytes,
+ uint32_t *encryptedbytes);
static CryptoInfo *getSampleCryptoInfo(MetaDataBase &meta);
diff --git a/media/libstagefright/ACodec.cpp b/media/libstagefright/ACodec.cpp
index a4fbbbc..7917395 100644
--- a/media/libstagefright/ACodec.cpp
+++ b/media/libstagefright/ACodec.cpp
@@ -2195,7 +2195,10 @@
}
if (!msg->findInt32("aac-max-output-channel_count", &maxOutputChannelCount)) {
- maxOutputChannelCount = -1;
+ // check non-AAC-specific key
+ if (!msg->findInt32("max-output-channel-count", &maxOutputChannelCount)) {
+ maxOutputChannelCount = -1;
+ }
}
if (!msg->findInt32("aac-pcm-limiter-enable", &pcmLimiterEnable)) {
// value is unknown
diff --git a/media/libstagefright/Android.bp b/media/libstagefright/Android.bp
index e47e7ff..10baec4 100644
--- a/media/libstagefright/Android.bp
+++ b/media/libstagefright/Android.bp
@@ -253,6 +253,40 @@
],
},
}
+
+cc_library_shared {
+ name: "libstagefright_surface_utils",
+
+ srcs: [
+ "SurfaceUtils.cpp",
+ ],
+
+ shared_libs: [
+ "libgui",
+ "liblog",
+ "libui",
+ "libutils",
+ ],
+
+ export_include_dirs: [
+ "include",
+ ],
+
+ cflags: [
+ "-Wno-multichar",
+ "-Werror",
+ "-Wall",
+ ],
+
+ sanitize: {
+ cfi: true,
+ misc_undefined: [
+ "unsigned-integer-overflow",
+ "signed-integer-overflow",
+ ],
+ },
+}
+
cc_library {
name: "libstagefright",
diff --git a/media/libstagefright/CameraSource.cpp b/media/libstagefright/CameraSource.cpp
index 95afa62..9607425 100644
--- a/media/libstagefright/CameraSource.cpp
+++ b/media/libstagefright/CameraSource.cpp
@@ -564,9 +564,11 @@
// Set the preview display. Skip this if mSurface is null because
// applications may already set a surface to the camera.
if (mSurface != NULL) {
- // This CHECK is good, since we just passed the lock/unlock
- // check earlier by calling mCamera->setParameters().
- CHECK_EQ((status_t)OK, mCamera->setPreviewTarget(mSurface));
+ // Surface may be set incorrectly or could already be used even if we just
+ // passed the lock/unlock check earlier by calling mCamera->setParameters().
+ if ((err = mCamera->setPreviewTarget(mSurface)) != OK) {
+ return err;
+ }
}
// Use buffer queue to receive video buffers from camera
diff --git a/media/libstagefright/DataConverter.cpp b/media/libstagefright/DataConverter.cpp
index 52be054..b53ac77 100644
--- a/media/libstagefright/DataConverter.cpp
+++ b/media/libstagefright/DataConverter.cpp
@@ -24,6 +24,10 @@
#include <media/MediaCodecBuffer.h>
#include <media/stagefright/foundation/ADebug.h>
#include <media/stagefright/foundation/AUtils.h>
+#include <system/audio.h>
+#include <audio_utils/primitives.h>
+#include <audio_utils/format.h>
+
namespace android {
@@ -81,12 +85,38 @@
return numSamples * mTargetSampleSize;
}
+static audio_format_t getAudioFormat(AudioEncoding e) {
+ audio_format_t format = AUDIO_FORMAT_INVALID;
+ switch (e) {
+ case kAudioEncodingPcm16bit:
+ format = AUDIO_FORMAT_PCM_16_BIT;
+ break;
+ case kAudioEncodingPcm8bit:
+ format = AUDIO_FORMAT_PCM_8_BIT;
+ break;
+ case kAudioEncodingPcmFloat:
+ format = AUDIO_FORMAT_PCM_FLOAT;
+ break;
+ case kAudioEncodingPcm24bitPacked:
+ format = AUDIO_FORMAT_PCM_24_BIT_PACKED;
+ break;
+ case kAudioEncodingPcm32bit:
+ format = AUDIO_FORMAT_PCM_32_BIT;
+ break;
+ default:
+ ALOGE("Invalid AudioEncoding %d", e);
+ }
+ return format;
+}
static size_t getAudioSampleSize(AudioEncoding e) {
switch (e) {
- case kAudioEncodingPcm16bit: return 2;
- case kAudioEncodingPcm8bit: return 1;
- case kAudioEncodingPcmFloat: return 4;
+ case kAudioEncodingPcm16bit:
+ case kAudioEncodingPcm8bit:
+ case kAudioEncodingPcmFloat:
+ case kAudioEncodingPcm24bitPacked:
+ case kAudioEncodingPcm32bit:
+ return audio_bytes_per_sample(getAudioFormat(e));
default: return 0;
}
}
@@ -116,7 +146,15 @@
} else if (mTo == kAudioEncodingPcmFloat && mFrom == kAudioEncodingPcm16bit) {
memcpy_to_float_from_i16((float*)tgt->base(), (const int16_t*)src->data(), src->size() / 2);
} else {
- return INVALID_OPERATION;
+ audio_format_t srcFormat = getAudioFormat(mFrom);
+ audio_format_t dstFormat = getAudioFormat(mTo);
+
+ if ((srcFormat == AUDIO_FORMAT_INVALID) || (dstFormat == AUDIO_FORMAT_INVALID))
+ return INVALID_OPERATION;
+
+ size_t frames = src->size() / audio_bytes_per_sample(srcFormat);
+ memcpy_by_audio_format((void*)tgt->base(), dstFormat, (void*)src->data(),
+ srcFormat, frames);
}
return OK;
}
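
The fallback conversion path added above maps both encodings to audio_format_t values, computes the sample count from the source buffer size, and hands the copy to the audio_utils helper. A minimal standalone example of the same helper converting 16-bit PCM to float (assuming the audio_utils/format.h header already pulled in by this change):

    #include <audio_utils/format.h>  // memcpy_by_audio_format()
    #include <cstdint>
    #include <vector>

    int main() {
        const std::vector<int16_t> src = {0, 16384, -16384, 32767};
        std::vector<float> dst(src.size());
        // The count argument is in samples, not bytes.
        memcpy_by_audio_format(dst.data(), AUDIO_FORMAT_PCM_FLOAT,
                               src.data(), AUDIO_FORMAT_PCM_16_BIT, src.size());
        return 0;
    }
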
diff --git a/media/libstagefright/FrameDecoder.cpp b/media/libstagefright/FrameDecoder.cpp
index 5da32c9..2d29853 100644
--- a/media/libstagefright/FrameDecoder.cpp
+++ b/media/libstagefright/FrameDecoder.cpp
@@ -348,6 +348,10 @@
status_t err = OK;
bool done = false;
size_t retriesLeft = kRetryCount;
+ if (!mDecoder) {
+ ALOGE("decoder is not initialized");
+ return NO_INIT;
+ }
do {
size_t index;
int64_t ptsUs = 0LL;
diff --git a/media/libstagefright/MediaCodec.cpp b/media/libstagefright/MediaCodec.cpp
index e42b538..55b5b76 100644
--- a/media/libstagefright/MediaCodec.cpp
+++ b/media/libstagefright/MediaCodec.cpp
@@ -1418,9 +1418,14 @@
if (mIsVideo) {
// video codec needs dedicated looper
if (mCodecLooper == NULL) {
+ status_t err = OK;
mCodecLooper = new ALooper;
mCodecLooper->setName("CodecLooper");
- mCodecLooper->start(false, false, ANDROID_PRIORITY_AUDIO);
+ err = mCodecLooper->start(false, false, ANDROID_PRIORITY_AUDIO);
+ if (OK != err) {
+ ALOGE("Codec Looper failed to start");
+ return err;
+ }
}
mCodecLooper->registerHandler(mCodec);
@@ -2490,7 +2495,9 @@
msg->setObject("c2buffer", obj);
msg->setInt64("timeUs", presentationTimeUs);
msg->setInt32("flags", flags);
- msg->setMessage("tunings", tunings);
+ if (tunings && tunings->countEntries() > 0) {
+ msg->setMessage("tunings", tunings);
+ }
msg->setPointer("errorDetailMsg", errorDetailMsg);
sp<AMessage> response;
@@ -2532,7 +2539,9 @@
msg->setInt32("skipBlocks", pattern.mSkipBlocks);
msg->setInt64("timeUs", presentationTimeUs);
msg->setInt32("flags", flags);
- msg->setMessage("tunings", tunings);
+ if (tunings && tunings->countEntries() > 0) {
+ msg->setMessage("tunings", tunings);
+ }
msg->setPointer("errorDetailMsg", errorDetailMsg);
sp<AMessage> response;
@@ -3004,8 +3013,9 @@
CHECK(msg->findInt32("err", &err));
CHECK(msg->findInt32("actionCode", &actionCode));
- ALOGE("Codec reported err %#x, actionCode %d, while in state %d/%s",
- err, actionCode, mState, stateString(mState).c_str());
+ ALOGE("Codec reported err %#x/%s, actionCode %d, while in state %d/%s",
+ err, StrMediaError(err).c_str(), actionCode,
+ mState, stateString(mState).c_str());
if (err == DEAD_OBJECT) {
mFlags |= kFlagSawMediaServerDie;
mFlags &= ~kFlagIsComponentAllocated;
@@ -4093,26 +4103,29 @@
break;
}
- if (asyncNotify != nullptr) {
- if (mSurface != NULL) {
- if (!mReleaseSurface) {
- uint64_t usage = 0;
- if (mSurface->getConsumerUsage(&usage) != OK) {
- usage = 0;
- }
- mReleaseSurface.reset(new ReleaseSurface(usage));
+ bool forceSync = false;
+ if (asyncNotify != nullptr && mSurface != NULL) {
+ if (!mReleaseSurface) {
+ uint64_t usage = 0;
+ if (mSurface->getConsumerUsage(&usage) != OK) {
+ usage = 0;
}
- if (mSurface != mReleaseSurface->getSurface()) {
- status_t err = connectToSurface(mReleaseSurface->getSurface());
- ALOGW_IF(err != OK, "error connecting to release surface: err = %d", err);
- if (err == OK && !(mFlags & kFlagUsesSoftwareRenderer)) {
- err = mCodec->setSurface(mReleaseSurface->getSurface());
- ALOGW_IF(err != OK, "error setting release surface: err = %d", err);
- }
- if (err == OK) {
- (void)disconnectFromSurface();
- mSurface = mReleaseSurface->getSurface();
- }
+ mReleaseSurface.reset(new ReleaseSurface(usage));
+ }
+ if (mSurface != mReleaseSurface->getSurface()) {
+ status_t err = connectToSurface(mReleaseSurface->getSurface());
+ ALOGW_IF(err != OK, "error connecting to release surface: err = %d", err);
+ if (err == OK && !(mFlags & kFlagUsesSoftwareRenderer)) {
+ err = mCodec->setSurface(mReleaseSurface->getSurface());
+ ALOGW_IF(err != OK, "error setting release surface: err = %d", err);
+ }
+ if (err == OK) {
+ (void)disconnectFromSurface();
+ mSurface = mReleaseSurface->getSurface();
+ } else {
+ // We were not able to switch the surface, so force
+ // synchronous release.
+ forceSync = true;
}
}
}
@@ -4136,8 +4149,10 @@
}
if (asyncNotify != nullptr) {
- mResourceManagerProxy->markClientForPendingRemoval();
- postPendingRepliesAndDeferredMessages("kWhatRelease:async");
+ if (!forceSync) {
+ mResourceManagerProxy->markClientForPendingRemoval();
+ postPendingRepliesAndDeferredMessages("kWhatRelease:async");
+ }
asyncNotifyPost.clear();
mAsyncReleaseCompleteNotification = asyncNotify;
}
@@ -4646,7 +4661,6 @@
mCSD.erase(mCSD.begin());
std::shared_ptr<C2Buffer> c2Buffer;
sp<hardware::HidlMemory> memory;
- size_t offset = 0;
if (mFlags & kFlagUseBlockModel) {
if (hasCryptoOrDescrambler()) {
@@ -4667,7 +4681,6 @@
memcpy(mem->unsecurePointer(), csd->data(), csd->size());
ssize_t heapOffset;
memory = hardware::fromHeap(mem->getMemory(&heapOffset, nullptr));
- offset += heapOffset;
} else {
std::shared_ptr<C2LinearBlock> block =
FetchLinearBlock(csd->size(), {std::string{mComponentName.c_str()}});
@@ -4709,12 +4722,10 @@
sp<WrapperObject<std::shared_ptr<C2Buffer>>> obj{
new WrapperObject<std::shared_ptr<C2Buffer>>{c2Buffer}};
msg->setObject("c2buffer", obj);
- msg->setMessage("tunings", new AMessage);
} else if (memory) {
sp<WrapperObject<sp<hardware::HidlMemory>>> obj{
new WrapperObject<sp<hardware::HidlMemory>>{memory}};
msg->setObject("memory", obj);
- msg->setMessage("tunings", new AMessage);
}
return onQueueInputBuffer(msg);
@@ -4896,9 +4907,10 @@
sp<MediaCodecBuffer> buffer = info->mData;
if (c2Buffer || memory) {
- sp<AMessage> tunings;
- CHECK(msg->findMessage("tunings", &tunings));
- onSetParameters(tunings);
+ sp<AMessage> tunings = NULL;
+ if (msg->findMessage("tunings", &tunings) && tunings != NULL) {
+ onSetParameters(tunings);
+ }
status_t err = OK;
if (c2Buffer) {
diff --git a/media/libstagefright/Utils.cpp b/media/libstagefright/Utils.cpp
index 4b6470a..900ac32 100644
--- a/media/libstagefright/Utils.cpp
+++ b/media/libstagefright/Utils.cpp
@@ -126,14 +126,10 @@
}
}
-static bool isHdr(const sp<AMessage> &format) {
- // if CSD specifies HDR transfer(s), we assume HDR. Otherwise, if it specifies non-HDR
- // transfers, we must assume non-HDR. This is because CSD trumps any color-transfer key
- // in the format.
- int32_t isHdr;
- if (format->findInt32("android._is-hdr", &isHdr)) {
- return isHdr;
- }
+/**
+ * Returns true if, and only if, the given format corresponds to HDR10 or HDR10+.
+ */
+static bool isHdr10or10Plus(const sp<AMessage> &format) {
// if user/container supplied HDR static info without transfer set, assume true
if ((format->contains("hdr-static-info") || format->contains("hdr10-plus-info"))
@@ -143,8 +139,7 @@
// otherwise, verify that an HDR transfer function is set
int32_t transfer;
if (format->findInt32("color-transfer", &transfer)) {
- return transfer == ColorUtils::kColorTransferST2084
- || transfer == ColorUtils::kColorTransferHLG;
+ return transfer == ColorUtils::kColorTransferST2084;
}
return false;
}
@@ -419,8 +414,12 @@
}
// bump to HDR profile
- if (isHdr(format) && codecProfile == HEVCProfileMain10) {
- codecProfile = HEVCProfileMain10HDR10;
+ if (isHdr10or10Plus(format) && codecProfile == HEVCProfileMain10) {
+ if (format->contains("hdr10-plus-info")) {
+ codecProfile = HEVCProfileMain10HDR10Plus;
+ } else {
+ codecProfile = HEVCProfileMain10HDR10;
+ }
}
format->setInt32("profile", codecProfile);
@@ -615,16 +614,25 @@
{ 3, VP9Profile3 },
};
- const static ALookup<int32_t, int32_t> toHdr {
+ const static ALookup<int32_t, int32_t> toHdr10 {
{ VP9Profile2, VP9Profile2HDR },
{ VP9Profile3, VP9Profile3HDR },
};
+ const static ALookup<int32_t, int32_t> toHdr10Plus {
+ { VP9Profile2, VP9Profile2HDR10Plus },
+ { VP9Profile3, VP9Profile3HDR10Plus },
+ };
+
int32_t profile;
if (profiles.map(data[0], &profile)) {
// convert to HDR profile
- if (isHdr(format)) {
- toHdr.lookup(profile, &profile);
+ if (isHdr10or10Plus(format)) {
+ if (format->contains("hdr10-plus-info")) {
+ toHdr10Plus.lookup(profile, &profile);
+ } else {
+ toHdr10.lookup(profile, &profile);
+ }
}
format->setInt32("profile", profile);
@@ -684,7 +692,7 @@
int32_t profile;
if (profiles.map(std::make_pair(highBitDepth, profileData), &profile)) {
// bump to HDR profile
- if (isHdr(format) && profile == AV1ProfileMain10) {
+ if (isHdr10or10Plus(format) && profile == AV1ProfileMain10) {
if (format->contains("hdr10-plus-info")) {
profile = AV1ProfileMain10HDR10Plus;
} else {
@@ -1903,10 +1911,10 @@
if (msg->findString("ts-schema", &tsSchema)) {
unsigned int numLayers = 0;
unsigned int numBLayers = 0;
- char dummy;
+ char placeholder;
int tags = sscanf(tsSchema.c_str(), "android.generic.%u%c%u%c",
- &numLayers, &dummy, &numBLayers, &dummy);
- if ((tags == 1 || (tags == 3 && dummy == '+'))
+ &numLayers, &placeholder, &numBLayers, &placeholder);
+ if ((tags == 1 || (tags == 3 && placeholder == '+'))
&& numLayers > 0 && numLayers < UINT32_MAX - numBLayers
&& numLayers + numBLayers <= INT32_MAX) {
meta->setInt32(kKeyTemporalLayerCount, numLayers + numBLayers);
diff --git a/media/libstagefright/foundation/Android.bp b/media/libstagefright/foundation/Android.bp
index dd2c66f..29360b1 100644
--- a/media/libstagefright/foundation/Android.bp
+++ b/media/libstagefright/foundation/Android.bp
@@ -120,8 +120,6 @@
},
},
- clang: true,
-
sanitize: {
misc_undefined: [
"unsigned-integer-overflow",
@@ -165,7 +163,7 @@
shared_libs: [
"liblog",
- "libutils", // for sp<>
+ "libutils", // for sp<>
// actually invokes this, but called from folks who already load it
// "libmediandk",
],
@@ -193,8 +191,6 @@
"ColorUtils_fill.cpp",
],
- clang: true,
-
sanitize: {
misc_undefined: [
"unsigned-integer-overflow",
@@ -211,4 +207,3 @@
],
}
-
diff --git a/media/libstagefright/httplive/fuzzer/Android.bp b/media/libstagefright/httplive/fuzzer/Android.bp
index 14097b0..85fd8b7 100644
--- a/media/libstagefright/httplive/fuzzer/Android.bp
+++ b/media/libstagefright/httplive/fuzzer/Android.bp
@@ -40,7 +40,6 @@
"libdatasource",
"libmedia",
"libstagefright",
- "libutils",
],
header_libs: [
"libbase_headers",
@@ -53,6 +52,7 @@
"libstagefright_foundation",
"libhidlbase",
"libhidlmemory",
+ "libutils",
"android.hidl.allocator@1.0",
],
corpus: ["corpus/*"],
diff --git a/media/libstagefright/include/media/stagefright/MediaCodecConstants.h b/media/libstagefright/include/media/stagefright/MediaCodecConstants.h
index 9040e8b..78792c5 100644
--- a/media/libstagefright/include/media/stagefright/MediaCodecConstants.h
+++ b/media/libstagefright/include/media/stagefright/MediaCodecConstants.h
@@ -809,6 +809,7 @@
constexpr char KEY_MAX_FPS_TO_ENCODER[] = "max-fps-to-encoder";
constexpr char KEY_MAX_HEIGHT[] = "max-height";
constexpr char KEY_MAX_INPUT_SIZE[] = "max-input-size";
+constexpr char KEY_MAX_OUTPUT_CHANNEL_COUNT[] = "max-output-channel-count";
constexpr char KEY_MAX_PTS_GAP_TO_ENCODER[] = "max-pts-gap-to-encoder";
constexpr char KEY_MAX_WIDTH[] = "max-width";
constexpr char KEY_MIME[] = "mime";
diff --git a/media/libstagefright/include/media/stagefright/MediaErrors.h b/media/libstagefright/include/media/stagefright/MediaErrors.h
index d1df2ca..b91c850 100644
--- a/media/libstagefright/include/media/stagefright/MediaErrors.h
+++ b/media/libstagefright/include/media/stagefright/MediaErrors.h
@@ -163,11 +163,28 @@
|| (ERROR_DRM_VENDOR_MIN <= err && err <= ERROR_DRM_VENDOR_MAX);
}
-static inline std::string StrCryptoError(status_t err) {
#define STATUS_CASE(STATUS) \
case STATUS: \
return #STATUS
+static inline std::string StrMediaError(status_t err) {
+ switch(err) {
+ STATUS_CASE(ERROR_ALREADY_CONNECTED);
+ STATUS_CASE(ERROR_NOT_CONNECTED);
+ STATUS_CASE(ERROR_UNKNOWN_HOST);
+ STATUS_CASE(ERROR_CANNOT_CONNECT);
+ STATUS_CASE(ERROR_IO);
+ STATUS_CASE(ERROR_CONNECTION_LOST);
+ STATUS_CASE(ERROR_MALFORMED);
+ STATUS_CASE(ERROR_OUT_OF_RANGE);
+ STATUS_CASE(ERROR_BUFFER_TOO_SMALL);
+ STATUS_CASE(ERROR_UNSUPPORTED);
+ STATUS_CASE(ERROR_END_OF_STREAM);
+ }
+ return statusToString(err);
+}
+
+static inline std::string StrCryptoError(status_t err) {
switch (err) {
STATUS_CASE(ERROR_DRM_UNKNOWN);
STATUS_CASE(ERROR_DRM_NO_LICENSE);
@@ -209,10 +226,10 @@
STATUS_CASE(ERROR_DRM_STORAGE_READ);
STATUS_CASE(ERROR_DRM_STORAGE_WRITE);
STATUS_CASE(ERROR_DRM_ZERO_SUBSAMPLES);
-#undef STATUS_CASE
}
return statusToString(err);
}
+#undef STATUS_CASE
} // namespace android
diff --git a/media/libstagefright/renderfright/Android.bp b/media/libstagefright/renderfright/Android.bp
index 9a7bad9..3c00a1c 100644
--- a/media/libstagefright/renderfright/Android.bp
+++ b/media/libstagefright/renderfright/Android.bp
@@ -87,7 +87,7 @@
enabled: true,
},
double_loadable: true,
- clang: true,
+
cflags: [
"-fvisibility=hidden",
"-Werror=format",
diff --git a/media/libstagefright/renderfright/gl/ProgramCache.cpp b/media/libstagefright/renderfright/gl/ProgramCache.cpp
index 3ae35ec..1a3b4e7 100644
--- a/media/libstagefright/renderfright/gl/ProgramCache.cpp
+++ b/media/libstagefright/renderfright/gl/ProgramCache.cpp
@@ -374,7 +374,11 @@
return color * slope;
} else if (nits < x1) {
// scale [x0, x1] to [y0, y1] linearly
- float slope = (y1 - y0) / (x1 - x0);
+ // Use highp since some compilers may do this
+ // operation as reciprocal multiplication with
+ // re-association that could exceed the range
+ // of mediump float.
+ highp float slope = (y1 - y0) / (x1 - x0);
nits = y0 + (nits - x0) * slope;
} else if (nits < x2) {
// scale [x1, x2] to [y1, y2] using Hermite interp
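For orientation only (not part of the patch): GLSL ES guarantees a mediump float range of only about ±2^14 = ±16384. If the compiler rewrites (y1 - y0) / (x1 - x0) as (y1 - y0) * (1.0 / (x1 - x0)) and x1 - x0 is, say, 2^-15, the intermediate reciprocal 2^15 = 32768 already exceeds that range even when the final slope is representable, which is what the highp qualifier above guards against.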
diff --git a/media/libstagefright/rtsp/ARTPConnection.cpp b/media/libstagefright/rtsp/ARTPConnection.cpp
index 0bd342a..83291f3 100644
--- a/media/libstagefright/rtsp/ARTPConnection.cpp
+++ b/media/libstagefright/rtsp/ARTPConnection.cpp
@@ -558,6 +558,7 @@
if (n != (ssize_t)buffer->size()) {
ALOGW("failed to send RTCP receiver report (%s).",
n >= 0 ? "connection gone" : strerror(errno));
+ ++it;
continue;
}
diff --git a/media/libstagefright/tests/fuzzers/FuzzerMediaUtility.cpp b/media/libstagefright/tests/fuzzers/FuzzerMediaUtility.cpp
index 810ae95..2b2692f 100644
--- a/media/libstagefright/tests/fuzzers/FuzzerMediaUtility.cpp
+++ b/media/libstagefright/tests/fuzzers/FuzzerMediaUtility.cpp
@@ -65,8 +65,8 @@
for (size_t i = 0; i < extractor->countTracks(); ++i) {
sp<MetaData> meta = extractor->getTrackMetaData(i);
- const char *trackMime;
- if (!strcasecmp(mime.c_str(), trackMime)) {
+ std::string trackMime = dataProvider->PickValueInArray(kTestedMimeTypes);
+ if (!strcasecmp(mime.c_str(), trackMime.c_str())) {
sp<IMediaSource> track = extractor->getTrack(i);
if (track == NULL) {
return NULL;
diff --git a/media/libstagefright/tests/fuzzers/FuzzerMediaUtility.h b/media/libstagefright/tests/fuzzers/FuzzerMediaUtility.h
index 98bfb94..6856ac0 100644
--- a/media/libstagefright/tests/fuzzers/FuzzerMediaUtility.h
+++ b/media/libstagefright/tests/fuzzers/FuzzerMediaUtility.h
@@ -42,6 +42,51 @@
kMaxValue = MPEG2TS,
};
+static std::string kTestedMimeTypes[] = {"audio/3gpp",
+ "audio/amr-wb",
+ "audio/vorbis",
+ "audio/opus",
+ "audio/mp4a-latm",
+ "audio/mpeg",
+ "audio/mpeg-L1",
+ "audio/mpeg-L2",
+ "audio/midi",
+ "audio/qcelp",
+ "audio/g711-alaw",
+ "audio/g711-mlaw",
+ "audio/flac",
+ "audio/aac-adts",
+ "audio/gsm",
+ "audio/ac3",
+ "audio/eac3",
+ "audio/eac3-joc",
+ "audio/ac4",
+ "audio/scrambled",
+ "audio/alac",
+ "audio/x-ms-wma",
+ "audio/x-adpcm-ms",
+ "audio/x-adpcm-dvi-ima",
+ "video/avc",
+ "video/hevc",
+ "video/mp4v-es",
+ "video/3gpp",
+ "video/x-vnd.on2.vp8",
+ "video/x-vnd.on2.vp9",
+ "video/av01",
+ "video/mpeg2",
+ "video/dolby-vision",
+ "video/scrambled",
+ "video/divx",
+ "video/divx3",
+ "video/xvid",
+ "video/x-motion-jpeg",
+ "text/3gpp-tt",
+ "application/x-subrip",
+ "text/vtt",
+ "text/cea-608",
+ "text/cea-708",
+ "application/x-id3v4"};
+
std::string genMimeType(FuzzedDataProvider *dataProvider);
sp<IMediaExtractor> genMediaExtractor(FuzzedDataProvider *dataProvider, uint16_t dataAmount);
sp<MediaSource> genMediaSource(FuzzedDataProvider *dataProvider, uint16_t maxMediaBlobSize);
diff --git a/media/libstagefright/xmlparser/Android.bp b/media/libstagefright/xmlparser/Android.bp
index 055dd80..afc873c 100644
--- a/media/libstagefright/xmlparser/Android.bp
+++ b/media/libstagefright/xmlparser/Android.bp
@@ -41,8 +41,6 @@
"-Wall",
],
- clang: true,
-
sanitize: {
misc_undefined: [
"unsigned-integer-overflow",
diff --git a/media/mediaserver/Android.bp b/media/mediaserver/Android.bp
index d03746d..2e27366 100644
--- a/media/mediaserver/Android.bp
+++ b/media/mediaserver/Android.bp
@@ -26,7 +26,33 @@
],
}
-cc_binary {
+prebuilt_etc {
+ name: "mediaserver.zygote64_32.rc",
+ src: "mediaserver.zygote64_32.rc",
+ sub_dir: "init/hw",
+}
+
+prebuilt_etc {
+ name: "mediaserver.zygote64.rc",
+ src: "mediaserver.zygote64.rc",
+ sub_dir: "init/hw",
+}
+
+soong_config_module_type {
+ name: "mediaserver_cc_binary",
+ module_type: "cc_binary",
+ config_namespace: "ANDROID",
+ bool_variables: ["TARGET_DYNAMIC_64_32_MEDIASERVER"],
+ properties: [
+ "compile_multilib",
+ "init_rc",
+ "multilib.lib32.suffix",
+ "multilib.lib64.suffix",
+ "required",
+ ],
+}
+
+mediaserver_cc_binary {
name: "mediaserver",
srcs: ["main_mediaserver.cpp"],
@@ -60,12 +86,32 @@
// ****************************************************************
compile_multilib: "prefer32",
- init_rc: ["mediaserver.rc"],
-
cflags: [
"-Werror",
"-Wall",
],
vintf_fragments: ["manifest_media_c2_software.xml"],
+
+ soong_config_variables: {
+ TARGET_DYNAMIC_64_32_MEDIASERVER: {
+ compile_multilib: "both",
+ multilib: {
+ lib32: {
+ suffix: "32",
+ },
+ lib64: {
+ suffix: "64",
+ },
+ },
+ required: [
+ "mediaserver.zygote64_32.rc",
+ "mediaserver.zygote64.rc",
+ ],
+ init_rc: ["mediaserver_dynamic.rc"],
+ conditions_default: {
+ init_rc: ["mediaserver.rc"],
+ },
+ },
+ },
}
diff --git a/media/mediaserver/mediaserver.zygote64.rc b/media/mediaserver/mediaserver.zygote64.rc
new file mode 100644
index 0000000..8842b01
--- /dev/null
+++ b/media/mediaserver/mediaserver.zygote64.rc
@@ -0,0 +1,6 @@
+service media /system/bin/mediaserver64
+ class main
+ user media
+ group audio camera inet net_bt net_bt_admin net_bw_acct drmrpc mediadrm
+ ioprio rt 4
+ task_profiles ProcessCapacityHigh HighPerformance
diff --git a/media/mediaserver/mediaserver.zygote64_32.rc b/media/mediaserver/mediaserver.zygote64_32.rc
new file mode 100644
index 0000000..4039073
--- /dev/null
+++ b/media/mediaserver/mediaserver.zygote64_32.rc
@@ -0,0 +1,6 @@
+service media /system/bin/mediaserver32
+ class main
+ user media
+ group audio camera inet net_bt net_bt_admin net_bw_acct drmrpc mediadrm
+ ioprio rt 4
+ task_profiles ProcessCapacityHigh HighPerformance
diff --git a/media/mediaserver/mediaserver_dynamic.rc b/media/mediaserver/mediaserver_dynamic.rc
new file mode 100644
index 0000000..65d5c40
--- /dev/null
+++ b/media/mediaserver/mediaserver_dynamic.rc
@@ -0,0 +1,4 @@
+on property:init.svc.media=*
+ setprop init.svc.mediadrm ${init.svc.media}
+
+import /system/etc/init/hw/mediaserver.${ro.zygote}.rc
diff --git a/media/mtp/tests/MtpFuzzer/Android.bp b/media/mtp/tests/MtpFuzzer/Android.bp
index 5365f4b..289b3ba 100644
--- a/media/mtp/tests/MtpFuzzer/Android.bp
+++ b/media/mtp/tests/MtpFuzzer/Android.bp
@@ -1,3 +1,19 @@
+/*
+ * Copyright (C) 2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
package {
// See: http://go/android-license-faq
// A large-scale-change added 'default_applicable_licenses' to import
@@ -6,29 +22,20 @@
// SPDX-license-identifier-Apache-2.0
default_applicable_licenses: ["frameworks_av_media_mtp_license"],
}
-
-cc_fuzz {
- name: "mtp_fuzzer",
- srcs: [
- "mtp_fuzzer.cpp",
- "MtpMockDatabase.cpp",
- ],
+cc_defaults {
+ name: "mtp_fuzzer_defaults",
shared_libs: [
- "libmtp",
- "libbase",
- "liblog",
- "libutils",
+ "libbase",
+ "liblog",
+ "libutils",
],
+ static_libs: ["libc++fs",],
cflags: [
"-Wall",
"-Wextra",
"-Werror",
- "-DMTP_DEVICE",
"-Wno-unused-parameter",
],
- dictionary: "mtp_fuzzer.dict",
- corpus: ["corpus/*"],
-
fuzz_config: {
cc: ["jameswei@google.com"],
@@ -38,3 +45,15 @@
],
},
}
+cc_fuzz {
+ name: "mtp_fuzzer",
+ srcs: [
+ "mtp_fuzzer.cpp",
+ "MtpMockDatabase.cpp",
+ ],
+ cflags: ["-DMTP_DEVICE",],
+ shared_libs: ["libmtp",],
+ defaults: ["mtp_fuzzer_defaults"],
+ dictionary: "mtp_fuzzer.dict",
+ corpus: ["corpus/*"],
+}
diff --git a/media/mtp/tests/MtpFuzzer/README.md b/media/mtp/tests/MtpFuzzer/README.md
new file mode 100644
index 0000000..7c6ff7a
--- /dev/null
+++ b/media/mtp/tests/MtpFuzzer/README.md
@@ -0,0 +1,24 @@
+# Fuzzers for libmtp
+
+## Table of contents
++ [mtp_fuzzer](#MtpServer)
+
+# <a name="MtpServer"></a> Fuzzer for MtpServer
+
+MtpServer supports the following parameters:
+1. PacketData (parameter name: "packetData")
+
+| Parameter    | Valid Values | Configured Value                        |
+|--------------|--------------|-----------------------------------------|
+| `packetData` | `String`     | Value obtained from FuzzedDataProvider  |
+
+#### Steps to run
+1. Build the fuzzer
+```
+ $ mm -j$(nproc) mtp_fuzzer
+```
+2. Run on device
+```
+ $ adb sync data
+ $ adb shell /data/fuzz/arm64/mtp_fuzzer/mtp_fuzzer corpus/ -dict=mtp_fuzzer.dict
+```
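For orientation only (not part of the patch): as shown in the mtp_fuzzer.cpp changes further below, the harness splits the consumed input on '@' bytes to form MTP packets, so a seed corpus entry is simply raw packets joined by '@'. The sketch below illustrates that layout; the placeholder packet bytes and the seed file name are assumptions, not valid MTP containers.

```cpp
// Minimal sketch of writing an mtp_fuzzer seed file; payloads are placeholders.
#include <fstream>
#include <string>
#include <vector>

int main() {
    const std::vector<std::string> packets = {
        std::string("\x01\x02\x03\x04", 4),
        std::string("\x05\x06\x07\x08", 4),
    };
    std::ofstream out("corpus/seed.pkt", std::ios::binary);
    for (size_t i = 0; i < packets.size(); ++i) {
        if (i != 0) {
            out.put('@');  // delimiter recognized by addPackets() in mtp_fuzzer.cpp
        }
        out.write(packets[i].data(), packets[i].size());
    }
    return 0;
}
```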
diff --git a/media/mtp/tests/MtpFuzzer/corpus/6-mtp-open_session_send_object_info.pkt b/media/mtp/tests/MtpFuzzer/corpus/6-mtp-open_session_send_object_info.pkt
new file mode 100644
index 0000000..71f2836
--- /dev/null
+++ b/media/mtp/tests/MtpFuzzer/corpus/6-mtp-open_session_send_object_info.pkt
Binary files differ
diff --git a/media/mtp/tests/MtpFuzzer/mtp_fuzzer.cpp b/media/mtp/tests/MtpFuzzer/mtp_fuzzer.cpp
index f578462..e886816 100644
--- a/media/mtp/tests/MtpFuzzer/mtp_fuzzer.cpp
+++ b/media/mtp/tests/MtpFuzzer/mtp_fuzzer.cpp
@@ -14,12 +14,15 @@
* limitations under the License.
*/
+#include <android-base/properties.h>
#include <android-base/unique_fd.h>
+#include <fuzzer/FuzzedDataProvider.h>
#include <stddef.h>
#include <stdint.h>
#include <stdlib.h>
#include <unistd.h>
-
+#include <filesystem>
+#include <fstream>
#include <string>
#define LOG_TAG "MtpFuzzer"
@@ -32,38 +35,40 @@
#include "MtpStorage.h"
#include "MtpUtils.h"
-const char* storage_desc = "Fuzz Storage";
+constexpr int32_t kMinFiles = 0;
+constexpr int32_t kMaxFiles = 5;
+constexpr int32_t kMaxBytes = 128;
+constexpr float kMinDataSizeFactor = 0.8;
// prefer tmpfs for file operations to avoid wearing out flash
const char* storage_path = "/storage/fuzzer/0";
-const char* source_database = "srcdb/";
+const char* source_database = "/data/local/tmp/srcdb/";
+const std::string test_path = std::string(source_database) + "TestDir/";
+const std::string kPropertyKey = "sys.fuse.transcode_mtp";
namespace android {
class MtpMockServer {
-public:
- std::unique_ptr<MtpMockHandle> mHandle;
- std::unique_ptr<MtpStorage> mStorage;
- std::unique_ptr<MtpMockDatabase> mDatabase;
- std::unique_ptr<MtpServer> mMtp;
- int mStorageId;
-
- MtpMockServer(const char* storage_path) : mStorageId(0) {
- bool ptp = false;
- const char* manu = "Google";
- const char* model = "Pixel 3XL";
- const char* version = "1.0";
- const char* serial = "ABDEF1231";
-
+ public:
+ MtpMockServer(const uint8_t* data, size_t size) : mFdp(data, size) {
// This is unused in our harness
int controlFd = -1;
mHandle = std::make_unique<MtpMockHandle>();
- mStorage = std::make_unique<MtpStorage>(mStorageId, storage_path, storage_desc, true,
- 0x200000000L);
+ mStorage = std::make_unique<MtpStorage>(
+ mFdp.ConsumeIntegral<uint32_t>() /* storageId */, storage_path,
+ mFdp.ConsumeRandomLengthString(kMaxBytes).c_str() /* descriptor */,
+ mFdp.ConsumeBool() /* removable */,
+ mFdp.ConsumeIntegral<uint64_t>() /* maxFileSize */);
mDatabase = std::make_unique<MtpMockDatabase>();
mDatabase->addStorage(mStorage.get());
- mMtp = std::make_unique<MtpServer>(mDatabase.get(), controlFd, ptp, manu, model, version,
- serial);
+ init(data, size);
+
+ mMtp = std::make_unique<MtpServer>(
+ mDatabase.get(), controlFd, mFdp.ConsumeBool() /* ptp */,
+ mFdp.ConsumeRandomLengthString(kMaxBytes).c_str() /* manu */,
+ mFdp.ConsumeRandomLengthString(kMaxBytes).c_str() /* model */,
+ mFdp.ConsumeRandomLengthString(kMaxBytes).c_str() /* version */,
+ mFdp.ConsumeRandomLengthString(kMaxBytes).c_str() /* serial */);
mMtp->addStorage(mStorage.get());
// clear the old handle first, so we don't leak memory
@@ -71,7 +76,76 @@
mMtp->mHandle = mHandle.get();
}
- void run() { mMtp->run(); }
+ void process() {
+ if (mFdp.ConsumeBool()) {
+ createDatabaseFromSourceDir(source_database, storage_path, MTP_PARENT_ROOT);
+ }
+
+ while (mFdp.remaining_bytes()) {
+ MtpStorage storage(mFdp.ConsumeIntegral<uint32_t>() /* id */,
+ mFdp.ConsumeRandomLengthString(kMaxBytes).c_str() /* filePath */,
+ mFdp.ConsumeRandomLengthString(kMaxBytes).c_str() /* description */,
+ mFdp.ConsumeBool() /* removable */,
+ mFdp.ConsumeIntegral<uint64_t>() /* maxFileSize */);
+
+ auto invokeMtpServerAPI = mFdp.PickValueInArray<const std::function<void()>>({
+ [&]() { mMtp->run(); },
+ [&]() { mMtp->sendObjectAdded(mFdp.ConsumeIntegral<uint32_t>()); },
+ [&]() { mMtp->sendObjectRemoved(mFdp.ConsumeIntegral<uint32_t>()); },
+ [&]() { mMtp->sendObjectInfoChanged(mFdp.ConsumeIntegral<uint32_t>()); },
+ [&]() { mMtp->sendDevicePropertyChanged(mFdp.ConsumeIntegral<uint16_t>()); },
+ [&]() { mMtp->addStorage(&storage); },
+ [&]() { mMtp->removeStorage(&storage); },
+ });
+
+ invokeMtpServerAPI();
+ }
+
+ std::filesystem::remove_all(source_database);
+ }
+
+ private:
+ void createFiles(std::string path, size_t fileCount) {
+ std::ofstream file;
+ for (size_t idx = 0; idx < fileCount; ++idx) {
+ file.open(path.append(std::to_string(idx)));
+ file.close();
+ }
+ }
+
+ void addPackets(const uint8_t* data, size_t size) {
+ size_t off = 0;
+ for (size_t i = 0; i < size; ++i) {
+ // A longer delimiter could be used, but this worked in practice
+ if (data[i] == '@') {
+ size_t pktsz = i - off;
+ if (pktsz > 0) {
+ packet_t pkt = packet_t((unsigned char*)data + off, (unsigned char*)data + i);
+ // insert into packet buffer
+ mHandle->add_packet(pkt);
+ off = i;
+ }
+ }
+ }
+ }
+
+ void init(const uint8_t* data, size_t size) {
+ std::vector<uint8_t> packetData = mFdp.ConsumeBytes<uint8_t>(
+ mFdp.ConsumeIntegralInRange<int32_t>(kMinDataSizeFactor * size, size));
+
+ // Packetize the input stream
+ addPackets(packetData.data(), packetData.size());
+
+ // Randomly set the property to true/false to also fuzz the code path that the PoC depended on
+ base::SetProperty(kPropertyKey, mFdp.ConsumeBool() ? "true" : "false");
+
+ std::filesystem::create_directories(source_database);
+ if (mFdp.ConsumeBool()) {
+ std::filesystem::create_directories(test_path);
+ createFiles(test_path, mFdp.ConsumeIntegralInRange<size_t>(kMinFiles, kMaxFiles));
+ }
+ createFiles(source_database, mFdp.ConsumeIntegralInRange<size_t>(kMinFiles, kMaxFiles));
+ }
int createDatabaseFromSourceDir(const char* fromPath, const char* toPath,
MtpObjectHandle parentHandle) {
@@ -130,8 +204,14 @@
closedir(dir);
return ret;
}
+
+ FuzzedDataProvider mFdp;
+ std::unique_ptr<MtpMockHandle> mHandle;
+ std::unique_ptr<MtpStorage> mStorage;
+ std::unique_ptr<MtpMockDatabase> mDatabase;
+ std::unique_ptr<MtpServer> mMtp;
};
-}; // namespace android
+}; // namespace android
extern "C" int LLVMFuzzerTestOneInput(const uint8_t* data, size_t size) __attribute__((optnone)) {
// reset our storage (from MtpUtils.h)
@@ -140,26 +220,9 @@
android::makeFolder(storage_path);
std::unique_ptr<android::MtpMockServer> mtp =
- std::make_unique<android::MtpMockServer>(storage_path);
+ std::make_unique<android::MtpMockServer>(data, size);
+ mtp->process();
- size_t off = 0;
-
- // Packetize the input stream
- for (size_t i = 0; i < size; i++) {
- // A longer delimiter could be used, but this worked in practice
- if (data[i] == '@') {
- size_t pktsz = i - off;
- if (pktsz > 0) {
- packet_t pkt = packet_t((unsigned char*)data + off, (unsigned char*)data + i);
- // insert into packet buffer
- mtp->mHandle->add_packet(pkt);
- off = i;
- }
- }
- }
-
- mtp->createDatabaseFromSourceDir(source_database, storage_path, MTP_PARENT_ROOT);
- mtp->run();
-
+ std::filesystem::remove_all("/storage/fuzzer");
return 0;
}
diff --git a/media/ndk/NdkMediaCodec.cpp b/media/ndk/NdkMediaCodec.cpp
index 227459a..9e92ea6 100644
--- a/media/ndk/NdkMediaCodec.cpp
+++ b/media/ndk/NdkMediaCodec.cpp
@@ -256,8 +256,8 @@
break;
}
msg->findString("detail", &detail);
- ALOGE("Codec reported error(0x%x), actionCode(%d), detail(%s)",
- err, actionCode, detail.c_str());
+ ALOGE("Codec reported error(0x%x/%s), actionCode(%d), detail(%s)",
+ err, StrMediaError(err).c_str(), actionCode, detail.c_str());
Mutex::Autolock _l(mCodec->mAsyncCallbackLock);
if (mCodec->mAsyncCallback.onAsyncError != NULL) {
diff --git a/media/ndk/NdkMediaDrm.cpp b/media/ndk/NdkMediaDrm.cpp
index 6e9945d..266cc37 100644
--- a/media/ndk/NdkMediaDrm.cpp
+++ b/media/ndk/NdkMediaDrm.cpp
@@ -181,7 +181,7 @@
AMediaDrmSessionId asid = {sessionId.data(), sessionId.size()};
int32_t dataSize = data.size();
const uint8_t *dataPtr = data.data();
- if (dataSize > 0) {
+ if (dataSize >= 0) {
(*mEventListener)(mObj, &asid, ndkEventType, 0, dataPtr, dataSize);
} else {
ALOGE("invalid event data size=%d", dataSize);
diff --git a/media/utils/fuzzers/SchedulingPolicyServiceFuzz.cpp b/media/utils/fuzzers/SchedulingPolicyServiceFuzz.cpp
index 130feee..32fc3be 100644
--- a/media/utils/fuzzers/SchedulingPolicyServiceFuzz.cpp
+++ b/media/utils/fuzzers/SchedulingPolicyServiceFuzz.cpp
@@ -34,11 +34,16 @@
const sp<IServiceManager> sm(defaultServiceManager());
if (sm != nullptr) {
const String16 name("batterystats");
- batteryStatService = checked_interface_cast<IBatteryStats>(sm->checkService(name));
- if (batteryStatService == nullptr) {
+ sp<IBinder> obj = sm->checkService(name);
+ if (!obj) {
ALOGW("batterystats service unavailable!");
return nullptr;
}
+ batteryStatService = checked_interface_cast<IBatteryStats>(obj);
+ if (batteryStatService == nullptr) {
+ ALOGW("batterystats service interface is invalid");
+ return nullptr;
+ }
}
return batteryStatService;
}
diff --git a/services/audiopolicy/common/managerdefinitions/include/AudioPolicyMix.h b/services/audiopolicy/common/managerdefinitions/include/AudioPolicyMix.h
index c2a20c6..bb1699e 100644
--- a/services/audiopolicy/common/managerdefinitions/include/AudioPolicyMix.h
+++ b/services/audiopolicy/common/managerdefinitions/include/AudioPolicyMix.h
@@ -70,8 +70,9 @@
* @return OK if the request is valid
* otherwise if the request is not supported
*/
- status_t getOutputForAttr(const audio_attributes_t& attributes, uid_t uid,
- audio_output_flags_t flags,
+ status_t getOutputForAttr(const audio_attributes_t& attributes,
+ const audio_config_base_t& config,
+ uid_t uid, audio_output_flags_t flags,
sp<AudioPolicyMix> &primaryMix,
std::vector<sp<AudioPolicyMix>> *secondaryMixes);
@@ -126,6 +127,7 @@
enum class MixMatchStatus { MATCH, NO_MATCH, INVALID_MIX };
MixMatchStatus mixMatch(const AudioMix* mix, size_t mixIndex,
const audio_attributes_t& attributes,
+ const audio_config_base_t& config,
uid_t uid);
};
diff --git a/services/audiopolicy/common/managerdefinitions/src/AudioOutputDescriptor.cpp b/services/audiopolicy/common/managerdefinitions/src/AudioOutputDescriptor.cpp
index 5c3bdb3..1cea42f 100644
--- a/services/audiopolicy/common/managerdefinitions/src/AudioOutputDescriptor.cpp
+++ b/services/audiopolicy/common/managerdefinitions/src/AudioOutputDescriptor.cpp
@@ -577,6 +577,11 @@
audio_output_flags_t halFlags = mFlags;
if ((mFlags & AUDIO_OUTPUT_FLAG_SPATIALIZER) != 0) {
halFlags = (audio_output_flags_t)(AUDIO_OUTPUT_FLAG_FAST | AUDIO_OUTPUT_FLAG_DEEP_BUFFER);
+ // If no mixer config is specified for a spatializer output, default to 5.1 for proper
+ // configuration of the final downmixer or spatializer
+ if (mixerConfig == nullptr) {
+ lMixerConfig.channel_mask = AUDIO_CHANNEL_OUT_5POINT1;
+ }
}
ALOGV("opening output for device %s profile %p name %s",
diff --git a/services/audiopolicy/common/managerdefinitions/src/AudioPolicyMix.cpp b/services/audiopolicy/common/managerdefinitions/src/AudioPolicyMix.cpp
index b209a88..2f82b23 100644
--- a/services/audiopolicy/common/managerdefinitions/src/AudioPolicyMix.cpp
+++ b/services/audiopolicy/common/managerdefinitions/src/AudioPolicyMix.cpp
@@ -151,7 +151,7 @@
}
status_t AudioPolicyMixCollection::getOutputForAttr(
- const audio_attributes_t& attributes, uid_t uid,
+ const audio_attributes_t& attributes, const audio_config_base_t& config, uid_t uid,
audio_output_flags_t flags,
sp<AudioPolicyMix> &primaryMix,
std::vector<sp<AudioPolicyMix>> *secondaryMixes)
@@ -177,7 +177,7 @@
continue; // Primary output already found
}
- switch (mixMatch(policyMix.get(), i, attributes, uid)) {
+ switch (mixMatch(policyMix.get(), i, attributes, config, uid)) {
case MixMatchStatus::INVALID_MIX:
// The mix has contradictory rules, ignore it
// TODO: reject invalid mix at registration
@@ -202,7 +202,8 @@
}
AudioPolicyMixCollection::MixMatchStatus AudioPolicyMixCollection::mixMatch(
- const AudioMix* mix, size_t mixIndex, const audio_attributes_t& attributes, uid_t uid) {
+ const AudioMix* mix, size_t mixIndex, const audio_attributes_t& attributes,
+ const audio_config_base_t& config, uid_t uid) {
if (mix->mMixType == MIX_TYPE_PLAYERS) {
// Loopback render mixes are created from a public API and thus restricted
@@ -229,6 +230,14 @@
}
}
+ // Permit match only if requested format and mix format are PCM and can be format
+ // adapted by the mixer, or are the same (compressed) format.
+ if (!((audio_is_linear_pcm(config.format) && audio_is_linear_pcm(mix->mFormat.format)) ||
+ (config.format == mix->mFormat.format)) &&
+ config.format != AUDIO_CONFIG_BASE_INITIALIZER.format) {
+ return MixMatchStatus::NO_MATCH;
+ }
+
int userId = (int) multiuser_get_user_id(uid);
// TODO if adding more player rules (currently only 2), make rule handling "generic"
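Not part of the patch — the format gate added to mixMatch() above can be read as the standalone predicate sketched below; the helper name is hypothetical and it simply restates the condition introduced in the hunk.

```cpp
// Sketch: a mix matches on format when the client left the format unspecified,
// when both formats are linear PCM (the mixer can adapt), or when they are the
// same (compressed) format.
bool formatsCompatible(audio_format_t reqFormat, audio_format_t mixFormat) {
    if (reqFormat == AUDIO_CONFIG_BASE_INITIALIZER.format) {
        return true;  // unspecified client format: never reject on format alone
    }
    return (audio_is_linear_pcm(reqFormat) && audio_is_linear_pcm(mixFormat))
            || reqFormat == mixFormat;
}
```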
diff --git a/services/audiopolicy/config/bluetooth_audio_policy_configuration.xml b/services/audiopolicy/config/bluetooth_audio_policy_configuration.xml
index 22ff954..d34cca0 100644
--- a/services/audiopolicy/config/bluetooth_audio_policy_configuration.xml
+++ b/services/audiopolicy/config/bluetooth_audio_policy_configuration.xml
@@ -11,17 +11,7 @@
channelMasks="AUDIO_CHANNEL_OUT_STEREO"/>
</mixPort>
<!-- Le Audio Audio Ports -->
- <mixPort name="le audio output" role="source">
- <profile name="" format="AUDIO_FORMAT_PCM_16_BIT"
- samplingRates="8000,16000,24000,32000,44100,48000"
- channelMasks="AUDIO_CHANNEL_OUT_MONO,AUDIO_CHANNEL_OUT_STEREO"/>
- <profile name="" format="AUDIO_FORMAT_PCM_24_BIT_PACKED"
- samplingRates="8000,16000,24000,32000,44100,48000"
- channelMasks="AUDIO_CHANNEL_OUT_MONO,AUDIO_CHANNEL_OUT_STEREO"/>
- <profile name="" format="AUDIO_FORMAT_PCM_32_BIT"
- samplingRates="8000,16000,24000,32000,44100,48000"
- channelMasks="AUDIO_CHANNEL_OUT_MONO,AUDIO_CHANNEL_OUT_STEREO"/>
- </mixPort>
+ <mixPort name="le audio output" role="source"/>
<mixPort name="le audio input" role="sink">
<profile name="" format="AUDIO_FORMAT_PCM_16_BIT"
samplingRates="8000,16000,24000,32000,44100,48000"
diff --git a/services/audiopolicy/config/bluetooth_audio_policy_configuration_7_0.xml b/services/audiopolicy/config/bluetooth_audio_policy_configuration_7_0.xml
index aad00d6..ef92d08 100644
--- a/services/audiopolicy/config/bluetooth_audio_policy_configuration_7_0.xml
+++ b/services/audiopolicy/config/bluetooth_audio_policy_configuration_7_0.xml
@@ -11,17 +11,7 @@
channelMasks="AUDIO_CHANNEL_OUT_STEREO"/>
</mixPort>
<!-- Le Audio Audio Ports -->
- <mixPort name="le audio output" role="source">
- <profile name="" format="AUDIO_FORMAT_PCM_16_BIT"
- samplingRates="8000 16000 24000 32000 44100 48000"
- channelMasks="AUDIO_CHANNEL_OUT_MONO AUDIO_CHANNEL_OUT_STEREO"/>
- <profile name="" format="AUDIO_FORMAT_PCM_24_BIT_PACKED"
- samplingRates="8000 16000 24000 32000 44100 48000"
- channelMasks="AUDIO_CHANNEL_OUT_MONO AUDIO_CHANNEL_OUT_STEREO"/>
- <profile name="" format="AUDIO_FORMAT_PCM_32_BIT"
- samplingRates="8000 16000 24000 32000 44100 48000"
- channelMasks="AUDIO_CHANNEL_OUT_MONO AUDIO_CHANNEL_OUT_STEREO"/>
- </mixPort>
+ <mixPort name="le audio output" role="source"/>
<mixPort name="le audio input" role="sink">
<profile name="" format="AUDIO_FORMAT_PCM_16_BIT"
samplingRates="8000 16000 24000 32000 44100 48000"
diff --git a/services/audiopolicy/managerdefault/AudioPolicyManager.cpp b/services/audiopolicy/managerdefault/AudioPolicyManager.cpp
index 3cfb944..dc6551b 100644
--- a/services/audiopolicy/managerdefault/AudioPolicyManager.cpp
+++ b/services/audiopolicy/managerdefault/AudioPolicyManager.cpp
@@ -1055,7 +1055,12 @@
// otherwise, fallback to the dynamic policies, if none match, query the engine.
// Secondary outputs are always found by dynamic policies as the engine do not support them
sp<AudioPolicyMix> primaryMix;
- status = mPolicyMixes.getOutputForAttr(*resultAttr, uid, *flags, primaryMix, secondaryMixes);
+ const audio_config_base_t clientConfig = {.sample_rate = config->sample_rate,
+ .channel_mask = config->channel_mask,
+ .format = config->format,
+ };
+ status = mPolicyMixes.getOutputForAttr(*resultAttr, clientConfig, uid, *flags, primaryMix,
+ secondaryMixes);
if (status != OK) {
return status;
}
@@ -1064,10 +1069,9 @@
bool usePrimaryOutputFromPolicyMixes = requestedDevice == nullptr && primaryMix != nullptr;
// FIXME: in case of RENDER policy, the output capabilities should be checked
- if ((usePrimaryOutputFromPolicyMixes
- || (secondaryMixes != nullptr && !secondaryMixes->empty()))
- && !audio_is_linear_pcm(config->format)) {
- ALOGD("%s: rejecting request as dynamic audio policy only support pcm", __func__);
+ if ((secondaryMixes != nullptr && !secondaryMixes->empty())
+ && !audio_is_linear_pcm(config->format)) {
+ ALOGD("%s: rejecting request as secondary mixes only support pcm", __func__);
return BAD_VALUE;
}
if (usePrimaryOutputFromPolicyMixes) {
@@ -1076,19 +1080,27 @@
primaryMix->mDeviceAddress,
AUDIO_FORMAT_DEFAULT);
sp<SwAudioOutputDescriptor> policyDesc = primaryMix->getOutput();
- if (deviceDesc != nullptr
- && (policyDesc == nullptr || (policyDesc->mFlags & AUDIO_OUTPUT_FLAG_DIRECT))) {
+ bool tryDirectForFlags = policyDesc == nullptr ||
+ (policyDesc->mFlags & AUDIO_OUTPUT_FLAG_DIRECT);
+ // if a direct output can be opened to deliver the track's multi-channel content to the
+ // output rather than being downmixed by the primary output, then use this direct
+ // output by bypassing the primary mix if possible, otherwise fall through to the primary
+ // mix.
+ bool tryDirectForChannelMask = policyDesc != nullptr
+ && (audio_channel_count_from_out_mask(policyDesc->getConfig().channel_mask) <
+ audio_channel_count_from_out_mask(config->channel_mask));
+ if (deviceDesc != nullptr && (tryDirectForFlags || tryDirectForChannelMask)) {
audio_io_handle_t newOutput;
status = openDirectOutput(
*stream, session, config,
(audio_output_flags_t)(*flags | AUDIO_OUTPUT_FLAG_DIRECT),
DeviceVector(deviceDesc), &newOutput);
- if (status != NO_ERROR) {
- policyDesc = nullptr;
- } else {
+ if (status == NO_ERROR) {
policyDesc = mOutputs.valueFor(newOutput);
primaryMix->setOutput(policyDesc);
- }
+ } else if (tryDirectForFlags) {
+ policyDesc = nullptr;
+ } // otherwise use primary if available.
}
if (policyDesc != nullptr) {
policyDesc->mPolicyMix = primaryMix;
@@ -1390,7 +1402,8 @@
}
if (mSpatializerOutput != nullptr
- && canBeSpatialized(attr, config, devices.toTypeAddrVector())) {
+ && canBeSpatializedInt(attr, config,
+ devices.toTypeAddrVector(), false /* allowCurrentOutputReconfig */)) {
return mSpatializerOutput->mIoHandle;
}
@@ -2920,7 +2933,7 @@
// stream by the engine.
DeviceTypeSet deviceTypes = {device};
if (device == AUDIO_DEVICE_OUT_DEFAULT_FOR_VOLUME) {
- DeviceTypeSet deviceTypes = mEngine->getOutputDevicesForAttributes(
+ deviceTypes = mEngine->getOutputDevicesForAttributes(
attr, nullptr, true /*fromCache*/).types();
}
return getVolumeIndex(getVolumeCurves(attr), index, deviceTypes);
@@ -2930,7 +2943,7 @@
int &index,
const DeviceTypeSet& deviceTypes) const
{
- if (isSingleDeviceType(deviceTypes, audio_is_output_device)) {
+ if (!isSingleDeviceType(deviceTypes, audio_is_output_device)) {
return BAD_VALUE;
}
index = curves.getVolumeIndex(deviceTypes);
@@ -3039,9 +3052,10 @@
}
}
}
- return mEffects.registerEffect(desc, io, session, id,
- (strategy == streamToStrategy(AUDIO_STREAM_MUSIC) ||
- strategy == PRODUCT_STRATEGY_NONE));
+ bool isMusicEffect = (session != AUDIO_SESSION_OUTPUT_STAGE)
+ && ((strategy == streamToStrategy(AUDIO_STREAM_MUSIC)
+ || strategy == PRODUCT_STRATEGY_NONE));
+ return mEffects.registerEffect(desc, io, session, id, isMusicEffect);
}
status_t AudioPolicyManager::unregisterEffect(int id)
@@ -4882,9 +4896,10 @@
}
}
-bool AudioPolicyManager::canBeSpatialized(const audio_attributes_t *attr,
+bool AudioPolicyManager::canBeSpatializedInt(const audio_attributes_t *attr,
const audio_config_t *config,
- const AudioDeviceTypeAddrVector &devices) const
+ const AudioDeviceTypeAddrVector &devices,
+ bool allowCurrentOutputReconfig) const
{
// The caller can have the audio attributes criteria ignored by either passing a null ptr or
// the AUDIO_ATTRIBUTES_INITIALIZER value.
@@ -4920,7 +4935,8 @@
if (!isChannelMaskSpatialized(config->channel_mask)) {
return false;
}
- if (mSpatializerOutput != nullptr && mSpatializerOutput->mProfile == profile) {
+ if (!allowCurrentOutputReconfig && mSpatializerOutput != nullptr
+ && mSpatializerOutput->mProfile == profile) {
if ((config->channel_mask & mSpatializerOutput->mMixerChannelMask)
!= config->channel_mask) {
return false;
@@ -4941,7 +4957,8 @@
audio_config_base_t clientConfig = client->config();
audio_config_t config = audio_config_initializer(&clientConfig);
if (desc != mSpatializerOutput
- && canBeSpatialized(&attr, &config, devicesTypeAddress)) {
+ && canBeSpatializedInt(&attr, &config,
+ devicesTypeAddress, false /* allowCurrentOutputReconfig */)) {
streamsToInvalidate.insert(client->stream());
}
}
@@ -4965,7 +4982,8 @@
config = audio_config_initializer(mixerConfig);
configPtr = &config;
}
- if (!canBeSpatialized(attr, configPtr, devicesTypeAddress)) {
+ if (!canBeSpatializedInt(
+ attr, configPtr, devicesTypeAddress)) {
ALOGW("%s provided attributes or mixer config cannot be spatialized", __func__);
return BAD_VALUE;
}
@@ -4988,6 +5006,7 @@
for (size_t i = 0; i < mOutputs.size(); i++) {
sp<SwAudioOutputDescriptor> desc = mOutputs.valueAt(i);
if (!desc->isDuplicated() && desc->mProfile == profile) {
+ ALOGV("%s found output %d for spatializer profile", __func__, desc->mIoHandle);
mSpatializerOutput = desc;
break;
}
@@ -5007,39 +5026,29 @@
};
DeviceVector savedDevices = mSpatializerOutput->devices();
- closeOutput(mSpatializerOutput->mIoHandle);
- mSpatializerOutput.clear();
+ ALOGV("%s reopening spatializer output to match channel mask %#x (current mask %#x)",
+ __func__, configPtr->channel_mask, mSpatializerOutput->mMixerChannelMask);
- const sp<SwAudioOutputDescriptor> desc =
- new SwAudioOutputDescriptor(profile, mpClientInterface);
- status_t status = desc->open(nullptr, mixerConfig, devices,
- mEngine->getStreamTypeForAttributes(*attr),
- AUDIO_OUTPUT_FLAG_SPATIALIZER, output);
- if (status != NO_ERROR) {
- ALOGW("%s failed opening output: status %d, output %d", __func__, status, *output);
- if (*output != AUDIO_IO_HANDLE_NONE) {
- desc->close();
- }
+ closeOutput(mSpatializerOutput->mIoHandle);
+ // from now on mSpatializerOutput is null
+
+ sp<SwAudioOutputDescriptor> desc =
+ openOutputWithProfileAndDevice(profile, devices, mixerConfig);
+ if (desc == nullptr) {
// re open the spatializer output with previous channel mask
- status_t newStatus = desc->open(nullptr, &savedMixerConfig, savedDevices,
- mEngine->getStreamTypeForAttributes(*attr),
- AUDIO_OUTPUT_FLAG_SPATIALIZER, output);
- if (newStatus != NO_ERROR) {
- if (*output != AUDIO_IO_HANDLE_NONE) {
- desc->close();
- }
- ALOGE("%s failed to re-open mSpatializerOutput, status %d", __func__, newStatus);
+ desc = openOutputWithProfileAndDevice(profile, savedDevices, &savedMixerConfig);
+ if (desc == nullptr) {
+ ALOGE("%s failed to restore mSpatializerOutput with previous config", __func__);
} else {
mSpatializerOutput = desc;
- addOutput(*output, desc);
}
mPreviousOutputs = mOutputs;
mpClientInterface->onAudioPortListUpdate();
*output = AUDIO_IO_HANDLE_NONE;
- return status;
+ ALOGW("%s could not open spatializer output with requested config", __func__);
+ return BAD_VALUE;
}
mSpatializerOutput = desc;
- addOutput(*output, desc);
mPreviousOutputs = mOutputs;
mpClientInterface->onAudioPortListUpdate();
}
@@ -5711,6 +5720,9 @@
removeOutput(output);
mPreviousOutputs = mOutputs;
+ if (closingOutput == mSpatializerOutput) {
+ mSpatializerOutput.clear();
+ }
// MSD patches may have been released to support a non-MSD direct output. Reset MSD patch if
// no direct outputs are open.
@@ -5862,8 +5874,8 @@
continue;
}
sp<AudioPolicyMix> primaryMix;
- status_t status = mPolicyMixes.getOutputForAttr(client->attributes(), client->uid(),
- client->flags(), primaryMix, nullptr);
+ status_t status = mPolicyMixes.getOutputForAttr(client->attributes(), client->config(),
+ client->uid(), client->flags(), primaryMix, nullptr);
if (status != OK) {
continue;
}
@@ -5969,8 +5981,8 @@
for (const sp<TrackClientDescriptor>& client : outputDescriptor->getClientIterable()) {
sp<AudioPolicyMix> primaryMix;
std::vector<sp<AudioPolicyMix>> secondaryMixes;
- status_t status = mPolicyMixes.getOutputForAttr(client->attributes(), client->uid(),
- client->flags(), primaryMix, &secondaryMixes);
+ status_t status = mPolicyMixes.getOutputForAttr(client->attributes(), client->config(),
+ client->uid(), client->flags(), primaryMix, &secondaryMixes);
std::vector<sp<SwAudioOutputDescriptor>> secondaryDescs;
for (auto &secondaryMix : secondaryMixes) {
sp<SwAudioOutputDescriptor> outputDesc = secondaryMix->getOutput();
@@ -6239,8 +6251,8 @@
// check dynamic policies but only for primary descriptors (secondary not used for audible
// audio routing, only used for duplication for playback capture)
sp<AudioPolicyMix> policyMix;
- status_t status = mPolicyMixes.getOutputForAttr(attr, 0 /*uid unknown here*/,
- AUDIO_OUTPUT_FLAG_NONE, policyMix, nullptr);
+ status_t status = mPolicyMixes.getOutputForAttr(attr, AUDIO_CONFIG_BASE_INITIALIZER,
+ 0 /*uid unknown here*/, AUDIO_OUTPUT_FLAG_NONE, policyMix, nullptr);
if (status != OK) {
return status;
}
@@ -7269,7 +7281,8 @@
}
sp<SwAudioOutputDescriptor> AudioPolicyManager::openOutputWithProfileAndDevice(
- const sp<IOProfile>& profile, const DeviceVector& devices)
+ const sp<IOProfile>& profile, const DeviceVector& devices,
+ const audio_config_base_t *mixerConfig)
{
for (const auto& device : devices) {
// TODO: This should be checking if the profile supports the device combo.
@@ -7279,7 +7292,7 @@
}
sp<SwAudioOutputDescriptor> desc = new SwAudioOutputDescriptor(profile, mpClientInterface);
audio_io_handle_t output = AUDIO_IO_HANDLE_NONE;
- status_t status = desc->open(nullptr /* halConfig */, nullptr /* mixerConfig */, devices,
+ status_t status = desc->open(nullptr /* halConfig */, mixerConfig, devices,
AUDIO_STREAM_DEFAULT, AUDIO_OUTPUT_FLAG_NONE, &output);
if (status != NO_ERROR) {
return nullptr;
@@ -7309,7 +7322,7 @@
config.offload_info.channel_mask = config.channel_mask;
config.offload_info.format = config.format;
- status = desc->open(&config, nullptr /* mixerConfig */, devices,
+ status = desc->open(&config, mixerConfig, devices,
AUDIO_STREAM_DEFAULT, AUDIO_OUTPUT_FLAG_NONE, &output);
if (status != NO_ERROR) {
return nullptr;
@@ -7317,6 +7330,10 @@
}
addOutput(output, desc);
+
+ sp<DeviceDescriptor> speaker = mAvailableOutputDevices.getDevice(
+ AUDIO_DEVICE_OUT_SPEAKER, String8(""), AUDIO_FORMAT_DEFAULT);
+
if (audio_is_remote_submix_device(deviceType) && address != "0") {
sp<AudioPolicyMix> policyMix;
if (mPolicyMixes.getAudioPolicyMix(deviceType, address, policyMix) == NO_ERROR) {
@@ -7327,9 +7344,13 @@
address.string());
}
- } else if (((desc->mFlags & AUDIO_OUTPUT_FLAG_DIRECT) == 0) && hasPrimaryOutput()) {
- // no duplicated output for direct outputs and
- // outputs used by dynamic policy mixes
+ } else if (hasPrimaryOutput() && speaker != nullptr
+ && mPrimaryOutput->supportsDevice(speaker) && !desc->supportsDevice(speaker)
+ && ((desc->mFlags & AUDIO_OUTPUT_FLAG_DIRECT) == 0)) {
+ // no duplicated output for:
+ // - direct outputs
+ // - outputs used by dynamic policy mixes
+ // - outputs that supports SPEAKER while the primary output does not.
audio_io_handle_t duplicatedOutput = AUDIO_IO_HANDLE_NONE;
//TODO: configure audio effect output stage here
diff --git a/services/audiopolicy/managerdefault/AudioPolicyManager.h b/services/audiopolicy/managerdefault/AudioPolicyManager.h
index dcd12cd..317aa1c 100644
--- a/services/audiopolicy/managerdefault/AudioPolicyManager.h
+++ b/services/audiopolicy/managerdefault/AudioPolicyManager.h
@@ -358,7 +358,9 @@
virtual bool canBeSpatialized(const audio_attributes_t *attr,
const audio_config_t *config,
- const AudioDeviceTypeAddrVector &devices) const;
+ const AudioDeviceTypeAddrVector &devices) const {
+ return canBeSpatializedInt(attr, config, devices);
+ }
virtual status_t getSpatializerOutput(const audio_config_base_t *config,
const audio_attributes_t *attr,
@@ -991,6 +993,30 @@
const DeviceVector &devices,
audio_io_handle_t *output);
+ /**
+ * @brief Queries if some kind of spatialization will be performed if the audio playback
+ * context described by the provided arguments is present.
+ * The context is made of:
+ * - The audio attributes describing the playback use case.
+ * - The audio configuration describing the audio format, channels, sampling rate ...
+ * - The devices describing the sink audio device selected for playback.
+ * All arguments are optional and only the specified arguments are used to match against
+ * supported criteria. For instance, supplying no argument will tell if spatialization is
+ * supported or not in general.
+ * @param attr audio attributes describing the playback use case
+ * @param config audio configuration describing the audio format, channels, sample rate...
+ * @param devices the sink audio device selected for playback
+ * @param allowCurrentOutputReconfig if true, the result assumes that an existing
+ * spatializer output stream can be closed and reopened to match the requested
+ * criteria. If false, the criteria must be compatible with the opened spatializer
+ * output.
+ * @return true if spatialization is possible for this context, false otherwise.
+ */
+ virtual bool canBeSpatializedInt(const audio_attributes_t *attr,
+ const audio_config_t *config,
+ const AudioDeviceTypeAddrVector &devices,
+ bool allowCurrentOutputReconfig = true) const;
+
sp<IOProfile> getSpatializerOutputProfile(const audio_config_t *config,
const AudioDeviceTypeAddrVector &devices) const;
@@ -1086,8 +1112,20 @@
bool areAllActiveTracksRerouted(const sp<SwAudioOutputDescriptor>& output);
- sp<SwAudioOutputDescriptor> openOutputWithProfileAndDevice(const sp<IOProfile>& profile,
- const DeviceVector& devices);
+ /**
+ * @brief Opens an output stream from the supplied IOProfile and route it to the
+ * supplied audio devices. If a mixer config is specified, it is forwarded to audio
+ * flinger. If not, a default config is derived from the output stream config.
+ * Also opens a duplicating output if needed and queries the audio HAL for supported
+ * audio profiles if the IOProfile is dynamic.
+ * @param[in] profile IOProfile to use as template
+ * @param[in] devices initial route to apply to this output stream
+ * @param[in] mixerConfig if not null, use this to configure the mixer
+ * @return an output descriptor for the newly opened stream or null in case of error.
+ */
+ sp<SwAudioOutputDescriptor> openOutputWithProfileAndDevice(
+ const sp<IOProfile>& profile, const DeviceVector& devices,
+ const audio_config_base_t *mixerConfig = nullptr);
};
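Not part of the patch — a minimal sketch of how the new mixerConfig parameter is exercised, modeled on the getSpatializerOutput() call sites in AudioPolicyManager.cpp above; profile, devices, savedDevices and savedMixerConfig are assumed to be already resolved inside AudioPolicyManager.

```cpp
// Sketch: open an output with an explicit 5.1 mixer configuration and fall back
// to the previously working config if it fails, mirroring the spatializer path.
audio_config_base_t mixerConfig = AUDIO_CONFIG_BASE_INITIALIZER;
mixerConfig.channel_mask = AUDIO_CHANNEL_OUT_5POINT1;

sp<SwAudioOutputDescriptor> desc =
        openOutputWithProfileAndDevice(profile, devices, &mixerConfig);
if (desc == nullptr) {
    desc = openOutputWithProfileAndDevice(profile, savedDevices, &savedMixerConfig);
}
```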
diff --git a/services/audiopolicy/service/AudioPolicyService.cpp b/services/audiopolicy/service/AudioPolicyService.cpp
index cd83900..1109793 100644
--- a/services/audiopolicy/service/AudioPolicyService.cpp
+++ b/services/audiopolicy/service/AudioPolicyService.cpp
@@ -392,7 +392,8 @@
audio_config_base_t config = mSpatializer->getAudioInConfig();
status_t status =
mAudioPolicyManager->getSpatializerOutput(&config, &attr, &newOutput);
-
+ ALOGV("%s currentOutput %d newOutput %d channel_mask %#x",
+ __func__, currentOutput, newOutput, config.channel_mask);
if (status == NO_ERROR && currentOutput == newOutput) {
return;
}
diff --git a/services/audiopolicy/service/Spatializer.cpp b/services/audiopolicy/service/Spatializer.cpp
index 0fdbe20..6745005 100644
--- a/services/audiopolicy/service/Spatializer.cpp
+++ b/services/audiopolicy/service/Spatializer.cpp
@@ -216,7 +216,9 @@
if (status != NO_ERROR) {
return status;
}
- mSupportsHeadTracking = supportsHeadTracking[0];
+// Disable head tracking until head sensor activity is properly controlled.
+// mSupportsHeadTracking = supportsHeadTracking[0];
+ mSupportsHeadTracking = false;
status = getHalParameter<true>(effect, SPATIALIZER_PARAM_SUPPORTED_LEVELS, &mLevels);
if (status != NO_ERROR) {
@@ -227,12 +229,8 @@
if (status != NO_ERROR) {
return status;
}
- status = getHalParameter<true>(effect, SPATIALIZER_PARAM_SUPPORTED_CHANNEL_MASKS,
+ return getHalParameter<true>(effect, SPATIALIZER_PARAM_SUPPORTED_CHANNEL_MASKS,
&mChannelMasks);
- if (status != NO_ERROR) {
- return status;
- }
- return NO_ERROR;
}
/** Gets the channel mask, sampling rate and format set for the spatializer input. */
@@ -727,7 +725,7 @@
switch (event) {
case AudioEffect::EVENT_FRAMES_PROCESSED: {
int frames = info == nullptr ? 0 : *(int*)info;
- ALOGD("%s frames processed %d for me %p", __func__, frames, me);
+ // ALOGD("%s frames processed %d for me %p", __func__, frames, me);
me->postFramesProcessedMsg(frames);
} break;
default:
diff --git a/services/audiopolicy/tests/audiopolicymanager_tests.cpp b/services/audiopolicy/tests/audiopolicymanager_tests.cpp
index 9d2d2b3..f95aa44 100644
--- a/services/audiopolicy/tests/audiopolicymanager_tests.cpp
+++ b/services/audiopolicy/tests/audiopolicymanager_tests.cpp
@@ -146,6 +146,8 @@
std::unique_ptr<AudioPolicyManagerTestClient> mClient;
std::unique_ptr<AudioPolicyTestManager> mManager;
+
+ const uint32_t k48000SamplingRate = 48000;
};
void AudioPolicyManagerTest::SetUp() {
@@ -405,11 +407,11 @@
AudioPolicyConfig& config = mManager->getConfig();
mMsdOutputDevice = new DeviceDescriptor(AUDIO_DEVICE_OUT_BUS);
sp<AudioProfile> pcmOutputProfile = new AudioProfile(
- AUDIO_FORMAT_PCM_16_BIT, AUDIO_CHANNEL_OUT_STEREO, 48000);
+ AUDIO_FORMAT_PCM_16_BIT, AUDIO_CHANNEL_OUT_STEREO, k48000SamplingRate);
sp<AudioProfile> ac3OutputProfile = new AudioProfile(
- AUDIO_FORMAT_AC3, AUDIO_CHANNEL_OUT_5POINT1, 48000);
+ AUDIO_FORMAT_AC3, AUDIO_CHANNEL_OUT_5POINT1, k48000SamplingRate);
sp<AudioProfile> iec958OutputProfile = new AudioProfile(
- AUDIO_FORMAT_IEC60958, AUDIO_CHANNEL_OUT_STEREO, 48000);
+ AUDIO_FORMAT_IEC60958, AUDIO_CHANNEL_OUT_STEREO, k48000SamplingRate);
mMsdOutputDevice->addAudioProfile(pcmOutputProfile);
mMsdOutputDevice->addAudioProfile(ac3OutputProfile);
mMsdOutputDevice->addAudioProfile(iec958OutputProfile);
@@ -464,7 +466,7 @@
// Add a profile with another encoding to the default device to test routing
// of streams that are not supported by MSD.
sp<AudioProfile> dtsOutputProfile = new AudioProfile(
- AUDIO_FORMAT_DTS, AUDIO_CHANNEL_OUT_5POINT1, 48000);
+ AUDIO_FORMAT_DTS, AUDIO_CHANNEL_OUT_5POINT1, k48000SamplingRate);
config.getDefaultOutputDevice()->addAudioProfile(dtsOutputProfile);
sp<OutputProfile> primaryEncodedOutputProfile = new OutputProfile("encoded");
primaryEncodedOutputProfile->addAudioProfile(dtsOutputProfile);
@@ -482,7 +484,7 @@
// Add HDMI input device with IEC60958 profile for HDMI in -> MSD patching.
mHdmiInputDevice = new DeviceDescriptor(AUDIO_DEVICE_IN_HDMI);
sp<AudioProfile> iec958InputProfile = new AudioProfile(
- AUDIO_FORMAT_IEC60958, AUDIO_CHANNEL_IN_STEREO, 48000);
+ AUDIO_FORMAT_IEC60958, AUDIO_CHANNEL_IN_STEREO, k48000SamplingRate);
mHdmiInputDevice->addAudioProfile(iec958InputProfile);
config.addDevice(mHdmiInputDevice);
sp<InputProfile> hdmiInputProfile = new InputProfile("hdmi input");
@@ -540,8 +542,8 @@
TEST_P(AudioPolicyManagerTestMsd, GetOutputForAttrEncodedRoutesToMsd) {
const PatchCountCheck patchCount = snapshotPatchCount();
audio_port_handle_t selectedDeviceId = AUDIO_PORT_HANDLE_NONE;
- getOutputForAttr(&selectedDeviceId,
- AUDIO_FORMAT_AC3, AUDIO_CHANNEL_OUT_5POINT1, 48000, AUDIO_OUTPUT_FLAG_DIRECT);
+ getOutputForAttr(&selectedDeviceId, AUDIO_FORMAT_AC3, AUDIO_CHANNEL_OUT_5POINT1,
+ k48000SamplingRate, AUDIO_OUTPUT_FLAG_DIRECT);
ASSERT_EQ(selectedDeviceId, mDefaultOutputDevice->getId());
ASSERT_EQ(mExpectedAudioPatchCount, patchCount.deltaFromSnapshot());
}
@@ -550,7 +552,7 @@
const PatchCountCheck patchCount = snapshotPatchCount();
audio_port_handle_t selectedDeviceId = AUDIO_PORT_HANDLE_NONE;
getOutputForAttr(&selectedDeviceId,
- AUDIO_FORMAT_PCM_16_BIT, AUDIO_CHANNEL_OUT_STEREO, 48000);
+ AUDIO_FORMAT_PCM_16_BIT, AUDIO_CHANNEL_OUT_STEREO, k48000SamplingRate);
ASSERT_EQ(selectedDeviceId, mDefaultOutputDevice->getId());
ASSERT_EQ(mExpectedAudioPatchCount, patchCount.deltaFromSnapshot());
}
@@ -558,13 +560,13 @@
TEST_P(AudioPolicyManagerTestMsd, GetOutputForAttrEncodedPlusPcmRoutesToMsd) {
const PatchCountCheck patchCount = snapshotPatchCount();
audio_port_handle_t selectedDeviceId = AUDIO_PORT_HANDLE_NONE;
- getOutputForAttr(&selectedDeviceId,
- AUDIO_FORMAT_AC3, AUDIO_CHANNEL_OUT_5POINT1, 48000, AUDIO_OUTPUT_FLAG_DIRECT);
+ getOutputForAttr(&selectedDeviceId, AUDIO_FORMAT_AC3, AUDIO_CHANNEL_OUT_5POINT1,
+ k48000SamplingRate, AUDIO_OUTPUT_FLAG_DIRECT);
ASSERT_EQ(selectedDeviceId, mDefaultOutputDevice->getId());
ASSERT_EQ(mExpectedAudioPatchCount, patchCount.deltaFromSnapshot());
selectedDeviceId = AUDIO_PORT_HANDLE_NONE;
getOutputForAttr(&selectedDeviceId,
- AUDIO_FORMAT_PCM_16_BIT, AUDIO_CHANNEL_OUT_STEREO, 48000);
+ AUDIO_FORMAT_PCM_16_BIT, AUDIO_CHANNEL_OUT_STEREO, k48000SamplingRate);
ASSERT_EQ(selectedDeviceId, mDefaultOutputDevice->getId());
ASSERT_EQ(mExpectedAudioPatchCount, patchCount.deltaFromSnapshot());
}
@@ -572,8 +574,8 @@
TEST_P(AudioPolicyManagerTestMsd, GetOutputForAttrUnsupportedFormatBypassesMsd) {
const PatchCountCheck patchCount = snapshotPatchCount();
audio_port_handle_t selectedDeviceId = AUDIO_PORT_HANDLE_NONE;
- getOutputForAttr(&selectedDeviceId,
- AUDIO_FORMAT_DTS, AUDIO_CHANNEL_OUT_5POINT1, 48000, AUDIO_OUTPUT_FLAG_DIRECT);
+ getOutputForAttr(&selectedDeviceId, AUDIO_FORMAT_DTS, AUDIO_CHANNEL_OUT_5POINT1,
+ k48000SamplingRate, AUDIO_OUTPUT_FLAG_DIRECT);
ASSERT_NE(selectedDeviceId, mMsdOutputDevice->getId());
ASSERT_EQ(0, patchCount.deltaFromSnapshot());
}
@@ -584,9 +586,8 @@
const PatchCountCheck patchCount = snapshotPatchCount();
audio_port_handle_t selectedDeviceId = AUDIO_PORT_HANDLE_NONE;
audio_port_handle_t portId;
- getOutputForAttr(&selectedDeviceId,
- AUDIO_FORMAT_AC3, AUDIO_CHANNEL_OUT_5POINT1, 48000, AUDIO_OUTPUT_FLAG_DIRECT,
- nullptr /*output*/, &portId);
+ getOutputForAttr(&selectedDeviceId, AUDIO_FORMAT_AC3, AUDIO_CHANNEL_OUT_5POINT1,
+ k48000SamplingRate, AUDIO_OUTPUT_FLAG_DIRECT, nullptr /*output*/, &portId);
ASSERT_EQ(selectedDeviceId, mDefaultOutputDevice->getId());
ASSERT_EQ(mExpectedAudioPatchCount, patchCount.deltaFromSnapshot());
mManager->releaseOutput(portId);
@@ -596,9 +597,8 @@
const PatchCountCheck patchCount = snapshotPatchCount();
audio_port_handle_t selectedDeviceId = AUDIO_PORT_HANDLE_NONE;
audio_port_handle_t portId;
- getOutputForAttr(&selectedDeviceId,
- AUDIO_FORMAT_DTS, AUDIO_CHANNEL_OUT_5POINT1, 48000, AUDIO_OUTPUT_FLAG_DIRECT,
- nullptr /*output*/, &portId);
+ getOutputForAttr(&selectedDeviceId, AUDIO_FORMAT_DTS, AUDIO_CHANNEL_OUT_5POINT1,
+ k48000SamplingRate, AUDIO_OUTPUT_FLAG_DIRECT, nullptr /*output*/, &portId);
ASSERT_NE(selectedDeviceId, mMsdOutputDevice->getId());
ASSERT_EQ(-static_cast<int>(mExpectedAudioPatchCount), patchCount.deltaFromSnapshot());
mManager->releaseOutput(portId);
@@ -607,8 +607,8 @@
{
const PatchCountCheck patchCount = snapshotPatchCount();
audio_port_handle_t selectedDeviceId = AUDIO_PORT_HANDLE_NONE;
- getOutputForAttr(&selectedDeviceId,
- AUDIO_FORMAT_AC3, AUDIO_CHANNEL_OUT_5POINT1, 48000, AUDIO_OUTPUT_FLAG_DIRECT);
+ getOutputForAttr(&selectedDeviceId, AUDIO_FORMAT_AC3, AUDIO_CHANNEL_OUT_5POINT1,
+ k48000SamplingRate, AUDIO_OUTPUT_FLAG_DIRECT);
ASSERT_EQ(selectedDeviceId, mDefaultOutputDevice->getId());
ASSERT_EQ(0, patchCount.deltaFromSnapshot());
}
@@ -637,8 +637,8 @@
ASSERT_EQ(AUDIO_FORMAT_IEC60958, patch->mPatch.sinks[0].format);
ASSERT_EQ(AUDIO_CHANNEL_IN_STEREO, patch->mPatch.sources[0].channel_mask);
ASSERT_EQ(AUDIO_CHANNEL_OUT_STEREO, patch->mPatch.sinks[0].channel_mask);
- ASSERT_EQ(48000, patch->mPatch.sources[0].sample_rate);
- ASSERT_EQ(48000, patch->mPatch.sinks[0].sample_rate);
+ ASSERT_EQ(k48000SamplingRate, patch->mPatch.sources[0].sample_rate);
+ ASSERT_EQ(k48000SamplingRate, patch->mPatch.sinks[0].sample_rate);
ASSERT_EQ(1, patchCount.deltaFromSnapshot());
}
@@ -747,7 +747,7 @@
clearPolicyMix();
audioConfig.channel_mask = AUDIO_CHANNEL_OUT_STEREO;
audioConfig.format = AUDIO_FORMAT_PCM_16_BIT;
- audioConfig.sample_rate = 48000;
+ audioConfig.sample_rate = k48000SamplingRate;
ret = addPolicyMix(MIX_TYPE_PLAYERS, MIX_ROUTE_FLAG_LOOP_BACK,
AUDIO_DEVICE_OUT_REMOTE_SUBMIX, mMixAddress, audioConfig,
std::vector<PolicyMixTuple>());
@@ -786,7 +786,7 @@
audioConfig.channel_mask = AUDIO_CHANNEL_OUT_STEREO;
audioConfig.format = AUDIO_FORMAT_PCM_16_BIT;
- audioConfig.sample_rate = 48000;
+ audioConfig.sample_rate = k48000SamplingRate;
ret = addPolicyMix(MIX_TYPE_PLAYERS, MIX_ROUTE_FLAG_LOOP_BACK,
AUDIO_DEVICE_OUT_REMOTE_SUBMIX, mMixAddress, audioConfig,
std::vector<PolicyMixTuple>());
@@ -1008,7 +1008,7 @@
audio_config_t audioConfig = AUDIO_CONFIG_INITIALIZER;
audioConfig.channel_mask = AUDIO_CHANNEL_OUT_STEREO;
audioConfig.format = AUDIO_FORMAT_PCM_16_BIT;
- audioConfig.sample_rate = 48000;
+ audioConfig.sample_rate = k48000SamplingRate;
ret = addPolicyMix(MIX_TYPE_PLAYERS, MIX_ROUTE_FLAG_LOOP_BACK,
AUDIO_DEVICE_OUT_REMOTE_SUBMIX, "", audioConfig, std::vector<PolicyMixTuple>());
ASSERT_EQ(INVALID_OPERATION, ret);
@@ -1042,7 +1042,7 @@
audio_config_t audioConfig = AUDIO_CONFIG_INITIALIZER;
audioConfig.channel_mask = AUDIO_CHANNEL_OUT_STEREO;
audioConfig.format = AUDIO_FORMAT_PCM_16_BIT;
- audioConfig.sample_rate = 48000;
+ audioConfig.sample_rate = k48000SamplingRate;
status_t ret = addPolicyMix(MIX_TYPE_PLAYERS, MIX_ROUTE_FLAG_LOOP_BACK,
AUDIO_DEVICE_OUT_REMOTE_SUBMIX, mMixAddress, audioConfig, mUsageRules);
ASSERT_EQ(NO_ERROR, ret);
@@ -1058,7 +1058,7 @@
std::string tags = "addr=" + mMixAddress;
strncpy(attr.tags, tags.c_str(), AUDIO_ATTRIBUTES_TAGS_MAX_SIZE - 1);
getInputForAttr(attr, mTracker->getRiid(), &selectedDeviceId, AUDIO_FORMAT_PCM_16_BIT,
- AUDIO_CHANNEL_IN_STEREO, 48000 /*sampleRate*/, AUDIO_INPUT_FLAG_NONE, &mPortId);
+ AUDIO_CHANNEL_IN_STEREO, k48000SamplingRate, AUDIO_INPUT_FLAG_NONE, &mPortId);
ASSERT_EQ(NO_ERROR, mManager->startInput(mPortId));
ASSERT_EQ(extractionPort.id, selectedDeviceId);
@@ -1085,8 +1085,8 @@
audio_port_handle_t playbackRoutedPortId = AUDIO_PORT_HANDLE_NONE;
getOutputForAttr(&playbackRoutedPortId, AUDIO_FORMAT_PCM_16_BIT, AUDIO_CHANNEL_OUT_STEREO,
- 48000 /*sampleRate*/, AUDIO_OUTPUT_FLAG_NONE,
- nullptr /*output*/, nullptr /*portId*/, attr);
+ k48000SamplingRate, AUDIO_OUTPUT_FLAG_NONE, nullptr /*output*/, nullptr /*portId*/,
+ attr);
if (std::find_if(begin(mUsageRules), end(mUsageRules), [&usage](const auto &usageRule) {
return (std::get<0>(usageRule) == usage) &&
(std::get<2>(usageRule) == RULE_MATCH_ATTRIBUTE_USAGE);}) != end(mUsageRules) ||
@@ -1228,7 +1228,7 @@
audio_config_t audioConfig = AUDIO_CONFIG_INITIALIZER;
audioConfig.channel_mask = AUDIO_CHANNEL_IN_STEREO;
audioConfig.format = AUDIO_FORMAT_PCM_16_BIT;
- audioConfig.sample_rate = 48000;
+ audioConfig.sample_rate = k48000SamplingRate;
status_t ret = addPolicyMix(MIX_TYPE_RECORDERS, MIX_ROUTE_FLAG_LOOP_BACK,
AUDIO_DEVICE_IN_REMOTE_SUBMIX, mMixAddress, audioConfig, mSourceRules);
ASSERT_EQ(NO_ERROR, ret);
@@ -1244,7 +1244,7 @@
std::string tags = std::string("addr=") + mMixAddress;
strncpy(attr.tags, tags.c_str(), AUDIO_ATTRIBUTES_TAGS_MAX_SIZE - 1);
getOutputForAttr(&selectedDeviceId, AUDIO_FORMAT_PCM_16_BIT, AUDIO_CHANNEL_OUT_STEREO,
- 48000 /*sampleRate*/, AUDIO_OUTPUT_FLAG_NONE, nullptr /*output*/, &mPortId, attr);
+ k48000SamplingRate, AUDIO_OUTPUT_FLAG_NONE, nullptr /*output*/, &mPortId, attr);
ASSERT_EQ(NO_ERROR, mManager->startOutput(mPortId));
ASSERT_EQ(injectionPort.id, getDeviceIdFromPatch(mClient->getLastAddedPatch()));
@@ -1272,7 +1272,7 @@
audio_port_handle_t captureRoutedPortId = AUDIO_PORT_HANDLE_NONE;
audio_port_handle_t portId = AUDIO_PORT_HANDLE_NONE;
getInputForAttr(attr, mTracker->getRiid(), &captureRoutedPortId, AUDIO_FORMAT_PCM_16_BIT,
- AUDIO_CHANNEL_IN_STEREO, 48000 /*sampleRate*/, AUDIO_INPUT_FLAG_NONE, &portId);
+ AUDIO_CHANNEL_IN_STEREO, k48000SamplingRate, AUDIO_INPUT_FLAG_NONE, &portId);
if (std::find_if(begin(mSourceRules), end(mSourceRules), [&source](const auto &sourceRule) {
return (std::get<1>(sourceRule) == source) &&
(std::get<2>(sourceRule) == RULE_MATCH_ATTRIBUTE_CAPTURE_PRESET);})
@@ -1422,11 +1422,11 @@
// Try start input or output according to the device type
if (audio_is_output_devices(type)) {
getOutputForAttr(&routedPortId, AUDIO_FORMAT_PCM_16_BIT, AUDIO_CHANNEL_OUT_STEREO,
- 48000 /*sampleRate*/, AUDIO_OUTPUT_FLAG_NONE);
+ k48000SamplingRate, AUDIO_OUTPUT_FLAG_NONE);
} else if (audio_is_input_device(type)) {
RecordingActivityTracker tracker;
getInputForAttr({}, tracker.getRiid(), &routedPortId, AUDIO_FORMAT_PCM_16_BIT,
- AUDIO_CHANNEL_IN_STEREO, 48000 /*sampleRate*/, AUDIO_INPUT_FLAG_NONE);
+ AUDIO_CHANNEL_IN_STEREO, k48000SamplingRate, AUDIO_INPUT_FLAG_NONE);
}
ASSERT_EQ(devicePort.id, routedPortId);
@@ -1450,6 +1450,57 @@
)
);
+class AudioPolicyManagerCarTest : public AudioPolicyManagerTestDynamicPolicy {
+protected:
+ std::string getConfigFile() override { return sCarConfig; }
+
+ static const std::string sCarConfig;
+};
+
+const std::string AudioPolicyManagerCarTest::sCarConfig =
+ AudioPolicyManagerCarTest::sExecutableDir + "test_car_ap_atmos_offload_configuration.xml";
+
+TEST_F(AudioPolicyManagerCarTest, InitSuccess) {
+ // SetUp must finish with no assertions.
+}
+
+TEST_F(AudioPolicyManagerCarTest, Dump) {
+ dumpToLog();
+}
+
+TEST_F(AudioPolicyManagerCarTest, GetOutputForAttrAtmosOutputAfterRegisteringPolicyMix) {
+ status_t ret;
+ audio_config_t audioConfig = AUDIO_CONFIG_INITIALIZER;
+ const std::string kTestBusMediaOutput = "bus0_media_out";
+ ret = addPolicyMix(MIX_TYPE_PLAYERS, MIX_ROUTE_FLAG_RENDER,
+ AUDIO_DEVICE_OUT_BUS, kTestBusMediaOutput, audioConfig, std::vector<PolicyMixTuple>());
+ ASSERT_EQ(NO_ERROR, ret);
+
+ audio_port_handle_t selectedDeviceId = AUDIO_PORT_HANDLE_NONE;
+ audio_io_handle_t output;
+ audio_port_handle_t portId;
+ getOutputForAttr(&selectedDeviceId, AUDIO_FORMAT_E_AC3_JOC, AUDIO_CHANNEL_OUT_5POINT1,
+ k48000SamplingRate, AUDIO_OUTPUT_FLAG_DIRECT, &output, &portId);
+ ASSERT_NE(AUDIO_PORT_HANDLE_NONE, selectedDeviceId);
+ sp<SwAudioOutputDescriptor> outDesc = mManager->getOutputs().valueFor(output);
+ ASSERT_NE(nullptr, outDesc.get());
+ ASSERT_EQ(AUDIO_FORMAT_E_AC3_JOC, outDesc->getFormat());
+ ASSERT_EQ(AUDIO_CHANNEL_OUT_5POINT1, outDesc->getChannelMask());
+ ASSERT_EQ(k48000SamplingRate, outDesc->getSamplingRate());
+
+ selectedDeviceId = AUDIO_PORT_HANDLE_NONE;
+ output = AUDIO_IO_HANDLE_NONE;
+ portId = AUDIO_PORT_HANDLE_NONE;
+ getOutputForAttr(&selectedDeviceId, AUDIO_FORMAT_PCM_16_BIT, AUDIO_CHANNEL_OUT_7POINT1POINT4,
+ k48000SamplingRate, AUDIO_OUTPUT_FLAG_DIRECT, &output, &portId);
+ ASSERT_NE(AUDIO_PORT_HANDLE_NONE, selectedDeviceId);
+ outDesc = mManager->getOutputs().valueFor(output);
+ ASSERT_NE(nullptr, outDesc.get());
+ ASSERT_EQ(AUDIO_FORMAT_PCM_16_BIT, outDesc->getFormat());
+ ASSERT_EQ(AUDIO_CHANNEL_OUT_7POINT1POINT4, outDesc->getChannelMask());
+ ASSERT_EQ(k48000SamplingRate, outDesc->getSamplingRate());
+}
+
class AudioPolicyManagerTVTest : public AudioPolicyManagerTestWithConfigurationFile {
protected:
std::string getConfigFile() override { return sTvConfig; }
@@ -1470,8 +1521,8 @@
audio_port_handle_t selectedDeviceId = AUDIO_PORT_HANDLE_NONE;
audio_io_handle_t output;
audio_port_handle_t portId;
- getOutputForAttr(&selectedDeviceId, AUDIO_FORMAT_PCM_16_BIT, AUDIO_CHANNEL_OUT_STEREO, 48000,
- flags, &output, &portId);
+ getOutputForAttr(&selectedDeviceId, AUDIO_FORMAT_PCM_16_BIT, AUDIO_CHANNEL_OUT_STEREO,
+ k48000SamplingRate, flags, &output, &portId);
sp<SwAudioOutputDescriptor> outDesc = mManager->getOutputs().valueFor(output);
ASSERT_NE(nullptr, outDesc.get());
audio_port_v7 port = {};
diff --git a/services/audiopolicy/tests/resources/Android.bp b/services/audiopolicy/tests/resources/Android.bp
index ff4d568..5e71210 100644
--- a/services/audiopolicy/tests/resources/Android.bp
+++ b/services/audiopolicy/tests/resources/Android.bp
@@ -12,6 +12,7 @@
srcs: [
"test_audio_policy_configuration.xml",
"test_audio_policy_primary_only_configuration.xml",
+ "test_car_ap_atmos_offload_configuration.xml",
"test_invalid_audio_policy_configuration.xml",
"test_tv_apm_configuration.xml",
"test_settop_box_surround_configuration.xml",
diff --git a/services/audiopolicy/tests/resources/test_car_ap_atmos_offload_configuration.xml b/services/audiopolicy/tests/resources/test_car_ap_atmos_offload_configuration.xml
new file mode 100644
index 0000000..d131ed8
--- /dev/null
+++ b/services/audiopolicy/tests/resources/test_car_ap_atmos_offload_configuration.xml
@@ -0,0 +1,308 @@
+<?xml version="1.0" encoding="UTF-8" standalone="yes"?>
+<!-- Copyright (C) 2018 The Android Open Source Project
+
+ Licensed under the Apache License, Version 2.0 (the "License");
+ you may not use this file except in compliance with the License.
+ You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
+-->
+
+<audioPolicyConfiguration version="1.0" xmlns:xi="http://www.w3.org/2001/XInclude">
+ <globalConfiguration speaker_drc_enabled="true"/>
+
+ <modules>
+ <!-- Primary Audio HAL -->
+ <module name="primary" halVersion="3.0">
+ <attachedDevices>
+ <!-- One bus per context -->
+ <item>bus0_media_out</item>
+ <item>bus1_navigation_out</item>
+ <item>bus2_voice_command_out</item>
+ <item>bus3_call_ring_out</item>
+ <item>bus4_call_out</item>
+ <item>bus5_alarm_out</item>
+ <item>bus6_notification_out</item>
+ <item>bus7_system_sound_out</item>
+            <!-- names with _audio_zone_# are used to define an emulator rear seat audio zone,
+                 where the number # is the zone id -->
+ <item>bus100_audio_zone_1</item>
+ <item>bus200_audio_zone_2</item>
+ <item>Built-In Mic</item>
+ <item>Built-In Back Mic</item>
+ <item>Echo-Reference Mic</item>
+ <item>FM Tuner</item>
+ <item>Tone Generator 0</item>
+ <item>Tone Generator 1</item>
+ </attachedDevices>
+ <defaultOutputDevice>bus0_media_out</defaultOutputDevice>
+ <mixPorts>
+ <mixPort name="mixport_bus0_media_out" role="source"
+ flags="AUDIO_OUTPUT_FLAG_PRIMARY">
+ <profile name="" format="AUDIO_FORMAT_PCM_16_BIT"
+ samplingRates="48000"
+ channelMasks="AUDIO_CHANNEL_OUT_STEREO"/>
+ </mixPort>
+ <mixPort name="mixport_bus0_media_out_atmos" role="source"
+ flags="AUDIO_OUTPUT_FLAG_DIRECT">
+ <profile name="" format="AUDIO_FORMAT_E_AC3_JOC"
+ samplingRates="48000"
+ channelMasks="AUDIO_CHANNEL_OUT_STEREO,AUDIO_CHANNEL_OUT_5POINT1"/>
+ </mixPort>
+ <mixPort name="mixport_bus0_media_out_atmos_pcm" role="source"
+ flags="AUDIO_OUTPUT_FLAG_DIRECT">
+ <profile name="" format="AUDIO_FORMAT_PCM_16_BIT"
+ samplingRates="48000"
+ channelMasks="AUDIO_CHANNEL_OUT_7POINT1POINT4"/>
+ </mixPort>
+ <mixPort name="mixport_bus1_navigation_out" role="source">
+ <profile name="" format="AUDIO_FORMAT_PCM_16_BIT"
+ samplingRates="48000"
+ channelMasks="AUDIO_CHANNEL_OUT_STEREO"/>
+ </mixPort>
+ <mixPort name="mixport_bus2_voice_command_out" role="source">
+ <profile name="" format="AUDIO_FORMAT_PCM_16_BIT"
+ samplingRates="48000"
+ channelMasks="AUDIO_CHANNEL_OUT_STEREO"/>
+ </mixPort>
+ <mixPort name="mixport_bus3_call_ring_out" role="source">
+ <profile name="" format="AUDIO_FORMAT_PCM_16_BIT"
+ samplingRates="48000"
+ channelMasks="AUDIO_CHANNEL_OUT_STEREO"/>
+ </mixPort>
+ <mixPort name="mixport_bus4_call_out" role="source">
+ <profile name="" format="AUDIO_FORMAT_PCM_16_BIT"
+ samplingRates="48000"
+ channelMasks="AUDIO_CHANNEL_OUT_STEREO"/>
+ </mixPort>
+ <mixPort name="mixport_bus5_alarm_out" role="source">
+ <profile name="" format="AUDIO_FORMAT_PCM_16_BIT"
+ samplingRates="48000"
+ channelMasks="AUDIO_CHANNEL_OUT_STEREO"/>
+ </mixPort>
+ <mixPort name="mixport_bus6_notification_out" role="source">
+ <profile name="" format="AUDIO_FORMAT_PCM_16_BIT"
+ samplingRates="48000"
+ channelMasks="AUDIO_CHANNEL_OUT_STEREO"/>
+ </mixPort>
+ <mixPort name="mixport_bus7_system_sound_out" role="source">
+ <profile name="" format="AUDIO_FORMAT_PCM_16_BIT"
+ samplingRates="48000"
+ channelMasks="AUDIO_CHANNEL_OUT_STEREO"/>
+ </mixPort>
+ <mixPort name="mixport_bus100_audio_zone_1" role="source">
+ <profile name="" format="AUDIO_FORMAT_PCM_16_BIT"
+ samplingRates="48000"
+ channelMasks="AUDIO_CHANNEL_OUT_STEREO"/>
+ </mixPort>
+ <mixPort name="mixport_bus200_audio_zone_2" role="source">
+ <profile name="" format="AUDIO_FORMAT_PCM_16_BIT"
+ samplingRates="48000"
+ channelMasks="AUDIO_CHANNEL_OUT_STEREO"/>
+ </mixPort>
+ <mixPort name="primary input" role="sink">
+ <profile name="" format="AUDIO_FORMAT_PCM_16_BIT"
+ samplingRates="8000,11025,12000,16000,22050,24000,32000,44100,48000"
+ channelMasks="AUDIO_CHANNEL_IN_MONO,AUDIO_CHANNEL_IN_STEREO,AUDIO_CHANNEL_IN_FRONT_BACK"/>
+ </mixPort>
+ <mixPort name="mixport_tuner0" role="sink">
+ <profile name="" format="AUDIO_FORMAT_PCM_16_BIT"
+ samplingRates="48000"
+ channelMasks="AUDIO_CHANNEL_IN_STEREO"/>
+ </mixPort>
+ <mixPort name="mixport_input_bus_tone_zone_0" role="sink">
+ <profile name="" format="AUDIO_FORMAT_PCM_16_BIT"
+ samplingRates="48000"
+ channelMasks="AUDIO_CHANNEL_IN_STEREO"/>
+ </mixPort>
+ <mixPort name="mixport_input_bus_tone_zone_1" role="sink">
+ <profile name="" format="AUDIO_FORMAT_PCM_16_BIT"
+ samplingRates="48000"
+ channelMasks="AUDIO_CHANNEL_IN_STEREO"/>
+ </mixPort>
+ </mixPorts>
+ <devicePorts>
+ <devicePort tagName="bus0_media_out" role="sink" type="AUDIO_DEVICE_OUT_BUS"
+ address="bus0_media_out">
+ <profile name="" format="AUDIO_FORMAT_PCM_16_BIT"
+ samplingRates="48000" channelMasks="AUDIO_CHANNEL_OUT_STEREO"/>
+ <profile name="" format="AUDIO_FORMAT_E_AC3_JOC"
+ samplingRates="48000"
+ channelMasks="AUDIO_CHANNEL_OUT_STEREO,AUDIO_CHANNEL_OUT_5POINT1"/>
+ <profile name="" format="AUDIO_FORMAT_PCM_16_BIT"
+ samplingRates="48000" channelMasks="AUDIO_CHANNEL_OUT_7POINT1POINT4"/>
+ <gains>
+ <gain name="" mode="AUDIO_GAIN_MODE_JOINT"
+ minValueMB="-3200" maxValueMB="600" defaultValueMB="0"
+ stepValueMB="100"/>
+ </gains>
+ </devicePort>
+ <devicePort tagName="bus1_navigation_out" role="sink" type="AUDIO_DEVICE_OUT_BUS"
+ address="bus1_navigation_out">
+ <profile name="" format="AUDIO_FORMAT_PCM_16_BIT"
+ samplingRates="48000" channelMasks="AUDIO_CHANNEL_OUT_STEREO"/>
+ <gains>
+ <gain name="" mode="AUDIO_GAIN_MODE_JOINT"
+ minValueMB="-3200" maxValueMB="600" defaultValueMB="0"
+ stepValueMB="100"/>
+ </gains>
+ </devicePort>
+ <devicePort tagName="bus2_voice_command_out" role="sink" type="AUDIO_DEVICE_OUT_BUS"
+ address="bus2_voice_command_out">
+ <profile name="" format="AUDIO_FORMAT_PCM_16_BIT"
+ samplingRates="48000" channelMasks="AUDIO_CHANNEL_OUT_STEREO"/>
+ <gains>
+ <gain name="" mode="AUDIO_GAIN_MODE_JOINT"
+ minValueMB="-3200" maxValueMB="600" defaultValueMB="0"
+ stepValueMB="100"/>
+ </gains>
+ </devicePort>
+ <devicePort tagName="bus3_call_ring_out" role="sink" type="AUDIO_DEVICE_OUT_BUS"
+ address="bus3_call_ring_out">
+ <profile name="" format="AUDIO_FORMAT_PCM_16_BIT"
+ samplingRates="48000" channelMasks="AUDIO_CHANNEL_OUT_STEREO"/>
+ <gains>
+ <gain name="" mode="AUDIO_GAIN_MODE_JOINT"
+ minValueMB="-3200" maxValueMB="600" defaultValueMB="0"
+ stepValueMB="100"/>
+ </gains>
+ </devicePort>
+ <devicePort tagName="bus4_call_out" role="sink" type="AUDIO_DEVICE_OUT_BUS"
+ address="bus4_call_out">
+ <profile name="" format="AUDIO_FORMAT_PCM_16_BIT"
+ samplingRates="48000" channelMasks="AUDIO_CHANNEL_OUT_STEREO"/>
+ <gains>
+ <gain name="" mode="AUDIO_GAIN_MODE_JOINT"
+ minValueMB="-3200" maxValueMB="600" defaultValueMB="0"
+ stepValueMB="100"/>
+ </gains>
+ </devicePort>
+ <devicePort tagName="bus5_alarm_out" role="sink" type="AUDIO_DEVICE_OUT_BUS"
+ address="bus5_alarm_out">
+ <profile name="" format="AUDIO_FORMAT_PCM_16_BIT"
+ samplingRates="48000" channelMasks="AUDIO_CHANNEL_OUT_STEREO"/>
+ <gains>
+ <gain name="" mode="AUDIO_GAIN_MODE_JOINT"
+ minValueMB="-3200" maxValueMB="600" defaultValueMB="0"
+ stepValueMB="100"/>
+ </gains>
+ </devicePort>
+ <devicePort tagName="bus6_notification_out" role="sink" type="AUDIO_DEVICE_OUT_BUS"
+ address="bus6_notification_out">
+ <profile name="" format="AUDIO_FORMAT_PCM_16_BIT"
+ samplingRates="48000" channelMasks="AUDIO_CHANNEL_OUT_STEREO"/>
+ <gains>
+ <gain name="" mode="AUDIO_GAIN_MODE_JOINT"
+ minValueMB="-3200" maxValueMB="600" defaultValueMB="0"
+ stepValueMB="100"/>
+ </gains>
+ </devicePort>
+ <devicePort tagName="bus7_system_sound_out" role="sink" type="AUDIO_DEVICE_OUT_BUS"
+ address="bus7_system_sound_out">
+ <profile name="" format="AUDIO_FORMAT_PCM_16_BIT"
+ samplingRates="48000" channelMasks="AUDIO_CHANNEL_OUT_STEREO"/>
+ <gains>
+ <gain name="" mode="AUDIO_GAIN_MODE_JOINT"
+ minValueMB="-3200" maxValueMB="600" defaultValueMB="0"
+ stepValueMB="100"/>
+ </gains>
+ </devicePort>
+ <devicePort tagName="bus100_audio_zone_1" role="sink" type="AUDIO_DEVICE_OUT_BUS"
+ address="bus100_audio_zone_1">
+ <profile name="" format="AUDIO_FORMAT_PCM_16_BIT"
+ samplingRates="48000" channelMasks="AUDIO_CHANNEL_OUT_STEREO"/>
+ <gains>
+ <gain name="" mode="AUDIO_GAIN_MODE_JOINT"
+ minValueMB="-3200" maxValueMB="600" defaultValueMB="0"
+ stepValueMB="100"/>
+ </gains>
+ </devicePort>
+ <devicePort tagName="bus200_audio_zone_2" role="sink" type="AUDIO_DEVICE_OUT_BUS"
+ address="bus200_audio_zone_2">
+ <profile name="" format="AUDIO_FORMAT_PCM_16_BIT"
+ samplingRates="48000" channelMasks="AUDIO_CHANNEL_OUT_STEREO"/>
+ <gains>
+ <gain name="" mode="AUDIO_GAIN_MODE_JOINT"
+ minValueMB="-3200" maxValueMB="600" defaultValueMB="0"
+ stepValueMB="100"/>
+ </gains>
+ </devicePort>
+ <devicePort tagName="Built-In Mic" type="AUDIO_DEVICE_IN_BUILTIN_MIC" role="source">
+ <profile name="" format="AUDIO_FORMAT_PCM_16_BIT"
+ samplingRates="8000,11025,12000,16000,22050,24000,32000,44100,48000"
+ channelMasks="AUDIO_CHANNEL_IN_MONO,AUDIO_CHANNEL_IN_STEREO,AUDIO_CHANNEL_IN_FRONT_BACK"/>
+ </devicePort>
+ <devicePort tagName="Built-In Back Mic" type="AUDIO_DEVICE_IN_BACK_MIC"
+ role="source">
+ <profile name="" format="AUDIO_FORMAT_PCM_16_BIT"
+ samplingRates="8000,11025,12000,16000,22050,24000,32000,44100,48000"
+ channelMasks="AUDIO_CHANNEL_IN_MONO,AUDIO_CHANNEL_IN_STEREO,AUDIO_CHANNEL_IN_FRONT_BACK"/>
+ </devicePort>
+ <devicePort tagName="Echo-Reference Mic" type="AUDIO_DEVICE_IN_ECHO_REFERENCE"
+ role="source">
+ <profile name="" format="AUDIO_FORMAT_PCM_16_BIT"
+ samplingRates="8000,11025,12000,16000,22050,24000,32000,44100,48000"
+ channelMasks="AUDIO_CHANNEL_IN_MONO,AUDIO_CHANNEL_IN_STEREO,AUDIO_CHANNEL_IN_FRONT_BACK"/>
+ </devicePort>
+ <devicePort tagName="FM Tuner" type="AUDIO_DEVICE_IN_FM_TUNER" role="source"
+ address="tuner0">
+ <profile name="" format="AUDIO_FORMAT_PCM_16_BIT"
+ samplingRates="48000" channelMasks="AUDIO_CHANNEL_IN_STEREO"/>
+ <gains>
+ <gain name="" mode="AUDIO_GAIN_MODE_JOINT"
+ minValueMB="-3200" maxValueMB="600" defaultValueMB="0"
+ stepValueMB="100"/>
+ </gains>
+ </devicePort>
+ <devicePort tagName="Tone Generator 0" type="AUDIO_DEVICE_IN_BUS" role="source"
+ address="input_bus_tone_zone_0">
+ <profile name="" format="AUDIO_FORMAT_PCM_16_BIT"
+ samplingRates="48000" channelMasks="AUDIO_CHANNEL_IN_STEREO"/>
+ <gains>
+ <gain name="" mode="AUDIO_GAIN_MODE_JOINT"
+ minValueMB="-3200" maxValueMB="600" defaultValueMB="0"
+ stepValueMB="100"/>
+ </gains>
+ </devicePort>
+ <devicePort tagName="Tone Generator 1" type="AUDIO_DEVICE_IN_BUS" role="source"
+ address="input_bus_tone_zone_1">
+ <profile name="" format="AUDIO_FORMAT_PCM_16_BIT"
+ samplingRates="48000" channelMasks="AUDIO_CHANNEL_IN_STEREO"/>
+ <gains>
+ <gain name="" mode="AUDIO_GAIN_MODE_JOINT"
+ minValueMB="-3200" maxValueMB="600" defaultValueMB="0"
+ stepValueMB="100"/>
+ </gains>
+ </devicePort>
+ </devicePorts>
+ <!-- route declaration, i.e. list all available sources for a given sink -->
+ <routes>
+ <route type="mix" sink="bus0_media_out"
+ sources="mixport_bus0_media_out,mixport_bus0_media_out_atmos,mixport_bus0_media_out_atmos_pcm"/>
+ <route type="mix" sink="bus1_navigation_out" sources="mixport_bus1_navigation_out"/>
+ <route type="mix" sink="bus2_voice_command_out"
+ sources="mixport_bus2_voice_command_out"/>
+ <route type="mix" sink="bus3_call_ring_out" sources="mixport_bus3_call_ring_out"/>
+ <route type="mix" sink="bus4_call_out" sources="mixport_bus4_call_out"/>
+ <route type="mix" sink="bus5_alarm_out" sources="mixport_bus5_alarm_out"/>
+ <route type="mix" sink="bus6_notification_out"
+ sources="mixport_bus6_notification_out"/>
+ <route type="mix" sink="bus7_system_sound_out"
+ sources="mixport_bus7_system_sound_out"/>
+ <route type="mix" sink="bus100_audio_zone_1" sources="mixport_bus100_audio_zone_1"/>
+ <route type="mix" sink="bus200_audio_zone_2" sources="mixport_bus200_audio_zone_2"/>
+ <route type="mix" sink="primary input"
+ sources="Built-In Mic,Built-In Back Mic,Echo-Reference Mic"/>
+ <route type="mix" sink="mixport_tuner0" sources="FM Tuner"/>
+ <route type="mix" sink="mixport_input_bus_tone_zone_0" sources="Tone Generator 0"/>
+ <route type="mix" sink="mixport_input_bus_tone_zone_1" sources="Tone Generator 1"/>
+ </routes>
+ </module>
+ </modules>
+</audioPolicyConfiguration>
diff --git a/services/camera/libcameraservice/api2/HeicCompositeStream.cpp b/services/camera/libcameraservice/api2/HeicCompositeStream.cpp
index 6058429..5da77d6 100644
--- a/services/camera/libcameraservice/api2/HeicCompositeStream.cpp
+++ b/services/camera/libcameraservice/api2/HeicCompositeStream.cpp
@@ -441,6 +441,10 @@
newFormat->setInt32(KEY_TILE_HEIGHT, mGridHeight);
newFormat->setInt32(KEY_GRID_ROWS, mGridRows);
newFormat->setInt32(KEY_GRID_COLUMNS, mGridCols);
+ int32_t left, top, right, bottom;
+ if (newFormat->findRect("crop", &left, &top, &right, &bottom)) {
+ newFormat->setRect("crop", 0, 0, mOutputWidth - 1, mOutputHeight - 1);
+ }
}
}
newFormat->setInt32(KEY_IS_DEFAULT, 1 /*isPrimary*/);
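A minimal sketch of the crop-reset logic added above, using a hypothetical key/value format type rather than the real AMessage API: when the output format already carries a crop rectangle, it is rewritten to span the full output frame (right and bottom are inclusive, hence the -1).

    #include <cstdint>
    #include <map>
    #include <string>

    struct Rect { int32_t left, top, right, bottom; };

    struct Format {
        std::map<std::string, Rect> rects;
        bool findRect(const std::string& key, Rect* out) const {
            auto it = rects.find(key);
            if (it == rects.end()) return false;
            *out = it->second;
            return true;
        }
        void setRect(const std::string& key, const Rect& r) { rects[key] = r; }
    };

    void resetCropToFullFrame(Format& format, int32_t outputWidth, int32_t outputHeight) {
        Rect crop;
        // Only touch the crop if one is already present on the format.
        if (format.findRect("crop", &crop)) {
            format.setRect("crop", {0, 0, outputWidth - 1, outputHeight - 1});
        }
    }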
diff --git a/services/camera/libcameraservice/common/CameraProviderManager.cpp b/services/camera/libcameraservice/common/CameraProviderManager.cpp
index 0cce2ca..886af1f 100644
--- a/services/camera/libcameraservice/common/CameraProviderManager.cpp
+++ b/services/camera/libcameraservice/common/CameraProviderManager.cpp
@@ -356,7 +356,11 @@
std::lock_guard<std::mutex> lock(mInterfaceMutex);
mDeviceState = newState;
status_t res = OK;
- for (auto& provider : mProviders) {
+ // Make a copy of mProviders because we unlock mInterfaceMutex temporarily
+ // within the loop. It's possible that during the time mInterfaceMutex is
+ // unlocked, mProviders has changed.
+ auto providers = mProviders;
+ for (auto& provider : providers) {
ALOGV("%s: Notifying %s for new state 0x%" PRIx64,
__FUNCTION__, provider->mProviderName.c_str(), newState);
// b/199240726 Camera providers can for example try to add/remove
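A minimal standalone sketch of the copy-before-iterate pattern in the hunk above, with hypothetical names rather than the real CameraProviderManager members: the container is snapshotted while the lock is held, so the loop stays valid even if another thread mutates the original list while the lock is temporarily released.

    #include <cstdint>
    #include <memory>
    #include <mutex>
    #include <vector>

    struct Provider {
        // May call back into the manager, which is why the lock is dropped around it.
        void notifyDeviceStateChange(int64_t /*state*/) {}
    };

    std::mutex gInterfaceMutex;                          // guards gProviders
    std::vector<std::shared_ptr<Provider>> gProviders;

    void notifyAllProviders(int64_t newState) {
        std::unique_lock<std::mutex> lock(gInterfaceMutex);
        // Snapshot under the lock: the copy (and the shared_ptrs it holds) stays
        // valid even if gProviders is modified once the lock is dropped below.
        auto providers = gProviders;
        for (const auto& provider : providers) {
            lock.unlock();
            provider->notifyDeviceStateChange(newState);
            lock.lock();
        }
    }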
diff --git a/services/camera/libcameraservice/device3/Camera3Device.cpp b/services/camera/libcameraservice/device3/Camera3Device.cpp
index de418da..f7fd5d6 100644
--- a/services/camera/libcameraservice/device3/Camera3Device.cpp
+++ b/services/camera/libcameraservice/device3/Camera3Device.cpp
@@ -4091,6 +4091,26 @@
// Verify buffer caches
std::vector<uint64_t> bufIds(offlineStream.circulatingBufferIds.begin(),
offlineStream.circulatingBufferIds.end());
+ {
+            // Due to timing, there may be no remaining pending capture requests left to
+            // update the caches on the HAL side. This can result in a buffer cache mismatch
+            // between the service and the HAL, which must be accounted for.
+ std::lock_guard<std::mutex> l(mFreedBuffersLock);
+ for (const auto& it : mFreedBuffers) {
+ if (it.first == id) {
+ ALOGV("%s: stream ID %d buffer id %" PRIu64 " cache removal still pending",
+ __FUNCTION__, id, it.second);
+ const auto& cachedEntry = std::find(bufIds.begin(), bufIds.end(), it.second);
+ if (cachedEntry != bufIds.end()) {
+ bufIds.erase(cachedEntry);
+ } else {
+ ALOGE("%s: stream ID %d buffer id %" PRIu64 " cache removal still pending "
+ "however buffer is no longer in the offline stream info!",
+ __FUNCTION__, id, it.second);
+ }
+ }
+ }
+ }
if (!verifyBufferIds(id, bufIds)) {
ALOGE("%s: stream ID %d buffer cache records mismatch!", __FUNCTION__, id);
return UNKNOWN_ERROR;
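A small sketch of the reconciliation step added above, using hypothetical types instead of Camera3Device's internal state: any buffer whose cache removal is still pending for this stream is dropped from the ID list before it is verified against the HAL's view, so a pending removal does not register as a mismatch.

    #include <algorithm>
    #include <cstdint>
    #include <utility>
    #include <vector>

    // freedBuffers holds (streamId, bufferId) pairs still queued for cache removal.
    // Returns the number of entries removed from bufIds.
    size_t dropPendingRemovals(int streamId,
                               const std::vector<std::pair<int, uint64_t>>& freedBuffers,
                               std::vector<uint64_t>& bufIds) {
        size_t dropped = 0;
        for (const auto& entry : freedBuffers) {
            if (entry.first != streamId) continue;
            auto it = std::find(bufIds.begin(), bufIds.end(), entry.second);
            if (it != bufIds.end()) {
                bufIds.erase(it);
                ++dropped;
            }
        }
        return dropped;
    }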
diff --git a/services/mediametrics/AudioPowerUsage.h b/services/mediametrics/AudioPowerUsage.h
index 7021902..4d12714 100644
--- a/services/mediametrics/AudioPowerUsage.h
+++ b/services/mediametrics/AudioPowerUsage.h
@@ -49,7 +49,7 @@
*/
std::pair<std::string, int32_t> dump(int32_t lines = INT32_MAX) const;
- // align with message AudioUsageDataReported in frameworks/base/cmds/statsd/src/atoms.proto
+ // align with message AudioPowerUsageDataReported in frameworks/proto_logging/stats/atoms.proto
enum AudioType {
UNKNOWN_TYPE = 0,
VOICE_CALL_TYPE = 1, // voice call
diff --git a/services/mediametrics/MediaMetricsService.cpp b/services/mediametrics/MediaMetricsService.cpp
index 636b343..5e56fbd 100644
--- a/services/mediametrics/MediaMetricsService.cpp
+++ b/services/mediametrics/MediaMetricsService.cpp
@@ -72,6 +72,7 @@
bool MediaMetricsService::useUidForPackage(
const std::string& package, const std::string& installer)
{
+ // NOLINTBEGIN(bugprone-branch-clone)
if (strchr(package.c_str(), '.') == nullptr) {
return false; // not of form 'com.whatever...'; assume internal and ok
} else if (strncmp(package.c_str(), "android.", 8) == 0) {
@@ -85,6 +86,7 @@
} else {
return true; // we're not sure where it came from, use uid only.
}
+ // NOLINTEND(bugprone-branch-clone)
}
/* static */
diff --git a/services/mediametrics/statsd_audiorecord.cpp b/services/mediametrics/statsd_audiorecord.cpp
index a7b045e..01adf7f 100644
--- a/services/mediametrics/statsd_audiorecord.cpp
+++ b/services/mediametrics/statsd_audiorecord.cpp
@@ -99,16 +99,14 @@
}
int32_t error_code = -1;
- if (item->getInt32("android.media.audiorecord.errcode", &error_code)) {
- metrics_proto.set_error_code(error_code);
- } else if (item->getInt32("android.media.audiorecord.lastError.code", &error_code)) {
+ if (item->getInt32("android.media.audiorecord.errcode", &error_code) ||
+ item->getInt32("android.media.audiorecord.lastError.code", &error_code)) {
metrics_proto.set_error_code(error_code);
}
std::string error_function;
- if (item->getString("android.media.audiorecord.errfunc", &error_function)) {
- metrics_proto.set_error_function(error_function);
- } else if (item->getString("android.media.audiorecord.lastError.at", &error_function)) {
+ if (item->getString("android.media.audiorecord.errfunc", &error_function) ||
+ item->getString("android.media.audiorecord.lastError.at", &error_function)) {
metrics_proto.set_error_function(error_function);
}
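A tiny sketch of the folded fallback lookup above, with a hypothetical item type in place of the mediametrics item: because || short-circuits, the legacy key is only consulted when the primary key is missing, and a single branch sets the result either way.

    #include <cstdint>
    #include <map>
    #include <string>

    struct Item {
        std::map<std::string, int32_t> values;
        bool getInt32(const std::string& key, int32_t* out) const {
            auto it = values.find(key);
            if (it == values.end()) return false;
            *out = it->second;
            return true;
        }
    };

    int32_t lookupErrorCode(const Item& item) {
        int32_t code = -1;
        // Primary key first; the legacy key is tried only if the first lookup fails.
        if (item.getInt32("errcode", &code) || item.getInt32("lastError.code", &code)) {
            return code;
        }
        return -1;  // neither key present
    }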
diff --git a/services/mediaresourcemanager/fuzzer/mediaresourcemanager_fuzzer.cpp b/services/mediaresourcemanager/fuzzer/mediaresourcemanager_fuzzer.cpp
index 6690b16..844f9fc 100644
--- a/services/mediaresourcemanager/fuzzer/mediaresourcemanager_fuzzer.cpp
+++ b/services/mediaresourcemanager/fuzzer/mediaresourcemanager_fuzzer.cpp
@@ -226,33 +226,31 @@
mFuzzedDataProvider->ConsumeIntegralInRange<size_t>(kMinThreadPairs, kMaxThreadPairs);
// Make even number of threads
size_t numThreads = numThreadPairs * 2;
- resourceThreadArgs threadArgs;
- vector<MediaResourceParcel> mediaResource;
+ resourceThreadArgs threadArgs[numThreadPairs];
+ vector<MediaResourceParcel> mediaResource[numThreadPairs];
pthread_t pt[numThreads];
- int i;
- for (i = 0; i < numThreads - 1; i += 2) {
- threadArgs.pid = mFuzzedDataProvider->ConsumeIntegral<int32_t>();
- threadArgs.uid = mFuzzedDataProvider->ConsumeIntegral<int32_t>();
+ for (int k = 0; k < numThreadPairs; ++k) {
+ threadArgs[k].pid = mFuzzedDataProvider->ConsumeIntegral<int32_t>();
+ threadArgs[k].uid = mFuzzedDataProvider->ConsumeIntegral<int32_t>();
int32_t mediaResourceType = mFuzzedDataProvider->ConsumeIntegralInRange<int32_t>(
kMinResourceType, kMaxResourceType);
int32_t mediaResourceSubType = mFuzzedDataProvider->ConsumeIntegralInRange<int32_t>(
kMinResourceType, kMaxResourceType);
uint64_t mediaResourceValue = mFuzzedDataProvider->ConsumeIntegral<uint64_t>();
- threadArgs.service = mService;
+ threadArgs[k].service = mService;
shared_ptr<IResourceManagerClient> testClient =
- ::ndk::SharedRefBase::make<TestClient>(threadArgs.pid, mService);
- threadArgs.testClient = testClient;
- threadArgs.testClientId = getId(testClient);
- mediaResource.push_back(MediaResource(static_cast<MedResType>(mediaResourceType),
- static_cast<MedResSubType>(mediaResourceSubType),
- mediaResourceValue));
- threadArgs.mediaResource = mediaResource;
- pthread_create(&pt[i], nullptr, addResource, &threadArgs);
- pthread_create(&pt[i + 1], nullptr, removeResource, &threadArgs);
- mediaResource.clear();
+ ::ndk::SharedRefBase::make<TestClient>(threadArgs[k].pid, mService);
+ threadArgs[k].testClient = testClient;
+ threadArgs[k].testClientId = getId(testClient);
+ mediaResource[k].push_back(MediaResource(static_cast<MedResType>(mediaResourceType),
+ static_cast<MedResSubType>(mediaResourceSubType),
+ mediaResourceValue));
+ threadArgs[k].mediaResource = mediaResource[k];
+ pthread_create(&pt[2 * k], nullptr, addResource, &threadArgs[k]);
+ pthread_create(&pt[2 * k + 1], nullptr, removeResource, &threadArgs[k]);
}
- for (i = 0; i < numThreads; ++i) {
+ for (int i = 0; i < numThreads; ++i) {
pthread_join(pt[i], nullptr);
}
@@ -265,14 +263,14 @@
int32_t mediaResourceSubType =
mFuzzedDataProvider->ConsumeIntegralInRange<int32_t>(kMinResourceType, kMaxResourceType);
uint64_t mediaResourceValue = mFuzzedDataProvider->ConsumeIntegral<uint64_t>();
- mediaResource.push_back(MediaResource(static_cast<MedResType>(mediaResourceType),
- static_cast<MedResSubType>(mediaResourceSubType),
- mediaResourceValue));
+ vector<MediaResourceParcel> mediaRes;
+ mediaRes.push_back(MediaResource(static_cast<MedResType>(mediaResourceType),
+ static_cast<MedResSubType>(mediaResourceSubType),
+ mediaResourceValue));
bool result;
- mService->reclaimResource(pidZero, mediaResource, &result);
- mService->removeResource(pidZero, getId(testClient), mediaResource);
+ mService->reclaimResource(pidZero, mediaRes, &result);
+ mService->removeResource(pidZero, getId(testClient), mediaRes);
mService->removeClient(pidZero, getId(testClient));
- mediaResource.clear();
}
void ResourceManagerServiceFuzzer::setServiceLog() {
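A minimal sketch of the per-thread argument fix above, with hypothetical types: each worker gets its own args slot, mirroring the threadArgs[k] / mediaResource[k] indexing in the fuzzer, so concurrently running threads never read a struct the spawning loop is about to overwrite.

    #include <cstdio>
    #include <pthread.h>
    #include <vector>

    struct WorkerArgs {
        int id;
        int value;
    };

    void* worker(void* arg) {
        const auto* args = static_cast<WorkerArgs*>(arg);
        std::printf("worker %d got %d\n", args->id, args->value);
        return nullptr;
    }

    int main() {
        constexpr int kNumThreads = 4;
        // One slot per thread: reusing a single struct would race once the loop
        // overwrites it before an already-started thread has read it.
        std::vector<WorkerArgs> args(kNumThreads);
        std::vector<pthread_t> threads(kNumThreads);
        for (int i = 0; i < kNumThreads; ++i) {
            args[i] = {i, i * 10};
            pthread_create(&threads[i], nullptr, worker, &args[i]);
        }
        for (int i = 0; i < kNumThreads; ++i) {
            pthread_join(threads[i], nullptr);
        }
        return 0;
    }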
diff --git a/services/tuner/Android.bp b/services/tuner/Android.bp
index 1dcfe53..be74368 100644
--- a/services/tuner/Android.bp
+++ b/services/tuner/Android.bp
@@ -129,14 +129,11 @@
"liblog",
"libtunerservice",
"libutils",
+ "tv_tuner_aidl_interface-ndk",
"tv_tuner_resource_manager_aidl_interface-ndk",
"tv_tuner_resource_manager_aidl_interface-cpp",
],
- static_libs: [
- "tv_tuner_aidl_interface-ndk",
- ],
-
init_rc: ["mediatuner.rc"],
cflags: [