Merge "Revert "Refactor AudioTrack callback to Interface""
diff --git a/camera/aidl/android/hardware/ICameraServiceProxy.aidl b/camera/aidl/android/hardware/ICameraServiceProxy.aidl
index bbb0289..3d78aef 100644
--- a/camera/aidl/android/hardware/ICameraServiceProxy.aidl
+++ b/camera/aidl/android/hardware/ICameraServiceProxy.aidl
@@ -37,8 +37,11 @@
oneway void notifyCameraState(in CameraSessionStats cameraSessionStats);
/**
- * Reports whether the top activity needs a rotate and crop override.
+ * Returns the necessary rotate and crop override for the top activity which
+ * will be one of ({@link android.hardware.camera2.CameraMetadata#SCALER_ROTATE_AND_CROP_NONE},
+ * {@link android.hardware.camera2.CameraMetadata#SCALER_ROTATE_AND_CROP_90},
+ * {@link android.hardware.camera2.CameraMetadata#SCALER_ROTATE_AND_CROP_180},
+ * {@link android.hardware.camera2.CameraMetadata#SCALER_ROTATE_AND_CROP_270}).
*/
- boolean isRotateAndCropOverrideNeeded(String packageName, int sensorOrientation,
- int lensFacing);
+ int getRotateAndCropOverride(String packageName, int lensFacing);
}
diff --git a/camera/ndk/include/camera/NdkCameraMetadataTags.h b/camera/ndk/include/camera/NdkCameraMetadataTags.h
index 7c728cf..abc63ac 100644
--- a/camera/ndk/include/camera/NdkCameraMetadataTags.h
+++ b/camera/ndk/include/camera/NdkCameraMetadataTags.h
@@ -2144,6 +2144,50 @@
*/
ACAMERA_FLASH_INFO_AVAILABLE = // byte (acamera_metadata_enum_android_flash_info_available_t)
ACAMERA_FLASH_INFO_START,
+ /**
+ * <p>Maximum flashlight brightness level.</p>
+ *
+ * <p>Type: int32</p>
+ *
+ * <p>This tag may appear in:
+ * <ul>
+ * <li>ACameraMetadata from ACameraManager_getCameraCharacteristics</li>
+ * </ul></p>
+ *
+ * <p>If this value is greater than 1, then the device supports controlling the
+ * flashlight brightness level via
+ * {android.hardware.camera2.CameraManager#setTorchStrengthLevel}.
+ * If this value is equal to 1, flashlight brightness control is not supported.
+ * This value will be -1 if the flash unit is not available.</p>
+ */
+ ACAMERA_FLASH_INFO_STRENGTH_MAXIMUM_LEVEL = // int32
+ ACAMERA_FLASH_INFO_START + 2,
+ /**
+ * <p>Default flashlight brightness level to be set via
+ * {android.hardware.camera2.CameraManager#setTorchStrengthLevel}.</p>
+ *
+ * <p>Type: int32</p>
+ *
+ * <p>This tag may appear in:
+ * <ul>
+ * <li>ACameraMetadata from ACameraManager_getCameraCharacteristics</li>
+ * </ul></p>
+ *
+     * <p>If the flash unit is available, this will be greater than or equal to 1 and less
+     * than or equal to <code>ACAMERA_FLASH_INFO_STRENGTH_MAXIMUM_LEVEL</code>.
+     * If the flash unit is not available, this will be set to -1.</p>
+     * <p>Setting flashlight brightness above the default level
+     * (i.e. <code>ACAMERA_FLASH_INFO_STRENGTH_DEFAULT_LEVEL</code>) may make the device more
+ * likely to reach thermal throttling conditions and slow down, or drain the
+ * battery quicker than normal. To minimize such issues, it is recommended to
+ * start the flashlight at this default brightness until a user explicitly requests
+ * a brighter level.</p>
+ *
+ * @see ACAMERA_FLASH_INFO_STRENGTH_DEFAULT_LEVEL
+ * @see ACAMERA_FLASH_INFO_STRENGTH_MAXIMUM_LEVEL
+ */
+ ACAMERA_FLASH_INFO_STRENGTH_DEFAULT_LEVEL = // int32
+ ACAMERA_FLASH_INFO_START + 3,
ACAMERA_FLASH_INFO_END,
/**
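Editorial note (not part of this patch): the two new characteristics above are read through the regular NDK metadata API. A minimal, illustrative sketch — assuming camera ID "0" and abbreviating error handling — might look like this:

```
// Illustrative sketch only; camera ID "0" is an assumption.
#include <camera/NdkCameraManager.h>
#include <camera/NdkCameraMetadata.h>
#include <camera/NdkCameraMetadataTags.h>
#include <stdio.h>

static void printTorchStrengthRange() {
    ACameraManager* mgr = ACameraManager_create();
    ACameraMetadata* chars = nullptr;
    if (ACameraManager_getCameraCharacteristics(mgr, "0", &chars) == ACAMERA_OK) {
        ACameraMetadata_const_entry maxEntry, defEntry;
        if (ACameraMetadata_getConstEntry(chars,
                ACAMERA_FLASH_INFO_STRENGTH_MAXIMUM_LEVEL, &maxEntry) == ACAMERA_OK &&
            ACameraMetadata_getConstEntry(chars,
                ACAMERA_FLASH_INFO_STRENGTH_DEFAULT_LEVEL, &defEntry) == ACAMERA_OK) {
            // A maximum level greater than 1 means torch strength control is supported.
            printf("torch strength: default=%d max=%d\n",
                   defEntry.data.i32[0], maxEntry.data.i32[0]);
        }
        ACameraMetadata_free(chars);
    }
    ACameraManager_delete(mgr);
}
```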
diff --git a/media/codec2/components/avc/C2SoftAvcDec.cpp b/media/codec2/components/avc/C2SoftAvcDec.cpp
index e8287f9..cc4517d 100644
--- a/media/codec2/components/avc/C2SoftAvcDec.cpp
+++ b/media/codec2/components/avc/C2SoftAvcDec.cpp
@@ -511,7 +511,7 @@
status_t C2SoftAvcDec::initDecoder() {
if (OK != createDecoder()) return UNKNOWN_ERROR;
mNumCores = MIN(getCpuCoreCount(), MAX_NUM_CORES);
- mStride = ALIGN32(mWidth);
+ mStride = ALIGN128(mWidth);
mSignalledError = false;
resetPlugin();
(void) setNumCores();
@@ -777,20 +777,20 @@
return C2_CORRUPTED;
}
if (mOutBlock &&
- (mOutBlock->width() != ALIGN32(mWidth) || mOutBlock->height() != mHeight)) {
+ (mOutBlock->width() != ALIGN128(mWidth) || mOutBlock->height() != mHeight)) {
mOutBlock.reset();
}
if (!mOutBlock) {
uint32_t format = HAL_PIXEL_FORMAT_YV12;
C2MemoryUsage usage = { C2MemoryUsage::CPU_READ, C2MemoryUsage::CPU_WRITE };
c2_status_t err =
- pool->fetchGraphicBlock(ALIGN32(mWidth), mHeight, format, usage, &mOutBlock);
+ pool->fetchGraphicBlock(ALIGN128(mWidth), mHeight, format, usage, &mOutBlock);
if (err != C2_OK) {
ALOGE("fetchGraphicBlock for Output failed with status %d", err);
return err;
}
ALOGV("provided (%dx%d) required (%dx%d)",
- mOutBlock->width(), mOutBlock->height(), ALIGN32(mWidth), mHeight);
+ mOutBlock->width(), mOutBlock->height(), ALIGN128(mWidth), mHeight);
}
return C2_OK;
@@ -928,7 +928,7 @@
if (0 < ps_decode_op->u4_pic_wd && 0 < ps_decode_op->u4_pic_ht) {
if (mHeaderDecoded == false) {
mHeaderDecoded = true;
- mStride = ALIGN32(ps_decode_op->u4_pic_wd);
+ mStride = ALIGN128(ps_decode_op->u4_pic_wd);
setParams(mStride, IVD_DECODE_FRAME);
}
if (ps_decode_op->u4_pic_wd != mWidth || ps_decode_op->u4_pic_ht != mHeight) {
diff --git a/media/codec2/components/avc/C2SoftAvcDec.h b/media/codec2/components/avc/C2SoftAvcDec.h
index 5c07d29..59d5184 100644
--- a/media/codec2/components/avc/C2SoftAvcDec.h
+++ b/media/codec2/components/avc/C2SoftAvcDec.h
@@ -38,7 +38,7 @@
#define ivdext_ctl_set_num_cores_op_t ih264d_ctl_set_num_cores_op_t
#define ivdext_ctl_get_vui_params_ip_t ih264d_ctl_get_vui_params_ip_t
#define ivdext_ctl_get_vui_params_op_t ih264d_ctl_get_vui_params_op_t
-#define ALIGN32(x) ((((x) + 31) >> 5) << 5)
+#define ALIGN128(x) ((((x) + 127) >> 7) << 7)
#define MAX_NUM_CORES 4
#define IVDEXT_CMD_CTL_SET_NUM_CORES \
(IVD_CONTROL_API_COMMAND_TYPE_T)IH264D_CMD_CTL_SET_NUM_CORES
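Editorial note (not part of this patch): both the old and the new macro round the width up to the next multiple of the alignment; only the alignment changes from 32 to 128, which is what the `fetchGraphicBlock` calls above now request. A small illustrative check:

```
// Illustrative only: how the rounding behaves for a few common widths.
#define ALIGN32(x)  ((((x) + 31) >> 5) << 5)
#define ALIGN128(x) ((((x) + 127) >> 7) << 7)

static_assert(ALIGN32(1080) == 1088, "rounds up to the next multiple of 32");
static_assert(ALIGN128(1080) == 1152, "rounds up to the next multiple of 128");
static_assert(ALIGN128(1920) == 1920, "already a multiple of 128, unchanged");
```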
diff --git a/media/codec2/components/gav1/C2SoftGav1Dec.cpp b/media/codec2/components/gav1/C2SoftGav1Dec.cpp
index f857e87..7bd3358 100644
--- a/media/codec2/components/gav1/C2SoftGav1Dec.cpp
+++ b/media/codec2/components/gav1/C2SoftGav1Dec.cpp
@@ -20,6 +20,7 @@
#include <C2Debug.h>
#include <C2PlatformSupport.h>
+#include <Codec2Mapper.h>
#include <SimpleC2Interface.h>
#include <log/log.h>
#include <media/stagefright/foundation/AUtils.h>
@@ -156,6 +157,42 @@
.withSetter(DefaultColorAspectsSetter)
.build());
+ addParameter(
+ DefineParam(mCodedColorAspects, C2_PARAMKEY_VUI_COLOR_ASPECTS)
+ .withDefault(new C2StreamColorAspectsInfo::input(
+ 0u, C2Color::RANGE_LIMITED, C2Color::PRIMARIES_UNSPECIFIED,
+ C2Color::TRANSFER_UNSPECIFIED, C2Color::MATRIX_UNSPECIFIED))
+ .withFields({
+ C2F(mCodedColorAspects, range).inRange(
+ C2Color::RANGE_UNSPECIFIED, C2Color::RANGE_OTHER),
+ C2F(mCodedColorAspects, primaries).inRange(
+ C2Color::PRIMARIES_UNSPECIFIED, C2Color::PRIMARIES_OTHER),
+ C2F(mCodedColorAspects, transfer).inRange(
+ C2Color::TRANSFER_UNSPECIFIED, C2Color::TRANSFER_OTHER),
+ C2F(mCodedColorAspects, matrix).inRange(
+ C2Color::MATRIX_UNSPECIFIED, C2Color::MATRIX_OTHER)
+ })
+ .withSetter(CodedColorAspectsSetter)
+ .build());
+
+ addParameter(
+ DefineParam(mColorAspects, C2_PARAMKEY_COLOR_ASPECTS)
+ .withDefault(new C2StreamColorAspectsInfo::output(
+ 0u, C2Color::RANGE_UNSPECIFIED, C2Color::PRIMARIES_UNSPECIFIED,
+ C2Color::TRANSFER_UNSPECIFIED, C2Color::MATRIX_UNSPECIFIED))
+ .withFields({
+ C2F(mColorAspects, range).inRange(
+ C2Color::RANGE_UNSPECIFIED, C2Color::RANGE_OTHER),
+ C2F(mColorAspects, primaries).inRange(
+ C2Color::PRIMARIES_UNSPECIFIED, C2Color::PRIMARIES_OTHER),
+ C2F(mColorAspects, transfer).inRange(
+ C2Color::TRANSFER_UNSPECIFIED, C2Color::TRANSFER_OTHER),
+ C2F(mColorAspects, matrix).inRange(
+ C2Color::MATRIX_UNSPECIFIED, C2Color::MATRIX_OTHER)
+ })
+ .withSetter(ColorAspectsSetter, mDefaultColorAspects, mCodedColorAspects)
+ .build());
+
// TODO: support more formats?
addParameter(DefineParam(mPixelFormat, C2_PARAMKEY_PIXEL_FORMAT)
.withConstValue(new C2StreamPixelFormatInfo::output(
@@ -218,6 +255,37 @@
return C2R::Ok();
}
+ static C2R CodedColorAspectsSetter(bool mayBlock, C2P<C2StreamColorAspectsInfo::input> &me) {
+ (void)mayBlock;
+ if (me.v.range > C2Color::RANGE_OTHER) {
+ me.set().range = C2Color::RANGE_OTHER;
+ }
+ if (me.v.primaries > C2Color::PRIMARIES_OTHER) {
+ me.set().primaries = C2Color::PRIMARIES_OTHER;
+ }
+ if (me.v.transfer > C2Color::TRANSFER_OTHER) {
+ me.set().transfer = C2Color::TRANSFER_OTHER;
+ }
+ if (me.v.matrix > C2Color::MATRIX_OTHER) {
+ me.set().matrix = C2Color::MATRIX_OTHER;
+ }
+ return C2R::Ok();
+ }
+
+ static C2R ColorAspectsSetter(bool mayBlock, C2P<C2StreamColorAspectsInfo::output> &me,
+ const C2P<C2StreamColorAspectsTuning::output> &def,
+ const C2P<C2StreamColorAspectsInfo::input> &coded) {
+ (void)mayBlock;
+ // take default values for all unspecified fields, and coded values for specified ones
+ me.set().range = coded.v.range == RANGE_UNSPECIFIED ? def.v.range : coded.v.range;
+ me.set().primaries = coded.v.primaries == PRIMARIES_UNSPECIFIED
+ ? def.v.primaries : coded.v.primaries;
+ me.set().transfer = coded.v.transfer == TRANSFER_UNSPECIFIED
+ ? def.v.transfer : coded.v.transfer;
+ me.set().matrix = coded.v.matrix == MATRIX_UNSPECIFIED ? def.v.matrix : coded.v.matrix;
+ return C2R::Ok();
+ }
+
static C2R ProfileLevelSetter(
bool mayBlock, C2P<C2StreamProfileLevelInfo::input> &me,
const C2P<C2StreamPictureSizeInfo::output> &size) {
@@ -232,6 +300,10 @@
return mDefaultColorAspects;
}
+ std::shared_ptr<C2StreamColorAspectsInfo::output> getColorAspects_l() {
+ return mColorAspects;
+ }
+
static C2R Hdr10PlusInfoInputSetter(bool mayBlock,
C2P<C2StreamHdr10PlusInfo::input> &me) {
(void)mayBlock;
@@ -254,6 +326,8 @@
std::shared_ptr<C2StreamColorInfo::output> mColorInfo;
std::shared_ptr<C2StreamPixelFormatInfo::output> mPixelFormat;
std::shared_ptr<C2StreamColorAspectsTuning::output> mDefaultColorAspects;
+ std::shared_ptr<C2StreamColorAspectsInfo::input> mCodedColorAspects;
+ std::shared_ptr<C2StreamColorAspectsInfo::output> mColorAspects;
std::shared_ptr<C2StreamHdr10PlusInfo::input> mHdr10PlusInfoInput;
std::shared_ptr<C2StreamHdr10PlusInfo::output> mHdr10PlusInfoOutput;
};
@@ -371,6 +445,10 @@
const std::shared_ptr<C2GraphicBlock> &block) {
std::shared_ptr<C2Buffer> buffer =
createGraphicBuffer(block, C2Rect(mWidth, mHeight));
+ {
+ IntfImpl::Lock lock = mIntf->lock();
+ buffer->setInfo(mIntf->getColorAspects_l());
+ }
auto fillWork = [buffer, index](const std::unique_ptr<C2Work> &work) {
uint32_t flags = 0;
if ((work->input.flags & C2FrameData::FLAG_END_OF_STREAM) &&
@@ -609,6 +687,38 @@
}
}
+void C2SoftGav1Dec::getVuiParams(const libgav1::DecoderBuffer *buffer) {
+ VuiColorAspects vuiColorAspects;
+ vuiColorAspects.primaries = buffer->color_primary;
+ vuiColorAspects.transfer = buffer->transfer_characteristics;
+ vuiColorAspects.coeffs = buffer->matrix_coefficients;
+ vuiColorAspects.fullRange = buffer->color_range;
+
+ // convert vui aspects to C2 values if changed
+ if (!(vuiColorAspects == mBitstreamColorAspects)) {
+ mBitstreamColorAspects = vuiColorAspects;
+ ColorAspects sfAspects;
+ C2StreamColorAspectsInfo::input codedAspects = { 0u };
+ ColorUtils::convertIsoColorAspectsToCodecAspects(
+ vuiColorAspects.primaries, vuiColorAspects.transfer, vuiColorAspects.coeffs,
+ vuiColorAspects.fullRange, sfAspects);
+ if (!C2Mapper::map(sfAspects.mPrimaries, &codedAspects.primaries)) {
+ codedAspects.primaries = C2Color::PRIMARIES_UNSPECIFIED;
+ }
+ if (!C2Mapper::map(sfAspects.mRange, &codedAspects.range)) {
+ codedAspects.range = C2Color::RANGE_UNSPECIFIED;
+ }
+ if (!C2Mapper::map(sfAspects.mMatrixCoeffs, &codedAspects.matrix)) {
+ codedAspects.matrix = C2Color::MATRIX_UNSPECIFIED;
+ }
+ if (!C2Mapper::map(sfAspects.mTransfer, &codedAspects.transfer)) {
+ codedAspects.transfer = C2Color::TRANSFER_UNSPECIFIED;
+ }
+ std::vector<std::unique_ptr<C2SettingResult>> failures;
+ mIntf->config({&codedAspects}, C2_MAY_BLOCK, &failures);
+ }
+}
+
bool C2SoftGav1Dec::outputBuffer(const std::shared_ptr<C2BlockPool> &pool,
const std::unique_ptr<C2Work> &work) {
if (!(work && pool)) return false;
@@ -651,6 +761,7 @@
}
}
+ getVuiParams(buffer);
if (!(buffer->image_format == libgav1::kImageFormatYuv420 ||
buffer->image_format == libgav1::kImageFormatMonochrome400)) {
ALOGE("image_format %d not supported", buffer->image_format);
@@ -666,12 +777,12 @@
uint32_t format = HAL_PIXEL_FORMAT_YV12;
if (buffer->bitdepth == 10) {
IntfImpl::Lock lock = mIntf->lock();
- std::shared_ptr<C2StreamColorAspectsTuning::output> defaultColorAspects =
- mIntf->getDefaultColorAspects_l();
+ std::shared_ptr<C2StreamColorAspectsInfo::output> codedColorAspects =
+ mIntf->getColorAspects_l();
- if (defaultColorAspects->primaries == C2Color::PRIMARIES_BT2020 &&
- defaultColorAspects->matrix == C2Color::MATRIX_BT2020 &&
- defaultColorAspects->transfer == C2Color::TRANSFER_ST2084) {
+ if (codedColorAspects->primaries == C2Color::PRIMARIES_BT2020 &&
+ codedColorAspects->matrix == C2Color::MATRIX_BT2020 &&
+ codedColorAspects->transfer == C2Color::TRANSFER_ST2084) {
if (buffer->image_format != libgav1::kImageFormatYuv420) {
ALOGE("Only YUV420 output is supported when targeting RGBA_1010102");
mSignalledError = true;
diff --git a/media/codec2/components/gav1/C2SoftGav1Dec.h b/media/codec2/components/gav1/C2SoftGav1Dec.h
index 555adc9..134fa0d 100644
--- a/media/codec2/components/gav1/C2SoftGav1Dec.h
+++ b/media/codec2/components/gav1/C2SoftGav1Dec.h
@@ -17,7 +17,10 @@
#ifndef ANDROID_C2_SOFT_GAV1_DEC_H_
#define ANDROID_C2_SOFT_GAV1_DEC_H_
+#include <media/stagefright/foundation/ColorUtils.h>
+
#include <SimpleC2Component.h>
+#include <C2Config.h>
#include "libgav1/src/gav1/decoder.h"
#include "libgav1/src/gav1/decoder_settings.h"
@@ -56,10 +59,32 @@
bool mSignalledOutputEos;
bool mSignalledError;
+  // Color aspects. These are ISO values and are used to detect changes in aspects, so that
+  // they are not converted to C2 values on every frame.
+ struct VuiColorAspects {
+ uint8_t primaries;
+ uint8_t transfer;
+ uint8_t coeffs;
+ uint8_t fullRange;
+
+ // default color aspects
+ VuiColorAspects()
+ : primaries(C2Color::PRIMARIES_UNSPECIFIED),
+ transfer(C2Color::TRANSFER_UNSPECIFIED),
+ coeffs(C2Color::MATRIX_UNSPECIFIED),
+ fullRange(C2Color::RANGE_UNSPECIFIED) { }
+
+ bool operator==(const VuiColorAspects &o) {
+ return primaries == o.primaries && transfer == o.transfer && coeffs == o.coeffs
+ && fullRange == o.fullRange;
+ }
+ } mBitstreamColorAspects;
+
struct timeval mTimeStart; // Time at the start of decode()
struct timeval mTimeEnd; // Time at the end of decode()
bool initDecoder();
+ void getVuiParams(const libgav1::DecoderBuffer *buffer);
void destroyDecoder();
void finishWork(uint64_t index, const std::unique_ptr<C2Work>& work,
const std::shared_ptr<C2GraphicBlock>& block);
diff --git a/media/codec2/components/hevc/C2SoftHevcDec.cpp b/media/codec2/components/hevc/C2SoftHevcDec.cpp
index 6bcf3a2..5f5b2ef 100644
--- a/media/codec2/components/hevc/C2SoftHevcDec.cpp
+++ b/media/codec2/components/hevc/C2SoftHevcDec.cpp
@@ -502,7 +502,7 @@
status_t C2SoftHevcDec::initDecoder() {
if (OK != createDecoder()) return UNKNOWN_ERROR;
mNumCores = MIN(getCpuCoreCount(), MAX_NUM_CORES);
- mStride = ALIGN32(mWidth);
+ mStride = ALIGN128(mWidth);
mSignalledError = false;
resetPlugin();
(void) setNumCores();
@@ -768,20 +768,20 @@
return C2_CORRUPTED;
}
if (mOutBlock &&
- (mOutBlock->width() != ALIGN32(mWidth) || mOutBlock->height() != mHeight)) {
+ (mOutBlock->width() != ALIGN128(mWidth) || mOutBlock->height() != mHeight)) {
mOutBlock.reset();
}
if (!mOutBlock) {
uint32_t format = HAL_PIXEL_FORMAT_YV12;
C2MemoryUsage usage = { C2MemoryUsage::CPU_READ, C2MemoryUsage::CPU_WRITE };
c2_status_t err =
- pool->fetchGraphicBlock(ALIGN32(mWidth), mHeight, format, usage, &mOutBlock);
+ pool->fetchGraphicBlock(ALIGN128(mWidth), mHeight, format, usage, &mOutBlock);
if (err != C2_OK) {
ALOGE("fetchGraphicBlock for Output failed with status %d", err);
return err;
}
ALOGV("provided (%dx%d) required (%dx%d)",
- mOutBlock->width(), mOutBlock->height(), ALIGN32(mWidth), mHeight);
+ mOutBlock->width(), mOutBlock->height(), ALIGN128(mWidth), mHeight);
}
return C2_OK;
@@ -917,7 +917,7 @@
if (0 < ps_decode_op->u4_pic_wd && 0 < ps_decode_op->u4_pic_ht) {
if (mHeaderDecoded == false) {
mHeaderDecoded = true;
- setParams(ALIGN32(ps_decode_op->u4_pic_wd), IVD_DECODE_FRAME);
+ setParams(ALIGN128(ps_decode_op->u4_pic_wd), IVD_DECODE_FRAME);
}
if (ps_decode_op->u4_pic_wd != mWidth || ps_decode_op->u4_pic_ht != mHeight) {
mWidth = ps_decode_op->u4_pic_wd;
diff --git a/media/codec2/components/hevc/C2SoftHevcDec.h b/media/codec2/components/hevc/C2SoftHevcDec.h
index b9b0a48..b9296e9 100644
--- a/media/codec2/components/hevc/C2SoftHevcDec.h
+++ b/media/codec2/components/hevc/C2SoftHevcDec.h
@@ -36,7 +36,7 @@
#define ivdext_ctl_set_num_cores_op_t ihevcd_cxa_ctl_set_num_cores_op_t
#define ivdext_ctl_get_vui_params_ip_t ihevcd_cxa_ctl_get_vui_params_ip_t
#define ivdext_ctl_get_vui_params_op_t ihevcd_cxa_ctl_get_vui_params_op_t
-#define ALIGN32(x) ((((x) + 31) >> 5) << 5)
+#define ALIGN128(x) ((((x) + 127) >> 7) << 7)
#define MAX_NUM_CORES 4
#define IVDEXT_CMD_CTL_SET_NUM_CORES \
(IVD_CONTROL_API_COMMAND_TYPE_T)IHEVCD_CXA_CMD_CTL_SET_NUM_CORES
diff --git a/media/codec2/components/mpeg2/C2SoftMpeg2Dec.cpp b/media/codec2/components/mpeg2/C2SoftMpeg2Dec.cpp
index b1cf388..5f9b30b 100644
--- a/media/codec2/components/mpeg2/C2SoftMpeg2Dec.cpp
+++ b/media/codec2/components/mpeg2/C2SoftMpeg2Dec.cpp
@@ -572,7 +572,7 @@
if (OK != createDecoder()) return UNKNOWN_ERROR;
mNumCores = MIN(getCpuCoreCount(), MAX_NUM_CORES);
- mStride = ALIGN32(mWidth);
+ mStride = ALIGN128(mWidth);
mSignalledError = false;
resetPlugin();
(void) setNumCores();
@@ -845,20 +845,20 @@
return C2_CORRUPTED;
}
if (mOutBlock &&
- (mOutBlock->width() != ALIGN32(mWidth) || mOutBlock->height() != mHeight)) {
+ (mOutBlock->width() != ALIGN128(mWidth) || mOutBlock->height() != mHeight)) {
mOutBlock.reset();
}
if (!mOutBlock) {
uint32_t format = HAL_PIXEL_FORMAT_YV12;
C2MemoryUsage usage = { C2MemoryUsage::CPU_READ, C2MemoryUsage::CPU_WRITE };
c2_status_t err =
- pool->fetchGraphicBlock(ALIGN32(mWidth), mHeight, format, usage, &mOutBlock);
+ pool->fetchGraphicBlock(ALIGN128(mWidth), mHeight, format, usage, &mOutBlock);
if (err != C2_OK) {
ALOGE("fetchGraphicBlock for Output failed with status %d", err);
return err;
}
ALOGV("provided (%dx%d) required (%dx%d)",
- mOutBlock->width(), mOutBlock->height(), ALIGN32(mWidth), mHeight);
+ mOutBlock->width(), mOutBlock->height(), ALIGN128(mWidth), mHeight);
}
return C2_OK;
diff --git a/media/codec2/components/mpeg2/C2SoftMpeg2Dec.h b/media/codec2/components/mpeg2/C2SoftMpeg2Dec.h
index fd66304a..8a29c14 100644
--- a/media/codec2/components/mpeg2/C2SoftMpeg2Dec.h
+++ b/media/codec2/components/mpeg2/C2SoftMpeg2Dec.h
@@ -37,7 +37,7 @@
#define ivdext_ctl_set_num_cores_op_t impeg2d_ctl_set_num_cores_op_t
#define ivdext_ctl_get_seq_info_ip_t impeg2d_ctl_get_seq_info_ip_t
#define ivdext_ctl_get_seq_info_op_t impeg2d_ctl_get_seq_info_op_t
-#define ALIGN32(x) ((((x) + 31) >> 5) << 5)
+#define ALIGN128(x) ((((x) + 127) >> 7) << 7)
#define MAX_NUM_CORES 4
#define IVDEXT_CMD_CTL_SET_NUM_CORES \
(IVD_CONTROL_API_COMMAND_TYPE_T)IMPEG2D_CMD_CTL_SET_NUM_CORES
diff --git a/media/codec2/vndk/platform/C2BqBuffer.cpp b/media/codec2/vndk/platform/C2BqBuffer.cpp
index 169de0c..01995fd 100644
--- a/media/codec2/vndk/platform/C2BqBuffer.cpp
+++ b/media/codec2/vndk/platform/C2BqBuffer.cpp
@@ -16,6 +16,7 @@
//#define LOG_NDEBUG 0
#define LOG_TAG "C2BqBuffer"
+#include <android/hardware_buffer.h>
#include <utils/Log.h>
#include <ui/BufferQueueDefs.h>
@@ -171,6 +172,91 @@
return stamp;
}
+// Do not rely on the AHardwareBuffer module for GraphicBuffer handling, since the AHardwareBuffer
+// module is linked into the framework, which could have a different GraphicBuffer implementation
+// than the mainline/vndk implementation. (See b/203347494.)
+//
+// The b2h/h2b conversions between HardwareBuffer and GraphicBuffer cannot be used, because they
+// depend on the AHardwareBuffer module for that conversion. The hgbp_-prefixed methods below are
+// added to be used instead of b2h/h2b.
+//
+// TODO: Remove the dependency on the existing AHwB module and clean up the conversions (both the
+// conversions here and the h2b/b2h conversions).
+const GraphicBuffer* hgbp_AHBuffer_to_GraphicBuffer(const AHardwareBuffer* buffer) {
+ return GraphicBuffer::fromAHardwareBuffer(buffer);
+}
+
+int hgbp_createFromHandle(const AHardwareBuffer_Desc* desc,
+ const native_handle_t* handle,
+ sp<GraphicBuffer> *outBuffer) {
+
+ if (!desc || !handle || !outBuffer) return ::android::BAD_VALUE;
+ if (desc->rfu0 != 0 || desc->rfu1 != 0) return ::android::BAD_VALUE;
+ if (desc->format == AHARDWAREBUFFER_FORMAT_BLOB && desc->height != 1)
+ return ::android::BAD_VALUE;
+
+ const int format = uint32_t(desc->format);
+ const uint64_t usage = uint64_t(desc->usage);
+ sp<GraphicBuffer> gbuffer(new GraphicBuffer(handle,
+ GraphicBuffer::HandleWrapMethod::CLONE_HANDLE,
+ desc->width, desc->height,
+ format, desc->layers, usage, desc->stride));
+ status_t err = gbuffer->initCheck();
+ if (err != 0 || gbuffer->handle == 0) return err;
+
+ *outBuffer = gbuffer;
+
+ return ::android::NO_ERROR;
+}
+
+void hgbp_describe(const AHardwareBuffer* buffer,
+ AHardwareBuffer_Desc* outDesc) {
+ if (!buffer || !outDesc) return;
+
+ const GraphicBuffer* gbuffer = hgbp_AHBuffer_to_GraphicBuffer(buffer);
+
+ outDesc->width = gbuffer->getWidth();
+ outDesc->height = gbuffer->getHeight();
+ outDesc->layers = gbuffer->getLayerCount();
+ outDesc->format = uint32_t(gbuffer->getPixelFormat());
+ outDesc->usage = uint64_t(gbuffer->getUsage());
+ outDesc->stride = gbuffer->getStride();
+ outDesc->rfu0 = 0;
+ outDesc->rfu1 = 0;
+}
+
+
+bool hgbp_h2b(HBuffer const& from, sp<GraphicBuffer>* to) {
+ AHardwareBuffer_Desc const* desc =
+ reinterpret_cast<AHardwareBuffer_Desc const*>(
+ from.description.data());
+ native_handle_t const* handle = from.nativeHandle;
+ if (hgbp_createFromHandle(desc, handle, to) != ::android::OK) {
+ return false;
+ }
+ return true;
+}
+
+bool hgbp_b2h(sp<GraphicBuffer> const& from, HBuffer* to,
+ uint32_t* toGenerationNumber) {
+ if (!from) {
+ return false;
+ }
+ AHardwareBuffer* hwBuffer = from->toAHardwareBuffer();
+ to->nativeHandle.setTo(
+ const_cast<native_handle_t*>(from->handle),
+ false);
+ hgbp_describe(
+ hwBuffer,
+ reinterpret_cast<AHardwareBuffer_Desc*>(to->description.data()));
+ if (toGenerationNumber) {
+ *toGenerationNumber = from->getGenerationNumber();
+ }
+ return true;
+}
+
+// End of hgbp methods for GraphicBuffer creation.
+
bool getGenerationNumberAndUsage(const sp<HGraphicBufferProducer> &producer,
uint32_t *generation, uint64_t *usage) {
status_t status{};
@@ -211,7 +297,7 @@
HBuffer const& hBuffer,
uint32_t generationNumber){
if (h2b(hStatus, &status) &&
- h2b(hBuffer, &slotBuffer) &&
+ hgbp_h2b(hBuffer, &slotBuffer) &&
slotBuffer) {
*generation = generationNumber;
*usage = slotBuffer->getUsage();
@@ -402,7 +488,7 @@
HBuffer const& hBuffer,
uint32_t generationNumber){
if (h2b(hStatus, &status) &&
- h2b(hBuffer, &slotBuffer) &&
+ hgbp_h2b(hBuffer, &slotBuffer) &&
slotBuffer) {
slotBuffer->setGenerationNumber(generationNumber);
outGeneration = generationNumber;
@@ -804,7 +890,7 @@
HBuffer hBuffer{};
uint32_t hGenerationNumber{};
- if (!b2h(graphicBuffer, &hBuffer, &hGenerationNumber)) {
+ if (!hgbp_b2h(graphicBuffer, &hBuffer, &hGenerationNumber)) {
ALOGD("I to O conversion failed");
return -1;
}
diff --git a/media/libaaudio/fuzzer/Android.bp b/media/libaaudio/fuzzer/Android.bp
new file mode 100644
index 0000000..b19376d
--- /dev/null
+++ b/media/libaaudio/fuzzer/Android.bp
@@ -0,0 +1,63 @@
+/*
+ * Copyright (C) 2021 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at:
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+cc_fuzz {
+ name: "libaaudio_fuzzer",
+ srcs: [
+ "libaaudio_fuzzer.cpp",
+ ],
+ header_libs: [
+ "libaaudio_headers",
+ ],
+ shared_libs: [
+ "libbinder",
+ "libaudiomanager",
+ "libaudiopolicy",
+ "libaudioclient_aidl_conversion",
+ ],
+ static_libs: [
+ "android.media.audio.common.types-V1-cpp",
+ "liblog",
+ "libutils",
+ "libcutils",
+ "libaaudio",
+ "libjsoncpp",
+ "libbase_ndk",
+ "libcgrouprc",
+ "libaudioutils",
+ "libaudioclient",
+ "aaudio-aidl-cpp",
+ "libmedia_helper",
+ "libmediametrics",
+ "libprocessgroup",
+ "av-types-aidl-cpp",
+ "libaaudio_internal",
+ "libcgrouprc_format",
+ "audiopolicy-aidl-cpp",
+ "audioflinger-aidl-cpp",
+ "audiopolicy-types-aidl-cpp",
+ "audioclient-types-aidl-cpp",
+ "shared-file-region-aidl-cpp",
+ "framework-permission-aidl-cpp",
+ "mediametricsservice-aidl-cpp",
+ ],
+ fuzz_config: {
+ cc: [
+ "android-media-fuzzing-reports@google.com",
+ ],
+ componentid: 155276,
+ },
+}
diff --git a/media/libaaudio/fuzzer/README.md b/media/libaaudio/fuzzer/README.md
new file mode 100644
index 0000000..4ba15c5
--- /dev/null
+++ b/media/libaaudio/fuzzer/README.md
@@ -0,0 +1,77 @@
+# Fuzzer for libaaudio
+
+## Plugin Design Considerations
+The fuzzer plugin for `libaaudio` is designed based on an understanding of the
+source code and tries to achieve the following:
+
+##### Maximize code coverage
+The configuration parameters are not hardcoded, but instead selected based on
+incoming data. This ensures more code paths are reached by the fuzzer.
+
+The fuzzer assigns values to the following parameters and passes them on to libaaudio:
+1. Device Id (parameter name: `deviceId`)
+2. Sampling Rate (parameter name: `sampleRate`)
+3. Number of channels (parameter name: `channelCount`)
+4. Audio Travel Direction (parameter name: `direction`)
+5. Audio Format (parameter name: `format`)
+6. Audio Sharing Mode (parameter name: `sharingMode`)
+7. Audio Usage (parameter name: `usage`)
+8. Audio Content type (parameter name: `contentType`)
+9. Audio Input Preset (parameter name: `inputPreset`)
+10. Audio Privacy Sensitivity (parameter name: `privacySensitive`)
+11. Buffer Capacity In Frames (parameter name: `frames`)
+12. Performance Mode (parameter name: `mode`)
+13. Allowed Capture Policy (parameter name: `allowedCapturePolicy`)
+14. Session Id (parameter name: `sessionId`)
+15. Frames per Data Callback (parameter name: `framesPerDataCallback`)
+16. MMap Policy (parameter name: `policy`)
+
+| Parameter| Valid Values| Configured Value|
+|------------- |-------------| ----- |
+| `deviceId` | Any value of type `int32_t` | Value obtained from FuzzedDataProvider |
+| `sampleRate` | Any value of type `int32_t` | Value obtained from FuzzedDataProvider |
+| `channelCount` | Any value of type `int32_t` | Value obtained from FuzzedDataProvider |
+| `direction` | 0. `AAUDIO_DIRECTION_OUTPUT` 1. `AAUDIO_DIRECTION_INPUT` | Value obtained from FuzzedDataProvider |
+| `format` | 0. `AAUDIO_FORMAT_INVALID` 1. `AAUDIO_FORMAT_UNSPECIFIED` 2. `AAUDIO_FORMAT_PCM_I16` 3. `AAUDIO_FORMAT_PCM_FLOAT` | Value obtained from FuzzedDataProvider |
+| `sharingMode` | 0. `AAUDIO_SHARING_MODE_EXCLUSIVE` 1. `AAUDIO_SHARING_MODE_SHARED` | Value obtained from FuzzedDataProvider |
+| `usage` | 0. `AAUDIO_USAGE_MEDIA` 1. `AAUDIO_USAGE_VOICE_COMMUNICATION` 2. `AAUDIO_USAGE_VOICE_COMMUNICATION_SIGNALLING` 3. `AAUDIO_USAGE_ALARM` 4. `AAUDIO_USAGE_NOTIFICATION` 5. `AAUDIO_USAGE_NOTIFICATION_RINGTONE` 6. `AAUDIO_USAGE_NOTIFICATION_EVENT` 7. `AAUDIO_USAGE_ASSISTANCE_ACCESSIBILITY` 8. `AAUDIO_USAGE_ASSISTANCE_NAVIGATION_GUIDANCE` 9. `AAUDIO_USAGE_ASSISTANCE_SONIFICATION` 10. `AAUDIO_USAGE_GAME` 11. `AAUDIO_USAGE_ASSISTANT` 12. `AAUDIO_SYSTEM_USAGE_EMERGENCY` 13. `AAUDIO_SYSTEM_USAGE_SAFETY` 14. `AAUDIO_SYSTEM_USAGE_VEHICLE_STATUS` 15. `AAUDIO_SYSTEM_USAGE_ANNOUNCEMENT` | Value obtained from FuzzedDataProvider |
+| `contentType` | 0. `AAUDIO_CONTENT_TYPE_SPEECH` 1. `AAUDIO_CONTENT_TYPE_MUSIC` 2. `AAUDIO_CONTENT_TYPE_MOVIE` 3. `AAUDIO_CONTENT_TYPE_SONIFICATION` | Value obtained from FuzzedDataProvider |
+| `inputPreset` | 0. `AAUDIO_INPUT_PRESET_GENERIC` 1. `AAUDIO_INPUT_PRESET_CAMCORDER` 2. `AAUDIO_INPUT_PRESET_VOICE_RECOGNITION` 3. `AAUDIO_INPUT_PRESET_VOICE_COMMUNICATION` 4. `AAUDIO_INPUT_PRESET_UNPROCESSED` 5. `AAUDIO_INPUT_PRESET_VOICE_PERFORMANCE` | Value obtained from FuzzedDataProvider |
+| `privacySensitive` | 0. `true` 1. `false` | Value obtained from FuzzedDataProvider |
+| `frames` | Any value of type `int32_t` | Value obtained from FuzzedDataProvider |
+| `mode` | 0. `AAUDIO_PERFORMANCE_MODE_NONE` 1. `AAUDIO_PERFORMANCE_MODE_POWER_SAVING` 2. `AAUDIO_PERFORMANCE_MODE_LOW_LATENCY` | Value obtained from FuzzedDataProvider |
+| `allowedCapturePolicy` | 0. `AAUDIO_ALLOW_CAPTURE_BY_ALL` 1. `AAUDIO_ALLOW_CAPTURE_BY_SYSTEM` 2. `AAUDIO_ALLOW_CAPTURE_BY_NONE` | Value obtained from FuzzedDataProvider |
+| `sessionId` | 0. `AAUDIO_SESSION_ID_NONE` 1. `AAUDIO_SESSION_ID_ALLOCATE` | Value obtained from FuzzedDataProvider |
+| `framesPerDataCallback` | Any value of type `int32_t` | Value obtained from FuzzedDataProvider |
+| `policy` | 0. `AAUDIO_POLICY_NEVER` 1. `AAUDIO_POLICY_AUTO` 2. `AAUDIO_POLICY_ALWAYS` | Value obtained from FuzzedDataProvider |
+
+This also ensures that the plugin is always deterministic for any given input.
+
+##### Maximize utilization of input data
+The plugin feeds the entire input data to the module.
+This ensures that the plugin tolerates any kind of input (empty, huge,
+malformed, etc.), doesn't `exit()` on any input, and thereby increases the
+chance of identifying vulnerabilities.
+
+## Build
+
+This describes the steps to build the libaaudio_fuzzer binary.
+
+### Android
+
+#### Steps to build
+Build the fuzzer
+```
+ $ mm -j$(nproc) libaaudio_fuzzer
+```
+#### Steps to run
+
+To run on device
+```
+ $ adb sync data
+ $ adb shell /data/fuzz/arm64/libaaudio_fuzzer/libaaudio_fuzzer
+```
+
+## References:
+ * http://llvm.org/docs/LibFuzzer.html
+ * https://github.com/google/oss-fuzz
diff --git a/media/libaaudio/fuzzer/libaaudio_fuzzer.cpp b/media/libaaudio/fuzzer/libaaudio_fuzzer.cpp
new file mode 100644
index 0000000..1167bb0
--- /dev/null
+++ b/media/libaaudio/fuzzer/libaaudio_fuzzer.cpp
@@ -0,0 +1,289 @@
+/*
+ * Copyright (C) 2021 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at:
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ */
+
+#include "aaudio/AAudio.h"
+#include "aaudio/AAudioTesting.h"
+#include <fuzzer/FuzzedDataProvider.h>
+
+constexpr int32_t kRandomStringLength = 256;
+
+constexpr int64_t kNanosPerMillisecond = 1000 * 1000;
+
+constexpr aaudio_direction_t kDirections[] = {
+ AAUDIO_DIRECTION_OUTPUT, AAUDIO_DIRECTION_INPUT, AAUDIO_UNSPECIFIED};
+
+constexpr aaudio_performance_mode_t kPerformanceModes[] = {
+ AAUDIO_PERFORMANCE_MODE_NONE, AAUDIO_PERFORMANCE_MODE_POWER_SAVING,
+ AAUDIO_PERFORMANCE_MODE_LOW_LATENCY, AAUDIO_UNSPECIFIED};
+
+constexpr aaudio_format_t kFormats[] = {
+ AAUDIO_FORMAT_INVALID, AAUDIO_FORMAT_UNSPECIFIED,
+ AAUDIO_FORMAT_PCM_I16, AAUDIO_FORMAT_PCM_FLOAT,
+ AAUDIO_FORMAT_PCM_I24_PACKED, AAUDIO_FORMAT_PCM_I32};
+
+constexpr aaudio_sharing_mode_t kSharingModes[] = {
+ AAUDIO_SHARING_MODE_EXCLUSIVE, AAUDIO_SHARING_MODE_SHARED};
+
+constexpr int32_t kSampleRates[] = {AAUDIO_UNSPECIFIED,
+ 8000,
+ 11025,
+ 16000,
+ 22050,
+ 32000,
+ 44100,
+ 48000,
+ 88200,
+ 96000};
+
+constexpr aaudio_usage_t kUsages[] = {
+ AAUDIO_USAGE_MEDIA,
+ AAUDIO_USAGE_VOICE_COMMUNICATION,
+ AAUDIO_USAGE_VOICE_COMMUNICATION_SIGNALLING,
+ AAUDIO_USAGE_ALARM,
+ AAUDIO_USAGE_NOTIFICATION,
+ AAUDIO_USAGE_NOTIFICATION_RINGTONE,
+ AAUDIO_USAGE_NOTIFICATION_EVENT,
+ AAUDIO_USAGE_ASSISTANCE_ACCESSIBILITY,
+ AAUDIO_USAGE_ASSISTANCE_NAVIGATION_GUIDANCE,
+ AAUDIO_USAGE_ASSISTANCE_SONIFICATION,
+ AAUDIO_USAGE_GAME,
+ AAUDIO_USAGE_ASSISTANT,
+ AAUDIO_SYSTEM_USAGE_EMERGENCY,
+ AAUDIO_SYSTEM_USAGE_SAFETY,
+ AAUDIO_SYSTEM_USAGE_VEHICLE_STATUS,
+ AAUDIO_SYSTEM_USAGE_ANNOUNCEMENT,
+ AAUDIO_UNSPECIFIED};
+
+constexpr aaudio_content_type_t kContentTypes[] = {
+ AAUDIO_CONTENT_TYPE_SPEECH, AAUDIO_CONTENT_TYPE_MUSIC,
+ AAUDIO_CONTENT_TYPE_MOVIE, AAUDIO_CONTENT_TYPE_SONIFICATION,
+ AAUDIO_UNSPECIFIED};
+
+constexpr aaudio_input_preset_t kInputPresets[] = {
+ AAUDIO_INPUT_PRESET_GENERIC,
+ AAUDIO_INPUT_PRESET_CAMCORDER,
+ AAUDIO_INPUT_PRESET_VOICE_RECOGNITION,
+ AAUDIO_INPUT_PRESET_VOICE_COMMUNICATION,
+ AAUDIO_INPUT_PRESET_UNPROCESSED,
+ AAUDIO_INPUT_PRESET_VOICE_PERFORMANCE,
+ AAUDIO_UNSPECIFIED};
+
+constexpr aaudio_allowed_capture_policy_t kAllowedCapturePolicies[] = {
+ AAUDIO_ALLOW_CAPTURE_BY_ALL, AAUDIO_ALLOW_CAPTURE_BY_SYSTEM,
+ AAUDIO_ALLOW_CAPTURE_BY_NONE, AAUDIO_UNSPECIFIED};
+
+constexpr aaudio_session_id_t kSessionIds[] = {
+ AAUDIO_SESSION_ID_NONE, AAUDIO_SESSION_ID_ALLOCATE, AAUDIO_UNSPECIFIED};
+
+constexpr aaudio_policy_t kPolicies[] = {
+ AAUDIO_POLICY_NEVER, AAUDIO_POLICY_AUTO, AAUDIO_POLICY_ALWAYS,
+ AAUDIO_UNSPECIFIED};
+
+class LibAaudioFuzzer {
+public:
+ ~LibAaudioFuzzer() { deInit(); }
+ bool init();
+ void process(const uint8_t *data, size_t size);
+ void deInit();
+
+private:
+ AAudioStreamBuilder *mAaudioBuilder = nullptr;
+ AAudioStream *mAaudioStream = nullptr;
+};
+
+bool LibAaudioFuzzer::init() {
+ aaudio_result_t result = AAudio_createStreamBuilder(&mAaudioBuilder);
+ if ((result != AAUDIO_OK) || (!mAaudioBuilder)) {
+ return false;
+ }
+ return true;
+}
+
+void LibAaudioFuzzer::process(const uint8_t *data, size_t size) {
+ FuzzedDataProvider fdp(data, size);
+ aaudio_performance_mode_t mode =
+ fdp.PickValueInArray({fdp.PickValueInArray(kPerformanceModes),
+ fdp.ConsumeIntegral<int32_t>()});
+ AAudioStreamBuilder_setPerformanceMode(mAaudioBuilder, mode);
+
+ int32_t deviceId = fdp.PickValueInArray(
+ {AAUDIO_UNSPECIFIED, fdp.ConsumeIntegral<int32_t>()});
+ AAudioStreamBuilder_setDeviceId(mAaudioBuilder, deviceId);
+
+ std::string packageName = fdp.PickValueInArray<std::string>(
+ {"android.nativemedia.aaudio", "android.app.appops.cts",
+ fdp.ConsumeRandomLengthString(kRandomStringLength)});
+ AAudioStreamBuilder_setPackageName(mAaudioBuilder, packageName.c_str());
+
+ std::string attributionTag =
+ fdp.ConsumeRandomLengthString(kRandomStringLength);
+ AAudioStreamBuilder_setAttributionTag(mAaudioBuilder, attributionTag.c_str());
+
+ int32_t sampleRate = fdp.PickValueInArray(kSampleRates);
+ AAudioStreamBuilder_setSampleRate(mAaudioBuilder, sampleRate);
+
+ int32_t channelCount = fdp.PickValueInArray(
+ {AAUDIO_UNSPECIFIED, fdp.ConsumeIntegral<int32_t>()});
+ AAudioStreamBuilder_setChannelCount(mAaudioBuilder, channelCount);
+
+ aaudio_direction_t direction = fdp.PickValueInArray(
+ {fdp.PickValueInArray(kDirections), fdp.ConsumeIntegral<int32_t>()});
+ AAudioStreamBuilder_setDirection(mAaudioBuilder, direction);
+
+ aaudio_format_t format = fdp.PickValueInArray(
+ {fdp.PickValueInArray(kFormats), fdp.ConsumeIntegral<int32_t>()});
+ AAudioStreamBuilder_setFormat(mAaudioBuilder, format);
+
+ aaudio_sharing_mode_t sharingMode = fdp.PickValueInArray(
+ {fdp.PickValueInArray(kSharingModes), fdp.ConsumeIntegral<int32_t>()});
+ AAudioStreamBuilder_setSharingMode(mAaudioBuilder, sharingMode);
+
+ aaudio_usage_t usage = fdp.PickValueInArray(
+ {fdp.PickValueInArray(kUsages), fdp.ConsumeIntegral<int32_t>()});
+ AAudioStreamBuilder_setUsage(mAaudioBuilder, usage);
+
+ aaudio_content_type_t contentType = fdp.PickValueInArray(
+ {fdp.PickValueInArray(kContentTypes), fdp.ConsumeIntegral<int32_t>()});
+ AAudioStreamBuilder_setContentType(mAaudioBuilder, contentType);
+
+ aaudio_input_preset_t inputPreset = fdp.PickValueInArray(
+ {fdp.PickValueInArray(kInputPresets), fdp.ConsumeIntegral<int32_t>()});
+ AAudioStreamBuilder_setInputPreset(mAaudioBuilder, inputPreset);
+
+ bool privacySensitive = fdp.ConsumeBool();
+ AAudioStreamBuilder_setPrivacySensitive(mAaudioBuilder, privacySensitive);
+
+ int32_t frames = fdp.PickValueInArray(
+ {AAUDIO_UNSPECIFIED, fdp.ConsumeIntegral<int32_t>()});
+ AAudioStreamBuilder_setBufferCapacityInFrames(mAaudioBuilder, frames);
+
+ aaudio_allowed_capture_policy_t allowedCapturePolicy =
+ fdp.PickValueInArray({fdp.PickValueInArray(kAllowedCapturePolicies),
+ fdp.ConsumeIntegral<int32_t>()});
+ AAudioStreamBuilder_setAllowedCapturePolicy(mAaudioBuilder,
+ allowedCapturePolicy);
+
+ aaudio_session_id_t sessionId = fdp.PickValueInArray(
+ {fdp.PickValueInArray(kSessionIds), fdp.ConsumeIntegral<int32_t>()});
+ AAudioStreamBuilder_setSessionId(mAaudioBuilder, sessionId);
+
+ AAudioStreamBuilder_setDataCallback(mAaudioBuilder, nullptr, nullptr);
+ AAudioStreamBuilder_setErrorCallback(mAaudioBuilder, nullptr, nullptr);
+
+ int32_t framesPerDataCallback = fdp.PickValueInArray(
+ {AAUDIO_UNSPECIFIED, fdp.ConsumeIntegral<int32_t>()});
+ AAudioStreamBuilder_setFramesPerDataCallback(mAaudioBuilder,
+ framesPerDataCallback);
+
+ aaudio_policy_t policy = fdp.PickValueInArray(
+ {fdp.PickValueInArray(kPolicies), fdp.ConsumeIntegral<int32_t>()});
+ AAudio_setMMapPolicy(policy);
+ (void)AAudio_getMMapPolicy();
+
+ aaudio_result_t result =
+ AAudioStreamBuilder_openStream(mAaudioBuilder, &mAaudioStream);
+ if ((result != AAUDIO_OK) || (!mAaudioStream)) {
+ return;
+ }
+
+ int32_t framesPerBurst = AAudioStream_getFramesPerBurst(mAaudioStream);
+ uint8_t numberOfBursts = fdp.ConsumeIntegral<uint8_t>();
+ int32_t maxInputFrames = numberOfBursts * framesPerBurst;
+ int32_t requestedBufferSize =
+ fdp.ConsumeIntegral<uint16_t>() * framesPerBurst;
+ AAudioStream_setBufferSizeInFrames(mAaudioStream, requestedBufferSize);
+
+ int64_t position = 0, nanoseconds = 0;
+ AAudioStream_getTimestamp(mAaudioStream, CLOCK_MONOTONIC, &position,
+ &nanoseconds);
+
+ AAudioStream_requestStart(mAaudioStream);
+
+ aaudio_format_t actualFormat = AAudioStream_getFormat(mAaudioStream);
+ int32_t actualChannelCount = AAudioStream_getChannelCount(mAaudioStream);
+
+ int32_t count = fdp.ConsumeIntegral<int32_t>();
+ direction = AAudioStream_getDirection(mAaudioStream);
+ framesPerDataCallback = AAudioStream_getFramesPerDataCallback(mAaudioStream);
+
+ if (actualFormat == AAUDIO_FORMAT_PCM_I16) {
+ std::vector<int16_t> inputShortData(maxInputFrames * actualChannelCount,
+ 0x0);
+ if (direction == AAUDIO_DIRECTION_INPUT) {
+ AAudioStream_read(mAaudioStream, inputShortData.data(),
+ framesPerDataCallback, count * kNanosPerMillisecond);
+ } else if (direction == AAUDIO_DIRECTION_OUTPUT) {
+ AAudioStream_write(mAaudioStream, inputShortData.data(),
+ framesPerDataCallback, count * kNanosPerMillisecond);
+ }
+ } else if (actualFormat == AAUDIO_FORMAT_PCM_FLOAT) {
+ std::vector<float> inputFloatData(maxInputFrames * actualChannelCount, 0x0);
+ if (direction == AAUDIO_DIRECTION_INPUT) {
+ AAudioStream_read(mAaudioStream, inputFloatData.data(),
+ framesPerDataCallback, count * kNanosPerMillisecond);
+ } else if (direction == AAUDIO_DIRECTION_OUTPUT) {
+ AAudioStream_write(mAaudioStream, inputFloatData.data(),
+ framesPerDataCallback, count * kNanosPerMillisecond);
+ }
+ }
+
+ aaudio_stream_state_t state = AAUDIO_STREAM_STATE_UNKNOWN;
+ AAudioStream_waitForStateChange(mAaudioStream, AAUDIO_STREAM_STATE_UNKNOWN,
+ &state, count * kNanosPerMillisecond);
+ (void)AAudio_convertStreamStateToText(state);
+
+ (void)AAudioStream_getUsage(mAaudioStream);
+ (void)AAudioStream_getSampleRate(mAaudioStream);
+ (void)AAudioStream_getState(mAaudioStream);
+ (void)AAudioStream_getSamplesPerFrame(mAaudioStream);
+ (void)AAudioStream_getContentType(mAaudioStream);
+ (void)AAudioStream_getInputPreset(mAaudioStream);
+ (void)AAudioStream_isPrivacySensitive(mAaudioStream);
+ (void)AAudioStream_getAllowedCapturePolicy(mAaudioStream);
+ (void)AAudioStream_getPerformanceMode(mAaudioStream);
+ (void)AAudioStream_getDeviceId(mAaudioStream);
+ (void)AAudioStream_getSharingMode(mAaudioStream);
+ (void)AAudioStream_getSessionId(mAaudioStream);
+ (void)AAudioStream_getFramesRead(mAaudioStream);
+ (void)AAudioStream_getFramesWritten(mAaudioStream);
+ (void)AAudioStream_getXRunCount(mAaudioStream);
+ (void)AAudioStream_getBufferCapacityInFrames(mAaudioStream);
+ (void)AAudioStream_getBufferSizeInFrames(mAaudioStream);
+ (void)AAudioStream_isMMapUsed(mAaudioStream);
+
+ AAudioStream_requestPause(mAaudioStream);
+ AAudioStream_requestFlush(mAaudioStream);
+ AAudioStream_release(mAaudioStream);
+ AAudioStream_requestStop(mAaudioStream);
+}
+
+void LibAaudioFuzzer::deInit() {
+ if (mAaudioBuilder) {
+ AAudioStreamBuilder_delete(mAaudioBuilder);
+ }
+ if (mAaudioStream) {
+ AAudioStream_close(mAaudioStream);
+ }
+}
+
+extern "C" int LLVMFuzzerTestOneInput(const uint8_t *data, size_t size) {
+ LibAaudioFuzzer libAaudioFuzzer;
+ if (libAaudioFuzzer.init()) {
+ libAaudioFuzzer.process(data, size);
+ }
+ return 0;
+}
diff --git a/media/libaudioclient/AidlConversion.cpp b/media/libaudioclient/AidlConversion.cpp
index 5f63a69..fad861a 100644
--- a/media/libaudioclient/AidlConversion.cpp
+++ b/media/libaudioclient/AidlConversion.cpp
@@ -50,8 +50,12 @@
using media::audio::common::AudioGain;
using media::audio::common::AudioGainConfig;
using media::audio::common::AudioGainMode;
+using media::audio::common::AudioInputFlags;
+using media::audio::common::AudioIoFlags;
using media::audio::common::AudioMode;
using media::audio::common::AudioOffloadInfo;
+using media::audio::common::AudioOutputFlags;
+using media::audio::common::AudioPortDeviceExt;
using media::audio::common::AudioPortExt;
using media::audio::common::AudioPortMixExt;
using media::audio::common::AudioPortMixExtUseCase;
@@ -1281,129 +1285,129 @@
}
ConversionResult<audio_input_flags_t> aidl2legacy_AudioInputFlags_audio_input_flags_t(
- media::AudioInputFlags aidl) {
+ AudioInputFlags aidl) {
switch (aidl) {
- case media::AudioInputFlags::FAST:
+ case AudioInputFlags::FAST:
return AUDIO_INPUT_FLAG_FAST;
- case media::AudioInputFlags::HW_HOTWORD:
+ case AudioInputFlags::HW_HOTWORD:
return AUDIO_INPUT_FLAG_HW_HOTWORD;
- case media::AudioInputFlags::RAW:
+ case AudioInputFlags::RAW:
return AUDIO_INPUT_FLAG_RAW;
- case media::AudioInputFlags::SYNC:
+ case AudioInputFlags::SYNC:
return AUDIO_INPUT_FLAG_SYNC;
- case media::AudioInputFlags::MMAP_NOIRQ:
+ case AudioInputFlags::MMAP_NOIRQ:
return AUDIO_INPUT_FLAG_MMAP_NOIRQ;
- case media::AudioInputFlags::VOIP_TX:
+ case AudioInputFlags::VOIP_TX:
return AUDIO_INPUT_FLAG_VOIP_TX;
- case media::AudioInputFlags::HW_AV_SYNC:
+ case AudioInputFlags::HW_AV_SYNC:
return AUDIO_INPUT_FLAG_HW_AV_SYNC;
- case media::AudioInputFlags::DIRECT:
+ case AudioInputFlags::DIRECT:
return AUDIO_INPUT_FLAG_DIRECT;
}
return unexpected(BAD_VALUE);
}
-ConversionResult<media::AudioInputFlags> legacy2aidl_audio_input_flags_t_AudioInputFlags(
+ConversionResult<AudioInputFlags> legacy2aidl_audio_input_flags_t_AudioInputFlags(
audio_input_flags_t legacy) {
switch (legacy) {
case AUDIO_INPUT_FLAG_NONE:
break; // shouldn't get here. must be listed -Werror,-Wswitch
case AUDIO_INPUT_FLAG_FAST:
- return media::AudioInputFlags::FAST;
+ return AudioInputFlags::FAST;
case AUDIO_INPUT_FLAG_HW_HOTWORD:
- return media::AudioInputFlags::HW_HOTWORD;
+ return AudioInputFlags::HW_HOTWORD;
case AUDIO_INPUT_FLAG_RAW:
- return media::AudioInputFlags::RAW;
+ return AudioInputFlags::RAW;
case AUDIO_INPUT_FLAG_SYNC:
- return media::AudioInputFlags::SYNC;
+ return AudioInputFlags::SYNC;
case AUDIO_INPUT_FLAG_MMAP_NOIRQ:
- return media::AudioInputFlags::MMAP_NOIRQ;
+ return AudioInputFlags::MMAP_NOIRQ;
case AUDIO_INPUT_FLAG_VOIP_TX:
- return media::AudioInputFlags::VOIP_TX;
+ return AudioInputFlags::VOIP_TX;
case AUDIO_INPUT_FLAG_HW_AV_SYNC:
- return media::AudioInputFlags::HW_AV_SYNC;
+ return AudioInputFlags::HW_AV_SYNC;
case AUDIO_INPUT_FLAG_DIRECT:
- return media::AudioInputFlags::DIRECT;
+ return AudioInputFlags::DIRECT;
}
return unexpected(BAD_VALUE);
}
ConversionResult<audio_output_flags_t> aidl2legacy_AudioOutputFlags_audio_output_flags_t(
- media::AudioOutputFlags aidl) {
+ AudioOutputFlags aidl) {
switch (aidl) {
- case media::AudioOutputFlags::DIRECT:
+ case AudioOutputFlags::DIRECT:
return AUDIO_OUTPUT_FLAG_DIRECT;
- case media::AudioOutputFlags::PRIMARY:
+ case AudioOutputFlags::PRIMARY:
return AUDIO_OUTPUT_FLAG_PRIMARY;
- case media::AudioOutputFlags::FAST:
+ case AudioOutputFlags::FAST:
return AUDIO_OUTPUT_FLAG_FAST;
- case media::AudioOutputFlags::DEEP_BUFFER:
+ case AudioOutputFlags::DEEP_BUFFER:
return AUDIO_OUTPUT_FLAG_DEEP_BUFFER;
- case media::AudioOutputFlags::COMPRESS_OFFLOAD:
+ case AudioOutputFlags::COMPRESS_OFFLOAD:
return AUDIO_OUTPUT_FLAG_COMPRESS_OFFLOAD;
- case media::AudioOutputFlags::NON_BLOCKING:
+ case AudioOutputFlags::NON_BLOCKING:
return AUDIO_OUTPUT_FLAG_NON_BLOCKING;
- case media::AudioOutputFlags::HW_AV_SYNC:
+ case AudioOutputFlags::HW_AV_SYNC:
return AUDIO_OUTPUT_FLAG_HW_AV_SYNC;
- case media::AudioOutputFlags::TTS:
+ case AudioOutputFlags::TTS:
return AUDIO_OUTPUT_FLAG_TTS;
- case media::AudioOutputFlags::RAW:
+ case AudioOutputFlags::RAW:
return AUDIO_OUTPUT_FLAG_RAW;
- case media::AudioOutputFlags::SYNC:
+ case AudioOutputFlags::SYNC:
return AUDIO_OUTPUT_FLAG_SYNC;
- case media::AudioOutputFlags::IEC958_NONAUDIO:
+ case AudioOutputFlags::IEC958_NONAUDIO:
return AUDIO_OUTPUT_FLAG_IEC958_NONAUDIO;
- case media::AudioOutputFlags::DIRECT_PCM:
+ case AudioOutputFlags::DIRECT_PCM:
return AUDIO_OUTPUT_FLAG_DIRECT_PCM;
- case media::AudioOutputFlags::MMAP_NOIRQ:
+ case AudioOutputFlags::MMAP_NOIRQ:
return AUDIO_OUTPUT_FLAG_MMAP_NOIRQ;
- case media::AudioOutputFlags::VOIP_RX:
+ case AudioOutputFlags::VOIP_RX:
return AUDIO_OUTPUT_FLAG_VOIP_RX;
- case media::AudioOutputFlags::INCALL_MUSIC:
+ case AudioOutputFlags::INCALL_MUSIC:
return AUDIO_OUTPUT_FLAG_INCALL_MUSIC;
- case media::AudioOutputFlags::GAPLESS_OFFLOAD:
+ case AudioOutputFlags::GAPLESS_OFFLOAD:
return AUDIO_OUTPUT_FLAG_GAPLESS_OFFLOAD;
}
return unexpected(BAD_VALUE);
}
-ConversionResult<media::AudioOutputFlags> legacy2aidl_audio_output_flags_t_AudioOutputFlags(
+ConversionResult<AudioOutputFlags> legacy2aidl_audio_output_flags_t_AudioOutputFlags(
audio_output_flags_t legacy) {
switch (legacy) {
case AUDIO_OUTPUT_FLAG_NONE:
break; // shouldn't get here. must be listed -Werror,-Wswitch
case AUDIO_OUTPUT_FLAG_DIRECT:
- return media::AudioOutputFlags::DIRECT;
+ return AudioOutputFlags::DIRECT;
case AUDIO_OUTPUT_FLAG_PRIMARY:
- return media::AudioOutputFlags::PRIMARY;
+ return AudioOutputFlags::PRIMARY;
case AUDIO_OUTPUT_FLAG_FAST:
- return media::AudioOutputFlags::FAST;
+ return AudioOutputFlags::FAST;
case AUDIO_OUTPUT_FLAG_DEEP_BUFFER:
- return media::AudioOutputFlags::DEEP_BUFFER;
+ return AudioOutputFlags::DEEP_BUFFER;
case AUDIO_OUTPUT_FLAG_COMPRESS_OFFLOAD:
- return media::AudioOutputFlags::COMPRESS_OFFLOAD;
+ return AudioOutputFlags::COMPRESS_OFFLOAD;
case AUDIO_OUTPUT_FLAG_NON_BLOCKING:
- return media::AudioOutputFlags::NON_BLOCKING;
+ return AudioOutputFlags::NON_BLOCKING;
case AUDIO_OUTPUT_FLAG_HW_AV_SYNC:
- return media::AudioOutputFlags::HW_AV_SYNC;
+ return AudioOutputFlags::HW_AV_SYNC;
case AUDIO_OUTPUT_FLAG_TTS:
- return media::AudioOutputFlags::TTS;
+ return AudioOutputFlags::TTS;
case AUDIO_OUTPUT_FLAG_RAW:
- return media::AudioOutputFlags::RAW;
+ return AudioOutputFlags::RAW;
case AUDIO_OUTPUT_FLAG_SYNC:
- return media::AudioOutputFlags::SYNC;
+ return AudioOutputFlags::SYNC;
case AUDIO_OUTPUT_FLAG_IEC958_NONAUDIO:
- return media::AudioOutputFlags::IEC958_NONAUDIO;
+ return AudioOutputFlags::IEC958_NONAUDIO;
case AUDIO_OUTPUT_FLAG_DIRECT_PCM:
- return media::AudioOutputFlags::DIRECT_PCM;
+ return AudioOutputFlags::DIRECT_PCM;
case AUDIO_OUTPUT_FLAG_MMAP_NOIRQ:
- return media::AudioOutputFlags::MMAP_NOIRQ;
+ return AudioOutputFlags::MMAP_NOIRQ;
case AUDIO_OUTPUT_FLAG_VOIP_RX:
- return media::AudioOutputFlags::VOIP_RX;
+ return AudioOutputFlags::VOIP_RX;
case AUDIO_OUTPUT_FLAG_INCALL_MUSIC:
- return media::AudioOutputFlags::INCALL_MUSIC;
+ return AudioOutputFlags::INCALL_MUSIC;
case AUDIO_OUTPUT_FLAG_GAPLESS_OFFLOAD:
- return media::AudioOutputFlags::GAPLESS_OFFLOAD;
+ return AudioOutputFlags::GAPLESS_OFFLOAD;
}
return unexpected(BAD_VALUE);
}
@@ -1413,9 +1417,9 @@
using LegacyMask = std::underlying_type_t<audio_input_flags_t>;
LegacyMask converted = VALUE_OR_RETURN(
- (convertBitmask<LegacyMask, int32_t, audio_input_flags_t, media::AudioInputFlags>(
+ (convertBitmask<LegacyMask, int32_t, audio_input_flags_t, AudioInputFlags>(
aidl, aidl2legacy_AudioInputFlags_audio_input_flags_t,
- indexToEnum_index<media::AudioInputFlags>,
+ indexToEnum_index<AudioInputFlags>,
enumToMask_bitmask<LegacyMask, audio_input_flags_t>)));
return static_cast<audio_input_flags_t>(converted);
}
@@ -1425,10 +1429,10 @@
using LegacyMask = std::underlying_type_t<audio_input_flags_t>;
LegacyMask legacyMask = static_cast<LegacyMask>(legacy);
- return convertBitmask<int32_t, LegacyMask, media::AudioInputFlags, audio_input_flags_t>(
+ return convertBitmask<int32_t, LegacyMask, AudioInputFlags, audio_input_flags_t>(
legacyMask, legacy2aidl_audio_input_flags_t_AudioInputFlags,
indexToEnum_bitmask<audio_input_flags_t>,
- enumToMask_index<int32_t, media::AudioInputFlags>);
+ enumToMask_index<int32_t, AudioInputFlags>);
}
ConversionResult<audio_output_flags_t> aidl2legacy_int32_t_audio_output_flags_t_mask(
@@ -1436,9 +1440,9 @@
return convertBitmask<audio_output_flags_t,
int32_t,
audio_output_flags_t,
- media::AudioOutputFlags>(
+ AudioOutputFlags>(
aidl, aidl2legacy_AudioOutputFlags_audio_output_flags_t,
- indexToEnum_index<media::AudioOutputFlags>,
+ indexToEnum_index<AudioOutputFlags>,
enumToMask_bitmask<audio_output_flags_t, audio_output_flags_t>);
}
@@ -1447,14 +1451,14 @@
using LegacyMask = std::underlying_type_t<audio_output_flags_t>;
LegacyMask legacyMask = static_cast<LegacyMask>(legacy);
- return convertBitmask<int32_t, LegacyMask, media::AudioOutputFlags, audio_output_flags_t>(
+ return convertBitmask<int32_t, LegacyMask, AudioOutputFlags, audio_output_flags_t>(
legacyMask, legacy2aidl_audio_output_flags_t_AudioOutputFlags,
indexToEnum_bitmask<audio_output_flags_t>,
- enumToMask_index<int32_t, media::AudioOutputFlags>);
+ enumToMask_index<int32_t, AudioOutputFlags>);
}
ConversionResult<audio_io_flags> aidl2legacy_AudioIoFlags_audio_io_flags(
- const media::AudioIoFlags& aidl, media::AudioPortRole role, media::AudioPortType type) {
+ const AudioIoFlags& aidl, media::AudioPortRole role, media::AudioPortType type) {
audio_io_flags legacy;
Direction dir = VALUE_OR_RETURN(direction(role, type));
switch (dir) {
@@ -1476,9 +1480,9 @@
return legacy;
}
-ConversionResult<media::AudioIoFlags> legacy2aidl_audio_io_flags_AudioIoFlags(
+ConversionResult<AudioIoFlags> legacy2aidl_audio_io_flags_AudioIoFlags(
const audio_io_flags& legacy, audio_port_role_t role, audio_port_type_t type) {
- media::AudioIoFlags aidl;
+ AudioIoFlags aidl;
Direction dir = VALUE_OR_RETURN(direction(role, type));
switch (dir) {
@@ -1497,21 +1501,22 @@
}
ConversionResult<audio_port_config_device_ext>
-aidl2legacy_AudioDevice_audio_port_config_device_ext(
- const AudioDevice& aidl, const media::AudioPortDeviceExtSys& aidlDeviceExt) {
+aidl2legacy_AudioPortDeviceExt_audio_port_config_device_ext(
+ const AudioPortDeviceExt& aidl, const media::AudioPortDeviceExtSys& aidlDeviceExt) {
audio_port_config_device_ext legacy;
legacy.hw_module = VALUE_OR_RETURN(
aidl2legacy_int32_t_audio_module_handle_t(aidlDeviceExt.hwModule));
- RETURN_IF_ERROR(aidl2legacy_AudioDevice_audio_device(aidl, &legacy.type, legacy.address));
+ RETURN_IF_ERROR(aidl2legacy_AudioDevice_audio_device(
+ aidl.device, &legacy.type, legacy.address));
return legacy;
}
-status_t legacy2aidl_audio_port_config_device_ext_AudioDevice(
+status_t legacy2aidl_audio_port_config_device_ext_AudioPortDeviceExt(
const audio_port_config_device_ext& legacy,
- AudioDevice* aidl, media::AudioPortDeviceExtSys* aidlDeviceExt) {
+ AudioPortDeviceExt* aidl, media::AudioPortDeviceExtSys* aidlDeviceExt) {
aidlDeviceExt->hwModule = VALUE_OR_RETURN_STATUS(
legacy2aidl_audio_module_handle_t_int32_t(legacy.hw_module));
- *aidl = VALUE_OR_RETURN_STATUS(
+ aidl->device = VALUE_OR_RETURN_STATUS(
legacy2aidl_audio_device_AudioDevice(legacy.type, legacy.address));
return OK;
}
@@ -1777,7 +1782,7 @@
return legacy;
case media::AudioPortType::DEVICE:
legacy.device = VALUE_OR_RETURN(
- aidl2legacy_AudioDevice_audio_port_config_device_ext(
+ aidl2legacy_AudioPortDeviceExt_audio_port_config_device_ext(
VALUE_OR_RETURN(UNION_GET(aidl, device)),
VALUE_OR_RETURN(UNION_GET(aidlSys, device))));
return legacy;
@@ -1806,10 +1811,10 @@
UNION_SET(*aidlSys, unspecified, false);
return OK;
case AUDIO_PORT_TYPE_DEVICE: {
- AudioDevice device;
+ AudioPortDeviceExt device;
media::AudioPortDeviceExtSys deviceSys;
RETURN_STATUS_IF_ERROR(
- legacy2aidl_audio_port_config_device_ext_AudioDevice(
+ legacy2aidl_audio_port_config_device_ext_AudioPortDeviceExt(
legacy.device, &device, &deviceSys));
UNION_SET(*aidl, device, device);
UNION_SET(*aidlSys, device, deviceSys);
@@ -1862,10 +1867,10 @@
aidl.hal.gain.value(), isInput));
legacy.config_mask |= AUDIO_PORT_CONFIG_GAIN;
}
- if (aidl.sys.flags.has_value()) {
+ if (aidl.hal.flags.has_value()) {
legacy.flags = VALUE_OR_RETURN(
aidl2legacy_AudioIoFlags_audio_io_flags(
- aidl.sys.flags.value(), aidl.sys.role, aidl.sys.type));
+ aidl.hal.flags.value(), aidl.sys.role, aidl.sys.type));
legacy.config_mask |= AUDIO_PORT_CONFIG_FLAGS;
}
legacy.ext = VALUE_OR_RETURN(
@@ -1900,7 +1905,7 @@
legacy2aidl_audio_gain_config_AudioGainConfig(legacy.gain, isInput));
}
if (legacy.config_mask & AUDIO_PORT_CONFIG_FLAGS) {
- aidl.sys.flags = VALUE_OR_RETURN(
+ aidl.hal.flags = VALUE_OR_RETURN(
legacy2aidl_audio_io_flags_AudioIoFlags(legacy.flags, legacy.role, legacy.type));
}
RETURN_IF_ERROR(legacy2aidl_AudioPortExt(legacy.ext, legacy.type, legacy.role,
@@ -2581,12 +2586,13 @@
}
ConversionResult<audio_port_device_ext>
-aidl2legacy_AudioDevice_audio_port_device_ext(
- const AudioDevice& aidl, const media::AudioPortDeviceExtSys& aidlSys) {
+aidl2legacy_AudioPortDeviceExt_audio_port_device_ext(
+ const AudioPortDeviceExt& aidl, const media::AudioPortDeviceExtSys& aidlSys) {
audio_port_device_ext legacy;
legacy.hw_module = VALUE_OR_RETURN(
aidl2legacy_int32_t_audio_module_handle_t(aidlSys.hwModule));
- RETURN_IF_ERROR(aidl2legacy_AudioDevice_audio_device(aidl, &legacy.type, legacy.address));
+ RETURN_IF_ERROR(aidl2legacy_AudioDevice_audio_device(
+ aidl.device, &legacy.type, legacy.address));
legacy.encapsulation_modes = VALUE_OR_RETURN(
aidl2legacy_AudioEncapsulationMode_mask(aidlSys.encapsulationModes));
legacy.encapsulation_metadata_types = VALUE_OR_RETURN(
@@ -2595,12 +2601,12 @@
return legacy;
}
-status_t legacy2aidl_audio_port_device_ext_AudioDevice(
+status_t legacy2aidl_audio_port_device_ext_AudioPortDeviceExt(
const audio_port_device_ext& legacy,
- AudioDevice* aidl, media::AudioPortDeviceExtSys* aidlDeviceExt) {
+ AudioPortDeviceExt* aidl, media::AudioPortDeviceExtSys* aidlDeviceExt) {
aidlDeviceExt->hwModule = VALUE_OR_RETURN_STATUS(
legacy2aidl_audio_module_handle_t_int32_t(legacy.hw_module));
- *aidl = VALUE_OR_RETURN_STATUS(
+ aidl->device = VALUE_OR_RETURN_STATUS(
legacy2aidl_audio_device_AudioDevice(legacy.type, legacy.address));
aidlDeviceExt->encapsulationModes = VALUE_OR_RETURN_STATUS(
legacy2aidl_AudioEncapsulationMode_mask(legacy.encapsulation_modes));
@@ -2653,7 +2659,7 @@
return legacy;
case media::AudioPortType::DEVICE:
legacy.device = VALUE_OR_RETURN(
- aidl2legacy_AudioDevice_audio_port_device_ext(
+ aidl2legacy_AudioPortDeviceExt_audio_port_device_ext(
VALUE_OR_RETURN(UNION_GET(aidl, device)),
VALUE_OR_RETURN(UNION_GET(aidlSys, device))));
return legacy;
@@ -2682,10 +2688,10 @@
UNION_SET(*aidlSys, unspecified, false);
return OK;
case AUDIO_PORT_TYPE_DEVICE: {
- AudioDevice device;
+ AudioPortDeviceExt device;
media::AudioPortDeviceExtSys deviceSys;
RETURN_STATUS_IF_ERROR(
- legacy2aidl_audio_port_device_ext_AudioDevice(
+ legacy2aidl_audio_port_device_ext_AudioPortDeviceExt(
legacy.device, &device, &deviceSys));
UNION_SET(*aidl, device, device);
UNION_SET(*aidlSys, device, deviceSys);
diff --git a/media/libaudioclient/Android.bp b/media/libaudioclient/Android.bp
index 9dd9fd6..d63a002 100644
--- a/media/libaudioclient/Android.bp
+++ b/media/libaudioclient/Android.bp
@@ -316,11 +316,8 @@
"aidl/android/media/AudioDualMonoMode.aidl",
"aidl/android/media/AudioFlag.aidl",
"aidl/android/media/AudioGainSys.aidl",
- "aidl/android/media/AudioInputFlags.aidl",
"aidl/android/media/AudioIoConfigEvent.aidl",
"aidl/android/media/AudioIoDescriptor.aidl",
- "aidl/android/media/AudioIoFlags.aidl",
- "aidl/android/media/AudioOutputFlags.aidl",
"aidl/android/media/AudioPatch.aidl",
"aidl/android/media/AudioPlaybackRate.aidl",
"aidl/android/media/AudioPort.aidl",
diff --git a/media/libaudioclient/aidl/android/media/AudioGainSys.aidl b/media/libaudioclient/aidl/android/media/AudioGainSys.aidl
index 9ec8390..426f4ed 100644
--- a/media/libaudioclient/aidl/android/media/AudioGainSys.aidl
+++ b/media/libaudioclient/aidl/android/media/AudioGainSys.aidl
@@ -24,5 +24,4 @@
parcelable AudioGainSys {
int index;
boolean isInput;
- boolean useForVolume;
}
diff --git a/media/libaudioclient/aidl/android/media/AudioInputFlags.aidl b/media/libaudioclient/aidl/android/media/AudioInputFlags.aidl
deleted file mode 100644
index bfc0eb0..0000000
--- a/media/libaudioclient/aidl/android/media/AudioInputFlags.aidl
+++ /dev/null
@@ -1,31 +0,0 @@
-/*
- * Copyright (C) 2020 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package android.media;
-
-/**
- * {@hide}
- */
-@Backing(type="int")
-enum AudioInputFlags {
- FAST = 0,
- HW_HOTWORD = 1,
- RAW = 2,
- SYNC = 3,
- MMAP_NOIRQ = 4,
- VOIP_TX = 5,
- HW_AV_SYNC = 6,
- DIRECT = 7,
-}
diff --git a/media/libaudioclient/aidl/android/media/AudioIoFlags.aidl b/media/libaudioclient/aidl/android/media/AudioIoFlags.aidl
deleted file mode 100644
index f9b25bf..0000000
--- a/media/libaudioclient/aidl/android/media/AudioIoFlags.aidl
+++ /dev/null
@@ -1,27 +0,0 @@
-/*
- * Copyright (C) 2020 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package android.media;
-
-/**
- * {@hide}
- */
-union AudioIoFlags {
- /** Bitmask indexed by AudioInputFlags. */
- int input;
- /** Bitmask indexed by AudioOutputFlags. */
- int output;
-}
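For context on the flag enums deleted here and just below (an illustration of mine, not part of the patch): each union field is a bitmask indexed by the corresponding flag enum, i.e. enumerator value N occupies bit N. The android.media.audio.common replacements keep, as far as I can tell, the same enumerators, so such a mask is built the same way:

    #include <cstdint>
    #include <android/media/audio/common/AudioInputFlags.h>
    #include <android/media/audio/common/AudioIoFlags.h>

    using android::media::audio::common::AudioInputFlags;
    using android::media::audio::common::AudioIoFlags;

    // FAST is bit 0 and RAW is bit 2, so a capture stream wanting both sets mask 0b101.
    const int32_t inputMask = (1 << static_cast<int32_t>(AudioInputFlags::FAST)) |
                              (1 << static_cast<int32_t>(AudioInputFlags::RAW));
    // Store it in the "input" alternative of the union, mirroring the make<>() style
    // this change uses for AudioPortExt.
    const AudioIoFlags flags = AudioIoFlags::make<AudioIoFlags::input>(inputMask);
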
diff --git a/media/libaudioclient/aidl/android/media/AudioOutputFlags.aidl b/media/libaudioclient/aidl/android/media/AudioOutputFlags.aidl
deleted file mode 100644
index cebd8f0..0000000
--- a/media/libaudioclient/aidl/android/media/AudioOutputFlags.aidl
+++ /dev/null
@@ -1,39 +0,0 @@
-/*
- * Copyright (C) 2020 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package android.media;
-
-/**
- * {@hide}
- */
-@Backing(type="int")
-enum AudioOutputFlags {
- DIRECT = 0,
- PRIMARY = 1,
- FAST = 2,
- DEEP_BUFFER = 3,
- COMPRESS_OFFLOAD = 4,
- NON_BLOCKING = 5,
- HW_AV_SYNC = 6,
- TTS = 7,
- RAW = 8,
- SYNC = 9,
- IEC958_NONAUDIO = 10,
- DIRECT_PCM = 11,
- MMAP_NOIRQ = 12,
- VOIP_RX = 13,
- INCALL_MUSIC = 14,
- GAPLESS_OFFLOAD = 15,
-}
diff --git a/media/libaudioclient/aidl/android/media/AudioPortConfigSys.aidl b/media/libaudioclient/aidl/android/media/AudioPortConfigSys.aidl
index 6a615cd..8692848 100644
--- a/media/libaudioclient/aidl/android/media/AudioPortConfigSys.aidl
+++ b/media/libaudioclient/aidl/android/media/AudioPortConfigSys.aidl
@@ -16,7 +16,6 @@
package android.media;
-import android.media.AudioIoFlags;
import android.media.AudioPortExtSys;
import android.media.AudioPortRole;
import android.media.AudioPortType;
@@ -29,7 +28,5 @@
AudioPortRole role;
/** Device, mix ... */
AudioPortType type;
- /** Flags: HW_AV_SYNC, DIRECT, ... Can be left unspecified. */
- @nullable AudioIoFlags flags;
AudioPortExtSys ext;
}
diff --git a/media/libaudioclient/include/media/AidlConversion.h b/media/libaudioclient/include/media/AidlConversion.h
index 52ea91b..fda9efb 100644
--- a/media/libaudioclient/include/media/AidlConversion.h
+++ b/media/libaudioclient/include/media/AidlConversion.h
@@ -25,10 +25,8 @@
#include <android/media/AudioClient.h>
#include <android/media/AudioDualMonoMode.h>
#include <android/media/AudioFlag.h>
-#include <android/media/AudioInputFlags.h>
#include <android/media/AudioIoConfigEvent.h>
#include <android/media/AudioIoDescriptor.h>
-#include <android/media/AudioOutputFlags.h>
#include <android/media/AudioPlaybackRate.h>
#include <android/media/AudioPort.h>
#include <android/media/AudioPortConfig.h>
@@ -49,8 +47,10 @@
#include <android/media/audio/common/AudioGain.h>
#include <android/media/audio/common/AudioGainConfig.h>
#include <android/media/audio/common/AudioGainMode.h>
+#include <android/media/audio/common/AudioInputFlags.h>
#include <android/media/audio/common/AudioMode.h>
#include <android/media/audio/common/AudioOffloadInfo.h>
+#include <android/media/audio/common/AudioOutputFlags.h>
#include <android/media/audio/common/AudioPortExt.h>
#include <android/media/audio/common/AudioPortMixExt.h>
#include <android/media/audio/common/AudioProfile.h>
@@ -173,15 +173,15 @@
ConversionResult<media::audio::common::AudioGainConfig>
legacy2aidl_audio_gain_config_AudioGainConfig(const audio_gain_config& legacy, bool isInput);
-ConversionResult<audio_input_flags_t> aidl2legacy_AudioInputFlags_audio_input_flags_t(
- media::AudioInputFlags aidl);
-ConversionResult<media::AudioInputFlags> legacy2aidl_audio_input_flags_t_AudioInputFlags(
- audio_input_flags_t legacy);
+ConversionResult<audio_input_flags_t>
+aidl2legacy_AudioInputFlags_audio_input_flags_t(media::audio::common::AudioInputFlags aidl);
+ConversionResult<media::audio::common::AudioInputFlags>
+legacy2aidl_audio_input_flags_t_AudioInputFlags(audio_input_flags_t legacy);
-ConversionResult<audio_output_flags_t> aidl2legacy_AudioOutputFlags_audio_output_flags_t(
- media::AudioOutputFlags aidl);
-ConversionResult<media::AudioOutputFlags> legacy2aidl_audio_output_flags_t_AudioOutputFlags(
- audio_output_flags_t legacy);
+ConversionResult<audio_output_flags_t>
+aidl2legacy_AudioOutputFlags_audio_output_flags_t(media::audio::common::AudioOutputFlags aidl);
+ConversionResult<media::audio::common::AudioOutputFlags>
+legacy2aidl_audio_output_flags_t_AudioOutputFlags(audio_output_flags_t legacy);
ConversionResult<audio_input_flags_t> aidl2legacy_int32_t_audio_input_flags_t_mask(
int32_t aidl);
@@ -194,17 +194,19 @@
audio_output_flags_t legacy);
ConversionResult<audio_io_flags> aidl2legacy_AudioIoFlags_audio_io_flags(
- const media::AudioIoFlags& aidl, media::AudioPortRole role, media::AudioPortType type);
-ConversionResult<media::AudioIoFlags> legacy2aidl_audio_io_flags_AudioIoFlags(
+ const media::audio::common::AudioIoFlags& aidl,
+ media::AudioPortRole role, media::AudioPortType type);
+ConversionResult<media::audio::common::AudioIoFlags> legacy2aidl_audio_io_flags_AudioIoFlags(
const audio_io_flags& legacy, audio_port_role_t role, audio_port_type_t type);
ConversionResult<audio_port_config_device_ext>
-aidl2legacy_AudioDevice_audio_port_config_device_ext(
- const media::audio::common::AudioDevice& aidl,
+aidl2legacy_AudioPortDeviceExt_audio_port_config_device_ext(
+ const media::audio::common::AudioPortDeviceExt& aidl,
const media::AudioPortDeviceExtSys& aidlDeviceExt);
-status_t legacy2aidl_audio_port_config_device_ext_AudioDevice(
+status_t legacy2aidl_audio_port_config_device_ext_AudioPortDeviceExt(
const audio_port_config_device_ext& legacy,
- media::audio::common::AudioDevice* aidl, media::AudioPortDeviceExtSys* aidlDeviceExt);
+ media::audio::common::AudioPortDeviceExt* aidl,
+ media::AudioPortDeviceExtSys* aidlDeviceExt);
ConversionResult<audio_stream_type_t> aidl2legacy_AudioStreamType_audio_stream_type_t(
media::audio::common::AudioStreamType aidl);
@@ -344,12 +346,12 @@
legacy2aidl_AudioEncapsulationMetadataType_mask(uint32_t legacy);
ConversionResult<audio_port_device_ext>
-aidl2legacy_AudioDevice_audio_port_device_ext(
- const media::audio::common::AudioDevice& aidl,
+aidl2legacy_AudioPortDeviceExt_audio_port_device_ext(
+ const media::audio::common::AudioPortDeviceExt& aidl,
const media::AudioPortDeviceExtSys& aidlDeviceExt);
-status_t legacy2aidl_audio_port_device_ext_AudioDevice(
+status_t legacy2aidl_audio_port_device_ext_AudioPortDeviceExt(
const audio_port_device_ext& legacy,
- media::audio::common::AudioDevice* aidl,
+ media::audio::common::AudioPortDeviceExt* aidl,
media::AudioPortDeviceExtSys* aidlDeviceExt);
ConversionResult<audio_port_mix_ext>
diff --git a/media/libaudiofoundation/AudioGain.cpp b/media/libaudiofoundation/AudioGain.cpp
index 1a8fbf0..47e0edb 100644
--- a/media/libaudiofoundation/AudioGain.cpp
+++ b/media/libaudiofoundation/AudioGain.cpp
@@ -122,20 +122,20 @@
ConversionResult<AudioGain::Aidl> AudioGain::toParcelable() const {
media::audio::common::AudioGain aidl = VALUE_OR_RETURN(
legacy2aidl_audio_gain_AudioGain(mGain, mIsInput));
+ aidl.useForVolume = mUseForVolume;
media::AudioGainSys aidlSys;
aidlSys.index = VALUE_OR_RETURN(convertIntegral<int32_t>(mIndex));
aidlSys.isInput = mIsInput;
- aidlSys.useForVolume = mUseForVolume;
return std::make_pair(aidl, aidlSys);
}
ConversionResult<sp<AudioGain>> AudioGain::fromParcelable(const AudioGain::Aidl& aidl) {
+ const media::audio::common::AudioGain& hal = aidl.first;
const media::AudioGainSys& sys = aidl.second;
auto index = VALUE_OR_RETURN(convertIntegral<int>(sys.index));
sp<AudioGain> legacy = sp<AudioGain>::make(index, sys.isInput);
- legacy->mGain = VALUE_OR_RETURN(
- aidl2legacy_AudioGain_audio_gain(aidl.first, sys.isInput));
- legacy->mUseForVolume = sys.useForVolume;
+ legacy->mGain = VALUE_OR_RETURN(aidl2legacy_AudioGain_audio_gain(hal, sys.isInput));
+ legacy->mUseForVolume = hal.useForVolume;
return legacy;
}
diff --git a/media/libaudiofoundation/AudioPort.cpp b/media/libaudiofoundation/AudioPort.cpp
index 099aff4..a00e00d 100644
--- a/media/libaudiofoundation/AudioPort.cpp
+++ b/media/libaudiofoundation/AudioPort.cpp
@@ -219,6 +219,14 @@
auto aidlGains = VALUE_OR_RETURN_STATUS(legacy2aidl_AudioGains(mGains));
parcelable->hal.gains = aidlGains.first;
parcelable->sys.gains = aidlGains.second;
+ if (mType == AUDIO_PORT_TYPE_MIX) {
+ media::audio::common::AudioPortMixExt mixExt{};
+ mixExt.maxOpenStreamCount = maxOpenCount;
+ mixExt.maxActiveStreamCount = maxActiveCount;
+ mixExt.recommendedMuteDurationMs = recommendedMuteDurationMs;
+ parcelable->hal.ext = media::audio::common::AudioPortExt::make<
+ media::audio::common::AudioPortExt::mix>(mixExt);
+ }
return OK;
}
@@ -241,6 +249,13 @@
mExtraAudioDescriptors = parcelable.hal.extraAudioDescriptors;
mGains = VALUE_OR_RETURN_STATUS(
aidl2legacy_AudioGains(std::make_pair(parcelable.hal.gains, parcelable.sys.gains)));
+ if (mType == AUDIO_PORT_TYPE_MIX) {
+ const media::audio::common::AudioPortMixExt& mixExt =
+ parcelable.hal.ext.get<media::audio::common::AudioPortExt::mix>();
+ maxOpenCount = mixExt.maxOpenStreamCount;
+ maxActiveCount = mixExt.maxActiveStreamCount;
+ recommendedMuteDurationMs = mixExt.recommendedMuteDurationMs;
+ }
return OK;
}
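A rough usage sketch of the new mix-ext plumbing (not from the patch; it assumes the writeToParcelable(media::AudioPort*)/readFromParcelable(const media::AudioPort&) signatures used above and that AudioPort is directly constructible):

    #include <media/AudioPort.h>

    using namespace android;

    void mixExtRoundTrip() {
        sp<AudioPort> out = sp<AudioPort>::make("mix", AUDIO_PORT_TYPE_MIX, AUDIO_PORT_ROLE_SOURCE);
        out->maxOpenCount = 1;
        out->maxActiveCount = 2;
        out->recommendedMuteDurationMs = 50;

        media::AudioPort parcelable;
        if (out->writeToParcelable(&parcelable) != OK) return;

        sp<AudioPort> in = sp<AudioPort>::make("", AUDIO_PORT_TYPE_MIX, AUDIO_PORT_ROLE_SOURCE);
        if (in->readFromParcelable(parcelable) != OK) return;
        // in->maxOpenCount, in->maxActiveCount and in->recommendedMuteDurationMs now match
        // "out": the values travel through AudioPortMixExt in the parcelable.
    }
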
diff --git a/media/libaudiofoundation/DeviceDescriptorBase.cpp b/media/libaudiofoundation/DeviceDescriptorBase.cpp
index cf829b5..b383db4 100644
--- a/media/libaudiofoundation/DeviceDescriptorBase.cpp
+++ b/media/libaudiofoundation/DeviceDescriptorBase.cpp
@@ -30,16 +30,20 @@
{
}
-DeviceDescriptorBase::DeviceDescriptorBase(audio_devices_t type, const std::string& address) :
- DeviceDescriptorBase(AudioDeviceTypeAddr(type, address))
+DeviceDescriptorBase::DeviceDescriptorBase(
+ audio_devices_t type, const std::string& address,
+ const FormatVector &encodedFormats) :
+ DeviceDescriptorBase(AudioDeviceTypeAddr(type, address), encodedFormats)
{
}
-DeviceDescriptorBase::DeviceDescriptorBase(const AudioDeviceTypeAddr &deviceTypeAddr) :
+DeviceDescriptorBase::DeviceDescriptorBase(
+ const AudioDeviceTypeAddr &deviceTypeAddr, const FormatVector &encodedFormats) :
AudioPort("", AUDIO_PORT_TYPE_DEVICE,
audio_is_output_device(deviceTypeAddr.mType) ? AUDIO_PORT_ROLE_SINK :
AUDIO_PORT_ROLE_SOURCE),
- mDeviceTypeAddr(deviceTypeAddr)
+ mDeviceTypeAddr(deviceTypeAddr),
+ mEncodedFormats(encodedFormats)
{
if (mDeviceTypeAddr.address().empty() && audio_is_remote_submix_device(mDeviceTypeAddr.mType)) {
mDeviceTypeAddr.setAddress("0");
@@ -148,12 +152,35 @@
AudioPort::log(" ");
}
+template<typename T>
+bool checkEqual(const T& f1, const T& f2)
+{
+ std::set<typename T::value_type> s1(f1.begin(), f1.end());
+ std::set<typename T::value_type> s2(f2.begin(), f2.end());
+ return s1 == s2;
+}
+
bool DeviceDescriptorBase::equals(const sp<DeviceDescriptorBase> &other) const
{
return other != nullptr &&
static_cast<const AudioPort*>(this)->equals(other) &&
static_cast<const AudioPortConfig*>(this)->equals(other) &&
- mDeviceTypeAddr.equals(other->mDeviceTypeAddr);
+ mDeviceTypeAddr.equals(other->mDeviceTypeAddr) &&
+ checkEqual(mEncodedFormats, other->mEncodedFormats);
+}
+
+bool DeviceDescriptorBase::supportsFormat(audio_format_t format)
+{
+ if (mEncodedFormats.empty()) {
+ return true;
+ }
+
+ for (const auto& devFormat : mEncodedFormats) {
+ if (devFormat == format) {
+ return true;
+ }
+ }
+ return false;
}
@@ -169,9 +196,13 @@
AudioPortConfig::writeToParcelable(&parcelable->hal.activeConfig, useInputChannelMask());
parcelable->hal.id = VALUE_OR_RETURN_STATUS(legacy2aidl_audio_port_handle_t_int32_t(mId));
- media::audio::common::AudioDevice device = VALUE_OR_RETURN_STATUS(
+ media::audio::common::AudioPortDeviceExt deviceExt;
+ deviceExt.device = VALUE_OR_RETURN_STATUS(
legacy2aidl_AudioDeviceTypeAddress(mDeviceTypeAddr));
- UNION_SET(parcelable->hal.ext, device, device);
+ deviceExt.encodedFormats = VALUE_OR_RETURN_STATUS(
+ convertContainer<std::vector<media::audio::common::AudioFormatDescription>>(
+ mEncodedFormats, legacy2aidl_audio_format_t_AudioFormatDescription));
+ UNION_SET(parcelable->hal.ext, device, deviceExt);
media::AudioPortDeviceExtSys deviceSys;
deviceSys.encapsulationModes = VALUE_OR_RETURN_STATUS(
legacy2aidl_AudioEncapsulationMode_mask(mEncapsulationModes));
@@ -198,10 +229,13 @@
return status;
}
- media::audio::common::AudioDevice device = VALUE_OR_RETURN_STATUS(
+ media::audio::common::AudioPortDeviceExt deviceExt = VALUE_OR_RETURN_STATUS(
UNION_GET(parcelable.hal.ext, device));
mDeviceTypeAddr = VALUE_OR_RETURN_STATUS(
- aidl2legacy_AudioDeviceTypeAddress(device));
+ aidl2legacy_AudioDeviceTypeAddress(deviceExt.device));
+ mEncodedFormats = VALUE_OR_RETURN_STATUS(
+ convertContainer<FormatVector>(deviceExt.encodedFormats,
+ aidl2legacy_AudioFormatDescription_audio_format_t));
media::AudioPortDeviceExtSys deviceSys = VALUE_OR_RETURN_STATUS(
UNION_GET(parcelable.sys.ext, device));
mEncapsulationModes = VALUE_OR_RETURN_STATUS(
diff --git a/media/libaudiofoundation/include/media/AudioPort.h b/media/libaudiofoundation/include/media/AudioPort.h
index aa9b5f6..b5a17c9 100644
--- a/media/libaudiofoundation/include/media/AudioPort.h
+++ b/media/libaudiofoundation/include/media/AudioPort.h
@@ -106,6 +106,21 @@
status_t readFromParcelable(const media::AudioPort& parcelable);
AudioGains mGains; // gain controllers
+ // Maximum number of input or output streams that can be simultaneously
+ // opened for this profile. By convention 0 means no limit. To respect
+ // legacy behavior, initialized to 1 for output profiles and 0 for input
+ // profiles
+ // FIXME: IOProfile code used the same value for both cases.
+ uint32_t maxOpenCount = 1;
+ // Maximum number of input or output streams that can be simultaneously
+ // active for this profile. By convention 0 means no limit. To respect
+ // legacy behavior, initialized to 0 for output profiles and 1 for input
+ // profiles
+ // FIXME: IOProfile code used the same value for both cases.
+ uint32_t maxActiveCount = 1;
+ // Mute duration while changing device on this output profile.
+ uint32_t recommendedMuteDurationMs = 0;
+
protected:
std::string mName;
audio_port_type_t mType;
diff --git a/media/libaudiofoundation/include/media/DeviceDescriptorBase.h b/media/libaudiofoundation/include/media/DeviceDescriptorBase.h
index 140ce36..a4092eb 100644
--- a/media/libaudiofoundation/include/media/DeviceDescriptorBase.h
+++ b/media/libaudiofoundation/include/media/DeviceDescriptorBase.h
@@ -36,16 +36,21 @@
public:
// Note that empty name refers by convention to a generic device.
explicit DeviceDescriptorBase(audio_devices_t type);
- DeviceDescriptorBase(audio_devices_t type, const std::string& address);
- explicit DeviceDescriptorBase(const AudioDeviceTypeAddr& deviceTypeAddr);
+ DeviceDescriptorBase(audio_devices_t type, const std::string& address,
+ const FormatVector &encodedFormats = FormatVector{});
+ DeviceDescriptorBase(const AudioDeviceTypeAddr& deviceTypeAddr,
+ const FormatVector &encodedFormats = FormatVector{});
- virtual ~DeviceDescriptorBase() {}
+ virtual ~DeviceDescriptorBase() = default;
audio_devices_t type() const { return mDeviceTypeAddr.mType; }
const std::string& address() const { return mDeviceTypeAddr.address(); }
void setAddress(const std::string &address);
const AudioDeviceTypeAddr& getDeviceTypeAddr() const { return mDeviceTypeAddr; }
+ const FormatVector& encodedFormats() const { return mEncodedFormats; }
+ bool supportsFormat(audio_format_t format);
+
// AudioPortConfig
virtual sp<AudioPort> getAudioPort() const {
return static_cast<AudioPort*>(const_cast<DeviceDescriptorBase*>(this));
@@ -82,6 +87,7 @@
protected:
AudioDeviceTypeAddr mDeviceTypeAddr;
+ FormatVector mEncodedFormats;
uint32_t mEncapsulationModes = 0;
uint32_t mEncapsulationMetadataTypes = 0;
private:
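A small usage sketch for the relocated encoded-format support check (illustration only, not part of the patch; the audio constants come from system/audio.h and FormatVector is, as I understand it, the std::vector<audio_format_t> alias from media/AudioContainers.h):

    #include <media/DeviceDescriptorBase.h>

    using namespace android;

    void hdmiPassthroughExample() {
        // An HDMI sink that only advertises compressed passthrough formats.
        const FormatVector formats = {AUDIO_FORMAT_AC3, AUDIO_FORMAT_E_AC3};
        sp<DeviceDescriptorBase> hdmi = sp<DeviceDescriptorBase>::make(
                AUDIO_DEVICE_OUT_HDMI, "" /*address*/, formats);

        hdmi->supportsFormat(AUDIO_FORMAT_AC3);         // true: explicitly listed
        hdmi->supportsFormat(AUDIO_FORMAT_PCM_16_BIT);  // false: list is non-empty
        // With the default (empty) FormatVector, supportsFormat() accepts any format.
    }
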
diff --git a/services/audiopolicy/common/managerdefinitions/include/DeviceDescriptor.h b/services/audiopolicy/common/managerdefinitions/include/DeviceDescriptor.h
index 58d05c6..b444fd7 100644
--- a/services/audiopolicy/common/managerdefinitions/include/DeviceDescriptor.h
+++ b/services/audiopolicy/common/managerdefinitions/include/DeviceDescriptor.h
@@ -43,7 +43,7 @@
DeviceDescriptor(const AudioDeviceTypeAddr &deviceTypeAddr, const std::string &tagName = "",
const FormatVector &encodedFormats = FormatVector{});
- virtual ~DeviceDescriptor() {}
+ virtual ~DeviceDescriptor() = default;
virtual void addAudioProfile(const sp<AudioProfile> &profile) {
addAudioProfileAndSort(mProfiles, profile);
@@ -51,8 +51,6 @@
virtual const std::string getTagName() const { return mTagName; }
- const FormatVector& encodedFormats() const { return mEncodedFormats; }
-
audio_format_t getEncodedFormat() { return mCurrentEncodedFormat; }
void setEncodedFormat(audio_format_t format) {
@@ -63,8 +61,6 @@
bool hasCurrentEncodedFormat() const;
- bool supportsFormat(audio_format_t format);
-
void setDynamic() { mIsDynamic = true; }
bool isDynamic() const { return mIsDynamic; }
@@ -106,7 +102,6 @@
}
std::string mTagName; // Unique human readable identifier for a device port found in conf file.
- FormatVector mEncodedFormats;
audio_format_t mCurrentEncodedFormat;
bool mIsDynamic = false;
const std::string mDeclaredAddress; // Original device address
diff --git a/services/audiopolicy/common/managerdefinitions/include/IOProfile.h b/services/audiopolicy/common/managerdefinitions/include/IOProfile.h
index 81828ed..0e1548f 100644
--- a/services/audiopolicy/common/managerdefinitions/include/IOProfile.h
+++ b/services/audiopolicy/common/managerdefinitions/include/IOProfile.h
@@ -37,9 +37,7 @@
public:
IOProfile(const std::string &name, audio_port_role_t role)
: AudioPort(name, AUDIO_PORT_TYPE_MIX, role),
- maxOpenCount(1),
curOpenCount(0),
- maxActiveCount(1),
curActiveCount(0) {}
virtual ~IOProfile() = default;
@@ -194,21 +192,11 @@
return false;
}
- // Maximum number of input or output streams that can be simultaneously opened for this profile.
- // By convention 0 means no limit. To respect legacy behavior, initialized to 1 for output
- // profiles and 0 for input profiles
- uint32_t maxOpenCount;
// Number of streams currently opened for this profile.
uint32_t curOpenCount;
- // Maximum number of input or output streams that can be simultaneously active for this profile.
- // By convention 0 means no limit. To respect legacy behavior, initialized to 0 for output
- // profiles and 1 for input profiles
- uint32_t maxActiveCount;
// Number of streams currently active for this profile. This is not the number of active clients
// (AudioTrack or AudioRecord) but the number of active HAL streams.
uint32_t curActiveCount;
- // Mute duration while changing device on this output profile.
- uint32_t recommendedMuteDurationMs = 0;
private:
DeviceVector mSupportedDevices; // supported devices: this input/output can be routed from/to
diff --git a/services/audiopolicy/common/managerdefinitions/src/DeviceDescriptor.cpp b/services/audiopolicy/common/managerdefinitions/src/DeviceDescriptor.cpp
index c9c8ede..4a4ee12 100644
--- a/services/audiopolicy/common/managerdefinitions/src/DeviceDescriptor.cpp
+++ b/services/audiopolicy/common/managerdefinitions/src/DeviceDescriptor.cpp
@@ -54,7 +54,7 @@
DeviceDescriptor::DeviceDescriptor(const AudioDeviceTypeAddr &deviceTypeAddr,
const std::string &tagName,
const FormatVector &encodedFormats) :
- DeviceDescriptorBase(deviceTypeAddr), mTagName(tagName), mEncodedFormats(encodedFormats),
+ DeviceDescriptorBase(deviceTypeAddr, encodedFormats), mTagName(tagName),
mDeclaredAddress(DeviceDescriptorBase::address())
{
mCurrentEncodedFormat = AUDIO_FORMAT_DEFAULT;
@@ -109,20 +109,6 @@
return (mCurrentEncodedFormat != AUDIO_FORMAT_DEFAULT);
}
-bool DeviceDescriptor::supportsFormat(audio_format_t format)
-{
- if (mEncodedFormats.empty()) {
- return true;
- }
-
- for (const auto& devFormat : mEncodedFormats) {
- if (devFormat == format) {
- return true;
- }
- }
- return false;
-}
-
status_t DeviceDescriptor::applyAudioPortConfig(const struct audio_port_config *config,
audio_port_config *backupConfig)
{
diff --git a/services/audiopolicy/managerdefault/AudioPolicyManager.cpp b/services/audiopolicy/managerdefault/AudioPolicyManager.cpp
index a46da41..6221c05 100644
--- a/services/audiopolicy/managerdefault/AudioPolicyManager.cpp
+++ b/services/audiopolicy/managerdefault/AudioPolicyManager.cpp
@@ -5457,7 +5457,8 @@
if (!desc->isDuplicated()) {
// exact match on device
if (device_distinguishes_on_address(deviceType) && desc->supportsDevice(device)
- && desc->containsSingleDeviceSupportingEncodedFormats(device)) {
+ && desc->containsSingleDeviceSupportingEncodedFormats(device)
+ && !mAvailableOutputDevices.containsAtLeastOne(desc->supportedDevices())) {
outputs.add(mOutputs.keyAt(i));
} else if (!mAvailableOutputDevices.containsAtLeastOne(desc->supportedDevices())) {
ALOGV("checkOutputsForDevice(): disconnecting adding output %d",
diff --git a/services/camera/libcameraservice/CameraService.cpp b/services/camera/libcameraservice/CameraService.cpp
index 54f6520..49d5cd8 100644
--- a/services/camera/libcameraservice/CameraService.cpp
+++ b/services/camera/libcameraservice/CameraService.cpp
@@ -1836,10 +1836,9 @@
// Set rotate-and-crop override behavior
if (mOverrideRotateAndCropMode != ANDROID_SCALER_ROTATE_AND_CROP_AUTO) {
client->setRotateAndCropOverride(mOverrideRotateAndCropMode);
- } else if ((effectiveApiLevel == API_2) &&
- CameraServiceProxyWrapper::isRotateAndCropOverrideNeeded(clientPackageName,
- orientation, facing) ) {
- client->setRotateAndCropOverride(ANDROID_SCALER_ROTATE_AND_CROP_90);
+ } else if (effectiveApiLevel == API_2) {
+ client->setRotateAndCropOverride(CameraServiceProxyWrapper::getRotateAndCropOverride(
+ clientPackageName, facing));
}
// Set camera muting behavior
@@ -2241,13 +2240,9 @@
if (current != nullptr) {
const auto basicClient = current->getValue();
if (basicClient.get() != nullptr && basicClient->canCastToApiClient(API_2)) {
- if (CameraServiceProxyWrapper::isRotateAndCropOverrideNeeded(
- basicClient->getPackageName(), basicClient->getCameraOrientation(),
- basicClient->getCameraFacing())) {
- basicClient->setRotateAndCropOverride(ANDROID_SCALER_ROTATE_AND_CROP_90);
- } else {
- basicClient->setRotateAndCropOverride(ANDROID_SCALER_ROTATE_AND_CROP_NONE);
- }
+ basicClient->setRotateAndCropOverride(
+ CameraServiceProxyWrapper::getRotateAndCropOverride(
+ basicClient->getPackageName(), basicClient->getCameraFacing()));
}
}
}
diff --git a/services/camera/libcameraservice/common/CameraProviderManager.cpp b/services/camera/libcameraservice/common/CameraProviderManager.cpp
index 9abb972..b10a4c4 100644
--- a/services/camera/libcameraservice/common/CameraProviderManager.cpp
+++ b/services/camera/libcameraservice/common/CameraProviderManager.cpp
@@ -1631,6 +1631,7 @@
case 3:
deviceInfo = initializeDeviceInfo<DeviceInfo3>(name, mProviderTagid,
id, minor);
+ deviceInfo->notifyDeviceStateChange(mDeviceState);
break;
default:
ALOGE("%s: Device %s: Unknown HIDL device HAL major version %d:", __FUNCTION__,
diff --git a/services/camera/libcameraservice/utils/CameraServiceProxyWrapper.cpp b/services/camera/libcameraservice/utils/CameraServiceProxyWrapper.cpp
index 76927c0..8d170f1 100644
--- a/services/camera/libcameraservice/utils/CameraServiceProxyWrapper.cpp
+++ b/services/camera/libcameraservice/utils/CameraServiceProxyWrapper.cpp
@@ -120,13 +120,11 @@
proxyBinder->pingForUserUpdate();
}
-bool CameraServiceProxyWrapper::isRotateAndCropOverrideNeeded(
- String16 packageName, int sensorOrientation, int lensFacing) {
+int CameraServiceProxyWrapper::getRotateAndCropOverride(String16 packageName, int lensFacing) {
sp<ICameraServiceProxy> proxyBinder = getCameraServiceProxy();
- if (proxyBinder == nullptr) return true;
+ if (proxyBinder == nullptr) return 0; // no override (ANDROID_SCALER_ROTATE_AND_CROP_NONE)
- bool ret = true;
- auto status = proxyBinder->isRotateAndCropOverrideNeeded(packageName, sensorOrientation,
- lensFacing, &ret);
+ int ret = 0;
+ auto status = proxyBinder->getRotateAndCropOverride(packageName, lensFacing, &ret);
if (!status.isOk()) {
ALOGE("%s: Failed during top activity orientation query: %s", __FUNCTION__,
status.exceptionMessage().c_str());
diff --git a/services/camera/libcameraservice/utils/CameraServiceProxyWrapper.h b/services/camera/libcameraservice/utils/CameraServiceProxyWrapper.h
index ad9db68..a51e568 100644
--- a/services/camera/libcameraservice/utils/CameraServiceProxyWrapper.h
+++ b/services/camera/libcameraservice/utils/CameraServiceProxyWrapper.h
@@ -91,9 +91,8 @@
// Ping camera service proxy for user update
static void pingCameraServiceProxy();
- // Check whether the current top activity needs a rotate and crop override.
- static bool isRotateAndCropOverrideNeeded(String16 packageName, int sensorOrientation,
- int lensFacing);
+ // Return the current top activity rotate and crop override.
+ static int getRotateAndCropOverride(String16 packageName, int lensFacing);
};
} // android