Merge "aaudio: return error code when sample rate rejected" into main
diff --git a/media/audio/aconfig/audio.aconfig b/media/audio/aconfig/audio.aconfig
index 388e5e0..0b2a5c4 100644
--- a/media/audio/aconfig/audio.aconfig
+++ b/media/audio/aconfig/audio.aconfig
@@ -37,6 +37,14 @@
}
flag {
+ name: "ringer_mode_affects_alarm"
+ namespace: "media_audio"
+ description:
+ "Support a configuration where ringer mode affects alarm stream"
+ bug: "312456558"
+}
+
+flag {
name: "spatializer_offload"
namespace: "media_audio"
description: "Enable spatializer offload"
diff --git a/media/codec2/components/dav1d/C2SoftDav1dDec.cpp b/media/codec2/components/dav1d/C2SoftDav1dDec.cpp
index 3f96cb3..10e4b79 100644
--- a/media/codec2/components/dav1d/C2SoftDav1dDec.cpp
+++ b/media/codec2/components/dav1d/C2SoftDav1dDec.cpp
@@ -42,6 +42,8 @@
constexpr size_t kMinInputBufferSize = 2 * 1024 * 1024;
+constexpr uint32_t kOutputDelay = 4;
+
class C2SoftDav1dDec::IntfImpl : public SimpleInterface<void>::BaseParams {
public:
explicit IntfImpl(const std::shared_ptr<C2ReflectorHelper>& helper)
@@ -239,6 +241,13 @@
.withFields({C2F(mPixelFormat, value).oneOf(pixelFormats)})
.withSetter((Setter<decltype(*mPixelFormat)>::StrictValueWithNoDeps))
.build());
+
+ addParameter(
+ DefineParam(mActualOutputDelay, C2_PARAMKEY_OUTPUT_DELAY)
+ .withDefault(new C2PortActualDelayTuning::output(kOutputDelay))
+ .withFields({C2F(mActualOutputDelay, value).inRange(0, kOutputDelay)})
+ .withSetter(Setter<decltype(*mActualOutputDelay)>::StrictValueWithNoDeps)
+ .build());
}
static C2R SizeSetter(bool mayBlock, const C2P<C2StreamPictureSizeInfo::output>& oldMe,
@@ -450,13 +459,6 @@
if (mDav1dCtx) {
Dav1dPicture p;
- while (mDecodedPictures.size() > 0) {
- p = mDecodedPictures.front();
- mDecodedPictures.pop_front();
-
- dav1d_picture_unref(&p);
- }
-
int res = 0;
while (true) {
memset(&p, 0, sizeof(p));
@@ -527,6 +529,8 @@
android::base::GetIntProperty(NUM_THREADS_DAV1D_PROPERTY, NUM_THREADS_DAV1D_DEFAULT);
if (numThreads > 0) lib_settings.n_threads = numThreads;
+ lib_settings.max_frame_delay = kOutputDelay;
+
int res = 0;
if ((res = dav1d_open(&mDav1dCtx, &lib_settings))) {
ALOGE("dav1d_open failed. status: %d.", res);
@@ -540,15 +544,6 @@
void C2SoftDav1dDec::destroyDecoder() {
if (mDav1dCtx) {
- Dav1dPicture p;
- while (mDecodedPictures.size() > 0) {
- memset(&p, 0, sizeof(p));
- p = mDecodedPictures.front();
- mDecodedPictures.pop_front();
-
- dav1d_picture_unref(&p);
- }
-
dav1d_close(&mDav1dCtx);
mDav1dCtx = nullptr;
mOutputBufferIndex = 0;
@@ -572,19 +567,24 @@
}
void C2SoftDav1dDec::finishWork(uint64_t index, const std::unique_ptr<C2Work>& work,
- const std::shared_ptr<C2GraphicBlock>& block) {
+ const std::shared_ptr<C2GraphicBlock>& block,
+ const Dav1dPicture &img) {
std::shared_ptr<C2Buffer> buffer = createGraphicBuffer(block, C2Rect(mWidth, mHeight));
{
IntfImpl::Lock lock = mIntf->lock();
buffer->setInfo(mIntf->getColorAspects_l());
}
- auto fillWork = [buffer, index](const std::unique_ptr<C2Work>& work) {
+
+ auto fillWork = [buffer, index, img, this](const std::unique_ptr<C2Work>& work) {
uint32_t flags = 0;
if ((work->input.flags & C2FrameData::FLAG_END_OF_STREAM) &&
(c2_cntr64_t(index) == work->input.ordinal.frameIndex)) {
flags |= C2FrameData::FLAG_END_OF_STREAM;
ALOGV("signalling end_of_stream.");
}
+ getHDRStaticParams(&img, work);
+ getHDR10PlusInfoData(&img, work);
+
work->worklets.front()->output.flags = (C2FrameData::flags_t)flags;
work->worklets.front()->output.buffers.clear();
work->worklets.front()->output.buffers.push_back(buffer);
@@ -598,10 +598,6 @@
}
}
-static void freeCallback(const uint8_t */*data*/, void */*cookie*/) {
- return;
-}
-
void C2SoftDav1dDec::process(const std::unique_ptr<C2Work>& work,
const std::shared_ptr<C2BlockPool>& pool) {
work->result = C2_OK;
@@ -654,17 +650,40 @@
seq.max_height, (long)in_frameIndex);
}
+ // insert OBU TD if it is not present.
+ // TODO: b/286852962
+ uint8_t obu_type = (bitstream[0] >> 3) & 0xf;
Dav1dData data;
- res = dav1d_data_wrap(&data, bitstream, inSize, freeCallback, nullptr);
- if (res != 0) {
- ALOGE("Decoder wrap error %s!", strerror(DAV1D_ERR(res)));
+ uint8_t* ptr = (obu_type == DAV1D_OBU_TD) ? dav1d_data_create(&data, inSize)
+ : dav1d_data_create(&data, inSize + 2);
+ if (ptr == nullptr) {
+ ALOGE("dav1d_data_create failed!");
i_ret = -1;
+
} else {
data.m.timestamp = in_frameIndex;
- // ALOGV("inSize=%ld, in_frameIndex=%ld, timestamp=%ld",
- // inSize, frameIndex, data.m.timestamp);
+ int new_Size;
+ if (obu_type != DAV1D_OBU_TD) {
+ new_Size = (int)(inSize + 2);
+
+ // OBU TD
+ ptr[0] = 0x12;
+ ptr[1] = 0;
+
+ memcpy(ptr + 2, bitstream, inSize);
+ } else {
+ new_Size = (int)(inSize);
+ // TODO: b/277797541 - investigate how to wrap this pointer in Dav1dData to
+ // avoid memcopy operations.
+ memcpy(ptr, bitstream, new_Size);
+ }
+
+ // ALOGV("memcpy(ptr,bitstream,inSize=%ld,new_Size=%d,in_frameIndex=%ld,timestamp=%ld,"
+ // "ptr[0,1,2,3,4]=%x,%x,%x,%x,%x)",
+ // inSize, new_Size, frameIndex, data.m.timestamp, ptr[0], ptr[1], ptr[2],
+ // ptr[3], ptr[4]);
// Dump the bitstream data (inputBuffer) if dumping is enabled.
#ifdef FILE_DUMP_ENABLE
@@ -672,6 +691,7 @@
#endif
bool b_draining = false;
+ int res;
do {
res = dav1d_send_data(mDav1dCtx, &data);
@@ -685,39 +705,9 @@
break;
}
- bool b_output_error = false;
+ outputBuffer(pool, work);
- do {
- Dav1dPicture img;
- memset(&img, 0, sizeof(img));
-
- res = dav1d_get_picture(mDav1dCtx, &img);
- if (res == 0) {
- mDecodedPictures.push_back(img);
-
- if (!end_of_stream) break;
- } else if (res == DAV1D_ERR(EAGAIN)) {
- /* the decoder needs more data to be able to output something.
- * if there is more data pending, continue the loop below or
- * otherwise break */
- if (data.sz != 0) res = 0;
- break;
- } else {
- ALOGE("warning! Decoder error %d!", res);
- b_output_error = true;
- break;
- }
- } while (res == 0);
-
- if (b_output_error) break;
-
- /* on drain, we must ignore the 1st EAGAIN */
- if (!b_draining && (res == DAV1D_ERR(EAGAIN) || res == 0) &&
- (end_of_stream)) {
- b_draining = true;
- res = 0;
- }
- } while (res == 0 && ((data.sz != 0) || b_draining));
+ } while (res == DAV1D_ERR(EAGAIN));
if (data.sz > 0) {
ALOGE("unexpected data.sz=%zu after dav1d_send_data", data.sz);
@@ -739,8 +729,6 @@
}
}
- (void)outputBuffer(pool, work);
-
if (end_of_stream) {
drainInternal(DRAIN_COMPONENT_WITH_EOS, pool, work);
mSignalledOutputEos = true;
@@ -749,7 +737,7 @@
}
}
-void C2SoftDav1dDec::getHDRStaticParams(Dav1dPicture* picture,
+void C2SoftDav1dDec::getHDRStaticParams(const Dav1dPicture* picture,
const std::unique_ptr<C2Work>& work) {
C2StreamHdrStaticMetadataInfo::output hdrStaticMetadataInfo{};
bool infoPresent = false;
@@ -813,7 +801,7 @@
}
}
-void C2SoftDav1dDec::getHDR10PlusInfoData(Dav1dPicture* picture,
+void C2SoftDav1dDec::getHDR10PlusInfoData(const Dav1dPicture* picture,
const std::unique_ptr<C2Work>& work) {
if (picture != nullptr) {
if (picture->itut_t35 != nullptr) {
@@ -853,7 +841,7 @@
}
}
-void C2SoftDav1dDec::getVuiParams(Dav1dPicture* picture) {
+void C2SoftDav1dDec::getVuiParams(const Dav1dPicture* picture) {
VuiColorAspects vuiColorAspects;
if (picture) {
@@ -924,26 +912,11 @@
memset(&img, 0, sizeof(img));
int res = 0;
- if (mDecodedPictures.size() > 0) {
- img = mDecodedPictures.front();
- mDecodedPictures.pop_front();
- // ALOGD("Got a picture(out_frameIndex=%ld,timestamp=%ld) from the deque for
- // outputBuffer.",img.m.timestamp,img.m.timestamp);
- } else {
- res = dav1d_get_picture(mDav1dCtx, &img);
- if (res == 0) {
- // ALOGD("Got a picture(out_frameIndex=%ld,timestamp=%ld) from dav1d for
- // outputBuffer.",img.m.timestamp,img.m.timestamp);
- } else {
- ALOGE("failed to get a picture from dav1d for outputBuffer.");
- }
- }
-
+ res = dav1d_get_picture(mDav1dCtx, &img);
if (res == DAV1D_ERR(EAGAIN)) {
- ALOGD("Not enough data to output a picture.");
+ ALOGV("Not enough data to output a picture.");
return false;
- }
- if (res != 0) {
+ } else if (res != 0) {
ALOGE("The AV1 decoder failed to get a picture (res=%s).", strerror(DAV1D_ERR(res)));
return false;
}
@@ -969,8 +942,6 @@
}
getVuiParams(&img);
- getHDRStaticParams(&img, work);
- getHDR10PlusInfoData(&img, work);
// out_frameIndex that the decoded picture returns from dav1d.
int64_t out_frameIndex = img.m.timestamp;
@@ -1156,9 +1127,8 @@
convFormat);
}
+ finishWork(out_frameIndex, work, std::move(block), img);
dav1d_picture_unref(&img);
-
- finishWork(out_frameIndex, work, std::move(block));
block = nullptr;
return true;
}
diff --git a/media/codec2/components/dav1d/C2SoftDav1dDec.h b/media/codec2/components/dav1d/C2SoftDav1dDec.h
index e3d2a93..5d2a725 100644
--- a/media/codec2/components/dav1d/C2SoftDav1dDec.h
+++ b/media/codec2/components/dav1d/C2SoftDav1dDec.h
@@ -58,7 +58,6 @@
int mOutputBufferIndex = 0;
Dav1dContext* mDav1dCtx = nullptr;
- std::deque<Dav1dPicture> mDecodedPictures;
// configurations used by component in process
// (TODO: keep this in intf but make them internal only)
@@ -101,12 +100,13 @@
nsecs_t mTimeEnd = 0; // Time at the end of decode()
bool initDecoder();
- void getHDRStaticParams(Dav1dPicture* picture, const std::unique_ptr<C2Work>& work);
- void getHDR10PlusInfoData(Dav1dPicture* picture, const std::unique_ptr<C2Work>& work);
- void getVuiParams(Dav1dPicture* picture);
+ void getHDRStaticParams(const Dav1dPicture* picture, const std::unique_ptr<C2Work>& work);
+ void getHDR10PlusInfoData(const Dav1dPicture* picture, const std::unique_ptr<C2Work>& work);
+ void getVuiParams(const Dav1dPicture* picture);
void destroyDecoder();
void finishWork(uint64_t index, const std::unique_ptr<C2Work>& work,
- const std::shared_ptr<C2GraphicBlock>& block);
+ const std::shared_ptr<C2GraphicBlock>& block,
+ const Dav1dPicture &img);
// Sets |work->result| and mSignalledError. Returns false.
void setError(const std::unique_ptr<C2Work>& work, c2_status_t error);
bool allocTmpFrameBuffer(size_t size);
diff --git a/media/codec2/core/include/C2Config.h b/media/codec2/core/include/C2Config.h
index 9a3399d..785cdf2 100644
--- a/media/codec2/core/include/C2Config.h
+++ b/media/codec2/core/include/C2Config.h
@@ -160,6 +160,10 @@
kParamIndexSecureMode,
kParamIndexEncryptedBuffer, // info-buffer, used with SM_READ_PROTECTED_WITH_ENCRYPTED
+ /* multiple access unit support */
+ kParamIndexLargeFrame,
+ kParamIndexAccessUnitInfos, // struct
+
// deprecated
kParamIndexDelayRequest = kParamIndexDelay | C2Param::CoreIndex::IS_REQUEST_FLAG,
@@ -1114,6 +1118,36 @@
constexpr char C2_PARAMKEY_INPUT_MAX_BUFFER_SIZE[] = "input.buffers.max-size";
constexpr char C2_PARAMKEY_OUTPUT_MAX_BUFFER_SIZE[] = "output.buffers.max-size";
+/**
+ * Large frame struct
+ *
+ * This structure describes the size limits for large frames (frames with multiple
+ * access units.)
+ */
+struct C2LargeFrameStruct {
+ uint32_t maxSize; ///< maximum size of the buffer in bytes
+ uint32_t thresholdSize; ///< size threshold for the buffer in bytes. The buffer is considered
+ ///< full as soon as its size reaches or surpasses this limit.
+ C2LargeFrameStruct()
+ : maxSize(0),
+ thresholdSize(0) {}
+
+ C2LargeFrameStruct(uint32_t maxSize_, uint32_t thresholdSize_)
+ : maxSize(maxSize_), thresholdSize(thresholdSize_) {}
+
+ DEFINE_AND_DESCRIBE_C2STRUCT(LargeFrame)
+ C2FIELD(maxSize, "max-size")
+ C2FIELD(thresholdSize, "threshold-size")
+};
+
+/**
+ * This tuning controls the size limits for large output frames for the component.
+ * The default value for this tuning is platform specific.
+ */
+typedef C2StreamParam<C2Tuning, C2LargeFrameStruct, kParamIndexLargeFrame>
+ C2LargeFrame;
+constexpr char C2_PARAMKEY_OUTPUT_LARGE_FRAME[] = "output.large-frame";
+
/* ---------------------------------------- misc. state ---------------------------------------- */
/**
@@ -2146,6 +2180,49 @@
C2StreamAudioFrameSizeInfo;
constexpr char C2_PARAMKEY_AUDIO_FRAME_SIZE[] = "raw.audio-frame-size";
+/**
+ * Information for an access unit in a large frame (containing multiple access units)
+ */
+struct C2AccessUnitInfosStruct {
+
+ inline C2AccessUnitInfosStruct() {
+ memset(this, 0, sizeof(*this));
+ }
+
+ inline C2AccessUnitInfosStruct(
+ uint32_t flags_,
+ uint32_t size_,
+ int64_t timestamp_)
+ : flags(flags_),
+ size(size_),
+ timestamp(timestamp_) { }
+
+ uint32_t flags; ///<flags for the access-unit
+ uint32_t size; ///<size of access-unit
+ int64_t timestamp; ///<timestamp in us for the access-unit
+
+ DEFINE_AND_DESCRIBE_C2STRUCT(AccessUnitInfos)
+ C2FIELD(flags, "flags")
+ C2FIELD(size, "size")
+ C2FIELD(timestamp, "timestamp")
+};
+
+/**
+ * Multiple access unit support (e.g large audio frames)
+ *
+ * If supported by a component, multiple access units may be contained
+ * in a single work item. For now this is only defined for linear buffers.
+ * The metadata indicates the access-unit boundaries in a single buffer.
+ * The boundary of each access-unit is marked by its size, immediately
+ * followed by the next access-unit.
+ */
+typedef C2StreamParam<C2Info, C2SimpleArrayStruct<C2AccessUnitInfosStruct>,
+ kParamIndexAccessUnitInfos>
+ C2AccessUnitInfos;
+
+constexpr char C2_PARAMKEY_INPUT_ACCESS_UNIT_INFOS[] = "input.access-unit-infos";
+constexpr char C2_PARAMKEY_OUTPUT_ACCESS_UNIT_INFOS[] = "output.access-unit-infos";
+
/* --------------------------------------- AAC components --------------------------------------- */
/**
diff --git a/media/codec2/core/include/C2ParamDef.h b/media/codec2/core/include/C2ParamDef.h
index d578820..86dfe65 100644
--- a/media/codec2/core/include/C2ParamDef.h
+++ b/media/codec2/core/include/C2ParamDef.h
@@ -404,6 +404,7 @@
/// Specialization for an input port parameter.
struct input : public T, public S,
public _C2StructCheck<S, ParamIndex, T::PARAM_KIND | T::Index::DIR_INPUT> {
+ using T::operator!=;
_C2_CORE_INDEX_OVERRIDE(ParamIndex)
/// Wrapper around base structure's constructor.
template<typename ...Args>
@@ -416,6 +417,7 @@
/// Specialization for an output port parameter.
struct output : public T, public S,
public _C2StructCheck<S, ParamIndex, T::PARAM_KIND | T::Index::DIR_OUTPUT> {
+ using T::operator!=;
_C2_CORE_INDEX_OVERRIDE(ParamIndex)
/// Wrapper around base structure's constructor.
template<typename ...Args>
@@ -470,6 +472,7 @@
/// Specialization for an input port parameter.
struct input : public T,
public _C2FlexStructCheck<S, ParamIndex, T::PARAM_KIND | T::Index::DIR_INPUT> {
+ using T::operator!=;
private:
/// Wrapper around base structure's constructor while also specifying port/direction.
template<typename ...Args>
@@ -486,6 +489,7 @@
/// Specialization for an output port parameter.
struct output : public T,
public _C2FlexStructCheck<S, ParamIndex, T::PARAM_KIND | T::Index::DIR_OUTPUT> {
+ using T::operator!=;
private:
/// Wrapper around base structure's constructor while also specifying port/direction.
template<typename ...Args>
@@ -549,6 +553,7 @@
struct input : public T, public S,
public _C2StructCheck<S, ParamIndex,
T::PARAM_KIND | T::Index::IS_STREAM_FLAG | T::Type::DIR_INPUT> {
+ using T::operator!=;
_C2_CORE_INDEX_OVERRIDE(ParamIndex)
/// Default constructor. Stream-ID is undefined.
@@ -567,6 +572,7 @@
struct output : public T, public S,
public _C2StructCheck<S, ParamIndex,
T::PARAM_KIND | T::Index::IS_STREAM_FLAG | T::Type::DIR_OUTPUT> {
+ using T::operator!=;
_C2_CORE_INDEX_OVERRIDE(ParamIndex)
/// Default constructor. Stream-ID is undefined.
@@ -634,6 +640,7 @@
struct input : public T,
public _C2FlexStructCheck<S, ParamIndex,
T::PARAM_KIND | T::Index::IS_STREAM_FLAG | T::Type::DIR_INPUT> {
+ using T::operator!=;
private:
/// Default constructor. Stream-ID is undefined.
inline input(size_t flexCount) : T(_Type::CalcSize(flexCount), input::PARAM_TYPE) { }
@@ -656,6 +663,7 @@
struct output : public T,
public _C2FlexStructCheck<S, ParamIndex,
T::PARAM_KIND | T::Index::IS_STREAM_FLAG | T::Type::DIR_OUTPUT> {
+ using T::operator!=;
private:
/// Default constructor. Stream-ID is undefined.
inline output(size_t flexCount) : T(_Type::CalcSize(flexCount), output::PARAM_TYPE) { }
diff --git a/media/libaaudio/include/aaudio/AAudio.h b/media/libaaudio/include/aaudio/AAudio.h
index c3b32e6..9d9b574 100644
--- a/media/libaaudio/include/aaudio/AAudio.h
+++ b/media/libaaudio/include/aaudio/AAudio.h
@@ -2011,10 +2011,20 @@
__INTRODUCED_IN(28);
/**
- * Passes back the time at which a particular frame was presented.
+ * Returns the time at which a particular frame was played on a speaker or headset,
+ * or was recorded on a microphone.
+ *
* This can be used to synchronize audio with video or MIDI.
* It can also be used to align a recorded stream with a playback stream.
*
+ * The framePosition is an index into the stream of audio data.
+ * The first frame played or recorded is at framePosition 0.
+ *
+ * These framePositions are the same units that you get from AAudioStream_getFramesRead()
+ * or AAudioStream_getFramesWritten().
+ * A "frame" is a set of audio sample values that are played simultaneously.
+ * For example, a stereo stream has two samples in a frame, left and right.
+ *
* Timestamps are only valid when the stream is in {@link #AAUDIO_STREAM_STATE_STARTED}.
* {@link #AAUDIO_ERROR_INVALID_STATE} will be returned if the stream is not started.
* Note that because requestStart() is asynchronous, timestamps will not be valid until
@@ -2030,8 +2040,8 @@
*
* @param stream reference provided by AAudioStreamBuilder_openStream()
* @param clockid CLOCK_MONOTONIC or CLOCK_BOOTTIME
- * @param framePosition pointer to a variable to receive the position
- * @param timeNanoseconds pointer to a variable to receive the time
+ * @param[out] framePosition pointer to a variable to receive the position
+ * @param[out] timeNanoseconds pointer to a variable to receive the time
* @return {@link #AAUDIO_OK} or a negative error
*/
AAUDIO_API aaudio_result_t AAudioStream_getTimestamp(AAudioStream* _Nonnull stream,
diff --git a/media/libaudioclient/AudioTrack.cpp b/media/libaudioclient/AudioTrack.cpp
index 58e0486..15b03cb 100644
--- a/media/libaudioclient/AudioTrack.cpp
+++ b/media/libaudioclient/AudioTrack.cpp
@@ -2855,7 +2855,9 @@
if (isOffloadedOrDirect_l() || mDoNotReconnect) {
// FIXME re-creation of offloaded and direct tracks is not yet implemented;
- // reconsider enabling for linear PCM encodings when position can be preserved.
+ // Disabled since (1) timestamp correction is not implemented for non-PCM and
+ // (2) We pre-empt existing direct tracks on resource constraint, so these tracks
+ // shouldn't reconnect.
result = DEAD_OBJECT;
return result;
}
diff --git a/media/libaudioclient/TEST_MAPPING b/media/libaudioclient/TEST_MAPPING
index 60bb4f0..bd508b3 100644
--- a/media/libaudioclient/TEST_MAPPING
+++ b/media/libaudioclient/TEST_MAPPING
@@ -41,10 +41,11 @@
}
]
}
- ],
- "postsubmit": [
- {
- "name": "audioeffect_analysis"
- }
]
+ // TODO(b/302036943): Enable once we make it pass with AIDL HAL on CF.
+ // "postsubmit": [
+ // {
+ // "name": "audioeffect_analysis"
+ // }
+ // ]
}
diff --git a/media/libaudioclient/tests/audiorecord_tests.cpp b/media/libaudioclient/tests/audiorecord_tests.cpp
index 8c63a6d..61edd4d 100644
--- a/media/libaudioclient/tests/audiorecord_tests.cpp
+++ b/media/libaudioclient/tests/audiorecord_tests.cpp
@@ -14,9 +14,13 @@
* limitations under the License.
*/
+#include <sstream>
+
//#define LOG_NDEBUG 0
#define LOG_TAG "AudioRecordTest"
+#include <android-base/logging.h>
+#include <binder/ProcessState.h>
#include <gtest/gtest.h>
#include "audio_test_utils.h"
@@ -25,32 +29,40 @@
class AudioRecordTest : public ::testing::Test {
public:
- virtual void SetUp() override {
+ void SetUp() override {
mAC = new AudioCapture(AUDIO_SOURCE_DEFAULT, 44100, AUDIO_FORMAT_PCM_16_BIT,
AUDIO_CHANNEL_IN_FRONT);
ASSERT_NE(nullptr, mAC);
ASSERT_EQ(OK, mAC->create()) << "record creation failed";
}
- virtual void TearDown() override {
+ void TearDown() override {
if (mAC) ASSERT_EQ(OK, mAC->stop());
}
sp<AudioCapture> mAC;
};
-class AudioRecordCreateTest
- : public ::testing::TestWithParam<
- std::tuple<uint32_t, audio_format_t, audio_channel_mask_t, audio_input_flags_t,
- audio_session_t, audio_source_t>> {
+using RecordCreateTestParam = std::tuple<uint32_t, audio_format_t, audio_channel_mask_t,
+ audio_input_flags_t, audio_session_t, audio_source_t>;
+enum {
+ RECORD_PARAM_SAMPLE_RATE,
+ RECORD_PARAM_FORMAT,
+ RECORD_PARAM_CHANNEL_MASK,
+ RECORD_PARAM_FLAGS,
+ RECORD_PARAM_SESSION_ID,
+ RECORD_PARAM_INPUT_SOURCE
+};
+
+class AudioRecordCreateTest : public ::testing::TestWithParam<RecordCreateTestParam> {
public:
AudioRecordCreateTest()
- : mSampleRate(std::get<0>(GetParam())),
- mFormat(std::get<1>(GetParam())),
- mChannelMask(std::get<2>(GetParam())),
- mFlags(std::get<3>(GetParam())),
- mSessionId(std::get<4>(GetParam())),
- mInputSource(std::get<5>(GetParam())){};
+ : mSampleRate(std::get<RECORD_PARAM_SAMPLE_RATE>(GetParam())),
+ mFormat(std::get<RECORD_PARAM_FORMAT>(GetParam())),
+ mChannelMask(std::get<RECORD_PARAM_CHANNEL_MASK>(GetParam())),
+ mFlags(std::get<RECORD_PARAM_FLAGS>(GetParam())),
+ mSessionId(std::get<RECORD_PARAM_SESSION_ID>(GetParam())),
+ mInputSource(std::get<RECORD_PARAM_INPUT_SOURCE>(GetParam())){};
const uint32_t mSampleRate;
const audio_format_t mFormat;
@@ -62,14 +74,14 @@
sp<AudioCapture> mAC;
- virtual void SetUp() override {
+ void SetUp() override {
mAC = new AudioCapture(mInputSource, mSampleRate, mFormat, mChannelMask, mFlags, mSessionId,
mTransferType);
ASSERT_NE(nullptr, mAC);
ASSERT_EQ(OK, mAC->create()) << "record creation failed";
}
- virtual void TearDown() override {
+ void TearDown() override {
if (mAC) ASSERT_EQ(OK, mAC->stop());
}
};
@@ -197,6 +209,18 @@
EXPECT_EQ(OK, mAC->audioProcess()) << "audioProcess failed";
}
+static std::string GetRecordTestName(const testing::TestParamInfo<RecordCreateTestParam>& info) {
+ const auto& p = info.param;
+ std::ostringstream s;
+ s << std::get<RECORD_PARAM_SAMPLE_RATE>(p) << "_"
+ << audio_format_to_string(std::get<RECORD_PARAM_FORMAT>(p)) << "__"
+ << audio_channel_mask_to_string(std::get<RECORD_PARAM_CHANNEL_MASK>(p)) << "__"
+ << "Flags_0x" << std::hex << std::get<RECORD_PARAM_FLAGS>(p) << std::dec << "__"
+ << "Session_" << std::get<RECORD_PARAM_SESSION_ID>(p) << "__"
+ << audio_source_to_string(std::get<RECORD_PARAM_INPUT_SOURCE>(p));
+ return s.str();
+}
+
// for port primary input
INSTANTIATE_TEST_SUITE_P(AudioRecordPrimaryInput, AudioRecordCreateTest,
::testing::Combine(::testing::Values(8000, 11025, 12000, 16000, 22050,
@@ -207,7 +231,8 @@
AUDIO_CHANNEL_IN_FRONT_BACK),
::testing::Values(AUDIO_INPUT_FLAG_NONE),
::testing::Values(AUDIO_SESSION_NONE),
- ::testing::Values(AUDIO_SOURCE_DEFAULT)));
+ ::testing::Values(AUDIO_SOURCE_DEFAULT)),
+ GetRecordTestName);
// for port fast input
INSTANTIATE_TEST_SUITE_P(AudioRecordFastInput, AudioRecordCreateTest,
@@ -219,7 +244,8 @@
AUDIO_CHANNEL_IN_FRONT_BACK),
::testing::Values(AUDIO_INPUT_FLAG_FAST),
::testing::Values(AUDIO_SESSION_NONE),
- ::testing::Values(AUDIO_SOURCE_DEFAULT)));
+ ::testing::Values(AUDIO_SOURCE_DEFAULT)),
+ GetRecordTestName);
// misc
INSTANTIATE_TEST_SUITE_P(AudioRecordMiscInput, AudioRecordCreateTest,
@@ -232,4 +258,35 @@
AUDIO_SOURCE_CAMCORDER,
AUDIO_SOURCE_VOICE_RECOGNITION,
AUDIO_SOURCE_VOICE_COMMUNICATION,
- AUDIO_SOURCE_UNPROCESSED)));
+ AUDIO_SOURCE_UNPROCESSED)),
+ GetRecordTestName);
+
+namespace {
+
+class TestExecutionTracer : public ::testing::EmptyTestEventListener {
+ public:
+ void OnTestStart(const ::testing::TestInfo& test_info) override {
+ TraceTestState("Started", test_info);
+ }
+ void OnTestEnd(const ::testing::TestInfo& test_info) override {
+ TraceTestState("Finished", test_info);
+ }
+ void OnTestPartResult(const ::testing::TestPartResult& result) override { LOG(INFO) << result; }
+
+ private:
+ static void TraceTestState(const std::string& state, const ::testing::TestInfo& test_info) {
+ LOG(INFO) << state << " " << test_info.test_suite_name() << "::" << test_info.name();
+ }
+};
+
+} // namespace
+
+int main(int argc, char** argv) {
+ ::testing::InitGoogleTest(&argc, argv);
+ ::testing::UnitTest::GetInstance()->listeners().Append(new TestExecutionTracer());
+ android::base::SetMinimumLogSeverity(::android::base::DEBUG);
+ // This is for death handlers instantiated by the framework code.
+ android::ProcessState::self()->setThreadPoolMaxThreadCount(1);
+ android::ProcessState::self()->startThreadPool();
+ return RUN_ALL_TESTS();
+}
diff --git a/media/libaudiohal/FactoryHal.cpp b/media/libaudiohal/FactoryHal.cpp
index f88915d..c414e19 100644
--- a/media/libaudiohal/FactoryHal.cpp
+++ b/media/libaudiohal/FactoryHal.cpp
@@ -50,9 +50,8 @@
* This list need to keep sync with AudioHalVersionInfo.VERSIONS in
* media/java/android/media/AudioHalVersionInfo.java.
*/
-static const std::array<AudioHalVersionInfo, 5> sAudioHALVersions = {
- // TODO: remove this comment to get AIDL
- // AudioHalVersionInfo(AudioHalVersionInfo::Type::AIDL, 1, 0),
+static const std::array<AudioHalVersionInfo, 6> sAudioHALVersions = {
+ AudioHalVersionInfo(AudioHalVersionInfo::Type::AIDL, 1, 0),
AudioHalVersionInfo(AudioHalVersionInfo::Type::HIDL, 7, 1),
AudioHalVersionInfo(AudioHalVersionInfo::Type::HIDL, 7, 0),
AudioHalVersionInfo(AudioHalVersionInfo::Type::HIDL, 6, 0),
diff --git a/media/libaudiohal/impl/DeviceHalAidl.cpp b/media/libaudiohal/impl/DeviceHalAidl.cpp
index 86fa63f..3aec2cb 100644
--- a/media/libaudiohal/impl/DeviceHalAidl.cpp
+++ b/media/libaudiohal/impl/DeviceHalAidl.cpp
@@ -914,15 +914,39 @@
}
status_t DeviceHalAidl::prepareToDisconnectExternalDevice(const struct audio_port_v7* port) {
- // There is not AIDL API defined for `prepareToDisconnectExternalDevice`.
- // Call `setConnectedState` instead.
- // TODO(b/279824103): call prepareToDisconnectExternalDevice when it is added.
- RETURN_STATUS_IF_ERROR(setConnectedState(port, false /*connected*/));
- std::lock_guard l(mLock);
- mDeviceDisconnectionNotified.insert(port->id);
- // Return that there was no error as otherwise the disconnection procedure will not be
- // considered complete for upper layers, and 'setConnectedState' will not be called again
- return OK;
+ ALOGD("%p %s::%s", this, getClassName().c_str(), __func__);
+ TIME_CHECK();
+ if (mModule == nullptr) return NO_INIT;
+ if (port == nullptr) {
+ return BAD_VALUE;
+ }
+ const bool isInput = VALUE_OR_RETURN_STATUS(
+ ::aidl::android::portDirection(port->role, port->type)) ==
+ ::aidl::android::AudioPortDirection::INPUT;
+ AudioPort aidlPort = VALUE_OR_RETURN_STATUS(
+ ::aidl::android::legacy2aidl_audio_port_v7_AudioPort(*port, isInput));
+ if (aidlPort.ext.getTag() != AudioPortExt::device) {
+ ALOGE("%s: provided port is not a device port (module %s): %s",
+ __func__, mInstance.c_str(), aidlPort.toString().c_str());
+ return BAD_VALUE;
+ }
+ status_t status = NO_ERROR;
+ {
+ std::lock_guard l(mLock);
+ status = mMapper.prepareToDisconnectExternalDevice(aidlPort);
+ }
+ if (status == UNKNOWN_TRANSACTION) {
+ // If there is no AIDL API defined for `prepareToDisconnectExternalDevice`.
+ // Call `setConnectedState` instead.
+ RETURN_STATUS_IF_ERROR(setConnectedState(port, false /*connected*/));
+ std::lock_guard l(mLock);
+ mDeviceDisconnectionNotified.insert(port->id);
+ // Return that there was no error as otherwise the disconnection procedure will not be
+ // considered complete for upper layers, and 'setConnectedState' will not be called again
+ return OK;
+ } else {
+ return status;
+ }
}
status_t DeviceHalAidl::setConnectedState(const struct audio_port_v7 *port, bool connected) {
@@ -936,11 +960,10 @@
std::lock_guard l(mLock);
if (mDeviceDisconnectionNotified.erase(port->id) > 0) {
// For device disconnection, APM will first call `prepareToDisconnectExternalDevice`
- // and then call `setConnectedState`. However, there is no API for
- // `prepareToDisconnectExternalDevice` yet. In that case, `setConnectedState` will be
- // called when calling `prepareToDisconnectExternalDevice`. Do not call to the HAL if
- // previous call is successful. Also remove the cache here to avoid a large cache after
- // a long run.
+ // and then call `setConnectedState`. If `prepareToDisconnectExternalDevice` doesn't
+ // exist, `setConnectedState` will be called when calling
+ // `prepareToDisconnectExternalDevice`. Do not call to the HAL if previous call is
+ // successful. Also remove the cache here to avoid a large cache after a long run.
return OK;
}
}
diff --git a/media/libaudiohal/impl/DevicesFactoryHalAidl.cpp b/media/libaudiohal/impl/DevicesFactoryHalAidl.cpp
index 96a3e60..3dbc14a 100644
--- a/media/libaudiohal/impl/DevicesFactoryHalAidl.cpp
+++ b/media/libaudiohal/impl/DevicesFactoryHalAidl.cpp
@@ -114,15 +114,6 @@
return OK;
}
-status_t DevicesFactoryHalAidl::getHalPids(std::vector<pid_t> *pids) {
- if (pids == nullptr) {
- return BAD_VALUE;
- }
- // Retrieval of HAL pids requires "list services" permission which is not granted
- // to the audio server. This job is performed by AudioService (in Java) instead.
- return PERMISSION_DENIED;
-}
-
status_t DevicesFactoryHalAidl::setCallbackOnce(sp<DevicesFactoryHalCallback> callback) {
// Dynamic registration of module instances is not supported. The functionality
// in the audio server which is related to this callback can be removed together
diff --git a/media/libaudiohal/impl/DevicesFactoryHalAidl.h b/media/libaudiohal/impl/DevicesFactoryHalAidl.h
index 97e3796..17bfe43 100644
--- a/media/libaudiohal/impl/DevicesFactoryHalAidl.h
+++ b/media/libaudiohal/impl/DevicesFactoryHalAidl.h
@@ -35,8 +35,6 @@
// necessary to release references to the returned object.
status_t openDevice(const char *name, sp<DeviceHalInterface> *device) override;
- status_t getHalPids(std::vector<pid_t> *pids) override;
-
status_t setCallbackOnce(sp<DevicesFactoryHalCallback> callback) override;
android::detail::AudioHalVersionInfo getHalVersion() const override;
diff --git a/media/libaudiohal/impl/DevicesFactoryHalHidl.cpp b/media/libaudiohal/impl/DevicesFactoryHalHidl.cpp
index eef60b5..1cac9da 100644
--- a/media/libaudiohal/impl/DevicesFactoryHalHidl.cpp
+++ b/media/libaudiohal/impl/DevicesFactoryHalHidl.cpp
@@ -163,29 +163,6 @@
return BAD_VALUE;
}
-status_t DevicesFactoryHalHidl::getHalPids(std::vector<pid_t> *pids) {
- std::set<pid_t> pidsSet;
- auto factories = copyDeviceFactories();
- for (const auto& factory : factories) {
- using ::android::hidl::base::V1_0::DebugInfo;
-
- DebugInfo debugInfo;
- auto ret = factory->getDebugInfo([&] (const auto &info) {
- debugInfo = info;
- });
- if (!ret.isOk()) {
- return INVALID_OPERATION;
- }
- if (debugInfo.pid == (int)IServiceManager::PidConstant::NO_PID) {
- continue;
- }
- pidsSet.insert(debugInfo.pid);
- }
-
- *pids = {pidsSet.begin(), pidsSet.end()};
- return NO_ERROR;
-}
-
status_t DevicesFactoryHalHidl::setCallbackOnce(sp<DevicesFactoryHalCallback> callback) {
ALOG_ASSERT(callback != nullptr);
bool needToCallCallback = false;
diff --git a/media/libaudiohal/impl/DevicesFactoryHalHidl.h b/media/libaudiohal/impl/DevicesFactoryHalHidl.h
index 3285af7..e38d86d 100644
--- a/media/libaudiohal/impl/DevicesFactoryHalHidl.h
+++ b/media/libaudiohal/impl/DevicesFactoryHalHidl.h
@@ -43,8 +43,6 @@
// necessary to release references to the returned object.
status_t openDevice(const char *name, sp<DeviceHalInterface> *device) override;
- status_t getHalPids(std::vector<pid_t> *pids) override;
-
status_t setCallbackOnce(sp<DevicesFactoryHalCallback> callback) override;
android::detail::AudioHalVersionInfo getHalVersion() const override;
diff --git a/media/libaudiohal/impl/Hal2AidlMapper.cpp b/media/libaudiohal/impl/Hal2AidlMapper.cpp
index 413a1f8..63ace8c 100644
--- a/media/libaudiohal/impl/Hal2AidlMapper.cpp
+++ b/media/libaudiohal/impl/Hal2AidlMapper.cpp
@@ -145,8 +145,30 @@
std::vector<int32_t>* ids, std::set<int32_t>* portIds) -> status_t {
for (const auto& s : configs) {
AudioPortConfig portConfig;
- RETURN_STATUS_IF_ERROR(setPortConfig(
- s, destinationPortIds, &portConfig, cleanups));
+ if (status_t status = setPortConfig(
+ s, destinationPortIds, &portConfig, cleanups); status != OK) {
+ if (s.ext.getTag() == AudioPortExt::mix) {
+ // See b/315528763. Despite that the framework knows the actual format of
+ // the mix port, it still uses the original format. Luckily, there is
+ // the I/O handle which can be used to find the mix port.
+ ALOGI("fillPortConfigs: retrying to find a mix port config with default "
+ "configuration");
+ if (auto it = findPortConfig(std::nullopt, s.flags,
+ s.ext.get<AudioPortExt::mix>().handle);
+ it != mPortConfigs.end()) {
+ portConfig = it->second;
+ } else {
+ const std::string flags = s.flags.has_value() ?
+ s.flags->toString() : "<unspecified>";
+ ALOGE("fillPortConfigs: existing port config for flags %s, handle %d "
+ "not found in module %s", flags.c_str(),
+ s.ext.get<AudioPortExt::mix>().handle, mInstance.c_str());
+ return BAD_VALUE;
+ }
+ } else {
+ return status;
+ }
+ }
LOG_ALWAYS_FATAL_IF(portConfig.id == 0,
"fillPortConfigs: initial config: %s, port config: %s",
s.toString().c_str(), portConfig.toString().c_str());
@@ -666,6 +688,14 @@
return false;
}
+status_t Hal2AidlMapper::prepareToDisconnectExternalDevice(const AudioPort& devicePort) {
+ auto portsIt = findPort(devicePort.ext.get<AudioPortExt::device>().device);
+ if (portsIt == mPorts.end()) {
+ return BAD_VALUE;
+ }
+ return statusTFromBinderStatus(mModule->prepareToDisconnectExternalDevice(portsIt->second.id));
+}
+
status_t Hal2AidlMapper::prepareToOpenStream(
int32_t ioHandle, const AudioDevice& device, const AudioIoFlags& flags,
AudioSource source, Cleanups* cleanups, AudioConfig* config,
@@ -675,7 +705,6 @@
flags.toString().c_str(), toString(source).c_str(),
config->toString().c_str(), mixPortConfig->toString().c_str());
resetUnusedPatchesAndPortConfigs();
- const bool isInput = flags.getTag() == AudioIoFlags::Tag::input;
const AudioConfig initialConfig = *config;
// Find / create AudioPortConfigs for the device port and the mix port,
// then find / create a patch between them, and open a stream on the mix port.
@@ -687,8 +716,38 @@
if (created) {
cleanups->add(&Hal2AidlMapper::resetPortConfig, devicePortConfig.id);
}
+ status_t status = prepareToOpenStreamHelper(ioHandle, devicePortConfig.portId,
+ devicePortConfig.id, flags, source, initialConfig, cleanups, config,
+ mixPortConfig, patch);
+ if (status != OK) {
+ // If using the client-provided config did not work out for establishing a mix port config
+ // or patching, try with the device port config. Note that in general device port config and
+ // mix port config are not required to be the same, however they must match if the HAL
+ // module can't perform audio stream conversions.
+ AudioConfig deviceConfig = initialConfig;
+ if (setConfigFromPortConfig(&deviceConfig, devicePortConfig)->base != initialConfig.base) {
+ ALOGD("%s: retrying with device port config: %s", __func__,
+ devicePortConfig.toString().c_str());
+ status = prepareToOpenStreamHelper(ioHandle, devicePortConfig.portId,
+ devicePortConfig.id, flags, source, initialConfig, cleanups,
+ &deviceConfig, mixPortConfig, patch);
+ if (status == OK) {
+ *config = deviceConfig;
+ }
+ }
+ }
+ return status;
+}
+
+status_t Hal2AidlMapper::prepareToOpenStreamHelper(
+ int32_t ioHandle, int32_t devicePortId, int32_t devicePortConfigId,
+ const AudioIoFlags& flags, AudioSource source, const AudioConfig& initialConfig,
+ Cleanups* cleanups, AudioConfig* config, AudioPortConfig* mixPortConfig,
+ AudioPatch* patch) {
+ const bool isInput = flags.getTag() == AudioIoFlags::Tag::input;
+ bool created = false;
RETURN_STATUS_IF_ERROR(findOrCreateMixPortConfig(*config, flags, ioHandle, source,
- std::set<int32_t>{devicePortConfig.portId}, mixPortConfig, &created));
+ std::set<int32_t>{devicePortId}, mixPortConfig, &created));
if (created) {
cleanups->add(&Hal2AidlMapper::resetPortConfig, mixPortConfig->id);
}
@@ -715,7 +774,7 @@
ALOGD("%s: retrying to find/create a mix port config using config %s", __func__,
config->toString().c_str());
RETURN_STATUS_IF_ERROR(findOrCreateMixPortConfig(*config, flags, ioHandle, source,
- std::set<int32_t>{devicePortConfig.portId}, mixPortConfig, &created));
+ std::set<int32_t>{devicePortId}, mixPortConfig, &created));
if (created) {
cleanups->add(&Hal2AidlMapper::resetPortConfig, mixPortConfig->id);
}
@@ -728,10 +787,10 @@
}
if (isInput) {
RETURN_STATUS_IF_ERROR(findOrCreatePatch(
- {devicePortConfig.id}, {mixPortConfig->id}, patch, &created));
+ {devicePortConfigId}, {mixPortConfig->id}, patch, &created));
} else {
RETURN_STATUS_IF_ERROR(findOrCreatePatch(
- {mixPortConfig->id}, {devicePortConfig.id}, patch, &created));
+ {mixPortConfig->id}, {devicePortConfigId}, patch, &created));
}
if (created) {
cleanups->add(&Hal2AidlMapper::resetPatch, patch->id);
diff --git a/media/libaudiohal/impl/Hal2AidlMapper.h b/media/libaudiohal/impl/Hal2AidlMapper.h
index ee55b22..21cfd5a 100644
--- a/media/libaudiohal/impl/Hal2AidlMapper.h
+++ b/media/libaudiohal/impl/Hal2AidlMapper.h
@@ -72,6 +72,8 @@
return ::aidl::android::convertContainer(mRoutes, routes, converter);
}
status_t initialize();
+ status_t prepareToDisconnectExternalDevice(
+ const ::aidl::android::media::audio::common::AudioPort& devicePort);
// If the resulting 'mixPortConfig->id' is 0, that means the stream was not created,
// and 'config' is a suggested config.
status_t prepareToOpenStream(
@@ -164,6 +166,14 @@
const std::optional<::aidl::android::media::audio::common::AudioIoFlags>& flags,
int32_t ioHandle);
bool isPortBeingHeld(int32_t portId);
+ status_t prepareToOpenStreamHelper(
+ int32_t ioHandle, int32_t devicePortId, int32_t devicePortConfigId,
+ const ::aidl::android::media::audio::common::AudioIoFlags& flags,
+ ::aidl::android::media::audio::common::AudioSource source,
+ const ::aidl::android::media::audio::common::AudioConfig& initialConfig,
+ Cleanups* cleanups, ::aidl::android::media::audio::common::AudioConfig* config,
+ ::aidl::android::media::audio::common::AudioPortConfig* mixPortConfig,
+ ::aidl::android::hardware::audio::core::AudioPatch* patch);
bool portConfigBelongsToPort(int32_t portConfigId, int32_t portId) {
auto it = mPortConfigs.find(portConfigId);
return it != mPortConfigs.end() && it->second.portId == portId;
diff --git a/media/libaudiohal/include/media/audiohal/DevicesFactoryHalInterface.h b/media/libaudiohal/include/media/audiohal/DevicesFactoryHalInterface.h
index 8397e9b..c34a671 100644
--- a/media/libaudiohal/include/media/audiohal/DevicesFactoryHalInterface.h
+++ b/media/libaudiohal/include/media/audiohal/DevicesFactoryHalInterface.h
@@ -42,8 +42,6 @@
// necessary to release references to the returned object.
virtual status_t openDevice(const char *name, sp<DeviceHalInterface> *device) = 0;
- virtual status_t getHalPids(std::vector<pid_t> *pids) = 0;
-
// Sets a DevicesFactoryHalCallback to notify the client.
// The callback can be only set once.
virtual status_t setCallbackOnce(sp<DevicesFactoryHalCallback> callback) = 0;
diff --git a/media/libeffects/downmix/Android.bp b/media/libeffects/downmix/Android.bp
index a5259aa..37633ae 100644
--- a/media/libeffects/downmix/Android.bp
+++ b/media/libeffects/downmix/Android.bp
@@ -56,9 +56,7 @@
":effectCommonFile",
],
defaults: [
- "aidlaudioservice_defaults",
- "latest_android_hardware_audio_effect_ndk_shared",
- "latest_android_media_audio_common_types_ndk_shared",
+ "aidlaudioeffectservice_defaults",
],
header_libs: [
"libaudioeffects",
diff --git a/media/libeffects/downmix/aidl/DownmixContext.cpp b/media/libeffects/downmix/aidl/DownmixContext.cpp
index 0e76d1d..13e0e5a 100644
--- a/media/libeffects/downmix/aidl/DownmixContext.cpp
+++ b/media/libeffects/downmix/aidl/DownmixContext.cpp
@@ -20,12 +20,60 @@
#include "DownmixContext.h"
-using aidl::android::hardware::audio::effect::IEffect;
using aidl::android::hardware::audio::common::getChannelCount;
+using aidl::android::hardware::audio::effect::IEffect;
using aidl::android::media::audio::common::AudioChannelLayout;
+using aidl::android::media::audio::common::AudioConfig;
namespace aidl::android::hardware::audio::effect {
+namespace {
+
+inline bool isChannelMaskValid(const AudioChannelLayout& channelMask) {
+ if (channelMask.getTag() != AudioChannelLayout::layoutMask) return false;
+ int chMask = channelMask.get<AudioChannelLayout::layoutMask>();
+ // check against unsupported channels (up to FCC_26)
+ constexpr uint32_t MAXIMUM_CHANNEL_MASK = AudioChannelLayout::LAYOUT_22POINT2 |
+ AudioChannelLayout::CHANNEL_FRONT_WIDE_LEFT |
+ AudioChannelLayout::CHANNEL_FRONT_WIDE_RIGHT;
+ if (chMask & ~MAXIMUM_CHANNEL_MASK) {
+ LOG(ERROR) << "Unsupported channels in " << (chMask & ~MAXIMUM_CHANNEL_MASK);
+ return false;
+ }
+ return true;
+}
+
+inline bool isStereoChannelMask(const AudioChannelLayout& channelMask) {
+ if (channelMask.getTag() != AudioChannelLayout::layoutMask) return false;
+
+ return channelMask.get<AudioChannelLayout::layoutMask>() == AudioChannelLayout::LAYOUT_STEREO;
+}
+
+} // namespace
+
+bool DownmixContext::validateCommonConfig(const Parameter::Common& common) {
+ const AudioConfig& input = common.input;
+ const AudioConfig& output = common.output;
+ if (input.base.sampleRate != output.base.sampleRate) {
+ LOG(ERROR) << __func__ << ": SRC not supported, input: " << input.toString()
+ << " output: " << output.toString();
+ return false;
+ }
+
+ if (!isStereoChannelMask(output.base.channelMask)) {
+ LOG(ERROR) << __func__ << ": output should be stereo, not "
+ << output.base.channelMask.toString();
+ return false;
+ }
+
+ if (!isChannelMaskValid(input.base.channelMask)) {
+ LOG(ERROR) << __func__ << ": invalid input channel, " << input.base.channelMask.toString();
+ return false;
+ }
+
+ return true;
+}
+
DownmixContext::DownmixContext(int statusDepth, const Parameter::Common& common)
: EffectContext(statusDepth, common) {
LOG(DEBUG) << __func__;
@@ -62,7 +110,7 @@
resetBuffer();
}
-IEffect::Status DownmixContext::lvmProcess(float* in, float* out, int samples) {
+IEffect::Status DownmixContext::downmixProcess(float* in, float* out, int samples) {
LOG(DEBUG) << __func__ << " in " << in << " out " << out << " sample " << samples;
IEffect::Status status = {EX_ILLEGAL_ARGUMENT, 0, 0};
@@ -122,18 +170,4 @@
}
}
-bool DownmixContext::isChannelMaskValid(AudioChannelLayout channelMask) {
- if (channelMask.getTag() != AudioChannelLayout::layoutMask) return false;
- int chMask = channelMask.get<AudioChannelLayout::layoutMask>();
- // check against unsupported channels (up to FCC_26)
- constexpr uint32_t MAXIMUM_CHANNEL_MASK = AudioChannelLayout::LAYOUT_22POINT2 |
- AudioChannelLayout::CHANNEL_FRONT_WIDE_LEFT |
- AudioChannelLayout::CHANNEL_FRONT_WIDE_RIGHT;
- if (chMask & ~MAXIMUM_CHANNEL_MASK) {
- LOG(ERROR) << "Unsupported channels in " << (chMask & ~MAXIMUM_CHANNEL_MASK);
- return false;
- }
- return true;
-}
-
} // namespace aidl::android::hardware::audio::effect
diff --git a/media/libeffects/downmix/aidl/DownmixContext.h b/media/libeffects/downmix/aidl/DownmixContext.h
index 1571c38..a381d7f 100644
--- a/media/libeffects/downmix/aidl/DownmixContext.h
+++ b/media/libeffects/downmix/aidl/DownmixContext.h
@@ -50,7 +50,9 @@
return RetCode::SUCCESS;
}
- IEffect::Status lvmProcess(float* in, float* out, int samples);
+ IEffect::Status downmixProcess(float* in, float* out, int samples);
+
+ static bool validateCommonConfig(const Parameter::Common& common);
private:
DownmixState mState;
@@ -60,7 +62,6 @@
// Common Params
void init_params(const Parameter::Common& common);
- bool isChannelMaskValid(::aidl::android::media::audio::common::AudioChannelLayout channelMask);
};
} // namespace aidl::android::hardware::audio::effect
diff --git a/media/libeffects/downmix/aidl/EffectDownmix.cpp b/media/libeffects/downmix/aidl/EffectDownmix.cpp
index 7068c5c..702a6f0 100644
--- a/media/libeffects/downmix/aidl/EffectDownmix.cpp
+++ b/media/libeffects/downmix/aidl/EffectDownmix.cpp
@@ -193,6 +193,8 @@
return mContext;
}
+ if (!DownmixContext::validateCommonConfig(common)) return nullptr;
+
mContext = std::make_shared<DownmixContext>(1 /* statusFmqDepth */, common);
return mContext;
}
@@ -210,7 +212,7 @@
LOG(ERROR) << __func__ << " nullContext";
return {EX_NULL_POINTER, 0, 0};
}
- return mContext->lvmProcess(in, out, sampleToProcess);
+ return mContext->downmixProcess(in, out, sampleToProcess);
}
} // namespace aidl::android::hardware::audio::effect
diff --git a/media/libeffects/dynamicsproc/Android.bp b/media/libeffects/dynamicsproc/Android.bp
index 7838117..9e154cf 100644
--- a/media/libeffects/dynamicsproc/Android.bp
+++ b/media/libeffects/dynamicsproc/Android.bp
@@ -86,9 +86,7 @@
],
defaults: [
- "aidlaudioservice_defaults",
- "latest_android_hardware_audio_effect_ndk_shared",
- "latest_android_media_audio_common_types_ndk_shared",
+ "aidlaudioeffectservice_defaults",
"dynamicsprocessingdefaults",
],
diff --git a/media/libeffects/dynamicsproc/aidl/DynamicsProcessingContext.cpp b/media/libeffects/dynamicsproc/aidl/DynamicsProcessingContext.cpp
index e5e5368..f3a3860 100644
--- a/media/libeffects/dynamicsproc/aidl/DynamicsProcessingContext.cpp
+++ b/media/libeffects/dynamicsproc/aidl/DynamicsProcessingContext.cpp
@@ -313,8 +313,8 @@
void DynamicsProcessingContext::init() {
std::lock_guard lg(mMutex);
mState = DYNAMICS_PROCESSING_STATE_INITIALIZED;
- mChannelCount = ::aidl::android::hardware::audio::common::getChannelCount(
- mCommon.input.base.channelMask);
+ mChannelCount = static_cast<int>(::aidl::android::hardware::audio::common::getChannelCount(
+ mCommon.input.base.channelMask));
}
dp_fx::DPChannel* DynamicsProcessingContext::getChannel_l(int channel) {
diff --git a/media/libeffects/dynamicsproc/aidl/DynamicsProcessingContext.h b/media/libeffects/dynamicsproc/aidl/DynamicsProcessingContext.h
index ced7f19..839c6dd 100644
--- a/media/libeffects/dynamicsproc/aidl/DynamicsProcessingContext.h
+++ b/media/libeffects/dynamicsproc/aidl/DynamicsProcessingContext.h
@@ -74,7 +74,7 @@
static constexpr float kPreferredProcessingDurationMs = 10.0f;
static constexpr int kBandCount = 5;
std::mutex mMutex;
- size_t mChannelCount GUARDED_BY(mMutex) = 0;
+ int mChannelCount GUARDED_BY(mMutex) = 0;
DynamicsProcessingState mState GUARDED_BY(mMutex) = DYNAMICS_PROCESSING_STATE_UNINITIALIZED;
std::unique_ptr<dp_fx::DPFrequency> mDpFreq GUARDED_BY(mMutex) = nullptr;
bool mEngineInited GUARDED_BY(mMutex) = false;
diff --git a/media/libeffects/hapticgenerator/Android.bp b/media/libeffects/hapticgenerator/Android.bp
index fc80211..cc19a80 100644
--- a/media/libeffects/hapticgenerator/Android.bp
+++ b/media/libeffects/hapticgenerator/Android.bp
@@ -75,9 +75,7 @@
],
defaults: [
- "aidlaudioservice_defaults",
- "latest_android_hardware_audio_effect_ndk_shared",
- "latest_android_media_audio_common_types_ndk_shared",
+ "aidlaudioeffectservice_defaults",
"hapticgeneratordefaults",
],
diff --git a/media/libeffects/hapticgenerator/aidl/HapticGeneratorContext.cpp b/media/libeffects/hapticgenerator/aidl/HapticGeneratorContext.cpp
index de44e05..354ee00 100644
--- a/media/libeffects/hapticgenerator/aidl/HapticGeneratorContext.cpp
+++ b/media/libeffects/hapticgenerator/aidl/HapticGeneratorContext.cpp
@@ -14,6 +14,7 @@
* limitations under the License.
*/
+#include <cstddef>
#define LOG_TAG "AHAL_HapticGeneratorContext"
#include <Utils.h>
@@ -162,8 +163,8 @@
}
// Construct input buffer according to haptic channel source
- for (size_t i = 0; i < mFrameCount; ++i) {
- for (size_t j = 0; j < mParams.mHapticChannelCount; ++j) {
+ for (int64_t i = 0; i < mFrameCount; ++i) {
+ for (int j = 0; j < mParams.mHapticChannelCount; ++j) {
mInputBuffer[i * mParams.mHapticChannelCount + j] =
in[i * mParams.mAudioChannelCount + mParams.mHapticChannelSource[j]];
}
@@ -180,8 +181,7 @@
// buffer, which contains haptic data at the end of the buffer, directly to sink buffer.
// In that case, copy haptic data to input buffer instead of output buffer.
// Note: this may not work with rpc/binder calls
- int offset = samples;
- for (int i = 0; i < hapticSampleCount; ++i) {
+ for (size_t i = 0; i < hapticSampleCount; ++i) {
in[samples + i] = hapticOutBuffer[i];
}
return {STATUS_OK, samples, static_cast<int32_t>(samples + hapticSampleCount)};
@@ -199,7 +199,7 @@
mParams.mHapticChannelCount = ::aidl::android::hardware::audio::common::getChannelCount(
outputChMask, media::audio::common::AudioChannelLayout::LAYOUT_HAPTIC_AB);
LOG_ALWAYS_FATAL_IF(mParams.mHapticChannelCount > 2, "haptic channel count is too large");
- for (size_t i = 0; i < mParams.mHapticChannelCount; ++i) {
+ for (int i = 0; i < mParams.mHapticChannelCount; ++i) {
// By default, use the first audio channel to generate haptic channels.
mParams.mHapticChannelSource[i] = 0;
}
diff --git a/media/libeffects/hapticgenerator/aidl/HapticGeneratorContext.h b/media/libeffects/hapticgenerator/aidl/HapticGeneratorContext.h
index a0a0a4c..26e69e4 100644
--- a/media/libeffects/hapticgenerator/aidl/HapticGeneratorContext.h
+++ b/media/libeffects/hapticgenerator/aidl/HapticGeneratorContext.h
@@ -92,7 +92,7 @@
HapticGeneratorState mState;
HapticGeneratorParam mParams GUARDED_BY(mMutex);
int mSampleRate;
- int mFrameCount = 0;
+ int64_t mFrameCount = 0;
// A cache for all shared pointers of the HapticGenerator
struct HapticGeneratorProcessorsRecord mProcessorsRecord;
diff --git a/media/libeffects/loudness/Android.bp b/media/libeffects/loudness/Android.bp
index 7acba11..05bbec3 100644
--- a/media/libeffects/loudness/Android.bp
+++ b/media/libeffects/loudness/Android.bp
@@ -54,9 +54,7 @@
":effectCommonFile",
],
defaults: [
- "aidlaudioservice_defaults",
- "latest_android_hardware_audio_effect_ndk_shared",
- "latest_android_media_audio_common_types_ndk_shared",
+ "aidlaudioeffectservice_defaults",
],
header_libs: [
"libaudioeffects",
diff --git a/media/libeffects/lvm/wrapper/Aidl/BundleContext.cpp b/media/libeffects/lvm/wrapper/Aidl/BundleContext.cpp
index a163f4b..71bb2ef 100644
--- a/media/libeffects/lvm/wrapper/Aidl/BundleContext.cpp
+++ b/media/libeffects/lvm/wrapper/Aidl/BundleContext.cpp
@@ -227,7 +227,7 @@
bool viEnabled = params.VirtualizerOperatingMode == LVM_MODE_ON;
if (eqEnabled) {
- for (int i = 0; i < lvm::MAX_NUM_BANDS; i++) {
+ for (size_t i = 0; i < lvm::MAX_NUM_BANDS; i++) {
float bandFactor = mBandGainMdB[i] / 1500.0;
float bandCoefficient = lvm::kBandEnergyCoefficient[i];
float bandEnergy = bandFactor * bandCoefficient * bandCoefficient;
@@ -236,7 +236,7 @@
// cross EQ coefficients
float bandFactorSum = 0;
- for (int i = 0; i < lvm::MAX_NUM_BANDS - 1; i++) {
+ for (size_t i = 0; i < lvm::MAX_NUM_BANDS - 1; i++) {
float bandFactor1 = mBandGainMdB[i] / 1500.0;
float bandFactor2 = mBandGainMdB[i + 1] / 1500.0;
@@ -259,7 +259,7 @@
energyContribution += boostFactor * boostCoefficient * boostCoefficient;
if (eqEnabled) {
- for (int i = 0; i < lvm::MAX_NUM_BANDS; i++) {
+ for (size_t i = 0; i < lvm::MAX_NUM_BANDS; i++) {
float bandFactor = mBandGainMdB[i] / 1500.0;
float bandCrossCoefficient = lvm::kBassBoostEnergyCrossCoefficient[i];
float bandEnergy = boostFactor * bandFactor * bandCrossCoefficient;
@@ -421,7 +421,6 @@
RetCode BundleContext::setVolumeStereo(const Parameter::VolumeStereo& volume) {
LVM_ControlParams_t params;
- LVM_ReturnStatus_en status = LVM_SUCCESS;
// Convert volume to dB
float leftdB = VolToDb(volume.left);
@@ -512,7 +511,7 @@
const auto [min, max] =
std::minmax_element(bandLevels.begin(), bandLevels.end(),
[](const auto& a, const auto& b) { return a.index < b.index; });
- return min->index >= 0 && max->index < lvm::MAX_NUM_BANDS;
+ return min->index >= 0 && static_cast<size_t>(max->index) < lvm::MAX_NUM_BANDS;
}
RetCode BundleContext::updateControlParameter(const std::vector<Equalizer::BandLevel>& bandLevels) {
diff --git a/media/libeffects/lvm/wrapper/Android.bp b/media/libeffects/lvm/wrapper/Android.bp
index fa300d2..5fb3966 100644
--- a/media/libeffects/lvm/wrapper/Android.bp
+++ b/media/libeffects/lvm/wrapper/Android.bp
@@ -110,9 +110,7 @@
],
static_libs: ["libmusicbundle"],
defaults: [
- "aidlaudioservice_defaults",
- "latest_android_hardware_audio_effect_ndk_shared",
- "latest_android_media_audio_common_types_ndk_shared",
+ "aidlaudioeffectservice_defaults",
],
local_include_dirs: ["Aidl"],
header_libs: [
@@ -140,9 +138,7 @@
],
static_libs: ["libreverb"],
defaults: [
- "aidlaudioservice_defaults",
- "latest_android_hardware_audio_effect_ndk_shared",
- "latest_android_media_audio_common_types_ndk_shared",
+ "aidlaudioeffectservice_defaults",
],
local_include_dirs: ["Reverb/aidl"],
header_libs: [
diff --git a/media/libeffects/lvm/wrapper/Reverb/aidl/ReverbContext.cpp b/media/libeffects/lvm/wrapper/Reverb/aidl/ReverbContext.cpp
index 79e67f2..468b268 100644
--- a/media/libeffects/lvm/wrapper/Reverb/aidl/ReverbContext.cpp
+++ b/media/libeffects/lvm/wrapper/Reverb/aidl/ReverbContext.cpp
@@ -329,7 +329,7 @@
*/
int ReverbContext::convertLevel(int level) {
- for (int i = 0; i < kLevelMapping.size(); i++) {
+ for (std::size_t i = 0; i < kLevelMapping.size(); i++) {
if (level <= kLevelMapping[i]) {
return i;
}
diff --git a/media/libeffects/preprocessing/Android.bp b/media/libeffects/preprocessing/Android.bp
index d018c47..564eb36 100644
--- a/media/libeffects/preprocessing/Android.bp
+++ b/media/libeffects/preprocessing/Android.bp
@@ -67,9 +67,7 @@
":effectCommonFile",
],
defaults: [
- "aidlaudioservice_defaults",
- "latest_android_hardware_audio_effect_ndk_shared",
- "latest_android_media_audio_common_types_ndk_shared",
+ "aidlaudioeffectservice_defaults",
],
local_include_dirs: ["aidl"],
shared_libs: [
diff --git a/media/libeffects/visualizer/Android.bp b/media/libeffects/visualizer/Android.bp
index cf782f7..a8b665b 100644
--- a/media/libeffects/visualizer/Android.bp
+++ b/media/libeffects/visualizer/Android.bp
@@ -60,8 +60,6 @@
],
defaults: [
"aidlaudioeffectservice_defaults",
- "latest_android_hardware_audio_effect_ndk_shared",
- "latest_android_media_audio_common_types_ndk_shared",
"visualizer_defaults",
],
cflags: [
diff --git a/media/module/codecs/mp3dec/src/pvmp3_framedecoder.cpp b/media/module/codecs/mp3dec/src/pvmp3_framedecoder.cpp
index e8fea73..fb9f1e9 100644
--- a/media/module/codecs/mp3dec/src/pvmp3_framedecoder.cpp
+++ b/media/module/codecs/mp3dec/src/pvmp3_framedecoder.cpp
@@ -310,26 +310,31 @@
}
// Check if the input is valid by checking if it contains a sync word
-static bool isInputValid(uint8 *buf, uint32 inSize)
+static ERROR_CODE validate_input(uint8 *buf, uint32 inSize)
{
- // Buffer needs to contain at least 4 bytes which is the size of
- // the header
- if (inSize < 4) return false;
+ /*
+ * Verify that at least the header is complete
+ * Note that SYNC_WORD_LNGTH is in unit of bits, but inSize is in unit of bytes.
+ */
+ if (inSize < ((SYNC_WORD_LNGTH + 21) >> 3))
+ {
+ return NO_ENOUGH_MAIN_DATA_ERROR;
+ }
size_t totalInSize = 0;
size_t frameSize = 0;
while (totalInSize <= (inSize - 4)) {
if (!parseHeader(U32_AT(buf + totalInSize), &frameSize)) {
- return false;
+ return SYNCH_LOST_ERROR;
}
// Buffer needs to be large enough to include complete frame
if ((frameSize > inSize) || (totalInSize > (inSize - frameSize))) {
- return false;
+ return SYNCH_LOST_ERROR;
}
totalInSize += frameSize;
}
- return true;
+ return NO_DECODING_ERROR;
}
ERROR_CODE pvmp3_framedecoder(tPVMP3DecoderExternal *pExt,
@@ -348,10 +353,11 @@
mp3Header info_data;
mp3Header *info = &info_data;
- if (!isInputValid(pExt->pInputBuffer, pExt->inputBufferCurrentLength))
+ errorCode = validate_input(pExt->pInputBuffer, pExt->inputBufferCurrentLength);
+ if (errorCode != NO_DECODING_ERROR)
{
pExt->outputFrameSize = 0;
- return SYNCH_LOST_ERROR;
+ return errorCode;
}
pVars->inputStream.pBuffer = pExt->pInputBuffer;
diff --git a/media/ndk/libmediandk.map.txt b/media/ndk/libmediandk.map.txt
index 4f045fd..262c169 100644
--- a/media/ndk/libmediandk.map.txt
+++ b/media/ndk/libmediandk.map.txt
@@ -185,11 +185,11 @@
AMediaCodecCryptoInfo_setPattern; # introduced=24
AMediaCodec_configure;
AMediaCodec_createCodecByName;
- AMediaCodec_createCodecByNameForClient; # systemapi # introduced=31
+ AMediaCodec_createCodecByNameForClient; # systemapi introduced=31
AMediaCodec_createDecoderByType;
- AMediaCodec_createDecoderByTypeForClient; # systemapi # introduced=31
+ AMediaCodec_createDecoderByTypeForClient; # systemapi introduced=31
AMediaCodec_createEncoderByType;
- AMediaCodec_createEncoderByTypeForClient; # systemapi # introduced=31
+ AMediaCodec_createEncoderByTypeForClient; # systemapi introduced=31
AMediaCodec_delete;
AMediaCodec_dequeueInputBuffer;
AMediaCodec_dequeueOutputBuffer;
diff --git a/services/audioflinger/AudioFlinger.cpp b/services/audioflinger/AudioFlinger.cpp
index cb342ee..f8960cf 100644
--- a/services/audioflinger/AudioFlinger.cpp
+++ b/services/audioflinger/AudioFlinger.cpp
@@ -286,9 +286,6 @@
BatteryNotifier::getInstance().noteResetAudio();
mMediaLogNotifier->run("MediaLogNotifier");
- std::vector<pid_t> halPids;
- mDevicesFactoryHal->getHalPids(&halPids);
- mediautils::TimeCheck::setAudioHalPids(halPids);
// Notify that we have started (also called when audioserver service restarts)
mediametrics::LogItem(mMetricsId)
diff --git a/services/audioflinger/Threads.cpp b/services/audioflinger/Threads.cpp
index 4d699b2..cc10c28 100644
--- a/services/audioflinger/Threads.cpp
+++ b/services/audioflinger/Threads.cpp
@@ -185,7 +185,7 @@
// Minimum amount of time between checking to see if the timestamp is advancing
// for underrun detection. If we check too frequently, we may not detect a
// timestamp update and will falsely detect underrun.
-static const nsecs_t kMinimumTimeBetweenTimestampChecksNs = 150 /* ms */ * 1000;
+static constexpr nsecs_t kMinimumTimeBetweenTimestampChecksNs = 150 /* ms */ * 1'000'000;
// The universal constant for ubiquitous 20ms value. The value of 20ms seems to provide a good
// balance between power consumption and latency, and allows threads to be scheduled reliably
diff --git a/services/audioflinger/sounddose/SoundDoseManager.cpp b/services/audioflinger/sounddose/SoundDoseManager.cpp
index 197cff9..1c98e64 100644
--- a/services/audioflinger/sounddose/SoundDoseManager.cpp
+++ b/services/audioflinger/sounddose/SoundDoseManager.cpp
@@ -53,7 +53,7 @@
size_t channelCount, audio_format_t format) {
const std::lock_guard _l(mLock);
- if (mHalSoundDose.size() > 0 && mEnabledCsd) {
+ if (!mUseFrameworkMel && mHalSoundDose.size() > 0 && mEnabledCsd) {
ALOGD("%s: using HAL MEL computation, no MelProcessor needed.", __func__);
return nullptr;
}
@@ -143,7 +143,7 @@
ALOGV("%s", __func__);
const std::lock_guard _l(mLock);
- if (mHalSoundDose.size() > 0) {
+ if (!mUseFrameworkMel && mHalSoundDose.size() > 0) {
bool success = true;
for (auto& halSoundDose : mHalSoundDose) {
// using the HAL sound dose interface
@@ -549,9 +549,6 @@
}
void SoundDoseManager::setUseFrameworkMel(bool useFrameworkMel) {
- // invalidate any HAL sound dose interface used
- resetHalSoundDoseInterfaces();
-
const std::lock_guard _l(mLock);
mUseFrameworkMel = useFrameworkMel;
}
@@ -582,7 +579,7 @@
bool SoundDoseManager::useHalSoundDose() const {
const std::lock_guard _l(mLock);
- return mHalSoundDose.size() > 0;
+ return !mUseFrameworkMel && mHalSoundDose.size() > 0;
}
void SoundDoseManager::resetSoundDose() {
diff --git a/services/audiopolicy/managerdefault/Android.bp b/services/audiopolicy/managerdefault/Android.bp
index b4f71ae..6a37b4e 100644
--- a/services/audiopolicy/managerdefault/Android.bp
+++ b/services/audiopolicy/managerdefault/Android.bp
@@ -43,6 +43,8 @@
"framework-permission-aidl-cpp",
"libaudioclient_aidl_conversion",
"audioclient-types-aidl-cpp",
+ // Flag support
+ "com.android.media.audioserver-aconfig-cc"
],
header_libs: [
diff --git a/services/audiopolicy/managerdefault/AudioPolicyManager.cpp b/services/audiopolicy/managerdefault/AudioPolicyManager.cpp
index 2e13178..81369e2 100644
--- a/services/audiopolicy/managerdefault/AudioPolicyManager.cpp
+++ b/services/audiopolicy/managerdefault/AudioPolicyManager.cpp
@@ -42,6 +42,7 @@
#include <Serializer.h>
#include <android/media/audio/common/AudioPort.h>
#include <com_android_media_audio.h>
+#include <com_android_media_audioserver.h>
#include <cutils/bitops.h>
#include <cutils/properties.h>
#include <media/AudioParameter.h>
@@ -1509,11 +1510,30 @@
}
if (!profile->canOpenNewIo()) {
+ if (!com::android::media::audioserver::direct_track_reprioritization()) {
+ return NAME_NOT_FOUND;
+ } else if ((profile->getFlags() & AUDIO_OUTPUT_FLAG_MMAP_NOIRQ) != 0) {
+ // MMAP gracefully handles lack of an exclusive track resource by mixing
+ // above the audio framework. For AAudio to know that the limit is reached,
+ // return an error.
+ return NAME_NOT_FOUND;
+ } else {
+ // Close outputs on this profile, if available, to free resources for this request
+ for (int i = 0; i < mOutputs.size() && !profile->canOpenNewIo(); i++) {
+ const auto desc = mOutputs.valueAt(i);
+ if (desc->mProfile == profile) {
+ closeOutput(desc->mIoHandle);
+ }
+ }
+ }
+ }
+
+ // Unable to close streams to find free resources for this request
+ if (!profile->canOpenNewIo()) {
return NAME_NOT_FOUND;
}
- sp<SwAudioOutputDescriptor> outputDesc =
- new SwAudioOutputDescriptor(profile, mpClientInterface);
+ auto outputDesc = sp<SwAudioOutputDescriptor>::make(profile, mpClientInterface);
// An MSD patch may be using the only output stream that can service this request. Release
// all MSD patches to prioritize this request over any active output on MSD.
diff --git a/services/camera/libcameraservice/common/hidl/HidlProviderInfo.cpp b/services/camera/libcameraservice/common/hidl/HidlProviderInfo.cpp
index d2643c1..065f0c5 100644
--- a/services/camera/libcameraservice/common/hidl/HidlProviderInfo.cpp
+++ b/services/camera/libcameraservice/common/hidl/HidlProviderInfo.cpp
@@ -692,6 +692,14 @@
mHasFlashUnit = false;
}
+ if (flags::feature_combination_query()) {
+ res = addSessionConfigQueryVersionTag();
+ if (OK != res) {
+ ALOGE("%s: Unable to add sessionConfigurationQueryVersion tag: %s (%d)",
+ __FUNCTION__, strerror(-res), res);
+ }
+ }
+
camera_metadata_entry entry =
mCameraCharacteristics.find(ANDROID_FLASH_INFO_STRENGTH_DEFAULT_LEVEL);
if (entry.count == 1) {
diff --git a/services/camera/virtualcamera/VirtualCameraRenderThread.cc b/services/camera/virtualcamera/VirtualCameraRenderThread.cc
index 79c91ef..25fe61b 100644
--- a/services/camera/virtualcamera/VirtualCameraRenderThread.cc
+++ b/services/camera/virtualcamera/VirtualCameraRenderThread.cc
@@ -542,8 +542,12 @@
} else {
const bool renderSuccess =
isYuvFormat(static_cast<PixelFormat>(textureBuffer->getPixelFormat()))
- ? mEglTextureYuvProgram->draw(mEglSurfaceTexture->updateTexture())
- : mEglTextureRgbProgram->draw(mEglSurfaceTexture->updateTexture());
+ ? mEglTextureYuvProgram->draw(
+ mEglSurfaceTexture->getTextureId(),
+ mEglSurfaceTexture->getTransformMatrix())
+ : mEglTextureRgbProgram->draw(
+ mEglSurfaceTexture->getTextureId(),
+ mEglSurfaceTexture->getTransformMatrix());
if (!renderSuccess) {
ALOGE("%s: Failed to render texture", __func__);
return cameraStatus(Status::INTERNAL_ERROR);
diff --git a/services/camera/virtualcamera/util/EglProgram.cc b/services/camera/virtualcamera/util/EglProgram.cc
index 510fd33..7554a67 100644
--- a/services/camera/virtualcamera/util/EglProgram.cc
+++ b/services/camera/virtualcamera/util/EglProgram.cc
@@ -68,12 +68,13 @@
})";
constexpr char kExternalTextureVertexShader[] = R"(#version 300 es
+ uniform mat4 aTextureTransformMatrix; // Transform matrix given by surface texture.
in vec4 aPosition;
in vec2 aTextureCoord;
out vec2 vTextureCoord;
void main() {
gl_Position = aPosition;
- vTextureCoord = aTextureCoord;
+ vTextureCoord = (aTextureTransformMatrix * vec4(aTextureCoord, 0.0, 1.0)).xy;
})";
constexpr char kExternalYuvTextureFragmentShader[] = R"(#version 300 es
@@ -100,10 +101,12 @@
})";
constexpr int kCoordsPerVertex = 3;
-constexpr std::array<float, 12> kSquareCoords{-1.f, 1.0f, 0.0f, // top left
- -1.f, -1.f, 0.0f, // bottom left
- 1.0f, -1.f, 0.0f, // bottom right
- 1.0f, 1.0f, 0.0f}; // top right
+
+constexpr std::array<float, 12> kSquareCoords{
+ -1.f, -1.0f, 0.0f, // top left
+ -1.f, 1.f, 0.0f, // bottom left
+ 1.0f, 1.f, 0.0f, // bottom right
+ 1.0f, -1.0f, 0.0f}; // top right
constexpr std::array<float, 8> kTextureCoords{0.0f, 1.0f, // top left
0.0f, 0.0f, // bottom left
@@ -265,32 +268,50 @@
} else {
ALOGE("External texture EGL shader program initialization failed.");
}
+
+ // Lookup and cache handles to uniforms & attributes.
+ mPositionHandle = glGetAttribLocation(mProgram, "aPosition");
+ mTextureCoordHandle = glGetAttribLocation(mProgram, "aTextureCoord");
+ mTransformMatrixHandle =
+ glGetUniformLocation(mProgram, "aTextureTransformMatrix");
+ mTextureHandle = glGetUniformLocation(mProgram, "uTexture");
+
+ // Pass vertex array to the shader.
+ glEnableVertexAttribArray(mPositionHandle);
+ glVertexAttribPointer(mPositionHandle, kCoordsPerVertex, GL_FLOAT, false,
+ kSquareCoords.size(), kSquareCoords.data());
+
+ // Pass texture coordinates corresponding to vertex array to the shader.
+ glEnableVertexAttribArray(mTextureCoordHandle);
+ glVertexAttribPointer(mTextureCoordHandle, 2, GL_FLOAT, false,
+ kTextureCoords.size(), kTextureCoords.data());
}
-bool EglTextureProgram::draw(GLuint textureId) {
+EglTextureProgram::~EglTextureProgram() {
+ if (mPositionHandle != -1) {
+ glDisableVertexAttribArray(mPositionHandle);
+ }
+ if (mTextureCoordHandle != -1) {
+ glDisableVertexAttribArray(mTextureCoordHandle);
+ }
+}
+
+bool EglTextureProgram::draw(GLuint textureId,
+ const std::array<float, 16>& transformMatrix) {
// Load compiled shader.
glUseProgram(mProgram);
if (checkEglError("glUseProgram")) {
return false;
}
- // Pass vertex array to the shader.
- int positionHandle = glGetAttribLocation(mProgram, "aPosition");
- glEnableVertexAttribArray(positionHandle);
- glVertexAttribPointer(positionHandle, kCoordsPerVertex, GL_FLOAT, false,
- kSquareCoords.size(), kSquareCoords.data());
-
- // Pass texture coordinates corresponding to vertex array to the shader.
- int textureCoordHandle = glGetAttribLocation(mProgram, "aTextureCoord");
- glEnableVertexAttribArray(textureCoordHandle);
- glVertexAttribPointer(textureCoordHandle, 2, GL_FLOAT, false,
- kTextureCoords.size(), kTextureCoords.data());
+ // Pass transformation matrix for the texture coordinates.
+ glUniformMatrix4fv(mTransformMatrixHandle, 1, /*transpose=*/GL_FALSE,
+ transformMatrix.data());
// Configure texture for the shader.
- int textureHandle = glGetUniformLocation(mProgram, "uTexture");
glActiveTexture(GL_TEXTURE0);
glBindTexture(GL_TEXTURE_EXTERNAL_OES, textureId);
- glUniform1i(textureHandle, 0);
+ glUniform1i(mTextureHandle, 0);
// Draw triangle strip forming a square filling the viewport.
glDrawElements(GL_TRIANGLES, kDrawOrder.size(), GL_UNSIGNED_BYTE,
diff --git a/services/camera/virtualcamera/util/EglProgram.h b/services/camera/virtualcamera/util/EglProgram.h
index 1b5f2cd..c695cbb 100644
--- a/services/camera/virtualcamera/util/EglProgram.h
+++ b/services/camera/virtualcamera/util/EglProgram.h
@@ -17,6 +17,8 @@
#ifndef ANDROID_COMPANION_VIRTUALCAMERA_EGLPROGRAM_H
#define ANDROID_COMPANION_VIRTUALCAMERA_EGLPROGRAM_H
+#include <array>
+
#include "GLES/gl.h"
namespace android {
@@ -58,8 +60,23 @@
enum class TextureFormat { RGBA, YUV };
EglTextureProgram(TextureFormat textureFormat = TextureFormat::YUV);
+ virtual ~EglTextureProgram();
- bool draw(GLuint textureId);
+ // Draw texture over whole viewport, applying transformMatrix to texture
+ // coordinates.
+ //
+ // Transform matrix is 4x4 matrix represented in column-major order and is
+ // applied to texture coordinates in (s,t,0,1), s,t from [0, 1] range prior to
+ // sampling:
+ //
+ // textureCoord = (transformMatrix * vec4(s,t,0,1)).xy
+ bool draw(GLuint textureId, const std::array<float, 16>& transformMatrix);
+
+ private:
+ int mPositionHandle = -1;
+ int mTextureCoordHandle = -1;
+ int mTransformMatrixHandle = -1;
+ int mTextureHandle = -1;
};
} // namespace virtualcamera
diff --git a/services/camera/virtualcamera/util/EglSurfaceTexture.cc b/services/camera/virtualcamera/util/EglSurfaceTexture.cc
index 5b479c0..9f26e19 100644
--- a/services/camera/virtualcamera/util/EglSurfaceTexture.cc
+++ b/services/camera/virtualcamera/util/EglSurfaceTexture.cc
@@ -68,6 +68,16 @@
return mTextureId;
}
+GLuint EglSurfaceTexture::getTextureId() const {
+ return mTextureId;
+}
+
+std::array<float, 16> EglSurfaceTexture::getTransformMatrix() {
+ std::array<float, 16> matrix;
+ mGlConsumer->getTransformMatrix(matrix.data());
+ return matrix;
+}
+
uint32_t EglSurfaceTexture::getWidth() const {
return mWidth;
}
diff --git a/services/camera/virtualcamera/util/EglSurfaceTexture.h b/services/camera/virtualcamera/util/EglSurfaceTexture.h
index 14dc7d6..faad7c4 100644
--- a/services/camera/virtualcamera/util/EglSurfaceTexture.h
+++ b/services/camera/virtualcamera/util/EglSurfaceTexture.h
@@ -57,6 +57,17 @@
// Returns EGL texture id of the texture.
GLuint updateTexture();
+ // Returns EGL texture id of the underlying texture.
+ GLuint getTextureId() const;
+
+ // Returns 4x4 transformation matrix in column-major order,
+ // which should be applied to EGL texture coordinates
+ // before sampling from the texture backed by android native buffer,
+ // so the corresponding region of the underlying buffer is sampled.
+ //
+ // See SurfaceTexture.getTransformMatrix for more details.
+ std::array<float, 16> getTransformMatrix();
+
private:
sp<IGraphicBufferProducer> mBufferProducer;
sp<IGraphicBufferConsumer> mBufferConsumer;