Merge "FilterWrapper: propagate ParamReflectors to the wrapper component" into main
diff --git a/Android.bp b/Android.bp
index 72b8721..afb1341 100644
--- a/Android.bp
+++ b/Android.bp
@@ -133,3 +133,19 @@
frozen: true,
}
+
+latest_av_audio_types_aidl = "av-audio-types-aidl-V1"
+
+cc_defaults {
+ name: "latest_av_audio_types_aidl_ndk_shared",
+ shared_libs: [
+ latest_av_audio_types_aidl + "-ndk",
+ ],
+}
+
+cc_defaults {
+ name: "latest_av_audio_types_aidl_ndk_static",
+ static_libs: [
+ latest_av_audio_types_aidl + "-ndk",
+ ],
+}
diff --git a/camera/Android.bp b/camera/Android.bp
index 22f1633..4c5b160 100644
--- a/camera/Android.bp
+++ b/camera/Android.bp
@@ -46,6 +46,7 @@
aconfig_declarations {
name: "camera_platform_flags",
package: "com.android.internal.camera.flags",
+ container: "system",
srcs: ["camera_platform.aconfig"],
}
diff --git a/camera/camera_platform.aconfig b/camera/camera_platform.aconfig
index 5d2a263..46a4cf2 100644
--- a/camera/camera_platform.aconfig
+++ b/camera/camera_platform.aconfig
@@ -1,4 +1,5 @@
package: "com.android.internal.camera.flags"
+container: "system"
flag {
namespace: "camera_platform"
diff --git a/camera/tests/fuzzer/camera_fuzzer.cpp b/camera/tests/fuzzer/camera_fuzzer.cpp
index c2a7549..9912fbe 100644
--- a/camera/tests/fuzzer/camera_fuzzer.cpp
+++ b/camera/tests/fuzzer/camera_fuzzer.cpp
@@ -88,6 +88,7 @@
bool initCamera();
void invokeCamera();
void invokeSetParameters();
+ native_handle_t* createNativeHandle();
sp<Camera> mCamera = nullptr;
FuzzedDataProvider* mFDP = nullptr;
@@ -102,6 +103,18 @@
};
};
+native_handle_t* CameraFuzzer::createNativeHandle() {
+ int32_t numFds = mFDP->ConsumeIntegralInRange<int32_t>(kMinElements, kMaxElements);
+ int32_t numInts = mFDP->ConsumeIntegralInRange<int32_t>(kNumMin, kNumMax);
+ native_handle_t* handle = native_handle_create(numFds, numInts);
+ for (int32_t i = 0; i < numFds; ++i) {
+ std::string filename = mFDP->ConsumeRandomLengthString(kMaxBytes);
+ int32_t fd = open(filename.c_str(), O_RDWR | O_CREAT | O_TRUNC, S_IRUSR | S_IWUSR);
+ handle->data[i] = fd;
+ }
+ return handle;
+}
+
bool CameraFuzzer::initCamera() {
ProcessState::self()->startThreadPool();
sp<IServiceManager> sm = defaultServiceManager();
@@ -288,15 +301,11 @@
},
[&]() {
int64_t timestamp = mFDP->ConsumeIntegral<int64_t>();
- int32_t numFds = mFDP->ConsumeIntegralInRange<int32_t>(kNumMin, kNumMax);
- int32_t numInts = mFDP->ConsumeIntegralInRange<int32_t>(kNumMin, kNumMax);
- native_handle_t* handle = native_handle_create(numFds, numInts);
+ native_handle_t* handle = createNativeHandle();
mCamera->recordingFrameHandleCallbackTimestamp(timestamp, handle);
},
[&]() {
- int32_t numFds = mFDP->ConsumeIntegralInRange<int32_t>(kNumMin, kNumMax);
- int32_t numInts = mFDP->ConsumeIntegralInRange<int32_t>(kNumMin, kNumMax);
- native_handle_t* handle = native_handle_create(numFds, numInts);
+ native_handle_t* handle = createNativeHandle();
mCamera->releaseRecordingFrameHandle(handle);
},
[&]() { mCamera->releaseRecordingFrame(iMem); },
@@ -305,9 +314,7 @@
for (int8_t i = 0;
i < mFDP->ConsumeIntegralInRange<int8_t>(kMinElements, kMaxElements);
++i) {
- int32_t numFds = mFDP->ConsumeIntegralInRange<int32_t>(kNumMin, kNumMax);
- int32_t numInts = mFDP->ConsumeIntegralInRange<int32_t>(kNumMin, kNumMax);
- native_handle_t* handle = native_handle_create(numFds, numInts);
+ native_handle_t* handle = createNativeHandle();
handles.push_back(handle);
}
mCamera->releaseRecordingFrameHandleBatch(handles);
@@ -317,9 +324,7 @@
for (int8_t i = 0;
i < mFDP->ConsumeIntegralInRange<int8_t>(kMinElements, kMaxElements);
++i) {
- int32_t numFds = mFDP->ConsumeIntegralInRange<int32_t>(kNumMin, kNumMax);
- int32_t numInts = mFDP->ConsumeIntegralInRange<int32_t>(kNumMin, kNumMax);
- native_handle_t* handle = native_handle_create(numFds, numInts);
+ native_handle_t* handle = createNativeHandle();
handles.push_back(handle);
}
std::vector<nsecs_t> timestamps;
diff --git a/media/TEST_MAPPING b/media/TEST_MAPPING
index d388596..1a637ac 100644
--- a/media/TEST_MAPPING
+++ b/media/TEST_MAPPING
@@ -47,8 +47,13 @@
],
// Postsubmit tests for TV devices
"tv-postsubmit": [
- {
- "name": "DecoderRenderTest"
- }
+ {
+ "name": "CtsMediaDecoderTestCases",
+ "options": [
+ {
+ "include-filter": "android.media.decoder.cts.DecoderRenderTest"
+ }
+ ]
+ }
]
}
diff --git a/media/aconfig/codec_fwk.aconfig b/media/aconfig/codec_fwk.aconfig
index b3c02eb..d662585 100644
--- a/media/aconfig/codec_fwk.aconfig
+++ b/media/aconfig/codec_fwk.aconfig
@@ -44,6 +44,16 @@
}
flag {
+ name: "input_surface_throttle"
+ namespace: "codec_fwk"
+ description: "Bugfix flag for input surface throttle"
+ bug: "342269852"
+ metadata {
+ purpose: PURPOSE_BUGFIX
+ }
+}
+
+flag {
name: "large_audio_frame_finish"
namespace: "codec_fwk"
description: "Implementation flag for large audio frame finishing tasks"
@@ -101,7 +111,17 @@
name: "set_state_early"
namespace: "codec_fwk"
description: "Bugfix flag for setting state early to avoid a race condition"
- bug: "298613711"
+ bug: "298613712"
+ metadata {
+ purpose: PURPOSE_BUGFIX
+ }
+}
+
+flag {
+ name: "stop_hal_before_surface"
+ namespace: "codec_fwk"
+ description: "Bugfix flag for setting state early to avoid a race condition"
+ bug: "339247977"
metadata {
purpose: PURPOSE_BUGFIX
}
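
Note: the two flags added above are consumed from native code later in this same change. A minimal sketch of how the aconfig-generated C++ accessors are used, mirroring the C2NodeImpl.cpp and CCodec.cpp hunks further below (the helper function names here are illustrative only):

    #include <android_media_codec.h>  // aconfig-generated accessors, included by C2NodeImpl.cpp below

    static bool inputSurfaceThrottleEnabled() {
        return android::media::codec::provider_->input_surface_throttle();
    }

    static bool stopHalBeforeSurfaceEnabled() {
        return android::media::codec::provider_->stop_hal_before_surface();
    }
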
diff --git a/media/audio/aconfig/Android.bp b/media/audio/aconfig/Android.bp
index b1d4ad4..2f659a2 100644
--- a/media/audio/aconfig/Android.bp
+++ b/media/audio/aconfig/Android.bp
@@ -8,18 +8,21 @@
aconfig_declarations {
name: "com.android.media.audioserver-aconfig",
package: "com.android.media.audioserver",
+ container: "system",
srcs: ["audioserver.aconfig"],
}
aconfig_declarations {
name: "com.android.media.audio-aconfig",
package: "com.android.media.audio",
+ container: "system",
srcs: ["audio.aconfig"],
}
aconfig_declarations {
name: "com.android.media.aaudio-aconfig",
package: "com.android.media.aaudio",
+ container: "system",
srcs: ["aaudio.aconfig"],
}
@@ -63,6 +66,7 @@
aconfig_declarations {
name: "android.media.audio-aconfig",
package: "android.media.audio",
+ container: "system",
srcs: ["audio_framework.aconfig"],
visibility: ["//visibility:private"],
}
@@ -70,6 +74,7 @@
aconfig_declarations {
name: "android.media.audiopolicy-aconfig",
package: "android.media.audiopolicy",
+ container: "system",
srcs: ["audiopolicy_framework.aconfig"],
visibility: ["//visibility:private"],
}
@@ -77,6 +82,7 @@
aconfig_declarations {
name: "android.media.midi-aconfig",
package: "android.media.midi",
+ container: "system",
srcs: ["midi_flags.aconfig"],
visibility: ["//visibility:private"],
}
diff --git a/media/audio/aconfig/OWNERS b/media/audio/aconfig/OWNERS
new file mode 100644
index 0000000..fb1e866
--- /dev/null
+++ b/media/audio/aconfig/OWNERS
@@ -0,0 +1,4 @@
+# Bug component: 48436
+atneya@google.com
+elaurent@google.com
+include platform/frameworks/av:/media/janitors/audio_OWNERS #{LAST_RESORT_SUGGESTION}
diff --git a/media/audio/aconfig/aaudio.aconfig b/media/audio/aconfig/aaudio.aconfig
index 7196525..c160109 100644
--- a/media/audio/aconfig/aaudio.aconfig
+++ b/media/audio/aconfig/aaudio.aconfig
@@ -3,6 +3,7 @@
# Please add flags in alphabetical order.
package: "com.android.media.aaudio"
+container: "system"
flag {
name: "sample_rate_conversion"
diff --git a/media/audio/aconfig/audio.aconfig b/media/audio/aconfig/audio.aconfig
index 73cb8ca..8ca4f9e 100644
--- a/media/audio/aconfig/audio.aconfig
+++ b/media/audio/aconfig/audio.aconfig
@@ -3,6 +3,7 @@
# Please add flags in alphabetical order.
package: "com.android.media.audio"
+container: "system"
flag {
name: "alarm_min_volume_zero"
diff --git a/media/audio/aconfig/audiopolicy_framework.aconfig b/media/audio/aconfig/audiopolicy_framework.aconfig
index 833730a..80e64ad 100644
--- a/media/audio/aconfig/audiopolicy_framework.aconfig
+++ b/media/audio/aconfig/audiopolicy_framework.aconfig
@@ -4,6 +4,7 @@
# Please add flags in alphabetical order.
package: "android.media.audiopolicy"
+container: "system"
flag {
name: "audio_policy_update_mixing_rules_api"
diff --git a/media/audio/aconfig/audioserver.aconfig b/media/audio/aconfig/audioserver.aconfig
index 21ea1a2..5c6504f 100644
--- a/media/audio/aconfig/audioserver.aconfig
+++ b/media/audio/aconfig/audioserver.aconfig
@@ -3,6 +3,7 @@
# Please add flags in alphabetical order.
package: "com.android.media.audioserver"
+container: "system"
flag {
name: "direct_track_reprioritization"
diff --git a/media/audio/aconfig/midi_flags.aconfig b/media/audio/aconfig/midi_flags.aconfig
index ff9238a..efb643f 100644
--- a/media/audio/aconfig/midi_flags.aconfig
+++ b/media/audio/aconfig/midi_flags.aconfig
@@ -4,6 +4,7 @@
# Please add flags in alphabetical order.
package: "android.media.midi"
+container: "system"
flag {
name: "virtual_ump"
diff --git a/media/codec2/components/avc/Android.bp b/media/codec2/components/avc/Android.bp
index a7ae85b..8ccb9ac 100644
--- a/media/codec2/components/avc/Android.bp
+++ b/media/codec2/components/avc/Android.bp
@@ -17,6 +17,10 @@
static_libs: ["libavcdec"],
+ cflags: [
+ "-DKEEP_THREADS_ACTIVE=1",
+ ],
+
srcs: ["C2SoftAvcDec.cpp"],
export_include_dirs: ["."],
diff --git a/media/codec2/components/avc/C2SoftAvcDec.cpp b/media/codec2/components/avc/C2SoftAvcDec.cpp
index 3385b95..77fdeb9 100644
--- a/media/codec2/components/avc/C2SoftAvcDec.cpp
+++ b/media/codec2/components/avc/C2SoftAvcDec.cpp
@@ -16,6 +16,9 @@
//#define LOG_NDEBUG 0
#define LOG_TAG "C2SoftAvcDec"
+#ifndef KEEP_THREADS_ACTIVE
+#define KEEP_THREADS_ACTIVE 0
+#endif
#include <log/log.h>
#include <media/stagefright/foundation/MediaDefs.h>
@@ -416,7 +419,7 @@
ivdext_create_op_t s_create_op = {};
s_create_ip.s_ivd_create_ip_t.u4_size = sizeof(ivdext_create_ip_t);
- s_create_ip.u4_keep_threads_active = 1;
+ s_create_ip.u4_keep_threads_active = KEEP_THREADS_ACTIVE;
s_create_ip.s_ivd_create_ip_t.e_cmd = IVD_CMD_CREATE;
s_create_ip.s_ivd_create_ip_t.u4_share_disp_buf = 0;
s_create_ip.s_ivd_create_ip_t.e_output_format = mIvColorFormat;
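
The KEEP_THREADS_ACTIVE plumbing above (and the equivalent hevc/mpeg2 hunks below) follows a build-time knob pattern: the source provides a conservative default and each component's Android.bp overrides it via a -D cflag. A standalone sketch of the pattern, not the decoder code:

    // Compile with -DKEEP_THREADS_ACTIVE=1 (as the avc/hevc Android.bp files do) to flip the knob;
    // without the cflag, the in-source default of 0 applies.
    #ifndef KEEP_THREADS_ACTIVE
    #define KEEP_THREADS_ACTIVE 0
    #endif

    #include <cstdio>

    int main() {
        printf("keep_threads_active = %d\n", KEEP_THREADS_ACTIVE);
        return 0;
    }
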
diff --git a/media/codec2/components/flac/C2SoftFlacEnc.cpp b/media/codec2/components/flac/C2SoftFlacEnc.cpp
index 7b63e75..780660e 100644
--- a/media/codec2/components/flac/C2SoftFlacEnc.cpp
+++ b/media/codec2/components/flac/C2SoftFlacEnc.cpp
@@ -155,7 +155,7 @@
mSignalledError = false;
mSignalledOutputEos = false;
mIsFirstFrame = true;
- mAnchorTimeStamp = 0ull;
+ mAnchorTimeStamp = 0;
mProcessedSamples = 0u;
mEncoderWriteData = false;
mEncoderReturnedNbBytes = 0;
@@ -186,7 +186,7 @@
mSignalledError = false;
mSignalledOutputEos = false;
mIsFirstFrame = true;
- mAnchorTimeStamp = 0ull;
+ mAnchorTimeStamp = 0;
mProcessedSamples = 0u;
mEncoderWriteData = false;
mEncoderReturnedNbBytes = 0;
@@ -236,7 +236,7 @@
inSize, (int)work->input.ordinal.timestamp.peeku(),
(int)work->input.ordinal.frameIndex.peeku(), work->input.flags);
if (mIsFirstFrame && inSize) {
- mAnchorTimeStamp = work->input.ordinal.timestamp.peekull();
+ mAnchorTimeStamp = work->input.ordinal.timestamp.peekll();
mIsFirstFrame = false;
}
@@ -405,7 +405,7 @@
C2WriteView wView = mOutputBlock->map().get();
uint8_t* outData = wView.data();
const uint32_t sampleRate = mIntf->getSampleRate();
- const uint64_t outTimeStamp = mProcessedSamples * 1000000ll / sampleRate;
+ const int64_t outTimeStamp = mProcessedSamples * 1000000ll / sampleRate;
ALOGV("writing %zu bytes of encoded data on output", bytes);
// increment mProcessedSamples to maintain audio synchronization during
// play back
diff --git a/media/codec2/components/flac/C2SoftFlacEnc.h b/media/codec2/components/flac/C2SoftFlacEnc.h
index 1f3be3c..ed9c298 100644
--- a/media/codec2/components/flac/C2SoftFlacEnc.h
+++ b/media/codec2/components/flac/C2SoftFlacEnc.h
@@ -72,7 +72,7 @@
bool mSignalledOutputEos;
uint32_t mBlockSize;
bool mIsFirstFrame;
- uint64_t mAnchorTimeStamp;
+ int64_t mAnchorTimeStamp;
uint64_t mProcessedSamples;
// should the data received by the callback be written to the output port
bool mEncoderWriteData;
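
The switch of mAnchorTimeStamp from uint64_t to int64_t (and from peekull() to peekll()) matters because input ordinal timestamps may be negative; storing such a value in an unsigned anchor wraps around. A small standalone illustration of the failure mode, not part of the patch:

    #include <cstdint>
    #include <cstdio>

    int main() {
        int64_t firstInputTimestampUs = -10;              // e.g. a slightly negative first timestamp
        uint64_t unsignedAnchor = firstInputTimestampUs;  // wraps to 18446744073709551606
        int64_t signedAnchor = firstInputTimestampUs;     // stays -10
        printf("unsigned anchor: %llu, signed anchor: %lld\n",
               (unsigned long long)unsignedAnchor, (long long)signedAnchor);
        return 0;
    }
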
diff --git a/media/codec2/components/hevc/Android.bp b/media/codec2/components/hevc/Android.bp
index d1388b9..cb9c2ae 100644
--- a/media/codec2/components/hevc/Android.bp
+++ b/media/codec2/components/hevc/Android.bp
@@ -15,6 +15,10 @@
"libcodec2_soft_sanitize_cfi-defaults",
],
+ cflags: [
+ "-DKEEP_THREADS_ACTIVE=1",
+ ],
+
srcs: ["C2SoftHevcDec.cpp"],
static_libs: ["libhevcdec"],
diff --git a/media/codec2/components/hevc/C2SoftHevcDec.cpp b/media/codec2/components/hevc/C2SoftHevcDec.cpp
index 81db2a1..64aa7a4 100644
--- a/media/codec2/components/hevc/C2SoftHevcDec.cpp
+++ b/media/codec2/components/hevc/C2SoftHevcDec.cpp
@@ -16,6 +16,9 @@
//#define LOG_NDEBUG 0
#define LOG_TAG "C2SoftHevcDec"
+#ifndef KEEP_THREADS_ACTIVE
+#define KEEP_THREADS_ACTIVE 0
+#endif
#include <log/log.h>
#include <media/stagefright/foundation/MediaDefs.h>
@@ -407,7 +410,7 @@
ivdext_create_op_t s_create_op = {};
s_create_ip.s_ivd_create_ip_t.u4_size = sizeof(ivdext_create_ip_t);
- s_create_ip.u4_keep_threads_active = 1;
+ s_create_ip.u4_keep_threads_active = KEEP_THREADS_ACTIVE;
s_create_ip.s_ivd_create_ip_t.e_cmd = IVD_CMD_CREATE;
s_create_ip.s_ivd_create_ip_t.u4_share_disp_buf = 0;
s_create_ip.s_ivd_create_ip_t.e_output_format = mIvColorformat;
diff --git a/media/codec2/components/mpeg2/Android.bp b/media/codec2/components/mpeg2/Android.bp
index a58044c..e644ee3 100644
--- a/media/codec2/components/mpeg2/Android.bp
+++ b/media/codec2/components/mpeg2/Android.bp
@@ -14,6 +14,10 @@
"libcodec2_soft_sanitize_signed-defaults",
],
+ cflags: [
+ "-DKEEP_THREADS_ACTIVE=0",
+ ],
+
srcs: ["C2SoftMpeg2Dec.cpp"],
static_libs: ["libmpeg2dec"],
diff --git a/media/codec2/components/mpeg2/C2SoftMpeg2Dec.cpp b/media/codec2/components/mpeg2/C2SoftMpeg2Dec.cpp
index 491098d..562dcf5 100644
--- a/media/codec2/components/mpeg2/C2SoftMpeg2Dec.cpp
+++ b/media/codec2/components/mpeg2/C2SoftMpeg2Dec.cpp
@@ -16,6 +16,9 @@
//#define LOG_NDEBUG 0
#define LOG_TAG "C2SoftMpeg2Dec"
+#ifndef KEEP_THREADS_ACTIVE
+#define KEEP_THREADS_ACTIVE 0
+#endif
#include <log/log.h>
#include <media/stagefright/foundation/MediaDefs.h>
@@ -433,7 +436,7 @@
s_fill_mem_ip.s_ivd_fill_mem_rec_ip_t.u4_size = sizeof(ivdext_fill_mem_rec_ip_t);
s_fill_mem_ip.u4_share_disp_buf = 0;
- s_fill_mem_ip.u4_keep_threads_active = 1;
+ s_fill_mem_ip.u4_keep_threads_active = KEEP_THREADS_ACTIVE;
s_fill_mem_ip.e_output_format = mIvColorformat;
s_fill_mem_ip.u4_deinterlace = 1;
s_fill_mem_ip.s_ivd_fill_mem_rec_ip_t.e_cmd = IV_CMD_FILL_NUM_MEM_REC;
@@ -475,7 +478,7 @@
s_init_ip.s_ivd_init_ip_t.u4_frm_max_ht = mHeight;
s_init_ip.u4_share_disp_buf = 0;
s_init_ip.u4_deinterlace = 1;
- s_init_ip.u4_keep_threads_active = 1;
+ s_init_ip.u4_keep_threads_active = KEEP_THREADS_ACTIVE;
s_init_ip.s_ivd_init_ip_t.u4_num_mem_rec = mNumMemRecords;
s_init_ip.s_ivd_init_ip_t.e_output_format = mIvColorformat;
s_init_op.s_ivd_init_op_t.u4_size = sizeof(ivdext_init_op_t);
diff --git a/media/codec2/components/vpx/C2SoftVpxEnc.cpp b/media/codec2/components/vpx/C2SoftVpxEnc.cpp
index 1c5772f..8eb8da5 100644
--- a/media/codec2/components/vpx/C2SoftVpxEnc.cpp
+++ b/media/codec2/components/vpx/C2SoftVpxEnc.cpp
@@ -465,6 +465,7 @@
mTemporalPatternIdx(0),
mLastTimestamp(0x7FFFFFFFFFFFFFFFull),
mSignalledOutputEos(false),
+ mHeaderGenerated(false),
mSignalledError(false) {
for (int i = 0; i < MAXTEMPORALLAYERS; i++) {
mTemporalLayerBitrateRatio[i] = 1.0f;
@@ -494,6 +495,7 @@
// this one is not allocated by us
mCodecInterface = nullptr;
+ mHeaderGenerated = false;
}
c2_status_t C2SoftVpxEnc::onStop() {
@@ -558,6 +560,7 @@
(uint32_t)mBitrateControlMode, mTemporalLayers, mIntf->getSyncFramePeriod(),
mMinQuantizer, mMaxQuantizer);
+ mHeaderGenerated = false;
mCodecConfiguration = new vpx_codec_enc_cfg_t;
if (!mCodecConfiguration) goto CleanUp;
codec_return = vpx_codec_enc_config_default(mCodecInterface,
@@ -873,6 +876,27 @@
return;
}
+ // Header generation is limited to Android V and above, as MediaMuxer did not handle
+ // CSD for VP9 correctly in Android U and before.
+ if (isAtLeastV() && !mHeaderGenerated) {
+ vpx_fixed_buf_t* codec_private_data = vpx_codec_get_global_headers(mCodecContext);
+ if (codec_private_data) {
+ std::unique_ptr<C2StreamInitDataInfo::output> csd =
+ C2StreamInitDataInfo::output::AllocUnique(codec_private_data->sz, 0u);
+ if (!csd) {
+ ALOGE("CSD allocation failed");
+ mSignalledError = true;
+ work->result = C2_NO_MEMORY;
+ work->workletsProcessed = 1u;
+ return;
+ }
+ memcpy(csd->m.value, codec_private_data->buf, codec_private_data->sz);
+ work->worklets.front()->output.configUpdate.push_back(std::move(csd));
+ ALOGV("CSD Produced of size %zu bytes", codec_private_data->sz);
+ }
+ mHeaderGenerated = true;
+ }
+
const C2ConstGraphicBlock inBuffer =
inputBuffer->data().graphicBlocks().front();
if (inBuffer.width() < mSize->width ||
diff --git a/media/codec2/components/vpx/C2SoftVpxEnc.h b/media/codec2/components/vpx/C2SoftVpxEnc.h
index 980de04..87d24f9 100644
--- a/media/codec2/components/vpx/C2SoftVpxEnc.h
+++ b/media/codec2/components/vpx/C2SoftVpxEnc.h
@@ -207,6 +207,9 @@
// Signalled EOS
bool mSignalledOutputEos;
+ // Header generated
+ bool mHeaderGenerated;
+
// Signalled Error
bool mSignalledError;
diff --git a/media/codec2/core/Android.bp b/media/codec2/core/Android.bp
index 7d5740b..c205dcd 100644
--- a/media/codec2/core/Android.bp
+++ b/media/codec2/core/Android.bp
@@ -26,9 +26,6 @@
"//apex_available:platform",
"com.android.media.swcodec",
],
- vndk: {
- enabled: true,
- },
double_loadable: true,
srcs: ["C2.cpp"],
diff --git a/media/codec2/hal/aidl/Android.bp b/media/codec2/hal/aidl/Android.bp
index 48b6e21..e16e2b1 100644
--- a/media/codec2/hal/aidl/Android.bp
+++ b/media/codec2/hal/aidl/Android.bp
@@ -8,6 +8,7 @@
name: "libcodec2_aidl_client",
defaults: [
+ "aconfig_lib_cc_static_link.defaults",
"libcodec2_hal_selection",
],
@@ -65,6 +66,7 @@
],
defaults: [
+ "aconfig_lib_cc_static_link.defaults",
"libcodec2_hal_selection",
],
diff --git a/media/codec2/hal/client/Android.bp b/media/codec2/hal/client/Android.bp
index af6f4ae..864eeb8 100644
--- a/media/codec2/hal/client/Android.bp
+++ b/media/codec2/hal/client/Android.bp
@@ -33,6 +33,13 @@
"libcodec2-aidl-client-defaults",
],
+ // http://b/343951602#comment4 Explicitly set cpp_std to gnu++20. The
+ // default inherited from libcodec2-impl-defaults sets it to gnu++17 which
+ // causes a segfault when mixing global std::string symbols built with
+ // gnu++17 and gnu++20. TODO(b/343951602): clean this up after the
+ // gnu++17 opt-in in libcodec2-impl-defaults is removed.
+ cpp_std: "gnu++20",
+
header_libs: [
"libcodec2_internal", // private
],
diff --git a/media/codec2/hal/client/GraphicsTracker.cpp b/media/codec2/hal/client/GraphicsTracker.cpp
index 1c2a0fb..dbbabfe 100644
--- a/media/codec2/hal/client/GraphicsTracker.cpp
+++ b/media/codec2/hal/client/GraphicsTracker.cpp
@@ -173,7 +173,7 @@
}
GraphicsTracker::GraphicsTracker(int maxDequeueCount)
- : mBufferCache(new BufferCache()), mMaxDequeue{maxDequeueCount},
+ : mBufferCache(new BufferCache()), mNumDequeueing{0}, mMaxDequeue{maxDequeueCount},
mMaxDequeueCommitted{maxDequeueCount},
mDequeueable{maxDequeueCount},
mTotalDequeued{0}, mTotalCancelled{0}, mTotalDropped{0}, mTotalReleased{0},
@@ -235,6 +235,7 @@
const sp<IGraphicBufferProducer>& igbp, uint32_t generation) {
// TODO: wait until operations to previous IGBP is completed.
std::shared_ptr<BufferCache> prevCache;
+ int prevDequeueRequested = 0;
int prevDequeueCommitted;
std::unique_lock<std::mutex> cl(mConfigLock);
@@ -243,6 +244,9 @@
mInConfig = true;
prevCache = mBufferCache;
prevDequeueCommitted = mMaxDequeueCommitted;
+ if (mMaxDequeueRequested.has_value()) {
+ prevDequeueRequested = mMaxDequeueRequested.value();
+ }
}
// NOTE: Switching to the same surface is blocked from MediaCodec.
// Switching to the same surface might not work if tried, since disconnect()
@@ -263,6 +267,11 @@
mInConfig = false;
return C2_BAD_VALUE;
}
+ ALOGD("new surface in configuration: maxDequeueRequested(%d), maxDequeueCommitted(%d)",
+ prevDequeueRequested, prevDequeueCommitted);
+ if (prevDequeueRequested > 0 && prevDequeueRequested > prevDequeueCommitted) {
+ prevDequeueCommitted = prevDequeueRequested;
+ }
if (igbp) {
ret = igbp->setMaxDequeuedBufferCount(prevDequeueCommitted);
if (ret != ::android::OK) {
@@ -280,6 +289,34 @@
std::unique_lock<std::mutex> l(mLock);
mInConfig = false;
mBufferCache = newCache;
+ // {@code dequeued} is the number of currently dequeued buffers.
+ // {@code prevDequeueCommitted} is max dequeued buffer at any moment
+ // from the new surface.
+ // {@code newDequeueable} is hence the current # of dequeueable buffers
+ // if no change occurs.
+ int dequeued = mDequeued.size() + mNumDequeueing;
+ int newDequeueable = prevDequeueCommitted - dequeued;
+ if (newDequeueable < 0) {
+ // This should not happen.
+ // But if it does, we respect the value and try to continue.
+ ALOGE("calculated new dequeueable is negative: %d max(%d), dequeued(%d)",
+ newDequeueable, prevDequeueCommitted, dequeued);
+ }
+
+ if (mMaxDequeueRequested.has_value() && mMaxDequeueRequested == prevDequeueCommitted) {
+ mMaxDequeueRequested.reset();
+ }
+ mMaxDequeue = mMaxDequeueCommitted = prevDequeueCommitted;
+
+ int delta = newDequeueable - mDequeueable;
+ if (delta > 0) {
+ writeIncDequeueableLocked(delta);
+ } else if (delta < 0) {
+ drainDequeueableLocked(-delta);
+ }
+ ALOGV("new surfcace dequeueable %d(delta %d), maxDequeue %d",
+ newDequeueable, delta, mMaxDequeue);
+ mDequeueable = newDequeueable;
}
return C2_OK;
}
@@ -529,6 +566,7 @@
ALOGE("writing end for the waitable object seems to be closed");
return C2_BAD_STATE;
}
+ mNumDequeueing++;
mDequeueable--;
*cache = mBufferCache;
return C2_OK;
@@ -543,6 +581,7 @@
bool cached, int slot, const sp<Fence> &fence,
std::shared_ptr<BufferItem> *pBuffer, bool *updateDequeue) {
std::unique_lock<std::mutex> l(mLock);
+ mNumDequeueing--;
if (res == C2_OK) {
if (cached) {
auto it = cache->mBuffers.find(slot);
@@ -655,7 +694,8 @@
ALOGE("allocate by dequeueBuffer() successful, but requestBuffer() failed %d",
status);
igbp->cancelBuffer(slotId, fence);
- return C2_CORRUPTED;
+ // This might be due to life-cycle end and/or surface switching.
+ return C2_BLOCKING;
}
*buffer = std::make_shared<BufferItem>(generation, slotId, realloced, fence);
if (!*buffer) {
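
The new-surface path above recomputes the dequeueable count from the committed maximum and the buffers still outstanding, where "outstanding" now includes in-flight dequeue calls tracked by the new mNumDequeueing counter. The arithmetic in isolation (illustrative only):

    // newDequeueable = maxDequeueCommitted - (buffers currently dequeued + dequeues in flight).
    // A negative result should not happen; the tracker logs it and continues.
    static int newDequeueableAfterSwitch(int maxDequeueCommitted, int dequeuedCount,
                                         int numDequeueing) {
        return maxDequeueCommitted - (dequeuedCount + numDequeueing);
    }
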
diff --git a/media/codec2/hal/client/include/codec2/aidl/GraphicsTracker.h b/media/codec2/hal/client/include/codec2/aidl/GraphicsTracker.h
index dd6c869..762030b 100644
--- a/media/codec2/hal/client/include/codec2/aidl/GraphicsTracker.h
+++ b/media/codec2/hal/client/include/codec2/aidl/GraphicsTracker.h
@@ -234,6 +234,7 @@
// Maps bufferId to buffer
std::map<uint64_t, std::shared_ptr<BufferItem>> mDequeued;
std::set<uint64_t> mDeallocating;
+ int mNumDequeueing;
// These member variables are read and modified accessed as follows.
// 1. mConfigLock being held
diff --git a/media/codec2/hal/common/Android.bp b/media/codec2/hal/common/Android.bp
index 7d7b285..4c9da33 100644
--- a/media/codec2/hal/common/Android.bp
+++ b/media/codec2/hal/common/Android.bp
@@ -31,6 +31,10 @@
],
static_libs: ["aconfig_mediacodec_flags_c_lib"],
+
+ defaults: [
+ "aconfig_lib_cc_static_link.defaults",
+ ],
}
cc_library_static {
diff --git a/media/codec2/hal/common/MultiAccessUnitHelper.cpp b/media/codec2/hal/common/MultiAccessUnitHelper.cpp
index 8086ef2..b1fa82f 100644
--- a/media/codec2/hal/common/MultiAccessUnitHelper.cpp
+++ b/media/codec2/hal/common/MultiAccessUnitHelper.cpp
@@ -27,6 +27,7 @@
#include <C2Debug.h>
#include <C2PlatformSupport.h>
+static inline constexpr uint32_t MAX_SUPPORTED_SIZE = (10 * 512000 * 8 * 2u);
namespace android {
static C2R MultiAccessUnitParamsSetter(
@@ -39,8 +40,6 @@
res = res.plus(C2SettingResultBuilder::BadValue(me.F(me.v.thresholdSize)));
} else if (me.v.maxSize < me.v.thresholdSize) {
me.set().maxSize = me.v.thresholdSize;
- } else if (me.v.thresholdSize == 0 && me.v.maxSize > 0) {
- me.set().thresholdSize = me.v.maxSize;
}
std::vector<std::unique_ptr<C2SettingResult>> failures;
res.retrieveFailures(&failures);
@@ -61,9 +60,9 @@
.withDefault(new C2LargeFrame::output(0u, 0, 0))
.withFields({
C2F(mLargeFrameParams, maxSize).inRange(
- 0, c2_min(UINT_MAX, 10 * 512000 * 8 * 2u)),
+ 0, c2_min(UINT_MAX, MAX_SUPPORTED_SIZE)),
C2F(mLargeFrameParams, thresholdSize).inRange(
- 0, c2_min(UINT_MAX, 10 * 512000 * 8 * 2u))
+ 0, c2_min(UINT_MAX, MAX_SUPPORTED_SIZE))
})
.withSetter(MultiAccessUnitParamsSetter)
.build());
@@ -115,6 +114,18 @@
return false;
}
+bool MultiAccessUnitInterface::getMaxInputSize(
+ C2StreamMaxBufferSizeInfo::input* const maxInputSize) const {
+ if (maxInputSize == nullptr || mC2ComponentIntf == nullptr) {
+ return false;
+ }
+ c2_status_t err = mC2ComponentIntf->query_vb({maxInputSize}, {}, C2_MAY_BLOCK, nullptr);
+ if (err != OK) {
+ return false;
+ }
+ return true;
+}
+
//C2MultiAccessUnitBuffer
class C2MultiAccessUnitBuffer : public C2Buffer {
public:
@@ -128,6 +139,7 @@
MultiAccessUnitHelper::MultiAccessUnitHelper(
const std::shared_ptr<MultiAccessUnitInterface>& intf,
std::shared_ptr<C2BlockPool>& linearPool):
+ mMultiAccessOnOffAllowed(true),
mInit(false),
mInterface(intf),
mLinearPool(linearPool) {
@@ -152,6 +164,63 @@
return result;
}
+bool MultiAccessUnitHelper::tryReconfigure(const std::unique_ptr<C2Param> ¶m) {
+ C2LargeFrame::output *lfp = C2LargeFrame::output::From(param.get());
+ if (lfp == nullptr) {
+ return false;
+ }
+ bool isDecoder = (mInterface->kind() == C2Component::KIND_DECODER);
+ if (!isDecoder) {
+ C2StreamMaxBufferSizeInfo::input maxInputSize(0);
+ if (!mInterface->getMaxInputSize(&maxInputSize)) {
+ LOG(ERROR) << "Error in reconfigure: "
+ << "Encoder failed to respond with a valid max input size";
+ return false;
+ }
+ // This assumes a worst-case compression ratio of 1:1.
+ // In no case should the encoder produce more output than
+ // the input provided to it in a single call.
+ if (lfp->maxSize < maxInputSize.value) {
+ lfp->maxSize = maxInputSize.value;
+ }
+ }
+ lfp->maxSize =
+ (lfp->maxSize > MAX_SUPPORTED_SIZE) ? MAX_SUPPORTED_SIZE :
+ (lfp->maxSize < 0) ? 0 : lfp->maxSize;
+ lfp->thresholdSize =
+ (lfp->thresholdSize > MAX_SUPPORTED_SIZE) ? MAX_SUPPORTED_SIZE :
+ (lfp->thresholdSize < 0) ? 0 : lfp->thresholdSize;
+ C2LargeFrame::output currentConfig = mInterface->getLargeFrameParam();
+ if ((currentConfig.maxSize == lfp->maxSize)
+ && (currentConfig.thresholdSize == lfp->thresholdSize)) {
+ // no need to update
+ return false;
+ }
+ if (isDecoder) {
+ bool isOnOffTransition =
+ (currentConfig.maxSize == 0 && lfp->maxSize != 0)
+ || (currentConfig.maxSize != 0 && lfp->maxSize == 0);
+ if (isOnOffTransition && !mMultiAccessOnOffAllowed) {
+ LOG(ERROR) << "Setting new configs not allowed"
+ << " MaxSize: " << lfp->maxSize
+ << " ThresholdSize: " << lfp->thresholdSize;
+ return false;
+ }
+ }
+ std::vector<C2Param*> config{lfp};
+ std::vector<std::unique_ptr<C2SettingResult>> failures;
+ if (C2_OK != mInterface->config(config, C2_MAY_BLOCK, &failures)) {
+ LOG(ERROR) << "Dynamic config not applied for"
+ << " MaxSize: " << lfp->maxSize
+ << " ThresholdSize: " << lfp->thresholdSize;
+ return false;
+ }
+ LOG(DEBUG) << "Updated from param maxSize "
+ << lfp->maxSize
+ << " ThresholdSize " << lfp->thresholdSize;
+ return true;
+}
+
std::shared_ptr<MultiAccessUnitInterface> MultiAccessUnitHelper::getInterface() {
return mInterface;
}
@@ -163,6 +232,7 @@
void MultiAccessUnitHelper::reset() {
std::lock_guard<std::mutex> l(mLock);
mFrameHolder.clear();
+ mMultiAccessOnOffAllowed = true;
}
c2_status_t MultiAccessUnitHelper::error(
@@ -181,6 +251,7 @@
}
}
mFrameHolder.clear();
+ mMultiAccessOnOffAllowed = true;
return C2_OK;
}
@@ -232,16 +303,23 @@
uint64_t newFrameIdx = mFrameIndex++;
// TODO: Do not split buffers if the component inherently supports MultipleFrames.
// If that's the case, only replace the frame index.
- auto cloneInputWork = [&newFrameIdx](std::unique_ptr<C2Work>& inWork, uint32_t flags) {
+ auto cloneInputWork = [&frameInfo, &newFrameIdx, this]
+ (std::unique_ptr<C2Work>& inWork, uint32_t flags) -> std::unique_ptr<C2Work> {
std::unique_ptr<C2Work> newWork(new C2Work);
newWork->input.flags = (C2FrameData::flags_t)flags;
newWork->input.ordinal = inWork->input.ordinal;
newWork->input.ordinal.frameIndex = newFrameIdx;
if (!inWork->input.configUpdate.empty()) {
for (std::unique_ptr<C2Param>& param : inWork->input.configUpdate) {
- newWork->input.configUpdate.push_back(
- std::move(C2Param::Copy(*(param.get()))));
+ if (param->index() == C2LargeFrame::output::PARAM_TYPE) {
+ if (tryReconfigure(param)) {
+ frameInfo.mConfigUpdate.push_back(std::move(param));
+ }
+ } else {
+ newWork->input.configUpdate.push_back(std::move(param));
+ }
}
+ inWork->input.configUpdate.clear();
}
newWork->input.infoBuffers = (inWork->input.infoBuffers);
if (!inWork->worklets.empty() && inWork->worklets.front() != nullptr) {
@@ -331,6 +409,7 @@
frameInfo.mLargeFrameTuning = multiAccessParams;
std::lock_guard<std::mutex> l(mLock);
mFrameHolder.push_back(std::move(frameInfo));
+ mMultiAccessOnOffAllowed = false;
}
}
return C2_OK;
@@ -360,6 +439,7 @@
std::list<MultiAccessUnitInfo>::iterator frame =
mFrameHolder.begin();
while (!foundFrame && frame != mFrameHolder.end()) {
+ c2_status_t res = C2_OK;
auto it = frame->mComponentFrameIds.find(thisFrameIndex);
if (it != frame->mComponentFrameIds.end()) {
foundFrame = true;
@@ -369,8 +449,7 @@
if (work->result != C2_OK
|| work->worklets.empty()
|| !work->worklets.front()
- || (frame->mLargeFrameTuning.thresholdSize == 0
- || frame->mLargeFrameTuning.maxSize == 0)) {
+ || frame->mLargeFrameTuning.maxSize == 0) {
if (removeEntry) {
frame->mComponentFrameIds.erase(it);
removeEntry = false;
@@ -388,10 +467,27 @@
addOutWork(frame->mLargeWork);
frame->reset();
if (workResult != C2_OK) {
- frame->mAccessUnitInfos.clear();
+ frame->mComponentFrameIds.clear();
+ removeEntry = false;
}
- } else if (C2_OK != processWorklets(*frame, work, addOutWork)) {
- LOG(DEBUG) << "Error while processing work";
+ } else if (C2_OK != (res = processWorklets(*frame, work, addOutWork))) {
+ // Upon error in processing worklets, we return the work with its
+ // result set to the error. This should indicate the error to the
+ // framework so that it can do whatever is necessary to handle it.
+ LOG(DEBUG) << "Error while processing worklets";
+ if (frame->mLargeWork == nullptr) {
+ frame->mLargeWork.reset(new C2Work);
+ frame->mLargeWork->input.ordinal = frame->inOrdinal;
+ frame->mLargeWork->input.ordinal.frameIndex =
+ frame->inOrdinal.frameIndex;
+ }
+ frame->mLargeWork->result = res;
+ finalizeWork(*frame);
+ addOutWork(frame->mLargeWork);
+ frame->reset();
+ frame->mComponentFrameIds.clear();
+ removeEntry = false;
}
if (removeEntry) {
LOG(DEBUG) << "Removing entry: " << thisFrameIndex
@@ -528,9 +624,6 @@
LOG(DEBUG) << "maxOutSize " << frame.mLargeFrameTuning.maxSize
<< " threshold " << frame.mLargeFrameTuning.thresholdSize;
- if ((*worklet)->output.buffers.size() > 0) {
- allocateWork(frame, true, true);
- }
LOG(DEBUG) << "This worklet has " << (*worklet)->output.buffers.size() << " buffers"
<< " ts: " << (*worklet)->output.ordinal.timestamp.peekull();
int64_t workletTimestamp = (*worklet)->output.ordinal.timestamp.peekull();
@@ -552,43 +645,39 @@
inputSize -= (inputSize % frameSize);
}
while (inputOffset < inputSize) {
- if (frame.mWview->offset() >= frame.mLargeFrameTuning.thresholdSize) {
+ if ((frame.mWview != nullptr)
+ && (frame.mWview->offset() >= frame.mLargeFrameTuning.thresholdSize)) {
frame.mLargeWork->result = C2_OK;
finalizeWork(frame, flagsForCopy);
addWork(frame.mLargeWork);
frame.reset();
- allocateWork(frame, true, true);
}
if (mInterface->kind() == C2Component::KIND_ENCODER) {
if (inputSize > frame.mLargeFrameTuning.maxSize) {
- LOG(ERROR) << "Enc: Output buffer too small for AU, configured with "
- << frame.mLargeFrameTuning.maxSize
- << " block size: " << blocks.front().size()
- << "alloc size " << frame.mWview->size();
- if (frame.mLargeWork
- && frame.mWview && frame.mWview->offset() > 0) {
+ LOG(WARNING) << "WARNING Encoder:"
+ << " Output buffer too small for configuration"
+ << " configured max size " << frame.mLargeFrameTuning.maxSize
+ << " access unit size " << inputSize;
+ if (frame.mLargeWork && (frame.mWview && frame.mWview->offset() > 0)) {
+ frame.mLargeWork->result = C2_OK;
finalizeWork(frame, flagsForCopy);
addWork(frame.mLargeWork);
frame.reset();
- allocateWork(frame, true, false);
}
- frame.mLargeWork->result = C2_NO_MEMORY;
- finalizeWork(frame, 0, true);
- addWork(frame.mLargeWork);
- frame.reset();
- return C2_NO_MEMORY;
- } else if (inputSize > frame.mWview->size()) {
+ frame.mLargeFrameTuning.maxSize = inputSize;
+ } else if ((frame.mWview != nullptr)
+ && (inputSize > frame.mWview->size())) {
LOG(DEBUG) << "Enc: Large frame hitting bufer limit, current size "
<< frame.mWview->offset();
- if (frame.mLargeWork
- && frame.mWview && frame.mWview->offset() > 0) {
+ if (frame.mWview->offset() > 0) {
+ frame.mLargeWork->result = C2_OK;
finalizeWork(frame, flagsForCopy);
addWork(frame.mLargeWork);
frame.reset();
- allocateWork(frame, true, true);
}
}
}
+ allocateWork(frame, true, true);
C2ReadView rView = blocks.front().map().get();
if (rView.error()) {
LOG(ERROR) << "Buffer read view error";
@@ -683,26 +772,39 @@
frame.mWview->setOffset(0);
std::shared_ptr<C2Buffer> c2Buffer = C2Buffer::CreateLinearBuffer(
frame.mBlock->share(0, size, ::C2Fence()));
- if (frame.mAccessUnitInfos.size() > 0) {
- if (finalFlags & C2FrameData::FLAG_END_OF_STREAM) {
- frame.mAccessUnitInfos.back().flags |=
- C2FrameData::FLAG_END_OF_STREAM;
- }
- std::shared_ptr<C2AccessUnitInfos::output> largeFrame =
- C2AccessUnitInfos::output::AllocShared(
- frame.mAccessUnitInfos.size(), 0u, frame.mAccessUnitInfos);
- frame.mInfos.push_back(largeFrame);
- frame.mAccessUnitInfos.clear();
- }
- for (auto &info : frame.mInfos) {
- c2Buffer->setInfo(std::const_pointer_cast<C2Info>(info));
- }
frame.mLargeWork->worklets.front()->output.buffers.push_back(std::move(c2Buffer));
- frame.mInfos.clear();
- frame.mBlock.reset();
- frame.mWview.reset();
+ }
+ if (frame.mLargeWork->worklets.front()->output.buffers.size() > 0) {
+ std::shared_ptr<C2Buffer>& c2Buffer =
+ frame.mLargeWork->worklets.front()->output.buffers.front();
+ if (c2Buffer != nullptr) {
+ if (frame.mAccessUnitInfos.size() > 0) {
+ if (finalFlags & C2FrameData::FLAG_END_OF_STREAM) {
+ frame.mAccessUnitInfos.back().flags |= C2FrameData::FLAG_END_OF_STREAM;
+ }
+ std::shared_ptr<C2AccessUnitInfos::output> largeFrame =
+ C2AccessUnitInfos::output::AllocShared(
+ frame.mAccessUnitInfos.size(), 0u, frame.mAccessUnitInfos);
+ frame.mInfos.push_back(largeFrame);
+ frame.mAccessUnitInfos.clear();
+ }
+ for (auto &info : frame.mInfos) {
+ c2Buffer->setInfo(std::const_pointer_cast<C2Info>(info));
+ }
+ }
+ }
+ if (frame.mConfigUpdate.size() > 0) {
+ outFrameData.configUpdate.insert(
+ outFrameData.configUpdate.end(),
+ make_move_iterator(frame.mConfigUpdate.begin()),
+ make_move_iterator(frame.mConfigUpdate.end()));
}
}
+ frame.mConfigUpdate.clear();
+ frame.mInfos.clear();
+ frame.mBlock.reset();
+ frame.mWview.reset();
+
LOG(DEBUG) << "Multi access-unitflag setting as " << finalFlags;
return C2_OK;
}
@@ -735,6 +837,7 @@
mBlock.reset();
mWview.reset();
mInfos.clear();
+ mConfigUpdate.clear();
mAccessUnitInfos.clear();
mLargeWork.reset();
}
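
In tryReconfigure() above, the ternary chains clamp the requested sizes into [0, MAX_SUPPORTED_SIZE], where MAX_SUPPORTED_SIZE = 10 * 512000 * 8 * 2 = 81,920,000 bytes. Assuming the C2LargeFrame fields are unsigned (so the lower bound is implicit), a simplified equivalent looks like:

    #include <algorithm>
    #include <cstdint>

    static uint32_t clampLargeFrameSize(uint32_t requested) {
        constexpr uint32_t kMaxSupportedSize = 10 * 512000 * 8 * 2u;  // 81,920,000
        return std::min(requested, kMaxSupportedSize);
    }
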
diff --git a/media/codec2/hal/common/include/codec2/common/MultiAccessUnitHelper.h b/media/codec2/hal/common/include/codec2/common/MultiAccessUnitHelper.h
index bb4464c..070a1f5 100644
--- a/media/codec2/hal/common/include/codec2/common/MultiAccessUnitHelper.h
+++ b/media/codec2/hal/common/include/codec2/common/MultiAccessUnitHelper.h
@@ -46,6 +46,7 @@
protected:
bool getDecoderSampleRateAndChannelCount(
uint32_t * const sampleRate_, uint32_t * const channelCount_) const;
+ bool getMaxInputSize(C2StreamMaxBufferSizeInfo::input* const maxInputSize) const;
const std::shared_ptr<C2ComponentInterface> mC2ComponentIntf;
std::shared_ptr<C2LargeFrame::output> mLargeFrameParams;
C2ComponentKindSetting mKind;
@@ -140,6 +141,11 @@
std::vector<std::shared_ptr<const C2Info>> mInfos;
/*
+ * Vector for holding config updates from the wrapper
+ */
+ std::vector<std::unique_ptr<C2Param>> mConfigUpdate;
+
+ /*
* C2AccessUnitInfos for the current buffer
*/
std::vector<C2AccessUnitInfosStruct> mAccessUnitInfos;
@@ -170,6 +176,11 @@
};
/*
+ * Reconfigure helper
+ */
+ bool tryReconfigure(const std::unique_ptr<C2Param> &p);
+
+ /*
* Creates a linear block to be used with work
*/
c2_status_t createLinearBlock(MultiAccessUnitInfo &frame);
@@ -195,6 +206,14 @@
uint32_t size,
int64_t timestamp);
+ // Flag to allow dynamic on/off settings on this helper.
+ // Once the helper is enabled and buffers are in transit, it is not
+ // possible to turn this module off by setting the max output value
+ // to 0. This is because the skip-cut buffer expects the metadata to
+ // always be present along with a valid buffer.
+ // This flag is used to track that state of this module.
+ bool mMultiAccessOnOffAllowed;
+
bool mInit;
// Interface of this module
diff --git a/media/codec2/hal/hidl/1.0/vts/functional/component/VtsHalMediaC2V1_0TargetComponentTest.cpp b/media/codec2/hal/hidl/1.0/vts/functional/component/VtsHalMediaC2V1_0TargetComponentTest.cpp
index ab47b7c..36907e1 100644
--- a/media/codec2/hal/hidl/1.0/vts/functional/component/VtsHalMediaC2V1_0TargetComponentTest.cpp
+++ b/media/codec2/hal/hidl/1.0/vts/functional/component/VtsHalMediaC2V1_0TargetComponentTest.cpp
@@ -263,9 +263,6 @@
ALOGV("mComponent->reset() timeConsumed=%" PRId64 " us", timeConsumed);
ASSERT_EQ(err, C2_OK);
- err = mComponent->start();
- ASSERT_EQ(err, C2_OK);
-
// Query supported params by the component
std::vector<std::shared_ptr<C2ParamDescriptor>> params;
startTime = getNowUs();
@@ -298,6 +295,9 @@
timeConsumed);
}
+ err = mComponent->start();
+ ASSERT_EQ(err, C2_OK);
+
std::list<std::unique_ptr<C2Work>> workList;
startTime = getNowUs();
err = mComponent->queue(&workList);
diff --git a/media/codec2/hal/plugin/FilterWrapper.cpp b/media/codec2/hal/plugin/FilterWrapper.cpp
index 4e03dbb..ab6e3eb 100644
--- a/media/codec2/hal/plugin/FilterWrapper.cpp
+++ b/media/codec2/hal/plugin/FilterWrapper.cpp
@@ -49,11 +49,6 @@
std::weak_ptr<FilterWrapper> filterWrapper)
: mIntf(intf), mFilterWrapper(filterWrapper) {
takeFilters(std::move(filters));
- for (size_t i = 0; i < mFilters.size(); ++i) {
- mControlParamTypes.insert(
- mFilters[i].desc.controlParams.begin(),
- mFilters[i].desc.controlParams.end());
- }
}
~WrappedDecoderInterface() override = default;
@@ -91,6 +86,12 @@
// TODO: documentation
mFilters = std::move(filters);
+ mControlParamTypes.clear();
+ for (size_t i = 0; i < mFilters.size(); ++i) {
+ mControlParamTypes.insert(
+ mFilters[i].desc.controlParams.begin(),
+ mFilters[i].desc.controlParams.end());
+ }
mTypeToIndexForQuery.clear();
mTypeToIndexForConfig.clear();
for (size_t i = 0; i < mFilters.size(); ++i) {
diff --git a/media/codec2/sfplugin/Android.bp b/media/codec2/sfplugin/Android.bp
index 362373e..7076bac 100644
--- a/media/codec2/sfplugin/Android.bp
+++ b/media/codec2/sfplugin/Android.bp
@@ -91,6 +91,10 @@
"libcodec2_client",
],
+ defaults: [
+ "aconfig_lib_cc_static_link.defaults",
+ ],
+
sanitize: {
cfi: true,
misc_undefined: [
diff --git a/media/codec2/sfplugin/C2AidlNode.cpp b/media/codec2/sfplugin/C2AidlNode.cpp
index 93c9d8b..4e46ad6 100644
--- a/media/codec2/sfplugin/C2AidlNode.cpp
+++ b/media/codec2/sfplugin/C2AidlNode.cpp
@@ -68,10 +68,15 @@
}
::ndk::ScopedAStatus C2AidlNode::submitBuffer(
- int32_t buffer, const ::aidl::android::hardware::HardwareBuffer& hBuffer,
+ int32_t buffer,
+ const std::optional<::aidl::android::hardware::HardwareBuffer>& hBuffer,
int32_t flags, int64_t timestamp, const ::ndk::ScopedFileDescriptor& fence) {
sp<GraphicBuffer> gBuf;
- AHardwareBuffer *ahwb = hBuffer.get();
+ AHardwareBuffer *ahwb = nullptr;
+ if (hBuffer.has_value()) {
+ ahwb = hBuffer.value().get();
+ }
+
if (ahwb) {
gBuf = AHardwareBuffer_to_GraphicBuffer(ahwb);
}
@@ -105,6 +110,10 @@
return mImpl->onInputBufferDone(index);
}
+void C2AidlNode::onInputBufferEmptied() {
+ return mImpl->onInputBufferEmptied();
+}
+
android_dataspace C2AidlNode::getDataspace() {
return mImpl->getDataspace();
}
diff --git a/media/codec2/sfplugin/C2AidlNode.h b/media/codec2/sfplugin/C2AidlNode.h
index 365a41d..95290fd 100644
--- a/media/codec2/sfplugin/C2AidlNode.h
+++ b/media/codec2/sfplugin/C2AidlNode.h
@@ -49,7 +49,7 @@
::ndk::ScopedAStatus submitBuffer(
int32_t buffer,
- const ::aidl::android::hardware::HardwareBuffer& hBuffer,
+ const std::optional<::aidl::android::hardware::HardwareBuffer>& hBuffer,
int32_t flags,
int64_t timestampUs,
const ::ndk::ScopedFileDescriptor& fence) override;
@@ -68,13 +68,19 @@
void setFrameSize(uint32_t width, uint32_t height);
/**
- * Clean up work item reference.
+ * Notify that the input buffer reference is no longer needed by the component.
+ * Clean up if necessary.
*
* \param index input work index
*/
void onInputBufferDone(c2_cntr64_t index);
/**
+ * Notify input buffer is emptied.
+ */
+ void onInputBufferEmptied();
+
+ /**
* Returns dataspace information from GraphicBufferSource.
*/
android_dataspace getDataspace();
diff --git a/media/codec2/sfplugin/C2NodeImpl.cpp b/media/codec2/sfplugin/C2NodeImpl.cpp
index 6f53e0f..585072d 100644
--- a/media/codec2/sfplugin/C2NodeImpl.cpp
+++ b/media/codec2/sfplugin/C2NodeImpl.cpp
@@ -25,6 +25,7 @@
#include <C2Debug.h>
#include <C2PlatformSupport.h>
+#include <android_media_codec.h>
#include <android/fdsan.h>
#include <media/stagefright/foundation/ColorUtils.h>
#include <ui/Fence.h>
@@ -373,7 +374,10 @@
}
work->worklets.clear();
work->worklets.emplace_back(new C2Worklet);
- mBufferIdsInUse.lock()->emplace(work->input.ordinal.frameIndex.peeku(), buffer);
+ {
+ Mutexed<BuffersTracker>::Locked buffers(mBuffersTracker);
+ buffers->mIdsInUse.emplace(work->input.ordinal.frameIndex.peeku(), buffer);
+ }
mQueueThread->queue(comp, fenceFd, std::move(work), std::move(fd0), std::move(fd1));
return OK;
@@ -405,29 +409,74 @@
}
void C2NodeImpl::onInputBufferDone(c2_cntr64_t index) {
- if (mAidlHal) {
- if (!mAidlBufferSource) {
- ALOGD("Buffer source not set (index=%llu)", index.peekull());
- return;
- }
- } else {
- if (!mBufferSource) {
- ALOGD("Buffer source not set (index=%llu)", index.peekull());
- return;
- }
- }
-
- int32_t bufferId = 0;
- {
- decltype(mBufferIdsInUse)::Locked bufferIds(mBufferIdsInUse);
- auto it = bufferIds->find(index.peeku());
- if (it == bufferIds->end()) {
+ if (android::media::codec::provider_->input_surface_throttle()) {
+ Mutexed<BuffersTracker>::Locked buffers(mBuffersTracker);
+ auto it = buffers->mIdsInUse.find(index.peeku());
+ if (it == buffers->mIdsInUse.end()) {
ALOGV("Untracked input index %llu (maybe already removed)", index.peekull());
return;
}
- bufferId = it->second;
- (void)bufferIds->erase(it);
+ int32_t bufferId = it->second;
+ (void)buffers->mIdsInUse.erase(it);
+ buffers->mAvailableIds.push_back(bufferId);
+ } else {
+ if (!hasBufferSource()) {
+ return;
+ }
+ int32_t bufferId = 0;
+ {
+ Mutexed<BuffersTracker>::Locked buffers(mBuffersTracker);
+ auto it = buffers->mIdsInUse.find(index.peeku());
+ if (it == buffers->mIdsInUse.end()) {
+ ALOGV("Untracked input index %llu (maybe already removed)", index.peekull());
+ return;
+ }
+ bufferId = it->second;
+ (void)buffers->mIdsInUse.erase(it);
+ }
+ notifyInputBufferEmptied(bufferId);
}
+}
+
+void C2NodeImpl::onInputBufferEmptied() {
+ if (!android::media::codec::provider_->input_surface_throttle()) {
+ ALOGE("onInputBufferEmptied should not be called "
+ "when input_surface_throttle is false");
+ return;
+ }
+ if (!hasBufferSource()) {
+ return;
+ }
+ int32_t bufferId = 0;
+ {
+ Mutexed<BuffersTracker>::Locked buffers(mBuffersTracker);
+ if (buffers->mAvailableIds.empty()) {
+ ALOGV("The codec is ready to take more input buffers "
+ "but no input buffers are ready yet.");
+ return;
+ }
+ bufferId = buffers->mAvailableIds.front();
+ buffers->mAvailableIds.pop_front();
+ }
+ notifyInputBufferEmptied(bufferId);
+}
+
+bool C2NodeImpl::hasBufferSource() {
+ if (mAidlHal) {
+ if (!mAidlBufferSource) {
+ ALOGD("Buffer source not set");
+ return false;
+ }
+ } else {
+ if (!mBufferSource) {
+ ALOGD("Buffer source not set");
+ return false;
+ }
+ }
+ return true;
+}
+
+void C2NodeImpl::notifyInputBufferEmptied(int32_t bufferId) {
if (mAidlHal) {
::ndk::ScopedFileDescriptor nullFence;
(void)mAidlBufferSource->onInputBufferEmptied(bufferId, nullFence);
diff --git a/media/codec2/sfplugin/C2NodeImpl.h b/media/codec2/sfplugin/C2NodeImpl.h
index e060fd8..cc826b4 100644
--- a/media/codec2/sfplugin/C2NodeImpl.h
+++ b/media/codec2/sfplugin/C2NodeImpl.h
@@ -73,13 +73,19 @@
void setFrameSize(uint32_t width, uint32_t height);
/**
- * Clean up work item reference.
+ * Notify that the input buffer reference is no longer needed by the component.
+ * Clean up if necessary.
*
* \param index input work index
*/
void onInputBufferDone(c2_cntr64_t index);
/**
+ * Notify input buffer is emptied.
+ */
+ void onInputBufferEmptied();
+
+ /**
* Returns dataspace information from GraphicBufferSource.
*/
android_dataspace getDataspace();
@@ -118,12 +124,24 @@
c2_cntr64_t mPrevInputTimestamp; // input timestamp for previous frame
c2_cntr64_t mPrevCodecTimestamp; // adjusted (codec) timestamp for previous frame
- Mutexed<std::map<uint64_t, uint32_t>> mBufferIdsInUse;
+ // Tracks the status of buffers
+ struct BuffersTracker {
+ BuffersTracker() = default;
+
+ // Keeps track of buffers that are used by the component. Maps frame index -> buffer ID
+ std::map<uint64_t, uint32_t> mIdsInUse;
+ // Keeps track of the buffer IDs that are available after being released from the component.
+ std::list<uint32_t> mAvailableIds;
+ };
+ Mutexed<BuffersTracker> mBuffersTracker;
class QueueThread;
sp<QueueThread> mQueueThread;
bool mAidlHal;
+
+ bool hasBufferSource();
+ void notifyInputBufferEmptied(int32_t bufferId);
};
} // namespace android
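
The BuffersTracker above is the core of the input-surface throttle: onInputBufferDone() parks a released buffer ID instead of immediately notifying the buffer source, and onInputBufferEmptied() hands one back only when the codec asks for more input. A simplified sketch of that state machine (illustrative only, not the actual class):

    #include <cstdint>
    #include <list>
    #include <map>

    struct BuffersTrackerSketch {
        std::map<uint64_t, uint32_t> idsInUse;  // frame index -> buffer ID held by the component
        std::list<uint32_t> availableIds;       // released IDs not yet returned to the buffer source

        void onInputBufferDone(uint64_t frameIndex) {
            auto it = idsInUse.find(frameIndex);
            if (it == idsInUse.end()) return;   // untracked or already removed
            availableIds.push_back(it->second);
            idsInUse.erase(it);
        }

        // Returns true and pops an ID when one is available; the caller then
        // notifies the buffer source that the buffer has been emptied.
        bool onInputBufferEmptied(uint32_t* bufferId) {
            if (availableIds.empty()) return false;  // codec is ready, but nothing released yet
            *bufferId = availableIds.front();
            availableIds.pop_front();
            return true;
        }
    };
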
diff --git a/media/codec2/sfplugin/C2OMXNode.cpp b/media/codec2/sfplugin/C2OMXNode.cpp
index ce02c88..98e25e2 100644
--- a/media/codec2/sfplugin/C2OMXNode.cpp
+++ b/media/codec2/sfplugin/C2OMXNode.cpp
@@ -291,6 +291,10 @@
return mImpl->onInputBufferDone(index);
}
+void C2OMXNode::onInputBufferEmptied() {
+ return mImpl->onInputBufferEmptied();
+}
+
android_dataspace C2OMXNode::getDataspace() {
return mImpl->getDataspace();
}
diff --git a/media/codec2/sfplugin/C2OMXNode.h b/media/codec2/sfplugin/C2OMXNode.h
index d077202..5549b88 100644
--- a/media/codec2/sfplugin/C2OMXNode.h
+++ b/media/codec2/sfplugin/C2OMXNode.h
@@ -86,13 +86,19 @@
void setFrameSize(uint32_t width, uint32_t height);
/**
- * Clean up work item reference.
+ * Notify that the input buffer reference is no longer needed by the component.
+ * Clean up if necessary.
*
* \param index input work index
*/
void onInputBufferDone(c2_cntr64_t index);
/**
+ * Notify input buffer is emptied.
+ */
+ void onInputBufferEmptied();
+
+ /**
* Returns dataspace information from GraphicBufferSource.
*/
android_dataspace getDataspace();
diff --git a/media/codec2/sfplugin/CCodec.cpp b/media/codec2/sfplugin/CCodec.cpp
index 463b63f..ca0aabb 100644
--- a/media/codec2/sfplugin/CCodec.cpp
+++ b/media/codec2/sfplugin/CCodec.cpp
@@ -444,6 +444,10 @@
mNode->onInputBufferDone(index);
}
+ void onInputBufferEmptied() override {
+ mNode->onInputBufferEmptied();
+ }
+
android_dataspace getDataspace() override {
return mNode->getDataspace();
}
@@ -663,6 +667,10 @@
mNode->onInputBufferDone(index);
}
+ void onInputBufferEmptied() override {
+ mNode->onInputBufferEmptied();
+ }
+
android_dataspace getDataspace() override {
return mNode->getDataspace();
}
@@ -2227,8 +2235,23 @@
// So we reverse their order for stopUseOutputSurface() to notify C2Fence waiters
// prior to comp->stop().
// See also b/300350761.
- mChannel->stopUseOutputSurface(pushBlankBuffer);
- status_t err = comp->stop();
+ //
+ // The workaround is no longer needed with the fetchGraphicBlock & C2Fence changes,
+ // so we revert to the logical sequence of the operations when AIDL HALs are
+ // selected. When HIDL HALs are selected, we retain the workaround (the reversed
+ // order) as the default in order to keep legacy behavior.
+ bool stopHalBeforeSurface =
+ Codec2Client::IsAidlSelected() ||
+ property_get_bool("debug.codec2.stop_hal_before_surface", false);
+ status_t err = C2_OK;
+ if (stopHalBeforeSurface && android::media::codec::provider_->stop_hal_before_surface()) {
+ err = comp->stop();
+ mChannel->stopUseOutputSurface(pushBlankBuffer);
+ } else {
+ mChannel->stopUseOutputSurface(pushBlankBuffer);
+ err = comp->stop();
+ }
if (err != C2_OK) {
// TODO: convert err into status_t
mCallback->onError(UNKNOWN_ERROR, ACTION_CODE_FATAL);
@@ -2323,8 +2346,22 @@
// So we reverse their order for stopUseOutputSurface() to notify C2Fence waiters
// prior to comp->release().
// See also b/300350761.
- mChannel->stopUseOutputSurface(pushBlankBuffer);
- comp->release();
+ //
+ // The workaround is no longer needed with the fetchGraphicBlock & C2Fence changes,
+ // so we revert to the logical sequence of the operations when AIDL HALs are
+ // selected. When HIDL HALs are selected, we retain the workaround (the reversed
+ // order) as the default in order to keep legacy behavior.
+ bool stopHalBeforeSurface =
+ Codec2Client::IsAidlSelected() ||
+ property_get_bool("debug.codec2.stop_hal_before_surface", false);
+ if (stopHalBeforeSurface && android::media::codec::provider_->stop_hal_before_surface()) {
+ comp->release();
+ mChannel->stopUseOutputSurface(pushBlankBuffer);
+ } else {
+ mChannel->stopUseOutputSurface(pushBlankBuffer);
+ comp->release();
+ }
{
Mutexed<State>::Locked state(mState);
diff --git a/media/codec2/sfplugin/CCodecBufferChannel.cpp b/media/codec2/sfplugin/CCodecBufferChannel.cpp
index 3984b83..f0a4180 100644
--- a/media/codec2/sfplugin/CCodecBufferChannel.cpp
+++ b/media/codec2/sfplugin/CCodecBufferChannel.cpp
@@ -1069,6 +1069,10 @@
return;
}
}
+ if (android::media::codec::provider_->input_surface_throttle()
+ && mInputSurface != nullptr) {
+ mInputSurface->onInputBufferEmptied();
+ }
size_t numActiveSlots = 0;
while (!mPipelineWatcher.lock()->pipelineFull()) {
sp<MediaCodecBuffer> inBuffer;
@@ -2784,7 +2788,16 @@
}
void CCodecBufferChannel::setInfoBuffer(const std::shared_ptr<C2InfoBuffer> &buffer) {
- mInfoBuffers.push_back(buffer);
+ if (mInputSurface == nullptr) {
+ mInfoBuffers.push_back(buffer);
+ } else {
+ std::list<std::unique_ptr<C2Work>> items;
+ std::unique_ptr<C2Work> work(new C2Work);
+ work->input.infoBuffers.emplace_back(*buffer);
+ work->worklets.emplace_back(new C2Worklet);
+ items.push_back(std::move(work));
+ c2_status_t err = mComponent->queue(&items);
+ }
}
status_t toStatusT(c2_status_t c2s, c2_operation_t c2op) {
diff --git a/media/codec2/sfplugin/InputSurfaceWrapper.h b/media/codec2/sfplugin/InputSurfaceWrapper.h
index 4bf6cd0..c158c5b 100644
--- a/media/codec2/sfplugin/InputSurfaceWrapper.h
+++ b/media/codec2/sfplugin/InputSurfaceWrapper.h
@@ -102,6 +102,7 @@
}
/**
+ * Notify that the input buffer reference is no longer needed.
* Clean up C2Work related references if necessary. No-op by default.
*
* \param index index of input work.
@@ -109,6 +110,12 @@
virtual void onInputBufferDone(c2_cntr64_t /* index */) {}
/**
+ * Signal one input buffer as emptied.
+ * No-op by default.
+ */
+ virtual void onInputBufferEmptied() {}
+
+ /**
* Returns dataspace information from GraphicBufferSource.
*/
virtual android_dataspace getDataspace() { return mDataSpace; }
diff --git a/media/codec2/sfplugin/utils/Codec2CommonUtils.cpp b/media/codec2/sfplugin/utils/Codec2CommonUtils.cpp
index 77a76e8..7a33af4 100644
--- a/media/codec2/sfplugin/utils/Codec2CommonUtils.cpp
+++ b/media/codec2/sfplugin/utils/Codec2CommonUtils.cpp
@@ -32,10 +32,15 @@
namespace android {
-static bool isAtLeast(int version, const char *codeName) {
- char deviceCodeName[PROP_VALUE_MAX];
- __system_property_get("ro.build.version.codename", deviceCodeName);
- return android_get_device_api_level() >= version || !strcmp(deviceCodeName, codeName);
+static bool isAtLeast(int version, const std::string codeName) {
+ static std::once_flag sCheckOnce;
+ static std::string sDeviceCodeName;
+ static int sDeviceApiLevel;
+ std::call_once(sCheckOnce, [&](){
+ sDeviceCodeName = base::GetProperty("ro.build.version.codename", "");
+ sDeviceApiLevel = android_get_device_api_level();
+ });
+ return sDeviceApiLevel >= version || sDeviceCodeName == codeName;
}
bool isAtLeastT() {
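
The rewritten isAtLeast() caches the codename and API level behind std::call_once so the property is read once per process instead of on every version check. The same pattern in isolation (the values here are placeholders):

    #include <mutex>
    #include <string>

    static const std::string& cachedCodename() {
        static std::once_flag sOnce;
        static std::string sCodename;
        std::call_once(sOnce, [] {
            sCodename = "REL";  // stand-in for base::GetProperty("ro.build.version.codename", "")
        });
        return sCodename;
    }
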
diff --git a/media/codec2/vndk/Android.bp b/media/codec2/vndk/Android.bp
index 9f57bfd..dc06ee6 100644
--- a/media/codec2/vndk/Android.bp
+++ b/media/codec2/vndk/Android.bp
@@ -53,6 +53,7 @@
],
defaults: [
+ "aconfig_lib_cc_static_link.defaults",
"libcodec2_hal_selection",
],
diff --git a/media/codec2/vndk/C2Fence.cpp b/media/codec2/vndk/C2Fence.cpp
index 5d50fc3..3438406 100644
--- a/media/codec2/vndk/C2Fence.cpp
+++ b/media/codec2/vndk/C2Fence.cpp
@@ -533,8 +533,7 @@
break;
default:
ALOGV("Unsupported fence type %d", type);
- // If this is malformed-handle close the handle here.
- (void) native_handle_close(handle);
+ // Nothing else to do. The handle is owned by the caller.
// return a null-fence in this case
break;
}
diff --git a/media/libaudioclient/AudioTrack.cpp b/media/libaudioclient/AudioTrack.cpp
index 2afe80c..d6b1163 100644
--- a/media/libaudioclient/AudioTrack.cpp
+++ b/media/libaudioclient/AudioTrack.cpp
@@ -1707,14 +1707,14 @@
mSelectedDeviceId = deviceId;
if (mStatus == NO_ERROR) {
if (isOffloadedOrDirect_l()) {
- if (mState == STATE_STOPPED || mState == STATE_FLUSHED) {
- ALOGD("%s(%d): creating a new AudioTrack", __func__, mPortId);
- result = restoreTrack_l("setOutputDevice", true /* forceRestore */);
- } else {
+ if (isPlaying_l()) {
ALOGW("%s(%d). Offloaded or Direct track is not STOPPED or FLUSHED. "
"State: %s.",
__func__, mPortId, stateToString(mState));
result = INVALID_OPERATION;
+ } else {
+ ALOGD("%s(%d): creating a new AudioTrack", __func__, mPortId);
+ result = restoreTrack_l("setOutputDevice", true /* forceRestore */);
}
} else {
// allow track invalidation when track is not playing to propagate
diff --git a/media/libaudioclient/IAudioFlinger.cpp b/media/libaudioclient/IAudioFlinger.cpp
index 48f8992..a329edf 100644
--- a/media/libaudioclient/IAudioFlinger.cpp
+++ b/media/libaudioclient/IAudioFlinger.cpp
@@ -918,6 +918,11 @@
return OK;
}
+status_t AudioFlingerClientAdapter::resetReferencesForTest() {
+ RETURN_STATUS_IF_ERROR(statusTFromBinderStatus(mDelegate->resetReferencesForTest()));
+ return OK;
+}
+
////////////////////////////////////////////////////////////////////////////////////////////////////
// AudioFlingerServerAdapter
AudioFlingerServerAdapter::AudioFlingerServerAdapter(
@@ -1476,4 +1481,9 @@
return Status::ok();
}
+Status AudioFlingerServerAdapter::resetReferencesForTest() {
+ RETURN_BINDER_IF_ERROR(mDelegate->resetReferencesForTest());
+ return Status::ok();
+}
+
} // namespace android
diff --git a/media/libaudioclient/aidl/android/media/IAudioFlingerService.aidl b/media/libaudioclient/aidl/android/media/IAudioFlingerService.aidl
index 31d3af5..3046e7f 100644
--- a/media/libaudioclient/aidl/android/media/IAudioFlingerService.aidl
+++ b/media/libaudioclient/aidl/android/media/IAudioFlingerService.aidl
@@ -293,6 +293,12 @@
*/
AudioPortFw getAudioMixPort(in AudioPortFw devicePort, in AudioPortFw mixPort);
+ /**
+ * Reset circular references in the AudioFlinger service.
+ * Test API.
+ */
+ void resetReferencesForTest();
+
// When adding a new method, please review and update
// IAudioFlinger.h AudioFlingerServerAdapter::Delegate::TransactionCode
// AudioFlinger.cpp AudioFlinger::onTransactWrapper()
diff --git a/media/libaudioclient/aidl/fuzzer/Android.bp b/media/libaudioclient/aidl/fuzzer/Android.bp
index 02c865d..1071beb 100644
--- a/media/libaudioclient/aidl/fuzzer/Android.bp
+++ b/media/libaudioclient/aidl/fuzzer/Android.bp
@@ -74,7 +74,7 @@
],
fuzz_config: {
cc: [
- "android-media-fuzzing-reports@google.com",
+ "android-audio-fuzzing-reports@google.com",
],
componentid: 155276,
hotlists: ["4593311"],
diff --git a/media/libaudioclient/include/media/IAudioFlinger.h b/media/libaudioclient/include/media/IAudioFlinger.h
index 5a1e037..1daaafe 100644
--- a/media/libaudioclient/include/media/IAudioFlinger.h
+++ b/media/libaudioclient/include/media/IAudioFlinger.h
@@ -388,6 +388,8 @@
virtual status_t getAudioMixPort(const struct audio_port_v7 *devicePort,
struct audio_port_v7 *mixPort) const = 0;
+
+ virtual status_t resetReferencesForTest() = 0;
};
/**
@@ -504,6 +506,7 @@
status_t getAudioPolicyConfig(media::AudioPolicyConfig* output) override;
status_t getAudioMixPort(const struct audio_port_v7 *devicePort,
struct audio_port_v7 *mixPort) const override;
+ status_t resetReferencesForTest() override;
private:
const sp<media::IAudioFlingerService> mDelegate;
@@ -606,6 +609,8 @@
GET_AUDIO_POLICY_CONFIG =
media::BnAudioFlingerService::TRANSACTION_getAudioPolicyConfig,
GET_AUDIO_MIX_PORT = media::BnAudioFlingerService::TRANSACTION_getAudioMixPort,
+ RESET_REFERENCES_FOR_TEST =
+ media::BnAudioFlingerService::TRANSACTION_resetReferencesForTest,
};
protected:
@@ -742,6 +747,7 @@
Status getAudioMixPort(const media::AudioPortFw& devicePort,
const media::AudioPortFw& mixPort,
media::AudioPortFw* _aidl_return) override;
+ Status resetReferencesForTest() override;
private:
const sp<AudioFlingerServerAdapter::Delegate> mDelegate;
};
diff --git a/media/libaudiohal/impl/Android.bp b/media/libaudiohal/impl/Android.bp
index dd8f021..1a6b949 100644
--- a/media/libaudiohal/impl/Android.bp
+++ b/media/libaudiohal/impl/Android.bp
@@ -227,11 +227,11 @@
"latest_android_hardware_audio_core_sounddose_ndk_shared",
"latest_android_hardware_audio_effect_ndk_shared",
"latest_android_media_audio_common_types_ndk_shared",
+ "latest_av_audio_types_aidl_ndk_shared",
],
shared_libs: [
"android.hardware.common-V2-ndk",
"android.hardware.common.fmq-V1-ndk",
- "av-audio-types-aidl-V1-ndk",
"libaudio_aidl_conversion_common_cpp",
"libaudio_aidl_conversion_common_ndk",
"libaudio_aidl_conversion_common_ndk_cpp",
diff --git a/media/libaudiohal/impl/DeviceHalAidl.cpp b/media/libaudiohal/impl/DeviceHalAidl.cpp
index 2447b18..9f21404 100644
--- a/media/libaudiohal/impl/DeviceHalAidl.cpp
+++ b/media/libaudiohal/impl/DeviceHalAidl.cpp
@@ -386,7 +386,7 @@
return runCb([](CbRef cb) { cb->onWriteReady(); });
}
ndk::ScopedAStatus onError() override {
- return runCb([](CbRef cb) { cb->onError(); });
+ return runCb([](CbRef cb) { cb->onError(true /*isHardError*/); });
}
ndk::ScopedAStatus onDrainReady() override {
return runCb([](CbRef cb) { cb->onDrainReady(); });
diff --git a/media/libaudiohal/impl/EffectHalAidl.cpp b/media/libaudiohal/impl/EffectHalAidl.cpp
index c35a60e..3fe2046 100644
--- a/media/libaudiohal/impl/EffectHalAidl.cpp
+++ b/media/libaudiohal/impl/EffectHalAidl.cpp
@@ -57,7 +57,9 @@
using ::aidl::android::hardware::audio::effect::Descriptor;
using ::aidl::android::hardware::audio::effect::IEffect;
using ::aidl::android::hardware::audio::effect::IFactory;
+using ::aidl::android::hardware::audio::effect::kEventFlagDataMqNotEmpty;
using ::aidl::android::hardware::audio::effect::kEventFlagDataMqUpdate;
+using ::aidl::android::hardware::audio::effect::kEventFlagNotEmpty;
using ::aidl::android::hardware::audio::effect::kReopenSupportedVersion;
using ::aidl::android::hardware::audio::effect::State;
@@ -199,6 +201,7 @@
efState & kEventFlagDataMqUpdate) {
ALOGV("%s %s V%d receive dataMQUpdate eventFlag from HAL", __func__, effectName.c_str(),
halVersion);
+
mConversion->reopen();
}
auto statusQ = mConversion->getStatusMQ();
@@ -224,12 +227,22 @@
floatsToWrite, mInBuffer->audioBuffer(), inputQ->availableToWrite());
return INVALID_OPERATION;
}
- efGroup->wake(aidl::android::hardware::audio::effect::kEventFlagNotEmpty);
+
+ // For the V2 audio effect HAL, a different EventFlag bit is expected to avoid a conflict with FMQ_NOT_EMPTY
+ efGroup->wake(halVersion >= kReopenSupportedVersion ? kEventFlagDataMqNotEmpty
+ : kEventFlagNotEmpty);
IEffect::Status retStatus{};
- if (!statusQ->readBlocking(&retStatus, 1) || retStatus.status != OK ||
- (size_t)retStatus.fmqConsumed != floatsToWrite || retStatus.fmqProduced == 0) {
- ALOGE("%s read status failed: %s", __func__, retStatus.toString().c_str());
+ if (!statusQ->readBlocking(&retStatus, 1)) {
+ ALOGE("%s %s V%d read status from status FMQ failed", __func__, effectName.c_str(),
+ halVersion);
+ return INVALID_OPERATION;
+ }
+ if (retStatus.status != OK || (size_t)retStatus.fmqConsumed != floatsToWrite ||
+ retStatus.fmqProduced == 0) {
+ ALOGE("%s read status failed: %s, consumed %d (of %zu) produced %d", __func__,
+ retStatus.toString().c_str(), retStatus.fmqConsumed, floatsToWrite,
+ retStatus.fmqProduced);
return INVALID_OPERATION;
}
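The wake flag used above is version dependent: per the comment in the diff, V2 HALs expect kEventFlagDataMqNotEmpty so that the wake bit does not collide with the FMQ's own FMQ_NOT_EMPTY bit. A minimal sketch of that selection, using placeholder bit values (the real constants come from the AIDL effect HAL headers, not from this sketch):

    #include <cstdint>

    // Hedged sketch; the numeric values are illustrative placeholders, not the AIDL constants.
    constexpr int32_t kReopenSupportedVersion = 2;           // assumed: first version with reopen support
    constexpr uint32_t kEventFlagNotEmpty = 1u << 0;         // legacy wake bit, conflicts with FMQ_NOT_EMPTY
    constexpr uint32_t kEventFlagDataMqNotEmpty = 1u << 10;  // assumed distinct bit expected by V2+ HALs

    // Pick the bit to wake the effect worker with after writing samples to the data MQ.
    inline uint32_t dataWakeFlag(int32_t halVersion) {
        return halVersion >= kReopenSupportedVersion ? kEventFlagDataMqNotEmpty : kEventFlagNotEmpty;
    }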
diff --git a/media/libaudiohal/impl/EffectProxy.cpp b/media/libaudiohal/impl/EffectProxy.cpp
index 9aa02e2..fb4658f 100644
--- a/media/libaudiohal/impl/EffectProxy.cpp
+++ b/media/libaudiohal/impl/EffectProxy.cpp
@@ -82,8 +82,7 @@
ndk::ScopedAStatus EffectProxy::setOffloadParam(const effect_offload_param_t* offload) {
const auto& itor = std::find_if(mSubEffects.begin(), mSubEffects.end(), [&](const auto& sub) {
const auto& desc = sub.descriptor;
- return offload->isOffload ==
- (desc.common.flags.hwAcceleratorMode == Flags::HardwareAccelerator::TUNNEL);
+ return offload->isOffload == desc.common.flags.offloadIndication;
});
if (itor == mSubEffects.end()) {
ALOGE("%s no %soffload sub-effect found", __func__, offload->isOffload ? "" : "non-");
@@ -93,7 +92,7 @@
}
mActiveSubIdx = std::distance(mSubEffects.begin(), itor);
- ALOGI("%s: active %soffload sub-effect %zu descriptor: %s", __func__,
+ ALOGI("%s: active %soffload sub-effect %zu: %s", __func__,
offload->isOffload ? "" : "non-", mActiveSubIdx,
::android::audio::utils::toString(mSubEffects[mActiveSubIdx].descriptor.common.id.uuid)
.c_str());
@@ -163,7 +162,7 @@
ndk::ScopedAStatus EffectProxy::getDescriptor(Descriptor* desc) {
*desc = mSubEffects[mActiveSubIdx].descriptor;
- desc->common.id.uuid = desc->common.id.proxy.value();
+ desc->common = mDescriptorCommon;
return ndk::ScopedAStatus::ok();
}
@@ -185,42 +184,35 @@
return ndk::ScopedAStatus::ok();
}
+// Sub-effects are required to have identical features, so here we return the SW sub-effect
+// descriptor, with the implementation UUID replaced by the proxy UUID, and with flag settings
+// that reflect all sub-effects.
Descriptor::Common EffectProxy::buildDescriptorCommon(
const AudioUuid& uuid, const std::vector<Descriptor>& subEffectDescs) {
- // initial flag values before we know which sub-effect to active (with setOffloadParam)
- // align to HIDL EffectProxy flags
- Descriptor::Common common = {.flags = {.type = Flags::Type::INSERT,
- .insert = Flags::Insert::LAST,
- .volume = Flags::Volume::CTRL}};
-
+ Descriptor::Common swCommon;
+ const Flags& firstFlag = subEffectDescs[0].common.flags;
+ bool offloadExist = false;
for (const auto& desc : subEffectDescs) {
- if (desc.common.flags.hwAcceleratorMode == Flags::HardwareAccelerator::TUNNEL) {
- common.flags.hwAcceleratorMode = Flags::HardwareAccelerator::TUNNEL;
+ if (desc.common.flags.offloadIndication) {
+ offloadExist = true;
+ } else {
+ swCommon = desc.common;
}
-
- // set indication if any sub-effect indication was set
- common.flags.offloadIndication |= desc.common.flags.offloadIndication;
- common.flags.deviceIndication |= desc.common.flags.deviceIndication;
- common.flags.audioModeIndication |= desc.common.flags.audioModeIndication;
- common.flags.audioSourceIndication |= desc.common.flags.audioSourceIndication;
- // Set to NONE if any sub-effect not supporting any Volume command
- if (desc.common.flags.volume == Flags::Volume::NONE) {
- common.flags.volume = Flags::Volume::NONE;
- }
- // set to AUXILIARY if any sub-effect is of AUXILIARY type
- if (desc.common.flags.type == Flags::Type::AUXILIARY) {
- common.flags.type = Flags::Type::AUXILIARY;
+ if (desc.common.flags.audioModeIndication != firstFlag.audioModeIndication ||
+ desc.common.flags.audioSourceIndication != firstFlag.audioSourceIndication ||
+ desc.common.flags.sinkMetadataIndication != firstFlag.sinkMetadataIndication ||
+ desc.common.flags.sourceMetadataIndication != firstFlag.sourceMetadataIndication ||
+ desc.common.flags.deviceIndication != firstFlag.deviceIndication) {
+ ALOGW("Inconsistent flags %s vs %s", desc.common.flags.toString().c_str(),
+ firstFlag.toString().c_str());
}
}
- // copy type UUID from any of sub-effects, all sub-effects should have same type
- common.id.type = subEffectDescs[0].common.id.type;
+ swCommon.flags.offloadIndication = offloadExist;
// replace implementation UUID with proxy UUID.
- common.id.uuid = uuid;
- common.id.proxy = std::nullopt;
- common.name = "Proxy";
- common.implementor = "AOSP";
- return common;
+ swCommon.id.uuid = uuid;
+ swCommon.id.proxy = std::nullopt;
+ return swCommon;
}
// Handle with active sub-effect first, only send to other sub-effects when success
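buildDescriptorCommon now derives the proxy's common descriptor from the software sub-effect instead of synthesizing HIDL-style defaults. A hedged sketch of the aggregation rule, with simplified stand-in types (not the AIDL Descriptor types):

    #include <vector>

    // Simplified stand-ins, for illustration only.
    struct Flags { bool offloadIndication = false; };
    struct Id { int uuid = 0; };
    struct Common { Id id; Flags flags; };
    struct Desc { Common common; };

    // Start from a non-offload (SW) sub-effect, OR the offload indication across all
    // sub-effects, and expose the proxy UUID instead of an implementation UUID.
    Common buildCommonSketch(int proxyUuid, const std::vector<Desc>& subs) {
        Common swCommon{};
        bool offloadExist = false;
        for (const auto& d : subs) {
            if (d.common.flags.offloadIndication) {
                offloadExist = true;      // at least one sub-effect runs offloaded
            } else {
                swCommon = d.common;      // keep the SW sub-effect as the baseline
            }
            // (the real code also warns when indication flags disagree between sub-effects)
        }
        swCommon.flags.offloadIndication = offloadExist;
        swCommon.id.uuid = proxyUuid;
        return swCommon;
    }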
diff --git a/media/libaudiohal/impl/EffectsFactoryHalAidl.cpp b/media/libaudiohal/impl/EffectsFactoryHalAidl.cpp
index 3b2f344..64cc7ed 100644
--- a/media/libaudiohal/impl/EffectsFactoryHalAidl.cpp
+++ b/media/libaudiohal/impl/EffectsFactoryHalAidl.cpp
@@ -188,7 +188,6 @@
aidlEffect = ndk::SharedRefBase::make<EffectProxy>(
aidlUuid, mProxyUuidDescriptorMap.at(aidlUuid) /* sub-effect descriptor list */,
mFactory);
- mProxyList.emplace_back(std::static_pointer_cast<EffectProxy>(aidlEffect));
} else {
RETURN_STATUS_IF_ERROR(
statusTFromBinderStatus(mFactory->createEffect(aidlUuid, &aidlEffect)));
@@ -205,15 +204,9 @@
}
status_t EffectsFactoryHalAidl::dumpEffects(int fd) {
- status_t ret = OK;
- // record the error ret and continue dump as many effects as possible
- for (const auto& proxy : mProxyList) {
- if (status_t temp = BAD_VALUE; proxy && (temp = proxy->dump(fd, nullptr, 0)) != OK) {
- ret = temp;
- }
- }
+ // TODO: b/333803769 improve the effect dump implementation
RETURN_STATUS_IF_ERROR(mFactory->dump(fd, nullptr, 0));
- return ret;
+ return OK;
}
status_t EffectsFactoryHalAidl::allocateBuffer(size_t size, sp<EffectBufferHalInterface>* buffer) {
diff --git a/media/libaudiohal/impl/EffectsFactoryHalAidl.h b/media/libaudiohal/impl/EffectsFactoryHalAidl.h
index 73089b0..3b8628c 100644
--- a/media/libaudiohal/impl/EffectsFactoryHalAidl.h
+++ b/media/libaudiohal/impl/EffectsFactoryHalAidl.h
@@ -84,9 +84,6 @@
// Query result of pre and post processing from effect factory
const std::vector<Processing> mAidlProcessings;
- // list of the EffectProxy instances
- std::list<std::shared_ptr<EffectProxy>> mProxyList;
-
virtual ~EffectsFactoryHalAidl() = default;
status_t getHalDescriptorWithImplUuid(
const ::aidl::android::media::audio::common::AudioUuid& uuid,
diff --git a/media/libaudiohal/impl/Hal2AidlMapper.cpp b/media/libaudiohal/impl/Hal2AidlMapper.cpp
index 263e3e9..cbade70 100644
--- a/media/libaudiohal/impl/Hal2AidlMapper.cpp
+++ b/media/libaudiohal/impl/Hal2AidlMapper.cpp
@@ -136,8 +136,8 @@
// 'sinks' will not be updated because 'setAudioPatch' only needs IDs. Here we log
// the source arguments, where only the audio configuration and device specifications
// are relevant.
- ALOGD("%s: [disregard IDs] sources: %s, sinks: %s",
- __func__, ::android::internal::ToString(sources).c_str(),
+ ALOGD("%s: patch ID: %d, [disregard IDs] sources: %s, sinks: %s",
+ __func__, *patchId, ::android::internal::ToString(sources).c_str(),
::android::internal::ToString(sinks).c_str());
auto fillPortConfigs = [&](
const std::vector<AudioPortConfig>& configs,
@@ -181,7 +181,9 @@
};
// When looking up port configs, the destinationPortId is only used for mix ports.
// Thus, we process device port configs first, and look up the destination port ID from them.
- bool sourceIsDevice = std::any_of(sources.begin(), sources.end(),
+ const bool sourceIsDevice = std::any_of(sources.begin(), sources.end(),
+ [](const auto& config) { return config.ext.getTag() == AudioPortExt::device; });
+ const bool sinkIsDevice = std::any_of(sinks.begin(), sinks.end(),
[](const auto& config) { return config.ext.getTag() == AudioPortExt::device; });
const std::vector<AudioPortConfig>& devicePortConfigs =
sourceIsDevice ? sources : sinks;
@@ -202,10 +204,29 @@
existingPatchIt->second = patch;
} else {
bool created = false;
- RETURN_STATUS_IF_ERROR(findOrCreatePatch(patch, &patch, &created));
+ // When the framework does not specify a patch ID, only the mix port config
+ // is used for finding an existing patch. That's because the framework assumes
+ // that there can only be one patch for an I/O thread.
+ PatchMatch match = sourceIsDevice && sinkIsDevice ?
+ MATCH_BOTH : (sourceIsDevice ? MATCH_SINKS : MATCH_SOURCES);
+ auto requestedPatch = patch;
+ RETURN_STATUS_IF_ERROR(findOrCreatePatch(patch, match,
+ &patch, &created));
// No cleanup of the patch is needed, it is managed by the framework.
*patchId = patch.id;
if (!created) {
+ requestedPatch.id = patch.id;
+ if (patch != requestedPatch) {
+ ALOGI("%s: Updating transient patch. Current: %s, new: %s",
+ __func__, patch.toString().c_str(), requestedPatch.toString().c_str());
+ // Since matching may be done by mix port only, update the patch if the device port
+ // config has changed.
+ patch = requestedPatch;
+ RETURN_STATUS_IF_ERROR(statusTFromBinderStatus(
+ mModule->setAudioPatch(patch, &patch)));
+ existingPatchIt = mPatches.find(patch.id);
+ existingPatchIt->second = patch;
+ }
// The framework might have "created" a patch which already existed due to
// stream creation. Need to release the ownership from the stream.
for (auto& s : mStreams) {
@@ -274,18 +295,18 @@
}
status_t Hal2AidlMapper::findOrCreatePatch(
- const AudioPatch& requestedPatch, AudioPatch* patch, bool* created) {
+ const AudioPatch& requestedPatch, PatchMatch match, AudioPatch* patch, bool* created) {
std::set<int32_t> sourcePortConfigIds(requestedPatch.sourcePortConfigIds.begin(),
requestedPatch.sourcePortConfigIds.end());
std::set<int32_t> sinkPortConfigIds(requestedPatch.sinkPortConfigIds.begin(),
requestedPatch.sinkPortConfigIds.end());
- return findOrCreatePatch(sourcePortConfigIds, sinkPortConfigIds, patch, created);
+ return findOrCreatePatch(sourcePortConfigIds, sinkPortConfigIds, match, patch, created);
}
status_t Hal2AidlMapper::findOrCreatePatch(
const std::set<int32_t>& sourcePortConfigIds, const std::set<int32_t>& sinkPortConfigIds,
- AudioPatch* patch, bool* created) {
- auto patchIt = findPatch(sourcePortConfigIds, sinkPortConfigIds);
+ PatchMatch match, AudioPatch* patch, bool* created) {
+ auto patchIt = findPatch(sourcePortConfigIds, sinkPortConfigIds, match);
if (patchIt == mPatches.end()) {
AudioPatch requestedPatch, appliedPatch;
requestedPatch.sourcePortConfigIds.insert(requestedPatch.sourcePortConfigIds.end(),
@@ -456,7 +477,8 @@
}
Hal2AidlMapper::Patches::iterator Hal2AidlMapper::findPatch(
- const std::set<int32_t>& sourcePortConfigIds, const std::set<int32_t>& sinkPortConfigIds) {
+ const std::set<int32_t>& sourcePortConfigIds, const std::set<int32_t>& sinkPortConfigIds,
+ PatchMatch match) {
return std::find_if(mPatches.begin(), mPatches.end(),
[&](const auto& pair) {
const auto& p = pair.second;
@@ -464,7 +486,15 @@
p.sourcePortConfigIds.begin(), p.sourcePortConfigIds.end());
std::set<int32_t> patchSinks(
p.sinkPortConfigIds.begin(), p.sinkPortConfigIds.end());
- return sourcePortConfigIds == patchSrcs && sinkPortConfigIds == patchSinks; });
+ switch (match) {
+ case MATCH_SOURCES:
+ return sourcePortConfigIds == patchSrcs;
+ case MATCH_SINKS:
+ return sinkPortConfigIds == patchSinks;
+ case MATCH_BOTH:
+ return sourcePortConfigIds == patchSrcs && sinkPortConfigIds == patchSinks;
+ }
+ });
}
Hal2AidlMapper::Ports::iterator Hal2AidlMapper::findPort(const AudioDevice& device) {
@@ -816,10 +846,10 @@
}
if (isInput) {
RETURN_STATUS_IF_ERROR(findOrCreatePatch(
- {devicePortConfigId}, {mixPortConfig->id}, patch, &created));
+ {devicePortConfigId}, {mixPortConfig->id}, MATCH_BOTH, patch, &created));
} else {
RETURN_STATUS_IF_ERROR(findOrCreatePatch(
- {mixPortConfig->id}, {devicePortConfigId}, patch, &created));
+ {mixPortConfig->id}, {devicePortConfigId}, MATCH_BOTH, patch, &created));
}
if (created) {
cleanups->add(&Hal2AidlMapper::resetPatch, patch->id);
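The new PatchMatch parameter is the heart of this file's change: a transient patch created at stream-open time is later found by its mix-port side only, so the framework can move the device side without creating a second patch. A hedged sketch of the matching and mode selection (container types simplified, logic mirrored from the hunks above):

    #include <cstdint>
    #include <set>

    enum PatchMatch { MATCH_SOURCES, MATCH_SINKS, MATCH_BOTH };

    // Does an existing patch satisfy the requested port config IDs under 'match'?
    bool patchMatches(const std::set<int32_t>& reqSrcs, const std::set<int32_t>& reqSinks,
                      const std::set<int32_t>& patchSrcs, const std::set<int32_t>& patchSinks,
                      PatchMatch match) {
        switch (match) {
            case MATCH_SOURCES: return reqSrcs == patchSrcs;
            case MATCH_SINKS:   return reqSinks == patchSinks;
            case MATCH_BOTH:    return reqSrcs == patchSrcs && reqSinks == patchSinks;
        }
        return false;
    }

    // Mode selection as in createOrUpdatePatch: device<->device patches must match exactly;
    // otherwise match on the mix-port side only (sinks when the source is a device).
    PatchMatch chooseMatch(bool sourceIsDevice, bool sinkIsDevice) {
        return sourceIsDevice && sinkIsDevice
                ? MATCH_BOTH
                : (sourceIsDevice ? MATCH_SINKS : MATCH_SOURCES);
    }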
diff --git a/media/libaudiohal/impl/Hal2AidlMapper.h b/media/libaudiohal/impl/Hal2AidlMapper.h
index f302c23..c70c8af 100644
--- a/media/libaudiohal/impl/Hal2AidlMapper.h
+++ b/media/libaudiohal/impl/Hal2AidlMapper.h
@@ -133,6 +133,8 @@
using Streams = std::map<wp<StreamHalInterface>,
std::pair<int32_t /*mix port config ID*/, int32_t /*patch ID*/>>;
+ enum PatchMatch { MATCH_SOURCES, MATCH_SINKS, MATCH_BOTH };
+
const std::string mInstance;
const std::shared_ptr<::aidl::android::hardware::audio::core::IModule> mModule;
@@ -150,11 +152,13 @@
::aidl::android::media::audio::common::AudioPortConfig* result, bool *created);
void eraseConnectedPort(int32_t portId);
status_t findOrCreatePatch(
- const std::set<int32_t>& sourcePortConfigIds,
- const std::set<int32_t>& sinkPortConfigIds,
+ const std::set<int32_t>& sourcePortConfigIds,
+ const std::set<int32_t>& sinkPortConfigIds,
+ PatchMatch match,
::aidl::android::hardware::audio::core::AudioPatch* patch, bool* created);
status_t findOrCreatePatch(
const ::aidl::android::hardware::audio::core::AudioPatch& requestedPatch,
+ PatchMatch match,
::aidl::android::hardware::audio::core::AudioPatch* patch, bool* created);
status_t findOrCreateDevicePortConfig(
const ::aidl::android::media::audio::common::AudioDevice& device,
@@ -175,7 +179,7 @@
const std::set<int32_t>& destinationPortIds,
::aidl::android::media::audio::common::AudioPortConfig* portConfig, bool* created);
Patches::iterator findPatch(const std::set<int32_t>& sourcePortConfigIds,
- const std::set<int32_t>& sinkPortConfigIds);
+ const std::set<int32_t>& sinkPortConfigIds, PatchMatch match);
Ports::iterator findPort(const ::aidl::android::media::audio::common::AudioDevice& device);
Ports::iterator findPort(
const ::aidl::android::media::audio::common::AudioConfig& config,
diff --git a/media/libaudiohal/impl/StreamHalAidl.cpp b/media/libaudiohal/impl/StreamHalAidl.cpp
index 97c9659..6c0dc76 100644
--- a/media/libaudiohal/impl/StreamHalAidl.cpp
+++ b/media/libaudiohal/impl/StreamHalAidl.cpp
@@ -200,8 +200,12 @@
StreamDescriptor::Reply reply;
switch (state) {
case StreamDescriptor::State::ACTIVE:
+ case StreamDescriptor::State::DRAINING:
+ case StreamDescriptor::State::TRANSFERRING:
RETURN_STATUS_IF_ERROR(pause(&reply));
- if (reply.state != StreamDescriptor::State::PAUSED) {
+ if (reply.state != StreamDescriptor::State::PAUSED &&
+ reply.state != StreamDescriptor::State::DRAIN_PAUSED &&
+ reply.state != StreamDescriptor::State::TRANSFER_PAUSED) {
ALOGE("%s: unexpected stream state: %s (expected PAUSED)",
__func__, toString(reply.state).c_str());
return INVALID_OPERATION;
@@ -209,6 +213,7 @@
FALLTHROUGH_INTENDED;
case StreamDescriptor::State::PAUSED:
case StreamDescriptor::State::DRAIN_PAUSED:
+ case StreamDescriptor::State::TRANSFER_PAUSED:
if (mIsInput) return flush();
RETURN_STATUS_IF_ERROR(flush(&reply));
if (reply.state != StreamDescriptor::State::IDLE) {
@@ -248,20 +253,71 @@
ALOGD("%p %s::%s", this, getClassName().c_str(), __func__);
TIME_CHECK();
if (!mStream) return NO_INIT;
- const auto state = getState();
- StreamDescriptor::Reply reply;
- if (state == StreamDescriptor::State::STANDBY) {
- RETURN_STATUS_IF_ERROR(sendCommand(makeHalCommand<HalCommand::Tag::start>(), &reply, true));
- return sendCommand(makeHalCommand<HalCommand::Tag::burst>(0), &reply, true);
+ if (!mContext.isMmapped()) {
+ return BAD_VALUE;
}
-
- return INVALID_OPERATION;
+ StreamDescriptor::Reply reply;
+ RETURN_STATUS_IF_ERROR(updateCountersIfNeeded(&reply));
+ switch (reply.state) {
+ case StreamDescriptor::State::STANDBY:
+ RETURN_STATUS_IF_ERROR(
+ sendCommand(makeHalCommand<HalCommand::Tag::start>(), &reply, true));
+ if (reply.state != StreamDescriptor::State::IDLE) {
+ ALOGE("%s: unexpected stream state: %s (expected IDLE)",
+ __func__, toString(reply.state).c_str());
+ return INVALID_OPERATION;
+ }
+ FALLTHROUGH_INTENDED;
+ case StreamDescriptor::State::IDLE:
+ RETURN_STATUS_IF_ERROR(
+ sendCommand(makeHalCommand<HalCommand::Tag::burst>(0), &reply, true));
+ if (reply.state != StreamDescriptor::State::ACTIVE) {
+ ALOGE("%s: unexpected stream state: %s (expected ACTIVE)",
+ __func__, toString(reply.state).c_str());
+ return INVALID_OPERATION;
+ }
+ FALLTHROUGH_INTENDED;
+ case StreamDescriptor::State::ACTIVE:
+ return OK;
+ case StreamDescriptor::State::DRAINING:
+ RETURN_STATUS_IF_ERROR(
+ sendCommand(makeHalCommand<HalCommand::Tag::start>(), &reply, true));
+ if (reply.state != StreamDescriptor::State::ACTIVE) {
+ ALOGE("%s: unexpected stream state: %s (expected ACTIVE)",
+ __func__, toString(reply.state).c_str());
+ return INVALID_OPERATION;
+ }
+ return OK;
+ default:
+ ALOGE("%s: not supported from %s stream state %s",
+ __func__, mIsInput ? "input" : "output", toString(reply.state).c_str());
+ return INVALID_OPERATION;
+ }
}
status_t StreamHalAidl::stop() {
ALOGD("%p %s::%s", this, getClassName().c_str(), __func__);
+ TIME_CHECK();
if (!mStream) return NO_INIT;
- return standby();
+ if (!mContext.isMmapped()) {
+ return BAD_VALUE;
+ }
+ StreamDescriptor::Reply reply;
+ RETURN_STATUS_IF_ERROR(updateCountersIfNeeded(&reply));
+ if (const auto state = reply.state; state == StreamDescriptor::State::ACTIVE) {
+ return drain(false /*earlyNotify*/, nullptr);
+ } else if (state == StreamDescriptor::State::DRAINING) {
+ RETURN_STATUS_IF_ERROR(pause());
+ return flush();
+ } else if (state == StreamDescriptor::State::PAUSED) {
+ return flush();
+ } else if (state != StreamDescriptor::State::IDLE &&
+ state != StreamDescriptor::State::STANDBY) {
+ ALOGE("%s: not supported from %s stream state %s",
+ __func__, mIsInput ? "input" : "output", toString(state).c_str());
+ return INVALID_OPERATION;
+ }
+ return OK;
}
status_t StreamHalAidl::getLatency(uint32_t *latency) {
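The rewritten start()/stop() above apply only to MMAP streams and drive the AIDL StreamDescriptor state machine instead of falling back to standby(). A hedged, self-contained sketch of the stop() dispatch (states and actions are simplified stand-ins; the real code sends commands over the FMQ and checks the returned state):

    #include <cstdio>

    enum class State { STANDBY, IDLE, ACTIVE, DRAINING, PAUSED, OTHER };

    const char* stopActionFor(State s) {
        switch (s) {
            case State::ACTIVE:   return "drain(false)";          // let the HAL play out buffered data
            case State::DRAINING: return "pause(), then flush()";  // drop the remainder, go IDLE
            case State::PAUSED:   return "flush()";
            case State::IDLE:
            case State::STANDBY:  return "nothing to do";
            default:              return "INVALID_OPERATION";
        }
    }

    int main() {
        std::printf("stop() from ACTIVE -> %s\n", stopActionFor(State::ACTIVE));
        std::printf("stop() from DRAINING -> %s\n", stopActionFor(State::DRAINING));
        return 0;
    }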
@@ -276,11 +332,12 @@
return OK;
}
-status_t StreamHalAidl::getObservablePosition(int64_t *frames, int64_t *timestamp) {
+status_t StreamHalAidl::getObservablePosition(int64_t* frames, int64_t* timestamp,
+ StatePositions* statePositions) {
ALOGV("%p %s::%s", this, getClassName().c_str(), __func__);
if (!mStream) return NO_INIT;
StreamDescriptor::Reply reply;
- RETURN_STATUS_IF_ERROR(updateCountersIfNeeded(&reply));
+ RETURN_STATUS_IF_ERROR(updateCountersIfNeeded(&reply, statePositions));
*frames = std::max<int64_t>(0, reply.observable.frames);
*timestamp = std::max<int64_t>(0, reply.observable.timeNs);
return OK;
@@ -323,8 +380,11 @@
return INVALID_OPERATION;
}
}
+ StreamContextAidl::DataMQ::Error fmqError = StreamContextAidl::DataMQ::Error::NONE;
+ std::string fmqErrorMsg;
if (!mIsInput) {
- bytes = std::min(bytes, mContext.getDataMQ()->availableToWrite());
+ bytes = std::min(bytes,
+ mContext.getDataMQ()->availableToWrite(&fmqError, &fmqErrorMsg));
}
StreamDescriptor::Command burst =
StreamDescriptor::Command::make<StreamDescriptor::Command::Tag::burst>(bytes);
@@ -341,12 +401,14 @@
LOG_ALWAYS_FATAL_IF(*transferred > bytes,
"%s: HAL module read %zu bytes, which exceeds requested count %zu",
__func__, *transferred, bytes);
- if (auto toRead = mContext.getDataMQ()->availableToRead();
+ if (auto toRead = mContext.getDataMQ()->availableToRead(&fmqError, &fmqErrorMsg);
toRead != 0 && !mContext.getDataMQ()->read(static_cast<int8_t*>(buffer), toRead)) {
ALOGE("%s: failed to read %zu bytes to data MQ", __func__, toRead);
return NOT_ENOUGH_DATA;
}
}
+ LOG_ALWAYS_FATAL_IF(fmqError != StreamContextAidl::DataMQ::Error::NONE,
+ "%s", fmqErrorMsg.c_str());
mStreamPowerLog.log(buffer, *transferred);
return OK;
}
@@ -379,10 +441,12 @@
return INVALID_OPERATION;
}
return OK;
- } else if (state == StreamDescriptor::State::PAUSED) {
+ } else if (state == StreamDescriptor::State::PAUSED ||
+ state == StreamDescriptor::State::TRANSFER_PAUSED ||
+ state == StreamDescriptor::State::DRAIN_PAUSED) {
return sendCommand(makeHalCommand<HalCommand::Tag::start>(), reply);
} else {
- ALOGE("%s: unexpected stream state: %s (expected IDLE or PAUSED)",
+ ALOGE("%s: unexpected stream state: %s (expected IDLE or one of *PAUSED states)",
__func__, toString(state).c_str());
return INVALID_OPERATION;
}
@@ -430,8 +494,12 @@
if (auto state = getState(); state == StreamDescriptor::State::DRAINING) {
// Retrieve the current state together with position counters unconditionally
// to ensure that the state on our side gets updated.
- sendCommand(makeHalCommand<HalCommand::Tag::getStatus>(),
- nullptr, true /*safeFromNonWorkerThread */);
+ sendCommand(makeHalCommand<HalCommand::Tag::getStatus>(), nullptr,
+ true /*safeFromNonWorkerThread */);
+ // For compatibility with HIDL behavior, apply a "soft" position reset
+ // after receiving the "drain ready" callback.
+ std::lock_guard l(mLock);
+ mStatePositions.framesAtFlushOrDrain = mLastReply.observable.frames;
} else {
ALOGW("%s: unexpected onDrainReady in the state %s", __func__, toString(state).c_str());
}
@@ -439,15 +507,8 @@
void StreamHalAidl::onAsyncError() {
std::lock_guard l(mLock);
- if (mLastReply.state == StreamDescriptor::State::IDLE ||
- mLastReply.state == StreamDescriptor::State::DRAINING ||
- mLastReply.state == StreamDescriptor::State::TRANSFERRING) {
- mLastReply.state = StreamDescriptor::State::ERROR;
- ALOGW("%s: onError received", __func__);
- } else {
- ALOGW("%s: unexpected onError in the state %s", __func__,
- toString(mLastReply.state).c_str());
- }
+ ALOGW("%s: received in the state %s", __func__, toString(mLastReply.state).c_str());
+ mLastReply.state = StreamDescriptor::State::ERROR;
}
status_t StreamHalAidl::createMmapBuffer(int32_t minSizeFrames __unused,
@@ -498,9 +559,9 @@
}
status_t StreamHalAidl::sendCommand(
- const ::aidl::android::hardware::audio::core::StreamDescriptor::Command &command,
+ const ::aidl::android::hardware::audio::core::StreamDescriptor::Command& command,
::aidl::android::hardware::audio::core::StreamDescriptor::Reply* reply,
- bool safeFromNonWorkerThread) {
+ bool safeFromNonWorkerThread, StatePositions* statePositions) {
// TIME_CHECK(); // TODO(b/243839867) reenable only when optimized.
if (!safeFromNonWorkerThread) {
const pid_t workerTid = mWorkerTid.load(std::memory_order_acquire);
@@ -532,6 +593,23 @@
}
mLastReply = *reply;
mLastReplyExpirationNs = uptimeNanos() + mLastReplyLifeTimeNs;
+ if (!mIsInput && reply->status == STATUS_OK) {
+ if (command.getTag() == StreamDescriptor::Command::standby &&
+ reply->state == StreamDescriptor::State::STANDBY) {
+ mStatePositions.framesAtStandby = reply->observable.frames;
+ } else if (command.getTag() == StreamDescriptor::Command::flush &&
+ reply->state == StreamDescriptor::State::IDLE) {
+ mStatePositions.framesAtFlushOrDrain = reply->observable.frames;
+ } else if (!mContext.isAsynchronous() &&
+ command.getTag() == StreamDescriptor::Command::drain &&
+ (reply->state == StreamDescriptor::State::IDLE ||
+ reply->state == StreamDescriptor::State::DRAINING)) {
+ mStatePositions.framesAtFlushOrDrain = reply->observable.frames;
+ } // for asynchronous drain, the frame count is saved in 'onAsyncDrainReady'
+ }
+ if (statePositions != nullptr) {
+ *statePositions = mStatePositions;
+ }
}
}
switch (reply->status) {
@@ -547,7 +625,8 @@
}
status_t StreamHalAidl::updateCountersIfNeeded(
- ::aidl::android::hardware::audio::core::StreamDescriptor::Reply* reply) {
+ ::aidl::android::hardware::audio::core::StreamDescriptor::Reply* reply,
+ StatePositions* statePositions) {
bool doUpdate = false;
{
std::lock_guard l(mLock);
@@ -557,10 +636,13 @@
// Since updates are paced, it is OK to perform them from any thread, they should
// not interfere with I/O operations of the worker.
return sendCommand(makeHalCommand<HalCommand::Tag::getStatus>(),
- reply, true /*safeFromNonWorkerThread */);
+ reply, true /*safeFromNonWorkerThread */, statePositions);
} else if (reply != nullptr) { // provide cached reply
std::lock_guard l(mLock);
*reply = mLastReply;
+ if (statePositions != nullptr) {
+ *statePositions = mStatePositions;
+ }
}
return OK;
}
@@ -618,7 +700,7 @@
status_t StreamOutHalAidl::setVolume(float left, float right) {
TIME_CHECK();
if (!mStream) return NO_INIT;
- size_t channelCount = audio_channel_out_mask_from_count(mConfig.channel_mask);
+ size_t channelCount = audio_channel_count_from_out_mask(mConfig.channel_mask);
if (channelCount == 0) channelCount = 2;
std::vector<float> volumes(channelCount);
if (channelCount == 1) {
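The one-line fix above replaces a mask-from-count helper with the count-from-mask helper; the old call was handed a channel mask where a channel count was expected, so channelCount was wrong for anything but coincidental values. A hedged worked example assuming the standard Android system/audio.h helpers are available:

    #include <system/audio.h>   // assumes the Android system headers
    #include <cstdio>

    int main() {
        const audio_channel_mask_t mask = AUDIO_CHANNEL_OUT_STEREO;  // FRONT_LEFT | FRONT_RIGHT
        // Correct direction: mask -> number of channels (2 for stereo).
        const size_t count = audio_channel_count_from_out_mask(mask);
        std::printf("channels in stereo mask: %zu\n", count);
        return 0;
    }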
@@ -647,21 +729,27 @@
return transfer(const_cast<void*>(buffer), bytes, written);
}
-status_t StreamOutHalAidl::getRenderPosition(uint32_t *dspFrames) {
+status_t StreamOutHalAidl::getRenderPosition(uint64_t *dspFrames) {
if (dspFrames == nullptr) {
return BAD_VALUE;
}
int64_t aidlFrames = 0, aidlTimestamp = 0;
- RETURN_STATUS_IF_ERROR(getObservablePosition(&aidlFrames, &aidlTimestamp));
- *dspFrames = static_cast<uint32_t>(aidlFrames);
+ StatePositions statePositions{};
+ RETURN_STATUS_IF_ERROR(
+ getObservablePosition(&aidlFrames, &aidlTimestamp, &statePositions));
+ // Number of audio frames since the stream has exited standby.
+ // See the table at the start of 'StreamHalInterface' on when it needs to reset.
+ int64_t mostRecentResetPoint;
+ if (!mContext.isAsynchronous() && audio_has_proportional_frames(mConfig.format)) {
+ mostRecentResetPoint = statePositions.framesAtStandby;
+ } else {
+ mostRecentResetPoint =
+ std::max(statePositions.framesAtStandby, statePositions.framesAtFlushOrDrain);
+ }
+ *dspFrames = aidlFrames <= mostRecentResetPoint ? 0 : aidlFrames - mostRecentResetPoint;
return OK;
}
-status_t StreamOutHalAidl::getNextWriteTimestamp(int64_t *timestamp __unused) {
- // Obsolete, use getPresentationPosition.
- return INVALID_OPERATION;
-}
-
status_t StreamOutHalAidl::setCallback(wp<StreamOutHalInterfaceCallback> callback) {
ALOGD("%p %s", this, __func__);
TIME_CHECK();
@@ -715,13 +803,26 @@
return BAD_VALUE;
}
int64_t aidlFrames = 0, aidlTimestamp = 0;
- RETURN_STATUS_IF_ERROR(getObservablePosition(&aidlFrames, &aidlTimestamp));
- *frames = aidlFrames;
+ StatePositions statePositions{};
+ RETURN_STATUS_IF_ERROR(getObservablePosition(&aidlFrames, &aidlTimestamp, &statePositions));
+ // See the table at the start of 'StreamHalInterface'.
+ if (!mContext.isAsynchronous() && audio_has_proportional_frames(mConfig.format)) {
+ *frames = aidlFrames;
+ } else {
+ const int64_t mostRecentResetPoint =
+ std::max(statePositions.framesAtStandby, statePositions.framesAtFlushOrDrain);
+ *frames = aidlFrames <= mostRecentResetPoint ? 0 : aidlFrames - mostRecentResetPoint;
+ }
timestamp->tv_sec = aidlTimestamp / NANOS_PER_SECOND;
timestamp->tv_nsec = aidlTimestamp - timestamp->tv_sec * NANOS_PER_SECOND;
return OK;
}
+status_t StreamOutHalAidl::presentationComplete() {
+ ALOGD("%p %s::%s", this, getClassName().c_str(), __func__);
+ return OK;
+}
+
status_t StreamOutHalAidl::updateSourceMetadata(
const StreamOutHalInterface::SourceMetadata& sourceMetadata) {
TIME_CHECK();
@@ -851,10 +952,10 @@
}
}
-void StreamOutHalAidl::onError() {
+void StreamOutHalAidl::onError(bool isHardError) {
onAsyncError();
if (auto clientCb = mClientCallback.load().promote(); clientCb != nullptr) {
- clientCb->onError();
+ clientCb->onError(isHardError);
}
}
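The position changes in this file all hinge on StatePositions: getRenderPosition reports frames relative to the most recent reset point (standby only for synchronous proportional PCM, otherwise the later of standby and flush/drain), clamped at zero, while getPresentationPosition keeps the raw observable count in the proportional PCM case. A hedged numeric sketch of the getRenderPosition arithmetic (values invented for illustration):

    #include <algorithm>
    #include <cstdint>
    #include <cstdio>

    struct StatePositions { int64_t framesAtFlushOrDrain; int64_t framesAtStandby; };

    int64_t renderPositionSketch(int64_t observableFrames, const StatePositions& sp,
                                 bool proportionalAndSync) {
        const int64_t resetPoint = proportionalAndSync
                ? sp.framesAtStandby
                : std::max(sp.framesAtStandby, sp.framesAtFlushOrDrain);
        return observableFrames <= resetPoint ? 0 : observableFrames - resetPoint;
    }

    int main() {
        const StatePositions sp{/*framesAtFlushOrDrain=*/96000, /*framesAtStandby=*/48000};
        // Offload/async style stream: last flush at 96000 frames -> 144000 - 96000 = 48000.
        std::printf("%lld\n", (long long)renderPositionSketch(144000, sp, false));
        // Synchronous proportional PCM: only standby counts -> 144000 - 48000 = 96000.
        std::printf("%lld\n", (long long)renderPositionSketch(144000, sp, true));
        return 0;
    }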
diff --git a/media/libaudiohal/impl/StreamHalAidl.h b/media/libaudiohal/impl/StreamHalAidl.h
index b20eb00..9cb2cff 100644
--- a/media/libaudiohal/impl/StreamHalAidl.h
+++ b/media/libaudiohal/impl/StreamHalAidl.h
@@ -95,7 +95,8 @@
size_t getBufferSizeBytes() const { return mFrameSizeBytes * mBufferSizeFrames; }
size_t getBufferSizeFrames() const { return mBufferSizeFrames; }
size_t getBufferDurationMs(int32_t sampleRate) const {
- return sampleRate != 0 ? mBufferSizeFrames * MILLIS_PER_SECOND / sampleRate : 0;
+ auto bufferSize = mIsMmapped ? getMmapBurstSize() : mBufferSizeFrames;
+ return sampleRate != 0 ? bufferSize * MILLIS_PER_SECOND / sampleRate : 0;
}
CommandMQ* getCommandMQ() const { return mCommandMQ.get(); }
DataMQ* getDataMQ() const { return mDataMQ.get(); }
@@ -104,7 +105,7 @@
bool isAsynchronous() const { return mIsAsynchronous; }
bool isMmapped() const { return mIsMmapped; }
const MmapBufferDescriptor& getMmapBufferDescriptor() const { return mMmapBufferDescriptor; }
-
+ size_t getMmapBurstSize() const { return mMmapBufferDescriptor.burstSizeFrames; }
private:
static std::unique_ptr<DataMQ> maybeCreateDataMQ(
const ::aidl::android::hardware::audio::core::StreamDescriptor& descriptor) {
@@ -194,6 +195,11 @@
// For tests.
friend class sp<StreamHalAidl>;
+ struct StatePositions {
+ int64_t framesAtFlushOrDrain;
+ int64_t framesAtStandby;
+ };
+
template<class T>
static std::shared_ptr<::aidl::android::hardware::audio::core::IStreamCommon> getStreamCommon(
const std::shared_ptr<T>& stream);
@@ -212,7 +218,8 @@
status_t getLatency(uint32_t *latency);
// Always returns non-negative values.
- status_t getObservablePosition(int64_t *frames, int64_t *timestamp);
+ status_t getObservablePosition(int64_t* frames, int64_t* timestamp,
+ StatePositions* statePositions = nullptr);
// Always returns non-negative values.
status_t getHardwarePosition(int64_t *frames, int64_t *timestamp);
@@ -268,11 +275,13 @@
// Note: Since `sendCommand` takes mLock while holding mCommandReplyLock, never call
// it with `mLock` being held.
status_t sendCommand(
- const ::aidl::android::hardware::audio::core::StreamDescriptor::Command &command,
+ const ::aidl::android::hardware::audio::core::StreamDescriptor::Command& command,
::aidl::android::hardware::audio::core::StreamDescriptor::Reply* reply = nullptr,
- bool safeFromNonWorkerThread = false);
+ bool safeFromNonWorkerThread = false,
+ StatePositions* statePositions = nullptr);
status_t updateCountersIfNeeded(
- ::aidl::android::hardware::audio::core::StreamDescriptor::Reply* reply = nullptr);
+ ::aidl::android::hardware::audio::core::StreamDescriptor::Reply* reply = nullptr,
+ StatePositions* statePositions = nullptr);
const std::shared_ptr<::aidl::android::hardware::audio::core::IStreamCommon> mStream;
const std::shared_ptr<::aidl::android::media::audio::IHalAdapterVendorExtension> mVendorExt;
@@ -280,6 +289,9 @@
std::mutex mLock;
::aidl::android::hardware::audio::core::StreamDescriptor::Reply mLastReply GUARDED_BY(mLock);
int64_t mLastReplyExpirationNs GUARDED_BY(mLock) = 0;
+ // Cached values of observable positions when the stream last entered certain state.
+ // Updated for output streams only.
+ StatePositions mStatePositions GUARDED_BY(mLock) = {};
// mStreamPowerLog is used for audio signal power logging.
StreamPowerLog mStreamPowerLog;
std::atomic<pid_t> mWorkerTid = -1;
@@ -308,10 +320,7 @@
// Return the number of audio frames written by the audio dsp to DAC since
// the output has exited standby.
- status_t getRenderPosition(uint32_t *dspFrames) override;
-
- // Get the local time at which the next write to the audio driver will be presented.
- status_t getNextWriteTimestamp(int64_t *timestamp) override;
+ status_t getRenderPosition(uint64_t *dspFrames) override;
// Set the callback for notifying completion of non-blocking write and drain.
status_t setCallback(wp<StreamOutHalInterfaceCallback> callback) override;
@@ -331,12 +340,19 @@
// Requests notification when data buffered by the driver/hardware has been played.
status_t drain(bool earlyNotify) override;
- // Notifies to the audio driver to flush the queued data.
+ // Notifies to the audio driver to flush (that is, drop) the queued data. Stream must
+ // already be paused before calling 'flush'.
status_t flush() override;
// Return a recent count of the number of audio frames presented to an external observer.
+ // This excludes frames which have been written but are still in the pipeline. See the
+ // table at the start of the 'StreamOutHalInterface' for the specification of the frame
+ // count behavior w.r.t. 'flush', 'drain' and 'standby' operations.
status_t getPresentationPosition(uint64_t *frames, struct timespec *timestamp) override;
+ // Notifies the HAL layer that the framework considers the current playback as completed.
+ status_t presentationComplete() override;
+
// Called when the metadata of the stream's source has been changed.
status_t updateSourceMetadata(const SourceMetadata& sourceMetadata) override;
@@ -370,7 +386,7 @@
// StreamOutHalInterfaceCallback
void onWriteReady() override;
void onDrainReady() override;
- void onError() override;
+ void onError(bool isHardError) override;
private:
friend class sp<StreamOutHalAidl>;
@@ -413,6 +429,7 @@
// Return a recent count of the number of audio frames received and
// the clock time associated with that frame count.
+ // The count must not reset to zero when a PCM input enters standby.
status_t getCapturePosition(int64_t *frames, int64_t *time) override;
// Get active microphones
diff --git a/media/libaudiohal/impl/StreamHalHidl.cpp b/media/libaudiohal/impl/StreamHalHidl.cpp
index 77c75db..a931fdd 100644
--- a/media/libaudiohal/impl/StreamHalHidl.cpp
+++ b/media/libaudiohal/impl/StreamHalHidl.cpp
@@ -17,6 +17,8 @@
#define LOG_TAG "StreamHalHidl"
//#define LOG_NDEBUG 0
+#include <cinttypes>
+
#include <android/hidl/manager/1.0/IServiceManager.h>
#include <hwbinder/IPCThreadState.h>
#include <media/AudioParameter.h>
@@ -589,32 +591,39 @@
return OK;
}
-status_t StreamOutHalHidl::getRenderPosition(uint32_t *dspFrames) {
+status_t StreamOutHalHidl::getRenderPosition(uint64_t *dspFrames) {
// TIME_CHECK(); // TODO(b/243839867) reenable only when optimized.
if (mStream == 0) return NO_INIT;
Result retval;
+ uint32_t halPosition = 0;
Return<void> ret = mStream->getRenderPosition(
[&](Result r, uint32_t d) {
retval = r;
if (retval == Result::OK) {
- *dspFrames = d;
+ halPosition = d;
}
});
- return processReturn("getRenderPosition", ret, retval);
-}
+ status_t status = processReturn("getRenderPosition", ret, retval);
+ if (status != OK) {
+ return status;
+ }
+ // Maintain a 64-bit render position using the 32-bit result from the HAL.
+ // This delta calculation relies on the arithmetic overflow behavior
+ // of integers. For example (100 - 0xFFFFFFF0) = 116.
+ std::lock_guard l(mPositionMutex);
+ const auto truncatedPosition = (uint32_t)mRenderPosition;
+ int32_t deltaHalPosition; // initialization not needed, overwritten by __builtin_sub_overflow()
+ (void) __builtin_sub_overflow(halPosition, truncatedPosition, &deltaHalPosition);
-status_t StreamOutHalHidl::getNextWriteTimestamp(int64_t *timestamp) {
- TIME_CHECK();
- if (mStream == 0) return NO_INIT;
- Result retval;
- Return<void> ret = mStream->getNextWriteTimestamp(
- [&](Result r, int64_t t) {
- retval = r;
- if (retval == Result::OK) {
- *timestamp = t;
- }
- });
- return processReturn("getRenderPosition", ret, retval);
+ if (deltaHalPosition >= 0) {
+ mRenderPosition += deltaHalPosition;
+ } else if (mExpectRetrograde) {
+ mExpectRetrograde = false;
+ mRenderPosition -= static_cast<uint64_t>(-deltaHalPosition);
+ ALOGW("Retrograde motion of %" PRId32 " frames", -deltaHalPosition);
+ }
+ *dspFrames = mRenderPosition;
+ return OK;
}
status_t StreamOutHalHidl::setCallback(wp<StreamOutHalInterfaceCallback> callback) {
@@ -667,9 +676,23 @@
status_t StreamOutHalHidl::flush() {
TIME_CHECK();
if (mStream == 0) return NO_INIT;
+ {
+ std::lock_guard l(mPositionMutex);
+ mRenderPosition = 0;
+ mExpectRetrograde = false;
+ }
return processReturn("pause", mStream->flush());
}
+status_t StreamOutHalHidl::standby() {
+ {
+ std::lock_guard l(mPositionMutex);
+ mRenderPosition = 0;
+ mExpectRetrograde = false;
+ }
+ return StreamHalHidl::standby();
+}
+
status_t StreamOutHalHidl::getPresentationPosition(uint64_t *frames, struct timespec *timestamp) {
// TIME_CHECK(); // TODO(b/243839867) reenable only when optimized.
if (mStream == 0) return NO_INIT;
@@ -696,6 +719,16 @@
}
}
+status_t StreamOutHalHidl::presentationComplete() {
+ // Avoid suppressing retrograde motion in mRenderPosition for gapless offload/direct when
+ // transitioning between tracks.
+ // The HAL resets the frame position without flush/stop being called, but calls back prior to
+ // this event. So, on the next occurrence of retrograde motion, we permit backwards movement of
+ // mRenderPosition.
+ mExpectRetrograde = true;
+ return OK;
+}
+
#if MAJOR_VERSION == 2
status_t StreamOutHalHidl::updateSourceMetadata(
const StreamOutHalInterface::SourceMetadata& /* sourceMetadata */) {
@@ -964,7 +997,7 @@
sp<StreamOutHalInterfaceCallback> callback = mCallback.load().promote();
if (callback == 0) return;
ALOGV("asyncCallback onError");
- callback->onError();
+ callback->onError(false /*isHardError*/);
}
void StreamOutHalHidl::onCodecFormatChanged(const std::vector<uint8_t>& metadataBs) {
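The getRenderPosition rewrite above keeps a 64-bit running position from the HAL's 32-bit counter: the delta is computed with wrap-around semantics via __builtin_sub_overflow (a clang/gcc builtin), and retrograde deltas are honored only once after presentationComplete(), which is the gapless track-switch case. A hedged standalone sketch, including the wrap-around example from the diff's own comment (100 - 0xFFFFFFF0 = 116):

    #include <cstdint>
    #include <cstdio>

    // Extend a 32-bit HAL frame counter into a 64-bit running position (locking omitted).
    struct PositionTracker {
        uint64_t renderPosition = 0;
        bool expectRetrograde = false;  // set by presentationComplete() in the real code

        void update(uint32_t halPosition) {
            const uint32_t truncated = (uint32_t)renderPosition;
            int32_t delta;  // wrap-around-aware difference, filled in by __builtin_sub_overflow
            (void)__builtin_sub_overflow(halPosition, truncated, &delta);
            if (delta >= 0) {
                renderPosition += delta;
            } else if (expectRetrograde) {
                expectRetrograde = false;
                renderPosition -= (uint64_t)(-(int64_t)delta);
            }  // otherwise the retrograde report is ignored
        }
    };

    int main() {
        PositionTracker t;
        t.renderPosition = 0xFFFFFFF0;  // low 32 bits are about to wrap
        t.update(100);                  // HAL counter already wrapped past zero
        std::printf("0x%llx\n", (unsigned long long)t.renderPosition);  // 0x100000064
        return 0;
    }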
diff --git a/media/libaudiohal/impl/StreamHalHidl.h b/media/libaudiohal/impl/StreamHalHidl.h
index 48da633..433e0a3 100644
--- a/media/libaudiohal/impl/StreamHalHidl.h
+++ b/media/libaudiohal/impl/StreamHalHidl.h
@@ -18,10 +18,12 @@
#define ANDROID_HARDWARE_STREAM_HAL_HIDL_H
#include <atomic>
+#include <mutex>
#include PATH(android/hardware/audio/CORE_TYPES_FILE_VERSION/IStream.h)
#include PATH(android/hardware/audio/CORE_TYPES_FILE_VERSION/IStreamIn.h)
#include PATH(android/hardware/audio/FILE_VERSION/IStreamOut.h)
+#include <android-base/thread_annotations.h>
#include <fmq/EventFlag.h>
#include <fmq/MessageQueue.h>
#include <media/audiohal/EffectHalInterface.h>
@@ -119,6 +121,9 @@
class StreamOutHalHidl : public StreamOutHalInterface, public StreamHalHidl {
public:
+ // Put the audio hardware input/output into standby mode (from StreamHalInterface).
+ status_t standby() override;
+
// Return the frame size (number of bytes per sample) of a stream.
virtual status_t getFrameSize(size_t *size);
@@ -136,10 +141,7 @@
// Return the number of audio frames written by the audio dsp to DAC since
// the output has exited standby.
- virtual status_t getRenderPosition(uint32_t *dspFrames);
-
- // Get the local time at which the next write to the audio driver will be presented.
- virtual status_t getNextWriteTimestamp(int64_t *timestamp);
+ virtual status_t getRenderPosition(uint64_t *dspFrames);
// Set the callback for notifying completion of non-blocking write and drain.
virtual status_t setCallback(wp<StreamOutHalInterfaceCallback> callback);
@@ -159,12 +161,19 @@
// Requests notification when data buffered by the driver/hardware has been played.
virtual status_t drain(bool earlyNotify);
- // Notifies to the audio driver to flush the queued data.
+ // Notifies to the audio driver to flush (that is, drop) the queued data. Stream must
+ // already be paused before calling 'flush'.
virtual status_t flush();
// Return a recent count of the number of audio frames presented to an external observer.
+ // This excludes frames which have been written but are still in the pipeline. See the
+ // table at the start of the 'StreamOutHalInterface' for the specification of the frame
+ // count behavior w.r.t. 'flush', 'drain' and 'standby' operations.
virtual status_t getPresentationPosition(uint64_t *frames, struct timespec *timestamp);
+ // Notifies the HAL layer that the framework considers the current playback as completed.
+ status_t presentationComplete() override;
+
// Called when the metadata of the stream's source has been changed.
status_t updateSourceMetadata(const SourceMetadata& sourceMetadata) override;
@@ -221,6 +230,10 @@
std::unique_ptr<StatusMQ> mStatusMQ;
std::atomic<pid_t> mWriterClient;
EventFlag* mEfGroup;
+ std::mutex mPositionMutex;
+ // Used to correctly expand the 32-bit position reported by the HAL.
+ uint64_t mRenderPosition GUARDED_BY(mPositionMutex) = 0;
+ bool mExpectRetrograde GUARDED_BY(mPositionMutex) = false; // See 'presentationComplete'.
// Can not be constructed directly by clients.
StreamOutHalHidl(const sp<::android::hardware::audio::CPP_VERSION::IStreamOut>& stream);
@@ -250,6 +263,7 @@
// Return a recent count of the number of audio frames received and
// the clock time associated with that frame count.
+ // The count must not reset to zero when a PCM input enters standby.
virtual status_t getCapturePosition(int64_t *frames, int64_t *time);
// Get active microphones
diff --git a/media/libaudiohal/include/media/audiohal/StreamHalInterface.h b/media/libaudiohal/include/media/audiohal/StreamHalInterface.h
index 37615af..4bd7e3d 100644
--- a/media/libaudiohal/include/media/audiohal/StreamHalInterface.h
+++ b/media/libaudiohal/include/media/audiohal/StreamHalInterface.h
@@ -107,7 +107,7 @@
public:
virtual void onWriteReady() {}
virtual void onDrainReady() {}
- virtual void onError() {}
+ virtual void onError(bool /*isHardError*/) {}
protected:
StreamOutHalInterfaceCallback() = default;
@@ -135,6 +135,38 @@
virtual ~StreamOutHalInterfaceLatencyModeCallback() = default;
};
+/**
+ * On position reporting. There are two methods: 'getRenderPosition' and
+ * 'getPresentationPosition'. The first difference is that they may have a
+ * time offset because "render" position relates to what happens between
+ * ADSP and DAC, while "observable" position is relative to the external
+ * observer. The second difference is that 'getRenderPosition' always
+ * resets on standby (for all types of stream data) according to its
+ * definition. Since the original C definition of 'getRenderPosition' used
+ * 32-bit frame counters, and also because in complex playback chains that
+ * include wireless devices the "observable" position has more practical
+ * meaning, 'getRenderPosition' does not exist in the AIDL HAL interface.
+ * The table below summarizes frame count behavior for 'getPresentationPosition':
+ *
+ *               | Mixed      | Direct       | Direct
+ *               |            | non-offload  | offload
+ * ==============|============|==============|==============
+ * PCM and       | Continuous |              |
+ * encapsulated  |            |              |
+ * bitstream     |            |              |
+ * --------------|------------| Continuous†  |
+ * Bitstream     |            |              | Reset on
+ * encapsulated  |            |              | flush, drain
+ * into PCM      |            |              | and standby
+ *               | Not        |              |
+ * --------------| supported  |--------------|
+ * Bitstream     |            | Reset on     |
+ *               |            | flush, drain |
+ *               |            | and standby  |
+ *               |            |              |
+ *
+ * † - on standby, reset of the frame count happens at the framework level.
+ */
class StreamOutHalInterface : public virtual StreamHalInterface {
public:
// Return the audio hardware driver estimated latency in milliseconds.
@@ -151,10 +183,7 @@
// Return the number of audio frames written by the audio dsp to DAC since
// the output has exited standby.
- virtual status_t getRenderPosition(uint32_t *dspFrames) = 0;
-
- // Get the local time at which the next write to the audio driver will be presented.
- virtual status_t getNextWriteTimestamp(int64_t *timestamp) = 0;
+ virtual status_t getRenderPosition(uint64_t *dspFrames) = 0;
// Set the callback for notifying completion of non-blocking write and drain.
// The callback must be owned by someone else. The output stream does not own it
@@ -176,12 +205,19 @@
// Requests notification when data buffered by the driver/hardware has been played.
virtual status_t drain(bool earlyNotify) = 0;
- // Notifies to the audio driver to flush the queued data.
+ // Notifies to the audio driver to flush (that is, drop) the queued data. Stream must
+ // already be paused before calling 'flush'.
virtual status_t flush() = 0;
// Return a recent count of the number of audio frames presented to an external observer.
+ // This excludes frames which have been written but are still in the pipeline. See the
+ // table at the start of the 'StreamOutHalInterface' for the specification of the frame
+ // count behavior w.r.t. 'flush', 'drain' and 'standby' operations.
virtual status_t getPresentationPosition(uint64_t *frames, struct timespec *timestamp) = 0;
+ // Notifies the HAL layer that the framework considers the current playback as completed.
+ virtual status_t presentationComplete() = 0;
+
struct SourceMetadata {
std::vector<playback_track_metadata_v7_t> tracks;
};
@@ -270,6 +306,7 @@
// Return a recent count of the number of audio frames received and
// the clock time associated with that frame count.
+ // The count must not reset to zero when a PCM input enters standby.
virtual status_t getCapturePosition(int64_t *frames, int64_t *time) = 0;
// Get active microphones
diff --git a/media/libaudiohal/tests/CoreAudioHalAidl_test.cpp b/media/libaudiohal/tests/CoreAudioHalAidl_test.cpp
index 5106874..0bd6fb0 100644
--- a/media/libaudiohal/tests/CoreAudioHalAidl_test.cpp
+++ b/media/libaudiohal/tests/CoreAudioHalAidl_test.cpp
@@ -39,6 +39,7 @@
using ::aidl::android::hardware::audio::core::VendorParameter;
using ::aidl::android::media::audio::common::AudioChannelLayout;
using ::aidl::android::media::audio::common::AudioConfig;
+using ::aidl::android::media::audio::common::AudioDevice;
using ::aidl::android::media::audio::common::AudioDeviceDescription;
using ::aidl::android::media::audio::common::AudioDeviceType;
using ::aidl::android::media::audio::common::AudioFormatDescription;
@@ -160,6 +161,24 @@
createProfile(PcmType::INT_16_BIT, {AudioChannelLayout::LAYOUT_STEREO}, {48000})};
Configuration c;
+ AudioPort micInDevice =
+ createPort(c.nextPortId++, "Built-In Mic", 0, true,
+ createPortDeviceExt(AudioDeviceType::IN_MICROPHONE,
+ 1 << AudioPortDeviceExt::FLAG_INDEX_DEFAULT_DEVICE));
+ micInDevice.profiles = standardPcmAudioProfiles;
+ c.ports.push_back(micInDevice);
+
+ AudioPort micInBackDevice =
+ createPort(c.nextPortId++, "Built-In Back Mic", 0, true,
+ createPortDeviceExt(AudioDeviceType::IN_MICROPHONE_BACK, 0));
+ micInBackDevice.profiles = standardPcmAudioProfiles;
+ c.ports.push_back(micInBackDevice);
+
+ AudioPort primaryInMix =
+ createPort(c.nextPortId++, "primary input", 0, true, createPortMixExt(0, 1));
+ primaryInMix.profiles = standardPcmAudioProfiles;
+ c.ports.push_back(primaryInMix);
+
AudioPort btOutDevice =
createPort(c.nextPortId++, "BT A2DP Out", 0, false,
createPortDeviceExt(AudioDeviceType::OUT_DEVICE, 0,
@@ -172,6 +191,7 @@
btOutMix.profiles = standardPcmAudioProfiles;
c.ports.push_back(btOutMix);
+ c.routes.push_back(createRoute({micInDevice, micInBackDevice}, primaryInMix));
c.routes.push_back(createRoute({btOutMix}, btOutDevice));
return c;
@@ -184,6 +204,11 @@
explicit ModuleMock(const Configuration& config) : mConfig(config) {}
bool isScreenTurnedOn() const { return mIsScreenTurnedOn; }
ScreenRotation getScreenRotation() const { return mScreenRotation; }
+ std::vector<AudioPatch> getPatches() {
+ std::vector<AudioPatch> result;
+ getAudioPatches(&result);
+ return result;
+ }
private:
ndk::ScopedAStatus setModuleDebug(
@@ -1141,3 +1166,51 @@
EXPECT_EQ(0, mMapper->findFwkPatch(mPatch.id));
EXPECT_EQ(0, mMapper->findFwkPatch(newPatchId));
}
+
+TEST_F(Hal2AidlMapperTest, ChangeTransientPatchDevice) {
+ std::mutex mutex; // Only needed for cleanups.
+ auto mapperAccessor = std::make_unique<LockedAccessor<Hal2AidlMapper>>(*mMapper, mutex);
+ Hal2AidlMapper::Cleanups cleanups(*mapperAccessor);
+ AudioConfig config;
+ config.base.channelMask = AudioChannelLayout::make<AudioChannelLayout::layoutMask>(
+ AudioChannelLayout::LAYOUT_STEREO);
+ config.base.format =
+ AudioFormatDescription{.type = AudioFormatType::PCM, .pcm = PcmType::INT_16_BIT};
+ config.base.sampleRate = 48000;
+ AudioDevice defaultDevice;
+ defaultDevice.type.type = AudioDeviceType::IN_DEFAULT;
+ AudioPortConfig mixPortConfig;
+ AudioPatch transientPatch;
+ ASSERT_EQ(OK, mMapper->prepareToOpenStream(43 /*ioHandle*/, defaultDevice,
+ AudioIoFlags::make<AudioIoFlags::input>(0),
+ AudioSource::DEFAULT, &cleanups, &config,
+ &mixPortConfig, &transientPatch));
+ cleanups.disarmAll();
+ ASSERT_NE(0, transientPatch.id);
+ ASSERT_NE(0, mixPortConfig.id);
+ sp<StreamHalInterface> stream = sp<StreamHalMock>::make();
+ mMapper->addStream(stream, mixPortConfig.id, transientPatch.id);
+
+ AudioPatch patch{};
+ int32_t patchId;
+ AudioPortConfig backMicPortConfig;
+ backMicPortConfig.channelMask = config.base.channelMask;
+ backMicPortConfig.format = config.base.format;
+ backMicPortConfig.sampleRate = aidl::android::media::audio::common::Int{config.base.sampleRate};
+ backMicPortConfig.flags = AudioIoFlags::make<AudioIoFlags::input>(0);
+ backMicPortConfig.ext = createPortDeviceExt(AudioDeviceType::IN_MICROPHONE_BACK, 0);
+ ASSERT_EQ(OK, mMapper->createOrUpdatePatch({backMicPortConfig}, {mixPortConfig}, &patchId,
+ &cleanups));
+ cleanups.disarmAll();
+ ASSERT_EQ(android::OK,
+ mMapper->findPortConfig(backMicPortConfig.ext.get<AudioPortExt::device>().device,
+ &backMicPortConfig));
+ EXPECT_NE(0, backMicPortConfig.id);
+
+ EXPECT_EQ(transientPatch.id, patchId);
+ auto patches = mModule->getPatches();
+ auto patchIt = findById(patches, patchId);
+ ASSERT_NE(patchIt, patches.end());
+ EXPECT_EQ(std::vector<int32_t>{backMicPortConfig.id}, patchIt->sourcePortConfigIds);
+ EXPECT_EQ(std::vector<int32_t>{mixPortConfig.id}, patchIt->sinkPortConfigIds);
+}
diff --git a/media/libeffects/data/Android.bp b/media/libeffects/data/Android.bp
new file mode 100644
index 0000000..2acf229
--- /dev/null
+++ b/media/libeffects/data/Android.bp
@@ -0,0 +1,19 @@
+// Copyright (C) 2024 The Android Open Source Project
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+prebuilt_etc {
+ name: "framework-audio_effects.xml",
+ src: "audio_effects.xml",
+ filename: "audio_effects.xml",
+}
diff --git a/media/libeffects/downmix/aidl/EffectDownmix.cpp b/media/libeffects/downmix/aidl/EffectDownmix.cpp
index de60ca4..883d41d 100644
--- a/media/libeffects/downmix/aidl/EffectDownmix.cpp
+++ b/media/libeffects/downmix/aidl/EffectDownmix.cpp
@@ -177,7 +177,10 @@
* in the life cycle of workerThread (threadLoop).
*/
uint32_t efState = 0;
- if (!mEventFlag || ::android::OK != mEventFlag->wait(kEventFlagNotEmpty, &efState)) {
+ if (!mEventFlag ||
+ ::android::OK != mEventFlag->wait(mDataMqNotEmptyEf, &efState, 0 /* no timeout */,
+ true /* retry */) ||
+ !(efState & mDataMqNotEmptyEf)) {
LOG(ERROR) << getEffectName() << __func__ << ": StatusEventFlag invalid";
}
diff --git a/media/libeffects/dynamicsproc/aidl/DynamicsProcessing.cpp b/media/libeffects/dynamicsproc/aidl/DynamicsProcessing.cpp
index fdc16e3..836e034 100644
--- a/media/libeffects/dynamicsproc/aidl/DynamicsProcessing.cpp
+++ b/media/libeffects/dynamicsproc/aidl/DynamicsProcessing.cpp
@@ -213,11 +213,12 @@
RETURN_OK_IF(mState != State::INIT);
mImplContext = createContext(common);
RETURN_IF(!mContext || !mImplContext, EX_NULL_POINTER, "createContextFailed");
- int version = 0;
- RETURN_IF(!getInterfaceVersion(&version).isOk(), EX_UNSUPPORTED_OPERATION,
+ RETURN_IF(!getInterfaceVersion(&mVersion).isOk(), EX_UNSUPPORTED_OPERATION,
"FailedToGetInterfaceVersion");
-    mImplContext->setVersion(version);
+    mImplContext->setVersion(mVersion);
mEventFlag = mImplContext->getStatusEventFlag();
+ mDataMqNotEmptyEf =
+ mVersion >= kReopenSupportedVersion ? kEventFlagDataMqNotEmpty : kEventFlagNotEmpty;
if (specific.has_value()) {
RETURN_IF_ASTATUS_NOT_OK(setParameterSpecific(specific.value()), "setSpecParamErr");
@@ -231,8 +232,9 @@
mState = State::IDLE;
mContext->dupeFmq(ret);
- RETURN_IF(createThread(getEffectName()) != RetCode::SUCCESS, EX_UNSUPPORTED_OPERATION,
- "FailedToCreateWorker");
+ RETURN_IF(createThread(getEffectNameWithVersion()) != RetCode::SUCCESS,
+ EX_UNSUPPORTED_OPERATION, "FailedToCreateWorker");
+ LOG(INFO) << getEffectNameWithVersion() << __func__;
return ndk::ScopedAStatus::ok();
}
diff --git a/media/libeffects/lvm/wrapper/Aidl/BundleTypes.h b/media/libeffects/lvm/wrapper/Aidl/BundleTypes.h
index daabdb7..e5373f3 100644
--- a/media/libeffects/lvm/wrapper/Aidl/BundleTypes.h
+++ b/media/libeffects/lvm/wrapper/Aidl/BundleTypes.h
@@ -65,9 +65,9 @@
{5, 3, -1, 3, 5}}}; /* Rock Preset */
static const std::vector<Equalizer::Preset> kEqPresets = {
- {0, "Normal"}, {1, "Classical"}, {2, "Dance"}, {3, "Flat"}, {4, "Folk"},
- {5, "Heavy Metal"}, {6, "Hip Hop"}, {7, "Jazz"}, {8, "Pop"}, {9, "Rock"}};
-
+ {-1, "Custom"}, {0, "Normal"}, {1, "Classical"}, {2, "Dance"},
+ {3, "Flat"}, {4, "Folk"}, {5, "Heavy Metal"}, {6, "Hip Hop"},
+ {7, "Jazz"}, {8, "Pop"}, {9, "Rock"}};
const std::vector<Range::EqualizerRange> kEqRanges = {
MAKE_RANGE(Equalizer, preset, 0, MAX_NUM_PRESETS - 1),
diff --git a/media/libeffects/lvm/wrapper/Reverb/aidl/ReverbContext.cpp b/media/libeffects/lvm/wrapper/Reverb/aidl/ReverbContext.cpp
index 44ea2a4..3ae3edc 100644
--- a/media/libeffects/lvm/wrapper/Reverb/aidl/ReverbContext.cpp
+++ b/media/libeffects/lvm/wrapper/Reverb/aidl/ReverbContext.cpp
@@ -19,6 +19,7 @@
#define LOG_TAG "ReverbContext"
#include <android-base/logging.h>
#include <Utils.h>
+#include <audio_utils/primitives.h>
#include "ReverbContext.h"
#include "VectorArithmetic.h"
@@ -347,6 +348,15 @@
mCommon.output.base.channelMask);
int frameCount = mCommon.input.frameCount;
+ if (mBypass) {
+ if (isAuxiliary()) {
+ memset(out, 0, getOutputFrameSize() * frameCount);
+ } else {
+ memcpy_to_float_from_float_with_clamping(out, in, samples, 1);
+ }
+ return {STATUS_OK, samples, outChannels * frameCount};
+ }
+
// Reverb only affects the stereo channels in a multichannel source.
if (channels < 1 || channels > LVM_MAX_CHANNELS) {
LOG(ERROR) << __func__ << " process invalid PCM channels " << channels;
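
The bypass path added above short-circuits processing: an auxiliary reverb contributes only wet signal, so bypassing it writes silence, while an insert reverb passes the dry input through with clamping. The sketch below re-implements that idea standalone; clampCopy() is a local stand-in for audio_utils' memcpy_to_float_from_float_with_clamping().

    #include <algorithm>
    #include <cstring>
    #include <vector>

    // Stand-in for the clamping copy: copy count samples, bounding each to [-absMax, absMax].
    void clampCopy(float* dst, const float* src, size_t count, float absMax) {
        for (size_t i = 0; i < count; ++i) {
            dst[i] = std::clamp(src[i], -absMax, absMax);
        }
    }

    int main() {
        std::vector<float> in = {0.1f, -2.0f, 0.5f, 3.0f, -0.2f, 0.0f, 1.5f, -1.0f};
        std::vector<float> out(in.size(), 0.0f);

        const bool bypass = true;
        const bool auxiliary = false;  // an auxiliary reverb only produces wet signal
        if (bypass) {
            if (auxiliary) {
                // No dry path to preserve: a bypassed auxiliary output is silence.
                std::memset(out.data(), 0, out.size() * sizeof(float));
            } else {
                // Insert effect: pass the dry input through, clamped to full scale.
                clampCopy(out.data(), in.data(), in.size(), 1.0f);
            }
        }
        return 0;
    }
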
diff --git a/media/libeffects/visualizer/aidl/Visualizer.cpp b/media/libeffects/visualizer/aidl/Visualizer.cpp
index 9c2b71e..9b493d4 100644
--- a/media/libeffects/visualizer/aidl/Visualizer.cpp
+++ b/media/libeffects/visualizer/aidl/Visualizer.cpp
@@ -72,7 +72,7 @@
.uuid = getEffectImplUuidVisualizer(),
.proxy = std::nullopt},
.flags = {.type = Flags::Type::INSERT,
- .insert = Flags::Insert::LAST,
+ .insert = Flags::Insert::FIRST,
.volume = Flags::Volume::NONE},
.name = VisualizerImpl::kEffectName,
.implementor = "The Android Open Source Project"},
diff --git a/media/libheif/OWNERS b/media/libheif/OWNERS
new file mode 100644
index 0000000..a61ad21
--- /dev/null
+++ b/media/libheif/OWNERS
@@ -0,0 +1,2 @@
+include platform/frameworks/av:/media/janitors/avic_OWNERS
+include platform/frameworks/av:/media/janitors/codec_OWNERS
\ No newline at end of file
diff --git a/media/libmedia/Android.bp b/media/libmedia/Android.bp
index 840897f..9cd0e6e 100644
--- a/media/libmedia/Android.bp
+++ b/media/libmedia/Android.bp
@@ -81,9 +81,6 @@
cc_library_shared {
name: "libmedia_omx",
vendor_available: true,
- vndk: {
- enabled: true,
- },
double_loadable: true,
srcs: [
diff --git a/media/libmediahelper/Android.bp b/media/libmediahelper/Android.bp
index 649f813..b5867a6 100644
--- a/media/libmediahelper/Android.bp
+++ b/media/libmediahelper/Android.bp
@@ -30,9 +30,6 @@
name: "libmedia_helper",
vendor_available: true,
min_sdk_version: "29",
- vndk: {
- enabled: true,
- },
double_loadable: true,
srcs: [
"AudioParameter.cpp",
diff --git a/media/libmediaplayerservice/fuzzer/Android.bp b/media/libmediaplayerservice/fuzzer/Android.bp
index 507da29..5ff5a33 100644
--- a/media/libmediaplayerservice/fuzzer/Android.bp
+++ b/media/libmediaplayerservice/fuzzer/Android.bp
@@ -44,7 +44,7 @@
],
fuzz_config: {
cc: [
- "android-media-fuzzing-reports@google.com",
+ "android-media-playback+bugs@google.com",
],
componentid: 155276,
hotlists: [
@@ -130,7 +130,6 @@
"libplayerservice_datasource",
],
shared_libs: [
- "libmediaplayerservice",
"libdatasource",
"libdrmframework",
"libstagefright_httplive",
diff --git a/media/libstagefright/Android.bp b/media/libstagefright/Android.bp
index 886285e..c9a2eea 100644
--- a/media/libstagefright/Android.bp
+++ b/media/libstagefright/Android.bp
@@ -318,6 +318,10 @@
"aconfig_mediacodec_flags_c_lib",
],
+ defaults: [
+ "aconfig_lib_cc_static_link.defaults",
+ ],
+
static_libs: [
"android.media.codec-aconfig-cc",
"libstagefright_esds",
diff --git a/media/libstagefright/MediaMuxer.cpp b/media/libstagefright/MediaMuxer.cpp
index aaf7465..1008445 100644
--- a/media/libstagefright/MediaMuxer.cpp
+++ b/media/libstagefright/MediaMuxer.cpp
@@ -249,6 +249,11 @@
sampleMetaData.setInt32(kKeyIsMuxerData, 1);
}
+ if (flags & MediaCodec::BUFFER_FLAG_CODECCONFIG) {
+ sampleMetaData.setInt32(kKeyIsCodecConfig, true);
+ ALOGV("BUFFER_FLAG_CODEC_CONFIG");
+ }
+
if (flags & MediaCodec::BUFFER_FLAG_EOS) {
sampleMetaData.setInt32(kKeyIsEndOfStream, 1);
ALOGV("BUFFER_FLAG_EOS");
diff --git a/media/libstagefright/SurfaceUtils.cpp b/media/libstagefright/SurfaceUtils.cpp
index 604dcb0..714e312 100644
--- a/media/libstagefright/SurfaceUtils.cpp
+++ b/media/libstagefright/SurfaceUtils.cpp
@@ -111,8 +111,9 @@
}
}
- int finalUsage = usage | consumerUsage;
- ALOGV("gralloc usage: %#x(producer) + %#x(consumer) = %#x", usage, consumerUsage, finalUsage);
+ uint64_t finalUsage = (uint32_t) usage | (uint32_t) consumerUsage;
+ ALOGV("gralloc usage: %#x(producer) + %#x(consumer) = 0x%" PRIx64,
+ usage, consumerUsage, finalUsage);
err = native_window_set_usage(nativeWindow, finalUsage);
if (err != NO_ERROR) {
ALOGE("native_window_set_usage failed: %s (%d)", strerror(-err), -err);
@@ -126,7 +127,7 @@
return err;
}
- ALOGD("set up nativeWindow %p for %dx%d, color %#x, rotation %d, usage %#x",
+ ALOGD("set up nativeWindow %p for %dx%d, color %#x, rotation %d, usage 0x%" PRIx64,
nativeWindow, width, height, format, rotation, finalUsage);
return NO_ERROR;
}
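
The usage fix above widens the OR of the producer and consumer usage words to 64 bits, going through uint32_t so a negative int does not sign-extend, and logs the result with PRIx64. A small self-contained sketch of the same arithmetic, with made-up usage values:

    #include <cinttypes>
    #include <cstdint>
    #include <cstdio>

    int main() {
        // Made-up producer/consumer usage words; the real ones come from the codec and the window.
        int usage = 0x00000b00;
        int consumerUsage = 0x00002000;

        // Widen through uint32_t so a negative int cannot sign-extend into the upper 32 bits.
        uint64_t finalUsage = (uint32_t)usage | (uint32_t)consumerUsage;
        printf("gralloc usage: %#x(producer) + %#x(consumer) = 0x%" PRIx64 "\n",
               (unsigned)usage, (unsigned)consumerUsage, finalUsage);
        return 0;
    }
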
diff --git a/media/libstagefright/VideoRenderQualityTracker.cpp b/media/libstagefright/VideoRenderQualityTracker.cpp
index eb9ac0f..bf29b1d 100644
--- a/media/libstagefright/VideoRenderQualityTracker.cpp
+++ b/media/libstagefright/VideoRenderQualityTracker.cpp
@@ -302,13 +302,6 @@
mRenderDurationMs += (actualRenderTimeUs - mLastRenderTimeUs) / 1000;
}
- // Now that a frame has been rendered, the previously skipped frames can be processed as skipped
- // frames since the app is not skipping them to terminate playback.
- for (int64_t contentTimeUs : mPendingSkippedFrameContentTimeUsList) {
- processMetricsForSkippedFrame(contentTimeUs);
- }
- mPendingSkippedFrameContentTimeUsList = {};
-
// We can render a pending queued frame if it's the last frame of the video, so release it
// immediately.
if (contentTimeUs == mTunnelFrameQueuedContentTimeUs && mTunnelFrameQueuedContentTimeUs != -1) {
@@ -332,9 +325,25 @@
(long long) contentTimeUs, (long long) nextExpectedFrame.contentTimeUs);
break;
}
+ // Process all skipped frames before the dropped frame.
+ while (!mPendingSkippedFrameContentTimeUsList.empty()) {
+ if (mPendingSkippedFrameContentTimeUsList.front() >= nextExpectedFrame.contentTimeUs) {
+ break;
+ }
+ processMetricsForSkippedFrame(mPendingSkippedFrameContentTimeUsList.front());
+ mPendingSkippedFrameContentTimeUsList.pop_front();
+ }
processMetricsForDroppedFrame(nextExpectedFrame.contentTimeUs,
nextExpectedFrame.desiredRenderTimeUs);
}
+ // Process all skipped frames before the rendered frame.
+ while (!mPendingSkippedFrameContentTimeUsList.empty()) {
+ if (mPendingSkippedFrameContentTimeUsList.front() >= nextExpectedFrame.contentTimeUs) {
+ break;
+ }
+ processMetricsForSkippedFrame(mPendingSkippedFrameContentTimeUsList.front());
+ mPendingSkippedFrameContentTimeUsList.pop_front();
+ }
processMetricsForRenderedFrame(nextExpectedFrame.contentTimeUs,
nextExpectedFrame.desiredRenderTimeUs, actualRenderTimeUs,
freezeEventOut, judderEventOut);
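
Instead of flushing every pending skipped frame as soon as any frame renders, the tracker now drains only the skipped entries whose content time precedes the next expected dropped or rendered frame, keeping the metrics in timeline order. A standalone sketch of that drain step, using a plain std::deque in place of the tracker's member list:

    #include <cstdint>
    #include <deque>
    #include <iostream>

    // Drain pending skipped-frame timestamps that occur before the frame about to be accounted for.
    void processSkippedFramesBefore(std::deque<int64_t>& pendingSkippedUs, int64_t nextFrameUs) {
        while (!pendingSkippedUs.empty() && pendingSkippedUs.front() < nextFrameUs) {
            std::cout << "skipped-frame metric for " << pendingSkippedUs.front() << "us\n";
            pendingSkippedUs.pop_front();
        }
    }

    int main() {
        std::deque<int64_t> pendingSkippedUs = {1000, 2000, 5000};
        // A dropped or rendered frame at 3000us only accounts for the skips before it...
        processSkippedFramesBefore(pendingSkippedUs, 3000);
        // ...and the skip at 5000us stays queued for a later frame.
        std::cout << pendingSkippedUs.size() << " skip(s) still pending\n";
        return 0;
    }
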
diff --git a/media/libstagefright/data/media_codecs_sw.xml b/media/libstagefright/data/media_codecs_sw.xml
index bfa361c..d50bc1e 100644
--- a/media/libstagefright/data/media_codecs_sw.xml
+++ b/media/libstagefright/data/media_codecs_sw.xml
@@ -139,6 +139,7 @@
<Limit name="bitrate" range="1-40000000" />
</Variant>
<Feature name="adaptive-playback" />
+ <Feature name="dynamic-color-aspects" />
<Attribute name="software-codec" />
</MediaCodec>
<MediaCodec name="c2.android.hevc.decoder" type="video/hevc" variant="slow-cpu,!slow-cpu">
@@ -160,6 +161,7 @@
<Limit name="bitrate" range="1-5000000" />
</Variant>
<Feature name="adaptive-playback" />
+ <Feature name="dynamic-color-aspects" />
<Attribute name="software-codec" />
</MediaCodec>
<MediaCodec name="c2.android.vp8.decoder" type="video/x-vnd.on2.vp8" variant="slow-cpu,!slow-cpu">
@@ -178,6 +180,7 @@
<Limit name="bitrate" range="1-40000000" />
</Variant>
<Feature name="adaptive-playback" />
+ <Feature name="dynamic-color-aspects" />
<Attribute name="software-codec" />
</MediaCodec>
<MediaCodec name="c2.android.vp9.decoder" type="video/x-vnd.on2.vp9" variant="slow-cpu,!slow-cpu">
@@ -197,6 +200,7 @@
<Limit name="bitrate" range="1-5000000" />
</Variant>
<Feature name="adaptive-playback" />
+ <Feature name="dynamic-color-aspects" />
<Attribute name="software-codec" />
</MediaCodec>
<MediaCodec name="c2.android.av1.decoder" type="video/av01" variant="slow-cpu,!slow-cpu">
@@ -216,6 +220,7 @@
<Limit name="bitrate" range="1-5000000" />
</Variant>
<Feature name="adaptive-playback" />
+ <Feature name="dynamic-color-aspects" />
<Feature name="low-latency" />
<Attribute name="software-codec" />
</MediaCodec>
@@ -235,6 +240,7 @@
<Limit name="bitrate" range="1-5000000" />
</Variant>
<Feature name="adaptive-playback" />
+ <Feature name="dynamic-color-aspects" />
<Feature name="low-latency" />
<Attribute name="software-codec" />
</MediaCodec>
diff --git a/media/libstagefright/omx/Android.bp b/media/libstagefright/omx/Android.bp
index 79ab009..630817c 100644
--- a/media/libstagefright/omx/Android.bp
+++ b/media/libstagefright/omx/Android.bp
@@ -20,9 +20,6 @@
cc_library_shared {
name: "libstagefright_omx",
vendor_available: true,
- vndk: {
- enabled: true,
- },
double_loadable: true,
srcs: [
@@ -218,9 +215,6 @@
cc_library_shared {
name: "libstagefright_omx_utils",
vendor_available: true,
- vndk: {
- enabled: true,
- },
double_loadable: true,
srcs: ["OMXUtils.cpp"],
export_include_dirs: [
diff --git a/media/libstagefright/renderfright/Android.bp b/media/libstagefright/renderfright/Android.bp
index 22b13f6..bb850ca 100644
--- a/media/libstagefright/renderfright/Android.bp
+++ b/media/libstagefright/renderfright/Android.bp
@@ -84,9 +84,6 @@
name: "librenderfright",
defaults: ["librenderfright_defaults"],
vendor_available: true,
- vndk: {
- enabled: true,
- },
double_loadable: true,
cflags: [
diff --git a/media/libstagefright/rtsp/fuzzer/Android.bp b/media/libstagefright/rtsp/fuzzer/Android.bp
index a2791ba..ff64af5 100644
--- a/media/libstagefright/rtsp/fuzzer/Android.bp
+++ b/media/libstagefright/rtsp/fuzzer/Android.bp
@@ -29,11 +29,19 @@
header_libs: [
"libstagefright_rtsp_headers",
],
- fuzz_config:{
+ fuzz_config: {
cc: [
- "android-media-fuzzing-reports@google.com",
+ "android-media-playback@google.com",
],
componentid: 155276,
+ hotlists: [
+ "4593311",
+ ],
+ description: "This fuzzer targets the APIs of libstagefright_rtsp",
+ vector: "local_privileges_required",
+ service_privilege: "privileged",
+ users: "multi_user",
+ fuzzed_code_usage: "shipped",
},
}
@@ -44,7 +52,7 @@
],
defaults: [
"libstagefright_rtsp_fuzzer_defaults",
- ]
+ ],
}
cc_fuzz {
@@ -55,7 +63,7 @@
defaults: [
"libstagefright_rtsp_fuzzer_defaults",
],
- shared_libs:[
+ shared_libs: [
"libandroid_net",
"libbase",
"libstagefright",
diff --git a/media/libstagefright/tests/fuzzers/FuzzerMediaUtility.cpp b/media/libstagefright/tests/fuzzers/FuzzerMediaUtility.cpp
index 9f46a74..b29429a 100644
--- a/media/libstagefright/tests/fuzzers/FuzzerMediaUtility.cpp
+++ b/media/libstagefright/tests/fuzzers/FuzzerMediaUtility.cpp
@@ -21,105 +21,256 @@
#include <media/stagefright/MPEG2TSWriter.h>
#include <media/stagefright/MPEG4Writer.h>
#include <media/stagefright/OggWriter.h>
-
-#include "MediaMimeTypes.h"
-
#include <webm/WebmWriter.h>
namespace android {
-std::string genMimeType(FuzzedDataProvider *dataProvider) {
- uint8_t idx = dataProvider->ConsumeIntegralInRange<uint8_t>(0, kMimeTypes.size() - 1);
- return std::string(kMimeTypes[idx]);
-}
-sp<IMediaExtractor> genMediaExtractor(FuzzedDataProvider *dataProvider, std::string mimeType,
- uint16_t maxDataAmount) {
- uint32_t dataBlobSize = dataProvider->ConsumeIntegralInRange<uint16_t>(0, maxDataAmount);
- std::vector<uint8_t> data = dataProvider->ConsumeBytes<uint8_t>(dataBlobSize);
- // data:[<mediatype>][;base64],<data>
- std::string uri("data:");
- uri += mimeType;
- // Currently libstagefright only accepts base64 uris
- uri += ";base64,";
- android::AString out;
- android::encodeBase64(data.data(), data.size(), &out);
- uri += out.c_str();
-
- sp<DataSource> source =
- DataSourceFactory::getInstance()->CreateFromURI(NULL /* httpService */, uri.c_str());
-
- if (source == NULL) {
- return NULL;
- }
-
- return MediaExtractorFactory::Create(source);
-}
-
-sp<MediaSource> genMediaSource(FuzzedDataProvider *dataProvider, uint16_t maxMediaBlobSize) {
- std::string mime = genMimeType(dataProvider);
- sp<IMediaExtractor> extractor = genMediaExtractor(dataProvider, mime, maxMediaBlobSize);
-
- if (extractor == NULL) {
- return NULL;
- }
-
- for (size_t i = 0; i < extractor->countTracks(); ++i) {
- sp<MetaData> meta = extractor->getTrackMetaData(i);
-
- std::string trackMime = dataProvider->PickValueInArray(kTestedMimeTypes);
- if (!strcasecmp(mime.c_str(), trackMime.c_str())) {
- sp<IMediaSource> track = extractor->getTrack(i);
- if (track == NULL) {
- return NULL;
- }
- return new CallbackMediaSource(track);
- }
- }
-
- return NULL;
-}
-
-sp<MediaWriter> createWriter(int fd, StandardWriters writerType, sp<MetaData> fileMeta) {
+sp<MediaWriter> createWriter(int fd, StandardWriters writerType, sp<MetaData> writerMeta,
+ FuzzedDataProvider* fdp) {
sp<MediaWriter> writer;
+
+ if (fdp->ConsumeBool()) {
+ writerMeta->setInt32(kKeyRealTimeRecording, fdp->ConsumeBool());
+ }
+
switch (writerType) {
- case OGG:
- writer = new OggWriter(fd);
- fileMeta->setInt32(kKeyFileType, output_format::OUTPUT_FORMAT_OGG);
- break;
case AAC:
- writer = new AACWriter(fd);
- fileMeta->setInt32(kKeyFileType, output_format::OUTPUT_FORMAT_AAC_ADIF);
+ writer = sp<AACWriter>::make(fd);
+
+ if (fdp->ConsumeBool()) {
+ writerMeta->setInt32(kKeyFileType, output_format::OUTPUT_FORMAT_AAC_ADIF);
+ }
break;
case AAC_ADTS:
- writer = new AACWriter(fd);
- fileMeta->setInt32(kKeyFileType, output_format::OUTPUT_FORMAT_AAC_ADTS);
- break;
- case WEBM:
- writer = new WebmWriter(fd);
- fileMeta->setInt32(kKeyFileType, output_format::OUTPUT_FORMAT_WEBM);
- break;
- case MPEG4:
- writer = new MPEG4Writer(fd);
- fileMeta->setInt32(kKeyFileType, output_format::OUTPUT_FORMAT_MPEG_4);
+ writer = sp<AACWriter>::make(fd);
+
+ if (fdp->ConsumeBool()) {
+ writerMeta->setInt32(kKeyFileType, output_format::OUTPUT_FORMAT_AAC_ADTS);
+ }
break;
case AMR_NB:
- writer = new AMRWriter(fd);
- fileMeta->setInt32(kKeyFileType, output_format::OUTPUT_FORMAT_AMR_NB);
+ writer = sp<AMRWriter>::make(fd);
+
+ if (fdp->ConsumeBool()) {
+ writerMeta->setInt32(kKeyFileType, output_format::OUTPUT_FORMAT_AMR_NB);
+ }
break;
case AMR_WB:
- writer = new AMRWriter(fd);
- fileMeta->setInt32(kKeyFileType, output_format::OUTPUT_FORMAT_AMR_WB);
+ writer = sp<AMRWriter>::make(fd);
+
+ if (fdp->ConsumeBool()) {
+ writerMeta->setInt32(kKeyFileType, output_format::OUTPUT_FORMAT_AMR_WB);
+ }
break;
case MPEG2TS:
- writer = new MPEG2TSWriter(fd);
- fileMeta->setInt32(kKeyFileType, output_format::OUTPUT_FORMAT_MPEG2TS);
+ writer = sp<MPEG2TSWriter>::make(fd);
+
+ if (fdp->ConsumeBool()) {
+ writerMeta->setInt32(kKeyFileType, output_format::OUTPUT_FORMAT_MPEG2TS);
+ }
break;
- default:
- return nullptr;
+ case MPEG4:
+ writer = sp<MPEG4Writer>::make(fd);
+
+ if (fdp->ConsumeBool()) {
+ writerMeta->setInt32(kKeyFileType, output_format::OUTPUT_FORMAT_MPEG_4);
+ } else if (fdp->ConsumeBool()) {
+ writerMeta->setInt32(kKeyFileType, output_format::OUTPUT_FORMAT_HEIF);
+ } else if (fdp->ConsumeBool()) {
+ writerMeta->setInt32(kKeyFileType, output_format::OUTPUT_FORMAT_THREE_GPP);
+ }
+
+ if (fdp->ConsumeBool()) {
+ writerMeta->setInt32(kKey2ByteNalLength, fdp->ConsumeBool());
+ }
+
+ if (fdp->ConsumeBool()) {
+ writerMeta->setInt32(kKeyTimeScale,
+ fdp->ConsumeIntegralInRange<int32_t>(600, 96000));
+ }
+
+ if (fdp->ConsumeBool()) {
+ writerMeta->setInt32(kKey4BitTrackIds, fdp->ConsumeBool());
+ }
+
+ if (fdp->ConsumeBool()) {
+ writerMeta->setInt64(kKeyTrackTimeStatus, fdp->ConsumeIntegral<int64_t>());
+ }
+
+ if (fdp->ConsumeBool()) {
+ writerMeta->setInt32(kKeyRotation, fdp->ConsumeIntegralInRange<uint8_t>(0, 3) * 90);
+ }
+
+ if (fdp->ConsumeBool()) {
+ writerMeta->setInt64(kKeyTime, fdp->ConsumeIntegral<int64_t>());
+ }
+ break;
+ case OGG:
+ writer = sp<OggWriter>::make(fd);
+
+ if (fdp->ConsumeBool()) {
+ writerMeta->setInt32(kKeyFileType, output_format::OUTPUT_FORMAT_OGG);
+ }
+ break;
+ case WEBM:
+ writer = sp<WebmWriter>::make(fd);
+
+ if (fdp->ConsumeBool()) {
+ writerMeta->setInt32(kKeyFileType, output_format::OUTPUT_FORMAT_WEBM);
+ }
+
+ if (fdp->ConsumeBool()) {
+ writerMeta->setInt32(kKeyTimeScale,
+ fdp->ConsumeIntegralInRange<int32_t>(600, 96000));
+ }
+ break;
}
- if (writer != nullptr) {
- fileMeta->setInt32(kKeyRealTimeRecording, false);
- }
+
return writer;
}
+
+sp<FuzzSource> createSource(StandardWriters writerType, FuzzedDataProvider* fdp) {
+ sp<MetaData> meta = sp<MetaData>::make();
+
+ switch (writerType) {
+ case AAC:
+ case AAC_ADTS:
+ meta->setCString(kKeyMIMEType, MEDIA_MIMETYPE_AUDIO_AAC);
+ meta->setInt32(kKeyChannelCount, fdp->ConsumeIntegralInRange<uint8_t>(1, 7));
+ meta->setInt32(kKeySampleRate, fdp->PickValueInArray<uint32_t>(kSampleRateTable));
+
+ if (fdp->ConsumeBool()) {
+ meta->setInt32(kKeyAACProfile, fdp->ConsumeIntegral<int32_t>());
+ }
+ break;
+ case AMR_NB:
+ meta->setCString(kKeyMIMEType, MEDIA_MIMETYPE_AUDIO_AMR_NB);
+ meta->setInt32(kKeyChannelCount, 1);
+ meta->setInt32(kKeySampleRate, 8000);
+ break;
+ case AMR_WB:
+ meta->setCString(kKeyMIMEType, MEDIA_MIMETYPE_AUDIO_AMR_WB);
+ meta->setInt32(kKeyChannelCount, 1);
+ meta->setInt32(kKeySampleRate, 16000);
+ break;
+ case MPEG2TS:
+ if (fdp->ConsumeBool()) {
+ meta->setCString(kKeyMIMEType, MEDIA_MIMETYPE_AUDIO_AAC);
+ meta->setInt32(kKeyChannelCount, fdp->ConsumeIntegral<int32_t>());
+ meta->setInt32(kKeySampleRate, fdp->PickValueInArray<uint32_t>(kSampleRateTable));
+ } else {
+ meta->setCString(kKeyMIMEType, MEDIA_MIMETYPE_VIDEO_AVC);
+ // The +1s ensure a minimum height and width of 1.
+ meta->setInt32(kKeyWidth, fdp->ConsumeIntegral<uint16_t>() + 1);
+ meta->setInt32(kKeyHeight, fdp->ConsumeIntegral<uint16_t>() + 1);
+ }
+ break;
+ case MPEG4: {
+ auto mime = fdp->PickValueInArray<std::string>(kMpeg4MimeTypes);
+ meta->setCString(kKeyMIMEType, mime.c_str());
+
+ if (fdp->ConsumeBool()) {
+ meta->setInt32(kKeyBackgroundMode, fdp->ConsumeBool());
+ }
+
+ if (!strncasecmp(mime.c_str(), "audio/", 6)) {
+ meta->setInt32(kKeyChannelCount, fdp->ConsumeIntegral<int32_t>());
+ meta->setInt32(kKeySampleRate, fdp->PickValueInArray<uint32_t>(kSampleRateTable));
+
+ } else {
+ // The +1s ensure a minimum height and width of 1.
+ meta->setInt32(kKeyWidth, fdp->ConsumeIntegral<uint16_t>() + 1);
+ meta->setInt32(kKeyHeight, fdp->ConsumeIntegral<uint16_t>() + 1);
+
+ if (fdp->ConsumeBool()) {
+ meta->setInt32(kKeyDisplayWidth, fdp->ConsumeIntegral<uint16_t>());
+ }
+
+ if (fdp->ConsumeBool()) {
+ meta->setInt32(kKeyDisplayHeight, fdp->ConsumeIntegral<uint16_t>());
+ }
+
+ if (fdp->ConsumeBool()) {
+ meta->setInt32(kKeyTileWidth, fdp->ConsumeIntegral<uint16_t>());
+ }
+
+ if (fdp->ConsumeBool()) {
+ meta->setInt32(kKeyTileHeight, fdp->ConsumeIntegral<uint16_t>());
+ }
+ if (fdp->ConsumeBool()) {
+ meta->setInt32(kKeyGridRows, fdp->ConsumeIntegral<uint8_t>());
+ }
+
+ if (fdp->ConsumeBool()) {
+ meta->setInt32(kKeyGridCols, fdp->ConsumeIntegral<uint8_t>());
+ }
+
+ if (fdp->ConsumeBool()) {
+ meta->setInt32(kKeyTemporalLayerCount, fdp->ConsumeIntegral<int32_t>());
+ }
+
+ if (fdp->ConsumeBool()) {
+ meta->setInt32(kKeySARWidth, fdp->ConsumeIntegral<uint16_t>());
+ }
+
+ if (fdp->ConsumeBool()) {
+ meta->setInt32(kKeySARHeight, fdp->ConsumeIntegral<uint16_t>());
+ }
+ }
+
+ if (fdp->ConsumeBool()) {
+ meta->setInt32(kKeyBitRate, fdp->ConsumeIntegral<int32_t>());
+ }
+
+ if (fdp->ConsumeBool()) {
+ meta->setInt32(kKeyMaxBitRate, fdp->ConsumeIntegral<int32_t>());
+ }
+
+ if (fdp->ConsumeBool()) {
+ meta->setInt32(kKeyTrackIsDefault, fdp->ConsumeBool());
+ }
+ break;
+ }
+ case OGG:
+ meta->setCString(kKeyMIMEType, MEDIA_MIMETYPE_AUDIO_OPUS);
+
+ if (fdp->ConsumeBool()) {
+ meta->setInt32(kKeyChannelCount, fdp->ConsumeIntegral<int32_t>());
+ }
+
+ if (fdp->ConsumeBool()) {
+ meta->setInt32(kKeySampleRate, fdp->PickValueInArray<uint32_t>(kSampleRateTable));
+ }
+ break;
+ case WEBM:
+ if (fdp->ConsumeBool()) {
+ if (fdp->ConsumeBool()) {
+ meta->setCString(kKeyMIMEType, MEDIA_MIMETYPE_VIDEO_VP8);
+ } else {
+ meta->setCString(kKeyMIMEType, MEDIA_MIMETYPE_VIDEO_VP9);
+ }
+
+ if (fdp->ConsumeBool()) {
+ // The +1s ensure a minimum height and width of 1.
+ meta->setInt32(kKeyWidth, fdp->ConsumeIntegral<uint16_t>() + 1);
+ meta->setInt32(kKeyHeight, fdp->ConsumeIntegral<uint16_t>() + 1);
+ }
+ } else {
+ if (fdp->ConsumeBool()) {
+ meta->setCString(kKeyMIMEType, MEDIA_MIMETYPE_AUDIO_VORBIS);
+ } else {
+ meta->setCString(kKeyMIMEType, MEDIA_MIMETYPE_AUDIO_OPUS);
+ }
+
+ if (fdp->ConsumeBool()) {
+ meta->setInt32(kKeyChannelCount, fdp->ConsumeIntegral<int32_t>());
+ }
+ meta->setInt32(kKeySampleRate, fdp->PickValueInArray<uint32_t>(kSampleRateTable));
+ }
+
+ break;
+ }
+
+ return sp<FuzzSource>::make(meta, fdp);
+}
} // namespace android
diff --git a/media/libstagefright/tests/fuzzers/FuzzerMediaUtility.h b/media/libstagefright/tests/fuzzers/FuzzerMediaUtility.h
index 6856ac0..ad1218b 100644
--- a/media/libstagefright/tests/fuzzers/FuzzerMediaUtility.h
+++ b/media/libstagefright/tests/fuzzers/FuzzerMediaUtility.h
@@ -15,20 +15,52 @@
*/
#pragma once
-#include <datasource/DataSourceFactory.h>
+
#include <fuzzer/FuzzedDataProvider.h>
-#include <android/IMediaExtractor.h>
-#include <media/IMediaHTTPService.h>
-#include <media/mediarecorder.h>
-#include <media/stagefright/CallbackMediaSource.h>
+
+#include <media/stagefright/MediaBuffer.h>
#include <media/stagefright/MediaDefs.h>
-#include <media/stagefright/MediaExtractorFactory.h>
#include <media/stagefright/MediaWriter.h>
-#include <media/stagefright/MetaData.h>
-#include <media/stagefright/foundation/base64.h>
-#include <utils/StrongPointer.h>
namespace android {
+class FuzzSource : public MediaSource {
+ public:
+ FuzzSource(sp<MetaData> meta, FuzzedDataProvider* fdp) : mMetaData(meta), mFdp(fdp) {}
+
+ status_t start(MetaData*) { return OK; }
+
+ virtual status_t stop() { return OK; }
+
+ status_t read(MediaBufferBase** buffer, const ReadOptions*) {
+ // Ensuring that mBuffer has at least two bytes to avoid check failure
+ // in MPEG2TSWriter::SourceInfo::onMessageReceived().
+ if (mFdp->remaining_bytes() > 2) {
+ auto size = mFdp->ConsumeIntegralInRange<uint8_t>(2, INT8_MAX);
+ mBuffer = mFdp->ConsumeBytes<uint8_t>(size);
+ MediaBufferBase* mbb = new MediaBuffer(mBuffer.data(), mBuffer.size());
+
+ size_t length = mFdp->ConsumeIntegralInRange<size_t>(2, mbb->size());
+ size_t offset = mFdp->ConsumeIntegralInRange<size_t>(0, mbb->size() - length);
+ mbb->set_range(offset, length);
+
+ mbb->meta_data().setInt32(kKeyIsEndOfStream, mFdp->ConsumeBool());
+ mbb->meta_data().setInt64(kKeyTime, mFdp->ConsumeIntegral<uint32_t>() / 2);
+ *buffer = mbb;
+
+ return OK;
+ }
+
+ return ERROR_END_OF_STREAM;
+ }
+
+ sp<MetaData> getFormat() { return mMetaData; }
+
+ private:
+ sp<MetaData> mMetaData = nullptr;
+ FuzzedDataProvider* mFdp = nullptr;
+ std::vector<uint8_t> mBuffer;
+};
+
enum StandardWriters {
OGG,
AAC,
@@ -42,54 +74,22 @@
kMaxValue = MPEG2TS,
};
-static std::string kTestedMimeTypes[] = {"audio/3gpp",
- "audio/amr-wb",
- "audio/vorbis",
- "audio/opus",
- "audio/mp4a-latm",
- "audio/mpeg",
- "audio/mpeg-L1",
- "audio/mpeg-L2",
- "audio/midi",
- "audio/qcelp",
- "audio/g711-alaw",
- "audio/g711-mlaw",
- "audio/flac",
- "audio/aac-adts",
- "audio/gsm",
- "audio/ac3",
- "audio/eac3",
- "audio/eac3-joc",
- "audio/ac4",
- "audio/scrambled",
- "audio/alac",
- "audio/x-ms-wma",
- "audio/x-adpcm-ms",
- "audio/x-adpcm-dvi-ima",
- "video/avc",
- "video/hevc",
- "video/mp4v-es",
- "video/3gpp",
- "video/x-vnd.on2.vp8",
- "video/x-vnd.on2.vp9",
- "video/av01",
- "video/mpeg2",
- "video/dolby-vision",
- "video/scrambled",
- "video/divx",
- "video/divx3",
- "video/xvid",
- "video/x-motion-jpeg",
- "text/3gpp-tt",
- "application/x-subrip",
- "text/vtt",
- "text/cea-608",
- "text/cea-708",
- "application/x-id3v4"};
+static const uint32_t kSampleRateTable[] = {
+ 8000, 11025, 12000, 16000, 22050, 24000, 32000, 44100, 48000, 64000, 88200, 96000,
+};
+static const std::string kMpeg4MimeTypes[] = {
+ MEDIA_MIMETYPE_IMAGE_ANDROID_HEIC, MEDIA_MIMETYPE_IMAGE_AVIF,
-std::string genMimeType(FuzzedDataProvider *dataProvider);
-sp<IMediaExtractor> genMediaExtractor(FuzzedDataProvider *dataProvider, uint16_t dataAmount);
-sp<MediaSource> genMediaSource(FuzzedDataProvider *dataProvider, uint16_t maxMediaBlobSize);
+ MEDIA_MIMETYPE_VIDEO_AV1, MEDIA_MIMETYPE_VIDEO_AVC,
+ MEDIA_MIMETYPE_VIDEO_HEVC, MEDIA_MIMETYPE_VIDEO_MPEG4,
+ MEDIA_MIMETYPE_VIDEO_H263, MEDIA_MIMETYPE_VIDEO_DOLBY_VISION,
-sp<MediaWriter> createWriter(int32_t fd, StandardWriters writerType, sp<MetaData> fileMeta);
+ MEDIA_MIMETYPE_AUDIO_AMR_NB, MEDIA_MIMETYPE_AUDIO_AMR_WB,
+ MEDIA_MIMETYPE_AUDIO_AAC,
+};
+
+sp<MediaWriter> createWriter(int32_t fd, StandardWriters writerType, sp<MetaData> writerMeta,
+ FuzzedDataProvider* fdp);
+
+sp<FuzzSource> createSource(StandardWriters writerType, FuzzedDataProvider* fdp);
} // namespace android
diff --git a/media/libstagefright/tests/fuzzers/WriterFuzzer.cpp b/media/libstagefright/tests/fuzzers/WriterFuzzer.cpp
index 97d1160..cd0a866 100644
--- a/media/libstagefright/tests/fuzzers/WriterFuzzer.cpp
+++ b/media/libstagefright/tests/fuzzers/WriterFuzzer.cpp
@@ -13,216 +13,49 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
-// Authors: corbin.souffrant@leviathansecurity.com
-// dylan.katz@leviathansecurity.com
-
-#include <android-base/file.h>
-#include <android/content/AttributionSourceState.h>
-#include <ctype.h>
-#include <media/mediarecorder.h>
-#include <media/stagefright/MPEG4Writer.h>
-#include <media/stagefright/MediaDefs.h>
-#include <stdlib.h>
-#include <utils/StrongPointer.h>
-#include <utils/Vector.h>
-
-#include <functional>
-#include <string>
#include "FuzzerMediaUtility.h"
-#include "fuzzer/FuzzedDataProvider.h"
-
-static constexpr uint16_t kMaxOperations = 5000;
-static constexpr uint8_t kMaxPackageNameLen = 50;
-// For other strings in mpeg we want a higher limit.
-static constexpr uint16_t kMaxMPEGStrLen = 1000;
-static constexpr uint16_t kMaxMediaBlobSize = 1000;
namespace android {
-using android::content::AttributionSourceState;
+extern "C" int LLVMFuzzerTestOneInput(const uint8_t* data, size_t size) {
+ FuzzedDataProvider fdp(data, size);
-std::string getFourCC(FuzzedDataProvider *fdp) {
- std::string fourCC = fdp->ConsumeRandomLengthString(4);
- // Replace any existing nulls
- for (size_t pos = 0; pos < fourCC.length(); pos++) {
- if (fourCC.at(pos) == '\0') {
- fourCC.replace(pos, 1, "a");
- }
+ // memfd_create() creates an anonymous file and returns a file
+ // descriptor that refers to it. MFD_ALLOW_SEALING allows sealing
+ // operations on this file.
+ int32_t fd = memfd_create("WriterFuzzer", MFD_ALLOW_SEALING);
+ if (fd == -1) {
+ ALOGE("memfd_create() failed: %s", strerror(errno));
+ return 0;
}
- // If our string is too short, fill the remainder with "a"s.
- while (fourCC.length() < 4) {
- fourCC += 'a';
- }
- return fourCC;
-}
+ StandardWriters writerType = fdp.ConsumeEnum<StandardWriters>();
+ sp<MetaData> writerMeta = sp<MetaData>::make();
-typedef std::vector<std::function<void(FuzzedDataProvider*,
- sp<MediaWriter>, sp<MetaData>, int tmpFileFd)>> OperationVec;
-typedef std::vector<std::function<void(FuzzedDataProvider*, MPEG4Writer*)>> MPEG4OperationVec;
-static const OperationVec operations = {
- [](FuzzedDataProvider*, sp<MediaWriter> mediaWriter, sp<MetaData>, int) {
- mediaWriter->pause();
- },
- [](FuzzedDataProvider *dataProvider, sp<MediaWriter> mediaWriter, sp<MetaData>, int tmpFd) {
- bool valid_fd = dataProvider->ConsumeBool();
- int fd = -1;
- if (valid_fd) {
- fd = tmpFd;
- }
- // Args don't seem to be used
- Vector<String16> args;
- mediaWriter->dump(fd, args);
- },
- [](FuzzedDataProvider *dataProvider, sp<MediaWriter> mediaWriter, sp<MetaData>, int tmpFd) {
- bool valid_fd = dataProvider->ConsumeBool();
- int fd = -1;
- if (valid_fd) {
- fd = tmpFd;
- }
- mediaWriter->setNextFd(fd);
- },
- [](FuzzedDataProvider *dataProvider, sp<MediaWriter> mediaWriter, sp<MetaData>, int) {
- mediaWriter->setCaptureRate(dataProvider->ConsumeFloatingPoint<float>());
- },
- [](FuzzedDataProvider *dataProvider, sp<MediaWriter> mediaWriter, sp<MetaData>, int) {
- mediaWriter->setMaxFileDuration(dataProvider->ConsumeIntegral<int64_t>());
- },
- [](FuzzedDataProvider *dataProvider, sp<MediaWriter> mediaWriter, sp<MetaData>, int) {
- mediaWriter->setStartTimeOffsetMs(dataProvider->ConsumeIntegral<int>());
-
- // Likely won't do much, but might as well as do a quick check
- // while we're here.
- mediaWriter->getStartTimeOffsetMs();
- },
- [](FuzzedDataProvider *dataProvider, sp<MediaWriter> mediaWriter, sp<MetaData>, int) {
- mediaWriter->setMaxFileDuration(dataProvider->ConsumeIntegral<int64_t>());
- },
- [](FuzzedDataProvider *dataProvider, sp<MediaWriter> mediaWriter, sp<MetaData>, int) {
- mediaWriter->setMaxFileDuration(dataProvider->ConsumeIntegral<int64_t>());
- },
-};
-
-static const MPEG4OperationVec mpeg4Operations = {
- [](FuzzedDataProvider*, MPEG4Writer *mediaWriter) { mediaWriter->notifyApproachingLimit(); },
- // Lower level write methods.
- // High-level startBox/endBox/etc are all called elsewhere,
- [](FuzzedDataProvider *dataProvider, MPEG4Writer *mediaWriter) {
- uint8_t val = dataProvider->ConsumeIntegral<uint8_t>();
- mediaWriter->writeInt8(val);
- },
- [](FuzzedDataProvider *dataProvider, MPEG4Writer *mediaWriter) {
- uint16_t val = dataProvider->ConsumeIntegral<uint16_t>();
- mediaWriter->writeInt16(val);
- },
- [](FuzzedDataProvider *dataProvider, MPEG4Writer *mediaWriter) {
- uint32_t val = dataProvider->ConsumeIntegral<uint32_t>();
- mediaWriter->writeInt32(val);
- },
- [](FuzzedDataProvider *dataProvider, MPEG4Writer *mediaWriter) {
- uint64_t val = dataProvider->ConsumeIntegral<uint64_t>();
- mediaWriter->writeInt64(val);
- },
- [](FuzzedDataProvider *dataProvider, MPEG4Writer *mediaWriter) {
- std::string strVal = dataProvider->ConsumeRandomLengthString(kMaxMPEGStrLen);
- mediaWriter->writeCString(strVal.c_str());
- },
- [](FuzzedDataProvider *dataProvider, MPEG4Writer *mediaWriter) {
- std::string fourCC = getFourCC(dataProvider);
- mediaWriter->writeFourcc(fourCC.c_str());
- },
-
- // Misc setters
- [](FuzzedDataProvider *dataProvider, MPEG4Writer *mediaWriter) {
- uint32_t layers = dataProvider->ConsumeIntegral<uint32_t>();
- mediaWriter->setTemporalLayerCount(layers);
- },
- [](FuzzedDataProvider *dataProvider, MPEG4Writer *mediaWriter) {
- uint32_t duration = dataProvider->ConsumeIntegral<uint32_t>();
- mediaWriter->setInterleaveDuration(duration);
- },
- [](FuzzedDataProvider *dataProvider, MPEG4Writer *mediaWriter) {
- int lat = dataProvider->ConsumeIntegral<int>();
- int lon = dataProvider->ConsumeIntegral<int>();
- mediaWriter->setGeoData(lat, lon);
- },
-};
-
-// Not all writers can always add new sources, so we'll need additional checks.
-void addSource(FuzzedDataProvider *dataProvider, sp<MediaWriter> mediaWriter) {
- sp<MediaSource> mediaSource = genMediaSource(dataProvider, kMaxMediaBlobSize);
- if (mediaSource == NULL) {
- // There's a static check preventing NULLs in addSource.
- return;
- }
- mediaWriter->addSource(mediaSource);
-}
-
-extern "C" int LLVMFuzzerTestOneInput(const uint8_t *data, size_t size) {
- FuzzedDataProvider dataProvider(data, size);
- TemporaryFile tf;
- sp<MetaData> fileMeta = new MetaData;
- StandardWriters writerType = dataProvider.ConsumeEnum<StandardWriters>();
- sp<MediaWriter> writer = createWriter(tf.fd, writerType, fileMeta);
-
- AttributionSourceState attributionSource;
- attributionSource.packageName = dataProvider.ConsumeRandomLengthString(kMaxPackageNameLen);
- attributionSource.uid = dataProvider.ConsumeIntegral<int32_t>();
- attributionSource.pid = dataProvider.ConsumeIntegral<int32_t>();
- attributionSource.token = sp<BBinder>::make();
- sp<MediaRecorder> mr = new MediaRecorder(attributionSource);
- writer->setListener(mr);
-
- uint8_t baseOpLen = operations.size();
- uint8_t totalLen = baseOpLen;
- uint8_t maxSources;
- // Different writers support different amounts of sources.
- switch (writerType) {
- case StandardWriters::AAC:
- case StandardWriters::AAC_ADTS:
- case StandardWriters::AMR_NB:
- case StandardWriters::AMR_WB:
- case StandardWriters::OGG:
- maxSources = 1;
- break;
- case StandardWriters::WEBM:
- maxSources = 2;
- break;
- default:
- maxSources = UINT8_MAX;
- break;
- }
- // Initialize some number of sources and add them to our writer.
- uint8_t sourceCount = dataProvider.ConsumeIntegralInRange<uint8_t>(0, maxSources);
- for (uint8_t i = 0; i < sourceCount; i++) {
- addSource(&dataProvider, writer);
+ sp<MediaWriter> writer = createWriter(fd, writerType, writerMeta, &fdp);
+ if (writer == nullptr) {
+ close(fd);
+ return 0;
}
- // Increase our range if additional operations are implemented.
- // Currently only MPEG4 has additiona public operations on their writer.
- if (writerType == StandardWriters::MPEG4) {
- totalLen += mpeg4Operations.size();
+ if (writerType == StandardWriters::WEBM) {
+ // This range is set to avoid a CHECK failure in WebmWriter::reset() -> EbmlVoid::EbmlVoid().
+ writer->setMaxFileSize(fdp.ConsumeIntegralInRange<int64_t>(5 * 1024 * 1024, INT64_MAX));
+ } else {
+ writer->setMaxFileSize(fdp.ConsumeIntegral<int64_t>());
}
+ writer->setMaxFileDuration(fdp.ConsumeIntegral<int64_t>());
+ writer->setCaptureRate(fdp.ConsumeFloatingPoint<float>());
- // Many operations require the writer to be started.
- writer->start(fileMeta.get());
- for (size_t ops_run = 0; dataProvider.remaining_bytes() > 0 && ops_run < kMaxOperations - 1;
- ops_run++) {
- uint8_t op = dataProvider.ConsumeIntegralInRange<uint8_t>(0, totalLen - 1);
- if (op < baseOpLen) {
- operations[op](&dataProvider, writer, fileMeta, tf.fd);
- } else if (writerType == StandardWriters::MPEG4) {
- mpeg4Operations[op - baseOpLen](&dataProvider, (MPEG4Writer*)writer.get());
- } else {
- // Here just in case, will error out.
- operations[op](&dataProvider, writer, fileMeta, tf.fd);
- }
- }
+ sp<MediaSource> source = createSource(writerType, &fdp);
+ writer->addSource(source);
+ writer->start(writerMeta.get());
+ writer->pause();
writer->stop();
- writer.clear();
- writer = nullptr;
+ close(fd);
+
return 0;
}
} // namespace android
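
The rewritten fuzzer hands the writer an anonymous in-memory file from memfd_create() instead of a TemporaryFile, so runs leave nothing on disk and the fd can be sealed if needed. A minimal sketch of that setup, assuming a Linux libc that exposes memfd_create(); the label and payload are illustrative.

    #include <cstdio>
    #include <cstring>
    #include <sys/mman.h>
    #include <unistd.h>

    int main() {
        // Anonymous in-memory file; the name is only a debugging label under /proc/<pid>/fd.
        int fd = memfd_create("WriterFuzzerSketch", MFD_ALLOW_SEALING);
        if (fd == -1) {
            perror("memfd_create");
            return 1;
        }
        const char payload[] = "container bytes would be streamed here";
        ssize_t written = write(fd, payload, sizeof(payload) - 1);  // a MediaWriter writes to this fd
        printf("wrote %zd bytes to the in-memory file\n", written);
        close(fd);  // the backing memory goes away with the last reference
        return 0;
    }
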
diff --git a/media/libstagefright/timedtext/test/fuzzer/Android.bp b/media/libstagefright/timedtext/test/fuzzer/Android.bp
index 6590ebb..8724d51 100644
--- a/media/libstagefright/timedtext/test/fuzzer/Android.bp
+++ b/media/libstagefright/timedtext/test/fuzzer/Android.bp
@@ -48,8 +48,16 @@
],
fuzz_config: {
cc: [
- "android-media-fuzzing-reports@google.com",
+ "android-media-playback@google.com",
],
- componentid: 155276,
+ componentid: 42195,
+ hotlists: [
+ "4593311",
+ ],
+ description: "This fuzzer targets the APIs of libstagefright_timedtext",
+ vector: "local_no_privileges_required",
+ service_privilege: "constrained",
+ users: "multi_user",
+ fuzzed_code_usage: "shipped",
},
}
diff --git a/media/libstagefright/webm/WebmFrameThread.cpp b/media/libstagefright/webm/WebmFrameThread.cpp
index 7d1442b..e20a08d 100644
--- a/media/libstagefright/webm/WebmFrameThread.cpp
+++ b/media/libstagefright/webm/WebmFrameThread.cpp
@@ -354,6 +354,17 @@
}
MetaDataBase &md = buffer->meta_data();
+
+ if (mType == kVideoType) {
+ int32_t isCodecConfig = 0;
+ if (md.findInt32(kKeyIsCodecConfig, &isCodecConfig) && isCodecConfig) {
+ ALOGI("ignoring CSD for video track");
+ buffer->release();
+ buffer = NULL;
+ continue;
+ }
+ }
+
CHECK(md.findInt64(kKeyTime, &timestampUs));
if (mStartTimeUs == kUninitialized) {
mStartTimeUs = timestampUs;
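
Together with the MediaMuxer.cpp hunk above that tags codec-config buffers with kKeyIsCodecConfig, this change makes the WebM video path drop those buffers instead of muxing them as frames. A small standalone sketch of the filter; the Buffer struct is a stand-in for the real MediaBuffer metadata.

    #include <cstdint>
    #include <iostream>
    #include <vector>

    // Stand-in for a sample plus the bit that kKeyIsCodecConfig conveys in the metadata.
    struct Buffer {
        int64_t timeUs;
        bool isCodecConfig;
    };

    int main() {
        std::vector<Buffer> videoTrack = {{0, true}, {0, false}, {33000, false}};
        for (const Buffer& b : videoTrack) {
            if (b.isCodecConfig) {
                // WebM video frames should not carry codec-config data, so skip the buffer.
                std::cout << "ignoring CSD buffer\n";
                continue;
            }
            std::cout << "writing frame at " << b.timeUs << "us\n";
        }
        return 0;
    }
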
diff --git a/media/libstagefright/xmlparser/Android.bp b/media/libstagefright/xmlparser/Android.bp
index 2f204f9..2c5e81a 100644
--- a/media/libstagefright/xmlparser/Android.bp
+++ b/media/libstagefright/xmlparser/Android.bp
@@ -16,9 +16,6 @@
cc_library_shared {
name: "libstagefright_xmlparser",
vendor_available: true,
- vndk: {
- enabled: true,
- },
double_loadable: true,
srcs: [
diff --git a/media/module/aidlpersistentsurface/aidl/android/media/IAidlNode.aidl b/media/module/aidlpersistentsurface/aidl/android/media/IAidlNode.aidl
index cf880c2..fe3caf3 100644
--- a/media/module/aidlpersistentsurface/aidl/android/media/IAidlNode.aidl
+++ b/media/module/aidlpersistentsurface/aidl/android/media/IAidlNode.aidl
@@ -44,7 +44,7 @@
void setInputSurface(IAidlBufferSource bufferSource);
void submitBuffer(
int buffer,
- in HardwareBuffer hBuffer,
+ in @nullable HardwareBuffer hBuffer,
int flags,
long timestampUs,
in @nullable ParcelFileDescriptor fence);
diff --git a/media/module/aidlpersistentsurface/wrapper/WAidlGraphicBufferSource.cpp b/media/module/aidlpersistentsurface/wrapper/WAidlGraphicBufferSource.cpp
index 5526b10..a5c72d6 100644
--- a/media/module/aidlpersistentsurface/wrapper/WAidlGraphicBufferSource.cpp
+++ b/media/module/aidlpersistentsurface/wrapper/WAidlGraphicBufferSource.cpp
@@ -51,19 +51,24 @@
int32_t bufferId, uint32_t flags,
const sp<GraphicBuffer> &buffer,
int64_t timestamp, int fenceFd) override {
- AHardwareBuffer *ahwBuffer = nullptr;
- ::aidl::android::hardware::HardwareBuffer hBuffer;
+ ::ndk::ScopedFileDescriptor fence(fenceFd);
if (buffer.get()) {
- ahwBuffer = AHardwareBuffer_from_GraphicBuffer(buffer.get());
+ ::aidl::android::hardware::HardwareBuffer hBuffer;
+ AHardwareBuffer *ahwBuffer = AHardwareBuffer_from_GraphicBuffer(buffer.get());
AHardwareBuffer_acquire(ahwBuffer);
hBuffer.reset(ahwBuffer);
- }
- ::ndk::ScopedFileDescriptor fence(fenceFd);
+ return fromAidlStatus(mNode->submitBuffer(
+ bufferId,
+ std::move(hBuffer),
+ flags,
+ timestamp,
+ fence));
+ }
return fromAidlStatus(mNode->submitBuffer(
bufferId,
- hBuffer,
+ {},
flags,
timestamp,
fence));
diff --git a/media/module/bqhelper/Android.bp b/media/module/bqhelper/Android.bp
index c4dadd0..f9b7dea 100644
--- a/media/module/bqhelper/Android.bp
+++ b/media/module/bqhelper/Android.bp
@@ -69,9 +69,6 @@
name: "libstagefright_bufferqueue_helper",
defaults: ["libstagefright_bufferqueue-defaults"],
vendor_available: true,
- vndk: {
- enabled: true,
- },
min_sdk_version: "29",
shared_libs: [ "libgui" ],
diff --git a/media/module/bufferpool/2.0/AccessorImpl.cpp b/media/module/bufferpool/2.0/AccessorImpl.cpp
index 1d2562e..202d803 100644
--- a/media/module/bufferpool/2.0/AccessorImpl.cpp
+++ b/media/module/bufferpool/2.0/AccessorImpl.cpp
@@ -609,7 +609,7 @@
}
if (ret == false) {
ALOGW("buffer status message processing failure - message : %d connection : %lld",
- message.newStatus, (long long)message.connectionId);
+ (int)message.newStatus, (long long)message.connectionId);
}
}
messages.clear();
diff --git a/media/module/bufferpool/2.0/Android.bp b/media/module/bufferpool/2.0/Android.bp
index 930b026..bdab103 100644
--- a/media/module/bufferpool/2.0/Android.bp
+++ b/media/module/bufferpool/2.0/Android.bp
@@ -60,7 +60,4 @@
vendor_available: true,
// TODO: b/147147992
double_loadable: true,
- vndk: {
- enabled: true,
- },
}
diff --git a/media/module/foundation/Android.bp b/media/module/foundation/Android.bp
index dc8384d..edf4cb5 100644
--- a/media/module/foundation/Android.bp
+++ b/media/module/foundation/Android.bp
@@ -33,9 +33,6 @@
cc_defaults {
name: "libstagefright_foundation_defaults",
vendor_available: true,
- vndk: {
- enabled: true,
- },
host_supported: true,
double_loadable: true,
diff --git a/media/ndk/Android.bp b/media/ndk/Android.bp
index 9ec7700..3d873df 100644
--- a/media/ndk/Android.bp
+++ b/media/ndk/Android.bp
@@ -192,7 +192,6 @@
header_libs: [
"libstagefright_headers",
"libmedia_headers",
- "libstagefright_headers",
],
shared_libs: [
diff --git a/media/utils/Android.bp b/media/utils/Android.bp
index 5b7319a..e340b40 100644
--- a/media/utils/Android.bp
+++ b/media/utils/Android.bp
@@ -113,6 +113,7 @@
export_shared_lib_headers: [
"libpermission",
+ "packagemanager_aidl-cpp",
],
required: [
diff --git a/services/audioflinger/AudioFlinger.cpp b/services/audioflinger/AudioFlinger.cpp
index 97c80a8..c8b0aa1 100644
--- a/services/audioflinger/AudioFlinger.cpp
+++ b/services/audioflinger/AudioFlinger.cpp
@@ -193,6 +193,7 @@
BINDER_METHOD_ENTRY(getSoundDoseInterface) \
BINDER_METHOD_ENTRY(getAudioPolicyConfig) \
BINDER_METHOD_ENTRY(getAudioMixPort) \
+BINDER_METHOD_ENTRY(resetReferencesForTest) \
// singleton for Binder Method Statistics for IAudioFlinger
static auto& getIAudioFlingerStatistics() {
@@ -465,6 +466,8 @@
sMediaLogService->unregisterWriter(iMemory);
}
}
+ mMediaLogNotifier->requestExit();
+ mPatchCommandThread->exit();
}
//static
@@ -1922,10 +1925,11 @@
if (mPrimaryHardwareDev == nullptr) {
return 0;
}
+ if (mInputBufferSizeOrderedDevs.empty()) {
+ return 0;
+ }
mHardwareStatus = AUDIO_HW_GET_INPUT_BUFFER_SIZE;
- sp<DeviceHalInterface> dev = mPrimaryHardwareDev.load()->hwDevice();
-
std::vector<audio_channel_mask_t> channelMasks = {channelMask};
if (channelMask != AUDIO_CHANNEL_IN_MONO) {
channelMasks.push_back(AUDIO_CHANNEL_IN_MONO);
@@ -1955,6 +1959,22 @@
mHardwareStatus = AUDIO_HW_IDLE;
+ auto getInputBufferSize = [](const sp<DeviceHalInterface>& dev, audio_config_t config,
+ size_t* bytes) -> status_t {
+ if (!dev) {
+ return BAD_VALUE;
+ }
+ status_t result = dev->getInputBufferSize(&config, bytes);
+ if (result == BAD_VALUE) {
+ // Retry with the config suggested by the HAL.
+ result = dev->getInputBufferSize(&config, bytes);
+ }
+ if (result != OK || *bytes == 0) {
+ return BAD_VALUE;
+ }
+ return result;
+ };
+
// Change parameters of the configuration each iteration until we find a
// configuration that the device will support, or HAL suggests what it supports.
audio_config_t config = AUDIO_CONFIG_INITIALIZER;
@@ -1966,16 +1986,15 @@
config.sample_rate = testSampleRate;
size_t bytes = 0;
- audio_config_t loopConfig = config;
- status_t result = dev->getInputBufferSize(&config, &bytes);
- if (result == BAD_VALUE) {
- // Retry with the config suggested by the HAL.
- result = dev->getInputBufferSize(&config, &bytes);
+ ret = BAD_VALUE;
+ for (const AudioHwDevice* dev : mInputBufferSizeOrderedDevs) {
+ ret = getInputBufferSize(dev->hwDevice(), config, &bytes);
+ if (ret == OK) {
+ break;
+ }
}
- if (result != OK || bytes == 0) {
- config = loopConfig;
- continue;
- }
+ if (ret == BAD_VALUE) continue;
+
if (config.sample_rate != sampleRate || config.channel_mask != channelMask ||
config.format != format) {
uint32_t dstChannelCount = audio_channel_count_from_in_mask(channelMask);
@@ -2160,6 +2179,13 @@
}
}
+void AudioFlinger::onHardError(std::set<audio_port_handle_t>& trackPortIds) {
+ ALOGI("releasing tracks due to a hard error occurred on an I/O thread");
+ for (const auto portId : trackPortIds) {
+ AudioSystem::releaseOutput(portId);
+ }
+}
+
// removeClient_l() must be called with AudioFlinger::clientMutex() held
void AudioFlinger::removeClient_l(pid_t pid)
{
@@ -2603,12 +2629,43 @@
}
mAudioHwDevs.add(handle, audioDevice);
+ if (strcmp(name, AUDIO_HARDWARE_MODULE_ID_STUB) != 0) {
+ mInputBufferSizeOrderedDevs.insert(audioDevice);
+ }
ALOGI("loadHwModule() Loaded %s audio interface, handle %d", name, handle);
return audioDevice;
}
+// Sort AudioHwDevice to be traversed in the getInputBufferSize call in the following order:
+// Primary, Usb, Bluetooth, A2DP, other modules, remote submix.
+/* static */
+bool AudioFlinger::inputBufferSizeDevsCmp(const AudioHwDevice* lhs, const AudioHwDevice* rhs) {
+ static const std::map<std::string_view, int> kPriorities = {
+ { AUDIO_HARDWARE_MODULE_ID_PRIMARY, 0 }, { AUDIO_HARDWARE_MODULE_ID_USB, 1 },
+ { AUDIO_HARDWARE_MODULE_ID_BLUETOOTH, 2 }, { AUDIO_HARDWARE_MODULE_ID_A2DP, 3 },
+ { AUDIO_HARDWARE_MODULE_ID_REMOTE_SUBMIX, std::numeric_limits<int>::max() }
+ };
+
+ const std::string_view lhsName = lhs->moduleName();
+ const std::string_view rhsName = rhs->moduleName();
+
+ auto lhsPriority = std::numeric_limits<int>::max() - 1;
+ if (const auto lhsIt = kPriorities.find(lhsName); lhsIt != kPriorities.end()) {
+ lhsPriority = lhsIt->second;
+ }
+ auto rhsPriority = std::numeric_limits<int>::max() - 1;
+ if (const auto rhsIt = kPriorities.find(rhsName); rhsIt != kPriorities.end()) {
+ rhsPriority = rhsIt->second;
+ }
+
+ if (lhsPriority != rhsPriority) {
+ return lhsPriority < rhsPriority;
+ }
+ return lhsName < rhsName;
+}
+
// ----------------------------------------------------------------------------
uint32_t AudioFlinger::getPrimaryOutputSamplingRate() const
@@ -4754,6 +4811,13 @@
return mPatchPanel->getAudioMixPort_l(devicePort, mixPort);
}
+status_t AudioFlinger::resetReferencesForTest() {
+ mDeviceEffectManager.clear();
+ mPatchPanel.clear();
+ mMelReporter->resetReferencesForTest();
+ return NO_ERROR;
+}
+
// ----------------------------------------------------------------------------
status_t AudioFlinger::onTransactWrapper(TransactionCode code,
@@ -4788,6 +4852,7 @@
case TransactionCode::INVALIDATE_TRACKS:
case TransactionCode::GET_AUDIO_POLICY_CONFIG:
case TransactionCode::GET_AUDIO_MIX_PORT:
+ case TransactionCode::RESET_REFERENCES_FOR_TEST:
ALOGW("%s: transaction %d received from PID %d",
__func__, static_cast<int>(code), IPCThreadState::self()->getCallingPid());
// return status only for non void methods
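
mInputBufferSizeOrderedDevs keeps the HAL modules in the order getInputBufferSize() probes them: primary first, then usb, bluetooth and a2dp, any other module next, and r_submix last, with ties broken by name. The sketch below drives a std::set with an equivalent comparator; the module-name strings stand in for the AUDIO_HARDWARE_MODULE_ID_* values and are written out literally here only for illustration.

    #include <iostream>
    #include <limits>
    #include <map>
    #include <set>
    #include <string>
    #include <string_view>

    // Mirrors the priority table used by the comparator above.
    static int priorityOf(std::string_view name) {
        static const std::map<std::string_view, int> kPriorities = {
                {"primary", 0}, {"usb", 1}, {"bluetooth", 2}, {"a2dp", 3},
                {"r_submix", std::numeric_limits<int>::max()}};
        const auto it = kPriorities.find(name);
        return it != kPriorities.end() ? it->second : std::numeric_limits<int>::max() - 1;
    }

    static bool moduleCmp(const std::string& lhs, const std::string& rhs) {
        const int lp = priorityOf(lhs);
        const int rp = priorityOf(rhs);
        return lp != rp ? lp < rp : lhs < rhs;  // unknown modules tie and sort by name
    }

    int main() {
        std::set<std::string, decltype(&moduleCmp)> ordered(moduleCmp);
        ordered.insert({"r_submix", "msd", "usb", "primary", "bluetooth"});
        for (const auto& name : ordered) {
            std::cout << name << '\n';  // primary, usb, bluetooth, msd, r_submix
        }
        return 0;
    }
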
diff --git a/services/audioflinger/AudioFlinger.h b/services/audioflinger/AudioFlinger.h
index 0f75d6e..3885465 100644
--- a/services/audioflinger/AudioFlinger.h
+++ b/services/audioflinger/AudioFlinger.h
@@ -61,6 +61,8 @@
public:
static void instantiate() ANDROID_API;
+ status_t resetReferencesForTest();
+
private:
// ---- begin IAudioFlinger interface
@@ -396,6 +398,8 @@
void onSupportedLatencyModesChanged(
audio_io_handle_t output, const std::vector<audio_latency_mode_t>& modes) final
EXCLUDES_AudioFlinger_ClientMutex;
+ void onHardError(std::set<audio_port_handle_t>& trackPortIds) final
+ EXCLUDES_AudioFlinger_ClientMutex;
// ---- end of IAfThreadCallback interface
@@ -629,6 +633,10 @@
DefaultKeyedVector<audio_module_handle_t, AudioHwDevice*> mAudioHwDevs
GUARDED_BY(hardwareMutex()) {nullptr /* defValue */};
+ static bool inputBufferSizeDevsCmp(const AudioHwDevice* lhs, const AudioHwDevice* rhs);
+ std::set<AudioHwDevice*, decltype(&inputBufferSizeDevsCmp)>
+ mInputBufferSizeOrderedDevs GUARDED_BY(hardwareMutex()) {inputBufferSizeDevsCmp};
+
const sp<DevicesFactoryHalInterface> mDevicesFactoryHal =
DevicesFactoryHalInterface::create();
/* const */ sp<DevicesFactoryHalCallback> mDevicesFactoryHalCallback; // set onFirstRef().
diff --git a/services/audioflinger/IAfThread.h b/services/audioflinger/IAfThread.h
index 46a67e8..c2a58c6 100644
--- a/services/audioflinger/IAfThread.h
+++ b/services/audioflinger/IAfThread.h
@@ -115,9 +115,11 @@
const sp<AudioIoDescriptor>& ioDesc,
pid_t pid = 0) EXCLUDES_AudioFlinger_ClientMutex = 0;
virtual void onNonOffloadableGlobalEffectEnable() EXCLUDES_AudioFlinger_Mutex = 0;
- virtual void onSupportedLatencyModesChanged(
- audio_io_handle_t output, const std::vector<audio_latency_mode_t>& modes)
+ virtual void onSupportedLatencyModesChanged(audio_io_handle_t output,
+ const std::vector<audio_latency_mode_t>& modes)
EXCLUDES_AudioFlinger_ClientMutex = 0;
+
+ virtual void onHardError(std::set<audio_port_handle_t>& trackPortIds) = 0;
};
class IAfThreadBase : public virtual RefBase {
diff --git a/services/audioflinger/MelReporter.cpp b/services/audioflinger/MelReporter.cpp
index 41c5096..d210a10 100644
--- a/services/audioflinger/MelReporter.cpp
+++ b/services/audioflinger/MelReporter.cpp
@@ -117,6 +117,11 @@
}
}
+void MelReporter::resetReferencesForTest() {
+ mAfMelReporterCallback.clear();
+ mSoundDoseManager->resetReferencesForTest();
+}
+
void MelReporter::onCreateAudioPatch(audio_patch_handle_t handle,
const IAfPatchPanel::Patch& patch) {
if (!mSoundDoseManager->isCsdEnabled()) {
diff --git a/services/audioflinger/MelReporter.h b/services/audioflinger/MelReporter.h
index 235dd11..f1f35b3 100644
--- a/services/audioflinger/MelReporter.h
+++ b/services/audioflinger/MelReporter.h
@@ -102,6 +102,8 @@
const std::vector<playback_track_metadata_v7_t>& metadataVec)
EXCLUDES_AudioFlinger_Mutex;
+ void resetReferencesForTest();
+
private:
struct ActiveMelPatch {
audio_io_handle_t streamHandle{AUDIO_IO_HANDLE_NONE};
@@ -130,7 +132,7 @@
bool useHalSoundDoseInterface_l() REQUIRES(mutex());
- const sp<IAfMelReporterCallback> mAfMelReporterCallback;
+ sp<IAfMelReporterCallback> mAfMelReporterCallback;
/* const */ sp<SoundDoseManager> mSoundDoseManager; // set onFirstRef
diff --git a/services/audioflinger/Threads.cpp b/services/audioflinger/Threads.cpp
index 1d7c356..0766a0d 100644
--- a/services/audioflinger/Threads.cpp
+++ b/services/audioflinger/Threads.cpp
@@ -2690,14 +2690,17 @@
}
}
- // Set DIRECT flag if current thread is DirectOutputThread. This can
- // happen when the playback is rerouted to direct output thread by
+ // Set DIRECT/OFFLOAD flag if current thread is DirectOutputThread/OffloadThread.
+ // This can happen when the playback is rerouted to direct output/offload thread by
// dynamic audio policy.
// Do NOT report the flag changes back to client, since the client
- // doesn't explicitly request a direct flag.
+ // doesn't explicitly request a direct/offload flag.
audio_output_flags_t trackFlags = *flags;
if (mType == DIRECT) {
trackFlags = static_cast<audio_output_flags_t>(trackFlags | AUDIO_OUTPUT_FLAG_DIRECT);
+ } else if (mType == OFFLOAD) {
+ trackFlags = static_cast<audio_output_flags_t>(trackFlags |
+ AUDIO_OUTPUT_FLAG_COMPRESS_OFFLOAD | AUDIO_OUTPUT_FLAG_DIRECT);
}
*afTrackFlags = trackFlags;
@@ -2997,6 +3000,23 @@
}
}
+std::set<audio_port_handle_t> PlaybackThread::getTrackPortIds_l()
+{
+ std::set<int32_t> result;
+ for (const auto& t : mTracks) {
+ if (t->isExternalTrack()) {
+ result.insert(t->portId());
+ }
+ }
+ return result;
+}
+
+std::set<audio_port_handle_t> PlaybackThread::getTrackPortIds()
+{
+ audio_utils::lock_guard _l(mutex());
+ return getTrackPortIds_l();
+}
+
String8 PlaybackThread::getParameters(const String8& keys)
{
audio_utils::lock_guard _l(mutex());
@@ -3050,9 +3070,9 @@
mCallbackThread->resetDraining();
}
-void PlaybackThread::onError()
+void PlaybackThread::onError(bool isHardError)
{
- mCallbackThread->setAsyncError();
+ mCallbackThread->setAsyncError(isHardError);
}
void PlaybackThread::onCodecFormatChanged(
@@ -3357,9 +3377,9 @@
return NO_ERROR;
} else {
status_t status;
- uint32_t frames;
+ uint64_t frames = 0;
status = mOutput->getRenderPosition(&frames);
- *dspFrames = (size_t)frames;
+ *dspFrames = (uint32_t)frames;
return status;
}
}
@@ -5357,11 +5377,15 @@
broadcast_l();
}
-void PlaybackThread::onAsyncError()
+void PlaybackThread::onAsyncError(bool isHardError)
{
+ auto allTrackPortIds = getTrackPortIds();
for (int i = AUDIO_STREAM_SYSTEM; i < (int)AUDIO_STREAM_CNT; i++) {
invalidateTracks((audio_stream_type_t)i);
}
+ if (isHardError) {
+ mAfThreadCallback->onHardError(allTrackPortIds);
+ }
}
void MixerThread::threadLoop_mix()
@@ -5903,7 +5927,7 @@
vaf = v * sendLevel * (1. / MAX_GAIN_INT);
}
- track->setFinalVolume(vrf, vlf);
+ track->setFinalVolume(vlf, vrf);
// Delegate volume control to effect in track effect chain if needed
if (chain != 0 && chain->setVolume_l(&vl, &vr)) {
@@ -7077,11 +7101,14 @@
{
PlaybackThread::flushHw_l();
mOutput->flush();
- mHwPaused = false;
mFlushPending = false;
mTimestampVerifier.discontinuity(discontinuityForStandbyOrFlush());
mTimestamp.clear();
mMonotonicFrameCounter.onFlush();
+ // We do not reset mHwPaused which is hidden from the Track client.
+ // Note: the client track in Tracks.cpp and AudioTrack.cpp
+ // has a FLUSHED state but the DirectOutputThread does not;
+ // those tracks will continue to show isStopped().
}
int64_t DirectOutputThread::computeWaitTimeNs_l() const {
@@ -7099,7 +7126,7 @@
mPlaybackThread(playbackThread),
mWriteAckSequence(0),
mDrainSequence(0),
- mAsyncError(false)
+ mAsyncError(ASYNC_ERROR_NONE)
{
}
@@ -7113,7 +7140,7 @@
while (!exitPending()) {
uint32_t writeAckSequence;
uint32_t drainSequence;
- bool asyncError;
+ AsyncError asyncError;
{
audio_utils::unique_lock _l(mutex());
@@ -7134,7 +7161,7 @@
drainSequence = mDrainSequence;
mDrainSequence &= ~1;
asyncError = mAsyncError;
- mAsyncError = false;
+ mAsyncError = ASYNC_ERROR_NONE;
}
{
const sp<PlaybackThread> playbackThread = mPlaybackThread.promote();
@@ -7145,8 +7172,8 @@
if (drainSequence & 1) {
playbackThread->resetDraining(drainSequence >> 1);
}
- if (asyncError) {
- playbackThread->onAsyncError();
+ if (asyncError != ASYNC_ERROR_NONE) {
+ playbackThread->onAsyncError(asyncError == ASYNC_ERROR_HARD);
}
}
}
@@ -7196,10 +7223,10 @@
}
}
-void AsyncCallbackThread::setAsyncError()
+void AsyncCallbackThread::setAsyncError(bool isHardError)
{
audio_utils::lock_guard _l(mutex());
- mAsyncError = true;
+ mAsyncError = isHardError ? ASYNC_ERROR_HARD : ASYNC_ERROR_SOFT;
mWaitWorkCV.notify_one();
}
@@ -8170,6 +8197,7 @@
for (int64_t loopCount = 0;; ++loopCount) { // loopCount used for statistics tracking
// Note: these sp<> are released at the end of the for loop outside of the mutex() lock.
sp<IAfRecordTrack> activeTrack;
+ std::vector<sp<IAfRecordTrack>> oldActiveTracks;
Vector<sp<IAfEffectChain>> effectChains;
// activeTracks accumulates a copy of a subset of mActiveTracks
@@ -8219,7 +8247,9 @@
bool doBroadcast = false;
bool allStopped = true;
for (size_t i = 0; i < size; ) {
-
+ if (activeTrack) { // ensure track release is outside lock.
+ oldActiveTracks.emplace_back(std::move(activeTrack));
+ }
activeTrack = mActiveTracks[i];
if (activeTrack->isTerminated()) {
if (activeTrack->isFastTrack()) {
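
The async error path in the thread changes above is now three-valued: the callback thread latches none, soft, or hard, and a hard error additionally asks AudioFlinger to release the affected tracks' outputs via onHardError(). A toy sketch of that dispatch, with the real threading and locking omitted:

    #include <iostream>

    // Stand-in for the three-state latch the callback thread keeps (mAsyncError).
    enum AsyncError { ASYNC_ERROR_NONE, ASYNC_ERROR_SOFT, ASYNC_ERROR_HARD };

    struct PlaybackSketch {
        void onAsyncError(bool isHardError) {
            std::cout << "invalidate all tracks\n";
            if (isHardError) {
                // A hard error also releases the external tracks' outputs (onHardError upstream).
                std::cout << "release track outputs\n";
            }
        }
    };

    int main() {
        AsyncError pending = ASYNC_ERROR_HARD;  // set by setAsyncError(true) from the HAL callback
        PlaybackSketch thread;
        if (pending != ASYNC_ERROR_NONE) {
            thread.onAsyncError(pending == ASYNC_ERROR_HARD);
            pending = ASYNC_ERROR_NONE;
        }
        return 0;
    }
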
diff --git a/services/audioflinger/Threads.h b/services/audioflinger/Threads.h
index ddf0669..98e3298 100644
--- a/services/audioflinger/Threads.h
+++ b/services/audioflinger/Threads.h
@@ -946,7 +946,7 @@
// StreamOutHalInterfaceCallback implementation
virtual void onWriteReady();
virtual void onDrainReady();
- virtual void onError();
+ virtual void onError(bool /*isHardError*/);
public: // AsyncCallbackThread
void resetWriteBlocked(uint32_t sequence);
@@ -958,7 +958,7 @@
virtual bool shouldStandby_l() REQUIRES(mutex(), ThreadBase_ThreadLoop);
virtual void onAddNewTrack_l() REQUIRES(mutex());
public: // AsyncCallbackThread
- void onAsyncError(); // error reported by AsyncCallbackThread
+ void onAsyncError(bool isHardError); // error reported by AsyncCallbackThread
protected:
// StreamHalInterfaceCodecFormatCallback implementation
void onCodecFormatChanged(
@@ -1371,6 +1371,8 @@
bool destroyTrack_l(const sp<IAfTrack>& track) final REQUIRES(mutex());
void removeTrack_l(const sp<IAfTrack>& track) REQUIRES(mutex());
+ std::set<audio_port_handle_t> getTrackPortIds_l() REQUIRES(mutex());
+ std::set<audio_port_handle_t> getTrackPortIds();
void readOutputParameters_l() REQUIRES(mutex());
MetadataUpdate updateMetadata_l() final REQUIRES(mutex());
@@ -1834,7 +1836,7 @@
void resetWriteBlocked();
void setDraining(uint32_t sequence);
void resetDraining();
- void setAsyncError();
+ void setAsyncError(bool isHardError);
private:
const wp<PlaybackThread> mPlaybackThread;
@@ -1848,7 +1850,8 @@
uint32_t mDrainSequence;
audio_utils::condition_variable mWaitWorkCV;
mutable audio_utils::mutex mMutex{audio_utils::MutexOrder::kAsyncCallbackThread_Mutex};
- bool mAsyncError;
+ enum AsyncError { ASYNC_ERROR_NONE, ASYNC_ERROR_SOFT, ASYNC_ERROR_HARD };
+ AsyncError mAsyncError;
audio_utils::mutex& mutex() const RETURN_CAPABILITY(audio_utils::AsyncCallbackThread_Mutex) {
return mMutex;
diff --git a/services/audioflinger/datapath/AudioStreamIn.cpp b/services/audioflinger/datapath/AudioStreamIn.cpp
index 76618f4..165ac25 100644
--- a/services/audioflinger/datapath/AudioStreamIn.cpp
+++ b/services/audioflinger/datapath/AudioStreamIn.cpp
@@ -58,7 +58,7 @@
if (mHalFormatHasProportionalFrames &&
(flags & AUDIO_INPUT_FLAG_DIRECT) == AUDIO_INPUT_FLAG_DIRECT) {
- // For DirectRecord reset timestamp to 0 on standby.
+ // For DirectRecord reset position to 0 on standby.
const uint64_t adjustedPosition = (halPosition <= mFramesReadAtStandby) ?
0 : (halPosition - mFramesReadAtStandby);
// Scale from HAL sample rate to application rate.
diff --git a/services/audioflinger/datapath/AudioStreamOut.cpp b/services/audioflinger/datapath/AudioStreamOut.cpp
index 9851f3a..a686ff6 100644
--- a/services/audioflinger/datapath/AudioStreamOut.cpp
+++ b/services/audioflinger/datapath/AudioStreamOut.cpp
@@ -51,42 +51,17 @@
return NO_INIT;
}
- uint32_t halPosition = 0;
+ uint64_t halPosition = 0;
const status_t status = stream->getRenderPosition(&halPosition);
if (status != NO_ERROR) {
return status;
}
-
- // Maintain a 64-bit render position using the 32-bit result from the HAL.
- // This delta calculation relies on the arithmetic overflow behavior
- // of integers. For example (100 - 0xFFFFFFF0) = 116.
- const auto truncatedPosition = (uint32_t)mRenderPosition;
- int32_t deltaHalPosition; // initialization not needed, overwitten by __builtin_sub_overflow()
- (void) __builtin_sub_overflow(halPosition, truncatedPosition, &deltaHalPosition);
-
- if (deltaHalPosition > 0) {
- mRenderPosition += deltaHalPosition;
- } else if (mExpectRetrograde) {
- mExpectRetrograde = false;
- mRenderPosition -= static_cast<uint64_t>(-deltaHalPosition);
- }
// Scale from HAL sample rate to application rate.
- *frames = mRenderPosition / mRateMultiplier;
+ *frames = halPosition / mRateMultiplier;
return status;
}
-// return bottom 32-bits of the render position
-status_t AudioStreamOut::getRenderPosition(uint32_t *frames)
-{
- uint64_t position64 = 0;
- const status_t status = getRenderPosition(&position64);
- if (status == NO_ERROR) {
- *frames = (uint32_t)position64;
- }
- return status;
-}
-
status_t AudioStreamOut::getPresentationPosition(uint64_t *frames, struct timespec *timestamp)
{
if (stream == nullptr) {
@@ -101,7 +76,7 @@
if (mHalFormatHasProportionalFrames &&
(flags & AUDIO_OUTPUT_FLAG_DIRECT) == AUDIO_OUTPUT_FLAG_DIRECT) {
- // For DirectTrack reset timestamp to 0 on standby.
+ // For DirectTrack reset position to 0 on standby.
const uint64_t adjustedPosition = (halPosition <= mFramesWrittenAtStandby) ?
0 : (halPosition - mFramesWrittenAtStandby);
// Scale from HAL sample rate to application rate.
@@ -179,8 +154,6 @@
int AudioStreamOut::flush()
{
- mRenderPosition = 0;
- mExpectRetrograde = false;
mFramesWritten = 0;
mFramesWrittenAtStandby = 0;
const status_t result = stream->flush();
@@ -189,12 +162,14 @@
int AudioStreamOut::standby()
{
- mRenderPosition = 0;
- mExpectRetrograde = false;
mFramesWrittenAtStandby = mFramesWritten;
return stream->standby();
}
+void AudioStreamOut::presentationComplete() {
+ stream->presentationComplete();
+}
+
ssize_t AudioStreamOut::write(const void *buffer, size_t numBytes)
{
size_t bytesWritten;
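
The block deleted above extended the HAL's 32-bit render position into a 64-bit counter by exploiting unsigned wraparound; a standalone sketch of that retired technique follows (the retrograde handling tied to presentationComplete() is omitted, and this is not the new code path, which now reads a 64-bit position from the HAL directly).

#include <cassert>
#include <cstdint>

// Sketch of the retired 32-bit -> 64-bit extension: the delta between the new
// HAL position and the truncated 64-bit position is computed with wraparound,
// e.g. (100 - 0xFFFFFFF0) yields 116 once the borrow is discarded.
uint64_t extendRenderPosition(uint64_t current64, uint32_t hal32) {
    const uint32_t truncated = static_cast<uint32_t>(current64);
    int32_t delta;  // overwritten by __builtin_sub_overflow()
    (void)__builtin_sub_overflow(hal32, truncated, &delta);
    if (delta > 0) {
        current64 += delta;
    }
    return current64;
}

int main() {
    uint64_t pos = 0xFFFFFFF0;              // just below the 32-bit wrap point
    pos = extendRenderPosition(pos, 100u);  // HAL counter wrapped around
    assert(pos == 0x100000064ull);          // 0xFFFFFFF0 + 116
    return 0;
}
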
diff --git a/services/audioflinger/datapath/AudioStreamOut.h b/services/audioflinger/datapath/AudioStreamOut.h
index ea41bba..2c9fb3e 100644
--- a/services/audioflinger/datapath/AudioStreamOut.h
+++ b/services/audioflinger/datapath/AudioStreamOut.h
@@ -51,9 +51,6 @@
virtual ~AudioStreamOut();
- // Get the bottom 32-bits of the 64-bit render position.
- status_t getRenderPosition(uint32_t *frames);
-
virtual status_t getRenderPosition(uint64_t *frames);
virtual status_t getPresentationPosition(uint64_t *frames, struct timespec *timestamp);
@@ -91,21 +88,14 @@
virtual status_t flush();
virtual status_t standby();
- // Avoid suppressing retrograde motion in mRenderPosition for gapless offload/direct when
- // transitioning between tracks.
- // The HAL resets the frame position without flush/stop being called, but calls back prior to
- // this event. So, on the next occurrence of retrograde motion, we permit backwards movement of
- // mRenderPosition.
- virtual void presentationComplete() { mExpectRetrograde = true; }
+ virtual void presentationComplete();
protected:
uint64_t mFramesWritten = 0; // reset by flush
uint64_t mFramesWrittenAtStandby = 0;
- uint64_t mRenderPosition = 0; // reset by flush, standby, or presentation complete
int mRateMultiplier = 1;
bool mHalFormatHasProportionalFrames = false;
size_t mHalFrameSize = 0;
- bool mExpectRetrograde = false; // see presentationComplete
};
} // namespace android
diff --git a/services/audioflinger/sounddose/SoundDoseManager.cpp b/services/audioflinger/sounddose/SoundDoseManager.cpp
index 1ff08dc..564c569 100644
--- a/services/audioflinger/sounddose/SoundDoseManager.cpp
+++ b/services/audioflinger/sounddose/SoundDoseManager.cpp
@@ -666,6 +666,10 @@
}
}
+void SoundDoseManager::resetReferencesForTest() {
+ mMelReporterCallback.clear();
+}
+
sp<media::ISoundDose> SoundDoseManager::getSoundDoseInterface(
const sp<media::ISoundDoseCallback>& callback) {
ALOGV("%s: Register ISoundDoseCallback", __func__);
diff --git a/services/audioflinger/sounddose/SoundDoseManager.h b/services/audioflinger/sounddose/SoundDoseManager.h
index 347eabe..6a8238ea 100644
--- a/services/audioflinger/sounddose/SoundDoseManager.h
+++ b/services/audioflinger/sounddose/SoundDoseManager.h
@@ -148,6 +148,8 @@
void onMomentaryExposure(float currentMel, audio_port_handle_t deviceId) const override;
+ void resetReferencesForTest();
+
private:
class SoundDose : public media::BnSoundDose,
public IBinder::DeathRecipient {
@@ -218,7 +220,7 @@
mutable std::mutex mLock;
- const sp<IMelReporterCallback> mMelReporterCallback;
+ sp<IMelReporterCallback> mMelReporterCallback;
// no need for lock since MelAggregator is thread-safe
const sp<audio_utils::MelAggregator> mMelAggregator;
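
A hedged sketch of why dropping const on mMelReporterCallback enables resetReferencesForTest(): the manager can release its strong reference so a test fixture does not keep the callback alive through teardown. std::shared_ptr stands in for sp<>, and the class below is an illustration, not the SoundDoseManager implementation.

#include <cassert>
#include <memory>

// Simplified stand-in for sp<IMelReporterCallback>; sketch only.
struct IMelReporterCallback {};

class SoundDoseManagerLike {
  public:
    explicit SoundDoseManagerLike(std::shared_ptr<IMelReporterCallback> cb)
        : mMelReporterCallback(std::move(cb)) {}

    // Mirrors SoundDoseManager::resetReferencesForTest(): release the strong
    // reference so the callback owner can be destroyed in test teardown.
    void resetReferencesForTest() { mMelReporterCallback.reset(); }

  private:
    std::shared_ptr<IMelReporterCallback> mMelReporterCallback;  // was const before this change
};

int main() {
    auto callback = std::make_shared<IMelReporterCallback>();
    SoundDoseManagerLike manager(callback);
    assert(callback.use_count() == 2);
    manager.resetReferencesForTest();  // manager no longer keeps the callback alive
    assert(callback.use_count() == 1);
    return 0;
}
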
diff --git a/services/audioparameterparser/Android.bp b/services/audioparameterparser/Android.bp
index f5feece..1c1c1e1 100644
--- a/services/audioparameterparser/Android.bp
+++ b/services/audioparameterparser/Android.bp
@@ -35,10 +35,10 @@
name: "android.hardware.audio.parameter_parser.example_defaults",
defaults: [
"latest_android_hardware_audio_core_ndk_shared",
+ "latest_av_audio_types_aidl_ndk_shared",
],
shared_libs: [
- "av-audio-types-aidl-V1-ndk",
"libbase",
"libbinder_ndk",
],
diff --git a/services/audiopolicy/common/managerdefinitions/include/IOProfile.h b/services/audiopolicy/common/managerdefinitions/include/IOProfile.h
index f3a9518..688772c 100644
--- a/services/audiopolicy/common/managerdefinitions/include/IOProfile.h
+++ b/services/audiopolicy/common/managerdefinitions/include/IOProfile.h
@@ -70,10 +70,17 @@
return mMixerBehaviors;
}
+ enum CompatibilityScore {
+ NO_MATCH = 0,
+ PARTIAL_MATCH = 1,
+ EXACT_MATCH = 2
+ };
+
/**
- * @brief isCompatibleProfile: This method is used for input and direct output,
+ * @brief getCompatibilityScore: This method is used for input and direct output,
* and is not used for other output.
- * Checks if the IO profile is compatible with specified parameters.
+ * Returns a compatibility score indicating how well the IO profile matches
+ * the specified parameters.
* For input, flags is interpreted as audio_input_flags_t.
* TODO: merge audio_output_flags_t and audio_input_flags_t.
*
@@ -86,18 +93,18 @@
* @param updatedChannelMask if non-NULL, it is assigned the actual channel mask
* @param flags to be checked for compatibility
* @param exactMatchRequiredForInputFlags true if exact match is required on flags
- * @return true if the profile is compatible, false otherwise.
+ * @return the compatibility score of the IO profile for the given parameters.
*/
- bool isCompatibleProfile(const DeviceVector &devices,
- uint32_t samplingRate,
- uint32_t *updatedSamplingRate,
- audio_format_t format,
- audio_format_t *updatedFormat,
- audio_channel_mask_t channelMask,
- audio_channel_mask_t *updatedChannelMask,
- // FIXME parameter type
- uint32_t flags,
- bool exactMatchRequiredForInputFlags = false) const;
+ CompatibilityScore getCompatibilityScore(const DeviceVector &devices,
+ uint32_t samplingRate,
+ uint32_t *updatedSamplingRate,
+ audio_format_t format,
+ audio_format_t *updatedFormat,
+ audio_channel_mask_t channelMask,
+ audio_channel_mask_t *updatedChannelMask,
+ // FIXME parameter type
+ uint32_t flags,
+ bool exactMatchRequiredForInputFlags = false) const;
/**
* @brief areAllDevicesSupported: Checks if the given devices are supported by the IO profile.
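
Because the enumerators are ordered NO_MATCH < PARTIAL_MATCH < EXACT_MATCH, callers can rank candidates numerically; a small standalone sketch of that ordering (an assumption drawn from the values above, not policy-manager code).

#include <algorithm>
#include <cassert>
#include <vector>

// Mirrors IOProfile::CompatibilityScore: a higher value means a better fit.
enum CompatibilityScore { NO_MATCH = 0, PARTIAL_MATCH = 1, EXACT_MATCH = 2 };

int main() {
    static_assert(NO_MATCH < PARTIAL_MATCH && PARTIAL_MATCH < EXACT_MATCH,
                  "score ordering is what callers rely on");
    const std::vector<CompatibilityScore> scores = {NO_MATCH, PARTIAL_MATCH, NO_MATCH};
    // Keep the best-scoring candidate; anything above NO_MATCH is usable.
    const auto best = *std::max_element(scores.begin(), scores.end());
    assert(best == PARTIAL_MATCH);
    return 0;
}
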
diff --git a/services/audiopolicy/common/managerdefinitions/src/AudioOutputDescriptor.cpp b/services/audiopolicy/common/managerdefinitions/src/AudioOutputDescriptor.cpp
index d027564..747af4a 100644
--- a/services/audiopolicy/common/managerdefinitions/src/AudioOutputDescriptor.cpp
+++ b/services/audiopolicy/common/managerdefinitions/src/AudioOutputDescriptor.cpp
@@ -561,6 +561,7 @@
audio_port_config config = {};
devicePort->toAudioPortConfig(&config);
config.config_mask = AUDIO_PORT_CONFIG_GAIN;
+ config.gain.mode = gains[0]->getMode();
config.gain.values[0] = gainValueMb;
return mClientInterface->setAudioPortConfig(&config, 0) == NO_ERROR;
}
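
The added gain.mode assignment makes the gain config sent to the HAL carry a mode and not just a value; a self-contained sketch with simplified stand-ins for audio_port_config (the constants below are placeholders, not the real AUDIO_* values).

#include <cassert>
#include <cstdint>

// Simplified stand-ins for the pieces of audio_port_config used here.
constexpr uint32_t GAIN_MODE_JOINT = 1u;      // stand-in for AUDIO_GAIN_MODE_JOINT
constexpr uint32_t CONFIG_GAIN     = 1u << 4; // stand-in for AUDIO_PORT_CONFIG_GAIN

struct GainConfig { uint32_t mode = 0; int values[1] = {0}; };
struct PortConfig { uint32_t config_mask = 0; GainConfig gain; };

// Mirrors the change above: copy the mode advertised by the device port's gain
// so the receiver gets a fully populated gain config, not only the value.
PortConfig makeGainConfig(uint32_t portGainMode, int gainValueMb) {
    PortConfig config;
    config.config_mask = CONFIG_GAIN;
    config.gain.mode = portGainMode;  // previously left at 0
    config.gain.values[0] = gainValueMb;
    return config;
}

int main() {
    const PortConfig config = makeGainConfig(GAIN_MODE_JOINT, -1200);
    assert(config.gain.mode == GAIN_MODE_JOINT && config.gain.values[0] == -1200);
    return 0;
}
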
diff --git a/services/audiopolicy/common/managerdefinitions/src/IOProfile.cpp b/services/audiopolicy/common/managerdefinitions/src/IOProfile.cpp
index c7d2e6b..d9fbd89 100644
--- a/services/audiopolicy/common/managerdefinitions/src/IOProfile.cpp
+++ b/services/audiopolicy/common/managerdefinitions/src/IOProfile.cpp
@@ -33,17 +33,17 @@
}
}
-bool IOProfile::isCompatibleProfile(const DeviceVector &devices,
- uint32_t samplingRate,
- uint32_t *updatedSamplingRate,
- audio_format_t format,
- audio_format_t *updatedFormat,
- audio_channel_mask_t channelMask,
- audio_channel_mask_t *updatedChannelMask,
- // FIXME type punning here
- uint32_t flags,
- bool exactMatchRequiredForInputFlags) const
-{
+IOProfile::CompatibilityScore IOProfile::getCompatibilityScore(
+ const android::DeviceVector &devices,
+ uint32_t samplingRate,
+ uint32_t *updatedSamplingRate,
+ audio_format_t format,
+ audio_format_t *updatedFormat,
+ audio_channel_mask_t channelMask,
+ audio_channel_mask_t *updatedChannelMask,
+ // FIXME type punning here
+ uint32_t flags,
+ bool exactMatchRequiredForInputFlags) const {
const bool isPlaybackThread =
getType() == AUDIO_PORT_TYPE_MIX && getRole() == AUDIO_PORT_ROLE_SOURCE;
const bool isRecordThread =
@@ -51,13 +51,13 @@
ALOG_ASSERT(isPlaybackThread != isRecordThread);
if (!areAllDevicesSupported(devices) ||
!isCompatibleProfileForFlags(flags, exactMatchRequiredForInputFlags)) {
- return false;
+ return NO_MATCH;
}
if (!audio_is_valid_format(format) ||
(isPlaybackThread && (samplingRate == 0 || !audio_is_output_channel(channelMask))) ||
(isRecordThread && (!audio_is_input_channel(channelMask)))) {
- return false;
+ return NO_MATCH;
}
audio_format_t myUpdatedFormat = format;
@@ -69,32 +69,40 @@
.channel_mask = channelMask,
.format = format,
};
+ auto result = NO_MATCH;
if (isRecordThread)
{
if ((flags & AUDIO_INPUT_FLAG_MMAP_NOIRQ) != 0) {
if (checkExactAudioProfile(&config) != NO_ERROR) {
- return false;
+ return result;
}
- } else if (checkExactAudioProfile(&config) != NO_ERROR && checkCompatibleAudioProfile(
- myUpdatedSamplingRate, myUpdatedChannelMask, myUpdatedFormat) != NO_ERROR) {
- return false;
+ result = EXACT_MATCH;
+ } else if (checkExactAudioProfile(&config) == NO_ERROR) {
+ result = EXACT_MATCH;
+ } else if (checkCompatibleAudioProfile(
+ myUpdatedSamplingRate, myUpdatedChannelMask, myUpdatedFormat) == NO_ERROR) {
+ result = PARTIAL_MATCH;
+ } else {
+ return result;
}
} else {
- if (checkExactAudioProfile(&config) != NO_ERROR) {
- return false;
+ if (checkExactAudioProfile(&config) == NO_ERROR) {
+ result = EXACT_MATCH;
+ } else {
+ return result;
}
}
- if (updatedSamplingRate != NULL) {
+ if (updatedSamplingRate != nullptr) {
*updatedSamplingRate = myUpdatedSamplingRate;
}
- if (updatedFormat != NULL) {
+ if (updatedFormat != nullptr) {
*updatedFormat = myUpdatedFormat;
}
- if (updatedChannelMask != NULL) {
+ if (updatedChannelMask != nullptr) {
*updatedChannelMask = myUpdatedChannelMask;
}
- return true;
+ return result;
}
bool IOProfile::areAllDevicesSupported(const DeviceVector &devices) const {
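
A hedged sketch of the caller-side pattern getCompatibilityScore() enables: return immediately on an exact match and remember the first partial match as a fallback. The Profile struct and score values below are simplified stand-ins, not the AudioPolicyManager code.

#include <cassert>
#include <string>
#include <vector>

enum CompatibilityScore { NO_MATCH = 0, PARTIAL_MATCH = 1, EXACT_MATCH = 2 };

struct Profile {
    std::string name;
    CompatibilityScore score;  // stand-in for getCompatibilityScore(...)
};

// Prefer an exact match; otherwise fall back to the first partial match.
const Profile* pickProfile(const std::vector<Profile>& profiles) {
    const Profile* firstInexact = nullptr;
    for (const Profile& profile : profiles) {
        if (profile.score == EXACT_MATCH) {
            return &profile;
        }
        if (firstInexact == nullptr && profile.score != NO_MATCH) {
            firstInexact = &profile;
        }
    }
    return firstInexact;  // may be nullptr when nothing matched
}

int main() {
    const std::vector<Profile> profiles = {
        {"mic_mono", PARTIAL_MATCH}, {"mic_stereo", EXACT_MATCH}, {"hdmi_in", NO_MATCH}};
    assert(pickProfile(profiles)->name == "mic_stereo");
    return 0;
}
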
diff --git a/services/audiopolicy/engine/common/Android.bp b/services/audiopolicy/engine/common/Android.bp
index d7eb2c8..878e0e9 100644
--- a/services/audiopolicy/engine/common/Android.bp
+++ b/services/audiopolicy/engine/common/Android.bp
@@ -61,4 +61,7 @@
"com.android.media.audio-aconfig-cc",
"server_configurable_flags",
],
+ defaults: [
+ "aconfig_lib_cc_static_link.defaults",
+ ],
}
diff --git a/services/audiopolicy/engine/config/src/EngineConfig.cpp b/services/audiopolicy/engine/config/src/EngineConfig.cpp
index ca78ce7..3f9ae19 100644
--- a/services/audiopolicy/engine/config/src/EngineConfig.cpp
+++ b/services/audiopolicy/engine/config/src/EngineConfig.cpp
@@ -22,6 +22,7 @@
#include <string>
#include <string>
#include <vector>
+#include <unordered_map>
#define LOG_TAG "APM::AudioPolicyEngine/Config"
//#define LOG_NDEBUG 0
@@ -51,6 +52,27 @@
namespace {
+ConversionResult<std::string> aidl2legacy_AudioHalProductStrategy_ProductStrategyType(int id) {
+ using AudioProductStrategyType = media::audio::common::AudioProductStrategyType;
+
+#define STRATEGY_ENTRY(name) {static_cast<int>(AudioProductStrategyType::name), "STRATEGY_" #name}
+ static const std::unordered_map<int, std::string> productStrategyMap = {STRATEGY_ENTRY(MEDIA),
+ STRATEGY_ENTRY(PHONE),
+ STRATEGY_ENTRY(SONIFICATION),
+ STRATEGY_ENTRY(SONIFICATION_RESPECTFUL),
+ STRATEGY_ENTRY(DTMF),
+ STRATEGY_ENTRY(ENFORCED_AUDIBLE),
+ STRATEGY_ENTRY(TRANSMITTED_THROUGH_SPEAKER),
+ STRATEGY_ENTRY(ACCESSIBILITY)};
+#undef STRATEGY_ENTRY
+
+ auto it = productStrategyMap.find(id);
+ if (it == productStrategyMap.end()) {
+ return base::unexpected(BAD_VALUE);
+ }
+ return it->second;
+}
+
ConversionResult<AttributesGroup> aidl2legacy_AudioHalAttributeGroup_AttributesGroup(
const media::audio::common::AudioHalAttributesGroup& aidl) {
AttributesGroup legacy;
@@ -65,7 +87,8 @@
ConversionResult<ProductStrategy> aidl2legacy_AudioHalProductStrategy_ProductStrategy(
const media::audio::common::AudioHalProductStrategy& aidl) {
ProductStrategy legacy;
- legacy.name = "strategy_" + std::to_string(aidl.id);
+ legacy.name = VALUE_OR_RETURN(
+ aidl2legacy_AudioHalProductStrategy_ProductStrategyType(aidl.id));
legacy.attributesGroups = VALUE_OR_RETURN(convertContainer<AttributesGroups>(
aidl.attributesGroups,
aidl2legacy_AudioHalAttributeGroup_AttributesGroup));
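
A standalone sketch of the id-to-name lookup technique used above: an X-macro style entry builds the table, and unknown ids are reported as an error instead of falling back to a synthesized "strategy_<id>" name. The enum and its values here are illustrative assumptions, not the AIDL definitions.

#include <cassert>
#include <optional>
#include <string>
#include <unordered_map>

// Illustrative subset of strategy ids (actual values are assumptions).
enum class StrategyType : int { MEDIA = 0, PHONE = 1, SONIFICATION = 2 };

std::optional<std::string> strategyIdToLegacyName(int id) {
#define STRATEGY_ENTRY(name) {static_cast<int>(StrategyType::name), "STRATEGY_" #name}
    static const std::unordered_map<int, std::string> kMap = {
            STRATEGY_ENTRY(MEDIA), STRATEGY_ENTRY(PHONE), STRATEGY_ENTRY(SONIFICATION)};
#undef STRATEGY_ENTRY
    const auto it = kMap.find(id);
    if (it == kMap.end()) {
        return std::nullopt;  // the caller treats this as BAD_VALUE
    }
    return it->second;
}

int main() {
    assert(strategyIdToLegacyName(0) == std::string("STRATEGY_MEDIA"));
    assert(!strategyIdToLegacyName(42).has_value());
    return 0;
}
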
diff --git a/services/audiopolicy/engineconfigurable/Android.bp b/services/audiopolicy/engineconfigurable/Android.bp
index aaf89a0..2c3c4be 100644
--- a/services/audiopolicy/engineconfigurable/Android.bp
+++ b/services/audiopolicy/engineconfigurable/Android.bp
@@ -53,4 +53,7 @@
"libutils",
"libxml2",
],
+ defaults: [
+ "aconfig_lib_cc_static_link.defaults",
+ ],
}
diff --git a/services/audiopolicy/enginedefault/Android.bp b/services/audiopolicy/enginedefault/Android.bp
index 1563d5f..f5958ba 100644
--- a/services/audiopolicy/enginedefault/Android.bp
+++ b/services/audiopolicy/enginedefault/Android.bp
@@ -40,4 +40,7 @@
"libutils",
"libxml2",
],
+ defaults: [
+ "aconfig_lib_cc_static_link.defaults",
+ ],
}
diff --git a/services/audiopolicy/managerdefault/AudioPolicyManager.cpp b/services/audiopolicy/managerdefault/AudioPolicyManager.cpp
index d427de4..15c6a75 100644
--- a/services/audiopolicy/managerdefault/AudioPolicyManager.cpp
+++ b/services/audiopolicy/managerdefault/AudioPolicyManager.cpp
@@ -390,8 +390,12 @@
// Before checking inputs, broadcast connect event to allow HAL to retrieve dynamic
// parameters on newly connected devices (instead of opening the inputs...)
broadcastDeviceConnectionState(device, media::DeviceConnectedState::CONNECTED);
+ // Propagate device availability to Engine
+ setEngineDeviceConnectionState(device, state);
if (checkInputsForDevice(device, state) != NO_ERROR) {
+ setEngineDeviceConnectionState(device, AUDIO_POLICY_DEVICE_STATE_UNAVAILABLE);
+
mAvailableInputDevices.remove(device);
broadcastDeviceConnectionState(device, media::DeviceConnectedState::DISCONNECTED);
@@ -425,6 +429,9 @@
// remove device from mReportedFormatsMap cache
mReportedFormatsMap.erase(device);
+
+ // Propagate device availability to Engine
+ setEngineDeviceConnectionState(device, state);
} break;
default:
@@ -432,9 +439,6 @@
return BAD_VALUE;
}
- // Propagate device availability to Engine
- setEngineDeviceConnectionState(device, state);
-
checkCloseInputs();
// As the input device list can impact the output device selection, update
// getDeviceForStrategy() cache
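
A hedged sketch of the connection ordering introduced in this hunk: the engine learns about the device before inputs are probed, and the state is rolled back if probing fails. The callbacks below are stand-ins, not AudioPolicyManager APIs.

#include <cassert>
#include <functional>

enum class DeviceState { AVAILABLE, UNAVAILABLE };

// Tell the engine the device is available before probing inputs, and undo the
// early propagation when probing fails.
bool connectInputDevice(const std::function<void(DeviceState)>& setEngineState,
                        const std::function<bool()>& checkInputsForDevice) {
    setEngineState(DeviceState::AVAILABLE);
    if (!checkInputsForDevice()) {
        setEngineState(DeviceState::UNAVAILABLE);  // roll back on failure
        return false;
    }
    return true;
}

int main() {
    DeviceState engineState = DeviceState::UNAVAILABLE;
    // Probing fails: the engine must end up back in UNAVAILABLE.
    const bool ok = connectInputDevice(
            [&](DeviceState s) { engineState = s; },
            [] { return false; });
    assert(!ok && engineState == DeviceState::UNAVAILABLE);
    return 0;
}
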
@@ -1043,11 +1047,11 @@
sp<IOProfile> profile;
for (const auto& hwModule : hwModules) {
for (const auto& curProfile : hwModule->getOutputProfiles()) {
- if (!curProfile->isCompatibleProfile(devices,
+ if (curProfile->getCompatibilityScore(devices,
samplingRate, NULL /*updatedSamplingRate*/,
format, NULL /*updatedFormat*/,
channelMask, NULL /*updatedChannelMask*/,
- flags)) {
+ flags) == IOProfile::NO_MATCH) {
continue;
}
// reject profiles not corresponding to a device currently available
@@ -3175,6 +3179,23 @@
releaseInput(portId);
}
+bool AudioPolicyManager::checkCloseInput(const sp<AudioInputDescriptor>& input) {
+ if (input->clientsList().size() == 0
+ || !mAvailableInputDevices.containsAtLeastOne(input->supportedDevices())) {
+ return true;
+ }
+ for (const auto& client : input->clientsList()) {
+ sp<DeviceDescriptor> device =
+ mEngine->getInputDeviceForAttributes(client->attributes(), client->uid(),
+ client->session());
+ if (!input->supportedDevices().contains(device)) {
+ return true;
+ }
+ }
+ setInputDevice(input->mIoHandle, getNewInputDevice(input));
+ return false;
+}
+
void AudioPolicyManager::checkCloseInputs() {
// After connecting or disconnecting an input device, close input if:
// - it has no client (was just opened to check profile) OR
@@ -3183,29 +3204,10 @@
// devices anymore. Otherwise update device selection
std::vector<audio_io_handle_t> inputsToClose;
for (size_t i = 0; i < mInputs.size(); i++) {
- const sp<AudioInputDescriptor> input = mInputs.valueAt(i);
- if (input->clientsList().size() == 0
- || !mAvailableInputDevices.containsAtLeastOne(input->supportedDevices())) {
+ if (checkCloseInput(mInputs.valueAt(i))) {
inputsToClose.push_back(mInputs.keyAt(i));
- } else {
- bool close = false;
- for (const auto& client : input->clientsList()) {
- sp<DeviceDescriptor> device =
- mEngine->getInputDeviceForAttributes(client->attributes(), client->uid(),
- client->session());
- if (!input->supportedDevices().contains(device)) {
- close = true;
- break;
- }
- }
- if (close) {
- inputsToClose.push_back(mInputs.keyAt(i));
- } else {
- setInputDevice(input->mIoHandle, getNewInputDevice(input));
- }
}
}
-
for (const audio_io_handle_t handle : inputsToClose) {
ALOGV("%s closing input %d", __func__, handle);
closeInput(handle);
@@ -4463,11 +4465,11 @@
outputDevices = getMsdAudioOutDevices();
}
for (const auto& curProfile : hwModule->getOutputProfiles()) {
- if (!curProfile->isCompatibleProfile(outputDevices,
+ if (curProfile->getCompatibilityScore(outputDevices,
config->sample_rate, nullptr /*updatedSamplingRate*/,
config->format, nullptr /*updatedFormat*/,
config->channel_mask, nullptr /*updatedChannelMask*/,
- flags)) {
+ flags) == IOProfile::NO_MATCH) {
continue;
}
// reject profiles not corresponding to a device currently available
@@ -4573,15 +4575,17 @@
for (const auto& hwModule : mHwModules) {
for (const auto& curProfile : hwModule->getOutputProfiles()) {
if (curProfile->hasDynamicAudioProfile()
- && curProfile->isCompatibleProfile(devices,
- mixerAttributes->config.sample_rate,
- nullptr /*updatedSamplingRate*/,
- mixerAttributes->config.format,
- nullptr /*updatedFormat*/,
- mixerAttributes->config.channel_mask,
- nullptr /*updatedChannelMask*/,
- flags,
- false /*exactMatchRequiredForInputFlags*/)) {
+ && curProfile->getCompatibilityScore(
+ devices,
+ mixerAttributes->config.sample_rate,
+ nullptr /*updatedSamplingRate*/,
+ mixerAttributes->config.format,
+ nullptr /*updatedFormat*/,
+ mixerAttributes->config.channel_mask,
+ nullptr /*updatedChannelMask*/,
+ flags,
+ false /*exactMatchRequiredForInputFlags*/)
+ != IOProfile::NO_MATCH) {
profile = curProfile;
break;
}
@@ -4984,14 +4988,15 @@
return BAD_VALUE;
}
- if (!outputDesc->mProfile->isCompatibleProfile(DeviceVector(devDesc),
- patch->sources[0].sample_rate,
- NULL, // updatedSamplingRate
- patch->sources[0].format,
- NULL, // updatedFormat
- patch->sources[0].channel_mask,
- NULL, // updatedChannelMask
- AUDIO_OUTPUT_FLAG_NONE /*FIXME*/)) {
+ if (outputDesc->mProfile->getCompatibilityScore(
+ DeviceVector(devDesc),
+ patch->sources[0].sample_rate,
+ nullptr, // updatedSamplingRate
+ patch->sources[0].format,
+ nullptr, // updatedFormat
+ patch->sources[0].channel_mask,
+ nullptr, // updatedChannelMask
+ AUDIO_OUTPUT_FLAG_NONE /*FIXME*/) == IOProfile::NO_MATCH) {
ALOGV("%s profile not supported for device %08x", __func__, devDesc->type());
return INVALID_OPERATION;
}
@@ -5039,17 +5044,18 @@
return BAD_VALUE;
}
- if (!inputDesc->mProfile->isCompatibleProfile(DeviceVector(device),
- patch->sinks[0].sample_rate,
- NULL, /*updatedSampleRate*/
- patch->sinks[0].format,
- NULL, /*updatedFormat*/
- patch->sinks[0].channel_mask,
- NULL, /*updatedChannelMask*/
- // FIXME for the parameter type,
- // and the NONE
- (audio_output_flags_t)
- AUDIO_INPUT_FLAG_NONE)) {
+ if (inputDesc->mProfile->getCompatibilityScore(
+ DeviceVector(device),
+ patch->sinks[0].sample_rate,
+ nullptr, /*updatedSampleRate*/
+ patch->sinks[0].format,
+ nullptr, /*updatedFormat*/
+ patch->sinks[0].channel_mask,
+ nullptr, /*updatedChannelMask*/
+ // FIXME for the parameter type,
+ // and the NONE
+ (audio_output_flags_t)
+ AUDIO_INPUT_FLAG_NONE) == IOProfile::NO_MATCH) {
return INVALID_OPERATION;
}
// TODO: reconfigure output format and channels here
@@ -6397,6 +6403,15 @@
if ((desc->mFlags & AUDIO_OUTPUT_FLAG_SPATIALIZER) != 0
&& !isOutputOnlyAvailableRouteToSomeDevice(desc)) {
outputsClosed.push_back(desc->mIoHandle);
+ nextAudioPortGeneration();
+ ssize_t index = mAudioPatches.indexOfKey(desc->getPatchHandle());
+ if (index >= 0) {
+ sp<AudioPatch> patchDesc = mAudioPatches.valueAt(index);
+ (void) /*status_t status*/ mpClientInterface->releaseAudioPatch(
+ patchDesc->getAfHandle(), 0);
+ mAudioPatches.removeItemsAt(index);
+ mpClientInterface->onAudioPatchListUpdate();
+ }
desc->close();
}
}
@@ -6597,14 +6612,14 @@
status_t AudioPolicyManager::checkInputsForDevice(const sp<DeviceDescriptor>& device,
audio_policy_dev_state_t state)
{
- sp<AudioInputDescriptor> desc;
-
if (audio_device_is_digital(device->type())) {
// erase all current sample rates, formats and channel masks
device->clearAudioProfiles();
}
if (state == AUDIO_POLICY_DEVICE_STATE_AVAILABLE) {
+ sp<AudioInputDescriptor> desc;
+
// first call getAudioPort to get the supported attributes from the HAL
struct audio_port_v7 port = {};
device->toAudioPort(&port);
@@ -6695,6 +6710,11 @@
device->importAudioPortAndPickAudioProfile(profile);
}
ALOGV("checkInputsForDevice(): adding input %d", input);
+
+ if (checkCloseInput(desc)) {
+ ALOGV("%s closing input %d", __func__, input);
+ closeInput(input);
+ }
}
} // end scan profiles
@@ -7666,9 +7686,6 @@
// Choose an input profile based on the requested capture parameters: select the first available
// profile supporting all requested parameters.
// The flags can be ignored if they do not contain a must match flag.
- //
- // TODO: perhaps isCompatibleProfile should return a "matching" score so we can return
- // the best matching profile, not the first one.
using underlying_input_flag_t = std::underlying_type_t<audio_input_flags_t>;
const underlying_input_flag_t mustMatchFlag = AUDIO_INPUT_FLAG_MMAP_NOIRQ |
@@ -7685,27 +7702,35 @@
for (const auto& profile : hwModule->getInputProfiles()) {
// profile->log();
//updatedFormat = format;
- if (profile->isCompatibleProfile(DeviceVector(device), samplingRate,
- &samplingRate /*updatedSamplingRate*/,
- format,
- &format, /*updatedFormat*/
- channelMask,
- &channelMask /*updatedChannelMask*/,
- // FIXME ugly cast
- (audio_output_flags_t) flags,
- true /*exactMatchRequiredForInputFlags*/)) {
+ if (profile->getCompatibilityScore(
+ DeviceVector(device),
+ samplingRate,
+ &updatedSamplingRate,
+ format,
+ &updatedFormat,
+ channelMask,
+ &updatedChannelMask,
+ // FIXME ugly cast
+ (audio_output_flags_t) flags,
+ true /*exactMatchRequiredForInputFlags*/) == IOProfile::EXACT_MATCH) {
+ samplingRate = updatedSamplingRate;
+ format = updatedFormat;
+ channelMask = updatedChannelMask;
return profile;
}
- if (firstInexact == nullptr && profile->isCompatibleProfile(DeviceVector(device),
- samplingRate,
- &updatedSamplingRate,
- format,
- &updatedFormat,
- channelMask,
- &updatedChannelMask,
- // FIXME ugly cast
- (audio_output_flags_t) flags,
- false /*exactMatchRequiredForInputFlags*/)) {
+ if (firstInexact == nullptr
+ && profile->getCompatibilityScore(
+ DeviceVector(device),
+ samplingRate,
+ &updatedSamplingRate,
+ format,
+ &updatedFormat,
+ channelMask,
+ &updatedChannelMask,
+ // FIXME ugly cast
+ (audio_output_flags_t) flags,
+ false /*exactMatchRequiredForInputFlags*/)
+ != IOProfile::NO_MATCH) {
firstInexact = profile;
}
}
@@ -7910,7 +7935,7 @@
if (deviceTypes.empty()) {
deviceTypes = outputDesc->devices().types();
index = curves.getVolumeIndex(deviceTypes);
- ALOGD("%s if deviceTypes is change from none to device %s, need get index %d",
+ ALOGV("%s if deviceTypes is change from none to device %s, need get index %d",
__func__, dumpDeviceTypes(deviceTypes).c_str(), index);
}
diff --git a/services/audiopolicy/managerdefault/AudioPolicyManager.h b/services/audiopolicy/managerdefault/AudioPolicyManager.h
index 61be09f..7513952 100644
--- a/services/audiopolicy/managerdefault/AudioPolicyManager.h
+++ b/services/audiopolicy/managerdefault/AudioPolicyManager.h
@@ -526,6 +526,7 @@
void addOutput(audio_io_handle_t output, const sp<SwAudioOutputDescriptor>& outputDesc);
void removeOutput(audio_io_handle_t output);
void addInput(audio_io_handle_t input, const sp<AudioInputDescriptor>& inputDesc);
+ bool checkCloseInput(const sp<AudioInputDescriptor>& input);
/**
* @brief setOutputDevices change the route of the specified output.
diff --git a/services/audiopolicy/tests/AudioPolicyManagerTestClient.h b/services/audiopolicy/tests/AudioPolicyManagerTestClient.h
index 7ef0266..072d9c0 100644
--- a/services/audiopolicy/tests/AudioPolicyManagerTestClient.h
+++ b/services/audiopolicy/tests/AudioPolicyManagerTestClient.h
@@ -70,6 +70,22 @@
return BAD_VALUE;
}
*input = mNextIoHandle++;
+ mOpenedInputs.insert(*input);
+ ALOGD("%s: opened input %d", __func__, *input);
+ return NO_ERROR;
+ }
+
+ status_t closeInput(audio_io_handle_t input) override {
+ if (mOpenedInputs.erase(input) != 1) {
+ if (input >= mNextIoHandle) {
+ ALOGE("%s: I/O handle %d has not been allocated yet (next is %d)",
+ __func__, input, mNextIoHandle);
+ } else {
+ ALOGE("%s: Attempt to close input %d twice", __func__, input);
+ }
+ return BAD_VALUE;
+ }
+ ALOGD("%s: closed input %d", __func__, input);
return NO_ERROR;
}
@@ -124,6 +140,8 @@
return &it->second;
};
+ size_t getOpenedInputsCount() const { return mOpenedInputs.size(); }
+
audio_module_handle_t peekNextModuleHandle() const { return mNextModuleHandle; }
void swapAllowedModuleNames(std::set<std::string>&& names = {}) {
@@ -241,6 +259,7 @@
std::vector<struct audio_port_v7> mDisconnectedDevicePorts;
std::set<audio_format_t> mSupportedFormats;
std::set<audio_channel_mask_t> mSupportedChannelMasks;
+ std::set<audio_io_handle_t> mOpenedInputs;
};
} // namespace android
diff --git a/services/audiopolicy/tests/AudioPolicyTestManager.h b/services/audiopolicy/tests/AudioPolicyTestManager.h
index 31ee252..34ceeab 100644
--- a/services/audiopolicy/tests/AudioPolicyTestManager.h
+++ b/services/audiopolicy/tests/AudioPolicyTestManager.h
@@ -31,8 +31,10 @@
using AudioPolicyManager::getConfig;
using AudioPolicyManager::initialize;
using AudioPolicyManager::getOutputs;
+ using AudioPolicyManager::getInputs;
using AudioPolicyManager::getAvailableOutputDevices;
using AudioPolicyManager::getAvailableInputDevices;
+ using AudioPolicyManager::checkInputsForDevice;
using AudioPolicyManager::setSurroundFormatEnabled;
using AudioPolicyManager::releaseMsdOutputPatches;
using AudioPolicyManager::setMsdOutputPatches;
@@ -43,6 +45,7 @@
using AudioPolicyManager::deviceToAudioPort;
using AudioPolicyManager::handleDeviceConfigChange;
uint32_t getAudioPortGeneration() const { return mAudioPortGeneration; }
+ HwModuleCollection getHwModules() const { return mHwModules; }
};
} // namespace android
diff --git a/services/audiopolicy/tests/audiopolicymanager_tests.cpp b/services/audiopolicy/tests/audiopolicymanager_tests.cpp
index 8642fd4..4d9d87c 100644
--- a/services/audiopolicy/tests/audiopolicymanager_tests.cpp
+++ b/services/audiopolicy/tests/audiopolicymanager_tests.cpp
@@ -92,6 +92,12 @@
return attributionSourceState;
}
+bool equals(const audio_config_base_t& config1, const audio_config_base_t& config2) {
+ return config1.format == config2.format
+ && config1.sample_rate == config2.sample_rate
+ && config1.channel_mask == config2.channel_mask;
+}
+
} // namespace
TEST(AudioPolicyConfigTest, DefaultConfigForTestsIsEmpty) {
@@ -1266,6 +1272,82 @@
"", "", AUDIO_FORMAT_LDAC));
}
+TEST_F(AudioPolicyManagerTestWithConfigurationFile, PreferExactConfigForInput) {
+ const audio_channel_mask_t deviceChannelMask = AUDIO_CHANNEL_IN_3POINT1;
+ mClient->addSupportedFormat(AUDIO_FORMAT_PCM_16_BIT);
+ mClient->addSupportedChannelMask(deviceChannelMask);
+ ASSERT_EQ(NO_ERROR, mManager->setDeviceConnectionState(AUDIO_DEVICE_IN_USB_DEVICE,
+ AUDIO_POLICY_DEVICE_STATE_AVAILABLE,
+ "", "", AUDIO_FORMAT_DEFAULT));
+
+ audio_port_handle_t selectedDeviceId = AUDIO_PORT_HANDLE_NONE;
+ audio_attributes_t attr = {AUDIO_CONTENT_TYPE_UNKNOWN, AUDIO_USAGE_UNKNOWN,
+ AUDIO_SOURCE_VOICE_COMMUNICATION, AUDIO_FLAG_NONE, ""};
+ AudioPolicyInterface::input_type_t inputType;
+ audio_io_handle_t input = AUDIO_PORT_HANDLE_NONE;
+ AttributionSourceState attributionSource = createAttributionSourceState(/*uid=*/ 0);
+ audio_config_base_t requestedConfig = {
+ .channel_mask = AUDIO_CHANNEL_IN_STEREO,
+ .format = AUDIO_FORMAT_PCM_16_BIT,
+ .sample_rate = 48000
+ };
+ audio_config_base_t config = requestedConfig;
+ audio_port_handle_t portId = AUDIO_PORT_HANDLE_NONE;
+ ASSERT_EQ(OK, mManager->getInputForAttr(
+ &attr, &input, 1 /*riid*/, AUDIO_SESSION_NONE, attributionSource, &config,
+ AUDIO_INPUT_FLAG_NONE,
+ &selectedDeviceId, &inputType, &portId));
+ ASSERT_NE(AUDIO_PORT_HANDLE_NONE, portId);
+ ASSERT_TRUE(equals(requestedConfig, config));
+
+ attr = {AUDIO_CONTENT_TYPE_UNKNOWN, AUDIO_USAGE_UNKNOWN,
+ AUDIO_SOURCE_VOICE_COMMUNICATION, AUDIO_FLAG_NONE, ""};
+ requestedConfig.channel_mask = deviceChannelMask;
+ config = requestedConfig;
+ selectedDeviceId = AUDIO_PORT_HANDLE_NONE;
+ input = AUDIO_PORT_HANDLE_NONE;
+ portId = AUDIO_PORT_HANDLE_NONE;
+ ASSERT_EQ(OK, mManager->getInputForAttr(
+ &attr, &input, 1 /*riid*/, AUDIO_SESSION_NONE, attributionSource, &config,
+ AUDIO_INPUT_FLAG_NONE,
+ &selectedDeviceId, &inputType, &portId));
+ ASSERT_NE(AUDIO_PORT_HANDLE_NONE, portId);
+ ASSERT_TRUE(equals(requestedConfig, config));
+
+ ASSERT_EQ(NO_ERROR, mManager->setDeviceConnectionState(AUDIO_DEVICE_IN_USB_DEVICE,
+ AUDIO_POLICY_DEVICE_STATE_UNAVAILABLE,
+ "", "", AUDIO_FORMAT_DEFAULT));
+}
+
+TEST_F(AudioPolicyManagerTestWithConfigurationFile, CheckInputsForDeviceClosesStreams) {
+ mClient->addSupportedFormat(AUDIO_FORMAT_PCM_16_BIT);
+ mClient->addSupportedFormat(AUDIO_FORMAT_PCM_24_BIT_PACKED);
+ mClient->addSupportedChannelMask(AUDIO_CHANNEL_IN_MONO);
+ mClient->addSupportedChannelMask(AUDIO_CHANNEL_IN_STEREO);
+ // 'checkInputsForDevice' is normally called as part of 'setDeviceConnectionState';
+ // call it directly here to verify that it does not keep all intermediate
+ // streams open, which could cause a rejection from the HAL based on the stream cap.
+ const size_t streamCountBefore = mClient->getOpenedInputsCount();
+ sp<DeviceDescriptor> device = mManager->getHwModules().getDeviceDescriptor(
+ AUDIO_DEVICE_IN_USB_DEVICE, "", "", AUDIO_FORMAT_DEFAULT, true /*allowToCreate*/);
+ ASSERT_NE(nullptr, device.get());
+ EXPECT_EQ(NO_ERROR,
+ mManager->checkInputsForDevice(device, AUDIO_POLICY_DEVICE_STATE_AVAILABLE));
+ EXPECT_EQ(streamCountBefore, mClient->getOpenedInputsCount());
+}
+
+TEST_F(AudioPolicyManagerTestWithConfigurationFile, SetDeviceConnectionStateClosesStreams) {
+ mClient->addSupportedFormat(AUDIO_FORMAT_PCM_16_BIT);
+ mClient->addSupportedFormat(AUDIO_FORMAT_PCM_24_BIT_PACKED);
+ mClient->addSupportedChannelMask(AUDIO_CHANNEL_IN_MONO);
+ mClient->addSupportedChannelMask(AUDIO_CHANNEL_IN_STEREO);
+ const size_t streamCountBefore = mClient->getOpenedInputsCount();
+ EXPECT_EQ(NO_ERROR, mManager->setDeviceConnectionState(AUDIO_DEVICE_IN_USB_DEVICE,
+ AUDIO_POLICY_DEVICE_STATE_AVAILABLE,
+ "", "", AUDIO_FORMAT_DEFAULT));
+ EXPECT_EQ(streamCountBefore, mClient->getOpenedInputsCount());
+}
+
class AudioPolicyManagerTestDynamicPolicy : public AudioPolicyManagerTestWithConfigurationFile {
protected:
void TearDown() override;
diff --git a/services/audiopolicy/tests/resources/test_audio_policy_configuration.xml b/services/audiopolicy/tests/resources/test_audio_policy_configuration.xml
index 4efdf8a..bbc19fa 100644
--- a/services/audiopolicy/tests/resources/test_audio_policy_configuration.xml
+++ b/services/audiopolicy/tests/resources/test_audio_policy_configuration.xml
@@ -65,6 +65,7 @@
samplingRates="48000"
channelMasks="AUDIO_CHANNEL_IN_STEREO"/>
</mixPort>
+ <mixPort name="hifi_input" role="sink" />
</mixPorts>
<devicePorts>
<devicePort tagName="Speaker" type="AUDIO_DEVICE_OUT_SPEAKER" role="sink">
@@ -98,7 +99,7 @@
<route type="mix" sink="primary input"
sources="Built-In Mic,Hdmi-In Mic,USB Device In"/>
<route type="mix" sink="voip_tx"
- sources="Built-In Mic"/>
+ sources="Built-In Mic,USB Device In"/>
<route type="mix" sink="Hdmi"
sources="primary output"/>
<route type="mix" sink="BT SCO"
@@ -111,6 +112,8 @@
sources="primary output,hifi_output,mmap_no_irq_out"/>
<route type="mix" sink="mixport_bus_input"
sources="BUS Device In"/>
+ <route type="mix" sink="hifi_input"
+ sources="USB Device In" />
</routes>
</module>
diff --git a/services/camera/libcameraservice/device3/Camera3Device.cpp b/services/camera/libcameraservice/device3/Camera3Device.cpp
index 2718604..58b3e51 100644
--- a/services/camera/libcameraservice/device3/Camera3Device.cpp
+++ b/services/camera/libcameraservice/device3/Camera3Device.cpp
@@ -3649,19 +3649,18 @@
cleanUpFailedRequests(/*sendRequestError*/ true);
// Check if any stream is abandoned.
checkAndStopRepeatingRequest();
+ // Inform waitUntilRequestProcessed thread of a failed request ID
+ wakeupLatestRequest(/*failedRequestId*/true, latestRequestId);
return true;
} else if (res != OK) {
cleanUpFailedRequests(/*sendRequestError*/ false);
+ // Inform waitUntilRequestProcessed thread of a failed request ID
+ wakeupLatestRequest(/*failedRequestId*/true, latestRequestId);
return false;
}
// Inform waitUntilRequestProcessed thread of a new request ID
- {
- Mutex::Autolock al(mLatestRequestMutex);
-
- mLatestRequestId = latestRequestId;
- mLatestRequestSignal.signal();
- }
+ wakeupLatestRequest(/*failedRequestId*/false, latestRequestId);
// Submit a batch of requests to HAL.
// Use flush lock only when submitting multiple requests in a batch.
@@ -4393,12 +4392,7 @@
hardware::camera2::ICameraDeviceCallbacks::ERROR_CAMERA_REQUEST,
captureRequest->mResultExtras);
}
- {
- Mutex::Autolock al(mLatestRequestMutex);
-
- mLatestFailedRequestId = captureRequest->mResultExtras.requestId;
- mLatestRequestSignal.signal();
- }
+ wakeupLatestRequest(/*failedRequestId*/true, captureRequest->mResultExtras.requestId);
}
// Remove yet-to-be submitted inflight request from inflightMap
@@ -5060,6 +5054,20 @@
return OK;
}
+void Camera3Device::RequestThread::wakeupLatestRequest(
+ bool latestRequestFailed,
+ int32_t latestRequestId) {
+ Mutex::Autolock al(mLatestRequestMutex);
+
+ if (latestRequestFailed) {
+ mLatestFailedRequestId = latestRequestId;
+ } else {
+ mLatestRequestId = latestRequestId;
+ }
+ mLatestRequestSignal.signal();
+}
+
+
/**
* PreparerThread inner class methods
*/
diff --git a/services/camera/libcameraservice/device3/Camera3Device.h b/services/camera/libcameraservice/device3/Camera3Device.h
index 9a2f2b1..1820702 100644
--- a/services/camera/libcameraservice/device3/Camera3Device.h
+++ b/services/camera/libcameraservice/device3/Camera3Device.h
@@ -1026,6 +1026,11 @@
const sp<CaptureRequest> &request,
const CameraMetadata& injectedSessionParams);
+ /**
+ * Signal mLatestRequestSignal while holding mLatestRequestMutex.
+ **/
+ void wakeupLatestRequest(bool latestRequestFailed, int32_t latestRequestId);
+
protected:
virtual bool threadLoop();
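
A hedged sketch of the waiter that wakeupLatestRequest() pairs with: the waiting thread blocks on a condition until either the expected request id or a failed id shows up. std::mutex and std::condition_variable stand in for the Mutex/Condition types, and the waitUntilProcessed() shape below is an assumption, not the actual waitUntilRequestProcessed code.

#include <cassert>
#include <condition_variable>
#include <cstdint>
#include <mutex>
#include <thread>

struct LatestRequestState {
    std::mutex mutex;
    std::condition_variable signal;
    int32_t latestRequestId = -1;
    int32_t latestFailedRequestId = -1;

    // Producer: mirrors wakeupLatestRequest(bool latestRequestFailed, int32_t id).
    void wakeupLatestRequest(bool latestRequestFailed, int32_t id) {
        std::lock_guard<std::mutex> lock(mutex);
        (latestRequestFailed ? latestFailedRequestId : latestRequestId) = id;
        signal.notify_all();
    }

    // Consumer sketch: wait until the id was processed or reported as failed.
    bool waitUntilProcessed(int32_t requestId) {
        std::unique_lock<std::mutex> lock(mutex);
        signal.wait(lock, [&] {
            return latestRequestId >= requestId || latestFailedRequestId >= requestId;
        });
        return latestFailedRequestId < requestId;  // true if it completed normally
    }
};

int main() {
    LatestRequestState state;
    std::thread producer([&] { state.wakeupLatestRequest(/*latestRequestFailed=*/true, 7); });
    const bool succeeded = state.waitUntilProcessed(7);
    producer.join();
    assert(!succeeded);
    return 0;
}
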
diff --git a/services/camera/virtualcamera/VirtualCameraStream.cc b/services/camera/virtualcamera/VirtualCameraStream.cc
index 03da171..fad6cac 100644
--- a/services/camera/virtualcamera/VirtualCameraStream.cc
+++ b/services/camera/virtualcamera/VirtualCameraStream.cc
@@ -26,8 +26,6 @@
#include "EGL/egl.h"
#include "aidl/android/hardware/camera/device/Stream.h"
-#include "aidl/android/hardware/camera/device/StreamBuffer.h"
-#include "aidl/android/hardware/graphics/common/PixelFormat.h"
#include "aidlcommonsupport/NativeHandle.h"
#include "android/hardware_buffer.h"
#include "cutils/native_handle.h"
@@ -39,52 +37,33 @@
namespace virtualcamera {
using ::aidl::android::hardware::camera::device::Stream;
-using ::aidl::android::hardware::camera::device::StreamBuffer;
using ::aidl::android::hardware::common::NativeHandle;
-using ::aidl::android::hardware::graphics::common::PixelFormat;
namespace {
-sp<GraphicBuffer> createBlobGraphicBuffer(GraphicBufferMapper& mapper,
- buffer_handle_t bufferHandle) {
- uint64_t allocationSize;
- uint64_t usage;
- uint64_t layerCount;
- if (mapper.getAllocationSize(bufferHandle, &allocationSize) != NO_ERROR ||
- mapper.getUsage(bufferHandle, &usage) != NO_ERROR ||
- mapper.getLayerCount(bufferHandle, &layerCount) != NO_ERROR) {
- ALOGE("Error fetching metadata for the imported BLOB buffer handle.");
- return nullptr;
- }
-
- return sp<GraphicBuffer>::make(
- bufferHandle, GraphicBuffer::HandleWrapMethod::TAKE_HANDLE,
- allocationSize, /*height=*/1, static_cast<int>(ui::PixelFormat::BLOB),
- layerCount, usage, 0);
-}
-
-sp<GraphicBuffer> createYCbCr420GraphicBuffer(GraphicBufferMapper& mapper,
- buffer_handle_t bufferHandle) {
+sp<GraphicBuffer> createGraphicBuffer(GraphicBufferMapper& mapper,
+ const buffer_handle_t bufferHandle) {
uint64_t width;
uint64_t height;
uint64_t usage;
uint64_t layerCount;
+ ui::PixelFormat pixelFormat;
if (mapper.getWidth(bufferHandle, &width) != NO_ERROR ||
mapper.getHeight(bufferHandle, &height) != NO_ERROR ||
mapper.getUsage(bufferHandle, &usage) != NO_ERROR ||
- mapper.getLayerCount(bufferHandle, &layerCount) != NO_ERROR) {
+ mapper.getLayerCount(bufferHandle, &layerCount) != NO_ERROR ||
+ mapper.getPixelFormatRequested(bufferHandle, &pixelFormat) != NO_ERROR) {
ALOGE("Error fetching metadata for the imported YCbCr420 buffer handle.");
return nullptr;
}
return sp<GraphicBuffer>::make(
bufferHandle, GraphicBuffer::HandleWrapMethod::TAKE_HANDLE, width, height,
- static_cast<int>(ui::PixelFormat::YCBCR_420_888), /*layers=*/1, usage,
- width);
+ static_cast<int>(pixelFormat), layerCount, usage, width);
}
std::shared_ptr<AHardwareBuffer> importBufferInternal(
- const NativeHandle& aidlHandle, const Stream& streamConfig) {
+ const NativeHandle& aidlHandle) {
if (aidlHandle.fds.empty()) {
ALOGE("Empty handle - nothing to import");
return nullptr;
@@ -103,12 +82,9 @@
return nullptr;
}
- sp<GraphicBuffer> buf =
- streamConfig.format == PixelFormat::BLOB
- ? createBlobGraphicBuffer(mapper, bufferHandle)
- : createYCbCr420GraphicBuffer(mapper, bufferHandle);
+ sp<GraphicBuffer> buf = createGraphicBuffer(mapper, bufferHandle);
- if (buf->initCheck() != NO_ERROR) {
+ if (buf == nullptr || buf->initCheck() != NO_ERROR) {
ALOGE("Imported graphic buffer is not correcly initialized.");
return nullptr;
}
@@ -128,7 +104,7 @@
std::shared_ptr<AHardwareBuffer> VirtualCameraStream::importBuffer(
const ::aidl::android::hardware::camera::device::StreamBuffer& buffer) {
- auto hwBufferPtr = importBufferInternal(buffer.buffer, mStreamConfig);
+ auto hwBufferPtr = importBufferInternal(buffer.buffer);
if (hwBufferPtr != nullptr) {
std::lock_guard<std::mutex> lock(mLock);
mBuffers.emplace(std::piecewise_construct,
diff --git a/services/camera/virtualcamera/util/EglProgram.cc b/services/camera/virtualcamera/util/EglProgram.cc
index 510fd33..85ff735 100644
--- a/services/camera/virtualcamera/util/EglProgram.cc
+++ b/services/camera/virtualcamera/util/EglProgram.cc
@@ -88,7 +88,7 @@
})";
constexpr char kExternalRgbaTextureFragmentShader[] = R"(#version 300 es
- #extension GL_OES_EGL_image_external : require
+ #extension GL_OES_EGL_image_external_essl3 : require
#extension GL_EXT_YUV_target : require
precision mediump float;
in vec2 vTextureCoord;
diff --git a/services/mediametrics/fuzzer/mediametrics_service_fuzzer.cpp b/services/mediametrics/fuzzer/mediametrics_service_fuzzer.cpp
index c6793a9..c7b4297 100644
--- a/services/mediametrics/fuzzer/mediametrics_service_fuzzer.cpp
+++ b/services/mediametrics/fuzzer/mediametrics_service_fuzzer.cpp
@@ -17,6 +17,7 @@
*****************************************************************************
* Originally developed and contributed by Ittiam Systems Pvt. Ltd, Bangalore
*/
+#include <binder/IPCThreadState.h>
#include <fuzzer/FuzzedDataProvider.h>
#include <media/MediaMetricsItem.h>
#include <mediametricsservice/AudioTypes.h>
@@ -26,210 +27,158 @@
#include <string.h>
#include <utils/Log.h>
#include <algorithm>
+#include <set>
using namespace android;
+static constexpr size_t STATSD_LOG_LINES_MAX = 48;
+static unsigned long long kPackedCallingUid = (unsigned long long)AID_SYSTEM << 32;
+constexpr int8_t kMaxBytes = 100;
+constexpr int8_t kMinBytes = 0;
+constexpr size_t kMaxItemLength = 16;
// low water mark
constexpr size_t kLogItemsLowWater = 1;
// high water mark
constexpr size_t kLogItemsHighWater = 2;
-constexpr size_t kMaxItemLength = 16;
-constexpr size_t kMaxApis = 64;
+
+/*
+ * Keys are generated by concatenating strings so that the lambda functions
+ * registered in the 'mAction' object inside AudioAnalytics() are covered
+ */
+
+std::string keyMediaValues[] = {
+ "metrics.manager",
+ "mediadrm",
+ "audio.device.a2dp",
+ AMEDIAMETRICS_KEY_AUDIO_MIDI,
+ AMEDIAMETRICS_KEY_PREFIX_AUDIO_SPATIALIZER "*",
+ AMEDIAMETRICS_KEY_PREFIX_AUDIO_THREAD "*",
+ AMEDIAMETRICS_KEY_AUDIO_FLINGER,
+ AMEDIAMETRICS_KEY_AUDIO_POLICY,
+ AMEDIAMETRICS_KEY_PREFIX_AUDIO_TRACK "*",
+ AMEDIAMETRICS_KEY_PREFIX_AUDIO_RECORD "*",
+ AMEDIAMETRICS_KEY_PREFIX_AUDIO_STREAM "*",
+ AMEDIAMETRICS_KEY_PREFIX_AUDIO_DEVICE
+ "postBluetoothA2dpDeviceConnectionStateSuppressNoisyIntent",
+};
+
+std::string keyMediaAction[] = {
+ "createAudioPatch",
+ "connected",
+ AMEDIAMETRICS_PROP_EVENT_VALUE_CREATE,
+ AMEDIAMETRICS_PROP_EVENT_VALUE_TIMEOUT,
+ AMEDIAMETRICS_PROP_EVENT_VALUE_CTOR,
+ AMEDIAMETRICS_PROP_EVENT_VALUE_ENDAAUDIOSTREAM,
+ AMEDIAMETRICS_PROP_EVENT_VALUE_DEVICECLOSED,
+ AMEDIAMETRICS_PROP_EVENT_VALUE_SETVOICEVOLUME,
+ AMEDIAMETRICS_PROP_EVENT_VALUE_SETMODE,
+ AMEDIAMETRICS_PROP_EVENT_VALUE_ENDAUDIOINTERVALGROUP,
+};
class MediaMetricsServiceFuzzer {
- public:
- void invokeStartsWith(const uint8_t *data, size_t size);
- void invokeInstantiate(const uint8_t *data, size_t size);
- void invokePackageInstallerCheck(const uint8_t *data, size_t size);
- void invokeItemManipulation(const uint8_t *data, size_t size);
- void invokeItemExpansion(const uint8_t *data, size_t size);
- void invokeTimeMachineStorage(const uint8_t *data, size_t size);
- void invokeTransactionLog(const uint8_t *data, size_t size);
- void invokeAnalyticsAction(const uint8_t *data, size_t size);
- void invokeAudioAnalytics(const uint8_t *data, size_t size);
- void invokeTimedAction(const uint8_t *data, size_t size);
- void process(const uint8_t *data, size_t size);
+ public:
+ MediaMetricsServiceFuzzer(const uint8_t* data, size_t size) : mFdp(data, size){};
+ void process();
+ void invokeStartsWith();
+ void invokeInstantiate();
+ void invokePackageInstallerCheck();
+ void invokeTimeMachineStorage();
+ void invokeTransactionLog();
+ void invokeAnalyticsAction();
+ void invokeAudioAnalytics();
+ void invokeTimedAction();
+ void setKeyValues(std::shared_ptr<mediametrics::Item>& item, std::string keyValue);
+ std::shared_ptr<mediametrics::Item> CreateItem();
+ sp<MediaMetricsService> mMediaMetricsService;
+ FuzzedDataProvider mFdp;
std::atomic_int mValue = 0;
};
-void MediaMetricsServiceFuzzer::invokeStartsWith(const uint8_t *data, size_t size) {
- FuzzedDataProvider fdp = FuzzedDataProvider(data, size);
- while (fdp.remaining_bytes()) {
- android::mediametrics::startsWith(fdp.ConsumeRandomLengthString(),
- fdp.ConsumeRandomLengthString());
- }
+void MediaMetricsServiceFuzzer::setKeyValues(std::shared_ptr<mediametrics::Item>& item,
+ std::string keyValue) {
+ auto invokeActionAPIs = mFdp.PickValueInArray<const std::function<void()>>({
+ [&]() { item->setInt32(keyValue.c_str(), mFdp.ConsumeIntegral<int32_t>()); },
+ [&]() { item->addInt32(keyValue.c_str(), mFdp.ConsumeIntegral<int32_t>()); },
+ [&]() { item->setInt64(keyValue.c_str(), mFdp.ConsumeIntegral<int64_t>()); },
+ [&]() { item->addInt64(keyValue.c_str(), mFdp.ConsumeIntegral<int64_t>()); },
+ [&]() { item->setDouble(keyValue.c_str(), mFdp.ConsumeFloatingPoint<double>()); },
+ [&]() { item->addDouble(keyValue.c_str(), mFdp.ConsumeFloatingPoint<double>()); },
+ [&]() { item->setTimestamp(mFdp.ConsumeIntegral<int64_t>()); },
+ [&]() {
+ std::string value = mFdp.ConsumeBool()
+ ? mFdp.ConsumeRandomLengthString(kMaxBytes)
+ : mFdp.PickValueInArray<std::string>(keyMediaAction);
+ item->setCString(keyValue.c_str(), value.c_str());
+ },
+ [&]() {
+ item->setRate(keyValue.c_str(), mFdp.ConsumeIntegral<int64_t>(),
+ mFdp.ConsumeIntegral<int64_t>());
+ },
+ [&]() {
+ mediametrics::LogItem<1> itemTemp(mFdp.ConsumeRandomLengthString(kMaxBytes));
+ itemTemp.setPid(mFdp.ConsumeIntegral<int16_t>())
+ .setUid(mFdp.ConsumeIntegral<int16_t>());
+
+ int32_t i = mFdp.ConsumeIntegral<int32_t>();
+ itemTemp.set(std::to_string(i).c_str(), (int32_t)i);
+ itemTemp.updateHeader();
+ (void)item->readFromByteString(itemTemp.getBuffer(), itemTemp.getLength());
+ },
+
+ });
+ invokeActionAPIs();
}
-void MediaMetricsServiceFuzzer::invokeInstantiate(const uint8_t *data, size_t size) {
- FuzzedDataProvider fdp = FuzzedDataProvider(data, size);
- sp mediaMetricsService = new MediaMetricsService();
-
- while (fdp.remaining_bytes()) {
- std::unique_ptr<mediametrics::Item> random_key(
- mediametrics::Item::create(fdp.ConsumeRandomLengthString()));
- mediaMetricsService->submit(random_key.get());
- random_key->setInt32(fdp.ConsumeRandomLengthString().c_str(),
- fdp.ConsumeIntegral<int32_t>());
- mediaMetricsService->submit(random_key.get());
-
- std::unique_ptr<mediametrics::Item> audiotrack_key(
- mediametrics::Item::create("audiotrack"));
- mediaMetricsService->submit(audiotrack_key.get());
- audiotrack_key->addInt32(fdp.ConsumeRandomLengthString().c_str(),
- fdp.ConsumeIntegral<int32_t>());
- mediaMetricsService->submit(audiotrack_key.get());
+std::shared_ptr<mediametrics::Item> MediaMetricsServiceFuzzer::CreateItem() {
+ std::string key;
+ if (mFdp.ConsumeBool()) {
+ key = mFdp.ConsumeRandomLengthString(kMaxItemLength);
+ } else {
+ key = mFdp.PickValueInArray<std::string>(keyMediaValues);
}
-}
-void MediaMetricsServiceFuzzer::invokePackageInstallerCheck(const uint8_t *data, size_t size) {
- FuzzedDataProvider fdp = FuzzedDataProvider(data, size);
- while (fdp.remaining_bytes()) {
- MediaMetricsService::useUidForPackage(fdp.ConsumeRandomLengthString().c_str(),
- fdp.ConsumeRandomLengthString().c_str());
- }
-}
-
-void MediaMetricsServiceFuzzer::invokeItemManipulation(const uint8_t *data, size_t size) {
- FuzzedDataProvider fdp = FuzzedDataProvider(data, size);
-
- mediametrics::Item item(fdp.ConsumeRandomLengthString().c_str());
- while (fdp.remaining_bytes()) {
- const uint8_t action = fdp.ConsumeIntegralInRange<uint8_t>(0, 16);
- const std::string key = fdp.ConsumeRandomLengthString();
- if (fdp.remaining_bytes() < 1 || key.length() < 1) {
- break;
+ std::shared_ptr<mediametrics::Item> item = std::make_shared<mediametrics::Item>(key.c_str());
+ size_t numKeys = mFdp.ConsumeIntegralInRange<size_t>(kMinBytes, kMaxBytes);
+ std::set<std::string> keySet;
+ for (size_t i = 0; i < numKeys; ++i) {
+ std::string keyValue;
+ if (mFdp.ConsumeBool()) {
+ keyValue = mFdp.ConsumeRandomLengthString(kMaxBytes);
+ } else {
+ keyValue = mFdp.PickValueInArray<std::string>(
+ {AMEDIAMETRICS_PROP_EVENT, AMEDIAMETRICS_PROP_STATE, "logSessionId"});
}
- switch (action) {
- case 0: {
- item.setInt32(key.c_str(), fdp.ConsumeIntegral<int32_t>());
- break;
- }
- case 1: {
- item.addInt32(key.c_str(), fdp.ConsumeIntegral<int32_t>());
- break;
- }
- case 2: {
- int32_t i32 = 0;
- item.getInt32(key.c_str(), &i32);
- break;
- }
- case 3: {
- item.setInt64(key.c_str(), fdp.ConsumeIntegral<int64_t>());
- break;
- }
- case 4: {
- item.addInt64(key.c_str(), fdp.ConsumeIntegral<int64_t>());
- break;
- }
- case 5: {
- int64_t i64 = 0;
- item.getInt64(key.c_str(), &i64);
- break;
- }
- case 6: {
- item.setDouble(key.c_str(), fdp.ConsumeFloatingPoint<double>());
- break;
- }
- case 7: {
- item.addDouble(key.c_str(), fdp.ConsumeFloatingPoint<double>());
- break;
- }
- case 8: {
- double d = 0;
- item.getDouble(key.c_str(), &d);
- break;
- }
- case 9: {
- item.setCString(key.c_str(), fdp.ConsumeRandomLengthString().c_str());
- break;
- }
- case 10: {
- char *s = nullptr;
- item.getCString(key.c_str(), &s);
- if (s) free(s);
- break;
- }
- case 11: {
- std::string s;
- item.getString(key.c_str(), &s);
- break;
- }
- case 12: {
- item.setRate(key.c_str(), fdp.ConsumeIntegral<int64_t>(),
- fdp.ConsumeIntegral<int64_t>());
- break;
- }
- case 13: {
- int64_t b = 0, h = 0;
- double d = 0;
- item.getRate(key.c_str(), &b, &h, &d);
- break;
- }
- case 14: {
- (void)item.filter(key.c_str());
- break;
- }
- case 15: {
- const char *arr[1] = {""};
- arr[0] = const_cast<char *>(key.c_str());
- (void)item.filterNot(1, arr);
- break;
- }
- case 16: {
- (void)item.toString().c_str();
- break;
- }
+ if (keySet.find(keyValue) == keySet.end()) {
+ setKeyValues(item, keyValue);
+ keySet.insert(keyValue);
}
}
-
- Parcel p;
- mediametrics::Item item2;
-
- (void)item.writeToParcel(&p);
- p.setDataPosition(0); // rewind for reading
- (void)item2.readFromParcel(p);
-
- char *byteData = nullptr;
- size_t length = 0;
- (void)item.writeToByteString(&byteData, &length);
- (void)item2.readFromByteString(byteData, length);
- if (byteData) {
- free(byteData);
- }
-
- sp mediaMetricsService = new MediaMetricsService();
- mediaMetricsService->submit(&item2);
+ return item;
}
-void MediaMetricsServiceFuzzer::invokeItemExpansion(const uint8_t *data, size_t size) {
- FuzzedDataProvider fdp = FuzzedDataProvider(data, size);
-
- mediametrics::LogItem<1> item("FuzzItem");
- item.setPid(fdp.ConsumeIntegral<int16_t>()).setUid(fdp.ConsumeIntegral<int16_t>());
-
- while (fdp.remaining_bytes()) {
- int32_t i = fdp.ConsumeIntegral<int32_t>();
- item.set(std::to_string(i).c_str(), (int32_t)i);
- }
- item.updateHeader();
-
- mediametrics::Item item2;
- (void)item2.readFromByteString(item.getBuffer(), item.getLength());
-
- sp mediaMetricsService = new MediaMetricsService();
- mediaMetricsService->submit(&item2);
+void MediaMetricsServiceFuzzer::invokeStartsWith() {
+ android::mediametrics::startsWith(mFdp.ConsumeRandomLengthString(kMaxBytes),
+ mFdp.ConsumeRandomLengthString(kMaxBytes));
}
-void MediaMetricsServiceFuzzer::invokeTimeMachineStorage(const uint8_t *data, size_t size) {
- FuzzedDataProvider fdp = FuzzedDataProvider(data, size);
+void MediaMetricsServiceFuzzer::invokeInstantiate() {
+ auto item = CreateItem();
+ mMediaMetricsService->submit(item.get());
+}
- auto item = std::make_shared<mediametrics::Item>("FuzzKey");
- int32_t i32 = fdp.ConsumeIntegral<int32_t>();
- int64_t i64 = fdp.ConsumeIntegral<int64_t>();
- double d = fdp.ConsumeFloatingPoint<double>();
- std::string str = fdp.ConsumeRandomLengthString();
- std::pair<int64_t, int64_t> pair(fdp.ConsumeIntegral<int64_t>(),
- fdp.ConsumeIntegral<int64_t>());
+void MediaMetricsServiceFuzzer::invokePackageInstallerCheck() {
+ MediaMetricsService::useUidForPackage(mFdp.ConsumeRandomLengthString(kMaxBytes).c_str(),
+ mFdp.ConsumeRandomLengthString(kMaxBytes).c_str());
+}
+
+void MediaMetricsServiceFuzzer::invokeTimeMachineStorage() {
+ auto item = CreateItem();
+ int32_t i32 = mFdp.ConsumeIntegral<int32_t>();
+ int64_t i64 = mFdp.ConsumeIntegral<int64_t>();
+ double d = mFdp.ConsumeFloatingPoint<double>();
+ std::string str = mFdp.ConsumeRandomLengthString(kMaxBytes);
+ std::pair<int64_t, int64_t> pair(mFdp.ConsumeIntegral<int64_t>(),
+ mFdp.ConsumeIntegral<int64_t>());
(*item).set("i32", i32).set("i64", i64).set("double", d).set("string", str).set("rate", pair);
android::mediametrics::TimeMachine timeMachine;
@@ -253,124 +202,89 @@
timeMachine.get("Key.string", &str, -1);
}
-void MediaMetricsServiceFuzzer::invokeTransactionLog(const uint8_t *data, size_t size) {
- FuzzedDataProvider fdp = FuzzedDataProvider(data, size);
-
- auto item = std::make_shared<mediametrics::Item>("Key1");
- (*item)
- .set("one", fdp.ConsumeIntegral<int32_t>())
- .set("two", fdp.ConsumeIntegral<int32_t>())
- .setTimestamp(fdp.ConsumeIntegral<int32_t>());
+void MediaMetricsServiceFuzzer::invokeTransactionLog() {
+ auto item = CreateItem();
android::mediametrics::TransactionLog transactionLog(
kLogItemsLowWater, kLogItemsHighWater); // keep at most 2 items
transactionLog.size();
transactionLog.put(item);
- transactionLog.size();
-
- auto item2 = std::make_shared<mediametrics::Item>("Key2");
- (*item2)
- .set("three", fdp.ConsumeIntegral<int32_t>())
- .set("[Key1]three", fdp.ConsumeIntegral<int32_t>())
- .setTimestamp(fdp.ConsumeIntegral<int32_t>());
-
- transactionLog.put(item2);
- transactionLog.size();
-
- auto item3 = std::make_shared<mediametrics::Item>("Key3");
- (*item3)
- .set("six", fdp.ConsumeIntegral<int32_t>())
- .set("[Key1]four", fdp.ConsumeIntegral<int32_t>()) // affects Key1
- .set("[Key1]five", fdp.ConsumeIntegral<int32_t>()) // affects key1
- .setTimestamp(fdp.ConsumeIntegral<int32_t>());
-
- transactionLog.put(item3);
- transactionLog.size();
}
-void MediaMetricsServiceFuzzer::invokeAnalyticsAction(const uint8_t *data, size_t size) {
- FuzzedDataProvider fdp = FuzzedDataProvider(data, size);
-
+void MediaMetricsServiceFuzzer::invokeAnalyticsAction() {
mediametrics::AnalyticsActions analyticsActions;
bool action = false;
- while (fdp.remaining_bytes()) {
- analyticsActions.addAction(
- (fdp.ConsumeRandomLengthString() + std::string(".event")).c_str(),
- fdp.ConsumeRandomLengthString(),
+ analyticsActions.addAction(
+ (mFdp.ConsumeRandomLengthString(kMaxBytes) + std::string(".event")).c_str(),
+ mFdp.ConsumeRandomLengthString(kMaxBytes),
std::make_shared<mediametrics::AnalyticsActions::Function>(
- [&](const std::shared_ptr<const android::mediametrics::Item> &) {
- action = true;
- }));
- }
+ [&](const std::shared_ptr<const android::mediametrics::Item>&) {
+ action = true;
+ }));
- FuzzedDataProvider fdp2 = FuzzedDataProvider(data, size);
- size_t apiCount = 0;
- while (fdp2.remaining_bytes() && ++apiCount <= kMaxApis) {
- // make a test item
- auto item = std::make_shared<mediametrics::Item>(
- fdp2.ConsumeRandomLengthString(kMaxItemLength).c_str());
- (*item).set("event", fdp2.ConsumeRandomLengthString().c_str());
+ // make a test item
+ auto item = CreateItem();
+ (*item).set("event", mFdp.ConsumeRandomLengthString(kMaxBytes).c_str());
- // get the actions and execute them
- auto actions = analyticsActions.getActionsForItem(item);
- for (const auto &action : actions) {
- action->operator()(item);
+ // get the actions and execute them
+ auto actions = analyticsActions.getActionsForItem(item);
+ for (const auto& action : actions) {
+ action->operator()(item);
}
- }
}
-void MediaMetricsServiceFuzzer::invokeAudioAnalytics(const uint8_t *data, size_t size) {
- FuzzedDataProvider fdp = FuzzedDataProvider(data, size);
+void MediaMetricsServiceFuzzer::invokeAudioAnalytics() {
+ int32_t maxLogLine = mFdp.ConsumeIntegralInRange<int32_t>(0, STATSD_LOG_LINES_MAX);
std::shared_ptr<android::mediametrics::StatsdLog> statsdLog =
- std::make_shared<android::mediametrics::StatsdLog>(10);
+ std::make_shared<android::mediametrics::StatsdLog>(maxLogLine);
android::mediametrics::AudioAnalytics audioAnalytics{statsdLog};
- while (fdp.remaining_bytes()) {
- auto item = std::make_shared<mediametrics::Item>(fdp.ConsumeRandomLengthString().c_str());
- int32_t transactionUid = fdp.ConsumeIntegral<int32_t>(); // arbitrary
- (*item)
- .set(fdp.ConsumeRandomLengthString().c_str(), fdp.ConsumeIntegral<int32_t>())
- .set(fdp.ConsumeRandomLengthString().c_str(), fdp.ConsumeIntegral<int32_t>())
- .set(AMEDIAMETRICS_PROP_ALLOWUID, transactionUid)
- .setUid(transactionUid)
- .setTimestamp(fdp.ConsumeIntegral<int32_t>());
- audioAnalytics.submit(item, fdp.ConsumeBool());
+ auto item = CreateItem();
+ Parcel parcel;
+ item->writeToParcel(&parcel);
+ parcel.setDataPosition(0);
+ if (mFdp.ConsumeBool()) {
+ item->readFromParcel(parcel);
}
-
- audioAnalytics.dump(1000);
+ audioAnalytics.submit(item, mFdp.ConsumeBool());
}
-void MediaMetricsServiceFuzzer::invokeTimedAction(const uint8_t *data, size_t size) {
- FuzzedDataProvider fdp = FuzzedDataProvider(data, size);
+void MediaMetricsServiceFuzzer::invokeTimedAction() {
android::mediametrics::TimedAction timedAction;
+ timedAction.postIn(std::chrono::seconds(mFdp.ConsumeIntegral<uint32_t>()),
+ [this] { ++mValue; });
+ timedAction.size();
+}
- while (fdp.remaining_bytes()) {
- timedAction.postIn(std::chrono::seconds(fdp.ConsumeIntegral<int32_t>()),
- [this] { ++mValue; });
- timedAction.size();
+void MediaMetricsServiceFuzzer::process() {
+ mMediaMetricsService = sp<MediaMetricsService>::make();
+
+ if (mFdp.ConsumeBool()) {
+ IPCThreadState::self()->restoreCallingIdentity(kPackedCallingUid);
+ } else {
+ IPCThreadState::self()->restoreCallingIdentity(mFdp.ConsumeIntegral<size_t>());
+ }
+ while (mFdp.remaining_bytes()) {
+ auto invokeAPIs = mFdp.PickValueInArray<const std::function<void()>>({
+ [&]() { invokeStartsWith(); },
+ [&]() { invokeInstantiate(); },
+ [&]() { invokePackageInstallerCheck(); },
+ [&]() { invokeTimeMachineStorage(); },
+ [&]() { invokeTransactionLog(); },
+ [&]() { invokeAudioAnalytics(); },
+ [&]() { invokeTimedAction(); },
+ });
+ invokeAPIs();
}
}
-void MediaMetricsServiceFuzzer::process(const uint8_t *data, size_t size) {
- invokeStartsWith(data, size);
- invokeInstantiate(data, size);
- invokePackageInstallerCheck(data, size);
- invokeItemManipulation(data, size);
- invokeItemExpansion(data, size);
- invokeTimeMachineStorage(data, size);
- invokeTransactionLog(data, size);
- invokeAnalyticsAction(data, size);
- invokeAudioAnalytics(data, size);
- invokeTimedAction(data, size);
-}
-
-extern "C" int LLVMFuzzerTestOneInput(const uint8_t *data, size_t size) {
+extern "C" int LLVMFuzzerTestOneInput(const uint8_t* data, size_t size) {
if (size < 1) {
return 0;
}
- MediaMetricsServiceFuzzer mediaMetricsServiceFuzzer;
- mediaMetricsServiceFuzzer.process(data, size);
+ MediaMetricsServiceFuzzer mediaMetricsServiceFuzzer(data, size);
+ mediaMetricsServiceFuzzer.process();
return 0;
}
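
kPackedCallingUid relies on the Binder calling-identity token packing the uid into the upper 32 bits (with the pid in the lower 32 bits); a small sketch of that packing, stated here as an assumption about Binder internals for illustration only.

#include <cassert>
#include <cstdint>

// Sketch of the identity-token layout mirrored by kPackedCallingUid above:
// uid in the upper 32 bits, pid in the lower 32 bits (assumed layout, shown
// only to explain the constant passed to restoreCallingIdentity()).
constexpr uint64_t packCallingIdentity(uint32_t uid, uint32_t pid) {
    return (static_cast<uint64_t>(uid) << 32) | pid;
}

int main() {
    constexpr uint32_t kAidSystem = 1000;  // AID_SYSTEM
    constexpr uint64_t token = packCallingIdentity(kAidSystem, /*pid=*/0);
    assert((token >> 32) == kAidSystem);
    assert((token & 0xFFFFFFFFu) == 0u);
    return 0;
}
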
diff --git a/services/mediametrics/include/mediametricsservice/AudioTypes.h b/services/mediametrics/include/mediametricsservice/AudioTypes.h
index b5fe28b..59654bf 100644
--- a/services/mediametrics/include/mediametricsservice/AudioTypes.h
+++ b/services/mediametrics/include/mediametricsservice/AudioTypes.h
@@ -18,6 +18,7 @@
#include <string>
#include <unordered_map>
+#include <vector>
namespace android::mediametrics::types {
diff --git a/services/mediaresourcemanager/fuzzer/Android.bp b/services/mediaresourcemanager/fuzzer/Android.bp
index 5bac062..3f04f69 100644
--- a/services/mediaresourcemanager/fuzzer/Android.bp
+++ b/services/mediaresourcemanager/fuzzer/Android.bp
@@ -47,7 +47,7 @@
],
fuzz_config: {
cc: [
- "android-media-fuzzing-reports@google.com",
+ "girishshetty@google.com",
],
componentid: 155276,
hotlists: [
diff --git a/services/mediaresourcemanager/test/Android.bp b/services/mediaresourcemanager/test/Android.bp
index 6a64823..5dfec30 100644
--- a/services/mediaresourcemanager/test/Android.bp
+++ b/services/mediaresourcemanager/test/Android.bp
@@ -29,6 +29,9 @@
"libactivitymanager_aidl",
"server_configurable_flags",
],
+ defaults: [
+ "aconfig_lib_cc_static_link.defaults",
+ ],
include_dirs: [
"frameworks/av/include",
"frameworks/av/services/mediaresourcemanager",