Merge "IDrm metrics to statsd"
diff --git a/apex/ld.config.txt b/apex/ld.config.txt
index 713f0b7..4dc5fb1 100644
--- a/apex/ld.config.txt
+++ b/apex/ld.config.txt
@@ -33,7 +33,7 @@
# TODO: replace the following when apex has a way to auto-generate this list
# namespace.default.link.platform.shared_libs = %LLNDK_LIBRARIES%
# namespace.default.link.platform.shared_libs += %SANITIZER_RUNTIME_LIBRARIES%
-namespace.default.link.platform.shared_libs = libEGL.so:libGLESv1_CM.so:libGLESv2.so:libGLESv3.so:libRS.so:libandroid_net.so:libc.so:libcgrouprc.so:libclang_rt.asan-aarch64-android.so:libclang_rt.asan-arm-android.so:libclang_rt.hwasan-aarch64-android.so:libclang_rt.asan-i686-android.so:libclang_rt.asan-x86_64-android.so:libdl.so:libft2.so:liblog.so:libm.so:libmediandk.so:libnativewindow.so:libneuralnetworks.so:libsync.so:libvndksupport.so:libdl_android.so:libvulkan.so:libbinder_ndk.so
+namespace.default.link.platform.shared_libs = libEGL.so:libGLESv1_CM.so:libGLESv2.so:libGLESv3.so:libandroid_net.so:libc.so:libcgrouprc.so:libclang_rt.asan-aarch64-android.so:libclang_rt.asan-arm-android.so:libclang_rt.hwasan-aarch64-android.so:libclang_rt.asan-i686-android.so:libclang_rt.asan-x86_64-android.so:libdl.so:libft2.so:liblog.so:libm.so:libmediandk.so:libnativewindow.so:libneuralnetworks.so:libsync.so:libvndksupport.so:libdl_android.so:libvulkan.so:libbinder_ndk.so
###############################################################################
# "platform" namespace
@@ -138,7 +138,7 @@
# TODO: replace the following when apex has a way to auto-generate this list
# namespace.sphal.link.platform.shared_libs = %LLNDK_LIBRARIES%
# namespace.sphal.link.platform.shared_libs += %SANITIZER_RUNTIME_LIBRARIES%
-namespace.sphal.link.platform.shared_libs = libEGL.so:libGLESv1_CM.so:libGLESv2.so:libGLESv3.so:libRS.so:libandroid_net.so:libc.so:libcgrouprc.so:libclang_rt.asan-aarch64-android.so:libclang_rt.asan-arm-android.so:libclang_rt.hwasan-aarch64-android.so:libclang_rt.asan-i686-android.so:libclang_rt.asan-x86_64-android.so:libdl.so:libft2.so:liblog.so:libm.so:libmediandk.so:libnativewindow.so:libneuralnetworks.so:libsync.so:libvndksupport.so:libvulkan.so:libbinder_ndk.so
+namespace.sphal.link.platform.shared_libs = libEGL.so:libGLESv1_CM.so:libGLESv2.so:libGLESv3.so:libandroid_net.so:libc.so:libcgrouprc.so:libclang_rt.asan-aarch64-android.so:libclang_rt.asan-arm-android.so:libclang_rt.hwasan-aarch64-android.so:libclang_rt.asan-i686-android.so:libclang_rt.asan-x86_64-android.so:libdl.so:libft2.so:liblog.so:libm.so:libmediandk.so:libnativewindow.so:libneuralnetworks.so:libsync.so:libvndksupport.so:libvulkan.so:libbinder_ndk.so
# Add a link for libz.so which is llndk on devices where VNDK is not enforced.
namespace.sphal.link.platform.shared_libs += libz.so
diff --git a/camera/ndk/include/camera/NdkCameraDevice.h b/camera/ndk/include/camera/NdkCameraDevice.h
index 7be4bd3..239cb31 100644
--- a/camera/ndk/include/camera/NdkCameraDevice.h
+++ b/camera/ndk/include/camera/NdkCameraDevice.h
@@ -470,18 +470,6 @@
* <a href="http://developer.android.com/reference/android/media/CamcorderProfile.html">
* CamcorderProfiles</a>.</li>
*
- * <li>For efficient YUV processing with <a href=
- * "http://developer.android.com/reference/android/renderscript/package-summary.html">
- * RenderScript</a>:
- * Create a RenderScript
- * <a href="http://developer.android.com/reference/android/renderscript/Allocation.html">
- * Allocation</a> with a supported YUV
- * type, the IO_INPUT flag, and one of the YUV output sizes returned by
- * {@link ACAMERA_SCALER_AVAILABLE_STREAM_CONFIGURATIONS},
- * Then obtain the Surface with
- * <a href="http://developer.android.com/reference/android/renderscript/Allocation.html#getSurface()">
- * Allocation#getSurface}</a>.</li>
- *
* <li>For access to RAW, uncompressed YUV, or compressed JPEG data in the application: Create an
* {@link AImageReader} object using the {@link AImageReader_new} method with one of the supported
* output formats given by {@link ACAMERA_SCALER_AVAILABLE_STREAM_CONFIGURATIONS}. Then obtain a
diff --git a/cmds/stagefright/Android.bp b/cmds/stagefright/Android.bp
index e1fe07e..445541e 100644
--- a/cmds/stagefright/Android.bp
+++ b/cmds/stagefright/Android.bp
@@ -211,46 +211,6 @@
}
cc_binary {
- name: "mediafilter",
-
- srcs: [
- "filters/argbtorgba.rscript",
- "filters/nightvision.rscript",
- "filters/saturation.rscript",
- "mediafilter.cpp",
- ],
-
- header_libs: [
- "libmediadrm_headers",
- "libmediametrics_headers",
- "libstagefright_headers",
- "rs-headers",
- ],
-
- shared_libs: [
- "libstagefright",
- "liblog",
- "libutils",
- "libbinder",
- "libstagefright_foundation",
- "libmedia_omx",
- "libui",
- "libgui",
- "libRScpp",
- ],
-
- static_libs: ["libstagefright_mediafilter"],
-
- cflags: [
- "-Wno-multichar",
- ],
-
- sanitize: {
- cfi: true,
- },
-}
-
-cc_binary {
name: "muxer",
srcs: ["muxer.cpp"],
diff --git a/cmds/stagefright/SineSource.cpp b/cmds/stagefright/SineSource.cpp
index 0ecc16c..0656030 100644
--- a/cmds/stagefright/SineSource.cpp
+++ b/cmds/stagefright/SineSource.cpp
@@ -63,12 +63,15 @@
MediaBufferBase **out, const ReadOptions * /* options */) {
*out = NULL;
- MediaBufferBase *buffer;
+ MediaBufferBase *buffer = nullptr;
status_t err = mGroup->acquire_buffer(&buffer);
if (err != OK) {
return err;
}
+ if (buffer == nullptr) {
+ return AMEDIA_ERROR_UNKNOWN;
+ }
size_t frameSize = mNumChannels * sizeof(int16_t);
size_t numFramesPerBuffer = buffer->size() / frameSize;
diff --git a/cmds/stagefright/filters/argbtorgba.rscript b/cmds/stagefright/filters/argbtorgba.rscript
deleted file mode 100644
index 229ff8c..0000000
--- a/cmds/stagefright/filters/argbtorgba.rscript
+++ /dev/null
@@ -1,26 +0,0 @@
-/*
- * Copyright (C) 2014 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#pragma version(1)
-#pragma rs java_package_name(com.android.rs.cppbasic)
-#pragma rs_fp_relaxed
-
-void root(const uchar4 *v_in, uchar4 *v_out) {
- v_out->x = v_in->y;
- v_out->y = v_in->z;
- v_out->z = v_in->w;
- v_out->w = v_in->x;
-}
\ No newline at end of file
diff --git a/cmds/stagefright/filters/nightvision.rscript b/cmds/stagefright/filters/nightvision.rscript
deleted file mode 100644
index f61413c..0000000
--- a/cmds/stagefright/filters/nightvision.rscript
+++ /dev/null
@@ -1,38 +0,0 @@
-/*
- * Copyright (C) 2014 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#pragma version(1)
-#pragma rs java_package_name(com.android.rs.cppbasic)
-#pragma rs_fp_relaxed
-
-const static float3 gMonoMult = {0.299f, 0.587f, 0.114f};
-const static float3 gNightVisionMult = {0.5f, 1.f, 0.5f};
-
-// calculates luminance of pixel, then biases color balance toward green
-void root(const uchar4 *v_in, uchar4 *v_out) {
- v_out->x = v_in->x; // don't modify A
-
- // get RGB, scale 0-255 uchar to 0-1.0 float
- float3 rgb = {v_in->y * 0.003921569f, v_in->z * 0.003921569f,
- v_in->w * 0.003921569f};
-
- // apply filter
- float3 result = dot(rgb, gMonoMult) * gNightVisionMult;
-
- v_out->y = (uchar)clamp((result.r * 255.f + 0.5f), 0.f, 255.f);
- v_out->z = (uchar)clamp((result.g * 255.f + 0.5f), 0.f, 255.f);
- v_out->w = (uchar)clamp((result.b * 255.f + 0.5f), 0.f, 255.f);
-}
diff --git a/cmds/stagefright/filters/saturation.rscript b/cmds/stagefright/filters/saturation.rscript
deleted file mode 100644
index 1de9dd8..0000000
--- a/cmds/stagefright/filters/saturation.rscript
+++ /dev/null
@@ -1,40 +0,0 @@
-/*
- * Copyright (C) 2014 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#pragma version(1)
-#pragma rs java_package_name(com.android.rs.cppbasic)
-#pragma rs_fp_relaxed
-
-const static float3 gMonoMult = {0.299f, 0.587f, 0.114f};
-
-// global variables (parameters accessible to application code)
-float gSaturation = 1.0f;
-
-void root(const uchar4 *v_in, uchar4 *v_out) {
- v_out->x = v_in->x; // don't modify A
-
- // get RGB, scale 0-255 uchar to 0-1.0 float
- float3 rgb = {v_in->y * 0.003921569f, v_in->z * 0.003921569f,
- v_in->w * 0.003921569f};
-
- // apply saturation filter
- float3 result = dot(rgb, gMonoMult);
- result = mix(result, rgb, gSaturation);
-
- v_out->y = (uchar)clamp((result.r * 255.f + 0.5f), 0.f, 255.f);
- v_out->z = (uchar)clamp((result.g * 255.f + 0.5f), 0.f, 255.f);
- v_out->w = (uchar)clamp((result.b * 255.f + 0.5f), 0.f, 255.f);
-}
diff --git a/cmds/stagefright/mediafilter.cpp b/cmds/stagefright/mediafilter.cpp
deleted file mode 100644
index 67c68e6..0000000
--- a/cmds/stagefright/mediafilter.cpp
+++ /dev/null
@@ -1,789 +0,0 @@
-/*
- * Copyright (C) 2014 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-//#define LOG_NDEBUG 0
-#define LOG_TAG "mediafilterTest"
-
-#include <inttypes.h>
-
-#include <binder/ProcessState.h>
-#include <filters/ColorConvert.h>
-#include <gui/ISurfaceComposer.h>
-#include <gui/SurfaceComposerClient.h>
-#include <gui/Surface.h>
-#include <media/IMediaHTTPService.h>
-#include <media/MediaCodecBuffer.h>
-#include <mediadrm/ICrypto.h>
-#include <media/stagefright/foundation/ABuffer.h>
-#include <media/stagefright/foundation/ADebug.h>
-#include <media/stagefright/foundation/AMessage.h>
-#include <media/stagefright/MediaCodec.h>
-#include <media/stagefright/NuMediaExtractor.h>
-#include <media/stagefright/RenderScriptWrapper.h>
-#include <OMX_IVCommon.h>
-#include <ui/DisplayMode.h>
-
-#include "RenderScript.h"
-#include "ScriptC_argbtorgba.h"
-#include "ScriptC_nightvision.h"
-#include "ScriptC_saturation.h"
-
-// test parameters
-static const bool kTestFlush = true; // Note: true will drop 1 out of
-static const int kFlushAfterFrames = 25; // kFlushAfterFrames output frames
-static const int64_t kTimeout = 500ll;
-
-// built-in filter parameters
-static const int32_t kInvert = false; // ZeroFilter param
-static const float kBlurRadius = 15.0f; // IntrinsicBlurFilter param
-static const float kSaturation = 0.0f; // SaturationFilter param
-
-static void usage(const char *me) {
- fprintf(stderr, "usage: [flags] %s\n"
- "\t[-b] use IntrinsicBlurFilter\n"
- "\t[-c] use argb to rgba conversion RSFilter\n"
- "\t[-n] use night vision RSFilter\n"
- "\t[-r] use saturation RSFilter\n"
- "\t[-s] use SaturationFilter\n"
- "\t[-z] use ZeroFilter (copy filter)\n"
- "\t[-R] render output to surface (enables -S)\n"
- "\t[-S] allocate buffers from a surface\n"
- "\t[-T] use render timestamps (enables -R)\n",
- me);
- exit(1);
-}
-
-namespace android {
-
-struct SaturationRSFilter : RenderScriptWrapper::RSFilterCallback {
- void init(const RSC::sp<RSC::RS> &context) {
- mScript = new ScriptC_saturation(context);
- mScript->set_gSaturation(3.f);
- }
-
- virtual status_t processBuffers(
- RSC::Allocation *inBuffer, RSC::Allocation *outBuffer) {
- mScript->forEach_root(inBuffer, outBuffer);
-
- return OK;
- }
-
- status_t handleSetParameters(const sp<AMessage> &msg __unused) {
- return OK;
- }
-
-private:
- RSC::sp<ScriptC_saturation> mScript;
-};
-
-struct NightVisionRSFilter : RenderScriptWrapper::RSFilterCallback {
- void init(const RSC::sp<RSC::RS> &context) {
- mScript = new ScriptC_nightvision(context);
- }
-
- virtual status_t processBuffers(
- RSC::Allocation *inBuffer, RSC::Allocation *outBuffer) {
- mScript->forEach_root(inBuffer, outBuffer);
-
- return OK;
- }
-
- status_t handleSetParameters(const sp<AMessage> &msg __unused) {
- return OK;
- }
-
-private:
- RSC::sp<ScriptC_nightvision> mScript;
-};
-
-struct ARGBToRGBARSFilter : RenderScriptWrapper::RSFilterCallback {
- void init(const RSC::sp<RSC::RS> &context) {
- mScript = new ScriptC_argbtorgba(context);
- }
-
- virtual status_t processBuffers(
- RSC::Allocation *inBuffer, RSC::Allocation *outBuffer) {
- mScript->forEach_root(inBuffer, outBuffer);
-
- return OK;
- }
-
- status_t handleSetParameters(const sp<AMessage> &msg __unused) {
- return OK;
- }
-
-private:
- RSC::sp<ScriptC_argbtorgba> mScript;
-};
-
-struct CodecState {
- sp<MediaCodec> mCodec;
- Vector<sp<MediaCodecBuffer> > mInBuffers;
- Vector<sp<MediaCodecBuffer> > mOutBuffers;
- bool mSignalledInputEOS;
- bool mSawOutputEOS;
- int64_t mNumBuffersDecoded;
-};
-
-struct DecodedFrame {
- size_t index;
- size_t offset;
- size_t size;
- int64_t presentationTimeUs;
- uint32_t flags;
-};
-
-enum FilterType {
- FILTERTYPE_ZERO,
- FILTERTYPE_INTRINSIC_BLUR,
- FILTERTYPE_SATURATION,
- FILTERTYPE_RS_SATURATION,
- FILTERTYPE_RS_NIGHT_VISION,
- FILTERTYPE_RS_ARGB_TO_RGBA,
-};
-
-size_t inputFramesSinceFlush = 0;
-void tryCopyDecodedBuffer(
- List<DecodedFrame> *decodedFrameIndices,
- CodecState *filterState,
- CodecState *vidState) {
- if (decodedFrameIndices->empty()) {
- return;
- }
-
- size_t filterIndex;
- status_t err = filterState->mCodec->dequeueInputBuffer(
- &filterIndex, kTimeout);
- if (err != OK) {
- return;
- }
-
- ++inputFramesSinceFlush;
-
- DecodedFrame frame = *decodedFrameIndices->begin();
-
- // only consume a buffer if we are not going to flush, since we expect
- // the dequeue -> flush -> queue operation to cause an error and
- // not produce an output frame
- if (!kTestFlush || inputFramesSinceFlush < kFlushAfterFrames) {
- decodedFrameIndices->erase(decodedFrameIndices->begin());
- }
- size_t outIndex = frame.index;
-
- const sp<MediaCodecBuffer> &srcBuffer =
- vidState->mOutBuffers.itemAt(outIndex);
- const sp<MediaCodecBuffer> &destBuffer =
- filterState->mInBuffers.itemAt(filterIndex);
-
- sp<AMessage> srcFormat, destFormat;
- vidState->mCodec->getOutputFormat(&srcFormat);
- filterState->mCodec->getInputFormat(&destFormat);
-
- int32_t srcWidth, srcHeight, srcStride, srcSliceHeight;
- int32_t srcColorFormat, destColorFormat;
- int32_t destWidth, destHeight, destStride, destSliceHeight;
- CHECK(srcFormat->findInt32("stride", &srcStride)
- && srcFormat->findInt32("slice-height", &srcSliceHeight)
- && srcFormat->findInt32("width", &srcWidth)
- && srcFormat->findInt32("height", & srcHeight)
- && srcFormat->findInt32("color-format", &srcColorFormat));
- CHECK(destFormat->findInt32("stride", &destStride)
- && destFormat->findInt32("slice-height", &destSliceHeight)
- && destFormat->findInt32("width", &destWidth)
- && destFormat->findInt32("height", & destHeight)
- && destFormat->findInt32("color-format", &destColorFormat));
-
- CHECK(srcWidth <= destStride && srcHeight <= destSliceHeight);
-
- convertYUV420spToARGB(
- srcBuffer->data(),
- srcBuffer->data() + srcStride * srcSliceHeight,
- srcWidth,
- srcHeight,
- destBuffer->data());
-
- // copy timestamp
- int64_t timeUs;
- CHECK(srcBuffer->meta()->findInt64("timeUs", &timeUs));
- destBuffer->meta()->setInt64("timeUs", timeUs);
-
- if (kTestFlush && inputFramesSinceFlush >= kFlushAfterFrames) {
- inputFramesSinceFlush = 0;
-
- // check that queueing a buffer that was dequeued before flush
- // fails with expected error EACCES
- filterState->mCodec->flush();
-
- err = filterState->mCodec->queueInputBuffer(
- filterIndex, 0 /* offset */, destBuffer->size(),
- timeUs, frame.flags);
-
- if (err == OK) {
- ALOGE("FAIL: queue after flush returned OK");
- } else if (err != -EACCES) {
- ALOGE("queueInputBuffer after flush returned %d, "
- "expected -EACCES (-13)", err);
- }
- } else {
- err = filterState->mCodec->queueInputBuffer(
- filterIndex, 0 /* offset */, destBuffer->size(),
- timeUs, frame.flags);
- CHECK(err == OK);
-
- err = vidState->mCodec->releaseOutputBuffer(outIndex);
- CHECK(err == OK);
- }
-}
-
-size_t outputFramesSinceFlush = 0;
-void tryDrainOutputBuffer(
- CodecState *filterState,
- const sp<Surface> &surface, bool renderSurface,
- bool useTimestamp, int64_t *startTimeRender) {
- size_t index;
- size_t offset;
- size_t size;
- int64_t presentationTimeUs;
- uint32_t flags;
- status_t err = filterState->mCodec->dequeueOutputBuffer(
- &index, &offset, &size, &presentationTimeUs, &flags,
- kTimeout);
-
- if (err != OK) {
- return;
- }
-
- ++outputFramesSinceFlush;
-
- if (kTestFlush && outputFramesSinceFlush >= kFlushAfterFrames) {
- filterState->mCodec->flush();
- }
-
- if (surface == NULL || !renderSurface) {
- err = filterState->mCodec->releaseOutputBuffer(index);
- } else if (useTimestamp) {
- if (*startTimeRender == -1) {
- // begin rendering 2 vsyncs after first decode
- *startTimeRender = systemTime(SYSTEM_TIME_MONOTONIC)
- + 33000000 - (presentationTimeUs * 1000);
- }
- presentationTimeUs =
- (presentationTimeUs * 1000) + *startTimeRender;
- err = filterState->mCodec->renderOutputBufferAndRelease(
- index, presentationTimeUs);
- } else {
- err = filterState->mCodec->renderOutputBufferAndRelease(index);
- }
-
- if (kTestFlush && outputFramesSinceFlush >= kFlushAfterFrames) {
- outputFramesSinceFlush = 0;
-
- // releasing the buffer dequeued before flush should cause an error
- // if so, the frame will also be skipped in output stream
- if (err == OK) {
- ALOGE("FAIL: release after flush returned OK");
- } else if (err != -EACCES) {
- ALOGE("releaseOutputBuffer after flush returned %d, "
- "expected -EACCES (-13)", err);
- }
- } else {
- CHECK(err == OK);
- }
-
- if (flags & MediaCodec::BUFFER_FLAG_EOS) {
- ALOGV("reached EOS on output.");
- filterState->mSawOutputEOS = true;
- }
-}
-
-static int decode(
- const sp<android::ALooper> &looper,
- const char *path,
- const sp<Surface> &surface,
- bool renderSurface,
- bool useTimestamp,
- FilterType filterType) {
-
- static int64_t kTimeout = 500ll;
-
- sp<NuMediaExtractor> extractor = new NuMediaExtractor(NuMediaExtractor::EntryPoint::OTHER);
-
- if (extractor->setDataSource(NULL /* httpService */, path) != OK) {
- fprintf(stderr, "unable to instantiate extractor.\n");
- return 1;
- }
-
- KeyedVector<size_t, CodecState> stateByTrack;
-
- CodecState *vidState = NULL;
- for (size_t i = 0; i < extractor->countTracks(); ++i) {
- sp<AMessage> format;
- status_t err = extractor->getTrackFormat(i, &format);
- CHECK(err == OK);
-
- AString mime;
- CHECK(format->findString("mime", &mime));
- bool isVideo = !strncasecmp(mime.c_str(), "video/", 6);
- if (!isVideo) {
- continue;
- }
-
- ALOGV("selecting track %zu", i);
-
- err = extractor->selectTrack(i);
- CHECK(err == OK);
-
- CodecState *state =
- &stateByTrack.editValueAt(stateByTrack.add(i, CodecState()));
-
- vidState = state;
-
- state->mNumBuffersDecoded = 0;
-
- state->mCodec = MediaCodec::CreateByType(
- looper, mime.c_str(), false /* encoder */);
-
- CHECK(state->mCodec != NULL);
-
- err = state->mCodec->configure(
- format, NULL /* surface */, NULL /* crypto */, 0 /* flags */);
-
- CHECK(err == OK);
-
- state->mSignalledInputEOS = false;
- state->mSawOutputEOS = false;
-
- break;
- }
- CHECK(!stateByTrack.isEmpty());
- CHECK(vidState != NULL);
- sp<AMessage> vidFormat;
- vidState->mCodec->getOutputFormat(&vidFormat);
-
- // set filter to use ARGB8888
- vidFormat->setInt32("color-format", OMX_COLOR_Format32bitARGB8888);
- // set app cache directory path
- vidFormat->setString("cacheDir", "/system/bin");
-
- // create RenderScript context for RSFilters
- RSC::sp<RSC::RS> context = new RSC::RS();
- context->init("/system/bin");
-
- sp<RenderScriptWrapper::RSFilterCallback> rsFilter;
-
- // create renderscript wrapper for RSFilters
- sp<RenderScriptWrapper> rsWrapper = new RenderScriptWrapper;
- rsWrapper->mContext = context.get();
-
- CodecState *filterState = new CodecState();
- filterState->mNumBuffersDecoded = 0;
-
- sp<AMessage> params = new AMessage();
-
- switch (filterType) {
- case FILTERTYPE_ZERO:
- {
- filterState->mCodec = MediaCodec::CreateByComponentName(
- looper, "android.filter.zerofilter");
- params->setInt32("invert", kInvert);
- break;
- }
- case FILTERTYPE_INTRINSIC_BLUR:
- {
- filterState->mCodec = MediaCodec::CreateByComponentName(
- looper, "android.filter.intrinsicblur");
- params->setFloat("blur-radius", kBlurRadius);
- break;
- }
- case FILTERTYPE_SATURATION:
- {
- filterState->mCodec = MediaCodec::CreateByComponentName(
- looper, "android.filter.saturation");
- params->setFloat("saturation", kSaturation);
- break;
- }
- case FILTERTYPE_RS_SATURATION:
- {
- SaturationRSFilter *satFilter = new SaturationRSFilter;
- satFilter->init(context);
- rsFilter = satFilter;
- rsWrapper->mCallback = rsFilter;
- vidFormat->setObject("rs-wrapper", rsWrapper);
-
- filterState->mCodec = MediaCodec::CreateByComponentName(
- looper, "android.filter.RenderScript");
- break;
- }
- case FILTERTYPE_RS_NIGHT_VISION:
- {
- NightVisionRSFilter *nightVisionFilter = new NightVisionRSFilter;
- nightVisionFilter->init(context);
- rsFilter = nightVisionFilter;
- rsWrapper->mCallback = rsFilter;
- vidFormat->setObject("rs-wrapper", rsWrapper);
-
- filterState->mCodec = MediaCodec::CreateByComponentName(
- looper, "android.filter.RenderScript");
- break;
- }
- case FILTERTYPE_RS_ARGB_TO_RGBA:
- {
- ARGBToRGBARSFilter *argbToRgbaFilter = new ARGBToRGBARSFilter;
- argbToRgbaFilter->init(context);
- rsFilter = argbToRgbaFilter;
- rsWrapper->mCallback = rsFilter;
- vidFormat->setObject("rs-wrapper", rsWrapper);
-
- filterState->mCodec = MediaCodec::CreateByComponentName(
- looper, "android.filter.RenderScript");
- break;
- }
- default:
- {
- LOG_ALWAYS_FATAL("mediacodec.cpp error: unrecognized FilterType");
- break;
- }
- }
- CHECK(filterState->mCodec != NULL);
-
- status_t err = filterState->mCodec->configure(
- vidFormat /* format */, surface, NULL /* crypto */, 0 /* flags */);
- CHECK(err == OK);
-
- filterState->mSignalledInputEOS = false;
- filterState->mSawOutputEOS = false;
-
- int64_t startTimeUs = android::ALooper::GetNowUs();
- int64_t startTimeRender = -1;
-
- for (size_t i = 0; i < stateByTrack.size(); ++i) {
- CodecState *state = &stateByTrack.editValueAt(i);
-
- sp<MediaCodec> codec = state->mCodec;
-
- CHECK_EQ((status_t)OK, codec->start());
-
- CHECK_EQ((status_t)OK, codec->getInputBuffers(&state->mInBuffers));
- CHECK_EQ((status_t)OK, codec->getOutputBuffers(&state->mOutBuffers));
-
- ALOGV("got %zu input and %zu output buffers",
- state->mInBuffers.size(), state->mOutBuffers.size());
- }
-
- CHECK_EQ((status_t)OK, filterState->mCodec->setParameters(params));
-
- if (kTestFlush) {
- status_t flushErr = filterState->mCodec->flush();
- if (flushErr == OK) {
- ALOGE("FAIL: Flush before start returned OK");
- } else {
- ALOGV("Flush before start returned status %d, usually ENOSYS (-38)",
- flushErr);
- }
- }
-
- CHECK_EQ((status_t)OK, filterState->mCodec->start());
- CHECK_EQ((status_t)OK, filterState->mCodec->getInputBuffers(
- &filterState->mInBuffers));
- CHECK_EQ((status_t)OK, filterState->mCodec->getOutputBuffers(
- &filterState->mOutBuffers));
-
- if (kTestFlush) {
- status_t flushErr = filterState->mCodec->flush();
- if (flushErr != OK) {
- ALOGE("FAIL: Flush after start returned %d, expect OK (0)",
- flushErr);
- } else {
- ALOGV("Flush immediately after start OK");
- }
- }
-
- List<DecodedFrame> decodedFrameIndices;
-
- // loop until decoder reaches EOS
- bool sawInputEOS = false;
- bool sawOutputEOSOnAllTracks = false;
- while (!sawOutputEOSOnAllTracks) {
- if (!sawInputEOS) {
- size_t trackIndex;
- status_t err = extractor->getSampleTrackIndex(&trackIndex);
-
- if (err != OK) {
- ALOGV("saw input eos");
- sawInputEOS = true;
- } else {
- CodecState *state = &stateByTrack.editValueFor(trackIndex);
-
- size_t index;
- err = state->mCodec->dequeueInputBuffer(&index, kTimeout);
-
- if (err == OK) {
- ALOGV("filling input buffer %zu", index);
-
- const sp<MediaCodecBuffer> &buffer = state->mInBuffers.itemAt(index);
- sp<ABuffer> abuffer = new ABuffer(buffer->base(), buffer->capacity());
-
- err = extractor->readSampleData(abuffer);
- CHECK(err == OK);
- buffer->setRange(abuffer->offset(), abuffer->size());
-
- int64_t timeUs;
- err = extractor->getSampleTime(&timeUs);
- CHECK(err == OK);
-
- uint32_t bufferFlags = 0;
-
- err = state->mCodec->queueInputBuffer(
- index, 0 /* offset */, buffer->size(),
- timeUs, bufferFlags);
-
- CHECK(err == OK);
-
- extractor->advance();
- } else {
- CHECK_EQ(err, -EAGAIN);
- }
- }
- } else {
- for (size_t i = 0; i < stateByTrack.size(); ++i) {
- CodecState *state = &stateByTrack.editValueAt(i);
-
- if (!state->mSignalledInputEOS) {
- size_t index;
- status_t err =
- state->mCodec->dequeueInputBuffer(&index, kTimeout);
-
- if (err == OK) {
- ALOGV("signalling input EOS on track %zu", i);
-
- err = state->mCodec->queueInputBuffer(
- index, 0 /* offset */, 0 /* size */,
- 0ll /* timeUs */, MediaCodec::BUFFER_FLAG_EOS);
-
- CHECK(err == OK);
-
- state->mSignalledInputEOS = true;
- } else {
- CHECK_EQ(err, -EAGAIN);
- }
- }
- }
- }
-
- sawOutputEOSOnAllTracks = true;
- for (size_t i = 0; i < stateByTrack.size(); ++i) {
- CodecState *state = &stateByTrack.editValueAt(i);
-
- if (state->mSawOutputEOS) {
- continue;
- } else {
- sawOutputEOSOnAllTracks = false;
- }
-
- DecodedFrame frame;
- status_t err = state->mCodec->dequeueOutputBuffer(
- &frame.index, &frame.offset, &frame.size,
- &frame.presentationTimeUs, &frame.flags, kTimeout);
-
- if (err == OK) {
- ALOGV("draining decoded buffer %zu, time = %lld us",
- frame.index, (long long)frame.presentationTimeUs);
-
- ++(state->mNumBuffersDecoded);
-
- decodedFrameIndices.push_back(frame);
-
- if (frame.flags & MediaCodec::BUFFER_FLAG_EOS) {
- ALOGV("reached EOS on decoder output.");
- state->mSawOutputEOS = true;
- }
-
- } else if (err == INFO_OUTPUT_BUFFERS_CHANGED) {
- ALOGV("INFO_OUTPUT_BUFFERS_CHANGED");
- CHECK_EQ((status_t)OK, state->mCodec->getOutputBuffers(
- &state->mOutBuffers));
-
- ALOGV("got %zu output buffers", state->mOutBuffers.size());
- } else if (err == INFO_FORMAT_CHANGED) {
- sp<AMessage> format;
- CHECK_EQ((status_t)OK, state->mCodec->getOutputFormat(&format));
-
- ALOGV("INFO_FORMAT_CHANGED: %s",
- format->debugString().c_str());
- } else {
- CHECK_EQ(err, -EAGAIN);
- }
-
- tryCopyDecodedBuffer(&decodedFrameIndices, filterState, vidState);
-
- tryDrainOutputBuffer(
- filterState, surface, renderSurface,
- useTimestamp, &startTimeRender);
- }
- }
-
- // after EOS on decoder, let filter reach EOS
- while (!filterState->mSawOutputEOS) {
- tryCopyDecodedBuffer(&decodedFrameIndices, filterState, vidState);
-
- tryDrainOutputBuffer(
- filterState, surface, renderSurface,
- useTimestamp, &startTimeRender);
- }
-
- int64_t elapsedTimeUs = android::ALooper::GetNowUs() - startTimeUs;
-
- for (size_t i = 0; i < stateByTrack.size(); ++i) {
- CodecState *state = &stateByTrack.editValueAt(i);
-
- CHECK_EQ((status_t)OK, state->mCodec->release());
-
- printf("track %zu: %" PRId64 " frames decoded and filtered, "
- "%.2f fps.\n", i, state->mNumBuffersDecoded,
- state->mNumBuffersDecoded * 1E6 / elapsedTimeUs);
- }
-
- return 0;
-}
-
-} // namespace android
-
-int main(int argc, char **argv) {
- using namespace android;
-
- const char *me = argv[0];
-
- bool useSurface = false;
- bool renderSurface = false;
- bool useTimestamp = false;
- FilterType filterType = FILTERTYPE_ZERO;
-
- int res;
- while ((res = getopt(argc, argv, "bcnrszTRSh")) >= 0) {
- switch (res) {
- case 'b':
- {
- filterType = FILTERTYPE_INTRINSIC_BLUR;
- break;
- }
- case 'c':
- {
- filterType = FILTERTYPE_RS_ARGB_TO_RGBA;
- break;
- }
- case 'n':
- {
- filterType = FILTERTYPE_RS_NIGHT_VISION;
- break;
- }
- case 'r':
- {
- filterType = FILTERTYPE_RS_SATURATION;
- break;
- }
- case 's':
- {
- filterType = FILTERTYPE_SATURATION;
- break;
- }
- case 'z':
- {
- filterType = FILTERTYPE_ZERO;
- break;
- }
- case 'T':
- {
- useTimestamp = true;
- FALLTHROUGH_INTENDED;
- }
- case 'R':
- {
- renderSurface = true;
- FALLTHROUGH_INTENDED;
- }
- case 'S':
- {
- useSurface = true;
- break;
- }
- case '?':
- case 'h':
- default:
- {
- usage(me);
- break;
- }
- }
- }
-
- argc -= optind;
- argv += optind;
-
- if (argc != 1) {
- usage(me);
- }
-
- ProcessState::self()->startThreadPool();
-
- android::sp<android::ALooper> looper = new android::ALooper;
- looper->start();
-
- android::sp<SurfaceComposerClient> composerClient;
- android::sp<SurfaceControl> control;
- android::sp<Surface> surface;
-
- if (useSurface) {
- composerClient = new SurfaceComposerClient;
- CHECK_EQ((status_t)OK, composerClient->initCheck());
-
- const android::sp<IBinder> display = SurfaceComposerClient::getInternalDisplayToken();
- CHECK(display != nullptr);
-
- ui::DisplayMode mode;
- CHECK_EQ(SurfaceComposerClient::getActiveDisplayMode(display, &mode), NO_ERROR);
-
- const ui::Size& resolution = mode.resolution;
- const ssize_t displayWidth = resolution.getWidth();
- const ssize_t displayHeight = resolution.getHeight();
-
- ALOGV("display is %zd x %zd", displayWidth, displayHeight);
-
- control = composerClient->createSurface(
- String8("A Surface"), displayWidth, displayHeight,
- PIXEL_FORMAT_RGBA_8888, 0);
-
- CHECK(control != NULL);
- CHECK(control->isValid());
-
- SurfaceComposerClient::Transaction{}
- .setLayer(control, INT_MAX)
- .show(control)
- .apply();
-
- surface = control->getSurface();
- CHECK(surface != NULL);
- }
-
- decode(looper, argv[0], surface, renderSurface, useTimestamp, filterType);
-
- if (useSurface) {
- composerClient->dispose();
- }
-
- looper->stop();
-
- return 0;
-}
diff --git a/cmds/stagefright/record.cpp b/cmds/stagefright/record.cpp
index 5743ad6..87e8832 100644
--- a/cmds/stagefright/record.cpp
+++ b/cmds/stagefright/record.cpp
@@ -89,6 +89,9 @@
if (err != OK) {
return err;
}
+ if (buffer == nullptr) {
+ return AMEDIA_ERROR_UNKNOWN;
+ }
char x = (char)((double)rand() / RAND_MAX * 255);
memset((*buffer)->data(), x, mSize);
diff --git a/drm/README.md b/drm/README.md
new file mode 100644
index 0000000..2681aac
--- /dev/null
+++ b/drm/README.md
@@ -0,0 +1,13 @@
+## AIDL error handling
+
+Starting in **Android U (14)**, `libmediadrm` (the app-side library) parses extra
+error details from **AIDL** DRM HALs, passed through the binder exception message
+as a JSON string. The supported fields are:
+* `cdmError` (*int*)
+* `oemError` (*int*)
+* `context` (*int*)
+* `errorMessage` (*string*)
+
+The error details are reported to apps through the Java interface
+`android.media.MediaDrmThrowable`; see its javadoc for the detailed
+definition of each field above.
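For illustration only — a minimal sketch of how a HAL-side plugin might package these fields into the service-specific exception message, mirroring the jsoncpp-based `toNdkScopedAStatus` helper added to the clearkey plugin later in this change; the `makeDrmError` name and parameters here are illustrative, not part of the commit.

```cpp
#include <json/json.h>                    // jsoncpp, already a dependency of the clearkey plugin
#include <android/binder_auto_utils.h>    // ndk::ScopedAStatus

// Sketch: pack DRM error details into the binder exception message so that
// libmediadrm (DrmStatus) can recover them on the app side.
static ndk::ScopedAStatus makeDrmError(int32_t cdmError, int32_t oemError,
                                       int32_t context, const char* msg) {
    Json::Value errObj(Json::objectValue);
    errObj["cdmError"] = cdmError;        // the service-specific error code itself
    if (oemError) errObj["oemError"] = oemError;
    if (context) errObj["context"] = context;
    if (msg) errObj["errorMessage"] = msg;
    Json::FastWriter writer;              // same writer used by the clearkey helper
    return ndk::ScopedAStatus::fromServiceSpecificErrorWithMessage(
            cdmError, writer.write(errObj).c_str());
}
```

On the app side, `DrmStatus` parses this JSON and, as the `DrmStatus.cpp` hunk below shows, only accepts values of the expected type (int for the error codes, string for the message).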
diff --git a/drm/libmediadrm/DrmHalHidl.cpp b/drm/libmediadrm/DrmHalHidl.cpp
index 7d045ac..6010739 100644
--- a/drm/libmediadrm/DrmHalHidl.cpp
+++ b/drm/libmediadrm/DrmHalHidl.cpp
@@ -514,10 +514,14 @@
if (mimeType == "") {
// isCryptoSchemeSupported(uuid)
*isSupported = true;
- } else {
- // isCryptoSchemeSupported(uuid, mimeType)
- *isSupported = factory->isContentTypeSupported(mimeType.string());
+ return DrmStatus(OK);
}
+ // isCryptoSchemeSupported(uuid, mimeType)
+ auto hResult = factory->isContentTypeSupported(mimeType.string());
+ if (!hResult.isOk()) {
+ return DrmStatus(DEAD_OBJECT);
+ }
+ *isSupported = hResult;
return DrmStatus(OK);
} else if (mimeType == "") {
return DrmStatus(BAD_VALUE);
@@ -527,8 +531,12 @@
if (factoryV1_2 == NULL) {
return DrmStatus(ERROR_UNSUPPORTED);
} else {
- *isSupported = factoryV1_2->isCryptoSchemeSupported_1_2(uuid, mimeType.string(),
+ auto hResult = factoryV1_2->isCryptoSchemeSupported_1_2(uuid, mimeType.string(),
toHidlSecurityLevel(level));
+ if (!hResult.isOk()) {
+ return DrmStatus(DEAD_OBJECT);
+ }
+ *isSupported = hResult;
return DrmStatus(OK);
}
}
@@ -538,7 +546,8 @@
Mutex::Autolock autoLock(mLock);
*isSupported = false;
for (ssize_t i = mFactories.size() - 1; i >= 0; i--) {
- if (mFactories[i]->isCryptoSchemeSupported(uuid)) {
+ auto hResult = mFactories[i]->isCryptoSchemeSupported(uuid);
+ if (hResult.isOk() && hResult) {
return matchMimeTypeAndSecurityLevel(mFactories[i], uuid, mimeType, level, isSupported);
}
}
diff --git a/drm/libmediadrm/DrmMetricsLogger.cpp b/drm/libmediadrm/DrmMetricsLogger.cpp
index bcdfadf..f7653ff 100644
--- a/drm/libmediadrm/DrmMetricsLogger.cpp
+++ b/drm/libmediadrm/DrmMetricsLogger.cpp
@@ -18,6 +18,8 @@
#define LOG_TAG "DrmMetricsLogger"
#include <media/MediaMetrics.h>
+#include <media/stagefright/foundation/AString.h>
+#include <media/stagefright/foundation/base64.h>
#include <mediadrm/DrmHal.h>
#include <mediadrm/DrmMetricsLogger.h>
#include <mediadrm/DrmUtils.h>
@@ -34,7 +36,7 @@
} // namespace
DrmMetricsLogger::DrmMetricsLogger(IDrmFrontend frontend)
- : mImpl(sp<DrmHal>::make()), mUuid(), mObjNonceMsb(0), mObjNonceLsb(0), mFrontend(frontend) {}
+ : mImpl(sp<DrmHal>::make()), mUuid(), mObjNonce(), mFrontend(frontend) {}
DrmMetricsLogger::~DrmMetricsLogger() {}
@@ -57,18 +59,22 @@
}
DrmStatus DrmMetricsLogger::createPlugin(const uint8_t uuid[16], const String8& appPackageName) {
- std::memcpy(mUuid, uuid, sizeof(mUuid));
- if (checkGetRandom(&mObjNonceMsb, __func__) == OK &&
- checkGetRandom(&mObjNonceLsb, __func__) == OK) {
- DrmStatus status = mImpl->createPlugin(uuid, appPackageName);
- if (status == OK) {
- reportMediaDrmCreated();
- } else {
- reportMediaDrmErrored(status, __func__);
- }
- return status;
+ std::memcpy(mUuid.data(), uuid, mUuid.size());
+ if (kUuidSchemeMap.count(mUuid)) {
+ mScheme = kUuidSchemeMap.at(mUuid);
+ } else {
+ mScheme = "Other";
}
- return ERROR_DRM_RESOURCE_BUSY;
+ if (generateNonce(&mObjNonce, kNonceSize, __func__) != OK) {
+ return ERROR_DRM_RESOURCE_BUSY;
+ }
+ DrmStatus status = mImpl->createPlugin(uuid, appPackageName);
+ if (status == OK) {
+ reportMediaDrmCreated();
+ } else {
+ reportMediaDrmErrored(status, __func__);
+ }
+ return status;
}
DrmStatus DrmMetricsLogger::destroyPlugin() {
@@ -82,26 +88,25 @@
DrmStatus DrmMetricsLogger::openSession(DrmPlugin::SecurityLevel securityLevel,
Vector<uint8_t>& sessionId) {
SessionContext ctx{};
- if (checkGetRandom(&ctx.mNonceMsb, __func__) == OK &&
- checkGetRandom(&ctx.mNonceLsb, __func__) == OK) {
- DrmStatus status = mImpl->openSession(securityLevel, sessionId);
- if (status == OK) {
- std::vector<uint8_t> sessionKey = toStdVec(sessionId);
- ctx.mTargetSecurityLevel = securityLevel;
- if (getSecurityLevel(sessionId, &ctx.mActualSecurityLevel) != OK) {
- ctx.mActualSecurityLevel = DrmPlugin::kSecurityLevelUnknown;
- }
- {
- const std::lock_guard<std::mutex> lock(mSessionMapMutex);
- mSessionMap.insert({sessionKey, ctx});
- }
- reportMediaDrmSessionOpened(sessionKey);
- } else {
- reportMediaDrmErrored(status, __func__);
- }
- return status;
+ if (generateNonce(&ctx.mNonce, kNonceSize, __func__) != OK) {
+ return ERROR_DRM_RESOURCE_BUSY;
}
- return ERROR_DRM_RESOURCE_BUSY;
+ DrmStatus status = mImpl->openSession(securityLevel, sessionId);
+ if (status == OK) {
+ std::vector<uint8_t> sessionKey = toStdVec(sessionId);
+ ctx.mTargetSecurityLevel = securityLevel;
+ if (getSecurityLevel(sessionId, &ctx.mActualSecurityLevel) != OK) {
+ ctx.mActualSecurityLevel = DrmPlugin::kSecurityLevelUnknown;
+ }
+ {
+ const std::lock_guard<std::mutex> lock(mSessionMapMutex);
+ mSessionMap.insert({sessionKey, ctx});
+ }
+ reportMediaDrmSessionOpened(sessionKey);
+ } else {
+ reportMediaDrmErrored(status, __func__);
+ }
+ return status;
}
DrmStatus DrmMetricsLogger::closeSession(Vector<uint8_t> const& sessionId) {
@@ -419,7 +424,7 @@
bool* required) const {
DrmStatus status = mImpl->requiresSecureDecoder(mime, securityLevel, required);
if (status != OK) {
- reportMediaDrmErrored(status, __func__);
+ reportMediaDrmErrored(status, "requiresSecureDecoderLevel");
}
return status;
}
@@ -451,60 +456,104 @@
void DrmMetricsLogger::reportMediaDrmCreated() const {
mediametrics_handle_t handle(mediametrics_create("mediadrm.created"));
+ mediametrics_setCString(handle, "scheme", mScheme.c_str());
mediametrics_setInt64(handle, "uuid_msb", be64toh(mUuid[0]));
mediametrics_setInt64(handle, "uuid_lsb", be64toh(mUuid[1]));
mediametrics_setInt32(handle, "frontend", mFrontend);
+ mediametrics_setCString(handle, "object_nonce", mObjNonce.c_str());
mediametrics_selfRecord(handle);
mediametrics_delete(handle);
}
-void DrmMetricsLogger::reportMediaDrmSessionOpened(std::vector<uint8_t> sessionId) const {
+void DrmMetricsLogger::reportMediaDrmSessionOpened(const std::vector<uint8_t>& sessionId) const {
mediametrics_handle_t handle(mediametrics_create("mediadrm.session_opened"));
- mediametrics_setInt64(handle, "obj_nonce_msb", mObjNonceMsb);
- mediametrics_setInt64(handle, "obj_nonce_lsb", mObjNonceLsb);
+ mediametrics_setCString(handle, "scheme", mScheme.c_str());
+ mediametrics_setInt64(handle, "uuid_msb", be64toh(mUuid[0]));
+ mediametrics_setInt64(handle, "uuid_lsb", be64toh(mUuid[1]));
+ mediametrics_setInt32(handle, "frontend", mFrontend);
+ mediametrics_setCString(handle, "object_nonce", mObjNonce.c_str());
const std::lock_guard<std::mutex> lock(mSessionMapMutex);
auto it = mSessionMap.find(sessionId);
if (it != mSessionMap.end()) {
- mediametrics_setInt64(handle, "session_nonce_msb", it->second.mNonceMsb);
- mediametrics_setInt64(handle, "session_nonce_lsb", it->second.mNonceLsb);
- mediametrics_setInt64(handle, "target_seucrity_level", it->second.mTargetSecurityLevel);
- mediametrics_setInt64(handle, "actual_seucrity_level", it->second.mActualSecurityLevel);
+ mediametrics_setCString(handle, "session_nonce", it->second.mNonce.c_str());
+        mediametrics_setInt64(handle, "requested_security_level", it->second.mTargetSecurityLevel);
+        mediametrics_setInt64(handle, "opened_security_level", it->second.mActualSecurityLevel);
}
- mediametrics_setInt32(handle, "frontend", mFrontend);
mediametrics_selfRecord(handle);
mediametrics_delete(handle);
}
-void DrmMetricsLogger::reportMediaDrmErrored(DrmStatus error_code, const char* api,
- std::vector<uint8_t> sessionId) const {
+void DrmMetricsLogger::reportMediaDrmErrored(const DrmStatus& error_code, const char* api,
+ const std::vector<uint8_t>& sessionId) const {
mediametrics_handle_t handle(mediametrics_create("mediadrm.errored"));
- mediametrics_setInt64(handle, "obj_nonce_msb", mObjNonceMsb);
- mediametrics_setInt64(handle, "obj_nonce_lsb", mObjNonceLsb);
+ mediametrics_setCString(handle, "scheme", mScheme.c_str());
+ mediametrics_setInt64(handle, "uuid_msb", be64toh(mUuid[0]));
+ mediametrics_setInt64(handle, "uuid_lsb", be64toh(mUuid[1]));
+ mediametrics_setInt32(handle, "frontend", mFrontend);
+ mediametrics_setCString(handle, "object_nonce", mObjNonce.c_str());
if (!sessionId.empty()) {
const std::lock_guard<std::mutex> lock(mSessionMapMutex);
auto it = mSessionMap.find(sessionId);
if (it != mSessionMap.end()) {
- mediametrics_setInt64(handle, "session_nonce_msb", it->second.mNonceMsb);
- mediametrics_setInt64(handle, "session_nonce_lsb", it->second.mNonceLsb);
+ mediametrics_setCString(handle, "session_nonce", it->second.mNonce.c_str());
+            mediametrics_setInt64(handle, "security_level", it->second.mActualSecurityLevel);
}
}
- mediametrics_setInt64(handle, "uuid_msb", be64toh(mUuid[0]));
- mediametrics_setInt64(handle, "uuid_lsb", be64toh(mUuid[1]));
- mediametrics_setInt32(handle, "error_code", error_code);
mediametrics_setCString(handle, "api", api);
- mediametrics_setInt32(handle, "frontend", mFrontend);
+ mediametrics_setInt32(handle, "error_code", error_code);
+ mediametrics_setInt32(handle, "cdm_err", error_code.getCdmErr());
+ mediametrics_setInt32(handle, "oem_err", error_code.getOemErr());
+ mediametrics_setInt32(handle, "error_context", error_code.getContext());
mediametrics_selfRecord(handle);
mediametrics_delete(handle);
}
-DrmStatus DrmMetricsLogger::checkGetRandom(int64_t* nonce, const char* api) {
- ssize_t bytes = getrandom(nonce, sizeof(int64_t), GRND_NONBLOCK);
- if (bytes < sizeof(int64_t)) {
+DrmStatus DrmMetricsLogger::generateNonce(std::string* out, size_t size, const char* api) {
+ std::vector<uint8_t> buf(size);
+ ssize_t bytes = getrandom(buf.data(), size, GRND_NONBLOCK);
+    if (bytes < static_cast<ssize_t>(size)) {
ALOGE("getrandom failed: %d", errno);
reportMediaDrmErrored(ERROR_DRM_RESOURCE_BUSY, api);
return ERROR_DRM_RESOURCE_BUSY;
}
+ android::AString tmp;
+ encodeBase64(buf.data(), size, &tmp);
+ out->assign(tmp.c_str());
return OK;
}
+const std::map<std::array<int64_t, 2>, std::string> DrmMetricsLogger::kUuidSchemeMap {
+ {{(int64_t)0x6DD8B3C345F44A68, (int64_t)0xBF3A64168D01A4A6}, "ABV DRM (MoDRM)"},
+ {{(int64_t)0xF239E769EFA34850, (int64_t)0x9C16A903C6932EFB},
+ "Adobe Primetime DRM version 4"},
+ {{(int64_t)0x616C746963617374, (int64_t)0x2D50726F74656374}, "Alticast"},
+ {{(int64_t)0x94CE86FB07FF4F43, (int64_t)0xADB893D2FA968CA2}, "Apple FairPlay"},
+ {{(int64_t)0x279FE473512C48FE, (int64_t)0xADE8D176FEE6B40F}, "Arris Titanium"},
+ {{(int64_t)0x3D5E6D359B9A41E8, (int64_t)0xB843DD3C6E72C42C}, "ChinaDRM"},
+ {{(int64_t)0x3EA8778F77424BF9, (int64_t)0xB18BE834B2ACBD47}, "Clear Key AES-128"},
+ {{(int64_t)0xBE58615B19C44684, (int64_t)0x88B3C8C57E99E957}, "Clear Key SAMPLE-AES"},
+ {{(int64_t)0xE2719D58A985B3C9, (int64_t)0x781AB030AF78D30E}, "Clear Key DASH-IF"},
+ {{(int64_t)0x644FE7B5260F4FAD, (int64_t)0x949A0762FFB054B4}, "CMLA (OMA DRM)"},
+ {{(int64_t)0x37C332587B994C7E, (int64_t)0xB15D19AF74482154}, "Commscope Titanium V3"},
+ {{(int64_t)0x45D481CB8FE049C0, (int64_t)0xADA9AB2D2455B2F2}, "CoreCrypt"},
+ {{(int64_t)0xDCF4E3E362F15818, (int64_t)0x7BA60A6FE33FF3DD}, "DigiCAP SmartXess"},
+ {{(int64_t)0x35BF197B530E42D7, (int64_t)0x8B651B4BF415070F}, "DivX DRM Series 5"},
+ {{(int64_t)0x80A6BE7E14484C37, (int64_t)0x9E70D5AEBE04C8D2}, "Irdeto Content Protection"},
+ {{(int64_t)0x5E629AF538DA4063, (int64_t)0x897797FFBD9902D4},
+ "Marlin Adaptive Streaming Simple Profile V1.0"},
+ {{(int64_t)0x9A04F07998404286, (int64_t)0xAB92E65BE0885F95}, "Microsoft PlayReady"},
+ {{(int64_t)0x6A99532D869F5922, (int64_t)0x9A91113AB7B1E2F3}, "MobiTV DRM"},
+ {{(int64_t)0xADB41C242DBF4A6D, (int64_t)0x958B4457C0D27B95}, "Nagra MediaAccess PRM 3.0"},
+ {{(int64_t)0x1F83E1E86EE94F0D, (int64_t)0xBA2F5EC4E3ED1A66}, "SecureMedia"},
+ {{(int64_t)0x992C46E6C4374899, (int64_t)0xB6A050FA91AD0E39}, "SecureMedia SteelKnot"},
+ {{(int64_t)0xA68129D3575B4F1A, (int64_t)0x9CBA3223846CF7C3},
+ "Synamedia/Cisco/NDS VideoGuard DRM"},
+ {{(int64_t)0xAA11967FCC014A4A, (int64_t)0x8E99C5D3DDDFEA2D}, "Unitend DRM (UDRM)"},
+ {{(int64_t)0x9A27DD82FDE24725, (int64_t)0x8CBC4234AA06EC09}, "Verimatrix VCAS"},
+ {{(int64_t)0xB4413586C58CFFB0, (int64_t)0x94A5D4896C1AF6C3}, "Viaccess-Orca DRM (VODRM)"},
+ {{(int64_t)0x793B79569F944946, (int64_t)0xA94223E7EF7E44B4}, "VisionCrypt"},
+ {{(int64_t)0x1077EFECC0B24D02, (int64_t)0xACE33C1E52E2FB4B}, "W3C Common PSSH box"},
+ {{(int64_t)0xEDEF8BA979D64ACE, (int64_t)0xA3C827DCD51D21ED}, "Widevine Content Protection"},
+};
+
} // namespace android
\ No newline at end of file
diff --git a/drm/libmediadrm/DrmStatus.cpp b/drm/libmediadrm/DrmStatus.cpp
index 0258801..f622160 100644
--- a/drm/libmediadrm/DrmStatus.cpp
+++ b/drm/libmediadrm/DrmStatus.cpp
@@ -27,21 +27,20 @@
return;
}
- std::string errMsg;
auto val = errorDetails["cdmError"];
- if (!val.isNull()) {
+ if (val.isInt()) {
mCdmErr = val.asInt();
}
val = errorDetails["oemError"];
- if (!val.isNull()) {
+ if (val.isInt()) {
mOemErr = val.asInt();
}
val = errorDetails["context"];
- if (!val.isNull()) {
+ if (val.isInt()) {
mCtx = val.asInt();
}
val = errorDetails["errorMessage"];
- if (!val.isNull()) {
+ if (val.isString()) {
mErrMsg = val.asString();
} else {
mErrMsg = msg;
diff --git a/drm/libmediadrm/include/mediadrm/DrmMetricsLogger.h b/drm/libmediadrm/include/mediadrm/DrmMetricsLogger.h
index 638fb35..d2e9a56 100644
--- a/drm/libmediadrm/include/mediadrm/DrmMetricsLogger.h
+++ b/drm/libmediadrm/include/mediadrm/DrmMetricsLogger.h
@@ -26,8 +26,7 @@
namespace android {
struct SessionContext {
- int64_t mNonceMsb;
- int64_t mNonceLsb;
+ std::string mNonce;
int64_t mTargetSecurityLevel;
DrmPlugin::SecurityLevel mActualSecurityLevel;
};
@@ -144,17 +143,21 @@
void reportMediaDrmCreated() const;
- void reportMediaDrmSessionOpened(std::vector<uint8_t> sessionId) const;
+ void reportMediaDrmSessionOpened(const std::vector<uint8_t>& sessionId) const;
- void reportMediaDrmErrored(DrmStatus error_code, const char* api,
- std::vector<uint8_t> sessionId = std::vector<uint8_t>()) const;
+ void reportMediaDrmErrored(
+ const DrmStatus& error_code, const char* api,
+ const std::vector<uint8_t>& sessionId = std::vector<uint8_t>()) const;
- DrmStatus checkGetRandom(int64_t* nonce, const char* api);
+ DrmStatus generateNonce(std::string* out, size_t size, const char* api);
private:
+ static const size_t kNonceSize = 16;
+ static const std::map<std::array<int64_t, 2>, std::string> kUuidSchemeMap;
sp<IDrm> mImpl;
- int64_t mUuid[2] = {};
- int64_t mObjNonceMsb, mObjNonceLsb;
+ std::array<int64_t, 2> mUuid;
+ std::string mObjNonce;
+ std::string mScheme;
std::map<std::vector<uint8_t>, SessionContext> mSessionMap;
mutable std::mutex mSessionMapMutex;
IDrmFrontend mFrontend;
diff --git a/drm/mediadrm/plugins/clearkey/aidl/Android.bp b/drm/mediadrm/plugins/clearkey/aidl/Android.bp
index 2d1f741..eaf5051 100644
--- a/drm/mediadrm/plugins/clearkey/aidl/Android.bp
+++ b/drm/mediadrm/plugins/clearkey/aidl/Android.bp
@@ -37,6 +37,7 @@
static_libs: [
"android.hardware.common-V2-ndk",
"libclearkeybase",
+ "libjsoncpp",
],
local_include_dirs: ["include"],
@@ -69,3 +70,56 @@
"android.hardware.drm-service.clearkey",
],
}
+
+cc_defaults {
+ name: "fuzz_aidl_clearkey_service_defaults",
+
+ srcs: [
+ "CreatePluginFactories.cpp",
+ "CryptoPlugin.cpp",
+ "DrmFactory.cpp",
+ "DrmPlugin.cpp",
+ ],
+
+ relative_install_path: "hw",
+
+ cflags: ["-Wall", "-Werror", "-Wthread-safety"],
+
+ include_dirs: ["frameworks/av/include"],
+
+ shared_libs: [
+ "libbase",
+ "libbinder_ndk",
+ "libcrypto",
+ "liblog",
+ "libprotobuf-cpp-lite",
+ "libutils",
+ "android.hardware.drm-V1-ndk",
+ ],
+
+ static_libs: [
+ "android.hardware.common-V2-ndk",
+ "libclearkeybase_fuzz",
+ "libjsoncpp",
+ ],
+
+ local_include_dirs: ["include"],
+
+ sanitize: {
+ integer_overflow: true,
+ },
+}
+
+cc_fuzz {
+ name: "android.hardware.drm-service.clearkey.aidl_fuzzer",
+ defaults: [
+ "fuzz_aidl_clearkey_service_defaults",
+ "service_fuzzer_defaults",
+ ],
+ srcs: ["fuzzer.cpp"],
+ fuzz_config: {
+ cc: [
+ "hamzeh@google.com",
+ ],
+ },
+}
diff --git a/drm/mediadrm/plugins/clearkey/aidl/DrmPlugin.cpp b/drm/mediadrm/plugins/clearkey/aidl/DrmPlugin.cpp
index ea51e9d..e8dec80 100644
--- a/drm/mediadrm/plugins/clearkey/aidl/DrmPlugin.cpp
+++ b/drm/mediadrm/plugins/clearkey/aidl/DrmPlugin.cpp
@@ -16,6 +16,7 @@
#define LOG_TAG "clearkey-DrmPlugin"
#include <aidl/android/hardware/drm/DrmMetric.h>
+#include <android-base/parseint.h>
#include <utils/Log.h>
#include <inttypes.h>
@@ -83,12 +84,14 @@
void DrmPlugin::initProperties() {
mStringProperties.clear();
mStringProperties[kVendorKey] = kAidlVendorValue;
- mStringProperties[kVersionKey] = kAidlVersionValue;
+ mStringProperties[kVersionKey] = kVersionValue;
mStringProperties[kPluginDescriptionKey] = kAidlPluginDescriptionValue;
mStringProperties[kAlgorithmsKey] = kAidlAlgorithmsValue;
mStringProperties[kListenerTestSupportKey] = kAidlListenerTestSupportValue;
mStringProperties[kDrmErrorTestKey] = kAidlDrmErrorTestValue;
mStringProperties[kAidlVersionKey] = kAidlVersionValue;
+ mStringProperties[kOemErrorKey] = "0";
+ mStringProperties[kErrorContextKey] = "0";
std::vector<uint8_t> valueVector;
valueVector.clear();
@@ -102,6 +105,26 @@
mByteArrayProperties[kMetricsKey] = valueVector;
}
+int32_t DrmPlugin::getIntProperty(const std::string& prop, int32_t defaultVal) const {
+ if (!mStringProperties.count(prop)) {
+ return defaultVal;
+ }
+ int32_t out = defaultVal;
+ if (!::android::base::ParseInt(mStringProperties.at(prop), &out)) {
+ return defaultVal;
+ }
+ return out;
+}
+
+int32_t DrmPlugin::getOemError() const {
+ return getIntProperty(kOemErrorKey);
+}
+
+int32_t DrmPlugin::getErrorContext() const {
+ return getIntProperty(kErrorContextKey);
+}
+
+//
// The secure stop in ClearKey implementation is not installed securely.
// This function merely creates a test environment for testing secure stops APIs.
// The content in this secure stop is implementation dependent, the clearkey
@@ -127,7 +150,10 @@
mSessionLibrary->destroySession(session);
if (session->getMockError() != clearkeydrm::OK) {
sendSessionLostState(in_sessionId);
- return toNdkScopedAStatus(Status::ERROR_DRM_INVALID_STATE);
+ return toNdkScopedAStatus(Status::ERROR_DRM_INVALID_STATE,
+ nullptr,
+ getOemError(),
+ getErrorContext());
}
mCloseSessionOkCount++;
return toNdkScopedAStatus(Status::OK);
@@ -198,7 +224,8 @@
if (!session.get()) {
return toNdkScopedAStatus(Status::ERROR_DRM_SESSION_NOT_OPENED);
} else if (session->getMockError() != clearkeydrm::OK) {
- return toNdkScopedAStatus(session->getMockError());
+ auto err = static_cast<Status>(session->getMockError());
+ return toNdkScopedAStatus(err, nullptr, getOemError(), getErrorContext());
}
keyRequestType = KeyRequestType::INITIAL;
}
@@ -381,6 +408,10 @@
value = mStringProperties[kDrmErrorTestKey];
} else if (name == kAidlVersionKey) {
value = mStringProperties[kAidlVersionKey];
+ } else if (name == kOemErrorKey) {
+ value = mStringProperties[kOemErrorKey];
+ } else if (name == kErrorContextKey) {
+ value = mStringProperties[kErrorContextKey];
} else {
ALOGE("App requested unknown string property %s", name.c_str());
status = Status::ERROR_DRM_CANNOT_HANDLE;
@@ -920,6 +951,13 @@
}
}
+ if (in_propertyName == kOemErrorKey || in_propertyName == kErrorContextKey) {
+ int32_t err = 0;
+ if (!::android::base::ParseInt(in_value, &err)) {
+ return toNdkScopedAStatus(Status::BAD_VALUE);
+ }
+ }
+
mStringProperties[key] = std::string(in_value.c_str());
return toNdkScopedAStatus(Status::OK);
}
diff --git a/drm/mediadrm/plugins/clearkey/aidl/fuzzer.cpp b/drm/mediadrm/plugins/clearkey/aidl/fuzzer.cpp
new file mode 100644
index 0000000..9ef331f
--- /dev/null
+++ b/drm/mediadrm/plugins/clearkey/aidl/fuzzer.cpp
@@ -0,0 +1,35 @@
+/*
+ * Copyright (C) 2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+#include <fuzzbinder/libbinder_ndk_driver.h>
+#include <fuzzer/FuzzedDataProvider.h>
+
+#include "CreatePluginFactories.h"
+
+#include <android/binder_manager.h>
+#include <android/binder_process.h>
+
+using ::aidl::android::hardware::drm::clearkey::createDrmFactory;
+using ::aidl::android::hardware::drm::clearkey::DrmFactory;
+
+using android::fuzzService;
+using ndk::SharedRefBase;
+
+extern "C" int LLVMFuzzerTestOneInput(const uint8_t* data, size_t size) {
+ std::shared_ptr<DrmFactory> drmFactory = createDrmFactory();
+ fuzzService(drmFactory->asBinder().get(), FuzzedDataProvider(data, size));
+
+ return 0;
+}
diff --git a/drm/mediadrm/plugins/clearkey/aidl/include/AidlUtils.h b/drm/mediadrm/plugins/clearkey/aidl/include/AidlUtils.h
index 9257b17..0db3c37 100644
--- a/drm/mediadrm/plugins/clearkey/aidl/include/AidlUtils.h
+++ b/drm/mediadrm/plugins/clearkey/aidl/include/AidlUtils.h
@@ -15,9 +15,12 @@
*/
#pragma once
+#include <cstdint>
#include <string>
#include <vector>
+#include <json/json.h>
+
#include <android/binder_auto_utils.h>
#include "aidl/android/hardware/drm/Status.h"
#include "ClearKeyTypes.h"
@@ -41,17 +44,32 @@
}
inline ::ndk::ScopedAStatus toNdkScopedAStatus(::aidl::android::hardware::drm::Status status,
- const char* msg = nullptr) {
+ const char* msg = nullptr,
+ int32_t oemError = 0,
+ int32_t errorContext = 0) {
+
+
if (Status::OK == status) {
return ::ndk::ScopedAStatus::ok();
- } else {
- auto err = static_cast<int32_t>(status);
- if (msg) {
- return ::ndk::ScopedAStatus::fromServiceSpecificErrorWithMessage(err, msg);
- } else {
- return ::ndk::ScopedAStatus::fromServiceSpecificError(err);
- }
}
+
+ Json::Value errObj(Json::objectValue);
+ auto err = static_cast<int32_t>(status);
+ errObj["cdmError"] = err;
+
+ if (oemError) {
+ errObj["oemError"] = oemError;
+ }
+ if (errorContext) {
+ errObj["context"] = errorContext;
+ }
+ if (msg) {
+ errObj["errorMessage"] = msg;
+ }
+
+ Json::FastWriter writer;
+ return ::ndk::ScopedAStatus::fromServiceSpecificErrorWithMessage(
+ err, writer.write(errObj).c_str());
}
inline ::ndk::ScopedAStatus toNdkScopedAStatus(clearkeydrm::CdmResponseType res) {
diff --git a/drm/mediadrm/plugins/clearkey/aidl/include/DrmPlugin.h b/drm/mediadrm/plugins/clearkey/aidl/include/DrmPlugin.h
index 25c05f0..ea85ac8 100644
--- a/drm/mediadrm/plugins/clearkey/aidl/include/DrmPlugin.h
+++ b/drm/mediadrm/plugins/clearkey/aidl/include/DrmPlugin.h
@@ -148,6 +148,9 @@
private:
void initProperties();
+ int32_t getIntProperty(const std::string& prop, int32_t defaultVal = 0) const;
+ int32_t getOemError() const;
+ int32_t getErrorContext() const;
void installSecureStop(const std::vector<uint8_t>& sessionId);
bool makeKeySetId(std::string* keySetId);
void setPlayPolicy();
diff --git a/drm/mediadrm/plugins/clearkey/common/Android.bp b/drm/mediadrm/plugins/clearkey/common/Android.bp
index a6a5b28..6913df4 100644
--- a/drm/mediadrm/plugins/clearkey/common/Android.bp
+++ b/drm/mediadrm/plugins/clearkey/common/Android.bp
@@ -97,3 +97,54 @@
integer_overflow: true,
},
}
+
+cc_library_static {
+ name: "libclearkeydevicefiles-protos.common_fuzz",
+
+ proto: {
+ export_proto_headers: true,
+ type: "lite",
+ },
+ srcs: ["protos/DeviceFiles.proto"],
+}
+
+cc_library_static {
+ name: "libclearkeybase_fuzz",
+
+ srcs: [
+ "AesCtrDecryptor.cpp",
+ "Base64.cpp",
+ "Buffer.cpp",
+ "ClearKeyUUID.cpp",
+ "DeviceFiles.cpp",
+ "InitDataParser.cpp",
+ "JsonWebKey.cpp",
+ "MemoryFileSystem.cpp",
+ "Session.cpp",
+ "SessionLibrary.cpp",
+ "Utils.cpp",
+ ],
+
+ cflags: ["-Wall", "-Werror"],
+
+ include_dirs: ["frameworks/av/include"],
+
+ shared_libs: [
+ "libutils",
+ "libcrypto",
+ ],
+
+ whole_static_libs: [
+ "libjsmn",
+ "libclearkeydevicefiles-protos.common_fuzz",
+ ],
+
+ export_include_dirs: [
+ "include",
+ "include/clearkeydrm",
+ ],
+
+ sanitize: {
+ integer_overflow: true,
+ },
+}
diff --git a/drm/mediadrm/plugins/clearkey/common/include/clearkeydrm/ClearKeyDrmProperties.h b/drm/mediadrm/plugins/clearkey/common/include/clearkeydrm/ClearKeyDrmProperties.h
index bfda388..d4e641e 100644
--- a/drm/mediadrm/plugins/clearkey/common/include/clearkeydrm/ClearKeyDrmProperties.h
+++ b/drm/mediadrm/plugins/clearkey/common/include/clearkeydrm/ClearKeyDrmProperties.h
@@ -21,7 +21,7 @@
static const std::string kVendorKey("vendor");
static const std::string kVendorValue("Google");
static const std::string kVersionKey("version");
-static const std::string kVersionValue("1.2");
+static const std::string kVersionValue("14"); // sync with Android OS version
static const std::string kPluginDescriptionKey("description");
static const std::string kPluginDescriptionValue("ClearKey CDM");
static const std::string kAlgorithmsKey("algorithms");
@@ -35,6 +35,8 @@
static const std::string kFrameTooLargeValue("frameTooLarge");
static const std::string kInvalidStateValue("invalidState");
static const std::string kAidlVersionKey("aidlVersion");
+static const std::string kOemErrorKey("oemError");
+static const std::string kErrorContextKey("errorContext");
static const std::string kDeviceIdKey("deviceId");
static const uint8_t kTestDeviceIdData[] = {0x0, 0x1, 0x2, 0x3, 0x4, 0x5, 0x6, 0x7,
diff --git a/include/media/MicrophoneInfo.h b/include/media/MicrophoneInfo.h
index a5045b9..6d6c594 100644
--- a/include/media/MicrophoneInfo.h
+++ b/include/media/MicrophoneInfo.h
@@ -70,6 +70,9 @@
}
virtual status_t writeToParcelable(MicrophoneInfoData* parcelable) const {
+#if defined(BACKEND_NDK)
+ using ::aidl::android::convertReinterpret;
+#endif
parcelable->deviceId = mDeviceId;
parcelable->portId = mPortId;
parcelable->type = VALUE_OR_RETURN_STATUS(convertReinterpret<int32_t>(mType));
@@ -98,6 +101,9 @@
}
virtual status_t readFromParcelable(const MicrophoneInfoData& parcelable) {
+#if defined(BACKEND_NDK)
+ using ::aidl::android::convertReinterpret;
+#endif
mDeviceId = parcelable.deviceId;
mPortId = parcelable.portId;
mType = VALUE_OR_RETURN_STATUS(convertReinterpret<uint32_t>(parcelable.type));
@@ -208,6 +214,10 @@
int32_t mDirectionality;
};
+#if defined(BACKEND_NDK)
+using ::aidl::ConversionResult;
+#endif
+
// Conversion routines, according to AidlConversion.h conventions.
inline ConversionResult<MicrophoneInfo>
aidl2legacy_MicrophoneInfo(const media::MicrophoneInfoData& aidl) {
diff --git a/media/audioaidlconversion/AidlConversionCppNdk.cpp b/media/audioaidlconversion/AidlConversionCppNdk.cpp
index 37887f6..8704314 100644
--- a/media/audioaidlconversion/AidlConversionCppNdk.cpp
+++ b/media/audioaidlconversion/AidlConversionCppNdk.cpp
@@ -69,6 +69,8 @@
using media::audio::common::AudioOffloadInfo;
using media::audio::common::AudioOutputFlags;
using media::audio::common::AudioPlaybackRate;
+using media::audio::common::AudioPort;
+using media::audio::common::AudioPortConfig;
using media::audio::common::AudioPortDeviceExt;
using media::audio::common::AudioPortExt;
using media::audio::common::AudioPortMixExt;
@@ -477,6 +479,15 @@
{
AUDIO_DEVICE_IN_ECHO_REFERENCE, make_AudioDeviceDescription(
AudioDeviceType::IN_ECHO_REFERENCE)
+ },
+ {
+ AUDIO_DEVICE_IN_REMOTE_SUBMIX, make_AudioDeviceDescription(
+ AudioDeviceType::IN_SUBMIX)
+ },
+ {
+ AUDIO_DEVICE_OUT_REMOTE_SUBMIX, make_AudioDeviceDescription(
+ AudioDeviceType::OUT_SUBMIX,
+ GET_DEVICE_DESC_CONNECTION(VIRTUAL))
}
}};
append_AudioDeviceDescription(pairs,
@@ -495,9 +506,6 @@
AudioDeviceType::IN_DEVICE, AudioDeviceType::OUT_DEVICE,
GET_DEVICE_DESC_CONNECTION(HDMI));
append_AudioDeviceDescription(pairs,
- AUDIO_DEVICE_IN_REMOTE_SUBMIX, AUDIO_DEVICE_OUT_REMOTE_SUBMIX,
- AudioDeviceType::IN_SUBMIX, AudioDeviceType::OUT_SUBMIX);
- append_AudioDeviceDescription(pairs,
AUDIO_DEVICE_IN_ANLG_DOCK_HEADSET, AUDIO_DEVICE_OUT_ANLG_DOCK_HEADSET,
AudioDeviceType::IN_DOCK, AudioDeviceType::OUT_DOCK,
GET_DEVICE_DESC_CONNECTION(ANALOG));
@@ -1760,6 +1768,60 @@
return aidl;
}
+ConversionResult<AudioPortDirection> portDirection(audio_port_role_t role, audio_port_type_t type) {
+ switch (type) {
+ case AUDIO_PORT_TYPE_NONE:
+ case AUDIO_PORT_TYPE_SESSION:
+ break; // must be listed -Werror,-Wswitch
+ case AUDIO_PORT_TYPE_DEVICE:
+ switch (role) {
+ case AUDIO_PORT_ROLE_NONE:
+ break; // must be listed -Werror,-Wswitch
+ case AUDIO_PORT_ROLE_SOURCE:
+ return AudioPortDirection::INPUT;
+ case AUDIO_PORT_ROLE_SINK:
+ return AudioPortDirection::OUTPUT;
+ }
+ break;
+ case AUDIO_PORT_TYPE_MIX:
+ switch (role) {
+ case AUDIO_PORT_ROLE_NONE:
+ break; // must be listed -Werror,-Wswitch
+ case AUDIO_PORT_ROLE_SOURCE:
+ return AudioPortDirection::OUTPUT;
+ case AUDIO_PORT_ROLE_SINK:
+ return AudioPortDirection::INPUT;
+ }
+ break;
+ }
+ return unexpected(BAD_VALUE);
+}
+
+ConversionResult<audio_port_role_t> portRole(AudioPortDirection direction, audio_port_type_t type) {
+ switch (type) {
+ case AUDIO_PORT_TYPE_NONE:
+ case AUDIO_PORT_TYPE_SESSION:
+ break; // must be listed -Werror,-Wswitch
+ case AUDIO_PORT_TYPE_DEVICE:
+ switch (direction) {
+ case AudioPortDirection::INPUT:
+ return AUDIO_PORT_ROLE_SOURCE;
+ case AudioPortDirection::OUTPUT:
+ return AUDIO_PORT_ROLE_SINK;
+ }
+ break;
+ case AUDIO_PORT_TYPE_MIX:
+ switch (direction) {
+ case AudioPortDirection::OUTPUT:
+ return AUDIO_PORT_ROLE_SOURCE;
+ case AudioPortDirection::INPUT:
+ return AUDIO_PORT_ROLE_SINK;
+ }
+ break;
+ }
+ return unexpected(BAD_VALUE);
+}
+
ConversionResult<audio_config_t>
aidl2legacy_AudioConfig_audio_config_t(const AudioConfig& aidl, bool isInput) {
const audio_config_base_t legacyBase = VALUE_OR_RETURN(
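The new portDirection/portRole helpers encode the existing role/type convention (device sources and mix sinks are inputs); a quick illustration of the mapping (hypothetical caller, for clarity only):

// Illustrative: expected results of the helpers added above.
auto dir = portDirection(AUDIO_PORT_ROLE_SOURCE, AUDIO_PORT_TYPE_DEVICE);
// dir holds AudioPortDirection::INPUT
auto role = portRole(AudioPortDirection::INPUT, AUDIO_PORT_TYPE_MIX);
// role holds AUDIO_PORT_ROLE_SINK
auto bad = portDirection(AUDIO_PORT_ROLE_NONE, AUDIO_PORT_TYPE_DEVICE);
// bad holds unexpected(BAD_VALUE)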
@@ -1903,6 +1965,396 @@
enumToMask_index<int32_t, AudioEncapsulationMetadataType>);
}
+ConversionResult<audio_port_config_mix_ext_usecase>
+aidl2legacy_AudioPortMixExtUseCase_audio_port_config_mix_ext_usecase(
+ const AudioPortMixExtUseCase& aidl, bool isInput) {
+ audio_port_config_mix_ext_usecase legacy{};
+ if (aidl.getTag() != AudioPortMixExtUseCase::Tag::unspecified) {
+ if (!isInput) {
+ legacy.stream = VALUE_OR_RETURN(aidl2legacy_AudioStreamType_audio_stream_type_t(
+ VALUE_OR_RETURN(UNION_GET(aidl, stream))));
+ } else {
+ legacy.source = VALUE_OR_RETURN(aidl2legacy_AudioSource_audio_source_t(
+ VALUE_OR_RETURN(UNION_GET(aidl, source))));
+ }
+ }
+ return legacy;
+}
+
+ConversionResult<AudioPortMixExtUseCase>
+legacy2aidl_audio_port_config_mix_ext_usecase_AudioPortMixExtUseCase(
+ const audio_port_config_mix_ext_usecase& legacy, bool isInput) {
+ AudioPortMixExtUseCase aidl;
+ if (!isInput) {
+ UNION_SET(aidl, stream, VALUE_OR_RETURN(
+ legacy2aidl_audio_stream_type_t_AudioStreamType(legacy.stream)));
+ } else {
+ UNION_SET(aidl, source, VALUE_OR_RETURN(
+ legacy2aidl_audio_source_t_AudioSource(legacy.source)));
+ }
+ return aidl;
+}
+
+ConversionResult<audio_port_config_mix_ext> aidl2legacy_AudioPortMixExt_audio_port_config_mix_ext(
+ const AudioPortMixExt& aidl, bool isInput) {
+ audio_port_config_mix_ext legacy{};
+ legacy.handle = VALUE_OR_RETURN(aidl2legacy_int32_t_audio_io_handle_t(aidl.handle));
+ legacy.usecase = VALUE_OR_RETURN(
+ aidl2legacy_AudioPortMixExtUseCase_audio_port_config_mix_ext_usecase(
+ aidl.usecase, isInput));
+ return legacy;
+}
+
+ConversionResult<AudioPortMixExt> legacy2aidl_audio_port_config_mix_ext_AudioPortMixExt(
+ const audio_port_config_mix_ext& legacy, bool isInput) {
+ AudioPortMixExt aidl;
+ aidl.handle = VALUE_OR_RETURN(legacy2aidl_audio_io_handle_t_int32_t(legacy.handle));
+ aidl.usecase = VALUE_OR_RETURN(
+ legacy2aidl_audio_port_config_mix_ext_usecase_AudioPortMixExtUseCase(
+ legacy.usecase, isInput));
+ return aidl;
+}
+
+ConversionResult<audio_port_config_device_ext>
+aidl2legacy_AudioPortDeviceExt_audio_port_config_device_ext(const AudioPortDeviceExt& aidl) {
+ audio_port_config_device_ext legacy{};
+ RETURN_IF_ERROR(aidl2legacy_AudioDevice_audio_device(
+ aidl.device, &legacy.type, legacy.address));
+ return legacy;
+}
+
+ConversionResult<AudioPortDeviceExt> legacy2aidl_audio_port_config_device_ext_AudioPortDeviceExt(
+ const audio_port_config_device_ext& legacy) {
+ AudioPortDeviceExt aidl;
+ aidl.device = VALUE_OR_RETURN(
+ legacy2aidl_audio_device_AudioDevice(legacy.type, legacy.address));
+ return aidl;
+}
+
+// This type is unnamed in the original definition, thus we name it here.
+using audio_port_config_ext = decltype(audio_port_config::ext);
+
+status_t aidl2legacy_AudioPortExt_audio_port_config_ext(
+ const AudioPortExt& aidl, bool isInput,
+ audio_port_config_ext* legacy, audio_port_type_t* type) {
+ switch (aidl.getTag()) {
+ case AudioPortExt::Tag::unspecified:
+ // Just verify that the union is empty.
+ VALUE_OR_RETURN_STATUS(UNION_GET(aidl, unspecified));
+ *legacy = {};
+ *type = AUDIO_PORT_TYPE_NONE;
+ return OK;
+ case AudioPortExt::Tag::device:
+ legacy->device = VALUE_OR_RETURN_STATUS(
+ aidl2legacy_AudioPortDeviceExt_audio_port_config_device_ext(
+ VALUE_OR_RETURN_STATUS(UNION_GET(aidl, device))));
+ *type = AUDIO_PORT_TYPE_DEVICE;
+ return OK;
+ case AudioPortExt::Tag::mix:
+ legacy->mix = VALUE_OR_RETURN_STATUS(
+ aidl2legacy_AudioPortMixExt_audio_port_config_mix_ext(
+ VALUE_OR_RETURN_STATUS(UNION_GET(aidl, mix)), isInput));
+ *type = AUDIO_PORT_TYPE_MIX;
+ return OK;
+ case AudioPortExt::Tag::session:
+ // This variant is not used in the HAL scenario.
+ legacy->session.session = AUDIO_SESSION_NONE;
+ *type = AUDIO_PORT_TYPE_SESSION;
+ return OK;
+
+ }
+ LOG_ALWAYS_FATAL("Shouldn't get here"); // with -Werror,-Wswitch may compile-time fail
+}
+
+ConversionResult<AudioPortExt> legacy2aidl_audio_port_config_ext_AudioPortExt(
+ const audio_port_config_ext& legacy, audio_port_type_t type, bool isInput) {
+ AudioPortExt aidl;
+ switch (type) {
+ case AUDIO_PORT_TYPE_NONE:
+ UNION_SET(aidl, unspecified, false);
+ return aidl;
+ case AUDIO_PORT_TYPE_DEVICE: {
+ AudioPortDeviceExt device = VALUE_OR_RETURN(
+ legacy2aidl_audio_port_config_device_ext_AudioPortDeviceExt(legacy.device));
+ UNION_SET(aidl, device, device);
+ return aidl;
+ }
+ case AUDIO_PORT_TYPE_MIX: {
+ AudioPortMixExt mix = VALUE_OR_RETURN(
+ legacy2aidl_audio_port_config_mix_ext_AudioPortMixExt(legacy.mix, isInput));
+ UNION_SET(aidl, mix, mix);
+ return aidl;
+ }
+ case AUDIO_PORT_TYPE_SESSION:
+ // This variant is not used in the HAL scenario.
+ UNION_SET(aidl, unspecified, false);
+ return aidl;
+ }
+ LOG_ALWAYS_FATAL("Shouldn't get here"); // with -Werror,-Wswitch may compile-time fail
+}
+
+status_t aidl2legacy_AudioPortConfig_audio_port_config(
+ const AudioPortConfig& aidl, bool isInput, audio_port_config* legacy, int32_t* portId) {
+ legacy->id = VALUE_OR_RETURN_STATUS(aidl2legacy_int32_t_audio_port_handle_t(aidl.id));
+ *portId = aidl.portId;
+ if (aidl.sampleRate.has_value()) {
+ legacy->sample_rate = VALUE_OR_RETURN_STATUS(
+ convertIntegral<unsigned int>(aidl.sampleRate.value().value));
+ legacy->config_mask |= AUDIO_PORT_CONFIG_SAMPLE_RATE;
+ }
+ if (aidl.channelMask.has_value()) {
+ legacy->channel_mask =
+ VALUE_OR_RETURN_STATUS(
+ aidl2legacy_AudioChannelLayout_audio_channel_mask_t(
+ aidl.channelMask.value(), isInput));
+ legacy->config_mask |= AUDIO_PORT_CONFIG_CHANNEL_MASK;
+ }
+ if (aidl.format.has_value()) {
+ legacy->format = VALUE_OR_RETURN_STATUS(
+ aidl2legacy_AudioFormatDescription_audio_format_t(aidl.format.value()));
+ legacy->config_mask |= AUDIO_PORT_CONFIG_FORMAT;
+ }
+ if (aidl.gain.has_value()) {
+ legacy->gain = VALUE_OR_RETURN_STATUS(aidl2legacy_AudioGainConfig_audio_gain_config(
+ aidl.gain.value(), isInput));
+ legacy->config_mask |= AUDIO_PORT_CONFIG_GAIN;
+ }
+ if (aidl.flags.has_value()) {
+ legacy->flags = VALUE_OR_RETURN_STATUS(
+ aidl2legacy_AudioIoFlags_audio_io_flags(aidl.flags.value(), isInput));
+ legacy->config_mask |= AUDIO_PORT_CONFIG_FLAGS;
+ }
+ RETURN_STATUS_IF_ERROR(aidl2legacy_AudioPortExt_audio_port_config_ext(
+ aidl.ext, isInput, &legacy->ext, &legacy->type));
+ legacy->role = VALUE_OR_RETURN_STATUS(portRole(isInput ?
+ AudioPortDirection::INPUT : AudioPortDirection::OUTPUT, legacy->type));
+ return OK;
+}
+
+ConversionResult<AudioPortConfig>
+legacy2aidl_audio_port_config_AudioPortConfig(
+ const audio_port_config& legacy, bool isInput, int32_t portId) {
+ AudioPortConfig aidl;
+ aidl.id = VALUE_OR_RETURN(legacy2aidl_audio_port_handle_t_int32_t(legacy.id));
+ aidl.portId = portId;
+ if (legacy.config_mask & AUDIO_PORT_CONFIG_SAMPLE_RATE) {
+ Int aidl_sampleRate;
+ aidl_sampleRate.value = VALUE_OR_RETURN(convertIntegral<int32_t>(legacy.sample_rate));
+ aidl.sampleRate = aidl_sampleRate;
+ }
+ if (legacy.config_mask & AUDIO_PORT_CONFIG_CHANNEL_MASK) {
+ aidl.channelMask = VALUE_OR_RETURN(
+ legacy2aidl_audio_channel_mask_t_AudioChannelLayout(legacy.channel_mask, isInput));
+ }
+ if (legacy.config_mask & AUDIO_PORT_CONFIG_FORMAT) {
+ aidl.format = VALUE_OR_RETURN(
+ legacy2aidl_audio_format_t_AudioFormatDescription(legacy.format));
+ }
+ if (legacy.config_mask & AUDIO_PORT_CONFIG_GAIN) {
+ aidl.gain = VALUE_OR_RETURN(
+ legacy2aidl_audio_gain_config_AudioGainConfig(legacy.gain, isInput));
+ }
+ if (legacy.config_mask & AUDIO_PORT_CONFIG_FLAGS) {
+ aidl.flags = VALUE_OR_RETURN(
+ legacy2aidl_audio_io_flags_AudioIoFlags(legacy.flags, isInput));
+ }
+ aidl.ext = VALUE_OR_RETURN(
+ legacy2aidl_audio_port_config_ext_AudioPortExt(legacy.ext, legacy.type, isInput));
+ return aidl;
+}
+
+ConversionResult<audio_port_mix_ext> aidl2legacy_AudioPortMixExt_audio_port_mix_ext(
+ const AudioPortMixExt& aidl) {
+ audio_port_mix_ext legacy{};
+ legacy.handle = VALUE_OR_RETURN(aidl2legacy_int32_t_audio_io_handle_t(aidl.handle));
+ return legacy;
+}
+
+ConversionResult<AudioPortMixExt> legacy2aidl_audio_port_mix_ext_AudioPortMixExt(
+ const audio_port_mix_ext& legacy) {
+ AudioPortMixExt aidl;
+ aidl.handle = VALUE_OR_RETURN(legacy2aidl_audio_io_handle_t_int32_t(legacy.handle));
+ return aidl;
+}
+
+ConversionResult<audio_port_device_ext>
+aidl2legacy_AudioPortDeviceExt_audio_port_device_ext(const AudioPortDeviceExt& aidl) {
+ audio_port_device_ext legacy{};
+ RETURN_IF_ERROR(aidl2legacy_AudioDevice_audio_device(
+ aidl.device, &legacy.type, legacy.address));
+ return legacy;
+}
+
+ConversionResult<AudioPortDeviceExt> legacy2aidl_audio_port_device_ext_AudioPortDeviceExt(
+ const audio_port_device_ext& legacy) {
+ AudioPortDeviceExt aidl;
+ aidl.device = VALUE_OR_RETURN(
+ legacy2aidl_audio_device_AudioDevice(legacy.type, legacy.address));
+ return aidl;
+}
+
+// This type is unnamed in the original definition, thus we name it here.
+using audio_port_v7_ext = decltype(audio_port_v7::ext);
+
+status_t aidl2legacy_AudioPortExt_audio_port_v7_ext(
+ const AudioPortExt& aidl, audio_port_v7_ext* legacy, audio_port_type_t* type) {
+ switch (aidl.getTag()) {
+ case AudioPortExt::Tag::unspecified:
+ // Just verify that the union is empty.
+ VALUE_OR_RETURN_STATUS(UNION_GET(aidl, unspecified));
+ *legacy = {};
+ *type = AUDIO_PORT_TYPE_NONE;
+ return OK;
+ case AudioPortExt::Tag::device:
+ legacy->device = VALUE_OR_RETURN_STATUS(
+ aidl2legacy_AudioPortDeviceExt_audio_port_device_ext(
+ VALUE_OR_RETURN_STATUS(UNION_GET(aidl, device))));
+ *type = AUDIO_PORT_TYPE_DEVICE;
+ return OK;
+ case AudioPortExt::Tag::mix:
+ legacy->mix = VALUE_OR_RETURN_STATUS(
+ aidl2legacy_AudioPortMixExt_audio_port_mix_ext(
+ VALUE_OR_RETURN_STATUS(UNION_GET(aidl, mix))));
+ *type = AUDIO_PORT_TYPE_MIX;
+ return OK;
+ case AudioPortExt::Tag::session:
+ // This variant is not used in the HAL scenario.
+ legacy->session.session = AUDIO_SESSION_NONE;
+ *type = AUDIO_PORT_TYPE_SESSION;
+ return OK;
+
+ }
+ LOG_ALWAYS_FATAL("Shouldn't get here"); // with -Werror,-Wswitch may compile-time fail
+}
+
+ConversionResult<AudioPortExt> legacy2aidl_audio_port_v7_ext_AudioPortExt(
+ const audio_port_v7_ext& legacy, audio_port_type_t type) {
+ AudioPortExt aidl;
+ switch (type) {
+ case AUDIO_PORT_TYPE_NONE:
+ UNION_SET(aidl, unspecified, false);
+ return aidl;
+ case AUDIO_PORT_TYPE_DEVICE: {
+ AudioPortDeviceExt device = VALUE_OR_RETURN(
+ legacy2aidl_audio_port_device_ext_AudioPortDeviceExt(legacy.device));
+ UNION_SET(aidl, device, device);
+ return aidl;
+ }
+ case AUDIO_PORT_TYPE_MIX: {
+ AudioPortMixExt mix = VALUE_OR_RETURN(
+ legacy2aidl_audio_port_mix_ext_AudioPortMixExt(legacy.mix));
+ UNION_SET(aidl, mix, mix);
+ return aidl;
+ }
+ case AUDIO_PORT_TYPE_SESSION:
+ // This variant is not used in the HAL scenario.
+ UNION_SET(aidl, unspecified, false);
+ return aidl;
+ }
+ LOG_ALWAYS_FATAL("Shouldn't get here"); // with -Werror,-Wswitch may compile-time fail
+}
+
+ConversionResult<audio_port_v7>
+aidl2legacy_AudioPort_audio_port_v7(const AudioPort& aidl, bool isInput) {
+ audio_port_v7 legacy;
+ legacy.id = VALUE_OR_RETURN(aidl2legacy_int32_t_audio_port_handle_t(aidl.id));
+ RETURN_IF_ERROR(aidl2legacy_string(aidl.name, legacy.name, sizeof(legacy.name)));
+
+ if (aidl.profiles.size() > std::size(legacy.audio_profiles)) {
+ return unexpected(BAD_VALUE);
+ }
+ RETURN_IF_ERROR(convertRange(
+ aidl.profiles.begin(), aidl.profiles.end(), legacy.audio_profiles,
+ [isInput](const AudioProfile& p) {
+ return aidl2legacy_AudioProfile_audio_profile(p, isInput);
+ }));
+ legacy.num_audio_profiles = aidl.profiles.size();
+
+ if (aidl.extraAudioDescriptors.size() > std::size(legacy.extra_audio_descriptors)) {
+ return unexpected(BAD_VALUE);
+ }
+ RETURN_IF_ERROR(
+ convertRange(
+ aidl.extraAudioDescriptors.begin(), aidl.extraAudioDescriptors.end(),
+ legacy.extra_audio_descriptors,
+ aidl2legacy_ExtraAudioDescriptor_audio_extra_audio_descriptor));
+ legacy.num_extra_audio_descriptors = aidl.extraAudioDescriptors.size();
+
+ if (aidl.gains.size() > std::size(legacy.gains)) {
+ return unexpected(BAD_VALUE);
+ }
+ RETURN_IF_ERROR(convertRange(aidl.gains.begin(), aidl.gains.end(), legacy.gains,
+ [isInput](const AudioGain& g) {
+ return aidl2legacy_AudioGain_audio_gain(g, isInput);
+ }));
+ legacy.num_gains = aidl.gains.size();
+
+ RETURN_IF_ERROR(aidl2legacy_AudioPortExt_audio_port_v7_ext(
+ aidl.ext, &legacy.ext, &legacy.type));
+ legacy.role = VALUE_OR_RETURN(portRole(
+ isInput ? AudioPortDirection::INPUT : AudioPortDirection::OUTPUT, legacy.type));
+
+ AudioPortConfig aidlPortConfig;
+ int32_t portId;
+ aidlPortConfig.flags = aidl.flags;
+ aidlPortConfig.ext = aidl.ext;
+ RETURN_IF_ERROR(aidl2legacy_AudioPortConfig_audio_port_config(
+ aidlPortConfig, isInput, &legacy.active_config, &portId));
+ return legacy;
+}
+
+ConversionResult<AudioPort>
+legacy2aidl_audio_port_v7_AudioPort(const audio_port_v7& legacy, bool isInput) {
+ AudioPort aidl;
+ aidl.id = VALUE_OR_RETURN(legacy2aidl_audio_port_handle_t_int32_t(legacy.id));
+ aidl.name = VALUE_OR_RETURN(legacy2aidl_string(legacy.name, sizeof(legacy.name)));
+
+ if (legacy.num_audio_profiles > std::size(legacy.audio_profiles)) {
+ return unexpected(BAD_VALUE);
+ }
+ RETURN_IF_ERROR(
+ convertRange(legacy.audio_profiles, legacy.audio_profiles + legacy.num_audio_profiles,
+ std::back_inserter(aidl.profiles),
+ [isInput](const audio_profile& p) {
+ return legacy2aidl_audio_profile_AudioProfile(p, isInput);
+ }));
+
+ if (legacy.num_extra_audio_descriptors > std::size(legacy.extra_audio_descriptors)) {
+ return unexpected(BAD_VALUE);
+ }
+ aidl.profiles.resize(legacy.num_audio_profiles);
+ RETURN_IF_ERROR(
+ convertRange(legacy.extra_audio_descriptors,
+ legacy.extra_audio_descriptors + legacy.num_extra_audio_descriptors,
+ std::back_inserter(aidl.extraAudioDescriptors),
+ legacy2aidl_audio_extra_audio_descriptor_ExtraAudioDescriptor));
+
+ if (legacy.num_gains > std::size(legacy.gains)) {
+ return unexpected(BAD_VALUE);
+ }
+ RETURN_IF_ERROR(
+ convertRange(legacy.gains, legacy.gains + legacy.num_gains,
+ std::back_inserter(aidl.gains),
+ [isInput](const audio_gain& g) {
+ return legacy2aidl_audio_gain_AudioGain(g, isInput);
+ }));
+ aidl.gains.resize(legacy.num_gains);
+
+ aidl.ext = VALUE_OR_RETURN(
+ legacy2aidl_audio_port_v7_ext_AudioPortExt(legacy.ext, legacy.type));
+
+ AudioPortConfig aidlPortConfig = VALUE_OR_RETURN(legacy2aidl_audio_port_config_AudioPortConfig(
+ legacy.active_config, isInput, aidl.id));
+ if (aidlPortConfig.flags.has_value()) {
+ aidl.flags = aidlPortConfig.flags.value();
+ } else {
+ aidl.flags = isInput ?
+ AudioIoFlags::make<AudioIoFlags::Tag::input>(0) :
+ AudioIoFlags::make<AudioIoFlags::Tag::output>(0);
+ }
+ return aidl;
+}
+
ConversionResult<audio_profile>
aidl2legacy_AudioProfile_audio_profile(const AudioProfile& aidl, bool isInput) {
audio_profile legacy;
@@ -2263,4 +2715,4 @@
#if defined(BACKEND_NDK)
} // aidl
-#endif
\ No newline at end of file
+#endif
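A hypothetical caller of the new AudioPortConfig conversions could round-trip a legacy struct as follows (sketch only; error propagation uses the ConversionResult/status_t conventions already present in this file):

// Sketch: legacy -> AIDL -> legacy round trip for audio_port_config.
ConversionResult<audio_port_config> roundTripPortConfig(
        const audio_port_config& in, bool isInput) {
    AudioPortConfig aidl = VALUE_OR_RETURN(
            legacy2aidl_audio_port_config_AudioPortConfig(in, isInput, 0 /* portId */));
    audio_port_config out{};
    int32_t portId = 0;
    RETURN_IF_ERROR(aidl2legacy_AudioPortConfig_audio_port_config(
            aidl, isInput, &out, &portId));
    return out;
}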
diff --git a/media/audioaidlconversion/AidlConversionEffect.cpp b/media/audioaidlconversion/AidlConversionEffect.cpp
new file mode 100644
index 0000000..2f33f00
--- /dev/null
+++ b/media/audioaidlconversion/AidlConversionEffect.cpp
@@ -0,0 +1,326 @@
+/*
+ * Copyright (C) 2023 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <utility>
+
+#define LOG_TAG "AidlConversionEffect"
+//#define LOG_NDEBUG 0
+#include <utils/Log.h>
+
+#include <media/AidlConversionCppNdk.h>
+#include <media/AidlConversionEffect.h>
+
+////////////////////////////////////////////////////////////////////////////////////////////////////
+// AIDL NDK backend to legacy audio data structure conversion utilities.
+
+namespace aidl {
+namespace android {
+
+using ::aidl::android::hardware::audio::effect::AcousticEchoCanceler;
+using ::aidl::android::hardware::audio::effect::AutomaticGainControl;
+using ::aidl::android::hardware::audio::effect::BassBoost;
+using ::aidl::android::hardware::audio::effect::Descriptor;
+using ::aidl::android::hardware::audio::effect::Downmix;
+using ::aidl::android::hardware::audio::effect::Flags;
+using ::aidl::android::hardware::audio::effect::Parameter;
+using ::aidl::android::media::audio::common::AudioDeviceDescription;
+
+using ::android::BAD_VALUE;
+using ::android::base::unexpected;
+
+////////////////////////////////////////////////////////////////////////////////////////////////////
+// Converters
+
+ConversionResult<uint32_t> aidl2legacy_Flags_Type_uint32(Flags::Type type) {
+ switch (type) {
+ case Flags::Type::INSERT:
+ return EFFECT_FLAG_TYPE_INSERT;
+ case Flags::Type::AUXILIARY:
+ return EFFECT_FLAG_TYPE_AUXILIARY;
+ case Flags::Type::REPLACE:
+ return EFFECT_FLAG_TYPE_REPLACE;
+ case Flags::Type::PRE_PROC:
+ return EFFECT_FLAG_TYPE_PRE_PROC;
+ case Flags::Type::POST_PROC:
+ return EFFECT_FLAG_TYPE_POST_PROC;
+ }
+ return unexpected(BAD_VALUE);
+}
+
+ConversionResult<Flags::Type> legacy2aidl_uint32_Flags_Type(uint32_t legacy) {
+ switch (legacy & EFFECT_FLAG_TYPE_MASK) {
+ case EFFECT_FLAG_TYPE_INSERT:
+ return Flags::Type::INSERT;
+ case EFFECT_FLAG_TYPE_AUXILIARY:
+ return Flags::Type::AUXILIARY;
+ case EFFECT_FLAG_TYPE_REPLACE:
+ return Flags::Type::REPLACE;
+ case EFFECT_FLAG_TYPE_PRE_PROC:
+ return Flags::Type::PRE_PROC;
+ case EFFECT_FLAG_TYPE_POST_PROC:
+ return Flags::Type::POST_PROC;
+ }
+ return unexpected(BAD_VALUE);
+}
+
+ConversionResult<uint32_t> aidl2legacy_Flags_Insert_uint32(Flags::Insert insert) {
+ switch (insert) {
+ case Flags::Insert::ANY:
+ return EFFECT_FLAG_INSERT_ANY;
+ case Flags::Insert::FIRST:
+ return EFFECT_FLAG_INSERT_FIRST;
+ case Flags::Insert::LAST:
+ return EFFECT_FLAG_INSERT_LAST;
+ case Flags::Insert::EXCLUSIVE:
+ return EFFECT_FLAG_INSERT_EXCLUSIVE;
+ }
+ return unexpected(BAD_VALUE);
+}
+
+ConversionResult<Flags::Insert> legacy2aidl_uint32_Flags_Insert(uint32_t legacy) {
+ switch (legacy & EFFECT_FLAG_INSERT_MASK) {
+ case EFFECT_FLAG_INSERT_ANY:
+ return Flags::Insert::ANY;
+ case EFFECT_FLAG_INSERT_FIRST:
+ return Flags::Insert::FIRST;
+ case EFFECT_FLAG_INSERT_LAST:
+ return Flags::Insert::LAST;
+ case EFFECT_FLAG_INSERT_EXCLUSIVE:
+ return Flags::Insert::EXCLUSIVE;
+ }
+ return unexpected(BAD_VALUE);
+}
+
+ConversionResult<uint32_t> aidl2legacy_Flags_Volume_uint32(Flags::Volume volume) {
+ switch (volume) {
+ case Flags::Volume::NONE:
+ return 0;
+ case Flags::Volume::CTRL:
+ return EFFECT_FLAG_VOLUME_CTRL;
+ case Flags::Volume::IND:
+ return EFFECT_FLAG_VOLUME_IND;
+ case Flags::Volume::MONITOR:
+ return EFFECT_FLAG_VOLUME_MONITOR;
+ }
+ return unexpected(BAD_VALUE);
+}
+
+ConversionResult<Flags::Volume> legacy2aidl_uint32_Flags_Volume(uint32_t legacy) {
+ switch (legacy & EFFECT_FLAG_VOLUME_MASK) {
+ case EFFECT_FLAG_VOLUME_CTRL:
+ return Flags::Volume::CTRL;
+ case EFFECT_FLAG_VOLUME_IND:
+ return Flags::Volume::IND;
+ case EFFECT_FLAG_VOLUME_MONITOR:
+ return Flags::Volume::MONITOR;
+ case EFFECT_FLAG_VOLUME_NONE:
+ return Flags::Volume::NONE;
+ }
+ return unexpected(BAD_VALUE);
+}
+
+ConversionResult<uint32_t> aidl2legacy_Flags_uint32(Flags aidl) {
+ uint32_t legacy = 0;
+ legacy |= VALUE_OR_RETURN(aidl2legacy_Flags_Type_uint32(aidl.type));
+ legacy |= VALUE_OR_RETURN(aidl2legacy_Flags_Insert_uint32(aidl.insert));
+ legacy |= VALUE_OR_RETURN(aidl2legacy_Flags_Volume_uint32(aidl.volume));
+ legacy |= VALUE_OR_RETURN(aidl2legacy_Flags_HardwareAccelerator_uint32(aidl.hwAcceleratorMode));
+
+ if (aidl.offloadIndication) {
+ legacy |= EFFECT_FLAG_OFFLOAD_SUPPORTED;
+ }
+ if (aidl.deviceIndication) {
+ legacy |= EFFECT_FLAG_DEVICE_IND;
+ }
+ if (aidl.audioModeIndication) {
+ legacy |= EFFECT_FLAG_AUDIO_MODE_IND;
+ }
+ if (aidl.audioSourceIndication) {
+ legacy |= EFFECT_FLAG_AUDIO_SOURCE_IND;
+ }
+ if (aidl.noProcessing) {
+ legacy |= EFFECT_FLAG_NO_PROCESS;
+ }
+ return legacy;
+}
+
+ConversionResult<Flags> legacy2aidl_uint32_Flags(uint32_t legacy) {
+ Flags aidl;
+
+ aidl.type = VALUE_OR_RETURN(legacy2aidl_uint32_Flags_Type(legacy));
+ aidl.insert = VALUE_OR_RETURN(legacy2aidl_uint32_Flags_Insert(legacy));
+ aidl.volume = VALUE_OR_RETURN(legacy2aidl_uint32_Flags_Volume(legacy));
+ aidl.hwAcceleratorMode = VALUE_OR_RETURN(legacy2aidl_uint32_Flags_HardwareAccelerator(legacy));
+ aidl.offloadIndication = (legacy & EFFECT_FLAG_OFFLOAD_SUPPORTED);
+ aidl.deviceIndication = (legacy & EFFECT_FLAG_DEVICE_IND);
+ aidl.audioModeIndication = (legacy & EFFECT_FLAG_AUDIO_MODE_IND);
+ aidl.audioSourceIndication = (legacy & EFFECT_FLAG_AUDIO_SOURCE_IND);
+ aidl.noProcessing = (legacy & EFFECT_FLAG_NO_PROCESS);
+ return aidl;
+}
+
+ConversionResult<uint32_t> aidl2legacy_Flags_HardwareAccelerator_uint32(
+ Flags::HardwareAccelerator hwAcceleratorMode) {
+ switch (hwAcceleratorMode) {
+ case Flags::HardwareAccelerator::NONE:
+ return 0;
+ case Flags::HardwareAccelerator::SIMPLE:
+ return EFFECT_FLAG_HW_ACC_SIMPLE;
+ case Flags::HardwareAccelerator::TUNNEL:
+ return EFFECT_FLAG_HW_ACC_TUNNEL;
+ }
+ return unexpected(BAD_VALUE);
+}
+
+ConversionResult<Flags::HardwareAccelerator> legacy2aidl_uint32_Flags_HardwareAccelerator(
+ uint32_t legacy) {
+ switch (legacy & EFFECT_FLAG_HW_ACC_MASK) {
+ case EFFECT_FLAG_HW_ACC_SIMPLE:
+ return Flags::HardwareAccelerator::SIMPLE;
+ case EFFECT_FLAG_HW_ACC_TUNNEL:
+ return Flags::HardwareAccelerator::TUNNEL;
+ case 0:
+ return Flags::HardwareAccelerator::NONE;
+ }
+ return unexpected(BAD_VALUE);
+}
+
+ConversionResult<effect_descriptor_t>
+aidl2legacy_Descriptor_effect_descriptor(const Descriptor& aidl) {
+ effect_descriptor_t legacy;
+ legacy.type = VALUE_OR_RETURN(aidl2legacy_AudioUuid_audio_uuid_t(aidl.common.id.type));
+ legacy.uuid = VALUE_OR_RETURN(aidl2legacy_AudioUuid_audio_uuid_t(aidl.common.id.uuid));
+ // legacy descriptor doesn't have proxy information
+ // proxy = VALUE_OR_RETURN(aidl2legacy_AudioUuid_audio_uuid_t(aidl.proxy));
+ legacy.apiVersion = EFFECT_CONTROL_API_VERSION;
+ legacy.flags = VALUE_OR_RETURN(aidl2legacy_Flags_uint32(aidl.common.flags));
+ legacy.cpuLoad = VALUE_OR_RETURN(convertIntegral<uint16_t>(aidl.common.cpuLoad));
+ legacy.memoryUsage = VALUE_OR_RETURN(convertIntegral<uint16_t>(aidl.common.memoryUsage));
+ RETURN_IF_ERROR(aidl2legacy_string(aidl.common.name, legacy.name, sizeof(legacy.name)));
+ RETURN_IF_ERROR(aidl2legacy_string(aidl.common.implementor, legacy.implementor,
+ sizeof(legacy.implementor)));
+ return legacy;
+}
+
+ConversionResult<Descriptor>
+legacy2aidl_effect_descriptor_Descriptor(const effect_descriptor_t& legacy) {
+ Descriptor aidl;
+ aidl.common.id.type = VALUE_OR_RETURN(legacy2aidl_audio_uuid_t_AudioUuid(legacy.type));
+ aidl.common.id.uuid = VALUE_OR_RETURN(legacy2aidl_audio_uuid_t_AudioUuid(legacy.uuid));
+ // legacy descriptor doesn't have proxy information
+ // aidl.common.id.proxy
+ aidl.common.flags = VALUE_OR_RETURN(legacy2aidl_uint32_Flags(legacy.flags));
+ aidl.common.cpuLoad = VALUE_OR_RETURN(convertIntegral<int32_t>(legacy.cpuLoad));
+ aidl.common.memoryUsage = VALUE_OR_RETURN(convertIntegral<int32_t>(legacy.memoryUsage));
+ aidl.common.name = VALUE_OR_RETURN(legacy2aidl_string(legacy.name, sizeof(legacy.name)));
+ aidl.common.implementor =
+ VALUE_OR_RETURN(legacy2aidl_string(legacy.implementor, sizeof(legacy.implementor)));
+ return aidl;
+}
+
+ConversionResult<uint32_t> aidl2legacy_Parameter_aec_uint32_echoDelay(const Parameter& aidl) {
+ int echoDelay = VALUE_OR_RETURN(GET_PARAMETER_SPECIFIC_FIELD(
+ aidl, AcousticEchoCanceler, acousticEchoCanceler, echoDelayUs, int));
+ return VALUE_OR_RETURN(convertReinterpret<uint32_t>(echoDelay));
+}
+
+ConversionResult<Parameter> legacy2aidl_uint32_echoDelay_Parameter_aec(uint32_t legacy) {
+ int delay = VALUE_OR_RETURN(convertReinterpret<int32_t>(legacy));
+ return MAKE_SPECIFIC_PARAMETER(AcousticEchoCanceler, acousticEchoCanceler, echoDelayUs, delay);
+}
+
+ConversionResult<uint32_t> aidl2legacy_Parameter_aec_uint32_mobileMode(const Parameter& aidl) {
+ bool mobileMode = VALUE_OR_RETURN(GET_PARAMETER_SPECIFIC_FIELD(
+ aidl, AcousticEchoCanceler, acousticEchoCanceler, mobileMode, bool));
+ return VALUE_OR_RETURN(convertIntegral<uint32_t>(mobileMode));
+}
+
+ConversionResult<Parameter> legacy2aidl_uint32_mobileMode_Parameter_aec(uint32_t legacy) {
+ bool mode = VALUE_OR_RETURN(convertIntegral<bool>(legacy));
+ return MAKE_SPECIFIC_PARAMETER(AcousticEchoCanceler, acousticEchoCanceler, mobileMode, mode);
+}
+
+ConversionResult<uint32_t> aidl2legacy_Parameter_agc_uint32_fixedDigitalGain(
+ const Parameter& aidl) {
+ int gain = VALUE_OR_RETURN(GET_PARAMETER_SPECIFIC_FIELD(
+ aidl, AutomaticGainControl, automaticGainControl, fixedDigitalGainMb, int));
+ return VALUE_OR_RETURN(convertReinterpret<uint32_t>(gain));
+}
+
+ConversionResult<Parameter> legacy2aidl_uint32_fixedDigitalGain_Parameter_agc(uint32_t legacy) {
+ int gain = VALUE_OR_RETURN(convertReinterpret<int>(legacy));
+ return MAKE_SPECIFIC_PARAMETER(AutomaticGainControl, automaticGainControl, fixedDigitalGainMb,
+ gain);
+}
+
+ConversionResult<uint32_t> aidl2legacy_Parameter_agc_uint32_levelEstimator(
+ const Parameter& aidl) {
+ const auto& le = VALUE_OR_RETURN(
+ GET_PARAMETER_SPECIFIC_FIELD(aidl, AutomaticGainControl, automaticGainControl,
+ levelEstimator, AutomaticGainControl::LevelEstimator));
+ return static_cast<uint32_t>(le);
+}
+
+ConversionResult<Parameter> legacy2aidl_uint32_levelEstimator_Parameter_agc(uint32_t legacy) {
+ if (legacy > (uint32_t) AutomaticGainControl::LevelEstimator::PEAK) {
+ return unexpected(BAD_VALUE);
+ }
+ AutomaticGainControl::LevelEstimator le =
+ static_cast<AutomaticGainControl::LevelEstimator>(legacy);
+ return MAKE_SPECIFIC_PARAMETER(AutomaticGainControl, automaticGainControl, levelEstimator, le);
+}
+
+ConversionResult<uint32_t> aidl2legacy_Parameter_agc_uint32_saturationMargin(
+ const Parameter& aidl) {
+ int saturationMargin = VALUE_OR_RETURN(GET_PARAMETER_SPECIFIC_FIELD(
+ aidl, AutomaticGainControl, automaticGainControl, saturationMarginMb, int));
+ return VALUE_OR_RETURN(convertIntegral<uint32_t>(saturationMargin));
+}
+
+ConversionResult<Parameter> legacy2aidl_uint32_saturationMargin_Parameter_agc(uint32_t legacy) {
+ int saturationMargin = VALUE_OR_RETURN(convertIntegral<int>(legacy));
+ return MAKE_SPECIFIC_PARAMETER(AutomaticGainControl, automaticGainControl, saturationMarginMb,
+ saturationMargin);
+}
+
+ConversionResult<uint16_t> aidl2legacy_Parameter_BassBoost_uint16_strengthPm(
+ const Parameter& aidl) {
+ int strength = VALUE_OR_RETURN(
+ GET_PARAMETER_SPECIFIC_FIELD(aidl, BassBoost, bassBoost, strengthPm, int));
+ return VALUE_OR_RETURN(convertIntegral<uint16_t>(strength));
+}
+
+ConversionResult<Parameter> legacy2aidl_uint16_strengthPm_Parameter_BassBoost(uint16_t legacy) {
+ int strength = VALUE_OR_RETURN(convertIntegral<int>(legacy));
+ return MAKE_SPECIFIC_PARAMETER(BassBoost, bassBoost, strengthPm, strength);
+}
+
+ConversionResult<int16_t> aidl2legacy_Parameter_Downmix_int16_type(const Parameter& aidl) {
+ Downmix::Type aidlType = VALUE_OR_RETURN(
+ GET_PARAMETER_SPECIFIC_FIELD(aidl, Downmix, downmix, type, Downmix::Type));
+ return VALUE_OR_RETURN(convertIntegral<int16_t>(static_cast<uint32_t>(aidlType)));
+}
+
+ConversionResult<Parameter> legacy2aidl_int16_type_Parameter_Downmix(int16_t legacy) {
+ if (legacy > (uint32_t) Downmix::Type::FOLD) {
+ return unexpected(BAD_VALUE);
+ }
+ Downmix::Type aidlType = static_cast<Downmix::Type>(legacy);
+ return MAKE_SPECIFIC_PARAMETER(Downmix, downmix, type, aidlType);
+}
+
+} // namespace android
+} // aidl
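As a usage sketch (hypothetical caller), the effect parameter converters pair up so a legacy value can be wrapped into a Parameter union and unwrapped again:

// Sketch: round-trip a legacy AEC echo delay through the AIDL Parameter union.
// Parameter refers to ::aidl::android::hardware::audio::effect::Parameter.
ConversionResult<uint32_t> roundTripEchoDelay(uint32_t legacyEchoDelayUs) {
    Parameter aidl = VALUE_OR_RETURN(
            legacy2aidl_uint32_echoDelay_Parameter_aec(legacyEchoDelayUs));
    return aidl2legacy_Parameter_aec_uint32_echoDelay(aidl);
}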
diff --git a/media/audioaidlconversion/AidlConversionNdk.cpp b/media/audioaidlconversion/AidlConversionNdk.cpp
index a3e39c7..7c63339 100644
--- a/media/audioaidlconversion/AidlConversionNdk.cpp
+++ b/media/audioaidlconversion/AidlConversionNdk.cpp
@@ -29,197 +29,7 @@
namespace aidl {
namespace android {
-using ::aidl::android::hardware::audio::effect::Descriptor;
-using ::aidl::android::hardware::audio::effect::Flags;
-
-using ::android::BAD_VALUE;
-using ::android::base::unexpected;
-
-////////////////////////////////////////////////////////////////////////////////////////////////////
-// Converters
-
-ConversionResult<uint32_t> aidl2legacy_Flags_Type_uint32(Flags::Type type) {
- switch (type) {
- case Flags::Type::INSERT:
- return EFFECT_FLAG_TYPE_INSERT;
- case Flags::Type::AUXILIARY:
- return EFFECT_FLAG_TYPE_AUXILIARY;
- case Flags::Type::REPLACE:
- return EFFECT_FLAG_TYPE_REPLACE;
- case Flags::Type::PRE_PROC:
- return EFFECT_FLAG_TYPE_PRE_PROC;
- case Flags::Type::POST_PROC:
- return EFFECT_FLAG_TYPE_POST_PROC;
- }
- return unexpected(BAD_VALUE);
-}
-
-ConversionResult<uint32_t> aidl2legacy_Flags_Insert_uint32(Flags::Insert insert) {
- switch (insert) {
- case Flags::Insert::ANY:
- return EFFECT_FLAG_INSERT_ANY;
- case Flags::Insert::FIRST:
- return EFFECT_FLAG_INSERT_FIRST;
- case Flags::Insert::LAST:
- return EFFECT_FLAG_INSERT_LAST;
- case Flags::Insert::EXCLUSIVE:
- return EFFECT_FLAG_INSERT_EXCLUSIVE;
- }
- return unexpected(BAD_VALUE);
-}
-
-ConversionResult<uint32_t> aidl2legacy_Flags_Volume_uint32(Flags::Volume volume) {
- switch (volume) {
- case Flags::Volume::NONE:
- return 0;
- case Flags::Volume::CTRL:
- return EFFECT_FLAG_VOLUME_CTRL;
- case Flags::Volume::IND:
- return EFFECT_FLAG_VOLUME_IND;
- case Flags::Volume::MONITOR:
- return EFFECT_FLAG_VOLUME_MONITOR;
- }
- return unexpected(BAD_VALUE);
-}
-ConversionResult<uint32_t> aidl2legacy_Flags_HardwareAccelerator_uint32(
- Flags::HardwareAccelerator hwAcceleratorMode) {
- switch (hwAcceleratorMode) {
- case Flags::HardwareAccelerator::NONE:
- return 0;
- case Flags::HardwareAccelerator::SIMPLE:
- return EFFECT_FLAG_HW_ACC_SIMPLE;
- case Flags::HardwareAccelerator::TUNNEL:
- return EFFECT_FLAG_HW_ACC_TUNNEL;
- }
- return unexpected(BAD_VALUE);
-}
-
-ConversionResult<uint32_t> aidl2legacy_Flags_uint32(Flags aidl) {
- uint32_t legacy = 0;
- legacy |= VALUE_OR_RETURN(aidl2legacy_Flags_Type_uint32(aidl.type));
- legacy |= VALUE_OR_RETURN(aidl2legacy_Flags_Insert_uint32(aidl.insert));
- legacy |= VALUE_OR_RETURN(aidl2legacy_Flags_Volume_uint32(aidl.volume));
- legacy |= VALUE_OR_RETURN(aidl2legacy_Flags_HardwareAccelerator_uint32(aidl.hwAcceleratorMode));
-
- if (aidl.offloadIndication) {
- legacy |= EFFECT_FLAG_OFFLOAD_SUPPORTED;
- }
- if (aidl.deviceIndication) {
- legacy |= EFFECT_FLAG_DEVICE_IND;
- }
- if (aidl.audioModeIndication) {
- legacy |= EFFECT_FLAG_AUDIO_MODE_IND;
- }
- if (aidl.audioSourceIndication) {
- legacy |= EFFECT_FLAG_AUDIO_SOURCE_IND;
- }
- if (aidl.noProcessing) {
- legacy |= EFFECT_FLAG_NO_PROCESS;
- }
- return legacy;
-}
-
-ConversionResult<Flags::Type> legacy2aidl_uint32_Flags_Type(uint32_t legacy) {
- switch (legacy & EFFECT_FLAG_TYPE_MASK) {
- case EFFECT_FLAG_TYPE_INSERT:
- return Flags::Type::INSERT;
- case EFFECT_FLAG_TYPE_AUXILIARY:
- return Flags::Type::AUXILIARY;
- case EFFECT_FLAG_TYPE_REPLACE:
- return Flags::Type::REPLACE;
- case EFFECT_FLAG_TYPE_PRE_PROC:
- return Flags::Type::PRE_PROC;
- case EFFECT_FLAG_TYPE_POST_PROC:
- return Flags::Type::POST_PROC;
- }
- return unexpected(BAD_VALUE);
-}
-
-ConversionResult<Flags::Insert> legacy2aidl_uint32_Flags_Insert(uint32_t legacy) {
- switch (legacy & EFFECT_FLAG_INSERT_MASK) {
- case EFFECT_FLAG_INSERT_ANY:
- return Flags::Insert::ANY;
- case EFFECT_FLAG_INSERT_FIRST:
- return Flags::Insert::FIRST;
- case EFFECT_FLAG_INSERT_LAST:
- return Flags::Insert::LAST;
- case EFFECT_FLAG_INSERT_EXCLUSIVE:
- return Flags::Insert::EXCLUSIVE;
- }
- return unexpected(BAD_VALUE);
-}
-
-ConversionResult<Flags::Volume> legacy2aidl_uint32_Flags_Volume(uint32_t legacy) {
- switch (legacy & EFFECT_FLAG_VOLUME_MASK) {
- case EFFECT_FLAG_VOLUME_IND:
- return Flags::Volume::IND;
- case EFFECT_FLAG_VOLUME_MONITOR:
- return Flags::Volume::MONITOR;
- case EFFECT_FLAG_VOLUME_NONE:
- return Flags::Volume::NONE;
- }
- return unexpected(BAD_VALUE);
-}
-
-ConversionResult<Flags::HardwareAccelerator> legacy2aidl_uint32_Flags_HardwareAccelerator(
- uint32_t legacy) {
- switch (legacy & EFFECT_FLAG_HW_ACC_MASK) {
- case EFFECT_FLAG_HW_ACC_SIMPLE:
- return Flags::HardwareAccelerator::SIMPLE;
- case EFFECT_FLAG_HW_ACC_TUNNEL:
- return Flags::HardwareAccelerator::TUNNEL;
- }
- return unexpected(BAD_VALUE);
-}
-
-ConversionResult<Flags> legacy2aidl_uint32_Flags(uint32_t legacy) {
- Flags aidl;
-
- aidl.type = VALUE_OR_RETURN(legacy2aidl_uint32_Flags_Type(legacy));
- aidl.insert = VALUE_OR_RETURN(legacy2aidl_uint32_Flags_Insert(legacy));
- aidl.volume = VALUE_OR_RETURN(legacy2aidl_uint32_Flags_Volume(legacy));
- aidl.hwAcceleratorMode = VALUE_OR_RETURN(legacy2aidl_uint32_Flags_HardwareAccelerator(legacy));
- aidl.offloadIndication = (legacy & EFFECT_FLAG_OFFLOAD_SUPPORTED);
- aidl.deviceIndication = (legacy & EFFECT_FLAG_DEVICE_IND);
- aidl.audioModeIndication = (legacy & EFFECT_FLAG_AUDIO_MODE_IND);
- aidl.audioSourceIndication = (legacy & EFFECT_FLAG_AUDIO_SOURCE_IND);
- aidl.noProcessing = (legacy & EFFECT_FLAG_NO_PROCESS);
- return aidl;
-}
-
-ConversionResult<effect_descriptor_t>
-aidl2legacy_Descriptor_effect_descriptor(const Descriptor& aidl) {
- effect_descriptor_t legacy;
- legacy.type = VALUE_OR_RETURN(aidl2legacy_AudioUuid_audio_uuid_t(aidl.common.id.type));
- legacy.uuid = VALUE_OR_RETURN(aidl2legacy_AudioUuid_audio_uuid_t(aidl.common.id.uuid));
- // legacy descriptor doesn't have proxy information
- // proxy = VALUE_OR_RETURN(aidl2legacy_AudioUuid_audio_uuid_t(aidl.proxy));
- legacy.apiVersion = EFFECT_CONTROL_API_VERSION;
- legacy.flags = VALUE_OR_RETURN(aidl2legacy_Flags_uint32(aidl.common.flags));
- legacy.cpuLoad = VALUE_OR_RETURN(convertIntegral<uint16_t>(aidl.common.cpuLoad));
- legacy.memoryUsage = VALUE_OR_RETURN(convertIntegral<uint16_t>(aidl.common.memoryUsage));
- RETURN_IF_ERROR(aidl2legacy_string(aidl.common.name, legacy.name, sizeof(legacy.name)));
- RETURN_IF_ERROR(aidl2legacy_string(aidl.common.implementor, legacy.implementor,
- sizeof(legacy.implementor)));
- return legacy;
-}
-
-ConversionResult<Descriptor>
-legacy2aidl_effect_descriptor_Descriptor(const effect_descriptor_t& legacy) {
- Descriptor aidl;
- aidl.common.id.type = VALUE_OR_RETURN(legacy2aidl_audio_uuid_t_AudioUuid(legacy.type));
- aidl.common.id.uuid = VALUE_OR_RETURN(legacy2aidl_audio_uuid_t_AudioUuid(legacy.uuid));
- // legacy descriptor doesn't have proxy information
- // aidl.common.id.proxy
- aidl.common.flags = VALUE_OR_RETURN(legacy2aidl_uint32_Flags(legacy.flags));
- aidl.common.cpuLoad = VALUE_OR_RETURN(convertIntegral<int32_t>(legacy.cpuLoad));
- aidl.common.memoryUsage = VALUE_OR_RETURN(convertIntegral<int32_t>(legacy.memoryUsage));
- aidl.common.name = VALUE_OR_RETURN(legacy2aidl_string(legacy.name, sizeof(legacy.name)));
- aidl.common.implementor =
- VALUE_OR_RETURN(legacy2aidl_string(legacy.implementor, sizeof(legacy.implementor)));
- return aidl;
-}
-
+// buffer_provider_t is not supported, thus skipped
ConversionResult<buffer_config_t> aidl2legacy_AudioConfigBase_buffer_config_t(
const media::audio::common::AudioConfigBase& aidl, bool isInput) {
buffer_config_t legacy;
@@ -234,11 +44,12 @@
legacy.format = VALUE_OR_RETURN(aidl2legacy_AudioFormatDescription_audio_format_t(aidl.format));
legacy.mask |= EFFECT_CONFIG_FORMAT;
+ // TODO: add accessMode and mask
return legacy;
}
ConversionResult<media::audio::common::AudioConfigBase>
-legacy2aidl_AudioConfigBase_buffer_config_t(const buffer_config_t& legacy, bool isInput) {
+legacy2aidl_buffer_config_t_AudioConfigBase(const buffer_config_t& legacy, bool isInput) {
media::audio::common::AudioConfigBase aidl;
if (legacy.mask & EFFECT_CONFIG_SMP_RATE) {
@@ -252,6 +63,8 @@
aidl.format = VALUE_OR_RETURN(legacy2aidl_audio_format_t_AudioFormatDescription(
static_cast<audio_format_t>(legacy.format)));
}
+
+ // TODO: add accessMode and mask
return aidl;
}
diff --git a/media/audioaidlconversion/Android.bp b/media/audioaidlconversion/Android.bp
index 86f455e..c0024ef 100644
--- a/media/audioaidlconversion/Android.bp
+++ b/media/audioaidlconversion/Android.bp
@@ -135,11 +135,40 @@
],
defaults: [
"audio_aidl_conversion_common_default",
+ "latest_android_media_audio_common_types_ndk_shared",
+ ],
+ shared_libs: [
+ "libbinder_ndk",
+ "libbase",
+ ],
+ cflags: [
+ "-DBACKEND_NDK",
+ ],
+ min_sdk_version: "31", // AParcelableHolder was introduced in API 31
+}
+
+/**
+ * Only including AIDL effect HAL conversion.
+ */
+cc_library {
+ name: "libaudio_aidl_conversion_effect_ndk",
+ srcs: [
+ "AidlConversionEffect.cpp",
+ ],
+ header_libs: [
+ "libaudio_aidl_conversion_common_util_ndk",
+ ],
+ export_header_lib_headers: [
+ "libaudio_aidl_conversion_common_util_ndk",
+ ],
+ defaults: [
+ "audio_aidl_conversion_common_default",
"latest_android_hardware_audio_common_ndk_shared",
"latest_android_hardware_audio_effect_ndk_shared",
"latest_android_media_audio_common_types_ndk_shared",
],
shared_libs: [
+ "libaudio_aidl_conversion_common_ndk",
"libbinder_ndk",
"libbase",
],
diff --git a/media/audioaidlconversion/include/media/AidlConversionCppNdk.h b/media/audioaidlconversion/include/media/AidlConversionCppNdk.h
index c25ddb1..c412238 100644
--- a/media/audioaidlconversion/include/media/AidlConversionCppNdk.h
+++ b/media/audioaidlconversion/include/media/AidlConversionCppNdk.h
@@ -49,6 +49,8 @@
#include PREFIX(android/media/audio/common/AudioMode.h)
#include PREFIX(android/media/audio/common/AudioOffloadInfo.h)
#include PREFIX(android/media/audio/common/AudioOutputFlags.h)
+#include PREFIX(android/media/audio/common/AudioPort.h)
+#include PREFIX(android/media/audio/common/AudioPortConfig.h)
#include PREFIX(android/media/audio/common/AudioPortExt.h)
#include PREFIX(android/media/audio/common/AudioPortMixExt.h)
#include PREFIX(android/media/audio/common/AudioPlaybackRate.h)
@@ -67,6 +69,7 @@
using ::android::String16;
using ::android::String8;
+using ::android::status_t;
#if defined(BACKEND_NDK)
namespace aidl {
@@ -76,7 +79,7 @@
// maxSize is the size of the C-string buffer (including the 0-terminator), NOT the max length of
// the string.
-::android::status_t aidl2legacy_string(std::string_view aidl, char* dest, size_t maxSize);
+status_t aidl2legacy_string(std::string_view aidl, char* dest, size_t maxSize);
ConversionResult<std::string> legacy2aidl_string(const char* legacy, size_t maxSize);
ConversionResult<audio_module_handle_t> aidl2legacy_int32_t_audio_module_handle_t(int32_t aidl);
@@ -122,6 +125,12 @@
ConversionResult<media::audio::common::AudioChannelLayout>
legacy2aidl_audio_channel_mask_t_AudioChannelLayout(audio_channel_mask_t legacy, bool isInput);
+enum class AudioPortDirection {
+ INPUT, OUTPUT
+};
+ConversionResult<AudioPortDirection> portDirection(audio_port_role_t role, audio_port_type_t type);
+ConversionResult<audio_port_role_t> portRole(AudioPortDirection direction, audio_port_type_t type);
+
ConversionResult<audio_config_t>
aidl2legacy_AudioConfig_audio_config_t(const media::audio::common::AudioConfig& aidl, bool isInput);
ConversionResult<media::audio::common::AudioConfig>
@@ -172,13 +181,13 @@
ConversionResult<media::audio::common::AudioDeviceDescription>
legacy2aidl_audio_devices_t_AudioDeviceDescription(audio_devices_t legacy);
-::android::status_t aidl2legacy_AudioDevice_audio_device(
+status_t aidl2legacy_AudioDevice_audio_device(
const media::audio::common::AudioDevice& aidl, audio_devices_t* legacyType,
char* legacyAddress);
-::android::status_t aidl2legacy_AudioDevice_audio_device(
+status_t aidl2legacy_AudioDevice_audio_device(
const media::audio::common::AudioDevice& aidl, audio_devices_t* legacyType,
String8* legacyAddress);
-::android::status_t aidl2legacy_AudioDevice_audio_device(
+status_t aidl2legacy_AudioDevice_audio_device(
const media::audio::common::AudioDevice& aidl, audio_devices_t* legacyType,
std::string* legacyAddress);
@@ -265,6 +274,48 @@
ConversionResult<media::audio::common::AudioOutputFlags>
legacy2aidl_audio_output_flags_t_AudioOutputFlags(audio_output_flags_t legacy);
+// This type is unnamed in the original definition, thus we name it here.
+using audio_port_config_mix_ext_usecase = decltype(audio_port_config_mix_ext::usecase);
+ConversionResult<audio_port_config_mix_ext_usecase>
+aidl2legacy_AudioPortMixExtUseCase_audio_port_config_mix_ext_usecase(
+ const media::audio::common::AudioPortMixExtUseCase& aidl, bool isInput);
+ConversionResult<media::audio::common::AudioPortMixExtUseCase>
+legacy2aidl_audio_port_config_mix_ext_usecase_AudioPortMixExtUseCase(
+ const audio_port_config_mix_ext_usecase& legacy, bool isInput);
+
+ConversionResult<audio_port_config_device_ext>
+aidl2legacy_AudioPortDeviceExt_audio_port_config_device_ext(
+ const media::audio::common::AudioPortDeviceExt& aidl);
+ConversionResult<media::audio::common::AudioPortDeviceExt>
+ legacy2aidl_audio_port_config_device_ext_AudioPortDeviceExt(
+ const audio_port_config_device_ext& legacy);
+
+status_t aidl2legacy_AudioPortConfig_audio_port_config(
+ const media::audio::common::AudioPortConfig& aidl, bool isInput,
+ audio_port_config* legacy, int32_t* portId);
+ConversionResult<media::audio::common::AudioPortConfig>
+legacy2aidl_audio_port_config_AudioPortConfig(
+ const audio_port_config& legacy, bool isInput, int32_t portId);
+
+ConversionResult<audio_port_mix_ext> aidl2legacy_AudioPortMixExt_audio_port_mix_ext(
+ const media::audio::common::AudioPortMixExt& aidl);
+ConversionResult<media::audio::common::AudioPortMixExt>
+legacy2aidl_audio_port_mix_ext_AudioPortMixExt(
+ const audio_port_mix_ext& legacy);
+
+ConversionResult<audio_port_device_ext>
+aidl2legacy_AudioPortDeviceExt_audio_port_device_ext(
+ const media::audio::common::AudioPortDeviceExt& aidl);
+ConversionResult<media::audio::common::AudioPortDeviceExt>
+legacy2aidl_audio_port_device_ext_AudioPortDeviceExt(
+ const audio_port_device_ext& legacy);
+
+ConversionResult<audio_port_v7>
+aidl2legacy_AudioPort_audio_port_v7(
+ const media::audio::common::AudioPort& aidl, bool isInput);
+ConversionResult<media::audio::common::AudioPort>
+legacy2aidl_audio_port_v7_AudioPort(const audio_port_v7& legacy, bool isInput);
+
ConversionResult<audio_profile> aidl2legacy_AudioProfile_audio_profile(
const media::audio::common::AudioProfile& aidl, bool isInput);
ConversionResult<media::audio::common::AudioProfile> legacy2aidl_audio_profile_AudioProfile(
diff --git a/media/audioaidlconversion/include/media/AidlConversionEffect.h b/media/audioaidlconversion/include/media/AidlConversionEffect.h
new file mode 100644
index 0000000..6f81432
--- /dev/null
+++ b/media/audioaidlconversion/include/media/AidlConversionEffect.h
@@ -0,0 +1,125 @@
+/*
+ * Copyright (C) 2023 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#pragma once
+
+#include <android/binder_auto_utils.h>
+#include <android/binder_manager.h>
+#include <android/binder_process.h>
+
+/**
+ * Can only handle conversion between AIDL (NDK backend) and legacy type.
+ */
+#include <hardware/audio_effect.h>
+#include <media/AidlConversionUtil.h>
+#include <system/audio_effect.h>
+
+#include <aidl/android/hardware/audio/effect/IEffect.h>
+
+namespace aidl {
+namespace android {
+
+template <typename P, typename T, typename P::Specific::Tag tag>
+ConversionResult<T> getParameterSpecific(const P& u) {
+ const auto& spec = VALUE_OR_RETURN(UNION_GET(u, specific));
+ return unionGetField<typename P::Specific, tag>(spec);
+}
+
+template <typename P, typename T, typename P::Specific::Tag tag, typename T::Tag field, typename F>
+ConversionResult<F> getParameterSpecificField(const P& u) {
+ const auto& spec =
+ VALUE_OR_RETURN((getParameterSpecific<std::decay_t<decltype(u)>, T, tag>(u)));
+ return VALUE_OR_RETURN((unionGetField<T, field>(spec)));
+}
+
+#define GET_PARAMETER_SPECIFIC_FIELD(u, specific, tag, field, fieldType) \
+ getParameterSpecificField<std::decay_t<decltype(u)>, specific, \
+ aidl::android::hardware::audio::effect::Parameter::Specific::tag, \
+ specific::field, fieldType>(u)
+
+#define MAKE_SPECIFIC_PARAMETER(spec, tag, field, value) \
+ UNION_MAKE(aidl::android::hardware::audio::effect::Parameter, specific, \
+ UNION_MAKE(aidl::android::hardware::audio::effect::Parameter::Specific, tag, \
+ UNION_MAKE(spec, field, value)))
+
+#define MAKE_SPECIFIC_PARAMETER_ID(spec, tag, field) \
+ UNION_MAKE(aidl::android::hardware::audio::effect::Parameter::Id, tag, \
+ UNION_MAKE(spec::Id, commonTag, spec::field))
+
+ConversionResult<uint32_t> aidl2legacy_Flags_Type_uint32(
+ ::aidl::android::hardware::audio::effect::Flags::Type type);
+ConversionResult<uint32_t> aidl2legacy_Flags_Insert_uint32(
+ ::aidl::android::hardware::audio::effect::Flags::Insert insert);
+ConversionResult<uint32_t> aidl2legacy_Flags_Volume_uint32(
+ ::aidl::android::hardware::audio::effect::Flags::Volume volume);
+ConversionResult<uint32_t> aidl2legacy_Flags_HardwareAccelerator_uint32(
+ ::aidl::android::hardware::audio::effect::Flags::HardwareAccelerator hwAcceleratorMode);
+ConversionResult<uint32_t> aidl2legacy_Flags_uint32(
+ const ::aidl::android::hardware::audio::effect::Flags aidl);
+
+ConversionResult<::aidl::android::hardware::audio::effect::Flags::Type>
+legacy2aidl_uint32_Flags_Type(uint32_t legacy);
+ConversionResult<::aidl::android::hardware::audio::effect::Flags::Insert>
+legacy2aidl_uint32_Flags_Insert(uint32_t legacy);
+ConversionResult<::aidl::android::hardware::audio::effect::Flags::Volume>
+legacy2aidl_uint32_Flags_Volume(uint32_t legacy);
+ConversionResult<::aidl::android::hardware::audio::effect::Flags::HardwareAccelerator>
+legacy2aidl_uint32_Flags_HardwareAccelerator(uint32_t legacy);
+ConversionResult<::aidl::android::hardware::audio::effect::Flags> legacy2aidl_uint32_Flags(
+ uint32_t hal);
+
+ConversionResult<effect_descriptor_t> aidl2legacy_Descriptor_effect_descriptor(
+ const ::aidl::android::hardware::audio::effect::Descriptor& aidl);
+ConversionResult<::aidl::android::hardware::audio::effect::Descriptor>
+legacy2aidl_effect_descriptor_Descriptor(const effect_descriptor_t& hal);
+
+ConversionResult<uint32_t> aidl2legacy_Parameter_aec_uint32_echoDelay(
+ const ::aidl::android::hardware::audio::effect::Parameter& aidl);
+ConversionResult<::aidl::android::hardware::audio::effect::Parameter>
+legacy2aidl_uint32_echoDelay_Parameter_aec(uint32_t legacy);
+
+ConversionResult<uint32_t> aidl2legacy_Parameter_aec_uint32_mobileMode(
+ const ::aidl::android::hardware::audio::effect::Parameter& aidl);
+ConversionResult<::aidl::android::hardware::audio::effect::Parameter>
+legacy2aidl_uint32_mobileMode_Parameter_aec(uint32_t legacy);
+
+ConversionResult<uint32_t> aidl2legacy_Parameter_agc_uint32_fixedDigitalGain(
+ const ::aidl::android::hardware::audio::effect::Parameter& aidl);
+ConversionResult<::aidl::android::hardware::audio::effect::Parameter>
+legacy2aidl_uint32_fixedDigitalGain_Parameter_agc(uint32_t legacy);
+
+ConversionResult<uint32_t> aidl2legacy_Parameter_agc_uint32_levelEstimator(
+ const ::aidl::android::hardware::audio::effect::Parameter& aidl);
+ConversionResult<::aidl::android::hardware::audio::effect::Parameter>
+legacy2aidl_uint32_levelEstimator_Parameter_agc(uint32_t legacy);
+
+ConversionResult<uint32_t> aidl2legacy_Parameter_agc_uint32_saturationMargin(
+ const ::aidl::android::hardware::audio::effect::Parameter& aidl);
+ConversionResult<::aidl::android::hardware::audio::effect::Parameter>
+legacy2aidl_uint32_saturationMargin_Parameter_agc(uint32_t legacy);
+
+ConversionResult<uint16_t> aidl2legacy_Parameter_BassBoost_uint16_strengthPm(
+ const ::aidl::android::hardware::audio::effect::Parameter& aidl);
+ConversionResult<::aidl::android::hardware::audio::effect::Parameter>
+legacy2aidl_uint16_strengthPm_Parameter_BassBoost(uint16_t legacy);
+
+ConversionResult<int16_t> aidl2legacy_Parameter_Downmix_int16_type(
+ const ::aidl::android::hardware::audio::effect::Parameter& aidl);
+ConversionResult<::aidl::android::hardware::audio::effect::Parameter>
+legacy2aidl_int16_type_Parameter_Downmix(int16_t legacy);
+
+} // namespace android
+} // namespace aidl
diff --git a/media/audioaidlconversion/include/media/AidlConversionNdk.h b/media/audioaidlconversion/include/media/AidlConversionNdk.h
index a3176f6..98a7d41 100644
--- a/media/audioaidlconversion/include/media/AidlConversionNdk.h
+++ b/media/audioaidlconversion/include/media/AidlConversionNdk.h
@@ -26,42 +26,14 @@
#include <hardware/audio_effect.h>
#include <media/AidlConversionUtil.h>
#include <system/audio_effect.h>
-
-#include <aidl/android/hardware/audio/effect/IEffect.h>
+#include <aidl/android/media/audio/common/AudioConfig.h>
namespace aidl {
namespace android {
-ConversionResult<uint32_t> aidl2legacy_Flags_Type_uint32(
- ::aidl::android::hardware::audio::effect::Flags::Type type);
-ConversionResult<uint32_t> aidl2legacy_Flags_Insert_uint32(
- ::aidl::android::hardware::audio::effect::Flags::Insert insert);
-ConversionResult<uint32_t> aidl2legacy_Flags_Volume_uint32(
- ::aidl::android::hardware::audio::effect::Flags::Volume volume);
-ConversionResult<uint32_t> aidl2legacy_Flags_HardwareAccelerator_uint32(
- ::aidl::android::hardware::audio::effect::Flags::HardwareAccelerator hwAcceleratorMode);
-ConversionResult<uint32_t> aidl2legacy_Flags_uint32(
- const ::aidl::android::hardware::audio::effect::Flags aidl);
-
-ConversionResult<::aidl::android::hardware::audio::effect::Flags::Type>
-legacy2aidl_uint32_Flags_Type(uint32_t legacy);
-ConversionResult<::aidl::android::hardware::audio::effect::Flags::Insert>
-legacy2aidl_uint32_Flags_Insert(uint32_t legacy);
-ConversionResult<::aidl::android::hardware::audio::effect::Flags::Volume>
-legacy2aidl_uint32_Flags_Volume(uint32_t legacy);
-ConversionResult<::aidl::android::hardware::audio::effect::Flags::HardwareAccelerator>
-legacy2aidl_uint32_Flags_HardwareAccelerator(uint32_t legacy);
-ConversionResult<::aidl::android::hardware::audio::effect::Flags> legacy2aidl_uint32_Flags(
- uint32_t hal);
-
-ConversionResult<effect_descriptor_t> aidl2legacy_Descriptor_effect_descriptor(
- const ::aidl::android::hardware::audio::effect::Descriptor& aidl);
-ConversionResult<::aidl::android::hardware::audio::effect::Descriptor>
-legacy2aidl_effect_descriptor_Descriptor(const effect_descriptor_t& hal);
-
ConversionResult<buffer_config_t> aidl2legacy_AudioConfigBase_buffer_config_t(
const media::audio::common::AudioConfigBase& aidl, bool isInput);
-ConversionResult<media::audio::common::AudioConfigBase> legacy2aidl_AudioConfigBase_buffer_config_t(
+ConversionResult<media::audio::common::AudioConfigBase> legacy2aidl_buffer_config_t_AudioConfigBase(
const buffer_config_t& legacy, bool isInput);
} // namespace android
diff --git a/media/audioaidlconversion/include/media/AidlConversionUtil.h b/media/audioaidlconversion/include/media/AidlConversionUtil.h
index 28c7522..8b2e0de 100644
--- a/media/audioaidlconversion/include/media/AidlConversionUtil.h
+++ b/media/audioaidlconversion/include/media/AidlConversionUtil.h
@@ -25,6 +25,7 @@
#include <error/Result.h>
#if defined(BACKEND_NDK)
+#include <android/binder_auto_utils.h>
#include <android/binder_enums.h>
#include <android/binder_status.h>
@@ -277,6 +278,8 @@
#define UNION_SET(u, field, value) \
(u).set<std::decay_t<decltype(u)>::Tag::field>(value)
+#define UNION_MAKE(u, field, value) u::make<u::Tag::field>(value)
+
namespace aidl_utils {
/**
@@ -362,6 +365,20 @@
// standard Java exception (fromExceptionCode)
}
+#if defined(BACKEND_NDK)
+static inline ::android::status_t statusTFromBinderStatus(const ::ndk::ScopedAStatus &status) {
+ // Ideally this would just be 'return statusTFromBinderStatus(status.get()->get())'.
+ // However, since the definition of AStatus is not exposed, we have to achieve the
+ // same result via the methods of ScopedAStatus:
+ return status.isOk() ? ::android::OK // check ::android::OK,
+ : status.getServiceSpecificError() // service-side error, not standard Java exception
+ // (fromServiceSpecificError)
+ ?: status.getStatus() // a native binder transaction error (fromStatusT)
+ ?: statusTFromExceptionCode(status.getExceptionCode()); // a service-side error with a
+ // standard Java exception (fromExceptionCode)
+}
+#endif
+
/**
* Return a binder::Status from native service status.
*
@@ -396,11 +413,10 @@
return Status::fromServiceSpecificError(status, emptyIfNull);
}
-
} // namespace aidl_utils
} // namespace android
#if defined(BACKEND_NDK)
} // namespace aidl
-#endif
\ No newline at end of file
+#endif
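For reference, a minimal sketch of how the NDK-backend overload of statusTFromBinderStatus introduced above might be used when bridging an AIDL call into a legacy status_t path. The interface and method names ('IMyAidlService', 'doSomething') are hypothetical placeholders, not part of this change; only the helper itself is.

#include <android/binder_auto_utils.h>   // ndk::ScopedAStatus
#include <media/AidlConversionUtil.h>    // requires BACKEND_NDK to be defined

// Illustrative only: 'IMyAidlService' / 'doSomething()' stand for any NDK-backend
// AIDL proxy whose methods return ndk::ScopedAStatus.
::android::status_t callService(const std::shared_ptr<IMyAidlService>& service) {
    const ::ndk::ScopedAStatus status = service->doSomething();
    // Collapses OK, service-specific errors, binder transaction errors and
    // exception codes into a single status_t, as the helper above implements.
    return ::aidl::android::aidl_utils::statusTFromBinderStatus(status);
}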
diff --git a/media/codec2/hidl/plugin/samples/Android.bp b/media/codec2/hidl/plugin/samples/Android.bp
index 32b760d..e0f8280 100644
--- a/media/codec2/hidl/plugin/samples/Android.bp
+++ b/media/codec2/hidl/plugin/samples/Android.bp
@@ -28,6 +28,7 @@
"libGLESv1_CM",
"libGLESv2",
"libGLESv3",
+ "libvulkan",
"libbase",
"libcodec2",
"libcutils",
diff --git a/media/libaudioclient/AidlConversion.cpp b/media/libaudioclient/AidlConversion.cpp
index f75e58d..b3c8643 100644
--- a/media/libaudioclient/AidlConversion.cpp
+++ b/media/libaudioclient/AidlConversion.cpp
@@ -66,125 +66,6 @@
using media::audio::common::Int;
using media::audio::common::PcmType;
-namespace {
-
-enum class Direction {
- INPUT, OUTPUT
-};
-
-ConversionResult<Direction> direction(media::AudioPortRole role, media::AudioPortType type) {
- switch (type) {
- case media::AudioPortType::NONE:
- case media::AudioPortType::SESSION:
- break; // must be listed -Werror,-Wswitch
- case media::AudioPortType::DEVICE:
- switch (role) {
- case media::AudioPortRole::NONE:
- break; // must be listed -Werror,-Wswitch
- case media::AudioPortRole::SOURCE:
- return Direction::INPUT;
- case media::AudioPortRole::SINK:
- return Direction::OUTPUT;
- }
- break;
- case media::AudioPortType::MIX:
- switch (role) {
- case media::AudioPortRole::NONE:
- break; // must be listed -Werror,-Wswitch
- case media::AudioPortRole::SOURCE:
- return Direction::OUTPUT;
- case media::AudioPortRole::SINK:
- return Direction::INPUT;
- }
- break;
- }
- return unexpected(BAD_VALUE);
-}
-
-ConversionResult<Direction> direction(audio_port_role_t role, audio_port_type_t type) {
- switch (type) {
- case AUDIO_PORT_TYPE_NONE:
- case AUDIO_PORT_TYPE_SESSION:
- break; // must be listed -Werror,-Wswitch
- case AUDIO_PORT_TYPE_DEVICE:
- switch (role) {
- case AUDIO_PORT_ROLE_NONE:
- break; // must be listed -Werror,-Wswitch
- case AUDIO_PORT_ROLE_SOURCE:
- return Direction::INPUT;
- case AUDIO_PORT_ROLE_SINK:
- return Direction::OUTPUT;
- }
- break;
- case AUDIO_PORT_TYPE_MIX:
- switch (role) {
- case AUDIO_PORT_ROLE_NONE:
- break; // must be listed -Werror,-Wswitch
- case AUDIO_PORT_ROLE_SOURCE:
- return Direction::OUTPUT;
- case AUDIO_PORT_ROLE_SINK:
- return Direction::INPUT;
- }
- break;
- }
- return unexpected(BAD_VALUE);
-}
-
-} // namespace
-
-////////////////////////////////////////////////////////////////////////////////////////////////////
-// Converters
-
-ConversionResult<audio_io_config_event_t> aidl2legacy_AudioIoConfigEvent_audio_io_config_event_t(
- media::AudioIoConfigEvent aidl) {
- switch (aidl) {
- case media::AudioIoConfigEvent::OUTPUT_REGISTERED:
- return AUDIO_OUTPUT_REGISTERED;
- case media::AudioIoConfigEvent::OUTPUT_OPENED:
- return AUDIO_OUTPUT_OPENED;
- case media::AudioIoConfigEvent::OUTPUT_CLOSED:
- return AUDIO_OUTPUT_CLOSED;
- case media::AudioIoConfigEvent::OUTPUT_CONFIG_CHANGED:
- return AUDIO_OUTPUT_CONFIG_CHANGED;
- case media::AudioIoConfigEvent::INPUT_REGISTERED:
- return AUDIO_INPUT_REGISTERED;
- case media::AudioIoConfigEvent::INPUT_OPENED:
- return AUDIO_INPUT_OPENED;
- case media::AudioIoConfigEvent::INPUT_CLOSED:
- return AUDIO_INPUT_CLOSED;
- case media::AudioIoConfigEvent::INPUT_CONFIG_CHANGED:
- return AUDIO_INPUT_CONFIG_CHANGED;
- case media::AudioIoConfigEvent::CLIENT_STARTED:
- return AUDIO_CLIENT_STARTED;
- }
- return unexpected(BAD_VALUE);
-}
-
-ConversionResult<media::AudioIoConfigEvent> legacy2aidl_audio_io_config_event_t_AudioIoConfigEvent(
- audio_io_config_event_t legacy) {
- switch (legacy) {
- case AUDIO_OUTPUT_REGISTERED:
- return media::AudioIoConfigEvent::OUTPUT_REGISTERED;
- case AUDIO_OUTPUT_OPENED:
- return media::AudioIoConfigEvent::OUTPUT_OPENED;
- case AUDIO_OUTPUT_CLOSED:
- return media::AudioIoConfigEvent::OUTPUT_CLOSED;
- case AUDIO_OUTPUT_CONFIG_CHANGED:
- return media::AudioIoConfigEvent::OUTPUT_CONFIG_CHANGED;
- case AUDIO_INPUT_REGISTERED:
- return media::AudioIoConfigEvent::INPUT_REGISTERED;
- case AUDIO_INPUT_OPENED:
- return media::AudioIoConfigEvent::INPUT_OPENED;
- case AUDIO_INPUT_CLOSED:
- return media::AudioIoConfigEvent::INPUT_CLOSED;
- case AUDIO_INPUT_CONFIG_CHANGED:
- return media::AudioIoConfigEvent::INPUT_CONFIG_CHANGED;
- case AUDIO_CLIENT_STARTED:
- return media::AudioIoConfigEvent::CLIENT_STARTED;
- }
- return unexpected(BAD_VALUE);
-}
-
ConversionResult<audio_port_role_t> aidl2legacy_AudioPortRole_audio_port_role_t(
media::AudioPortRole aidl) {
switch (aidl) {
@@ -241,52 +122,97 @@
return unexpected(BAD_VALUE);
}
-// This type is unnamed in the original definition, thus we name it here.
-using audio_port_config_mix_ext_usecase = decltype(audio_port_config_mix_ext::usecase);
+ConversionResult<AudioPortDirection> portDirection(
+ media::AudioPortRole role, media::AudioPortType type) {
+ audio_port_role_t legacyRole = VALUE_OR_RETURN(
+ aidl2legacy_AudioPortRole_audio_port_role_t(role));
+ audio_port_type_t legacyType = VALUE_OR_RETURN(
+ aidl2legacy_AudioPortType_audio_port_type_t(type));
+ return portDirection(legacyRole, legacyType);
+}
+ConversionResult<audio_io_config_event_t> aidl2legacy_AudioIoConfigEvent_audio_io_config_event_t(
+ media::AudioIoConfigEvent aidl) {
+ switch (aidl) {
+ case media::AudioIoConfigEvent::OUTPUT_REGISTERED:
+ return AUDIO_OUTPUT_REGISTERED;
+ case media::AudioIoConfigEvent::OUTPUT_OPENED:
+ return AUDIO_OUTPUT_OPENED;
+ case media::AudioIoConfigEvent::OUTPUT_CLOSED:
+ return AUDIO_OUTPUT_CLOSED;
+ case media::AudioIoConfigEvent::OUTPUT_CONFIG_CHANGED:
+ return AUDIO_OUTPUT_CONFIG_CHANGED;
+ case media::AudioIoConfigEvent::INPUT_REGISTERED:
+ return AUDIO_INPUT_REGISTERED;
+ case media::AudioIoConfigEvent::INPUT_OPENED:
+ return AUDIO_INPUT_OPENED;
+ case media::AudioIoConfigEvent::INPUT_CLOSED:
+ return AUDIO_INPUT_CLOSED;
+ case media::AudioIoConfigEvent::INPUT_CONFIG_CHANGED:
+ return AUDIO_INPUT_CONFIG_CHANGED;
+ case media::AudioIoConfigEvent::CLIENT_STARTED:
+ return AUDIO_CLIENT_STARTED;
+ }
+ return unexpected(BAD_VALUE);
+}
+
+ConversionResult<media::AudioIoConfigEvent> legacy2aidl_audio_io_config_event_t_AudioIoConfigEvent(
+ audio_io_config_event_t legacy) {
+ switch (legacy) {
+ case AUDIO_OUTPUT_REGISTERED:
+ return media::AudioIoConfigEvent::OUTPUT_REGISTERED;
+ case AUDIO_OUTPUT_OPENED:
+ return media::AudioIoConfigEvent::OUTPUT_OPENED;
+ case AUDIO_OUTPUT_CLOSED:
+ return media::AudioIoConfigEvent::OUTPUT_CLOSED;
+ case AUDIO_OUTPUT_CONFIG_CHANGED:
+ return media::AudioIoConfigEvent::OUTPUT_CONFIG_CHANGED;
+ case AUDIO_INPUT_REGISTERED:
+ return media::AudioIoConfigEvent::INPUT_REGISTERED;
+ case AUDIO_INPUT_OPENED:
+ return media::AudioIoConfigEvent::INPUT_OPENED;
+ case AUDIO_INPUT_CLOSED:
+ return media::AudioIoConfigEvent::INPUT_CLOSED;
+ case AUDIO_INPUT_CONFIG_CHANGED:
+ return media::AudioIoConfigEvent::INPUT_CONFIG_CHANGED;
+ case AUDIO_CLIENT_STARTED:
+ return media::AudioIoConfigEvent::CLIENT_STARTED;
+ }
+ return unexpected(BAD_VALUE);
+}
ConversionResult<audio_port_config_mix_ext_usecase> aidl2legacy_AudioPortMixExtUseCase(
const AudioPortMixExtUseCase& aidl, media::AudioPortRole role) {
- audio_port_config_mix_ext_usecase legacy;
-
switch (role) {
- case media::AudioPortRole::NONE:
+ case media::AudioPortRole::NONE: {
+ audio_port_config_mix_ext_usecase legacy;
// Just verify that the union is empty.
VALUE_OR_RETURN(UNION_GET(aidl, unspecified));
return legacy;
-
+ }
case media::AudioPortRole::SOURCE:
- // This is not a bug. A SOURCE role corresponds to the stream field.
- legacy.stream = VALUE_OR_RETURN(aidl2legacy_AudioStreamType_audio_stream_type_t(
- VALUE_OR_RETURN(UNION_GET(aidl, stream))));
- return legacy;
-
+ return aidl2legacy_AudioPortMixExtUseCase_audio_port_config_mix_ext_usecase(
+ aidl, false /*isInput*/);
case media::AudioPortRole::SINK:
- // This is not a bug. A SINK role corresponds to the source field.
- legacy.source = VALUE_OR_RETURN(aidl2legacy_AudioSource_audio_source_t(
- VALUE_OR_RETURN(UNION_GET(aidl, source))));
- return legacy;
+ return aidl2legacy_AudioPortMixExtUseCase_audio_port_config_mix_ext_usecase(
+ aidl, true /*isInput*/);
}
LOG_ALWAYS_FATAL("Shouldn't get here"); // with -Werror,-Wswitch may compile-time fail
}
ConversionResult<AudioPortMixExtUseCase> legacy2aidl_AudioPortMixExtUseCase(
const audio_port_config_mix_ext_usecase& legacy, audio_port_role_t role) {
- AudioPortMixExtUseCase aidl;
-
switch (role) {
- case AUDIO_PORT_ROLE_NONE:
+ case AUDIO_PORT_ROLE_NONE: {
+ AudioPortMixExtUseCase aidl;
UNION_SET(aidl, unspecified, false);
return aidl;
+ }
case AUDIO_PORT_ROLE_SOURCE:
- // This is not a bug. A SOURCE role corresponds to the stream field.
- UNION_SET(aidl, stream, VALUE_OR_RETURN(
- legacy2aidl_audio_stream_type_t_AudioStreamType(legacy.stream)));
- return aidl;
+ return legacy2aidl_audio_port_config_mix_ext_usecase_AudioPortMixExtUseCase(
+ legacy, false /*isInput*/);
case AUDIO_PORT_ROLE_SINK:
- // This is not a bug. A SINK role corresponds to the source field.
- UNION_SET(aidl, source,
- VALUE_OR_RETURN(legacy2aidl_audio_source_t_AudioSource(legacy.source)));
- return aidl;
+ return legacy2aidl_audio_port_config_mix_ext_usecase_AudioPortMixExtUseCase(
+ legacy, true /*isInput*/);
}
LOG_ALWAYS_FATAL("Shouldn't get here"); // with -Werror,-Wswitch may compile-time fail
}
@@ -294,6 +220,8 @@
ConversionResult<audio_port_config_mix_ext> aidl2legacy_AudioPortMixExt(
const AudioPortMixExt& aidl, media::AudioPortRole role,
const media::AudioPortMixExtSys& aidlMixExt) {
+ // Not using HAL-level 'aidl2legacy_AudioPortMixExt' as it does not support
+ // 'media::AudioPortRole::NONE'.
audio_port_config_mix_ext legacy;
legacy.hw_module = VALUE_OR_RETURN(
aidl2legacy_int32_t_audio_module_handle_t(aidlMixExt.hwModule));
@@ -305,6 +233,8 @@
status_t legacy2aidl_AudioPortMixExt(
const audio_port_config_mix_ext& legacy, audio_port_role_t role,
AudioPortMixExt* aidl, media::AudioPortMixExtSys* aidlMixExt) {
+ // Not using HAL-level 'legacy2aidl_AudioPortMixExt' as it does not support
+ // 'AUDIO_PORT_ROLE_NONE'.
aidlMixExt->hwModule = VALUE_OR_RETURN_STATUS(
legacy2aidl_audio_module_handle_t_int32_t(legacy.hw_module));
aidl->handle = VALUE_OR_RETURN_STATUS(legacy2aidl_audio_io_handle_t_int32_t(legacy.handle));
@@ -329,21 +259,20 @@
ConversionResult<audio_port_config_device_ext>
aidl2legacy_AudioPortDeviceExt_audio_port_config_device_ext(
const AudioPortDeviceExt& aidl, const media::AudioPortDeviceExtSys& aidlDeviceExt) {
- audio_port_config_device_ext legacy;
+ audio_port_config_device_ext legacy = VALUE_OR_RETURN(
+ aidl2legacy_AudioPortDeviceExt_audio_port_config_device_ext(aidl));
legacy.hw_module = VALUE_OR_RETURN(
aidl2legacy_int32_t_audio_module_handle_t(aidlDeviceExt.hwModule));
- RETURN_IF_ERROR(aidl2legacy_AudioDevice_audio_device(
- aidl.device, &legacy.type, legacy.address));
return legacy;
}
status_t legacy2aidl_audio_port_config_device_ext_AudioPortDeviceExt(
const audio_port_config_device_ext& legacy,
AudioPortDeviceExt* aidl, media::AudioPortDeviceExtSys* aidlDeviceExt) {
+ *aidl = VALUE_OR_RETURN_STATUS(
+ legacy2aidl_audio_port_config_device_ext_AudioPortDeviceExt(legacy));
aidlDeviceExt->hwModule = VALUE_OR_RETURN_STATUS(
legacy2aidl_audio_module_handle_t_int32_t(legacy.hw_module));
- aidl->device = VALUE_OR_RETURN_STATUS(
- legacy2aidl_audio_device_AudioDevice(legacy.type, legacy.address));
return OK;
}
@@ -353,6 +282,8 @@
ConversionResult<audio_port_config_ext> aidl2legacy_AudioPortExt_audio_port_config_ext(
const AudioPortExt& aidl, media::AudioPortType type,
media::AudioPortRole role, const media::AudioPortExtSys& aidlSys) {
+ // Not using HAL-level 'aidl2legacy_AudioPortExt_audio_port_config_ext' as it does not support
+ // 'media::AudioPortType::SESSION'.
audio_port_config_ext legacy;
switch (type) {
case media::AudioPortType::NONE:
@@ -384,6 +315,8 @@
status_t legacy2aidl_AudioPortExt(
const audio_port_config_ext& legacy, audio_port_type_t type, audio_port_role_t role,
AudioPortExt* aidl, media::AudioPortExtSys* aidlSys) {
+ // Not using the HAL-level 'legacy2aidl' counterpart as it does not support
+ // 'AUDIO_PORT_TYPE_SESSION'.
switch (type) {
case AUDIO_PORT_TYPE_NONE:
UNION_SET(*aidl, unspecified, false);
@@ -416,83 +349,39 @@
LOG_ALWAYS_FATAL("Shouldn't get here"); // with -Werror,-Wswitch may compile-time fail
}
-ConversionResult<audio_port_config> aidl2legacy_AudioPortConfig_audio_port_config(
- const media::AudioPortConfig& aidl) {
- audio_port_config legacy{};
- legacy.id = VALUE_OR_RETURN(aidl2legacy_int32_t_audio_port_handle_t(aidl.hal.id));
+ConversionResult<audio_port_config> aidl2legacy_AudioPortConfigFw_audio_port_config(
+ const media::AudioPortConfigFw& aidl, int32_t* aidlPortId) {
+ const bool isInput = VALUE_OR_RETURN(
+ portDirection(aidl.sys.role, aidl.sys.type)) == AudioPortDirection::INPUT;
+ audio_port_config legacy;
+ int32_t aidlPortIdHolder;
+ RETURN_IF_ERROR(aidl2legacy_AudioPortConfig_audio_port_config(
+ aidl.hal, isInput, &legacy, &aidlPortIdHolder));
+ if (aidlPortId != nullptr) *aidlPortId = aidlPortIdHolder;
legacy.role = VALUE_OR_RETURN(aidl2legacy_AudioPortRole_audio_port_role_t(aidl.sys.role));
legacy.type = VALUE_OR_RETURN(aidl2legacy_AudioPortType_audio_port_type_t(aidl.sys.type));
- const bool isInput =
- VALUE_OR_RETURN(direction(aidl.sys.role, aidl.sys.type)) == Direction::INPUT;
- if (aidl.hal.sampleRate.has_value()) {
- legacy.sample_rate = VALUE_OR_RETURN(
- convertIntegral<unsigned int>(aidl.hal.sampleRate.value().value));
- legacy.config_mask |= AUDIO_PORT_CONFIG_SAMPLE_RATE;
- }
- if (aidl.hal.channelMask.has_value()) {
- legacy.channel_mask =
- VALUE_OR_RETURN(
- aidl2legacy_AudioChannelLayout_audio_channel_mask_t(
- aidl.hal.channelMask.value(), isInput));
- legacy.config_mask |= AUDIO_PORT_CONFIG_CHANNEL_MASK;
- }
- if (aidl.hal.format.has_value()) {
- legacy.format = VALUE_OR_RETURN(
- aidl2legacy_AudioFormatDescription_audio_format_t(aidl.hal.format.value()));
- legacy.config_mask |= AUDIO_PORT_CONFIG_FORMAT;
- }
- if (aidl.hal.gain.has_value()) {
- legacy.gain = VALUE_OR_RETURN(aidl2legacy_AudioGainConfig_audio_gain_config(
- aidl.hal.gain.value(), isInput));
- legacy.config_mask |= AUDIO_PORT_CONFIG_GAIN;
- }
- if (aidl.hal.flags.has_value()) {
- legacy.flags = VALUE_OR_RETURN(
- aidl2legacy_AudioIoFlags_audio_io_flags(aidl.hal.flags.value(), isInput));
- legacy.config_mask |= AUDIO_PORT_CONFIG_FLAGS;
- }
legacy.ext = VALUE_OR_RETURN(
aidl2legacy_AudioPortExt_audio_port_config_ext(
aidl.hal.ext, aidl.sys.type, aidl.sys.role, aidl.sys.ext));
return legacy;
}
-ConversionResult<media::AudioPortConfig> legacy2aidl_audio_port_config_AudioPortConfig(
- const audio_port_config& legacy) {
- media::AudioPortConfig aidl;
- aidl.hal.id = VALUE_OR_RETURN(legacy2aidl_audio_port_handle_t_int32_t(legacy.id));
+ConversionResult<media::AudioPortConfigFw> legacy2aidl_audio_port_config_AudioPortConfigFw(
+ const audio_port_config& legacy, int32_t portId) {
+ const bool isInput = VALUE_OR_RETURN(
+ portDirection(legacy.role, legacy.type)) == AudioPortDirection::INPUT;
+ media::AudioPortConfigFw aidl;
+ aidl.hal = VALUE_OR_RETURN(
+ legacy2aidl_audio_port_config_AudioPortConfig(legacy, isInput, portId));
aidl.sys.role = VALUE_OR_RETURN(legacy2aidl_audio_port_role_t_AudioPortRole(legacy.role));
aidl.sys.type = VALUE_OR_RETURN(legacy2aidl_audio_port_type_t_AudioPortType(legacy.type));
- const bool isInput = VALUE_OR_RETURN(
- direction(legacy.role, legacy.type)) == Direction::INPUT;
- if (legacy.config_mask & AUDIO_PORT_CONFIG_SAMPLE_RATE) {
- Int aidl_sampleRate;
- aidl_sampleRate.value = VALUE_OR_RETURN(convertIntegral<int32_t>(legacy.sample_rate));
- aidl.hal.sampleRate = aidl_sampleRate;
- }
- if (legacy.config_mask & AUDIO_PORT_CONFIG_CHANNEL_MASK) {
- aidl.hal.channelMask = VALUE_OR_RETURN(
- legacy2aidl_audio_channel_mask_t_AudioChannelLayout(legacy.channel_mask, isInput));
- }
- if (legacy.config_mask & AUDIO_PORT_CONFIG_FORMAT) {
- aidl.hal.format = VALUE_OR_RETURN(
- legacy2aidl_audio_format_t_AudioFormatDescription(legacy.format));
- }
- if (legacy.config_mask & AUDIO_PORT_CONFIG_GAIN) {
- aidl.hal.gain = VALUE_OR_RETURN(
- legacy2aidl_audio_gain_config_AudioGainConfig(legacy.gain, isInput));
- }
- if (legacy.config_mask & AUDIO_PORT_CONFIG_FLAGS) {
- aidl.hal.flags = VALUE_OR_RETURN(
- legacy2aidl_audio_io_flags_AudioIoFlags(legacy.flags, isInput));
- }
RETURN_IF_ERROR(legacy2aidl_AudioPortExt(legacy.ext, legacy.type, legacy.role,
&aidl.hal.ext, &aidl.sys.ext));
return aidl;
}
-ConversionResult<struct audio_patch> aidl2legacy_AudioPatch_audio_patch(
- const media::AudioPatch& aidl) {
+ConversionResult<struct audio_patch> aidl2legacy_AudioPatchFw_audio_patch(
+ const media::AudioPatchFw& aidl) {
struct audio_patch legacy;
legacy.id = VALUE_OR_RETURN(aidl2legacy_int32_t_audio_patch_handle_t(aidl.id));
legacy.num_sinks = VALUE_OR_RETURN(convertIntegral<unsigned int>(aidl.sinks.size()));
@@ -501,7 +390,7 @@
}
for (size_t i = 0; i < legacy.num_sinks; ++i) {
legacy.sinks[i] =
- VALUE_OR_RETURN(aidl2legacy_AudioPortConfig_audio_port_config(aidl.sinks[i]));
+ VALUE_OR_RETURN(aidl2legacy_AudioPortConfigFw_audio_port_config(aidl.sinks[i]));
}
legacy.num_sources = VALUE_OR_RETURN(convertIntegral<unsigned int>(aidl.sources.size()));
if (legacy.num_sources > AUDIO_PATCH_PORTS_MAX) {
@@ -509,14 +398,14 @@
}
for (size_t i = 0; i < legacy.num_sources; ++i) {
legacy.sources[i] =
- VALUE_OR_RETURN(aidl2legacy_AudioPortConfig_audio_port_config(aidl.sources[i]));
+ VALUE_OR_RETURN(aidl2legacy_AudioPortConfigFw_audio_port_config(aidl.sources[i]));
}
return legacy;
}
-ConversionResult<media::AudioPatch> legacy2aidl_audio_patch_AudioPatch(
+ConversionResult<media::AudioPatchFw> legacy2aidl_audio_patch_AudioPatchFw(
const struct audio_patch& legacy) {
- media::AudioPatch aidl;
+ media::AudioPatchFw aidl;
aidl.id = VALUE_OR_RETURN(legacy2aidl_audio_patch_handle_t_int32_t(legacy.id));
if (legacy.num_sinks > AUDIO_PATCH_PORTS_MAX) {
@@ -524,14 +413,14 @@
}
for (unsigned int i = 0; i < legacy.num_sinks; ++i) {
aidl.sinks.push_back(
- VALUE_OR_RETURN(legacy2aidl_audio_port_config_AudioPortConfig(legacy.sinks[i])));
+ VALUE_OR_RETURN(legacy2aidl_audio_port_config_AudioPortConfigFw(legacy.sinks[i])));
}
if (legacy.num_sources > AUDIO_PATCH_PORTS_MAX) {
return unexpected(BAD_VALUE);
}
for (unsigned int i = 0; i < legacy.num_sources; ++i) {
aidl.sources.push_back(
- VALUE_OR_RETURN(legacy2aidl_audio_port_config_AudioPortConfig(legacy.sources[i])));
+ VALUE_OR_RETURN(legacy2aidl_audio_port_config_AudioPortConfigFw(legacy.sources[i])));
}
return aidl;
}
@@ -541,7 +430,7 @@
const audio_io_handle_t io_handle = VALUE_OR_RETURN(
aidl2legacy_int32_t_audio_io_handle_t(aidl.ioHandle));
const struct audio_patch patch = VALUE_OR_RETURN(
- aidl2legacy_AudioPatch_audio_patch(aidl.patch));
+ aidl2legacy_AudioPatchFw_audio_patch(aidl.patch));
const bool isInput = aidl.isInput;
const uint32_t sampling_rate = VALUE_OR_RETURN(convertIntegral<uint32_t>(aidl.samplingRate));
const audio_format_t format = VALUE_OR_RETURN(
@@ -561,7 +450,7 @@
const sp<AudioIoDescriptor>& legacy) {
media::AudioIoDescriptor aidl;
aidl.ioHandle = VALUE_OR_RETURN(legacy2aidl_audio_io_handle_t_int32_t(legacy->getIoHandle()));
- aidl.patch = VALUE_OR_RETURN(legacy2aidl_audio_patch_AudioPatch(legacy->getPatch()));
+ aidl.patch = VALUE_OR_RETURN(legacy2aidl_audio_patch_AudioPatchFw(legacy->getPatch()));
aidl.isInput = legacy->getIsInput();
aidl.samplingRate = VALUE_OR_RETURN(convertIntegral<int32_t>(legacy->getSamplingRate()));
aidl.format = VALUE_OR_RETURN(
@@ -801,18 +690,18 @@
ConversionResult<audio_port_mix_ext>
aidl2legacy_AudioPortMixExt_audio_port_mix_ext(
const AudioPortMixExt& aidl, const media::AudioPortMixExtSys& aidlSys) {
- audio_port_mix_ext legacy{};
+ audio_port_mix_ext legacy = VALUE_OR_RETURN(
+ aidl2legacy_AudioPortMixExt_audio_port_mix_ext(aidl));
legacy.hw_module = VALUE_OR_RETURN(aidl2legacy_int32_t_audio_module_handle_t(aidlSys.hwModule));
- legacy.handle = VALUE_OR_RETURN(aidl2legacy_int32_t_audio_io_handle_t(aidl.handle));
return legacy;
}
status_t
legacy2aidl_audio_port_mix_ext_AudioPortMixExt(const audio_port_mix_ext& legacy,
AudioPortMixExt* aidl, media::AudioPortMixExtSys* aidlMixExt) {
+ *aidl = VALUE_OR_RETURN_STATUS(legacy2aidl_audio_port_mix_ext_AudioPortMixExt(legacy));
aidlMixExt->hwModule = VALUE_OR_RETURN_STATUS(
legacy2aidl_audio_module_handle_t_int32_t(legacy.hw_module));
- aidl->handle = VALUE_OR_RETURN_STATUS(legacy2aidl_audio_io_handle_t_int32_t(legacy.handle));
return OK;
}
@@ -831,11 +720,10 @@
ConversionResult<audio_port_device_ext>
aidl2legacy_AudioPortDeviceExt_audio_port_device_ext(
const AudioPortDeviceExt& aidl, const media::AudioPortDeviceExtSys& aidlSys) {
- audio_port_device_ext legacy;
+ audio_port_device_ext legacy = VALUE_OR_RETURN(
+ aidl2legacy_AudioPortDeviceExt_audio_port_device_ext(aidl));
legacy.hw_module = VALUE_OR_RETURN(
aidl2legacy_int32_t_audio_module_handle_t(aidlSys.hwModule));
- RETURN_IF_ERROR(aidl2legacy_AudioDevice_audio_device(
- aidl.device, &legacy.type, legacy.address));
legacy.encapsulation_modes = VALUE_OR_RETURN(
aidl2legacy_AudioEncapsulationMode_mask(aidlSys.encapsulationModes));
legacy.encapsulation_metadata_types = VALUE_OR_RETURN(
@@ -847,10 +735,9 @@
status_t legacy2aidl_audio_port_device_ext_AudioPortDeviceExt(
const audio_port_device_ext& legacy,
AudioPortDeviceExt* aidl, media::AudioPortDeviceExtSys* aidlDeviceExt) {
+ *aidl = VALUE_OR_RETURN_STATUS(legacy2aidl_audio_port_device_ext_AudioPortDeviceExt(legacy));
aidlDeviceExt->hwModule = VALUE_OR_RETURN_STATUS(
legacy2aidl_audio_module_handle_t_int32_t(legacy.hw_module));
- aidl->device = VALUE_OR_RETURN_STATUS(
- legacy2aidl_audio_device_AudioDevice(legacy.type, legacy.address));
aidlDeviceExt->encapsulationModes = VALUE_OR_RETURN_STATUS(
legacy2aidl_AudioEncapsulationMode_mask(legacy.encapsulation_modes));
aidlDeviceExt->encapsulationMetadataTypes = VALUE_OR_RETURN_STATUS(
@@ -930,93 +817,33 @@
}
ConversionResult<audio_port_v7>
-aidl2legacy_AudioPort_audio_port_v7(const media::AudioPort& aidl) {
- audio_port_v7 legacy;
- legacy.id = VALUE_OR_RETURN(aidl2legacy_int32_t_audio_port_handle_t(aidl.hal.id));
+aidl2legacy_AudioPortFw_audio_port_v7(const media::AudioPortFw& aidl) {
+ const bool isInput = VALUE_OR_RETURN(
+ portDirection(aidl.sys.role, aidl.sys.type)) == AudioPortDirection::INPUT;
+ audio_port_v7 legacy = VALUE_OR_RETURN(aidl2legacy_AudioPort_audio_port_v7(aidl.hal, isInput));
legacy.role = VALUE_OR_RETURN(aidl2legacy_AudioPortRole_audio_port_role_t(aidl.sys.role));
legacy.type = VALUE_OR_RETURN(aidl2legacy_AudioPortType_audio_port_type_t(aidl.sys.type));
- RETURN_IF_ERROR(aidl2legacy_string(aidl.hal.name, legacy.name, sizeof(legacy.name)));
-
- if (aidl.hal.profiles.size() > std::size(legacy.audio_profiles)) {
- return unexpected(BAD_VALUE);
- }
- const bool isInput =
- VALUE_OR_RETURN(direction(aidl.sys.role, aidl.sys.type)) == Direction::INPUT;
- RETURN_IF_ERROR(convertRange(
- aidl.hal.profiles.begin(), aidl.hal.profiles.end(), legacy.audio_profiles,
- [isInput](const AudioProfile& p) {
- return aidl2legacy_AudioProfile_audio_profile(p, isInput);
- }));
- legacy.num_audio_profiles = aidl.hal.profiles.size();
-
- if (aidl.hal.extraAudioDescriptors.size() > std::size(legacy.extra_audio_descriptors)) {
- return unexpected(BAD_VALUE);
- }
- RETURN_IF_ERROR(
- convertRange(
- aidl.hal.extraAudioDescriptors.begin(), aidl.hal.extraAudioDescriptors.end(),
- legacy.extra_audio_descriptors,
- aidl2legacy_ExtraAudioDescriptor_audio_extra_audio_descriptor));
- legacy.num_extra_audio_descriptors = aidl.hal.extraAudioDescriptors.size();
-
- if (aidl.hal.gains.size() > std::size(legacy.gains)) {
- return unexpected(BAD_VALUE);
- }
- RETURN_IF_ERROR(convertRange(aidl.hal.gains.begin(), aidl.hal.gains.end(), legacy.gains,
- [isInput](const AudioGain& g) {
- return aidl2legacy_AudioGain_audio_gain(g, isInput);
- }));
- legacy.num_gains = aidl.hal.gains.size();
legacy.active_config = VALUE_OR_RETURN(
- aidl2legacy_AudioPortConfig_audio_port_config(aidl.sys.activeConfig));
+ aidl2legacy_AudioPortConfigFw_audio_port_config(aidl.sys.activeConfig));
legacy.ext = VALUE_OR_RETURN(
aidl2legacy_AudioPortExt_audio_port_v7_ext(aidl.hal.ext, aidl.sys.type, aidl.sys.ext));
return legacy;
}
-ConversionResult<media::AudioPort>
-legacy2aidl_audio_port_v7_AudioPort(const audio_port_v7& legacy) {
- media::AudioPort aidl;
- aidl.hal.id = VALUE_OR_RETURN(legacy2aidl_audio_port_handle_t_int32_t(legacy.id));
+ConversionResult<media::AudioPortFw>
+legacy2aidl_audio_port_v7_AudioPortFw(const audio_port_v7& legacy) {
+ const bool isInput = VALUE_OR_RETURN(
+ portDirection(legacy.role, legacy.type)) == AudioPortDirection::INPUT;
+ media::AudioPortFw aidl;
+ aidl.hal = VALUE_OR_RETURN(legacy2aidl_audio_port_v7_AudioPort(legacy, isInput));
aidl.sys.role = VALUE_OR_RETURN(legacy2aidl_audio_port_role_t_AudioPortRole(legacy.role));
aidl.sys.type = VALUE_OR_RETURN(legacy2aidl_audio_port_type_t_AudioPortType(legacy.type));
- aidl.hal.name = VALUE_OR_RETURN(legacy2aidl_string(legacy.name, sizeof(legacy.name)));
-
- if (legacy.num_audio_profiles > std::size(legacy.audio_profiles)) {
- return unexpected(BAD_VALUE);
- }
- const bool isInput = VALUE_OR_RETURN(direction(legacy.role, legacy.type)) == Direction::INPUT;
- RETURN_IF_ERROR(
- convertRange(legacy.audio_profiles, legacy.audio_profiles + legacy.num_audio_profiles,
- std::back_inserter(aidl.hal.profiles),
- [isInput](const audio_profile& p) {
- return legacy2aidl_audio_profile_AudioProfile(p, isInput);
- }));
-
- if (legacy.num_extra_audio_descriptors > std::size(legacy.extra_audio_descriptors)) {
- return unexpected(BAD_VALUE);
- }
+ // These get filled by the call to 'legacy2aidl_AudioPortExt' below.
aidl.sys.profiles.resize(legacy.num_audio_profiles);
- RETURN_IF_ERROR(
- convertRange(legacy.extra_audio_descriptors,
- legacy.extra_audio_descriptors + legacy.num_extra_audio_descriptors,
- std::back_inserter(aidl.hal.extraAudioDescriptors),
- legacy2aidl_audio_extra_audio_descriptor_ExtraAudioDescriptor));
-
- if (legacy.num_gains > std::size(legacy.gains)) {
- return unexpected(BAD_VALUE);
- }
- RETURN_IF_ERROR(
- convertRange(legacy.gains, legacy.gains + legacy.num_gains,
- std::back_inserter(aidl.hal.gains),
- [isInput](const audio_gain& g) {
- return legacy2aidl_audio_gain_AudioGain(g, isInput);
- }));
aidl.sys.gains.resize(legacy.num_gains);
-
aidl.sys.activeConfig = VALUE_OR_RETURN(
- legacy2aidl_audio_port_config_AudioPortConfig(legacy.active_config));
+ legacy2aidl_audio_port_config_AudioPortConfigFw(legacy.active_config, legacy.id));
aidl.sys.activeConfig.hal.portId = aidl.hal.id;
RETURN_IF_ERROR(
legacy2aidl_AudioPortExt(legacy.ext, legacy.type, &aidl.hal.ext, &aidl.sys.ext));
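The renamed converters above now delegate the HAL-visible fields to the common converters and handle only the system-side extras. A condensed caller-side sketch of the resulting round trip, mirroring how the conversion tests later in this change exercise these functions; 'roundTrip' itself is a hypothetical helper, the two converters are the ones declared in AidlConversion.h:

#include <media/AidlConversion.h>

namespace android {
// Illustrative only: converts a framework AudioPortConfigFw to the legacy struct
// and back, carrying the port ID alongside as the new signatures require.
ConversionResult<media::AudioPortConfigFw> roundTrip(const media::AudioPortConfigFw& config) {
    int32_t portId = -1;  // filled in by the aidl2legacy converter
    const audio_port_config legacy = VALUE_OR_RETURN(
            aidl2legacy_AudioPortConfigFw_audio_port_config(config, &portId));
    return legacy2aidl_audio_port_config_AudioPortConfigFw(legacy, portId);
}
}  // namespace android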
diff --git a/media/libaudioclient/Android.bp b/media/libaudioclient/Android.bp
index bcaaa6e..1c0535d 100644
--- a/media/libaudioclient/Android.bp
+++ b/media/libaudioclient/Android.bp
@@ -287,10 +287,10 @@
"aidl/android/media/AudioHalVersion.aidl",
"aidl/android/media/AudioIoConfigEvent.aidl",
"aidl/android/media/AudioIoDescriptor.aidl",
- "aidl/android/media/AudioPatch.aidl",
- "aidl/android/media/AudioPort.aidl",
+ "aidl/android/media/AudioPatchFw.aidl",
+ "aidl/android/media/AudioPortFw.aidl",
"aidl/android/media/AudioPortSys.aidl",
- "aidl/android/media/AudioPortConfig.aidl",
+ "aidl/android/media/AudioPortConfigFw.aidl",
"aidl/android/media/AudioPortConfigSys.aidl",
"aidl/android/media/AudioPortDeviceExtSys.aidl",
"aidl/android/media/AudioPortExtSys.aidl",
diff --git a/media/libaudioclient/AudioSystem.cpp b/media/libaudioclient/AudioSystem.cpp
index 0d16f47..31d95e6 100644
--- a/media/libaudioclient/AudioSystem.cpp
+++ b/media/libaudioclient/AudioSystem.cpp
@@ -1532,7 +1532,7 @@
legacy2aidl_audio_port_type_t_AudioPortType(type));
Int numPortsAidl;
numPortsAidl.value = VALUE_OR_RETURN_STATUS(convertIntegral<int32_t>(*num_ports));
- std::vector<media::AudioPort> portsAidl;
+ std::vector<media::AudioPortFw> portsAidl;
int32_t generationAidl;
RETURN_STATUS_IF_ERROR(statusTFromBinderStatus(
@@ -1540,7 +1540,7 @@
*num_ports = VALUE_OR_RETURN_STATUS(convertIntegral<unsigned int>(numPortsAidl.value));
*generation = VALUE_OR_RETURN_STATUS(convertIntegral<unsigned int>(generationAidl));
RETURN_STATUS_IF_ERROR(convertRange(portsAidl.begin(), portsAidl.end(), ports,
- aidl2legacy_AudioPort_audio_port_v7));
+ aidl2legacy_AudioPortFw_audio_port_v7));
return OK;
}
@@ -1551,10 +1551,10 @@
const sp<IAudioPolicyService>& aps = AudioSystem::get_audio_policy_service();
if (aps == 0) return PERMISSION_DENIED;
- media::AudioPort portAidl;
+ media::AudioPortFw portAidl;
RETURN_STATUS_IF_ERROR(
statusTFromBinderStatus(aps->getAudioPort(port->id, &portAidl)));
- *port = VALUE_OR_RETURN_STATUS(aidl2legacy_AudioPort_audio_port_v7(portAidl));
+ *port = VALUE_OR_RETURN_STATUS(aidl2legacy_AudioPortFw_audio_port_v7(portAidl));
return OK;
}
@@ -1567,8 +1567,8 @@
const sp<IAudioPolicyService>& aps = AudioSystem::get_audio_policy_service();
if (aps == 0) return PERMISSION_DENIED;
- media::AudioPatch patchAidl = VALUE_OR_RETURN_STATUS(
- legacy2aidl_audio_patch_AudioPatch(*patch));
+ media::AudioPatchFw patchAidl = VALUE_OR_RETURN_STATUS(
+ legacy2aidl_audio_patch_AudioPatchFw(*patch));
int32_t handleAidl = VALUE_OR_RETURN_STATUS(legacy2aidl_audio_patch_handle_t_int32_t(*handle));
RETURN_STATUS_IF_ERROR(
statusTFromBinderStatus(aps->createAudioPatch(patchAidl, handleAidl, &handleAidl)));
@@ -1598,7 +1598,7 @@
Int numPatchesAidl;
numPatchesAidl.value = VALUE_OR_RETURN_STATUS(convertIntegral<int32_t>(*num_patches));
- std::vector<media::AudioPatch> patchesAidl;
+ std::vector<media::AudioPatchFw> patchesAidl;
int32_t generationAidl;
RETURN_STATUS_IF_ERROR(statusTFromBinderStatus(
@@ -1606,7 +1606,7 @@
*num_patches = VALUE_OR_RETURN_STATUS(convertIntegral<unsigned int>(numPatchesAidl.value));
*generation = VALUE_OR_RETURN_STATUS(convertIntegral<unsigned int>(generationAidl));
RETURN_STATUS_IF_ERROR(convertRange(patchesAidl.begin(), patchesAidl.end(), patches,
- aidl2legacy_AudioPatch_audio_patch));
+ aidl2legacy_AudioPatchFw_audio_patch));
return OK;
}
@@ -1618,8 +1618,8 @@
const sp<IAudioPolicyService>& aps = AudioSystem::get_audio_policy_service();
if (aps == 0) return PERMISSION_DENIED;
- media::AudioPortConfig configAidl = VALUE_OR_RETURN_STATUS(
- legacy2aidl_audio_port_config_AudioPortConfig(*config));
+ media::AudioPortConfigFw configAidl = VALUE_OR_RETURN_STATUS(
+ legacy2aidl_audio_port_config_AudioPortConfigFw(*config));
return statusTFromBinderStatus(aps->setAudioPortConfig(configAidl));
}
@@ -1839,8 +1839,8 @@
const sp<IAudioPolicyService>& aps = AudioSystem::get_audio_policy_service();
if (aps == 0) return PERMISSION_DENIED;
- media::AudioPortConfig sourceAidl = VALUE_OR_RETURN_STATUS(
- legacy2aidl_audio_port_config_AudioPortConfig(*source));
+ media::AudioPortConfigFw sourceAidl = VALUE_OR_RETURN_STATUS(
+ legacy2aidl_audio_port_config_AudioPortConfigFw(*source));
media::AudioAttributesInternal attributesAidl = VALUE_OR_RETURN_STATUS(
legacy2aidl_audio_attributes_t_AudioAttributesInternal(*attributes));
int32_t portIdAidl;
diff --git a/media/libaudioclient/IAudioFlinger.cpp b/media/libaudioclient/IAudioFlinger.cpp
index b3c9f07..255fd1e 100644
--- a/media/libaudioclient/IAudioFlinger.cpp
+++ b/media/libaudioclient/IAudioFlinger.cpp
@@ -666,17 +666,19 @@
}
status_t AudioFlingerClientAdapter::getAudioPort(struct audio_port_v7* port) {
- media::AudioPort portAidl = VALUE_OR_RETURN_STATUS(legacy2aidl_audio_port_v7_AudioPort(*port));
- media::AudioPort aidlRet;
+ media::AudioPortFw portAidl = VALUE_OR_RETURN_STATUS(
+ legacy2aidl_audio_port_v7_AudioPortFw(*port));
+ media::AudioPortFw aidlRet;
RETURN_STATUS_IF_ERROR(statusTFromBinderStatus(
mDelegate->getAudioPort(portAidl, &aidlRet)));
- *port = VALUE_OR_RETURN_STATUS(aidl2legacy_AudioPort_audio_port_v7(aidlRet));
+ *port = VALUE_OR_RETURN_STATUS(aidl2legacy_AudioPortFw_audio_port_v7(aidlRet));
return OK;
}
status_t AudioFlingerClientAdapter::createAudioPatch(const struct audio_patch* patch,
audio_patch_handle_t* handle) {
- media::AudioPatch patchAidl = VALUE_OR_RETURN_STATUS(legacy2aidl_audio_patch_AudioPatch(*patch));
+ media::AudioPatchFw patchAidl = VALUE_OR_RETURN_STATUS(
+ legacy2aidl_audio_patch_AudioPatchFw(*patch));
int32_t aidlRet = VALUE_OR_RETURN_STATUS(legacy2aidl_audio_patch_handle_t_int32_t(
AUDIO_PATCH_HANDLE_NONE));
if (handle != nullptr) {
@@ -697,18 +699,18 @@
status_t AudioFlingerClientAdapter::listAudioPatches(unsigned int* num_patches,
struct audio_patch* patches) {
- std::vector<media::AudioPatch> aidlRet;
+ std::vector<media::AudioPatchFw> aidlRet;
int32_t maxPatches = VALUE_OR_RETURN_STATUS(convertIntegral<int32_t>(*num_patches));
RETURN_STATUS_IF_ERROR(statusTFromBinderStatus(
mDelegate->listAudioPatches(maxPatches, &aidlRet)));
*num_patches = VALUE_OR_RETURN_STATUS(convertIntegral<unsigned int>(aidlRet.size()));
return convertRange(aidlRet.begin(), aidlRet.end(), patches,
- aidl2legacy_AudioPatch_audio_patch);
+ aidl2legacy_AudioPatchFw_audio_patch);
}
status_t AudioFlingerClientAdapter::setAudioPortConfig(const struct audio_port_config* config) {
- media::AudioPortConfig configAidl = VALUE_OR_RETURN_STATUS(
- legacy2aidl_audio_port_config_AudioPortConfig(*config));
+ media::AudioPortConfigFw configAidl = VALUE_OR_RETURN_STATUS(
+ legacy2aidl_audio_port_config_AudioPortConfigFw(*config));
return statusTFromBinderStatus(mDelegate->setAudioPortConfig(configAidl));
}
@@ -805,8 +807,8 @@
status_t AudioFlingerClientAdapter::setDeviceConnectedState(
const struct audio_port_v7 *port, bool connected) {
- media::AudioPort aidlPort = VALUE_OR_RETURN_STATUS(
- legacy2aidl_audio_port_v7_AudioPort(*port));
+ media::AudioPortFw aidlPort = VALUE_OR_RETURN_STATUS(
+ legacy2aidl_audio_port_v7_AudioPortFw(*port));
return statusTFromBinderStatus(mDelegate->setDeviceConnectedState(aidlPort, connected));
}
@@ -1237,17 +1239,17 @@
return Status::fromStatusT(mDelegate->setLowRamDevice(isLowRamDevice, totalMemory));
}
-Status AudioFlingerServerAdapter::getAudioPort(const media::AudioPort& port,
- media::AudioPort* _aidl_return) {
- audio_port_v7 portLegacy = VALUE_OR_RETURN_BINDER(aidl2legacy_AudioPort_audio_port_v7(port));
+Status AudioFlingerServerAdapter::getAudioPort(const media::AudioPortFw& port,
+ media::AudioPortFw* _aidl_return) {
+ audio_port_v7 portLegacy = VALUE_OR_RETURN_BINDER(aidl2legacy_AudioPortFw_audio_port_v7(port));
RETURN_BINDER_IF_ERROR(mDelegate->getAudioPort(&portLegacy));
- *_aidl_return = VALUE_OR_RETURN_BINDER(legacy2aidl_audio_port_v7_AudioPort(portLegacy));
+ *_aidl_return = VALUE_OR_RETURN_BINDER(legacy2aidl_audio_port_v7_AudioPortFw(portLegacy));
return Status::ok();
}
-Status AudioFlingerServerAdapter::createAudioPatch(const media::AudioPatch& patch,
+Status AudioFlingerServerAdapter::createAudioPatch(const media::AudioPatchFw& patch,
int32_t* _aidl_return) {
- audio_patch patchLegacy = VALUE_OR_RETURN_BINDER(aidl2legacy_AudioPatch_audio_patch(patch));
+ audio_patch patchLegacy = VALUE_OR_RETURN_BINDER(aidl2legacy_AudioPatchFw_audio_patch(patch));
audio_patch_handle_t handleLegacy = VALUE_OR_RETURN_BINDER(
aidl2legacy_int32_t_audio_patch_handle_t(*_aidl_return));
RETURN_BINDER_IF_ERROR(mDelegate->createAudioPatch(&patchLegacy, &handleLegacy));
@@ -1262,7 +1264,7 @@
}
Status AudioFlingerServerAdapter::listAudioPatches(int32_t maxCount,
- std::vector<media::AudioPatch>* _aidl_return) {
+ std::vector<media::AudioPatchFw>* _aidl_return) {
unsigned int count = VALUE_OR_RETURN_BINDER(convertIntegral<unsigned int>(maxCount));
count = std::min(count, static_cast<unsigned int>(MAX_ITEMS_PER_LIST));
std::unique_ptr<audio_patch[]> patchesLegacy(new audio_patch[count]);
@@ -1270,13 +1272,13 @@
RETURN_BINDER_IF_ERROR(convertRange(&patchesLegacy[0],
&patchesLegacy[count],
std::back_inserter(*_aidl_return),
- legacy2aidl_audio_patch_AudioPatch));
+ legacy2aidl_audio_patch_AudioPatchFw));
return Status::ok();
}
-Status AudioFlingerServerAdapter::setAudioPortConfig(const media::AudioPortConfig& config) {
+Status AudioFlingerServerAdapter::setAudioPortConfig(const media::AudioPortConfigFw& config) {
audio_port_config configLegacy = VALUE_OR_RETURN_BINDER(
- aidl2legacy_AudioPortConfig_audio_port_config(config));
+ aidl2legacy_AudioPortConfigFw_audio_port_config(config));
return Status::fromStatusT(mDelegate->setAudioPortConfig(&configLegacy));
}
@@ -1354,8 +1356,8 @@
}
Status AudioFlingerServerAdapter::setDeviceConnectedState(
- const media::AudioPort& port, bool connected) {
- audio_port_v7 portLegacy = VALUE_OR_RETURN_BINDER(aidl2legacy_AudioPort_audio_port_v7(port));
+ const media::AudioPortFw& port, bool connected) {
+ audio_port_v7 portLegacy = VALUE_OR_RETURN_BINDER(aidl2legacy_AudioPortFw_audio_port_v7(port));
return Status::fromStatusT(mDelegate->setDeviceConnectedState(&portLegacy, connected));
}
diff --git a/media/libaudioclient/ToneGenerator.cpp b/media/libaudioclient/ToneGenerator.cpp
index f968a4b..f0b4d11 100644
--- a/media/libaudioclient/ToneGenerator.cpp
+++ b/media/libaudioclient/ToneGenerator.cpp
@@ -1027,7 +1027,7 @@
if (property_get("gsm.operator.iso-country", value, "") == 0) {
property_get("gsm.sim.operator.iso-country", value, "");
}
- // If dual sim device has two SIM cards inserted and is not registerd to any network,
+ // If a dual SIM device has two SIM cards inserted and is not registered to any network,
// "," is set to "gsm.operator.iso-country" prop.
// In this case, "gsm.sim.operator.iso-country" prop should be used.
if (strlen(value) == 1 && strstr(value, ",") != NULL) {
diff --git a/media/libaudioclient/aidl/android/media/AudioIoDescriptor.aidl b/media/libaudioclient/aidl/android/media/AudioIoDescriptor.aidl
index b01f902..5dd898c 100644
--- a/media/libaudioclient/aidl/android/media/AudioIoDescriptor.aidl
+++ b/media/libaudioclient/aidl/android/media/AudioIoDescriptor.aidl
@@ -16,7 +16,7 @@
package android.media;
-import android.media.AudioPatch;
+import android.media.AudioPatchFw;
import android.media.audio.common.AudioChannelLayout;
import android.media.audio.common.AudioFormatDescription;
@@ -26,7 +26,7 @@
parcelable AudioIoDescriptor {
/** Interpreted as audio_io_handle_t. */
int ioHandle;
- AudioPatch patch;
+ AudioPatchFw patch;
boolean isInput;
int samplingRate;
AudioFormatDescription format;
diff --git a/media/libaudioclient/aidl/android/media/AudioPatch.aidl b/media/libaudioclient/aidl/android/media/AudioPatchFw.aidl
similarity index 74%
rename from media/libaudioclient/aidl/android/media/AudioPatch.aidl
rename to media/libaudioclient/aidl/android/media/AudioPatchFw.aidl
index 8519faf..9ec3fa9 100644
--- a/media/libaudioclient/aidl/android/media/AudioPatch.aidl
+++ b/media/libaudioclient/aidl/android/media/AudioPatchFw.aidl
@@ -16,17 +16,19 @@
package android.media;
-import android.media.AudioPortConfig;
+import android.media.AudioPortConfigFw;
/**
* {@hide}
+ * The Fw suffix is used to break a namespace collision with an SDK API.
+ * It contains the framework version of AudioPortConfig (AudioPortConfigFw).
*/
-parcelable AudioPatch {
+parcelable AudioPatchFw {
/**
* Patch unique ID.
* Interpreted as audio_patch_handle_t.
*/
int id;
- AudioPortConfig[] sources;
- AudioPortConfig[] sinks;
+ AudioPortConfigFw[] sources;
+ AudioPortConfigFw[] sinks;
}
diff --git a/media/libaudioclient/aidl/android/media/AudioPortConfig.aidl b/media/libaudioclient/aidl/android/media/AudioPortConfigFw.aidl
similarity index 89%
rename from media/libaudioclient/aidl/android/media/AudioPortConfig.aidl
rename to media/libaudioclient/aidl/android/media/AudioPortConfigFw.aidl
index 3a4ca31..e7565d7 100644
--- a/media/libaudioclient/aidl/android/media/AudioPortConfig.aidl
+++ b/media/libaudioclient/aidl/android/media/AudioPortConfigFw.aidl
@@ -21,8 +21,9 @@
/**
* {@hide}
+ * Suffixed with Fw to avoid a name conflict with the SDK class of the same name.
*/
-parcelable AudioPortConfig {
+parcelable AudioPortConfigFw {
AudioPortConfig hal;
AudioPortConfigSys sys;
}
diff --git a/media/libaudioclient/aidl/android/media/AudioPort.aidl b/media/libaudioclient/aidl/android/media/AudioPortFw.aidl
similarity index 88%
rename from media/libaudioclient/aidl/android/media/AudioPort.aidl
rename to media/libaudioclient/aidl/android/media/AudioPortFw.aidl
index ff177c0..5580e35 100644
--- a/media/libaudioclient/aidl/android/media/AudioPort.aidl
+++ b/media/libaudioclient/aidl/android/media/AudioPortFw.aidl
@@ -21,8 +21,9 @@
/**
* {@hide}
+ * The Fw suffix is used to break a namespace collision with an SDK API.
*/
-parcelable AudioPort {
+parcelable AudioPortFw {
AudioPort hal;
AudioPortSys sys;
}
diff --git a/media/libaudioclient/aidl/android/media/AudioPortSys.aidl b/media/libaudioclient/aidl/android/media/AudioPortSys.aidl
index f3b5c19..756c469 100644
--- a/media/libaudioclient/aidl/android/media/AudioPortSys.aidl
+++ b/media/libaudioclient/aidl/android/media/AudioPortSys.aidl
@@ -17,7 +17,7 @@
package android.media;
import android.media.AudioGainSys;
-import android.media.AudioPortConfig;
+import android.media.AudioPortConfigFw;
import android.media.AudioPortExtSys;
import android.media.AudioPortRole;
import android.media.AudioPortType;
@@ -36,7 +36,7 @@
/** System-only parameters for each AudioGain from 'port.gains'. */
AudioGainSys[] gains;
/** Current audio port configuration. */
- AudioPortConfig activeConfig;
+ AudioPortConfigFw activeConfig;
/** System-only extra parameters for 'port.ext'. */
AudioPortExtSys ext;
}
diff --git a/media/libaudioclient/aidl/android/media/IAudioFlingerService.aidl b/media/libaudioclient/aidl/android/media/IAudioFlingerService.aidl
index 1111160..51bf05a 100644
--- a/media/libaudioclient/aidl/android/media/IAudioFlingerService.aidl
+++ b/media/libaudioclient/aidl/android/media/IAudioFlingerService.aidl
@@ -16,9 +16,9 @@
package android.media;
-import android.media.AudioPatch;
-import android.media.AudioPort;
-import android.media.AudioPortConfig;
+import android.media.AudioPatchFw;
+import android.media.AudioPortFw;
+import android.media.AudioPortConfigFw;
import android.media.AudioUniqueIdUse;
import android.media.AudioVibratorInfo;
import android.media.CreateEffectRequest;
@@ -182,18 +182,18 @@
void setLowRamDevice(boolean isLowRamDevice, long totalMemory);
/* Get attributes for a given audio port */
- AudioPort getAudioPort(in AudioPort port);
+ AudioPortFw getAudioPort(in AudioPortFw port);
/* Create an audio patch between several source and sink ports */
- int /* audio_patch_handle_t */ createAudioPatch(in AudioPatch patch);
+ int /* audio_patch_handle_t */ createAudioPatch(in AudioPatchFw patch);
/* Release an audio patch */
void releaseAudioPatch(int /* audio_patch_handle_t */ handle);
/* List existing audio patches */
- AudioPatch[] listAudioPatches(int maxCount);
+ AudioPatchFw[] listAudioPatches(int maxCount);
/* Set audio port configuration */
- void setAudioPortConfig(in AudioPortConfig config);
+ void setAudioPortConfig(in AudioPortConfigFw config);
/* Get the HW synchronization source used for an audio session */
int /* audio_hw_sync_t */ getAudioHwSyncForSession(int /* audio_session_t */ sessionId);
@@ -227,7 +227,7 @@
int getAAudioHardwareBurstMinUsec();
- void setDeviceConnectedState(in AudioPort devicePort, boolean connected);
+ void setDeviceConnectedState(in AudioPortFw devicePort, boolean connected);
/**
* Requests a given latency mode (See AudioLatencyMode.aidl) on an output stream.
diff --git a/media/libaudioclient/aidl/android/media/IAudioPolicyService.aidl b/media/libaudioclient/aidl/android/media/IAudioPolicyService.aidl
index 8ac89a8..ed7e243 100644
--- a/media/libaudioclient/aidl/android/media/IAudioPolicyService.aidl
+++ b/media/libaudioclient/aidl/android/media/IAudioPolicyService.aidl
@@ -23,12 +23,12 @@
import android.media.AudioDirectMode;
import android.media.AudioMix;
import android.media.AudioOffloadMode;
-import android.media.AudioPatch;
+import android.media.AudioPatchFw;
import android.media.AudioPolicyDeviceState;
import android.media.AudioPolicyForcedConfig;
import android.media.AudioPolicyForceUse;
-import android.media.AudioPort;
-import android.media.AudioPortConfig;
+import android.media.AudioPortFw;
+import android.media.AudioPortConfigFw;
import android.media.AudioPortRole;
import android.media.AudioPortType;
import android.media.AudioProductStrategy;
@@ -212,16 +212,16 @@
int listAudioPorts(AudioPortRole role,
AudioPortType type,
inout Int count,
- out AudioPort[] ports);
+ out AudioPortFw[] ports);
/** Get attributes for the audio port with the given id (AudioPort.hal.id field). */
- AudioPort getAudioPort(int /* audio_port_handle_t */ portId);
+ AudioPortFw getAudioPort(int /* audio_port_handle_t */ portId);
/**
* Create an audio patch between several source and sink ports.
* The handle argument is used when updating an existing patch.
*/
- int /* audio_patch_handle_t */ createAudioPatch(in AudioPatch patch, int handle);
+ int /* audio_patch_handle_t */ createAudioPatch(in AudioPatchFw patch, int handle);
/** Release an audio patch. */
void releaseAudioPatch(int /* audio_patch_handle_t */ handle);
@@ -234,10 +234,10 @@
* Passing '0' on input and inspecting the value on output is a common way of determining the
* number of elements without actually retrieving them.
*/
- int listAudioPatches(inout Int count, out AudioPatch[] patches);
+ int listAudioPatches(inout Int count, out AudioPatchFw[] patches);
/** Set audio port configuration. */
- void setAudioPortConfig(in AudioPortConfig config);
+ void setAudioPortConfig(in AudioPortConfigFw config);
void registerClient(IAudioPolicyServiceClient client);
@@ -261,7 +261,7 @@
void removeUserIdDeviceAffinities(int userId);
- int /* audio_port_handle_t */ startAudioSource(in AudioPortConfig source,
+ int /* audio_port_handle_t */ startAudioSource(in AudioPortConfigFw source,
in AudioAttributesInternal attributes);
void stopAudioSource(int /* audio_port_handle_t */ portId);
diff --git a/media/libaudioclient/aidl/android/media/OpenOutputRequest.aidl b/media/libaudioclient/aidl/android/media/OpenOutputRequest.aidl
index 90e7ea6..ddda8bb 100644
--- a/media/libaudioclient/aidl/android/media/OpenOutputRequest.aidl
+++ b/media/libaudioclient/aidl/android/media/OpenOutputRequest.aidl
@@ -16,7 +16,7 @@
package android.media;
-import android.media.AudioPort;
+import android.media.AudioPortFw;
import android.media.audio.common.AudioConfig;
import android.media.audio.common.AudioConfigBase;
@@ -29,7 +29,7 @@
AudioConfig halConfig;
AudioConfigBase mixerConfig;
/** Type must be DEVICE. */
- AudioPort device;
+ AudioPortFw device;
/** Bitmask, indexed by AudioOutputFlag. */
int flags;
}
diff --git a/media/libaudioclient/include/media/AidlConversion.h b/media/libaudioclient/include/media/AidlConversion.h
index b0f84a4..f0e58ae 100644
--- a/media/libaudioclient/include/media/AidlConversion.h
+++ b/media/libaudioclient/include/media/AidlConversion.h
@@ -27,8 +27,8 @@
#include <android/media/AudioFlag.h>
#include <android/media/AudioIoConfigEvent.h>
#include <android/media/AudioIoDescriptor.h>
-#include <android/media/AudioPort.h>
-#include <android/media/AudioPortConfig.h>
+#include <android/media/AudioPortFw.h>
+#include <android/media/AudioPortConfigFw.h>
#include <android/media/AudioPortDeviceExtSys.h>
#include <android/media/AudioTimestampInternal.h>
#include <android/media/AudioUniqueIdUse.h>
@@ -88,14 +88,15 @@
ConversionResult<int32_t> legacy2aidl_audio_port_config_session_ext_AudioPortConfigSessionExt(
const audio_port_config_session_ext& legacy);
-ConversionResult<audio_port_config> aidl2legacy_AudioPortConfig_audio_port_config(
- const media::AudioPortConfig& aidl);
-ConversionResult<media::AudioPortConfig> legacy2aidl_audio_port_config_AudioPortConfig(
- const audio_port_config& legacy);
+// portId needs to be set when dealing with the HAL.
+ConversionResult<audio_port_config> aidl2legacy_AudioPortConfigFw_audio_port_config(
+ const media::AudioPortConfigFw& aidl, int32_t* aidlPortId = nullptr);
+ConversionResult<media::AudioPortConfigFw> legacy2aidl_audio_port_config_AudioPortConfigFw(
+ const audio_port_config& legacy, int32_t portId = 0);
-ConversionResult<struct audio_patch> aidl2legacy_AudioPatch_audio_patch(
- const media::AudioPatch& aidl);
-ConversionResult<media::AudioPatch> legacy2aidl_audio_patch_AudioPatch(
+ConversionResult<struct audio_patch> aidl2legacy_AudioPatchFw_audio_patch(
+ const media::AudioPatchFw& aidl);
+ConversionResult<media::AudioPatchFw> legacy2aidl_audio_patch_AudioPatchFw(
const struct audio_patch& legacy);
ConversionResult<sp<AudioIoDescriptor>> aidl2legacy_AudioIoDescriptor_AudioIoDescriptor(
@@ -167,9 +168,9 @@
legacy2aidl_audio_port_session_ext_int32_t(const audio_port_session_ext& legacy);
ConversionResult<audio_port_v7>
-aidl2legacy_AudioPort_audio_port_v7(const media::AudioPort& aidl);
-ConversionResult<media::AudioPort>
-legacy2aidl_audio_port_v7_AudioPort(const audio_port_v7& legacy);
+aidl2legacy_AudioPortFw_audio_port_v7(const media::AudioPortFw& aidl);
+ConversionResult<media::AudioPortFw>
+legacy2aidl_audio_port_v7_AudioPortFw(const audio_port_v7& legacy);
ConversionResult<audio_unique_id_use_t>
aidl2legacy_AudioUniqueIdUse_audio_unique_id_use_t(media::AudioUniqueIdUse aidl);
diff --git a/media/libaudioclient/include/media/IAudioFlinger.h b/media/libaudioclient/include/media/IAudioFlinger.h
index 36ee96b..1177e5a 100644
--- a/media/libaudioclient/include/media/IAudioFlinger.h
+++ b/media/libaudioclient/include/media/IAudioFlinger.h
@@ -678,12 +678,12 @@
Status getPrimaryOutputSamplingRate(int32_t* _aidl_return) override;
Status getPrimaryOutputFrameCount(int64_t* _aidl_return) override;
Status setLowRamDevice(bool isLowRamDevice, int64_t totalMemory) override;
- Status getAudioPort(const media::AudioPort& port, media::AudioPort* _aidl_return) override;
- Status createAudioPatch(const media::AudioPatch& patch, int32_t* _aidl_return) override;
+ Status getAudioPort(const media::AudioPortFw& port, media::AudioPortFw* _aidl_return) override;
+ Status createAudioPatch(const media::AudioPatchFw& patch, int32_t* _aidl_return) override;
Status releaseAudioPatch(int32_t handle) override;
Status listAudioPatches(int32_t maxCount,
- std::vector<media::AudioPatch>* _aidl_return) override;
- Status setAudioPortConfig(const media::AudioPortConfig& config) override;
+ std::vector<media::AudioPatchFw>* _aidl_return) override;
+ Status setAudioPortConfig(const media::AudioPortConfigFw& config) override;
Status getAudioHwSyncForSession(int32_t sessionId, int32_t* _aidl_return) override;
Status systemReady() override;
Status audioPolicyReady() override;
@@ -698,7 +698,7 @@
std::vector<media::audio::common::AudioMMapPolicyInfo> *_aidl_return) override;
Status getAAudioMixerBurstCount(int32_t* _aidl_return) override;
Status getAAudioHardwareBurstMinUsec(int32_t* _aidl_return) override;
- Status setDeviceConnectedState(const media::AudioPort& port, bool connected) override;
+ Status setDeviceConnectedState(const media::AudioPortFw& port, bool connected) override;
Status setRequestedLatencyMode(
int output, media::audio::common::AudioLatencyMode mode) override;
Status getSupportedLatencyModes(int output,
diff --git a/media/libaudioclient/tests/audio_aidl_legacy_conversion_tests.cpp b/media/libaudioclient/tests/audio_aidl_legacy_conversion_tests.cpp
index 9e663bc..5fbd090 100644
--- a/media/libaudioclient/tests/audio_aidl_legacy_conversion_tests.cpp
+++ b/media/libaudioclient/tests/audio_aidl_legacy_conversion_tests.cpp
@@ -14,6 +14,8 @@
* limitations under the License.
*/
+#include <iostream>
+
#include <gtest/gtest.h>
#include <media/AidlConversion.h>
@@ -22,19 +24,52 @@
using namespace android;
using namespace android::aidl_utils;
-using android::media::AudioDirectMode;
+using media::AudioDirectMode;
+using media::AudioPortConfigFw;
+using media::AudioPortDeviceExtSys;
+using media::AudioPortFw;
+using media::AudioPortRole;
+using media::AudioPortType;
using media::audio::common::AudioChannelLayout;
+using media::audio::common::AudioDevice;
using media::audio::common::AudioDeviceDescription;
using media::audio::common::AudioDeviceType;
using media::audio::common::AudioEncapsulationMetadataType;
using media::audio::common::AudioEncapsulationType;
using media::audio::common::AudioFormatDescription;
using media::audio::common::AudioFormatType;
+using media::audio::common::AudioGain;
+using media::audio::common::AudioGainConfig;
using media::audio::common::AudioGainMode;
+using media::audio::common::AudioIoFlags;
+using media::audio::common::AudioPortDeviceExt;
+using media::audio::common::AudioProfile;
using media::audio::common::AudioStandard;
using media::audio::common::ExtraAudioDescriptor;
+using media::audio::common::Int;
using media::audio::common::PcmType;
+// Provide value printers for types generated from AIDL
+// They need to be in the same namespace as the types we intend to print
+namespace android::media {
+#define DEFINE_PRINTING_TEMPLATES() \
+ template <typename P> \
+ std::enable_if_t<std::is_base_of_v<::android::Parcelable, P>, std::ostream&> operator<<( \
+ std::ostream& os, const P& p) { \
+ return os << p.toString(); \
+ } \
+ template <typename E> \
+ std::enable_if_t<std::is_enum_v<E>, std::ostream&> operator<<(std::ostream& os, const E& e) { \
+ return os << toString(e); \
+ }
+DEFINE_PRINTING_TEMPLATES();
+
+namespace audio::common {
+DEFINE_PRINTING_TEMPLATES();
+} // namespace audio::common
+#undef DEFINE_PRINTING_TEMPLATES
+} // namespace android::media
+
namespace {
template <typename T>
@@ -367,6 +402,134 @@
testing::Values(make_AFD_Invalid(), AudioFormatDescription{},
make_AFD_Pcm16Bit()));
+AudioPortConfigFw createAudioPortConfigFw(const AudioChannelLayout& layout,
+ const AudioFormatDescription& format,
+ const AudioDeviceDescription& device) {
+ const bool isInput = device.type < AudioDeviceType::OUT_DEFAULT;
+ AudioPortConfigFw result;
+ result.hal.id = 43;
+ result.hal.portId = 42;
+ Int sr44100;
+ sr44100.value = 44100;
+ result.hal.sampleRate = sr44100;
+ result.hal.channelMask = layout;
+ result.hal.format = format;
+ AudioGainConfig gain;
+ gain.mode = 1 << static_cast<int>(AudioGainMode::JOINT);
+ gain.values = std::vector<int32_t>({100});
+ result.hal.gain = gain;
+ AudioPortDeviceExt ext;
+ AudioDevice audioDevice;
+ audioDevice.type = device;
+ ext.device = audioDevice;
+ result.hal.ext = ext;
+ result.sys.role = isInput ? AudioPortRole::SOURCE : AudioPortRole::SINK;
+ result.sys.type = AudioPortType::DEVICE;
+ AudioPortDeviceExtSys sysDevice;
+ sysDevice.hwModule = 1;
+ result.sys.ext = sysDevice;
+ return result;
+}
+
+using AudioPortConfigParam =
+ std::tuple<AudioChannelLayout, AudioFormatDescription, AudioDeviceDescription>;
+class AudioPortConfigRoundTripTest : public testing::TestWithParam<AudioPortConfigParam> {};
+TEST_P(AudioPortConfigRoundTripTest, Aidl2Legacy2Aidl) {
+ const AudioChannelLayout layout = std::get<0>(GetParam());
+ const AudioFormatDescription format = std::get<1>(GetParam());
+ const AudioDeviceDescription device = std::get<2>(GetParam());
+ const bool isInput = device.type < AudioDeviceType::OUT_DEFAULT;
+ AudioPortConfigFw initial = createAudioPortConfigFw(layout, format, device);
+ {
+ audio_port_config conv{};
+ int32_t portId = -1;
+ status_t status =
+ aidl2legacy_AudioPortConfig_audio_port_config(initial.hal, isInput, &conv, &portId);
+ ASSERT_EQ(OK, status);
+ EXPECT_NE(-1, portId);
+ auto convBack = legacy2aidl_audio_port_config_AudioPortConfig(conv, isInput, portId);
+ ASSERT_TRUE(convBack.ok());
+ EXPECT_EQ(initial.hal, convBack.value());
+ }
+ {
+ int32_t portId = -1;
+ auto conv = aidl2legacy_AudioPortConfigFw_audio_port_config(initial, &portId);
+ ASSERT_TRUE(conv.ok());
+ EXPECT_NE(-1, portId);
+ auto convBack = legacy2aidl_audio_port_config_AudioPortConfigFw(conv.value(), portId);
+ ASSERT_TRUE(convBack.ok());
+ EXPECT_EQ(initial, convBack.value());
+ }
+}
+INSTANTIATE_TEST_SUITE_P(
+ AudioPortConfig, AudioPortConfigRoundTripTest,
+ testing::Combine(testing::Values(make_ACL_Stereo(), make_ACL_ChannelIndex2()),
+ testing::Values(make_AFD_Pcm16Bit()),
+ testing::Values(make_ADD_DefaultIn(), make_ADD_DefaultOut(),
+ make_ADD_WiredHeadset())));
+
+class AudioPortFwRoundTripTest : public testing::TestWithParam<AudioDeviceDescription> {
+ public:
+ AudioProfile createProfile(const AudioFormatDescription& format,
+ const std::vector<AudioChannelLayout>& channelMasks,
+ const std::vector<int32_t>& sampleRates) {
+ AudioProfile profile;
+ profile.format = format;
+ profile.channelMasks = channelMasks;
+ profile.sampleRates = sampleRates;
+ return profile;
+ }
+};
+TEST_P(AudioPortFwRoundTripTest, Aidl2Legacy2Aidl) {
+ const AudioDeviceDescription device = GetParam();
+ const bool isInput = device.type < AudioDeviceType::OUT_DEFAULT;
+ AudioPortFw initial;
+ initial.hal.id = 42;
+ initial.hal.profiles.push_back(createProfile(
+ make_AFD_Pcm16Bit(), {make_ACL_Stereo(), make_ACL_ChannelIndex2()}, {44100, 48000}));
+ if (isInput) {
+ initial.hal.flags = AudioIoFlags::make<AudioIoFlags::Tag::input>(0);
+ } else {
+ initial.hal.flags = AudioIoFlags::make<AudioIoFlags::Tag::output>(0);
+ }
+ AudioGain initialGain;
+ initialGain.mode = 1 << static_cast<int>(AudioGainMode::JOINT);
+ initialGain.channelMask = make_ACL_Stereo();
+ initial.hal.gains.push_back(initialGain);
+ AudioPortDeviceExt initialExt;
+ AudioDevice initialDevice;
+ initialDevice.type = device;
+ initialExt.device = initialDevice;
+ initial.hal.ext = initialExt;
+ {
+ auto conv = aidl2legacy_AudioPort_audio_port_v7(initial.hal, isInput);
+ ASSERT_TRUE(conv.ok());
+ auto convBack = legacy2aidl_audio_port_v7_AudioPort(conv.value(), isInput);
+ ASSERT_TRUE(convBack.ok());
+ EXPECT_EQ(initial.hal, convBack.value());
+ }
+ initial.sys.role = isInput ? AudioPortRole::SOURCE : AudioPortRole::SINK;
+ initial.sys.type = AudioPortType::DEVICE;
+ initial.sys.profiles.resize(initial.hal.profiles.size());
+ initial.sys.gains.resize(initial.hal.gains.size());
+ initial.sys.activeConfig =
+ createAudioPortConfigFw(make_ACL_Stereo(), make_AFD_Pcm16Bit(), device);
+ initial.sys.activeConfig.hal.flags = initial.hal.flags;
+ AudioPortDeviceExtSys initialSysDevice;
+ initialSysDevice.hwModule = 1;
+ initial.sys.ext = initialSysDevice;
+ {
+ auto conv = aidl2legacy_AudioPortFw_audio_port_v7(initial);
+ ASSERT_TRUE(conv.ok());
+ auto convBack = legacy2aidl_audio_port_v7_AudioPortFw(conv.value());
+ ASSERT_TRUE(convBack.ok());
+ EXPECT_EQ(initial, convBack.value());
+ }
+}
+INSTANTIATE_TEST_SUITE_P(AudioPortFw, AudioPortFwRoundTripTest,
+ testing::Values(make_ADD_DefaultIn(), make_ADD_DefaultOut(),
+ make_ADD_WiredHeadset()));
+
class AudioDirectModeRoundTripTest : public testing::TestWithParam<AudioDirectMode> {};
TEST_P(AudioDirectModeRoundTripTest, Aidl2Legacy2Aidl) {
const auto initial = GetParam();
diff --git a/media/libaudiofoundation/AudioPort.cpp b/media/libaudiofoundation/AudioPort.cpp
index 4513323..6e05abc 100644
--- a/media/libaudiofoundation/AudioPort.cpp
+++ b/media/libaudiofoundation/AudioPort.cpp
@@ -222,7 +222,7 @@
mExtraAudioDescriptors == other->getExtraAudioDescriptors();
}
-status_t AudioPort::writeToParcelable(media::AudioPort* parcelable) const {
+status_t AudioPort::writeToParcelable(media::AudioPortFw* parcelable) const {
parcelable->hal.name = mName;
parcelable->sys.type = VALUE_OR_RETURN_STATUS(
legacy2aidl_audio_port_type_t_AudioPortType(mType));
@@ -249,7 +249,7 @@
return OK;
}
-status_t AudioPort::readFromParcelable(const media::AudioPort& parcelable) {
+status_t AudioPort::readFromParcelable(const media::AudioPortFw& parcelable) {
mName = parcelable.hal.name;
mType = VALUE_OR_RETURN_STATUS(
aidl2legacy_AudioPortType_audio_port_type_t(parcelable.sys.type));
diff --git a/media/libaudiofoundation/DeviceDescriptorBase.cpp b/media/libaudiofoundation/DeviceDescriptorBase.cpp
index 5ffbffc..4185b5f 100644
--- a/media/libaudiofoundation/DeviceDescriptorBase.cpp
+++ b/media/libaudiofoundation/DeviceDescriptorBase.cpp
@@ -174,7 +174,7 @@
return false;
}
-status_t DeviceDescriptorBase::writeToParcelable(media::AudioPort* parcelable) const {
+status_t DeviceDescriptorBase::writeToParcelable(media::AudioPortFw* parcelable) const {
AudioPort::writeToParcelable(parcelable);
AudioPortConfig::writeToParcelable(&parcelable->sys.activeConfig.hal, useInputChannelMask());
parcelable->hal.id = VALUE_OR_RETURN_STATUS(legacy2aidl_audio_port_handle_t_int32_t(mId));
@@ -196,7 +196,7 @@
return OK;
}
-status_t DeviceDescriptorBase::readFromParcelable(const media::AudioPort& parcelable) {
+status_t DeviceDescriptorBase::readFromParcelable(const media::AudioPortFw& parcelable) {
if (parcelable.sys.type != media::AudioPortType::DEVICE) {
return BAD_VALUE;
}
@@ -245,7 +245,7 @@
}
ConversionResult<sp<DeviceDescriptorBase>>
-aidl2legacy_DeviceDescriptorBase(const media::AudioPort& aidl) {
+aidl2legacy_DeviceDescriptorBase(const media::AudioPortFw& aidl) {
sp<DeviceDescriptorBase> result = new DeviceDescriptorBase(AUDIO_DEVICE_NONE);
status_t status = result->readFromParcelable(aidl);
if (status != OK) {
@@ -254,9 +254,9 @@
return result;
}
-ConversionResult<media::AudioPort>
+ConversionResult<media::AudioPortFw>
legacy2aidl_DeviceDescriptorBase(const sp<DeviceDescriptorBase>& legacy) {
- media::AudioPort aidl;
+ media::AudioPortFw aidl;
status_t status = legacy->writeToParcelable(&aidl);
if (status != OK) {
return base::unexpected(status);
diff --git a/media/libaudiofoundation/include/media/AudioPort.h b/media/libaudiofoundation/include/media/AudioPort.h
index b1235f5..77e58ed 100644
--- a/media/libaudiofoundation/include/media/AudioPort.h
+++ b/media/libaudiofoundation/include/media/AudioPort.h
@@ -19,8 +19,8 @@
#include <string>
#include <type_traits>
-#include <android/media/AudioPort.h>
-#include <android/media/AudioPortConfig.h>
+#include <android/media/AudioPortFw.h>
+#include <android/media/AudioPortConfigFw.h>
#include <android/media/audio/common/ExtraAudioDescriptor.h>
#include <binder/Parcel.h>
#include <binder/Parcelable.h>
@@ -118,8 +118,8 @@
bool equals(const sp<AudioPort>& other) const;
- status_t writeToParcelable(media::AudioPort* parcelable) const;
- status_t readFromParcelable(const media::AudioPort& parcelable);
+ status_t writeToParcelable(media::AudioPortFw* parcelable) const;
+ status_t readFromParcelable(const media::AudioPortFw& parcelable);
AudioGains mGains; // gain controllers
// Maximum number of input or output streams that can be simultaneously
diff --git a/media/libaudiofoundation/include/media/DeviceDescriptorBase.h b/media/libaudiofoundation/include/media/DeviceDescriptorBase.h
index dc2899a..501831d 100644
--- a/media/libaudiofoundation/include/media/DeviceDescriptorBase.h
+++ b/media/libaudiofoundation/include/media/DeviceDescriptorBase.h
@@ -18,7 +18,7 @@
#include <vector>
-#include <android/media/AudioPort.h>
+#include <android/media/AudioPortFw.h>
#include <binder/Parcel.h>
#include <binder/Parcelable.h>
#include <media/AudioContainers.h>
@@ -79,8 +79,8 @@
bool equals(const sp<DeviceDescriptorBase>& other) const;
- status_t writeToParcelable(media::AudioPort* parcelable) const;
- status_t readFromParcelable(const media::AudioPort& parcelable);
+ status_t writeToParcelable(media::AudioPortFw* parcelable) const;
+ status_t readFromParcelable(const media::AudioPortFw& parcelable);
protected:
AudioDeviceTypeAddr mDeviceTypeAddr;
@@ -116,8 +116,8 @@
// Conversion routines, according to AidlConversion.h conventions.
ConversionResult<sp<DeviceDescriptorBase>>
-aidl2legacy_DeviceDescriptorBase(const media::AudioPort& aidl);
-ConversionResult<media::AudioPort>
+aidl2legacy_DeviceDescriptorBase(const media::AudioPortFw& aidl);
+ConversionResult<media::AudioPortFw>
legacy2aidl_DeviceDescriptorBase(const sp<DeviceDescriptorBase>& legacy);
} // namespace android
diff --git a/media/libaudiofoundation/tests/audiofoundation_parcelable_test.cpp b/media/libaudiofoundation/tests/audiofoundation_parcelable_test.cpp
index 50d8dc8..e315858 100644
--- a/media/libaudiofoundation/tests/audiofoundation_parcelable_test.cpp
+++ b/media/libaudiofoundation/tests/audiofoundation_parcelable_test.cpp
@@ -117,7 +117,7 @@
audioPort->setGains(getAudioGainsForTest());
audioPort->setAudioProfiles(getAudioProfileVectorForTest());
- media::AudioPort parcelable;
+ media::AudioPortFw parcelable;
ASSERT_EQ(NO_ERROR, audioPort->writeToParcelable(&parcelable));
sp<AudioPort> audioPortFromParcel = new AudioPort(
"", AUDIO_PORT_TYPE_NONE, AUDIO_PORT_ROLE_NONE);
@@ -152,7 +152,7 @@
ASSERT_EQ(desc->setEncapsulationMetadataTypes(
AUDIO_ENCAPSULATION_METADATA_TYPE_ALL_POSITION_BITS), NO_ERROR);
- media::AudioPort parcelable;
+ media::AudioPortFw parcelable;
ASSERT_EQ(NO_ERROR, desc->writeToParcelable(&parcelable));
sp<DeviceDescriptorBase> descFromParcel = new DeviceDescriptorBase(AUDIO_DEVICE_NONE);
ASSERT_EQ(NO_ERROR, descFromParcel->readFromParcelable(parcelable));
diff --git a/media/libaudiohal/impl/Android.bp b/media/libaudiohal/impl/Android.bp
index d151817..822a0fd 100644
--- a/media/libaudiohal/impl/Android.bp
+++ b/media/libaudiohal/impl/Android.bp
@@ -245,12 +245,15 @@
"latest_android_media_audio_common_types_ndk_shared",
],
srcs: [
+ "DeviceHalAidl.cpp",
"DevicesFactoryHalEntry.cpp",
"DevicesFactoryHalAidl.cpp",
+ "EffectConversionHelperAidl.cpp",
"EffectBufferHalAidl.cpp",
"EffectHalAidl.cpp",
"EffectsFactoryHalAidl.cpp",
"EffectsFactoryHalEntry.cpp",
+ "StreamHalAidl.cpp",
],
static_libs: [
"android.hardware.common-V2-ndk",
@@ -259,6 +262,7 @@
shared_libs: [
"libbinder_ndk",
"libaudio_aidl_conversion_common_ndk",
+ "libaudio_aidl_conversion_effect_ndk",
],
header_libs: [
"libaudio_aidl_conversion_common_util_ndk",
@@ -271,4 +275,4 @@
"-Wthread-safety",
"-DBACKEND_NDK",
],
-}
\ No newline at end of file
+}
diff --git a/media/libaudiohal/impl/ConversionHelperAidl.h b/media/libaudiohal/impl/ConversionHelperAidl.h
new file mode 100644
index 0000000..db6b6cf
--- /dev/null
+++ b/media/libaudiohal/impl/ConversionHelperAidl.h
@@ -0,0 +1,54 @@
+/*
+ * Copyright (C) 2023 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#pragma once
+
+#include <string>
+#include <string_view>
+#include <vector>
+
+#include <utils/String16.h>
+#include <utils/String8.h>
+#include <utils/Vector.h>
+
+namespace android {
+
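+// Converts a Vector<String16> of dump() arguments into the argv-style array of
+// C strings expected by the NDK binder dump call. The Args instance owns the
+// converted strings and must outlive the pointer array returned by args().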
+class Args {
+ public:
+ explicit Args(const Vector<String16>& args)
+ : mValues(args.size()), mPtrs(args.size()) {
+ for (size_t i = 0; i < args.size(); ++i) {
+ mValues[i] = std::string(String8(args[i]));
+ mPtrs[i] = mValues[i].c_str();
+ }
+ }
+ const char** args() { return mPtrs.data(); }
+ private:
+ std::vector<std::string> mValues;
+ std::vector<const char*> mPtrs;
+};
+
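+// Keeps the wrapper class name around, e.g. for TIME_CHECK() statistics in the
+// derived AIDL HAL wrappers (analogous to CoreConversionHelperHidl on the HIDL side).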
+class ConversionHelperAidl {
+ protected:
+ ConversionHelperAidl(std::string_view className) : mClassName(className) {}
+
+ const std::string& getClassName() const {
+ return mClassName;
+ }
+
+ const std::string mClassName;
+};
+
+} // namespace android
diff --git a/media/libaudiohal/impl/DeviceHalAidl.cpp b/media/libaudiohal/impl/DeviceHalAidl.cpp
index d85d960..179a655 100644
--- a/media/libaudiohal/impl/DeviceHalAidl.cpp
+++ b/media/libaudiohal/impl/DeviceHalAidl.cpp
@@ -16,94 +16,164 @@
#define LOG_TAG "DeviceHalAidl"
-#include "DeviceHalAidl.h"
+#include <aidl/android/hardware/audio/core/StreamDescriptor.h>
+#include <error/expected_utils.h>
+#include <media/AidlConversionCppNdk.h>
+#include <media/AidlConversionUtil.h>
+#include <mediautils/TimeCheck.h>
+#include <utils/Log.h>
-status_t DeviceHalAidl::getSupportedDevices(uint32_t* devices) {
- ALOGE("%s not implemented yet devices %p", __func__, devices);
- return OK;
+#include "DeviceHalAidl.h"
+#include "StreamHalAidl.h"
+
+using aidl::android::aidl_utils::statusTFromBinderStatus;
+using aidl::android::media::audio::common::AudioMode;
+using aidl::android::media::audio::common::Float;
+using aidl::android::hardware::audio::core::IModule;
+using aidl::android::hardware::audio::core::ITelephony;
+using aidl::android::hardware::audio::core::StreamDescriptor;
+
+namespace android {
+
+status_t DeviceHalAidl::getSupportedDevices(uint32_t*) {
+ // Obsolete.
+ return INVALID_OPERATION;
}
status_t DeviceHalAidl::initCheck() {
- ALOGE("%s not implemented yet", __func__);
+ if (mModule == nullptr) return NO_INIT;
+ // HAL modules are already initialized by the time they are published to the SM.
return OK;
}
status_t DeviceHalAidl::setVoiceVolume(float volume) {
- mVoiceVolume = volume;
- ALOGE("%s not implemented yet %f", __func__, volume);
- return OK;
+ TIME_CHECK();
+ if (!mModule) return NO_INIT;
+ std::shared_ptr<ITelephony> telephony;
+ if (ndk::ScopedAStatus status = mModule->getTelephony(&telephony);
+ status.isOk() && telephony != nullptr) {
+ ITelephony::TelecomConfig inConfig{ .voiceVolume = Float{volume} }, outConfig;
+ RETURN_STATUS_IF_ERROR(
+ statusTFromBinderStatus(telephony->setTelecomConfig(inConfig, &outConfig)));
+ ALOGW_IF(outConfig.voiceVolume.has_value() && volume != outConfig.voiceVolume.value().value,
+ "%s: the resulting voice volume %f is not the same as requested %f",
+ __func__, outConfig.voiceVolume.value().value, volume);
+        return OK;
+    }
+    return INVALID_OPERATION;
}
status_t DeviceHalAidl::setMasterVolume(float volume) {
- mMasterVolume = volume;
- ALOGE("%s not implemented yet %f", __func__, volume);
- return OK;
+ TIME_CHECK();
+ if (!mModule) return NO_INIT;
+ return statusTFromBinderStatus(mModule->setMasterVolume(volume));
}
status_t DeviceHalAidl::getMasterVolume(float *volume) {
- *volume = mMasterVolume;
- ALOGE("%s not implemented yet %f", __func__, *volume);
- return OK;
+ TIME_CHECK();
+ if (!mModule) return NO_INIT;
+ return statusTFromBinderStatus(mModule->getMasterVolume(volume));
}
status_t DeviceHalAidl::setMode(audio_mode_t mode) {
- ALOGE("%s not implemented yet %u", __func__, mode);
- return OK;
+ TIME_CHECK();
+ if (!mModule) return NO_INIT;
+ AudioMode audioMode = VALUE_OR_FATAL(::aidl::android::legacy2aidl_audio_mode_t_AudioMode(mode));
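+    // Propagate the mode change to ITelephony first (when the module exposes it),
+    // then notify the module itself.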
+ std::shared_ptr<ITelephony> telephony;
+ if (ndk::ScopedAStatus status = mModule->getTelephony(&telephony);
+ status.isOk() && telephony != nullptr) {
+ RETURN_STATUS_IF_ERROR(statusTFromBinderStatus(telephony->switchAudioMode(audioMode)));
+ }
+ return statusTFromBinderStatus(mModule->updateAudioMode(audioMode));
}
status_t DeviceHalAidl::setMicMute(bool state) {
- mMicMute = state;
- ALOGE("%s not implemented yet %d", __func__, state);
- return OK;
+ TIME_CHECK();
+ if (!mModule) return NO_INIT;
+ return statusTFromBinderStatus(mModule->setMicMute(state));
}
+
status_t DeviceHalAidl::getMicMute(bool *state) {
- *state = mMicMute;
- ALOGE("%s not implemented yet %d", __func__, *state);
- return OK;
+ TIME_CHECK();
+ if (!mModule) return NO_INIT;
+ return statusTFromBinderStatus(mModule->getMicMute(state));
}
+
status_t DeviceHalAidl::setMasterMute(bool state) {
- mMasterMute = state;
- ALOGE("%s not implemented yet %d", __func__, state);
- return OK;
+ TIME_CHECK();
+ if (!mModule) return NO_INIT;
+ return statusTFromBinderStatus(mModule->setMasterMute(state));
}
+
status_t DeviceHalAidl::getMasterMute(bool *state) {
- *state = mMasterMute;
- ALOGE("%s not implemented yet %d", __func__, *state);
+ TIME_CHECK();
+ if (!mModule) return NO_INIT;
+ return statusTFromBinderStatus(mModule->getMasterMute(state));
+}
+
+status_t DeviceHalAidl::setParameters(const String8& kvPairs __unused) {
+ TIME_CHECK();
+ if (!mModule) return NO_INIT;
+ ALOGE("%s not implemented yet", __func__);
return OK;
}
-status_t DeviceHalAidl::setParameters(const String8& kvPairs) {
- ALOGE("%s not implemented yet %s", __func__, kvPairs.c_str());
+status_t DeviceHalAidl::getParameters(const String8& keys __unused, String8 *values) {
+ TIME_CHECK();
+ values->clear();
+ if (!mModule) return NO_INIT;
+ ALOGE("%s not implemented yet", __func__);
return OK;
}
-status_t DeviceHalAidl::getParameters(const String8& keys, String8 *values) {
- ALOGE("%s not implemented yet %s %s", __func__, keys.c_str(), values->c_str());
+status_t DeviceHalAidl::getInputBufferSize(
+ const struct audio_config* config __unused, size_t* size __unused) {
+ TIME_CHECK();
+ if (!mModule) return NO_INIT;
+ ALOGE("%s not implemented yet", __func__);
return OK;
}
-status_t DeviceHalAidl::getInputBufferSize(const struct audio_config* config, size_t* size) {
- ALOGE("%s not implemented yet %p %zu", __func__, config, *size);
+status_t DeviceHalAidl::openOutputStream(
+ audio_io_handle_t handle __unused, audio_devices_t devices __unused,
+ audio_output_flags_t flags __unused, struct audio_config* config,
+ const char* address __unused,
+ sp<StreamOutHalInterface>* outStream) {
+ if (!outStream || !config) {
+ return BAD_VALUE;
+ }
+ TIME_CHECK();
+ if (!mModule) return NO_INIT;
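+    // FIXME: placeholder stream configuration and buffer size; these should come from
+    // negotiating the actual configuration with the HAL module.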
+ config->sample_rate = 48000;
+ config->format = AUDIO_FORMAT_PCM_24_BIT_PACKED;
+ config->channel_mask = AUDIO_CHANNEL_OUT_STEREO;
+ StreamDescriptor descriptor;
+ descriptor.frameSizeBytes = audio_bytes_per_sample(config->format) *
+ audio_channel_count_from_out_mask(config->channel_mask);
+ descriptor.bufferSizeFrames = 600;
+ *outStream = sp<StreamOutHalAidl>::make(descriptor, nullptr);
return OK;
}
-status_t DeviceHalAidl::openOutputStream(audio_io_handle_t handle, audio_devices_t devices,
- audio_output_flags_t flags, struct audio_config* config,
- const char* address,
- sp<StreamOutHalInterface>* outStream) {
- ALOGE("%s not implemented yet %d %u %u %p %s %p", __func__, handle, devices, flags, config,
- address, outStream);
- return OK;
-}
-
-status_t DeviceHalAidl::openInputStream(audio_io_handle_t handle, audio_devices_t devices,
- struct audio_config* config, audio_input_flags_t flags,
- const char* address, audio_source_t source,
- audio_devices_t outputDevice,
- const char* outputDeviceAddress,
- sp<StreamInHalInterface>* inStream) {
- ALOGE("%s not implemented yet %d %u %u %u %p %s %s %p %d", __func__, handle, devices,
- outputDevice, flags, config, address, outputDeviceAddress, inStream, source);
+status_t DeviceHalAidl::openInputStream(
+ audio_io_handle_t handle __unused, audio_devices_t devices __unused,
+ struct audio_config* config, audio_input_flags_t flags __unused,
+ const char* address __unused, audio_source_t source __unused,
+ audio_devices_t outputDevice __unused,
+ const char* outputDeviceAddress __unused,
+ sp<StreamInHalInterface>* inStream) {
+ if (!inStream || !config) {
+ return BAD_VALUE;
+ }
+ TIME_CHECK();
+ if (!mModule) return NO_INIT;
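+    // FIXME: same placeholder configuration as in openOutputStream.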
+ config->sample_rate = 48000;
+ config->format = AUDIO_FORMAT_PCM_24_BIT_PACKED;
+ config->channel_mask = AUDIO_CHANNEL_IN_STEREO;
+ StreamDescriptor descriptor;
+    descriptor.frameSizeBytes = audio_bytes_per_sample(config->format) *
+            audio_channel_count_from_in_mask(config->channel_mask);
+ descriptor.bufferSizeFrames = 600;
+ *inStream = sp<StreamInHalAidl>::make(descriptor, nullptr);
return OK;
}
@@ -112,76 +182,108 @@
return OK;
}
-status_t DeviceHalAidl::createAudioPatch(unsigned int num_sources,
- const struct audio_port_config* sources,
- unsigned int num_sinks,
- const struct audio_port_config* sinks,
- audio_patch_handle_t* patch) {
- ALOGE("%s not implemented yet %d %p %d %p %p", __func__, num_sources, sources, num_sinks,
- sinks, patch);
+status_t DeviceHalAidl::createAudioPatch(unsigned int num_sources __unused,
+ const struct audio_port_config* sources __unused,
+ unsigned int num_sinks __unused,
+ const struct audio_port_config* sinks __unused,
+ audio_patch_handle_t* patch __unused) {
+ TIME_CHECK();
+ if (!mModule) return NO_INIT;
+ ALOGE("%s not implemented yet", __func__);
return OK;
}
-status_t DeviceHalAidl::releaseAudioPatch(audio_patch_handle_t patch) {
- ALOGE("%s not implemented yet patch %d", __func__, patch);
+status_t DeviceHalAidl::releaseAudioPatch(audio_patch_handle_t patch __unused) {
+ TIME_CHECK();
+ if (!mModule) return NO_INIT;
+ ALOGE("%s not implemented yet", __func__);
return OK;
}
-status_t DeviceHalAidl::setAudioPortConfig(const struct audio_port_config* config) {
- ALOGE("%s not implemented yet config %p", __func__, config);
+status_t DeviceHalAidl::getAudioPort(struct audio_port* port __unused) {
+ TIME_CHECK();
+ ALOGE("%s not implemented yet", __func__);
+ return INVALID_OPERATION;
+}
+
+status_t DeviceHalAidl::getAudioPort(struct audio_port_v7 *port __unused) {
+ TIME_CHECK();
+ ALOGE("%s not implemented yet", __func__);
+ return INVALID_OPERATION;
+}
+
+status_t DeviceHalAidl::setAudioPortConfig(const struct audio_port_config* config __unused) {
+ TIME_CHECK();
+ if (!mModule) return NO_INIT;
+ ALOGE("%s not implemented yet", __func__);
return OK;
}
status_t DeviceHalAidl::getMicrophones(
- std::vector<audio_microphone_characteristic_t>* microphones) {
- ALOGE("%s not implemented yet microphones %p", __func__, microphones);
+ std::vector<audio_microphone_characteristic_t>* microphones __unused) {
+ TIME_CHECK();
+ if (!mModule) return NO_INIT;
+ ALOGE("%s not implemented yet", __func__);
return OK;
}
-status_t DeviceHalAidl::addDeviceEffect(audio_port_handle_t device, sp<EffectHalInterface> effect) {
+status_t DeviceHalAidl::addDeviceEffect(audio_port_handle_t device __unused,
+ sp<EffectHalInterface> effect) {
if (!effect) {
return BAD_VALUE;
}
- ALOGE("%s not implemented yet device %d", __func__, device);
+ TIME_CHECK();
+ if (!mModule) return NO_INIT;
+ ALOGE("%s not implemented yet", __func__);
return OK;
}
-status_t DeviceHalAidl::removeDeviceEffect(audio_port_handle_t device,
+status_t DeviceHalAidl::removeDeviceEffect(audio_port_handle_t device __unused,
sp<EffectHalInterface> effect) {
if (!effect) {
return BAD_VALUE;
}
- ALOGE("%s not implemented yet device %d", __func__, device);
+ TIME_CHECK();
+ if (!mModule) return NO_INIT;
+ ALOGE("%s not implemented yet", __func__);
return OK;
}
status_t DeviceHalAidl::getMmapPolicyInfos(
media::audio::common::AudioMMapPolicyType policyType __unused,
std::vector<media::audio::common::AudioMMapPolicyInfo>* policyInfos __unused) {
+ TIME_CHECK();
ALOGE("%s not implemented yet", __func__);
return OK;
}
int32_t DeviceHalAidl::getAAudioMixerBurstCount() {
+ TIME_CHECK();
ALOGE("%s not implemented yet", __func__);
return OK;
}
int32_t DeviceHalAidl::getAAudioHardwareBurstMinUsec() {
+ TIME_CHECK();
ALOGE("%s not implemented yet", __func__);
return OK;
}
error::Result<audio_hw_sync_t> DeviceHalAidl::getHwAvSync() {
+ TIME_CHECK();
ALOGE("%s not implemented yet", __func__);
return base::unexpected(INVALID_OPERATION);
}
-status_t DeviceHalAidl::dump(int __unused, const Vector<String16>& __unused) {
- ALOGE("%s not implemented yet", __func__);
- return OK;
+status_t DeviceHalAidl::dump(int fd, const Vector<String16>& args) {
+ TIME_CHECK();
+ if (!mModule) return NO_INIT;
+ return mModule->dump(fd, Args(args).args(), args.size());
};
-int32_t DeviceHalAidl::supportsBluetoothVariableLatency(bool* supports __unused) override {
+int32_t DeviceHalAidl::supportsBluetoothVariableLatency(bool* supports __unused) {
+ TIME_CHECK();
ALOGE("%s not implemented yet", __func__);
return INVALID_OPERATION;
}
+
+} // namespace android
diff --git a/media/libaudiohal/impl/DeviceHalAidl.h b/media/libaudiohal/impl/DeviceHalAidl.h
index 5e8a8dd..99e28d8 100644
--- a/media/libaudiohal/impl/DeviceHalAidl.h
+++ b/media/libaudiohal/impl/DeviceHalAidl.h
@@ -16,14 +16,15 @@
#pragma once
+#include <aidl/android/hardware/audio/core/BpModule.h>
#include <media/audiohal/DeviceHalInterface.h>
#include <media/audiohal/EffectHalInterface.h>
-#include <aidl/android/hardware/audio/core/BpModule.h>
+#include "ConversionHelperAidl.h"
namespace android {
-class DeviceHalAidl : public DeviceHalInterface {
+class DeviceHalAidl : public DeviceHalInterface, public ConversionHelperAidl {
public:
// Sets the value of 'devices' to a bitmask of 1 or more values of audio_devices_t.
status_t getSupportedDevices(uint32_t *devices) override;
@@ -86,6 +87,12 @@
// Releases an audio patch.
status_t releaseAudioPatch(audio_patch_handle_t patch) override;
+ // Fills the list of supported attributes for a given audio port.
+ status_t getAudioPort(struct audio_port* port) override;
+
+ // Fills the list of supported attributes for a given audio port.
+ status_t getAudioPort(struct audio_port_v7 *port) override;
+
// Set audio port configuration.
status_t setAudioPortConfig(const struct audio_port_config* config) override;
@@ -111,20 +118,16 @@
int32_t supportsBluetoothVariableLatency(bool* supports __unused) override;
private:
- friend class DevicesFactoryHalAidl;
- const std::shared_ptr<::aidl::android::hardware::audio::core::IModule> mCore;
- float mMasterVolume = 0.0f;
- float mVoiceVolume = 0.0f;
- bool mMasterMute = false;
- bool mMicMute = false;
+ friend class sp<DeviceHalAidl>;
+
+ const std::shared_ptr<::aidl::android::hardware::audio::core::IModule> mModule;
// Can not be constructed directly by clients.
explicit DeviceHalAidl(
- const std::shared_ptr<::aidl::android::hardware::audio::core::IModule>& core)
- : mCore(core) {}
+ const std::shared_ptr<::aidl::android::hardware::audio::core::IModule>& module)
+ : ConversionHelperAidl("DeviceHalAidl"), mModule(module) {}
- // The destructor automatically closes the device.
- ~DeviceHalAidl();
+ ~DeviceHalAidl() override = default;
};
} // namespace android
diff --git a/media/libaudiohal/impl/DeviceHalHidl.cpp b/media/libaudiohal/impl/DeviceHalHidl.cpp
index be063ab..12acebd 100644
--- a/media/libaudiohal/impl/DeviceHalHidl.cpp
+++ b/media/libaudiohal/impl/DeviceHalHidl.cpp
@@ -46,9 +46,6 @@
using namespace ::android::hardware::audio::common::COMMON_TYPES_CPP_VERSION;
using namespace ::android::hardware::audio::CORE_TYPES_CPP_VERSION;
-#define TIME_CHECK() auto timeCheck = \
- mediautils::makeTimeCheckStatsForClassMethod(getClassName(), __func__)
-
DeviceHalHidl::DeviceHalHidl(const sp<::android::hardware::audio::CPP_VERSION::IDevice>& device)
: CoreConversionHelperHidl("DeviceHalHidl"), mDevice(device) {
}
diff --git a/media/libaudiohal/impl/DevicesFactoryHalAidl.cpp b/media/libaudiohal/impl/DevicesFactoryHalAidl.cpp
index b9ca164..78d03e7 100644
--- a/media/libaudiohal/impl/DevicesFactoryHalAidl.cpp
+++ b/media/libaudiohal/impl/DevicesFactoryHalAidl.cpp
@@ -19,6 +19,7 @@
#include <aidl/android/hardware/audio/core/IModule.h>
#include <android/binder_manager.h>
+#include <binder/IServiceManager.h>
#include <memory>
#include <utils/Log.h>
@@ -35,27 +36,28 @@
ALOG_ASSERT(iconfig != nullptr, "Provided default IConfig service is NULL");
}
-void DevicesFactoryHalAidl::onFirstRef() {
- ALOGE("%s not implemented yet", __func__);
-}
-
// Opens a device with the specified name. To close the device, it is
// necessary to release references to the returned object.
status_t DevicesFactoryHalAidl::openDevice(const char *name, sp<DeviceHalInterface> *device) {
if (name == nullptr || device == nullptr) {
return BAD_VALUE;
}
- ALOGE("%s not implemented yet %s", __func__, name);
- return INVALID_OPERATION;
- // TODO: only support primary now ("default" means "primary")
- if (strcmp(name, "primary") != 0) {
- auto serviceName = std::string() + IModule::descriptor + "/default";
- auto service = IModule::fromBinder(
+ std::shared_ptr<IModule> service;
+    // FIXME: Normally we would list the available HAL modules and connect to them;
+    // however, currently we still get the list of module names from the config.
+    // Since the example service does not have all modules, the ServiceManager
+    // would wait for the missing ones forever.
+ if (strcmp(name, "primary") == 0 || strcmp(name, "r_submix") == 0) {
+ if (strcmp(name, "primary") == 0) name = "default";
+ auto serviceName = std::string(IModule::descriptor) + "/" + name;
+ service = IModule::fromBinder(
ndk::SpAIBinder(AServiceManager_waitForService(serviceName.c_str())));
- ALOGW("%s fromBinder %s %s", __func__, IModule::descriptor, service ? "succ" : "fail");
- *device = new DeviceHalAidl(service);
+ ALOGE_IF(service == nullptr, "%s fromBinder %s failed", __func__, serviceName.c_str());
}
+    // If the service is a nullptr, the device will not be fully functional,
+    // but it will not crash either.
+ *device = sp<DeviceHalAidl>::make(service);
return OK;
}
@@ -63,26 +65,45 @@
if (pids == nullptr) {
return BAD_VALUE;
}
- ALOGE("%s not implemented yet", __func__);
- return INVALID_OPERATION;
+ // The functionality for retrieving debug infos of services is not exposed via the NDK.
+ sp<IServiceManager> sm = defaultServiceManager();
+ if (sm == nullptr) {
+ return NO_INIT;
+ }
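+    // Collect into a set first: one process may host several IModule instances,
+    // and each PID should only be reported once.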
+ std::set<pid_t> pidsSet;
+ const auto moduleServiceName = std::string(IModule::descriptor) + "/";
+ auto debugInfos = sm->getServiceDebugInfo();
+    for (const auto& info : debugInfos) {
+        if (info.pid > 0 &&
+            info.name.size() > moduleServiceName.size() && // '>' as there must be an instance name
+            info.name.substr(0, moduleServiceName.size()) == moduleServiceName) {
+ pidsSet.insert(info.pid);
+ }
+ }
+ *pids = {pidsSet.begin(), pidsSet.end()};
+ return NO_ERROR;
}
status_t DevicesFactoryHalAidl::setCallbackOnce(sp<DevicesFactoryHalCallback> callback) {
- if (callback == nullptr) {
- return BAD_VALUE;
+ // Dynamic registration of module instances is not supported. The functionality
+ // in the audio server which is related to this callback can be removed together
+ // with HIDL support.
+ ALOG_ASSERT(callback != nullptr);
+ if (callback != nullptr) {
+ callback->onNewDevicesAvailable();
}
- ALOGE("%s not implemented yet", __func__);
- return INVALID_OPERATION;
+ return NO_ERROR;
}
AudioHalVersionInfo DevicesFactoryHalAidl::getHalVersion() const {
int32_t versionNumber = 0;
- if (mIConfig) {
- if (!mIConfig->getInterfaceVersion(&versionNumber).isOk()) {
- ALOGE("%s getInterfaceVersion failed", __func__);
- } else {
- ALOGI("%s getInterfaceVersion %d", __func__, versionNumber);
+ if (mIConfig != 0) {
+ if (ndk::ScopedAStatus status = mIConfig->getInterfaceVersion(&versionNumber);
+ !status.isOk()) {
+ ALOGE("%s getInterfaceVersion failed: %s", __func__, status.getDescription().c_str());
}
+ } else {
+ ALOGW("%s no IConfig instance", __func__);
}
// AIDL does not have minor version, fill 0 for all versions
return AudioHalVersionInfo(AudioHalVersionInfo::Type::AIDL, versionNumber);
diff --git a/media/libaudiohal/impl/DevicesFactoryHalAidl.h b/media/libaudiohal/impl/DevicesFactoryHalAidl.h
index 71138a0..cb627bc 100644
--- a/media/libaudiohal/impl/DevicesFactoryHalAidl.h
+++ b/media/libaudiohal/impl/DevicesFactoryHalAidl.h
@@ -27,7 +27,6 @@
public:
explicit DevicesFactoryHalAidl(
std::shared_ptr<::aidl::android::hardware::audio::core::IConfig> iConfig);
- void onFirstRef() override;
// Opens a device with the specified name. To close the device, it is
// necessary to release references to the returned object.
diff --git a/media/libaudiohal/impl/EffectConversionHelperAidl.cpp b/media/libaudiohal/impl/EffectConversionHelperAidl.cpp
new file mode 100644
index 0000000..ff521aa
--- /dev/null
+++ b/media/libaudiohal/impl/EffectConversionHelperAidl.cpp
@@ -0,0 +1,569 @@
+/*
+ * Copyright (C) 2023 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <cstdint>
+#include <cstring>
+#include <optional>
+#define LOG_TAG "EffectConversionHelperAidl"
+//#define LOG_NDEBUG 0
+
+#include <error/expected_utils.h>
+#include <media/AidlConversionNdk.h>
+#include <media/AidlConversionEffect.h>
+#include <media/audiohal/AudioEffectUuid.h>
+#include <system/audio_effects/effect_aec.h>
+#include <system/audio_effects/effect_agc2.h>
+#include <system/audio_effects/effect_bassboost.h>
+#include <system/audio_effects/effect_downmix.h>
+#include <system/audio_effects/effect_dynamicsprocessing.h>
+#include <system/audio_effects/effect_environmentalreverb.h>
+#include <system/audio_effects/effect_equalizer.h>
+#include <system/audio_effects/effect_hapticgenerator.h>
+#include <system/audio_effects/effect_loudnessenhancer.h>
+#include <system/audio_effects/effect_ns.h>
+#include <system/audio_effects/effect_presetreverb.h>
+#include <system/audio_effects/effect_spatializer.h>
+#include <system/audio_effects/effect_virtualizer.h>
+#include <system/audio_effects/effect_visualizer.h>
+
+#include <utils/Log.h>
+
+#include "EffectConversionHelperAidl.h"
+
+namespace android {
+namespace effect {
+
+using ::aidl::android::aidl_utils::statusTFromBinderStatus;
+using ::aidl::android::hardware::audio::effect::AcousticEchoCanceler;
+using ::aidl::android::hardware::audio::effect::AutomaticGainControl;
+using ::aidl::android::hardware::audio::effect::BassBoost;
+using ::aidl::android::hardware::audio::effect::CommandId;
+using ::aidl::android::hardware::audio::effect::Descriptor;
+using ::aidl::android::hardware::audio::effect::Downmix;
+using ::aidl::android::hardware::audio::effect::Parameter;
+using ::aidl::android::media::audio::common::AudioDeviceDescription;
+using ::aidl::android::media::audio::common::AudioUuid;
+using android::effect::utils::EffectParamReader;
+using android::effect::utils::EffectParamWriter;
+
+using ::android::status_t;
+
+const std::map<uint32_t /* effect_command_e */, EffectConversionHelperAidl::CommandHandler>
+ EffectConversionHelperAidl::mCommandHandlerMap = {
+ {EFFECT_CMD_INIT, &EffectConversionHelperAidl::handleInit},
+ {EFFECT_CMD_SET_PARAM, &EffectConversionHelperAidl::handleSetParameter},
+ {EFFECT_CMD_GET_PARAM, &EffectConversionHelperAidl::handleGetParameter},
+ {EFFECT_CMD_SET_CONFIG, &EffectConversionHelperAidl::handleSetConfig},
+ {EFFECT_CMD_GET_CONFIG, &EffectConversionHelperAidl::handleGetConfig},
+ {EFFECT_CMD_RESET, &EffectConversionHelperAidl::handleReset},
+ {EFFECT_CMD_ENABLE, &EffectConversionHelperAidl::handleEnable},
+ {EFFECT_CMD_DISABLE, &EffectConversionHelperAidl::handleDisable},
+ {EFFECT_CMD_SET_DEVICE, &EffectConversionHelperAidl::handleSetDevice},
+ {EFFECT_CMD_SET_INPUT_DEVICE, &EffectConversionHelperAidl::handleSetDevice},
+ {EFFECT_CMD_SET_VOLUME, &EffectConversionHelperAidl::handleSetVolume},
+ {EFFECT_CMD_OFFLOAD, &EffectConversionHelperAidl::handleSetOffload},
+ {EFFECT_CMD_FIRST_PROPRIETARY, &EffectConversionHelperAidl::handleFirstPriority}};
+
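+// Only AEC, AutomaticGainControl, BassBoost and Downmix are wired up so far; parameter
+// commands for other effect types are rejected with BAD_VALUE by the handlers below.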
+const std::map<AudioUuid /* TypeUUID */, std::pair<EffectConversionHelperAidl::SetParameter,
+ EffectConversionHelperAidl::GetParameter>>
+ EffectConversionHelperAidl::mParameterHandlerMap = {
+ {kAcousticEchoCancelerTypeUUID,
+ {&EffectConversionHelperAidl::setAecParameter,
+ &EffectConversionHelperAidl::getAecParameter}},
+ {kAutomaticGainControlTypeUUID,
+ {&EffectConversionHelperAidl::setAgcParameter,
+ &EffectConversionHelperAidl::getAgcParameter}},
+ {kBassBoostTypeUUID,
+ {&EffectConversionHelperAidl::setBassBoostParameter,
+ &EffectConversionHelperAidl::getBassBoostParameter}},
+ {kDownmixTypeUUID,
+ {&EffectConversionHelperAidl::setDownmixParameter,
+ &EffectConversionHelperAidl::getDownmixParameter}}};
+
+EffectConversionHelperAidl::EffectConversionHelperAidl(
+ std::shared_ptr<::aidl::android::hardware::audio::effect::IEffect> effect,
+ int32_t sessionId, int32_t ioId,
+ const ::aidl::android::hardware::audio::effect::Descriptor& desc)
+ : mSessionId(sessionId), mIoId(ioId), mDesc(desc), mEffect(std::move(effect)) {
+ mCommon.session = sessionId;
+ mCommon.ioHandle = ioId;
+ mCommon.input = mCommon.output = kDefaultAudioConfig;
+}
+
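+// Dispatcher for legacy effect commands: looks up the handler registered for the
+// effect_command_e code and forwards the payload to the corresponding IEffect call.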
+status_t EffectConversionHelperAidl::handleCommand(uint32_t cmdCode, uint32_t cmdSize,
+ void* pCmdData, uint32_t* replySize,
+ void* pReplyData) {
+ const auto& handler = mCommandHandlerMap.find(cmdCode);
+ if (handler == mCommandHandlerMap.end() || !handler->second) {
+ ALOGE("%s handler for command %u doesn't exist", __func__, cmdCode);
+ return BAD_VALUE;
+ }
+ return (this->*handler->second)(cmdSize, pCmdData, replySize, pReplyData);
+}
+
+status_t EffectConversionHelperAidl::handleInit(uint32_t cmdSize __unused,
+ const void* pCmdData __unused, uint32_t* replySize,
+ void* pReplyData) {
+ if (!replySize || *replySize < sizeof(int) || !pReplyData) {
+ return BAD_VALUE;
+ }
+
+ return *(status_t*)pReplyData =
+ statusTFromBinderStatus(mEffect->open(mCommon, std::nullopt, &mOpenReturn));
+}
+
+status_t EffectConversionHelperAidl::handleSetParameter(uint32_t cmdSize, const void* pCmdData,
+ uint32_t* replySize, void* pReplyData) {
+ if (cmdSize < sizeof(effect_param_t) || !pCmdData || !replySize ||
+ *replySize < sizeof(int) || !pReplyData) {
+ return BAD_VALUE;
+ }
+
+ auto reader = EffectParamReader(*(effect_param_t*)pCmdData);
+ if (!reader.validateCmdSize(cmdSize)) {
+ ALOGE("%s illegal param %s size %u", __func__, reader.toString().c_str(), cmdSize);
+ return BAD_VALUE;
+ }
+
+ const auto& handler = mParameterHandlerMap.find(mDesc.common.id.type);
+ if (handler == mParameterHandlerMap.end() || !handler->second.first) {
+ ALOGE("%s handler for uuid %s not found", __func__,
+ mDesc.common.id.type.toString().c_str());
+ return BAD_VALUE;
+ }
+ const SetParameter& functor = handler->second.first;
+ return *(status_t*)pReplyData = (this->*functor)(reader);
+}
+
+status_t EffectConversionHelperAidl::handleGetParameter(uint32_t cmdSize, const void* pCmdData,
+ uint32_t* replySize, void* pReplyData) {
+ if (cmdSize < sizeof(effect_param_t) || !pCmdData || !replySize || !pReplyData) {
+ return BAD_VALUE;
+ }
+
+ const auto reader = EffectParamReader(*(effect_param_t*)pCmdData);
+ if (!reader.validateCmdSize(cmdSize)) {
+        ALOGE("%s illegal param %s, cmdSize %u", __func__, reader.toString().c_str(),
+              cmdSize);
+ return BAD_VALUE;
+ }
+
+ const auto& handler = mParameterHandlerMap.find(mDesc.common.id.type);
+ if (handler == mParameterHandlerMap.end() || !handler->second.second) {
+ ALOGE("%s handler for uuid %s not found", __func__,
+ mDesc.common.id.type.toString().c_str());
+ return BAD_VALUE;
+ }
+ const GetParameter& functor = handler->second.second;
+ memcpy(pReplyData, pCmdData, sizeof(effect_param_t) + reader.getParameterSize());
+ auto writer = EffectParamWriter(*(effect_param_t *)pReplyData);
+ (this->*functor)(writer);
+ *replySize = writer.getTotalSize();
+ return writer.getStatus();
+}
+
+status_t EffectConversionHelperAidl::handleSetConfig(uint32_t cmdSize, const void* pCmdData,
+ uint32_t* replySize, void* pReplyData) {
+ if (!replySize || *replySize != sizeof(int) || !pReplyData ||
+ cmdSize != sizeof(effect_config_t)) {
+ return BAD_VALUE;
+ }
+
+ const auto& legacyConfig = static_cast<const effect_config_t*>(pCmdData);
+ // already open, apply latest settings
+ mCommon.input.base =
+ VALUE_OR_RETURN_STATUS(::aidl::android::legacy2aidl_buffer_config_t_AudioConfigBase(
+ legacyConfig->inputCfg, true /* isInput */));
+ mCommon.output.base =
+ VALUE_OR_RETURN_STATUS(::aidl::android::legacy2aidl_buffer_config_t_AudioConfigBase(
+ legacyConfig->outputCfg, false /* isInput */));
+ mCommon.session = mSessionId;
+ mCommon.ioHandle = mIoId;
+ // TODO: add access mode support
+ RETURN_STATUS_IF_ERROR(statusTFromBinderStatus(
+ mEffect->setParameter(Parameter::make<Parameter::common>(mCommon))));
+ return *static_cast<int32_t*>(pReplyData) = OK;
+}
+
+status_t EffectConversionHelperAidl::handleGetConfig(uint32_t cmdSize __unused,
+ const void* pCmdData __unused,
+ uint32_t* replySize, void* pReplyData) {
+ if (!replySize || *replySize != sizeof(effect_config_t) || !pReplyData) {
+ ALOGE("%s parameter invalid %p %p", __func__, replySize, pReplyData);
+ return BAD_VALUE;
+ }
+
+ Parameter param;
+    RETURN_STATUS_IF_ERROR(statusTFromBinderStatus(mEffect->getParameter(
+            Parameter::Id::make<Parameter::Id::commonTag>(Parameter::common), &param)));
+
+ const auto& common = param.get<Parameter::common>();
+ effect_config_t* pConfig = (effect_config_t*)pReplyData;
+ pConfig->inputCfg = VALUE_OR_RETURN_STATUS(
+ ::aidl::android::aidl2legacy_AudioConfigBase_buffer_config_t(common.input.base, true));
+ pConfig->outputCfg =
+ VALUE_OR_RETURN_STATUS(::aidl::android::aidl2legacy_AudioConfigBase_buffer_config_t(
+ common.output.base, false));
+ return OK;
+}
+
+status_t EffectConversionHelperAidl::handleReset(uint32_t cmdSize __unused,
+ const void* pCmdData __unused, uint32_t* replySize,
+ void* pReplyData) {
+ if (!replySize || *replySize != sizeof(effect_config_t) || !pReplyData) {
+ ALOGE("%s parameter invalid %p %p", __func__, replySize, pReplyData);
+ return BAD_VALUE;
+ }
+
+ return statusTFromBinderStatus(mEffect->command(CommandId::RESET));
+}
+
+status_t EffectConversionHelperAidl::handleEnable(uint32_t cmdSize __unused,
+ const void* pCmdData __unused, uint32_t* replySize,
+ void* pReplyData) {
+ if (!replySize || *replySize != sizeof(effect_config_t) || !pReplyData) {
+ ALOGE("%s parameter invalid %p %p", __func__, replySize, pReplyData);
+ return BAD_VALUE;
+ }
+
+ return statusTFromBinderStatus(mEffect->command(CommandId::START));
+}
+
+status_t EffectConversionHelperAidl::handleDisable(uint32_t cmdSize __unused,
+ const void* pCmdData __unused,
+ uint32_t* replySize, void* pReplyData) {
+ if (!replySize || *replySize != sizeof(effect_config_t) || !pReplyData) {
+ ALOGE("%s parameter invalid %p %p", __func__, replySize, pReplyData);
+ return BAD_VALUE;
+ }
+
+ return statusTFromBinderStatus(mEffect->command(CommandId::STOP));
+}
+
+status_t EffectConversionHelperAidl::handleSetDevice(uint32_t cmdSize, const void* pCmdData,
+ uint32_t* replySize, void* pReplyData) {
+ if (cmdSize != sizeof(uint32_t) || !pCmdData || !replySize ||
+ *replySize != sizeof(effect_config_t) || !pReplyData) {
+ ALOGE("%s parameter invalid %u %p %p %p", __func__, cmdSize, pCmdData, replySize,
+ pReplyData);
+ return BAD_VALUE;
+ }
+ // TODO: convert from audio_devices_t to std::vector<AudioDeviceDescription>
+ // const auto& legacyDevice = *(uint32_t*)(pCmdData);
+ std::vector<AudioDeviceDescription> aidlDevices;
+ RETURN_STATUS_IF_ERROR(statusTFromBinderStatus(
+ mEffect->setParameter(Parameter::make<Parameter::deviceDescription>(aidlDevices))));
+ return *static_cast<int32_t*>(pReplyData) = OK;
+}
+
+status_t EffectConversionHelperAidl::handleSetVolume(uint32_t cmdSize, const void* pCmdData,
+ uint32_t* replySize, void* pReplyData) {
+ if (cmdSize != 2 * sizeof(uint32_t) || !pCmdData || !replySize ||
+ *replySize != sizeof(effect_config_t) || !pReplyData) {
+ ALOGE("%s parameter invalid %u %p %p %p", __func__, cmdSize, pCmdData, replySize,
+ pReplyData);
+ return BAD_VALUE;
+ }
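+    // Legacy EFFECT_CMD_SET_VOLUME passes left/right as 8.24 fixed point values;
+    // convert them to the float range expected by Parameter::VolumeStereo.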
+    Parameter::VolumeStereo volume = {.left = (float)(*(uint32_t*)pCmdData) / (1 << 24),
+                                      .right = (float)(*((uint32_t*)pCmdData + 1)) / (1 << 24)};
+ RETURN_STATUS_IF_ERROR(statusTFromBinderStatus(
+ mEffect->setParameter(Parameter::make<Parameter::volumeStereo>(volume))));
+ return *static_cast<int32_t*>(pReplyData) = OK;
+}
+
+status_t EffectConversionHelperAidl::handleSetOffload(uint32_t cmdSize, const void* pCmdData,
+ uint32_t* replySize, void* pReplyData) {
+ if (cmdSize < sizeof(effect_offload_param_t) || !pCmdData || !replySize ||
+ *replySize != sizeof(effect_config_t) || !pReplyData) {
+ ALOGE("%s parameter invalid %u %p %p %p", __func__, cmdSize, pCmdData, replySize,
+ pReplyData);
+ return BAD_VALUE;
+ }
+ // TODO: handle this after effectproxy implemented in libaudiohal
+ return *static_cast<int32_t*>(pReplyData) = OK;
+}
+
+status_t EffectConversionHelperAidl::handleFirstPriority(uint32_t cmdSize __unused,
+ const void* pCmdData __unused,
+ uint32_t* replySize, void* pReplyData) {
+ if (!replySize || *replySize != sizeof(effect_config_t) || !pReplyData) {
+ ALOGE("%s parameter invalid %p %p", __func__, replySize, pReplyData);
+ return BAD_VALUE;
+ }
+
+ // TODO to be implemented
+ return OK;
+}
+
+status_t EffectConversionHelperAidl::setAecParameter(EffectParamReader& param) {
+ uint32_t type, value = 0;
+ if (!param.validateParamValueSize(sizeof(uint32_t), sizeof(uint32_t)) ||
+ OK != param.readFromParameter(&type) ||
+ OK != param.readFromValue(&value)) {
+ ALOGW("%s invalid param %s", __func__, param.toString().c_str());
+ return BAD_VALUE;
+ }
+
+ Parameter aidlParam;
+ switch (type) {
+ case AEC_PARAM_ECHO_DELAY:
+ FALLTHROUGH_INTENDED;
+ case AEC_PARAM_PROPERTIES: {
+ aidlParam = VALUE_OR_RETURN_STATUS(
+ aidl::android::legacy2aidl_uint32_echoDelay_Parameter_aec(value));
+ break;
+ }
+ case AEC_PARAM_MOBILE_MODE: {
+ aidlParam = VALUE_OR_RETURN_STATUS(
+ aidl::android::legacy2aidl_uint32_mobileMode_Parameter_aec(value));
+ break;
+ }
+ default: {
+ ALOGW("%s unknown param %s", __func__, param.toString().c_str());
+ return BAD_VALUE;
+ }
+ }
+
+ return statusTFromBinderStatus(mEffect->setParameter(aidlParam));
+}
+
+status_t EffectConversionHelperAidl::getAecParameter(EffectParamWriter& param) {
+ uint32_t type = 0, value = 0;
+ if (!param.validateParamValueSize(sizeof(uint32_t), sizeof(uint32_t)) ||
+ OK != param.readFromParameter(&type) || OK != param.readFromValue(&value)) {
+ param.setStatus(BAD_VALUE);
+ ALOGW("%s invalid param %s", __func__, param.toString().c_str());
+ return BAD_VALUE;
+ }
+ Parameter aidlParam;
+ switch (type) {
+ case AEC_PARAM_ECHO_DELAY:
+ FALLTHROUGH_INTENDED;
+ case AEC_PARAM_PROPERTIES: {
+ Parameter::Id id = MAKE_SPECIFIC_PARAMETER_ID(AcousticEchoCanceler,
+ acousticEchoCancelerTag, echoDelayUs);
+ RETURN_STATUS_IF_ERROR(statusTFromBinderStatus(mEffect->getParameter(id, &aidlParam)));
+ value = VALUE_OR_RETURN_STATUS(
+ aidl::android::aidl2legacy_Parameter_aec_uint32_echoDelay(aidlParam));
+ break;
+ }
+ case AEC_PARAM_MOBILE_MODE: {
+ Parameter::Id id = MAKE_SPECIFIC_PARAMETER_ID(AcousticEchoCanceler,
+ acousticEchoCancelerTag, mobileMode);
+ RETURN_STATUS_IF_ERROR(statusTFromBinderStatus(mEffect->getParameter(id, &aidlParam)));
+ value = VALUE_OR_RETURN_STATUS(
+ aidl::android::aidl2legacy_Parameter_aec_uint32_mobileMode(aidlParam));
+ break;
+ }
+ default:
+ ALOGW("%s unknown param %s", __func__, param.toString().c_str());
+ return BAD_VALUE;
+ }
+ param.writeToValue(&value);
+ return OK;
+}
+
+status_t EffectConversionHelperAidl::setAgcParameter(EffectParamReader& param) {
+ uint32_t type = 0, value = 0;
+ if (!param.validateParamValueSize(sizeof(uint32_t), sizeof(uint32_t)) ||
+ OK != param.readFromParameter(&type) || OK != param.readFromValue(&value)) {
+ ALOGW("%s invalid param %s", __func__, param.toString().c_str());
+ return BAD_VALUE;
+ }
+ Parameter aidlParam;
+ switch (type) {
+ case AGC2_PARAM_FIXED_DIGITAL_GAIN: {
+ aidlParam = VALUE_OR_RETURN_STATUS(
+ aidl::android::legacy2aidl_uint32_fixedDigitalGain_Parameter_agc(value));
+ break;
+ }
+ case AGC2_PARAM_ADAPT_DIGI_LEVEL_ESTIMATOR: {
+ aidlParam = VALUE_OR_RETURN_STATUS(
+ aidl::android::legacy2aidl_uint32_levelEstimator_Parameter_agc(value));
+ break;
+ }
+ case AGC2_PARAM_ADAPT_DIGI_EXTRA_SATURATION_MARGIN: {
+ aidlParam = VALUE_OR_RETURN_STATUS(
+ aidl::android::legacy2aidl_uint32_saturationMargin_Parameter_agc(value));
+ break;
+ }
+ default: {
+ ALOGW("%s unknown param %s", __func__, param.toString().c_str());
+ return BAD_VALUE;
+ }
+ }
+
+ return statusTFromBinderStatus(mEffect->setParameter(aidlParam));
+}
+
+status_t EffectConversionHelperAidl::getAgcParameter(EffectParamWriter& param) {
+ uint32_t type = 0, value = 0;
+ if (!param.validateParamValueSize(sizeof(uint32_t), sizeof(uint32_t)) ||
+ OK != param.readFromParameter(&type) || OK != param.readFromValue(&value)) {
+ ALOGW("%s invalid param %s", __func__, param.toString().c_str());
+ return BAD_VALUE;
+ }
+ Parameter aidlParam;
+ switch (type) {
+ case AGC2_PARAM_FIXED_DIGITAL_GAIN: {
+ Parameter::Id id = MAKE_SPECIFIC_PARAMETER_ID(
+ AutomaticGainControl, automaticGainControlTag, fixedDigitalGainMb);
+ RETURN_STATUS_IF_ERROR(statusTFromBinderStatus(mEffect->getParameter(id, &aidlParam)));
+ value = VALUE_OR_RETURN_STATUS(
+ aidl::android::aidl2legacy_Parameter_agc_uint32_fixedDigitalGain(aidlParam));
+ break;
+ }
+ case AGC2_PARAM_ADAPT_DIGI_LEVEL_ESTIMATOR: {
+ Parameter::Id id = MAKE_SPECIFIC_PARAMETER_ID(AutomaticGainControl,
+ automaticGainControlTag, levelEstimator);
+ RETURN_STATUS_IF_ERROR(statusTFromBinderStatus(mEffect->getParameter(id, &aidlParam)));
+ value = VALUE_OR_RETURN_STATUS(
+ aidl::android::aidl2legacy_Parameter_agc_uint32_levelEstimator(aidlParam));
+ break;
+ }
+ case AGC2_PARAM_ADAPT_DIGI_EXTRA_SATURATION_MARGIN: {
+ Parameter::Id id = MAKE_SPECIFIC_PARAMETER_ID(
+ AutomaticGainControl, automaticGainControlTag, saturationMarginMb);
+ RETURN_STATUS_IF_ERROR(statusTFromBinderStatus(mEffect->getParameter(id, &aidlParam)));
+ value = VALUE_OR_RETURN_STATUS(
+ aidl::android::aidl2legacy_Parameter_agc_uint32_saturationMargin(aidlParam));
+ break;
+ }
+ default: {
+ ALOGW("%s unknown param %s", __func__, param.toString().c_str());
+ return BAD_VALUE;
+ }
+ }
+
+ param.writeToValue(&value);
+ return OK;
+}
+
+status_t EffectConversionHelperAidl::setBassBoostParameter(EffectParamReader& param) {
+ uint32_t type = 0;
+ uint16_t value = 0;
+ if (!param.validateParamValueSize(sizeof(uint32_t), sizeof(uint16_t)) ||
+ OK != param.readFromParameter(&type) || OK != param.readFromValue(&value)) {
+ ALOGW("%s invalid param %s", __func__, param.toString().c_str());
+ return BAD_VALUE;
+ }
+ Parameter aidlParam;
+ switch (type) {
+ case BASSBOOST_PARAM_STRENGTH: {
+ aidlParam = VALUE_OR_RETURN_STATUS(
+ aidl::android::legacy2aidl_uint16_strengthPm_Parameter_BassBoost(value));
+ break;
+ }
+ case BASSBOOST_PARAM_STRENGTH_SUPPORTED: {
+ ALOGW("%s set BASSBOOST_PARAM_STRENGTH_SUPPORTED not supported", __func__);
+ return BAD_VALUE;
+ }
+ default: {
+ ALOGW("%s unknown param %s", __func__, param.toString().c_str());
+ return BAD_VALUE;
+ }
+ }
+
+ return statusTFromBinderStatus(mEffect->setParameter(aidlParam));
+}
+
+status_t EffectConversionHelperAidl::getBassBoostParameter(EffectParamWriter& param) {
+ uint32_t type = 0, value = 0;
+ if (!param.validateParamValueSize(sizeof(uint32_t), sizeof(uint32_t)) ||
+ OK != param.readFromParameter(&type) || OK != param.readFromValue(&value)) {
+ ALOGW("%s invalid param %s", __func__, param.toString().c_str());
+ param.setStatus(BAD_VALUE);
+ return BAD_VALUE;
+ }
+ Parameter aidlParam;
+ switch (type) {
+ case BASSBOOST_PARAM_STRENGTH: {
+ Parameter::Id id = MAKE_SPECIFIC_PARAMETER_ID(BassBoost, bassBoostTag, strengthPm);
+ RETURN_STATUS_IF_ERROR(statusTFromBinderStatus(mEffect->getParameter(id, &aidlParam)));
+ value = VALUE_OR_RETURN_STATUS(
+ aidl::android::aidl2legacy_Parameter_BassBoost_uint16_strengthPm(aidlParam));
+ break;
+ }
+ case BASSBOOST_PARAM_STRENGTH_SUPPORTED: {
+ const auto& cap =
+ VALUE_OR_RETURN_STATUS(aidl::android::UNION_GET(mDesc.capability, bassBoost));
+ value = VALUE_OR_RETURN_STATUS(
+ aidl::android::convertIntegral<uint32_t>(cap.strengthSupported));
+ break;
+ }
+ default: {
+ ALOGW("%s unknown param %s", __func__, param.toString().c_str());
+ return BAD_VALUE;
+ }
+ }
+
+ param.writeToValue(&value);
+ return OK;
+}
+
+status_t EffectConversionHelperAidl::setDownmixParameter(EffectParamReader& param) {
+ uint32_t type = 0;
+ int16_t value = 0;
+ if (!param.validateParamValueSize(sizeof(uint32_t), sizeof(int16_t)) ||
+ OK != param.readFromParameter(&type) || OK != param.readFromValue(&value)) {
+ ALOGW("%s invalid param %s", __func__, param.toString().c_str());
+ return BAD_VALUE;
+ }
+ Parameter aidlParam;
+ switch (type) {
+ case DOWNMIX_PARAM_TYPE: {
+ aidlParam = VALUE_OR_RETURN_STATUS(
+ aidl::android::legacy2aidl_int16_type_Parameter_Downmix(value));
+ break;
+ }
+ default: {
+ ALOGW("%s unknown param %s", __func__, param.toString().c_str());
+ return BAD_VALUE;
+ }
+ }
+
+ return statusTFromBinderStatus(mEffect->setParameter(aidlParam));
+}
+
+status_t EffectConversionHelperAidl::getDownmixParameter(EffectParamWriter& param) {
+ int16_t value = 0;
+ uint32_t type = 0;
+ if (!param.validateParamValueSize(sizeof(uint32_t), sizeof(uint16_t)) ||
+ OK != param.readFromParameter(&type) || OK != param.readFromValue(&value)) {
+ param.setStatus(BAD_VALUE);
+ return BAD_VALUE;
+ }
+ Parameter aidlParam;
+ switch (type) {
+ case DOWNMIX_PARAM_TYPE: {
+ Parameter::Id id = MAKE_SPECIFIC_PARAMETER_ID(Downmix, downmixTag, type);
+ RETURN_STATUS_IF_ERROR(statusTFromBinderStatus(mEffect->getParameter(id, &aidlParam)));
+ value = VALUE_OR_RETURN_STATUS(
+ aidl::android::aidl2legacy_Parameter_Downmix_int16_type(aidlParam));
+ break;
+ }
+ default: {
+ ALOGW("%s unknown param %s", __func__, param.toString().c_str());
+ return BAD_VALUE;
+ }
+ }
+
+ param.writeToValue(&value);
+ return OK;
+}
+
+} // namespace effect
+} // namespace android
diff --git a/media/libaudiohal/impl/EffectConversionHelperAidl.h b/media/libaudiohal/impl/EffectConversionHelperAidl.h
new file mode 100644
index 0000000..84f8cc6
--- /dev/null
+++ b/media/libaudiohal/impl/EffectConversionHelperAidl.h
@@ -0,0 +1,117 @@
+/*
+ * Copyright (C) 2023 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#pragma once
+
+#include <cstddef>
+#include <map>
+#include <memory>
+#include <utils/Errors.h>
+
+#include <aidl/android/hardware/audio/effect/IEffect.h>
+
+#include <system/audio_effect.h>
+#include <system/audio_effects/audio_effects_utils.h>
+
+namespace android {
+namespace effect {
+
+class EffectConversionHelperAidl {
+ protected:
+ EffectConversionHelperAidl(
+ std::shared_ptr<::aidl::android::hardware::audio::effect::IEffect> effect,
+ int32_t sessionId, int32_t ioId,
+ const ::aidl::android::hardware::audio::effect::Descriptor& desc);
+
+ status_t handleCommand(uint32_t cmdCode, uint32_t cmdSize, void* pCmdData, uint32_t* replySize,
+ void* pReplyData);
+
+ private:
+ const int32_t mSessionId;
+ const int32_t mIoId;
+ const ::aidl::android::hardware::audio::effect::Descriptor mDesc;
+ ::aidl::android::media::audio::common::AudioUuid mTypeUuid;
+ const std::shared_ptr<::aidl::android::hardware::audio::effect::IEffect> mEffect;
+ ::aidl::android::hardware::audio::effect::IEffect::OpenEffectReturn mOpenReturn;
+ ::aidl::android::hardware::audio::effect::Parameter::Common mCommon;
+
+ const aidl::android::media::audio::common::AudioFormatDescription kDefaultFormatDescription = {
+ .type = aidl::android::media::audio::common::AudioFormatType::PCM,
+ .pcm = aidl::android::media::audio::common::PcmType::FLOAT_32_BIT};
+
+ static constexpr int kDefaultframeCount = 0x100;
+
+ using AudioChannelLayout = aidl::android::media::audio::common::AudioChannelLayout;
+ const aidl::android::media::audio::common::AudioConfig kDefaultAudioConfig = {
+ .base = {.sampleRate = 44100,
+ .channelMask = AudioChannelLayout::make<AudioChannelLayout::layoutMask>(
+ AudioChannelLayout::LAYOUT_STEREO),
+ .format = kDefaultFormatDescription},
+ .frameCount = kDefaultframeCount};
+ // command handler map
+ typedef status_t (EffectConversionHelperAidl::*CommandHandler)(uint32_t /* cmdSize */,
+ const void* /* pCmdData */,
+ uint32_t* /* replySize */,
+ void* /* pReplyData */);
+ static const std::map<uint32_t /* effect_command_e */, CommandHandler> mCommandHandlerMap;
+
+ // parameter set/get handler map
+ typedef status_t (EffectConversionHelperAidl::*SetParameter)(
+ android::effect::utils::EffectParamReader& param);
+ typedef status_t (EffectConversionHelperAidl::*GetParameter)(
+ android::effect::utils::EffectParamWriter& param);
+ static const std::map<::aidl::android::media::audio::common::AudioUuid /* TypeUUID */,
+ std::pair<SetParameter, GetParameter>>
+ mParameterHandlerMap;
+
+ status_t handleInit(uint32_t cmdSize, const void* pCmdData, uint32_t* replySize,
+ void* pReplyData);
+ status_t handleSetParameter(uint32_t cmdSize, const void* pCmdData, uint32_t* replySize,
+ void* pReplyData);
+ status_t handleGetParameter(uint32_t cmdSize, const void* pCmdData, uint32_t* replySize,
+ void* pReplyData);
+ status_t handleSetConfig(uint32_t cmdSize, const void* pCmdData, uint32_t* replySize,
+ void* pReplyData);
+ status_t handleGetConfig(uint32_t cmdSize, const void* pCmdData, uint32_t* replySize,
+ void* pReplyData);
+ status_t handleEnable(uint32_t cmdSize, const void* pCmdData, uint32_t* replySize,
+ void* pReplyData);
+ status_t handleDisable(uint32_t cmdSize, const void* pCmdData, uint32_t* replySize,
+ void* pReplyData);
+ status_t handleReset(uint32_t cmdSize, const void* pCmdData, uint32_t* replySize,
+ void* pReplyData);
+ status_t handleSetDevice(uint32_t cmdSize, const void* pCmdData, uint32_t* replySize,
+ void* pReplyData);
+ status_t handleSetVolume(uint32_t cmdSize, const void* pCmdData, uint32_t* replySize,
+ void* pReplyData);
+ status_t handleSetOffload(uint32_t cmdSize, const void* pCmdData, uint32_t* replySize,
+ void* pReplyData);
+ status_t handleFirstPriority(uint32_t cmdSize, const void* pCmdData, uint32_t* replySize,
+ void* pReplyData);
+
+ // set/get parameter handler
+ status_t setAecParameter(android::effect::utils::EffectParamReader& param);
+ status_t getAecParameter(android::effect::utils::EffectParamWriter& param);
+ status_t setAgcParameter(android::effect::utils::EffectParamReader& param);
+ status_t getAgcParameter(android::effect::utils::EffectParamWriter& param);
+ status_t setBassBoostParameter(android::effect::utils::EffectParamReader& param);
+ status_t getBassBoostParameter(android::effect::utils::EffectParamWriter& param);
+ status_t setDownmixParameter(android::effect::utils::EffectParamReader& param);
+ status_t getDownmixParameter(android::effect::utils::EffectParamWriter& param);
+};
+
+} // namespace effect
+} // namespace android
diff --git a/media/libaudiohal/impl/EffectHalAidl.cpp b/media/libaudiohal/impl/EffectHalAidl.cpp
index 31c5ca5..5090f11 100644
--- a/media/libaudiohal/impl/EffectHalAidl.cpp
+++ b/media/libaudiohal/impl/EffectHalAidl.cpp
@@ -17,33 +17,55 @@
#define LOG_TAG "EffectHalAidl"
//#define LOG_NDEBUG 0
+#include <error/expected_utils.h>
#include <media/AidlConversionCppNdk.h>
-#include <media/AidlConversionNdk.h>
-#include <media/audiohal/AudioHalUtils.h>
+#include <media/AidlConversionEffect.h>
+#include <media/AidlConversionUtil.h>
#include <media/EffectsFactoryApi.h>
#include <mediautils/TimeCheck.h>
#include <utils/Log.h>
+#include <system/audio_effects/effect_aec.h>
+#include <system/audio_effects/effect_downmix.h>
+#include <system/audio_effects/effect_dynamicsprocessing.h>
+#include <system/audio_effects/effect_hapticgenerator.h>
+#include <system/audio_effects/effect_ns.h>
+#include <system/audio_effects/effect_spatializer.h>
+#include <system/audio_effects/effect_visualizer.h>
+
#include "EffectHalAidl.h"
#include <system/audio.h>
-
#include <aidl/android/hardware/audio/effect/IEffect.h>
+using ::aidl::android::aidl_utils::statusTFromBinderStatus;
using ::aidl::android::hardware::audio::effect::CommandId;
using ::aidl::android::hardware::audio::effect::Descriptor;
using ::aidl::android::hardware::audio::effect::IEffect;
-using ::aidl::android::hardware::audio::effect::State;
+using ::aidl::android::hardware::audio::effect::IFactory;
using ::aidl::android::hardware::audio::effect::Parameter;
namespace android {
namespace effect {
-EffectHalAidl::EffectHalAidl(const std::shared_ptr<IEffect>& effect, uint64_t effectId,
- int32_t sessionId, int32_t ioId)
- : mEffectId(effectId), mSessionId(sessionId), mIoId(ioId), mEffect(effect) {}
+EffectHalAidl::EffectHalAidl(
+ const std::shared_ptr<::aidl::android::hardware::audio::effect::IFactory>& factory,
+ const std::shared_ptr<::aidl::android::hardware::audio::effect::IEffect>& effect,
+ uint64_t effectId, int32_t sessionId, int32_t ioId,
+ const ::aidl::android::hardware::audio::effect::Descriptor& desc)
+ : EffectConversionHelperAidl(effect, sessionId, ioId, desc),
+ mFactory(factory),
+ mEffect(effect),
+ mEffectId(effectId),
+ mSessionId(sessionId),
+ mIoId(ioId),
+ mDesc(desc) {}
-EffectHalAidl::~EffectHalAidl() {}
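+// The factory created this effect instance, so ask it to destroy the effect on teardown.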
+EffectHalAidl::~EffectHalAidl() {
+ if (mFactory) {
+ mFactory->destroyEffect(mEffect);
+ }
+}
status_t EffectHalAidl::setInBuffer(const sp<EffectBufferHalInterface>& buffer) {
if (buffer == nullptr) {
@@ -63,7 +85,7 @@
status_t EffectHalAidl::process() {
ALOGW("%s not implemented yet", __func__);
- // write to input FMQ here?
+ // write to input FMQ here, and wait for statusMQ STATUS_OK
return OK;
}
@@ -73,137 +95,9 @@
return OK;
}
-status_t EffectHalAidl::handleSetConfig(uint32_t cmdCode, uint32_t cmdSize, void* pCmdData,
- uint32_t* replySize, void* pReplyData) {
- if (pCmdData == NULL || cmdSize != sizeof(effect_config_t) || replySize == NULL ||
- *replySize != sizeof(int32_t) || pReplyData == NULL) {
- ALOGE("%s parameter error code %u", __func__, cmdCode);
- return BAD_VALUE;
- }
-
- *static_cast<int32_t*>(pReplyData) = FAILED_TRANSACTION;
- memcpy(&mConfig, pCmdData, cmdSize);
-
- State state;
- RETURN_IF_BINDER_FAIL(mEffect->getState(&state));
- // effect not open yet, save settings locally
- if (state != State::INIT) {
- effect_config_t* legacyConfig = static_cast<effect_config_t*>(pCmdData);
- // already open, apply latest settings
- Parameter aidlParam;
- Parameter::Common aidlCommon;
- aidlCommon.input.base =
- VALUE_OR_RETURN_STATUS(::aidl::android::legacy2aidl_AudioConfigBase_buffer_config_t(
- legacyConfig->inputCfg, true /* isInput */));
- aidlCommon.output.base =
- VALUE_OR_RETURN_STATUS(::aidl::android::legacy2aidl_AudioConfigBase_buffer_config_t(
- legacyConfig->outputCfg, false /* isInput */));
- aidlCommon.session = mSessionId;
- aidlCommon.ioHandle = mIoId;
- Parameter::Id id;
- id.set<Parameter::Id::commonTag>(Parameter::common);
- aidlParam.set<Parameter::common>(aidlCommon);
- RETURN_IF_BINDER_FAIL(mEffect->setParameter(aidlParam));
- }
- *(int*)pReplyData = 0;
- *static_cast<int32_t*>(pReplyData) = OK;
- return OK;
-}
-
-status_t EffectHalAidl::handleGetConfig(uint32_t cmdCode, uint32_t cmdSize, void* pCmdData,
- uint32_t* replySize, void* pReplyData) {
- if (pCmdData == NULL || cmdSize == 0 || replySize == NULL ||
- *replySize != sizeof(effect_config_t) || pReplyData == NULL) {
- ALOGE("%s parameter error with cmdCode %d", __func__, cmdCode);
- return BAD_VALUE;
- }
-
- *(effect_config_t*)pReplyData = mConfig;
- return OK;
-}
-
-status_t EffectHalAidl::handleSetParameter(uint32_t cmdCode, uint32_t cmdSize, void* pCmdData,
- uint32_t* replySize, void* pReplyData) {
- ALOGW("%s not implemented yet", __func__);
- if (pCmdData == NULL || cmdSize == 0 || replySize == NULL ||
- *replySize != sizeof(effect_config_t) || pReplyData == NULL) {
- ALOGE("%s parameter error with cmdCode %d", __func__, cmdCode);
- return BAD_VALUE;
- }
- return OK;
-}
-
-status_t EffectHalAidl::handleGetParameter(uint32_t cmdCode, uint32_t cmdSize, void* pCmdData,
- uint32_t* replySize, void* pReplyData) {
- ALOGW("%s not implemented yet", __func__);
- if (pCmdData == NULL || cmdSize == 0 || replySize == NULL ||
- *replySize != sizeof(effect_config_t) || pReplyData == NULL) {
- ALOGE("%s parameter error with cmdCode %d", __func__, cmdCode);
- return BAD_VALUE;
- }
- return OK;
-}
-
status_t EffectHalAidl::command(uint32_t cmdCode, uint32_t cmdSize, void* pCmdData,
uint32_t* replySize, void* pReplyData) {
- ALOGW("%s code %d not implemented yet", __func__, cmdCode);
- ::ndk::ScopedAStatus status;
- switch (cmdCode) {
- case EFFECT_CMD_INIT: {
- // open with default effect_config_t (convert to Parameter.Common)
- IEffect::OpenEffectReturn ret;
- Parameter::Common common;
- RETURN_IF_BINDER_FAIL(mEffect->open(common, std::nullopt, &ret));
- return OK;
- }
- case EFFECT_CMD_SET_CONFIG:
- return handleSetConfig(cmdCode, cmdSize, pCmdData, replySize, pReplyData);
- case EFFECT_CMD_GET_CONFIG:
- return handleGetConfig(cmdCode, cmdSize, pCmdData, replySize, pReplyData);
- case EFFECT_CMD_RESET:
- return mEffect->command(CommandId::RESET).getStatus();
- case EFFECT_CMD_ENABLE:
- return mEffect->command(CommandId::START).getStatus();
- case EFFECT_CMD_DISABLE:
- return mEffect->command(CommandId::STOP).getStatus();
- case EFFECT_CMD_SET_PARAM:
- return handleSetParameter(cmdCode, cmdSize, pCmdData, replySize, pReplyData);
- case EFFECT_CMD_SET_PARAM_DEFERRED:
- case EFFECT_CMD_SET_PARAM_COMMIT:
- // TODO
- return OK;
- case EFFECT_CMD_GET_PARAM:
- return handleGetParameter(cmdCode, cmdSize, pCmdData, replySize, pReplyData);
- case EFFECT_CMD_SET_DEVICE:
- return OK;
- case EFFECT_CMD_SET_VOLUME:
- return OK;
- case EFFECT_CMD_SET_AUDIO_MODE:
- return OK;
- case EFFECT_CMD_SET_CONFIG_REVERSE:
- return OK;
- case EFFECT_CMD_SET_INPUT_DEVICE:
- return OK;
- case EFFECT_CMD_GET_CONFIG_REVERSE:
- return OK;
- case EFFECT_CMD_GET_FEATURE_SUPPORTED_CONFIGS:
- return OK;
- case EFFECT_CMD_GET_FEATURE_CONFIG:
- return OK;
- case EFFECT_CMD_SET_FEATURE_CONFIG:
- return OK;
- case EFFECT_CMD_SET_AUDIO_SOURCE:
- return OK;
- case EFFECT_CMD_OFFLOAD:
- return OK;
- case EFFECT_CMD_DUMP:
- return OK;
- case EFFECT_CMD_FIRST_PROPRIETARY:
- return OK;
- default:
- return INVALID_OPERATION;
- }
- return INVALID_OPERATION;
+ return handleCommand(cmdCode, cmdSize, pCmdData, replySize, pReplyData);
}
status_t EffectHalAidl::getDescriptor(effect_descriptor_t* pDescriptor) {
@@ -212,7 +106,7 @@
return BAD_VALUE;
}
Descriptor aidlDesc;
- RETURN_IF_BINDER_FAIL(mEffect->getDescriptor(&aidlDesc));
+ RETURN_STATUS_IF_ERROR(statusTFromBinderStatus(mEffect->getDescriptor(&aidlDesc)));
*pDescriptor = VALUE_OR_RETURN_STATUS(
::aidl::android::aidl2legacy_Descriptor_effect_descriptor(aidlDesc));
@@ -220,9 +114,7 @@
}
status_t EffectHalAidl::close() {
- auto ret = mEffect->close();
- ALOGI("%s %s", __func__, ret.getMessage());
- return ret.getStatus();
+ return statusTFromBinderStatus(mEffect->close());
}
status_t EffectHalAidl::dump(int fd) {
diff --git a/media/libaudiohal/impl/EffectHalAidl.h b/media/libaudiohal/impl/EffectHalAidl.h
index 76bb240..6a1ec1c 100644
--- a/media/libaudiohal/impl/EffectHalAidl.h
+++ b/media/libaudiohal/impl/EffectHalAidl.h
@@ -17,13 +17,16 @@
#pragma once
#include <aidl/android/hardware/audio/effect/IEffect.h>
+#include <aidl/android/hardware/audio/effect/IFactory.h>
#include <media/audiohal/EffectHalInterface.h>
#include <system/audio_effect.h>
+#include "EffectConversionHelperAidl.h"
+
namespace android {
namespace effect {
-class EffectHalAidl : public EffectHalInterface {
+class EffectHalAidl : public EffectHalInterface, public EffectConversionHelperAidl {
public:
// Set the input buffer.
status_t setInBuffer(const sp<EffectBufferHalInterface>& buffer) override;
@@ -55,28 +58,30 @@
uint64_t effectId() const override { return mEffectId; }
- private:
- friend class EffectsFactoryHalAidl;
+ const std::shared_ptr<::aidl::android::hardware::audio::effect::IEffect> getIEffect() const {
+ return mEffect;
+ }
+ private:
+ friend class sp<EffectHalAidl>;
+
+ const std::shared_ptr<::aidl::android::hardware::audio::effect::IFactory> mFactory;
+ const std::shared_ptr<::aidl::android::hardware::audio::effect::IEffect> mEffect;
const uint64_t mEffectId;
const int32_t mSessionId;
const int32_t mIoId;
+ const ::aidl::android::hardware::audio::effect::Descriptor mDesc;
+
sp<EffectBufferHalInterface> mInBuffer, mOutBuffer;
effect_config_t mConfig;
- std::shared_ptr<::aidl::android::hardware::audio::effect::IEffect> mEffect;
// Can not be constructed directly by clients.
- EffectHalAidl(const std::shared_ptr<::aidl::android::hardware::audio::effect::IEffect>& effect,
- uint64_t effectId, int32_t sessionId, int32_t ioId);
-
- status_t handleSetConfig(uint32_t cmdCode, uint32_t cmdSize, void* pCmdData,
- uint32_t* replySize, void* pReplyData);
- status_t handleGetConfig(uint32_t cmdCode, uint32_t cmdSize, void* pCmdData,
- uint32_t* replySize, void* pReplyData);
- status_t handleSetParameter(uint32_t cmdCode, uint32_t cmdSize, void* pCmdData,
- uint32_t* replySize, void* pReplyData);
- status_t handleGetParameter(uint32_t cmdCode, uint32_t cmdSize, void* pCmdData,
- uint32_t* replySize, void* pReplyData);
+ EffectHalAidl(
+ const std::shared_ptr<::aidl::android::hardware::audio::effect::IFactory>& factory,
+ const std::shared_ptr<::aidl::android::hardware::audio::effect::IEffect>& effect,
+ uint64_t effectId, int32_t sessionId, int32_t ioId,
+ const ::aidl::android::hardware::audio::effect::Descriptor& desc);
+ bool setEffectReverse(bool reverse);
// The destructor automatically releases the effect.
virtual ~EffectHalAidl();
diff --git a/media/libaudiohal/impl/EffectHalHidl.cpp b/media/libaudiohal/impl/EffectHalHidl.cpp
index 3956a6c..ed952a3 100644
--- a/media/libaudiohal/impl/EffectHalHidl.cpp
+++ b/media/libaudiohal/impl/EffectHalHidl.cpp
@@ -46,9 +46,6 @@
using namespace ::android::hardware::audio::common::CPP_VERSION;
using namespace ::android::hardware::audio::effect::CPP_VERSION;
-#define TIME_CHECK() auto timeCheck = \
- mediautils::makeTimeCheckStatsForClassMethod(getClassName(), __func__)
-
EffectHalHidl::EffectHalHidl(const sp<IEffect>& effect, uint64_t effectId)
: EffectConversionHelperHidl("EffectHalHidl"),
mEffect(effect), mEffectId(effectId), mBuffersChanged(true), mEfGroup(nullptr) {
diff --git a/media/libaudiohal/impl/EffectsFactoryHalAidl.cpp b/media/libaudiohal/impl/EffectsFactoryHalAidl.cpp
index 0039c86..0aae87b 100644
--- a/media/libaudiohal/impl/EffectsFactoryHalAidl.cpp
+++ b/media/libaudiohal/impl/EffectsFactoryHalAidl.cpp
@@ -21,16 +21,19 @@
//#define LOG_NDEBUG 0
#include <aidl/android/hardware/audio/effect/IFactory.h>
+#include <error/expected_utils.h>
#include <android/binder_manager.h>
#include <media/AidlConversionCppNdk.h>
-#include <media/AidlConversionNdk.h>
-#include <media/audiohal/AudioHalUtils.h>
+#include <media/AidlConversionEffect.h>
+#include <system/audio.h>
#include <utils/Log.h>
#include "EffectBufferHalAidl.h"
#include "EffectHalAidl.h"
#include "EffectsFactoryHalAidl.h"
+using ::aidl::android::legacy2aidl_audio_uuid_t_AudioUuid;
+using aidl::android::aidl_utils::statusTFromBinderStatus;
using aidl::android::hardware::audio::effect::IFactory;
using aidl::android::media::audio::common::AudioUuid;
using android::detail::AudioHalVersionInfo;
@@ -55,7 +58,7 @@
{
std::lock_guard lg(mLock);
- RETURN_IF_NOT_OK(queryEffectList_l());
+ RETURN_STATUS_IF_ERROR(queryEffectList_l());
*pNumEffects = mDescList->size();
}
ALOGI("%s %d", __func__, *pNumEffects);
@@ -68,7 +71,7 @@
}
std::lock_guard lg(mLock);
- RETURN_IF_NOT_OK(queryEffectList_l());
+ RETURN_STATUS_IF_ERROR(queryEffectList_l());
auto listSize = mDescList->size();
if (index >= listSize) {
@@ -87,8 +90,7 @@
return BAD_VALUE;
}
- AudioUuid uuid =
- VALUE_OR_RETURN_STATUS(::aidl::android::legacy2aidl_audio_uuid_t_AudioUuid(*halUuid));
+ AudioUuid uuid = VALUE_OR_RETURN_STATUS(legacy2aidl_audio_uuid_t_AudioUuid(*halUuid));
std::lock_guard lg(mLock);
return getHalDescriptorWithImplUuid_l(uuid, pDescriptor);
}
@@ -99,8 +101,7 @@
return BAD_VALUE;
}
- AudioUuid type =
- VALUE_OR_RETURN_STATUS(::aidl::android::legacy2aidl_audio_uuid_t_AudioUuid(*halType));
+ AudioUuid type = VALUE_OR_RETURN_STATUS(legacy2aidl_audio_uuid_t_AudioUuid(*halType));
std::lock_guard lg(mLock);
return getHalDescriptorWithTypeUuid_l(type, descriptors);
}
@@ -111,24 +112,29 @@
if (uuid == nullptr || effect == nullptr) {
return BAD_VALUE;
}
- ALOGI("%s session %d ioId %d", __func__, sessionId, ioId);
-
- AudioUuid aidlUuid =
- VALUE_OR_RETURN_STATUS(::aidl::android::legacy2aidl_audio_uuid_t_AudioUuid(*uuid));
- std::shared_ptr<IEffect> aidlEffect;
- ndk::ScopedAStatus status = mFactory->createEffect(aidlUuid, &aidlEffect);
- if (!status.isOk() || aidlEffect == nullptr) {
- ALOGE("%s IFactory::createFactory failed %s UUID %s", __func__,
- status.getDescription().c_str(), aidlUuid.toString().c_str());
+ if (sessionId == AUDIO_SESSION_DEVICE && ioId == AUDIO_IO_HANDLE_NONE) {
return INVALID_OPERATION;
}
+
+ ALOGI("%s session %d ioId %d", __func__, sessionId, ioId);
+
+ AudioUuid aidlUuid = VALUE_OR_RETURN_STATUS(legacy2aidl_audio_uuid_t_AudioUuid(*uuid));
+ std::shared_ptr<IEffect> aidlEffect;
+ Descriptor desc;
+ RETURN_STATUS_IF_ERROR(statusTFromBinderStatus(mFactory->createEffect(aidlUuid, &aidlEffect)));
+ if (aidlEffect == nullptr) {
+ ALOGE("%s IFactory::createFactory failed UUID %s", __func__, aidlUuid.toString().c_str());
+ return NAME_NOT_FOUND;
+ }
+ RETURN_STATUS_IF_ERROR(statusTFromBinderStatus(aidlEffect->getDescriptor(&desc)));
+
uint64_t effectId;
{
std::lock_guard lg(mLock);
effectId = ++mEffectIdCounter;
}
- *effect = new EffectHalAidl(aidlEffect, effectId, sessionId, ioId);
+ *effect = sp<EffectHalAidl>::make(mFactory, aidlEffect, effectId, sessionId, ioId, desc);
return OK;
}
@@ -174,7 +180,7 @@
return BAD_VALUE;
}
if (!mDescList) {
- RETURN_IF_NOT_OK(queryEffectList_l());
+ RETURN_STATUS_IF_ERROR(queryEffectList_l());
}
auto matchIt = std::find_if(mDescList->begin(), mDescList->end(),
@@ -195,7 +201,7 @@
return BAD_VALUE;
}
if (!mDescList) {
- RETURN_IF_NOT_OK(queryEffectList_l());
+ RETURN_STATUS_IF_ERROR(queryEffectList_l());
}
std::vector<Descriptor> result;
std::copy_if(mDescList->begin(), mDescList->end(), std::back_inserter(result),
diff --git a/media/libaudiohal/impl/StreamHalAidl.cpp b/media/libaudiohal/impl/StreamHalAidl.cpp
new file mode 100644
index 0000000..1c6a014
--- /dev/null
+++ b/media/libaudiohal/impl/StreamHalAidl.cpp
@@ -0,0 +1,517 @@
+/*
+ * Copyright (C) 2023 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#define LOG_TAG "StreamHalAidl"
+//#define LOG_NDEBUG 0
+
+#include <aidl/android/hardware/audio/core/BnStreamCallback.h>
+#include <mediautils/TimeCheck.h>
+#include <utils/Log.h>
+
+#include "DeviceHalAidl.h"
+#include "StreamHalAidl.h"
+
+using ::aidl::android::hardware::audio::core::IStreamCommon;
+using ::aidl::android::hardware::audio::core::IStreamIn;
+using ::aidl::android::hardware::audio::core::IStreamOut;
+using ::aidl::android::hardware::audio::core::StreamDescriptor;
+
+namespace android {
+
+// static
+template<class T>
+std::shared_ptr<IStreamCommon> StreamHalAidl::getStreamCommon(const std::shared_ptr<T>& stream) {
+ std::shared_ptr<::aidl::android::hardware::audio::core::IStreamCommon> streamCommon;
+ if (stream != nullptr) {
+ if (ndk::ScopedAStatus status = stream->getStreamCommon(&streamCommon);
+ !status.isOk()) {
+ ALOGE("%s: failed to retrieve IStreamCommon instance: %s", __func__,
+ status.getDescription().c_str());
+ }
+ }
+ return streamCommon;
+}
+
+StreamHalAidl::StreamHalAidl(
+ std::string_view className, bool isInput, const StreamDescriptor& descriptor,
+ const std::shared_ptr<IStreamCommon>& stream)
+ : ConversionHelperAidl(className),
+ mIsInput(isInput),
+ mFrameSizeBytes(descriptor.frameSizeBytes),
+ mBufferSizeFrames(descriptor.bufferSizeFrames),
+ mCommandMQ(new CommandMQ(descriptor.command)),
+ mReplyMQ(new ReplyMQ(descriptor.reply)),
+ mDataMQ(maybeCreateDataMQ(descriptor)),
+ mStream(stream) {
+ // Instrument audio signal power logging.
+ // Note: This assumes channel mask, format, and sample rate do not change after creation.
+ if (audio_config_base_t config = AUDIO_CONFIG_BASE_INITIALIZER;
+ /* mStreamPowerLog.isUserDebugOrEngBuild() && */
+ StreamHalAidl::getAudioProperties(&config) == NO_ERROR) {
+ mStreamPowerLog.init(config.sample_rate, config.channel_mask, config.format);
+ }
+}
+
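+// Closes the underlying HAL stream; failures are only logged since the object is being destroyed.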
+StreamHalAidl::~StreamHalAidl() {
+ if (mStream != nullptr) {
+ ndk::ScopedAStatus status = mStream->close();
+ ALOGE_IF(!status.isOk(), "%s: status %s", __func__, status.getDescription().c_str());
+ }
+}
+
+status_t StreamHalAidl::getBufferSize(size_t *size) {
+ if (size == nullptr) {
+ return BAD_VALUE;
+ }
+ if (mFrameSizeBytes == 0 || mBufferSizeFrames == 0) {
+ return NO_INIT;
+ }
+ *size = mFrameSizeBytes * mBufferSizeFrames;
+ return OK;
+}
+
+status_t StreamHalAidl::getAudioProperties(audio_config_base_t *configBase) {
+ if (configBase == nullptr) {
+ return BAD_VALUE;
+ }
+ TIME_CHECK();
+ *configBase = AUDIO_CONFIG_BASE_INITIALIZER;
+ configBase->sample_rate = 48000;
+ configBase->format = AUDIO_FORMAT_PCM_24_BIT_PACKED;
+ configBase->channel_mask = mIsInput ? AUDIO_CHANNEL_IN_STEREO : AUDIO_CHANNEL_OUT_STEREO;
+ // if (!mStream) return NO_INIT;
+ ALOGE("%s not implemented yet", __func__);
+ return OK;
+}
+
+status_t StreamHalAidl::setParameters(const String8& kvPairs __unused) {
+ TIME_CHECK();
+ if (!mStream) return NO_INIT;
+ ALOGE("%s not implemented yet", __func__);
+ return OK;
+}
+
+status_t StreamHalAidl::getParameters(const String8& keys __unused, String8 *values) {
+ TIME_CHECK();
+ values->clear();
+ if (!mStream) return NO_INIT;
+ ALOGE("%s not implemented yet", __func__);
+ return OK;
+}
+
+status_t StreamHalAidl::getFrameSize(size_t *size) {
+ if (size == nullptr) {
+ return BAD_VALUE;
+ }
+ if (mFrameSizeBytes == 0) {
+ return NO_INIT;
+ }
+ *size = mFrameSizeBytes;
+ return OK;
+}
+
+status_t StreamHalAidl::addEffect(sp<EffectHalInterface> effect __unused) {
+ TIME_CHECK();
+ if (!mStream) return NO_INIT;
+ ALOGE("%s not implemented yet", __func__);
+ return OK;
+}
+
+status_t StreamHalAidl::removeEffect(sp<EffectHalInterface> effect __unused) {
+ TIME_CHECK();
+ if (!mStream) return NO_INIT;
+ ALOGE("%s not implemented yet", __func__);
+ return OK;
+}
+
+status_t StreamHalAidl::standby() {
+ TIME_CHECK();
+ if (!mStream) return NO_INIT;
+ ALOGE("%s not implemented yet", __func__);
+ return OK;
+}
+
+status_t StreamHalAidl::dump(int fd, const Vector<String16>& args) {
+ TIME_CHECK();
+ if (!mStream) return NO_INIT;
+ return mStream->dump(fd, Args(args).args(), args.size());
+}
+
+status_t StreamHalAidl::start() {
+ TIME_CHECK();
+ if (!mStream) return NO_INIT;
+ ALOGE("%s not implemented yet", __func__);
+ return OK;
+}
+
+status_t StreamHalAidl::stop() {
+ TIME_CHECK();
+ if (!mStream) return NO_INIT;
+ ALOGE("%s not implemented yet", __func__);
+ return OK;
+}
+
+status_t StreamHalAidl::createMmapBuffer(int32_t minSizeFrames __unused,
+ struct audio_mmap_buffer_info *info __unused) {
+ TIME_CHECK();
+ if (!mStream) return NO_INIT;
+ ALOGE("%s not implemented yet", __func__);
+ return OK;
+}
+
+status_t StreamHalAidl::getMmapPosition(struct audio_mmap_position *position __unused) {
+ TIME_CHECK();
+ if (!mStream) return NO_INIT;
+ ALOGE("%s not implemented yet", __func__);
+ return OK;
+}
+
+status_t StreamHalAidl::setHalThreadPriority(int priority __unused) {
+ mHalThreadPriority = priority;
+ return OK;
+}
+
+status_t StreamHalAidl::getHalPid(pid_t *pid __unused) {
+ TIME_CHECK();
+ if (!mStream) return NO_INIT;
+ ALOGE("%s not implemented yet", __func__);
+ return OK;
+}
+
+bool StreamHalAidl::requestHalThreadPriority(pid_t threadPid __unused, pid_t threadId __unused) {
+ if (mHalThreadPriority == HAL_THREAD_PRIORITY_DEFAULT) {
+ return true;
+ }
+    if (!mStream) return false;
+    ALOGE("%s not implemented yet", __func__);
+    return true;
+}
+
+status_t StreamHalAidl::legacyCreateAudioPatch(const struct audio_port_config& port __unused,
+ std::optional<audio_source_t> source __unused,
+ audio_devices_t type __unused) {
+ // Obsolete since 'DeviceHalAidl.supportsAudioPatches' always returns 'true'.
+ return INVALID_OPERATION;
+}
+
+status_t StreamHalAidl::legacyReleaseAudioPatch() {
+ // Obsolete since 'DeviceHalAidl.supportsAudioPatches' always returns 'true'.
+ return INVALID_OPERATION;
+}
+
+namespace {
+
+/* Notes on callback ownership.
+
+This is how the Binder ownership model works. The server implementation
+is owned by the Binder framework (via sp<>). Proxies are owned by clients.
+When the last proxy disappears, the Binder framework releases the server impl.
+
+Thus, there is no need to keep any references to StreamCallback (this is
+the server impl) -- it will live as long as the HAL server holds a strong ref
+to the IStreamCallback proxy.
+
+The callback only keeps a weak reference to the stream. The stream is owned
+by AudioFlinger.
+
+*/
+
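+// Note: these callbacks are currently no-op acknowledgements; forwarding them to the
+// framework stream callbacks is not wired up yet.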
+class StreamCallback : public ::aidl::android::hardware::audio::core::BnStreamCallback {
+ ndk::ScopedAStatus onTransferReady() override {
+ return ndk::ScopedAStatus::ok();
+ }
+ ndk::ScopedAStatus onError() override {
+ return ndk::ScopedAStatus::ok();
+ }
+ ndk::ScopedAStatus onDrainReady() override {
+ return ndk::ScopedAStatus::ok();
+ }
+};
+
+} // namespace
+
+StreamOutHalAidl::StreamOutHalAidl(
+ const StreamDescriptor& descriptor, const std::shared_ptr<IStreamOut>& stream)
+ : StreamHalAidl("StreamOutHalAidl", false /*isInput*/, descriptor, getStreamCommon(stream)),
+ mStream(stream) {}
+
+status_t StreamOutHalAidl::getLatency(uint32_t *latency) {
+ TIME_CHECK();
+ *latency = 0;
+ if (!mStream) return NO_INIT;
+ ALOGE("%s not implemented yet", __func__);
+ return OK;
+}
+
+status_t StreamOutHalAidl::setVolume(float left __unused, float right __unused) {
+ TIME_CHECK();
+ if (!mStream) return NO_INIT;
+ ALOGE("%s not implemented yet", __func__);
+ return OK;
+}
+
+status_t StreamOutHalAidl::selectPresentation(int presentationId __unused, int programId __unused) {
+ TIME_CHECK();
+ if (!mStream) return NO_INIT;
+ ALOGE("%s not implemented yet", __func__);
+ return OK;
+}
+
+status_t StreamOutHalAidl::write(
+ const void *buffer __unused, size_t bytes __unused, size_t *written __unused) {
+ // TIME_CHECK(); // TODO(b/238654698) reenable only when optimized.
+ if (!mStream) return NO_INIT;
+ *written = 0;
+ ALOGE("%s not implemented yet", __func__);
+ return OK;
+}
+
+status_t StreamOutHalAidl::getRenderPosition(uint32_t *dspFrames __unused) {
+ // TIME_CHECK(); // TODO(b/238654698) reenable only when optimized.
+ if (!mStream) return NO_INIT;
+ ALOGE("%s not implemented yet", __func__);
+ return OK;
+}
+
+status_t StreamOutHalAidl::getNextWriteTimestamp(int64_t *timestamp __unused) {
+ TIME_CHECK();
+ if (!mStream) return NO_INIT;
+ ALOGE("%s not implemented yet", __func__);
+ return OK;
+}
+
+status_t StreamOutHalAidl::setCallback(wp<StreamOutHalInterfaceCallback> callback __unused) {
+ TIME_CHECK();
+ if (!mStream) return NO_INIT;
+ ALOGE("%s not implemented yet", __func__);
+ return OK;
+}
+
+status_t StreamOutHalAidl::supportsPauseAndResume(
+ bool *supportsPause __unused, bool *supportsResume __unused) {
+ TIME_CHECK();
+ if (!mStream) return NO_INIT;
+ ALOGE("%s not implemented yet", __func__);
+ return OK;
+}
+
+status_t StreamOutHalAidl::pause() {
+ TIME_CHECK();
+ if (!mStream) return NO_INIT;
+ ALOGE("%s not implemented yet", __func__);
+ return OK;
+}
+
+status_t StreamOutHalAidl::resume() {
+ TIME_CHECK();
+ if (!mStream) return NO_INIT;
+ ALOGE("%s not implemented yet", __func__);
+ return OK;
+}
+
+status_t StreamOutHalAidl::supportsDrain(bool *supportsDrain __unused) {
+ TIME_CHECK();
+ if (!mStream) return NO_INIT;
+ ALOGE("%s not implemented yet", __func__);
+ return OK;
+}
+
+status_t StreamOutHalAidl::drain(bool earlyNotify __unused) {
+ TIME_CHECK();
+ if (!mStream) return NO_INIT;
+ ALOGE("%s not implemented yet", __func__);
+ return OK;
+}
+
+status_t StreamOutHalAidl::flush() {
+ TIME_CHECK();
+ if (!mStream) return NO_INIT;
+ ALOGE("%s not implemented yet", __func__);
+ return OK;
+}
+
+status_t StreamOutHalAidl::getPresentationPosition(
+ uint64_t *frames __unused, struct timespec *timestamp __unused) {
+ // TIME_CHECK(); // TODO(b/238654698) reenable only when optimized.
+ if (!mStream) return NO_INIT;
+ ALOGE("%s not implemented yet", __func__);
+ return OK;
+}
+
+status_t StreamOutHalAidl::updateSourceMetadata(
+ const StreamOutHalInterface::SourceMetadata& sourceMetadata __unused) {
+ TIME_CHECK();
+ if (!mStream) return NO_INIT;
+ ALOGE("%s not implemented yet", __func__);
+ return OK;
+}
+
+status_t StreamOutHalAidl::getDualMonoMode(audio_dual_mono_mode_t* mode __unused) {
+ return INVALID_OPERATION;
+}
+
+status_t StreamOutHalAidl::setDualMonoMode(audio_dual_mono_mode_t mode __unused) {
+ return INVALID_OPERATION;
+}
+
+status_t StreamOutHalAidl::getAudioDescriptionMixLevel(float* leveldB __unused) {
+ return INVALID_OPERATION;
+}
+
+status_t StreamOutHalAidl::setAudioDescriptionMixLevel(float leveldB __unused) {
+ return INVALID_OPERATION;
+}
+
+status_t StreamOutHalAidl::getPlaybackRateParameters(
+ audio_playback_rate_t* playbackRate __unused) {
+ return INVALID_OPERATION;
+}
+
+status_t StreamOutHalAidl::setPlaybackRateParameters(
+ const audio_playback_rate_t& playbackRate __unused) {
+ return INVALID_OPERATION;
+}
+
+status_t StreamOutHalAidl::setEventCallback(
+ const sp<StreamOutHalInterfaceEventCallback>& callback __unused) {
+ return INVALID_OPERATION;
+}
+
+namespace {
+
+struct StreamOutEventCallback {
+ StreamOutEventCallback(const wp<StreamOutHalAidl>& stream) : mStream(stream) {}
+ private:
+ wp<StreamOutHalAidl> mStream;
+};
+
+} // namespace
+
+status_t StreamOutHalAidl::setLatencyMode(audio_latency_mode_t mode __unused) {
+ return INVALID_OPERATION;
+};
+
+status_t StreamOutHalAidl::getRecommendedLatencyModes(
+ std::vector<audio_latency_mode_t> *modes __unused) {
+ return INVALID_OPERATION;
+};
+
+status_t StreamOutHalAidl::setLatencyModeCallback(
+ const sp<StreamOutHalInterfaceLatencyModeCallback>& callback __unused) {
+ return INVALID_OPERATION;
+};
+
+void StreamOutHalAidl::onWriteReady() {
+ sp<StreamOutHalInterfaceCallback> callback = mCallback.load().promote();
+ if (callback == 0) return;
+ ALOGV("asyncCallback onWriteReady");
+ callback->onWriteReady();
+}
+
+void StreamOutHalAidl::onDrainReady() {
+ sp<StreamOutHalInterfaceCallback> callback = mCallback.load().promote();
+ if (callback == 0) return;
+ ALOGV("asyncCallback onDrainReady");
+ callback->onDrainReady();
+}
+
+void StreamOutHalAidl::onError() {
+ sp<StreamOutHalInterfaceCallback> callback = mCallback.load().promote();
+ if (callback == 0) return;
+ ALOGV("asyncCallback onError");
+ callback->onError();
+}
+
+void StreamOutHalAidl::onCodecFormatChanged(const std::basic_string<uint8_t>& metadataBs) {
+ sp<StreamOutHalInterfaceEventCallback> callback = mEventCallback.load().promote();
+ if (callback == nullptr) return;
+ ALOGV("asyncCodecFormatCallback %s", __func__);
+ callback->onCodecFormatChanged(metadataBs);
+}
+
+void StreamOutHalAidl::onRecommendedLatencyModeChanged(
+        const std::vector<audio_latency_mode_t>& modes) {
+ sp<StreamOutHalInterfaceLatencyModeCallback> callback = mLatencyModeCallback.load().promote();
+ if (callback == nullptr) return;
+ callback->onRecommendedLatencyModeChanged(modes);
+}
+
+status_t StreamOutHalAidl::exit() {
+ // FIXME this is using hard-coded strings but in the future, this functionality will be
+ // converted to use audio HAL extensions required to support tunneling
+ if (!mStream) return NO_INIT;
+ ALOGE("%s not implemented yet", __func__);
+ return OK;
+}
+
+StreamInHalAidl::StreamInHalAidl(
+ const StreamDescriptor& descriptor, const std::shared_ptr<IStreamIn>& stream)
+ : StreamHalAidl("StreamInHalAidl", true /*isInput*/, descriptor, getStreamCommon(stream)),
+ mStream(stream) {}
+
+status_t StreamInHalAidl::setGain(float gain __unused) {
+ TIME_CHECK();
+ if (!mStream) return NO_INIT;
+ ALOGE("%s not implemented yet", __func__);
+ return OK;
+}
+
+status_t StreamInHalAidl::read(
+ void *buffer __unused, size_t bytes __unused, size_t *read __unused) {
+ // TIME_CHECK(); // TODO(b/238654698) reenable only when optimized.
+ if (!mStream) return NO_INIT;
+ ALOGE("%s not implemented yet", __func__);
+ // FIXME: Don't forget to update mPowerLog
+ return OK;
+}
+
+status_t StreamInHalAidl::getInputFramesLost(uint32_t *framesLost __unused) {
+ TIME_CHECK();
+ if (!mStream) return NO_INIT;
+ ALOGE("%s not implemented yet", __func__);
+ return OK;
+}
+
+status_t StreamInHalAidl::getCapturePosition(int64_t *frames __unused, int64_t *time __unused) {
+ // TIME_CHECK(); // TODO(b/238654698) reenable only when optimized.
+ if (!mStream) return NO_INIT;
+ ALOGE("%s not implemented yet", __func__);
+ return OK;
+}
+
+status_t StreamInHalAidl::getActiveMicrophones(
+ std::vector<media::MicrophoneInfo> *microphones __unused) {
+ if (mStream == 0) return NO_INIT;
+ return INVALID_OPERATION;
+}
+
+status_t StreamInHalAidl::updateSinkMetadata(
+ const StreamInHalInterface::SinkMetadata& sinkMetadata __unused) {
+ return INVALID_OPERATION;
+}
+
+status_t StreamInHalAidl::setPreferredMicrophoneDirection(
+ audio_microphone_direction_t direction __unused) {
+ if (mStream == 0) return NO_INIT;
+ return INVALID_OPERATION;
+}
+
+status_t StreamInHalAidl::setPreferredMicrophoneFieldDimension(float zoom __unused) {
+ if (mStream == 0) return NO_INIT;
+ return INVALID_OPERATION;
+}
+
+} // namespace android
diff --git a/media/libaudiohal/impl/StreamHalAidl.h b/media/libaudiohal/impl/StreamHalAidl.h
new file mode 100644
index 0000000..c56d5e3
--- /dev/null
+++ b/media/libaudiohal/impl/StreamHalAidl.h
@@ -0,0 +1,280 @@
+/*
+ * Copyright (C) 2023 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#pragma once
+
+#include <memory>
+#include <string_view>
+
+#include <aidl/android/hardware/audio/core/BpStreamCommon.h>
+#include <aidl/android/hardware/audio/core/BpStreamIn.h>
+#include <aidl/android/hardware/audio/core/BpStreamOut.h>
+#include <fmq/AidlMessageQueue.h>
+#include <media/audiohal/EffectHalInterface.h>
+#include <media/audiohal/StreamHalInterface.h>
+#include <mediautils/Synchronization.h>
+
+#include "ConversionHelperAidl.h"
+#include "StreamPowerLog.h"
+
+namespace android {
+
+class DeviceHalAidl;
+
+class StreamHalAidl : public virtual StreamHalInterface, public ConversionHelperAidl {
+ public:
+    // Return size of input/output buffer in bytes for this stream - e.g. 4800.
+ status_t getBufferSize(size_t *size) override;
+
+ // Return the base configuration of the stream:
+ // - channel mask;
+ // - format - e.g. AUDIO_FORMAT_PCM_16_BIT;
+    // - sampling rate in Hz - e.g. 44100.
+ status_t getAudioProperties(audio_config_base_t *configBase) override;
+
+ // Set audio stream parameters.
+ status_t setParameters(const String8& kvPairs) override;
+
+ // Get audio stream parameters.
+ status_t getParameters(const String8& keys, String8 *values) override;
+
+    // Return the frame size (number of bytes per audio frame) of a stream.
+ status_t getFrameSize(size_t *size) override;
+
+ // Add or remove the effect on the stream.
+ status_t addEffect(sp<EffectHalInterface> effect) override;
+ status_t removeEffect(sp<EffectHalInterface> effect) override;
+
+ // Put the audio hardware input/output into standby mode.
+ status_t standby() override;
+
+ status_t dump(int fd, const Vector<String16>& args) override;
+
+ // Start a stream operating in mmap mode.
+ status_t start() override;
+
+ // Stop a stream operating in mmap mode.
+ status_t stop() override;
+
+ // Retrieve information on the data buffer in mmap mode.
+ status_t createMmapBuffer(int32_t minSizeFrames,
+ struct audio_mmap_buffer_info *info) override;
+
+ // Get current read/write position in the mmap buffer
+ status_t getMmapPosition(struct audio_mmap_position *position) override;
+
+ // Set the priority of the thread that interacts with the HAL
+ // (must match the priority of the audioflinger's thread that calls 'read' / 'write')
+ status_t setHalThreadPriority(int priority) override;
+
+ status_t legacyCreateAudioPatch(const struct audio_port_config& port,
+ std::optional<audio_source_t> source,
+ audio_devices_t type) override;
+
+ status_t legacyReleaseAudioPatch() override;
+
+ protected:
+ typedef AidlMessageQueue<::aidl::android::hardware::audio::core::StreamDescriptor::Command,
+ ::aidl::android::hardware::common::fmq::SynchronizedReadWrite> CommandMQ;
+ typedef AidlMessageQueue<::aidl::android::hardware::audio::core::StreamDescriptor::Reply,
+ ::aidl::android::hardware::common::fmq::SynchronizedReadWrite> ReplyMQ;
+ typedef AidlMessageQueue<int8_t,
+ ::aidl::android::hardware::common::fmq::SynchronizedReadWrite> DataMQ;
+
+ template<class T>
+ static std::shared_ptr<::aidl::android::hardware::audio::core::IStreamCommon> getStreamCommon(
+ const std::shared_ptr<T>& stream);
+
+ // Subclasses can not be constructed directly by clients.
+ StreamHalAidl(std::string_view className,
+ bool isInput,
+ const ::aidl::android::hardware::audio::core::StreamDescriptor& descriptor,
+ const std::shared_ptr<::aidl::android::hardware::audio::core::IStreamCommon>& stream);
+
+ ~StreamHalAidl() override;
+
+ status_t getHalPid(pid_t *pid);
+
+ bool requestHalThreadPriority(pid_t threadPid, pid_t threadId);
+
+ const bool mIsInput;
+ const size_t mFrameSizeBytes;
+ const size_t mBufferSizeFrames;
+ const std::unique_ptr<CommandMQ> mCommandMQ;
+ const std::unique_ptr<ReplyMQ> mReplyMQ;
+ const std::unique_ptr<DataMQ> mDataMQ;
+ // mStreamPowerLog is used for audio signal power logging.
+ StreamPowerLog mStreamPowerLog;
+
+ private:
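+    // The data MQ is only present when the stream descriptor carries an FMQ;
+    // MMAP streams describe a shared memory buffer instead.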
+ static std::unique_ptr<DataMQ> maybeCreateDataMQ(
+ const ::aidl::android::hardware::audio::core::StreamDescriptor& descriptor) {
+ using Tag = ::aidl::android::hardware::audio::core::StreamDescriptor::AudioBuffer::Tag;
+ if (descriptor.audio.getTag() == Tag::fmq) {
+ return std::make_unique<DataMQ>(descriptor.audio.get<Tag::fmq>());
+ }
+ return nullptr;
+ }
+
+ const int HAL_THREAD_PRIORITY_DEFAULT = -1;
+ const std::shared_ptr<::aidl::android::hardware::audio::core::IStreamCommon> mStream;
+ int mHalThreadPriority = HAL_THREAD_PRIORITY_DEFAULT;
+};
+
+class StreamOutHalAidl : public StreamOutHalInterface, public StreamHalAidl {
+ public:
+ // Return the audio hardware driver estimated latency in milliseconds.
+ status_t getLatency(uint32_t *latency) override;
+
+ // Use this method in situations where audio mixing is done in the hardware.
+ status_t setVolume(float left, float right) override;
+
+ // Selects the audio presentation (if available).
+ status_t selectPresentation(int presentationId, int programId) override;
+
+ // Write audio buffer to driver.
+ status_t write(const void *buffer, size_t bytes, size_t *written) override;
+
+ // Return the number of audio frames written by the audio dsp to DAC since
+ // the output has exited standby.
+ status_t getRenderPosition(uint32_t *dspFrames) override;
+
+ // Get the local time at which the next write to the audio driver will be presented.
+ status_t getNextWriteTimestamp(int64_t *timestamp) override;
+
+ // Set the callback for notifying completion of non-blocking write and drain.
+ status_t setCallback(wp<StreamOutHalInterfaceCallback> callback) override;
+
+ // Returns whether pause and resume operations are supported.
+ status_t supportsPauseAndResume(bool *supportsPause, bool *supportsResume) override;
+
+    // Notifies to the audio driver to pause playback.
+ status_t pause() override;
+
+ // Notifies to the audio driver to resume playback following a pause.
+ status_t resume() override;
+
+ // Returns whether drain operation is supported.
+ status_t supportsDrain(bool *supportsDrain) override;
+
+ // Requests notification when data buffered by the driver/hardware has been played.
+ status_t drain(bool earlyNotify) override;
+
+ // Notifies to the audio driver to flush the queued data.
+ status_t flush() override;
+
+ // Return a recent count of the number of audio frames presented to an external observer.
+ status_t getPresentationPosition(uint64_t *frames, struct timespec *timestamp) override;
+
+ // Called when the metadata of the stream's source has been changed.
+ status_t updateSourceMetadata(const SourceMetadata& sourceMetadata) override;
+
+ // Returns the Dual Mono mode presentation setting.
+ status_t getDualMonoMode(audio_dual_mono_mode_t* mode) override;
+
+ // Sets the Dual Mono mode presentation on the output device.
+ status_t setDualMonoMode(audio_dual_mono_mode_t mode) override;
+
+ // Returns the Audio Description Mix level in dB.
+ status_t getAudioDescriptionMixLevel(float* leveldB) override;
+
+ // Sets the Audio Description Mix level in dB.
+ status_t setAudioDescriptionMixLevel(float leveldB) override;
+
+ // Retrieves current playback rate parameters.
+ status_t getPlaybackRateParameters(audio_playback_rate_t* playbackRate) override;
+
+ // Sets the playback rate parameters that control playback behavior.
+ status_t setPlaybackRateParameters(const audio_playback_rate_t& playbackRate) override;
+
+ status_t setEventCallback(const sp<StreamOutHalInterfaceEventCallback>& callback) override;
+
+ status_t setLatencyMode(audio_latency_mode_t mode) override;
+ status_t getRecommendedLatencyModes(std::vector<audio_latency_mode_t> *modes) override;
+ status_t setLatencyModeCallback(
+ const sp<StreamOutHalInterfaceLatencyModeCallback>& callback) override;
+
+ void onRecommendedLatencyModeChanged(const std::vector<audio_latency_mode_t>& modes);
+
+ status_t exit() override;
+
+ void onCodecFormatChanged(const std::basic_string<uint8_t>& metadataBs);
+
+ // Methods used by StreamOutCallback ().
+ // FIXME: Consider the required visibility.
+ void onWriteReady();
+ void onDrainReady();
+ void onError();
+
+ private:
+ friend class sp<StreamOutHalAidl>;
+
+ mediautils::atomic_wp<StreamOutHalInterfaceCallback> mCallback;
+ mediautils::atomic_wp<StreamOutHalInterfaceEventCallback> mEventCallback;
+ mediautils::atomic_wp<StreamOutHalInterfaceLatencyModeCallback> mLatencyModeCallback;
+
+ const std::shared_ptr<::aidl::android::hardware::audio::core::IStreamOut> mStream;
+
+ // Can not be constructed directly by clients.
+ StreamOutHalAidl(
+ const ::aidl::android::hardware::audio::core::StreamDescriptor& descriptor,
+ const std::shared_ptr<::aidl::android::hardware::audio::core::IStreamOut>& stream);
+
+ ~StreamOutHalAidl() override = default;
+};
+
+class StreamInHalAidl : public StreamInHalInterface, public StreamHalAidl {
+ public:
+ // Set the input gain for the audio driver.
+ status_t setGain(float gain) override;
+
+ // Read audio buffer in from driver.
+ status_t read(void *buffer, size_t bytes, size_t *read) override;
+
+ // Return the amount of input frames lost in the audio driver.
+ status_t getInputFramesLost(uint32_t *framesLost) override;
+
+ // Return a recent count of the number of audio frames received and
+ // the clock time associated with that frame count.
+ status_t getCapturePosition(int64_t *frames, int64_t *time) override;
+
+ // Get active microphones
+ status_t getActiveMicrophones(std::vector<media::MicrophoneInfo> *microphones) override;
+
+ // Set microphone direction (for processing)
+ status_t setPreferredMicrophoneDirection(
+ audio_microphone_direction_t direction) override;
+
+ // Set microphone zoom (for processing)
+ status_t setPreferredMicrophoneFieldDimension(float zoom) override;
+
+ // Called when the metadata of the stream's sink has been changed.
+ status_t updateSinkMetadata(const SinkMetadata& sinkMetadata) override;
+
+ private:
+ friend class sp<StreamInHalAidl>;
+
+ const std::shared_ptr<::aidl::android::hardware::audio::core::IStreamIn> mStream;
+
+ // Can not be constructed directly by clients.
+ StreamInHalAidl(
+ const ::aidl::android::hardware::audio::core::StreamDescriptor& descriptor,
+ const std::shared_ptr<::aidl::android::hardware::audio::core::IStreamIn>& stream);
+
+ ~StreamInHalAidl() override = default;
+};
+
+} // namespace android
diff --git a/media/libaudiohal/impl/StreamHalHidl.cpp b/media/libaudiohal/impl/StreamHalHidl.cpp
index 76f9a60..2c289e1 100644
--- a/media/libaudiohal/impl/StreamHalHidl.cpp
+++ b/media/libaudiohal/impl/StreamHalHidl.cpp
@@ -46,9 +46,6 @@
using namespace ::android::hardware::audio::common::COMMON_TYPES_CPP_VERSION;
using namespace ::android::hardware::audio::CORE_TYPES_CPP_VERSION;
-#define TIME_CHECK() auto TimeCheck = \
- mediautils::makeTimeCheckStatsForClassMethod(getClassName(), __func__)
-
StreamHalHidl::StreamHalHidl(std::string_view className, IStream *stream)
: CoreConversionHelperHidl(className),
mStream(stream),
diff --git a/media/libaudiohal/include/media/audiohal/AudioEffectUuid.h b/media/libaudiohal/include/media/audiohal/AudioEffectUuid.h
new file mode 100644
index 0000000..20a10f6
--- /dev/null
+++ b/media/libaudiohal/include/media/audiohal/AudioEffectUuid.h
@@ -0,0 +1,112 @@
+/*
+ * Copyright (C) 2023 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#pragma once
+
+#include <aidl/android/media/audio/common/AudioUuid.h>
+
+namespace android {
+namespace effect {
+
+using ::aidl::android::media::audio::common::AudioUuid;
+
+// 7b491460-8d4d-11e0-bd61-0002a5d5c51b.
+static const AudioUuid kAcousticEchoCancelerTypeUUID = {static_cast<int32_t>(0x7b491460),
+ 0x8d4d,
+ 0x11e0,
+ 0xbd61,
+ {0x00, 0x02, 0xa5, 0xd5, 0xc5, 0x1b}};
+// ae3c653b-be18-4ab8-8938-418f0a7f06ac
+static const AudioUuid kAutomaticGainControlTypeUUID = {static_cast<int32_t>(0xae3c653b),
+ 0xbe18,
+ 0x4ab8,
+ 0x8938,
+ {0x41, 0x8f, 0x0a, 0x7f, 0x06, 0xac}};
+// 0634f220-ddd4-11db-a0fc-0002a5d5c51b
+static const AudioUuid kBassBoostTypeUUID = {static_cast<int32_t>(0x0634f220),
+ 0xddd4,
+ 0x11db,
+ 0xa0fc,
+ {0x00, 0x02, 0xa5, 0xd5, 0xc5, 0x1b}};
+// fa81862a-588b-11ed-9b6a-0242ac120002
+static const AudioUuid kDownmixTypeUUID = {static_cast<int32_t>(0xfa81862a),
+ 0x588b,
+ 0x11ed,
+ 0x9b6a,
+ {0x02, 0x42, 0xac, 0x12, 0x00, 0x02}};
+// 0bed4300-ddd6-11db-8f34-0002a5d5c51b.
+static const AudioUuid kEqualizerTypeUUID = {static_cast<int32_t>(0x0bed4300),
+ 0xddd6,
+ 0x11db,
+ 0x8f34,
+ {0x00, 0x02, 0xa5, 0xd5, 0xc5, 0x1b}};
+// 7261676f-6d75-7369-6364-28e2fd3ac39e
+static const AudioUuid kDynamicsProcessingTypeUUID = {static_cast<int32_t>(0x7261676f),
+ 0x6d75,
+ 0x7369,
+ 0x6364,
+ {0x28, 0xe2, 0xfd, 0x3a, 0xc3, 0x9e}};
+// 1411e6d6-aecd-4021-a1cf-a6aceb0d71e5
+static const AudioUuid kHapticGeneratorTypeUUID = {static_cast<int32_t>(0x1411e6d6),
+ 0xaecd,
+ 0x4021,
+ 0xa1cf,
+ {0xa6, 0xac, 0xeb, 0x0d, 0x71, 0xe5}};
+// fe3199be-aed0-413f-87bb-11260eb63cf1
+static const AudioUuid kLoudnessEnhancerTypeUUID = {static_cast<int32_t>(0xfe3199be),
+ 0xaed0,
+ 0x413f,
+ 0x87bb,
+ {0x11, 0x26, 0x0e, 0xb6, 0x3c, 0xf1}};
+// c2e5d5f0-94bd-4763-9cac-4e234d06839e
+static const AudioUuid kEnvReverbTypeUUID = {static_cast<int32_t>(0xc2e5d5f0),
+ 0x94bd,
+ 0x4763,
+ 0x9cac,
+ {0x4e, 0x23, 0x4d, 0x06, 0x83, 0x9e}};
+// 58b4b260-8e06-11e0-aa8e-0002a5d5c51b
+static const AudioUuid kNoiseSuppressionTypeUUID = {static_cast<int32_t>(0x58b4b260),
+ 0x8e06,
+ 0x11e0,
+ 0xaa8e,
+ {0x00, 0x02, 0xa5, 0xd5, 0xc5, 0x1b}};
+// 47382d60-ddd8-11db-bf3a-0002a5d5c51b
+static const AudioUuid kPresetReverbTypeUUID = {static_cast<int32_t>(0x47382d60),
+ 0xddd8,
+ 0x11db,
+ 0xbf3a,
+ {0x00, 0x02, 0xa5, 0xd5, 0xc5, 0x1b}};
+// 37cc2c00-dddd-11db-8577-0002a5d5c51b
+static const AudioUuid kVirtualizerTypeUUID = {static_cast<int32_t>(0x37cc2c00),
+ 0xdddd,
+ 0x11db,
+ 0x8577,
+ {0x00, 0x02, 0xa5, 0xd5, 0xc5, 0x1b}};
+// fa819f3e-588b-11ed-9b6a-0242ac120002
+static const AudioUuid kVisualizerTypeUUID = {static_cast<int32_t>(0xfa819f3e),
+ 0x588b,
+ 0x11ed,
+ 0x9b6a,
+ {0x02, 0x42, 0xac, 0x12, 0x00, 0x02}};
+// fa81a2b8-588b-11ed-9b6a-0242ac120002
+static const AudioUuid kVolumeTypeUUID = {static_cast<int32_t>(0xfa81a2b8),
+ 0x588b,
+ 0x11ed,
+ 0x9b6a,
+ {0x02, 0x42, 0xac, 0x12, 0x00, 0x02}};
+
+} // namespace effect
+} // namespace android
diff --git a/media/libaudiohal/include/media/audiohal/AudioHalUtils.h b/media/libaudiohal/include/media/audiohal/AudioHalUtils.h
deleted file mode 100644
index 4862cba..0000000
--- a/media/libaudiohal/include/media/audiohal/AudioHalUtils.h
+++ /dev/null
@@ -1,35 +0,0 @@
-/*
- * Copyright (C) 2022 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#pragma once
-
-#define RETURN_IF_BINDER_FAIL(expr) \
- do { \
- const ::ndk::ScopedAStatus _temp_status_ = (expr); \
- if (!_temp_status_.isOk()) { \
- ALOGE("%s:%d return with expr %s msg %s", __func__, __LINE__, #expr, \
- _temp_status_.getMessage()); \
- return _temp_status_.getStatus(); \
- } \
- } while (false)
-
-#define RETURN_IF_NOT_OK(statement) \
- do { \
- auto tmp = (statement); \
- if (tmp != OK) { \
- return tmp; \
- } \
- } while (false)
diff --git a/media/libaudiohal/include/media/audiohal/DeviceHalInterface.h b/media/libaudiohal/include/media/audiohal/DeviceHalInterface.h
index 2c8219e..094b415 100644
--- a/media/libaudiohal/include/media/audiohal/DeviceHalInterface.h
+++ b/media/libaudiohal/include/media/audiohal/DeviceHalInterface.h
@@ -106,16 +106,10 @@
virtual status_t releaseAudioPatch(audio_patch_handle_t patch) = 0;
// Fills the list of supported attributes for a given audio port.
- virtual status_t getAudioPort(struct audio_port* port) {
- ALOGE("%s override me port %p", __func__, port);
- return OK;
- }
+ virtual status_t getAudioPort(struct audio_port* port) = 0;
// Fills the list of supported attributes for a given audio port.
- virtual status_t getAudioPort(struct audio_port_v7 *port) {
- ALOGE("%s override me port %p", __func__, port);
- return OK;
- }
+ virtual status_t getAudioPort(struct audio_port_v7 *port) = 0;
// Set audio port configuration.
virtual status_t setAudioPortConfig(const struct audio_port_config *config) = 0;
diff --git a/media/libaudiohal/tests/EffectsFactoryHalInterface_test.cpp b/media/libaudiohal/tests/EffectsFactoryHalInterface_test.cpp
index 83c7809..c2e2ba7 100644
--- a/media/libaudiohal/tests/EffectsFactoryHalInterface_test.cpp
+++ b/media/libaudiohal/tests/EffectsFactoryHalInterface_test.cpp
@@ -20,11 +20,17 @@
#include <media/audiohal/EffectsFactoryHalInterface.h>
+#include <system/audio_effects/audio_effects_utils.h>
+#include <system/audio_effects/effect_aec.h>
+#include <system/audio_effect.h>
+
#include <gtest/gtest.h>
#include <utils/RefBase.h>
namespace android {
+using effect::utils::EffectParamWriter;
+
// EffectsFactoryHalInterface
TEST(libAudioHalTest, createEffectsFactoryHalInterface) {
ASSERT_NE(nullptr, EffectsFactoryHalInterface::create());
@@ -78,6 +84,81 @@
EXPECT_NE(0, version.getMajorVersion());
}
+static char testDataBuffer[sizeof(effect_param_t) + 0xff] = {};
+static char testResponseBuffer[sizeof(effect_param_t) + 0xff] = {};
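+// Verifies that setting an AEC parameter before EFFECT_CMD_INIT fails.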
+TEST(libAudioHalTest, aecNotInit) {
+ auto factory = EffectsFactoryHalInterface::create();
+ ASSERT_NE(nullptr, factory);
+
+ std::vector<effect_descriptor_t> descs;
+ EXPECT_EQ(OK, factory->getDescriptors(&FX_IID_AEC_, &descs));
+ for (const auto& desc : descs) {
+ ASSERT_EQ(0, std::memcmp(&desc.type, &FX_IID_AEC_, sizeof(FX_IID_AEC_)));
+ sp<EffectHalInterface> interface;
+ EXPECT_EQ(OK, factory->createEffect(&desc.uuid, 1 /* sessionId */, 1 /* ioId */,
+ 1 /* deviceId */, &interface));
+ EXPECT_NE(nullptr, interface);
+ effect_param_t* param = (effect_param_t*)testDataBuffer;
+ uint32_t type = AEC_PARAM_ECHO_DELAY, value = 0xbead;
+ param->psize = sizeof(type);
+ param->vsize = sizeof(value);
+ //EXPECT_EQ(1, 0) << param->psize << " " << param->vsize;
+ EffectParamWriter writer(*param);
+ EXPECT_EQ(OK, writer.writeToParameter(&type)) << writer.toString();
+ EXPECT_EQ(OK, writer.writeToValue(&value)) << writer.toString();
+ status_t reply = 0;
+ uint32_t replySize = sizeof(reply);
+ EXPECT_NE(OK, interface->command(EFFECT_CMD_SET_PARAM, (uint32_t)writer.getTotalSize(),
+ param, &replySize, &reply));
+ EXPECT_EQ(replySize, sizeof(reply));
+ EXPECT_NE(OK, reply);
+ }
+}
+
+// TODO: rethink this test case to make it general for all effect types
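+// Verifies that the AEC echo delay can be set with EFFECT_CMD_SET_PARAM after
+// EFFECT_CMD_INIT and read back with EFFECT_CMD_GET_PARAM.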
+TEST(libAudioHalTest, aecInitSetAndGet) {
+ auto factory = EffectsFactoryHalInterface::create();
+ ASSERT_NE(nullptr, factory);
+
+ std::vector<effect_descriptor_t> descs;
+ EXPECT_EQ(OK, factory->getDescriptors(&FX_IID_AEC_, &descs));
+ static constexpr uint32_t delayValue = 0x20;
+ for (const auto& desc : descs) {
+ ASSERT_EQ(0, std::memcmp(&desc.type, &FX_IID_AEC_, sizeof(FX_IID_AEC_)));
+ sp<EffectHalInterface> interface;
+ EXPECT_EQ(OK, factory->createEffect(&desc.uuid, 1 /* sessionId */, 1 /* ioId */,
+ 1 /* deviceId */, &interface));
+ EXPECT_NE(nullptr, interface);
+ effect_param_t* param = (effect_param_t*)testDataBuffer;
+ uint32_t type = AEC_PARAM_ECHO_DELAY, value = delayValue;
+ param->psize = sizeof(type);
+ param->vsize = sizeof(value);
+ //EXPECT_EQ(1, 0) << param->psize << " " << param->vsize;
+ EffectParamWriter writer(*param);
+ EXPECT_EQ(OK, writer.writeToParameter(&type)) << writer.toString();
+ EXPECT_EQ(OK, writer.writeToValue(&value)) << writer.toString();
+ status_t reply = 0;
+ uint32_t replySize = sizeof(reply);
+ EXPECT_EQ(OK, interface->command(EFFECT_CMD_INIT, 0, nullptr, &replySize, &reply));
+ EXPECT_EQ(OK, interface->command(EFFECT_CMD_SET_PARAM, (uint32_t)writer.getTotalSize(),
+ param, &replySize, &reply));
+ EXPECT_EQ(replySize, sizeof(reply));
+ EXPECT_EQ(OK, reply);
+
+        effect_param_t* responseParam = (effect_param_t*)testResponseBuffer;
+        responseParam->psize = sizeof(type);
+        responseParam->vsize = sizeof(value);
+        EffectParamWriter response(*responseParam);
+ EXPECT_EQ(OK, response.writeToParameter(&type)) << response.toString();
+ replySize = response.getTotalSize();
+ EXPECT_EQ(OK, interface->command(EFFECT_CMD_GET_PARAM, (uint32_t)writer.getTotalSize(),
+ param, &replySize, responseParam));
+ EXPECT_EQ(replySize, response.getTotalSize());
+ EXPECT_EQ(OK, response.readFromValue(&value));
+ EXPECT_EQ(delayValue, value);
+ }
+}
+
// TODO: b/263986405 Add multi-thread testing
} // namespace android
diff --git a/media/libeffects/downmix/aidl/DownmixContext.cpp b/media/libeffects/downmix/aidl/DownmixContext.cpp
index 6869689..43bfeed 100644
--- a/media/libeffects/downmix/aidl/DownmixContext.cpp
+++ b/media/libeffects/downmix/aidl/DownmixContext.cpp
@@ -21,6 +21,7 @@
#include "DownmixContext.h"
using aidl::android::hardware::audio::effect::IEffect;
+using ::aidl::android::media::audio::common::AudioChannelLayout;
using ::android::hardware::audio::common::getChannelCount;
namespace aidl::android::hardware::audio::effect {
diff --git a/media/libeffects/downmix/aidl/DownmixContext.h b/media/libeffects/downmix/aidl/DownmixContext.h
index 8a244ac..9a9f2da 100644
--- a/media/libeffects/downmix/aidl/DownmixContext.h
+++ b/media/libeffects/downmix/aidl/DownmixContext.h
@@ -22,9 +22,6 @@
namespace aidl::android::hardware::audio::effect {
-using media::audio::common::AudioChannelLayout;
-using media::audio::common::AudioDeviceDescription;
-
enum DownmixState {
DOWNMIX_STATE_UNINITIALIZED,
DOWNMIX_STATE_INITIALIZED,
@@ -45,34 +42,25 @@
}
Downmix::Type getDmType() const { return mType; }
- RetCode setVolumeStereo(const Parameter::VolumeStereo& volumeStereo) override {
- // FIXME change volume
- mVolumeStereo = volumeStereo;
- return RetCode::SUCCESS;
- }
- Parameter::VolumeStereo getVolumeStereo() override { return mVolumeStereo; }
-
- RetCode setOutputDevice(const AudioDeviceDescription& device) override {
+ RetCode setOutputDevice(
+ const std::vector<::aidl::android::media::audio::common::AudioDeviceDescription>&
+ device) override {
// FIXME change type if playing on headset vs speaker
mOutputDevice = device;
return RetCode::SUCCESS;
}
- AudioDeviceDescription getOutputDevice() { return mOutputDevice; }
IEffect::Status lvmProcess(float* in, float* out, int samples);
private:
DownmixState mState;
Downmix::Type mType;
- AudioChannelLayout mChMask;
+ ::aidl::android::media::audio::common::AudioChannelLayout mChMask;
::android::audio_utils::channels::ChannelMix mChannelMix;
// Common Params
- AudioDeviceDescription mOutputDevice;
- Parameter::VolumeStereo mVolumeStereo;
-
void init_params(const Parameter::Common& common);
- bool isChannelMaskValid(AudioChannelLayout channelMask);
+ bool isChannelMaskValid(::aidl::android::media::audio::common::AudioChannelLayout channelMask);
};
} // namespace aidl::android::hardware::audio::effect
diff --git a/media/libeffects/hapticgenerator/aidl/HapticGeneratorContext.cpp b/media/libeffects/hapticgenerator/aidl/HapticGeneratorContext.cpp
index 64f51c3..8ed579b 100644
--- a/media/libeffects/hapticgenerator/aidl/HapticGeneratorContext.cpp
+++ b/media/libeffects/hapticgenerator/aidl/HapticGeneratorContext.cpp
@@ -67,7 +67,7 @@
}
RetCode HapticGeneratorContext::setHgHapticScales(
- const std::vector<HapticGenerator::HapticScale> hapticScales) {
+ const std::vector<HapticGenerator::HapticScale>& hapticScales) {
std::lock_guard lg(mMutex);
for (auto hapticScale : hapticScales) {
mParams.mHapticScales.insert_or_assign(hapticScale.id, hapticScale.scale);
diff --git a/media/libeffects/hapticgenerator/aidl/HapticGeneratorContext.h b/media/libeffects/hapticgenerator/aidl/HapticGeneratorContext.h
index dc43feb..a0a0a4c 100644
--- a/media/libeffects/hapticgenerator/aidl/HapticGeneratorContext.h
+++ b/media/libeffects/hapticgenerator/aidl/HapticGeneratorContext.h
@@ -69,7 +69,7 @@
RetCode disable();
void reset();
- RetCode setHgHapticScales(const std::vector<HapticGenerator::HapticScale> hapticScales);
+ RetCode setHgHapticScales(const std::vector<HapticGenerator::HapticScale>& hapticScales);
std::vector<HapticGenerator::HapticScale> getHgHapticScales();
RetCode setHgVibratorInformation(const HapticGenerator::VibratorInformation& vibratorInfo);
diff --git a/media/libeffects/lvm/wrapper/Aidl/BundleContext.cpp b/media/libeffects/lvm/wrapper/Aidl/BundleContext.cpp
index 3aee721..c601c38 100644
--- a/media/libeffects/lvm/wrapper/Aidl/BundleContext.cpp
+++ b/media/libeffects/lvm/wrapper/Aidl/BundleContext.cpp
@@ -287,32 +287,42 @@
}
bool BundleContext::isDeviceSupportedBassBoost(
- const aidl::android::media::audio::common::AudioDeviceDescription& device) {
- return (device == AudioDeviceDescription{AudioDeviceType::OUT_SPEAKER, ""} ||
- device == AudioDeviceDescription{AudioDeviceType::OUT_CARKIT,
- AudioDeviceDescription::CONNECTION_BT_SCO} ||
- device == AudioDeviceDescription{AudioDeviceType::OUT_SPEAKER,
- AudioDeviceDescription::CONNECTION_BT_A2DP});
+ const std::vector<aidl::android::media::audio::common::AudioDeviceDescription>& devices) {
+ for (const auto& device : devices) {
+ if (device != AudioDeviceDescription{AudioDeviceType::OUT_SPEAKER, ""} &&
+ device != AudioDeviceDescription{AudioDeviceType::OUT_CARKIT,
+ AudioDeviceDescription::CONNECTION_BT_SCO} &&
+ device != AudioDeviceDescription{AudioDeviceType::OUT_SPEAKER,
+ AudioDeviceDescription::CONNECTION_BT_A2DP}) {
+ return false;
+ }
+ }
+ return true;
}
bool BundleContext::isDeviceSupportedVirtualizer(
- const aidl::android::media::audio::common::AudioDeviceDescription& device) {
- return (device == AudioDeviceDescription{AudioDeviceType::OUT_HEADSET,
- AudioDeviceDescription::CONNECTION_ANALOG} ||
- device == AudioDeviceDescription{AudioDeviceType::OUT_HEADPHONE,
- AudioDeviceDescription::CONNECTION_ANALOG} ||
- device == AudioDeviceDescription{AudioDeviceType::OUT_HEADPHONE,
- AudioDeviceDescription::CONNECTION_BT_A2DP} ||
- device == AudioDeviceDescription{AudioDeviceType::OUT_HEADSET,
- AudioDeviceDescription::CONNECTION_USB});
+ const std::vector<aidl::android::media::audio::common::AudioDeviceDescription>& devices) {
+ for (const auto& device : devices) {
+ if (device != AudioDeviceDescription{AudioDeviceType::OUT_HEADSET,
+ AudioDeviceDescription::CONNECTION_ANALOG} &&
+ device != AudioDeviceDescription{AudioDeviceType::OUT_HEADPHONE,
+ AudioDeviceDescription::CONNECTION_ANALOG} &&
+ device != AudioDeviceDescription{AudioDeviceType::OUT_HEADPHONE,
+ AudioDeviceDescription::CONNECTION_BT_A2DP} &&
+ device != AudioDeviceDescription{AudioDeviceType::OUT_HEADSET,
+ AudioDeviceDescription::CONNECTION_USB}) {
+ return false;
+ }
+ }
+ return true;
}
RetCode BundleContext::setOutputDevice(
- const aidl::android::media::audio::common::AudioDeviceDescription& device) {
- mOutputDevice = device;
+ const std::vector<aidl::android::media::audio::common::AudioDeviceDescription>& devices) {
+ mOutputDevice = devices;
switch (mType) {
case lvm::BundleEffectType::BASS_BOOST:
- if (isDeviceSupportedBassBoost(device)) {
+ if (!isDeviceSupportedBassBoost(devices)) {
// If a device doesn't support bass boost, the effect must be temporarily disabled.
// The effect must still report its original state as this can only be changed by
// the start/stop commands.
@@ -330,7 +340,7 @@
}
break;
case lvm::BundleEffectType::VIRTUALIZER:
- if (isDeviceSupportedVirtualizer(device)) {
+ if (!isDeviceSupportedVirtualizer(devices)) {
if (mEnabled) {
disableOperatingMode();
}
diff --git a/media/libeffects/lvm/wrapper/Aidl/BundleContext.h b/media/libeffects/lvm/wrapper/Aidl/BundleContext.h
index be723f7..1f328fc 100644
--- a/media/libeffects/lvm/wrapper/Aidl/BundleContext.h
+++ b/media/libeffects/lvm/wrapper/Aidl/BundleContext.h
@@ -57,11 +57,14 @@
return mChMask;
}
bool isDeviceSupportedBassBoost(
- const aidl::android::media::audio::common::AudioDeviceDescription& device);
+ const std::vector<aidl::android::media::audio::common::AudioDeviceDescription>&
+ devices);
bool isDeviceSupportedVirtualizer(
- const aidl::android::media::audio::common::AudioDeviceDescription& device);
+ const std::vector<aidl::android::media::audio::common::AudioDeviceDescription>&
+ devices);
RetCode setOutputDevice(
- const aidl::android::media::audio::common::AudioDeviceDescription& device) override;
+ const std::vector<aidl::android::media::audio::common::AudioDeviceDescription>& devices)
+ override;
RetCode setEqualizerPreset(const std::size_t presetIdx);
int getEqualizerPreset() const { return mCurPresetIdx; }
diff --git a/media/libeffects/lvm/wrapper/Android.bp b/media/libeffects/lvm/wrapper/Android.bp
index aef9295..bc19379 100644
--- a/media/libeffects/lvm/wrapper/Android.bp
+++ b/media/libeffects/lvm/wrapper/Android.bp
@@ -128,4 +128,36 @@
visibility: [
"//hardware/interfaces/audio/aidl/default",
],
-}
\ No newline at end of file
+}
+
+cc_library_shared {
+ name: "libreverbaidl",
+ srcs: [
+ "Reverb/aidl/ReverbContext.cpp",
+ "Reverb/aidl/EffectReverb.cpp",
+ ":effectCommonFile",
+ ],
+ static_libs: ["libreverb"],
+ defaults: [
+ "aidlaudioservice_defaults",
+ "latest_android_hardware_audio_effect_ndk_shared",
+ "latest_android_media_audio_common_types_ndk_shared",
+ ],
+ local_include_dirs: ["Reverb/aidl"],
+ header_libs: [
+ "libaudioeffects",
+ "libhardware_headers",
+ ],
+ shared_libs: [
+ "libbase",
+ "libaudioutils",
+ "libcutils",
+ "liblog",
+ ],
+ cflags: [
+ "-Wthread-safety",
+ ],
+ visibility: [
+ "//hardware/interfaces/audio/aidl/default",
+ ],
+}
diff --git a/media/libeffects/lvm/wrapper/Reverb/aidl/EffectReverb.cpp b/media/libeffects/lvm/wrapper/Reverb/aidl/EffectReverb.cpp
new file mode 100644
index 0000000..51825ca
--- /dev/null
+++ b/media/libeffects/lvm/wrapper/Reverb/aidl/EffectReverb.cpp
@@ -0,0 +1,373 @@
+/*
+ * Copyright (C) 2023 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#define LOG_TAG "EffectReverb"
+#include <Utils.h>
+#include <algorithm>
+#include <unordered_set>
+
+#include <android-base/logging.h>
+#include <fmq/AidlMessageQueue.h>
+#include <audio_effects/effect_bassboost.h>
+#include <audio_effects/effect_equalizer.h>
+#include <audio_effects/effect_virtualizer.h>
+
+#include "EffectReverb.h"
+#include <limits.h>
+
+using aidl::android::hardware::audio::effect::Descriptor;
+using aidl::android::hardware::audio::effect::EffectReverb;
+using aidl::android::hardware::audio::effect::IEffect;
+using aidl::android::hardware::audio::effect::kAuxEnvReverbImplUUID;
+using aidl::android::hardware::audio::effect::kAuxPresetReverbImplUUID;
+using aidl::android::hardware::audio::effect::kInsertEnvReverbImplUUID;
+using aidl::android::hardware::audio::effect::kInsertPresetReverbImplUUID;
+using aidl::android::hardware::audio::effect::State;
+using aidl::android::media::audio::common::AudioUuid;
+
+bool isReverbUuidSupported(const AudioUuid* uuid) {
+ return (*uuid == kAuxEnvReverbImplUUID || *uuid == kInsertEnvReverbImplUUID ||
+ *uuid == kAuxPresetReverbImplUUID || *uuid == kInsertPresetReverbImplUUID);
+}
+
+extern "C" binder_exception_t createEffect(const AudioUuid* uuid,
+ std::shared_ptr<IEffect>* instanceSpp) {
+ if (uuid == nullptr || !isReverbUuidSupported(uuid)) {
+ LOG(ERROR) << __func__ << "uuid not supported";
+ return EX_ILLEGAL_ARGUMENT;
+ }
+ if (instanceSpp) {
+ *instanceSpp = ndk::SharedRefBase::make<EffectReverb>(*uuid);
+ LOG(DEBUG) << __func__ << " instance " << instanceSpp->get() << " created";
+ return EX_NONE;
+ } else {
+ LOG(ERROR) << __func__ << " invalid input parameter!";
+ return EX_ILLEGAL_ARGUMENT;
+ }
+}
+
+extern "C" binder_exception_t queryEffect(const AudioUuid* in_impl_uuid, Descriptor* _aidl_return) {
+ if (!in_impl_uuid || !isReverbUuidSupported(in_impl_uuid)) {
+ LOG(ERROR) << __func__ << "uuid not supported";
+ return EX_ILLEGAL_ARGUMENT;
+ }
+ if (*in_impl_uuid == kAuxEnvReverbImplUUID) {
+ *_aidl_return = aidl::android::hardware::audio::effect::lvm::kAuxEnvReverbDesc;
+ } else if (*in_impl_uuid == kInsertEnvReverbImplUUID) {
+ *_aidl_return = aidl::android::hardware::audio::effect::lvm::kInsertEnvReverbDesc;
+ } else if (*in_impl_uuid == kAuxPresetReverbImplUUID) {
+ *_aidl_return = aidl::android::hardware::audio::effect::lvm::kAuxPresetReverbDesc;
+ } else if (*in_impl_uuid == kInsertPresetReverbImplUUID) {
+ *_aidl_return = aidl::android::hardware::audio::effect::lvm::kInsertPresetReverbDesc;
+ }
+ return EX_NONE;
+}
+
+namespace aidl::android::hardware::audio::effect {
+
+EffectReverb::EffectReverb(const AudioUuid& uuid) {
+ LOG(DEBUG) << __func__ << uuid.toString();
+ if (uuid == kAuxEnvReverbImplUUID) {
+ mType = lvm::ReverbEffectType::AUX_ENV;
+ mDescriptor = &lvm::kAuxEnvReverbDesc;
+ mEffectName = &lvm::kAuxEnvReverbEffectName;
+ } else if (uuid == kInsertEnvReverbImplUUID) {
+ mType = lvm::ReverbEffectType::INSERT_ENV;
+ mDescriptor = &lvm::kInsertEnvReverbDesc;
+ mEffectName = &lvm::kInsertEnvReverbEffectName;
+ } else if (uuid == kAuxPresetReverbImplUUID) {
+ mType = lvm::ReverbEffectType::AUX_PRESET;
+ mDescriptor = &lvm::kAuxPresetReverbDesc;
+ mEffectName = &lvm::kAuxPresetReverbEffectName;
+ } else if (uuid == kInsertPresetReverbImplUUID) {
+ mType = lvm::ReverbEffectType::INSERT_PRESET;
+ mDescriptor = &lvm::kInsertPresetReverbDesc;
+ mEffectName = &lvm::kInsertPresetReverbEffectName;
+ } else {
+ LOG(ERROR) << __func__ << uuid.toString() << " not supported!";
+ }
+}
+
+EffectReverb::~EffectReverb() {
+ cleanUp();
+ LOG(DEBUG) << __func__;
+}
+
+ndk::ScopedAStatus EffectReverb::getDescriptor(Descriptor* _aidl_return) {
+ RETURN_IF(!_aidl_return, EX_ILLEGAL_ARGUMENT, "Parameter:nullptr");
+    *_aidl_return = *mDescriptor;
+    LOG(DEBUG) << __func__ << " " << _aidl_return->toString();
+ return ndk::ScopedAStatus::ok();
+}
+
+ndk::ScopedAStatus EffectReverb::setParameterSpecific(const Parameter::Specific& specific) {
+ LOG(DEBUG) << __func__ << " specific " << specific.toString();
+ RETURN_IF(!mContext, EX_NULL_POINTER, "nullContext");
+
+ auto tag = specific.getTag();
+ switch (tag) {
+ case Parameter::Specific::presetReverb:
+ return setParameterPresetReverb(specific);
+ case Parameter::Specific::environmentalReverb:
+ return setParameterEnvironmentalReverb(specific);
+ default:
+ LOG(ERROR) << __func__ << " unsupported tag " << toString(tag);
+ return ndk::ScopedAStatus::fromExceptionCodeWithMessage(EX_ILLEGAL_ARGUMENT,
+ "specificParamNotSupported");
+ }
+}
+
+ndk::ScopedAStatus EffectReverb::setParameterPresetReverb(const Parameter::Specific& specific) {
+ auto& prParam = specific.get<Parameter::Specific::presetReverb>();
+ auto tag = prParam.getTag();
+
+ switch (tag) {
+ case PresetReverb::preset: {
+ RETURN_IF(mContext->setPresetReverbPreset(prParam.get<PresetReverb::preset>()) !=
+ RetCode::SUCCESS,
+ EX_ILLEGAL_ARGUMENT, "setPresetFailed");
+ return ndk::ScopedAStatus::ok();
+ }
+ default: {
+ LOG(ERROR) << __func__ << " unsupported tag: " << toString(tag);
+ return ndk::ScopedAStatus::fromExceptionCodeWithMessage(EX_ILLEGAL_ARGUMENT,
+ "PresetReverbTagNotSupported");
+ }
+ }
+}
+
+ndk::ScopedAStatus EffectReverb::setParameterEnvironmentalReverb(
+ const Parameter::Specific& specific) {
+ auto& erParam = specific.get<Parameter::Specific::environmentalReverb>();
+ auto tag = erParam.getTag();
+
+ switch (tag) {
+ case EnvironmentalReverb::roomLevelMb: {
+ RETURN_IF(mContext->setEnvironmentalReverbRoomLevel(
+ erParam.get<EnvironmentalReverb::roomLevelMb>()) != RetCode::SUCCESS,
+ EX_ILLEGAL_ARGUMENT, "setRoomLevelFailed");
+ return ndk::ScopedAStatus::ok();
+ }
+ case EnvironmentalReverb::roomHfLevelMb: {
+ RETURN_IF(
+ mContext->setEnvironmentalReverbRoomHfLevel(
+ erParam.get<EnvironmentalReverb::roomHfLevelMb>()) != RetCode::SUCCESS,
+ EX_ILLEGAL_ARGUMENT, "setRoomHfLevelFailed");
+ return ndk::ScopedAStatus::ok();
+ }
+ case EnvironmentalReverb::decayTimeMs: {
+ RETURN_IF(mContext->setEnvironmentalReverbDecayTime(
+ erParam.get<EnvironmentalReverb::decayTimeMs>()) != RetCode::SUCCESS,
+ EX_ILLEGAL_ARGUMENT, "setDecayTimeFailed");
+ return ndk::ScopedAStatus::ok();
+ }
+ case EnvironmentalReverb::decayHfRatioPm: {
+ RETURN_IF(
+ mContext->setEnvironmentalReverbDecayHfRatio(
+ erParam.get<EnvironmentalReverb::decayHfRatioPm>()) != RetCode::SUCCESS,
+ EX_ILLEGAL_ARGUMENT, "setDecayHfRatioFailed");
+ return ndk::ScopedAStatus::ok();
+ }
+ case EnvironmentalReverb::levelMb: {
+ RETURN_IF(mContext->setEnvironmentalReverbLevel(
+ erParam.get<EnvironmentalReverb::levelMb>()) != RetCode::SUCCESS,
+ EX_ILLEGAL_ARGUMENT, "setLevelFailed");
+ return ndk::ScopedAStatus::ok();
+ }
+ case EnvironmentalReverb::delayMs: {
+ RETURN_IF(mContext->setEnvironmentalReverbDelay(
+ erParam.get<EnvironmentalReverb::delayMs>()) != RetCode::SUCCESS,
+ EX_ILLEGAL_ARGUMENT, "setDelayFailed");
+ return ndk::ScopedAStatus::ok();
+ }
+ case EnvironmentalReverb::diffusionPm: {
+ RETURN_IF(mContext->setEnvironmentalReverbDiffusion(
+ erParam.get<EnvironmentalReverb::diffusionPm>()) != RetCode::SUCCESS,
+ EX_ILLEGAL_ARGUMENT, "setDiffusionFailed");
+ return ndk::ScopedAStatus::ok();
+ }
+ case EnvironmentalReverb::densityPm: {
+ RETURN_IF(mContext->setEnvironmentalReverbDensity(
+ erParam.get<EnvironmentalReverb::densityPm>()) != RetCode::SUCCESS,
+ EX_ILLEGAL_ARGUMENT, "setDensityFailed");
+ return ndk::ScopedAStatus::ok();
+ }
+ case EnvironmentalReverb::bypass: {
+ RETURN_IF(mContext->setEnvironmentalReverbBypass(
+ erParam.get<EnvironmentalReverb::bypass>()) != RetCode::SUCCESS,
+ EX_ILLEGAL_ARGUMENT, "setBypassFailed");
+ return ndk::ScopedAStatus::ok();
+ }
+ default: {
+ LOG(ERROR) << __func__ << " unsupported tag: " << toString(tag);
+ return ndk::ScopedAStatus::fromExceptionCodeWithMessage(
+ EX_ILLEGAL_ARGUMENT, "EnvironmentalReverbTagNotSupported");
+ }
+ }
+}
+
+ndk::ScopedAStatus EffectReverb::getParameterSpecific(const Parameter::Id& id,
+ Parameter::Specific* specific) {
+ RETURN_IF(!specific, EX_NULL_POINTER, "nullPtr");
+ auto tag = id.getTag();
+
+ switch (tag) {
+ case Parameter::Id::presetReverbTag:
+ return getParameterPresetReverb(id.get<Parameter::Id::presetReverbTag>(), specific);
+ case Parameter::Id::environmentalReverbTag:
+ return getParameterEnvironmentalReverb(id.get<Parameter::Id::environmentalReverbTag>(),
+ specific);
+ default:
+ LOG(ERROR) << __func__ << " unsupported tag: " << toString(tag);
+ return ndk::ScopedAStatus::fromExceptionCodeWithMessage(EX_ILLEGAL_ARGUMENT,
+ "wrongIdTag");
+ }
+}
+
+ndk::ScopedAStatus EffectReverb::getParameterPresetReverb(const PresetReverb::Id& id,
+ Parameter::Specific* specific) {
+ RETURN_IF(id.getTag() != PresetReverb::Id::commonTag, EX_ILLEGAL_ARGUMENT,
+ "PresetReverbTagNotSupported");
+ RETURN_IF(!mContext, EX_NULL_POINTER, "nullContext");
+ PresetReverb prParam;
+ auto tag = id.get<PresetReverb::Id::commonTag>();
+ switch (tag) {
+ case PresetReverb::preset: {
+ prParam.set<PresetReverb::preset>(mContext->getPresetReverbPreset());
+ break;
+ }
+ default: {
+ LOG(ERROR) << __func__ << " unsupported tag: " << toString(tag);
+ return ndk::ScopedAStatus::fromExceptionCodeWithMessage(EX_ILLEGAL_ARGUMENT,
+ "PresetReverbTagNotSupported");
+ }
+ }
+
+ specific->set<Parameter::Specific::presetReverb>(prParam);
+ return ndk::ScopedAStatus::ok();
+}
+
+ndk::ScopedAStatus EffectReverb::getParameterEnvironmentalReverb(const EnvironmentalReverb::Id& id,
+ Parameter::Specific* specific) {
+ RETURN_IF(id.getTag() != EnvironmentalReverb::Id::commonTag, EX_ILLEGAL_ARGUMENT,
+ "EnvironmentalReverbTagNotSupported");
+ RETURN_IF(!mContext, EX_NULL_POINTER, "nullContext");
+ EnvironmentalReverb erParam;
+
+ auto tag = id.get<EnvironmentalReverb::Id::commonTag>();
+ switch (tag) {
+ case EnvironmentalReverb::roomLevelMb: {
+ erParam.set<EnvironmentalReverb::roomLevelMb>(
+ mContext->getEnvironmentalReverbRoomLevel());
+ break;
+ }
+ case EnvironmentalReverb::roomHfLevelMb: {
+ erParam.set<EnvironmentalReverb::roomHfLevelMb>(
+ mContext->getEnvironmentalReverbRoomHfLevel());
+ break;
+ }
+ case EnvironmentalReverb::decayTimeMs: {
+ erParam.set<EnvironmentalReverb::decayTimeMs>(
+ mContext->getEnvironmentalReverbDecayTime());
+ break;
+ }
+ case EnvironmentalReverb::decayHfRatioPm: {
+ erParam.set<EnvironmentalReverb::decayHfRatioPm>(
+ mContext->getEnvironmentalReverbDecayHfRatio());
+ break;
+ }
+ case EnvironmentalReverb::levelMb: {
+ erParam.set<EnvironmentalReverb::levelMb>(mContext->getEnvironmentalReverbLevel());
+ break;
+ }
+ case EnvironmentalReverb::delayMs: {
+ erParam.set<EnvironmentalReverb::delayMs>(mContext->getEnvironmentalReverbDelay());
+ break;
+ }
+ case EnvironmentalReverb::diffusionPm: {
+ erParam.set<EnvironmentalReverb::diffusionPm>(
+ mContext->getEnvironmentalReverbDiffusion());
+ break;
+ }
+ case EnvironmentalReverb::densityPm: {
+ erParam.set<EnvironmentalReverb::densityPm>(mContext->getEnvironmentalReverbDensity());
+ break;
+ }
+ case EnvironmentalReverb::bypass: {
+ erParam.set<EnvironmentalReverb::bypass>(mContext->getEnvironmentalReverbBypass());
+ break;
+ }
+ default: {
+ LOG(ERROR) << __func__ << " unsupported tag: " << toString(tag);
+ return ndk::ScopedAStatus::fromExceptionCodeWithMessage(
+ EX_ILLEGAL_ARGUMENT, "EnvironmentalReverbTagNotSupported");
+ }
+ }
+
+ specific->set<Parameter::Specific::environmentalReverb>(erParam);
+ return ndk::ScopedAStatus::ok();
+}
+
+std::shared_ptr<EffectContext> EffectReverb::createContext(const Parameter::Common& common) {
+ if (mContext) {
+ LOG(DEBUG) << __func__ << " context already exist";
+ } else {
+ mContext = std::make_shared<ReverbContext>(1 /* statusFmqDepth */, common, mType);
+ }
+
+ return mContext;
+}
+
+std::shared_ptr<EffectContext> EffectReverb::getContext() {
+ return mContext;
+}
+
+RetCode EffectReverb::releaseContext() {
+ if (mContext) {
+ mContext.reset();
+ }
+ return RetCode::SUCCESS;
+}
+
+ndk::ScopedAStatus EffectReverb::commandImpl(CommandId command) {
+ RETURN_IF(!mContext, EX_NULL_POINTER, "nullContext");
+ switch (command) {
+ case CommandId::START:
+ mContext->enable();
+ break;
+ case CommandId::STOP:
+ mContext->disable();
+ break;
+ case CommandId::RESET:
+ mContext->disable();
+ mContext->resetBuffer();
+ break;
+ default:
+ LOG(ERROR) << __func__ << " commandId " << toString(command) << " not supported";
+ return ndk::ScopedAStatus::fromExceptionCodeWithMessage(EX_ILLEGAL_ARGUMENT,
+ "commandIdNotSupported");
+ }
+ return ndk::ScopedAStatus::ok();
+}
+
+// Processing method running in EffectWorker thread.
+IEffect::Status EffectReverb::effectProcessImpl(float* in, float* out, int sampleToProcess) {
+ IEffect::Status status = {EX_NULL_POINTER, 0, 0};
+ RETURN_VALUE_IF(!mContext, status, "nullContext");
+ return mContext->lvmProcess(in, out, sampleToProcess);
+}
+
+} // namespace aidl::android::hardware::audio::effect
diff --git a/media/libeffects/lvm/wrapper/Reverb/aidl/EffectReverb.h b/media/libeffects/lvm/wrapper/Reverb/aidl/EffectReverb.h
new file mode 100644
index 0000000..d7d2bbd
--- /dev/null
+++ b/media/libeffects/lvm/wrapper/Reverb/aidl/EffectReverb.h
@@ -0,0 +1,64 @@
+/*
+ * Copyright (C) 2023 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#pragma once
+
+#include <aidl/android/hardware/audio/effect/BnEffect.h>
+
+#include "effect-impl/EffectImpl.h"
+#include "ReverbContext.h"
+
+namespace aidl::android::hardware::audio::effect {
+
+class EffectReverb final : public EffectImpl {
+ public:
+ explicit EffectReverb(const AudioUuid& uuid);
+ ~EffectReverb() override;
+
+ ndk::ScopedAStatus getDescriptor(Descriptor* _aidl_return) override;
+
+ ndk::ScopedAStatus setParameterSpecific(const Parameter::Specific& specific) override;
+ ndk::ScopedAStatus getParameterSpecific(const Parameter::Id& id,
+ Parameter::Specific* specific) override;
+
+ std::shared_ptr<EffectContext> createContext(const Parameter::Common& common) override;
+ std::shared_ptr<EffectContext> getContext() override;
+ RetCode releaseContext() override;
+
+ IEffect::Status effectProcessImpl(float* in, float* out, int samples) override;
+
+ ndk::ScopedAStatus commandImpl(CommandId command) override;
+
+ std::string getEffectName() override { return *mEffectName; }
+
+ private:
+ std::shared_ptr<ReverbContext> mContext;
+ const Descriptor* mDescriptor;
+ const std::string* mEffectName;
+ lvm::ReverbEffectType mType;
+
+ IEffect::Status status(binder_status_t status, size_t consumed, size_t produced);
+
+ ndk::ScopedAStatus setParameterPresetReverb(const Parameter::Specific& specific);
+ ndk::ScopedAStatus getParameterPresetReverb(const PresetReverb::Id& id,
+ Parameter::Specific* specific);
+
+ ndk::ScopedAStatus setParameterEnvironmentalReverb(const Parameter::Specific& specific);
+ ndk::ScopedAStatus getParameterEnvironmentalReverb(const EnvironmentalReverb::Id& id,
+ Parameter::Specific* specific);
+};
+
+} // namespace aidl::android::hardware::audio::effect
diff --git a/media/libeffects/lvm/wrapper/Reverb/aidl/ReverbContext.cpp b/media/libeffects/lvm/wrapper/Reverb/aidl/ReverbContext.cpp
new file mode 100644
index 0000000..d35c22b
--- /dev/null
+++ b/media/libeffects/lvm/wrapper/Reverb/aidl/ReverbContext.cpp
@@ -0,0 +1,560 @@
+/*
+ * Copyright (C) 2023 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <cstddef>
+#define LOG_TAG "ReverbContext"
+#include <Utils.h>
+
+#include "ReverbContext.h"
+#include "VectorArithmetic.h"
+#include "math.h"
+
+namespace aidl::android::hardware::audio::effect {
+
+using aidl::android::media::audio::common::AudioDeviceDescription;
+using aidl::android::media::audio::common::AudioDeviceType;
+
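+// Log the LVREV status and jump to the given cleanup label when a call does not succeed.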
+#define GOTO_IF_LVREV_ERROR(status, tag, log) \
+ do { \
+ LVREV_ReturnStatus_en temp = (status); \
+ if (temp != LVREV_SUCCESS) { \
+ LOG(ERROR) << __func__ << " return status: " << temp << " " << (log); \
+ goto tag; \
+ } \
+ } while (0)
+
+RetCode ReverbContext::init() {
+ if (isPreset()) {
+ // force reloading preset at first call to process()
+ mPreset = PresetReverb::Presets::NONE;
+ mNextPreset = PresetReverb::Presets::NONE;
+ }
+
+ mVolume.left = kUnitVolume;
+ mVolume.right = kUnitVolume;
+ mPrevVolume.left = kUnitVolume;
+ mPrevVolume.right = kUnitVolume;
+ volumeMode = VOLUME_FLAT;
+
+ mSamplesToExitCount = kDefaultDecayTime * mCommon.input.base.sampleRate / 1000;
+
+    /* The saved strength is used so the getter returns exactly the strength that was set:
+     * the original strength range of 0:1000 is mapped to 1:15 internally, and echoing the
+     * cached value avoids a quantization-like effect on the round trip.
+     */
+ mRoomLevel = lvm::kMinLevel;
+ mRoomHfLevel = 0;
+ mEnabled = LVM_FALSE;
+ mDecayTime = kDefaultDecayTime;
+ mDecayHfRatio = kDefaultDamping * 20;
+ mDensity = kDefaultRoomSize * 10;
+ mDiffusion = kDefaultDensity * 10;
+ mLevel = lvm::kMinLevel;
+
+ // allocate lvm reverb instance
+ LVREV_ReturnStatus_en status = LVREV_SUCCESS;
+ {
+ std::lock_guard lg(mMutex);
+ LVREV_InstanceParams_st params = {
+ .MaxBlockSize = lvm::kMaxCallSize,
+ // Max format, could be mono during process
+ .SourceFormat = LVM_STEREO,
+ .NumDelays = LVREV_DELAYLINES_4,
+ };
+ /* Init sets the instance handle */
+ status = LVREV_GetInstanceHandle(&mInstance, ¶ms);
+ GOTO_IF_LVREV_ERROR(status, deinit, "LVREV_GetInstanceHandleFailed");
+
+ // set control
+ LVREV_ControlParams_st controlParams;
+ initControlParameter(controlParams);
+ status = LVREV_SetControlParameters(mInstance, &controlParams);
+ GOTO_IF_LVREV_ERROR(status, deinit, "LVREV_SetControlParametersFailed");
+ }
+
+ return RetCode::SUCCESS;
+
+deinit:
+ deInit();
+ return RetCode::ERROR_EFFECT_LIB_ERROR;
+}
+
+void ReverbContext::deInit() {
+ std::lock_guard lg(mMutex);
+ if (mInstance) {
+ LVREV_FreeInstance(mInstance);
+ mInstance = nullptr;
+ }
+}
+
+RetCode ReverbContext::enable() {
+ if (mEnabled) return RetCode::ERROR_ILLEGAL_PARAMETER;
+ mEnabled = true;
+ mSamplesToExitCount = (mDecayTime * mCommon.input.base.sampleRate) / 1000;
+ // force no volume ramp for first buffer processed after enabling the effect
+ volumeMode = VOLUME_FLAT;
+ return RetCode::SUCCESS;
+}
+
+RetCode ReverbContext::disable() {
+ if (!mEnabled) return RetCode::ERROR_ILLEGAL_PARAMETER;
+ mEnabled = false;
+ return RetCode::SUCCESS;
+}
+
+bool ReverbContext::isAuxiliary() {
+ return (mType == lvm::ReverbEffectType::AUX_ENV || mType == lvm::ReverbEffectType::AUX_PRESET);
+}
+
+bool ReverbContext::isPreset() {
+ return (mType == lvm::ReverbEffectType::AUX_PRESET ||
+ mType == lvm::ReverbEffectType::INSERT_PRESET);
+}
+
+RetCode ReverbContext::setVolumeStereo(const Parameter::VolumeStereo& volume) {
+ if (volumeMode == VOLUME_OFF) {
+ // force no volume ramp for first buffer processed after getting volume control
+ volumeMode = VOLUME_FLAT;
+ }
+ mVolumeStereo = volume;
+ return RetCode::SUCCESS;
+}
+
+RetCode ReverbContext::setPresetReverbPreset(const PresetReverb::Presets& preset) {
+ mNextPreset = preset;
+ return RetCode::SUCCESS;
+}
+
+RetCode ReverbContext::setEnvironmentalReverbRoomLevel(int roomLevel) {
+ if (roomLevel < lvm::kEnvReverbCap.minRoomLevelMb ||
+ roomLevel > lvm::kEnvReverbCap.maxRoomLevelMb) {
+ LOG(ERROR) << __func__ << " invalid roomLevel: " << roomLevel;
+ return RetCode::ERROR_ILLEGAL_PARAMETER;
+ }
+
+ // Update Control Parameter
+ LVREV_ControlParams_st params;
+ {
+ std::lock_guard lg(mMutex);
+ RETURN_VALUE_IF(LVREV_SUCCESS != LVREV_GetControlParameters(mInstance, ¶ms),
+ RetCode::ERROR_EFFECT_LIB_ERROR, " getControlParamFailed");
+
+ // Sum of room and reverb level controls
+ // needs to subtract max levels for both room level and reverb level
+ int combinedLevel = (roomLevel + mLevel) - lvm::kMaxReverbLevel;
+ params.Level = convertLevel(combinedLevel);
+
+ RETURN_VALUE_IF(LVREV_SUCCESS != LVREV_SetControlParameters(mInstance, ¶ms),
+ RetCode::ERROR_EFFECT_LIB_ERROR, " setControlParamFailed");
+ }
+ mRoomLevel = roomLevel;
+ return RetCode::SUCCESS;
+}
+
+RetCode ReverbContext::setEnvironmentalReverbRoomHfLevel(int roomHfLevel) {
+ if (roomHfLevel < lvm::kEnvReverbCap.minRoomHfLevelMb ||
+ roomHfLevel > lvm::kEnvReverbCap.maxRoomHfLevelMb) {
+ LOG(ERROR) << __func__ << " invalid roomHfLevel: " << roomHfLevel;
+ return RetCode::ERROR_ILLEGAL_PARAMETER;
+ }
+
+ // Update Control Parameter
+ LVREV_ControlParams_st params;
+ {
+ std::lock_guard lg(mMutex);
+ RETURN_VALUE_IF(LVREV_SUCCESS != LVREV_GetControlParameters(mInstance, ¶ms),
+ RetCode::ERROR_EFFECT_LIB_ERROR, " getControlParamFailed");
+
+ params.LPF = convertHfLevel(roomHfLevel);
+
+ RETURN_VALUE_IF(LVREV_SUCCESS != LVREV_SetControlParameters(mInstance, ¶ms),
+ RetCode::ERROR_EFFECT_LIB_ERROR, " setControlParamFailed");
+ }
+ mRoomHfLevel = roomHfLevel;
+ return RetCode::SUCCESS;
+}
+
+RetCode ReverbContext::setEnvironmentalReverbDecayTime(int decayTime) {
+ if (decayTime < 0 || decayTime > lvm::kEnvReverbCap.maxDecayTimeMs) {
+ LOG(ERROR) << __func__ << " invalid decayTime: " << decayTime;
+ return RetCode::ERROR_ILLEGAL_PARAMETER;
+ }
+ int time = decayTime;
+ if (time > lvm::kMaxT60) {
+ time = lvm::kMaxT60;
+ }
+
+ // Update Control Parameter
+ LVREV_ControlParams_st params;
+ {
+ std::lock_guard lg(mMutex);
+ RETURN_VALUE_IF(LVREV_SUCCESS != LVREV_GetControlParameters(mInstance, ¶ms),
+ RetCode::ERROR_EFFECT_LIB_ERROR, " getControlParamFailed");
+
+ params.T60 = (LVM_UINT16)time;
+ mSamplesToExitCount = (params.T60 * mCommon.input.base.sampleRate) / 1000;
+
+ RETURN_VALUE_IF(LVREV_SUCCESS != LVREV_SetControlParameters(mInstance, ¶ms),
+ RetCode::ERROR_EFFECT_LIB_ERROR, " setControlParamFailed");
+ }
+ mDecayTime = time;
+ return RetCode::SUCCESS;
+}
+
+RetCode ReverbContext::setEnvironmentalReverbDecayHfRatio(int decayHfRatio) {
+ if (decayHfRatio < lvm::kEnvReverbCap.minDecayHfRatioPm ||
+ decayHfRatio > lvm::kEnvReverbCap.maxDecayHfRatioPm) {
+ LOG(ERROR) << __func__ << " invalid decayHfRatio: " << decayHfRatio;
+ return RetCode::ERROR_ILLEGAL_PARAMETER;
+ }
+
+ // Update Control Parameter
+ LVREV_ControlParams_st params;
+ {
+ std::lock_guard lg(mMutex);
+ RETURN_VALUE_IF(LVREV_SUCCESS != LVREV_GetControlParameters(mInstance, ¶ms),
+ RetCode::ERROR_EFFECT_LIB_ERROR, " getControlParamFailed");
+
+ params.Damping = (LVM_INT16)(decayHfRatio / 20);
+
+ RETURN_VALUE_IF(LVREV_SUCCESS != LVREV_SetControlParameters(mInstance, ¶ms),
+ RetCode::ERROR_EFFECT_LIB_ERROR, " setControlParamFailed");
+ }
+ mDecayHfRatio = decayHfRatio;
+ return RetCode::SUCCESS;
+}
+
+RetCode ReverbContext::setEnvironmentalReverbLevel(int level) {
+ if (level < lvm::kEnvReverbCap.minLevelMb || level > lvm::kEnvReverbCap.maxLevelMb) {
+ LOG(ERROR) << __func__ << " invalid level: " << level;
+ return RetCode::ERROR_ILLEGAL_PARAMETER;
+ }
+
+ // Update Control Parameter
+ LVREV_ControlParams_st params;
+ {
+ std::lock_guard lg(mMutex);
+ RETURN_VALUE_IF(LVREV_SUCCESS != LVREV_GetControlParameters(mInstance, ¶ms),
+ RetCode::ERROR_EFFECT_LIB_ERROR, " getControlParamFailed");
+
+ // Sum of room and reverb level controls
+ // needs to subtract max levels for both room level and level
+ int combinedLevel = (level + mRoomLevel) - lvm::kMaxReverbLevel;
+ params.Level = convertLevel(combinedLevel);
+
+ RETURN_VALUE_IF(LVREV_SUCCESS != LVREV_SetControlParameters(mInstance, ¶ms),
+ RetCode::ERROR_EFFECT_LIB_ERROR, " setControlParamFailed");
+ }
+ mLevel = level;
+ return RetCode::SUCCESS;
+}
+
+RetCode ReverbContext::setEnvironmentalReverbDelay(int delay) {
+ if (delay < 0 || delay > lvm::kEnvReverbCap.maxDelayMs) {
+ LOG(ERROR) << __func__ << " invalid delay: " << delay;
+ return RetCode::ERROR_ILLEGAL_PARAMETER;
+ }
+ mDelay = delay;
+ return RetCode::SUCCESS;
+}
+
+RetCode ReverbContext::setEnvironmentalReverbDiffusion(int diffusion) {
+ if (diffusion < 0 || diffusion > lvm::kEnvReverbCap.maxDiffusionPm) {
+ LOG(ERROR) << __func__ << " invalid diffusion: " << diffusion;
+ return RetCode::ERROR_ILLEGAL_PARAMETER;
+ }
+
+ // Update Control Parameter
+ LVREV_ControlParams_st params;
+ {
+ std::lock_guard lg(mMutex);
+ RETURN_VALUE_IF(LVREV_SUCCESS != LVREV_GetControlParameters(mInstance, ¶ms),
+ RetCode::ERROR_EFFECT_LIB_ERROR, " getControlParamFailed");
+
+ params.Density = (LVM_INT16)(diffusion / 10);
+
+ RETURN_VALUE_IF(LVREV_SUCCESS != LVREV_SetControlParameters(mInstance, ¶ms),
+ RetCode::ERROR_EFFECT_LIB_ERROR, " setControlParamFailed");
+ }
+ mDiffusion = diffusion;
+ return RetCode::SUCCESS;
+}
+
+RetCode ReverbContext::setEnvironmentalReverbDensity(int density) {
+ if (density < 0 || density > lvm::kEnvReverbCap.maxDensityPm) {
+ LOG(ERROR) << __func__ << " invalid density: " << density;
+ return RetCode::ERROR_ILLEGAL_PARAMETER;
+ }
+
+ // Update Control Parameter
+ LVREV_ControlParams_st params;
+ {
+ std::lock_guard lg(mMutex);
+ RETURN_VALUE_IF(LVREV_SUCCESS != LVREV_GetControlParameters(mInstance, ¶ms),
+ RetCode::ERROR_EFFECT_LIB_ERROR, " getControlParamFailed");
+
+ params.RoomSize = (LVM_INT16)(((density * 99) / 1000) + 1);
+
+ RETURN_VALUE_IF(LVREV_SUCCESS != LVREV_SetControlParameters(mInstance, ¶ms),
+ RetCode::ERROR_EFFECT_LIB_ERROR, " setControlParamFailed");
+ }
+ mDensity = density;
+ return RetCode::SUCCESS;
+}
+
+RetCode ReverbContext::setEnvironmentalReverbBypass(bool bypass) {
+ mBypass = bypass;
+ return RetCode::SUCCESS;
+}
+
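+// Apply the pending preset by translating it into the equivalent environmental reverb parameters.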
+void ReverbContext::loadPreset() {
+ // TODO: add delay when early reflections are implemented
+ mPreset = mNextPreset;
+
+ if (mPreset != PresetReverb::Presets::NONE) {
+ const t_reverb_settings preset = mReverbPresets[mPreset];
+ setEnvironmentalReverbRoomLevel(preset.roomLevel);
+ setEnvironmentalReverbRoomHfLevel(preset.roomHFLevel);
+ setEnvironmentalReverbDecayTime(preset.decayTime);
+ setEnvironmentalReverbDecayHfRatio(preset.decayHFRatio);
+ setEnvironmentalReverbLevel(preset.reverbLevel);
+ // reverbDelay
+ setEnvironmentalReverbDiffusion(preset.diffusion);
+ setEnvironmentalReverbDensity(preset.density);
+ }
+}
+
+void ReverbContext::initControlParameter(LVREV_ControlParams_st& params) {
+ /* Set the initial process parameters */
+ /* General parameters */
+ params.OperatingMode = LVM_MODE_ON;
+ params.SampleRate = LVM_FS_44100;
+ params.SourceFormat = (::android::hardware::audio::common::getChannelCount(
+ mCommon.input.base.channelMask) == 1
+ ? LVM_MONO
+ : LVM_STEREO);
+
+ if (!isAuxiliary() && params.SourceFormat == LVM_MONO) {
+ params.SourceFormat = LVM_STEREO;
+ }
+
+ /* Reverb parameters */
+ params.Level = kDefaultLevel;
+ params.LPF = kDefaultLPF;
+ params.HPF = kDefaultHPF;
+ params.T60 = kDefaultDecayTime;
+ params.Density = kDefaultDensity;
+ params.Damping = kDefaultDamping;
+ params.RoomSize = kDefaultRoomSize;
+}
+
+/*
+ * Convert level from OpenSL ES format to LVM format
+ *
+ * @param level : level to be applied
+ */
+int ReverbContext::convertLevel(int level) {
+ for (int i = 0; i < kLevelMapping.size(); i++) {
+ if (level <= kLevelMapping[i]) {
+ return i;
+ }
+ }
+ return kDefaultLevel;
+}
+
+/*
+ * Convert level HF from OpenSL ES format to LVM format
+ *
+ * @param hfLevel : level to be applied
+ */
+int16_t ReverbContext::convertHfLevel(int hfLevel) {
+ for (auto lpfPair : kLPFMapping) {
+ if (hfLevel <= lpfPair.roomHf) {
+ return lpfPair.lpf;
+ }
+ }
+ return kDefaultLPF;
+}
+
+IEffect::Status ReverbContext::lvmProcess(float* in, float* out, int samples) {
+ IEffect::Status status = {EX_NULL_POINTER, 0, 0};
+ RETURN_VALUE_IF(!in, status, "nullInput");
+ RETURN_VALUE_IF(!out, status, "nullOutput");
+ status = {EX_ILLEGAL_STATE, 0, 0};
+ int64_t inputFrameCount = getCommon().input.frameCount;
+ int64_t outputFrameCount = getCommon().output.frameCount;
+ RETURN_VALUE_IF(inputFrameCount != outputFrameCount, status, "FrameCountMismatch");
+ RETURN_VALUE_IF(0 == getInputFrameSize(), status, "zeroFrameSize");
+
+ LOG(DEBUG) << __func__ << " start processing";
+ std::lock_guard lg(mMutex);
+
+ int channels =
+ ::android::hardware::audio::common::getChannelCount(mCommon.input.base.channelMask);
+ int outChannels =
+ ::android::hardware::audio::common::getChannelCount(mCommon.output.base.channelMask);
+ int frameCount = mCommon.input.frameCount;
+
+    // Reverb only affects the stereo channels of a multichannel source.
+ if (channels < 1 || channels > LVM_MAX_CHANNELS) {
+ LOG(ERROR) << __func__ << " process invalid PCM channels " << channels;
+ return status;
+ }
+
+ std::vector<float> inFrames(samples);
+ std::vector<float> outFrames(frameCount * FCC_2);
+
+ if (isPreset() && mNextPreset != mPreset) {
+ loadPreset();
+ }
+
+ if (isAuxiliary()) {
+ inFrames.assign(in, in + samples);
+ } else {
+ // mono input is duplicated
+ if (channels >= FCC_2) {
+ for (int i = 0; i < frameCount; i++) {
+ inFrames[FCC_2 * i] = in[channels * i] * kSendLevel;
+ inFrames[FCC_2 * i + 1] = in[channels * i + 1] * kSendLevel;
+ }
+ } else {
+ for (int i = 0; i < frameCount; i++) {
+ inFrames[FCC_2 * i] = inFrames[FCC_2 * i + 1] = in[i] * kSendLevel;
+ }
+ }
+ }
+
+ if (isPreset() && mPreset == PresetReverb::Presets::NONE) {
+ std::fill(outFrames.begin(), outFrames.end(), 0); // always stereo here
+ } else {
+ if (!mEnabled && mSamplesToExitCount > 0) {
+ std::fill(outFrames.begin(), outFrames.end(), 0);
+ LOG(VERBOSE) << "Zeroing " << channels << " samples per frame at the end of call ";
+ }
+
+ /* Process the samples, producing a stereo output */
+ LVREV_ReturnStatus_en lvrevStatus =
+ LVREV_Process(mInstance, /* Instance handle */
+ inFrames.data(), /* Input buffer */
+ outFrames.data(), /* Output buffer */
+ frameCount); /* Number of samples to read */
+ if (lvrevStatus != LVREV_SUCCESS) {
+ LOG(ERROR) << __func__ << lvrevStatus;
+ return {EX_UNSUPPORTED_OPERATION, 0, 0};
+ }
+ }
+    // Mix with the dry signal and apply volume (insert effects only; auxiliary output is used as is)
+ if (isAuxiliary()) {
+ // nothing to do here
+ } else {
+ if (channels >= FCC_2) {
+ for (int i = 0; i < frameCount; i++) {
+ // Mix with dry input
+ outFrames[FCC_2 * i] += in[channels * i];
+ outFrames[FCC_2 * i + 1] += in[channels * i + 1];
+ }
+ } else {
+ for (int i = 0; i < frameCount; i++) {
+ // Mix with dry input
+ outFrames[FCC_2 * i] += in[i];
+ outFrames[FCC_2 * i + 1] += in[i];
+ }
+ }
+
+ // apply volume with ramp if needed
+ if (mVolume != mPrevVolume && volumeMode == VOLUME_RAMP) {
+ float vl = mPrevVolume.left;
+ float incl = (mVolume.left - vl) / frameCount;
+ float vr = mPrevVolume.right;
+ float incr = (mVolume.right - vr) / frameCount;
+
+ for (int i = 0; i < frameCount; i++) {
+ outFrames[FCC_2 * i] *= vl;
+ outFrames[FCC_2 * i + 1] *= vr;
+
+ vl += incl;
+ vr += incr;
+ }
+ mPrevVolume = mVolume;
+ } else if (volumeMode != VOLUME_OFF) {
+ if (mVolume.left != kUnitVolume || mVolume.right != kUnitVolume) {
+ for (int i = 0; i < frameCount; i++) {
+ outFrames[FCC_2 * i] *= mVolume.left;
+ outFrames[FCC_2 * i + 1] *= mVolume.right;
+ }
+ }
+ mPrevVolume = mVolume;
+ volumeMode = VOLUME_RAMP;
+ }
+ }
+
+ bool accumulate = false;
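+    // Accumulate mode (mixing into the caller's output buffer) is never enabled here.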
+ if (outChannels > 2) {
+ // Accumulate if required
+ if (accumulate) {
+ for (int i = 0; i < frameCount; i++) {
+ out[outChannels * i] += outFrames[FCC_2 * i];
+ out[outChannels * i + 1] += outFrames[FCC_2 * i + 1];
+ }
+ } else {
+ for (int i = 0; i < frameCount; i++) {
+ out[outChannels * i] = outFrames[FCC_2 * i];
+ out[outChannels * i + 1] = outFrames[FCC_2 * i + 1];
+ }
+ }
+ if (!isAuxiliary()) {
+ for (int i = 0; i < frameCount; i++) {
+ // channels and outChannels are expected to be same.
+ for (int j = FCC_2; j < outChannels; j++) {
+ out[outChannels * i + j] = in[outChannels * i + j];
+ }
+ }
+ }
+ } else {
+ if (accumulate) {
+ if (outChannels == FCC_1) {
+ for (int i = 0; i < frameCount; i++) {
+ out[i] += ((outFrames[i * FCC_2] + outFrames[i * FCC_2 + 1]) * 0.5f);
+ }
+ } else {
+ for (int i = 0; i < frameCount * FCC_2; i++) {
+ out[i] += outFrames[i];
+ }
+ }
+ } else {
+ if (outChannels == FCC_1) {
+ From2iToMono_Float(outFrames.data(), out, frameCount);
+ } else {
+ for (int i = 0; i < frameCount * FCC_2; i++) {
+ out[i] = outFrames[i];
+ }
+ }
+ }
+ }
+
+ LOG(DEBUG) << __func__ << " done processing";
+
+ if (!mEnabled && mSamplesToExitCount > 0) {
+ // signed - unsigned will trigger integer overflow if result becomes negative.
+ mSamplesToExitCount -= samples;
+ }
+
+ return {STATUS_OK, samples, outChannels * frameCount};
+}
+
+} // namespace aidl::android::hardware::audio::effect
diff --git a/media/libeffects/lvm/wrapper/Reverb/aidl/ReverbContext.h b/media/libeffects/lvm/wrapper/Reverb/aidl/ReverbContext.h
new file mode 100644
index 0000000..af49a25
--- /dev/null
+++ b/media/libeffects/lvm/wrapper/Reverb/aidl/ReverbContext.h
@@ -0,0 +1,174 @@
+/*
+ * Copyright (C) 2023 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#pragma once
+
+#include <android-base/logging.h>
+#include <android-base/thread_annotations.h>
+#include <unordered_map>
+
+#include "ReverbTypes.h"
+#include "effect-impl/EffectContext.h"
+
+namespace aidl::android::hardware::audio::effect {
+
+enum VolumeMode {
+ VOLUME_OFF,
+ VOLUME_FLAT,
+ VOLUME_RAMP,
+};
+
+struct LPFPair {
+ int roomHf;
+ int lpf;
+};
+
+class ReverbContext final : public EffectContext {
+ public:
+ ReverbContext(int statusDepth, const Parameter::Common& common,
+ const lvm::ReverbEffectType& type)
+ : EffectContext(statusDepth, common), mType(type) {
+ LOG(DEBUG) << __func__ << type;
+ init();
+ }
+ ~ReverbContext() override {
+ LOG(DEBUG) << __func__;
+ deInit();
+ }
+
+ RetCode init();
+ void deInit();
+
+ RetCode enable();
+ RetCode disable();
+
+ bool isAuxiliary();
+ bool isPreset();
+
+ RetCode setPresetReverbPreset(const PresetReverb::Presets& preset);
+ PresetReverb::Presets getPresetReverbPreset() const { return mNextPreset; }
+
+ RetCode setEnvironmentalReverbRoomLevel(int roomLevel);
+ int getEnvironmentalReverbRoomLevel() const { return mRoomLevel; }
+ RetCode setEnvironmentalReverbRoomHfLevel(int roomHfLevel);
+ int getEnvironmentalReverbRoomHfLevel() const { return mRoomHfLevel; }
+ RetCode setEnvironmentalReverbDecayTime(int decayTime);
+ int getEnvironmentalReverbDecayTime() const { return mDecayTime; }
+ RetCode setEnvironmentalReverbDecayHfRatio(int decayHfRatio);
+ int getEnvironmentalReverbDecayHfRatio() const { return mDecayHfRatio; }
+ RetCode setEnvironmentalReverbLevel(int level);
+ int getEnvironmentalReverbLevel() const { return mLevel; }
+ RetCode setEnvironmentalReverbDelay(int delay);
+ int getEnvironmentalReverbDelay() const { return mDelay; }
+ RetCode setEnvironmentalReverbDiffusion(int diffusion);
+ int getEnvironmentalReverbDiffusion() const { return mDiffusion; }
+ RetCode setEnvironmentalReverbDensity(int density);
+ int getEnvironmentalReverbDensity() const { return mDensity; }
+ RetCode setEnvironmentalReverbBypass(bool bypass);
+ bool getEnvironmentalReverbBypass() const { return mBypass; }
+
+ RetCode setVolumeStereo(const Parameter::VolumeStereo& volumeStereo) override;
+ Parameter::VolumeStereo getVolumeStereo() override { return mVolumeStereo; }
+
+ IEffect::Status lvmProcess(float* in, float* out, int samples);
+
+ private:
+ static constexpr inline float kUnitVolume = 1;
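+    // Fixed send level applied to the dry input before it is fed to the reverb (insert effects only).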
+ static constexpr inline float kSendLevel = 0.75f;
+ static constexpr inline int kDefaultLevel = 0;
+ static constexpr inline int kDefaultLPF = 23999; /* Default low pass filter, in Hz */
+ static constexpr inline int kDefaultHPF = 50; /* Default high pass filter, in Hz */
+ static constexpr inline int kDefaultDecayTime = 1490; /* Default Decay time, in ms */
+ static constexpr inline int kDefaultDensity = 100; /* Default Echo density */
+ static constexpr inline int kDefaultDamping = 21;
+ static constexpr inline int kDefaultRoomSize = 100;
+
+ static inline const std::vector<LPFPair> kLPFMapping = {
+ // Limit range to 50 for LVREV parameter range
+ {-10000, 50}, {-5000, 50}, {-4000, 50}, {-3000, 158}, {-2000, 502}, {-1000, 1666},
+ {-900, 1897}, {-800, 2169}, {-700, 2496}, {-600, 2895}, {-500, 3400}, {-400, 4066},
+ {-300, 5011}, {-200, 6537}, {-100, 9826}, {-99, 9881}, {-98, 9937}, {-97, 9994},
+ {-96, 10052}, {-95, 10111}, {-94, 10171}, {-93, 10231}, {-92, 10293}, {-91, 10356},
+ {-90, 10419}, {-89, 10484}, {-88, 10549}, {-87, 10616}, {-86, 10684}, {-85, 10753},
+ {-84, 10823}, {-83, 10895}, {-82, 10968}, {-81, 11042}, {-80, 11117}, {-79, 11194},
+ {-78, 11272}, {-77, 11352}, {-76, 11433}, {-75, 11516}, {-74, 11600}, {-73, 11686},
+ {-72, 11774}, {-71, 11864}, {-70, 11955}, {-69, 12049}, {-68, 12144}, {-67, 12242},
+ {-66, 12341}, {-65, 12443}, {-64, 12548}, {-63, 12654}, {-62, 12763}, {-61, 12875},
+ {-60, 12990}, {-59, 13107}, {-58, 13227}, {-57, 13351}, {-56, 13477}, {-55, 13607},
+ {-54, 13741}, {-53, 13878}, {-52, 14019}, {-51, 14164}, {-50, 14313}, {-49, 14467},
+ {-48, 14626}, {-47, 14789}, {-46, 14958}, {-45, 15132}, {-44, 15312}, {-43, 15498},
+ {-42, 15691}, {-41, 15890}, {-40, 16097}, {-39, 16311}, {-38, 16534}, {-37, 16766},
+ {-36, 17007}, {-35, 17259}, {-34, 17521}, {-33, 17795}, {-32, 18081}, {-31, 18381},
+ {-30, 18696}, {-29, 19027}, {-28, 19375}, {-27, 19742}, {-26, 20129}, {-25, 20540},
+ {-24, 20976}, {-23, 21439}, {-22, 21934}, {-21, 22463}, {-20, 23031}, {-19, 23643},
+ {-18, 23999}};
+
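+    // Millibel thresholds: convertLevel() returns the index of the first entry >= the requested
+    // level, and that index is used directly as the LVREV Level control value.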
+ static inline const std::vector<int> kLevelMapping = {
+ -12000, -4000, -3398, -3046, -2796, -2603, -2444, -2310, -2194, -2092, -2000, -1918,
+ -1842, -1773, -1708, -1648, -1592, -1540, -1490, -1443, -1398, -1356, -1316, -1277,
+ -1240, -1205, -1171, -1138, -1106, -1076, -1046, -1018, -990, -963, -938, -912,
+ -888, -864, -841, -818, -796, -775, -754, -734, -714, -694, -675, -656,
+ -638, -620, -603, -585, -568, -552, -536, -520, -504, -489, -474, -459,
+ -444, -430, -416, -402, -388, -375, -361, -348, -335, -323, -310, -298,
+ -286, -274, -262, -250, -239, -228, -216, -205, -194, -184, -173, -162,
+ -152, -142, -132, -121, -112, -102, -92, -82, -73, -64, -54, -45,
+ -36, -27, -18, -9, 0};
+
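+    // Per-preset t_reverb_settings values (see audio_effects/effect_environmentalreverb.h),
+    // applied by loadPreset().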
+ static inline std::unordered_map<PresetReverb::Presets, t_reverb_settings> mReverbPresets = {
+ {PresetReverb::Presets::NONE, {0, 0, 0, 0, 0, 0, 0, 0, 0, 0}},
+ {PresetReverb::Presets::SMALLROOM,
+ {-400, -600, 1100, 830, -400, 5, 500, 10, 1000, 1000}},
+ {PresetReverb::Presets::MEDIUMROOM,
+ {-400, -600, 1300, 830, -1000, 20, -200, 20, 1000, 1000}},
+ {PresetReverb::Presets::LARGEROOM,
+ {-400, -600, 1500, 830, -1600, 5, -1000, 40, 1000, 1000}},
+ {PresetReverb::Presets::MEDIUMHALL,
+ {-400, -600, 1800, 700, -1300, 15, -800, 30, 1000, 1000}},
+ {PresetReverb::Presets::LARGEHALL,
+ {-400, -600, 1800, 700, -2000, 30, -1400, 60, 1000, 1000}},
+ {PresetReverb::Presets::PLATE, {-400, -200, 1300, 900, 0, 2, 0, 10, 1000, 750}}};
+
+ std::mutex mMutex;
+ const lvm::ReverbEffectType mType;
+ bool mEnabled = false;
+ LVREV_Handle_t mInstance GUARDED_BY(mMutex);
+
+ int mRoomLevel;
+ int mRoomHfLevel;
+ int mDecayTime;
+ int mDecayHfRatio;
+ int mLevel;
+ int mDelay;
+ int mDiffusion;
+ int mDensity;
+ bool mBypass;
+
+ PresetReverb::Presets mPreset;
+ PresetReverb::Presets mNextPreset;
+
+ int mSamplesToExitCount;
+
+ Parameter::VolumeStereo mVolume;
+ Parameter::VolumeStereo mPrevVolume;
+ VolumeMode volumeMode;
+
+ void initControlParameter(LVREV_ControlParams_st& params);
+ int16_t convertHfLevel(int hfLevel);
+ int convertLevel(int level);
+ void loadPreset();
+};
+
+} // namespace aidl::android::hardware::audio::effect
diff --git a/media/libeffects/lvm/wrapper/Reverb/aidl/ReverbTypes.h b/media/libeffects/lvm/wrapper/Reverb/aidl/ReverbTypes.h
new file mode 100644
index 0000000..e37602c
--- /dev/null
+++ b/media/libeffects/lvm/wrapper/Reverb/aidl/ReverbTypes.h
@@ -0,0 +1,150 @@
+/*
+ * Copyright (C) 2023 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#pragma once
+
+#include <aidl/android/hardware/audio/effect/BnEffect.h>
+#include <android/binder_enums.h>
+#include <audio_effects/effect_environmentalreverb.h>
+#include <audio_effects/effect_presetreverb.h>
+#include "effect-impl/EffectUUID.h"
+// from Reverb/lib
+#include "LVREV.h"
+
+namespace aidl::android::hardware::audio::effect {
+namespace lvm {
+
+constexpr inline int kMaxCallSize = 256;
+constexpr inline int kMinLevel = -6000;
+constexpr inline int kMaxT60 = 7000; /* Maximum decay time */
+constexpr inline int kMaxReverbLevel = 2000;
+constexpr inline int kMaxFrameSize = 2560;
+constexpr inline int kCpuLoadARM9E = 470; // Expressed in 0.1 MIPS
+constexpr inline int kMemUsage = (71 + (kMaxFrameSize >> 7)); // Expressed in kB
+
+static const EnvironmentalReverb::Capability kEnvReverbCap = {.minRoomLevelMb = lvm::kMinLevel,
+ .maxRoomLevelMb = 0,
+ .minRoomHfLevelMb = -4000,
+ .maxRoomHfLevelMb = 0,
+ .maxDecayTimeMs = lvm::kMaxT60,
+ .minDecayHfRatioPm = 100,
+ .maxDecayHfRatioPm = 2000,
+ .minLevelMb = lvm::kMinLevel,
+ .maxLevelMb = 0,
+ .maxDelayMs = 65,
+ .maxDiffusionPm = 1000,
+ .maxDensityPm = 1000};
+
+// NXP SW auxiliary environmental reverb
+static const std::string kAuxEnvReverbEffectName = "Auxiliary Environmental Reverb";
+static const Descriptor kAuxEnvReverbDesc = {
+ .common = {.id = {.type = kEnvReverbTypeUUID,
+ .uuid = kAuxEnvReverbImplUUID,
+ .proxy = std::nullopt},
+ .flags = {.type = Flags::Type::AUXILIARY},
+ .cpuLoad = kCpuLoadARM9E,
+ .memoryUsage = kMemUsage,
+ .name = kAuxEnvReverbEffectName,
+ .implementor = "NXP Software Ltd."},
+ .capability = Capability::make<Capability::environmentalReverb>(kEnvReverbCap)};
+
+// NXP SW insert environmental reverb
+static const std::string kInsertEnvReverbEffectName = "Insert Environmental Reverb";
+static const Descriptor kInsertEnvReverbDesc = {
+ .common = {.id = {.type = kEnvReverbTypeUUID,
+ .uuid = kInsertEnvReverbImplUUID,
+ .proxy = std::nullopt},
+ .flags = {.type = Flags::Type::INSERT,
+ .insert = Flags::Insert::FIRST,
+ .volume = Flags::Volume::CTRL},
+ .cpuLoad = kCpuLoadARM9E,
+ .memoryUsage = kMemUsage,
+ .name = kInsertEnvReverbEffectName,
+ .implementor = "NXP Software Ltd."},
+ .capability = Capability::make<Capability::environmentalReverb>(kEnvReverbCap)};
+
+static const std::vector<PresetReverb::Presets> kSupportedPresets{
+ ndk::enum_range<PresetReverb::Presets>().begin(),
+ ndk::enum_range<PresetReverb::Presets>().end()};
+static const PresetReverb::Capability kPresetReverbCap = {.supportedPresets = kSupportedPresets};
+
+// NXP SW auxiliary preset reverb
+static const std::string kAuxPresetReverbEffectName = "Auxiliary Preset Reverb";
+static const Descriptor kAuxPresetReverbDesc = {
+ .common = {.id = {.type = kPresetReverbTypeUUID,
+ .uuid = kAuxPresetReverbImplUUID,
+ .proxy = std::nullopt},
+ .flags = {.type = Flags::Type::AUXILIARY},
+ .cpuLoad = kCpuLoadARM9E,
+ .memoryUsage = kMemUsage,
+ .name = kAuxPresetReverbEffectName,
+ .implementor = "NXP Software Ltd."},
+ .capability = Capability::make<Capability::presetReverb>(kPresetReverbCap)};
+
+// NXP SW insert preset reverb
+static const std::string kInsertPresetReverbEffectName = "Insert Preset Reverb";
+static const Descriptor kInsertPresetReverbDesc = {
+ .common = {.id = {.type = kPresetReverbTypeUUID,
+ .uuid = kInsertPresetReverbImplUUID,
+ .proxy = std::nullopt},
+ .flags = {.type = Flags::Type::INSERT,
+ .insert = Flags::Insert::FIRST,
+ .volume = Flags::Volume::CTRL},
+ .cpuLoad = kCpuLoadARM9E,
+ .memoryUsage = kMemUsage,
+ .name = kInsertPresetReverbEffectName,
+ .implementor = "NXP Software Ltd."},
+ .capability = Capability::make<Capability::presetReverb>(kPresetReverbCap)};
+
+enum class ReverbEffectType {
+ AUX_ENV,
+ INSERT_ENV,
+ AUX_PRESET,
+ INSERT_PRESET,
+};
+
+inline std::ostream& operator<<(std::ostream& out, const ReverbEffectType& type) {
+ switch (type) {
+ case ReverbEffectType::AUX_ENV:
+ return out << kAuxEnvReverbEffectName;
+ case ReverbEffectType::INSERT_ENV:
+ return out << kInsertEnvReverbEffectName;
+ case ReverbEffectType::AUX_PRESET:
+ return out << kAuxPresetReverbEffectName;
+ case ReverbEffectType::INSERT_PRESET:
+ return out << kInsertPresetReverbEffectName;
+ }
+ return out << "EnumReverbEffectTypeError";
+}
+
+inline std::ostream& operator<<(std::ostream& out, const LVREV_ReturnStatus_en& status) {
+ switch (status) {
+ case LVREV_SUCCESS:
+ return out << "LVREV_SUCCESS";
+ case LVREV_NULLADDRESS:
+ return out << "LVREV_NULLADDRESS";
+ case LVREV_OUTOFRANGE:
+ return out << "LVREV_OUTOFRANGE";
+ case LVREV_INVALIDNUMSAMPLES:
+ return out << "LVREV_INVALIDNUMSAMPLES";
+ case LVREV_RETURNSTATUS_DUMMY:
+ return out << "LVREV_RETURNSTATUS_DUMMY";
+ }
+ return out << "EnumLvrevRetStatusError";
+}
+
+} // namespace lvm
+} // namespace aidl::android::hardware::audio::effect
diff --git a/media/libmedia/Android.bp b/media/libmedia/Android.bp
index ab1cf69..590a7b7 100644
--- a/media/libmedia/Android.bp
+++ b/media/libmedia/Android.bp
@@ -384,7 +384,6 @@
export_shared_lib_headers: [
"libaudioclient",
"libbinder",
- "libandroidicu",
//"libsonivox",
"libmedia_omx",
"framework-permission-aidl-cpp",
diff --git a/media/libmedia/include/media/CharacterEncodingDetector.h b/media/libmedia/include/media/CharacterEncodingDetector.h
index 62564b1..2acc868 100644
--- a/media/libmedia/include/media/CharacterEncodingDetector.h
+++ b/media/libmedia/include/media/CharacterEncodingDetector.h
@@ -21,9 +21,12 @@
#include "StringArray.h"
-#include "unicode/ucnv.h"
-#include "unicode/ucsdet.h"
-#include "unicode/ustring.h"
+/** Declare opaque structures from ICU4C. */
+struct UConverter;
+typedef struct UConverter UConverter;
+
+struct UCharsetMatch;
+typedef struct UCharsetMatch UCharsetMatch;
namespace android {
diff --git a/media/libstagefright/Android.bp b/media/libstagefright/Android.bp
index 8c469df..32e40c3 100644
--- a/media/libstagefright/Android.bp
+++ b/media/libstagefright/Android.bp
@@ -155,6 +155,7 @@
"libEGL",
"libGLESv1_CM",
"libGLESv2",
+ "libvulkan",
"libgui",
"liblog",
"libprocessgroup",
@@ -300,7 +301,6 @@
"libstagefright_codecbase",
"libstagefright_foundation",
"libstagefright_omx_utils",
- "libRScpp",
"libhidlallocatorutils",
"libhidlbase",
"libhidlmemory",
@@ -317,7 +317,6 @@
"libstagefright_esds",
"libstagefright_color_conversion",
"libyuv_static",
- "libstagefright_mediafilter",
"libstagefright_webm",
"libstagefright_timedtext",
"libogg",
diff --git a/media/libstagefright/MediaCodec.cpp b/media/libstagefright/MediaCodec.cpp
index e995931..cf4b849 100644
--- a/media/libstagefright/MediaCodec.cpp
+++ b/media/libstagefright/MediaCodec.cpp
@@ -75,7 +75,6 @@
#include <media/stagefright/MediaCodecConstants.h>
#include <media/stagefright/MediaDefs.h>
#include <media/stagefright/MediaErrors.h>
-#include <media/stagefright/MediaFilter.h>
#include <media/stagefright/OMXClient.h>
#include <media/stagefright/PersistentSurface.h>
#include <media/stagefright/SurfaceUtils.h>
@@ -1457,8 +1456,6 @@
} else if (name.startsWithIgnoreCase("omx.")) {
// at this time only ACodec specifies a mime type.
return new ACodec;
- } else if (name.startsWithIgnoreCase("android.filter.")) {
- return new MediaFilter;
} else {
return NULL;
}
diff --git a/media/libstagefright/filters/Android.bp b/media/libstagefright/filters/Android.bp
deleted file mode 100644
index e6d59ad..0000000
--- a/media/libstagefright/filters/Android.bp
+++ /dev/null
@@ -1,52 +0,0 @@
-package {
- // See: http://go/android-license-faq
- // A large-scale-change added 'default_applicable_licenses' to import
- // all of the 'license_kinds' from "frameworks_av_media_libstagefright_license"
- // to get the below license kinds:
- // SPDX-license-identifier-Apache-2.0
- default_applicable_licenses: ["frameworks_av_media_libstagefright_license"],
-}
-
-cc_library_static {
- name: "libstagefright_mediafilter",
-
- srcs: [
- "ColorConvert.cpp",
- "GraphicBufferListener.cpp",
- "IntrinsicBlurFilter.cpp",
- "MediaFilter.cpp",
- "RSFilter.cpp",
- "SaturationFilter.cpp",
- "saturationARGB.rscript",
- "SimpleFilter.cpp",
- "ZeroFilter.cpp",
- ],
-
- export_include_dirs: [
- "include",
- ],
-
- local_include_dirs: [
- "include/filters",
- ],
-
- cflags: [
- "-Wno-multichar",
- "-Werror",
- "-Wall",
- ],
-
- header_libs: [
- "libmediadrm_headers",
- ],
-
- shared_libs: [
- "libgui",
- "libmedia",
- "libhidlmemory",
- ],
-
- sanitize: {
- cfi: true,
- },
-}
diff --git a/media/libstagefright/filters/ColorConvert.cpp b/media/libstagefright/filters/ColorConvert.cpp
deleted file mode 100644
index a8d5dd2..0000000
--- a/media/libstagefright/filters/ColorConvert.cpp
+++ /dev/null
@@ -1,111 +0,0 @@
-/*
- * Copyright (C) 2014 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#include "ColorConvert.h"
-
-#ifndef max
-#define max(a,b) ((a) > (b) ? (a) : (b))
-#endif
-#ifndef min
-#define min(a,b) ((a) < (b) ? (a) : (b))
-#endif
-
-namespace android {
-
-void YUVToRGB(
- int32_t y, int32_t u, int32_t v,
- int32_t* r, int32_t* g, int32_t* b) {
- y -= 16;
- u -= 128;
- v -= 128;
-
- *b = 1192 * y + 2066 * u;
- *g = 1192 * y - 833 * v - 400 * u;
- *r = 1192 * y + 1634 * v;
-
- *r = min(262143, max(0, *r));
- *g = min(262143, max(0, *g));
- *b = min(262143, max(0, *b));
-
- *r >>= 10;
- *g >>= 10;
- *b >>= 10;
-}
-
-void convertYUV420spToARGB(
- uint8_t *pY, uint8_t *pUV, int32_t width, int32_t height,
- uint8_t *dest) {
- const int32_t bytes_per_pixel = 2;
-
- for (int32_t i = 0; i < height; i++) {
- for (int32_t j = 0; j < width; j++) {
- int32_t y = *(pY + i * width + j);
- int32_t u = *(pUV + (i/2) * width + bytes_per_pixel * (j/2));
- int32_t v = *(pUV + (i/2) * width + bytes_per_pixel * (j/2) + 1);
-
- int32_t r, g, b;
- YUVToRGB(y, u, v, &r, &g, &b);
-
- *dest++ = 0xFF;
- *dest++ = r;
- *dest++ = g;
- *dest++ = b;
- }
- }
-}
-
-void convertYUV420spToRGB888(
- uint8_t *pY, uint8_t *pUV, int32_t width, int32_t height,
- uint8_t *dest) {
- const int32_t bytes_per_pixel = 2;
-
- for (int32_t i = 0; i < height; i++) {
- for (int32_t j = 0; j < width; j++) {
- int32_t y = *(pY + i * width + j);
- int32_t u = *(pUV + (i/2) * width + bytes_per_pixel * (j/2));
- int32_t v = *(pUV + (i/2) * width + bytes_per_pixel * (j/2) + 1);
-
- int32_t r, g, b;
- YUVToRGB(y, u, v, &r, &g, &b);
-
- *dest++ = r;
- *dest++ = g;
- *dest++ = b;
- }
- }
-}
-
-// HACK - not even slightly optimized
-// TODO: remove when RGBA support is added to SoftwareRenderer
-void convertRGBAToARGB(
- uint8_t *src, int32_t width, int32_t height, uint32_t stride,
- uint8_t *dest) {
- for (int32_t i = 0; i < height; ++i) {
- for (int32_t j = 0; j < width; ++j) {
- uint8_t r = *src++;
- uint8_t g = *src++;
- uint8_t b = *src++;
- uint8_t a = *src++;
- *dest++ = a;
- *dest++ = r;
- *dest++ = g;
- *dest++ = b;
- }
- src += (stride - width) * 4;
- }
-}
-
-} // namespace android
diff --git a/media/libstagefright/filters/GraphicBufferListener.cpp b/media/libstagefright/filters/GraphicBufferListener.cpp
deleted file mode 100644
index db061c1..0000000
--- a/media/libstagefright/filters/GraphicBufferListener.cpp
+++ /dev/null
@@ -1,155 +0,0 @@
-/*
- * Copyright (C) 2014 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-//#define LOG_NDEBUG 0
-#define LOG_TAG "GraphicBufferListener"
-
-#include <media/stagefright/foundation/ADebug.h>
-#include <media/stagefright/foundation/AMessage.h>
-#include <media/stagefright/MediaErrors.h>
-
-#include <gui/BufferItem.h>
-#include <utils/String8.h>
-
-#include "GraphicBufferListener.h"
-
-namespace android {
-
-status_t GraphicBufferListener::init(
- const sp<AMessage> &notify,
- size_t bufferWidth, size_t bufferHeight, size_t bufferCount) {
- mNotify = notify;
-
- String8 name("GraphicBufferListener");
- BufferQueue::createBufferQueue(&mProducer, &mConsumer);
- mConsumer->setConsumerName(name);
- mConsumer->setDefaultBufferSize(bufferWidth, bufferHeight);
- mConsumer->setConsumerUsageBits(GRALLOC_USAGE_SW_READ_OFTEN);
-
- status_t err = mConsumer->setMaxAcquiredBufferCount(bufferCount);
- if (err != NO_ERROR) {
- ALOGE("Unable to set BQ max acquired buffer count to %zu: %d",
- bufferCount, err);
- return err;
- }
-
- wp<BufferQueue::ConsumerListener> listener =
- static_cast<BufferQueue::ConsumerListener*>(this);
- sp<BufferQueue::ProxyConsumerListener> proxy =
- new BufferQueue::ProxyConsumerListener(listener);
-
- err = mConsumer->consumerConnect(proxy, false);
- if (err != NO_ERROR) {
- ALOGE("Error connecting to BufferQueue: %s (%d)",
- strerror(-err), err);
- return err;
- }
-
- ALOGV("init() successful.");
-
- return OK;
-}
-
-void GraphicBufferListener::onFrameAvailable(const BufferItem& /* item */) {
- ALOGV("onFrameAvailable() called");
-
- {
- Mutex::Autolock autoLock(mMutex);
- mNumFramesAvailable++;
- }
-
- sp<AMessage> notify = mNotify->dup();
- mNotify->setWhat(kWhatFrameAvailable);
- mNotify->post();
-}
-
-void GraphicBufferListener::onBuffersReleased() {
- ALOGV("onBuffersReleased() called");
- // nothing to do
-}
-
-void GraphicBufferListener::onSidebandStreamChanged() {
- ALOGW("GraphicBufferListener cannot consume sideband streams.");
- // nothing to do
-}
-
-BufferItem GraphicBufferListener::getBufferItem() {
- BufferItem item;
-
- {
- Mutex::Autolock autoLock(mMutex);
- if (mNumFramesAvailable <= 0) {
- ALOGE("getBuffer() called with no frames available");
- return item;
- }
- mNumFramesAvailable--;
- }
-
- status_t err = mConsumer->acquireBuffer(&item, 0);
- if (err == BufferQueue::NO_BUFFER_AVAILABLE) {
- // shouldn't happen, since we track num frames available
- ALOGE("frame was not available");
- item.mSlot = -1;
- return item;
- } else if (err != OK) {
- ALOGE("acquireBuffer returned err=%d", err);
- item.mSlot = -1;
- return item;
- }
-
- // Wait for it to become available.
- err = item.mFence->waitForever("GraphicBufferListener::getBufferItem");
- if (err != OK) {
- ALOGW("failed to wait for buffer fence: %d", err);
- // keep going
- }
-
- // If this is the first time we're seeing this buffer, add it to our
- // slot table.
- if (item.mGraphicBuffer != NULL) {
- ALOGV("setting mBufferSlot %d", item.mSlot);
- mBufferSlot[item.mSlot] = item.mGraphicBuffer;
- }
-
- return item;
-}
-
-sp<GraphicBuffer> GraphicBufferListener::getBuffer(BufferItem item) {
- sp<GraphicBuffer> buf;
- if (item.mSlot < 0 || item.mSlot >= BufferQueue::NUM_BUFFER_SLOTS) {
- ALOGE("getBuffer() received invalid BufferItem: mSlot==%d", item.mSlot);
- return buf;
- }
-
- buf = mBufferSlot[item.mSlot];
- CHECK(buf.get() != NULL);
-
- return buf;
-}
-
-status_t GraphicBufferListener::releaseBuffer(BufferItem item) {
- if (item.mSlot < 0 || item.mSlot >= BufferQueue::NUM_BUFFER_SLOTS) {
- ALOGE("getBuffer() received invalid BufferItem: mSlot==%d", item.mSlot);
- return ERROR_OUT_OF_RANGE;
- }
-
- mConsumer->releaseBuffer(item.mSlot, item.mFrameNumber,
- EGL_NO_DISPLAY, EGL_NO_SYNC_KHR, Fence::NO_FENCE);
-
- return OK;
-}
-
-} // namespace android
diff --git a/media/libstagefright/filters/IntrinsicBlurFilter.cpp b/media/libstagefright/filters/IntrinsicBlurFilter.cpp
deleted file mode 100644
index e00afd9..0000000
--- a/media/libstagefright/filters/IntrinsicBlurFilter.cpp
+++ /dev/null
@@ -1,99 +0,0 @@
-/*
- * Copyright (C) 2014 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-//#define LOG_NDEBUG 0
-#define LOG_TAG "IntrinsicBlurFilter"
-
-#include <utils/Log.h>
-
-#include <media/MediaCodecBuffer.h>
-#include <media/stagefright/foundation/ADebug.h>
-#include <media/stagefright/foundation/AMessage.h>
-
-#include "IntrinsicBlurFilter.h"
-
-namespace android {
-
-status_t IntrinsicBlurFilter::configure(const sp<AMessage> &msg) {
- status_t err = SimpleFilter::configure(msg);
- if (err != OK) {
- return err;
- }
-
- if (!msg->findString("cacheDir", &mCacheDir)) {
- ALOGE("Failed to find cache directory in config message.");
- return NAME_NOT_FOUND;
- }
-
- return OK;
-}
-
-status_t IntrinsicBlurFilter::start() {
- // TODO: use a single RS context object for entire application
- mRS = new RSC::RS();
-
- if (!mRS->init(mCacheDir.c_str())) {
- ALOGE("Failed to initialize RenderScript context.");
- return NO_INIT;
- }
-
- // 32-bit elements for ARGB8888
- RSC::sp<const RSC::Element> e = RSC::Element::U8_4(mRS);
-
- RSC::Type::Builder tb(mRS, e);
- tb.setX(mWidth);
- tb.setY(mHeight);
- RSC::sp<const RSC::Type> t = tb.create();
-
- mAllocIn = RSC::Allocation::createTyped(mRS, t);
- mAllocOut = RSC::Allocation::createTyped(mRS, t);
-
- mBlur = RSC::ScriptIntrinsicBlur::create(mRS, e);
- mBlur->setRadius(mBlurRadius);
- mBlur->setInput(mAllocIn);
-
- return OK;
-}
-
-void IntrinsicBlurFilter::reset() {
- mBlur.clear();
- mAllocOut.clear();
- mAllocIn.clear();
- mRS.clear();
-}
-
-status_t IntrinsicBlurFilter::setParameters(const sp<AMessage> &msg) {
- sp<AMessage> params;
- CHECK(msg->findMessage("params", &params));
-
- float blurRadius;
- if (params->findFloat("blur-radius", &blurRadius)) {
- mBlurRadius = blurRadius;
- }
-
- return OK;
-}
-
-status_t IntrinsicBlurFilter::processBuffers(
- const sp<MediaCodecBuffer> &srcBuffer, const sp<MediaCodecBuffer> &outBuffer) {
- mAllocIn->copy1DRangeFrom(0, mWidth * mHeight, srcBuffer->data());
- mBlur->forEach(mAllocOut);
- mAllocOut->copy1DRangeTo(0, mWidth * mHeight, outBuffer->data());
-
- return OK;
-}
-
-} // namespace android
diff --git a/media/libstagefright/filters/MediaFilter.cpp b/media/libstagefright/filters/MediaFilter.cpp
deleted file mode 100644
index c7baa73..0000000
--- a/media/libstagefright/filters/MediaFilter.cpp
+++ /dev/null
@@ -1,840 +0,0 @@
-/*
- * Copyright (C) 2014 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-//#define LOG_NDEBUG 0
-#define LOG_TAG "MediaFilter"
-
-#include <inttypes.h>
-#include <utils/Trace.h>
-
-#include <media/stagefright/foundation/ABuffer.h>
-#include <media/stagefright/foundation/ADebug.h>
-#include <media/stagefright/foundation/AMessage.h>
-
-#include <media/stagefright/BufferProducerWrapper.h>
-#include <media/stagefright/MediaCodecConstants.h>
-#include <media/stagefright/MediaDefs.h>
-#include <media/stagefright/MediaErrors.h>
-#include <media/stagefright/MediaFilter.h>
-
-#include <media/MediaCodecBuffer.h>
-
-#include <gui/BufferItem.h>
-
-#include "ColorConvert.h"
-#include "GraphicBufferListener.h"
-#include "IntrinsicBlurFilter.h"
-#include "RSFilter.h"
-#include "SaturationFilter.h"
-#include "ZeroFilter.h"
-
-namespace android {
-
-class MediaFilter::BufferChannel : public BufferChannelBase {
-public:
- BufferChannel(const sp<AMessage> &in, const sp<AMessage> &out)
- : mInputBufferFilled(in), mOutputBufferDrained(out) {
- }
-
- ~BufferChannel() override = default;
-
- // BufferChannelBase
-
- status_t queueInputBuffer(const sp<MediaCodecBuffer> &buffer) override {
- sp<AMessage> msg = mInputBufferFilled->dup();
- msg->setObject("buffer", buffer);
- msg->post();
- return OK;
- }
-
- status_t queueSecureInputBuffer(
- const sp<MediaCodecBuffer> &,
- bool,
- const uint8_t *,
- const uint8_t *,
- CryptoPlugin::Mode,
- CryptoPlugin::Pattern,
- const CryptoPlugin::SubSample *,
- size_t,
- AString *) override {
- return INVALID_OPERATION;
- }
-
- status_t renderOutputBuffer(
- const sp<MediaCodecBuffer> &buffer, int64_t /* timestampNs */) override {
- sp<AMessage> msg = mOutputBufferDrained->dup();
- msg->setObject("buffer", buffer);
- msg->post();
- return OK;
- }
-
- status_t discardBuffer(const sp<MediaCodecBuffer> &buffer) override {
- if (FindBufferIndex(&mInputBuffers, buffer) >= 0) {
- sp<AMessage> msg = mInputBufferFilled->dup();
- msg->setObject("buffer", buffer);
- msg->post();
- return OK;
- }
- sp<AMessage> msg = mOutputBufferDrained->dup();
- msg->setObject("buffer", buffer);
- msg->post();
- return OK;
- }
-
- void getInputBufferArray(Vector<sp<MediaCodecBuffer>> *array) {
- if (!array) {
- return;
- }
- array->clear();
- array->appendVector(mInputBuffers);
- }
-
- void getOutputBufferArray(Vector<sp<MediaCodecBuffer>> *array) {
- if (!array) {
- return;
- }
- array->clear();
- array->appendVector(mOutputBuffers);
- }
-
- // For MediaFilter
-
- void fillThisBuffer(const sp<MediaCodecBuffer> &buffer) {
- ssize_t index = FindBufferIndex(&mInputBuffers, buffer);
- mCallback->onInputBufferAvailable(index, buffer);
- }
-
- void drainThisBuffer(const sp<MediaCodecBuffer> &buffer, int flags) {
- ssize_t index = FindBufferIndex(&mOutputBuffers, buffer);
- buffer->meta()->setInt32("flags", flags);
- mCallback->onOutputBufferAvailable(index, buffer);
- }
-
- template <class T>
- void setInputBuffers(T begin, T end) {
- mInputBuffers.clear();
- for (T it = begin; it != end; ++it) {
- mInputBuffers.push_back(it->mData);
- }
- }
-
- template <class T>
- void setOutputBuffers(T begin, T end) {
- mOutputBuffers.clear();
- for (T it = begin; it != end; ++it) {
- mOutputBuffers.push_back(it->mData);
- }
- }
-
-private:
- sp<AMessage> mInputBufferFilled;
- sp<AMessage> mOutputBufferDrained;
- Vector<sp<MediaCodecBuffer>> mInputBuffers;
- Vector<sp<MediaCodecBuffer>> mOutputBuffers;
-
- static ssize_t FindBufferIndex(
- Vector<sp<MediaCodecBuffer>> *array, const sp<MediaCodecBuffer> &buffer) {
- for (size_t i = 0; i < array->size(); ++i) {
- if (array->itemAt(i) == buffer) {
- return i;
- }
- }
- return -1;
- }
-};
-
-// parameter: number of input and output buffers
-static const size_t kBufferCountActual = 4;
-
-MediaFilter::MediaFilter()
- : mState(UNINITIALIZED),
- mGeneration(0),
- mGraphicBufferListener(NULL) {
-}
-
-MediaFilter::~MediaFilter() {
-}
-
-//////////////////// PUBLIC FUNCTIONS //////////////////////////////////////////
-
-std::shared_ptr<BufferChannelBase> MediaFilter::getBufferChannel() {
- if (!mBufferChannel) {
- mBufferChannel = std::make_shared<BufferChannel>(
- new AMessage(kWhatInputBufferFilled, this),
- new AMessage(kWhatOutputBufferDrained, this));
- }
- return mBufferChannel;
-}
-
-void MediaFilter::initiateAllocateComponent(const sp<AMessage> &msg) {
- msg->setWhat(kWhatAllocateComponent);
- msg->setTarget(this);
- msg->post();
-}
-
-void MediaFilter::initiateConfigureComponent(const sp<AMessage> &msg) {
- msg->setWhat(kWhatConfigureComponent);
- msg->setTarget(this);
- msg->post();
-}
-
-void MediaFilter::initiateCreateInputSurface() {
- (new AMessage(kWhatCreateInputSurface, this))->post();
-}
-
-void MediaFilter::initiateSetInputSurface(
- const sp<PersistentSurface> & /* surface */) {
- ALOGW("initiateSetInputSurface() unsupported");
-}
-
-void MediaFilter::initiateStart() {
- (new AMessage(kWhatStart, this))->post();
-}
-
-void MediaFilter::initiateShutdown(bool keepComponentAllocated) {
- sp<AMessage> msg = new AMessage(kWhatShutdown, this);
- msg->setInt32("keepComponentAllocated", keepComponentAllocated);
- msg->post();
-}
-
-void MediaFilter::signalFlush() {
- (new AMessage(kWhatFlush, this))->post();
-}
-
-void MediaFilter::signalResume() {
- (new AMessage(kWhatResume, this))->post();
-}
-
-// nothing to do
-void MediaFilter::signalRequestIDRFrame() {
- return;
-}
-
-void MediaFilter::signalSetParameters(const sp<AMessage> &params) {
- sp<AMessage> msg = new AMessage(kWhatSetParameters, this);
- msg->setMessage("params", params);
- msg->post();
-}
-
-void MediaFilter::signalEndOfInputStream() {
- (new AMessage(kWhatSignalEndOfInputStream, this))->post();
-}
-
-void MediaFilter::onMessageReceived(const sp<AMessage> &msg) {
- switch (msg->what()) {
- case kWhatAllocateComponent:
- {
- onAllocateComponent(msg);
- break;
- }
- case kWhatConfigureComponent:
- {
- onConfigureComponent(msg);
- break;
- }
- case kWhatStart:
- {
- onStart();
- break;
- }
- case kWhatProcessBuffers:
- {
- processBuffers();
- break;
- }
- case kWhatInputBufferFilled:
- {
- onInputBufferFilled(msg);
- break;
- }
- case kWhatOutputBufferDrained:
- {
- onOutputBufferDrained(msg);
- break;
- }
- case kWhatShutdown:
- {
- onShutdown(msg);
- break;
- }
- case kWhatFlush:
- {
- onFlush();
- break;
- }
- case kWhatResume:
- {
- // nothing to do
- break;
- }
- case kWhatSetParameters:
- {
- onSetParameters(msg);
- break;
- }
- case kWhatCreateInputSurface:
- {
- onCreateInputSurface();
- break;
- }
- case GraphicBufferListener::kWhatFrameAvailable:
- {
- onInputFrameAvailable();
- break;
- }
- case kWhatSignalEndOfInputStream:
- {
- onSignalEndOfInputStream();
- break;
- }
- default:
- {
- ALOGE("Message not handled:\n%s", msg->debugString().c_str());
- break;
- }
- }
-}
-
-//////////////////// HELPER FUNCTIONS //////////////////////////////////////////
-
-void MediaFilter::signalProcessBuffers() {
- (new AMessage(kWhatProcessBuffers, this))->post();
-}
-
-void MediaFilter::signalError(status_t error) {
- mCallback->onError(error, ACTION_CODE_FATAL);
-}
-
-status_t MediaFilter::allocateBuffersOnPort(OMX_U32 portIndex) {
- CHECK(portIndex == kPortIndexInput || portIndex == kPortIndexOutput);
- const bool isInput = portIndex == kPortIndexInput;
- const size_t bufferSize = isInput ? mMaxInputSize : mMaxOutputSize;
-
- CHECK(mBuffers[portIndex].isEmpty());
-
- ALOGV("Allocating %zu buffers of size %zu on %s port",
- kBufferCountActual, bufferSize,
- isInput ? "input" : "output");
-
- // trigger output format change
- sp<AMessage> outputFormat = mOutputFormat->dup();
- for (size_t i = 0; i < kBufferCountActual; ++i) {
- BufferInfo info;
- info.mStatus = BufferInfo::OWNED_BY_US;
- info.mBufferID = i;
- info.mGeneration = mGeneration;
- info.mOutputFlags = 0;
- info.mData = new MediaCodecBuffer(
- isInput ? mInputFormat : outputFormat,
- new ABuffer(bufferSize));
- info.mData->meta()->setInt64("timeUs", 0);
-
- mBuffers[portIndex].push_back(info);
-
- if (!isInput) {
- mAvailableOutputBuffers.push(
- &mBuffers[portIndex].editItemAt(i));
- }
- }
- if (isInput) {
- mBufferChannel->setInputBuffers(
- mBuffers[portIndex].begin(), mBuffers[portIndex].end());
- } else {
- mBufferChannel->setOutputBuffers(
- mBuffers[portIndex].begin(), mBuffers[portIndex].end());
- }
-
- return OK;
-}
-
-MediaFilter::BufferInfo* MediaFilter::findBuffer(
- uint32_t portIndex, const sp<MediaCodecBuffer> &buffer,
- ssize_t *index) {
- for (size_t i = 0; i < mBuffers[portIndex].size(); ++i) {
- BufferInfo *info = &mBuffers[portIndex].editItemAt(i);
-
- if (info->mData == buffer) {
- if (index != NULL) {
- *index = i;
- }
- return info;
- }
- }
-
- TRESPASS();
-
- return NULL;
-}
-
-void MediaFilter::postFillThisBuffer(BufferInfo *info) {
- ALOGV("postFillThisBuffer on buffer %d", info->mBufferID);
- if (mPortEOS[kPortIndexInput]) {
- return;
- }
-
- CHECK_EQ((int)info->mStatus, (int)BufferInfo::OWNED_BY_US);
-
- info->mGeneration = mGeneration;
-
- info->mData->meta()->clear();
-
- sp<AMessage> reply = new AMessage(kWhatInputBufferFilled, this);
- reply->setInt32("buffer-id", info->mBufferID);
-
- info->mStatus = BufferInfo::OWNED_BY_UPSTREAM;
-
- mBufferChannel->fillThisBuffer(info->mData);
-}
-
-void MediaFilter::postDrainThisBuffer(BufferInfo *info) {
- CHECK_EQ((int)info->mStatus, (int)BufferInfo::OWNED_BY_US);
-
- info->mGeneration = mGeneration;
-
- sp<AMessage> reply = new AMessage(kWhatOutputBufferDrained, this);
- reply->setInt32("buffer-id", info->mBufferID);
-
- mBufferChannel->drainThisBuffer(info->mData, info->mOutputFlags);
-
- info->mStatus = BufferInfo::OWNED_BY_UPSTREAM;
-}
-
-void MediaFilter::postEOS() {
- mCallback->onEos(ERROR_END_OF_STREAM);
-
- ALOGV("Sent kWhatEOS.");
-}
-
-void MediaFilter::requestFillEmptyInput() {
- if (mPortEOS[kPortIndexInput]) {
- return;
- }
-
- for (size_t i = 0; i < mBuffers[kPortIndexInput].size(); ++i) {
- BufferInfo *info = &mBuffers[kPortIndexInput].editItemAt(i);
-
- if (info->mStatus == BufferInfo::OWNED_BY_US) {
- postFillThisBuffer(info);
- }
- }
-}
-
-void MediaFilter::processBuffers() {
- if (mAvailableInputBuffers.empty() || mAvailableOutputBuffers.empty()) {
- ALOGV("Skipping process (buffers unavailable)");
- return;
- }
-
- if (mPortEOS[kPortIndexOutput]) {
- // TODO notify caller of queueInput error when it is supported
- // in MediaCodec
- ALOGW("Tried to process a buffer after EOS.");
- return;
- }
-
- BufferInfo *inputInfo = mAvailableInputBuffers[0];
- mAvailableInputBuffers.removeAt(0);
- BufferInfo *outputInfo = mAvailableOutputBuffers[0];
- mAvailableOutputBuffers.removeAt(0);
-
- status_t err;
- err = mFilter->processBuffers(inputInfo->mData, outputInfo->mData);
- if (err != (status_t)OK) {
- outputInfo->mData->meta()->setInt32("err", err);
- }
-
- int64_t timeUs;
- CHECK(inputInfo->mData->meta()->findInt64("timeUs", &timeUs));
- outputInfo->mData->meta()->setInt64("timeUs", timeUs);
- outputInfo->mOutputFlags = 0;
- int32_t eos = 0;
- if (inputInfo->mData->meta()->findInt32("eos", &eos) && eos != 0) {
- outputInfo->mOutputFlags |= BUFFER_FLAG_END_OF_STREAM;
- mPortEOS[kPortIndexOutput] = true;
- outputInfo->mData->meta()->setInt32("eos", eos);
- postEOS();
- ALOGV("Output stream saw EOS.");
- }
-
- ALOGV("Processed input buffer %u [%zu], output buffer %u [%zu]",
- inputInfo->mBufferID, inputInfo->mData->size(),
- outputInfo->mBufferID, outputInfo->mData->size());
-
- if (mGraphicBufferListener != NULL) {
- delete inputInfo;
- } else {
- postFillThisBuffer(inputInfo);
- }
- postDrainThisBuffer(outputInfo);
-
- // prevent any corner case where buffers could get stuck in queue
- signalProcessBuffers();
-}
-
-void MediaFilter::onAllocateComponent(const sp<AMessage> &msg) {
- CHECK_EQ(mState, UNINITIALIZED);
-
- CHECK(msg->findString("componentName", &mComponentName));
- const char* name = mComponentName.c_str();
- if (!strcasecmp(name, "android.filter.zerofilter")) {
- mFilter = new ZeroFilter;
- } else if (!strcasecmp(name, "android.filter.saturation")) {
- mFilter = new SaturationFilter;
- } else if (!strcasecmp(name, "android.filter.intrinsicblur")) {
- mFilter = new IntrinsicBlurFilter;
- } else if (!strcasecmp(name, "android.filter.RenderScript")) {
- mFilter = new RSFilter;
- } else {
- ALOGE("Unrecognized filter name: %s", name);
- signalError(NAME_NOT_FOUND);
- return;
- }
-
- mCallback->onComponentAllocated(mComponentName.c_str());
- mState = INITIALIZED;
- ALOGV("Handled kWhatAllocateComponent.");
-}
-
-void MediaFilter::onConfigureComponent(const sp<AMessage> &msg) {
- // TODO: generalize to allow audio filters as well as video
-
- CHECK_EQ(mState, INITIALIZED);
-
- // get params - at least mime, width & height
- AString mime;
- CHECK(msg->findString("mime", &mime));
- if (strcasecmp(mime.c_str(), MEDIA_MIMETYPE_VIDEO_RAW)) {
- ALOGE("Bad mime: %s", mime.c_str());
- signalError(BAD_VALUE);
- return;
- }
-
- CHECK(msg->findInt32("width", &mWidth));
- CHECK(msg->findInt32("height", &mHeight));
- if (!msg->findInt32("stride", &mStride)) {
- mStride = mWidth;
- }
- if (!msg->findInt32("slice-height", &mSliceHeight)) {
- mSliceHeight = mHeight;
- }
-
- mMaxInputSize = mWidth * mHeight * 4; // room for ARGB8888
- int32_t maxInputSize;
- if (msg->findInt32("max-input-size", &maxInputSize)
- && (size_t)maxInputSize > mMaxInputSize) {
- mMaxInputSize = maxInputSize;
- }
-
- if (!msg->findInt32("color-format", &mColorFormatIn)) {
- // default to OMX_COLOR_Format32bitARGB8888
- mColorFormatIn = OMX_COLOR_Format32bitARGB8888;
- msg->setInt32("color-format", mColorFormatIn);
- }
- mColorFormatOut = mColorFormatIn;
-
- mMaxOutputSize = mWidth * mHeight * 4; // room for ARGB8888
-
- AString cacheDir;
- if (!msg->findString("cacheDir", &cacheDir)) {
- ALOGE("Failed to find cache directory in config message.");
- signalError(NAME_NOT_FOUND);
- return;
- }
-
- status_t err;
- err = mFilter->configure(msg);
- if (err != (status_t)OK) {
- ALOGE("Failed to configure filter component, err %d", err);
- signalError(err);
- return;
- }
-
- mInputFormat = new AMessage();
- mInputFormat->setString("mime", mime.c_str());
- mInputFormat->setInt32("stride", mStride);
- mInputFormat->setInt32("slice-height", mSliceHeight);
- mInputFormat->setInt32("color-format", mColorFormatIn);
- mInputFormat->setRect("crop", 0, 0, mStride, mSliceHeight);
- mInputFormat->setInt32("width", mWidth);
- mInputFormat->setInt32("height", mHeight);
-
- mOutputFormat = new AMessage();
- mOutputFormat->setString("mime", mime.c_str());
- mOutputFormat->setInt32("stride", mStride);
- mOutputFormat->setInt32("slice-height", mSliceHeight);
- mOutputFormat->setInt32("color-format", mColorFormatOut);
- mOutputFormat->setRect("crop", 0, 0, mStride, mSliceHeight);
- mOutputFormat->setInt32("width", mWidth);
- mOutputFormat->setInt32("height", mHeight);
- mOutputFormat->setInt32("using-sw-renderer", 1);
-
- mCallback->onComponentConfigured(mInputFormat, mOutputFormat);
- mState = CONFIGURED;
- ALOGV("Handled kWhatConfigureComponent.");
-}
-
-void MediaFilter::onStart() {
- CHECK_EQ(mState, CONFIGURED);
-
- allocateBuffersOnPort(kPortIndexInput);
-
- allocateBuffersOnPort(kPortIndexOutput);
-
- mCallback->onStartCompleted();
-
- status_t err = mFilter->start();
- if (err != (status_t)OK) {
- ALOGE("Failed to start filter component, err %d", err);
- signalError(err);
- return;
- }
-
- mPortEOS[kPortIndexInput] = false;
- mPortEOS[kPortIndexOutput] = false;
- mInputEOSResult = OK;
- mState = STARTED;
-
- requestFillEmptyInput();
- ALOGV("Handled kWhatStart.");
-}
-
-void MediaFilter::onInputBufferFilled(const sp<AMessage> &msg) {
- sp<RefBase> obj;
- CHECK(msg->findObject("buffer", &obj));
- sp<MediaCodecBuffer> buffer = static_cast<MediaCodecBuffer *>(obj.get());
- ssize_t index = -1;
- BufferInfo *info = findBuffer(kPortIndexInput, buffer, &index);
-
- if (mState != STARTED) {
- // we're not running, so we'll just keep that buffer...
- info->mStatus = BufferInfo::OWNED_BY_US;
- return;
- }
-
- if (info->mGeneration != mGeneration) {
- ALOGV("Caught a stale input buffer [index %zd]", index);
- // buffer is stale (taken before a flush/shutdown) - repost it
- CHECK_EQ(info->mStatus, BufferInfo::OWNED_BY_US);
- postFillThisBuffer(info);
- return;
- }
-
- CHECK_EQ(info->mStatus, BufferInfo::OWNED_BY_UPSTREAM);
- info->mStatus = BufferInfo::OWNED_BY_US;
-
- int32_t err = OK;
- bool eos = false;
-
- int32_t isCSD;
- if (buffer != NULL && buffer->meta()->findInt32("csd", &isCSD)
- && isCSD != 0) {
- // ignore codec-specific data buffers
- ALOGW("MediaFilter received a codec-specific data buffer");
- postFillThisBuffer(info);
- return;
- }
-
- int32_t tmp;
- if (buffer != NULL && buffer->meta()->findInt32("eos", &tmp) && tmp) {
- eos = true;
- err = ERROR_END_OF_STREAM;
- }
-
- mAvailableInputBuffers.push_back(info);
- processBuffers();
-
- if (eos) {
- mPortEOS[kPortIndexInput] = true;
- mInputEOSResult = err;
- }
-
- ALOGV("Handled kWhatInputBufferFilled. [index %zd]", index);
-}
-
-void MediaFilter::onOutputBufferDrained(const sp<AMessage> &msg) {
- sp<RefBase> obj;
- CHECK(msg->findObject("buffer", &obj));
- sp<MediaCodecBuffer> buffer = static_cast<MediaCodecBuffer *>(obj.get());
- ssize_t index = -1;
- BufferInfo *info = findBuffer(kPortIndexOutput, buffer, &index);
-
- if (mState != STARTED) {
- // we're not running, so we'll just keep that buffer...
- info->mStatus = BufferInfo::OWNED_BY_US;
- return;
- }
-
- if (info->mGeneration != mGeneration) {
- ALOGV("Caught a stale output buffer [index %zd]", index);
- // buffer is stale (taken before a flush/shutdown) - keep it
- CHECK_EQ(info->mStatus, BufferInfo::OWNED_BY_US);
- return;
- }
-
- CHECK_EQ(info->mStatus, BufferInfo::OWNED_BY_UPSTREAM);
- info->mStatus = BufferInfo::OWNED_BY_US;
-
- mAvailableOutputBuffers.push_back(info);
-
- processBuffers();
-
- ALOGV("Handled kWhatOutputBufferDrained. [index %zd]", index);
-}
-
-void MediaFilter::onShutdown(const sp<AMessage> &msg) {
- mGeneration++;
-
- if (mState != UNINITIALIZED) {
- mFilter->reset();
- }
-
- int32_t keepComponentAllocated;
- CHECK(msg->findInt32("keepComponentAllocated", &keepComponentAllocated));
- if (!keepComponentAllocated || mState == UNINITIALIZED) {
- mState = UNINITIALIZED;
- } else {
- mState = INITIALIZED;
- }
-
- if (keepComponentAllocated) {
- mCallback->onStopCompleted();
- } else {
- mCallback->onReleaseCompleted();
- }
-}
-
-void MediaFilter::onFlush() {
- mGeneration++;
-
- mAvailableInputBuffers.clear();
- for (size_t i = 0; i < mBuffers[kPortIndexInput].size(); ++i) {
- BufferInfo *info = &mBuffers[kPortIndexInput].editItemAt(i);
- info->mStatus = BufferInfo::OWNED_BY_US;
- }
- mAvailableOutputBuffers.clear();
- for (size_t i = 0; i < mBuffers[kPortIndexOutput].size(); ++i) {
- BufferInfo *info = &mBuffers[kPortIndexOutput].editItemAt(i);
- info->mStatus = BufferInfo::OWNED_BY_US;
- mAvailableOutputBuffers.push_back(info);
- }
-
- mPortEOS[kPortIndexInput] = false;
- mPortEOS[kPortIndexOutput] = false;
- mInputEOSResult = OK;
-
- mCallback->onFlushCompleted();
- ALOGV("Posted kWhatFlushCompleted");
-
- // MediaCodec returns all input buffers after flush, so in
- // onInputBufferFilled we call postFillThisBuffer on them
-}
-
-void MediaFilter::onSetParameters(const sp<AMessage> &msg) {
- CHECK(mState != STARTED);
-
- status_t err = mFilter->setParameters(msg);
- if (err != (status_t)OK) {
- ALOGE("setParameters returned err %d", err);
- }
-}
-
-void MediaFilter::onCreateInputSurface() {
- CHECK(mState == CONFIGURED);
-
- mGraphicBufferListener = new GraphicBufferListener;
-
- sp<AMessage> notify = new AMessage();
- notify->setTarget(this);
- status_t err = mGraphicBufferListener->init(
- notify, mStride, mSliceHeight, kBufferCountActual);
-
- if (err != OK) {
- ALOGE("Failed to init mGraphicBufferListener: %d", err);
- signalError(err);
- return;
- }
-
- mCallback->onInputSurfaceCreated(
- nullptr, nullptr,
- new BufferProducerWrapper(
- mGraphicBufferListener->getIGraphicBufferProducer()));
-}
-
-void MediaFilter::onInputFrameAvailable() {
- BufferItem item = mGraphicBufferListener->getBufferItem();
- sp<GraphicBuffer> buf = mGraphicBufferListener->getBuffer(item);
-
- // get pointer to graphic buffer
- void* bufPtr;
- buf->lock(GraphicBuffer::USAGE_SW_READ_OFTEN, &bufPtr);
-
- // HACK - there is no OMX_COLOR_FORMATTYPE value for RGBA, so the format
- // conversion is hardcoded until we add this.
- // TODO: check input format and convert only if necessary
- // copy RGBA graphic buffer into temporary ARGB input buffer
- BufferInfo *inputInfo = new BufferInfo;
- inputInfo->mData = new MediaCodecBuffer(
- mInputFormat, new ABuffer(buf->getWidth() * buf->getHeight() * 4));
- ALOGV("Copying surface data into temp buffer.");
- convertRGBAToARGB(
- (uint8_t*)bufPtr, buf->getWidth(), buf->getHeight(),
- buf->getStride(), inputInfo->mData->data());
- inputInfo->mBufferID = item.mSlot;
- inputInfo->mGeneration = mGeneration;
- inputInfo->mOutputFlags = 0;
- inputInfo->mStatus = BufferInfo::OWNED_BY_US;
- inputInfo->mData->meta()->setInt64("timeUs", item.mTimestamp / 1000);
-
- mAvailableInputBuffers.push_back(inputInfo);
-
- mGraphicBufferListener->releaseBuffer(item);
-
- signalProcessBuffers();
-}
-
-void MediaFilter::onSignalEndOfInputStream() {
- // if using input surface, need to send an EOS output buffer
- if (mGraphicBufferListener != NULL) {
- Vector<BufferInfo> *outputBufs = &mBuffers[kPortIndexOutput];
- BufferInfo* eosBuf;
- bool foundBuf = false;
- for (size_t i = 0; i < kBufferCountActual; i++) {
- eosBuf = &outputBufs->editItemAt(i);
- if (eosBuf->mStatus == BufferInfo::OWNED_BY_US) {
- foundBuf = true;
- break;
- }
- }
-
- if (!foundBuf) {
- ALOGE("onSignalEndOfInputStream failed to find an output buffer");
- return;
- }
-
- eosBuf->mOutputFlags = BUFFER_FLAG_END_OF_STREAM;
- eosBuf->mGeneration = mGeneration;
- eosBuf->mData->setRange(0, 0);
- postDrainThisBuffer(eosBuf);
- ALOGV("Posted EOS on output buffer %u", eosBuf->mBufferID);
- }
-
- mPortEOS[kPortIndexOutput] = true;
- mCallback->onSignaledInputEOS(OK);
-
- ALOGV("Output stream saw EOS.");
-}
-
-} // namespace android
diff --git a/media/libstagefright/filters/RSFilter.cpp b/media/libstagefright/filters/RSFilter.cpp
deleted file mode 100644
index 225a375..0000000
--- a/media/libstagefright/filters/RSFilter.cpp
+++ /dev/null
@@ -1,96 +0,0 @@
-/*
- * Copyright (C) 2014 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-//#define LOG_NDEBUG 0
-#define LOG_TAG "RSFilter"
-
-#include <utils/Log.h>
-
-#include <media/MediaCodecBuffer.h>
-#include <media/stagefright/foundation/ADebug.h>
-#include <media/stagefright/foundation/AMessage.h>
-
-#include "RSFilter.h"
-
-namespace android {
-
-RSFilter::RSFilter() {
-
-}
-
-RSFilter::~RSFilter() {
-
-}
-
-status_t RSFilter::configure(const sp<AMessage> &msg) {
- status_t err = SimpleFilter::configure(msg);
- if (err != OK) {
- return err;
- }
-
- if (!msg->findString("cacheDir", &mCacheDir)) {
- ALOGE("Failed to find cache directory in config message.");
- return NAME_NOT_FOUND;
- }
-
- sp<RenderScriptWrapper> wrapper;
- if (!msg->findObject("rs-wrapper", (sp<RefBase>*)&wrapper)) {
- ALOGE("Failed to find RenderScriptWrapper in config message.");
- return NAME_NOT_FOUND;
- }
-
- mRS = wrapper->mContext;
- mCallback = wrapper->mCallback;
-
- return OK;
-}
-
-status_t RSFilter::start() {
- // 32-bit elements for ARGB8888
- RSC::sp<const RSC::Element> e = RSC::Element::U8_4(mRS);
-
- RSC::Type::Builder tb(mRS, e);
- tb.setX(mWidth);
- tb.setY(mHeight);
- RSC::sp<const RSC::Type> t = tb.create();
-
- mAllocIn = RSC::Allocation::createTyped(mRS, t);
- mAllocOut = RSC::Allocation::createTyped(mRS, t);
-
- return OK;
-}
-
-void RSFilter::reset() {
- mCallback.clear();
- mAllocOut.clear();
- mAllocIn.clear();
- mRS.clear();
-}
-
-status_t RSFilter::setParameters(const sp<AMessage> &msg) {
- return mCallback->handleSetParameters(msg);
-}
-
-status_t RSFilter::processBuffers(
- const sp<MediaCodecBuffer> &srcBuffer, const sp<MediaCodecBuffer> &outBuffer) {
- mAllocIn->copy1DRangeFrom(0, mWidth * mHeight, srcBuffer->data());
- mCallback->processBuffers(mAllocIn.get(), mAllocOut.get());
- mAllocOut->copy1DRangeTo(0, mWidth * mHeight, outBuffer->data());
-
- return OK;
-}
-
-} // namespace android
diff --git a/media/libstagefright/filters/SaturationFilter.cpp b/media/libstagefright/filters/SaturationFilter.cpp
deleted file mode 100644
index 0a1df05..0000000
--- a/media/libstagefright/filters/SaturationFilter.cpp
+++ /dev/null
@@ -1,99 +0,0 @@
-/*
- * Copyright (C) 2014 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-//#define LOG_NDEBUG 0
-#define LOG_TAG "SaturationFilter"
-
-#include <utils/Log.h>
-
-#include <media/MediaCodecBuffer.h>
-#include <media/stagefright/foundation/ADebug.h>
-#include <media/stagefright/foundation/AMessage.h>
-
-#include "SaturationFilter.h"
-
-namespace android {
-
-status_t SaturationFilter::configure(const sp<AMessage> &msg) {
- status_t err = SimpleFilter::configure(msg);
- if (err != OK) {
- return err;
- }
-
- if (!msg->findString("cacheDir", &mCacheDir)) {
- ALOGE("Failed to find cache directory in config message.");
- return NAME_NOT_FOUND;
- }
-
- return OK;
-}
-
-status_t SaturationFilter::start() {
- // TODO: use a single RS context object for entire application
- mRS = new RSC::RS();
-
- if (!mRS->init(mCacheDir.c_str())) {
- ALOGE("Failed to initialize RenderScript context.");
- return NO_INIT;
- }
-
- // 32-bit elements for ARGB8888
- RSC::sp<const RSC::Element> e = RSC::Element::U8_4(mRS);
-
- RSC::Type::Builder tb(mRS, e);
- tb.setX(mWidth);
- tb.setY(mHeight);
- RSC::sp<const RSC::Type> t = tb.create();
-
- mAllocIn = RSC::Allocation::createTyped(mRS, t);
- mAllocOut = RSC::Allocation::createTyped(mRS, t);
-
- mScript = new ScriptC_saturationARGB(mRS);
-
- mScript->set_gSaturation(mSaturation);
-
- return OK;
-}
-
-void SaturationFilter::reset() {
- mScript.clear();
- mAllocOut.clear();
- mAllocIn.clear();
- mRS.clear();
-}
-
-status_t SaturationFilter::setParameters(const sp<AMessage> &msg) {
- sp<AMessage> params;
- CHECK(msg->findMessage("params", &params));
-
- float saturation;
- if (params->findFloat("saturation", &saturation)) {
- mSaturation = saturation;
- }
-
- return OK;
-}
-
-status_t SaturationFilter::processBuffers(
- const sp<MediaCodecBuffer> &srcBuffer, const sp<MediaCodecBuffer> &outBuffer) {
- mAllocIn->copy1DRangeFrom(0, mWidth * mHeight, srcBuffer->data());
- mScript->forEach_root(mAllocIn, mAllocOut);
- mAllocOut->copy1DRangeTo(0, mWidth * mHeight, outBuffer->data());
-
- return OK;
-}
-
-} // namespace android
diff --git a/media/libstagefright/filters/SimpleFilter.cpp b/media/libstagefright/filters/SimpleFilter.cpp
deleted file mode 100644
index 6c1ca2c..0000000
--- a/media/libstagefright/filters/SimpleFilter.cpp
+++ /dev/null
@@ -1,39 +0,0 @@
-/*
- * Copyright (C) 2014 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#include <media/stagefright/foundation/ADebug.h>
-#include <media/stagefright/foundation/AMessage.h>
-
-#include "SimpleFilter.h"
-
-namespace android {
-
-status_t SimpleFilter::configure(const sp<AMessage> &msg) {
- CHECK(msg->findInt32("width", &mWidth));
- CHECK(msg->findInt32("height", &mHeight));
- if (!msg->findInt32("stride", &mStride)) {
- mStride = mWidth;
- }
- if (!msg->findInt32("slice-height", &mSliceHeight)) {
- mSliceHeight = mHeight;
- }
- CHECK(msg->findInt32("color-format", &mColorFormatIn));
- mColorFormatOut = mColorFormatIn;
-
- return OK;
-}
-
-} // namespace android
diff --git a/media/libstagefright/filters/ZeroFilter.cpp b/media/libstagefright/filters/ZeroFilter.cpp
deleted file mode 100644
index 74b94b7..0000000
--- a/media/libstagefright/filters/ZeroFilter.cpp
+++ /dev/null
@@ -1,57 +0,0 @@
-/*
- * Copyright (C) 2014 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-//#define LOG_NDEBUG 0
-#define LOG_TAG "ZeroFilter"
-
-#include <media/MediaCodecBuffer.h>
-#include <media/stagefright/foundation/ADebug.h>
-#include <media/stagefright/foundation/AMessage.h>
-
-#include "ZeroFilter.h"
-
-namespace android {
-
-status_t ZeroFilter::setParameters(const sp<AMessage> &msg) {
- sp<AMessage> params;
- CHECK(msg->findMessage("params", &params));
-
- int32_t invert;
- if (params->findInt32("invert", &invert)) {
- mInvertData = (invert != 0);
- }
-
- return OK;
-}
-
-status_t ZeroFilter::processBuffers(
- const sp<MediaCodecBuffer> &srcBuffer, const sp<MediaCodecBuffer> &outBuffer) {
- // assuming identical input & output buffers, since we're a copy filter
- if (mInvertData) {
- uint32_t* src = (uint32_t*)srcBuffer->data();
- uint32_t* dest = (uint32_t*)outBuffer->data();
- for (size_t i = 0; i < srcBuffer->size() / 4; ++i) {
- *(dest++) = *(src++) ^ 0xFFFFFFFF;
- }
- } else {
- memcpy(outBuffer->data(), srcBuffer->data(), srcBuffer->size());
- }
- outBuffer->setRange(0, srcBuffer->size());
-
- return OK;
-}
-
-} // namespace android
diff --git a/media/libstagefright/filters/include/filters/ColorConvert.h b/media/libstagefright/filters/include/filters/ColorConvert.h
deleted file mode 100644
index 13faa02..0000000
--- a/media/libstagefright/filters/include/filters/ColorConvert.h
+++ /dev/null
@@ -1,43 +0,0 @@
-/*
- * Copyright (C) 2014 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#ifndef COLOR_CONVERT_H_
-#define COLOR_CONVERT_H_
-
-#include <inttypes.h>
-
-namespace android {
-
-void YUVToRGB(
- int32_t y, int32_t u, int32_t v,
- int32_t* r, int32_t* g, int32_t* b);
-
-void convertYUV420spToARGB(
- uint8_t *pY, uint8_t *pUV, int32_t width, int32_t height,
- uint8_t *dest);
-
-void convertYUV420spToRGB888(
- uint8_t *pY, uint8_t *pUV, int32_t width, int32_t height,
- uint8_t *dest);
-
-// TODO: remove when RGBA support is added to SoftwareRenderer
-void convertRGBAToARGB(
- uint8_t *src, int32_t width, int32_t height, uint32_t stride,
- uint8_t *dest);
-
-} // namespace android
-
-#endif // COLOR_CONVERT_H_
diff --git a/media/libstagefright/filters/include/filters/GraphicBufferListener.h b/media/libstagefright/filters/include/filters/GraphicBufferListener.h
deleted file mode 100644
index 586bf65..0000000
--- a/media/libstagefright/filters/include/filters/GraphicBufferListener.h
+++ /dev/null
@@ -1,70 +0,0 @@
-/*
- * Copyright (C) 2014 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#ifndef GRAPHIC_BUFFER_LISTENER_H_
-#define GRAPHIC_BUFFER_LISTENER_H_
-
-#include <gui/BufferQueue.h>
-
-namespace android {
-
-struct AMessage;
-
-struct GraphicBufferListener : public BufferQueue::ConsumerListener {
-public:
- GraphicBufferListener() {};
-
- status_t init(
- const sp<AMessage> &notify,
- size_t bufferWidth, size_t bufferHeight, size_t bufferCount);
-
- virtual void onFrameAvailable(const BufferItem& item);
- virtual void onBuffersReleased();
- virtual void onSidebandStreamChanged();
-
- // Returns the handle to the producer side of the BufferQueue. Buffers
- // queued on this will be received by GraphicBufferListener.
- sp<IGraphicBufferProducer> getIGraphicBufferProducer() const {
- return mProducer;
- }
-
- BufferItem getBufferItem();
- sp<GraphicBuffer> getBuffer(BufferItem item);
- status_t releaseBuffer(BufferItem item);
-
- enum {
- kWhatFrameAvailable = 'frav',
- };
-
-private:
- sp<AMessage> mNotify;
- size_t mNumFramesAvailable;
-
- mutable Mutex mMutex;
-
- // Our BufferQueue interfaces. mProducer is passed to the producer through
- // getIGraphicBufferProducer, and mConsumer is used internally to retrieve
- // the buffers queued by the producer.
- sp<IGraphicBufferProducer> mProducer;
- sp<IGraphicBufferConsumer> mConsumer;
-
- // Cache of GraphicBuffers from the buffer queue.
- sp<GraphicBuffer> mBufferSlot[BufferQueue::NUM_BUFFER_SLOTS];
-};
-
-} // namespace android
-
-#endif // GRAPHIC_BUFFER_LISTENER_H
diff --git a/media/libstagefright/filters/include/filters/IntrinsicBlurFilter.h b/media/libstagefright/filters/include/filters/IntrinsicBlurFilter.h
deleted file mode 100644
index a2aabfa..0000000
--- a/media/libstagefright/filters/include/filters/IntrinsicBlurFilter.h
+++ /dev/null
@@ -1,50 +0,0 @@
-/*
- * Copyright (C) 2014 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#ifndef INTRINSIC_BLUR_FILTER_H_
-#define INTRINSIC_BLUR_FILTER_H_
-
-#include "RenderScript.h"
-#include "SimpleFilter.h"
-
-namespace android {
-
-struct IntrinsicBlurFilter : public SimpleFilter {
-public:
- IntrinsicBlurFilter() : mBlurRadius(1.f) {};
-
- virtual status_t configure(const sp<AMessage> &msg);
- virtual status_t start();
- virtual void reset();
- virtual status_t setParameters(const sp<AMessage> &msg);
- virtual status_t processBuffers(
- const sp<MediaCodecBuffer> &srcBuffer, const sp<MediaCodecBuffer> &outBuffer);
-
-protected:
- virtual ~IntrinsicBlurFilter() {};
-
-private:
- AString mCacheDir;
- RSC::sp<RSC::RS> mRS;
- RSC::sp<RSC::Allocation> mAllocIn;
- RSC::sp<RSC::Allocation> mAllocOut;
- RSC::sp<RSC::ScriptIntrinsicBlur> mBlur;
- float mBlurRadius;
-};
-
-} // namespace android
-
-#endif // INTRINSIC_BLUR_FILTER_H_
diff --git a/media/libstagefright/filters/include/filters/RSFilter.h b/media/libstagefright/filters/include/filters/RSFilter.h
deleted file mode 100644
index 3326284..0000000
--- a/media/libstagefright/filters/include/filters/RSFilter.h
+++ /dev/null
@@ -1,53 +0,0 @@
-/*
- * Copyright (C) 2014 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#ifndef RS_FILTER_H_
-#define RS_FILTER_H_
-
-#include <media/stagefright/RenderScriptWrapper.h>
-#include <RenderScript.h>
-
-#include "SimpleFilter.h"
-
-namespace android {
-
-struct AString;
-
-struct RSFilter : public SimpleFilter {
-public:
- RSFilter();
-
- virtual status_t configure(const sp<AMessage> &msg);
- virtual status_t start();
- virtual void reset();
- virtual status_t setParameters(const sp<AMessage> &msg);
- virtual status_t processBuffers(
- const sp<MediaCodecBuffer> &srcBuffer, const sp<MediaCodecBuffer> &outBuffer);
-
-protected:
- virtual ~RSFilter();
-
-private:
- AString mCacheDir;
- sp<RenderScriptWrapper::RSFilterCallback> mCallback;
- RSC::sp<RSC::RS> mRS;
- RSC::sp<RSC::Allocation> mAllocIn;
- RSC::sp<RSC::Allocation> mAllocOut;
-};
-
-} // namespace android
-
-#endif // RS_FILTER_H_
diff --git a/media/libstagefright/filters/include/filters/SaturationFilter.h b/media/libstagefright/filters/include/filters/SaturationFilter.h
deleted file mode 100644
index 317e469..0000000
--- a/media/libstagefright/filters/include/filters/SaturationFilter.h
+++ /dev/null
@@ -1,52 +0,0 @@
-/*
- * Copyright (C) 2014 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#ifndef SATURATION_FILTER_H_
-#define SATURATION_FILTER_H_
-
-#include <RenderScript.h>
-
-#include "ScriptC_saturationARGB.h"
-#include "SimpleFilter.h"
-
-namespace android {
-
-struct SaturationFilter : public SimpleFilter {
-public:
- SaturationFilter() : mSaturation(1.f) {};
-
- virtual status_t configure(const sp<AMessage> &msg);
- virtual status_t start();
- virtual void reset();
- virtual status_t setParameters(const sp<AMessage> &msg);
- virtual status_t processBuffers(
- const sp<MediaCodecBuffer> &srcBuffer, const sp<MediaCodecBuffer> &outBuffer);
-
-protected:
- virtual ~SaturationFilter() {};
-
-private:
- AString mCacheDir;
- RSC::sp<RSC::RS> mRS;
- RSC::sp<RSC::Allocation> mAllocIn;
- RSC::sp<RSC::Allocation> mAllocOut;
- RSC::sp<ScriptC_saturationARGB> mScript;
- float mSaturation;
-};
-
-} // namespace android
-
-#endif // SATURATION_FILTER_H_
diff --git a/media/libstagefright/filters/include/filters/SimpleFilter.h b/media/libstagefright/filters/include/filters/SimpleFilter.h
deleted file mode 100644
index a3c2d76..0000000
--- a/media/libstagefright/filters/include/filters/SimpleFilter.h
+++ /dev/null
@@ -1,52 +0,0 @@
-/*
- * Copyright (C) 2014 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#ifndef SIMPLE_FILTER_H_
-#define SIMPLE_FILTER_H_
-
-#include <stdint.h>
-#include <utils/Errors.h>
-#include <utils/RefBase.h>
-
-namespace android {
-
-struct AMessage;
-class MediaCodecBuffer;
-
-struct SimpleFilter : public RefBase {
-public:
- SimpleFilter() : mWidth(0), mHeight(0), mStride(0), mSliceHeight(0),
- mColorFormatIn(0), mColorFormatOut(0) {};
-
- virtual status_t configure(const sp<AMessage> &msg);
-
- virtual status_t start() = 0;
- virtual void reset() = 0;
- virtual status_t setParameters(const sp<AMessage> &msg) = 0;
- virtual status_t processBuffers(
- const sp<MediaCodecBuffer> &srcBuffer, const sp<MediaCodecBuffer> &outBuffer) = 0;
-
-protected:
- int32_t mWidth, mHeight;
- int32_t mStride, mSliceHeight;
- int32_t mColorFormatIn, mColorFormatOut;
-
- virtual ~SimpleFilter() {};
-};
-
-} // namespace android
-
-#endif // SIMPLE_FILTER_H_
diff --git a/media/libstagefright/filters/include/filters/ZeroFilter.h b/media/libstagefright/filters/include/filters/ZeroFilter.h
deleted file mode 100644
index f941cc8..0000000
--- a/media/libstagefright/filters/include/filters/ZeroFilter.h
+++ /dev/null
@@ -1,43 +0,0 @@
-/*
- * Copyright (C) 2014 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#ifndef ZERO_FILTER_H_
-#define ZERO_FILTER_H_
-
-#include "SimpleFilter.h"
-
-namespace android {
-
-struct ZeroFilter : public SimpleFilter {
-public:
- ZeroFilter() : mInvertData(false) {};
-
- virtual status_t start() { return OK; };
- virtual void reset() {};
- virtual status_t setParameters(const sp<AMessage> &msg);
- virtual status_t processBuffers(
- const sp<MediaCodecBuffer> &srcBuffer, const sp<MediaCodecBuffer> &outBuffer);
-
-protected:
- virtual ~ZeroFilter() {};
-
-private:
- bool mInvertData;
-};
-
-} // namespace android
-
-#endif // ZERO_FILTER_H_
diff --git a/media/libstagefright/filters/saturation.rscript b/media/libstagefright/filters/saturation.rscript
deleted file mode 100644
index 2c867ac..0000000
--- a/media/libstagefright/filters/saturation.rscript
+++ /dev/null
@@ -1,40 +0,0 @@
-// Sample script for RGB888 support (compare to saturationARGB.rs)
-/*
- * Copyright (C) 2014 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#pragma version(1)
-#pragma rs java_package_name(com.android.rs.cppbasic)
-#pragma rs_fp_relaxed
-
-const static float3 gMonoMult = {0.299f, 0.587f, 0.114f};
-
-// global variables (parameters accessible to application code)
-float gSaturation = 1.0f;
-
-void root(const uchar3 *v_in, uchar3 *v_out) {
- // scale 0-255 uchar to 0-1.0 float
- float3 in = {v_in->r * 0.003921569f, v_in->g * 0.003921569f,
- v_in->b * 0.003921569f};
-
- // apply saturation filter
- float3 result = dot(in, gMonoMult);
- result = mix(result, in, gSaturation);
-
- // convert to uchar, copied from rsPackColorTo8888
- v_out->x = (uchar)clamp((result.r * 255.f + 0.5f), 0.f, 255.f);
- v_out->y = (uchar)clamp((result.g * 255.f + 0.5f), 0.f, 255.f);
- v_out->z = (uchar)clamp((result.b * 255.f + 0.5f), 0.f, 255.f);
-}
diff --git a/media/libstagefright/filters/saturationARGB.rscript b/media/libstagefright/filters/saturationARGB.rscript
deleted file mode 100644
index 1de9dd8..0000000
--- a/media/libstagefright/filters/saturationARGB.rscript
+++ /dev/null
@@ -1,40 +0,0 @@
-/*
- * Copyright (C) 2014 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#pragma version(1)
-#pragma rs java_package_name(com.android.rs.cppbasic)
-#pragma rs_fp_relaxed
-
-const static float3 gMonoMult = {0.299f, 0.587f, 0.114f};
-
-// global variables (parameters accessible to application code)
-float gSaturation = 1.0f;
-
-void root(const uchar4 *v_in, uchar4 *v_out) {
- v_out->x = v_in->x; // don't modify A
-
- // get RGB, scale 0-255 uchar to 0-1.0 float
- float3 rgb = {v_in->y * 0.003921569f, v_in->z * 0.003921569f,
- v_in->w * 0.003921569f};
-
- // apply saturation filter
- float3 result = dot(rgb, gMonoMult);
- result = mix(result, rgb, gSaturation);
-
- v_out->y = (uchar)clamp((result.r * 255.f + 0.5f), 0.f, 255.f);
- v_out->z = (uchar)clamp((result.g * 255.f + 0.5f), 0.f, 255.f);
- v_out->w = (uchar)clamp((result.b * 255.f + 0.5f), 0.f, 255.f);
-}
diff --git a/media/libstagefright/include/media/stagefright/MediaFilter.h b/media/libstagefright/include/media/stagefright/MediaFilter.h
deleted file mode 100644
index 1255e0f..0000000
--- a/media/libstagefright/include/media/stagefright/MediaFilter.h
+++ /dev/null
@@ -1,148 +0,0 @@
-/*
- * Copyright (C) 2014 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#ifndef MEDIA_FILTER_H_
-#define MEDIA_FILTER_H_
-
-#include <media/stagefright/CodecBase.h>
-
-namespace android {
-
-struct GraphicBufferListener;
-struct SimpleFilter;
-
-struct MediaFilter : public CodecBase {
- MediaFilter();
-
- virtual std::shared_ptr<BufferChannelBase> getBufferChannel() override;
- virtual void initiateAllocateComponent(const sp<AMessage> &msg);
- virtual void initiateConfigureComponent(const sp<AMessage> &msg);
- virtual void initiateCreateInputSurface();
- virtual void initiateSetInputSurface(const sp<PersistentSurface> &surface);
-
- virtual void initiateStart();
- virtual void initiateShutdown(bool keepComponentAllocated = false);
-
- virtual void signalFlush();
- virtual void signalResume();
-
- virtual void signalRequestIDRFrame();
- virtual void signalSetParameters(const sp<AMessage> &msg);
- virtual void signalEndOfInputStream();
-
- virtual void onMessageReceived(const sp<AMessage> &msg);
-
-protected:
- virtual ~MediaFilter();
-
-private:
- struct BufferInfo {
- enum Status {
- OWNED_BY_US,
- OWNED_BY_UPSTREAM,
- };
-
- uint32_t mBufferID;
- int32_t mGeneration;
- int32_t mOutputFlags;
- Status mStatus;
-
- sp<MediaCodecBuffer> mData;
- };
-
- class BufferChannel;
-
- enum State {
- UNINITIALIZED,
- INITIALIZED,
- CONFIGURED,
- STARTED,
- };
-
- enum {
- kWhatInputBufferFilled = 'inpF',
- kWhatOutputBufferDrained = 'outD',
- kWhatShutdown = 'shut',
- kWhatFlush = 'flus',
- kWhatResume = 'resm',
- kWhatAllocateComponent = 'allo',
- kWhatConfigureComponent = 'conf',
- kWhatCreateInputSurface = 'cisf',
- kWhatSignalEndOfInputStream = 'eois',
- kWhatStart = 'star',
- kWhatSetParameters = 'setP',
- kWhatProcessBuffers = 'proc',
- };
-
- enum {
- kPortIndexInput = 0,
- kPortIndexOutput = 1
- };
-
- // member variables
- AString mComponentName;
- State mState;
- status_t mInputEOSResult;
- int32_t mWidth, mHeight;
- int32_t mStride, mSliceHeight;
- int32_t mColorFormatIn, mColorFormatOut;
- size_t mMaxInputSize, mMaxOutputSize;
- int32_t mGeneration;
- sp<AMessage> mInputFormat;
- sp<AMessage> mOutputFormat;
-
- Vector<BufferInfo> mBuffers[2];
- Vector<BufferInfo*> mAvailableInputBuffers;
- Vector<BufferInfo*> mAvailableOutputBuffers;
- bool mPortEOS[2];
-
- sp<SimpleFilter> mFilter;
- sp<GraphicBufferListener> mGraphicBufferListener;
-
- std::shared_ptr<BufferChannel> mBufferChannel;
-
- // helper functions
- void signalProcessBuffers();
- void signalError(status_t error);
-
- status_t allocateBuffersOnPort(OMX_U32 portIndex);
- BufferInfo *findBuffer(
- uint32_t portIndex, const sp<MediaCodecBuffer> &buffer,
- ssize_t *index = NULL);
- void postFillThisBuffer(BufferInfo *info);
- void postDrainThisBuffer(BufferInfo *info);
- void postEOS();
- void requestFillEmptyInput();
- void processBuffers();
-
- void onAllocateComponent(const sp<AMessage> &msg);
- void onConfigureComponent(const sp<AMessage> &msg);
- void onStart();
- void onInputBufferFilled(const sp<AMessage> &msg);
- void onOutputBufferDrained(const sp<AMessage> &msg);
- void onShutdown(const sp<AMessage> &msg);
- void onFlush();
- void onSetParameters(const sp<AMessage> &msg);
- void onCreateInputSurface();
- void onInputFrameAvailable();
- void onSignalEndOfInputStream();
-
- DISALLOW_EVIL_CONSTRUCTORS(MediaFilter);
-};
-
-} // namespace android
-
-#endif // MEDIA_FILTER_H_
diff --git a/media/libstagefright/include/media/stagefright/RenderScriptWrapper.h b/media/libstagefright/include/media/stagefright/RenderScriptWrapper.h
deleted file mode 100644
index b42649e..0000000
--- a/media/libstagefright/include/media/stagefright/RenderScriptWrapper.h
+++ /dev/null
@@ -1,42 +0,0 @@
-/*
- * Copyright (C) 2014 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#ifndef RENDERSCRIPT_WRAPPER_H_
-#define RENDERSCRIPT_WRAPPER_H_
-
-#include <RenderScript.h>
-
-namespace android {
-
-struct RenderScriptWrapper : public RefBase {
-public:
- struct RSFilterCallback : public RefBase {
- public:
- // called by RSFilter to process each input buffer
- virtual status_t processBuffers(
- RSC::Allocation* inBuffer,
- RSC::Allocation* outBuffer) = 0;
-
- virtual status_t handleSetParameters(const sp<AMessage> &msg) = 0;
- };
-
- sp<RSFilterCallback> mCallback;
- RSC::sp<RSC::RS> mContext;
-};
-
-} // namespace android
-
-#endif // RENDERSCRIPT_WRAPPER_H_
diff --git a/media/libstagefright/renderfright/Android.bp b/media/libstagefright/renderfright/Android.bp
index 3c00a1c..3598e8d 100644
--- a/media/libstagefright/renderfright/Android.bp
+++ b/media/libstagefright/renderfright/Android.bp
@@ -32,6 +32,7 @@
"libEGL",
"libGLESv1_CM",
"libGLESv2",
+ "libvulkan",
"liblog",
"libnativewindow",
"libprocessgroup",
diff --git a/media/module/extractors/aac/AACExtractor.cpp b/media/module/extractors/aac/AACExtractor.cpp
index 2fc4584..a44fb61 100644
--- a/media/module/extractors/aac/AACExtractor.cpp
+++ b/media/module/extractors/aac/AACExtractor.cpp
@@ -310,9 +310,9 @@
return AMEDIA_ERROR_END_OF_STREAM;
}
- MediaBufferHelper *buffer;
+ MediaBufferHelper *buffer = nullptr;
status_t err = mBufferGroup->acquire_buffer(&buffer);
- if (err != OK) {
+ if (err != OK || buffer == nullptr) {
return AMEDIA_ERROR_UNKNOWN;
}
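
The AAC change above, and the matching hunks in the AMR, FLAC, MIDI, MP3, MP4, Ogg and WAV extractors that follow, all apply the same defensive pattern: initialize the MediaBufferHelper pointer to nullptr before acquire_buffer() and treat a null buffer as a failure even when the call reports OK. A minimal standalone sketch of that pattern follows; Buffer, BufferGroup, readSample and the error value are illustrative stand-ins, not the AOSP types.

    #include <cstdio>

    using status_t = int;
    constexpr status_t OK = 0;
    constexpr status_t AMEDIA_ERROR_UNKNOWN = -10000;   // placeholder value for the sketch

    struct Buffer { /* payload omitted */ };

    struct BufferGroup {
        // Models the failure mode the patch guards against: OK status, null buffer.
        status_t acquire_buffer(Buffer** out) { *out = nullptr; return OK; }
    };

    status_t readSample(BufferGroup& group) {
        Buffer* buffer = nullptr;                      // start from a known value
        status_t err = group.acquire_buffer(&buffer);
        if (err != OK || buffer == nullptr) {          // reject both failure modes
            return AMEDIA_ERROR_UNKNOWN;
        }
        // ... fill the buffer and hand it to the caller ...
        return OK;
    }

    int main() {
        BufferGroup group;
        std::printf("readSample -> %d\n", readSample(group));
        return 0;
    }
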
diff --git a/media/module/extractors/amr/AMRExtractor.cpp b/media/module/extractors/amr/AMRExtractor.cpp
index e26ff0a..b0f69ce 100644
--- a/media/module/extractors/amr/AMRExtractor.cpp
+++ b/media/module/extractors/amr/AMRExtractor.cpp
@@ -341,9 +341,9 @@
return AMEDIA_ERROR_MALFORMED;
}
- MediaBufferHelper *buffer;
+ MediaBufferHelper *buffer = nullptr;
status_t err = mBufferGroup->acquire_buffer(&buffer);
- if (err != OK) {
+ if (err != OK || buffer == nullptr) {
return AMEDIA_ERROR_UNKNOWN;
}
diff --git a/media/module/extractors/flac/FLACExtractor.cpp b/media/module/extractors/flac/FLACExtractor.cpp
index ec7cb24..2434e41 100644
--- a/media/module/extractors/flac/FLACExtractor.cpp
+++ b/media/module/extractors/flac/FLACExtractor.cpp
@@ -614,9 +614,9 @@
}
// acquire a media buffer
CHECK(mGroup != NULL);
- MediaBufferHelper *buffer;
+ MediaBufferHelper *buffer = nullptr;
status_t err = mGroup->acquire_buffer(&buffer);
- if (err != OK) {
+ if (err != OK || buffer == nullptr) {
return NULL;
}
const size_t bufferSize = blocksize * getChannels() * getOutputSampleSize();
diff --git a/media/module/extractors/midi/MidiExtractor.cpp b/media/module/extractors/midi/MidiExtractor.cpp
index d0efb2f..167cc40 100644
--- a/media/module/extractors/midi/MidiExtractor.cpp
+++ b/media/module/extractors/midi/MidiExtractor.cpp
@@ -240,9 +240,9 @@
if ((state == EAS_STATE_STOPPED) || (state == EAS_STATE_ERROR)) {
return NULL;
}
- MediaBufferHelper *buffer;
+ MediaBufferHelper *buffer = nullptr;
status_t err = mGroup->acquire_buffer(&buffer);
- if (err != OK) {
+ if (err != OK || buffer == nullptr) {
ALOGE("readBuffer: no buffer");
return NULL;
}
diff --git a/media/module/extractors/mkv/MatroskaExtractor.cpp b/media/module/extractors/mkv/MatroskaExtractor.cpp
index 443e26c..2b72387 100644
--- a/media/module/extractors/mkv/MatroskaExtractor.cpp
+++ b/media/module/extractors/mkv/MatroskaExtractor.cpp
@@ -790,6 +790,7 @@
int64_t timeUs = mBlockIter.blockTimeUs();
for (int i = 0; i < block->GetFrameCount(); ++i) {
+ status_t err;
MatroskaExtractor::TrackInfo *trackInfo = &mExtractor->mTracks.editItemAt(mTrackIndex);
const mkvparser::Block::Frame &frame = block->GetFrame(i);
size_t len = frame.len;
@@ -798,8 +799,13 @@
}
len += trackInfo->mHeaderLen;
- MediaBufferHelper *mbuf;
- mBufferGroup->acquire_buffer(&mbuf, false /* nonblocking */, len /* requested size */);
+ MediaBufferHelper *mbuf = nullptr;
+ err = mBufferGroup->acquire_buffer(&mbuf, false /* nonblocking */,
+ len /* requested size */);
+ if (err != OK || mbuf == nullptr) {
+ ALOGE("readBlock: no buffer");
+ return AMEDIA_ERROR_UNKNOWN;
+ }
mbuf->set_range(0, len);
uint8_t *data = static_cast<uint8_t *>(mbuf->data());
if (trackInfo->mHeader) {
@@ -832,7 +838,7 @@
}
}
- status_t err = frame.Read(mExtractor->mReader, data + trackInfo->mHeaderLen);
+ err = frame.Read(mExtractor->mReader, data + trackInfo->mHeaderLen);
if (err == OK
&& mExtractor->mIsWebm
&& trackInfo->mEncrypted) {
diff --git a/media/module/extractors/mp3/MP3Extractor.cpp b/media/module/extractors/mp3/MP3Extractor.cpp
index 248a39c..328b790 100644
--- a/media/module/extractors/mp3/MP3Extractor.cpp
+++ b/media/module/extractors/mp3/MP3Extractor.cpp
@@ -521,9 +521,9 @@
mSamplesRead = 0;
}
- MediaBufferHelper *buffer;
+ MediaBufferHelper *buffer = nullptr;
status_t err = mBufferGroup->acquire_buffer(&buffer);
- if (err != OK) {
+ if (err != OK || buffer == nullptr) {
return AMEDIA_ERROR_UNKNOWN;
}
diff --git a/media/module/extractors/mp4/MPEG4Extractor.cpp b/media/module/extractors/mp4/MPEG4Extractor.cpp
index 3a5a869..1d88785 100644
--- a/media/module/extractors/mp4/MPEG4Extractor.cpp
+++ b/media/module/extractors/mp4/MPEG4Extractor.cpp
@@ -6337,7 +6337,7 @@
err = mBufferGroup->acquire_buffer(&mBuffer);
- if (err != OK) {
+ if (err != OK || mBuffer == nullptr) {
CHECK(mBuffer == NULL);
return AMEDIA_ERROR_UNKNOWN;
}
diff --git a/media/module/extractors/mpeg2/MPEG2PSExtractor.cpp b/media/module/extractors/mpeg2/MPEG2PSExtractor.cpp
index afd28ef..44c8937 100644
--- a/media/module/extractors/mpeg2/MPEG2PSExtractor.cpp
+++ b/media/module/extractors/mpeg2/MPEG2PSExtractor.cpp
@@ -699,11 +699,26 @@
}
}
- MediaBufferBase *mbuf;
- mSource->read(&mbuf, (MediaTrack::ReadOptions*) options);
+ MediaBufferBase *mbuf = nullptr;
+ status_t err_read = mSource->read(&mbuf, (MediaTrack::ReadOptions*) options);
+ if (mbuf == nullptr) {
+ ALOGE("Track::read: null buffer read from source");
+ return AMEDIA_ERROR_UNKNOWN;
+ }
+ if (err_read != OK) {
+ ALOGE("Track::read: no buffer read from source");
+ mbuf->release();
+ return AMEDIA_ERROR_UNKNOWN;
+ }
+
size_t length = mbuf->range_length();
- MediaBufferHelper *outbuf;
- mBufferGroup->acquire_buffer(&outbuf, false, length);
+ MediaBufferHelper *outbuf = nullptr;
+ status_t err = mBufferGroup->acquire_buffer(&outbuf, false, length);
+ if (err != OK || outbuf == nullptr) {
+ ALOGE("Track::read: no buffer");
+ mbuf->release();
+ return AMEDIA_ERROR_UNKNOWN;
+ }
memcpy(outbuf->data(), mbuf->data(), length);
outbuf->set_range(0, length);
*buffer = outbuf;
diff --git a/media/module/extractors/mpeg2/MPEG2TSExtractor.cpp b/media/module/extractors/mpeg2/MPEG2TSExtractor.cpp
index 9a3cd92..736b817 100644
--- a/media/module/extractors/mpeg2/MPEG2TSExtractor.cpp
+++ b/media/module/extractors/mpeg2/MPEG2TSExtractor.cpp
@@ -182,11 +182,26 @@
return AMEDIA_ERROR_END_OF_STREAM;
}
- MediaBufferBase *mbuf;
- mImpl->read(&mbuf, (MediaTrack::ReadOptions*) options);
+ MediaBufferBase *mbuf = nullptr;
+ status_t err_read = mImpl->read(&mbuf, (MediaTrack::ReadOptions*) options);
+ if (mbuf == nullptr) {
+ ALOGE("Track::read: null buffer read from source");
+ return AMEDIA_ERROR_UNKNOWN;
+ }
+ if (err_read != OK) {
+ ALOGE("Track::read: no buffer read from source");
+ mbuf->release();
+ return AMEDIA_ERROR_UNKNOWN;
+ }
+
size_t length = mbuf->range_length();
- MediaBufferHelper *outbuf;
- mBufferGroup->acquire_buffer(&outbuf, false, length);
+ MediaBufferHelper *outbuf = nullptr;
+ status_t err = mBufferGroup->acquire_buffer(&outbuf, false, length);
+ if (err != OK || outbuf == nullptr) {
+ ALOGE("read: no buffer");
+ mbuf->release();
+ return AMEDIA_ERROR_UNKNOWN;
+ }
memcpy(outbuf->data(), mbuf->data(), length);
outbuf->set_range(0, length);
*out = outbuf;
diff --git a/media/module/extractors/ogg/OggExtractor.cpp b/media/module/extractors/ogg/OggExtractor.cpp
index eb2246d..1c6f516 100644
--- a/media/module/extractors/ogg/OggExtractor.cpp
+++ b/media/module/extractors/ogg/OggExtractor.cpp
@@ -790,7 +790,8 @@
}
MediaBufferHelper *tmp;
if (mBufferGroup) {
- mBufferGroup->acquire_buffer(&tmp, false, fullSize);
+ // Ignore the return code here; instead, check tmp below.
+ (void) mBufferGroup->acquire_buffer(&tmp, false, fullSize);
ALOGV("acquired buffer %p from group", tmp);
} else {
tmp = new StandAloneMediaBuffer(fullSize);
@@ -924,13 +925,16 @@
status_t MyOggExtractor::init() {
AMediaFormat_setString(mMeta, AMEDIAFORMAT_KEY_MIME, mMimeType);
- media_status_t err;
- MediaBufferHelper *packet;
for (size_t i = 0; i < mNumHeaders; ++i) {
+ media_status_t err;
+ MediaBufferHelper *packet = nullptr;
// ignore timestamp for configuration packets
if ((err = _readNextPacket(&packet, /* calcVorbisTimestamp = */ false)) != AMEDIA_OK) {
return err;
}
+ if (packet == nullptr) {
+ return AMEDIA_ERROR_UNKNOWN;
+ }
ALOGV("read packet of size %zu\n", packet->range_length());
err = verifyHeader(packet, /* type = */ i * 2 + 1);
packet->release();
diff --git a/media/module/extractors/wav/WAVExtractor.cpp b/media/module/extractors/wav/WAVExtractor.cpp
index 9e94587..9c3bac6 100644
--- a/media/module/extractors/wav/WAVExtractor.cpp
+++ b/media/module/extractors/wav/WAVExtractor.cpp
@@ -459,11 +459,15 @@
mCurrentPos = pos + mOffset;
}
- MediaBufferHelper *buffer;
+ MediaBufferHelper *buffer = nullptr;
media_status_t err = mBufferGroup->acquire_buffer(&buffer);
if (err != OK) {
return err;
}
+ if (buffer == nullptr) {
+ ALOGE("acquire_buffer OK, but no buffer");
+ return AMEDIA_ERROR_UNKNOWN;
+ }
// maxBytesToRead may be reduced so that in-place data conversion will fit in buffer size.
const size_t bufferSize = std::min(buffer->size(), kMaxFrameSize);
diff --git a/media/module/libmediatranscoding/tests/TranscodingSessionController_tests.cpp b/media/module/libmediatranscoding/tests/TranscodingSessionController_tests.cpp
index ef9c4f8..fdd327f 100644
--- a/media/module/libmediatranscoding/tests/TranscodingSessionController_tests.cpp
+++ b/media/module/libmediatranscoding/tests/TranscodingSessionController_tests.cpp
@@ -337,6 +337,8 @@
// Should have created new transcoder.
EXPECT_EQ(mTranscoder->getGeneration(), generation);
EXPECT_EQ(mTranscoder.use_count(), 2);
+ // b/240537336: Allow extra time to finish onError call
+ sleep(1);
}
void testPacerHelper(int numSubmits, int sessionDurationMs, int expectedSuccess) {
diff --git a/media/ndk/libmediandk.map.txt b/media/ndk/libmediandk.map.txt
index c8faced..c0eea63 100644
--- a/media/ndk/libmediandk.map.txt
+++ b/media/ndk/libmediandk.map.txt
@@ -181,11 +181,11 @@
AMediaCodecCryptoInfo_setPattern; # introduced=24
AMediaCodec_configure;
AMediaCodec_createCodecByName;
- AMediaCodec_createCodecByNameForClient; # apex # introduced=31
+ AMediaCodec_createCodecByNameForClient; # systemapi # introduced=31
AMediaCodec_createDecoderByType;
- AMediaCodec_createDecoderByTypeForClient; # apex # introduced=31
+ AMediaCodec_createDecoderByTypeForClient; # systemapi # introduced=31
AMediaCodec_createEncoderByType;
- AMediaCodec_createEncoderByTypeForClient; # apex # introduced=31
+ AMediaCodec_createEncoderByTypeForClient; # systemapi # introduced=31
AMediaCodec_delete;
AMediaCodec_dequeueInputBuffer;
AMediaCodec_dequeueOutputBuffer;
diff --git a/media/utils/TimeCheck.cpp b/media/utils/TimeCheck.cpp
index 6823f4f..65b2c52 100644
--- a/media/utils/TimeCheck.cpp
+++ b/media/utils/TimeCheck.cpp
@@ -283,7 +283,7 @@
}
// Automatically create a TimeCheck class for a class and method.
-// This is used for Audio HIDL support.
+// This is used for Audio HAL support.
mediautils::TimeCheck makeTimeCheckStatsForClassMethod(
std::string_view className, std::string_view methodName) {
std::shared_ptr<MethodStatistics<std::string>> statistics =
diff --git a/media/utils/include/mediautils/TimeCheck.h b/media/utils/include/mediautils/TimeCheck.h
index bdb5337..0823669 100644
--- a/media/utils/include/mediautils/TimeCheck.h
+++ b/media/utils/include/mediautils/TimeCheck.h
@@ -148,4 +148,9 @@
TimeCheck makeTimeCheckStatsForClassMethod(
std::string_view className, std::string_view methodName);
+// A handy statement-like macro to put at the beginning of almost every method
+// which calls into the HAL. Note that it requires the class to implement 'getClassName'.
+#define TIME_CHECK() auto timeCheck = \
+ mediautils::makeTimeCheckStatsForClassMethod(getClassName(), __func__)
+
} // namespace android::mediautils
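
The TIME_CHECK() macro added above builds a scoped TimeCheck from the enclosing class and method names, so a single statement at the top of a HAL-facing method times the call and records it in the per-method statistics; it assumes the class provides getClassName(). A hedged usage sketch, compilable only inside the AOSP tree; DeviceHalWrapper and openStream are hypothetical names, not part of the patch.

    #include <mediautils/TimeCheck.h>
    #include <string_view>

    namespace android {

    class DeviceHalWrapper {
      public:
        // Required by TIME_CHECK(): the name used as the statistics/log key.
        std::string_view getClassName() const { return "DeviceHalWrapper"; }

        int openStream() {
            TIME_CHECK();  // scoped timer for this method; flags the call if it exceeds the TimeCheck timeout
            // ... forward the request into the HAL ...
            return 0;
        }
    };

    }  // namespace android
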
diff --git a/services/audioflinger/Threads.cpp b/services/audioflinger/Threads.cpp
index c0e612d..9837574 100644
--- a/services/audioflinger/Threads.cpp
+++ b/services/audioflinger/Threads.cpp
@@ -3535,9 +3535,9 @@
if (result != OK) return result;
#ifdef FLOAT_EFFECT_CHAIN
- buffer = halInBuffer->audioBuffer()->f32;
+ buffer = halInBuffer ? halInBuffer->audioBuffer()->f32 : buffer;
#else
- buffer = halInBuffer->audioBuffer()->s16;
+ buffer = halInBuffer ? halInBuffer->audioBuffer()->s16 : buffer;
#endif
ALOGV("addEffectChain_l() creating new input buffer %p session %d",
buffer, session);
@@ -3566,7 +3566,8 @@
halOutBuffer = halInBuffer;
ALOGV("addEffectChain_l() %p on thread %p for session %d", chain.get(), this, session);
if (!audio_is_global_session(session)) {
- buffer = reinterpret_cast<effect_buffer_t*>(halInBuffer->externalData());
+ buffer = halInBuffer ? reinterpret_cast<effect_buffer_t*>(halInBuffer->externalData())
+ : buffer;
// Only one effect chain can be present in direct output thread and it uses
// the sink buffer as input
if (mType != DIRECT) {
@@ -3578,9 +3579,9 @@
&halInBuffer);
if (result != OK) return result;
#ifdef FLOAT_EFFECT_CHAIN
- buffer = halInBuffer->audioBuffer()->f32;
+ buffer = halInBuffer ? halInBuffer->audioBuffer()->f32 : buffer;
#else
- buffer = halInBuffer->audioBuffer()->s16;
+ buffer = halInBuffer ? halInBuffer->audioBuffer()->s16 : buffer;
#endif
ALOGV("addEffectChain_l() creating new input buffer %p session %d",
buffer, session);
diff --git a/services/audiopolicy/managerdefault/AudioPolicyManager.cpp b/services/audiopolicy/managerdefault/AudioPolicyManager.cpp
index 3518037..8c8278c 100644
--- a/services/audiopolicy/managerdefault/AudioPolicyManager.cpp
+++ b/services/audiopolicy/managerdefault/AudioPolicyManager.cpp
@@ -104,7 +104,7 @@
const char* device_address,
const char* device_name,
audio_format_t encodedFormat) {
- media::AudioPort aidlPort;
+ media::AudioPortFw aidlPort;
if (status_t status = deviceToAudioPort(device, device_address, device_name, &aidlPort);
status == OK) {
return setDeviceConnectionState(state, aidlPort.hal, encodedFormat);
@@ -162,7 +162,7 @@
const char* device_address,
const char* device_name,
audio_format_t encodedFormat) {
- media::AudioPort aidlPort;
+ media::AudioPortFw aidlPort;
if (status_t status = deviceToAudioPort(deviceType, device_address, device_name, &aidlPort);
status == OK) {
return setDeviceConnectionStateInt(state, aidlPort.hal, encodedFormat);
@@ -444,7 +444,7 @@
status_t AudioPolicyManager::deviceToAudioPort(audio_devices_t device, const char* device_address,
const char* device_name,
- media::AudioPort* aidlPort) {
+ media::AudioPortFw* aidlPort) {
DeviceDescriptorBase devDescr(device, device_address);
devDescr.setName(device_name);
return devDescr.writeToParcelable(aidlPort);
diff --git a/services/audiopolicy/managerdefault/AudioPolicyManager.h b/services/audiopolicy/managerdefault/AudioPolicyManager.h
index 87e6974..2159257 100644
--- a/services/audiopolicy/managerdefault/AudioPolicyManager.h
+++ b/services/audiopolicy/managerdefault/AudioPolicyManager.h
@@ -1010,7 +1010,7 @@
// Called by setDeviceConnectionState()
status_t deviceToAudioPort(audio_devices_t deviceType, const char* device_address,
- const char* device_name, media::AudioPort* aidPort);
+ const char* device_name, media::AudioPortFw* aidPort);
bool isMsdPatch(const audio_patch_handle_t &handle) const;
private:
diff --git a/services/audiopolicy/service/AudioPolicyInterfaceImpl.cpp b/services/audiopolicy/service/AudioPolicyInterfaceImpl.cpp
index df49bba..4212c1c 100644
--- a/services/audiopolicy/service/AudioPolicyInterfaceImpl.cpp
+++ b/services/audiopolicy/service/AudioPolicyInterfaceImpl.cpp
@@ -1521,7 +1521,7 @@
Status AudioPolicyService::listAudioPorts(media::AudioPortRole roleAidl,
media::AudioPortType typeAidl, Int* count,
- std::vector<media::AudioPort>* portsAidl,
+ std::vector<media::AudioPortFw>* portsAidl,
int32_t* _aidl_return) {
audio_port_role_t role = VALUE_OR_RETURN_BINDER_STATUS(
aidl2legacy_AudioPortRole_audio_port_role_t(roleAidl));
@@ -1546,14 +1546,14 @@
numPortsReq = std::min(numPortsReq, num_ports);
RETURN_IF_BINDER_ERROR(binderStatusFromStatusT(
convertRange(ports.get(), ports.get() + numPortsReq, std::back_inserter(*portsAidl),
- legacy2aidl_audio_port_v7_AudioPort)));
+ legacy2aidl_audio_port_v7_AudioPortFw)));
count->value = VALUE_OR_RETURN_BINDER_STATUS(convertIntegral<int32_t>(num_ports));
*_aidl_return = VALUE_OR_RETURN_BINDER_STATUS(convertIntegral<int32_t>(generation));
return Status::ok();
}
Status AudioPolicyService::getAudioPort(int portId,
- media::AudioPort* _aidl_return) {
+ media::AudioPortFw* _aidl_return) {
audio_port_v7 port{ .id = portId };
Mutex::Autolock _l(mLock);
if (mAudioPolicyManager == NULL) {
@@ -1561,14 +1561,15 @@
}
AutoCallerClear acc;
RETURN_IF_BINDER_ERROR(binderStatusFromStatusT(mAudioPolicyManager->getAudioPort(&port)));
- *_aidl_return = VALUE_OR_RETURN_BINDER_STATUS(legacy2aidl_audio_port_v7_AudioPort(port));
+ *_aidl_return = VALUE_OR_RETURN_BINDER_STATUS(legacy2aidl_audio_port_v7_AudioPortFw(port));
return Status::ok();
}
-Status AudioPolicyService::createAudioPatch(const media::AudioPatch& patchAidl, int32_t handleAidl,
+Status AudioPolicyService::createAudioPatch(const media::AudioPatchFw& patchAidl,
+ int32_t handleAidl,
int32_t* _aidl_return) {
audio_patch patch = VALUE_OR_RETURN_BINDER_STATUS(
- aidl2legacy_AudioPatch_audio_patch(patchAidl));
+ aidl2legacy_AudioPatchFw_audio_patch(patchAidl));
audio_patch_handle_t handle = VALUE_OR_RETURN_BINDER_STATUS(
aidl2legacy_int32_t_audio_port_handle_t(handleAidl));
RETURN_IF_BINDER_ERROR(binderStatusFromStatusT(AudioValidator::validateAudioPatch(patch)));
@@ -1606,7 +1607,7 @@
}
Status AudioPolicyService::listAudioPatches(Int* count,
- std::vector<media::AudioPatch>* patchesAidl,
+ std::vector<media::AudioPatchFw>* patchesAidl,
int32_t* _aidl_return) {
unsigned int num_patches = VALUE_OR_RETURN_BINDER_STATUS(
convertIntegral<unsigned int>(count->value));
@@ -1627,16 +1628,16 @@
numPatchesReq = std::min(numPatchesReq, num_patches);
RETURN_IF_BINDER_ERROR(binderStatusFromStatusT(
convertRange(patches.get(), patches.get() + numPatchesReq,
- std::back_inserter(*patchesAidl), legacy2aidl_audio_patch_AudioPatch)));
+ std::back_inserter(*patchesAidl), legacy2aidl_audio_patch_AudioPatchFw)));
count->value = VALUE_OR_RETURN_BINDER_STATUS(convertIntegral<int32_t>(num_patches));
*_aidl_return = VALUE_OR_RETURN_BINDER_STATUS(convertIntegral<int32_t>(generation));
return Status::ok();
}
-Status AudioPolicyService::setAudioPortConfig(const media::AudioPortConfig& configAidl)
+Status AudioPolicyService::setAudioPortConfig(const media::AudioPortConfigFw& configAidl)
{
audio_port_config config = VALUE_OR_RETURN_BINDER_STATUS(
- aidl2legacy_AudioPortConfig_audio_port_config(configAidl));
+ aidl2legacy_AudioPortConfigFw_audio_port_config(configAidl));
RETURN_IF_BINDER_ERROR(
binderStatusFromStatusT(AudioValidator::validateAudioPortConfig(config)));
@@ -1806,11 +1807,11 @@
return binderStatusFromStatusT(mAudioPolicyManager->removeUserIdDeviceAffinities(userId));
}
-Status AudioPolicyService::startAudioSource(const media::AudioPortConfig& sourceAidl,
+Status AudioPolicyService::startAudioSource(const media::AudioPortConfigFw& sourceAidl,
const media::AudioAttributesInternal& attributesAidl,
int32_t* _aidl_return) {
audio_port_config source = VALUE_OR_RETURN_BINDER_STATUS(
- aidl2legacy_AudioPortConfig_audio_port_config(sourceAidl));
+ aidl2legacy_AudioPortConfigFw_audio_port_config(sourceAidl));
audio_attributes_t attributes = VALUE_OR_RETURN_BINDER_STATUS(
aidl2legacy_AudioAttributesInternal_audio_attributes_t(attributesAidl));
audio_port_handle_t portId;
diff --git a/services/audiopolicy/service/AudioPolicyService.h b/services/audiopolicy/service/AudioPolicyService.h
index 5c37f99..860bd18 100644
--- a/services/audiopolicy/service/AudioPolicyService.h
+++ b/services/audiopolicy/service/AudioPolicyService.h
@@ -172,16 +172,16 @@
const media::AudioAttributesInternal& attributes,
bool* _aidl_return) override;
binder::Status listAudioPorts(media::AudioPortRole role, media::AudioPortType type,
- Int* count, std::vector<media::AudioPort>* ports,
+ Int* count, std::vector<media::AudioPortFw>* ports,
int32_t* _aidl_return) override;
binder::Status getAudioPort(int portId,
- media::AudioPort* _aidl_return) override;
- binder::Status createAudioPatch(const media::AudioPatch& patch, int32_t handle,
+ media::AudioPortFw* _aidl_return) override;
+ binder::Status createAudioPatch(const media::AudioPatchFw& patch, int32_t handle,
int32_t* _aidl_return) override;
binder::Status releaseAudioPatch(int32_t handle) override;
- binder::Status listAudioPatches(Int* count, std::vector<media::AudioPatch>* patches,
+ binder::Status listAudioPatches(Int* count, std::vector<media::AudioPatchFw>* patches,
int32_t* _aidl_return) override;
- binder::Status setAudioPortConfig(const media::AudioPortConfig& config) override;
+ binder::Status setAudioPortConfig(const media::AudioPortConfigFw& config) override;
binder::Status registerClient(const sp<media::IAudioPolicyServiceClient>& client) override;
binder::Status setAudioPortCallbacksEnabled(bool enabled) override;
binder::Status setAudioVolumeGroupCallbacksEnabled(bool enabled) override;
@@ -197,7 +197,7 @@
int32_t userId,
const std::vector<AudioDevice>& devices) override;
binder::Status removeUserIdDeviceAffinities(int32_t userId) override;
- binder::Status startAudioSource(const media::AudioPortConfig& source,
+ binder::Status startAudioSource(const media::AudioPortConfigFw& source,
const media::AudioAttributesInternal& attributes,
int32_t* _aidl_return) override;
binder::Status stopAudioSource(int32_t portId) override;
diff --git a/services/audiopolicy/tests/audio_health_tests.cpp b/services/audiopolicy/tests/audio_health_tests.cpp
index 10f8dc0..798332c 100644
--- a/services/audiopolicy/tests/audio_health_tests.cpp
+++ b/services/audiopolicy/tests/audio_health_tests.cpp
@@ -111,7 +111,7 @@
continue;
}
std::string address = "11:22:33:44:55:66";
- media::AudioPort aidlPort;
+ media::AudioPortFw aidlPort;
ASSERT_EQ(OK, manager.deviceToAudioPort(device->type(), address.c_str(), "" /*name*/,
&aidlPort));
ASSERT_EQ(AUDIO_POLICY_DEVICE_STATE_UNAVAILABLE,
diff --git a/services/audiopolicy/tests/audiopolicymanager_tests.cpp b/services/audiopolicy/tests/audiopolicymanager_tests.cpp
index c341b32..1c40cfd 100644
--- a/services/audiopolicy/tests/audiopolicymanager_tests.cpp
+++ b/services/audiopolicy/tests/audiopolicymanager_tests.cpp
@@ -1790,7 +1790,7 @@
}
const std::string name = std::get<1>(GetParam());
const std::string address = std::get<2>(GetParam());
- android::media::AudioPort audioPort;
+ android::media::AudioPortFw audioPort;
ASSERT_EQ(NO_ERROR,
mManager->deviceToAudioPort(type, address.c_str(), name.c_str(), &audioPort));
android::media::audio::common::AudioPort& port = audioPort.hal;
diff --git a/services/camera/libcameraservice/api2/HeicCompositeStream.cpp b/services/camera/libcameraservice/api2/HeicCompositeStream.cpp
index 71965f2..7a93cc7 100644
--- a/services/camera/libcameraservice/api2/HeicCompositeStream.cpp
+++ b/services/camera/libcameraservice/api2/HeicCompositeStream.cpp
@@ -1599,7 +1599,7 @@
return OK;
}
-void HeicCompositeStream::initCopyRowFunction(int32_t width)
+void HeicCompositeStream::initCopyRowFunction([[maybe_unused]] int32_t width)
{
using namespace libyuv;
diff --git a/services/mediacodec/Android.bp b/services/mediacodec/Android.bp
index 3222950..a2f17c2 100644
--- a/services/mediacodec/Android.bp
+++ b/services/mediacodec/Android.bp
@@ -147,6 +147,9 @@
arm64: {
src: "seccomp_policy/mediacodec-arm64.policy",
},
+ riscv64: {
+ enabled: false,
+ },
x86: {
src: "seccomp_policy/mediacodec-x86.policy",
},
diff --git a/services/mediametrics/Android.bp b/services/mediametrics/Android.bp
index 03647bc..c90488f 100644
--- a/services/mediametrics/Android.bp
+++ b/services/mediametrics/Android.bp
@@ -170,7 +170,7 @@
"libmemunreachable",
"libprotobuf-cpp-lite",
"libstagefright_foundation",
- "libstatslog",
+ "libstats_media_metrics",
"libstatspull",
"libstatssocket",
"libutils",
@@ -178,6 +178,7 @@
],
export_shared_lib_headers: [
+ "libstats_media_metrics",
"libstatspull",
"libstatssocket",
],
@@ -201,3 +202,33 @@
"libaudioutils_headers",
],
}
+
+cc_library {
+ name: "libstats_media_metrics",
+ generated_sources: ["stats_media_metrics.cpp"],
+ generated_headers: ["stats_media_metrics.h"],
+ export_generated_headers: ["stats_media_metrics.h"],
+ shared_libs: [
+ "libcutils",
+ "libstatspull",
+ "libstatssocket",
+ ],
+}
+
+genrule {
+ name: "stats_media_metrics.h",
+ tools: ["stats-log-api-gen"],
+ cmd: "$(location stats-log-api-gen) --header $(genDir)/stats_media_metrics.h --module media_metrics --namespace android,stats,media_metrics",
+ out: [
+ "stats_media_metrics.h",
+ ],
+}
+
+genrule {
+ name: "stats_media_metrics.cpp",
+ tools: ["stats-log-api-gen"],
+ cmd: "$(location stats-log-api-gen) --cpp $(genDir)/stats_media_metrics.cpp --module media_metrics --namespace android,stats,media_metrics --importHeader stats_media_metrics.h",
+ out: [
+ "stats_media_metrics.cpp",
+ ],
+}
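
The two genrules above run stats-log-api-gen to produce a module-scoped logging library, which is why the source changes below swap <statslog.h> and the android::util symbols for <stats_media_metrics.h> and android::stats::media_metrics. A rough, illustrative sketch of the shape of the generated header; the real contents come from the tool and the media_metrics atom definitions, not from this sketch.

    // stats_media_metrics.h (illustrative outline only)
    namespace android {
    namespace stats {
    namespace media_metrics {

    // One constant per atom, e.g.:
    //   const int32_t MEDIA_DRM_ACTIVITY_INFO = ...;
    //   const int32_t AUDIO_POWER_USAGE_DATA_REPORTED = ...;

    // One stats_write() overload per atom, taking the atom id followed by its
    // fields and returning a statsd result code:
    //   int stats_write(int32_t atom_id, ...per-atom fields...);

    }  // namespace media_metrics
    }  // namespace stats
    }  // namespace android
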
diff --git a/services/mediametrics/AudioAnalytics.cpp b/services/mediametrics/AudioAnalytics.cpp
index b03e418..119bb6c 100644
--- a/services/mediametrics/AudioAnalytics.cpp
+++ b/services/mediametrics/AudioAnalytics.cpp
@@ -24,7 +24,7 @@
#include <aaudio/AAudio.h> // error codes
#include <audio_utils/clock.h> // clock conversions
#include <cutils/properties.h>
-#include <statslog.h> // statsd
+#include <stats_media_metrics.h> // statsd
#include <system/audio.h>
#include "AudioTypes.h" // string to int conversions
@@ -292,7 +292,7 @@
int result = 0;
#ifdef STATSD_ENABLE
- result = android::util::stats_write(args...);
+ result = stats::media_metrics::stats_write(args...);
#endif
return result;
}
@@ -308,7 +308,7 @@
std::stringstream ss;
#ifdef STATSD_ENABLE
- result = android::util::stats_write(args...);
+ result = stats::media_metrics::stats_write(args...);
ss << "result:" << result;
#endif
ss << " { ";
@@ -607,7 +607,7 @@
const int atom_status = types::lookup<types::STATUS, int32_t>(statusString);
// currently we only send create status events.
- const int32_t event = android::util::
+ const int32_t event = stats::media_metrics::
MEDIAMETRICS_AUDIO_RECORD_STATUS_REPORTED__EVENT__AUDIO_RECORD_EVENT_CREATE;
// The following fields should all be present in a create event.
@@ -647,7 +647,7 @@
__func__, AMEDIAMETRICS_KEY_PREFIX_AUDIO_RECORD, AMEDIAMETRICS_PROP_SAMPLERATE);
const auto [ result, str ] = sendToStatsd(AudioRecordStatusFields,
- CONDITION(android::util::MEDIAMETRICS_AUDIORECORDSTATUS_REPORTED)
+ CONDITION(stats::media_metrics::MEDIAMETRICS_AUDIORECORDSTATUS_REPORTED)
, atom_status
, message.c_str()
, subCode
@@ -661,7 +661,7 @@
, sampleRate
);
ALOGV("%s: statsd %s", __func__, str.c_str());
- mStatsdLog->log(android::util::MEDIAMETRICS_AUDIORECORDSTATUS_REPORTED, str);
+ mStatsdLog->log(stats::media_metrics::MEDIAMETRICS_AUDIORECORDSTATUS_REPORTED, str);
return true;
}
return false;
@@ -679,7 +679,7 @@
const int atom_status = types::lookup<types::STATUS, int32_t>(statusString);
// currently we only send create status events.
- const int32_t event = android::util::
+ const int32_t event = stats::media_metrics::
MEDIAMETRICS_AUDIO_TRACK_STATUS_REPORTED__EVENT__AUDIO_TRACK_EVENT_CREATE;
// The following fields should all be present in a create event.
@@ -734,7 +734,7 @@
__func__,
AMEDIAMETRICS_KEY_PREFIX_AUDIO_TRACK, AMEDIAMETRICS_PROP_PLAYBACK_PITCH);
const auto [ result, str ] = sendToStatsd(AudioTrackStatusFields,
- CONDITION(android::util::MEDIAMETRICS_AUDIOTRACKSTATUS_REPORTED)
+ CONDITION(stats::media_metrics::MEDIAMETRICS_AUDIOTRACKSTATUS_REPORTED)
, atom_status
, message.c_str()
, subCode
@@ -751,7 +751,7 @@
, (float)pitch
);
ALOGV("%s: statsd %s", __func__, str.c_str());
- mStatsdLog->log(android::util::MEDIAMETRICS_AUDIOTRACKSTATUS_REPORTED, str);
+ mStatsdLog->log(stats::media_metrics::MEDIAMETRICS_AUDIOTRACKSTATUS_REPORTED, str);
return true;
}
return false;
@@ -860,7 +860,7 @@
if (clientCalled // only log if client app called AudioRecord.
&& mAudioAnalytics.mDeliverStatistics) {
const auto [ result, str ] = sendToStatsd(AudioRecordDeviceUsageFields,
- CONDITION(android::util::MEDIAMETRICS_AUDIORECORDDEVICEUSAGE_REPORTED)
+ CONDITION(stats::media_metrics::MEDIAMETRICS_AUDIORECORDDEVICEUSAGE_REPORTED)
, ENUM_EXTRACT(inputDeviceStatsd)
, inputDeviceNames.c_str()
, deviceTimeNs
@@ -878,7 +878,7 @@
);
ALOGV("%s: statsd %s", __func__, str.c_str());
mAudioAnalytics.mStatsdLog->log(
- android::util::MEDIAMETRICS_AUDIORECORDDEVICEUSAGE_REPORTED, str);
+ stats::media_metrics::MEDIAMETRICS_AUDIORECORDDEVICEUSAGE_REPORTED, str);
}
} break;
case THREAD: {
@@ -930,7 +930,7 @@
<< ")";
if (mAudioAnalytics.mDeliverStatistics) {
const auto [ result, str ] = sendToStatsd(AudioThreadDeviceUsageFields,
- CONDITION(android::util::MEDIAMETRICS_AUDIOTHREADDEVICEUSAGE_REPORTED)
+ CONDITION(stats::media_metrics::MEDIAMETRICS_AUDIOTHREADDEVICEUSAGE_REPORTED)
, ENUM_EXTRACT(deviceStatsd)
, deviceNames.c_str()
, deviceTimeNs
@@ -944,7 +944,7 @@
);
ALOGV("%s: statsd %s", __func__, str.c_str());
mAudioAnalytics.mStatsdLog->log(
- android::util::MEDIAMETRICS_AUDIOTHREADDEVICEUSAGE_REPORTED, str);
+ stats::media_metrics::MEDIAMETRICS_AUDIOTHREADDEVICEUSAGE_REPORTED, str);
}
} break;
case TRACK: {
@@ -1050,7 +1050,7 @@
if (clientCalled // only log if client app called AudioTracks
&& mAudioAnalytics.mDeliverStatistics) {
const auto [ result, str ] = sendToStatsd(AudioTrackDeviceUsageFields,
- CONDITION(android::util::MEDIAMETRICS_AUDIOTRACKDEVICEUSAGE_REPORTED)
+ CONDITION(stats::media_metrics::MEDIAMETRICS_AUDIOTRACKDEVICEUSAGE_REPORTED)
, ENUM_EXTRACT(outputDeviceStatsd)
, outputDeviceNames.c_str()
, deviceTimeNs
@@ -1074,7 +1074,7 @@
);
ALOGV("%s: statsd %s", __func__, str.c_str());
mAudioAnalytics.mStatsdLog->log(
- android::util::MEDIAMETRICS_AUDIOTRACKDEVICEUSAGE_REPORTED, str);
+ stats::media_metrics::MEDIAMETRICS_AUDIOTRACKDEVICEUSAGE_REPORTED, str);
}
} break;
}
@@ -1136,7 +1136,7 @@
const long_enum_type_t inputDeviceBits{};
const auto [ result, str ] = sendToStatsd(AudioDeviceConnectionFields,
- CONDITION(android::util::MEDIAMETRICS_AUDIODEVICECONNECTION_REPORTED)
+ CONDITION(stats::media_metrics::MEDIAMETRICS_AUDIODEVICECONNECTION_REPORTED)
, ENUM_EXTRACT(inputDeviceBits)
, ENUM_EXTRACT(outputDeviceBits)
, mA2dpDeviceName.c_str()
@@ -1146,7 +1146,7 @@
);
ALOGV("%s: statsd %s", __func__, str.c_str());
mAudioAnalytics.mStatsdLog->log(
- android::util::MEDIAMETRICS_AUDIODEVICECONNECTION_REPORTED, str);
+ stats::media_metrics::MEDIAMETRICS_AUDIODEVICECONNECTION_REPORTED, str);
}
}
}
@@ -1190,7 +1190,7 @@
<< " deviceName:" << mA2dpDeviceName;
if (mAudioAnalytics.mDeliverStatistics) {
const auto [ result, str ] = sendToStatsd(AudioDeviceConnectionFields,
- CONDITION(android::util::MEDIAMETRICS_AUDIODEVICECONNECTION_REPORTED)
+ CONDITION(stats::media_metrics::MEDIAMETRICS_AUDIODEVICECONNECTION_REPORTED)
, ENUM_EXTRACT(inputDeviceBits)
, ENUM_EXTRACT(outputDeviceBits)
, mA2dpDeviceName.c_str()
@@ -1200,7 +1200,7 @@
);
ALOGV("%s: statsd %s", __func__, str.c_str());
mAudioAnalytics.mStatsdLog->log(
- android::util::MEDIAMETRICS_AUDIODEVICECONNECTION_REPORTED, str);
+ stats::media_metrics::MEDIAMETRICS_AUDIODEVICECONNECTION_REPORTED, str);
}
return;
}
@@ -1217,7 +1217,7 @@
<< " deviceName:" << mA2dpDeviceName;
if (mAudioAnalytics.mDeliverStatistics) {
const auto [ result, str ] = sendToStatsd(AudioDeviceConnectionFields,
- CONDITION(android::util::MEDIAMETRICS_AUDIODEVICECONNECTION_REPORTED)
+ CONDITION(stats::media_metrics::MEDIAMETRICS_AUDIODEVICECONNECTION_REPORTED)
, ENUM_EXTRACT(inputDeviceBits)
, ENUM_EXTRACT(outputDeviceBits)
, mA2dpDeviceName.c_str()
@@ -1227,7 +1227,7 @@
);
ALOGV("%s: statsd %s", __func__, str.c_str());
mAudioAnalytics.mStatsdLog->log(
- android::util::MEDIAMETRICS_AUDIODEVICECONNECTION_REPORTED, str);
+ stats::media_metrics::MEDIAMETRICS_AUDIODEVICECONNECTION_REPORTED, str);
}
}
@@ -1355,10 +1355,10 @@
<< "(" << sharingModeRequestedStr << ")";
if (mAudioAnalytics.mDeliverStatistics) {
- android::util::BytesField bf_serialized(
+ const stats::media_metrics::BytesField bf_serialized(
serializedDeviceTypes.c_str(), serializedDeviceTypes.size());
const auto result = sendToStatsd(
- CONDITION(android::util::MEDIAMETRICS_AAUDIOSTREAM_REPORTED)
+ CONDITION(stats::media_metrics::MEDIAMETRICS_AAUDIOSTREAM_REPORTED)
, path
, direction
, framesPerBurst
@@ -1381,7 +1381,7 @@
std::stringstream ss;
ss << "result:" << result;
const auto fieldsStr = printFields(AAudioStreamFields,
- CONDITION(android::util::MEDIAMETRICS_AAUDIOSTREAM_REPORTED)
+ CONDITION(stats::media_metrics::MEDIAMETRICS_AAUDIOSTREAM_REPORTED)
, path
, direction
, framesPerBurst
@@ -1404,7 +1404,7 @@
ss << " " << fieldsStr;
std::string str = ss.str();
ALOGV("%s: statsd %s", __func__, str.c_str());
- mAudioAnalytics.mStatsdLog->log(android::util::MEDIAMETRICS_AAUDIOSTREAM_REPORTED, str);
+ mAudioAnalytics.mStatsdLog->log(stats::media_metrics::MEDIAMETRICS_AAUDIOSTREAM_REPORTED, str);
}
}
@@ -1544,12 +1544,12 @@
// Classifies the setting event for statsd (use generated statsd enums.proto constants).
static int32_t classifySettingEvent(bool isSetAlready, bool withinBoot) {
if (isSetAlready) {
- return util::MEDIAMETRICS_SPATIALIZER_DEVICE_ENABLED_REPORTED__EVENT__SPATIALIZER_SETTING_EVENT_NORMAL;
+ return stats::media_metrics::MEDIAMETRICS_SPATIALIZER_DEVICE_ENABLED_REPORTED__EVENT__SPATIALIZER_SETTING_EVENT_NORMAL;
}
if (withinBoot) {
- return util::MEDIAMETRICS_SPATIALIZER_DEVICE_ENABLED_REPORTED__EVENT__SPATIALIZER_SETTING_EVENT_BOOT;
+ return stats::media_metrics::MEDIAMETRICS_SPATIALIZER_DEVICE_ENABLED_REPORTED__EVENT__SPATIALIZER_SETTING_EVENT_BOOT;
}
- return util::MEDIAMETRICS_SPATIALIZER_DEVICE_ENABLED_REPORTED__EVENT__SPATIALIZER_SETTING_EVENT_FIRST;
+ return stats::media_metrics::MEDIAMETRICS_SPATIALIZER_DEVICE_ENABLED_REPORTED__EVENT__SPATIALIZER_SETTING_EVENT_FIRST;
}
void AudioAnalytics::Spatializer::onEvent(
@@ -1598,7 +1598,7 @@
types::channelMaskVectorFromString(channelMasks);
const auto [ result, str ] = sendToStatsd(SpatializerCapabilitiesFields,
- CONDITION(android::util::MEDIAMETRICS_SPATIALIZERCAPABILITIES_REPORTED)
+ CONDITION(stats::media_metrics::MEDIAMETRICS_SPATIALIZERCAPABILITIES_REPORTED)
, headTrackingModesVector
, levelsVector
, modesVector
@@ -1606,7 +1606,7 @@
);
mAudioAnalytics.mStatsdLog->log(
- android::util::MEDIAMETRICS_SPATIALIZERCAPABILITIES_REPORTED, str);
+ stats::media_metrics::MEDIAMETRICS_SPATIALIZERCAPABILITIES_REPORTED, str);
std::lock_guard lg(mLock);
if (mFirstCreateTimeNs == 0) {
@@ -1655,13 +1655,13 @@
deviceState.enabled = enabled;
const bool enabledStatsd = enabled == "true";
const auto [ result, str ] = sendToStatsd(SpatializerDeviceEnabledFields,
- CONDITION(android::util::MEDIAMETRICS_SPATIALIZERDEVICEENABLED_REPORTED)
+ CONDITION(stats::media_metrics::MEDIAMETRICS_SPATIALIZERDEVICEENABLED_REPORTED)
, deviceTypeStatsd
, settingEventStatsd
, enabledStatsd
);
mAudioAnalytics.mStatsdLog->log(
- android::util::MEDIAMETRICS_SPATIALIZERDEVICEENABLED_REPORTED, str);
+ stats::media_metrics::MEDIAMETRICS_SPATIALIZERDEVICEENABLED_REPORTED, str);
}
}
if (!hasHeadTracker.empty()) {
@@ -1671,13 +1671,13 @@
deviceState.hasHeadTracker = hasHeadTracker;
const bool supportedStatsd = hasHeadTracker == "true";
const auto [ result, str ] = sendToStatsd(HeadTrackerDeviceSupportedFields,
- CONDITION(android::util::MEDIAMETRICS_HEADTRACKERDEVICESUPPORTED_REPORTED)
+ CONDITION(stats::media_metrics::MEDIAMETRICS_HEADTRACKERDEVICESUPPORTED_REPORTED)
, deviceTypeStatsd
, settingEventStatsd
, supportedStatsd
);
mAudioAnalytics.mStatsdLog->log(
- android::util::MEDIAMETRICS_HEADTRACKERDEVICESUPPORTED_REPORTED, str);
+ stats::media_metrics::MEDIAMETRICS_HEADTRACKERDEVICESUPPORTED_REPORTED, str);
}
}
if (!headTrackerEnabled.empty()) {
@@ -1687,13 +1687,13 @@
deviceState.headTrackerEnabled = headTrackerEnabled;
const bool enabledStatsd = headTrackerEnabled == "true";
const auto [ result, str ] = sendToStatsd(HeadTrackerDeviceEnabledFields,
- CONDITION(android::util::MEDIAMETRICS_HEADTRACKERDEVICEENABLED_REPORTED)
+ CONDITION(stats::media_metrics::MEDIAMETRICS_HEADTRACKERDEVICEENABLED_REPORTED)
, deviceTypeStatsd
, settingEventStatsd
, enabledStatsd
);
mAudioAnalytics.mStatsdLog->log(
- android::util::MEDIAMETRICS_HEADTRACKERDEVICEENABLED_REPORTED, str);
+ stats::media_metrics::MEDIAMETRICS_HEADTRACKERDEVICEENABLED_REPORTED, str);
}
}
mSimpleLog.log("%s deviceKey: %s item: %s",
diff --git a/services/mediametrics/AudioPowerUsage.cpp b/services/mediametrics/AudioPowerUsage.cpp
index 5787e9e..630a436 100644
--- a/services/mediametrics/AudioPowerUsage.cpp
+++ b/services/mediametrics/AudioPowerUsage.cpp
@@ -26,7 +26,7 @@
#include <string>
#include <audio_utils/clock.h>
#include <cutils/properties.h>
-#include <statslog.h>
+#include <stats_media_metrics.h>
#include <sys/timerfd.h>
#include <system/audio.h>
@@ -164,7 +164,7 @@
const int32_t duration_secs = (int32_t)(duration_ns / NANOS_PER_SECOND);
const int32_t min_volume_duration_secs = (int32_t)(min_volume_duration_ns / NANOS_PER_SECOND);
const int32_t max_volume_duration_secs = (int32_t)(max_volume_duration_ns / NANOS_PER_SECOND);
- const int result = android::util::stats_write(android::util::AUDIO_POWER_USAGE_DATA_REPORTED,
+ const int result = stats::media_metrics::stats_write(stats::media_metrics::AUDIO_POWER_USAGE_DATA_REPORTED,
audio_device,
duration_secs,
(float)volume,
@@ -177,7 +177,7 @@
std::stringstream log;
log << "result:" << result << " {"
<< " mediametrics_audio_power_usage_data_reported:"
- << android::util::AUDIO_POWER_USAGE_DATA_REPORTED
+ << stats::media_metrics::AUDIO_POWER_USAGE_DATA_REPORTED
<< " audio_device:" << audio_device
<< " duration_secs:" << duration_secs
<< " average_volume:" << (float)volume
@@ -187,7 +187,7 @@
<< " max_volume_duration_secs:" << max_volume_duration_secs
<< " max_volume:" << (float)max_volume
<< " }";
- mStatsdLog->log(android::util::AUDIO_POWER_USAGE_DATA_REPORTED, log.str());
+ mStatsdLog->log(stats::media_metrics::AUDIO_POWER_USAGE_DATA_REPORTED, log.str());
}
void AudioPowerUsage::updateMinMaxVolumeAndDuration(
diff --git a/services/mediametrics/AudioTypes.cpp b/services/mediametrics/AudioTypes.cpp
index d2b4aab..353ae12 100644
--- a/services/mediametrics/AudioTypes.cpp
+++ b/services/mediametrics/AudioTypes.cpp
@@ -18,7 +18,7 @@
#include "MediaMetricsConstants.h"
#include "StringUtils.h"
#include <media/TypeConverter.h> // requires libmedia_helper to get the Audio code.
-#include <statslog.h> // statsd
+#include <stats_media_metrics.h> // statsd
namespace android::mediametrics::types {
@@ -184,41 +184,41 @@
const std::unordered_map<std::string, int32_t>& getAudioDeviceInfoTypeMap() {
// DO NOT MODIFY VALUES (OK to add new ones).
static std::unordered_map<std::string, int32_t> map{
- {"unknown", util::MEDIAMETRICS_SPATIALIZER_DEVICE_ENABLED_REPORTED__TYPE__AUDIO_DEVICE_INFO_TYPE_UNKNOWN},
- {"earpiece", util::MEDIAMETRICS_SPATIALIZER_DEVICE_ENABLED_REPORTED__TYPE__AUDIO_DEVICE_INFO_TYPE_BUILTIN_EARPIECE},
- {"speaker", util::MEDIAMETRICS_SPATIALIZER_DEVICE_ENABLED_REPORTED__TYPE__AUDIO_DEVICE_INFO_TYPE_BUILTIN_SPEAKER},
- {"headset", util::MEDIAMETRICS_SPATIALIZER_DEVICE_ENABLED_REPORTED__TYPE__AUDIO_DEVICE_INFO_TYPE_WIRED_HEADSET},
- {"headphone", util::MEDIAMETRICS_SPATIALIZER_DEVICE_ENABLED_REPORTED__TYPE__AUDIO_DEVICE_INFO_TYPE_WIRED_HEADPHONES}, // sic
- {"bt_sco", util::MEDIAMETRICS_SPATIALIZER_DEVICE_ENABLED_REPORTED__TYPE__AUDIO_DEVICE_INFO_TYPE_BLUETOOTH_SCO},
- {"bt_sco_hs", util::MEDIAMETRICS_SPATIALIZER_DEVICE_ENABLED_REPORTED__TYPE__AUDIO_DEVICE_INFO_TYPE_BLUETOOTH_SCO},
- {"bt_sco_carkit", util::MEDIAMETRICS_SPATIALIZER_DEVICE_ENABLED_REPORTED__TYPE__AUDIO_DEVICE_INFO_TYPE_BLUETOOTH_SCO},
- {"bt_a2dp", util::MEDIAMETRICS_SPATIALIZER_DEVICE_ENABLED_REPORTED__TYPE__AUDIO_DEVICE_INFO_TYPE_BLUETOOTH_A2DP},
- {"bt_a2dp_hp", util::MEDIAMETRICS_SPATIALIZER_DEVICE_ENABLED_REPORTED__TYPE__AUDIO_DEVICE_INFO_TYPE_BLUETOOTH_A2DP},
- {"bt_a2dp_spk", util::MEDIAMETRICS_SPATIALIZER_DEVICE_ENABLED_REPORTED__TYPE__AUDIO_DEVICE_INFO_TYPE_BLUETOOTH_A2DP},
- {"aux_digital", util::MEDIAMETRICS_SPATIALIZER_DEVICE_ENABLED_REPORTED__TYPE__AUDIO_DEVICE_INFO_TYPE_HDMI},
- {"hdmi", util::MEDIAMETRICS_SPATIALIZER_DEVICE_ENABLED_REPORTED__TYPE__AUDIO_DEVICE_INFO_TYPE_HDMI},
- {"analog_dock", util::MEDIAMETRICS_SPATIALIZER_DEVICE_ENABLED_REPORTED__TYPE__AUDIO_DEVICE_INFO_TYPE_DOCK},
- {"digital_dock", util::MEDIAMETRICS_SPATIALIZER_DEVICE_ENABLED_REPORTED__TYPE__AUDIO_DEVICE_INFO_TYPE_DOCK},
- {"usb_accessory", util::MEDIAMETRICS_SPATIALIZER_DEVICE_ENABLED_REPORTED__TYPE__AUDIO_DEVICE_INFO_TYPE_USB_ACCESSORY},
- {"usb_device", util::MEDIAMETRICS_SPATIALIZER_DEVICE_ENABLED_REPORTED__TYPE__AUDIO_DEVICE_INFO_TYPE_USB_DEVICE},
- {"usb_headset", util::MEDIAMETRICS_SPATIALIZER_DEVICE_ENABLED_REPORTED__TYPE__AUDIO_DEVICE_INFO_TYPE_USB_HEADSET},
- {"remote_submix", util::MEDIAMETRICS_SPATIALIZER_DEVICE_ENABLED_REPORTED__TYPE__AUDIO_DEVICE_INFO_TYPE_REMOTE_SUBMIX},
- {"telephony_tx", util::MEDIAMETRICS_SPATIALIZER_DEVICE_ENABLED_REPORTED__TYPE__AUDIO_DEVICE_INFO_TYPE_TELEPHONY},
- {"line", util::MEDIAMETRICS_SPATIALIZER_DEVICE_ENABLED_REPORTED__TYPE__AUDIO_DEVICE_INFO_TYPE_LINE_ANALOG},
- {"hdmi_arc", util::MEDIAMETRICS_SPATIALIZER_DEVICE_ENABLED_REPORTED__TYPE__AUDIO_DEVICE_INFO_TYPE_HDMI_ARC},
- {"hdmi_earc", util::MEDIAMETRICS_SPATIALIZER_DEVICE_ENABLED_REPORTED__TYPE__AUDIO_DEVICE_INFO_TYPE_HDMI_EARC},
- {"spdif", util::MEDIAMETRICS_SPATIALIZER_DEVICE_ENABLED_REPORTED__TYPE__AUDIO_DEVICE_INFO_TYPE_LINE_DIGITAL},
- {"fm_transmitter", util::MEDIAMETRICS_SPATIALIZER_DEVICE_ENABLED_REPORTED__TYPE__AUDIO_DEVICE_INFO_TYPE_FM},
- {"aux_line", util::MEDIAMETRICS_SPATIALIZER_DEVICE_ENABLED_REPORTED__TYPE__AUDIO_DEVICE_INFO_TYPE_AUX_LINE},
- {"speaker_safe", util::MEDIAMETRICS_SPATIALIZER_DEVICE_ENABLED_REPORTED__TYPE__AUDIO_DEVICE_INFO_TYPE_BUILTIN_SPEAKER_SAFE},
- {"ip", util::MEDIAMETRICS_SPATIALIZER_DEVICE_ENABLED_REPORTED__TYPE__AUDIO_DEVICE_INFO_TYPE_IP},
- {"bus", util::MEDIAMETRICS_SPATIALIZER_DEVICE_ENABLED_REPORTED__TYPE__AUDIO_DEVICE_INFO_TYPE_BUS},
- {"proxy", util::MEDIAMETRICS_SPATIALIZER_DEVICE_ENABLED_REPORTED__TYPE__AUDIO_DEVICE_INFO_TYPE_UNKNOWN /* AUDIO_DEVICE_INFO_TYPE_PROXY */},
- {"hearing_aid_out", util::MEDIAMETRICS_SPATIALIZER_DEVICE_ENABLED_REPORTED__TYPE__AUDIO_DEVICE_INFO_TYPE_HEARING_AID},
- {"echo_canceller", util::MEDIAMETRICS_SPATIALIZER_DEVICE_ENABLED_REPORTED__TYPE__AUDIO_DEVICE_INFO_TYPE_ECHO_REFERENCE}, // sic
- {"ble_headset", util::MEDIAMETRICS_SPATIALIZER_DEVICE_ENABLED_REPORTED__TYPE__AUDIO_DEVICE_INFO_TYPE_BLE_HEADSET},
- {"ble_speaker", util::MEDIAMETRICS_SPATIALIZER_DEVICE_ENABLED_REPORTED__TYPE__AUDIO_DEVICE_INFO_TYPE_BLE_SPEAKER},
- {"ble_broadcast", util::MEDIAMETRICS_SPATIALIZER_DEVICE_ENABLED_REPORTED__TYPE__AUDIO_DEVICE_INFO_TYPE_BLE_BROADCAST},
+ {"unknown", stats::media_metrics::MEDIAMETRICS_SPATIALIZER_DEVICE_ENABLED_REPORTED__TYPE__AUDIO_DEVICE_INFO_TYPE_UNKNOWN},
+ {"earpiece", stats::media_metrics::MEDIAMETRICS_SPATIALIZER_DEVICE_ENABLED_REPORTED__TYPE__AUDIO_DEVICE_INFO_TYPE_BUILTIN_EARPIECE},
+ {"speaker", stats::media_metrics::MEDIAMETRICS_SPATIALIZER_DEVICE_ENABLED_REPORTED__TYPE__AUDIO_DEVICE_INFO_TYPE_BUILTIN_SPEAKER},
+ {"headset", stats::media_metrics::MEDIAMETRICS_SPATIALIZER_DEVICE_ENABLED_REPORTED__TYPE__AUDIO_DEVICE_INFO_TYPE_WIRED_HEADSET},
+ {"headphone", stats::media_metrics::MEDIAMETRICS_SPATIALIZER_DEVICE_ENABLED_REPORTED__TYPE__AUDIO_DEVICE_INFO_TYPE_WIRED_HEADPHONES}, // sic
+ {"bt_sco", stats::media_metrics::MEDIAMETRICS_SPATIALIZER_DEVICE_ENABLED_REPORTED__TYPE__AUDIO_DEVICE_INFO_TYPE_BLUETOOTH_SCO},
+ {"bt_sco_hs", stats::media_metrics::MEDIAMETRICS_SPATIALIZER_DEVICE_ENABLED_REPORTED__TYPE__AUDIO_DEVICE_INFO_TYPE_BLUETOOTH_SCO},
+ {"bt_sco_carkit", stats::media_metrics::MEDIAMETRICS_SPATIALIZER_DEVICE_ENABLED_REPORTED__TYPE__AUDIO_DEVICE_INFO_TYPE_BLUETOOTH_SCO},
+ {"bt_a2dp", stats::media_metrics::MEDIAMETRICS_SPATIALIZER_DEVICE_ENABLED_REPORTED__TYPE__AUDIO_DEVICE_INFO_TYPE_BLUETOOTH_A2DP},
+ {"bt_a2dp_hp", stats::media_metrics::MEDIAMETRICS_SPATIALIZER_DEVICE_ENABLED_REPORTED__TYPE__AUDIO_DEVICE_INFO_TYPE_BLUETOOTH_A2DP},
+ {"bt_a2dp_spk", stats::media_metrics::MEDIAMETRICS_SPATIALIZER_DEVICE_ENABLED_REPORTED__TYPE__AUDIO_DEVICE_INFO_TYPE_BLUETOOTH_A2DP},
+ {"aux_digital", stats::media_metrics::MEDIAMETRICS_SPATIALIZER_DEVICE_ENABLED_REPORTED__TYPE__AUDIO_DEVICE_INFO_TYPE_HDMI},
+ {"hdmi", stats::media_metrics::MEDIAMETRICS_SPATIALIZER_DEVICE_ENABLED_REPORTED__TYPE__AUDIO_DEVICE_INFO_TYPE_HDMI},
+ {"analog_dock", stats::media_metrics::MEDIAMETRICS_SPATIALIZER_DEVICE_ENABLED_REPORTED__TYPE__AUDIO_DEVICE_INFO_TYPE_DOCK},
+ {"digital_dock", stats::media_metrics::MEDIAMETRICS_SPATIALIZER_DEVICE_ENABLED_REPORTED__TYPE__AUDIO_DEVICE_INFO_TYPE_DOCK},
+ {"usb_accessory", stats::media_metrics::MEDIAMETRICS_SPATIALIZER_DEVICE_ENABLED_REPORTED__TYPE__AUDIO_DEVICE_INFO_TYPE_USB_ACCESSORY},
+ {"usb_device", stats::media_metrics::MEDIAMETRICS_SPATIALIZER_DEVICE_ENABLED_REPORTED__TYPE__AUDIO_DEVICE_INFO_TYPE_USB_DEVICE},
+ {"usb_headset", stats::media_metrics::MEDIAMETRICS_SPATIALIZER_DEVICE_ENABLED_REPORTED__TYPE__AUDIO_DEVICE_INFO_TYPE_USB_HEADSET},
+ {"remote_submix", stats::media_metrics::MEDIAMETRICS_SPATIALIZER_DEVICE_ENABLED_REPORTED__TYPE__AUDIO_DEVICE_INFO_TYPE_REMOTE_SUBMIX},
+ {"telephony_tx", stats::media_metrics::MEDIAMETRICS_SPATIALIZER_DEVICE_ENABLED_REPORTED__TYPE__AUDIO_DEVICE_INFO_TYPE_TELEPHONY},
+ {"line", stats::media_metrics::MEDIAMETRICS_SPATIALIZER_DEVICE_ENABLED_REPORTED__TYPE__AUDIO_DEVICE_INFO_TYPE_LINE_ANALOG},
+ {"hdmi_arc", stats::media_metrics::MEDIAMETRICS_SPATIALIZER_DEVICE_ENABLED_REPORTED__TYPE__AUDIO_DEVICE_INFO_TYPE_HDMI_ARC},
+ {"hdmi_earc", stats::media_metrics::MEDIAMETRICS_SPATIALIZER_DEVICE_ENABLED_REPORTED__TYPE__AUDIO_DEVICE_INFO_TYPE_HDMI_EARC},
+ {"spdif", stats::media_metrics::MEDIAMETRICS_SPATIALIZER_DEVICE_ENABLED_REPORTED__TYPE__AUDIO_DEVICE_INFO_TYPE_LINE_DIGITAL},
+ {"fm_transmitter", stats::media_metrics::MEDIAMETRICS_SPATIALIZER_DEVICE_ENABLED_REPORTED__TYPE__AUDIO_DEVICE_INFO_TYPE_FM},
+ {"aux_line", stats::media_metrics::MEDIAMETRICS_SPATIALIZER_DEVICE_ENABLED_REPORTED__TYPE__AUDIO_DEVICE_INFO_TYPE_AUX_LINE},
+ {"speaker_safe", stats::media_metrics::MEDIAMETRICS_SPATIALIZER_DEVICE_ENABLED_REPORTED__TYPE__AUDIO_DEVICE_INFO_TYPE_BUILTIN_SPEAKER_SAFE},
+ {"ip", stats::media_metrics::MEDIAMETRICS_SPATIALIZER_DEVICE_ENABLED_REPORTED__TYPE__AUDIO_DEVICE_INFO_TYPE_IP},
+ {"bus", stats::media_metrics::MEDIAMETRICS_SPATIALIZER_DEVICE_ENABLED_REPORTED__TYPE__AUDIO_DEVICE_INFO_TYPE_BUS},
+ {"proxy", stats::media_metrics::MEDIAMETRICS_SPATIALIZER_DEVICE_ENABLED_REPORTED__TYPE__AUDIO_DEVICE_INFO_TYPE_UNKNOWN /* AUDIO_DEVICE_INFO_TYPE_PROXY */},
+ {"hearing_aid_out", stats::media_metrics::MEDIAMETRICS_SPATIALIZER_DEVICE_ENABLED_REPORTED__TYPE__AUDIO_DEVICE_INFO_TYPE_HEARING_AID},
+ {"echo_canceller", stats::media_metrics::MEDIAMETRICS_SPATIALIZER_DEVICE_ENABLED_REPORTED__TYPE__AUDIO_DEVICE_INFO_TYPE_ECHO_REFERENCE}, // sic
+ {"ble_headset", stats::media_metrics::MEDIAMETRICS_SPATIALIZER_DEVICE_ENABLED_REPORTED__TYPE__AUDIO_DEVICE_INFO_TYPE_BLE_HEADSET},
+ {"ble_speaker", stats::media_metrics::MEDIAMETRICS_SPATIALIZER_DEVICE_ENABLED_REPORTED__TYPE__AUDIO_DEVICE_INFO_TYPE_BLE_SPEAKER},
+ {"ble_broadcast", stats::media_metrics::MEDIAMETRICS_SPATIALIZER_DEVICE_ENABLED_REPORTED__TYPE__AUDIO_DEVICE_INFO_TYPE_BLE_BROADCAST},
};
return map;
}
@@ -324,23 +324,23 @@
// DO NOT MODIFY VALUES(OK to add new ones).
static std::unordered_map<std::string, int32_t> map {
{"",
- util::MEDIAMETRICS_AUDIO_TRACK_STATUS_REPORTED__STATUS__NO_ERROR},
+ stats::media_metrics::MEDIAMETRICS_AUDIO_TRACK_STATUS_REPORTED__STATUS__NO_ERROR},
{AMEDIAMETRICS_PROP_STATUS_VALUE_OK,
- util::MEDIAMETRICS_AUDIO_TRACK_STATUS_REPORTED__STATUS__NO_ERROR},
+ stats::media_metrics::MEDIAMETRICS_AUDIO_TRACK_STATUS_REPORTED__STATUS__NO_ERROR},
{AMEDIAMETRICS_PROP_STATUS_VALUE_ARGUMENT,
- util::MEDIAMETRICS_AUDIO_TRACK_STATUS_REPORTED__STATUS__ERROR_ARGUMENT},
+ stats::media_metrics::MEDIAMETRICS_AUDIO_TRACK_STATUS_REPORTED__STATUS__ERROR_ARGUMENT},
{AMEDIAMETRICS_PROP_STATUS_VALUE_IO,
- util::MEDIAMETRICS_AUDIO_TRACK_STATUS_REPORTED__STATUS__ERROR_IO},
+ stats::media_metrics::MEDIAMETRICS_AUDIO_TRACK_STATUS_REPORTED__STATUS__ERROR_IO},
{AMEDIAMETRICS_PROP_STATUS_VALUE_MEMORY,
- util::MEDIAMETRICS_AUDIO_TRACK_STATUS_REPORTED__STATUS__ERROR_MEMORY},
+ stats::media_metrics::MEDIAMETRICS_AUDIO_TRACK_STATUS_REPORTED__STATUS__ERROR_MEMORY},
{AMEDIAMETRICS_PROP_STATUS_VALUE_SECURITY,
- util::MEDIAMETRICS_AUDIO_TRACK_STATUS_REPORTED__STATUS__ERROR_SECURITY},
+ stats::media_metrics::MEDIAMETRICS_AUDIO_TRACK_STATUS_REPORTED__STATUS__ERROR_SECURITY},
{AMEDIAMETRICS_PROP_STATUS_VALUE_STATE,
- util::MEDIAMETRICS_AUDIO_TRACK_STATUS_REPORTED__STATUS__ERROR_STATE},
+ stats::media_metrics::MEDIAMETRICS_AUDIO_TRACK_STATUS_REPORTED__STATUS__ERROR_STATE},
{AMEDIAMETRICS_PROP_STATUS_VALUE_TIMEOUT,
- util::MEDIAMETRICS_AUDIO_TRACK_STATUS_REPORTED__STATUS__ERROR_TIMEOUT},
+ stats::media_metrics::MEDIAMETRICS_AUDIO_TRACK_STATUS_REPORTED__STATUS__ERROR_TIMEOUT},
{AMEDIAMETRICS_PROP_STATUS_VALUE_UNKNOWN,
- util::MEDIAMETRICS_AUDIO_TRACK_STATUS_REPORTED__STATUS__ERROR_UNKNOWN},
+ stats::media_metrics::MEDIAMETRICS_AUDIO_TRACK_STATUS_REPORTED__STATUS__ERROR_UNKNOWN},
};
return map;
}
@@ -664,7 +664,7 @@
auto& map = getStatusMap();
auto it = map.find(status);
if (it == map.end()) {
- return util::MEDIAMETRICS_AUDIO_TRACK_STATUS_REPORTED__STATUS__ERROR_UNKNOWN;
+ return stats::media_metrics::MEDIAMETRICS_AUDIO_TRACK_STATUS_REPORTED__STATUS__ERROR_UNKNOWN;
}
return it->second;
}
diff --git a/services/mediametrics/MediaMetricsService.cpp b/services/mediametrics/MediaMetricsService.cpp
index ceb3e6a..adb2217 100644
--- a/services/mediametrics/MediaMetricsService.cpp
+++ b/services/mediametrics/MediaMetricsService.cpp
@@ -33,7 +33,7 @@
#include <mediautils/MemoryLeakTrackUtil.h>
#include <memunreachable/memunreachable.h>
#include <private/android_filesystem_config.h> // UID
-#include <statslog.h>
+#include <stats_media_metrics.h>
#include <set>
@@ -511,6 +511,8 @@
const std::string &key = item->getKey();
if (startsWith(key, "audio.")) return true;
if (startsWith(key, "drm.vendor.")) return true;
+ if (startsWith(key, "mediadrm.")) return true;
+
// the list of allowedKey uses statsd_handlers
// in iface_statsd.cpp as reference
// drmmanager is from a trusted uid, therefore not needed here
@@ -546,7 +548,7 @@
if (mStatsdRegistered.test_and_set()) {
return;
}
- auto tag = android::util::MEDIA_DRM_ACTIVITY_INFO;
+ auto tag = stats::media_metrics::MEDIA_DRM_ACTIVITY_INFO;
auto cb = MediaMetricsService::pullAtomCallback;
AStatsManager_setPullAtomCallback(tag, /* metadata */ nullptr, cb, this);
}
@@ -564,7 +566,7 @@
std::string MediaMetricsService::atomTagToKey(int32_t atomTag)
{
switch (atomTag) {
- case android::util::MEDIA_DRM_ACTIVITY_INFO:
+ case stats::media_metrics::MEDIA_DRM_ACTIVITY_INFO:
return "mediadrm";
}
return {};
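The MediaMetricsService.cpp hunks above register the pulled MEDIA_DRM_ACTIVITY_INFO atom with the generated stats::media_metrics constant and map the tag back to the "mediadrm" key. A minimal sketch of the pull-callback side that this registration hooks up, using the libstatspull callback signature; the dispatch body is illustrative only and the pullItems() helper is hypothetical, not the service's actual method:

    // Sketch only: shape of the pull callback wired up by the registration above.
    // Signature comes from <stats_pull_atom_callback.h>; the body is illustrative.
    #include <stats_pull_atom_callback.h>

    AStatsManager_PullAtomCallbackReturn MediaMetricsService::pullAtomCallback(
            int32_t atomTag, AStatsEventList* data, void* cookie) {
        auto* service = static_cast<MediaMetricsService*>(cookie);
        // atomTagToKey() maps MEDIA_DRM_ACTIVITY_INFO back to the "mediadrm" key,
        // whose queued items are converted into events appended to |data|.
        const std::string key = atomTagToKey(atomTag);
        if (key.empty() || !service->pullItems(key, data)) {  // pullItems(): hypothetical helper
            return AStatsManager_PULL_SKIP;   // nothing to report this pull
        }
        return AStatsManager_PULL_SUCCESS;
    }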
diff --git a/services/mediametrics/fuzzer/Android.bp b/services/mediametrics/fuzzer/Android.bp
index 84d494e..8b33f10 100644
--- a/services/mediametrics/fuzzer/Android.bp
+++ b/services/mediametrics/fuzzer/Android.bp
@@ -51,7 +51,7 @@
"libprotobuf-cpp-lite",
"libstagefright",
"libstagefright_foundation",
- "libstatslog",
+ "libstats_media_metrics",
"libstatspull",
"libstatssocket",
"libutils",
diff --git a/services/mediametrics/iface_statsd.cpp b/services/mediametrics/iface_statsd.cpp
index b98d908..7f4e6e8 100644
--- a/services/mediametrics/iface_statsd.cpp
+++ b/services/mediametrics/iface_statsd.cpp
@@ -37,8 +37,6 @@
#include "MediaMetricsService.h"
#include "iface_statsd.h"
-#include <statslog.h>
-
namespace android {
// set of routines that crack a mediametrics::Item
diff --git a/services/mediametrics/statsd_audiopolicy.cpp b/services/mediametrics/statsd_audiopolicy.cpp
index 3d9376e..9a9bc1d 100644
--- a/services/mediametrics/statsd_audiopolicy.cpp
+++ b/services/mediametrics/statsd_audiopolicy.cpp
@@ -29,7 +29,7 @@
#include <sys/types.h>
#include <unistd.h>
-#include <statslog.h>
+#include <stats_media_metrics.h>
#include "MediaMetricsService.h"
#include "frameworks/proto_logging/stats/message/mediametrics_message.pb.h"
@@ -107,15 +107,16 @@
return false;
}
- android::util::BytesField bf_serialized( serialized.c_str(), serialized.size());
- int result = android::util::stats_write(android::util::MEDIAMETRICS_AUDIOPOLICY_REPORTED,
+ const stats::media_metrics::BytesField bf_serialized( serialized.c_str(), serialized.size());
+ const int result = stats::media_metrics::stats_write(
+ stats::media_metrics::MEDIAMETRICS_AUDIOPOLICY_REPORTED,
timestamp_nanos, package_name.c_str(), package_version_code,
media_apex_version,
bf_serialized);
std::stringstream log;
log << "result:" << result << " {"
<< " mediametrics_audiopolicy_reported:"
- << android::util::MEDIAMETRICS_AUDIOPOLICY_REPORTED
+ << stats::media_metrics::MEDIAMETRICS_AUDIOPOLICY_REPORTED
<< " timestamp_nanos:" << timestamp_nanos
<< " package_name:" << package_name
<< " package_version_code:" << package_version_code
@@ -131,7 +132,7 @@
<< " active_session:" << active_session
<< " active_device:" << active_device
<< " }";
- statsdLog->log(android::util::MEDIAMETRICS_AUDIOPOLICY_REPORTED, log.str());
+ statsdLog->log(stats::media_metrics::MEDIAMETRICS_AUDIOPOLICY_REPORTED, log.str());
return true;
}
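The statsd_audiopolicy.cpp change above is the template for the pushed-atom handlers that follow: <stats_media_metrics.h> replaces <statslog.h>, and the per-atom stats_write() overloads plus BytesField move from android::util to stats::media_metrics. A compact sketch of that call shape, assuming the field types implied by the call in the hunk above (the handlers themselves also log the result and always return true):

    // Sketch of the migrated pushed-atom pattern; field order mirrors the
    // MEDIAMETRICS_AUDIOPOLICY_REPORTED call above, other atoms append more fields.
    #include <stats_media_metrics.h>

    bool reportAudioPolicy(int64_t timestamp_nanos, const std::string& package_name,
                           int64_t package_version_code, int64_t media_apex_version,
                           const std::string& serialized /* mediametrics_message.pb payload */) {
        const stats::media_metrics::BytesField bf_serialized(
                serialized.c_str(), serialized.size());
        const int result = stats::media_metrics::stats_write(
                stats::media_metrics::MEDIAMETRICS_AUDIOPOLICY_REPORTED,
                timestamp_nanos, package_name.c_str(), package_version_code,
                media_apex_version, bf_serialized);
        return result > 0;  // assumed: positive return means the event reached statsd
    }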
diff --git a/services/mediametrics/statsd_audiorecord.cpp b/services/mediametrics/statsd_audiorecord.cpp
index 01adf7f..63c61ec 100644
--- a/services/mediametrics/statsd_audiorecord.cpp
+++ b/services/mediametrics/statsd_audiorecord.cpp
@@ -29,7 +29,7 @@
#include <sys/types.h>
#include <unistd.h>
-#include <statslog.h>
+#include <stats_media_metrics.h>
#include "MediaMetricsService.h"
#include "ValidateId.h"
@@ -147,8 +147,9 @@
(void)item->getString("android.media.audiorecord.logSessionId", &logSessionId);
const auto log_session_id = mediametrics::ValidateId::get()->validateId(logSessionId);
- android::util::BytesField bf_serialized( serialized.c_str(), serialized.size());
- int result = android::util::stats_write(android::util::MEDIAMETRICS_AUDIORECORD_REPORTED,
+ const stats::media_metrics::BytesField bf_serialized( serialized.c_str(), serialized.size());
+ const int result = stats::media_metrics::stats_write(
+ stats::media_metrics::MEDIAMETRICS_AUDIORECORD_REPORTED,
timestamp_nanos, package_name.c_str(), package_version_code,
media_apex_version,
bf_serialized,
@@ -156,7 +157,7 @@
std::stringstream log;
log << "result:" << result << " {"
<< " mediametrics_audiorecord_reported:"
- << android::util::MEDIAMETRICS_AUDIORECORD_REPORTED
+ << stats::media_metrics::MEDIAMETRICS_AUDIORECORD_REPORTED
<< " timestamp_nanos:" << timestamp_nanos
<< " package_name:" << package_name
<< " package_version_code:" << package_version_code
@@ -181,7 +182,7 @@
<< " log_session_id:" << log_session_id
<< " }";
- statsdLog->log(android::util::MEDIAMETRICS_AUDIORECORD_REPORTED, log.str());
+ statsdLog->log(stats::media_metrics::MEDIAMETRICS_AUDIORECORD_REPORTED, log.str());
return true;
}
diff --git a/services/mediametrics/statsd_audiothread.cpp b/services/mediametrics/statsd_audiothread.cpp
index e9b6dd6..3056605 100644
--- a/services/mediametrics/statsd_audiothread.cpp
+++ b/services/mediametrics/statsd_audiothread.cpp
@@ -29,7 +29,7 @@
#include <sys/types.h>
#include <unistd.h>
-#include <statslog.h>
+#include <stats_media_metrics.h>
#include "MediaMetricsService.h"
#include "frameworks/proto_logging/stats/message/mediametrics_message.pb.h"
@@ -188,15 +188,16 @@
return false;
}
- android::util::BytesField bf_serialized( serialized.c_str(), serialized.size());
- int result = android::util::stats_write(android::util::MEDIAMETRICS_AUDIOTHREAD_REPORTED,
+ const stats::media_metrics::BytesField bf_serialized( serialized.c_str(), serialized.size());
+ const int result = stats::media_metrics::stats_write(
+ stats::media_metrics::MEDIAMETRICS_AUDIOTHREAD_REPORTED,
timestamp_nanos, package_name.c_str(), package_version_code,
media_apex_version,
bf_serialized);
std::stringstream log;
log << "result:" << result << " {"
<< " mediametrics_audiothread_reported:"
- << android::util::MEDIAMETRICS_AUDIOTHREAD_REPORTED
+ << stats::media_metrics::MEDIAMETRICS_AUDIOTHREAD_REPORTED
<< " timestamp_nanos:" << timestamp_nanos
<< " package_name:" << package_name
<< " package_version_code:" << package_version_code
@@ -231,7 +232,7 @@
<< " latency_mean_millis:" << latency_mean_millis
<< " latency_stddev_millis:" << latency_stddev_millis
<< " }";
- statsdLog->log(android::util::MEDIAMETRICS_AUDIOTHREAD_REPORTED, log.str());
+ statsdLog->log(stats::media_metrics::MEDIAMETRICS_AUDIOTHREAD_REPORTED, log.str());
return true;
}
diff --git a/services/mediametrics/statsd_audiotrack.cpp b/services/mediametrics/statsd_audiotrack.cpp
index 67514e9..1fc7fb4 100644
--- a/services/mediametrics/statsd_audiotrack.cpp
+++ b/services/mediametrics/statsd_audiotrack.cpp
@@ -29,7 +29,7 @@
#include <sys/types.h>
#include <unistd.h>
-#include <statslog.h>
+#include <stats_media_metrics.h>
#include "MediaMetricsService.h"
#include "ValidateId.h"
@@ -134,8 +134,9 @@
(void)item->getString("android.media.audiotrack.logSessionId", &logSessionId);
const auto log_session_id = mediametrics::ValidateId::get()->validateId(logSessionId);
- android::util::BytesField bf_serialized( serialized.c_str(), serialized.size());
- int result = android::util::stats_write(android::util::MEDIAMETRICS_AUDIOTRACK_REPORTED,
+ const stats::media_metrics::BytesField bf_serialized( serialized.c_str(), serialized.size());
+ const int result = stats::media_metrics::stats_write(
+ stats::media_metrics::MEDIAMETRICS_AUDIOTRACK_REPORTED,
timestamp_nanos, package_name.c_str(), package_version_code,
media_apex_version,
bf_serialized,
@@ -143,7 +144,7 @@
std::stringstream log;
log << "result:" << result << " {"
<< " mediametrics_audiotrack_reported:"
- << android::util::MEDIAMETRICS_AUDIOTRACK_REPORTED
+ << stats::media_metrics::MEDIAMETRICS_AUDIOTRACK_REPORTED
<< " timestamp_nanos:" << timestamp_nanos
<< " package_name:" << package_name
<< " package_version_code:" << package_version_code
@@ -164,7 +165,7 @@
<< " log_session_id:" << log_session_id
<< " }";
- statsdLog->log(android::util::MEDIAMETRICS_AUDIOTRACK_REPORTED, log.str());
+ statsdLog->log(stats::media_metrics::MEDIAMETRICS_AUDIOTRACK_REPORTED, log.str());
return true;
}
diff --git a/services/mediametrics/statsd_codec.cpp b/services/mediametrics/statsd_codec.cpp
index a737ba0..c5957e9 100644
--- a/services/mediametrics/statsd_codec.cpp
+++ b/services/mediametrics/statsd_codec.cpp
@@ -29,7 +29,7 @@
#include <sys/types.h>
#include <unistd.h>
-#include <statslog.h>
+#include <stats_media_metrics.h>
#include <stats_event.h>
#include "cleaner.h"
@@ -46,7 +46,7 @@
if (item == nullptr) return false;
AStatsEvent* event = AStatsEvent_obtain();
- AStatsEvent_setAtomId(event, android::util::MEDIA_CODEC_REPORTED);
+ AStatsEvent_setAtomId(event, stats::media_metrics::MEDIA_CODEC_REPORTED);
const nsecs_t timestamp_nanos = MediaMetricsService::roundTime(item->getTimestamp());
AStatsEvent_writeInt64(event, timestamp_nanos);
@@ -455,8 +455,8 @@
ALOGE("Failed to serialize codec metrics");
return false;
}
- android::util::BytesField bf_serialized( serialized.c_str(), serialized.size());
- int result = android::util::stats_write(android::util::MEDIAMETRICS_CODEC_REPORTED,
+ const stats::media_metrics::BytesField bf_serialized( serialized.c_str(), serialized.size());
+ const int result = stats::media_metrics::stats_write(stats::media_metrics::MEDIAMETRICS_CODEC_REPORTED,
timestamp_nanos, package_name.c_str(), package_version_code,
media_apex_version,
bf_serialized);
@@ -464,7 +464,7 @@
std::stringstream log;
log << "result:" << result << " {"
<< " mediametrics_codec_reported:"
- << android::util::MEDIAMETRICS_CODEC_REPORTED
+ << stats::media_metrics::MEDIAMETRICS_CODEC_REPORTED
<< " timestamp_nanos:" << timestamp_nanos
<< " package_name:" << package_name
<< " package_version_code:" << package_version_code
@@ -525,7 +525,7 @@
<< " original_qp_b_min:" << qpBMinOri
<< " original_qp_b_max:" << qpBMaxOri
<< " }";
- statsdLog->log(android::util::MEDIAMETRICS_CODEC_REPORTED, log.str());
+ statsdLog->log(stats::media_metrics::MEDIAMETRICS_CODEC_REPORTED, log.str());
return true;
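Besides the generated stats_write() path, statsd_codec.cpp also builds a MEDIA_CODEC_REPORTED event field-by-field through the libstatssocket AStatsEvent API (the AStatsEvent_obtain hunk above). The closing calls are not part of the hunks shown; the sketch below assumes the standard <stats_event.h> sequence:

    // Sketch (assumption): event-based push for MEDIA_CODEC_REPORTED.
    #include <stats_event.h>
    #include <stats_media_metrics.h>

    void writeMediaCodecReported(int64_t timestamp_nanos) {
        AStatsEvent* event = AStatsEvent_obtain();
        AStatsEvent_setAtomId(event, stats::media_metrics::MEDIA_CODEC_REPORTED);
        AStatsEvent_writeInt64(event, timestamp_nanos);
        // ... remaining atom fields are written in declaration order ...
        AStatsEvent_write(event);    // push the completed event to statsd
        AStatsEvent_release(event);  // owned by the caller, unlike pulled-atom list entries
    }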
diff --git a/services/mediametrics/statsd_drm.cpp b/services/mediametrics/statsd_drm.cpp
index b7961a1..863fdbe 100644
--- a/services/mediametrics/statsd_drm.cpp
+++ b/services/mediametrics/statsd_drm.cpp
@@ -37,7 +37,7 @@
#include "StringUtils.h"
#include "iface_statsd.h"
-#include <statslog.h>
+#include <stats_media_metrics.h>
#include <array>
#include <string>
@@ -71,8 +71,9 @@
// This field is left here for backward compatibility.
// This field is not used anymore.
const std::string kUnusedField("");
- android::util::BytesField bf_serialized(kUnusedField.c_str(), kUnusedField.size());
- int result = android::util::stats_write(android::util::MEDIAMETRICS_MEDIADRM_REPORTED,
+ const stats::media_metrics::BytesField bf_serialized(kUnusedField.c_str(), kUnusedField.size());
+ const int result = stats::media_metrics::stats_write(
+ stats::media_metrics::MEDIAMETRICS_MEDIADRM_REPORTED,
timestamp_nanos, package_name.c_str(), package_version_code,
media_apex_version,
vendor.c_str(),
@@ -82,7 +83,7 @@
std::stringstream log;
log << "result:" << result << " {"
<< " mediametrics_mediadrm_reported:"
- << android::util::MEDIAMETRICS_MEDIADRM_REPORTED
+ << stats::media_metrics::MEDIAMETRICS_MEDIADRM_REPORTED
<< " timestamp_nanos:" << timestamp_nanos
<< " package_name:" << package_name
<< " package_version_code:" << package_version_code
@@ -92,7 +93,7 @@
<< " description:" << description
// omitting serialized
<< " }";
- statsdLog->log(android::util::MEDIAMETRICS_MEDIADRM_REPORTED, log.str());
+ statsdLog->log(stats::media_metrics::MEDIAMETRICS_MEDIADRM_REPORTED, log.str());
return true;
}
@@ -124,7 +125,8 @@
item->getInt64(("method"s + std::to_string(i)).c_str(), &methodCounts[i]);
}
- const int result = android::util::stats_write(android::util::MEDIAMETRICS_DRMMANAGER_REPORTED,
+ const int result = stats::media_metrics::stats_write(
+ stats::media_metrics::MEDIAMETRICS_DRMMANAGER_REPORTED,
timestamp_nanos, package_name.c_str(), package_version_code,
media_apex_version,
plugin_id.c_str(), description.c_str(),
@@ -138,7 +140,7 @@
std::stringstream log;
log << "result:" << result << " {"
<< " mediametrics_drmmanager_reported:"
- << android::util::MEDIAMETRICS_DRMMANAGER_REPORTED
+ << stats::media_metrics::MEDIAMETRICS_DRMMANAGER_REPORTED
<< " timestamp_nanos:" << timestamp_nanos
<< " package_name:" << package_name
<< " package_version_code:" << package_version_code
@@ -153,7 +155,7 @@
log << " method_" << i << ":" << methodCounts[i];
}
log << " }";
- statsdLog->log(android::util::MEDIAMETRICS_DRMMANAGER_REPORTED, log.str());
+ statsdLog->log(stats::media_metrics::MEDIAMETRICS_DRMMANAGER_REPORTED, log.str());
return true;
}
@@ -209,7 +211,7 @@
// Memory for |event| is internally managed by statsd.
AStatsEvent* event = AStatsEventList_addStatsEvent(out);
- AStatsEvent_setAtomId(event, android::util::MEDIA_DRM_ACTIVITY_INFO);
+ AStatsEvent_setAtomId(event, stats::media_metrics::MEDIA_DRM_ACTIVITY_INFO);
AStatsEvent_writeString(event, item->getPkgName().c_str());
AStatsEvent_writeInt64(event, item->getPkgVersionCode());
AStatsEvent_writeString(event, vendor.c_str());
@@ -221,7 +223,7 @@
std::stringstream log;
log << "pulled:" << " {"
<< " media_drm_activity_info:"
- << android::util::MEDIA_DRM_ACTIVITY_INFO
+ << stats::media_metrics::MEDIA_DRM_ACTIVITY_INFO
<< " package_name:" << item->getPkgName()
<< " package_version_code:" << item->getPkgVersionCode()
<< " vendor:" << vendor
@@ -229,7 +231,7 @@
<< " framework_metrics:" << mediametrics::stringutils::bytesToString(framework_raw, 8)
<< " vendor_metrics:" << mediametrics::stringutils::bytesToString(plugin_raw, 8)
<< " }";
- statsdLog->log(android::util::MEDIA_DRM_ACTIVITY_INFO, log.str());
+ statsdLog->log(stats::media_metrics::MEDIA_DRM_ACTIVITY_INFO, log.str());
return true;
}
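For the pulled path in statsd_drm.cpp, each MEDIA_DRM_ACTIVITY_INFO entry is appended to the AStatsEventList handed in by statsd, which owns the event memory (per the comment in the hunk above), so no release call is needed. A sketch of one appended entry; the byte-array writes are an assumption based on the framework/vendor metrics fields being raw byte strings:

    // Sketch of filling one pulled MEDIA_DRM_ACTIVITY_INFO entry.
    #include <stats_event.h>
    #include <stats_pull_atom_callback.h>
    #include <stats_media_metrics.h>

    void appendDrmActivityInfo(AStatsEventList* out,
                               const std::string& pkgName, int64_t pkgVersionCode,
                               const std::string& vendor, const std::string& description,
                               const std::string& framework_raw, const std::string& plugin_raw) {
        // Event is owned and later freed by statsd; do not call AStatsEvent_release().
        AStatsEvent* event = AStatsEventList_addStatsEvent(out);
        AStatsEvent_setAtomId(event, stats::media_metrics::MEDIA_DRM_ACTIVITY_INFO);
        AStatsEvent_writeString(event, pkgName.c_str());
        AStatsEvent_writeInt64(event, pkgVersionCode);
        AStatsEvent_writeString(event, vendor.c_str());
        AStatsEvent_writeString(event, description.c_str());
        AStatsEvent_writeByteArray(event,
                reinterpret_cast<const uint8_t*>(framework_raw.data()), framework_raw.size());
        AStatsEvent_writeByteArray(event,
                reinterpret_cast<const uint8_t*>(plugin_raw.data()), plugin_raw.size());
    }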
diff --git a/services/mediametrics/statsd_extractor.cpp b/services/mediametrics/statsd_extractor.cpp
index a8bfeaa..9345df6 100644
--- a/services/mediametrics/statsd_extractor.cpp
+++ b/services/mediametrics/statsd_extractor.cpp
@@ -29,7 +29,7 @@
#include <sys/types.h>
#include <unistd.h>
-#include <statslog.h>
+#include <stats_media_metrics.h>
#include "MediaMetricsService.h"
#include "ValidateId.h"
@@ -96,15 +96,16 @@
return false;
}
- android::util::BytesField bf_serialized( serialized.c_str(), serialized.size());
- int result = android::util::stats_write(android::util::MEDIAMETRICS_EXTRACTOR_REPORTED,
+ const stats::media_metrics::BytesField bf_serialized( serialized.c_str(), serialized.size());
+ const int result = stats::media_metrics::stats_write(
+ stats::media_metrics::MEDIAMETRICS_EXTRACTOR_REPORTED,
timestamp_nanos, package_name.c_str(), package_version_code,
media_apex_version,
bf_serialized);
std::stringstream log;
log << "result:" << result << " {"
<< " mediametrics_extractor_reported:"
- << android::util::MEDIAMETRICS_EXTRACTOR_REPORTED
+ << stats::media_metrics::MEDIAMETRICS_EXTRACTOR_REPORTED
<< " timestamp_nanos:" << timestamp_nanos
<< " package_name:" << package_name
<< " package_version_code:" << package_version_code
@@ -116,7 +117,7 @@
<< " entry_point:" << entry_point_string << "(" << entry_point << ")"
<< " log_session_id:" << log_session_id
<< " }";
- statsdLog->log(android::util::MEDIAMETRICS_EXTRACTOR_REPORTED, log.str());
+ statsdLog->log(stats::media_metrics::MEDIAMETRICS_EXTRACTOR_REPORTED, log.str());
return true;
}
diff --git a/services/mediametrics/statsd_mediaparser.cpp b/services/mediametrics/statsd_mediaparser.cpp
index 67ca874b..458bd32 100644
--- a/services/mediametrics/statsd_mediaparser.cpp
+++ b/services/mediametrics/statsd_mediaparser.cpp
@@ -28,7 +28,7 @@
#include <sys/types.h>
#include <unistd.h>
-#include <statslog.h>
+#include <stats_media_metrics.h>
#include "MediaMetricsService.h"
#include "ValidateId.h"
@@ -83,7 +83,8 @@
item->getString("android.media.mediaparser.logSessionId", &logSessionId);
logSessionId = mediametrics::ValidateId::get()->validateId(logSessionId);
- int result = android::util::stats_write(android::util::MEDIAMETRICS_MEDIAPARSER_REPORTED,
+ const int result = stats::media_metrics::stats_write(
+ stats::media_metrics::MEDIAMETRICS_MEDIAPARSER_REPORTED,
timestamp_nanos,
package_name.c_str(),
package_version_code,
@@ -103,7 +104,7 @@
std::stringstream log;
log << "result:" << result << " {"
<< " mediametrics_mediaparser_reported:"
- << android::util::MEDIAMETRICS_MEDIAPARSER_REPORTED
+ << stats::media_metrics::MEDIAMETRICS_MEDIAPARSER_REPORTED
<< " timestamp_nanos:" << timestamp_nanos
<< " package_name:" << package_name
<< " package_version_code:" << package_version_code
@@ -120,7 +121,7 @@
<< " video_height:" << videoHeight
<< " log_session_id:" << logSessionId
<< " }";
- statsdLog->log(android::util::MEDIAMETRICS_MEDIAPARSER_REPORTED, log.str());
+ statsdLog->log(stats::media_metrics::MEDIAMETRICS_MEDIAPARSER_REPORTED, log.str());
return true;
}
diff --git a/services/mediametrics/statsd_nuplayer.cpp b/services/mediametrics/statsd_nuplayer.cpp
index bdee1f2..fd545f4 100644
--- a/services/mediametrics/statsd_nuplayer.cpp
+++ b/services/mediametrics/statsd_nuplayer.cpp
@@ -29,7 +29,7 @@
#include <sys/types.h>
#include <unistd.h>
-#include <statslog.h>
+#include <stats_media_metrics.h>
#include "MediaMetricsService.h"
#include "frameworks/proto_logging/stats/message/mediametrics_message.pb.h"
@@ -153,8 +153,9 @@
return false;
}
- android::util::BytesField bf_serialized( serialized.c_str(), serialized.size());
- int result = android::util::stats_write(android::util::MEDIAMETRICS_NUPLAYER_REPORTED,
+ const stats::media_metrics::BytesField bf_serialized( serialized.c_str(), serialized.size());
+ const int result = stats::media_metrics::stats_write(
+ stats::media_metrics::MEDIAMETRICS_NUPLAYER_REPORTED,
timestamp_nanos, package_name.c_str(), package_version_code,
media_apex_version,
bf_serialized);
@@ -162,7 +163,7 @@
std::stringstream log;
log << "result:" << result << " {"
<< " mediametrics_nuplayer_reported:"
- << android::util::MEDIAMETRICS_NUPLAYER_REPORTED
+ << stats::media_metrics::MEDIAMETRICS_NUPLAYER_REPORTED
<< " timestamp_nanos:" << timestamp_nanos
<< " package_name:" << package_name
<< " package_version_code:" << package_version_code
@@ -193,7 +194,7 @@
// TODO NuPlayer - add log_session_id
// << " log_session_id:" << log_session_id
<< " }";
- statsdLog->log(android::util::MEDIAMETRICS_NUPLAYER_REPORTED, log.str());
+ statsdLog->log(stats::media_metrics::MEDIAMETRICS_NUPLAYER_REPORTED, log.str());
return true;
}
diff --git a/services/mediametrics/statsd_recorder.cpp b/services/mediametrics/statsd_recorder.cpp
index 5f54a68..efa284b 100644
--- a/services/mediametrics/statsd_recorder.cpp
+++ b/services/mediametrics/statsd_recorder.cpp
@@ -29,7 +29,7 @@
#include <sys/types.h>
#include <unistd.h>
-#include <statslog.h>
+#include <stats_media_metrics.h>
#include "MediaMetricsService.h"
#include "ValidateId.h"
@@ -179,15 +179,16 @@
return false;
}
- android::util::BytesField bf_serialized( serialized.c_str(), serialized.size());
- int result = android::util::stats_write(android::util::MEDIAMETRICS_RECORDER_REPORTED,
+ const stats::media_metrics::BytesField bf_serialized( serialized.c_str(), serialized.size());
+ const int result = stats::media_metrics::stats_write(
+ stats::media_metrics::MEDIAMETRICS_RECORDER_REPORTED,
timestamp_nanos, package_name.c_str(), package_version_code,
media_apex_version,
bf_serialized);
std::stringstream log;
log << "result:" << result << " {"
<< " mediametrics_recorder_reported:"
- << android::util::MEDIAMETRICS_RECORDER_REPORTED
+ << stats::media_metrics::MEDIAMETRICS_RECORDER_REPORTED
<< " timestamp_nanos:" << timestamp_nanos
<< " package_name:" << package_name
<< " package_version_code:" << package_version_code
@@ -218,7 +219,7 @@
<< " iframe_interval:" << iframe_interval
<< " log_session_id:" << log_session_id
<< " }";
- statsdLog->log(android::util::MEDIAMETRICS_RECORDER_REPORTED, log.str());
+ statsdLog->log(stats::media_metrics::MEDIAMETRICS_RECORDER_REPORTED, log.str());
return true;
}