Merge "Camera: Lift the 32 pixel alignment Jpeg/R width limitation" into udc-dev
diff --git a/apex/TEST_MAPPING b/apex/TEST_MAPPING
index 4b7c019..bb4f089 100644
--- a/apex/TEST_MAPPING
+++ b/apex/TEST_MAPPING
@@ -7,16 +7,16 @@
"presubmit": [
// The following tests validate codec and drm path.
{
- "name": "GtsMediaTestCases",
+ "name": "WvtsDeviceTestCases",
"options" : [
{
"include-annotation": "android.platform.test.annotations.Presubmit"
},
{
- "include-filter": "com.google.android.media.gts.WidevineGenericOpsTests"
+ "include-filter": "com.google.android.media.wvts.WidevineGenericOpsTests"
},
{
- "include-filter": "com.google.android.media.gts.WidevineH264PlaybackTests"
+ "include-filter": "com.google.android.media.wvts.WidevineH264PlaybackTests"
}
]
}
diff --git a/camera/Camera.cpp b/camera/Camera.cpp
index d1618e4..2244682 100644
--- a/camera/Camera.cpp
+++ b/camera/Camera.cpp
@@ -71,10 +71,11 @@
}
sp<Camera> Camera::connect(int cameraId, const String16& clientPackageName,
- int clientUid, int clientPid, int targetSdkVersion, bool overrideToPortrait)
+ int clientUid, int clientPid, int targetSdkVersion, bool overrideToPortrait,
+ bool forceSlowJpegMode)
{
return CameraBaseT::connect(cameraId, clientPackageName, clientUid,
- clientPid, targetSdkVersion, overrideToPortrait);
+ clientPid, targetSdkVersion, overrideToPortrait, forceSlowJpegMode);
}
status_t Camera::reconnect()
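For context, a hedged caller-side sketch of the widened legacy connect() path; the package name and flag values are illustrative only (they mirror the updated callers in CameraZSLTests and camera_fuzzer later in this change) and are not part of the patch itself:

    // Sketch only; assumes the usual libcamera_client / ICameraService headers.
    #include <camera/Camera.h>
    using namespace android;

    void openLegacyCameraExample() {
        sp<Camera> camera = Camera::connect(
                /*cameraId*/ 0,
                String16("com.example.app"),                  // illustrative package name
                hardware::ICameraService::USE_CALLING_UID,
                hardware::ICameraService::USE_CALLING_PID,
                /*targetSdkVersion*/ __ANDROID_API_FUTURE__,
                /*overrideToPortrait*/ false,
                /*forceSlowJpegMode*/ false);                 // new parameter in this change
    }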
diff --git a/camera/CameraBase.cpp b/camera/CameraBase.cpp
index 0a5bc12..9ae4607 100644
--- a/camera/CameraBase.cpp
+++ b/camera/CameraBase.cpp
@@ -163,7 +163,7 @@
sp<TCam> CameraBase<TCam, TCamTraits>::connect(int cameraId,
const String16& clientPackageName,
int clientUid, int clientPid, int targetSdkVersion,
- bool overrideToPortrait)
+ bool overrideToPortrait, bool forceSlowJpegMode)
{
ALOGV("%s: connect", __FUNCTION__);
sp<TCam> c = new TCam(cameraId);
@@ -173,9 +173,11 @@
binder::Status ret;
if (cs != nullptr) {
TCamConnectService fnConnectService = TCamTraits::fnConnectService;
- ALOGI("Connect camera (legacy API) - overrideToPortrait %d", overrideToPortrait);
+ ALOGI("Connect camera (legacy API) - overrideToPortrait %d, forceSlowJpegMode %d",
+ overrideToPortrait, forceSlowJpegMode);
ret = (cs.get()->*fnConnectService)(cl, cameraId, clientPackageName, clientUid,
- clientPid, targetSdkVersion, overrideToPortrait, /*out*/ &c->mCamera);
+ clientPid, targetSdkVersion, overrideToPortrait, forceSlowJpegMode,
+ /*out*/ &c->mCamera);
}
if (ret.isOk() && c->mCamera != nullptr) {
IInterface::asBinder(c->mCamera)->linkToDeath(c);
diff --git a/camera/CameraSessionStats.cpp b/camera/CameraSessionStats.cpp
index 0706ac1..26c612a 100644
--- a/camera/CameraSessionStats.cpp
+++ b/camera/CameraSessionStats.cpp
@@ -271,6 +271,7 @@
mApiLevel(0),
mIsNdk(false),
mLatencyMs(-1),
+ mLogId(0),
mMaxPreviewFps(0),
mSessionType(0),
mInternalReconfigure(0),
@@ -281,7 +282,7 @@
CameraSessionStats::CameraSessionStats(const String16& cameraId,
int facing, int newCameraState, const String16& clientName,
- int apiLevel, bool isNdk, int32_t latencyMs) :
+ int apiLevel, bool isNdk, int32_t latencyMs, int64_t logId) :
mCameraId(cameraId),
mFacing(facing),
mNewCameraState(newCameraState),
@@ -289,6 +290,7 @@
mApiLevel(apiLevel),
mIsNdk(isNdk),
mLatencyMs(latencyMs),
+ mLogId(logId),
mMaxPreviewFps(0),
mSessionType(0),
mInternalReconfigure(0),
@@ -347,6 +349,12 @@
return err;
}
+ int64_t logId;
+ if ((err = parcel->readInt64(&logId)) != OK) {
+ ALOGE("%s: Failed to read log ID from parcel", __FUNCTION__);
+ return err;
+ }
+
float maxPreviewFps;
if ((err = parcel->readFloat(&maxPreviewFps)) != OK) {
ALOGE("%s: Failed to read maxPreviewFps from parcel", __FUNCTION__);
@@ -408,6 +416,7 @@
mApiLevel = apiLevel;
mIsNdk = isNdk;
mLatencyMs = latencyMs;
+ mLogId = logId;
mMaxPreviewFps = maxPreviewFps;
mSessionType = sessionType;
mInternalReconfigure = internalReconfigure;
@@ -464,6 +473,11 @@
return err;
}
+ if ((err = parcel->writeInt64(mLogId)) != OK) {
+ ALOGE("%s: Failed to write log ID!", __FUNCTION__);
+ return err;
+ }
+
if ((err = parcel->writeFloat(mMaxPreviewFps)) != OK) {
ALOGE("%s: Failed to write maxPreviewFps!", __FUNCTION__);
return err;
@@ -508,6 +522,7 @@
ALOGE("%s: Failed to write video stabilization mode!", __FUNCTION__);
return err;
}
+
return OK;
}
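A hedged sketch of how the new log ID flows end to end (all field values below are arbitrary); the invariant shown by the hunks above is that writeToParcel() and readFromParcel() handle mLogId in the same position, after latencyMs and before maxPreviewFps:

    // Sketch only; assumes <camera/CameraSessionStats.h> and binder's Parcel.
    int64_t logId = 0x1234abcd5678ef01LL;             // illustrative random identifier
    CameraSessionStats stats(String16("0"), /*facing*/ 0, /*newCameraState*/ 0,
            String16("com.example.app"), /*apiLevel*/ 2, /*isNdk*/ false,
            /*latencyMs*/ 50, logId);

    Parcel parcel;
    stats.writeToParcel(&parcel);
    parcel.setDataPosition(0);
    CameraSessionStats restored;
    restored.readFromParcel(&parcel);                 // restored.mLogId == logId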
diff --git a/camera/aidl/android/hardware/ICameraService.aidl b/camera/aidl/android/hardware/ICameraService.aidl
index 01baba1..9f32595 100644
--- a/camera/aidl/android/hardware/ICameraService.aidl
+++ b/camera/aidl/android/hardware/ICameraService.aidl
@@ -84,7 +84,8 @@
String opPackageName,
int clientUid, int clientPid,
int targetSdkVersion,
- boolean overrideToPortrait);
+ boolean overrideToPortrait,
+ boolean forceSlowJpegMode);
/**
* Open a camera device through the new camera API
diff --git a/camera/include/camera/Camera.h b/camera/include/camera/Camera.h
index 26c36a7..21b57af 100644
--- a/camera/include/camera/Camera.h
+++ b/camera/include/camera/Camera.h
@@ -58,7 +58,7 @@
typedef ::android::hardware::ICameraClient TCamCallbacks;
typedef ::android::binder::Status(::android::hardware::ICameraService::*TCamConnectService)
(const sp<::android::hardware::ICameraClient>&,
- int, const String16&, int, int, int, bool,
+ int, const String16&, int, int, int, bool, bool,
/*out*/
sp<::android::hardware::ICamera>*);
static TCamConnectService fnConnectService;
@@ -82,7 +82,7 @@
static sp<Camera> connect(int cameraId,
const String16& clientPackageName,
int clientUid, int clientPid, int targetSdkVersion,
- bool overrideToPortrait);
+ bool overrideToPortrait, bool forceSlowJpegMode);
virtual ~Camera();
diff --git a/camera/include/camera/CameraBase.h b/camera/include/camera/CameraBase.h
index 9d0721b..b20dc1b 100644
--- a/camera/include/camera/CameraBase.h
+++ b/camera/include/camera/CameraBase.h
@@ -120,7 +120,7 @@
static sp<TCam> connect(int cameraId,
const String16& clientPackageName,
int clientUid, int clientPid, int targetSdkVersion,
- bool overrideToPortrait);
+ bool overrideToPortrait, bool forceSlowJpegMode);
virtual void disconnect();
void setListener(const sp<TCamListener>& listener);
diff --git a/camera/include/camera/CameraSessionStats.h b/camera/include/camera/CameraSessionStats.h
index 90ee924..091a7ff 100644
--- a/camera/include/camera/CameraSessionStats.h
+++ b/camera/include/camera/CameraSessionStats.h
@@ -128,6 +128,22 @@
bool mIsNdk;
// latency in ms for camera open, close, or session creation.
int mLatencyMs;
+
+ /*
+ * A randomly generated identifier to map the open/active/idle/close stats to each other after
+ * being logged. Every 'open' event will have a newly generated id which will be logged with
+ * active/idle/closed that correspond to the particular 'open' event.
+ *
+ * This ID is not meant to be globally unique forever. Probabilistically, this ID can be
+ * safely considered unique across all logs from one android build for 48 to 72 hours from
+ * its generation. Chances of identifier collisions are significant past a week or two.
+ *
+ * NOTE: There are no guarantees that the identifiers will be unique. The probability of
+ * collision within a short timeframe is low, but any system consuming these identifiers at
+ * scale should handle identifier collisions, potentially even from the same device.
+ */
+ int64_t mLogId;
+
float mMaxPreviewFps;
// Session info and statistics
@@ -146,7 +162,8 @@
// Constructors
CameraSessionStats();
CameraSessionStats(const String16& cameraId, int facing, int newCameraState,
- const String16& clientName, int apiLevel, bool isNdk, int32_t latencyMs);
+ const String16& clientName, int apiLevel, bool isNdk, int32_t latencyMs,
+ int64_t logId);
virtual status_t readFromParcel(const android::Parcel* parcel) override;
virtual status_t writeToParcel(android::Parcel* parcel) const override;
diff --git a/camera/ndk/NdkCameraCaptureSession.cpp b/camera/ndk/NdkCameraCaptureSession.cpp
index e6c876b..4387cc6 100644
--- a/camera/ndk/NdkCameraCaptureSession.cpp
+++ b/camera/ndk/NdkCameraCaptureSession.cpp
@@ -194,24 +194,19 @@
EXPORT
camera_status_t ACameraCaptureSession_setWindowPreparedCallback(
- ACameraCaptureSession* session, ACameraCaptureSession_prepareCallbacks *cb) {
+ ACameraCaptureSession* session, void *context,
+ ACameraCaptureSession_prepareCallback cb) {
ATRACE_CALL();
if (session == nullptr || cb == nullptr) {
ALOGE("%s: Error: session %p / callback %p is null", __FUNCTION__, session, cb);
return ACAMERA_ERROR_INVALID_PARAMETER;
}
- if (cb->reserved0 != nullptr || cb->reserved1 != nullptr) {
- ALOGE("%s: Setting reserved 0 and reserved 1 fields of "
- "ACameraCaptureSession_prepareCallbacks is currently not supported "
- " .They must be set to null", __FUNCTION__);
- return ACAMERA_ERROR_INVALID_PARAMETER;
- }
if (session->isClosed()) {
ALOGE("%s: session %p is already closed", __FUNCTION__, session);
return ACAMERA_ERROR_SESSION_CLOSED;
}
- session->setWindowPreparedCallback(cb);
+ session->setWindowPreparedCallback(context, cb);
return ACAMERA_OK;
}
diff --git a/camera/ndk/impl/ACameraCaptureSession.h b/camera/ndk/impl/ACameraCaptureSession.h
index 145473b..88135ba 100644
--- a/camera/ndk/impl/ACameraCaptureSession.h
+++ b/camera/ndk/impl/ACameraCaptureSession.h
@@ -75,6 +75,17 @@
};
/**
+ * Capture session state callbacks used in {@link ACameraDevice_setPrepareCallbacks}
+ */
+typedef struct ACameraCaptureSession_prepareCallbacks {
+ /// optional application context. This will be passed in the context
+ /// parameter of the {@link onWindowPrepared} callback.
+ void* context;
+
+ ACameraCaptureSession_prepareCallback onWindowPrepared;
+} ACameraCaptureSession_prepareCallbacks;
+
+/**
* ACameraCaptureSession opaque struct definition
* Leave outside of android namespace because it's NDK struct
*/
@@ -130,9 +141,11 @@
camera_status_t updateOutputConfiguration(ACaptureSessionOutput *output);
- void setWindowPreparedCallback(ACameraCaptureSession_prepareCallbacks *cb) {
+ void setWindowPreparedCallback(void *context,
+ ACameraCaptureSession_prepareCallback cb) {
Mutex::Autolock _l(mSessionLock);
- mPreparedCb = *cb;
+ mPreparedCb.context = context;
+ mPreparedCb.onWindowPrepared = cb;
}
camera_status_t prepare(ACameraWindowType *window);
diff --git a/camera/ndk/include/camera/NdkCameraCaptureSession.h b/camera/ndk/include/camera/NdkCameraCaptureSession.h
index 0211d83..099c5c5 100644
--- a/camera/ndk/include/camera/NdkCameraCaptureSession.h
+++ b/camera/ndk/include/camera/NdkCameraCaptureSession.h
@@ -101,8 +101,23 @@
/**
* The definition of camera capture session onWindowPrepared callback.
+ *
+ * <p>This callback is called when the buffer pre-allocation for an output window Surface is
+ * complete. </p>
+ *
+ * <p>Buffer pre-allocation for an output window is started by
+ * {@link ACameraCaptureSession_prepare}
+ * call. While allocation is underway, the output must not be used in a capture request.
+ * Once this callback is called, the output provided can be used as a target for a
+ * capture request. In case of an error during pre-allocation (such as running out of
+ * suitable-memory), this callback is still invoked after the error is encountered, though some
+ * buffers may not have been successfully pre-allocated.</p>
+ *
+ * Introduced in API 34.
+ *
* @param context The optional app-provided context pointer that was included in
- * the {@link ACameraCaptureSession_prepareCallbacks} struct.
+ * the {@link ACameraCaptureSession_setWindowPreparedCallback} method
+ * call.
* @param window The window that {@link ACameraCaptureSession_prepare} was called on.
* @param session The camera capture session on which {@link ACameraCaptureSession_prepare} was
* called on.
@@ -112,32 +127,6 @@
ACameraWindowType *window,
ACameraCaptureSession *session);
-/**
- * Capture session state callbacks used in {@link ACameraDevice_setPrepareCallbacks}
- */
-typedef struct ACameraCaptureSession_prepareCallbacks {
- /// optional application context. This will be passed in the context
- /// parameter of the {@link onWindowPrepared} callback.
- void* context;
-
- /**
- * This callback is called when the buffer pre-allocation for an output window Surface is
- * complete.
- * <p>Buffer pre-allocation for an output window is started by
- * {@link ACameraCaptureSession_prepare}
- * call. While allocation is underway, the output must not be used in a capture request.
- * Once this callback is called, the output provided can be used as a target for a
- * capture request. In case of an error during pre-allocation (such as running out of
- * suitable-memory), this callback is still invoked after the error is encountered, though some
- * buffers may not have been successfully pre-allocated </p>
- */
- ACameraCaptureSession_prepareCallback onWindowPrepared;
-
- // Reserved for future callback additions, these must be set to nullptr by the client.
- ACameraCaptureSession_prepareCallback reserved0;
- ACameraCaptureSession_prepareCallback reserved1;
-} ACameraCaptureSession_prepareCallbacks;
-
/// Enum for describing error reason in {@link ACameraCaptureFailure}
enum {
/**
@@ -204,7 +193,7 @@
* capture request sent by application, so the address is different to what
* application sent but the content will match. This request will be freed by
* framework immediately after this callback returns.
- * @param timestamp The timestamp when the capture is started. This timestmap will match
+ * @param timestamp The timestamp when the capture is started. This timestamp will match
* {@link ACAMERA_SENSOR_TIMESTAMP} of the {@link ACameraMetadata} in
* {@link ACameraCaptureSession_captureCallbacks#onCaptureCompleted} callback.
*/
@@ -239,7 +228,7 @@
* capture request sent by application, so the address is different to what
* application sent but the content will match. This request will be freed by
* framework immediately after this callback returns.
- * @param failure The {@link ACameraCaptureFailure} desribes the capture failure. The memory is
+ * @param failure The {@link ACameraCaptureFailure} describes the capture failure. The memory is
* managed by camera framework. Do not access this pointer after this callback
* returns.
*/
@@ -451,7 +440,7 @@
* and any repeating requests are stopped (as if {@link ACameraCaptureSession_stopRepeating} was
* called). However, any in-progress capture requests submitted to the session will be completed as
* normal; once all captures have completed and the session has been torn down,
- * {@link ACameraCaptureSession_stateCallbacks#onClosed} callback will be called and the seesion
+ * {@link ACameraCaptureSession_stateCallbacks#onClosed} callback will be called and the session
* will be removed from memory.</p>
*
* <p>Closing a session is idempotent; closing more than once has no effect.</p>
@@ -538,7 +527,7 @@
*
* <p>Repeating burst requests are a simple way for an application to
* maintain a preview or other continuous stream of frames where each
- * request is different in a predicatable way, without having to continually
+ * request is different in a predictable way, without having to continually
* submit requests through {@link ACameraCaptureSession_capture}.</p>
*
* <p>To stop the repeating capture, call {@link ACameraCaptureSession_stopRepeating}. Any
@@ -749,7 +738,7 @@
* capture request sent by application, so the address is different to what
* application sent but the content will match. This request will be freed by
* framework immediately after this callback returns.
- * @param failure The {@link ALogicalCameraCaptureFailure} desribes the capture failure. The memory
+ * @param failure The {@link ALogicalCameraCaptureFailure} describes the capture failure. The memory
* is managed by camera framework. Do not access this pointer after this callback
* returns.
*/
@@ -1033,8 +1022,10 @@
* pre-allocation of buffers through the {@link ACameraCaptureSession_prepareWindow} call has
* completed the pre-allocation of buffers.
* @param session the ACameraCaptureSession on which ACameraCaptureSession_prepareWindow was called.
- * @param callbacks the callback to be called when the output window's buffer pre-allocation is
- * complete.
+ * @param context optional application provided context. This will be passed into the context
+ * parameter of the {@link onWindowPrepared} callback.
+ * @param callback the callback to be called when the output window's buffer pre-allocation is
+ * complete.
* @return <ul><li> {@link ACAMERA_OK} if the method succeeds</li>
* <li>{@link ACAMERA_ERROR_INVALID_PARAMETER} if session or callbacks is
* NULL. Or if the session has not been configured with the window</li>
@@ -1046,7 +1037,8 @@
*/
camera_status_t ACameraCaptureSession_setWindowPreparedCallback(
ACameraCaptureSession* session,
- ACameraCaptureSession_prepareCallbacks* callbacks) __INTRODUCED_IN(34);
+ void *context,
+ ACameraCaptureSession_prepareCallback callback) __INTRODUCED_IN(34);
/**
*
@@ -1087,7 +1079,7 @@
* <p>Once allocation is complete, {@link ACameraCaptureSession_prepareCallback#onWindowPrepared}
* will be invoked with the output provided to this method. Between the prepare call and the
* {@link ACameraCaptureSession_prepareCallback#onWindowPrepared} call,
- * the output provided to prepare must not be used as a target of a capture qequest submitted
+ * the output provided to prepare must not be used as a target of a capture request submitted
* to this session.</p>
*
* <p>{@link android.hardware.camera2.CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL_LEGACY LEGACY}
@@ -1100,7 +1092,7 @@
*
* @return <ul><li>
* {@link ACAMERA_OK} if the method succeeds</li>
- * <li>{@link ACAMERA_ERROR_INVALID_PARAMETER} if session/ window or prepareCallbacks is
+ * <li>{@link ACAMERA_ERROR_INVALID_PARAMETER} if session or window is
* NULL. Or if the session has not been configured with the window</li>
* <li>{@link ACAMERA_ERROR_SESSION_CLOSED} if the capture session has been closed</li>
* <li>{@link ACAMERA_ERROR_CAMERA_DISCONNECTED} if the camera device is closed</li>
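A hedged usage sketch of the reworked prepare-callback API; the callback body, the appState context, and the helper function are assumptions for illustration, while the entry points are the ones declared above:

    // Sketch only; assumes <camera/NdkCameraCaptureSession.h> and a configured
    // session with 'window' as one of its outputs.
    static void onWindowPrepared(void* context, ACameraWindowType* window,
                                 ACameraCaptureSession* session) {
        // Buffers for 'window' are now pre-allocated; it may be used as a
        // capture-request target. 'context' is the pointer passed below.
    }

    static camera_status_t prepareOutputExample(ACameraCaptureSession* session,
                                                ACameraWindowType* window,
                                                void* appState) {
        camera_status_t ret = ACameraCaptureSession_setWindowPreparedCallback(
                session, /*context*/ appState, onWindowPrepared);
        if (ret != ACAMERA_OK) return ret;
        return ACameraCaptureSession_prepareWindow(session, window);
    }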
diff --git a/camera/ndk/include/camera/NdkCameraDevice.h b/camera/ndk/include/camera/NdkCameraDevice.h
index 239cb31..de10eb3 100644
--- a/camera/ndk/include/camera/NdkCameraDevice.h
+++ b/camera/ndk/include/camera/NdkCameraDevice.h
@@ -113,7 +113,7 @@
* @param context The optional context in {@link ACameraDevice_StateCallbacks} will be
* passed to this callback.
* @param device The {@link ACameraDevice} that is being disconnected.
- * @param error The error code describes the cause of this error callback. See the folowing
+ * @param error The error code describes the cause of this error callback. See the following
* links for more detail.
*
* @see ERROR_CAMERA_IN_USE
@@ -447,8 +447,8 @@
* returned by {@link ACAMERA_SCALER_AVAILABLE_STREAM_CONFIGURATIONS}
* before creating a Surface from the SurfaceTexture with <a href=
* "http://developer.android.com/reference/android/view/Surface.html#Surface(android.graphics.SurfaceTexture)">
- * Surface\#Surface(SurfaceTextrue)</a>. If the size is not set by the application, it will be set to be the
- * smallest supported size less than 1080p, by the camera device.</li>
+ * Surface\#Surface(SurfaceTexture)</a>. If the size is not set by the application, it will be
+ * set to be the smallest supported size less than 1080p, by the camera device.</li>
*
* <li>For recording with <a href=
* "http://developer.android.com/reference/android/media/MediaCodec.html">
@@ -587,7 +587,7 @@
* <tr><th>Type</th><th id="rb">Max size</th><th>Type</th><th id="rb">Max size</th><th>Type</th><th id="rb">Max size</th> </tr>
* <tr> <td>`PRIV`</td><td id="rb">`PREVIEW`</td> <td>`PRIV`</td><td id="rb">`MAXIMUM`</td> <td colspan="2" id="rb"></td> <td>Maximum-resolution GPU processing with preview.</td> </tr>
* <tr> <td>`PRIV`</td><td id="rb">`PREVIEW`</td> <td>`YUV `</td><td id="rb">`MAXIMUM`</td> <td colspan="2" id="rb"></td> <td>Maximum-resolution in-app processing with preview.</td> </tr>
- * <tr> <td>`YUV `</td><td id="rb">`PREVIEW`</td> <td>`YUV `</td><td id="rb">`MAXIMUM`</td> <td colspan="2" id="rb"></td> <td>Maximum-resolution two-input in-app processsing.</td> </tr>
+ * <tr> <td>`YUV `</td><td id="rb">`PREVIEW`</td> <td>`YUV `</td><td id="rb">`MAXIMUM`</td> <td colspan="2" id="rb"></td> <td>Maximum-resolution two-input in-app processing.</td> </tr>
* <tr> <td>`PRIV`</td><td id="rb">`PREVIEW`</td> <td>`PRIV`</td><td id="rb">`PREVIEW`</td> <td>`JPEG`</td><td id="rb">`MAXIMUM`</td> <td>Video recording with maximum-size video snapshot</td> </tr>
* <tr> <td>`YUV `</td><td id="rb">`640x480`</td> <td>`PRIV`</td><td id="rb">`PREVIEW`</td> <td>`YUV `</td><td id="rb">`MAXIMUM`</td> <td>Standard video recording plus maximum-resolution in-app processing.</td> </tr>
* <tr> <td>`YUV `</td><td id="rb">`640x480`</td> <td>`YUV `</td><td id="rb">`PREVIEW`</td> <td>`YUV `</td><td id="rb">`MAXIMUM`</td> <td>Preview plus two-input maximum-resolution in-app processing.</td> </tr>
@@ -629,7 +629,7 @@
* <tr><th>Type</th><th id="rb">Max size</th><th>Type</th><th id="rb">Max size</th> </tr>
* <tr> <td>`PRIV`</td><td id="rb">`PREVIEW`</td> <td>`PRIV`</td><td id="rb">`MAXIMUM`</td> <td>Maximum-resolution GPU processing with preview.</td> </tr>
* <tr> <td>`PRIV`</td><td id="rb">`PREVIEW`</td> <td>`YUV `</td><td id="rb">`MAXIMUM`</td> <td>Maximum-resolution in-app processing with preview.</td> </tr>
- * <tr> <td>`YUV `</td><td id="rb">`PREVIEW`</td> <td>`YUV `</td><td id="rb">`MAXIMUM`</td> <td>Maximum-resolution two-input in-app processsing.</td> </tr>
+ * <tr> <td>`YUV `</td><td id="rb">`PREVIEW`</td> <td>`YUV `</td><td id="rb">`MAXIMUM`</td> <td>Maximum-resolution two-input in-app processing.</td> </tr>
* </table><br>
* </p>
*
diff --git a/camera/ndk/include/camera/NdkCameraError.h b/camera/ndk/include/camera/NdkCameraError.h
index 26db7f2..88063d6 100644
--- a/camera/ndk/include/camera/NdkCameraError.h
+++ b/camera/ndk/include/camera/NdkCameraError.h
@@ -97,7 +97,7 @@
ACAMERA_ERROR_CAMERA_SERVICE = ACAMERA_ERROR_BASE - 6,
/**
- * The {@link ACameraCaptureSession} has been closed and cannnot perform any operation other
+ * The {@link ACameraCaptureSession} has been closed and cannot perform any operation other
* than {@link ACameraCaptureSession_close}.
*/
ACAMERA_ERROR_SESSION_CLOSED = ACAMERA_ERROR_BASE - 7,
diff --git a/camera/ndk/include/camera/NdkCameraManager.h b/camera/ndk/include/camera/NdkCameraManager.h
index 7388678..b4f3bf1 100644
--- a/camera/ndk/include/camera/NdkCameraManager.h
+++ b/camera/ndk/include/camera/NdkCameraManager.h
@@ -218,7 +218,7 @@
* @param manager the {@link ACameraManager} of interest.
* @param cameraId the ID string of the camera device of interest.
* @param characteristics the output {@link ACameraMetadata} will be filled here if the method call
- * succeeeds.
+ * succeeds.
*
* @return <ul>
* <li>{@link ACAMERA_OK} if the method call succeeds.</li>
diff --git a/camera/ndk/include/camera/NdkCameraMetadata.h b/camera/ndk/include/camera/NdkCameraMetadata.h
index a9f53dd..cf29736 100644
--- a/camera/ndk/include/camera/NdkCameraMetadata.h
+++ b/camera/ndk/include/camera/NdkCameraMetadata.h
@@ -190,7 +190,7 @@
* @param metadata the {@link ACameraMetadata} of interest.
* @param tag the tag value of the camera metadata entry to be get.
* @param entry the output {@link ACameraMetadata_const_entry} will be filled here if the method
- * call succeeeds.
+ * call succeeds.
*
* @return <ul>
* <li>{@link ACAMERA_OK} if the method call succeeds.</li>
diff --git a/camera/ndk/include/camera/NdkCameraMetadataTags.h b/camera/ndk/include/camera/NdkCameraMetadataTags.h
index d88c1de..1bd3603 100644
--- a/camera/ndk/include/camera/NdkCameraMetadataTags.h
+++ b/camera/ndk/include/camera/NdkCameraMetadataTags.h
@@ -545,7 +545,9 @@
* mode.</p>
* <p>For camera devices with the
* <a href="https://developer.android.com/reference/android/hardware/camera2/CameraMetadata.html#REQUEST_AVAILABLE_CAPABILITIES_ULTRA_HIGH_RESOLUTION_SENSOR">CameraMetadata#REQUEST_AVAILABLE_CAPABILITIES_ULTRA_HIGH_RESOLUTION_SENSOR</a>
- * capability,
+ * capability or devices where
+ * <a href="https://developer.android.com/reference/CameraCharacteristics.html#getAvailableCaptureRequestKeys">CameraCharacteristics#getAvailableCaptureRequestKeys</a>
+ * lists <a href="https://developer.android.com/reference/CaptureRequest.html#SENSOR_PIXEL_MODE">ACAMERA_SENSOR_PIXEL_MODE</a>
* ACAMERA_SENSOR_INFO_ACTIVE_ARRAY_SIZE_MAXIMUM_RESOLUTION /
* ACAMERA_SENSOR_INFO_PRE_CORRECTION_ACTIVE_ARRAY_SIZE_MAXIMUM_RESOLUTION must be used as the
* coordinate system for requests where ACAMERA_SENSOR_PIXEL_MODE is set to
@@ -754,7 +756,10 @@
* mode.</p>
* <p>For camera devices with the
* <a href="https://developer.android.com/reference/android/hardware/camera2/CameraMetadata.html#REQUEST_AVAILABLE_CAPABILITIES_ULTRA_HIGH_RESOLUTION_SENSOR">CameraMetadata#REQUEST_AVAILABLE_CAPABILITIES_ULTRA_HIGH_RESOLUTION_SENSOR</a>
- * capability, ACAMERA_SENSOR_INFO_ACTIVE_ARRAY_SIZE_MAXIMUM_RESOLUTION /
+ * capability or devices where
+ * <a href="https://developer.android.com/reference/CameraCharacteristics.html#getAvailableCaptureRequestKeys">CameraCharacteristics#getAvailableCaptureRequestKeys</a>
+ * lists <a href="https://developer.android.com/reference/CaptureRequest.html#SENSOR_PIXEL_MODE">ACAMERA_SENSOR_PIXEL_MODE</a>,
+ * ACAMERA_SENSOR_INFO_ACTIVE_ARRAY_SIZE_MAXIMUM_RESOLUTION /
* ACAMERA_SENSOR_INFO_PRE_CORRECTION_ACTIVE_ARRAY_SIZE_MAXIMUM_RESOLUTION must be used as the
* coordinate system for requests where ACAMERA_SENSOR_PIXEL_MODE is set to
* <a href="https://developer.android.com/reference/android/hardware/camera2/CameraMetadata.html#SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION">CameraMetadata#SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION</a>.</p>
@@ -957,7 +962,10 @@
* mode.</p>
* <p>For camera devices with the
* <a href="https://developer.android.com/reference/android/hardware/camera2/CameraMetadata.html#REQUEST_AVAILABLE_CAPABILITIES_ULTRA_HIGH_RESOLUTION_SENSOR">CameraMetadata#REQUEST_AVAILABLE_CAPABILITIES_ULTRA_HIGH_RESOLUTION_SENSOR</a>
- * capability, ACAMERA_SENSOR_INFO_ACTIVE_ARRAY_SIZE_MAXIMUM_RESOLUTION /
+ * capability or devices where
+ * <a href="https://developer.android.com/reference/CameraCharacteristics.html#getAvailableCaptureRequestKeys">CameraCharacteristics#getAvailableCaptureRequestKeys</a>
+ * lists <a href="https://developer.android.com/reference/CaptureRequest.html#SENSOR_PIXEL_MODE">ACAMERA_SENSOR_PIXEL_MODE</a>,
+ * ACAMERA_SENSOR_INFO_ACTIVE_ARRAY_SIZE_MAXIMUM_RESOLUTION /
* ACAMERA_SENSOR_INFO_PRE_CORRECTION_ACTIVE_ARRAY_SIZE_MAXIMUM_RESOLUTION must be used as the
* coordinate system for requests where ACAMERA_SENSOR_PIXEL_MODE is set to
* <a href="https://developer.android.com/reference/android/hardware/camera2/CameraMetadata.html#SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION">CameraMetadata#SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION</a>.</p>
@@ -3823,7 +3831,9 @@
* ACAMERA_CONTROL_ZOOM_RATIO for details.</p>
* <p>For camera devices with the
* <a href="https://developer.android.com/reference/android/hardware/camera2/CameraMetadata.html#REQUEST_AVAILABLE_CAPABILITIES_ULTRA_HIGH_RESOLUTION_SENSOR">CameraMetadata#REQUEST_AVAILABLE_CAPABILITIES_ULTRA_HIGH_RESOLUTION_SENSOR</a>
- * capability, ACAMERA_SENSOR_INFO_ACTIVE_ARRAY_SIZE_MAXIMUM_RESOLUTION /
+ * capability or devices where <a href="https://developer.android.com/reference/CameraCharacteristics.html#getAvailableCaptureRequestKeys">CameraCharacteristics#getAvailableCaptureRequestKeys</a>
+ * lists <a href="https://developer.android.com/reference/CaptureRequest.html#SENSOR_PIXEL_MODE">ACAMERA_SENSOR_PIXEL_MODE</a>,
+ * ACAMERA_SENSOR_INFO_ACTIVE_ARRAY_SIZE_MAXIMUM_RESOLUTION /
* ACAMERA_SENSOR_INFO_PRE_CORRECTION_ACTIVE_ARRAY_SIZE_MAXIMUM_RESOLUTION must be used as the
* coordinate system for requests where ACAMERA_SENSOR_PIXEL_MODE is set to
* <a href="https://developer.android.com/reference/android/hardware/camera2/CameraMetadata.html#SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION">CameraMetadata#SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION</a>.</p>
@@ -5364,13 +5374,10 @@
* <a href="https://developer.android.com/reference/android/hardware/camera2/CameraMetadata.html#SENSOR_PIXEL_MODE_DEFAULT">CameraMetadata#SENSOR_PIXEL_MODE_DEFAULT</a> mode.
* When operating in
* <a href="https://developer.android.com/reference/android/hardware/camera2/CameraMetadata.html#SENSOR_PIXEL_MODE_DEFAULT">CameraMetadata#SENSOR_PIXEL_MODE_DEFAULT</a> mode, sensors
- * with <a href="https://developer.android.com/reference/android/hardware/camera2/CameraMetadata.html#REQUEST_AVAILABLE_CAPABILITIES_ULTRA_HIGH_RESOLUTION_SENSOR">CameraMetadata#REQUEST_AVAILABLE_CAPABILITIES_ULTRA_HIGH_RESOLUTION_SENSOR</a>
- * capability would typically perform pixel binning in order to improve low light
+ * would typically perform pixel binning in order to improve low light
* performance, noise reduction etc. However, in
* <a href="https://developer.android.com/reference/android/hardware/camera2/CameraMetadata.html#SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION">CameraMetadata#SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION</a>
- * mode (supported only
- * by <a href="https://developer.android.com/reference/android/hardware/camera2/CameraMetadata.html#REQUEST_AVAILABLE_CAPABILITIES_ULTRA_HIGH_RESOLUTION_SENSOR">CameraMetadata#REQUEST_AVAILABLE_CAPABILITIES_ULTRA_HIGH_RESOLUTION_SENSOR</a>
- * sensors), sensors typically operate in unbinned mode allowing for a larger image size.
+ * mode, sensors typically operate in unbinned mode allowing for a larger image size.
* The stream configurations supported in
* <a href="https://developer.android.com/reference/android/hardware/camera2/CameraMetadata.html#SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION">CameraMetadata#SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION</a>
* mode are also different from those of
@@ -5384,7 +5391,36 @@
* <code>android.scaler.streamConfigurationMap</code>
* must not be mixed in the same CaptureRequest. In other words, these outputs are
* exclusive to each other.
- * This key does not need to be set for reprocess requests.</p>
+ * This key does not need to be set for reprocess requests.
+ * This key will be present on devices supporting the
+ * <a href="https://developer.android.com/reference/android/hardware/camera2/CameraMetadata.html#REQUEST_AVAILABLE_CAPABILITIES_ULTRA_HIGH_RESOLUTION_SENSOR">CameraMetadata#REQUEST_AVAILABLE_CAPABILITIES_ULTRA_HIGH_RESOLUTION_SENSOR</a>
+ * capability. It may also be present on devices which do not support the aforementioned
+ * capability. In that case:</p>
+ * <ul>
+ * <li>
+ * <p>The mandatory stream combinations listed in
+ * <a href="https://developer.android.com/reference/android/hardware/camera2/CameraCharacteristics/mandatoryMaximumResolutionStreamCombinations.html">mandatoryMaximumResolutionStreamCombinations</a>
+ * would not apply.</p>
+ * </li>
+ * <li>
+ * <p>The bayer pattern of {@code RAW} streams when
+ * <a href="https://developer.android.com/reference/android/hardware/camera2/CameraMetadata.html#SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION">CameraMetadata#SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION</a>
+ * is selected will be the one listed in <a href="https://developer.android.com/reference/android/sensor/info/binningFactor.html">binningFactor</a>.</p>
+ * </li>
+ * <li>
+ * <p>The following keys will always be present:</p>
+ * <ul>
+ * <li>android.scaler.streamConfigurationMapMaximumResolution</li>
+ * <li>ACAMERA_SENSOR_INFO_ACTIVE_ARRAY_SIZE_MAXIMUM_RESOLUTION</li>
+ * <li>ACAMERA_SENSOR_INFO_PIXEL_ARRAY_SIZE_MAXIMUM_RESOLUTION</li>
+ * <li>ACAMERA_SENSOR_INFO_PRE_CORRECTION_ACTIVE_ARRAY_SIZE_MAXIMUM_RESOLUTION</li>
+ * </ul>
+ * </li>
+ * </ul>
+ *
+ * @see ACAMERA_SENSOR_INFO_ACTIVE_ARRAY_SIZE_MAXIMUM_RESOLUTION
+ * @see ACAMERA_SENSOR_INFO_PIXEL_ARRAY_SIZE_MAXIMUM_RESOLUTION
+ * @see ACAMERA_SENSOR_INFO_PRE_CORRECTION_ACTIVE_ARRAY_SIZE_MAXIMUM_RESOLUTION
*/
ACAMERA_SENSOR_PIXEL_MODE = // byte (acamera_metadata_enum_android_sensor_pixel_mode_t)
ACAMERA_SENSOR_START + 32,
@@ -5729,7 +5765,8 @@
* counterparts.
* This key will only be present for devices which advertise the
* <a href="https://developer.android.com/reference/android/hardware/camera2/CameraMetadata.html#REQUEST_AVAILABLE_CAPABILITIES_ULTRA_HIGH_RESOLUTION_SENSOR">CameraMetadata#REQUEST_AVAILABLE_CAPABILITIES_ULTRA_HIGH_RESOLUTION_SENSOR</a>
- * capability.</p>
+ * capability or devices where <a href="https://developer.android.com/reference/CameraCharacteristics.html#getAvailableCaptureRequestKeys">CameraCharacteristics#getAvailableCaptureRequestKeys</a>
+ * lists <a href="https://developer.android.com/reference/CaptureRequest.html#SENSOR_PIXEL_MODE">ACAMERA_SENSOR_PIXEL_MODE</a>.</p>
* <p>The data representation is <code>int[4]</code>, which maps to <code>(left, top, width, height)</code>.</p>
*
* @see ACAMERA_SENSOR_INFO_ACTIVE_ARRAY_SIZE
@@ -5761,7 +5798,8 @@
* <a href="https://developer.android.com/reference/android/hardware/camera2/CameraMetadata.html#SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION">CameraMetadata#SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION</a>.
* This key will only be present for devices which advertise the
* <a href="https://developer.android.com/reference/android/hardware/camera2/CameraMetadata.html#REQUEST_AVAILABLE_CAPABILITIES_ULTRA_HIGH_RESOLUTION_SENSOR">CameraMetadata#REQUEST_AVAILABLE_CAPABILITIES_ULTRA_HIGH_RESOLUTION_SENSOR</a>
- * capability.</p>
+ * capability or devices where <a href="https://developer.android.com/reference/CameraCharacteristics.html#getAvailableCaptureRequestKeys">CameraCharacteristics#getAvailableCaptureRequestKeys</a>
+ * lists <a href="https://developer.android.com/reference/CaptureRequest.html#SENSOR_PIXEL_MODE">ACAMERA_SENSOR_PIXEL_MODE</a>.</p>
*
* @see ACAMERA_SENSOR_INFO_PHYSICAL_SIZE
* @see ACAMERA_SENSOR_PIXEL_MODE
@@ -5789,7 +5827,8 @@
* <a href="https://developer.android.com/reference/android/hardware/camera2/CameraMetadata.html#SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION">CameraMetadata#SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION</a>.
* This key will only be present for devices which advertise the
* <a href="https://developer.android.com/reference/android/hardware/camera2/CameraMetadata.html#REQUEST_AVAILABLE_CAPABILITIES_ULTRA_HIGH_RESOLUTION_SENSOR">CameraMetadata#REQUEST_AVAILABLE_CAPABILITIES_ULTRA_HIGH_RESOLUTION_SENSOR</a>
- * capability.</p>
+ * capability or devices where <a href="https://developer.android.com/reference/CameraCharacteristics.html#getAvailableCaptureRequestKeys">CameraCharacteristics#getAvailableCaptureRequestKeys</a>
+ * lists <a href="https://developer.android.com/reference/CaptureRequest.html#SENSOR_PIXEL_MODE">ACAMERA_SENSOR_PIXEL_MODE</a>.</p>
* <p>The data representation is <code>int[4]</code>, which maps to <code>(left, top, width, height)</code>.</p>
*
* @see ACAMERA_SENSOR_INFO_PRE_CORRECTION_ACTIVE_ARRAY_SIZE
@@ -5814,12 +5853,27 @@
* to improve various aspects of imaging such as noise reduction, low light
* performance etc. These groups can be of various sizes such as 2X2 (quad bayer),
* 3X3 (nona-bayer). This key specifies the length and width of the pixels grouped under
- * the same color filter.</p>
- * <p>This key will not be present if REMOSAIC_REPROCESSING is not supported, since RAW images
- * will have a regular bayer pattern.</p>
- * <p>This key will not be present for sensors which don't have the
- * <a href="https://developer.android.com/reference/android/hardware/camera2/CameraMetadata.html#REQUEST_AVAILABLE_CAPABILITIES_ULTRA_HIGH_RESOLUTION_SENSOR">CameraMetadata#REQUEST_AVAILABLE_CAPABILITIES_ULTRA_HIGH_RESOLUTION_SENSOR</a>
- * capability.</p>
+ * the same color filter.
+ * In case the device has the
+ * <a href="https://developer.android.com/reference/CameraMetadata.html#REQUEST_AVAILABLE_CAPABILITIES_ULTRA_HIGH_RESOLUTION_SENSOR">CameraMetadata#REQUEST_AVAILABLE_CAPABILITIES_ULTRA_HIGH_RESOLUTION_SENSOR</a>
+ * capability:</p>
+ * <ul>
+ * <li>This key will not be present if REMOSAIC_REPROCESSING is not supported, since RAW
+ * images will have a regular bayer pattern.</li>
+ * </ul>
+ * <p>In case the device does not have the
+ * <a href="https://developer.android.com/reference/CameraMetadata.html#REQUEST_AVAILABLE_CAPABILITIES_ULTRA_HIGH_RESOLUTION_SENSOR">CameraMetadata#REQUEST_AVAILABLE_CAPABILITIES_ULTRA_HIGH_RESOLUTION_SENSOR</a>
+ * capability:</p>
+ * <ul>
+ * <li>This key will be present if
+ * <a href="https://developer.android.com/reference/CameraCharacteristics.html#getAvailableCaptureRequestKeys">CameraCharacteristics#getAvailableCaptureRequestKeys</a>
+ * lists <a href="https://developer.android.com/reference/CaptureRequest.html#SENSOR_PIXEL_MODE">ACAMERA_SENSOR_PIXEL_MODE</a>, since RAW
+ * images may not necessarily have a regular bayer pattern when
+ * <a href="https://developer.android.com/reference/CaptureRequest.html#SENSOR_PIXEL_MODE">ACAMERA_SENSOR_PIXEL_MODE</a> is set to
+ * <a href="https://developer.android.com/reference/android/hardware/camera2/CameraMetadata.html#SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION">CameraMetadata#SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION</a>.</li>
+ * </ul>
+ *
+ * @see ACAMERA_SENSOR_PIXEL_MODE
*/
ACAMERA_SENSOR_INFO_BINNING_FACTOR = // int32[2]
ACAMERA_SENSOR_INFO_START + 14,
@@ -7924,7 +7978,7 @@
/**
* <p>An external flash has been turned on.</p>
* <p>It informs the camera device that an external flash has been turned on, and that
- * metering (and continuous focus if active) should be quickly recaculated to account
+ * metering (and continuous focus if active) should be quickly recalculated to account
* for the external flash. Otherwise, this mode acts like ON.</p>
* <p>When the external flash is turned off, AE mode should be changed to one of the
* other available AE modes.</p>
@@ -8907,11 +8961,6 @@
*/
ACAMERA_CONTROL_AUTOFRAMING_ON = 1,
- /**
- * <p>Automatically select ON or OFF based on the system level preferences.</p>
- */
- ACAMERA_CONTROL_AUTOFRAMING_AUTO = 2,
-
} acamera_metadata_enum_android_control_autoframing_t;
// ACAMERA_CONTROL_AUTOFRAMING_AVAILABLE
@@ -9930,82 +9979,14 @@
ACAMERA_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP_SRGB = 0,
/**
- * <p>RGB color space sRGB standardized as IEC 61966-2.1:1999.</p>
- */
- ACAMERA_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP_LINEAR_SRGB = 1,
-
- /**
- * <p>RGB color space scRGB-nl standardized as IEC 61966-2-2:2003.</p>
- */
- ACAMERA_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP_EXTENDED_SRGB = 2,
-
- /**
- * <p>RGB color space scRGB standardized as IEC 61966-2-2:2003.</p>
- */
- ACAMERA_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP_LINEAR_EXTENDED_SRGB
- = 3,
-
- /**
- * <p>RGB color space BT.709 standardized as Rec. ITU-R BT.709-5.</p>
- */
- ACAMERA_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP_BT709 = 4,
-
- /**
- * <p>RGB color space BT.2020 standardized as Rec. ITU-R BT.2020-1.</p>
- */
- ACAMERA_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP_BT2020 = 5,
-
- /**
- * <p>RGB color space DCI-P3 standardized as SMPTE RP 431-2-2007.</p>
- */
- ACAMERA_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP_DCI_P3 = 6,
-
- /**
* <p>RGB color space Display P3 based on SMPTE RP 431-2-2007 and IEC 61966-2.1:1999.</p>
*/
ACAMERA_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP_DISPLAY_P3 = 7,
/**
- * <p>RGB color space NTSC, 1953 standard.</p>
+ * <p>RGB color space BT.2100 standardized as Hybrid Log Gamma encoding.</p>
*/
- ACAMERA_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP_NTSC_1953 = 8,
-
- /**
- * <p>RGB color space SMPTE C.</p>
- */
- ACAMERA_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP_SMPTE_C = 9,
-
- /**
- * <p>RGB color space Adobe RGB (1998).</p>
- */
- ACAMERA_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP_ADOBE_RGB = 10,
-
- /**
- * <p>RGB color space ProPhoto RGB standardized as ROMM RGB ISO 22028-2:2013.</p>
- */
- ACAMERA_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP_PRO_PHOTO_RGB = 11,
-
- /**
- * <p>RGB color space ACES standardized as SMPTE ST 2065-1:2012.</p>
- */
- ACAMERA_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP_ACES = 12,
-
- /**
- * <p>RGB color space ACEScg standardized as Academy S-2014-004.</p>
- */
- ACAMERA_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP_ACESCG = 13,
-
- /**
- * <p>XYZ color space CIE XYZ. This color space assumes standard illuminant D50 as its white
- * point.</p>
- */
- ACAMERA_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP_CIE_XYZ = 14,
-
- /**
- * <p>Lab color space CIE L<em>a</em>b*. This color space uses CIE XYZ D50 as a profile conversion
- * space.</p>
- */
- ACAMERA_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP_CIE_LAB = 15,
+ ACAMERA_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP_BT2020_HLG = 16,
} acamera_metadata_enum_android_request_available_color_space_profiles_map_t;
@@ -10449,16 +10430,12 @@
// ACAMERA_SENSOR_PIXEL_MODE
typedef enum acamera_metadata_enum_acamera_sensor_pixel_mode {
/**
- * <p>This is the default sensor pixel mode. This is the only sensor pixel mode
- * supported unless a camera device advertises
- * <a href="https://developer.android.com/reference/android/hardware/camera2/CameraMetadata.html#REQUEST_AVAILABLE_CAPABILITIES_ULTRA_HIGH_RESOLUTION_SENSOR">CameraMetadata#REQUEST_AVAILABLE_CAPABILITIES_ULTRA_HIGH_RESOLUTION_SENSOR</a>.</p>
+ * <p>This is the default sensor pixel mode.</p>
*/
ACAMERA_SENSOR_PIXEL_MODE_DEFAULT = 0,
/**
- * <p>This sensor pixel mode is offered by devices with capability
- * <a href="https://developer.android.com/reference/android/hardware/camera2/CameraMetadata.html#REQUEST_AVAILABLE_CAPABILITIES_ULTRA_HIGH_RESOLUTION_SENSOR">CameraMetadata#REQUEST_AVAILABLE_CAPABILITIES_ULTRA_HIGH_RESOLUTION_SENSOR</a>.
- * In this mode, sensors typically do not bin pixels, as a result can offer larger
+ * <p>In this mode, sensors typically do not bin pixels, as a result can offer larger
* image sizes.</p>
*/
ACAMERA_SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION = 1,
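Several of the doc updates above key off whether ACAMERA_SENSOR_PIXEL_MODE appears among the device's available capture request keys (the NDK counterpart of CameraCharacteristics#getAvailableCaptureRequestKeys). A hedged sketch of that check from an app's side; 'chars' is assumed to come from ACameraManager_getCameraCharacteristics:

    static bool supportsPixelModeExample(const ACameraMetadata* chars) {
        ACameraMetadata_const_entry entry;
        if (ACameraMetadata_getConstEntry(chars,
                ACAMERA_REQUEST_AVAILABLE_REQUEST_KEYS, &entry) != ACAMERA_OK) {
            return false;
        }
        for (uint32_t i = 0; i < entry.count; i++) {
            if (entry.data.i32[i] == ACAMERA_SENSOR_PIXEL_MODE) {
                return true;   // MAXIMUM_RESOLUTION requests may be attempted
            }
        }
        return false;
    }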
diff --git a/camera/ndk/include/camera/NdkCaptureRequest.h b/camera/ndk/include/camera/NdkCaptureRequest.h
index d83c5b3..dc18544 100644
--- a/camera/ndk/include/camera/NdkCaptureRequest.h
+++ b/camera/ndk/include/camera/NdkCaptureRequest.h
@@ -148,7 +148,7 @@
* @param request the {@link ACaptureRequest} of interest.
* @param tag the tag value of the camera metadata entry to be get.
* @param entry the output {@link ACameraMetadata_const_entry} will be filled here if the method
- * call succeeeds.
+ * call succeeds.
*
* @return <ul>
* <li>{@link ACAMERA_OK} if the method call succeeds.</li>
diff --git a/camera/ndk/ndk_vendor/tests/AImageReaderVendorTest.cpp b/camera/ndk/ndk_vendor/tests/AImageReaderVendorTest.cpp
index 00d66aa..7f6ea9d 100644
--- a/camera/ndk/ndk_vendor/tests/AImageReaderVendorTest.cpp
+++ b/camera/ndk/ndk_vendor/tests/AImageReaderVendorTest.cpp
@@ -144,7 +144,8 @@
}
if (prepareWindows) {
// Set window prepared callback
- ACameraCaptureSession_setWindowPreparedCallback(mSession, &mPreparedCb);
+ ACameraCaptureSession_setWindowPreparedCallback(mSession, /*context*/this,
+ mPreparedCb);
// Prepare windows
for (auto &window : configuredWindows) {
ret = ACameraCaptureSession_prepareWindow(mSession, window);
@@ -342,8 +343,7 @@
ACameraDevice_StateCallbacks mDeviceCb{this, nullptr, nullptr};
ACameraCaptureSession_stateCallbacks mSessionCb{ this, nullptr, nullptr, nullptr};
- ACameraCaptureSession_prepareCallbacks mPreparedCb{
- this, onPreparedCb, /*reserved0*/nullptr, /*reserved1*/nullptr};
+ ACameraCaptureSession_prepareCallback mPreparedCb = &onPreparedCb;
const native_handle_t* mImgReaderAnw = nullptr; // not owned by us.
diff --git a/camera/tests/CameraZSLTests.cpp b/camera/tests/CameraZSLTests.cpp
index bdfb84a..6423709 100644
--- a/camera/tests/CameraZSLTests.cpp
+++ b/camera/tests/CameraZSLTests.cpp
@@ -211,7 +211,7 @@
String16("ZSLTest"), hardware::ICameraService::USE_CALLING_UID,
hardware::ICameraService::USE_CALLING_PID,
/*targetSdkVersion*/__ANDROID_API_FUTURE__,
- /*overrideToPortrait*/false, &cameraDevice);
+ /*overrideToPortrait*/false, /*forceSlowJpegMode*/false, &cameraDevice);
EXPECT_TRUE(rc.isOk());
CameraParameters params(cameraDevice->getParameters());
diff --git a/camera/tests/fuzzer/camera_SessionStats_fuzzer.cpp b/camera/tests/fuzzer/camera_SessionStats_fuzzer.cpp
index 5866aaf..2f2ad77 100644
--- a/camera/tests/fuzzer/camera_SessionStats_fuzzer.cpp
+++ b/camera/tests/fuzzer/camera_SessionStats_fuzzer.cpp
@@ -131,8 +131,13 @@
parcelCamSessionStats.writeInt32(latencyMs);
}
+ int64_t logId = fdp.ConsumeIntegral<int64_t>();
+ if (fdp.ConsumeBool()) {
+ parcelCamSessionStats.writeInt64(logId);
+ }
+
cameraSessionStats = new CameraSessionStats(cameraId, facing, newCameraState, clientName,
- apiLevel, isNdk, latencyMs);
+ apiLevel, isNdk, latencyMs, logId);
}
if (fdp.ConsumeBool()) {
diff --git a/camera/tests/fuzzer/camera_captureResult_fuzzer.cpp b/camera/tests/fuzzer/camera_captureResult_fuzzer.cpp
index 03cf9c4..1396431 100644
--- a/camera/tests/fuzzer/camera_captureResult_fuzzer.cpp
+++ b/camera/tests/fuzzer/camera_captureResult_fuzzer.cpp
@@ -63,7 +63,7 @@
invokeReadWriteNullParcel<CaptureResult>(captureResult);
invokeReadWriteParcel<CaptureResult>(captureResult);
CaptureResult captureResult2(*captureResult);
- CaptureResult captureResult3(move(captureResult2));
+ CaptureResult captureResult3(std::move(captureResult2));
delete captureResult;
delete physicalCaptureResultInfo;
diff --git a/camera/tests/fuzzer/camera_fuzzer.cpp b/camera/tests/fuzzer/camera_fuzzer.cpp
index d41e6b6..f9ef98e 100644
--- a/camera/tests/fuzzer/camera_fuzzer.cpp
+++ b/camera/tests/fuzzer/camera_fuzzer.cpp
@@ -152,7 +152,7 @@
String16("CAMERAFUZZ"), hardware::ICameraService::USE_CALLING_UID,
hardware::ICameraService::USE_CALLING_PID,
/*targetSdkVersion*/ __ANDROID_API_FUTURE__,
- /*overrideToPortrait*/false, &cameraDevice);
+ /*overrideToPortrait*/false, /*forceSlowJpegMode*/false, &cameraDevice);
mCamera = Camera::create(cameraDevice);
if (!mCamera) {
return false;
diff --git a/cmds/screenrecord/screenrecord.cpp b/cmds/screenrecord/screenrecord.cpp
index d757cd6..cd4932d 100644
--- a/cmds/screenrecord/screenrecord.cpp
+++ b/cmds/screenrecord/screenrecord.cpp
@@ -789,6 +789,13 @@
return NAME_NOT_FOUND;
}
+ DisplayMode displayMode;
+ err = SurfaceComposerClient::getActiveDisplayMode(display, &displayMode);
+ if (err != NO_ERROR) {
+ fprintf(stderr, "ERROR: unable to get display config\n");
+ return err;
+ }
+
ui::DisplayState displayState;
err = SurfaceComposerClient::getDisplayState(display, &displayState);
if (err != NO_ERROR) {
@@ -796,11 +803,9 @@
return err;
}
- DisplayMode displayMode;
- err = SurfaceComposerClient::getActiveDisplayMode(display, &displayMode);
- if (err != NO_ERROR) {
- fprintf(stderr, "ERROR: unable to get display config\n");
- return err;
+ if (displayState.layerStack == ui::INVALID_LAYER_STACK) {
+ fprintf(stderr, "ERROR: INVALID_LAYER_STACK, please check your display state.\n");
+ return INVALID_OPERATION;
}
const ui::Size& layerStackSpaceRect = displayState.layerStackSpaceRect;
diff --git a/drm/TEST_MAPPING b/drm/TEST_MAPPING
index 3642898..a9b4b2a 100644
--- a/drm/TEST_MAPPING
+++ b/drm/TEST_MAPPING
@@ -1,17 +1,17 @@
{
- "presubmit-large": [
+ "presubmit": [
// The following tests validate codec and drm path.
{
- "name": "GtsMediaTestCases",
+ "name": "WvtsDeviceTestCases",
"options" : [
{
"include-annotation": "android.platform.test.annotations.Presubmit"
},
{
- "include-filter": "com.google.android.media.gts.WidevineGenericOpsTests"
+ "include-filter": "com.google.android.media.wvts.WidevineGenericOpsTests"
},
{
- "include-filter": "com.google.android.media.gts.WidevineH264PlaybackTests"
+ "include-filter": "com.google.android.media.wvts.WidevineH264PlaybackTests"
}
]
}
diff --git a/drm/libmediadrm/DrmHalAidl.cpp b/drm/libmediadrm/DrmHalAidl.cpp
index 1844acb..5ec7337 100644
--- a/drm/libmediadrm/DrmHalAidl.cpp
+++ b/drm/libmediadrm/DrmHalAidl.cpp
@@ -459,7 +459,7 @@
DrmStatus DrmHalAidl::createPlugin(const uint8_t uuid[16], const String8& appPackageName) {
Mutex::Autolock autoLock(mLock);
-
+ if (mInitCheck == ERROR_UNSUPPORTED) return mInitCheck;
Uuid uuidAidl = DrmUtils::toAidlUuid(uuid);
std::string appPackageNameAidl = toStdString(appPackageName);
std::shared_ptr<IDrmPluginAidl> pluginAidl;
@@ -1216,7 +1216,7 @@
closeOpenSessions();
Mutex::Autolock autoLock(mLock);
- reportFrameworkMetrics(reportPluginMetrics());
+ if (mInitCheck == OK) reportFrameworkMetrics(reportPluginMetrics());
setListener(NULL);
mInitCheck = NO_INIT;
diff --git a/drm/libmediadrm/DrmHalHidl.cpp b/drm/libmediadrm/DrmHalHidl.cpp
index 56d63c5..00ea004 100644
--- a/drm/libmediadrm/DrmHalHidl.cpp
+++ b/drm/libmediadrm/DrmHalHidl.cpp
@@ -557,6 +557,7 @@
DrmStatus DrmHalHidl::createPlugin(const uint8_t uuid[16], const String8& appPackageName) {
Mutex::Autolock autoLock(mLock);
+ if (mInitCheck == ERROR_UNSUPPORTED) return mInitCheck;
for (ssize_t i = mFactories.size() - 1; i >= 0; i--) {
auto hResult = mFactories[i]->isCryptoSchemeSupported(uuid);
if (hResult.isOk() && hResult) {
diff --git a/drm/libmediadrm/DrmMetricsLogger.cpp b/drm/libmediadrm/DrmMetricsLogger.cpp
index de6d097..ce4d730 100644
--- a/drm/libmediadrm/DrmMetricsLogger.cpp
+++ b/drm/libmediadrm/DrmMetricsLogger.cpp
@@ -41,6 +41,80 @@
DrmMetricsLogger::~DrmMetricsLogger() {}
+int MediaErrorToEnum(status_t err) {
+#define ERROR_BAD_VALUE (BAD_VALUE)
+#define ERROR_DEAD_OBJECT (DEAD_OBJECT)
+#define STATUS_CASE(status) \
+ case ERROR_##status: \
+ return ENUM_##status
+
+ switch (err) {
+ STATUS_CASE(DRM_UNKNOWN);
+ STATUS_CASE(DRM_NO_LICENSE);
+ STATUS_CASE(DRM_LICENSE_EXPIRED);
+ STATUS_CASE(DRM_RESOURCE_BUSY);
+ STATUS_CASE(DRM_INSUFFICIENT_OUTPUT_PROTECTION);
+ STATUS_CASE(DRM_SESSION_NOT_OPENED);
+ STATUS_CASE(DRM_CANNOT_HANDLE);
+ STATUS_CASE(DRM_INSUFFICIENT_SECURITY);
+ STATUS_CASE(DRM_FRAME_TOO_LARGE);
+ STATUS_CASE(DRM_SESSION_LOST_STATE);
+ STATUS_CASE(DRM_CERTIFICATE_MALFORMED);
+ STATUS_CASE(DRM_CERTIFICATE_MISSING);
+ STATUS_CASE(DRM_CRYPTO_LIBRARY);
+ STATUS_CASE(DRM_GENERIC_OEM);
+ STATUS_CASE(DRM_GENERIC_PLUGIN);
+ STATUS_CASE(DRM_INIT_DATA);
+ STATUS_CASE(DRM_KEY_NOT_LOADED);
+ STATUS_CASE(DRM_LICENSE_PARSE);
+ STATUS_CASE(DRM_LICENSE_POLICY);
+ STATUS_CASE(DRM_LICENSE_RELEASE);
+ STATUS_CASE(DRM_LICENSE_REQUEST_REJECTED);
+ STATUS_CASE(DRM_LICENSE_RESTORE);
+ STATUS_CASE(DRM_LICENSE_STATE);
+ STATUS_CASE(DRM_MEDIA_FRAMEWORK);
+ STATUS_CASE(DRM_PROVISIONING_CERTIFICATE);
+ STATUS_CASE(DRM_PROVISIONING_CONFIG);
+ STATUS_CASE(DRM_PROVISIONING_PARSE);
+ STATUS_CASE(DRM_PROVISIONING_REQUEST_REJECTED);
+ STATUS_CASE(DRM_PROVISIONING_RETRY);
+ STATUS_CASE(DRM_RESOURCE_CONTENTION);
+ STATUS_CASE(DRM_SECURE_STOP_RELEASE);
+ STATUS_CASE(DRM_STORAGE_READ);
+ STATUS_CASE(DRM_STORAGE_WRITE);
+ STATUS_CASE(DRM_ZERO_SUBSAMPLES);
+ STATUS_CASE(DRM_INVALID_STATE);
+ STATUS_CASE(BAD_VALUE);
+ STATUS_CASE(DRM_NOT_PROVISIONED);
+ STATUS_CASE(DRM_DEVICE_REVOKED);
+ STATUS_CASE(DRM_DECRYPT);
+ STATUS_CASE(DEAD_OBJECT);
+#undef ERROR_BAD_VALUE
+#undef ERROR_DEAD_OBJECT
+#undef STATUS_CASE
+ }
+ return ENUM_DRM_UNKNOWN;
+}
+
+int DrmPluginSecurityLevelToJavaSecurityLevel(DrmPlugin::SecurityLevel securityLevel) {
+#define STATUS_CASE(status) \
+ case DrmPlugin::k##status: \
+ return J##status
+
+ switch (securityLevel) {
+ STATUS_CASE(SecurityLevelUnknown);
+ STATUS_CASE(SecurityLevelSwSecureCrypto);
+ STATUS_CASE(SecurityLevelSwSecureDecode);
+ STATUS_CASE(SecurityLevelHwSecureCrypto);
+ STATUS_CASE(SecurityLevelHwSecureDecode);
+ STATUS_CASE(SecurityLevelHwSecureAll);
+ STATUS_CASE(SecurityLevelMax);
+#undef STATUS_CASE
+ }
+ return static_cast<int>(securityLevel);
+}
+
+
DrmStatus DrmMetricsLogger::initCheck() const {
DrmStatus status = mImpl->initCheck();
if (status != OK) {
@@ -75,6 +149,10 @@
}
DrmStatus status = mImpl->createPlugin(uuid, appPackageName);
if (status == OK) {
+ String8 version8;
+ if (getPropertyString(String8("version"), version8) == OK) {
+ mVersion = version8.string();
+ }
reportMediaDrmCreated();
} else {
reportMediaDrmErrored(status, __func__);
@@ -103,6 +181,9 @@
if (getSecurityLevel(sessionId, &ctx.mActualSecurityLevel) != OK) {
ctx.mActualSecurityLevel = DrmPlugin::kSecurityLevelUnknown;
}
+ if (!mVersion.empty()) {
+ ctx.mVersion = mVersion;
+ }
{
const std::lock_guard<std::mutex> lock(mSessionMapMutex);
mSessionMap.insert({sessionKey, ctx});
@@ -116,12 +197,12 @@
DrmStatus DrmMetricsLogger::closeSession(Vector<uint8_t> const& sessionId) {
std::vector<uint8_t> sid = toStdVec(sessionId);
- {
+ DrmStatus status = mImpl->closeSession(sessionId);
+ if (status == OK) {
const std::lock_guard<std::mutex> lock(mSessionMapMutex);
mSessionMap.erase(sid);
- }
- DrmStatus status = mImpl->closeSession(sessionId);
- if (status != OK) {
+ } else {
+ // TODO(b/275729711): reclaim sessions that failed to close
reportMediaDrmErrored(status, __func__, sid);
}
return status;
@@ -466,6 +547,7 @@
mediametrics_setInt64(handle, "uuid_lsb", mUuid[1]);
mediametrics_setInt32(handle, "frontend", mFrontend);
mediametrics_setCString(handle, "object_nonce", mObjNonce.c_str());
+ mediametrics_setCString(handle, "version", mVersion.c_str());
mediametrics_selfRecord(handle);
mediametrics_delete(handle);
}
@@ -476,13 +558,16 @@
mediametrics_setInt64(handle, "uuid_msb", mUuid[0]);
mediametrics_setInt64(handle, "uuid_lsb", mUuid[1]);
mediametrics_setInt32(handle, "frontend", mFrontend);
+ mediametrics_setCString(handle, "version", mVersion.c_str());
mediametrics_setCString(handle, "object_nonce", mObjNonce.c_str());
const std::lock_guard<std::mutex> lock(mSessionMapMutex);
auto it = mSessionMap.find(sessionId);
if (it != mSessionMap.end()) {
mediametrics_setCString(handle, "session_nonce", it->second.mNonce.c_str());
- mediametrics_setInt64(handle, "requested_seucrity_level", it->second.mTargetSecurityLevel);
- mediametrics_setInt64(handle, "opened_seucrity_level", it->second.mActualSecurityLevel);
+ mediametrics_setInt32(handle, "requested_security_level",
+ DrmPluginSecurityLevelToJavaSecurityLevel(it->second.mTargetSecurityLevel));
+ mediametrics_setInt32(handle, "opened_security_level",
+ DrmPluginSecurityLevelToJavaSecurityLevel(it->second.mActualSecurityLevel));
}
mediametrics_selfRecord(handle);
mediametrics_delete(handle);
@@ -495,17 +580,19 @@
mediametrics_setInt64(handle, "uuid_msb", mUuid[0]);
mediametrics_setInt64(handle, "uuid_lsb", mUuid[1]);
mediametrics_setInt32(handle, "frontend", mFrontend);
+ mediametrics_setCString(handle, "version", mVersion.c_str());
mediametrics_setCString(handle, "object_nonce", mObjNonce.c_str());
if (!sessionId.empty()) {
const std::lock_guard<std::mutex> lock(mSessionMapMutex);
auto it = mSessionMap.find(sessionId);
if (it != mSessionMap.end()) {
mediametrics_setCString(handle, "session_nonce", it->second.mNonce.c_str());
- mediametrics_setInt64(handle, "seucrity_level", it->second.mActualSecurityLevel);
+ mediametrics_setInt32(handle, "security_level",
+ DrmPluginSecurityLevelToJavaSecurityLevel(it->second.mActualSecurityLevel));
}
}
mediametrics_setCString(handle, "api", api);
- mediametrics_setInt32(handle, "error_code", error_code);
+ mediametrics_setInt32(handle, "error_code", MediaErrorToEnum(error_code));
mediametrics_setInt32(handle, "cdm_err", error_code.getCdmErr());
mediametrics_setInt32(handle, "oem_err", error_code.getOemErr());
mediametrics_setInt32(handle, "error_context", error_code.getContext());
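For readers skimming the macros above, one STATUS_CASE expansion in MediaErrorToEnum produces the following (enum value per the block added to DrmMetricsLogger.h in this change), so the raw framework status_t is remapped to a stable, proto-synced value before being written with mediametrics_setInt32:

    case ERROR_DRM_NO_LICENSE:       // framework status_t
        return ENUM_DRM_NO_LICENSE;  // stable value 1, kept in sync with enums.proto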
diff --git a/drm/libmediadrm/TEST_MAPPING b/drm/libmediadrm/TEST_MAPPING
index bc15879..8d7be22 100644
--- a/drm/libmediadrm/TEST_MAPPING
+++ b/drm/libmediadrm/TEST_MAPPING
@@ -1,19 +1,19 @@
{
"presubmit": [
{
- "name": "GtsMediaTestCases",
+ "name": "WvtsDeviceTestCases",
"options" : [
{
"include-annotation": "android.platform.test.annotations.Presubmit"
},
{
- "include-filter": "com.google.android.media.gts.WidevineGenericOpsTests"
+ "include-filter": "com.google.android.media.wvts.WidevineGenericOpsTests"
},
{
- "include-filter": "com.google.android.media.gts.MediaDrmTest"
+ "include-filter": "com.google.android.media.wvts.MediaDrmTest"
},
{
- "include-filter": "com.google.android.media.gts.WidevineDashPolicyTests"
+ "include-filter": "com.google.android.media.wvts.WidevineDashPolicyTests"
}
]
}
diff --git a/drm/libmediadrm/include/mediadrm/DrmMetricsLogger.h b/drm/libmediadrm/include/mediadrm/DrmMetricsLogger.h
index f4e3c3e..e72f7f7 100644
--- a/drm/libmediadrm/include/mediadrm/DrmMetricsLogger.h
+++ b/drm/libmediadrm/include/mediadrm/DrmMetricsLogger.h
@@ -25,10 +25,66 @@
namespace android {
+// Keep enums in sync with frameworks/proto_logging/stats/enums/media/drm/enums.proto
+
+enum {
+ ENUM_DRM_UNKNOWN = 0,
+ ENUM_DRM_NO_LICENSE = 1,
+ ENUM_DRM_LICENSE_EXPIRED = 2,
+ ENUM_DRM_RESOURCE_BUSY = 3,
+ ENUM_DRM_INSUFFICIENT_OUTPUT_PROTECTION = 4,
+ ENUM_DRM_SESSION_NOT_OPENED = 5,
+ ENUM_DRM_CANNOT_HANDLE = 6,
+ ENUM_DRM_INSUFFICIENT_SECURITY = 7,
+ ENUM_DRM_FRAME_TOO_LARGE = 8,
+ ENUM_DRM_SESSION_LOST_STATE = 9,
+ ENUM_DRM_CERTIFICATE_MALFORMED = 10,
+ ENUM_DRM_CERTIFICATE_MISSING = 11,
+ ENUM_DRM_CRYPTO_LIBRARY = 12,
+ ENUM_DRM_GENERIC_OEM = 13,
+ ENUM_DRM_GENERIC_PLUGIN = 14,
+ ENUM_DRM_INIT_DATA = 15,
+ ENUM_DRM_KEY_NOT_LOADED = 16,
+ ENUM_DRM_LICENSE_PARSE = 17,
+ ENUM_DRM_LICENSE_POLICY = 18,
+ ENUM_DRM_LICENSE_RELEASE = 19,
+ ENUM_DRM_LICENSE_REQUEST_REJECTED = 20,
+ ENUM_DRM_LICENSE_RESTORE = 21,
+ ENUM_DRM_LICENSE_STATE = 22,
+ ENUM_DRM_MEDIA_FRAMEWORK = 23,
+ ENUM_DRM_PROVISIONING_CERTIFICATE = 24,
+ ENUM_DRM_PROVISIONING_CONFIG = 25,
+ ENUM_DRM_PROVISIONING_PARSE = 26,
+ ENUM_DRM_PROVISIONING_REQUEST_REJECTED = 27,
+ ENUM_DRM_PROVISIONING_RETRY = 28,
+ ENUM_DRM_RESOURCE_CONTENTION = 29,
+ ENUM_DRM_SECURE_STOP_RELEASE = 30,
+ ENUM_DRM_STORAGE_READ = 31,
+ ENUM_DRM_STORAGE_WRITE = 32,
+ ENUM_DRM_ZERO_SUBSAMPLES = 33,
+ ENUM_DRM_INVALID_STATE = 34,
+ ENUM_BAD_VALUE = 35,
+ ENUM_DRM_NOT_PROVISIONED = 36,
+ ENUM_DRM_DEVICE_REVOKED = 37,
+ ENUM_DRM_DECRYPT = 38,
+ ENUM_DEAD_OBJECT = 39,
+};
+
+enum {
+ JSecurityLevelUnknown = 0,
+ JSecurityLevelSwSecureCrypto = 1,
+ JSecurityLevelSwSecureDecode = 2,
+ JSecurityLevelHwSecureCrypto = 3,
+ JSecurityLevelHwSecureDecode = 4,
+ JSecurityLevelHwSecureAll = 5,
+ JSecurityLevelMax = 6,
+};
+
struct SessionContext {
std::string mNonce;
- int64_t mTargetSecurityLevel;
+ DrmPlugin::SecurityLevel mTargetSecurityLevel;
DrmPlugin::SecurityLevel mActualSecurityLevel;
+ std::string mVersion;
};
class DrmMetricsLogger : public IDrm {
@@ -161,6 +217,7 @@
std::array<int64_t, 2> mUuid;
std::string mObjNonce;
std::string mScheme;
+ std::string mVersion;
std::map<std::vector<uint8_t>, SessionContext> mSessionMap;
mutable std::mutex mSessionMapMutex;
IDrmFrontend mFrontend;
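
Note on the header change above: the JSecurityLevel* constants mirror the Java-side
android.media.MediaDrm security levels, and mTargetSecurityLevel now stores the plugin enum
instead of a raw int64_t. The DrmPluginSecurityLevelToJavaSecurityLevel conversion used by the
logging hunks is not shown in this diff; a minimal sketch, assuming the DrmPlugin::kSecurityLevel*
enumerators from DrmAPI.h, is:

    // Illustrative sketch only: translate the plugin-side level into the
    // Java-facing values declared above.
    int32_t DrmPluginSecurityLevelToJavaSecurityLevel(DrmPlugin::SecurityLevel level) {
        switch (level) {
            case DrmPlugin::kSecurityLevelSwSecureCrypto: return JSecurityLevelSwSecureCrypto;
            case DrmPlugin::kSecurityLevelSwSecureDecode: return JSecurityLevelSwSecureDecode;
            case DrmPlugin::kSecurityLevelHwSecureCrypto: return JSecurityLevelHwSecureCrypto;
            case DrmPlugin::kSecurityLevelHwSecureDecode: return JSecurityLevelHwSecureDecode;
            case DrmPlugin::kSecurityLevelHwSecureAll:    return JSecurityLevelHwSecureAll;
            default:                                      return JSecurityLevelUnknown;
        }
    }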
diff --git a/drm/libmediadrm/interface/mediadrm/DrmUtils.h b/drm/libmediadrm/interface/mediadrm/DrmUtils.h
index ce8536c..2510f4e 100644
--- a/drm/libmediadrm/interface/mediadrm/DrmUtils.h
+++ b/drm/libmediadrm/interface/mediadrm/DrmUtils.h
@@ -37,6 +37,7 @@
#include <ctime>
#include <deque>
#include <endian.h>
+#include <inttypes.h>
#include <iterator>
#include <mutex>
#include <string>
@@ -105,9 +106,9 @@
void LogToBuffer(android_LogPriority level, const uint8_t uuid[16], const char *fmt, Args... args) {
uint64_t uuid2[2] = {};
std::memcpy(uuid2, uuid, sizeof(uuid2));
- std::string uuidFmt("uuid=[%lx %lx] ");
+ std::string uuidFmt("uuid=[%" PRIx64 " %" PRIx64 "] ");
uuidFmt += fmt;
- LogToBuffer(level, uuidFmt.c_str(), htobe64(uuid2[0]), htobe64(uuid2[1]), args...);
+ LogToBuffer(level, uuidFmt.c_str(), betoh64(uuid2[0]), betoh64(uuid2[1]), args...);
}
#ifndef LOG2BE
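
Note on the DrmUtils.h change above: "%lx" is only correct where uint64_t happens to be unsigned
long, so the UUID halves could be printed with the wrong width on 32-bit builds. PRIx64 from the
newly included <inttypes.h> expands to the right conversion specifier on every ABI, for example:

    uint64_t msb = 0x0123456789abcdefULL;   // example value only
    printf("uuid=[%" PRIx64 "]", msb);      // portable for uint64_t, unlike "%lx"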
diff --git a/drm/mediadrm/plugins/clearkey/aidl/CryptoPlugin.cpp b/drm/mediadrm/plugins/clearkey/aidl/CryptoPlugin.cpp
index afc9b6a..a63471f 100644
--- a/drm/mediadrm/plugins/clearkey/aidl/CryptoPlugin.cpp
+++ b/drm/mediadrm/plugins/clearkey/aidl/CryptoPlugin.cpp
@@ -137,6 +137,8 @@
*_aidl_return = static_cast<ssize_t>(offset);
return toNdkScopedAStatus(Status::OK);
} else if (in_args.mode == Mode::AES_CTR) {
+ if (!mSession) return toNdkScopedAStatus(Status::ERROR_DRM_CANNOT_HANDLE,
+ "session not found");
size_t bytesDecrypted{};
std::vector<int32_t> clearDataLengths;
std::vector<int32_t> encryptedDataLengths;
@@ -149,6 +151,7 @@
detailedError = "invalid decrypt parameter size";
return toNdkScopedAStatus(Status::ERROR_DRM_CANNOT_HANDLE, detailedError);
}
+
auto res =
mSession->decrypt(in_args.keyId.data(), in_args.iv.data(),
srcPtr, static_cast<uint8_t*>(destPtr),
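
Note on the CryptoPlugin.cpp change above: the added null check guards the AES_CTR path against
being invoked before a session has been attached to the crypto plugin; without it, the
mSession->decrypt() call that follows would dereference a null sp<Session> and crash the clearkey
service instead of returning ERROR_DRM_CANNOT_HANDLE.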
diff --git a/drm/mediadrm/plugins/clearkey/hidl/AesCtrDecryptor.cpp b/drm/mediadrm/plugins/clearkey/hidl/AesCtrDecryptor.cpp
deleted file mode 100644
index e03a896..0000000
--- a/drm/mediadrm/plugins/clearkey/hidl/AesCtrDecryptor.cpp
+++ /dev/null
@@ -1,86 +0,0 @@
-/*
- * Copyright (C) 2018 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-//#define LOG_NDEBUG 0
-#define LOG_TAG "hidl_ClearkeyDecryptor"
-#include <utils/Log.h>
-
-#include <openssl/aes.h>
-
-#include "AesCtrDecryptor.h"
-#include "ClearKeyTypes.h"
-
-namespace android {
-namespace hardware {
-namespace drm {
-namespace V1_4 {
-namespace clearkey {
-
-using ::android::hardware::drm::V1_0::SubSample;
-using ::android::hardware::drm::V1_0::Status;
-
-static const size_t kBlockBitCount = kBlockSize * 8;
-
-Status AesCtrDecryptor::decrypt(
- const std::vector<uint8_t>& key,
- const Iv iv, const uint8_t* source,
- uint8_t* destination,
- const std::vector<SubSample> subSamples,
- size_t numSubSamples,
- size_t* bytesDecryptedOut) {
- uint32_t blockOffset = 0;
- uint8_t previousEncryptedCounter[kBlockSize];
- memset(previousEncryptedCounter, 0, kBlockSize);
-
- if (key.size() != kBlockSize || (sizeof(Iv) / sizeof(uint8_t)) != kBlockSize) {
- android_errorWriteLog(0x534e4554, "63982768");
- return Status::ERROR_DRM_DECRYPT;
- }
-
- size_t offset = 0;
- AES_KEY opensslKey;
- AES_set_encrypt_key(key.data(), kBlockBitCount, &opensslKey);
- Iv opensslIv;
- memcpy(opensslIv, iv, sizeof(opensslIv));
-
- for (size_t i = 0; i < numSubSamples; ++i) {
- const SubSample& subSample = subSamples[i];
-
- if (subSample.numBytesOfClearData > 0) {
- memcpy(destination + offset, source + offset,
- subSample.numBytesOfClearData);
- offset += subSample.numBytesOfClearData;
- }
-
- if (subSample.numBytesOfEncryptedData > 0) {
- AES_ctr128_encrypt(source + offset, destination + offset,
- subSample.numBytesOfEncryptedData, &opensslKey,
- opensslIv, previousEncryptedCounter,
- &blockOffset);
- offset += subSample.numBytesOfEncryptedData;
- }
- }
-
- *bytesDecryptedOut = offset;
- return Status::OK;
-}
-
-} // namespace clearkey
-} // namespace V1_4
-} // namespace drm
-} // namespace hardware
-} // namespace android
-
diff --git a/drm/mediadrm/plugins/clearkey/hidl/Android.bp b/drm/mediadrm/plugins/clearkey/hidl/Android.bp
deleted file mode 100644
index b82d996..0000000
--- a/drm/mediadrm/plugins/clearkey/hidl/Android.bp
+++ /dev/null
@@ -1,167 +0,0 @@
-//
-// Copyright (C) 2018 The Android Open Source Project
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-//
-
-// *** THIS PACKAGE HAS SPECIAL LICENSING CONDITIONS. PLEASE
-// CONSULT THE OWNERS AND opensource-licensing@google.com BEFORE
-// DEPENDING ON IT IN YOUR PROJECT. ***
-package {
- // See: http://go/android-license-faq
- // A large-scale-change added 'default_applicable_licenses' to import
- // all of the 'license_kinds' from "frameworks_av_license"
- // to get the below license kinds:
- // SPDX-license-identifier-Apache-2.0
- // legacy_by_exception_only (by exception only)
- default_applicable_licenses: ["frameworks_av_license"],
-}
-
-cc_defaults {
- name: "clearkey_service_defaults",
- vendor: true,
-
- srcs: [
- "AesCtrDecryptor.cpp",
- "Base64.cpp",
- "Buffer.cpp",
- "CreatePluginFactories.cpp",
- "CryptoFactory.cpp",
- "CryptoPlugin.cpp",
- "DeviceFiles.cpp",
- "DrmFactory.cpp",
- "DrmPlugin.cpp",
- "InitDataParser.cpp",
- "JsonWebKey.cpp",
- "MemoryFileSystem.cpp",
- "Session.cpp",
- "SessionLibrary.cpp",
- ],
-
- relative_install_path: "hw",
-
- cflags: ["-Wall", "-Werror", "-Wthread-safety"],
-
- shared_libs: [
- "android.hardware.drm@1.0",
- "android.hardware.drm@1.1",
- "android.hardware.drm@1.2",
- "android.hardware.drm@1.3",
- "android.hardware.drm@1.4",
- "libbase",
- "libbinder",
- "libcrypto",
- "libhidlbase",
- "libhidlmemory",
- "liblog",
- "libprotobuf-cpp-lite",
- "libutils",
- ],
-
- static_libs: [
- "libclearkeycommon",
- "libclearkeydevicefiles-protos",
- "libjsmn",
- ],
-
- local_include_dirs: ["include"],
-
- export_static_lib_headers: ["libjsmn"],
-
- sanitize: {
- integer_overflow: true,
- },
-}
-cc_library_static {
- name: "libclearkeydevicefiles-protos",
- vendor: true,
-
- proto: {
- export_proto_headers: true,
- type: "lite",
- },
- srcs: ["protos/DeviceFiles.proto"],
-}
-
-cc_library {
- name: "libclearkeyhidl",
- defaults: ["clearkey_service_defaults"],
-}
-
-cc_binary {
- name: "android.hardware.drm@1.2-service.clearkey",
- defaults: ["clearkey_service_defaults"],
- srcs: ["service.cpp"],
- init_rc: ["android.hardware.drm@1.2-service.clearkey.rc"],
- vintf_fragments: ["manifest_android.hardware.drm@1.2-service.clearkey.xml"],
-}
-
-cc_binary {
- name: "android.hardware.drm@1.2-service-lazy.clearkey",
- overrides: ["android.hardware.drm@1.2-service.clearkey"],
- defaults: ["clearkey_service_defaults"],
- srcs: ["serviceLazy.cpp"],
- init_rc: ["android.hardware.drm@1.2-service-lazy.clearkey.rc"],
- vintf_fragments: ["manifest_android.hardware.drm@1.2-service.clearkey.xml"],
-}
-
-cc_binary {
- name: "android.hardware.drm@1.4-service.clearkey",
- defaults: ["clearkey_service_defaults"],
- srcs: ["service.cpp"],
- init_rc: ["android.hardware.drm@1.4-service.clearkey.rc"],
- vintf_fragments: ["manifest_android.hardware.drm@1.4-service.clearkey.xml"],
-}
-
-cc_binary {
- name: "android.hardware.drm@1.4-service-lazy.clearkey",
- overrides: ["android.hardware.drm@1.4-service.clearkey"],
- defaults: ["clearkey_service_defaults"],
- srcs: ["serviceLazy.cpp"],
- init_rc: ["android.hardware.drm@1.4-service-lazy.clearkey.rc"],
- vintf_fragments: ["manifest_android.hardware.drm@1.4-service.clearkey.xml"],
-}
-
-cc_fuzz {
- name: "clearkeyV1.4_fuzzer",
- vendor: true,
- srcs: [
- "fuzzer/clearkeyV1.4_fuzzer.cpp",
- ],
- static_libs: [
- "libclearkeyhidl",
- "libclearkeycommon",
- "libclearkeydevicefiles-protos",
- "libjsmn",
- "libprotobuf-cpp-lite",
- ],
- shared_libs: [
- "android.hidl.allocator@1.0",
- "android.hardware.drm@1.0",
- "android.hardware.drm@1.1",
- "android.hardware.drm@1.2",
- "android.hardware.drm@1.3",
- "android.hardware.drm@1.4",
- "libcrypto",
- "libhidlbase",
- "libhidlmemory",
- "liblog",
- "libutils",
- ],
- fuzz_config: {
- cc: [
- "android-media-fuzzing-reports@google.com",
- ],
- componentid: 155276,
- },
-}
diff --git a/drm/mediadrm/plugins/clearkey/hidl/Base64.cpp b/drm/mediadrm/plugins/clearkey/hidl/Base64.cpp
deleted file mode 100644
index d81f875..0000000
--- a/drm/mediadrm/plugins/clearkey/hidl/Base64.cpp
+++ /dev/null
@@ -1,175 +0,0 @@
-/*
- * Copyright (C) 2018 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#include "Base64.h"
-
-#include <string>
-
-namespace android {
-namespace hardware {
-namespace drm {
-namespace V1_4 {
-namespace clearkey {
-
-sp<Buffer> decodeBase64(const std::string &s) {
- size_t n = s.size();
-
- if ((n % 4) != 0) {
- return nullptr;
- }
-
- size_t padding = 0;
- if (n >= 1 && s.c_str()[n - 1] == '=') {
- padding = 1;
-
- if (n >= 2 && s.c_str()[n - 2] == '=') {
- padding = 2;
-
- if (n >= 3 && s.c_str()[n - 3] == '=') {
- padding = 3;
- }
- }
- }
-
- // We divide first to avoid overflow. It's OK to do this because we
- // already made sure that n % 4 == 0.
- size_t outLen = (n / 4) * 3 - padding;
-
- sp<Buffer> buffer = new Buffer(outLen);
- uint8_t *out = buffer->data();
- if (out == nullptr || buffer->size() < outLen) {
- return nullptr;
- }
-
- size_t j = 0;
- uint32_t accum = 0;
- for (size_t i = 0; i < n; ++i) {
- char c = s.c_str()[i];
- unsigned value;
- if (c >= 'A' && c <= 'Z') {
- value = c - 'A';
- } else if (c >= 'a' && c <= 'z') {
- value = 26 + c - 'a';
- } else if (c >= '0' && c <= '9') {
- value = 52 + c - '0';
- } else if (c == '+' || c == '-') {
- value = 62;
- } else if (c == '/' || c == '_') {
- value = 63;
- } else if (c != '=') {
- return nullptr;
- } else {
- if (i < n - padding) {
- return nullptr;
- }
-
- value = 0;
- }
-
- accum = (accum << 6) | value;
-
- if (((i + 1) % 4) == 0) {
- if (j < outLen) { out[j++] = (accum >> 16); }
- if (j < outLen) { out[j++] = (accum >> 8) & 0xff; }
- if (j < outLen) { out[j++] = accum & 0xff; }
-
- accum = 0;
- }
- }
-
- return buffer;
-}
-
-static char encode6Bit(unsigned x) {
- if (x <= 25) {
- return 'A' + x;
- } else if (x <= 51) {
- return 'a' + x - 26;
- } else if (x <= 61) {
- return '0' + x - 52;
- } else if (x == 62) {
- return '+';
- } else {
- return '/';
- }
-}
-
-void encodeBase64(const void *_data, size_t size, std::string *out) {
- out->clear();
-
- const uint8_t *data = (const uint8_t *)_data;
-
- size_t i;
- for (i = 0; i < (size / 3) * 3; i += 3) {
- uint8_t x1 = data[i];
- uint8_t x2 = data[i + 1];
- uint8_t x3 = data[i + 2];
-
- out->push_back(encode6Bit(x1 >> 2));
- out->push_back(encode6Bit((x1 << 4 | x2 >> 4) & 0x3f));
- out->push_back(encode6Bit((x2 << 2 | x3 >> 6) & 0x3f));
- out->push_back(encode6Bit(x3 & 0x3f));
- }
- switch (size % 3) {
- case 0:
- break;
- case 2:
- {
- uint8_t x1 = data[i];
- uint8_t x2 = data[i + 1];
- out->push_back(encode6Bit(x1 >> 2));
- out->push_back(encode6Bit((x1 << 4 | x2 >> 4) & 0x3f));
- out->push_back(encode6Bit((x2 << 2) & 0x3f));
- out->push_back('=');
- break;
- }
- default:
- {
- uint8_t x1 = data[i];
- out->push_back(encode6Bit(x1 >> 2));
- out->push_back(encode6Bit((x1 << 4) & 0x3f));
- out->append("==");
- break;
- }
- }
-}
-
-void encodeBase64Url(const void *_data, size_t size, std::string *out) {
- encodeBase64(_data, size, out);
-
- if ((std::string::npos != out->find("+")) ||
- (std::string::npos != out->find("/"))) {
- size_t outLen = out->size();
- char *base64url = new char[outLen];
- for (size_t i = 0; i < outLen; ++i) {
- if (out->c_str()[i] == '+')
- base64url[i] = '-';
- else if (out->c_str()[i] == '/')
- base64url[i] = '_';
- else
- base64url[i] = out->c_str()[i];
- }
-
- out->assign(base64url, outLen);
- delete[] base64url;
- }
-}
-
-} // namespace clearkey
-} // namespace V1_4
-} // namespace drm
-} // namespace hardware
-} // namespace android
diff --git a/drm/mediadrm/plugins/clearkey/hidl/Buffer.cpp b/drm/mediadrm/plugins/clearkey/hidl/Buffer.cpp
deleted file mode 100644
index dcb76f4..0000000
--- a/drm/mediadrm/plugins/clearkey/hidl/Buffer.cpp
+++ /dev/null
@@ -1,53 +0,0 @@
-/*
- * Copyright (C) 2018 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#include "Buffer.h"
-
-#include <android/hardware/drm/1.0/types.h>
-
-namespace android {
-namespace hardware {
-namespace drm {
-namespace V1_4 {
-namespace clearkey {
-
-Buffer::Buffer(size_t capacity)
- : mRangeOffset(0),
- mOwnsData(true) {
- mData = malloc(capacity);
- if (mData == nullptr) {
- mCapacity = 0;
- mRangeLength = 0;
- } else {
- mCapacity = capacity;
- mRangeLength = capacity;
- }
-}
-
-Buffer::~Buffer() {
- if (mOwnsData) {
- if (mData != nullptr) {
- free(mData);
- mData = nullptr;
- }
- }
-}
-
-} // namespace clearkey
-} // namespace V1_4
-} // namespace drm
-} // namespace hardware
-} // namespace android
diff --git a/drm/mediadrm/plugins/clearkey/hidl/CreatePluginFactories.cpp b/drm/mediadrm/plugins/clearkey/hidl/CreatePluginFactories.cpp
deleted file mode 100644
index 4ab33d3..0000000
--- a/drm/mediadrm/plugins/clearkey/hidl/CreatePluginFactories.cpp
+++ /dev/null
@@ -1,44 +0,0 @@
-/*
- * Copyright (C) 2018 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#include "CreatePluginFactories.h"
-
-#include "CryptoFactory.h"
-#include "DrmFactory.h"
-
-namespace android {
-namespace hardware {
-namespace drm {
-namespace V1_4 {
-namespace clearkey {
-
-extern "C" {
-
-IDrmFactory* createDrmFactory() {
- return new DrmFactory();
-}
-
-ICryptoFactory* createCryptoFactory() {
- return new CryptoFactory();
-}
-
-} // extern "C"
-
-} // namespace clearkey
-} // namespace V1_4
-} // namespace drm
-} // namespace hardware
-} // namespace android
diff --git a/drm/mediadrm/plugins/clearkey/hidl/CryptoFactory.cpp b/drm/mediadrm/plugins/clearkey/hidl/CryptoFactory.cpp
deleted file mode 100644
index 0bebc3b..0000000
--- a/drm/mediadrm/plugins/clearkey/hidl/CryptoFactory.cpp
+++ /dev/null
@@ -1,70 +0,0 @@
-/*
- * Copyright (C) 2018 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-//#define LOG_NDEBUG 0
-#define LOG_TAG "hidl_ClearKeyCryptoFactory"
-#include <utils/Log.h>
-
-#include "CryptoFactory.h"
-
-#include "ClearKeyUUID.h"
-#include "CryptoPlugin.h"
-#include "TypeConvert.h"
-
-namespace android {
-namespace hardware {
-namespace drm {
-namespace V1_4 {
-namespace clearkey {
-
-using ::android::hardware::drm::V1_0::Status;
-using ::android::hardware::drm::V1_4::clearkey::CryptoPlugin;
-
-Return<bool> CryptoFactory::isCryptoSchemeSupported(
- const hidl_array<uint8_t, 16> &uuid)
-{
- return clearkeydrm::isClearKeyUUID(uuid.data());
-}
-
-Return<void> CryptoFactory::createPlugin(
- const hidl_array<uint8_t, 16> &uuid,
- const hidl_vec<uint8_t> &initData,
- createPlugin_cb _hidl_cb) {
-
- if (!isCryptoSchemeSupported(uuid.data())) {
- ALOGE("Clearkey Drm HAL: failed to create clearkey plugin, " \
- "invalid crypto scheme");
- _hidl_cb(Status::BAD_VALUE, nullptr);
- return Void();
- }
-
- CryptoPlugin *cryptoPlugin = new CryptoPlugin(initData);
- Status status = cryptoPlugin->getInitStatus();
- if (status == Status::OK) {
- _hidl_cb(Status::OK, cryptoPlugin);
- } else {
- delete cryptoPlugin;
- _hidl_cb(status, nullptr);
- }
- return Void();
-}
-
-} // namespace clearkey
-} // namespace V1_4
-} // namespace drm
-} // namespace hardware
-} // namespace android
-
diff --git a/drm/mediadrm/plugins/clearkey/hidl/CryptoPlugin.cpp b/drm/mediadrm/plugins/clearkey/hidl/CryptoPlugin.cpp
deleted file mode 100644
index 7bc320d..0000000
--- a/drm/mediadrm/plugins/clearkey/hidl/CryptoPlugin.cpp
+++ /dev/null
@@ -1,262 +0,0 @@
-/*
- * Copyright (C) 2018 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-//#define LOG_NDEBUG 0
-#define LOG_TAG "hidl_ClearKeyCryptoPlugin"
-#include <utils/Log.h>
-
-#include "CryptoPlugin.h"
-#include "SessionLibrary.h"
-#include "TypeConvert.h"
-
-#include <hidlmemory/mapping.h>
-
-namespace android {
-namespace hardware {
-namespace drm {
-namespace V1_4 {
-namespace clearkey {
-
-using ::android::hardware::drm::V1_0::BufferType;
-
-Return<void> CryptoPlugin::setSharedBufferBase(
- const hidl_memory& base, uint32_t bufferId) {
- sp<IMemory> hidlMemory = mapMemory(base);
- ALOGE_IF(hidlMemory == nullptr, "mapMemory returns nullptr");
-
- std::lock_guard<std::mutex> shared_buffer_lock(mSharedBufferLock);
-
- // allow mapMemory to return nullptr
- mSharedBufferMap[bufferId] = hidlMemory;
- return Void();
-}
-
-Return<void> CryptoPlugin::decrypt(
- bool secure,
- const hidl_array<uint8_t, 16>& keyId,
- const hidl_array<uint8_t, 16>& iv,
- Mode mode,
- const Pattern& pattern,
- const hidl_vec<SubSample>& subSamples,
- const SharedBuffer& source,
- uint64_t offset,
- const DestinationBuffer& destination,
- decrypt_cb _hidl_cb) {
-
- Status status = Status::ERROR_DRM_UNKNOWN;
- hidl_string detailedError;
- uint32_t bytesWritten = 0;
-
- Return<void> hResult = decrypt_1_2(
- secure, keyId, iv, mode, pattern, subSamples, source, offset, destination,
- [&](Status_V1_2 hStatus, uint32_t hBytesWritten, hidl_string hDetailedError) {
- status = toStatus_1_0(hStatus);
- bytesWritten = hBytesWritten;
- detailedError = hDetailedError;
- }
- );
-
- status = hResult.isOk() ? status : Status::ERROR_DRM_CANNOT_HANDLE;
- _hidl_cb(status, bytesWritten, detailedError);
- return Void();
-}
-
-// Returns negative values for error code and positive values for the size of
-// decrypted data. In theory, the output size can be larger than the input
-// size, but in practice this will never happen for AES-CTR.
-Return<void> CryptoPlugin::decrypt_1_2(
- bool secure,
- const hidl_array<uint8_t, KEY_ID_SIZE>& keyId,
- const hidl_array<uint8_t, KEY_IV_SIZE>& iv,
- Mode mode,
- const Pattern& pattern,
- const hidl_vec<SubSample>& subSamples,
- const SharedBuffer& source,
- uint64_t offset,
- const DestinationBuffer& destination,
- decrypt_1_2_cb _hidl_cb) {
- UNUSED(pattern);
-
- if (secure) {
- _hidl_cb(Status_V1_2::ERROR_DRM_CANNOT_HANDLE, 0,
- "Secure decryption is not supported with ClearKey.");
- return Void();
- }
-
- std::unique_lock<std::mutex> shared_buffer_lock(mSharedBufferLock);
- if (mSharedBufferMap.find(source.bufferId) == mSharedBufferMap.end()) {
- _hidl_cb(Status_V1_2::ERROR_DRM_CANNOT_HANDLE, 0,
- "source decrypt buffer base not set");
- return Void();
- }
-
- if (destination.type == BufferType::SHARED_MEMORY) {
- const SharedBuffer& dest = destination.nonsecureMemory;
- if (mSharedBufferMap.find(dest.bufferId) == mSharedBufferMap.end()) {
- _hidl_cb(Status_V1_2::ERROR_DRM_CANNOT_HANDLE, 0,
- "destination decrypt buffer base not set");
- return Void();
- }
- } else {
- _hidl_cb(Status_V1_2::ERROR_DRM_CANNOT_HANDLE, 0,
- "destination type not supported");
- return Void();
- }
-
- sp<IMemory> sourceBase = mSharedBufferMap[source.bufferId];
- if (sourceBase == nullptr) {
- _hidl_cb(Status_V1_2::ERROR_DRM_CANNOT_HANDLE, 0, "source is a nullptr");
- return Void();
- }
-
- size_t totalSize = 0;
- if (__builtin_add_overflow(source.offset, offset, &totalSize) ||
- __builtin_add_overflow(totalSize, source.size, &totalSize) ||
- totalSize > sourceBase->getSize()) {
- android_errorWriteLog(0x534e4554, "176496160");
- _hidl_cb(Status_V1_2::ERROR_DRM_CANNOT_HANDLE, 0, "invalid buffer size");
- return Void();
- }
-
- uint8_t *base = static_cast<uint8_t *>
- (static_cast<void *>(sourceBase->getPointer()));
- uint8_t* srcPtr = static_cast<uint8_t *>(base + source.offset + offset);
- void* destPtr = NULL;
- // destination.type == BufferType::SHARED_MEMORY
- const SharedBuffer& destBuffer = destination.nonsecureMemory;
- sp<IMemory> destBase = mSharedBufferMap[destBuffer.bufferId];
- if (destBase == nullptr) {
- _hidl_cb(Status_V1_2::ERROR_DRM_CANNOT_HANDLE, 0, "destination is a nullptr");
- return Void();
- }
-
- base = static_cast<uint8_t *>(static_cast<void *>(destBase->getPointer()));
-
- totalSize = 0;
- if (__builtin_add_overflow(destBuffer.offset, destBuffer.size, &totalSize) ||
- totalSize > destBase->getSize()) {
- android_errorWriteLog(0x534e4554, "176444622");
- _hidl_cb(Status_V1_2::ERROR_DRM_FRAME_TOO_LARGE, 0, "invalid buffer size");
- return Void();
- }
- destPtr = static_cast<void*>(base + destination.nonsecureMemory.offset);
-
- // release mSharedBufferLock
- shared_buffer_lock.unlock();
-
- // Calculate the output buffer size and determine if any subsamples are
- // encrypted.
- size_t destSize = 0;
- size_t srcSize = 0;
- bool haveEncryptedSubsamples = false;
- for (size_t i = 0; i < subSamples.size(); i++) {
- const SubSample &subSample = subSamples[i];
- if (__builtin_add_overflow(destSize, subSample.numBytesOfClearData, &destSize) ||
- __builtin_add_overflow(srcSize, subSample.numBytesOfClearData, &srcSize)) {
- _hidl_cb(Status_V1_2::ERROR_DRM_FRAME_TOO_LARGE, 0, "subsample clear size overflow");
- return Void();
- }
- if (__builtin_add_overflow(destSize, subSample.numBytesOfEncryptedData, &destSize) ||
- __builtin_add_overflow(srcSize, subSample.numBytesOfEncryptedData, &srcSize)) {
- _hidl_cb(Status_V1_2::ERROR_DRM_FRAME_TOO_LARGE, 0, "subsample encrypted size overflow");
- return Void();
- }
- if (subSample.numBytesOfEncryptedData > 0) {
- haveEncryptedSubsamples = true;
- }
- }
-
- if (destSize > destBuffer.size || srcSize > source.size) {
- _hidl_cb(Status_V1_2::ERROR_DRM_FRAME_TOO_LARGE, 0, "subsample sum too large");
- return Void();
- }
-
- if (mode == Mode::UNENCRYPTED) {
- if (haveEncryptedSubsamples) {
- _hidl_cb(Status_V1_2::ERROR_DRM_CANNOT_HANDLE, 0,
- "Encrypted subsamples found in allegedly unencrypted data.");
- return Void();
- }
-
- size_t offset = 0;
- for (size_t i = 0; i < subSamples.size(); ++i) {
- const SubSample& subSample = subSamples[i];
- if (subSample.numBytesOfClearData != 0) {
- memcpy(reinterpret_cast<uint8_t*>(destPtr) + offset,
- reinterpret_cast<const uint8_t*>(srcPtr) + offset,
- subSample.numBytesOfClearData);
- offset += subSample.numBytesOfClearData;
- }
- }
-
- _hidl_cb(Status_V1_2::OK, static_cast<ssize_t>(offset), "");
- return Void();
- } else if (mode == Mode::AES_CTR) {
- size_t bytesDecrypted;
- if (keyId.size() != kBlockSize || iv.size() != kBlockSize) {
- android_errorWriteLog(0x534e4554, "244569759");
- _hidl_cb(Status_V1_2::ERROR_DRM_CANNOT_HANDLE, 0, "invalid decrypt parameter size");
- return Void();
- }
- Status_V1_2 res = mSession->decrypt(keyId.data(), iv.data(), srcPtr,
- static_cast<uint8_t*>(destPtr), toVector(subSamples), &bytesDecrypted);
- if (res == Status_V1_2::OK) {
- _hidl_cb(Status_V1_2::OK, static_cast<ssize_t>(bytesDecrypted), "");
- return Void();
- } else {
- _hidl_cb(res, 0, "Decryption Error");
- return Void();
- }
- } else {
- _hidl_cb(Status_V1_2::ERROR_DRM_CANNOT_HANDLE, 0,
- "Selected encryption mode is not supported by the ClearKey DRM Plugin.");
- return Void();
- }
-}
-
-Return<Status> CryptoPlugin::setMediaDrmSession(
- const hidl_vec<uint8_t>& sessionId) {
- if (!sessionId.size()) {
- mSession = nullptr;
- } else {
- mSession = SessionLibrary::get()->findSession(sessionId);
- if (!mSession.get()) {
- return Status::ERROR_DRM_SESSION_NOT_OPENED;
- }
- }
- return Status::OK;
-}
-
-Return<void> CryptoPlugin::getLogMessages(
- getLogMessages_cb _hidl_cb) {
- using std::chrono::duration_cast;
- using std::chrono::milliseconds;
- using std::chrono::system_clock;
-
- auto timeMillis = duration_cast<milliseconds>(
- system_clock::now().time_since_epoch()).count();
-
- std::vector<LogMessage> logs = {
- { timeMillis, LogPriority::ERROR, std::string("Not implemented") }};
- _hidl_cb(drm::V1_4::Status::OK, toHidlVec(logs));
- return Void();
-}
-
-} // namespace clearkey
-} // namespace V1_4.
-} // namespace drm
-} // namespace hardware
-} // namespace android
diff --git a/drm/mediadrm/plugins/clearkey/hidl/DeviceFiles.cpp b/drm/mediadrm/plugins/clearkey/hidl/DeviceFiles.cpp
deleted file mode 100644
index 0385d8f..0000000
--- a/drm/mediadrm/plugins/clearkey/hidl/DeviceFiles.cpp
+++ /dev/null
@@ -1,252 +0,0 @@
-// Copyright 2018 Google LLC. All Rights Reserved. This file and proprietary
-// source code may only be used and distributed under the Widevine Master
-// License Agreement.
-
-#include <utils/Log.h>
-
-#include <string>
-#include <sys/stat.h>
-
-#include "DeviceFiles.h"
-#include "Utils.h"
-
-#include <openssl/sha.h>
-
-// Protobuf generated classes.
-using android::hardware::drm::V1_2::clearkey::OfflineFile;
-using android::hardware::drm::V1_2::clearkey::HashedFile;
-using android::hardware::drm::V1_2::clearkey::License;
-using android::hardware::drm::V1_2::clearkey::License_LicenseState_ACTIVE;
-using android::hardware::drm::V1_2::clearkey::License_LicenseState_RELEASING;
-
-namespace {
-const char kLicenseFileNameExt[] = ".lic";
-
-bool Hash(const std::string& data, std::string* hash) {
- if (!hash) return false;
-
- hash->resize(SHA256_DIGEST_LENGTH);
-
- const unsigned char* input = reinterpret_cast<const unsigned char*>(data.data());
- unsigned char* output = reinterpret_cast<unsigned char*>(&(*hash)[0]);
- SHA256(input, data.size(), output);
- return true;
-}
-
-} // namespace
-
-namespace android {
-namespace hardware {
-namespace drm {
-namespace V1_4 {
-namespace clearkey {
-
-bool DeviceFiles::StoreLicense(
- const std::string& keySetId, LicenseState state,
- const std::string& licenseResponse) {
-
- OfflineFile file;
- file.set_type(OfflineFile::LICENSE);
- file.set_version(OfflineFile::VERSION_1);
-
- License* license = file.mutable_license();
- switch (state) {
- case kLicenseStateActive:
- license->set_state(License_LicenseState_ACTIVE);
- license->set_license(licenseResponse);
- break;
- case kLicenseStateReleasing:
- license->set_state(License_LicenseState_RELEASING);
- license->set_license(licenseResponse);
- break;
- default:
- ALOGW("StoreLicense: Unknown license state: %u", state);
- return false;
- }
-
- std::string serializedFile;
- file.SerializeToString(&serializedFile);
-
- return StoreFileWithHash(keySetId + kLicenseFileNameExt, serializedFile);
-}
-
-bool DeviceFiles::StoreFileWithHash(const std::string& fileName,
- const std::string& serializedFile) {
- std::string hash;
- if (!Hash(serializedFile, &hash)) {
- ALOGE("StoreFileWithHash: Failed to compute hash");
- return false;
- }
-
- HashedFile hashFile;
- hashFile.set_file(serializedFile);
- hashFile.set_hash(hash);
-
- std::string serializedHashFile;
- hashFile.SerializeToString(&serializedHashFile);
-
- return StoreFileRaw(fileName, serializedHashFile);
-}
-
-bool DeviceFiles::StoreFileRaw(const std::string& fileName, const std::string& serializedHashFile) {
- MemoryFileSystem::MemoryFile memFile;
- memFile.setFileName(fileName);
- memFile.setContent(serializedHashFile);
- memFile.setFileSize(serializedHashFile.size());
- size_t len = mFileHandle.Write(fileName, memFile);
-
- if (len != static_cast<size_t>(serializedHashFile.size())) {
- ALOGE("StoreFileRaw: Failed to write %s", fileName.c_str());
- ALOGD("StoreFileRaw: expected=%zd, actual=%zu", serializedHashFile.size(), len);
- return false;
- }
-
- ALOGD("StoreFileRaw: wrote %zu bytes to %s", serializedHashFile.size(), fileName.c_str());
- return true;
-}
-
-bool DeviceFiles::RetrieveLicense(
- const std::string& keySetId, LicenseState* state, std::string* offlineLicense) {
-
- OfflineFile file;
- if (!RetrieveHashedFile(keySetId + kLicenseFileNameExt, &file)) {
- return false;
- }
-
- if (file.type() != OfflineFile::LICENSE) {
- ALOGE("RetrieveLicense: Invalid file type");
- return false;
- }
-
- if (file.version() != OfflineFile::VERSION_1) {
- ALOGE("RetrieveLicense: Invalid file version");
- return false;
- }
-
- if (!file.has_license()) {
- ALOGE("RetrieveLicense: License not present");
- return false;
- }
-
- License license = file.license();
- switch (license.state()) {
- case License_LicenseState_ACTIVE:
- *state = kLicenseStateActive;
- break;
- case License_LicenseState_RELEASING:
- *state = kLicenseStateReleasing;
- break;
- default:
- ALOGW("RetrieveLicense: Unrecognized license state: %u",
- kLicenseStateUnknown);
- *state = kLicenseStateUnknown;
- break;
- }
- *offlineLicense = license.license();
- return true;
-}
-
-bool DeviceFiles::DeleteLicense(const std::string& keySetId) {
- return mFileHandle.RemoveFile(keySetId + kLicenseFileNameExt);
-}
-
-bool DeviceFiles::DeleteAllLicenses() {
- return mFileHandle.RemoveAllFiles();
-}
-
-bool DeviceFiles::LicenseExists(const std::string& keySetId) {
- return mFileHandle.FileExists(keySetId + kLicenseFileNameExt);
-}
-
-std::vector<std::string> DeviceFiles::ListLicenses() const {
- std::vector<std::string> licenses = mFileHandle.ListFiles();
- for (size_t i = 0; i < licenses.size(); i++) {
- std::string& license = licenses[i];
- license = license.substr(0, license.size() - strlen(kLicenseFileNameExt));
- }
- return licenses;
-}
-
-bool DeviceFiles::RetrieveHashedFile(const std::string& fileName, OfflineFile* deSerializedFile) {
- if (!deSerializedFile) {
- ALOGE("RetrieveHashedFile: invalid file parameter");
- return false;
- }
-
- if (!FileExists(fileName)) {
- ALOGE("RetrieveHashedFile: %s does not exist", fileName.c_str());
- return false;
- }
-
- ssize_t bytes = GetFileSize(fileName);
- if (bytes <= 0) {
- ALOGE("RetrieveHashedFile: invalid file size: %s", fileName.c_str());
- // Remove the corrupted file so the caller will not get the same error
- // when trying to access the file repeatedly, causing the system to stall.
- RemoveFile(fileName);
- return false;
- }
-
- std::string serializedHashFile;
- serializedHashFile.resize(bytes);
- bytes = mFileHandle.Read(fileName, &serializedHashFile);
-
- if (bytes != static_cast<ssize_t>(serializedHashFile.size())) {
- ALOGE("RetrieveHashedFile: Failed to read from %s", fileName.c_str());
- ALOGV("RetrieveHashedFile: expected: %zd, actual: %zd", serializedHashFile.size(), bytes);
- // Remove the corrupted file so the caller will not get the same error
- // when trying to access the file repeatedly, causing the system to stall.
- RemoveFile(fileName);
- return false;
- }
-
- ALOGV("RetrieveHashedFile: read %zd from %s", bytes, fileName.c_str());
-
- HashedFile hashFile;
- if (!hashFile.ParseFromString(serializedHashFile)) {
- ALOGE("RetrieveHashedFile: Unable to parse hash file");
- // Remove corrupt file.
- RemoveFile(fileName);
- return false;
- }
-
- std::string hash;
- if (!Hash(hashFile.file(), &hash)) {
- ALOGE("RetrieveHashedFile: Hash computation failed");
- return false;
- }
-
- if (hash != hashFile.hash()) {
- ALOGE("RetrieveHashedFile: Hash mismatch");
- // Remove corrupt file.
- RemoveFile(fileName);
- return false;
- }
-
- if (!deSerializedFile->ParseFromString(hashFile.file())) {
- ALOGE("RetrieveHashedFile: Unable to parse file");
- // Remove corrupt file.
- RemoveFile(fileName);
- return false;
- }
-
- return true;
-}
-
-bool DeviceFiles::FileExists(const std::string& fileName) const {
- return mFileHandle.FileExists(fileName);
-}
-
-bool DeviceFiles::RemoveFile(const std::string& fileName) {
- return mFileHandle.RemoveFile(fileName);
-}
-
-ssize_t DeviceFiles::GetFileSize(const std::string& fileName) const {
- return mFileHandle.GetFileSize(fileName);
-}
-
-} // namespace clearkey
-} // namespace V1_4
-} // namespace drm
-} // namespace hardware
-} // namespace android
diff --git a/drm/mediadrm/plugins/clearkey/hidl/DrmFactory.cpp b/drm/mediadrm/plugins/clearkey/hidl/DrmFactory.cpp
deleted file mode 100644
index 14cb5c1..0000000
--- a/drm/mediadrm/plugins/clearkey/hidl/DrmFactory.cpp
+++ /dev/null
@@ -1,111 +0,0 @@
-/*
- * Copyright (C) 2018 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-//#define LOG_NDEBUG 0
-#include <vector>
-#define LOG_TAG "hidl_ClearKeyDrmFactory"
-#include <utils/Log.h>
-
-#include <utils/Errors.h>
-
-#include "DrmFactory.h"
-
-#include "DrmPlugin.h"
-#include "ClearKeyUUID.h"
-#include "MimeType.h"
-#include "SessionLibrary.h"
-
-namespace android {
-namespace hardware {
-namespace drm {
-namespace V1_4 {
-namespace clearkey {
-
-using ::android::hardware::drm::V1_0::Status;
-using ::android::hardware::drm::V1_1::SecurityLevel;
-using ::android::hardware::drm::V1_4::clearkey::DrmPlugin;
-using ::android::hardware::drm::V1_4::clearkey::SessionLibrary;
-using ::android::hardware::Void;
-
-Return<bool> DrmFactory::isCryptoSchemeSupported(
- const hidl_array<uint8_t, 16>& uuid) {
- return clearkeydrm::isClearKeyUUID(uuid.data());
-}
-
-Return<bool> DrmFactory::isCryptoSchemeSupported_1_2(const hidl_array<uint8_t, 16>& uuid,
- const hidl_string &mimeType,
- SecurityLevel level) {
- return isCryptoSchemeSupported(uuid) && isContentTypeSupported(mimeType) &&
- level == SecurityLevel::SW_SECURE_CRYPTO;
-}
-
-Return<bool> DrmFactory::isContentTypeSupported(const hidl_string &mimeType) {
- // This should match the mimeTypes handed by InitDataParser.
- return mimeType == kIsoBmffVideoMimeType ||
- mimeType == kIsoBmffAudioMimeType ||
- mimeType == kCencInitDataFormat ||
- mimeType == kWebmVideoMimeType ||
- mimeType == kWebmAudioMimeType ||
- mimeType == kWebmInitDataFormat;
-}
-
-Return<void> DrmFactory::createPlugin(
- const hidl_array<uint8_t, 16>& uuid,
- const hidl_string& appPackageName,
- createPlugin_cb _hidl_cb) {
- UNUSED(appPackageName);
-
- DrmPlugin *plugin = NULL;
- if (!isCryptoSchemeSupported(uuid.data())) {
- ALOGE("Clear key Drm HAL: failed to create drm plugin, " \
- "invalid crypto scheme");
- _hidl_cb(Status::BAD_VALUE, plugin);
- return Void();
- }
-
- plugin = new DrmPlugin(SessionLibrary::get());
- _hidl_cb(Status::OK, plugin);
- return Void();
-}
-
-Return<void> DrmFactory::getSupportedCryptoSchemes(
- getSupportedCryptoSchemes_cb _hidl_cb) {
- std::vector<hidl_array<uint8_t, 16>> schemes;
- for (const auto &scheme : clearkeydrm::getSupportedCryptoSchemes()) {
- schemes.push_back(scheme);
- }
- _hidl_cb(schemes);
- return Void();
-}
-
-Return<void> DrmFactory::debug(const hidl_handle& fd, const hidl_vec<hidl_string>& /*args*/) {
- if (fd.getNativeHandle() == nullptr || fd->numFds < 1) {
- ALOGE("%s: missing fd for writing", __FUNCTION__);
- return Void();
- }
-
- FILE* out = fdopen(dup(fd->data[0]), "w");
- uint32_t currentSessions = SessionLibrary::get()->numOpenSessions();
- fprintf(out, "current open sessions: %u\n", currentSessions);
- fclose(out);
- return Void();
-}
-
-} // namespace clearkey
-} // namespace V1_4
-} // namespace drm
-} // namespace hardware
-} // namespace android
diff --git a/drm/mediadrm/plugins/clearkey/hidl/DrmPlugin.cpp b/drm/mediadrm/plugins/clearkey/hidl/DrmPlugin.cpp
deleted file mode 100644
index e04dd7e..0000000
--- a/drm/mediadrm/plugins/clearkey/hidl/DrmPlugin.cpp
+++ /dev/null
@@ -1,964 +0,0 @@
-/*
- * Copyright (C) 2018 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-//#define LOG_NDEBUG 0
-#define LOG_TAG "hidl_ClearKeyPlugin"
-#include <utils/Log.h>
-
-#include <chrono>
-#include <stdio.h>
-#include <inttypes.h>
-
-#include "DrmPlugin.h"
-#include "ClearKeyDrmProperties.h"
-#include "Session.h"
-#include "TypeConvert.h"
-#include "Utils.h"
-
-namespace {
-const std::string kKeySetIdPrefix("ckid");
-const int kKeySetIdLength = 16;
-const int kSecureStopIdStart = 100;
-const std::string kOfflineLicense("\"type\":\"persistent-license\"");
-const std::string kStreaming("Streaming");
-const std::string kTemporaryLicense("\"type\":\"temporary\"");
-const std::string kTrue("True");
-
-const std::string kQueryKeyLicenseType("LicenseType");
- // Value: "Streaming" or "Offline"
-const std::string kQueryKeyPlayAllowed("PlayAllowed");
- // Value: "True" or "False"
-const std::string kQueryKeyRenewAllowed("RenewAllowed");
- // Value: "True" or "False"
-
-const int kSecureStopIdSize = 10;
-
-std::vector<uint8_t> uint32ToVector(uint32_t value) {
- // 10 bytes to display max value 4294967295 + one byte null terminator
- char buffer[kSecureStopIdSize];
- memset(buffer, 0, kSecureStopIdSize);
- snprintf(buffer, kSecureStopIdSize, "%" PRIu32, value);
- return std::vector<uint8_t>(buffer, buffer + sizeof(buffer));
-}
-
-}; // unnamed namespace
-
-namespace android {
-namespace hardware {
-namespace drm {
-namespace V1_4 {
-namespace clearkey {
-
-KeyRequestType toKeyRequestType_V1_0(KeyRequestType_V1_1 keyRequestType) {
- switch (keyRequestType) {
- case KeyRequestType_V1_1::NONE:
- case KeyRequestType_V1_1::UPDATE:
- return KeyRequestType::UNKNOWN;
- default:
- return static_cast<KeyRequestType>(keyRequestType);
- }
-}
-
-DrmPlugin::DrmPlugin(SessionLibrary* sessionLibrary)
- : mSessionLibrary(sessionLibrary),
- mOpenSessionOkCount(0),
- mCloseSessionOkCount(0),
- mCloseSessionNotOpenedCount(0),
- mNextSecureStopId(kSecureStopIdStart),
- mMockError(Status_V1_2::OK) {
- mPlayPolicy.clear();
- initProperties();
- mSecureStops.clear();
- mReleaseKeysMap.clear();
- std::srand(std::time(nullptr));
-}
-
-void DrmPlugin::initProperties() {
- mStringProperties.clear();
- mStringProperties[kVendorKey] = kVendorValue;
- mStringProperties[kVersionKey] = kVersionValue;
- mStringProperties[kPluginDescriptionKey] = kPluginDescriptionValue;
- mStringProperties[kAlgorithmsKey] = kAlgorithmsValue;
- mStringProperties[kListenerTestSupportKey] = kListenerTestSupportValue;
- mStringProperties[kDrmErrorTestKey] = kDrmErrorTestValue;
-
- std::vector<uint8_t> valueVector;
- valueVector.clear();
- valueVector.insert(valueVector.end(),
- kTestDeviceIdData, kTestDeviceIdData + sizeof(kTestDeviceIdData) / sizeof(uint8_t));
- mByteArrayProperties[kDeviceIdKey] = valueVector;
-
- valueVector.clear();
- valueVector.insert(valueVector.end(),
- kMetricsData, kMetricsData + sizeof(kMetricsData) / sizeof(uint8_t));
- mByteArrayProperties[kMetricsKey] = valueVector;
-}
-
-// The secure stop in ClearKey implementation is not installed securely.
-// This function merely creates a test environment for testing secure stops APIs.
-// The content in this secure stop is implementation dependent, the clearkey
-// secureStop does not serve as a reference implementation.
-void DrmPlugin::installSecureStop(const hidl_vec<uint8_t>& sessionId) {
- Mutex::Autolock lock(mSecureStopLock);
-
- ClearkeySecureStop clearkeySecureStop;
- clearkeySecureStop.id = uint32ToVector(++mNextSecureStopId);
- clearkeySecureStop.data.assign(sessionId.begin(), sessionId.end());
-
- mSecureStops.insert(std::pair<std::vector<uint8_t>, ClearkeySecureStop>(
- clearkeySecureStop.id, clearkeySecureStop));
-}
-
-Return<void> DrmPlugin::openSession(openSession_cb _hidl_cb) {
- sp<Session> session = mSessionLibrary->createSession();
- processMockError(session);
- std::vector<uint8_t> sessionId = session->sessionId();
-
- Status status = setSecurityLevel(sessionId, SecurityLevel::SW_SECURE_CRYPTO);
- _hidl_cb(status, toHidlVec(sessionId));
- mOpenSessionOkCount++;
- return Void();
-}
-
-Return<void> DrmPlugin::openSession_1_1(SecurityLevel securityLevel,
- openSession_1_1_cb _hidl_cb) {
- sp<Session> session = mSessionLibrary->createSession();
- processMockError(session);
- std::vector<uint8_t> sessionId = session->sessionId();
-
- Status status = setSecurityLevel(sessionId, securityLevel);
- if (status == Status::OK) {
- mOpenSessionOkCount++;
- } else {
- mSessionLibrary->destroySession(session);
- sessionId.clear();
- }
- _hidl_cb(status, toHidlVec(sessionId));
- return Void();
-}
-
-Return<Status> DrmPlugin::closeSession(const hidl_vec<uint8_t>& sessionId) {
- if (sessionId.size() == 0) {
- return Status::BAD_VALUE;
- }
-
- sp<Session> session = mSessionLibrary->findSession(toVector(sessionId));
- if (session.get()) {
- mSessionLibrary->destroySession(session);
- if (session->getMockError() != Status_V1_2::OK) {
- sendSessionLostState(sessionId);
- return Status::ERROR_DRM_INVALID_STATE;
- }
- mCloseSessionOkCount++;
- return Status::OK;
- }
- mCloseSessionNotOpenedCount++;
- return Status::ERROR_DRM_SESSION_NOT_OPENED;
-}
-
-Status_V1_2 DrmPlugin::getKeyRequestCommon(const hidl_vec<uint8_t>& scope,
- const hidl_vec<uint8_t>& initData,
- const hidl_string& mimeType,
- KeyType keyType,
- const hidl_vec<KeyValue>& optionalParameters,
- std::vector<uint8_t> *request,
- KeyRequestType_V1_1 *keyRequestType,
- std::string *defaultUrl) {
- UNUSED(optionalParameters);
-
- // GetKeyRequestOfflineKeyTypeNotSupported() in vts 1.0 and 1.1 expects
- // KeyType::OFFLINE to return ERROR_DRM_CANNOT_HANDLE in clearkey plugin.
- // Those tests pass in an empty initData, we use the empty initData to
- // signal such specific use case.
- if (keyType == KeyType::OFFLINE && 0 == initData.size()) {
- return Status_V1_2::ERROR_DRM_CANNOT_HANDLE;
- }
-
- *defaultUrl = "https://default.url";
- *keyRequestType = KeyRequestType_V1_1::UNKNOWN;
- *request = std::vector<uint8_t>();
-
- if (scope.size() == 0 ||
- (keyType != KeyType::STREAMING &&
- keyType != KeyType::OFFLINE &&
- keyType != KeyType::RELEASE)) {
- return Status_V1_2::BAD_VALUE;
- }
-
- const std::vector<uint8_t> scopeId = toVector(scope);
- sp<Session> session;
- if (keyType == KeyType::STREAMING || keyType == KeyType::OFFLINE) {
- std::vector<uint8_t> sessionId(scopeId.begin(), scopeId.end());
- session = mSessionLibrary->findSession(sessionId);
- if (!session.get()) {
- return Status_V1_2::ERROR_DRM_SESSION_NOT_OPENED;
- } else if (session->getMockError() != Status_V1_2::OK) {
- return session->getMockError();
- }
-
- *keyRequestType = KeyRequestType_V1_1::INITIAL;
- }
-
- Status_V1_2 status = static_cast<Status_V1_2>(
- session->getKeyRequest(initData, mimeType, keyType, request));
-
- if (keyType == KeyType::RELEASE) {
- std::vector<uint8_t> keySetId(scopeId.begin(), scopeId.end());
- std::string requestString(request->begin(), request->end());
- if (requestString.find(kOfflineLicense) != std::string::npos) {
- std::string emptyResponse;
- std::string keySetIdString(keySetId.begin(), keySetId.end());
- if (!mFileHandle.StoreLicense(keySetIdString,
- DeviceFiles::kLicenseStateReleasing,
- emptyResponse)) {
- ALOGE("Problem releasing offline license");
- return Status_V1_2::ERROR_DRM_UNKNOWN;
- }
- if (mReleaseKeysMap.find(keySetIdString) == mReleaseKeysMap.end()) {
- sp<Session> session = mSessionLibrary->createSession();
- mReleaseKeysMap[keySetIdString] = session->sessionId();
- } else {
- ALOGI("key is in use, ignore release request");
- }
- } else {
- ALOGE("Offline license not found, nothing to release");
- }
- *keyRequestType = KeyRequestType_V1_1::RELEASE;
- }
- return status;
-}
-
-Return<void> DrmPlugin::getKeyRequest(
- const hidl_vec<uint8_t>& scope,
- const hidl_vec<uint8_t>& initData,
- const hidl_string& mimeType,
- KeyType keyType,
- const hidl_vec<KeyValue>& optionalParameters,
- getKeyRequest_cb _hidl_cb) {
- UNUSED(optionalParameters);
-
- KeyRequestType_V1_1 keyRequestType = KeyRequestType_V1_1::UNKNOWN;
- std::string defaultUrl("");
- std::vector<uint8_t> request;
- Status_V1_2 status = getKeyRequestCommon(
- scope, initData, mimeType, keyType, optionalParameters,
- &request, &keyRequestType, &defaultUrl);
-
- _hidl_cb(toStatus_1_0(status), toHidlVec(request),
- toKeyRequestType_V1_0(keyRequestType),
- hidl_string(defaultUrl));
- return Void();
-}
-
-Return<void> DrmPlugin::getKeyRequest_1_1(
- const hidl_vec<uint8_t>& scope,
- const hidl_vec<uint8_t>& initData,
- const hidl_string& mimeType,
- KeyType keyType,
- const hidl_vec<KeyValue>& optionalParameters,
- getKeyRequest_1_1_cb _hidl_cb) {
- UNUSED(optionalParameters);
-
- KeyRequestType_V1_1 keyRequestType = KeyRequestType_V1_1::UNKNOWN;
- std::string defaultUrl("");
- std::vector<uint8_t> request;
- Status_V1_2 status = getKeyRequestCommon(
- scope, initData, mimeType, keyType, optionalParameters,
- &request, &keyRequestType, &defaultUrl);
-
- _hidl_cb(toStatus_1_0(status), toHidlVec(request),
- keyRequestType, hidl_string(defaultUrl));
- return Void();
-}
-
-Return<void> DrmPlugin::getKeyRequest_1_2(
- const hidl_vec<uint8_t>& scope,
- const hidl_vec<uint8_t>& initData,
- const hidl_string& mimeType,
- KeyType keyType,
- const hidl_vec<KeyValue>& optionalParameters,
- getKeyRequest_1_2_cb _hidl_cb) {
- UNUSED(optionalParameters);
-
- KeyRequestType_V1_1 keyRequestType = KeyRequestType_V1_1::UNKNOWN;
- std::string defaultUrl("");
- std::vector<uint8_t> request;
- Status_V1_2 status = getKeyRequestCommon(
- scope, initData, mimeType, keyType, optionalParameters,
- &request, &keyRequestType, &defaultUrl);
-
- _hidl_cb(status, toHidlVec(request), keyRequestType, hidl_string(defaultUrl));
- return Void();
-}
-
-void DrmPlugin::setPlayPolicy() {
- android::Mutex::Autolock lock(mPlayPolicyLock);
- mPlayPolicy.clear();
-
- KeyValue policy;
- policy.key = kQueryKeyLicenseType;
- policy.value = kStreaming;
- mPlayPolicy.push_back(policy);
-
- policy.key = kQueryKeyPlayAllowed;
- policy.value = kTrue;
- mPlayPolicy.push_back(policy);
-
- policy.key = kQueryKeyRenewAllowed;
- mPlayPolicy.push_back(policy);
-}
-
-bool DrmPlugin::makeKeySetId(std::string* keySetId) {
- if (!keySetId) {
- ALOGE("keySetId destination not provided");
- return false;
- }
- std::vector<uint8_t> ksid(kKeySetIdPrefix.begin(), kKeySetIdPrefix.end());
- ksid.resize(kKeySetIdLength);
- std::vector<uint8_t> randomData((kKeySetIdLength - kKeySetIdPrefix.size()) / 2, 0);
-
- while (keySetId->empty()) {
- for (auto itr = randomData.begin(); itr != randomData.end(); ++itr) {
- *itr = std::rand() % 0xff;
- }
- *keySetId = kKeySetIdPrefix + ByteArrayToHexString(
- reinterpret_cast<const uint8_t*>(randomData.data()), randomData.size());
- if (mFileHandle.LicenseExists(*keySetId)) {
- // collision, regenerate
- ALOGV("Retry generating KeySetId");
- keySetId->clear();
- }
- }
- return true;
-}
-
-Return<void> DrmPlugin::provideKeyResponse(
- const hidl_vec<uint8_t>& scope,
- const hidl_vec<uint8_t>& response,
- provideKeyResponse_cb _hidl_cb) {
- if (scope.size() == 0 || response.size() == 0) {
- // Returns empty keySetId
- _hidl_cb(Status::BAD_VALUE, hidl_vec<uint8_t>());
- return Void();
- }
-
- std::string responseString(
- reinterpret_cast<const char*>(response.data()), response.size());
- const std::vector<uint8_t> scopeId = toVector(scope);
- std::vector<uint8_t> sessionId;
- std::string keySetId;
-
- Status status = Status::OK;
- bool isOfflineLicense = responseString.find(kOfflineLicense) != std::string::npos;
- if (scopeId.size() < kKeySetIdPrefix.size()) {
- android_errorWriteLog(0x534e4554, "144507096");
- _hidl_cb(Status::ERROR_DRM_CANNOT_HANDLE, hidl_vec<uint8_t>());
- return Void();
- }
- bool isRelease = (memcmp(scopeId.data(), kKeySetIdPrefix.data(), kKeySetIdPrefix.size()) == 0);
- if (isRelease) {
- keySetId.assign(scopeId.begin(), scopeId.end());
-
- auto iter = mReleaseKeysMap.find(std::string(keySetId.begin(), keySetId.end()));
- if (iter != mReleaseKeysMap.end()) {
- sessionId.assign(iter->second.begin(), iter->second.end());
- }
- } else {
- sessionId.assign(scopeId.begin(), scopeId.end());
- // non offline license returns empty keySetId
- keySetId.clear();
- }
-
- sp<Session> session = mSessionLibrary->findSession(sessionId);
- if (!session.get()) {
- _hidl_cb(Status::ERROR_DRM_SESSION_NOT_OPENED, hidl_vec<uint8_t>());
- return Void();
- }
- setPlayPolicy();
-
- status = session->provideKeyResponse(response);
- if (status == Status::OK) {
- if (isOfflineLicense) {
- if (isRelease) {
- mFileHandle.DeleteLicense(keySetId);
- mSessionLibrary->destroySession(session);
- } else {
- if (!makeKeySetId(&keySetId)) {
- _hidl_cb(Status::ERROR_DRM_UNKNOWN, hidl_vec<uint8_t>());
- return Void();
- }
-
- bool ok = mFileHandle.StoreLicense(
- keySetId,
- DeviceFiles::kLicenseStateActive,
- std::string(response.begin(), response.end()));
- if (!ok) {
- ALOGE("Failed to store offline license");
- }
- }
- }
-
- // Test calling AMediaDrm listeners.
- sendEvent(EventType::VENDOR_DEFINED, sessionId, sessionId);
-
- sendExpirationUpdate(sessionId, 100);
-
- std::vector<KeyStatus_V1_2> keysStatus;
- KeyStatus_V1_2 keyStatus;
-
- std::vector<uint8_t> keyId1 = { 0xA, 0xB, 0xC };
- keyStatus.keyId = keyId1;
- keyStatus.type = V1_2::KeyStatusType::USABLE;
- keysStatus.push_back(keyStatus);
-
- std::vector<uint8_t> keyId2 = { 0xD, 0xE, 0xF };
- keyStatus.keyId = keyId2;
- keyStatus.type = V1_2::KeyStatusType::EXPIRED;
- keysStatus.push_back(keyStatus);
-
- std::vector<uint8_t> keyId3 = { 0x0, 0x1, 0x2 };
- keyStatus.keyId = keyId3;
- keyStatus.type = V1_2::KeyStatusType::USABLEINFUTURE;
- keysStatus.push_back(keyStatus);
-
- sendKeysChange_1_2(sessionId, keysStatus, true);
-
- installSecureStop(sessionId);
- } else {
- ALOGE("provideKeyResponse returns error=%d", status);
- }
-
- std::vector<uint8_t> keySetIdVec(keySetId.begin(), keySetId.end());
- _hidl_cb(status, toHidlVec(keySetIdVec));
- return Void();
-}
-
-Return<Status> DrmPlugin::restoreKeys(
- const hidl_vec<uint8_t>& sessionId, const hidl_vec<uint8_t>& keySetId) {
- if (sessionId.size() == 0 || keySetId.size() == 0) {
- return Status::BAD_VALUE;
- }
-
- DeviceFiles::LicenseState licenseState;
- std::string offlineLicense;
- Status status = Status::OK;
- if (!mFileHandle.RetrieveLicense(std::string(keySetId.begin(), keySetId.end()),
- &licenseState, &offlineLicense)) {
- ALOGE("Failed to restore offline license");
- return Status::ERROR_DRM_NO_LICENSE;
- }
-
- if (DeviceFiles::kLicenseStateUnknown == licenseState ||
- DeviceFiles::kLicenseStateReleasing == licenseState) {
- ALOGE("Invalid license state=%d", licenseState);
- return Status::ERROR_DRM_NO_LICENSE;
- }
-
- sp<Session> session = mSessionLibrary->findSession(toVector(sessionId));
- if (!session.get()) {
- return Status::ERROR_DRM_SESSION_NOT_OPENED;
- }
- status = session->provideKeyResponse(std::vector<uint8_t>(offlineLicense.begin(),
- offlineLicense.end()));
- if (status != Status::OK) {
- ALOGE("Failed to restore keys");
- }
- return status;
-}
-
-Return<void> DrmPlugin::getPropertyString(
- const hidl_string& propertyName, getPropertyString_cb _hidl_cb) {
- std::string name(propertyName.c_str());
- std::string value;
-
- if (name == kVendorKey) {
- value = mStringProperties[kVendorKey];
- } else if (name == kVersionKey) {
- value = mStringProperties[kVersionKey];
- } else if (name == kPluginDescriptionKey) {
- value = mStringProperties[kPluginDescriptionKey];
- } else if (name == kAlgorithmsKey) {
- value = mStringProperties[kAlgorithmsKey];
- } else if (name == kListenerTestSupportKey) {
- value = mStringProperties[kListenerTestSupportKey];
- } else if (name == kDrmErrorTestKey) {
- value = mStringProperties[kDrmErrorTestKey];
- } else {
- ALOGE("App requested unknown string property %s", name.c_str());
- _hidl_cb(Status::ERROR_DRM_CANNOT_HANDLE, "");
- return Void();
- }
- _hidl_cb(Status::OK, value.c_str());
- return Void();
-}
-
-Return<void> DrmPlugin::getPropertyByteArray(
- const hidl_string& propertyName, getPropertyByteArray_cb _hidl_cb) {
- std::map<std::string, std::vector<uint8_t> >::iterator itr =
- mByteArrayProperties.find(std::string(propertyName.c_str()));
- if (itr == mByteArrayProperties.end()) {
- ALOGE("App requested unknown property: %s", propertyName.c_str());
- _hidl_cb(Status::BAD_VALUE, std::vector<uint8_t>());
- return Void();
- }
- _hidl_cb(Status::OK, itr->second);
- return Void();
-
-}
-
-Return<Status> DrmPlugin::setPropertyString(
- const hidl_string& name, const hidl_string& value) {
- std::string immutableKeys;
- immutableKeys.append(kAlgorithmsKey + ",");
- immutableKeys.append(kPluginDescriptionKey + ",");
- immutableKeys.append(kVendorKey + ",");
- immutableKeys.append(kVersionKey + ",");
-
- std::string key = std::string(name.c_str());
- if (immutableKeys.find(key) != std::string::npos) {
- ALOGD("Cannot set immutable property: %s", key.c_str());
- return Status::BAD_VALUE;
- }
-
- std::map<std::string, std::string>::iterator itr =
- mStringProperties.find(key);
- if (itr == mStringProperties.end()) {
- ALOGE("Cannot set undefined property string, key=%s", key.c_str());
- return Status::BAD_VALUE;
- }
-
- if (name == kDrmErrorTestKey) {
- if (value == kResourceContentionValue) {
- mMockError = Status_V1_2::ERROR_DRM_RESOURCE_CONTENTION;
- } else if (value == kLostStateValue) {
- mMockError = Status_V1_2::ERROR_DRM_SESSION_LOST_STATE;
- } else if (value == kFrameTooLargeValue) {
- mMockError = Status_V1_2::ERROR_DRM_FRAME_TOO_LARGE;
- } else if (value == kInvalidStateValue) {
- mMockError = Status_V1_2::ERROR_DRM_INVALID_STATE;
- } else {
- mMockError = Status_V1_2::ERROR_DRM_UNKNOWN;
- }
- }
-
- mStringProperties[key] = std::string(value.c_str());
- return Status::OK;
-}
-
-Return<Status> DrmPlugin::setPropertyByteArray(
- const hidl_string& name, const hidl_vec<uint8_t>& value) {
- UNUSED(value);
- if (name == kDeviceIdKey) {
- ALOGD("Cannot set immutable property: %s", name.c_str());
- return Status::BAD_VALUE;
- } else if (name == kClientIdKey) {
- mByteArrayProperties[kClientIdKey] = toVector(value);
- return Status::OK;
- }
-
- // Setting of undefined properties is not supported
- ALOGE("Failed to set property byte array, key=%s", name.c_str());
- return Status::ERROR_DRM_CANNOT_HANDLE;
-}
-
-Return<void> DrmPlugin::queryKeyStatus(
- const hidl_vec<uint8_t>& sessionId,
- queryKeyStatus_cb _hidl_cb) {
- if (sessionId.size() == 0) {
- // Return an empty list of key status KeyValue pairs
- _hidl_cb(Status::BAD_VALUE, hidl_vec<KeyValue>());
- return Void();
- }
-
- std::vector<KeyValue> infoMapVec;
- infoMapVec.clear();
-
- mPlayPolicyLock.lock();
- KeyValue keyValuePair;
- for (size_t i = 0; i < mPlayPolicy.size(); ++i) {
- keyValuePair.key = mPlayPolicy[i].key;
- keyValuePair.value = mPlayPolicy[i].value;
- infoMapVec.push_back(keyValuePair);
- }
- mPlayPolicyLock.unlock();
- _hidl_cb(Status::OK, toHidlVec(infoMapVec));
- return Void();
-}
-
-Return<void> DrmPlugin::getNumberOfSessions(getNumberOfSessions_cb _hidl_cb) {
- uint32_t currentSessions = mSessionLibrary->numOpenSessions();
- uint32_t maxSessions = 10;
- _hidl_cb(Status::OK, currentSessions, maxSessions);
- return Void();
-}
-
-Return<void> DrmPlugin::getSecurityLevel(const hidl_vec<uint8_t>& sessionId,
- getSecurityLevel_cb _hidl_cb) {
- if (sessionId.size() == 0) {
- _hidl_cb(Status::BAD_VALUE, SecurityLevel::UNKNOWN);
- return Void();
- }
-
- std::vector<uint8_t> sid = toVector(sessionId);
- sp<Session> session = mSessionLibrary->findSession(sid);
- if (!session.get()) {
- _hidl_cb(Status::ERROR_DRM_SESSION_NOT_OPENED, SecurityLevel::UNKNOWN);
- return Void();
- }
-
- Mutex::Autolock lock(mSecurityLevelLock);
- std::map<std::vector<uint8_t>, SecurityLevel>::iterator itr =
- mSecurityLevel.find(sid);
- if (itr == mSecurityLevel.end()) {
- ALOGE("Session id not found");
- _hidl_cb(Status::ERROR_DRM_INVALID_STATE, SecurityLevel::UNKNOWN);
- return Void();
- }
-
- _hidl_cb(Status::OK, itr->second);
- return Void();
-}
-
-Return<void> DrmPlugin::getLogMessages(
- getLogMessages_cb _hidl_cb) {
- using std::chrono::duration_cast;
- using std::chrono::milliseconds;
- using std::chrono::system_clock;
-
- auto timeMillis = duration_cast<milliseconds>(
- system_clock::now().time_since_epoch()).count();
-
- std::vector<LogMessage> logs = {
- { timeMillis, LogPriority::ERROR, std::string("Not implemented") }};
- _hidl_cb(drm::V1_4::Status::OK, toHidlVec(logs));
- return Void();
-}
-
-Return<bool> DrmPlugin::requiresSecureDecoder(
- const hidl_string& mime, SecurityLevel level) {
- UNUSED(mime);
- UNUSED(level);
- return false;
-}
-
-Return<bool> DrmPlugin::requiresSecureDecoderDefault(const hidl_string& mime) {
- UNUSED(mime);
- // Clearkey only supports SW_SECURE_CRYPTO, so we always return false
- // regardless of mime type.
- return false;
-}
-
-Return<Status> DrmPlugin::setPlaybackId(
- const hidl_vec<uint8_t>& sessionId,
- const hidl_string& playbackId) {
- if (sessionId.size() == 0) {
- ALOGE("Invalid empty session id");
- return Status::BAD_VALUE;
- }
-
- std::vector<uint8_t> sid = toVector(sessionId);
- mPlaybackId[sid] = playbackId;
- return Status::OK;
-}
-
-Return<Status> DrmPlugin::setSecurityLevel(const hidl_vec<uint8_t>& sessionId,
- SecurityLevel level) {
- if (sessionId.size() == 0) {
- ALOGE("Invalid empty session id");
- return Status::BAD_VALUE;
- }
-
- if (level > SecurityLevel::SW_SECURE_CRYPTO) {
- ALOGE("Cannot set security level > max");
- return Status::ERROR_DRM_CANNOT_HANDLE;
- }
-
- std::vector<uint8_t> sid = toVector(sessionId);
- sp<Session> session = mSessionLibrary->findSession(sid);
- if (!session.get()) {
- return Status::ERROR_DRM_SESSION_NOT_OPENED;
- }
-
- Mutex::Autolock lock(mSecurityLevelLock);
- std::map<std::vector<uint8_t>, SecurityLevel>::iterator itr =
- mSecurityLevel.find(sid);
- if (itr != mSecurityLevel.end()) {
- mSecurityLevel[sid] = level;
- } else {
- if (!mSecurityLevel.insert(
- std::pair<std::vector<uint8_t>, SecurityLevel>(sid, level)).second) {
- ALOGE("Failed to set security level");
- return Status::ERROR_DRM_INVALID_STATE;
- }
- }
- return Status::OK;
-}
-
-Return<void> DrmPlugin::getMetrics(getMetrics_cb _hidl_cb) {
- // Set the open session count metric.
- DrmMetricGroup::Attribute openSessionOkAttribute = {
- "status", DrmMetricGroup::ValueType::INT64_TYPE, (int64_t) Status::OK, 0.0, ""
- };
- DrmMetricGroup::Value openSessionMetricValue = {
- "count", DrmMetricGroup::ValueType::INT64_TYPE, mOpenSessionOkCount, 0.0, ""
- };
- DrmMetricGroup::Metric openSessionMetric = {
- "open_session", { openSessionOkAttribute }, { openSessionMetricValue }
- };
-
- // Set the close session count metric.
- DrmMetricGroup::Attribute closeSessionOkAttribute = {
- "status", DrmMetricGroup::ValueType::INT64_TYPE, (int64_t) Status::OK, 0.0, ""
- };
- DrmMetricGroup::Value closeSessionMetricValue = {
- "count", DrmMetricGroup::ValueType::INT64_TYPE, mCloseSessionOkCount, 0.0, ""
- };
- DrmMetricGroup::Metric closeSessionMetric = {
- "close_session", { closeSessionOkAttribute }, { closeSessionMetricValue }
- };
-
- // Set the count metric for close_session calls on sessions that were not opened.
- DrmMetricGroup::Attribute closeSessionNotOpenedAttribute = {
- "status", DrmMetricGroup::ValueType::INT64_TYPE,
- (int64_t) Status::ERROR_DRM_SESSION_NOT_OPENED, 0.0, ""
- };
- DrmMetricGroup::Value closeSessionNotOpenedMetricValue = {
- "count", DrmMetricGroup::ValueType::INT64_TYPE, mCloseSessionNotOpenedCount, 0.0, ""
- };
- DrmMetricGroup::Metric closeSessionNotOpenedMetric = {
- "close_session", { closeSessionNotOpenedAttribute }, { closeSessionNotOpenedMetricValue }
- };
-
- // Set the setPlaybackId metric.
- std::vector<DrmMetricGroup::Attribute> sids;
- std::vector<DrmMetricGroup::Value> playbackIds;
- for (const auto&[key, value] : mPlaybackId) {
- std::string sid(key.begin(), key.end());
- DrmMetricGroup::Attribute sessionIdAttribute = {
- "sid", DrmMetricGroup::ValueType::STRING_TYPE, 0, 0, sid };
- sids.push_back(sessionIdAttribute);
-
- DrmMetricGroup::Value playbackIdMetricValue = {
- "playbackId", DrmMetricGroup::ValueType::STRING_TYPE, 0, 0, value };
- playbackIds.push_back(playbackIdMetricValue);
- }
- DrmMetricGroup::Metric setPlaybackIdMetric = {
- "set_playback_id", { sids }, { playbackIds }};
-
- DrmMetricGroup metrics = {
- { openSessionMetric, closeSessionMetric,
- closeSessionNotOpenedMetric, setPlaybackIdMetric }};
- _hidl_cb(Status::OK, hidl_vec<DrmMetricGroup>({metrics}));
- return Void();
-}
-
-Return<void> DrmPlugin::getOfflineLicenseKeySetIds(getOfflineLicenseKeySetIds_cb _hidl_cb) {
- std::vector<std::string> licenseNames = mFileHandle.ListLicenses();
- std::vector<KeySetId> keySetIds;
- if (mMockError != Status_V1_2::OK) {
- _hidl_cb(toStatus_1_0(mMockError), keySetIds);
- return Void();
- }
- for (const auto& name : licenseNames) {
- std::vector<uint8_t> keySetId(name.begin(), name.end());
- keySetIds.push_back(keySetId);
- }
- _hidl_cb(Status::OK, keySetIds);
- return Void();
-}
-
-
-Return<Status> DrmPlugin::removeOfflineLicense(const KeySetId& keySetId) {
- if (mMockError != Status_V1_2::OK) {
- return toStatus_1_0(mMockError);
- }
- std::string licenseName(keySetId.begin(), keySetId.end());
- if (mFileHandle.DeleteLicense(licenseName)) {
- return Status::OK;
- }
- return Status::BAD_VALUE;
-}
-
-Return<void> DrmPlugin::getOfflineLicenseState(const KeySetId& keySetId,
- getOfflineLicenseState_cb _hidl_cb) {
- std::string licenseName(keySetId.begin(), keySetId.end());
- DeviceFiles::LicenseState state;
- std::string license;
- OfflineLicenseState hLicenseState;
- if (mMockError != Status_V1_2::OK) {
- _hidl_cb(toStatus_1_0(mMockError), OfflineLicenseState::UNKNOWN);
- } else if (mFileHandle.RetrieveLicense(licenseName, &state, &license)) {
- switch (state) {
- case DeviceFiles::kLicenseStateActive:
- hLicenseState = OfflineLicenseState::USABLE;
- break;
- case DeviceFiles::kLicenseStateReleasing:
- hLicenseState = OfflineLicenseState::INACTIVE;
- break;
- case DeviceFiles::kLicenseStateUnknown:
- hLicenseState = OfflineLicenseState::UNKNOWN;
- break;
- }
- _hidl_cb(Status::OK, hLicenseState);
- } else {
- _hidl_cb(Status::BAD_VALUE, OfflineLicenseState::UNKNOWN);
- }
- return Void();
-}
-
-Return<void> DrmPlugin::getSecureStops(getSecureStops_cb _hidl_cb) {
- mSecureStopLock.lock();
- std::vector<SecureStop> stops;
- for (auto itr = mSecureStops.begin(); itr != mSecureStops.end(); ++itr) {
- ClearkeySecureStop clearkeyStop = itr->second;
- std::vector<uint8_t> stopVec;
- stopVec.insert(stopVec.end(), clearkeyStop.id.begin(), clearkeyStop.id.end());
- stopVec.insert(stopVec.end(), clearkeyStop.data.begin(), clearkeyStop.data.end());
-
- SecureStop stop;
- stop.opaqueData = toHidlVec(stopVec);
- stops.push_back(stop);
- }
- mSecureStopLock.unlock();
-
- _hidl_cb(Status::OK, stops);
- return Void();
-}
-
-Return<void> DrmPlugin::getSecureStop(const hidl_vec<uint8_t>& secureStopId,
- getSecureStop_cb _hidl_cb) {
- std::vector<uint8_t> stopVec;
-
- mSecureStopLock.lock();
- auto itr = mSecureStops.find(toVector(secureStopId));
- if (itr != mSecureStops.end()) {
- ClearkeySecureStop clearkeyStop = itr->second;
- stopVec.insert(stopVec.end(), clearkeyStop.id.begin(), clearkeyStop.id.end());
- stopVec.insert(stopVec.end(), clearkeyStop.data.begin(), clearkeyStop.data.end());
- }
- mSecureStopLock.unlock();
-
- SecureStop stop;
- if (!stopVec.empty()) {
- stop.opaqueData = toHidlVec(stopVec);
- _hidl_cb(Status::OK, stop);
- } else {
- _hidl_cb(Status::BAD_VALUE, stop);
- }
- return Void();
-}
-
-Return<Status> DrmPlugin::releaseSecureStop(const hidl_vec<uint8_t>& secureStopId) {
- return removeSecureStop(secureStopId);
-}
-
-Return<Status> DrmPlugin::releaseAllSecureStops() {
- return removeAllSecureStops();
-}
-
-Return<void> DrmPlugin::getSecureStopIds(getSecureStopIds_cb _hidl_cb) {
- mSecureStopLock.lock();
- std::vector<SecureStopId> ids;
- for (auto itr = mSecureStops.begin(); itr != mSecureStops.end(); ++itr) {
- ids.push_back(itr->first);
- }
- mSecureStopLock.unlock();
-
- _hidl_cb(Status::OK, toHidlVec(ids));
- return Void();
-}
-
-Return<Status> DrmPlugin::releaseSecureStops(const SecureStopRelease& ssRelease) {
- // OpaqueData starts with a 4-byte decimal integer string
- const size_t kFourBytesOffset = 4;
- if (ssRelease.opaqueData.size() < kFourBytesOffset) {
- ALOGE("Invalid secureStopRelease length");
- return Status::BAD_VALUE;
- }
-
- Status status = Status::OK;
- std::vector<uint8_t> input = toVector(ssRelease.opaqueData);
-
- if (input.size() < kSecureStopIdSize + kFourBytesOffset) {
- // A minimal SecureStopRelease has to contain
- // a 4-byte count and one secureStop id
- ALOGE("Total size of secureStops is too short");
- return Status::BAD_VALUE;
- }
-
- // The format of opaqueData is shared between the server
- // and the drm service. The clearkey implementation consists of:
- // count - number of secure stops
- // list of fixed length secure stops
- size_t countBufferSize = sizeof(uint32_t);
- if (input.size() < countBufferSize) {
- // SafetyNet logging
- android_errorWriteLog(0x534e4554, "144766455");
- return Status::BAD_VALUE;
- }
- uint32_t count = 0;
- sscanf(reinterpret_cast<char*>(input.data()), "%04" PRIu32, &count);
-
- // Avoid divide by 0 below.
- if (count == 0) {
- ALOGE("Invalid 0 secureStop count");
- return Status::BAD_VALUE;
- }
-
- // Computes the fixed length secureStop size
- size_t secureStopSize = (input.size() - kFourBytesOffset) / count;
- if (secureStopSize < kSecureStopIdSize) {
- // A valid secureStop contains the id plus data
- ALOGE("Invalid secureStop size");
- return Status::BAD_VALUE;
- }
- uint8_t* buffer = new uint8_t[secureStopSize];
- size_t offset = kFourBytesOffset; // skip the count
- for (size_t i = 0; i < count; ++i, offset += secureStopSize) {
- memcpy(buffer, input.data() + offset, secureStopSize);
-
- // A secureStop contains id+data, we only use the id for removal
- std::vector<uint8_t> id(buffer, buffer + kSecureStopIdSize);
- status = removeSecureStop(toHidlVec(id));
- if (Status::OK != status) break;
- }
-
- delete[] buffer;
- return status;
-}
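The opaqueData layout described in the comments above (a 4-character decimal count followed by fixed-length secure stops) can be illustrated with a small sketch. `makeOpaqueData` and `stopSize` below are hypothetical helpers used only for illustration, not part of the plugin:

```cpp
// Sketch only: assemble an opaqueData blob in the layout releaseSecureStops()
// expects. "stopSize" stands in for the fixed per-stop size (id + data) used
// by the implementation.
#include <cstdint>
#include <cstdio>
#include <string>
#include <vector>

std::vector<uint8_t> makeOpaqueData(const std::vector<std::string>& stops,
                                    size_t stopSize) {
    std::vector<uint8_t> blob;
    char count[5];
    std::snprintf(count, sizeof(count), "%04zu", stops.size());
    blob.insert(blob.end(), count, count + 4);   // 4-character decimal count
    for (std::string stop : stops) {
        stop.resize(stopSize, '\0');             // every stop occupies stopSize bytes
        blob.insert(blob.end(), stop.begin(), stop.end());
    }
    return blob;
}
```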
-
-Return<Status> DrmPlugin::removeSecureStop(const hidl_vec<uint8_t>& secureStopId) {
- Mutex::Autolock lock(mSecureStopLock);
-
- if (1 != mSecureStops.erase(toVector(secureStopId))) {
- return Status::BAD_VALUE;
- }
- return Status::OK;
-}
-
-Return<Status> DrmPlugin::removeAllSecureStops() {
- Mutex::Autolock lock(mSecureStopLock);
-
- mSecureStops.clear();
- mNextSecureStopId = kSecureStopIdStart;
- return Status::OK;
-}
-
-} // namespace clearkey
-} // namespace V1_4
-} // namespace drm
-} // namespace hardware
-} // namespace android
diff --git a/drm/mediadrm/plugins/clearkey/hidl/InitDataParser.cpp b/drm/mediadrm/plugins/clearkey/hidl/InitDataParser.cpp
deleted file mode 100644
index eccc843..0000000
--- a/drm/mediadrm/plugins/clearkey/hidl/InitDataParser.cpp
+++ /dev/null
@@ -1,186 +0,0 @@
-/*
- * Copyright (C) 2018 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-//#define LOG_NDEBUG 0
-#define LOG_TAG "hidl_InitDataParser"
-
-#include <algorithm>
-#include <utils/Log.h>
-
-#include "InitDataParser.h"
-
-#include "Base64.h"
-
-#include "ClearKeyUUID.h"
-#include "MimeType.h"
-#include "Utils.h"
-
-namespace android {
-namespace hardware {
-namespace drm {
-namespace V1_4 {
-namespace clearkey {
-
-namespace {
- const size_t kKeyIdSize = 16;
- const size_t kSystemIdSize = 16;
-}
-
-std::vector<uint8_t> StrToVector(const std::string& str) {
- std::vector<uint8_t> vec(str.begin(), str.end());
- return vec;
-}
-
-Status InitDataParser::parse(const std::vector<uint8_t>& initData,
- const std::string& mimeType,
- V1_0::KeyType keyType,
- std::vector<uint8_t>* licenseRequest) {
- // Build a list of the key IDs
- std::vector<const uint8_t*> keyIds;
-
- if (mimeType == kIsoBmffVideoMimeType.c_str() ||
- mimeType == kIsoBmffAudioMimeType.c_str() ||
- mimeType == kCencInitDataFormat.c_str()) {
- Status res = parsePssh(initData, &keyIds);
- if (res != Status::OK) {
- return res;
- }
- } else if (mimeType == kWebmVideoMimeType.c_str() ||
- mimeType == kWebmAudioMimeType.c_str() ||
- mimeType == kWebmInitDataFormat.c_str()) {
- // WebM "init data" is just a single key ID
- if (initData.size() != kKeyIdSize) {
- return Status::ERROR_DRM_CANNOT_HANDLE;
- }
- keyIds.push_back(initData.data());
- } else {
- return Status::ERROR_DRM_CANNOT_HANDLE;
- }
-
- if (keyType == V1_0::KeyType::RELEASE) {
- // restore key
- }
-
- // Build the request
- std::string requestJson = generateRequest(keyType, keyIds);
- std::vector<uint8_t> requestJsonVec = StrToVector(requestJson);
-
- licenseRequest->clear();
- licenseRequest->insert(licenseRequest->end(), requestJsonVec.begin(), requestJsonVec.end());
- return Status::OK;
-}
-
-Status InitDataParser::parsePssh(const std::vector<uint8_t>& initData,
- std::vector<const uint8_t*>* keyIds) {
- // Description of PSSH format:
- // https://w3c.github.io/encrypted-media/format-registry/initdata/cenc.html
- size_t readPosition = 0;
-
- uint32_t expectedSize = initData.size();
- const char psshIdentifier[4] = {'p', 's', 's', 'h'};
- const uint8_t psshVersion1[4] = {1, 0, 0, 0};
- uint32_t keyIdCount = 0;
- size_t headerSize = sizeof(expectedSize) + sizeof(psshIdentifier) +
- sizeof(psshVersion1) + kSystemIdSize + sizeof(keyIdCount);
- if (initData.size() < headerSize) {
- return Status::ERROR_DRM_CANNOT_HANDLE;
- }
-
- // Validate size field
- expectedSize = htonl(expectedSize);
- if (memcmp(&initData[readPosition], &expectedSize,
- sizeof(expectedSize)) != 0) {
- return Status::ERROR_DRM_CANNOT_HANDLE;
- }
- readPosition += sizeof(expectedSize);
-
- // Validate PSSH box identifier
- if (memcmp(&initData[readPosition], psshIdentifier,
- sizeof(psshIdentifier)) != 0) {
- return Status::ERROR_DRM_CANNOT_HANDLE;
- }
- readPosition += sizeof(psshIdentifier);
-
- // Validate EME version number
- if (memcmp(&initData[readPosition], psshVersion1,
- sizeof(psshVersion1)) != 0) {
- return Status::ERROR_DRM_CANNOT_HANDLE;
- }
- readPosition += sizeof(psshVersion1);
-
- // Validate system ID
- if (!clearkeydrm::isClearKeyUUID(&initData[readPosition])) {
- return Status::ERROR_DRM_CANNOT_HANDLE;
- }
- readPosition += kSystemIdSize;
-
- // Read key ID count
- memcpy(&keyIdCount, &initData[readPosition], sizeof(keyIdCount));
- keyIdCount = ntohl(keyIdCount);
- readPosition += sizeof(keyIdCount);
-
- uint64_t psshSize = 0;
- if (__builtin_mul_overflow(keyIdCount, kKeyIdSize, &psshSize) ||
- __builtin_add_overflow(readPosition, psshSize, &psshSize) ||
- psshSize != initData.size() - sizeof(uint32_t) /* DataSize(0) */) {
- return Status::ERROR_DRM_CANNOT_HANDLE;
- }
-
- // Calculate the key ID offsets
- for (uint32_t i = 0; i < keyIdCount; ++i) {
- size_t keyIdPosition = readPosition + (i * kKeyIdSize);
- keyIds->push_back(&initData[keyIdPosition]);
- }
- return Status::OK;
-}
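As a worked check of the size bookkeeping above, using the single-key-id sample `validInitData` from the clearkey fuzzer later in this patch:

```cpp
// Size of a version-1 PSSH box carrying exactly one key id:
//   4 (box size) + 4 ("pssh") + 4 (version/flags) + 16 (system id)
// + 4 (key id count) + 1 * 16 (key id) + 4 (DataSize == 0) = 52 bytes,
// which matches the 0x00000034 size field in the fuzzer's validInitData.
#include <cstddef>
constexpr size_t kOneKeyIdPsshSize = 4 + 4 + 4 + 16 + 4 + 1 * 16 + 4;  // == 52
```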
-
-std::string InitDataParser::generateRequest(V1_0::KeyType keyType,
- const std::vector<const uint8_t*>& keyIds) {
- const std::string kRequestPrefix("{\"kids\":[");
- const std::string kTemporarySession("],\"type\":\"temporary\"}");
- const std::string kPersistentSession("],\"type\":\"persistent-license\"}");
-
- std::string request(kRequestPrefix);
- std::string encodedId;
- for (size_t i = 0; i < keyIds.size(); ++i) {
- encodedId.clear();
- encodeBase64Url(keyIds[i], kKeyIdSize, &encodedId);
- if (i != 0) {
- request.append(",");
- }
- request.push_back('\"');
- request.append(encodedId);
- request.push_back('\"');
- }
- if (keyType == V1_0::KeyType::STREAMING) {
- request.append(kTemporarySession);
- } else if (keyType == V1_0::KeyType::OFFLINE ||
- keyType == V1_0::KeyType::RELEASE) {
- request.append(kPersistentSession);
- }
-
- // Android's Base64 encoder produces padding. EME forbids padding.
- const char kBase64Padding = '=';
- request.erase(std::remove(request.begin(), request.end(), kBase64Padding), request.end());
-
- return request;
-}
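For illustration, with the sample key id used by the fuzzer's validInitData/validKeyResponse later in this patch, the request produced for KeyType::STREAMING would look like the following (shown as a C++ raw string; the key id value is just that sample data, not a required value):

```cpp
// Illustrative generateRequest() output for one key id and KeyType::STREAMING:
// a base64url-encoded key id list plus the "temporary" session type, with no
// base64 padding characters.
const char* kExampleStreamingRequest =
    R"({"kids":["YAYeAX5Hfod-V9ANHtANHg"],"type":"temporary"})";
```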
-
-} // namespace clearkey
-} // namespace V1_4
-} // namespace drm
-} // namespace hardware
-} // namespace android
diff --git a/drm/mediadrm/plugins/clearkey/hidl/JsonWebKey.cpp b/drm/mediadrm/plugins/clearkey/hidl/JsonWebKey.cpp
deleted file mode 100644
index 45cc775..0000000
--- a/drm/mediadrm/plugins/clearkey/hidl/JsonWebKey.cpp
+++ /dev/null
@@ -1,278 +0,0 @@
-/*
- * Copyright (C) 2018 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-#define LOG_TAG "hidl_JsonWebKey"
-
-#include <utils/Log.h>
-
-#include "JsonWebKey.h"
-
-#include "Base64.h"
-
-namespace {
-const std::string kBase64Padding("=");
-const std::string kKeysTag("keys");
-const std::string kKeyTypeTag("kty");
-const std::string kKeyTag("k");
-const std::string kKeyIdTag("kid");
-const std::string kMediaSessionType("type");
-const std::string kPersistentLicenseSession("persistent-license");
-const std::string kSymmetricKeyValue("oct");
-const std::string kTemporaryLicenseSession("temporary");
-}
-
-namespace android {
-namespace hardware {
-namespace drm {
-namespace V1_4 {
-namespace clearkey {
-
-JsonWebKey::JsonWebKey() {
-}
-
-JsonWebKey::~JsonWebKey() {
-}
-
-/*
- * Parses a JSON Web Key Set string and initializes a KeyMap with key id:key
- * pairs from the JSON Web Key Set. Both key ids and keys are base64url
- * encoded. The KeyMap contains base64url decoded key id:key pairs.
- *
- * @return Returns false for errors, true for success.
- */
-bool JsonWebKey::extractKeysFromJsonWebKeySet(const std::string& jsonWebKeySet,
- KeyMap* keys) {
-
- keys->clear();
-
- if (!parseJsonWebKeySet(jsonWebKeySet, &mJsonObjects)) {
- return false;
- }
-
- // mJsonObjects[0] contains the entire JSON Web Key Set, including
- // all the base64 encoded keys. Each key is also stored separately as
- // a JSON object in mJsonObjects[1..n] where n is the total
- // number of keys in the set.
- if (mJsonObjects.size() == 0 || !isJsonWebKeySet(mJsonObjects[0])) {
- return false;
- }
-
- std::string encodedKey, encodedKeyId;
- std::vector<uint8_t> decodedKey, decodedKeyId;
-
- // mJsonObjects[1] contains the first JSON Web Key in the set
- for (size_t i = 1; i < mJsonObjects.size(); ++i) {
- encodedKeyId.clear();
- encodedKey.clear();
-
- if (!parseJsonObject(mJsonObjects[i], &mTokens))
- return false;
-
- if (findKey(mJsonObjects[i], &encodedKeyId, &encodedKey)) {
- if (encodedKeyId.empty() || encodedKey.empty()) {
- ALOGE("Must have both key id and key in the JsonWebKey set.");
- continue;
- }
-
- if (!decodeBase64String(encodedKeyId, &decodedKeyId)) {
- ALOGE("Failed to decode key id(%s)", encodedKeyId.c_str());
- continue;
- }
-
- if (!decodeBase64String(encodedKey, &decodedKey)) {
- ALOGE("Failed to decode key(%s)", encodedKey.c_str());
- continue;
- }
-
- keys->insert(std::pair<std::vector<uint8_t>,
- std::vector<uint8_t> >(decodedKeyId, decodedKey));
- }
- }
- return true;
-}
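For reference, the fuzzer's validKeyResponse later in this patch decodes to a JSON Web Key Set of exactly the shape this parser accepts; a minimal example (values mirror that sample data) is:

```cpp
// Minimal JSON Web Key Set accepted by extractKeysFromJsonWebKeySet(): one
// symmetric ("oct") key whose "kid" and "k" values are base64url encoded
// without padding characters.
const char* kExampleJwkSet = R"({"keys":[{"kty":"oct",)"
    R"("kid":"YAYeAX5Hfod-V9ANHtANHg","k":"GoogleTestKeyBase64ggg"}]})";
```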
-
-bool JsonWebKey::decodeBase64String(const std::string& encodedText,
- std::vector<uint8_t>* decodedText) {
-
- decodedText->clear();
-
- // encodedText should not contain padding characters as per EME spec.
- if (encodedText.find(kBase64Padding) != std::string::npos) {
- return false;
- }
-
- // Since decodeBase64() requires padding characters,
- // add them so the length of encodedText is exactly a multiple of 4.
- int remainder = encodedText.length() % 4;
- std::string paddedText(encodedText);
- if (remainder > 0) {
- for (int i = 0; i < 4 - remainder; ++i) {
- paddedText.append(kBase64Padding);
- }
- }
-
- sp<Buffer> buffer = decodeBase64(paddedText);
- if (buffer == nullptr) {
- ALOGE("Malformed base64 encoded content found.");
- return false;
- }
-
- decodedText->insert(decodedText->end(), buffer->base(), buffer->base() + buffer->size());
- return true;
-}
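A quick worked example of the padding rule above (illustrative values only): a 6-character input has remainder 2, so two '=' characters are appended before decodeBase64() is called.

```cpp
// "QUJDRA" (6 characters, the base64 encoding of the bytes "ABCD" without
// padding) has 6 % 4 == 2, so two '=' characters are appended:
// "QUJDRA" -> "QUJDRA==".
#include <string>
std::string padded = "QUJDRA";
while (padded.size() % 4 != 0) padded += '=';
// padded == "QUJDRA=="
```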
-
-bool JsonWebKey::findKey(const std::string& jsonObject, std::string* keyId,
- std::string* encodedKey) {
-
- std::string key, value;
-
- // Only allow symmetric key, i.e. "kty":"oct" pair.
- if (jsonObject.find(kKeyTypeTag) != std::string::npos) {
- findValue(kKeyTypeTag, &value);
- if (0 != value.compare(kSymmetricKeyValue))
- return false;
- }
-
- if (jsonObject.find(kKeyIdTag) != std::string::npos) {
- findValue(kKeyIdTag, keyId);
- }
-
- if (jsonObject.find(kKeyTag) != std::string::npos) {
- findValue(kKeyTag, encodedKey);
- }
- return true;
-}
-
-void JsonWebKey::findValue(const std::string &key, std::string* value) {
- value->clear();
- const char* valueToken;
- for (std::vector<std::string>::const_iterator nextToken = mTokens.begin();
- nextToken != mTokens.end(); ++nextToken) {
- if (0 == (*nextToken).compare(key)) {
- if (nextToken + 1 == mTokens.end())
- break;
- valueToken = (*(nextToken + 1)).c_str();
- value->assign(valueToken);
- nextToken++;
- break;
- }
- }
-}
-
-bool JsonWebKey::isJsonWebKeySet(const std::string& jsonObject) const {
- if (jsonObject.find(kKeysTag) == std::string::npos) {
- ALOGE("JSON Web Key does not contain keys.");
- return false;
- }
- return true;
-}
-
-/*
- * Parses a JSON object string and initializes a vector of tokens.
- *
- * @return Returns false for errors, true for success.
- */
-bool JsonWebKey::parseJsonObject(const std::string& jsonObject,
- std::vector<std::string>* tokens) {
- jsmn_parser parser;
-
- jsmn_init(&parser);
- int numTokens = jsmn_parse(&parser,
- jsonObject.c_str(), jsonObject.size(), nullptr, 0);
- if (numTokens < 0) {
- ALOGE("Parser returns error code=%d", numTokens);
- return false;
- }
-
- unsigned int jsmnTokensSize = numTokens * sizeof(jsmntok_t);
- mJsmnTokens.clear();
- mJsmnTokens.resize(jsmnTokensSize);
-
- jsmn_init(&parser);
- int status = jsmn_parse(&parser, jsonObject.c_str(),
- jsonObject.size(), mJsmnTokens.data(), numTokens);
- if (status < 0) {
- ALOGE("Parser returns error code=%d", status);
- return false;
- }
-
- tokens->clear();
- std::string token;
- const char *pjs;
- for (int j = 0; j < numTokens; ++j) {
- pjs = jsonObject.c_str() + mJsmnTokens[j].start;
- if (mJsmnTokens[j].type == JSMN_STRING ||
- mJsmnTokens[j].type == JSMN_PRIMITIVE) {
- token.assign(pjs, mJsmnTokens[j].end - mJsmnTokens[j].start);
- tokens->push_back(token);
- }
- }
- return true;
-}
-
-/*
- * Parses a JSON Web Key Set string and initializes a vector of JSON objects.
- *
- * @return Returns false for errors, true for success.
- */
-bool JsonWebKey::parseJsonWebKeySet(const std::string& jsonWebKeySet,
- std::vector<std::string>* jsonObjects) {
- if (jsonWebKeySet.empty()) {
- ALOGE("Empty JSON Web Key");
- return false;
- }
-
- // The jsmn parser only supports Unicode encoding.
- jsmn_parser parser;
-
- // Compute the number of tokens. A token records the type and offset
- // within the original string.
- jsmn_init(&parser);
- int numTokens = jsmn_parse(&parser,
- jsonWebKeySet.c_str(), jsonWebKeySet.size(), nullptr, 0);
- if (numTokens < 0) {
- ALOGE("Parser returns error code=%d", numTokens);
- return false;
- }
-
- unsigned int jsmnTokensSize = numTokens * sizeof(jsmntok_t);
- mJsmnTokens.resize(jsmnTokensSize);
-
- jsmn_init(&parser);
- int status = jsmn_parse(&parser, jsonWebKeySet.c_str(),
- jsonWebKeySet.size(), mJsmnTokens.data(), numTokens);
- if (status < 0) {
- ALOGE("Parser returns error code=%d", status);
- return false;
- }
-
- std::string token;
- const char *pjs;
- for (int i = 0; i < numTokens; ++i) {
- pjs = jsonWebKeySet.c_str() + mJsmnTokens[i].start;
- if (mJsmnTokens[i].type == JSMN_OBJECT) {
- token.assign(pjs, mJsmnTokens[i].end - mJsmnTokens[i].start);
- jsonObjects->push_back(token);
- }
- }
- return true;
-}
-
-} // namespace clearkey
-} // namespace V1_4
-} // namespace drm
-} // namespace hardware
-} // namespace android
-
diff --git a/drm/mediadrm/plugins/clearkey/hidl/MemoryFileSystem.cpp b/drm/mediadrm/plugins/clearkey/hidl/MemoryFileSystem.cpp
deleted file mode 100644
index 56910be..0000000
--- a/drm/mediadrm/plugins/clearkey/hidl/MemoryFileSystem.cpp
+++ /dev/null
@@ -1,92 +0,0 @@
-// Copyright 2018 Google LLC. All Rights Reserved. This file and proprietary
-// source code may only be used and distributed under the Widevine Master
-// License Agreement.
-
-#include <utils/Log.h>
-#include <string>
-
-#include "MemoryFileSystem.h"
-#include "Utils.h"
-
-namespace android {
-namespace hardware {
-namespace drm {
-namespace V1_4 {
-namespace clearkey {
-
-std::string MemoryFileSystem::GetFileName(const std::string& path) {
- size_t index = path.find_last_of('/');
- if (index != std::string::npos) {
- return path.substr(index+1);
- } else {
- return path;
- }
-}
-
-bool MemoryFileSystem::FileExists(const std::string& fileName) const {
- auto result = mMemoryFileSystem.find(fileName);
- return result != mMemoryFileSystem.end();
-}
-
-ssize_t MemoryFileSystem::GetFileSize(const std::string& fileName) const {
- auto result = mMemoryFileSystem.find(fileName);
- if (result != mMemoryFileSystem.end()) {
- return static_cast<ssize_t>(result->second.getFileSize());
- } else {
- ALOGE("Failed to get size for %s", fileName.c_str());
- return -1;
- }
-}
-
-std::vector<std::string> MemoryFileSystem::ListFiles() const {
- std::vector<std::string> list;
- for (const auto& filename : mMemoryFileSystem) {
- list.push_back(filename.first);
- }
- return list;
-}
-
-size_t MemoryFileSystem::Read(const std::string& path, std::string* buffer) {
- std::string key = GetFileName(path);
- auto result = mMemoryFileSystem.find(key);
- if (result != mMemoryFileSystem.end()) {
- std::string serializedHashFile = result->second.getContent();
- buffer->assign(serializedHashFile);
- return buffer->size();
- } else {
- ALOGE("Failed to read from %s", path.c_str());
- return -1;
- }
-}
-
-size_t MemoryFileSystem::Write(const std::string& path, const MemoryFile& memoryFile) {
- std::string key = GetFileName(path);
- auto result = mMemoryFileSystem.find(key);
- if (result != mMemoryFileSystem.end()) {
- mMemoryFileSystem.erase(key);
- }
- mMemoryFileSystem.insert(std::pair<std::string, MemoryFile>(key, memoryFile));
- return memoryFile.getFileSize();
-}
-
-bool MemoryFileSystem::RemoveFile(const std::string& fileName) {
- auto result = mMemoryFileSystem.find(fileName);
- if (result != mMemoryFileSystem.end()) {
- mMemoryFileSystem.erase(result);
- return true;
- } else {
- ALOGE("Cannot find license to remove: %s", fileName.c_str());
- return false;
- }
-}
-
-bool MemoryFileSystem::RemoveAllFiles() {
- mMemoryFileSystem.clear();
- return mMemoryFileSystem.empty();
-}
-
-} // namespace clearkey
-} // namespace V1_4
-} // namespace drm
-} // namespace hardware
-} // namespace android
diff --git a/drm/mediadrm/plugins/clearkey/hidl/Session.cpp b/drm/mediadrm/plugins/clearkey/hidl/Session.cpp
deleted file mode 100644
index cf668d4..0000000
--- a/drm/mediadrm/plugins/clearkey/hidl/Session.cpp
+++ /dev/null
@@ -1,101 +0,0 @@
-/*
- * Copyright (C) 2018 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-//#define LOG_NDEBUG 0
-#define LOG_TAG "hidl_ClearKeySession"
-#include <utils/Log.h>
-
-#include "Session.h"
-#include "Utils.h"
-
-#include "AesCtrDecryptor.h"
-#include "InitDataParser.h"
-#include "JsonWebKey.h"
-
-namespace android {
-namespace hardware {
-namespace drm {
-namespace V1_4 {
-namespace clearkey {
-
-using ::android::hardware::drm::V1_0::KeyValue;
-using ::android::hardware::drm::V1_0::Status;
-using ::android::hardware::drm::V1_0::SubSample;
-using ::android::hardware::Return;
-using ::android::sp;
-
-using android::Mutex;
-
-Status Session::getKeyRequest(
- const std::vector<uint8_t>& initData,
- const std::string& mimeType,
- V1_0::KeyType keyType,
- std::vector<uint8_t>* keyRequest) const {
- InitDataParser parser;
- return parser.parse(initData, mimeType, keyType, keyRequest);
-}
-
-Status Session::provideKeyResponse(const std::vector<uint8_t>& response) {
- std::string responseString(
- reinterpret_cast<const char*>(response.data()), response.size());
- KeyMap keys;
-
- Mutex::Autolock lock(mMapLock);
- JsonWebKey parser;
- if (parser.extractKeysFromJsonWebKeySet(responseString, &keys)) {
- for (auto &key : keys) {
- std::string first(key.first.begin(), key.first.end());
- std::string second(key.second.begin(), key.second.end());
- mKeyMap.insert(std::pair<std::vector<uint8_t>,
- std::vector<uint8_t> >(key.first, key.second));
- }
- return Status::OK;
- } else {
- return Status::ERROR_DRM_UNKNOWN;
- }
-}
-
-Status_V1_2 Session::decrypt(
- const KeyId keyId, const Iv iv, const uint8_t* srcPtr,
- uint8_t* destPtr, const std::vector<SubSample> subSamples,
- size_t* bytesDecryptedOut) {
- Mutex::Autolock lock(mMapLock);
-
- if (getMockError() != Status_V1_2::OK) {
- return getMockError();
- }
-
- std::vector<uint8_t> keyIdVector;
- keyIdVector.clear();
- keyIdVector.insert(keyIdVector.end(), keyId, keyId + kBlockSize);
- std::map<std::vector<uint8_t>, std::vector<uint8_t> >::iterator itr;
- itr = mKeyMap.find(keyIdVector);
- if (itr == mKeyMap.end()) {
- return Status_V1_2::ERROR_DRM_NO_LICENSE;
- }
-
- AesCtrDecryptor decryptor;
- Status status = decryptor.decrypt(
- itr->second /*key*/, iv, srcPtr, destPtr, subSamples,
- subSamples.size(), bytesDecryptedOut);
- return static_cast<Status_V1_2>(status);
-}
-
-} // namespace clearkey
-} // namespace V1_4
-} // namespace drm
-} // namespace hardware
-} // namespace android
diff --git a/drm/mediadrm/plugins/clearkey/hidl/SessionLibrary.cpp b/drm/mediadrm/plugins/clearkey/hidl/SessionLibrary.cpp
deleted file mode 100644
index 88afcc4..0000000
--- a/drm/mediadrm/plugins/clearkey/hidl/SessionLibrary.cpp
+++ /dev/null
@@ -1,92 +0,0 @@
-/*
- * Copyright (C) 2018 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-//#define LOG_NDEBUG 0
-#define LOG_TAG "hidl_ClearKeySessionLibrary"
-#include <utils/Log.h>
-
-#include "SessionLibrary.h"
-#include "Utils.h"
-
-namespace android {
-namespace hardware {
-namespace drm {
-namespace V1_4 {
-namespace clearkey {
-
-using ::android::hardware::hidl_string;
-using ::android::hardware::hidl_vec;
-using ::android::sp;
-
-Mutex SessionLibrary::sSingletonLock;
-SessionLibrary* SessionLibrary::sSingleton = NULL;
-
-SessionLibrary* SessionLibrary::get() {
- Mutex::Autolock lock(sSingletonLock);
-
- if (sSingleton == NULL) {
- ALOGD("Instantiating Session Library Singleton.");
- sSingleton = new SessionLibrary();
- }
-
- return sSingleton;
-}
-
-sp<Session> SessionLibrary::createSession() {
- Mutex::Autolock lock(mSessionsLock);
-
- char sessionIdRaw[16];
- snprintf(sessionIdRaw, sizeof(sessionIdRaw), "%u", mNextSessionId);
-
- mNextSessionId += 1;
-
- std::vector<uint8_t> sessionId;
- sessionId.insert(sessionId.end(), sessionIdRaw,
- sessionIdRaw + sizeof(sessionIdRaw) / sizeof(uint8_t));
-
- mSessions.insert(std::pair<std::vector<uint8_t>,
- sp<Session> >(sessionId, new Session(sessionId)));
- std::map<std::vector<uint8_t>, sp<Session> >::iterator itr =
- mSessions.find(sessionId);
- if (itr != mSessions.end()) {
- return itr->second;
- } else {
- return nullptr;
- }
-}
-
-sp<Session> SessionLibrary::findSession(
- const std::vector<uint8_t>& sessionId) {
- Mutex::Autolock lock(mSessionsLock);
- std::map<std::vector<uint8_t>, sp<Session> >::iterator itr =
- mSessions.find(sessionId);
- if (itr != mSessions.end()) {
- return itr->second;
- } else {
- return nullptr;
- }
-}
-
-void SessionLibrary::destroySession(const sp<Session>& session) {
- Mutex::Autolock lock(mSessionsLock);
- mSessions.erase(session->sessionId());
-}
-
-} // namespace clearkey
-} // namespace V1_4
-} // namespace drm
-} // namespace hardware
-} // namespace android
diff --git a/drm/mediadrm/plugins/clearkey/hidl/android.hardware.drm@1.2-service-lazy.clearkey.rc b/drm/mediadrm/plugins/clearkey/hidl/android.hardware.drm@1.2-service-lazy.clearkey.rc
deleted file mode 100644
index ec4517d..0000000
--- a/drm/mediadrm/plugins/clearkey/hidl/android.hardware.drm@1.2-service-lazy.clearkey.rc
+++ /dev/null
@@ -1,14 +0,0 @@
-service vendor.drm-clearkey-hal-1-2 /vendor/bin/hw/android.hardware.drm@1.2-service-lazy.clearkey
- interface android.hardware.drm@1.0::ICryptoFactory clearkey
- interface android.hardware.drm@1.0::IDrmFactory clearkey
- interface android.hardware.drm@1.1::ICryptoFactory clearkey
- interface android.hardware.drm@1.1::IDrmFactory clearkey
- interface android.hardware.drm@1.2::ICryptoFactory clearkey
- interface android.hardware.drm@1.2::IDrmFactory clearkey
- disabled
- oneshot
- class hal
- user media
- group media mediadrm
- ioprio rt 4
- task_profiles ProcessCapacityHigh
diff --git a/drm/mediadrm/plugins/clearkey/hidl/android.hardware.drm@1.2-service.clearkey.rc b/drm/mediadrm/plugins/clearkey/hidl/android.hardware.drm@1.2-service.clearkey.rc
deleted file mode 100644
index 3b48cf2..0000000
--- a/drm/mediadrm/plugins/clearkey/hidl/android.hardware.drm@1.2-service.clearkey.rc
+++ /dev/null
@@ -1,13 +0,0 @@
-service vendor.drm-clearkey-hal-1-2 /vendor/bin/hw/android.hardware.drm@1.2-service.clearkey
- interface android.hardware.drm@1.0::ICryptoFactory clearkey
- interface android.hardware.drm@1.0::IDrmFactory clearkey
- interface android.hardware.drm@1.1::ICryptoFactory clearkey
- interface android.hardware.drm@1.1::IDrmFactory clearkey
- interface android.hardware.drm@1.2::ICryptoFactory clearkey
- interface android.hardware.drm@1.2::IDrmFactory clearkey
- disabled
- class hal
- user media
- group media mediadrm
- ioprio rt 4
- task_profiles ProcessCapacityHigh
diff --git a/drm/mediadrm/plugins/clearkey/hidl/android.hardware.drm@1.3-service-lazy.clearkey.rc b/drm/mediadrm/plugins/clearkey/hidl/android.hardware.drm@1.3-service-lazy.clearkey.rc
deleted file mode 100644
index 6e64978..0000000
--- a/drm/mediadrm/plugins/clearkey/hidl/android.hardware.drm@1.3-service-lazy.clearkey.rc
+++ /dev/null
@@ -1,16 +0,0 @@
-service vendor.drm-clearkey-hal-1-3 /vendor/bin/hw/android.hardware.drm@1.3-service-lazy.clearkey
- interface android.hardware.drm@1.0::ICryptoFactory clearkey
- interface android.hardware.drm@1.0::IDrmFactory clearkey
- interface android.hardware.drm@1.1::ICryptoFactory clearkey
- interface android.hardware.drm@1.1::IDrmFactory clearkey
- interface android.hardware.drm@1.2::ICryptoFactory clearkey
- interface android.hardware.drm@1.2::IDrmFactory clearkey
- interface android.hardware.drm@1.3::ICryptoFactory clearkey
- interface android.hardware.drm@1.3::IDrmFactory clearkey
- disabled
- oneshot
- class hal
- user media
- group media mediadrm
- ioprio rt 4
- task_profiles ProcessCapacityHigh
diff --git a/drm/mediadrm/plugins/clearkey/hidl/android.hardware.drm@1.3-service.clearkey.rc b/drm/mediadrm/plugins/clearkey/hidl/android.hardware.drm@1.3-service.clearkey.rc
deleted file mode 100644
index e302e1b..0000000
--- a/drm/mediadrm/plugins/clearkey/hidl/android.hardware.drm@1.3-service.clearkey.rc
+++ /dev/null
@@ -1,14 +0,0 @@
-service vendor.drm-clearkey-hal-1-3 /vendor/bin/hw/android.hardware.drm@1.3-service.clearkey
- interface android.hardware.drm@1.0::ICryptoFactory clearkey
- interface android.hardware.drm@1.0::IDrmFactory clearkey
- interface android.hardware.drm@1.1::ICryptoFactory clearkey
- interface android.hardware.drm@1.1::IDrmFactory clearkey
- interface android.hardware.drm@1.2::ICryptoFactory clearkey
- interface android.hardware.drm@1.2::IDrmFactory clearkey
- interface android.hardware.drm@1.3::ICryptoFactory clearkey
- interface android.hardware.drm@1.3::IDrmFactory clearkey
- class hal
- user media
- group media mediadrm
- ioprio rt 4
- task_profiles ProcessCapacityHigh
diff --git a/drm/mediadrm/plugins/clearkey/hidl/android.hardware.drm@1.4-service-lazy.clearkey.rc b/drm/mediadrm/plugins/clearkey/hidl/android.hardware.drm@1.4-service-lazy.clearkey.rc
deleted file mode 100644
index 84a63a1..0000000
--- a/drm/mediadrm/plugins/clearkey/hidl/android.hardware.drm@1.4-service-lazy.clearkey.rc
+++ /dev/null
@@ -1,18 +0,0 @@
-service vendor.drm-clearkey-hal-1-4 /vendor/bin/hw/android.hardware.drm@1.4-service-lazy.clearkey
- interface android.hardware.drm@1.0::ICryptoFactory clearkey
- interface android.hardware.drm@1.0::IDrmFactory clearkey
- interface android.hardware.drm@1.1::ICryptoFactory clearkey
- interface android.hardware.drm@1.1::IDrmFactory clearkey
- interface android.hardware.drm@1.2::ICryptoFactory clearkey
- interface android.hardware.drm@1.2::IDrmFactory clearkey
- interface android.hardware.drm@1.3::ICryptoFactory clearkey
- interface android.hardware.drm@1.3::IDrmFactory clearkey
- interface android.hardware.drm@1.4::ICryptoFactory clearkey
- interface android.hardware.drm@1.4::IDrmFactory clearkey
- disabled
- oneshot
- class hal
- user media
- group media mediadrm
- ioprio rt 4
- task_profiles ProcessCapacityHigh
diff --git a/drm/mediadrm/plugins/clearkey/hidl/android.hardware.drm@1.4-service.clearkey.rc b/drm/mediadrm/plugins/clearkey/hidl/android.hardware.drm@1.4-service.clearkey.rc
deleted file mode 100644
index 649599e..0000000
--- a/drm/mediadrm/plugins/clearkey/hidl/android.hardware.drm@1.4-service.clearkey.rc
+++ /dev/null
@@ -1,16 +0,0 @@
-service vendor.drm-clearkey-hal-1-4 /vendor/bin/hw/android.hardware.drm@1.4-service.clearkey
- interface android.hardware.drm@1.0::ICryptoFactory clearkey
- interface android.hardware.drm@1.0::IDrmFactory clearkey
- interface android.hardware.drm@1.1::ICryptoFactory clearkey
- interface android.hardware.drm@1.1::IDrmFactory clearkey
- interface android.hardware.drm@1.2::ICryptoFactory clearkey
- interface android.hardware.drm@1.2::IDrmFactory clearkey
- interface android.hardware.drm@1.3::ICryptoFactory clearkey
- interface android.hardware.drm@1.3::IDrmFactory clearkey
- interface android.hardware.drm@1.4::ICryptoFactory clearkey
- interface android.hardware.drm@1.4::IDrmFactory clearkey
- class hal
- user media
- group media mediadrm
- ioprio rt 4
- task_profiles ProcessCapacityHigh
diff --git a/drm/mediadrm/plugins/clearkey/hidl/fuzzer/README.md b/drm/mediadrm/plugins/clearkey/hidl/fuzzer/README.md
deleted file mode 100644
index cb45460..0000000
--- a/drm/mediadrm/plugins/clearkey/hidl/fuzzer/README.md
+++ /dev/null
@@ -1,52 +0,0 @@
-# Fuzzer for android.hardware.drm@1.4-service.clearkey
-
-## Plugin Design Considerations
-The fuzzer plugin for android.hardware.drm@1.4-service.clearkey is designed based on an understanding of the
-source code and aims to achieve the following:
-
-##### Maximize code coverage
-The configuration parameters are not hardcoded, but instead selected based on
-incoming data. This ensures more code paths are reached by the fuzzer.
-
-android.hardware.drm@1.4-service.clearkey supports the following parameters:
-1. Security Level (parameter name: `securityLevel`)
-2. Mime Type (parameter name: `mimeType`)
-3. Key Type (parameter name: `keyType`)
-4. Crypto Mode (parameter name: `cryptoMode`)
-
-| Parameter| Valid Values| Configured Value|
-|------------- |-------------| ----- |
-| `securityLevel` | 0.`SecurityLevel::UNKNOWN` 1.`SecurityLevel::SW_SECURE_CRYPTO` 2.`SecurityLevel::SW_SECURE_DECODE` 3.`SecurityLevel::HW_SECURE_CRYPTO` 4.`SecurityLevel::HW_SECURE_DECODE` 5.`SecurityLevel::HW_SECURE_ALL`| Value obtained from FuzzedDataProvider in the range 0 to 5|
-| `mimeType` | 0.`video/mp4` 1.`video/mpeg` 2.`video/x-flv` 3.`video/mj2` 4.`video/3gp2` 5.`video/3gpp` 6.`video/3gpp2` 7.`audio/mp4` 8.`audio/mpeg` 9.`audio/aac` 10.`audio/3gp2` 11.`audio/3gpp` 12.`audio/3gpp2` 13.`audio/webm` 14.`video/webm` 15.`webm` 16.`cenc` 17.`video/unknown` 18.`audio/unknown`| Value obtained from FuzzedDataProvider in the range 0 to 18|
-| `keyType` | 0.`KeyType::OFFLINE` 1.`KeyType::STREAMING` 2.`KeyType::RELEASE` | Value obtained from FuzzedDataProvider in the range 0 to 2|
-| `cryptoMode` | 0.`Mode::UNENCRYPTED` 1.`Mode::AES_CTR` 2.`Mode::AES_CBC_CTS` 3.`Mode::AES_CBC` | Value obtained from FuzzedDataProvider in the range 0 to 3|
-
-This also ensures that the plugin is always deterministic for any given input.
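A minimal sketch of how one of these parameters can be drawn from the fuzz input, mirroring the `getValueFromArray()` helper in clearkeyV1.4_fuzzer.cpp further down in this patch (it assumes the drm@1.1 `SecurityLevel` enum and the libFuzzer `FuzzedDataProvider` header are in scope; `pickSecurityLevel` is a hypothetical name used only here):

```cpp
#include <fuzzer/FuzzedDataProvider.h>

// Pick a SecurityLevel from the fuzz input instead of hardcoding it, so every
// level is reachable while the run stays deterministic for a given input.
SecurityLevel pickSecurityLevel(FuzzedDataProvider* fdp) {
    static const SecurityLevel kLevels[] = {
        SecurityLevel::UNKNOWN,          SecurityLevel::SW_SECURE_CRYPTO,
        SecurityLevel::SW_SECURE_DECODE, SecurityLevel::HW_SECURE_CRYPTO,
        SecurityLevel::HW_SECURE_DECODE, SecurityLevel::HW_SECURE_ALL};
    return kLevels[fdp->ConsumeIntegralInRange<int32_t>(0, 5)];
}
```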
-
-##### Maximize utilization of input data
-The plugin feeds the entire input data to the module.
-This ensures that the plugin tolerates any kind of input (empty, huge,
-malformed, etc.), doesn't `exit()` on any input, and thereby increases the
-chance of identifying vulnerabilities.
-
-## Build
-
-This describes the steps to build the clearkeyV1.4_fuzzer binary.
-
-### Android
-
-#### Steps to build
-Build the fuzzer
-```
- $ mm -j$(nproc) clearkeyV1.4_fuzzer
-```
-#### Steps to run
-To run on a device
-```
- $ adb sync data
- $ adb shell /data/fuzz/${TARGET_ARCH}/clearkeyV1.4_fuzzer/vendor/hw/clearkeyV1.4_fuzzer
-```
-
-## References:
- * http://llvm.org/docs/LibFuzzer.html
- * https://github.com/google/oss-fuzz
diff --git a/drm/mediadrm/plugins/clearkey/hidl/fuzzer/clearkeyV1.4_fuzzer.cpp b/drm/mediadrm/plugins/clearkey/hidl/fuzzer/clearkeyV1.4_fuzzer.cpp
deleted file mode 100644
index afe0e6c..0000000
--- a/drm/mediadrm/plugins/clearkey/hidl/fuzzer/clearkeyV1.4_fuzzer.cpp
+++ /dev/null
@@ -1,719 +0,0 @@
-/*
- * Copyright (C) 2021 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at:
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- *
- */
-
-#include <include/CreatePluginFactories.h>
-
-#include <android/hidl/allocator/1.0/IAllocator.h>
-#include <fuzzer/FuzzedDataProvider.h>
-#include <hidlmemory/mapping.h>
-#include <include/ClearKeyDrmProperties.h>
-#include <include/CryptoFactory.h>
-#include <include/CryptoPlugin.h>
-#include <include/DrmPlugin.h>
-#include <utils/Log.h>
-#include <utils/String8.h>
-
-namespace drm = ::android::hardware::drm;
-using namespace std;
-using namespace android;
-using ::android::sp;
-using ::android::hardware::hidl_array;
-using ::android::hardware::hidl_memory;
-using ::android::hardware::hidl_string;
-using ::android::hardware::hidl_vec;
-using ::android::hardware::Return;
-using ::android::hidl::allocator::V1_0::IAllocator;
-using ::android::hidl::memory::V1_0::IMemory;
-using drm::V1_0::BufferType;
-using drm::V1_0::DestinationBuffer;
-using drm::V1_0::EventType;
-using drm::V1_0::ICryptoPlugin;
-using drm::V1_0::IDrmPlugin;
-using drm::V1_0::IDrmPluginListener;
-using drm::V1_0::KeyedVector;
-using drm::V1_0::KeyStatus;
-using drm::V1_0::KeyStatusType;
-using drm::V1_0::KeyType;
-using drm::V1_0::Mode;
-using drm::V1_0::Pattern;
-using drm::V1_0::SecureStop;
-using drm::V1_0::SharedBuffer;
-using drm::V1_0::Status;
-using drm::V1_0::SubSample;
-using drm::V1_1::DrmMetricGroup;
-using drm::V1_1::HdcpLevel;
-using drm::V1_1::SecureStopRelease;
-using drm::V1_1::SecurityLevel;
-using drm::V1_2::KeySetId;
-using drm::V1_2::OfflineLicenseState;
-using drm::V1_4::clearkey::ICryptoFactory;
-using drm::V1_4::clearkey::IDrmFactory;
-using drm::V1_4::clearkey::kAlgorithmsKey;
-using drm::V1_4::clearkey::kClientIdKey;
-using drm::V1_4::clearkey::kDeviceIdKey;
-using drm::V1_4::clearkey::kDrmErrorTestKey;
-using drm::V1_4::clearkey::kListenerTestSupportKey;
-using drm::V1_4::clearkey::kMetricsKey;
-using drm::V1_4::clearkey::kPluginDescriptionKey;
-using drm::V1_4::clearkey::kVendorKey;
-using drm::V1_4::clearkey::kVersionKey;
-
-typedef ::android::hardware::hidl_vec<uint8_t> SessionId;
-typedef ::android::hardware::hidl_vec<uint8_t> SecureStopId;
-
-static const uint8_t kInvalidUUID[] = {0x10, 0x20, 0x30, 0x40, 0x50, 0x60,
- 0x70, 0x80, 0x10, 0x20, 0x30, 0x40,
- 0x50, 0x60, 0x70, 0x80};
-
-static const uint8_t kClearKeyUUID[] = {0xE2, 0x71, 0x9D, 0x58, 0xA9, 0x85,
- 0xB3, 0xC9, 0x78, 0x1A, 0xB0, 0x30,
- 0xAF, 0x78, 0xD3, 0x0E};
-
-const SecurityLevel kSecurityLevel[] = {
- SecurityLevel::UNKNOWN, SecurityLevel::SW_SECURE_CRYPTO,
- SecurityLevel::SW_SECURE_DECODE, SecurityLevel::HW_SECURE_CRYPTO,
- SecurityLevel::HW_SECURE_DECODE, SecurityLevel::HW_SECURE_ALL};
-
-const char *kMimeType[] = {
- "video/mp4", "video/mpeg", "video/x-flv", "video/mj2", "video/3gp2",
- "video/3gpp", "video/3gpp2", "audio/mp4", "audio/mpeg", "audio/aac",
- "audio/3gp2", "audio/3gpp", "audio/3gpp2", "audio/webm", "video/webm",
- "webm", "cenc", "video/unknown", "audio/unknown"};
-
-const char *kCipherAlgorithm[] = {"AES/CBC/NoPadding", ""};
-
-const char *kMacAlgorithm[] = {"HmacSHA256", ""};
-
-const char *kRSAAlgorithm[] = {"RSASSA-PSS-SHA1", ""};
-
-const std::string kProperty[] = {kVendorKey,
- kVersionKey,
- kPluginDescriptionKey,
- kAlgorithmsKey,
- kListenerTestSupportKey,
- kDrmErrorTestKey,
- kDeviceIdKey,
- kClientIdKey,
- kMetricsKey,
- "placeholder"};
-
-const KeyType kKeyType[] = {KeyType::OFFLINE, KeyType::STREAMING,
- KeyType::RELEASE};
-
-const Mode kCryptoMode[] = {Mode::UNENCRYPTED, Mode::AES_CTR, Mode::AES_CBC_CTS,
- Mode::AES_CBC};
-
-const hidl_vec<uint8_t> validInitData = {
- // BMFF box header (4 bytes size + 'pssh')
- 0x00, 0x00, 0x00, 0x34, 0x70, 0x73, 0x73, 0x68,
- // full box header (version = 1 flags = 0)
- 0x01, 0x00, 0x00, 0x00,
- // system id
- 0x10, 0x77, 0xef, 0xec, 0xc0, 0xb2, 0x4d, 0x02, 0xac, 0xe3, 0x3c, 0x1e,
- 0x52, 0xe2, 0xfb, 0x4b,
- // number of key ids
- 0x00, 0x00, 0x00, 0x01,
- // key id
- 0x60, 0x06, 0x1e, 0x01, 0x7e, 0x47, 0x7e, 0x87, 0x7e, 0x57, 0xd0, 0x0d,
- 0x1e, 0xd0, 0x0d, 0x1e,
- // size of data, must be zero
- 0x00, 0x00, 0x00, 0x00};
-
-const hidl_vec<uint8_t> validKeyResponse = {
- 0x7b, 0x22, 0x6b, 0x65, 0x79, 0x73, 0x22, 0x3a, 0x5b, 0x7b, 0x22,
- 0x6b, 0x74, 0x79, 0x22, 0x3a, 0x22, 0x6f, 0x63, 0x74, 0x22, 0x2c,
- 0x22, 0x6b, 0x69, 0x64, 0x22, 0x3a, 0x22, 0x59, 0x41, 0x59, 0x65,
- 0x41, 0x58, 0x35, 0x48, 0x66, 0x6f, 0x64, 0x2d, 0x56, 0x39, 0x41,
- 0x4e, 0x48, 0x74, 0x41, 0x4e, 0x48, 0x67, 0x22, 0x2c, 0x22, 0x6b,
- 0x22, 0x3a, 0x22, 0x47, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x54, 0x65,
- 0x73, 0x74, 0x4b, 0x65, 0x79, 0x42, 0x61, 0x73, 0x65, 0x36, 0x34,
- 0x67, 0x67, 0x67, 0x22, 0x7d, 0x5d, 0x7d, 0x0a};
-
-const size_t kAESBlockSize = 16;
-const size_t kMaxStringLength = 100;
-const size_t kMaxSubSamples = 10;
-const size_t kMaxNumBytes = 1000;
-const size_t kSegmentIndex = 0;
-
-template <typename T, size_t size>
-T getValueFromArray(FuzzedDataProvider *fdp, const T (&arr)[size]) {
- return arr[fdp->ConsumeIntegralInRange<int32_t>(0, size - 1)];
-}
-
-class TestDrmPluginListener : public IDrmPluginListener {
-public:
- TestDrmPluginListener() {}
- virtual ~TestDrmPluginListener() {}
-
- virtual Return<void> sendEvent(EventType /*eventType*/,
- const hidl_vec<uint8_t> & /*sessionId*/,
- const hidl_vec<uint8_t> & /*data*/) override {
- return Return<void>();
- }
-
- virtual Return<void>
- sendExpirationUpdate(const hidl_vec<uint8_t> & /*sessionId*/,
- int64_t /*expiryTimeInMS*/) override {
- return Return<void>();
- }
-
- virtual Return<void>
- sendKeysChange(const hidl_vec<uint8_t> & /*sessionId*/,
- const hidl_vec<KeyStatus> & /*keyStatusList*/,
- bool /*hasNewUsableKey*/) override {
- return Return<void>();
- }
-};
-
-class ClearKeyFuzzer {
-public:
- ~ClearKeyFuzzer() { deInit(); }
- bool init();
- void process(const uint8_t *data, size_t size);
-
-private:
- void deInit();
- void invokeDrmPlugin(const uint8_t *data, size_t size);
- void invokeCryptoPlugin(const uint8_t *data);
- void invokeDrm(const uint8_t *data, size_t size);
- void invokeCrypto(const uint8_t *data);
- void invokeDrmDecryptEncryptAPI(const uint8_t *data, size_t size);
- bool invokeDrmFactory();
- bool invokeCryptoFactory();
- void invokeDrmV1_4API();
- void invokeDrmSetAlgorithmAPI();
- void invokeDrmPropertyAPI();
- void invokeDrmSecureStopAPI();
- void invokeDrmOfflineLicenseAPI(const uint8_t *data, size_t size);
- SessionId getSessionId();
- SecureStopRelease makeSecureRelease(const SecureStop &stop);
- sp<IDrmFactory> mDrmFactory = nullptr;
- sp<ICryptoFactory> mCryptoFactory = nullptr;
- sp<IDrmPlugin> mDrmPlugin = nullptr;
- sp<drm::V1_1::IDrmPlugin> mDrmPluginV1_1 = nullptr;
- sp<drm::V1_2::IDrmPlugin> mDrmPluginV1_2 = nullptr;
- sp<drm::V1_4::IDrmPlugin> mDrmPluginV1_4 = nullptr;
- sp<drm::V1_4::ICryptoPlugin> mCryptoPluginV1_4 = nullptr;
- sp<ICryptoPlugin> mCryptoPlugin = nullptr;
- FuzzedDataProvider *mFDP = nullptr;
- SessionId mSessionId = {};
- SessionId mSessionIdV1 = {};
-};
-
-void ClearKeyFuzzer::deInit() {
- if (mDrmPluginV1_1) {
- mDrmPluginV1_1->closeSession(mSessionIdV1);
- }
- if (mDrmPluginV1_2) {
- mDrmPluginV1_2->closeSession(mSessionId);
- }
- mDrmFactory.clear();
- mCryptoFactory.clear();
- mDrmPlugin.clear();
- mDrmPluginV1_1.clear();
- mDrmPluginV1_2.clear();
- mDrmPluginV1_4.clear();
- mCryptoPlugin.clear();
- mCryptoPluginV1_4.clear();
- mSessionId = {};
- mSessionIdV1 = {};
-}
-
-void ClearKeyFuzzer::invokeDrmV1_4API() {
- mDrmPluginV1_4->requiresSecureDecoderDefault(
- getValueFromArray(mFDP, kMimeType));
- mDrmPluginV1_4->requiresSecureDecoder(
- getValueFromArray(mFDP, kMimeType),
- getValueFromArray(mFDP, kSecurityLevel));
- mDrmPluginV1_4->setPlaybackId(
- mSessionId, mFDP->ConsumeRandomLengthString(kMaxStringLength).c_str());
- drm::V1_4::IDrmPlugin::getLogMessages_cb cb =
- [&]([[maybe_unused]] drm::V1_4::Status status,
- [[maybe_unused]] hidl_vec<drm::V1_4::LogMessage> logs) {};
- mDrmPluginV1_4->getLogMessages(cb);
-}
-
-void ClearKeyFuzzer::invokeDrmSetAlgorithmAPI() {
- const hidl_string cipherAlgo =
- mFDP->ConsumeBool()
- ? mFDP->ConsumeRandomLengthString(kMaxStringLength).c_str()
- : hidl_string(kCipherAlgorithm[mFDP->ConsumeBool()]);
- mDrmPluginV1_2->setCipherAlgorithm(mSessionId, cipherAlgo);
-
- const hidl_string macAlgo =
- mFDP->ConsumeBool()
- ? mFDP->ConsumeRandomLengthString(kMaxStringLength).c_str()
- : hidl_string(kMacAlgorithm[mFDP->ConsumeBool()]);
- mDrmPluginV1_2->setMacAlgorithm(mSessionId, macAlgo);
-}
-
-void ClearKeyFuzzer::invokeDrmPropertyAPI() {
- mDrmPluginV1_2->setPropertyString(
- hidl_string(getValueFromArray(mFDP, kProperty)), hidl_string("value"));
-
- hidl_string stringValue;
- mDrmPluginV1_2->getPropertyString(
- getValueFromArray(mFDP, kProperty),
- [&](Status status, const hidl_string &hValue) {
- if (status == Status::OK) {
- stringValue = hValue;
- }
- });
-
- hidl_vec<uint8_t> value = {};
- mDrmPluginV1_2->setPropertyByteArray(
- hidl_string(getValueFromArray(mFDP, kProperty)), value);
-
- hidl_vec<uint8_t> byteValue;
- mDrmPluginV1_2->getPropertyByteArray(
- getValueFromArray(mFDP, kProperty),
- [&](Status status, const hidl_vec<uint8_t> &hValue) {
- if (status == Status::OK) {
- byteValue = hValue;
- }
- });
-}
-
-SessionId ClearKeyFuzzer::getSessionId() {
- SessionId emptySessionId = {};
- return mFDP->ConsumeBool() ? mSessionId : emptySessionId;
-}
-
-void ClearKeyFuzzer::invokeDrmDecryptEncryptAPI(const uint8_t *data,
- size_t size) {
- uint32_t currSessions, maximumSessions;
- mDrmPluginV1_2->getNumberOfSessions(
- [&](Status status, uint32_t hCurrentSessions, uint32_t hMaxSessions) {
- if (status == Status::OK) {
- currSessions = hCurrentSessions;
- maximumSessions = hMaxSessions;
- }
- });
-
- HdcpLevel connected, maximum;
- mDrmPluginV1_2->getHdcpLevels([&](Status status,
- const HdcpLevel &hConnectedLevel,
- const HdcpLevel &hMaxLevel) {
- if (status == Status::OK) {
- connected = hConnectedLevel;
- maximum = hMaxLevel;
- }
- });
-
- drm::V1_2::HdcpLevel connectedV1_2, maximumV1_2;
- mDrmPluginV1_2->getHdcpLevels_1_2(
- [&](drm::V1_2::Status status, const drm::V1_2::HdcpLevel &connectedLevel,
- const drm::V1_2::HdcpLevel &maxLevel) {
- if (status == drm::V1_2::Status::OK) {
- connectedV1_2 = connectedLevel;
- maximumV1_2 = maxLevel;
- }
- });
-
- SecurityLevel securityLevel;
- mDrmPluginV1_2->getSecurityLevel(mSessionId,
- [&](Status status, SecurityLevel hLevel) {
- if (status == Status::OK) {
- securityLevel = hLevel;
- }
- });
-
- hidl_vec<DrmMetricGroup> metrics;
- mDrmPluginV1_2->getMetrics(
- [&](Status status, hidl_vec<DrmMetricGroup> hMetricGroups) {
- if (status == Status::OK) {
- metrics = hMetricGroups;
- }
- });
-
- hidl_string certificateType;
- hidl_string certificateAuthority;
- mDrmPluginV1_2->getProvisionRequest(certificateType, certificateAuthority,
- [&]([[maybe_unused]] Status status,
- const hidl_vec<uint8_t> &,
- const hidl_string &) {});
-
- mDrmPluginV1_2->getProvisionRequest_1_2(
- certificateType, certificateAuthority,
- [&]([[maybe_unused]] drm::V1_2::Status status, const hidl_vec<uint8_t> &,
- const hidl_string &) {});
-
- hidl_vec<uint8_t> response;
- mDrmPluginV1_2->provideProvisionResponse(
- response, [&]([[maybe_unused]] Status status, const hidl_vec<uint8_t> &,
- const hidl_vec<uint8_t> &) {});
-
- hidl_vec<uint8_t> initData = {};
- if (mFDP->ConsumeBool()) {
- initData = validInitData;
- } else {
- initData.setToExternal(const_cast<uint8_t *>(data), kAESBlockSize);
- }
- hidl_string mimeType = getValueFromArray(mFDP, kMimeType);
- KeyType keyType = mFDP->ConsumeBool()
- ? static_cast<KeyType>(mFDP->ConsumeIntegral<size_t>())
- : getValueFromArray(mFDP, kKeyType);
- KeyedVector optionalParameters;
- mDrmPluginV1_2->getKeyRequest_1_2(
- mSessionId, initData, mimeType, keyType, optionalParameters,
- [&]([[maybe_unused]] drm::V1_2::Status status, const hidl_vec<uint8_t> &,
- drm::V1_1::KeyRequestType, const hidl_string &) {});
- mDrmPluginV1_1->getKeyRequest_1_1(
- mSessionIdV1, initData, mimeType, keyType, optionalParameters,
- [&]([[maybe_unused]] drm::V1_0::Status status, const hidl_vec<uint8_t> &,
- drm::V1_1::KeyRequestType, const hidl_string &) {});
- hidl_vec<uint8_t> emptyInitData = {};
- mDrmPlugin->getKeyRequest(
- mSessionId, mFDP->ConsumeBool() ? initData : emptyInitData, mimeType,
- keyType, optionalParameters,
- [&]([[maybe_unused]] drm::V1_0::Status status, const hidl_vec<uint8_t> &,
- drm::V1_0::KeyRequestType, const hidl_string &) {});
-
- hidl_vec<uint8_t> keyResponse = {};
- if (mFDP->ConsumeBool()) {
- keyResponse = validKeyResponse;
- } else {
- keyResponse.setToExternal(const_cast<uint8_t *>(data), size);
- }
- hidl_vec<uint8_t> keySetId;
- hidl_vec<uint8_t> emptyKeyResponse = {};
- mDrmPluginV1_2->provideKeyResponse(
- getSessionId(), mFDP->ConsumeBool() ? keyResponse : emptyKeyResponse,
- [&](Status status, const hidl_vec<uint8_t> &hKeySetId) {
- if (status == Status::OK) {
- keySetId = hKeySetId;
- }
- });
-
- mDrmPluginV1_2->restoreKeys(getSessionId(), keySetId);
-
- mDrmPluginV1_2->queryKeyStatus(
- getSessionId(),
- [&]([[maybe_unused]] Status status, KeyedVector /* info */) {});
-
- hidl_vec<uint8_t> keyId, input, iv;
- keyId.setToExternal(const_cast<uint8_t *>(data), size);
- input.setToExternal(const_cast<uint8_t *>(data), size);
- iv.setToExternal(const_cast<uint8_t *>(data), size);
- mDrmPluginV1_2->encrypt(
- getSessionId(), keyId, input, iv,
- [&]([[maybe_unused]] Status status, const hidl_vec<uint8_t> &) {});
-
- mDrmPluginV1_2->decrypt(
- getSessionId(), keyId, input, iv,
- [&]([[maybe_unused]] Status status, const hidl_vec<uint8_t> &) {});
-
- hidl_vec<uint8_t> message;
- message.setToExternal(const_cast<uint8_t *>(data), size);
- mDrmPluginV1_2->sign(
- getSessionId(), keyId, message,
- [&]([[maybe_unused]] Status status, const hidl_vec<uint8_t> &) {});
-
- hidl_vec<uint8_t> signature;
- signature.setToExternal(const_cast<uint8_t *>(data), size);
- mDrmPluginV1_2->verify(getSessionId(), keyId, message, signature,
- [&]([[maybe_unused]] Status status, bool) {});
-
- hidl_vec<uint8_t> wrappedKey;
- signature.setToExternal(const_cast<uint8_t *>(data), size);
- mDrmPluginV1_2->signRSA(
- getSessionId(), kRSAAlgorithm[mFDP->ConsumeBool()], message, wrappedKey,
- [&]([[maybe_unused]] Status status, const hidl_vec<uint8_t> &) {});
-
- mDrmPluginV1_2->removeKeys(getSessionId());
-}
-
-/**
- * Helper function to create a secure release message for
- * a secure stop. The clearkey secure stop release format
- * is just a count followed by the secure stop opaque data.
- */
-SecureStopRelease ClearKeyFuzzer::makeSecureRelease(const SecureStop &stop) {
- std::vector<uint8_t> stopData = stop.opaqueData;
- std::vector<uint8_t> buffer;
- std::string count = "0001";
-
- auto it = buffer.insert(buffer.begin(), count.begin(), count.end());
- buffer.insert(it + count.size(), stopData.begin(), stopData.end());
- SecureStopRelease release = {.opaqueData = hidl_vec<uint8_t>(buffer)};
- return release;
-}
-
-void ClearKeyFuzzer::invokeDrmSecureStopAPI() {
- SecureStopId ssid;
- mDrmPluginV1_2->getSecureStop(
- ssid, [&]([[maybe_unused]] Status status, const SecureStop &) {});
-
- mDrmPluginV1_2->getSecureStopIds(
- [&]([[maybe_unused]] Status status,
- [[maybe_unused]] const hidl_vec<SecureStopId> &secureStopIds) {});
-
- SecureStopRelease release;
- mDrmPluginV1_2->getSecureStops(
- [&]([[maybe_unused]] Status status, const hidl_vec<SecureStop> &stops) {
- if (stops.size() > 0) {
- release = makeSecureRelease(
- stops[mFDP->ConsumeIntegralInRange<size_t>(0, stops.size() - 1)]);
- }
- });
-
- mDrmPluginV1_2->releaseSecureStops(release);
-
- mDrmPluginV1_2->removeSecureStop(ssid);
-
- mDrmPluginV1_2->removeAllSecureStops();
-
- mDrmPluginV1_2->releaseSecureStop(ssid);
-
- mDrmPluginV1_2->releaseAllSecureStops();
-}
-
-void ClearKeyFuzzer::invokeDrmOfflineLicenseAPI(const uint8_t *data,
- size_t size) {
- hidl_vec<KeySetId> keySetIds = {};
- mDrmPluginV1_2->getOfflineLicenseKeySetIds(
- [&](Status status, const hidl_vec<KeySetId> &hKeySetIds) {
- if (status == Status::OK) {
- keySetIds = hKeySetIds;
- }
- });
-
- OfflineLicenseState licenseState;
- KeySetId keySetId = {};
- if (keySetIds.size() > 0) {
- keySetId = keySetIds[mFDP->ConsumeIntegralInRange<size_t>(
- 0, keySetIds.size() - 1)];
- } else {
- keySetId.setToExternal(const_cast<uint8_t *>(data), size);
- }
- mDrmPluginV1_2->getOfflineLicenseState(
- keySetId, [&](Status status, OfflineLicenseState hLicenseState) {
- if (status == Status::OK) {
- licenseState = hLicenseState;
- }
- });
-
- mDrmPluginV1_2->removeOfflineLicense(keySetId);
-}
-
-void ClearKeyFuzzer::invokeDrmPlugin(const uint8_t *data, size_t size) {
- SecurityLevel secLevel =
- mFDP->ConsumeBool()
- ? getValueFromArray(mFDP, kSecurityLevel)
- : static_cast<SecurityLevel>(mFDP->ConsumeIntegral<uint32_t>());
- mDrmPluginV1_1->openSession_1_1(
- secLevel, [&]([[maybe_unused]] Status status, const SessionId &id) {
- mSessionIdV1 = id;
- });
- mDrmPluginV1_2->openSession([&]([[maybe_unused]] Status status,
- const SessionId &id) { mSessionId = id; });
-
- sp<TestDrmPluginListener> listener = new TestDrmPluginListener();
- mDrmPluginV1_2->setListener(listener);
- const hidl_vec<KeyStatus> keyStatusList = {
- {{1}, KeyStatusType::USABLE},
- {{2}, KeyStatusType::EXPIRED},
- {{3}, KeyStatusType::OUTPUTNOTALLOWED},
- {{4}, KeyStatusType::STATUSPENDING},
- {{5}, KeyStatusType::INTERNALERROR},
- };
- mDrmPluginV1_2->sendKeysChange(mSessionId, keyStatusList, true);
-
- invokeDrmV1_4API();
- invokeDrmSetAlgorithmAPI();
- invokeDrmPropertyAPI();
- invokeDrmDecryptEncryptAPI(data, size);
- invokeDrmSecureStopAPI();
- invokeDrmOfflineLicenseAPI(data, size);
-}
-
-void ClearKeyFuzzer::invokeCryptoPlugin(const uint8_t *data) {
- mCryptoPlugin->requiresSecureDecoderComponent(
- getValueFromArray(mFDP, kMimeType));
-
- const uint32_t width = mFDP->ConsumeIntegral<uint32_t>();
- const uint32_t height = mFDP->ConsumeIntegral<uint32_t>();
- mCryptoPlugin->notifyResolution(width, height);
-
- mCryptoPlugin->setMediaDrmSession(mSessionId);
-
- size_t totalSize = 0;
- const size_t numSubSamples =
- mFDP->ConsumeIntegralInRange<size_t>(1, kMaxSubSamples);
-
- const Pattern pattern = {0, 0};
- hidl_vec<SubSample> subSamples;
- subSamples.resize(numSubSamples);
-
- for (size_t i = 0; i < numSubSamples; ++i) {
- const uint32_t clearBytes =
- mFDP->ConsumeIntegralInRange<uint32_t>(0, kMaxNumBytes);
- const uint32_t encryptedBytes =
- mFDP->ConsumeIntegralInRange<uint32_t>(0, kMaxNumBytes);
- subSamples[i].numBytesOfClearData = clearBytes;
- subSamples[i].numBytesOfEncryptedData = encryptedBytes;
- totalSize += subSamples[i].numBytesOfClearData;
- totalSize += subSamples[i].numBytesOfEncryptedData;
- }
-
- // The first totalSize bytes of shared memory is the encrypted
- // input, the second totalSize bytes is the decrypted output.
- size_t memoryBytes = totalSize * 2;
-
- sp<IAllocator> ashmemAllocator = IAllocator::getService("ashmem");
- if (!ashmemAllocator.get()) {
- return;
- }
-
- hidl_memory hidlMemory;
- ashmemAllocator->allocate(memoryBytes, [&]([[maybe_unused]] bool success,
- const hidl_memory &memory) {
- mCryptoPlugin->setSharedBufferBase(memory, kSegmentIndex);
- hidlMemory = memory;
- });
-
- sp<IMemory> mappedMemory = mapMemory(hidlMemory);
- if (!mappedMemory.get()) {
- return;
- }
- mCryptoPlugin->setSharedBufferBase(hidlMemory, kSegmentIndex);
-
- uint32_t srcBufferId =
- mFDP->ConsumeBool() ? kSegmentIndex : mFDP->ConsumeIntegral<uint32_t>();
- const SharedBuffer sourceBuffer = {
- .bufferId = srcBufferId, .offset = 0, .size = totalSize};
-
- BufferType type = mFDP->ConsumeBool() ? BufferType::SHARED_MEMORY
- : BufferType::NATIVE_HANDLE;
- uint32_t destBufferId =
- mFDP->ConsumeBool() ? kSegmentIndex : mFDP->ConsumeIntegral<uint32_t>();
- const DestinationBuffer destBuffer = {
- .type = type,
- {.bufferId = destBufferId, .offset = totalSize, .size = totalSize},
- .secureMemory = nullptr};
-
- const uint64_t offset = 0;
- uint32_t bytesWritten = 0;
- hidl_array<uint8_t, kAESBlockSize> keyId =
- hidl_array<uint8_t, kAESBlockSize>(data);
- hidl_array<uint8_t, kAESBlockSize> iv =
- hidl_array<uint8_t, kAESBlockSize>(data);
- Mode mode = getValueFromArray(mFDP, kCryptoMode);
- mCryptoPlugin->decrypt(
- mFDP->ConsumeBool(), keyId, iv, mode, pattern, subSamples, sourceBuffer,
- offset, destBuffer,
- [&]([[maybe_unused]] Status status, uint32_t count,
- [[maybe_unused]] string detailedError) { bytesWritten = count; });
- drm::V1_4::IDrmPlugin::getLogMessages_cb cb =
- [&]([[maybe_unused]] drm::V1_4::Status status,
- [[maybe_unused]] hidl_vec<drm::V1_4::LogMessage> logs) {};
- mCryptoPluginV1_4->getLogMessages(cb);
-}
-
-bool ClearKeyFuzzer::invokeDrmFactory() {
- hidl_string packageName(
- mFDP->ConsumeRandomLengthString(kMaxStringLength).c_str());
- hidl_string mimeType(getValueFromArray(mFDP, kMimeType));
- SecurityLevel securityLevel =
- mFDP->ConsumeBool()
- ? getValueFromArray(mFDP, kSecurityLevel)
- : static_cast<SecurityLevel>(mFDP->ConsumeIntegral<uint32_t>());
- const hidl_array<uint8_t, 16> uuid =
- mFDP->ConsumeBool() ? kClearKeyUUID : kInvalidUUID;
- mDrmFactory->isCryptoSchemeSupported_1_2(uuid, mimeType, securityLevel);
- mDrmFactory->createPlugin(
- uuid, packageName, [&](Status status, const sp<IDrmPlugin> &plugin) {
- if (status == Status::OK) {
- mDrmPlugin = plugin.get();
- mDrmPluginV1_1 = drm::V1_1::IDrmPlugin::castFrom(mDrmPlugin);
- mDrmPluginV1_2 = drm::V1_2::IDrmPlugin::castFrom(mDrmPlugin);
- mDrmPluginV1_4 = drm::V1_4::IDrmPlugin::castFrom(mDrmPlugin);
- }
- });
-
- std::vector<hidl_array<uint8_t, 16>> supportedSchemes;
- mDrmFactory->getSupportedCryptoSchemes(
- [&](const hidl_vec<hidl_array<uint8_t, 16>> &schemes) {
- for (const auto &scheme : schemes) {
- supportedSchemes.push_back(scheme);
- }
- });
-
- if (!(mDrmPlugin && mDrmPluginV1_1 && mDrmPluginV1_2 && mDrmPluginV1_4)) {
- return false;
- }
- return true;
-}
-
-bool ClearKeyFuzzer::invokeCryptoFactory() {
- const hidl_array<uint8_t, 16> uuid =
- mFDP->ConsumeBool() ? kClearKeyUUID : kInvalidUUID;
- mCryptoFactory->createPlugin(
- uuid, mSessionId, [this](Status status, const sp<ICryptoPlugin> &plugin) {
- if (status == Status::OK) {
- mCryptoPlugin = plugin;
- mCryptoPluginV1_4 = drm::V1_4::ICryptoPlugin::castFrom(mCryptoPlugin);
- }
- });
-
- if (!mCryptoPlugin && !mCryptoPluginV1_4) {
- return false;
- }
- return true;
-}
-
-void ClearKeyFuzzer::invokeDrm(const uint8_t *data, size_t size) {
- if (!invokeDrmFactory()) {
- return;
- }
- invokeDrmPlugin(data, size);
-}
-
-void ClearKeyFuzzer::invokeCrypto(const uint8_t *data) {
- if (!invokeCryptoFactory()) {
- return;
- }
- invokeCryptoPlugin(data);
-}
-
-void ClearKeyFuzzer::process(const uint8_t *data, size_t size) {
- mFDP = new FuzzedDataProvider(data, size);
- invokeDrm(data, size);
- invokeCrypto(data);
- delete mFDP;
-}
-
-bool ClearKeyFuzzer::init() {
- mCryptoFactory =
- android::hardware::drm::V1_4::clearkey::createCryptoFactory();
- mDrmFactory = android::hardware::drm::V1_4::clearkey::createDrmFactory();
- if (!mDrmFactory && !mCryptoFactory) {
- return false;
- }
- return true;
-}
-
-extern "C" int LLVMFuzzerTestOneInput(const uint8_t *data, size_t size) {
- if (size < kAESBlockSize) {
- return 0;
- }
- ClearKeyFuzzer clearKeyFuzzer;
- if (clearKeyFuzzer.init()) {
- clearKeyFuzzer.process(data, size);
- }
- return 0;
-}
diff --git a/drm/mediadrm/plugins/clearkey/hidl/include/AesCtrDecryptor.h b/drm/mediadrm/plugins/clearkey/hidl/include/AesCtrDecryptor.h
deleted file mode 100644
index 97794f7..0000000
--- a/drm/mediadrm/plugins/clearkey/hidl/include/AesCtrDecryptor.h
+++ /dev/null
@@ -1,50 +0,0 @@
-/*
- * Copyright (C) 2018 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#ifndef CLEARKEY_AES_CTR_DECRYPTOR_H_
-#define CLEARKEY_AES_CTR_DECRYPTOR_H_
-
-#include "ClearKeyTypes.h"
-
-namespace android {
-namespace hardware {
-namespace drm {
-namespace V1_4 {
-namespace clearkey {
-
-using ::android::hardware::drm::V1_0::Status;
-using ::android::hardware::drm::V1_0::SubSample;
-
-class AesCtrDecryptor {
-public:
- AesCtrDecryptor() {}
-
- Status decrypt(const std::vector<uint8_t>& key, const Iv iv,
- const uint8_t* source, uint8_t* destination,
- const std::vector<SubSample> subSamples, size_t numSubSamples,
- size_t* bytesDecryptedOut);
-
-private:
- CLEARKEY_DISALLOW_COPY_AND_ASSIGN(AesCtrDecryptor);
-};
-
-} // namespace clearkey
-} // namespace V1_4
-} // namespace drm
-} // namespace hardware
-} // namespace android
-
-#endif // CLEARKEY_AES_CTR_DECRYPTOR_H_
diff --git a/drm/mediadrm/plugins/clearkey/hidl/include/Base64.h b/drm/mediadrm/plugins/clearkey/hidl/include/Base64.h
deleted file mode 100644
index 2349f23..0000000
--- a/drm/mediadrm/plugins/clearkey/hidl/include/Base64.h
+++ /dev/null
@@ -1,46 +0,0 @@
-/*
- * Copyright (C) 2018 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#ifndef BASE_64_H_
-
-#define BASE_64_H_
-
-#include <android/hardware/drm/1.0/types.h>
-
-#include "Buffer.h"
-
-namespace android {
-namespace hardware {
-namespace drm {
-namespace V1_4 {
-namespace clearkey {
-
-using ::android::sp;
-
-struct Buffer;
-
-sp<Buffer> decodeBase64(const std::string &s);
-void encodeBase64(const void *data, size_t size, std::string *out);
-
-void encodeBase64Url(const void *data, size_t size, std::string *out);
-
-} // namespace clearkey
-} // namespace V1_4
-} // namespace drm
-} // namespace hardware
-} // namespace android
-
-#endif // BASE_64_H_
diff --git a/drm/mediadrm/plugins/clearkey/hidl/include/Buffer.h b/drm/mediadrm/plugins/clearkey/hidl/include/Buffer.h
deleted file mode 100644
index 66aaa73..0000000
--- a/drm/mediadrm/plugins/clearkey/hidl/include/Buffer.h
+++ /dev/null
@@ -1,62 +0,0 @@
-/*
- * Copyright (C) 2018 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#ifndef BUFFER_H_
-#define BUFFER_H_
-
-#include <android/hardware/drm/1.0/types.h>
-#include <utils/RefBase.h>
-
-#include "ClearKeyTypes.h"
-
-namespace android {
-namespace hardware {
-namespace drm {
-namespace V1_4 {
-namespace clearkey {
-
-using ::android::sp;
-
-struct Buffer : public RefBase {
- explicit Buffer(size_t capacity);
-
- uint8_t *base() { return reinterpret_cast<uint8_t *>(mData); }
- uint8_t *data() { return reinterpret_cast<uint8_t *>(mData) + mRangeOffset; }
- size_t capacity() const { return mCapacity; }
- size_t size() const { return mRangeLength; }
- size_t offset() const { return mRangeOffset; }
-
-protected:
- virtual ~Buffer();
-
-private:
- void *mData;
- size_t mCapacity;
- size_t mRangeOffset;
- size_t mRangeLength;
-
- bool mOwnsData;
-
- CLEARKEY_DISALLOW_COPY_AND_ASSIGN(Buffer);
-};
-
-} // namespace clearkey
-} // namespace V1_4
-} // namespace drm
-} // namespace hardware
-} // namespace android
-
-#endif // BUFFER_H_
diff --git a/drm/mediadrm/plugins/clearkey/hidl/include/ClearKeyDrmProperties.h b/drm/mediadrm/plugins/clearkey/hidl/include/ClearKeyDrmProperties.h
deleted file mode 100644
index 8e47c45..0000000
--- a/drm/mediadrm/plugins/clearkey/hidl/include/ClearKeyDrmProperties.h
+++ /dev/null
@@ -1,64 +0,0 @@
-/*
- * Copyright (C) 2017 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#ifndef CLEARKEY_DRM_PROPERTIES_H_
-#define CLEARKEY_DRM_PROPERTIES_H_
-
-#include <string.h>
-
-namespace android {
-namespace hardware {
-namespace drm {
-namespace V1_4 {
-namespace clearkey {
-
-static const std::string kVendorKey("vendor");
-static const std::string kVendorValue("Google");
-static const std::string kVersionKey("version");
-static const std::string kVersionValue("1.2");
-static const std::string kPluginDescriptionKey("description");
-static const std::string kPluginDescriptionValue("ClearKey CDM");
-static const std::string kAlgorithmsKey("algorithms");
-static const std::string kAlgorithmsValue("");
-static const std::string kListenerTestSupportKey("listenerTestSupport");
-static const std::string kListenerTestSupportValue("true");
-static const std::string kDrmErrorTestKey("drmErrorTest");
-static const std::string kDrmErrorTestValue("");
-static const std::string kResourceContentionValue("resourceContention");
-static const std::string kLostStateValue("lostState");
-static const std::string kFrameTooLargeValue("frameTooLarge");
-static const std::string kInvalidStateValue("invalidState");
-
-static const std::string kDeviceIdKey("deviceId");
-static const uint8_t kTestDeviceIdData[] =
- {0x0, 0x1, 0x2, 0x3, 0x4, 0x5, 0x6, 0x7,
- 0x8, 0x9, 0xa, 0xb, 0xc, 0xd, 0xe, 0xf};
-
-// settable byte array property
-static const std::string kClientIdKey("clientId");
-
-// TODO stub out metrics for nw
-static const std::string kMetricsKey("metrics");
-static const uint8_t kMetricsData[] = { 0 };
-
-} // namespace clearkey
-} // namespace V1_4
-} // namespace drm
-} // namespace hardware
-} // namespace android
-
-#endif // CLEARKEY_DRM_PROPERTIES_H_
-
diff --git a/drm/mediadrm/plugins/clearkey/hidl/include/ClearKeyTypes.h b/drm/mediadrm/plugins/clearkey/hidl/include/ClearKeyTypes.h
deleted file mode 100644
index cd18029..0000000
--- a/drm/mediadrm/plugins/clearkey/hidl/include/ClearKeyTypes.h
+++ /dev/null
@@ -1,56 +0,0 @@
-/*
- * Copyright (C) 2018 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#ifndef CLEARKEY_MACROS_H_
-#define CLEARKEY_MACROS_H_
-
-#include <android/hardware/drm/1.2/types.h>
-
-#include <map>
-
-namespace android {
-namespace hardware {
-namespace drm {
-namespace V1_4 {
-namespace clearkey {
-
-using ::android::hardware::drm::V1_0::KeyValue;
-using ::android::hardware::drm::V1_1::SecurityLevel;
-using ::android::hardware::hidl_vec;
-
-const uint8_t kBlockSize = 16; //AES_BLOCK_SIZE;
-typedef uint8_t KeyId[kBlockSize];
-typedef uint8_t Iv[kBlockSize];
-
-typedef ::android::hardware::drm::V1_0::SubSample SubSample;
-typedef std::map<std::vector<uint8_t>, std::vector<uint8_t> > KeyMap;
-
-#define CLEARKEY_DISALLOW_COPY_AND_ASSIGN(TypeName) \
- TypeName(const TypeName&) = delete; \
- void operator=(const TypeName&) = delete;
-
-#define CLEARKEY_DISALLOW_COPY_AND_ASSIGN_AND_NEW(TypeName) \
- TypeName() = delete; \
- TypeName(const TypeName&) = delete; \
- void operator=(const TypeName&) = delete;
-
-} // namespace clearkey
-} // namespace V1_4
-} // namespace drm
-} // namespace hardware
-} // namespace android
-
-#endif // CLEARKEY_MACROS_H_
diff --git a/drm/mediadrm/plugins/clearkey/hidl/include/CreatePluginFactories.h b/drm/mediadrm/plugins/clearkey/hidl/include/CreatePluginFactories.h
deleted file mode 100644
index d4a8a17..0000000
--- a/drm/mediadrm/plugins/clearkey/hidl/include/CreatePluginFactories.h
+++ /dev/null
@@ -1,42 +0,0 @@
-/*
- * Copyright (C) 2018 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#ifndef CLEARKEY_CREATE_PLUGIN_FACTORIES_H_
-#define CLEARKEY_CREATE_PLUGIN_FACTORIES_H_
-
-#include <android/hardware/drm/1.4/ICryptoFactory.h>
-#include <android/hardware/drm/1.4/IDrmFactory.h>
-
-namespace android {
-namespace hardware {
-namespace drm {
-namespace V1_4 {
-namespace clearkey {
-
-using ::android::hardware::drm::V1_4::ICryptoFactory;
-using ::android::hardware::drm::V1_4::IDrmFactory;
-
-extern "C" {
- IDrmFactory* createDrmFactory();
- ICryptoFactory* createCryptoFactory();
-}
-
-} // namespace clearkey
-} // namespace V1_4
-} // namespace drm
-} // namespace hardware
-} // namespace android
-#endif // CLEARKEY_CREATE_PLUGIN_FACTORIES_H_
diff --git a/drm/mediadrm/plugins/clearkey/hidl/include/CryptoFactory.h b/drm/mediadrm/plugins/clearkey/hidl/include/CryptoFactory.h
deleted file mode 100644
index e6b541f..0000000
--- a/drm/mediadrm/plugins/clearkey/hidl/include/CryptoFactory.h
+++ /dev/null
@@ -1,60 +0,0 @@
-/*
- * Copyright (C) 2018 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#ifndef CLEARKEY_CRYPTO_FACTORY_H_
-#define CLEARKEY_CRYPTO_FACTORY_H_
-
-#include <android/hardware/drm/1.0/ICryptoPlugin.h>
-#include <android/hardware/drm/1.4/ICryptoFactory.h>
-
-#include "ClearKeyTypes.h"
-
-namespace android {
-namespace hardware {
-namespace drm {
-namespace V1_4 {
-namespace clearkey {
-
-using ::android::hardware::drm::V1_4::ICryptoFactory;
-using ::android::hardware::drm::V1_0::ICryptoPlugin;
-using ::android::hardware::hidl_array;
-using ::android::hardware::hidl_string;
-using ::android::hardware::Return;
-
-struct CryptoFactory : public ICryptoFactory {
- CryptoFactory() {}
- virtual ~CryptoFactory() {}
-
- Return<bool> isCryptoSchemeSupported(const hidl_array<uint8_t, 16>& uuid)
- override;
-
- Return<void> createPlugin(
- const hidl_array<uint8_t, 16>& uuid,
- const hidl_vec<uint8_t>& initData,
- createPlugin_cb _hidl_cb) override;
-
-private:
- CLEARKEY_DISALLOW_COPY_AND_ASSIGN(CryptoFactory);
-
-};
-
-} // namespace clearkey
-} // namespace V1_4
-} // namespace drm
-} // namespace hardware
-} // namespace android
-
-#endif // CLEARKEY_CRYPTO_FACTORY_H_
diff --git a/drm/mediadrm/plugins/clearkey/hidl/include/CryptoPlugin.h b/drm/mediadrm/plugins/clearkey/hidl/include/CryptoPlugin.h
deleted file mode 100644
index b272a83..0000000
--- a/drm/mediadrm/plugins/clearkey/hidl/include/CryptoPlugin.h
+++ /dev/null
@@ -1,124 +0,0 @@
-/*
- * Copyright (C) 2018 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#ifndef CLEARKEY_CRYPTO_PLUGIN_H_
-#define CLEARKEY_CRYPTO_PLUGIN_H_
-
-#include <android/hardware/drm/1.4/ICryptoPlugin.h>
-#include <android/hidl/memory/1.0/IMemory.h>
-
-#include <mutex>
-
-#include "ClearKeyTypes.h"
-#include "Session.h"
-#include "Utils.h"
-
-namespace {
- static const size_t KEY_ID_SIZE = 16;
- static const size_t KEY_IV_SIZE = 16;
-}
-
-namespace android {
-namespace hardware {
-namespace drm {
-namespace V1_4 {
-namespace clearkey {
-
-namespace drm = ::android::hardware::drm;
-using drm::V1_0::DestinationBuffer;
-using drm::V1_0::Mode;
-using drm::V1_0::Pattern;
-using drm::V1_0::SharedBuffer;
-using drm::V1_0::Status;
-using drm::V1_0::SubSample;
-
-using ::android::hardware::hidl_array;
-using ::android::hardware::hidl_memory;
-using ::android::hardware::hidl_string;
-using ::android::hardware::hidl_vec;
-using ::android::hardware::Return;
-using ::android::hardware::Void;
-using ::android::hidl::memory::V1_0::IMemory;
-using ::android::sp;
-
-typedef drm::V1_2::Status Status_V1_2;
-
-struct CryptoPlugin : public drm::V1_4::ICryptoPlugin {
- explicit CryptoPlugin(const hidl_vec<uint8_t>& sessionId) {
- mInitStatus = setMediaDrmSession(sessionId);
- }
- virtual ~CryptoPlugin() {}
-
- Return<bool> requiresSecureDecoderComponent(const hidl_string& mime) {
- UNUSED(mime);
- return false;
- }
-
- Return<void> notifyResolution(uint32_t width, uint32_t height) {
- UNUSED(width);
- UNUSED(height);
- return Void();
- }
-
- Return<void> decrypt(
- bool secure,
- const hidl_array<uint8_t, KEY_ID_SIZE>& keyId,
- const hidl_array<uint8_t, KEY_IV_SIZE>& iv,
- Mode mode,
- const Pattern& pattern,
- const hidl_vec<SubSample>& subSamples,
- const SharedBuffer& source,
- uint64_t offset,
- const DestinationBuffer& destination,
- decrypt_cb _hidl_cb);
-
- Return<void> decrypt_1_2(
- bool secure,
- const hidl_array<uint8_t, KEY_ID_SIZE>& keyId,
- const hidl_array<uint8_t, KEY_IV_SIZE>& iv,
- Mode mode,
- const Pattern& pattern,
- const hidl_vec<SubSample>& subSamples,
- const SharedBuffer& source,
- uint64_t offset,
- const DestinationBuffer& destination,
- decrypt_1_2_cb _hidl_cb) NO_THREAD_SAFETY_ANALYSIS; // use unique_lock
-
- Return<void> setSharedBufferBase(const hidl_memory& base,
- uint32_t bufferId);
-
- Return<Status> setMediaDrmSession(const hidl_vec<uint8_t>& sessionId);
-
- Return<Status> getInitStatus() const { return mInitStatus; }
-
- Return<void> getLogMessages(
- getLogMessages_cb _hidl_cb);
-private:
- CLEARKEY_DISALLOW_COPY_AND_ASSIGN(CryptoPlugin);
-
- std::mutex mSharedBufferLock;
- std::map<uint32_t, sp<IMemory>> mSharedBufferMap GUARDED_BY(mSharedBufferLock);
- sp<Session> mSession;
- Status mInitStatus;
-};
-
-} // namespace clearkey
-} // namespace V1_4
-} // namespace drm
-} // namespace hardware
-} // namespace android
-
-#endif // CLEARKEY_CRYPTO_PLUGIN_H_
diff --git a/drm/mediadrm/plugins/clearkey/hidl/include/DeviceFiles.h b/drm/mediadrm/plugins/clearkey/hidl/include/DeviceFiles.h
deleted file mode 100644
index 6466ac3..0000000
--- a/drm/mediadrm/plugins/clearkey/hidl/include/DeviceFiles.h
+++ /dev/null
@@ -1,73 +0,0 @@
-// Copyright 2018 Google LLC. All Rights Reserved. This file and proprietary
-// source code may only be used and distributed under the Widevine Master
-// License Agreement.
-//
-#ifndef CLEARKEY_DEVICE_FILES_H_
-#define CLEARKEY_DEVICE_FILES_H_
-
-#include <errno.h>
-#include <stdio.h>
-#include <unistd.h>
-
-#include <set>
-#include <string>
-#include <vector>
-
-#include "protos/DeviceFiles.pb.h"
-#include "ClearKeyTypes.h"
-#include "MemoryFileSystem.h"
-
-namespace android {
-namespace hardware {
-namespace drm {
-namespace V1_4 {
-namespace clearkey {
-
-using ::android::hardware::drm::V1_2::clearkey::OfflineFile;
-
-class DeviceFiles {
- public:
- typedef enum {
- kLicenseStateUnknown,
- kLicenseStateActive,
- kLicenseStateReleasing,
- } LicenseState;
-
- DeviceFiles() {};
- virtual ~DeviceFiles() {};
-
- virtual bool StoreLicense(const std::string& keySetId, LicenseState state,
- const std::string& keyResponse);
-
- virtual bool RetrieveLicense(
- const std::string& key_set_id, LicenseState* state, std::string* offlineLicense);
-
- virtual bool LicenseExists(const std::string& keySetId);
-
- virtual std::vector<std::string> ListLicenses() const;
-
- virtual bool DeleteLicense(const std::string& keySetId);
-
- virtual bool DeleteAllLicenses();
-
- private:
- bool FileExists(const std::string& path) const;
- ssize_t GetFileSize(const std::string& fileName) const;
- bool RemoveFile(const std::string& fileName);
-
- bool RetrieveHashedFile(const std::string& fileName, OfflineFile* deSerializedFile);
- bool StoreFileRaw(const std::string& fileName, const std::string& serializedFile);
- bool StoreFileWithHash(const std::string& fileName, const std::string& serializedFile);
-
- MemoryFileSystem mFileHandle;
-
- CLEARKEY_DISALLOW_COPY_AND_ASSIGN(DeviceFiles);
-};
-
-} // namespace clearkey
-} // namespace V1_4
-} // namespace drm
-} // namespace hardware
-} // namespace android
-
-#endif // CLEARKEY_DEVICE_FILES_H_
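Note on the removed DeviceFiles API above: a short usage sketch of the offline-license round trip it declared (StoreLicense followed by RetrieveLicense). The keySetId and keyResponse values are hypothetical, and the snippet assumes the clearkey header deleted in this change:

#include <string>
#include "DeviceFiles.h"  // header removed above

using android::hardware::drm::V1_4::clearkey::DeviceFiles;

// Stores an active offline license and verifies it can be read back.
bool storeAndCheckLicense(const std::string& keySetId, const std::string& keyResponse) {
    DeviceFiles files;
    if (!files.StoreLicense(keySetId, DeviceFiles::kLicenseStateActive, keyResponse)) {
        return false;
    }
    DeviceFiles::LicenseState state = DeviceFiles::kLicenseStateUnknown;
    std::string offlineLicense;
    return files.RetrieveLicense(keySetId, &state, &offlineLicense) &&
           state == DeviceFiles::kLicenseStateActive;
}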
diff --git a/drm/mediadrm/plugins/clearkey/hidl/include/DrmFactory.h b/drm/mediadrm/plugins/clearkey/hidl/include/DrmFactory.h
deleted file mode 100644
index fea1ec8..0000000
--- a/drm/mediadrm/plugins/clearkey/hidl/include/DrmFactory.h
+++ /dev/null
@@ -1,71 +0,0 @@
-/*
- * Copyright (C) 2018 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#ifndef CLEARKEY_DRM_FACTORY_H_
-#define CLEARKEY_DRM_FACTORY_H_
-
-#include <android/hardware/drm/1.4/IDrmPlugin.h>
-#include <android/hardware/drm/1.4/IDrmFactory.h>
-
-#include "ClearKeyTypes.h"
-
-namespace android {
-namespace hardware {
-namespace drm {
-namespace V1_4 {
-namespace clearkey {
-
-using ::android::hardware::drm::V1_1::SecurityLevel;
-using ::android::hardware::hidl_array;
-using ::android::hardware::hidl_handle;
-using ::android::hardware::hidl_string;
-using ::android::hardware::Return;
-
-struct DrmFactory : public IDrmFactory {
- DrmFactory() {}
- virtual ~DrmFactory() {}
-
- Return<bool> isCryptoSchemeSupported(const hidl_array<uint8_t, 16>& uuid)
- override;
-
- Return<bool> isCryptoSchemeSupported_1_2(const hidl_array<uint8_t, 16>& uuid,
- const hidl_string& mimeType,
- SecurityLevel level) override;
-
- Return<bool> isContentTypeSupported(const hidl_string &mimeType)
- override;
-
- Return<void> createPlugin(
- const hidl_array<uint8_t, 16>& uuid,
- const hidl_string& appPackageName,
- createPlugin_cb _hidl_cb) override;
-
- Return<void> getSupportedCryptoSchemes(
- getSupportedCryptoSchemes_cb _hidl_cb) override;
-
- Return<void> debug(const hidl_handle& fd, const hidl_vec<hidl_string>& args);
-
-private:
- CLEARKEY_DISALLOW_COPY_AND_ASSIGN(DrmFactory);
-};
-
-} // namespace clearkey
-} // namespace V1_4
-} // namespace drm
-} // namespace hardware
-} // namespace android
-
-#endif // CLEARKEY_DRM_FACTORY_H_
diff --git a/drm/mediadrm/plugins/clearkey/hidl/include/DrmPlugin.h b/drm/mediadrm/plugins/clearkey/hidl/include/DrmPlugin.h
deleted file mode 100644
index 274a89a..0000000
--- a/drm/mediadrm/plugins/clearkey/hidl/include/DrmPlugin.h
+++ /dev/null
@@ -1,449 +0,0 @@
-/*
- * Copyright (C) 2018 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#ifndef CLEARKEY_DRM_PLUGIN_H_
-#define CLEARKEY_DRM_PLUGIN_H_
-
-#include <android/hardware/drm/1.4/IDrmPlugin.h>
-#include <android/hardware/drm/1.2/IDrmPluginListener.h>
-
-#include <map>
-#include <stdio.h>
-
-#include <utils/List.h>
-
-#include "DeviceFiles.h"
-#include "SessionLibrary.h"
-#include "Utils.h"
-
-namespace android {
-namespace hardware {
-namespace drm {
-namespace V1_4 {
-namespace clearkey {
-
-namespace drm = ::android::hardware::drm;
-using drm::V1_0::EventType;
-using drm::V1_0::IDrmPluginListener;
-using drm::V1_0::KeyRequestType;
-using drm::V1_0::KeyStatus;
-using drm::V1_0::KeyType;
-using drm::V1_0::KeyValue;
-using drm::V1_0::SecureStop;
-using drm::V1_0::SecureStopId;
-using drm::V1_0::SessionId;
-using drm::V1_0::Status;
-using drm::V1_1::DrmMetricGroup;
-using drm::V1_1::HdcpLevel;
-using drm::V1_1::SecureStopRelease;
-using drm::V1_1::SecurityLevel;
-using drm::V1_2::KeySetId;
-using drm::V1_2::OfflineLicenseState;
-using drm::V1_4::clearkey::DeviceFiles;
-using drm::V1_4::clearkey::Session;
-using drm::V1_4::clearkey::SessionLibrary;
-using drm::V1_4::IDrmPlugin;
-
-using ::android::hardware::hidl_string;
-using ::android::hardware::hidl_vec;
-using ::android::hardware::Return;
-using ::android::hardware::Void;
-using ::android::sp;
-
-typedef drm::V1_1::KeyRequestType KeyRequestType_V1_1;
-typedef drm::V1_2::IDrmPluginListener IDrmPluginListener_V1_2;
-typedef drm::V1_2::KeyStatus KeyStatus_V1_2;
-typedef drm::V1_2::Status Status_V1_2;
-typedef drm::V1_2::HdcpLevel HdcpLevel_V1_2;
-
-struct DrmPlugin : public IDrmPlugin {
- explicit DrmPlugin(SessionLibrary* sessionLibrary);
-
- virtual ~DrmPlugin() { mFileHandle.DeleteAllLicenses(); }
-
- Return<void> openSession(openSession_cb _hidl_cb) override;
- Return<void> openSession_1_1(SecurityLevel securityLevel,
- openSession_cb _hidl_cb) override;
-
- Return<Status> closeSession(const hidl_vec<uint8_t>& sessionId) override;
-
- Return<void> getKeyRequest(
- const hidl_vec<uint8_t>& scope,
- const hidl_vec<uint8_t>& initData,
- const hidl_string& mimeType,
- KeyType keyType,
- const hidl_vec<KeyValue>& optionalParameters,
- getKeyRequest_cb _hidl_cb) override;
-
- Return<void> getKeyRequest_1_1(
- const hidl_vec<uint8_t>& scope,
- const hidl_vec<uint8_t>& initData,
- const hidl_string& mimeType,
- KeyType keyType,
- const hidl_vec<KeyValue>& optionalParameters,
- getKeyRequest_1_1_cb _hidl_cb) override;
-
- Return<void> getKeyRequest_1_2(
- const hidl_vec<uint8_t>& scope,
- const hidl_vec<uint8_t>& initData,
- const hidl_string& mimeType,
- KeyType keyType,
- const hidl_vec<KeyValue>& optionalParameters,
- getKeyRequest_1_2_cb _hidl_cb) override;
-
- Return<void> provideKeyResponse(
- const hidl_vec<uint8_t>& scope,
- const hidl_vec<uint8_t>& response,
- provideKeyResponse_cb _hidl_cb) override;
-
- Return<Status> removeKeys(const hidl_vec<uint8_t>& sessionId) {
- if (sessionId.size() == 0) {
- return Status::BAD_VALUE;
- }
- return Status::ERROR_DRM_CANNOT_HANDLE;
- }
-
- Return<Status> restoreKeys(
- const hidl_vec<uint8_t>& sessionId,
- const hidl_vec<uint8_t>& keySetId) override;
-
- Return<void> queryKeyStatus(
- const hidl_vec<uint8_t>& sessionId,
- queryKeyStatus_cb _hidl_cb) override;
-
- Return<void> getProvisionRequest(
- const hidl_string& certificateType,
- const hidl_string& certificateAuthority,
- getProvisionRequest_cb _hidl_cb) {
- UNUSED(certificateType);
- UNUSED(certificateAuthority);
-
- hidl_string defaultUrl;
- _hidl_cb(Status::ERROR_DRM_CANNOT_HANDLE, hidl_vec<uint8_t>(), defaultUrl);
- return Void();
- }
-
- Return<void> getProvisionRequest_1_2(
- const hidl_string& certificateType,
- const hidl_string& certificateAuthority,
- getProvisionRequest_1_2_cb _hidl_cb) {
- UNUSED(certificateType);
- UNUSED(certificateAuthority);
-
- hidl_string defaultUrl;
- _hidl_cb(Status_V1_2::ERROR_DRM_CANNOT_HANDLE, hidl_vec<uint8_t>(), defaultUrl);
- return Void();
- }
-
- Return<void> provideProvisionResponse(
- const hidl_vec<uint8_t>& response,
- provideProvisionResponse_cb _hidl_cb) {
-
- if (response.size() == 0) {
- _hidl_cb(Status::BAD_VALUE, hidl_vec<uint8_t>(), hidl_vec<uint8_t>());
- return Void();
- }
- _hidl_cb(Status::ERROR_DRM_CANNOT_HANDLE, hidl_vec<uint8_t>(), hidl_vec<uint8_t>());
- return Void();
- }
-
- Return<void> getHdcpLevels(getHdcpLevels_cb _hidl_cb) {
- HdcpLevel connectedLevel = HdcpLevel::HDCP_NONE;
- HdcpLevel maxLevel = HdcpLevel::HDCP_NO_OUTPUT;
- _hidl_cb(Status::OK, connectedLevel, maxLevel);
- return Void();
- }
-
- Return<void> getHdcpLevels_1_2(getHdcpLevels_1_2_cb _hidl_cb) {
- HdcpLevel_V1_2 connectedLevel = HdcpLevel_V1_2::HDCP_NONE;
- HdcpLevel_V1_2 maxLevel = HdcpLevel_V1_2::HDCP_NO_OUTPUT;
- _hidl_cb(Status_V1_2::OK, connectedLevel, maxLevel);
- return Void();
- }
-
- Return<void> getNumberOfSessions(getNumberOfSessions_cb _hidl_cb) override;
-
- Return<void> getSecurityLevel(const hidl_vec<uint8_t>& sessionId,
- getSecurityLevel_cb _hidl_cb) override;
-
- Return<void> getMetrics(getMetrics_cb _hidl_cb) override;
-
- Return<void> getOfflineLicenseKeySetIds(getOfflineLicenseKeySetIds_cb _hidl_cb) override;
-
- Return<Status> removeOfflineLicense(const KeySetId &keySetId) override;
-
- Return<void> getOfflineLicenseState(const KeySetId &keySetId,
- getOfflineLicenseState_cb _hidl_cb) override;
-
- Return<void> getPropertyString(
- const hidl_string& name,
- getPropertyString_cb _hidl_cb) override;
-
- Return<void> getPropertyByteArray(
- const hidl_string& name,
- getPropertyByteArray_cb _hidl_cb) override;
-
- Return<Status> setPropertyString(
- const hidl_string& name, const hidl_string& value) override;
-
- Return<Status> setPropertyByteArray(
- const hidl_string& name, const hidl_vec<uint8_t>& value) override;
-
- Return<void> getLogMessages(
- getLogMessages_cb _hidl_cb) override;
-
- Return<Status> setPlaybackId(
- const hidl_vec<uint8_t>& sessionId,
- const hidl_string& playbackId) override;
-
- Return<bool> requiresSecureDecoder(
- const hidl_string& mime, SecurityLevel level) override;
-
- Return<bool> requiresSecureDecoderDefault(const hidl_string& mime) override;
-
- Return<Status> setCipherAlgorithm(
- const hidl_vec<uint8_t>& sessionId, const hidl_string& algorithm) {
- if (sessionId.size() == 0 || algorithm.size() == 0) {
- return Status::BAD_VALUE;
- }
- return Status::ERROR_DRM_CANNOT_HANDLE;
- }
-
- Return<Status> setMacAlgorithm(
- const hidl_vec<uint8_t>& sessionId, const hidl_string& algorithm) {
- if (sessionId.size() == 0 || algorithm.size() == 0) {
- return Status::BAD_VALUE;
- }
- return Status::ERROR_DRM_CANNOT_HANDLE;
- }
-
- Return<void> encrypt(
- const hidl_vec<uint8_t>& sessionId,
- const hidl_vec<uint8_t>& keyId,
- const hidl_vec<uint8_t>& input,
- const hidl_vec<uint8_t>& iv,
- encrypt_cb _hidl_cb) {
- if (sessionId.size() == 0 || keyId.size() == 0 ||
- input.size() == 0 || iv.size() == 0) {
- _hidl_cb(Status::BAD_VALUE, hidl_vec<uint8_t>());
- return Void();
- }
- _hidl_cb(Status::ERROR_DRM_CANNOT_HANDLE, hidl_vec<uint8_t>());
- return Void();
- }
-
- Return<void> decrypt(
- const hidl_vec<uint8_t>& sessionId,
- const hidl_vec<uint8_t>& keyId,
- const hidl_vec<uint8_t>& input,
- const hidl_vec<uint8_t>& iv,
- decrypt_cb _hidl_cb) {
- if (sessionId.size() == 0 || keyId.size() == 0 ||
- input.size() == 0 || iv.size() == 0) {
- _hidl_cb(Status::BAD_VALUE, hidl_vec<uint8_t>());
- return Void();
- }
- _hidl_cb(Status::ERROR_DRM_CANNOT_HANDLE, hidl_vec<uint8_t>());
- return Void();
- }
-
- Return<void> sign(
- const hidl_vec<uint8_t>& sessionId,
- const hidl_vec<uint8_t>& keyId,
- const hidl_vec<uint8_t>& message,
- sign_cb _hidl_cb) {
- if (sessionId.size() == 0 || keyId.size() == 0 ||
- message.size() == 0) {
- _hidl_cb(Status::BAD_VALUE, hidl_vec<uint8_t>());
- return Void();
- }
- _hidl_cb(Status::ERROR_DRM_CANNOT_HANDLE, hidl_vec<uint8_t>());
- return Void();
- }
-
- Return<void> verify(
- const hidl_vec<uint8_t>& sessionId,
- const hidl_vec<uint8_t>& keyId,
- const hidl_vec<uint8_t>& message,
- const hidl_vec<uint8_t>& signature,
- verify_cb _hidl_cb) {
-
- if (sessionId.size() == 0 || keyId.size() == 0 ||
- message.size() == 0 || signature.size() == 0) {
- _hidl_cb(Status::BAD_VALUE, false);
- return Void();
- }
- _hidl_cb(Status::ERROR_DRM_CANNOT_HANDLE, false);
- return Void();
- }
-
- Return<void> signRSA(
- const hidl_vec<uint8_t>& sessionId,
- const hidl_string& algorithm,
- const hidl_vec<uint8_t>& message,
- const hidl_vec<uint8_t>& wrappedKey,
- signRSA_cb _hidl_cb) {
- if (sessionId.size() == 0 || algorithm.size() == 0 ||
- message.size() == 0 || wrappedKey.size() == 0) {
- _hidl_cb(Status::BAD_VALUE, hidl_vec<uint8_t>());
- return Void();
- }
- _hidl_cb(Status::ERROR_DRM_CANNOT_HANDLE, hidl_vec<uint8_t>());
- return Void();
- }
-
- Return<void> setListener(const sp<IDrmPluginListener>& listener) {
- mListener = listener;
- mListenerV1_2 = IDrmPluginListener_V1_2::castFrom(listener);
- return Void();
- };
-
- Return<void> sendEvent(
- EventType eventType,
- const hidl_vec<uint8_t>& sessionId,
- const hidl_vec<uint8_t>& data) {
- if (mListenerV1_2 != NULL) {
- mListenerV1_2->sendEvent(eventType, sessionId, data);
- } else if (mListener != NULL) {
- mListener->sendEvent(eventType, sessionId, data);
- } else {
- ALOGE("Null event listener, event not sent");
- }
- return Void();
- }
-
- Return<void> sendExpirationUpdate(
- const hidl_vec<uint8_t>& sessionId,
- int64_t expiryTimeInMS) {
- if (mListenerV1_2 != NULL) {
- mListenerV1_2->sendExpirationUpdate(sessionId, expiryTimeInMS);
- } else if (mListener != NULL) {
- mListener->sendExpirationUpdate(sessionId, expiryTimeInMS);
- } else {
- ALOGE("Null event listener, event not sent");
- }
- return Void();
- }
-
- Return<void> sendKeysChange(
- const hidl_vec<uint8_t>& sessionId,
- const hidl_vec<KeyStatus>& keyStatusList, bool hasNewUsableKey) {
- if (mListenerV1_2 != NULL) {
- mListenerV1_2->sendKeysChange(sessionId, keyStatusList, hasNewUsableKey);
- } else if (mListener != NULL) {
- mListener->sendKeysChange(sessionId, keyStatusList, hasNewUsableKey);
- } else {
- ALOGE("Null event listener, event not sent");
- }
- return Void();
- }
-
- Return<void> sendKeysChange_1_2(
- const hidl_vec<uint8_t>& sessionId,
- const hidl_vec<KeyStatus_V1_2>& keyStatusList, bool hasNewUsableKey) {
- if (mListenerV1_2 != NULL) {
- mListenerV1_2->sendKeysChange_1_2(sessionId, keyStatusList, hasNewUsableKey);
- }
- return Void();
- }
-
- Return<void> sendSessionLostState(
- const hidl_vec<uint8_t>& sessionId) {
- if (mListenerV1_2 != NULL) {
- mListenerV1_2->sendSessionLostState(sessionId);
- }
- return Void();
- }
-
- Return<void> getSecureStops(getSecureStops_cb _hidl_cb);
-
- Return<void> getSecureStop(const hidl_vec<uint8_t>& secureStopId,
- getSecureStop_cb _hidl_cb);
-
- Return<Status> releaseSecureStop(const hidl_vec<uint8_t>& ssRelease);
-
- Return<Status> releaseAllSecureStops();
-
- Return<void> getSecureStopIds(getSecureStopIds_cb _hidl_cb);
-
- Return<Status> releaseSecureStops(const SecureStopRelease& ssRelease);
-
- Return<Status> removeSecureStop(const hidl_vec<uint8_t>& secureStopId);
-
- Return<Status> removeAllSecureStops();
-
-private:
- void initProperties();
- void installSecureStop(const hidl_vec<uint8_t>& sessionId);
- bool makeKeySetId(std::string* keySetId);
- void setPlayPolicy();
-
- Return<Status> setSecurityLevel(const hidl_vec<uint8_t>& sessionId,
- SecurityLevel level);
-
- Status_V1_2 getKeyRequestCommon(const hidl_vec<uint8_t>& scope,
- const hidl_vec<uint8_t>& initData,
- const hidl_string& mimeType,
- KeyType keyType,
- const hidl_vec<KeyValue>& optionalParameters,
- std::vector<uint8_t> *request,
- KeyRequestType_V1_1 *getKeyRequestType,
- std::string *defaultUrl);
-
- struct ClearkeySecureStop {
- std::vector<uint8_t> id;
- std::vector<uint8_t> data;
- };
-
- std::map<std::vector<uint8_t>, ClearkeySecureStop> mSecureStops;
- std::vector<KeyValue> mPlayPolicy;
- std::map<std::string, std::string> mStringProperties;
- std::map<std::string, std::vector<uint8_t> > mByteArrayProperties;
- std::map<std::string, std::vector<uint8_t> > mReleaseKeysMap;
- std::map<std::vector<uint8_t>, std::string> mPlaybackId;
- sp<IDrmPluginListener> mListener;
- sp<IDrmPluginListener_V1_2> mListenerV1_2;
- SessionLibrary *mSessionLibrary;
- int64_t mOpenSessionOkCount;
- int64_t mCloseSessionOkCount;
- int64_t mCloseSessionNotOpenedCount;
- uint32_t mNextSecureStopId;
- android::Mutex mPlayPolicyLock;
-
- // set by property to mock error scenarios
- Status_V1_2 mMockError;
-
- void processMockError(const sp<Session> &session) {
- session->setMockError(mMockError);
- mMockError = Status_V1_2::OK;
- }
-
- DeviceFiles mFileHandle;
- Mutex mSecureStopLock;
- Mutex mSecurityLevelLock;
- std::map<std::vector<uint8_t>, SecurityLevel> mSecurityLevel
- GUARDED_BY(mSecurityLevelLock);
-
- CLEARKEY_DISALLOW_COPY_AND_ASSIGN_AND_NEW(DrmPlugin);
-};
-
-} // namespace clearkey
-} // namespace V1_4
-} // namespace drm
-} // namespace hardware
-} // namespace android
-
-#endif // CLEARKEY_DRM_PLUGIN_H_
diff --git a/drm/mediadrm/plugins/clearkey/hidl/include/InitDataParser.h b/drm/mediadrm/plugins/clearkey/hidl/include/InitDataParser.h
deleted file mode 100644
index 59338c9..0000000
--- a/drm/mediadrm/plugins/clearkey/hidl/include/InitDataParser.h
+++ /dev/null
@@ -1,57 +0,0 @@
-/*
- * Copyright (C) 2018 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#ifndef CLEARKEY_INIT_DATA_PARSER_H_
-#define CLEARKEY_INIT_DATA_PARSER_H_
-
-#include <android/hardware/drm/1.0/types.h>
-
-#include "ClearKeyTypes.h"
-
-namespace android {
-namespace hardware {
-namespace drm {
-namespace V1_4 {
-namespace clearkey {
-
-using ::android::hardware::drm::V1_0::Status;
-
-class InitDataParser {
-public:
- InitDataParser() {}
-
- Status parse(const std::vector<uint8_t>& initData,
- const std::string& mimeType,
- V1_0::KeyType keyType,
- std::vector<uint8_t>* licenseRequest);
-
-private:
- CLEARKEY_DISALLOW_COPY_AND_ASSIGN(InitDataParser);
-
- Status parsePssh(const std::vector<uint8_t>& initData,
- std::vector<const uint8_t*>* keyIds);
-
- std::string generateRequest(V1_0::KeyType keyType,
- const std::vector<const uint8_t*>& keyIds);
-};
-
-} // namespace clearkey
-} // namespace V1_4
-} // namespace drm
-} // namespace hardware
-} // namespace android
-
-#endif // CLEARKEY_INIT_DATA_PARSER_H_
diff --git a/drm/mediadrm/plugins/clearkey/hidl/include/JsonWebKey.h b/drm/mediadrm/plugins/clearkey/hidl/include/JsonWebKey.h
deleted file mode 100644
index 40a2d74..0000000
--- a/drm/mediadrm/plugins/clearkey/hidl/include/JsonWebKey.h
+++ /dev/null
@@ -1,62 +0,0 @@
-/*
- * Copyright (C) 2018 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-#ifndef CLEARKEY_JSON_WEB_KEY_H_
-#define CLEARKEY_JSON_WEB_KEY_H_
-
-#include "jsmn.h"
-#include "Utils.h"
-#include "ClearKeyTypes.h"
-
-namespace android {
-namespace hardware {
-namespace drm {
-namespace V1_4 {
-namespace clearkey {
-
-class JsonWebKey {
- public:
- JsonWebKey();
- virtual ~JsonWebKey();
-
- bool extractKeysFromJsonWebKeySet(const std::string& jsonWebKeySet,
- KeyMap* keys);
-
- private:
- std::vector<jsmntok_t> mJsmnTokens;
- std::vector<std::string> mJsonObjects;
- std::vector<std::string> mTokens;
-
- bool decodeBase64String(const std::string& encodedText,
- std::vector<uint8_t>* decodedText);
- bool findKey(const std::string& jsonObject, std::string* keyId,
- std::string* encodedKey);
- void findValue(const std::string &key, std::string* value);
- bool isJsonWebKeySet(const std::string& jsonObject) const;
- bool parseJsonObject(const std::string& jsonObject,
- std::vector<std::string>* tokens);
- bool parseJsonWebKeySet(const std::string& jsonWebKeySet,
- std::vector<std::string>* jsonObjects);
-
- CLEARKEY_DISALLOW_COPY_AND_ASSIGN(JsonWebKey);
-};
-
-} // namespace clearkey
-} // namespace V1_4
-} // namespace drm
-} // namespace hardware
-} // namespace android
-
-#endif // CLEARKEY_JSON_WEB_KEY_H_
diff --git a/drm/mediadrm/plugins/clearkey/hidl/include/MemoryFileSystem.h b/drm/mediadrm/plugins/clearkey/hidl/include/MemoryFileSystem.h
deleted file mode 100644
index 1d98860..0000000
--- a/drm/mediadrm/plugins/clearkey/hidl/include/MemoryFileSystem.h
+++ /dev/null
@@ -1,68 +0,0 @@
-// Copyright 2018 Google LLC. All Rights Reserved. This file and proprietary
-// source code may only be used and distributed under the Widevine Master
-// License Agreement.
-//
-#ifndef CLEARKEY_MEMORY_FILE_SYSTEM_H_
-#define CLEARKEY_MEMORY_FILE_SYSTEM_H_
-
-#include <map>
-#include <string>
-
-#include "ClearKeyTypes.h"
-
-namespace android {
-namespace hardware {
-namespace drm {
-namespace V1_4 {
-namespace clearkey {
-
-// Using android file system requires clearkey plugin to update
-// its sepolicy. However, we are unable to update sepolicy for
-// older vendor partitions. To provide backward compatibility,
-// clearkey plugin implements a very simple file system in memory.
-// This memory file system does not support directory structure.
-class MemoryFileSystem {
- public:
- struct MemoryFile {
- std::string fileName; // excludes path
- std::string content;
- size_t fileSize;
-
- std::string getContent() const { return content; }
- size_t getFileSize() const { return fileSize; }
- void setContent(const std::string& file) { content = file; }
- void setFileName(const std::string& name) { fileName = name; }
- void setFileSize(size_t size) {
- content.resize(size); fileSize = size;
- }
- };
-
- MemoryFileSystem() {};
- virtual ~MemoryFileSystem() {};
-
- bool FileExists(const std::string& fileName) const;
- ssize_t GetFileSize(const std::string& fileName) const;
- std::vector<std::string> ListFiles() const;
- size_t Read(const std::string& pathName, std::string* buffer);
- bool RemoveAllFiles();
- bool RemoveFile(const std::string& fileName);
- size_t Write(const std::string& pathName, const MemoryFile& memoryFile);
-
- private:
- // License file name is made up of a unique keySetId, therefore,
- // the filename can be used as the key to locate licenses in the
- // memory file system.
- std::map<std::string, MemoryFile> mMemoryFileSystem;
-
- std::string GetFileName(const std::string& path);
-
- CLEARKEY_DISALLOW_COPY_AND_ASSIGN(MemoryFileSystem);
-};
-
-} // namespace clearkey
-} // namespace V1_4
-} // namespace drm
-} // namespace hardware
-} // namespace android
-
-#endif // CLEARKEY_MEMORY_FILE_SYSTEM_H_
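Note on the removed MemoryFileSystem above: it keys each stored license file by its bare file name, with no directory support. A minimal sketch of the write/read behavior implied by that map, assuming any path prefix is simply stripped (the real MemoryFileSystem.cpp is not part of this change, so this is an assumption, not its implementation):

#include <cstddef>
#include <map>
#include <string>

struct MemoryFile {            // mirrors the struct declared in the removed header
    std::string fileName;
    std::string content;
    size_t fileSize;
};

// Assumed behavior: the bare file name is the map key, so any directory prefix is dropped.
static std::string baseName(const std::string& path) {
    const size_t pos = path.find_last_of('/');
    return pos == std::string::npos ? path : path.substr(pos + 1);
}

static size_t writeFile(std::map<std::string, MemoryFile>& fs,
                        const std::string& path, const MemoryFile& file) {
    fs[baseName(path)] = file;                 // create or overwrite
    return file.content.size();
}

static size_t readFile(const std::map<std::string, MemoryFile>& fs,
                       const std::string& path, std::string* buffer) {
    auto it = fs.find(baseName(path));
    if (it == fs.end()) return 0;              // file not found
    *buffer = it->second.content;
    return buffer->size();
}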
diff --git a/drm/mediadrm/plugins/clearkey/hidl/include/Session.h b/drm/mediadrm/plugins/clearkey/hidl/include/Session.h
deleted file mode 100644
index 05cb8c8..0000000
--- a/drm/mediadrm/plugins/clearkey/hidl/include/Session.h
+++ /dev/null
@@ -1,81 +0,0 @@
-/*
- * Copyright (C) 2018 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#ifndef CLEARKEY_SESSION_H_
-#define CLEARKEY_SESSION_H_
-
-#include <utils/Mutex.h>
-#include <utils/RefBase.h>
-#include <vector>
-
-#include "ClearKeyTypes.h"
-
-
-namespace android {
-namespace hardware {
-namespace drm {
-namespace V1_4 {
-namespace clearkey {
-
-namespace drm = ::android::hardware::drm;
-using drm::V1_0::Status;
-using drm::V1_0::SubSample;
-
-typedef drm::V1_2::Status Status_V1_2;
-
-class Session : public RefBase {
-public:
- explicit Session(const std::vector<uint8_t>& sessionId)
- : mSessionId(sessionId), mMockError(Status_V1_2::OK) {}
- virtual ~Session() {}
-
- const std::vector<uint8_t>& sessionId() const { return mSessionId; }
-
- Status getKeyRequest(
- const std::vector<uint8_t>& initDataType,
- const std::string& mimeType,
- V1_0::KeyType keyType,
- std::vector<uint8_t>* keyRequest) const;
-
- Status provideKeyResponse(
- const std::vector<uint8_t>& response);
-
- Status_V1_2 decrypt(
- const KeyId keyId, const Iv iv, const uint8_t* srcPtr,
- uint8_t* dstPtr, const std::vector<SubSample> subSamples,
- size_t* bytesDecryptedOut);
-
- void setMockError(Status_V1_2 error) {mMockError = error;}
- Status_V1_2 getMockError() const {return mMockError;}
-
-private:
- CLEARKEY_DISALLOW_COPY_AND_ASSIGN(Session);
-
- const std::vector<uint8_t> mSessionId;
- KeyMap mKeyMap;
- Mutex mMapLock;
-
- // For mocking error return scenarios
- Status_V1_2 mMockError;
-};
-
-} // namespace clearkey
-} // namespace V1_4
-} // namespace drm
-} // namespace hardware
-} // namespace android
-
-#endif // CLEARKEY_SESSION_H_
diff --git a/drm/mediadrm/plugins/clearkey/hidl/include/SessionLibrary.h b/drm/mediadrm/plugins/clearkey/hidl/include/SessionLibrary.h
deleted file mode 100644
index 5e77438..0000000
--- a/drm/mediadrm/plugins/clearkey/hidl/include/SessionLibrary.h
+++ /dev/null
@@ -1,66 +0,0 @@
-/*
- * Copyright (C) 2018 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#ifndef CLEARKEY_SESSION_LIBRARY_H_
-#define CLEARKEY_SESSION_LIBRARY_H_
-
-#include <utils/RefBase.h>
-#include <utils/Mutex.h>
-
-#include "ClearKeyTypes.h"
-#include "Session.h"
-
-namespace android {
-namespace hardware {
-namespace drm {
-namespace V1_4 {
-namespace clearkey {
-
-using ::android::sp;
-
-class SessionLibrary : public RefBase {
-public:
- static SessionLibrary* get();
-
- sp<Session> createSession();
-
- sp<Session> findSession(
- const std::vector<uint8_t>& sessionId);
-
- void destroySession(const sp<Session>& session);
-
- size_t numOpenSessions() const { return mSessions.size(); }
-
-private:
- CLEARKEY_DISALLOW_COPY_AND_ASSIGN(SessionLibrary);
-
- SessionLibrary() : mNextSessionId(1) {}
-
- static Mutex sSingletonLock;
- static SessionLibrary* sSingleton;
-
- Mutex mSessionsLock;
- uint32_t mNextSessionId;
- std::map<std::vector<uint8_t>, sp<Session> > mSessions;
-};
-
-} // namespace clearkey
-} // namespace V1_4
-} // namespace drm
-} // namespace hardware
-} // namespace android
-
-#endif // CLEARKEY_SESSION_LIBRARY_H_
diff --git a/drm/mediadrm/plugins/clearkey/hidl/include/TypeConvert.h b/drm/mediadrm/plugins/clearkey/hidl/include/TypeConvert.h
deleted file mode 100644
index 22eeccd..0000000
--- a/drm/mediadrm/plugins/clearkey/hidl/include/TypeConvert.h
+++ /dev/null
@@ -1,88 +0,0 @@
-/*
- * Copyright (C) 2018 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#ifndef CLEARKEY_ANDROID_HARDWARE_DRM_V1_4_TYPECONVERT
-#define CLEARKEY_ANDROID_HARDWARE_DRM_V1_4_TYPECONVERT
-
-#include <vector>
-
-#include <android/hardware/drm/1.0/types.h>
-
-namespace android {
-namespace hardware {
-namespace drm {
-namespace V1_4 {
-namespace clearkey {
-
-using ::android::hardware::hidl_array;
-using ::android::hardware::hidl_vec;
-
-template<typename T> const hidl_vec<T> toHidlVec(const std::vector<T> &vec) {
- hidl_vec<T> hVec;
- hVec.setToExternal(const_cast<T *>(vec.data()), vec.size());
- return hVec;
-}
-
-template<typename T> hidl_vec<T> toHidlVec(std::vector<T> &vec) {
- hidl_vec<T> hVec;
- hVec.setToExternal(vec.data(), vec.size());
- return hVec;
-}
-
-template<typename T> const std::vector<T> toVector(const hidl_vec<T> &hVec) {
- std::vector<T> vec;
- vec.assign(hVec.data(), hVec.data() + hVec.size());
- return *const_cast<const std::vector<T> *>(&vec);
-}
-
-template<typename T> std::vector<T> toVector(hidl_vec<T> &hVec) {
- std::vector<T> vec;
- vec.assign(hVec.data(), hVec.data() + hVec.size());
- return vec;
-}
-
-template<typename T, size_t SIZE> const std::vector<T> toVector(
- const hidl_array<T, SIZE> &hArray) {
- std::vector<T> vec;
- vec.assign(hArray.data(), hArray.data() + hArray.size());
- return vec;
-}
-
-template<typename T, size_t SIZE> std::vector<T> toVector(
- hidl_array<T, SIZE> &hArray) {
- std::vector<T> vec;
- vec.assign(hArray.data(), hArray.data() + hArray.size());
- return vec;
-}
-
-inline Status toStatus_1_0(Status_V1_2 status) {
- switch (status) {
- case Status_V1_2::ERROR_DRM_INSUFFICIENT_SECURITY:
- case Status_V1_2::ERROR_DRM_FRAME_TOO_LARGE:
- case Status_V1_2::ERROR_DRM_SESSION_LOST_STATE:
- return Status::ERROR_DRM_UNKNOWN;
- default:
- return static_cast<Status>(status);
- }
-}
-
-} // namespace clearkey
-} // namespace V1_4
-} // namespace drm
-} // namespace hardware
-} // namespace android
-
-#endif // CLEARKEY_ANDROID_HARDWARE_DRM_V1_4_TYPECONVERT
diff --git a/drm/mediadrm/plugins/clearkey/hidl/manifest_android.hardware.drm@1.2-service.clearkey.xml b/drm/mediadrm/plugins/clearkey/hidl/manifest_android.hardware.drm@1.2-service.clearkey.xml
deleted file mode 100644
index 16cba11..0000000
--- a/drm/mediadrm/plugins/clearkey/hidl/manifest_android.hardware.drm@1.2-service.clearkey.xml
+++ /dev/null
@@ -1,23 +0,0 @@
-<?xml version="1.0" encoding="utf-8"?>
-<!-- Copyright (C) 2019 The Android Open Source Project
-
- Licensed under the Apache License, Version 2.0 (the "License");
- you may not use this file except in compliance with the License.
- You may obtain a copy of the License at
-
- http://www.apache.org/licenses/LICENSE-2.0
-
- Unless required by applicable law or agreed to in writing, software
- distributed under the License is distributed on an "AS IS" BASIS,
- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- See the License for the specific language governing permissions and
- limitations under the License.
--->
-<manifest version="1.0" type="device">
- <hal format="hidl">
- <name>android.hardware.drm</name>
- <transport>hwbinder</transport>
- <fqname>@1.2::ICryptoFactory/clearkey</fqname>
- <fqname>@1.2::IDrmFactory/clearkey</fqname>
- </hal>
-</manifest>
diff --git a/drm/mediadrm/plugins/clearkey/hidl/manifest_android.hardware.drm@1.3-service.clearkey.xml b/drm/mediadrm/plugins/clearkey/hidl/manifest_android.hardware.drm@1.3-service.clearkey.xml
deleted file mode 100644
index 229ee96..0000000
--- a/drm/mediadrm/plugins/clearkey/hidl/manifest_android.hardware.drm@1.3-service.clearkey.xml
+++ /dev/null
@@ -1,23 +0,0 @@
-<?xml version="1.0" encoding="utf-8"?>
-<!-- Copyright (C) 2019 The Android Open Source Project
-
- Licensed under the Apache License, Version 2.0 (the "License");
- you may not use this file except in compliance with the License.
- You may obtain a copy of the License at
-
- http://www.apache.org/licenses/LICENSE-2.0
-
- Unless required by applicable law or agreed to in writing, software
- distributed under the License is distributed on an "AS IS" BASIS,
- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- See the License for the specific language governing permissions and
- limitations under the License.
--->
-<manifest version="1.0" type="device">
- <hal format="hidl">
- <name>android.hardware.drm</name>
- <transport>hwbinder</transport>
- <fqname>@1.3::ICryptoFactory/clearkey</fqname>
- <fqname>@1.3::IDrmFactory/clearkey</fqname>
- </hal>
-</manifest>
diff --git a/drm/mediadrm/plugins/clearkey/hidl/manifest_android.hardware.drm@1.4-service.clearkey.xml b/drm/mediadrm/plugins/clearkey/hidl/manifest_android.hardware.drm@1.4-service.clearkey.xml
deleted file mode 100644
index 31ddb5f..0000000
--- a/drm/mediadrm/plugins/clearkey/hidl/manifest_android.hardware.drm@1.4-service.clearkey.xml
+++ /dev/null
@@ -1,23 +0,0 @@
-<?xml version="1.0" encoding="utf-8"?>
-<!-- Copyright (C) 2021 The Android Open Source Project
-
- Licensed under the Apache License, Version 2.0 (the "License");
- you may not use this file except in compliance with the License.
- You may obtain a copy of the License at
-
- http://www.apache.org/licenses/LICENSE-2.0
-
- Unless required by applicable law or agreed to in writing, software
- distributed under the License is distributed on an "AS IS" BASIS,
- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- See the License for the specific language governing permissions and
- limitations under the License.
--->
-<manifest version="1.0" type="device">
- <hal format="hidl">
- <name>android.hardware.drm</name>
- <transport>hwbinder</transport>
- <fqname>@1.4::ICryptoFactory/clearkey</fqname>
- <fqname>@1.4::IDrmFactory/clearkey</fqname>
- </hal>
-</manifest>
diff --git a/drm/mediadrm/plugins/clearkey/hidl/protos/DeviceFiles.proto b/drm/mediadrm/plugins/clearkey/hidl/protos/DeviceFiles.proto
deleted file mode 100644
index 3e11f0b..0000000
--- a/drm/mediadrm/plugins/clearkey/hidl/protos/DeviceFiles.proto
+++ /dev/null
@@ -1,47 +0,0 @@
-// ----------------------------------------------------------------------------
-// device_files.proto
-// ----------------------------------------------------------------------------
-// Copyright 2018 Google LLC. All Rights Reserved. This file and proprietary
-// source code may only be used and distributed under the Widevine Master
-// License Agreement.
-//
-// Description:
-// Format of various files stored at the device.
-//
-syntax = "proto2";
-
-package android.hardware.drm.V1_2.clearkey;
-
-// need this if we are using libprotobuf-cpp-2.3.0-lite
-option optimize_for = LITE_RUNTIME;
-
-message License {
- enum LicenseState {
- ACTIVE = 1;
- RELEASING = 2;
- }
-
- optional LicenseState state = 1;
- optional bytes license = 2;
-}
-
-message OfflineFile {
- enum FileType {
- LICENSE = 1;
- }
-
- enum FileVersion {
- VERSION_1 = 1;
- }
-
- optional FileType type = 1;
- optional FileVersion version = 2 [default = VERSION_1];
- optional License license = 3;
-
-}
-
-message HashedFile {
- optional bytes file = 1;
- // A raw (not hex-encoded) SHA256, taken over the bytes of 'file'.
- optional bytes hash = 2;
-}
diff --git a/drm/mediadrm/plugins/clearkey/hidl/service.cpp b/drm/mediadrm/plugins/clearkey/hidl/service.cpp
deleted file mode 100644
index d3d6905..0000000
--- a/drm/mediadrm/plugins/clearkey/hidl/service.cpp
+++ /dev/null
@@ -1,46 +0,0 @@
-/*
- * Copyright 2018 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-#include <CryptoFactory.h>
-#include <DrmFactory.h>
-
-#include <android-base/logging.h>
-#include <binder/ProcessState.h>
-#include <hidl/HidlLazyUtils.h>
-#include <hidl/HidlTransportSupport.h>
-
-using ::android::hardware::configureRpcThreadpool;
-using ::android::hardware::joinRpcThreadpool;
-using ::android::sp;
-
-using android::hardware::drm::V1_4::ICryptoFactory;
-using android::hardware::drm::V1_4::IDrmFactory;
-using android::hardware::drm::V1_4::clearkey::CryptoFactory;
-using android::hardware::drm::V1_4::clearkey::DrmFactory;
-
-int main(int /* argc */, char** /* argv */) {
- sp<IDrmFactory> drmFactory = new DrmFactory;
- sp<ICryptoFactory> cryptoFactory = new CryptoFactory;
-
- configureRpcThreadpool(8, true /* callerWillJoin */);
-
- // Setup hwbinder service
- CHECK_EQ(drmFactory->registerAsService("clearkey"), android::NO_ERROR)
- << "Failed to register Clearkey Factory HAL";
- CHECK_EQ(cryptoFactory->registerAsService("clearkey"), android::NO_ERROR)
- << "Failed to register Clearkey Crypto HAL";
-
- joinRpcThreadpool();
-}
diff --git a/drm/mediadrm/plugins/clearkey/hidl/serviceLazy.cpp b/drm/mediadrm/plugins/clearkey/hidl/serviceLazy.cpp
deleted file mode 100644
index 358b5cc..0000000
--- a/drm/mediadrm/plugins/clearkey/hidl/serviceLazy.cpp
+++ /dev/null
@@ -1,50 +0,0 @@
-/*
- * Copyright 2019 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-#include <CryptoFactory.h>
-#include <DrmFactory.h>
-
-#include <android-base/logging.h>
-#include <binder/ProcessState.h>
-#include <hidl/HidlLazyUtils.h>
-#include <hidl/HidlTransportSupport.h>
-
-using ::android::hardware::configureRpcThreadpool;
-using ::android::hardware::joinRpcThreadpool;
-using ::android::sp;
-
-using android::hardware::drm::V1_4::ICryptoFactory;
-using android::hardware::drm::V1_4::IDrmFactory;
-using android::hardware::drm::V1_4::clearkey::CryptoFactory;
-using android::hardware::drm::V1_4::clearkey::DrmFactory;
-using android::hardware::LazyServiceRegistrar;
-
-int main(int /* argc */, char** /* argv */) {
- sp<IDrmFactory> drmFactory = new DrmFactory;
- sp<ICryptoFactory> cryptoFactory = new CryptoFactory;
-
- configureRpcThreadpool(8, true /* callerWillJoin */);
-
- // Setup hwbinder service
- auto serviceRegistrar = LazyServiceRegistrar::getInstance();
-
- // Setup hwbinder service
- CHECK_EQ(serviceRegistrar.registerService(drmFactory, "clearkey"), android::NO_ERROR)
- << "Failed to register Clearkey Factory HAL";
- CHECK_EQ(serviceRegistrar.registerService(cryptoFactory, "clearkey"), android::NO_ERROR)
- << "Failed to register Clearkey Crypto HAL";
-
- joinRpcThreadpool();
-}
diff --git a/include/drm/TEST_MAPPING b/include/drm/TEST_MAPPING
index 74fa50d..8595f12 100644
--- a/include/drm/TEST_MAPPING
+++ b/include/drm/TEST_MAPPING
@@ -1,16 +1,16 @@
{
"presubmit": [
{
- "name": "GtsMediaTestCases",
+ "name": "WvtsDeviceTestCases",
"options" : [
{
"include-annotation": "android.platform.test.annotations.Presubmit"
},
{
- "include-filter": "com.google.android.media.gts.WidevineGenericOpsTests"
+ "include-filter": "com.google.android.media.wvts.WidevineGenericOpsTests"
},
{
- "include-filter": "com.google.android.media.gts.WidevineH264PlaybackTests"
+ "include-filter": "com.google.android.media.wvts.WidevineH264PlaybackTests"
}
]
}
diff --git a/include/media/Interpolator.h b/include/media/Interpolator.h
index 71e7604..0ee8779 100644
--- a/include/media/Interpolator.h
+++ b/include/media/Interpolator.h
@@ -122,7 +122,7 @@
// monotonic computation.
// we use lazy computation here - if we precompute in
// a single pass, duplicate secant computations may be avoided.
- S sec, sec0, sec1;
+ S sec{}, sec0{}, sec1{}; // initialization not needed logically, done to satisfy clang-tidy
if (!catmullRom || monotonic) {
sec = (high->second - low->second) / interval;
sec0 = low2 != this->end()
@@ -269,7 +269,7 @@
// Note: We don't need to check size is within some bounds as
// the Parcel read will fail if size is incorrectly specified too large.
- float lastx;
+ float lastx = 0.f; // initialization not needed logically, done to satisfy clang-tidy
for (uint32_t i = 0; i < size; ++i) {
float x = config.xy[i * 2];
float y = config.xy[i * 2 + 1];
diff --git a/include/media/MmapStreamInterface.h b/include/media/MmapStreamInterface.h
index 61de987..7725175 100644
--- a/include/media/MmapStreamInterface.h
+++ b/include/media/MmapStreamInterface.h
@@ -155,6 +155,18 @@
*/
virtual status_t standby() = 0;
+ /**
+ * Report when data is being written to a playback buffer. Currently, this is used by the mmap
+ * playback thread for sound dose computation.
+ *
+ * \param[in] buffer a pointer to the audio data
+ * \param[in] frameCount the number of frames written by the CPU
+ * \return OK in case of success.
+ * NO_INIT in case of initialization error
+ * INVALID_OPERATION in case of wrong thread type
+ */
+ virtual status_t reportData(const void* buffer, size_t frameCount) = 0;
+
protected:
// Subclasses can not be constructed directly by clients.
MmapStreamInterface() {}
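
For illustration of the reportData() contract added to MmapStreamInterface above, here is a minimal stand-alone sketch (plain C++ that compiles outside the Android tree; SoundDoseSketch and its return values are hypothetical stand-ins, not the real android status codes or sound dose implementation):

    // Hypothetical stand-in mirroring the reportData() contract documented above.
    #include <cstddef>
    #include <cstdint>
    #include <vector>

    using status_t = int32_t;
    constexpr status_t OK = 0;

    struct SoundDoseSketch {
        size_t framesSeen = 0;
        status_t reportData(const void* buffer, size_t frameCount) {
            if (buffer == nullptr) return -1;   // real code would return an android status code
            framesSeen += frameCount;           // a real implementation would analyze the samples
            return OK;
        }
    };

    int main() {
        SoundDoseSketch dose;
        std::vector<int16_t> pcm(480);          // 10 ms of 48 kHz mono 16-bit PCM
        return dose.reportData(pcm.data(), pcm.size()) == OK ? 0 : 1;
    }

In the actual framework the mmap playback thread calls this on the stream implementation so sound dose can be computed from the frames the client just wrote.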
diff --git a/media/TEST_MAPPING b/media/TEST_MAPPING
index 48a060b..5b1bd91 100644
--- a/media/TEST_MAPPING
+++ b/media/TEST_MAPPING
@@ -21,25 +21,28 @@
],
"presubmit": [
{
- "name": "GtsMediaTestCases",
+ "name": "WvtsDeviceTestCases",
"options" : [
{
"include-annotation": "android.platform.test.annotations.Presubmit"
},
{
- "include-filter": "com.google.android.media.gts.WidevineGenericOpsTests"
+ "include-filter": "com.google.android.media.wvts.WidevineGenericOpsTests"
},
{
- "include-filter": "com.google.android.media.gts.WidevineH264PlaybackTests"
+ "include-filter": "com.google.android.media.wvts.WidevineH264PlaybackTests"
}
],
"file_patterns": ["(?i)drm|crypto"]
- }
- ],
-
- "imports": [
+ },
{
- "path": "frameworks/av/drm/mediadrm/plugins"
+ "name": "CtsMediaDrmFrameworkTestCases",
+ "options" : [
+ {
+ "include-annotation": "android.platform.test.annotations.Presubmit"
+ }
+ ],
+ "file_patterns": ["(?i)drm|crypto"]
}
],
diff --git a/media/audioaidlconversion/AidlConversionCppNdk.cpp b/media/audioaidlconversion/AidlConversionCppNdk.cpp
index a3934dd..3b06245 100644
--- a/media/audioaidlconversion/AidlConversionCppNdk.cpp
+++ b/media/audioaidlconversion/AidlConversionCppNdk.cpp
@@ -14,6 +14,8 @@
* limitations under the License.
*/
+#include <stdio.h>
+
#include <algorithm>
#include <map>
#include <utility>
@@ -43,6 +45,9 @@
using ::android::BAD_VALUE;
using ::android::OK;
+using ::android::String16;
+using ::android::String8;
+using ::android::status_t;
using ::android::base::unexpected;
using media::audio::common::AudioChannelLayout;
@@ -567,7 +572,6 @@
GET_DEVICE_DESC_CONNECTION(BT_LE));
return pairs;
}();
-#undef GET_DEVICE_DESC_CONNECTION
return pairs;
}
@@ -995,55 +999,161 @@
}
}
+AudioDeviceAddress::Tag suggestDeviceAddressTag(const AudioDeviceDescription& description) {
+ using Tag = AudioDeviceAddress::Tag;
+ if (std::string connection = description.connection;
+ connection == GET_DEVICE_DESC_CONNECTION(BT_A2DP) ||
+ // Note: BT LE Broadcast uses a "group id".
+ (description.type != AudioDeviceType::OUT_BROADCAST &&
+ connection == GET_DEVICE_DESC_CONNECTION(BT_LE)) ||
+ connection == GET_DEVICE_DESC_CONNECTION(BT_SCO) ||
+ connection == GET_DEVICE_DESC_CONNECTION(WIRELESS)) {
+ return Tag::mac;
+ } else if (connection == GET_DEVICE_DESC_CONNECTION(IP_V4)) {
+ return Tag::ipv4;
+ } else if (connection == GET_DEVICE_DESC_CONNECTION(USB)) {
+ return Tag::alsa;
+ }
+ return Tag::id;
+}
+
::android::status_t aidl2legacy_AudioDevice_audio_device(
const AudioDevice& aidl,
audio_devices_t* legacyType, char* legacyAddress) {
- *legacyType = VALUE_OR_RETURN_STATUS(
- aidl2legacy_AudioDeviceDescription_audio_devices_t(aidl.type));
- return aidl2legacy_string(
- aidl.address.get<AudioDeviceAddress::id>(),
- legacyAddress, AUDIO_DEVICE_MAX_ADDRESS_LEN);
+ std::string stringAddress;
+ RETURN_STATUS_IF_ERROR(aidl2legacy_AudioDevice_audio_device(
+ aidl, legacyType, &stringAddress));
+ return aidl2legacy_string(stringAddress, legacyAddress, AUDIO_DEVICE_MAX_ADDRESS_LEN);
}
::android::status_t aidl2legacy_AudioDevice_audio_device(
const AudioDevice& aidl,
audio_devices_t* legacyType, String8* legacyAddress) {
- *legacyType = VALUE_OR_RETURN_STATUS(
- aidl2legacy_AudioDeviceDescription_audio_devices_t(aidl.type));
- *legacyAddress = VALUE_OR_RETURN_STATUS(aidl2legacy_string_view_String8(
- aidl.address.get<AudioDeviceAddress::id>()));
+ std::string stringAddress;
+ RETURN_STATUS_IF_ERROR(aidl2legacy_AudioDevice_audio_device(
+ aidl, legacyType, &stringAddress));
+ *legacyAddress = VALUE_OR_RETURN_STATUS(aidl2legacy_string_view_String8(stringAddress));
return OK;
}
::android::status_t aidl2legacy_AudioDevice_audio_device(
const AudioDevice& aidl,
audio_devices_t* legacyType, std::string* legacyAddress) {
+ using Tag = AudioDeviceAddress::Tag;
*legacyType = VALUE_OR_RETURN_STATUS(
aidl2legacy_AudioDeviceDescription_audio_devices_t(aidl.type));
- *legacyAddress = aidl.address.get<AudioDeviceAddress::id>();
+ char addressBuffer[AUDIO_DEVICE_MAX_ADDRESS_LEN]{};
+ // 'aidl.address' can be empty even when the connection type is not.
+ // This happens for device ports that act as "blueprints". In this case
+ // we pass an empty string using the 'id' variant.
+ switch (aidl.address.getTag()) {
+ case Tag::mac: {
+ const std::vector<uint8_t>& mac = aidl.address.get<AudioDeviceAddress::mac>();
+ if (mac.size() != 6) return BAD_VALUE;
+ snprintf(addressBuffer, AUDIO_DEVICE_MAX_ADDRESS_LEN, "%02X:%02X:%02X:%02X:%02X:%02X",
+ mac[0], mac[1], mac[2], mac[3], mac[4], mac[5]);
+ } break;
+ case Tag::ipv4: {
+ const std::vector<uint8_t>& ipv4 = aidl.address.get<AudioDeviceAddress::ipv4>();
+ if (ipv4.size() != 4) return BAD_VALUE;
+ snprintf(addressBuffer, AUDIO_DEVICE_MAX_ADDRESS_LEN, "%u.%u.%u.%u",
+ ipv4[0], ipv4[1], ipv4[2], ipv4[3]);
+ } break;
+ case Tag::ipv6: {
+ const std::vector<int32_t>& ipv6 = aidl.address.get<AudioDeviceAddress::ipv6>();
+ if (ipv6.size() != 8) return BAD_VALUE;
+ snprintf(addressBuffer, AUDIO_DEVICE_MAX_ADDRESS_LEN,
+ "%04X:%04X:%04X:%04X:%04X:%04X:%04X:%04X",
+ ipv6[0], ipv6[1], ipv6[2], ipv6[3], ipv6[4], ipv6[5], ipv6[6], ipv6[7]);
+ } break;
+ case Tag::alsa: {
+ const std::vector<int32_t>& alsa = aidl.address.get<AudioDeviceAddress::alsa>();
+ if (alsa.size() != 2) return BAD_VALUE;
+ snprintf(addressBuffer, AUDIO_DEVICE_MAX_ADDRESS_LEN, "card=%d;device=%d",
+ alsa[0], alsa[1]);
+ } break;
+ case Tag::id: {
+ RETURN_STATUS_IF_ERROR(aidl2legacy_string(aidl.address.get<AudioDeviceAddress::id>(),
+ addressBuffer, AUDIO_DEVICE_MAX_ADDRESS_LEN));
+ } break;
+ }
+ *legacyAddress = addressBuffer;
return OK;
}
ConversionResult<AudioDevice> legacy2aidl_audio_device_AudioDevice(
audio_devices_t legacyType, const char* legacyAddress) {
- AudioDevice aidl;
- aidl.type = VALUE_OR_RETURN(
- legacy2aidl_audio_devices_t_AudioDeviceDescription(legacyType));
- const std::string aidl_id = VALUE_OR_RETURN(
+ const std::string stringAddress = VALUE_OR_RETURN(
legacy2aidl_string(legacyAddress, AUDIO_DEVICE_MAX_ADDRESS_LEN));
- aidl.address = AudioDeviceAddress::make<AudioDeviceAddress::id>(aidl_id);
- return aidl;
+ return legacy2aidl_audio_device_AudioDevice(legacyType, stringAddress);
}
ConversionResult<AudioDevice>
legacy2aidl_audio_device_AudioDevice(
audio_devices_t legacyType, const String8& legacyAddress) {
+ const std::string stringAddress = VALUE_OR_RETURN(legacy2aidl_String8_string(legacyAddress));
+ return legacy2aidl_audio_device_AudioDevice(legacyType, stringAddress);
+}
+
+ConversionResult<AudioDevice>
+legacy2aidl_audio_device_AudioDevice(
+ audio_devices_t legacyType, const std::string& legacyAddress) {
+ using Tag = AudioDeviceAddress::Tag;
AudioDevice aidl;
aidl.type = VALUE_OR_RETURN(
legacy2aidl_audio_devices_t_AudioDeviceDescription(legacyType));
- const std::string aidl_id = VALUE_OR_RETURN(
- legacy2aidl_String8_string(legacyAddress));
- aidl.address = AudioDeviceAddress::make<AudioDeviceAddress::id>(aidl_id);
+ // 'legacyAddress' can be empty even when the connection type is not.
+ // This happens for device ports that act as "blueprints". In this case
+ // we pass an empty string using the 'id' variant.
+ if (!legacyAddress.empty()) {
+ switch (suggestDeviceAddressTag(aidl.type)) {
+ case Tag::mac: {
+ std::vector<uint8_t> mac(6);
+ int status = sscanf(legacyAddress.c_str(), "%hhX:%hhX:%hhX:%hhX:%hhX:%hhX",
+ &mac[0], &mac[1], &mac[2], &mac[3], &mac[4], &mac[5]);
+ if (status != mac.size()) {
+ ALOGE("%s: malformed MAC address: \"%s\"", __func__, legacyAddress.c_str());
+ return unexpected(BAD_VALUE);
+ }
+ aidl.address = AudioDeviceAddress::make<AudioDeviceAddress::mac>(std::move(mac));
+ } break;
+ case Tag::ipv4: {
+ std::vector<uint8_t> ipv4(4);
+ int status = sscanf(legacyAddress.c_str(), "%hhu.%hhu.%hhu.%hhu",
+ &ipv4[0], &ipv4[1], &ipv4[2], &ipv4[3]);
+ if (status != ipv4.size()) {
+ ALOGE("%s: malformed IPv4 address: \"%s\"", __func__, legacyAddress.c_str());
+ return unexpected(BAD_VALUE);
+ }
+ aidl.address = AudioDeviceAddress::make<AudioDeviceAddress::ipv4>(std::move(ipv4));
+ } break;
+ case Tag::ipv6: {
+ std::vector<int32_t> ipv6(8);
+ int status = sscanf(legacyAddress.c_str(), "%X:%X:%X:%X:%X:%X:%X:%X",
+ &ipv6[0], &ipv6[1], &ipv6[2], &ipv6[3], &ipv6[4], &ipv6[5], &ipv6[6],
+ &ipv6[7]);
+ if (status != ipv6.size()) {
+ ALOGE("%s: malformed IPv6 address: \"%s\"", __func__, legacyAddress.c_str());
+ return unexpected(BAD_VALUE);
+ }
+ aidl.address = AudioDeviceAddress::make<AudioDeviceAddress::ipv6>(std::move(ipv6));
+ } break;
+ case Tag::alsa: {
+ std::vector<int32_t> alsa(2);
+ int status = sscanf(legacyAddress.c_str(), "card=%d;device=%d", &alsa[0], &alsa[1]);
+ if (status != alsa.size()) {
+ ALOGE("%s: malformed ALSA address: \"%s\"", __func__, legacyAddress.c_str());
+ return unexpected(BAD_VALUE);
+ }
+ aidl.address = AudioDeviceAddress::make<AudioDeviceAddress::alsa>(std::move(alsa));
+ } break;
+ case Tag::id: {
+ aidl.address = AudioDeviceAddress::make<AudioDeviceAddress::id>(legacyAddress);
+ } break;
+ }
+ } else {
+ aidl.address = AudioDeviceAddress::make<AudioDeviceAddress::id>(legacyAddress);
+ }
return aidl;
}
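
The two conversions above agree on a textual convention per address tag (MAC as ':'-separated hex bytes, IPv4 as dotted decimal, ALSA as "card=%d;device=%d"). A small stand-alone sketch, assuming plain C++ outside the Android tree, showing that the MAC formatting and parsing used for Tag::mac round-trip cleanly:

    // Round-trip check for the "%02X:..." formatting and "%hhX:..." parsing used above.
    #include <cstdio>
    #include <cstdint>
    #include <vector>

    int main() {
        const std::vector<uint8_t> mac{0x00, 0x11, 0x22, 0xAA, 0xBB, 0xCC};
        char text[32]{};
        std::snprintf(text, sizeof(text), "%02X:%02X:%02X:%02X:%02X:%02X",
                      mac[0], mac[1], mac[2], mac[3], mac[4], mac[5]);
        std::vector<uint8_t> parsed(6);
        int fields = std::sscanf(text, "%hhX:%hhX:%hhX:%hhX:%hhX:%hhX",
                                 &parsed[0], &parsed[1], &parsed[2],
                                 &parsed[3], &parsed[4], &parsed[5]);
        return (fields == 6 && parsed == mac) ? 0 : 1;   // exits 0: the convention round-trips
    }

The legacy2aidl direction rejects malformed strings by checking the sscanf field count, exactly as the patch does for MAC, IPv4, IPv6 and ALSA addresses.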
@@ -2724,6 +2834,10 @@
return AUDIO_LATENCY_MODE_FREE;
case AudioLatencyMode::LOW:
return AUDIO_LATENCY_MODE_LOW;
+ case AudioLatencyMode::DYNAMIC_SPATIAL_AUDIO_SOFTWARE:
+ return AUDIO_LATENCY_MODE_DYNAMIC_SPATIAL_AUDIO_SOFTWARE;
+ case AudioLatencyMode::DYNAMIC_SPATIAL_AUDIO_HARDWARE:
+ return AUDIO_LATENCY_MODE_DYNAMIC_SPATIAL_AUDIO_HARDWARE;
}
return unexpected(BAD_VALUE);
}
@@ -2734,6 +2848,10 @@
return AudioLatencyMode::FREE;
case AUDIO_LATENCY_MODE_LOW:
return AudioLatencyMode::LOW;
+ case AUDIO_LATENCY_MODE_DYNAMIC_SPATIAL_AUDIO_SOFTWARE:
+ return AudioLatencyMode::DYNAMIC_SPATIAL_AUDIO_SOFTWARE;
+ case AUDIO_LATENCY_MODE_DYNAMIC_SPATIAL_AUDIO_HARDWARE:
+ return AudioLatencyMode::DYNAMIC_SPATIAL_AUDIO_HARDWARE;
}
return unexpected(BAD_VALUE);
}
@@ -3014,6 +3132,8 @@
} // namespace android
+#undef GET_DEVICE_DESC_CONNECTION
+
#if defined(BACKEND_NDK)
} // aidl
#endif
diff --git a/media/audioaidlconversion/AidlConversionEffect.cpp b/media/audioaidlconversion/AidlConversionEffect.cpp
index ec380e3..611cfab 100644
--- a/media/audioaidlconversion/AidlConversionEffect.cpp
+++ b/media/audioaidlconversion/AidlConversionEffect.cpp
@@ -48,6 +48,8 @@
using ::aidl::android::media::audio::common::AudioDeviceDescription;
using ::android::BAD_VALUE;
+using ::android::OK;
+using ::android::status_t;
using ::android::base::unexpected;
using ::android::effect::utils::EffectParamReader;
using ::android::effect::utils::EffectParamWriter;
@@ -407,50 +409,66 @@
}
/**
- * Copy the entire effect_param_t to DefaultExtension::bytes.
+ * Copy the parameter area of effect_param_t to DefaultExtension::bytes.
*/
-ConversionResult<Parameter> legacy2aidl_EffectParameterReader_ParameterExtension(
+ConversionResult<VendorExtension> legacy2aidl_EffectParameterReader_Param_VendorExtension(
EffectParamReader& param) {
- size_t len = param.getTotalSize();
- DefaultExtension ext;
- ext.bytes.resize(len);
- std::memcpy(ext.bytes.data(), &param.getEffectParam(), len);
+ size_t len = param.getParameterSize();
+ DefaultExtension defaultExt;
+ defaultExt.bytes.resize(len);
+ RETURN_IF_ERROR(param.readFromParameter(defaultExt.bytes.data(), len));
- VendorExtension effectParam;
- effectParam.extension.setParcelable(ext);
- return UNION_MAKE(Parameter, specific,
- UNION_MAKE(Parameter::Specific, vendorEffect, effectParam));
+ VendorExtension ext;
+ ext.extension.setParcelable(defaultExt);
+ return ext;
}
-ConversionResult<std::vector<uint8_t>> aidl2legacy_ParameterExtension_vector_uint8(
- const Parameter& param) {
- VendorExtension effectParam = VALUE_OR_RETURN(
- (::aidl::android::getParameterSpecific<Parameter, VendorExtension,
- Parameter::Specific::vendorEffect>(param)));
- std::optional<DefaultExtension> ext;
- if (STATUS_OK != effectParam.extension.getParcelable(&ext) || !ext.has_value()) {
+/**
+ * Copy the data area of effect_param_t to DefaultExtension::bytes.
+ */
+ConversionResult<VendorExtension> legacy2aidl_EffectParameterReader_Data_VendorExtension(
+ EffectParamReader& param) {
+ size_t len = param.getValueSize();
+ DefaultExtension defaultExt;
+ defaultExt.bytes.resize(len);
+ RETURN_IF_ERROR(param.readFromValue(defaultExt.bytes.data(), len));
+
+ VendorExtension ext;
+ ext.extension.setParcelable(defaultExt);
+ return ext;
+}
+
+/**
+ * Copy DefaultExtension::bytes to the data area of effect_param_t.
+ */
+ConversionResult<status_t> aidl2legacy_VendorExtension_EffectParameterWriter_Data(
+ EffectParamWriter& param, VendorExtension ext) {
+ std::optional<DefaultExtension> defaultExt;
+ RETURN_IF_ERROR(ext.extension.getParcelable(&defaultExt));
+ if (!defaultExt.has_value()) {
return unexpected(BAD_VALUE);
}
- return ext.value().bytes;
+
+ RETURN_IF_ERROR(param.writeToValue(defaultExt->bytes.data(), defaultExt->bytes.size()));
+
+ return OK;
+}
+
+ConversionResult<Parameter> legacy2aidl_EffectParameterReader_ParameterExtension(
+ EffectParamReader& param) {
+ VendorExtension ext =
+ VALUE_OR_RETURN(legacy2aidl_EffectParameterReader_Data_VendorExtension(param));
+ return UNION_MAKE(Parameter, specific, UNION_MAKE(Parameter::Specific, vendorEffect, ext));
}
ConversionResult<::android::status_t> aidl2legacy_ParameterExtension_EffectParameterWriter(
const ::aidl::android::hardware::audio::effect::Parameter& aidl,
EffectParamWriter& legacy) {
- const std::vector<uint8_t>& extBytes = VALUE_OR_RETURN_STATUS(
- ::aidl::android::aidl2legacy_ParameterExtension_vector_uint8(aidl));
- if (legacy.getTotalSize() < extBytes.size()) {
- legacy.setStatus(BAD_VALUE);
- return unexpected(BAD_VALUE);
- }
-
- // create a reader wrapper and read the content to legacy EffectParamWriter
- EffectParamReader reader(*(effect_param_t*)extBytes.data());
- if (STATUS_OK != legacy.writeToValue(reader.getValueAddress(), reader.getValueSize())) {
- legacy.setStatus(BAD_VALUE);
- return unexpected(BAD_VALUE);
- }
- return STATUS_OK;
+ VendorExtension ext = VALUE_OR_RETURN(
+ (::aidl::android::getParameterSpecific<Parameter, VendorExtension,
+ Parameter::Specific::vendorEffect>(aidl)));
+ return VALUE_OR_RETURN_STATUS(
+ aidl2legacy_VendorExtension_EffectParameterWriter_Data(legacy, ext));
}
} // namespace android
diff --git a/media/audioaidlconversion/AidlConversionNdk.cpp b/media/audioaidlconversion/AidlConversionNdk.cpp
index 7c63339..9b14a5e 100644
--- a/media/audioaidlconversion/AidlConversionNdk.cpp
+++ b/media/audioaidlconversion/AidlConversionNdk.cpp
@@ -14,14 +14,18 @@
* limitations under the License.
*/
+#include <sstream>
#include <utility>
+#include <system/audio.h>
#define LOG_TAG "AidlConversionNdk"
//#define LOG_NDEBUG 0
#include <utils/Log.h>
+#include <utils/Errors.h>
#include <media/AidlConversionCppNdk.h>
#include <media/AidlConversionNdk.h>
+#include <Utils.h>
////////////////////////////////////////////////////////////////////////////////////////////////////
// AIDL NDK backend to legacy audio data structure conversion utilities.
@@ -29,44 +33,166 @@
namespace aidl {
namespace android {
+using hardware::audio::common::PlaybackTrackMetadata;
+using hardware::audio::common::RecordTrackMetadata;
+using ::android::BAD_VALUE;
+using ::android::OK;
+
+namespace {
+
+::android::status_t combineString(
+ const std::vector<std::string>& v, char separator, std::string* result) {
+ std::ostringstream oss;
+ for (const auto& s : v) {
+ if (oss.tellp() > 0) {
+ oss << separator;
+ }
+ if (s.find(separator) == std::string::npos) {
+ oss << s;
+ } else {
+ ALOGE("%s: string \"%s\" contains separator character \"%c\"",
+ __func__, s.c_str(), separator);
+ return BAD_VALUE;
+ }
+ }
+ *result = oss.str();
+ return OK;
+}
+
+std::vector<std::string> splitString(const std::string& s, char separator) {
+ std::istringstream iss(s);
+ std::string t;
+ std::vector<std::string> result;
+ while (std::getline(iss, t, separator)) {
+ result.push_back(std::move(t));
+ }
+ return result;
+}
+
+std::vector<std::string> filterOutNonVendorTags(const std::vector<std::string>& tags) {
+ std::vector<std::string> result;
+ std::copy_if(tags.begin(), tags.end(), std::back_inserter(result),
+ ::aidl::android::hardware::audio::common::maybeVendorExtension);
+ return result;
+}
+
+} // namespace
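
The helpers above define how audio attribute tags travel as a single legacy string: vendor tags are joined with AUDIO_ATTRIBUTES_TAGS_SEPARATOR and split back on the same character. A stand-alone sketch of that join/split behaviour (assumption: the separator is ';' as defined in system/audio.h; here it is just a local constant):

    // Stand-alone sketch of the join/split behaviour of combineString()/splitString() above.
    #include <sstream>
    #include <string>
    #include <vector>

    static std::string join(const std::vector<std::string>& v, char sep) {
        std::ostringstream oss;
        for (const auto& s : v) {
            if (oss.tellp() > 0) oss << sep;
            oss << s;                 // the real helper rejects tags that contain 'sep'
        }
        return oss.str();
    }

    static std::vector<std::string> split(const std::string& s, char sep) {
        std::istringstream iss(s);
        std::string t;
        std::vector<std::string> out;
        while (std::getline(iss, t, sep)) out.push_back(t);
        return out;
    }

    int main() {
        const std::vector<std::string> tags{"VX_VENDOR_TAG1", "VX_VENDOR_TAG2"};
        const char sep = ';';         // assumption: value of AUDIO_ATTRIBUTES_TAGS_SEPARATOR
        return split(join(tags, sep), sep) == tags ? 0 : 1;
    }
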
+
// buffer_provider_t is not supported thus skipped
-ConversionResult<buffer_config_t> aidl2legacy_AudioConfigBase_buffer_config_t(
- const media::audio::common::AudioConfigBase& aidl, bool isInput) {
+ConversionResult<buffer_config_t> aidl2legacy_AudioConfig_buffer_config_t(
+ const media::audio::common::AudioConfig& aidl, bool isInput) {
buffer_config_t legacy;
- legacy.samplingRate = VALUE_OR_RETURN(convertIntegral<uint32_t>(aidl.sampleRate));
+ legacy.samplingRate = VALUE_OR_RETURN(convertIntegral<uint32_t>(aidl.base.sampleRate));
legacy.mask |= EFFECT_CONFIG_SMP_RATE;
legacy.channels = VALUE_OR_RETURN(
- aidl2legacy_AudioChannelLayout_audio_channel_mask_t(aidl.channelMask, isInput));
+ aidl2legacy_AudioChannelLayout_audio_channel_mask_t(aidl.base.channelMask, isInput));
legacy.mask |= EFFECT_CONFIG_CHANNELS;
- legacy.format = VALUE_OR_RETURN(aidl2legacy_AudioFormatDescription_audio_format_t(aidl.format));
+ legacy.format =
+ VALUE_OR_RETURN(aidl2legacy_AudioFormatDescription_audio_format_t(aidl.base.format));
legacy.mask |= EFFECT_CONFIG_FORMAT;
+ legacy.buffer.frameCount = aidl.frameCount;
// TODO: add accessMode and mask
return legacy;
}
-ConversionResult<media::audio::common::AudioConfigBase>
-legacy2aidl_buffer_config_t_AudioConfigBase(const buffer_config_t& legacy, bool isInput) {
- media::audio::common::AudioConfigBase aidl;
+ConversionResult<media::audio::common::AudioConfig>
+legacy2aidl_buffer_config_t_AudioConfig(const buffer_config_t& legacy, bool isInput) {
+ media::audio::common::AudioConfig aidl;
if (legacy.mask & EFFECT_CONFIG_SMP_RATE) {
- aidl.sampleRate = VALUE_OR_RETURN(convertIntegral<int32_t>(legacy.samplingRate));
+ aidl.base.sampleRate = VALUE_OR_RETURN(convertIntegral<int32_t>(legacy.samplingRate));
}
if (legacy.mask & EFFECT_CONFIG_CHANNELS) {
- aidl.channelMask = VALUE_OR_RETURN(legacy2aidl_audio_channel_mask_t_AudioChannelLayout(
+ aidl.base.channelMask = VALUE_OR_RETURN(legacy2aidl_audio_channel_mask_t_AudioChannelLayout(
static_cast<audio_channel_mask_t>(legacy.channels), isInput));
}
if (legacy.mask & EFFECT_CONFIG_FORMAT) {
- aidl.format = VALUE_OR_RETURN(legacy2aidl_audio_format_t_AudioFormatDescription(
+ aidl.base.format = VALUE_OR_RETURN(legacy2aidl_audio_format_t_AudioFormatDescription(
static_cast<audio_format_t>(legacy.format)));
}
+ aidl.frameCount = legacy.buffer.frameCount;
// TODO: add accessMode and mask
return aidl;
}
+::android::status_t aidl2legacy_AudioAttributesTags(
+ const std::vector<std::string>& aidl, char* legacy) {
+ std::string aidlTags;
+ RETURN_STATUS_IF_ERROR(combineString(
+ filterOutNonVendorTags(aidl), AUDIO_ATTRIBUTES_TAGS_SEPARATOR, &aidlTags));
+ RETURN_STATUS_IF_ERROR(aidl2legacy_string(aidlTags, legacy, AUDIO_ATTRIBUTES_TAGS_MAX_SIZE));
+ return OK;
+}
+
+ConversionResult<std::vector<std::string>> legacy2aidl_AudioAttributesTags(const char* legacy) {
+ std::string legacyTags = VALUE_OR_RETURN(legacy2aidl_string(
+ legacy, AUDIO_ATTRIBUTES_TAGS_MAX_SIZE));
+ return filterOutNonVendorTags(splitString(legacyTags, AUDIO_ATTRIBUTES_TAGS_SEPARATOR));
+}
+
+ConversionResult<playback_track_metadata_v7>
+aidl2legacy_PlaybackTrackMetadata_playback_track_metadata_v7(const PlaybackTrackMetadata& aidl) {
+ playback_track_metadata_v7 legacy;
+ legacy.base.usage = VALUE_OR_RETURN(aidl2legacy_AudioUsage_audio_usage_t(aidl.usage));
+ legacy.base.content_type = VALUE_OR_RETURN(aidl2legacy_AudioContentType_audio_content_type_t(
+ aidl.contentType));
+ legacy.base.gain = aidl.gain;
+ legacy.channel_mask = VALUE_OR_RETURN(aidl2legacy_AudioChannelLayout_audio_channel_mask_t(
+ aidl.channelMask, false /*isInput*/));
+ RETURN_IF_ERROR(aidl2legacy_AudioAttributesTags(aidl.tags, legacy.tags));
+ return legacy;
+}
+
+ConversionResult<PlaybackTrackMetadata>
+legacy2aidl_playback_track_metadata_v7_PlaybackTrackMetadata(
+ const playback_track_metadata_v7& legacy) {
+ PlaybackTrackMetadata aidl;
+ aidl.usage = VALUE_OR_RETURN(legacy2aidl_audio_usage_t_AudioUsage(legacy.base.usage));
+ aidl.contentType = VALUE_OR_RETURN(legacy2aidl_audio_content_type_t_AudioContentType(
+ legacy.base.content_type));
+ aidl.gain = legacy.base.gain;
+ aidl.channelMask = VALUE_OR_RETURN(legacy2aidl_audio_channel_mask_t_AudioChannelLayout(
+ legacy.channel_mask, false /*isInput*/));
+ aidl.tags = VALUE_OR_RETURN(legacy2aidl_AudioAttributesTags(legacy.tags));
+ return aidl;
+}
+
+ConversionResult<record_track_metadata_v7>
+aidl2legacy_RecordTrackMetadata_record_track_metadata_v7(const RecordTrackMetadata& aidl) {
+ record_track_metadata_v7 legacy;
+ legacy.base.source = VALUE_OR_RETURN(aidl2legacy_AudioSource_audio_source_t(aidl.source));
+ legacy.base.gain = aidl.gain;
+ if (aidl.destinationDevice.has_value()) {
+ RETURN_IF_ERROR(aidl2legacy_AudioDevice_audio_device(aidl.destinationDevice.value(),
+ &legacy.base.dest_device, legacy.base.dest_device_address));
+ } else {
+ legacy.base.dest_device = AUDIO_DEVICE_NONE;
+ }
+ legacy.channel_mask = VALUE_OR_RETURN(aidl2legacy_AudioChannelLayout_audio_channel_mask_t(
+ aidl.channelMask, true /*isInput*/));
+ RETURN_IF_ERROR(aidl2legacy_AudioAttributesTags(aidl.tags, legacy.tags));
+ return legacy;
+}
+
+ConversionResult<RecordTrackMetadata>
+legacy2aidl_record_track_metadata_v7_RecordTrackMetadata(const record_track_metadata_v7& legacy) {
+ RecordTrackMetadata aidl;
+ aidl.source = VALUE_OR_RETURN(legacy2aidl_audio_source_t_AudioSource(legacy.base.source));
+ aidl.gain = legacy.base.gain;
+ if (legacy.base.dest_device != AUDIO_DEVICE_NONE) {
+ aidl.destinationDevice = VALUE_OR_RETURN(legacy2aidl_audio_device_AudioDevice(
+ legacy.base.dest_device, legacy.base.dest_device_address));
+ }
+ aidl.channelMask = VALUE_OR_RETURN(legacy2aidl_audio_channel_mask_t_AudioChannelLayout(
+ legacy.channel_mask, true /*isInput*/));
+ aidl.tags = VALUE_OR_RETURN(legacy2aidl_AudioAttributesTags(legacy.tags));
+ return aidl;
+}
+
} // namespace android
} // aidl
diff --git a/media/audioaidlconversion/Android.bp b/media/audioaidlconversion/Android.bp
index c0024ef..bdb3a2c 100644
--- a/media/audioaidlconversion/Android.bp
+++ b/media/audioaidlconversion/Android.bp
@@ -135,12 +135,16 @@
],
defaults: [
"audio_aidl_conversion_common_default",
+ "latest_android_hardware_audio_common_ndk_shared",
"latest_android_media_audio_common_types_ndk_shared",
],
shared_libs: [
"libbinder_ndk",
"libbase",
],
+ static_libs: [
+ "libaudioaidlcommon",
+ ],
cflags: [
"-DBACKEND_NDK",
],
diff --git a/media/audioaidlconversion/TEST_MAPPING b/media/audioaidlconversion/TEST_MAPPING
new file mode 100644
index 0000000..a0c9759
--- /dev/null
+++ b/media/audioaidlconversion/TEST_MAPPING
@@ -0,0 +1,7 @@
+{
+ "presubmit": [
+ {
+ "name": "audio_aidl_ndk_conversion_tests"
+ }
+ ]
+}
diff --git a/media/audioaidlconversion/include/media/AidlConversionCppNdk-impl.h b/media/audioaidlconversion/include/media/AidlConversionCppNdk-impl.h
new file mode 100644
index 0000000..ec1f75c
--- /dev/null
+++ b/media/audioaidlconversion/include/media/AidlConversionCppNdk-impl.h
@@ -0,0 +1,438 @@
+/*
+ * Copyright (C) 2023 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+// WARNING: This file is intended for multiple inclusion.
+// Do not include directly, use 'AidlConversionCppNdk.h'.
+#if (defined(BACKEND_NDK_IMPL) && !defined(AUDIO_AIDL_CONVERSION_AIDL_CONVERSION_CPP_NDK_NDK)) || \
+ (!defined(BACKEND_NDK_IMPL) && !defined(AUDIO_AIDL_CONVERSION_AIDL_CONVERSION_CPP_NDK_CPP))
+#if defined(BACKEND_NDK_IMPL)
+#define AUDIO_AIDL_CONVERSION_AIDL_CONVERSION_CPP_NDK_NDK
+#else
+#define AUDIO_AIDL_CONVERSION_AIDL_CONVERSION_CPP_NDK_CPP
+#endif // BACKEND_NDK_IMPL
+
+#include <limits>
+#include <type_traits>
+
+/**
+ * Handles conversion between AIDL types (both the CPP and NDK backends) and legacy types.
+ * The backend is selected via preprocessor flags (cflags) set in Android.bp.
+ */
+#if defined(BACKEND_NDK_IMPL)
+#define PREFIX(f) <aidl/f>
+#else
+#define PREFIX(f) <f>
+#endif
+
+#include PREFIX(android/media/audio/common/AudioChannelLayout.h)
+#include PREFIX(android/media/audio/common/AudioConfig.h)
+#include PREFIX(android/media/audio/common/AudioConfigBase.h)
+#include PREFIX(android/media/audio/common/AudioContentType.h)
+#include PREFIX(android/media/audio/common/AudioDeviceDescription.h)
+#include PREFIX(android/media/audio/common/AudioDualMonoMode.h)
+#include PREFIX(android/media/audio/common/AudioEncapsulationMetadataType.h)
+#include PREFIX(android/media/audio/common/AudioEncapsulationMode.h)
+#include PREFIX(android/media/audio/common/AudioEncapsulationType.h)
+#include PREFIX(android/media/audio/common/AudioFormatDescription.h)
+#include PREFIX(android/media/audio/common/AudioGain.h)
+#include PREFIX(android/media/audio/common/AudioGainConfig.h)
+#include PREFIX(android/media/audio/common/AudioGainMode.h)
+#include PREFIX(android/media/audio/common/AudioInputFlags.h)
+#include PREFIX(android/media/audio/common/AudioIoFlags.h)
+#include PREFIX(android/media/audio/common/AudioLatencyMode.h)
+#include PREFIX(android/media/audio/common/AudioMode.h)
+#include PREFIX(android/media/audio/common/AudioOffloadInfo.h)
+#include PREFIX(android/media/audio/common/AudioOutputFlags.h)
+#include PREFIX(android/media/audio/common/AudioPort.h)
+#include PREFIX(android/media/audio/common/AudioPortConfig.h)
+#include PREFIX(android/media/audio/common/AudioPortExt.h)
+#include PREFIX(android/media/audio/common/AudioPortMixExt.h)
+#include PREFIX(android/media/audio/common/AudioPlaybackRate.h)
+#include PREFIX(android/media/audio/common/AudioProfile.h)
+#include PREFIX(android/media/audio/common/AudioSource.h)
+#include PREFIX(android/media/audio/common/AudioStandard.h)
+#include PREFIX(android/media/audio/common/AudioUsage.h)
+#include PREFIX(android/media/audio/common/AudioUuid.h)
+#include PREFIX(android/media/audio/common/ExtraAudioDescriptor.h)
+#include PREFIX(android/media/audio/common/Int.h)
+#include PREFIX(android/media/audio/common/MicrophoneDynamicInfo.h)
+#include PREFIX(android/media/audio/common/MicrophoneInfo.h)
+#undef PREFIX
+
+#include <system/audio.h>
+#include <system/audio_effect.h>
+
+#if defined(BACKEND_NDK_IMPL)
+namespace aidl {
+#endif
+
+namespace android {
+
+// maxSize is the size of the C-string buffer (including the 0-terminator), NOT the max length of
+// the string.
+::android::status_t aidl2legacy_string(std::string_view aidl, char* dest, size_t maxSize);
+ConversionResult<std::string> legacy2aidl_string(const char* legacy, size_t maxSize);
+
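
The maxSize note above matters when sizing buffers for these helpers: a buffer of maxSize bytes holds at most maxSize - 1 characters plus the terminator. A stand-alone sketch of that contract (assumption: this is a simplified stand-in, not the real aidl2legacy_string, which reports failures through status_t):

    // Simplified stand-in demonstrating the "maxSize includes the 0-terminator" contract.
    #include <cstddef>
    #include <cstring>
    #include <string_view>

    static bool copyIfFits(std::string_view src, char* dest, size_t maxSize) {
        if (maxSize == 0 || src.size() > maxSize - 1) return false;  // no room for the terminator
        std::memcpy(dest, src.data(), src.size());
        dest[src.size()] = '\0';
        return true;
    }

    int main() {
        char buf[8];
        const bool fits    = copyIfFits("1234567", buf, sizeof(buf));   // 7 chars + '\0' fits in 8
        const bool tooLong = copyIfFits("12345678", buf, sizeof(buf));  // 8 chars would need 9 bytes
        return (fits && !tooLong) ? 0 : 1;
    }
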
+ConversionResult<audio_module_handle_t> aidl2legacy_int32_t_audio_module_handle_t(int32_t aidl);
+ConversionResult<int32_t> legacy2aidl_audio_module_handle_t_int32_t(audio_module_handle_t legacy);
+
+ConversionResult<audio_io_handle_t> aidl2legacy_int32_t_audio_io_handle_t(int32_t aidl);
+ConversionResult<int32_t> legacy2aidl_audio_io_handle_t_int32_t(audio_io_handle_t legacy);
+
+ConversionResult<audio_port_handle_t> aidl2legacy_int32_t_audio_port_handle_t(int32_t aidl);
+ConversionResult<int32_t> legacy2aidl_audio_port_handle_t_int32_t(audio_port_handle_t legacy);
+
+ConversionResult<audio_patch_handle_t> aidl2legacy_int32_t_audio_patch_handle_t(int32_t aidl);
+ConversionResult<int32_t> legacy2aidl_audio_patch_handle_t_int32_t(audio_patch_handle_t legacy);
+
+ConversionResult<audio_unique_id_t> aidl2legacy_int32_t_audio_unique_id_t(int32_t aidl);
+ConversionResult<int32_t> legacy2aidl_audio_unique_id_t_int32_t(audio_unique_id_t legacy);
+
+ConversionResult<audio_hw_sync_t> aidl2legacy_int32_t_audio_hw_sync_t(int32_t aidl);
+ConversionResult<int32_t> legacy2aidl_audio_hw_sync_t_int32_t(audio_hw_sync_t legacy);
+
+ConversionResult<unsigned int> aidl2legacy_int32_t_config_mask(int32_t aidl);
+ConversionResult<int32_t> legacy2aidl_config_mask_int32_t(unsigned int legacy);
+
+ConversionResult<pid_t> aidl2legacy_int32_t_pid_t(int32_t aidl);
+ConversionResult<int32_t> legacy2aidl_pid_t_int32_t(pid_t legacy);
+
+ConversionResult<uid_t> aidl2legacy_int32_t_uid_t(int32_t aidl);
+ConversionResult<int32_t> legacy2aidl_uid_t_int32_t(uid_t legacy);
+
+ConversionResult<::android::String8> aidl2legacy_string_view_String8(std::string_view aidl);
+ConversionResult<std::string> legacy2aidl_String8_string(const ::android::String8& legacy);
+
+ConversionResult<::android::String16> aidl2legacy_string_view_String16(std::string_view aidl);
+ConversionResult<std::string> legacy2aidl_String16_string(const ::android::String16& legacy);
+
+ConversionResult<std::optional<::android::String16>>
+aidl2legacy_optional_string_view_optional_String16(std::optional<std::string_view> aidl);
+ConversionResult<std::optional<std::string_view>>
+legacy2aidl_optional_String16_optional_string(std::optional<::android::String16> legacy);
+
+ConversionResult<audio_channel_mask_t> aidl2legacy_AudioChannelLayout_audio_channel_mask_t(
+ const media::audio::common::AudioChannelLayout& aidl, bool isInput);
+ConversionResult<media::audio::common::AudioChannelLayout>
+legacy2aidl_audio_channel_mask_t_AudioChannelLayout(audio_channel_mask_t legacy, bool isInput);
+
+audio_channel_mask_t aidl2legacy_AudioChannelLayout_layout_audio_channel_mask_t_bits(
+ int aidlLayout, bool isInput);
+int legacy2aidl_audio_channel_mask_t_bits_AudioChannelLayout_layout(
+ audio_channel_mask_t legacy, bool isInput);
+
+enum class AudioPortDirection {
+ INPUT, OUTPUT
+};
+ConversionResult<AudioPortDirection> portDirection(audio_port_role_t role, audio_port_type_t type);
+ConversionResult<audio_port_role_t> portRole(AudioPortDirection direction, audio_port_type_t type);
+
+ConversionResult<audio_config_t>
+aidl2legacy_AudioConfig_audio_config_t(const media::audio::common::AudioConfig& aidl, bool isInput);
+ConversionResult<media::audio::common::AudioConfig>
+legacy2aidl_audio_config_t_AudioConfig(const audio_config_t& legacy, bool isInput);
+
+ConversionResult<audio_config_base_t>
+aidl2legacy_AudioConfigBase_audio_config_base_t(
+ const media::audio::common::AudioConfigBase& aidl, bool isInput);
+ConversionResult<media::audio::common::AudioConfigBase>
+legacy2aidl_audio_config_base_t_AudioConfigBase(const audio_config_base_t& legacy, bool isInput);
+
+ConversionResult<audio_input_flags_t>
+aidl2legacy_AudioInputFlags_audio_input_flags_t(media::audio::common::AudioInputFlags aidl);
+ConversionResult<media::audio::common::AudioInputFlags>
+legacy2aidl_audio_input_flags_t_AudioInputFlags(audio_input_flags_t legacy);
+
+ConversionResult<audio_output_flags_t>
+aidl2legacy_AudioOutputFlags_audio_output_flags_t(media::audio::common::AudioOutputFlags aidl);
+ConversionResult<media::audio::common::AudioOutputFlags>
+legacy2aidl_audio_output_flags_t_AudioOutputFlags(audio_output_flags_t legacy);
+
+ConversionResult<audio_input_flags_t> aidl2legacy_int32_t_audio_input_flags_t_mask(
+ int32_t aidl);
+ConversionResult<int32_t> legacy2aidl_audio_input_flags_t_int32_t_mask(
+ audio_input_flags_t legacy);
+
+ConversionResult<audio_output_flags_t> aidl2legacy_int32_t_audio_output_flags_t_mask(
+ int32_t aidl);
+ConversionResult<int32_t> legacy2aidl_audio_output_flags_t_int32_t_mask(
+ audio_output_flags_t legacy);
+
+ConversionResult<audio_io_flags> aidl2legacy_AudioIoFlags_audio_io_flags(
+ const media::audio::common::AudioIoFlags& aidl, bool isInput);
+ConversionResult<media::audio::common::AudioIoFlags> legacy2aidl_audio_io_flags_AudioIoFlags(
+ const audio_io_flags& legacy, bool isInput);
+
+ConversionResult<audio_session_t> aidl2legacy_int32_t_audio_session_t(int32_t aidl);
+ConversionResult<int32_t> legacy2aidl_audio_session_t_int32_t(audio_session_t legacy);
+
+ConversionResult<audio_content_type_t>
+aidl2legacy_AudioContentType_audio_content_type_t(
+ media::audio::common::AudioContentType aidl);
+ConversionResult<media::audio::common::AudioContentType>
+legacy2aidl_audio_content_type_t_AudioContentType(audio_content_type_t legacy);
+
+ConversionResult<audio_devices_t> aidl2legacy_AudioDeviceDescription_audio_devices_t(
+ const media::audio::common::AudioDeviceDescription& aidl);
+ConversionResult<media::audio::common::AudioDeviceDescription>
+legacy2aidl_audio_devices_t_AudioDeviceDescription(audio_devices_t legacy);
+
+media::audio::common::AudioDeviceAddress::Tag suggestDeviceAddressTag(
+ const media::audio::common::AudioDeviceDescription& description);
+
+::android::status_t aidl2legacy_AudioDevice_audio_device(
+ const media::audio::common::AudioDevice& aidl, audio_devices_t* legacyType,
+ char* legacyAddress);
+::android::status_t aidl2legacy_AudioDevice_audio_device(
+ const media::audio::common::AudioDevice& aidl, audio_devices_t* legacyType,
+ ::android::String8* legacyAddress);
+::android::status_t aidl2legacy_AudioDevice_audio_device(
+ const media::audio::common::AudioDevice& aidl, audio_devices_t* legacyType,
+ std::string* legacyAddress);
+
+ConversionResult<media::audio::common::AudioDevice> legacy2aidl_audio_device_AudioDevice(
+ audio_devices_t legacyType, const char* legacyAddress);
+ConversionResult<media::audio::common::AudioDevice> legacy2aidl_audio_device_AudioDevice(
+ audio_devices_t legacyType, const ::android::String8& legacyAddress);
+ConversionResult<media::audio::common::AudioDevice> legacy2aidl_audio_device_AudioDevice(
+ audio_devices_t legacyType, const std::string& legacyAddress);
+
+ConversionResult<audio_extra_audio_descriptor>
+aidl2legacy_ExtraAudioDescriptor_audio_extra_audio_descriptor(
+ const media::audio::common::ExtraAudioDescriptor& aidl);
+
+ConversionResult<media::audio::common::ExtraAudioDescriptor>
+legacy2aidl_audio_extra_audio_descriptor_ExtraAudioDescriptor(
+ const audio_extra_audio_descriptor& legacy);
+
+ConversionResult<audio_encapsulation_metadata_type_t>
+aidl2legacy_AudioEncapsulationMetadataType_audio_encapsulation_metadata_type_t(
+ media::audio::common::AudioEncapsulationMetadataType aidl);
+ConversionResult<media::audio::common::AudioEncapsulationMetadataType>
+legacy2aidl_audio_encapsulation_metadata_type_t_AudioEncapsulationMetadataType(
+ audio_encapsulation_metadata_type_t legacy);
+
+ConversionResult<uint32_t> aidl2legacy_AudioEncapsulationMetadataType_mask(int32_t aidl);
+ConversionResult<int32_t> legacy2aidl_AudioEncapsulationMetadataType_mask(uint32_t legacy);
+
+ConversionResult<audio_encapsulation_mode_t>
+aidl2legacy_AudioEncapsulationMode_audio_encapsulation_mode_t(
+ media::audio::common::AudioEncapsulationMode aidl);
+ConversionResult<media::audio::common::AudioEncapsulationMode>
+legacy2aidl_audio_encapsulation_mode_t_AudioEncapsulationMode(audio_encapsulation_mode_t legacy);
+
+ConversionResult<uint32_t> aidl2legacy_AudioEncapsulationMode_mask(int32_t aidl);
+ConversionResult<int32_t> legacy2aidl_AudioEncapsulationMode_mask(uint32_t legacy);
+
+ConversionResult<audio_encapsulation_type_t>
+aidl2legacy_AudioEncapsulationType_audio_encapsulation_type_t(
+ const media::audio::common::AudioEncapsulationType& aidl);
+ConversionResult<media::audio::common::AudioEncapsulationType>
+legacy2aidl_audio_encapsulation_type_t_AudioEncapsulationType(
+ const audio_encapsulation_type_t& legacy);
+
+ConversionResult<audio_format_t> aidl2legacy_AudioFormatDescription_audio_format_t(
+ const media::audio::common::AudioFormatDescription& aidl);
+ConversionResult<media::audio::common::AudioFormatDescription>
+legacy2aidl_audio_format_t_AudioFormatDescription(audio_format_t legacy);
+
+ConversionResult<audio_gain_mode_t>
+aidl2legacy_AudioGainMode_audio_gain_mode_t(media::audio::common::AudioGainMode aidl);
+ConversionResult<media::audio::common::AudioGainMode>
+legacy2aidl_audio_gain_mode_t_AudioGainMode(audio_gain_mode_t legacy);
+
+ConversionResult<audio_gain_mode_t> aidl2legacy_int32_t_audio_gain_mode_t_mask(int32_t aidl);
+ConversionResult<int32_t> legacy2aidl_audio_gain_mode_t_int32_t_mask(audio_gain_mode_t legacy);
+
+ConversionResult<audio_gain_config> aidl2legacy_AudioGainConfig_audio_gain_config(
+ const media::audio::common::AudioGainConfig& aidl, bool isInput);
+ConversionResult<media::audio::common::AudioGainConfig>
+legacy2aidl_audio_gain_config_AudioGainConfig(const audio_gain_config& legacy, bool isInput);
+
+ConversionResult<audio_gain>
+aidl2legacy_AudioGain_audio_gain(const media::audio::common::AudioGain& aidl, bool isInput);
+ConversionResult<media::audio::common::AudioGain>
+legacy2aidl_audio_gain_AudioGain(const audio_gain& legacy, bool isInput);
+
+ConversionResult<audio_input_flags_t>
+aidl2legacy_AudioInputFlags_audio_input_flags_t(media::audio::common::AudioInputFlags aidl);
+ConversionResult<media::audio::common::AudioInputFlags>
+legacy2aidl_audio_input_flags_t_AudioInputFlags(audio_input_flags_t legacy);
+
+ConversionResult<audio_mode_t>
+aidl2legacy_AudioMode_audio_mode_t(media::audio::common::AudioMode aidl);
+ConversionResult<media::audio::common::AudioMode>
+legacy2aidl_audio_mode_t_AudioMode(audio_mode_t legacy);
+
+ConversionResult<audio_offload_info_t>
+aidl2legacy_AudioOffloadInfo_audio_offload_info_t(
+ const media::audio::common::AudioOffloadInfo& aidl);
+ConversionResult<media::audio::common::AudioOffloadInfo>
+legacy2aidl_audio_offload_info_t_AudioOffloadInfo(const audio_offload_info_t& legacy);
+
+ConversionResult<audio_output_flags_t>
+aidl2legacy_AudioOutputFlags_audio_output_flags_t(media::audio::common::AudioOutputFlags aidl);
+ConversionResult<media::audio::common::AudioOutputFlags>
+legacy2aidl_audio_output_flags_t_AudioOutputFlags(audio_output_flags_t legacy);
+
+// This type is unnamed in the original definition, thus we name it here.
+using audio_port_config_mix_ext_usecase = decltype(audio_port_config_mix_ext::usecase);
+ConversionResult<audio_port_config_mix_ext_usecase>
+aidl2legacy_AudioPortMixExtUseCase_audio_port_config_mix_ext_usecase(
+ const media::audio::common::AudioPortMixExtUseCase& aidl, bool isInput);
+ConversionResult<media::audio::common::AudioPortMixExtUseCase>
+legacy2aidl_audio_port_config_mix_ext_usecase_AudioPortMixExtUseCase(
+ const audio_port_config_mix_ext_usecase& legacy, bool isInput);
+
+ConversionResult<audio_port_config_device_ext>
+aidl2legacy_AudioPortDeviceExt_audio_port_config_device_ext(
+ const media::audio::common::AudioPortDeviceExt& aidl);
+ConversionResult<media::audio::common::AudioPortDeviceExt>
+ legacy2aidl_audio_port_config_device_ext_AudioPortDeviceExt(
+ const audio_port_config_device_ext& legacy);
+
+::android::status_t aidl2legacy_AudioPortConfig_audio_port_config(
+ const media::audio::common::AudioPortConfig& aidl, bool isInput,
+ audio_port_config* legacy, int32_t* portId);
+ConversionResult<media::audio::common::AudioPortConfig>
+legacy2aidl_audio_port_config_AudioPortConfig(
+ const audio_port_config& legacy, bool isInput, int32_t portId);
+
+ConversionResult<audio_port_mix_ext> aidl2legacy_AudioPortMixExt_audio_port_mix_ext(
+ const media::audio::common::AudioPortMixExt& aidl);
+ConversionResult<media::audio::common::AudioPortMixExt>
+legacy2aidl_audio_port_mix_ext_AudioPortMixExt(
+ const audio_port_mix_ext& legacy);
+
+ConversionResult<audio_port_device_ext>
+aidl2legacy_AudioPortDeviceExt_audio_port_device_ext(
+ const media::audio::common::AudioPortDeviceExt& aidl);
+ConversionResult<media::audio::common::AudioPortDeviceExt>
+legacy2aidl_audio_port_device_ext_AudioPortDeviceExt(
+ const audio_port_device_ext& legacy);
+
+ConversionResult<audio_port_v7>
+aidl2legacy_AudioPort_audio_port_v7(
+ const media::audio::common::AudioPort& aidl, bool isInput);
+ConversionResult<media::audio::common::AudioPort>
+legacy2aidl_audio_port_v7_AudioPort(const audio_port_v7& legacy, bool isInput);
+
+ConversionResult<audio_profile> aidl2legacy_AudioProfile_audio_profile(
+ const media::audio::common::AudioProfile& aidl, bool isInput);
+ConversionResult<media::audio::common::AudioProfile> legacy2aidl_audio_profile_AudioProfile(
+ const audio_profile& legacy, bool isInput);
+
+ConversionResult<audio_standard_t> aidl2legacy_AudioStandard_audio_standard_t(
+ media::audio::common::AudioStandard aidl);
+ConversionResult<media::audio::common::AudioStandard> legacy2aidl_audio_standard_t_AudioStandard(
+ audio_standard_t legacy);
+
+ConversionResult<audio_source_t> aidl2legacy_AudioSource_audio_source_t(
+ media::audio::common::AudioSource aidl);
+ConversionResult<media::audio::common::AudioSource> legacy2aidl_audio_source_t_AudioSource(
+ audio_source_t legacy);
+
+ConversionResult<audio_usage_t> aidl2legacy_AudioUsage_audio_usage_t(
+ media::audio::common::AudioUsage aidl);
+ConversionResult<media::audio::common::AudioUsage> legacy2aidl_audio_usage_t_AudioUsage(
+ audio_usage_t legacy);
+
+ConversionResult<audio_uuid_t> aidl2legacy_AudioUuid_audio_uuid_t(
+ const media::audio::common::AudioUuid &aidl);
+ConversionResult<media::audio::common::AudioUuid> legacy2aidl_audio_uuid_t_AudioUuid(
+ const audio_uuid_t& legacy);
+
+ConversionResult<audio_dual_mono_mode_t>
+aidl2legacy_AudioDualMonoMode_audio_dual_mono_mode_t(media::audio::common::AudioDualMonoMode aidl);
+ConversionResult<media::audio::common::AudioDualMonoMode>
+legacy2aidl_audio_dual_mono_mode_t_AudioDualMonoMode(audio_dual_mono_mode_t legacy);
+
+ConversionResult<audio_timestretch_fallback_mode_t>
+aidl2legacy_TimestretchFallbackMode_audio_timestretch_fallback_mode_t(
+ media::audio::common::AudioPlaybackRate::TimestretchFallbackMode aidl);
+ConversionResult<media::audio::common::AudioPlaybackRate::TimestretchFallbackMode>
+legacy2aidl_audio_timestretch_fallback_mode_t_TimestretchFallbackMode(
+ audio_timestretch_fallback_mode_t legacy);
+
+ConversionResult<audio_timestretch_stretch_mode_t>
+aidl2legacy_TimestretchMode_audio_timestretch_stretch_mode_t(
+ media::audio::common::AudioPlaybackRate::TimestretchMode aidl);
+ConversionResult<media::audio::common::AudioPlaybackRate::TimestretchMode>
+legacy2aidl_audio_timestretch_stretch_mode_t_TimestretchMode(
+ audio_timestretch_stretch_mode_t legacy);
+
+ConversionResult<audio_playback_rate_t>
+aidl2legacy_AudioPlaybackRate_audio_playback_rate_t(
+ const media::audio::common::AudioPlaybackRate& aidl);
+ConversionResult<media::audio::common::AudioPlaybackRate>
+legacy2aidl_audio_playback_rate_t_AudioPlaybackRate(const audio_playback_rate_t& legacy);
+
+ConversionResult<audio_latency_mode_t>
+aidl2legacy_AudioLatencyMode_audio_latency_mode_t(media::audio::common::AudioLatencyMode aidl);
+ConversionResult<media::audio::common::AudioLatencyMode>
+legacy2aidl_audio_latency_mode_t_AudioLatencyMode(audio_latency_mode_t legacy);
+
+ConversionResult<audio_microphone_location_t>
+aidl2legacy_MicrophoneInfoLocation_audio_microphone_location_t(
+ media::audio::common::MicrophoneInfo::Location aidl);
+ConversionResult<media::audio::common::MicrophoneInfo::Location>
+legacy2aidl_audio_microphone_location_t_MicrophoneInfoLocation(audio_microphone_location_t legacy);
+
+ConversionResult<audio_microphone_group_t> aidl2legacy_int32_t_audio_microphone_group_t(
+ int32_t aidl);
+ConversionResult<int32_t> legacy2aidl_audio_microphone_group_t_int32_t(
+ audio_microphone_group_t legacy);
+
+ConversionResult<audio_microphone_directionality_t>
+aidl2legacy_MicrophoneInfoDirectionality_audio_microphone_directionality_t(
+ media::audio::common::MicrophoneInfo::Directionality aidl);
+ConversionResult<media::audio::common::MicrophoneInfo::Directionality>
+legacy2aidl_audio_microphone_directionality_t_MicrophoneInfoDirectionality(
+ audio_microphone_directionality_t legacy);
+
+ConversionResult<audio_microphone_coordinate>
+aidl2legacy_MicrophoneInfoCoordinate_audio_microphone_coordinate(
+ const media::audio::common::MicrophoneInfo::Coordinate& aidl);
+ConversionResult<media::audio::common::MicrophoneInfo::Coordinate>
+legacy2aidl_audio_microphone_coordinate_MicrophoneInfoCoordinate(
+ const audio_microphone_coordinate& legacy);
+
+ConversionResult<audio_microphone_channel_mapping_t>
+aidl2legacy_MicrophoneDynamicInfoChannelMapping_audio_microphone_channel_mapping_t(
+ media::audio::common::MicrophoneDynamicInfo::ChannelMapping aidl);
+ConversionResult<media::audio::common::MicrophoneDynamicInfo::ChannelMapping>
+legacy2aidl_audio_microphone_channel_mapping_t_MicrophoneDynamicInfoChannelMapping(
+ audio_microphone_channel_mapping_t legacy);
+
+ConversionResult<audio_microphone_characteristic_t>
+aidl2legacy_MicrophoneInfos_audio_microphone_characteristic_t(
+ const media::audio::common::MicrophoneInfo& aidlInfo,
+ const media::audio::common::MicrophoneDynamicInfo& aidlDynamic);
+::android::status_t
+legacy2aidl_audio_microphone_characteristic_t_MicrophoneInfos(
+ const audio_microphone_characteristic_t& legacy,
+ media::audio::common::MicrophoneInfo* aidlInfo,
+ media::audio::common::MicrophoneDynamicInfo* aidlDynamic);
+
+} // namespace android
+
+#if defined(BACKEND_NDK_IMPL)
+} // aidl
+#endif
+
+// (defined(BACKEND_NDK_IMPL) && !defined(AUDIO_AIDL_CONVERSION_AIDL_CONVERSION_CPP_NDK_NDK)) || \
+// (!defined(BACKEND_NDK_IMPL) && !defined(AUDIO_AIDL_CONVERSION_AIDL_CONVERSION_CPP_NDK_CPP))
+#endif
diff --git a/media/audioaidlconversion/include/media/AidlConversionCppNdk.h b/media/audioaidlconversion/include/media/AidlConversionCppNdk.h
index e1daf31..ea168a4 100644
--- a/media/audioaidlconversion/include/media/AidlConversionCppNdk.h
+++ b/media/audioaidlconversion/include/media/AidlConversionCppNdk.h
@@ -16,412 +16,19 @@
#pragma once
-#include <limits>
-#include <type_traits>
-#include <system/audio.h>
-
-/**
- * Can handle conversion between AIDL (both CPP and NDK backend) and legacy type.
- * Controlled by the cflags preprocessor in Android.bp.
- */
-#if defined(BACKEND_NDK)
-#define PREFIX(f) <aidl/f>
-#else
-#define PREFIX(f) <f>
-#endif
-
-#include PREFIX(android/media/audio/common/AudioChannelLayout.h)
-#include PREFIX(android/media/audio/common/AudioConfig.h)
-#include PREFIX(android/media/audio/common/AudioConfigBase.h)
-#include PREFIX(android/media/audio/common/AudioContentType.h)
-#include PREFIX(android/media/audio/common/AudioDeviceDescription.h)
-#include PREFIX(android/media/audio/common/AudioDualMonoMode.h)
-#include PREFIX(android/media/audio/common/AudioEncapsulationMetadataType.h)
-#include PREFIX(android/media/audio/common/AudioEncapsulationMode.h)
-#include PREFIX(android/media/audio/common/AudioEncapsulationType.h)
-#include PREFIX(android/media/audio/common/AudioFormatDescription.h)
-#include PREFIX(android/media/audio/common/AudioGain.h)
-#include PREFIX(android/media/audio/common/AudioGainConfig.h)
-#include PREFIX(android/media/audio/common/AudioGainMode.h)
-#include PREFIX(android/media/audio/common/AudioInputFlags.h)
-#include PREFIX(android/media/audio/common/AudioIoFlags.h)
-#include PREFIX(android/media/audio/common/AudioLatencyMode.h)
-#include PREFIX(android/media/audio/common/AudioMode.h)
-#include PREFIX(android/media/audio/common/AudioOffloadInfo.h)
-#include PREFIX(android/media/audio/common/AudioOutputFlags.h)
-#include PREFIX(android/media/audio/common/AudioPort.h)
-#include PREFIX(android/media/audio/common/AudioPortConfig.h)
-#include PREFIX(android/media/audio/common/AudioPortExt.h)
-#include PREFIX(android/media/audio/common/AudioPortMixExt.h)
-#include PREFIX(android/media/audio/common/AudioPlaybackRate.h)
-#include PREFIX(android/media/audio/common/AudioProfile.h)
-#include PREFIX(android/media/audio/common/AudioSource.h)
-#include PREFIX(android/media/audio/common/AudioStandard.h)
-#include PREFIX(android/media/audio/common/AudioUsage.h)
-#include PREFIX(android/media/audio/common/AudioUuid.h)
-#include PREFIX(android/media/audio/common/ExtraAudioDescriptor.h)
-#include PREFIX(android/media/audio/common/Int.h)
-#include PREFIX(android/media/audio/common/MicrophoneDynamicInfo.h)
-#include PREFIX(android/media/audio/common/MicrophoneInfo.h)
-#undef PREFIX
-
+// Since conversion functions use ConversionResult, pull it in here.
#include <media/AidlConversionUtil.h>
-#include <system/audio.h>
-#include <system/audio_effect.h>
-using ::android::String16;
-using ::android::String8;
-using ::android::status_t;
+// Include 'AidlConversionCppNdk-impl.h' once if 'BACKEND_NDK' is defined,
+// or if no 'BACKEND_*' is defined (C++ backend). Include it twice if
+// 'BACKEND_CPP_NDK' is defined: once with 'BACKEND_NDK_IMPL' defined, once without it.
-#if defined(BACKEND_NDK)
-namespace aidl {
+#if defined(BACKEND_CPP_NDK) || defined(BACKEND_NDK)
+#define BACKEND_NDK_IMPL
+#include <media/AidlConversionCppNdk-impl.h>
+#undef BACKEND_NDK_IMPL
#endif
-namespace android {
-
-// maxSize is the size of the C-string buffer (including the 0-terminator), NOT the max length of
-// the string.
-status_t aidl2legacy_string(std::string_view aidl, char* dest, size_t maxSize);
-ConversionResult<std::string> legacy2aidl_string(const char* legacy, size_t maxSize);
-
-ConversionResult<audio_module_handle_t> aidl2legacy_int32_t_audio_module_handle_t(int32_t aidl);
-ConversionResult<int32_t> legacy2aidl_audio_module_handle_t_int32_t(audio_module_handle_t legacy);
-
-ConversionResult<audio_io_handle_t> aidl2legacy_int32_t_audio_io_handle_t(int32_t aidl);
-ConversionResult<int32_t> legacy2aidl_audio_io_handle_t_int32_t(audio_io_handle_t legacy);
-
-ConversionResult<audio_port_handle_t> aidl2legacy_int32_t_audio_port_handle_t(int32_t aidl);
-ConversionResult<int32_t> legacy2aidl_audio_port_handle_t_int32_t(audio_port_handle_t legacy);
-
-ConversionResult<audio_patch_handle_t> aidl2legacy_int32_t_audio_patch_handle_t(int32_t aidl);
-ConversionResult<int32_t> legacy2aidl_audio_patch_handle_t_int32_t(audio_patch_handle_t legacy);
-
-ConversionResult<audio_unique_id_t> aidl2legacy_int32_t_audio_unique_id_t(int32_t aidl);
-ConversionResult<int32_t> legacy2aidl_audio_unique_id_t_int32_t(audio_unique_id_t legacy);
-
-ConversionResult<audio_hw_sync_t> aidl2legacy_int32_t_audio_hw_sync_t(int32_t aidl);
-ConversionResult<int32_t> legacy2aidl_audio_hw_sync_t_int32_t(audio_hw_sync_t legacy);
-
-ConversionResult<unsigned int> aidl2legacy_int32_t_config_mask(int32_t aidl);
-ConversionResult<int32_t> legacy2aidl_config_mask_int32_t(unsigned int legacy);
-
-ConversionResult<pid_t> aidl2legacy_int32_t_pid_t(int32_t aidl);
-ConversionResult<int32_t> legacy2aidl_pid_t_int32_t(pid_t legacy);
-
-ConversionResult<uid_t> aidl2legacy_int32_t_uid_t(int32_t aidl);
-ConversionResult<int32_t> legacy2aidl_uid_t_int32_t(uid_t legacy);
-
-ConversionResult<String8> aidl2legacy_string_view_String8(std::string_view aidl);
-ConversionResult<std::string> legacy2aidl_String8_string(const String8& legacy);
-
-ConversionResult<String16> aidl2legacy_string_view_String16(std::string_view aidl);
-ConversionResult<std::string> legacy2aidl_String16_string(const String16& legacy);
-
-ConversionResult<std::optional<String16>>
-aidl2legacy_optional_string_view_optional_String16(std::optional<std::string_view> aidl);
-ConversionResult<std::optional<std::string_view>>
-legacy2aidl_optional_String16_optional_string(std::optional<String16> legacy);
-
-ConversionResult<audio_channel_mask_t> aidl2legacy_AudioChannelLayout_audio_channel_mask_t(
- const media::audio::common::AudioChannelLayout& aidl, bool isInput);
-ConversionResult<media::audio::common::AudioChannelLayout>
-legacy2aidl_audio_channel_mask_t_AudioChannelLayout(audio_channel_mask_t legacy, bool isInput);
-
-audio_channel_mask_t aidl2legacy_AudioChannelLayout_layout_audio_channel_mask_t_bits(
- int aidlLayout, bool isInput);
-int legacy2aidl_audio_channel_mask_t_bits_AudioChannelLayout_layout(
- audio_channel_mask_t legacy, bool isInput);
-
-enum class AudioPortDirection {
- INPUT, OUTPUT
-};
-ConversionResult<AudioPortDirection> portDirection(audio_port_role_t role, audio_port_type_t type);
-ConversionResult<audio_port_role_t> portRole(AudioPortDirection direction, audio_port_type_t type);
-
-ConversionResult<audio_config_t>
-aidl2legacy_AudioConfig_audio_config_t(const media::audio::common::AudioConfig& aidl, bool isInput);
-ConversionResult<media::audio::common::AudioConfig>
-legacy2aidl_audio_config_t_AudioConfig(const audio_config_t& legacy, bool isInput);
-
-ConversionResult<audio_config_base_t>
-aidl2legacy_AudioConfigBase_audio_config_base_t(
- const media::audio::common::AudioConfigBase& aidl, bool isInput);
-ConversionResult<media::audio::common::AudioConfigBase>
-legacy2aidl_audio_config_base_t_AudioConfigBase(const audio_config_base_t& legacy, bool isInput);
-
-ConversionResult<audio_input_flags_t>
-aidl2legacy_AudioInputFlags_audio_input_flags_t(media::audio::common::AudioInputFlags aidl);
-ConversionResult<media::audio::common::AudioInputFlags>
-legacy2aidl_audio_input_flags_t_AudioInputFlags(audio_input_flags_t legacy);
-
-ConversionResult<audio_output_flags_t>
-aidl2legacy_AudioOutputFlags_audio_output_flags_t(media::audio::common::AudioOutputFlags aidl);
-ConversionResult<media::audio::common::AudioOutputFlags>
-legacy2aidl_audio_output_flags_t_AudioOutputFlags(audio_output_flags_t legacy);
-
-ConversionResult<audio_input_flags_t> aidl2legacy_int32_t_audio_input_flags_t_mask(
- int32_t aidl);
-ConversionResult<int32_t> legacy2aidl_audio_input_flags_t_int32_t_mask(
- audio_input_flags_t legacy);
-
-ConversionResult<audio_output_flags_t> aidl2legacy_int32_t_audio_output_flags_t_mask(
- int32_t aidl);
-ConversionResult<int32_t> legacy2aidl_audio_output_flags_t_int32_t_mask(
- audio_output_flags_t legacy);
-
-ConversionResult<audio_io_flags> aidl2legacy_AudioIoFlags_audio_io_flags(
- const media::audio::common::AudioIoFlags& aidl, bool isInput);
-ConversionResult<media::audio::common::AudioIoFlags> legacy2aidl_audio_io_flags_AudioIoFlags(
- const audio_io_flags& legacy, bool isInput);
-
-ConversionResult<audio_session_t> aidl2legacy_int32_t_audio_session_t(int32_t aidl);
-ConversionResult<int32_t> legacy2aidl_audio_session_t_int32_t(audio_session_t legacy);
-
-ConversionResult<audio_content_type_t>
-aidl2legacy_AudioContentType_audio_content_type_t(
- media::audio::common::AudioContentType aidl);
-ConversionResult<media::audio::common::AudioContentType>
-legacy2aidl_audio_content_type_t_AudioContentType(audio_content_type_t legacy);
-
-ConversionResult<audio_devices_t> aidl2legacy_AudioDeviceDescription_audio_devices_t(
- const media::audio::common::AudioDeviceDescription& aidl);
-ConversionResult<media::audio::common::AudioDeviceDescription>
-legacy2aidl_audio_devices_t_AudioDeviceDescription(audio_devices_t legacy);
-
-status_t aidl2legacy_AudioDevice_audio_device(
- const media::audio::common::AudioDevice& aidl, audio_devices_t* legacyType,
- char* legacyAddress);
-status_t aidl2legacy_AudioDevice_audio_device(
- const media::audio::common::AudioDevice& aidl, audio_devices_t* legacyType,
- String8* legacyAddress);
-status_t aidl2legacy_AudioDevice_audio_device(
- const media::audio::common::AudioDevice& aidl, audio_devices_t* legacyType,
- std::string* legacyAddress);
-
-ConversionResult<media::audio::common::AudioDevice> legacy2aidl_audio_device_AudioDevice(
- audio_devices_t legacyType, const char* legacyAddress);
-ConversionResult<media::audio::common::AudioDevice> legacy2aidl_audio_device_AudioDevice(
- audio_devices_t legacyType, const String8& legacyAddress);
-
-ConversionResult<audio_extra_audio_descriptor>
-aidl2legacy_ExtraAudioDescriptor_audio_extra_audio_descriptor(
- const media::audio::common::ExtraAudioDescriptor& aidl);
-
-ConversionResult<media::audio::common::ExtraAudioDescriptor>
-legacy2aidl_audio_extra_audio_descriptor_ExtraAudioDescriptor(
- const audio_extra_audio_descriptor& legacy);
-
-ConversionResult<audio_encapsulation_metadata_type_t>
-aidl2legacy_AudioEncapsulationMetadataType_audio_encapsulation_metadata_type_t(
- media::audio::common::AudioEncapsulationMetadataType aidl);
-ConversionResult<media::audio::common::AudioEncapsulationMetadataType>
-legacy2aidl_audio_encapsulation_metadata_type_t_AudioEncapsulationMetadataType(
- audio_encapsulation_metadata_type_t legacy);
-
-ConversionResult<uint32_t> aidl2legacy_AudioEncapsulationMetadataType_mask(int32_t aidl);
-ConversionResult<int32_t> legacy2aidl_AudioEncapsulationMetadataType_mask(uint32_t legacy);
-
-ConversionResult<audio_encapsulation_mode_t>
-aidl2legacy_AudioEncapsulationMode_audio_encapsulation_mode_t(
- media::audio::common::AudioEncapsulationMode aidl);
-ConversionResult<media::audio::common::AudioEncapsulationMode>
-legacy2aidl_audio_encapsulation_mode_t_AudioEncapsulationMode(audio_encapsulation_mode_t legacy);
-
-ConversionResult<uint32_t> aidl2legacy_AudioEncapsulationMode_mask(int32_t aidl);
-ConversionResult<int32_t> legacy2aidl_AudioEncapsulationMode_mask(uint32_t legacy);
-
-ConversionResult<audio_encapsulation_type_t>
-aidl2legacy_AudioEncapsulationType_audio_encapsulation_type_t(
- const media::audio::common::AudioEncapsulationType& aidl);
-ConversionResult<media::audio::common::AudioEncapsulationType>
-legacy2aidl_audio_encapsulation_type_t_AudioEncapsulationType(
- const audio_encapsulation_type_t& legacy);
-
-ConversionResult<audio_format_t> aidl2legacy_AudioFormatDescription_audio_format_t(
- const media::audio::common::AudioFormatDescription& aidl);
-ConversionResult<media::audio::common::AudioFormatDescription>
-legacy2aidl_audio_format_t_AudioFormatDescription(audio_format_t legacy);
-
-ConversionResult<audio_gain_mode_t>
-aidl2legacy_AudioGainMode_audio_gain_mode_t(media::audio::common::AudioGainMode aidl);
-ConversionResult<media::audio::common::AudioGainMode>
-legacy2aidl_audio_gain_mode_t_AudioGainMode(audio_gain_mode_t legacy);
-
-ConversionResult<audio_gain_mode_t> aidl2legacy_int32_t_audio_gain_mode_t_mask(int32_t aidl);
-ConversionResult<int32_t> legacy2aidl_audio_gain_mode_t_int32_t_mask(audio_gain_mode_t legacy);
-
-ConversionResult<audio_gain_config> aidl2legacy_AudioGainConfig_audio_gain_config(
- const media::audio::common::AudioGainConfig& aidl, bool isInput);
-ConversionResult<media::audio::common::AudioGainConfig>
-legacy2aidl_audio_gain_config_AudioGainConfig(const audio_gain_config& legacy, bool isInput);
-
-ConversionResult<audio_gain>
-aidl2legacy_AudioGain_audio_gain(const media::audio::common::AudioGain& aidl, bool isInput);
-ConversionResult<media::audio::common::AudioGain>
-legacy2aidl_audio_gain_AudioGain(const audio_gain& legacy, bool isInput);
-
-ConversionResult<audio_input_flags_t>
-aidl2legacy_AudioInputFlags_audio_input_flags_t(media::audio::common::AudioInputFlags aidl);
-ConversionResult<media::audio::common::AudioInputFlags>
-legacy2aidl_audio_input_flags_t_AudioInputFlags(audio_input_flags_t legacy);
-
-ConversionResult<audio_mode_t>
-aidl2legacy_AudioMode_audio_mode_t(media::audio::common::AudioMode aidl);
-ConversionResult<media::audio::common::AudioMode>
-legacy2aidl_audio_mode_t_AudioMode(audio_mode_t legacy);
-
-ConversionResult<audio_offload_info_t>
-aidl2legacy_AudioOffloadInfo_audio_offload_info_t(
- const media::audio::common::AudioOffloadInfo& aidl);
-ConversionResult<media::audio::common::AudioOffloadInfo>
-legacy2aidl_audio_offload_info_t_AudioOffloadInfo(const audio_offload_info_t& legacy);
-
-ConversionResult<audio_output_flags_t>
-aidl2legacy_AudioOutputFlags_audio_output_flags_t(media::audio::common::AudioOutputFlags aidl);
-ConversionResult<media::audio::common::AudioOutputFlags>
-legacy2aidl_audio_output_flags_t_AudioOutputFlags(audio_output_flags_t legacy);
-
-// This type is unnamed in the original definition, thus we name it here.
-using audio_port_config_mix_ext_usecase = decltype(audio_port_config_mix_ext::usecase);
-ConversionResult<audio_port_config_mix_ext_usecase>
-aidl2legacy_AudioPortMixExtUseCase_audio_port_config_mix_ext_usecase(
- const media::audio::common::AudioPortMixExtUseCase& aidl, bool isInput);
-ConversionResult<media::audio::common::AudioPortMixExtUseCase>
-legacy2aidl_audio_port_config_mix_ext_usecase_AudioPortMixExtUseCase(
- const audio_port_config_mix_ext_usecase& legacy, bool isInput);
-
-ConversionResult<audio_port_config_device_ext>
-aidl2legacy_AudioPortDeviceExt_audio_port_config_device_ext(
- const media::audio::common::AudioPortDeviceExt& aidl);
-ConversionResult<media::audio::common::AudioPortDeviceExt>
- legacy2aidl_audio_port_config_device_ext_AudioPortDeviceExt(
- const audio_port_config_device_ext& legacy);
-
-status_t aidl2legacy_AudioPortConfig_audio_port_config(
- const media::audio::common::AudioPortConfig& aidl, bool isInput,
- audio_port_config* legacy, int32_t* portId);
-ConversionResult<media::audio::common::AudioPortConfig>
-legacy2aidl_audio_port_config_AudioPortConfig(
- const audio_port_config& legacy, bool isInput, int32_t portId);
-
-ConversionResult<audio_port_mix_ext> aidl2legacy_AudioPortMixExt_audio_port_mix_ext(
- const media::audio::common::AudioPortMixExt& aidl);
-ConversionResult<media::audio::common::AudioPortMixExt>
-legacy2aidl_audio_port_mix_ext_AudioPortMixExt(
- const audio_port_mix_ext& legacy);
-
-ConversionResult<audio_port_device_ext>
-aidl2legacy_AudioPortDeviceExt_audio_port_device_ext(
- const media::audio::common::AudioPortDeviceExt& aidl);
-ConversionResult<media::audio::common::AudioPortDeviceExt>
-legacy2aidl_audio_port_device_ext_AudioPortDeviceExt(
- const audio_port_device_ext& legacy);
-
-ConversionResult<audio_port_v7>
-aidl2legacy_AudioPort_audio_port_v7(
- const media::audio::common::AudioPort& aidl, bool isInput);
-ConversionResult<media::audio::common::AudioPort>
-legacy2aidl_audio_port_v7_AudioPort(const audio_port_v7& legacy, bool isInput);
-
-ConversionResult<audio_profile> aidl2legacy_AudioProfile_audio_profile(
- const media::audio::common::AudioProfile& aidl, bool isInput);
-ConversionResult<media::audio::common::AudioProfile> legacy2aidl_audio_profile_AudioProfile(
- const audio_profile& legacy, bool isInput);
-
-ConversionResult<audio_standard_t> aidl2legacy_AudioStandard_audio_standard_t(
- media::audio::common::AudioStandard aidl);
-ConversionResult<media::audio::common::AudioStandard> legacy2aidl_audio_standard_t_AudioStandard(
- audio_standard_t legacy);
-
-ConversionResult<audio_source_t> aidl2legacy_AudioSource_audio_source_t(
- media::audio::common::AudioSource aidl);
-ConversionResult<media::audio::common::AudioSource> legacy2aidl_audio_source_t_AudioSource(
- audio_source_t legacy);
-
-ConversionResult<audio_usage_t> aidl2legacy_AudioUsage_audio_usage_t(
- media::audio::common::AudioUsage aidl);
-ConversionResult<media::audio::common::AudioUsage> legacy2aidl_audio_usage_t_AudioUsage(
- audio_usage_t legacy);
-
-ConversionResult<audio_uuid_t> aidl2legacy_AudioUuid_audio_uuid_t(
- const media::audio::common::AudioUuid &aidl);
-ConversionResult<media::audio::common::AudioUuid> legacy2aidl_audio_uuid_t_AudioUuid(
- const audio_uuid_t& legacy);
-
-ConversionResult<audio_dual_mono_mode_t>
-aidl2legacy_AudioDualMonoMode_audio_dual_mono_mode_t(media::audio::common::AudioDualMonoMode aidl);
-ConversionResult<media::audio::common::AudioDualMonoMode>
-legacy2aidl_audio_dual_mono_mode_t_AudioDualMonoMode(audio_dual_mono_mode_t legacy);
-
-ConversionResult<audio_timestretch_fallback_mode_t>
-aidl2legacy_TimestretchFallbackMode_audio_timestretch_fallback_mode_t(
- media::audio::common::AudioPlaybackRate::TimestretchFallbackMode aidl);
-ConversionResult<media::audio::common::AudioPlaybackRate::TimestretchFallbackMode>
-legacy2aidl_audio_timestretch_fallback_mode_t_TimestretchFallbackMode(
- audio_timestretch_fallback_mode_t legacy);
-
-ConversionResult<audio_timestretch_stretch_mode_t>
-aidl2legacy_TimestretchMode_audio_timestretch_stretch_mode_t(
- media::audio::common::AudioPlaybackRate::TimestretchMode aidl);
-ConversionResult<media::audio::common::AudioPlaybackRate::TimestretchMode>
-legacy2aidl_audio_timestretch_stretch_mode_t_TimestretchMode(
- audio_timestretch_stretch_mode_t legacy);
-
-ConversionResult<audio_playback_rate_t>
-aidl2legacy_AudioPlaybackRate_audio_playback_rate_t(
- const media::audio::common::AudioPlaybackRate& aidl);
-ConversionResult<media::audio::common::AudioPlaybackRate>
-legacy2aidl_audio_playback_rate_t_AudioPlaybackRate(const audio_playback_rate_t& legacy);
-
-ConversionResult<audio_latency_mode_t>
-aidl2legacy_AudioLatencyMode_audio_latency_mode_t(media::audio::common::AudioLatencyMode aidl);
-ConversionResult<media::audio::common::AudioLatencyMode>
-legacy2aidl_audio_latency_mode_t_AudioLatencyMode(audio_latency_mode_t legacy);
-
-ConversionResult<audio_microphone_location_t>
-aidl2legacy_MicrophoneInfoLocation_audio_microphone_location_t(
- media::audio::common::MicrophoneInfo::Location aidl);
-ConversionResult<media::audio::common::MicrophoneInfo::Location>
-legacy2aidl_audio_microphone_location_t_MicrophoneInfoLocation(audio_microphone_location_t legacy);
-
-ConversionResult<audio_microphone_group_t> aidl2legacy_int32_t_audio_microphone_group_t(
- int32_t aidl);
-ConversionResult<int32_t> legacy2aidl_audio_microphone_group_t_int32_t(
- audio_microphone_group_t legacy);
-
-ConversionResult<audio_microphone_directionality_t>
-aidl2legacy_MicrophoneInfoDirectionality_audio_microphone_directionality_t(
- media::audio::common::MicrophoneInfo::Directionality aidl);
-ConversionResult<media::audio::common::MicrophoneInfo::Directionality>
-legacy2aidl_audio_microphone_directionality_t_MicrophoneInfoDirectionality(
- audio_microphone_directionality_t legacy);
-
-ConversionResult<audio_microphone_coordinate>
-aidl2legacy_MicrophoneInfoCoordinate_audio_microphone_coordinate(
- const media::audio::common::MicrophoneInfo::Coordinate& aidl);
-ConversionResult<media::audio::common::MicrophoneInfo::Coordinate>
-legacy2aidl_audio_microphone_coordinate_MicrophoneInfoCoordinate(
- const audio_microphone_coordinate& legacy);
-
-ConversionResult<audio_microphone_channel_mapping_t>
-aidl2legacy_MicrophoneDynamicInfoChannelMapping_audio_microphone_channel_mapping_t(
- media::audio::common::MicrophoneDynamicInfo::ChannelMapping aidl);
-ConversionResult<media::audio::common::MicrophoneDynamicInfo::ChannelMapping>
-legacy2aidl_audio_microphone_channel_mapping_t_MicrophoneDynamicInfoChannelMapping(
- audio_microphone_channel_mapping_t legacy);
-
-ConversionResult<audio_microphone_characteristic_t>
-aidl2legacy_MicrophoneInfos_audio_microphone_characteristic_t(
- const media::audio::common::MicrophoneInfo& aidlInfo,
- const media::audio::common::MicrophoneDynamicInfo& aidlDynamic);
-status_t
-legacy2aidl_audio_microphone_characteristic_t_MicrophoneInfos(
- const audio_microphone_characteristic_t& legacy,
- media::audio::common::MicrophoneInfo* aidlInfo,
- media::audio::common::MicrophoneDynamicInfo* aidlDynamic);
-
-} // namespace android
-
-#if defined(BACKEND_NDK)
-} // aidl
+#if defined(BACKEND_CPP_NDK) || !defined(BACKEND_NDK)
+#include <media/AidlConversionCppNdk-impl.h>
#endif
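A minimal sketch of how a client translation unit ends up with one backend or the other; the 'BACKEND_*' macro is normally supplied through the library's cflags, so the inline #define below is purely illustrative:

    #define BACKEND_NDK   // normally a cflag from the build file; defined inline here only for illustration
    #include <media/AidlConversionCppNdk.h>
    #include <system/audio.h>

    // With 'BACKEND_NDK' the conversion helpers are declared under ::aidl::android, e.g.
    //   ::aidl::android::legacy2aidl_audio_usage_t_AudioUsage(AUDIO_USAGE_MEDIA);
    // With no 'BACKEND_*' define (C++ backend) the same helpers live under ::android.
    // With 'BACKEND_CPP_NDK' both sets of declarations are made visible.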
diff --git a/media/audioaidlconversion/include/media/AidlConversionEffect.h b/media/audioaidlconversion/include/media/AidlConversionEffect.h
index 3aa9ac2..5e245a7 100644
--- a/media/audioaidlconversion/include/media/AidlConversionEffect.h
+++ b/media/audioaidlconversion/include/media/AidlConversionEffect.h
@@ -46,19 +46,39 @@
return VALUE_OR_RETURN((unionGetField<T, field>(spec)));
}
-#define GET_PARAMETER_SPECIFIC_FIELD(u, specific, tag, field, fieldType) \
- getParameterSpecificField<std::decay_t<decltype(u)>, specific, \
- aidl::android::hardware::audio::effect::Parameter::Specific::tag, \
- specific::field, fieldType>(u)
+#define GET_PARAMETER_SPECIFIC_FIELD(_u, _effect, _tag, _field, _fieldType) \
+ getParameterSpecificField<std::decay_t<decltype(_u)>, _effect, \
+ aidl::android::hardware::audio::effect::Parameter::Specific::_tag, \
+ _effect::_field, _fieldType>(_u)
-#define MAKE_SPECIFIC_PARAMETER(spec, tag, field, value) \
- UNION_MAKE(aidl::android::hardware::audio::effect::Parameter, specific, \
- UNION_MAKE(aidl::android::hardware::audio::effect::Parameter::Specific, tag, \
- UNION_MAKE(spec, field, value)))
+#define MAKE_SPECIFIC_PARAMETER(_spec, _tag, _field, _value) \
+ UNION_MAKE(aidl::android::hardware::audio::effect::Parameter, specific, \
+ UNION_MAKE(aidl::android::hardware::audio::effect::Parameter::Specific, _tag, \
+ UNION_MAKE(_spec, _field, _value)))
-#define MAKE_SPECIFIC_PARAMETER_ID(spec, tag, field) \
- UNION_MAKE(aidl::android::hardware::audio::effect::Parameter::Id, tag, \
- UNION_MAKE(spec::Id, commonTag, field))
+#define MAKE_SPECIFIC_PARAMETER_ID(_spec, _tag, _field) \
+ UNION_MAKE(aidl::android::hardware::audio::effect::Parameter::Id, _tag, \
+ UNION_MAKE(_spec::Id, commonTag, _field))
+
+#define MAKE_EXTENSION_PARAMETER_ID(_effect, _tag, _field) \
+ UNION_MAKE(aidl::android::hardware::audio::effect::Parameter::Id, _tag, \
+ UNION_MAKE(_effect::Id, vendorExtensionTag, _field))
+
+#define VENDOR_EXTENSION_GET_AND_RETURN(_effect, _tag, _param) \
+ { \
+ aidl::android::hardware::audio::effect::VendorExtension _extId = VALUE_OR_RETURN_STATUS( \
+ aidl::android::legacy2aidl_EffectParameterReader_Param_VendorExtension(_param)); \
+ aidl::android::hardware::audio::effect::Parameter::Id _id = \
+ MAKE_EXTENSION_PARAMETER_ID(_effect, _tag##Tag, _extId); \
+ aidl::android::hardware::audio::effect::Parameter _aidlParam; \
+ RETURN_STATUS_IF_ERROR(statusTFromBinderStatus(mEffect->getParameter(_id, &_aidlParam))); \
+ aidl::android::hardware::audio::effect::VendorExtension _ext = \
+ VALUE_OR_RETURN_STATUS(GET_PARAMETER_SPECIFIC_FIELD( \
+ _aidlParam, _effect, _tag, _effect::vendor, VendorExtension)); \
+ return VALUE_OR_RETURN_STATUS( \
+ aidl::android::aidl2legacy_ParameterExtension_EffectParameterWriter(_aidlParam, \
+ _param)); \
+ }
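For orientation, the renamed macro arguments compose as in this rough expansion sketch; the effect union 'Eq', its Parameter::Specific tag 'eq', and field 'preset' are hypothetical placeholders, and namespaces are elided:

    // MAKE_SPECIFIC_PARAMETER(Eq, eq, preset, 1) expands via UNION_MAKE to roughly:
    Parameter::make<Parameter::Tag::specific>(
            Parameter::Specific::make<Parameter::Specific::Tag::eq>(
                    Eq::make<Eq::Tag::preset>(1)));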
ConversionResult<uint32_t> aidl2legacy_Flags_Type_uint32(
::aidl::android::hardware::audio::effect::Flags::Type type);
@@ -140,11 +160,23 @@
ConversionResult<::aidl::android::hardware::audio::effect::Parameter>
legacy2aidl_EffectParameterReader_ParameterExtension(
::android::effect::utils::EffectParamReader& param);
-ConversionResult<std::vector<uint8_t>> aidl2legacy_ParameterExtension_vector_uint8(
- const ::aidl::android::hardware::audio::effect::Parameter& legacy);
ConversionResult<::android::status_t> aidl2legacy_ParameterExtension_EffectParameterWriter(
const ::aidl::android::hardware::audio::effect::Parameter& aidl,
::android::effect::utils::EffectParamWriter& legacy);
+ConversionResult<::aidl::android::hardware::audio::effect::VendorExtension>
+legacy2aidl_EffectParameterReader_Param_VendorExtension(
+ ::android::effect::utils::EffectParamReader& param);
+ConversionResult<::aidl::android::hardware::audio::effect::VendorExtension>
+legacy2aidl_EffectParameterReader_Data_VendorExtension(
+ ::android::effect::utils::EffectParamReader& param);
+
+ConversionResult<::android::status_t> aidl2legacy_VendorExtension_EffectParameterWriter_Data(
+ ::android::effect::utils::EffectParamWriter& param,
+ ::aidl::android::hardware::audio::effect::VendorExtension ext);
+ConversionResult<::aidl::android::hardware::audio::effect::Parameter>
+legacy2aidl_EffectParameterReader_ParameterExtension(
+ ::android::effect::utils::EffectParamReader& param);
+
} // namespace android
} // namespace aidl
diff --git a/media/audioaidlconversion/include/media/AidlConversionNdk.h b/media/audioaidlconversion/include/media/AidlConversionNdk.h
index 98a7d41..813a728 100644
--- a/media/audioaidlconversion/include/media/AidlConversionNdk.h
+++ b/media/audioaidlconversion/include/media/AidlConversionNdk.h
@@ -16,25 +16,45 @@
#pragma once
-#include <android/binder_auto_utils.h>
-#include <android/binder_manager.h>
-#include <android/binder_process.h>
-
/**
- * Can only handle conversion between AIDL (NDK backend) and legacy type.
+ * Can only handle conversion between AIDL (NDK backend) and legacy types.
*/
+
+#include <string>
+#include <vector>
+
#include <hardware/audio_effect.h>
-#include <media/AidlConversionUtil.h>
#include <system/audio_effect.h>
+
+#include <aidl/android/hardware/audio/common/PlaybackTrackMetadata.h>
+#include <aidl/android/hardware/audio/common/RecordTrackMetadata.h>
#include <aidl/android/media/audio/common/AudioConfig.h>
+#include <media/AidlConversionUtil.h>
namespace aidl {
namespace android {
-ConversionResult<buffer_config_t> aidl2legacy_AudioConfigBase_buffer_config_t(
- const media::audio::common::AudioConfigBase& aidl, bool isInput);
-ConversionResult<media::audio::common::AudioConfigBase> legacy2aidl_buffer_config_t_AudioConfigBase(
+ConversionResult<buffer_config_t> aidl2legacy_AudioConfig_buffer_config_t(
+ const media::audio::common::AudioConfig& aidl, bool isInput);
+ConversionResult<media::audio::common::AudioConfig> legacy2aidl_buffer_config_t_AudioConfig(
const buffer_config_t& legacy, bool isInput);
+::android::status_t aidl2legacy_AudioAttributesTags(
+ const std::vector<std::string>& aidl, char* legacy);
+ConversionResult<std::vector<std::string>> legacy2aidl_AudioAttributesTags(const char* legacy);
+
+ConversionResult<playback_track_metadata_v7>
+aidl2legacy_PlaybackTrackMetadata_playback_track_metadata_v7(
+ const hardware::audio::common::PlaybackTrackMetadata& aidl);
+ConversionResult<hardware::audio::common::PlaybackTrackMetadata>
+legacy2aidl_playback_track_metadata_v7_PlaybackTrackMetadata(
+ const playback_track_metadata_v7& legacy);
+
+ConversionResult<record_track_metadata_v7>
+aidl2legacy_RecordTrackMetadata_record_track_metadata_v7(
+ const hardware::audio::common::RecordTrackMetadata& aidl);
+ConversionResult<hardware::audio::common::RecordTrackMetadata>
+legacy2aidl_record_track_metadata_v7_RecordTrackMetadata(const record_track_metadata_v7& legacy);
+
} // namespace android
} // namespace aidl
diff --git a/media/audioaidlconversion/include/media/AidlConversionUtil-impl.h b/media/audioaidlconversion/include/media/AidlConversionUtil-impl.h
new file mode 100644
index 0000000..b179cbb
--- /dev/null
+++ b/media/audioaidlconversion/include/media/AidlConversionUtil-impl.h
@@ -0,0 +1,438 @@
+/*
+ * Copyright (C) 2023 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+// WARNING: This file is intended for multiple inclusion, one time
+// with BACKEND_NDK_IMPL defined, one time without it.
+// Do not include directly, use 'AidlConversionUtil.h'.
+#if (defined(BACKEND_NDK_IMPL) && !defined(AUDIO_AIDL_CONVERSION_AIDL_CONVERSION_UTIL_NDK)) || \
+ (!defined(BACKEND_NDK_IMPL) && !defined(AUDIO_AIDL_CONVERSION_AIDL_CONVERSION_UTIL_CPP))
+#if defined(BACKEND_NDK_IMPL)
+#define AUDIO_AIDL_CONVERSION_AIDL_CONVERSION_UTIL_NDK
+#else
+#define AUDIO_AIDL_CONVERSION_AIDL_CONVERSION_UTIL_CPP
+#endif // BACKEND_NDK_IMPL
+
+#include <limits>
+#include <type_traits>
+#include <utility>
+
+#include <android-base/expected.h>
+#include <binder/Status.h>
+
+#if defined(BACKEND_NDK_IMPL)
+#include <android/binder_auto_utils.h>
+#include <android/binder_enums.h>
+#include <android/binder_status.h>
+
+namespace aidl {
+#else
+#include <binder/Enums.h>
+#endif // BACKEND_NDK_IMPL
+namespace android {
+
+#if defined(BACKEND_NDK_IMPL)
+// This adds `::aidl::android::ConversionResult` for convenience.
+// Otherwise, it would be required to write `::android::ConversionResult` everywhere.
+template <typename T>
+using ConversionResult = ::android::ConversionResult<T>;
+#endif // BACKEND_NDK_IMPL
+
+/**
+ * A generic template to safely cast between integral types, respecting limits of the destination
+ * type.
+ */
+template<typename To, typename From>
+ConversionResult<To> convertIntegral(From from) {
+ // Special handling is required for signed vs. unsigned comparisons, since otherwise the
+ // signed value may be converted to unsigned and produce wrong results.
+ if constexpr (std::is_signed_v<From> && !std::is_signed_v<To>) {
+ if (from < 0 || from > std::numeric_limits<To>::max()) {
+ return ::android::base::unexpected(::android::BAD_VALUE);
+ }
+ } else if constexpr (std::is_signed_v<To> && !std::is_signed_v<From>) {
+ if (from > std::numeric_limits<To>::max()) {
+ return ::android::base::unexpected(::android::BAD_VALUE);
+ }
+ } else /* constexpr */ {
+ if (from < std::numeric_limits<To>::min() || from > std::numeric_limits<To>::max()) {
+ return ::android::base::unexpected(::android::BAD_VALUE);
+ }
+ }
+ return static_cast<To>(from);
+}
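As a usage sketch (the wrapper name and the choice of uint16_t are illustrative only):

    ConversionResult<uint16_t> sampleCountFromAidl(int32_t aidl) {
        // Yields ::android::BAD_VALUE when 'aidl' is negative or exceeds 65535,
        // otherwise the value narrowed to uint16_t.
        return convertIntegral<uint16_t>(aidl);
    }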
+
+/**
+ * A generic template to safely cast between types that are intended to be the same size
+ * but are interpreted differently.
+ */
+template<typename To, typename From>
+ConversionResult<To> convertReinterpret(From from) {
+ static_assert(sizeof(From) == sizeof(To));
+ return static_cast<To>(from);
+}
+
+/**
+ * A generic template that helps convert containers of convertible types, using iterators.
+ */
+template<typename InputIterator, typename OutputIterator, typename Func>
+::android::status_t convertRange(InputIterator start,
+ InputIterator end,
+ OutputIterator out,
+ const Func& itemConversion) {
+ for (InputIterator iter = start; iter != end; ++iter, ++out) {
+ *out = VALUE_OR_RETURN_STATUS(itemConversion(*iter));
+ }
+ return ::android::OK;
+}
+
+/**
+ * A generic template that helps convert containers of convertible types, using iterators.
+ * Converts at most 'limit' items; any remaining input items are ignored.
+ */
+template<typename InputIterator, typename OutputIterator, typename Func>
+::android::status_t convertRangeWithLimit(InputIterator start,
+ InputIterator end,
+ OutputIterator out,
+ const Func& itemConversion,
+ const size_t limit) {
+ InputIterator last = end;
+ if (end - start > limit) {
+ last = start + limit;
+ }
+ for (InputIterator iter = start; (iter != last); ++iter, ++out) {
+ *out = VALUE_OR_RETURN_STATUS(itemConversion(*iter));
+ }
+ return ::android::OK;
+}
+
+/**
+ * A generic template that helps convert containers of convertible types.
+ */
+template<typename OutputContainer, typename InputContainer, typename Func>
+ConversionResult<OutputContainer>
+convertContainer(const InputContainer& input, const Func& itemConversion) {
+ OutputContainer output;
+ auto ins = std::inserter(output, output.begin());
+ for (const auto& item : input) {
+ *ins = VALUE_OR_RETURN(itemConversion(item));
+ }
+ return output;
+}
+
+/**
+ * A generic template that helps convert containers of convertible types
+ * using an item conversion function with an additional parameter.
+ */
+template<typename OutputContainer, typename InputContainer, typename Func, typename Parameter>
+ConversionResult<OutputContainer>
+convertContainer(const InputContainer& input, const Func& itemConversion, const Parameter& param) {
+ OutputContainer output;
+ auto ins = std::inserter(output, output.begin());
+ for (const auto& item : input) {
+ *ins = VALUE_OR_RETURN(itemConversion(item, param));
+ }
+ return output;
+}
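A usage sketch for the container helper; the wrapper name is hypothetical, and aidl2legacy_int32_t_audio_port_handle_t is one of the conversions declared in AidlConversionCppNdk:

    ConversionResult<std::vector<audio_port_handle_t>> portHandlesFromAidl(
            const std::vector<int32_t>& aidl) {
        // Converts element by element; the first failing item aborts the whole conversion.
        return convertContainer<std::vector<audio_port_handle_t>>(
                aidl, aidl2legacy_int32_t_audio_port_handle_t);
    }

The second overload additionally forwards a fixed argument (for example 'isInput') to each item conversion.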
+
+/**
+ * A generic template that helps to "zip" two input containers of the same size
+ * into a single output container of converted types. The conversion function must
+ * thus accept two arguments.
+ */
+template<typename OutputContainer, typename InputContainer1,
+ typename InputContainer2, typename Func>
+ConversionResult<OutputContainer>
+convertContainers(const InputContainer1& input1, const InputContainer2& input2,
+ const Func& itemConversion) {
+ auto iter2 = input2.begin();
+ OutputContainer output;
+ auto ins = std::inserter(output, output.begin());
+ for (const auto& item1 : input1) {
+ RETURN_IF_ERROR(iter2 != input2.end() ? ::android::OK : ::android::BAD_VALUE);
+ *ins = VALUE_OR_RETURN(itemConversion(item1, *iter2++));
+ }
+ return output;
+}
+
+/**
+ * A generic template that helps to "unzip" a per-element conversion into
+ * a pair of elements into a pair of containers. The conversion function
+ * must emit a pair of elements.
+ */
+template<typename OutputContainer1, typename OutputContainer2,
+ typename InputContainer, typename Func>
+ConversionResult<std::pair<OutputContainer1, OutputContainer2>>
+convertContainerSplit(const InputContainer& input, const Func& itemConversion) {
+ OutputContainer1 output1;
+ OutputContainer2 output2;
+ auto ins1 = std::inserter(output1, output1.begin());
+ auto ins2 = std::inserter(output2, output2.begin());
+ for (const auto& item : input) {
+ auto out_pair = VALUE_OR_RETURN(itemConversion(item));
+ *ins1 = out_pair.first;
+ *ins2 = out_pair.second;
+ }
+ return std::make_pair(output1, output2);
+}
+
+////////////////////////////////////////////////////////////////////////////////////////////////////
+// The code below establishes:
+// IntegralTypeOf<T>, which works for either integral types (in which case it evaluates to T), or
+// enum types (in which case it evaluates to std::underlying_type_t<T>).
+
+template<typename T, typename = std::enable_if_t<std::is_integral_v<T> || std::is_enum_v<T>>>
+struct IntegralTypeOfStruct {
+ using Type = T;
+};
+
+template<typename T>
+struct IntegralTypeOfStruct<T, std::enable_if_t<std::is_enum_v<T>>> {
+ using Type = std::underlying_type_t<T>;
+};
+
+template<typename T>
+using IntegralTypeOf = typename IntegralTypeOfStruct<T>::Type;
+
+////////////////////////////////////////////////////////////////////////////////////////////////////
+// Utilities for handling bitmasks.
+
+template<typename Enum>
+Enum indexToEnum_index(int index) {
+ static_assert(std::is_enum_v<Enum> || std::is_integral_v<Enum>);
+ return static_cast<Enum>(index);
+}
+
+template<typename Enum>
+Enum indexToEnum_bitmask(int index) {
+ static_assert(std::is_enum_v<Enum> || std::is_integral_v<Enum>);
+ return static_cast<Enum>(1 << index);
+}
+
+template<typename Mask, typename Enum>
+Mask enumToMask_bitmask(Enum e) {
+ static_assert(std::is_enum_v<Enum> || std::is_integral_v<Enum>);
+ static_assert(std::is_enum_v<Mask> || std::is_integral_v<Mask>);
+ return static_cast<Mask>(e);
+}
+
+template<typename Mask, typename Enum>
+Mask enumToMask_index(Enum e) {
+ static_assert(std::is_enum_v<Enum> || std::is_integral_v<Enum>);
+ static_assert(std::is_enum_v<Mask> || std::is_integral_v<Mask>);
+ return static_cast<Mask>(static_cast<std::make_unsigned_t<IntegralTypeOf<Mask>>>(1)
+ << static_cast<int>(e));
+}
+
+template<typename DestMask, typename SrcMask, typename DestEnum, typename SrcEnum>
+ConversionResult<DestMask> convertBitmask(
+ SrcMask src, const std::function<ConversionResult<DestEnum>(SrcEnum)>& enumConversion,
+ const std::function<SrcEnum(int)>& srcIndexToEnum,
+ const std::function<DestMask(DestEnum)>& destEnumToMask) {
+ using UnsignedDestMask = std::make_unsigned_t<IntegralTypeOf<DestMask>>;
+ using UnsignedSrcMask = std::make_unsigned_t<IntegralTypeOf<SrcMask>>;
+
+ UnsignedDestMask dest = static_cast<UnsignedDestMask>(0);
+ UnsignedSrcMask usrc = static_cast<UnsignedSrcMask>(src);
+
+ int srcBitIndex = 0;
+ while (usrc != 0) {
+ if (usrc & 1) {
+ SrcEnum srcEnum = srcIndexToEnum(srcBitIndex);
+ DestEnum destEnum = VALUE_OR_RETURN(enumConversion(srcEnum));
+ DestMask destMask = destEnumToMask(destEnum);
+ dest |= destMask;
+ }
+ ++srcBitIndex;
+ usrc >>= 1;
+ }
+ return static_cast<DestMask>(dest);
+}
+
+template<typename Mask, typename Enum>
+bool bitmaskIsSet(Mask mask, Enum index) {
+ return (mask & enumToMask_index<Mask, Enum>(index)) != 0;
+}
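One plausible composition of these helpers, using the AudioGainMode conversion declared in AidlConversionCppNdk (the wrapper name is illustrative):

    ConversionResult<audio_gain_mode_t> gainModeMaskFromAidl(int32_t aidl) {
        using media::audio::common::AudioGainMode;
        // Walk the set bits of 'aidl' (one bit per AudioGainMode index), convert each
        // enum value, and OR the corresponding legacy bit into the result.
        return convertBitmask<audio_gain_mode_t, int32_t, audio_gain_mode_t, AudioGainMode>(
                aidl, aidl2legacy_AudioGainMode_audio_gain_mode_t,
                indexToEnum_index<AudioGainMode>,
                enumToMask_bitmask<audio_gain_mode_t, audio_gain_mode_t>);
    }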
+
+////////////////////////////////////////////////////////////////////////////////////////////////////
+// Utilities for working with AIDL unions.
+// UNION_GET(obj, fieldname) returns a ConversionResult<T> containing either the strongly-typed
+// value of the respective field, or ::android::BAD_VALUE if the union is not set to the requested
+// field.
+// UNION_SET(obj, fieldname, value) sets the requested field to the given value.
+
+template<typename T, typename T::Tag tag>
+using UnionFieldType = std::decay_t<decltype(std::declval<T>().template get<tag>())>;
+
+template<typename T, typename T::Tag tag>
+ConversionResult<UnionFieldType<T, tag>> unionGetField(const T& u) {
+ if (u.getTag() != tag) {
+ return ::android::base::unexpected(::android::BAD_VALUE);
+ }
+ return u.template get<tag>();
+}
+
+#define UNION_GET(u, field) \
+ unionGetField<std::decay_t<decltype(u)>, std::decay_t<decltype(u)>::Tag::field>(u)
+
+#define UNION_SET(u, field, value) \
+ (u).set<std::decay_t<decltype(u)>::Tag::field>(value)
+
+#define UNION_MAKE(u, field, value) u::make<u::Tag::field>(value)
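For a hypothetical AIDL union 'U' holding an int field 'level', these expand roughly as:

    // UNION_SET(u, level, 5)   ->  u.set<U::Tag::level>(5)
    // UNION_GET(u, level)      ->  ConversionResult with u.get<U::Tag::level>(), or
    //                              ::android::BAD_VALUE if 'u' holds a different field
    // UNION_MAKE(U, level, 5)  ->  U::make<U::Tag::level>(5)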
+
+namespace aidl_utils {
+
+/**
+ * Return true if the value is valid for the AIDL enumeration.
+ */
+template <typename T>
+bool isValidEnum(T value) {
+#if defined(BACKEND_NDK_IMPL)
+ constexpr ndk::enum_range<T> er{};
+#else
+ constexpr ::android::enum_range<T> er{};
+#endif
+ return std::find(er.begin(), er.end(), value) != er.end();
+}
+
+// T is a "container" of enum binder types with a toString().
+template <typename T>
+std::string enumsToString(const T& t) {
+ std::string s;
+ for (const auto item : t) {
+ if (s.empty()) {
+ s = toString(item);
+ } else {
+ s.append("|").append(toString(item));
+ }
+ }
+ return s;
+}
+
+/**
+ * Return the equivalent Android ::android::status_t from a binder exception code.
+ *
+ * Generally one should use statusTFromBinderStatus() instead.
+ *
+ * Exception codes can be generated from a remote Java service exception; this translates
+ * them for use on the native side.
+ *
+ * Note: for EX_TRANSACTION_FAILED and EX_SERVICE_SPECIFIC a more detailed error code
+ * can be found from transactionError() or serviceSpecificErrorCode().
+ */
+static inline ::android::status_t statusTFromExceptionCode(int32_t exceptionCode) {
+ using namespace ::android::binder;
+ switch (exceptionCode) {
+ case Status::EX_NONE:
+ return ::android::OK;
+ case Status::EX_SECURITY: // Java SecurityException, rethrows locally in Java
+ return ::android::PERMISSION_DENIED;
+ case Status::EX_BAD_PARCELABLE: // Java BadParcelableException, rethrows in Java
+ case Status::EX_ILLEGAL_ARGUMENT: // Java IllegalArgumentException, rethrows in Java
+ case Status::EX_NULL_POINTER: // Java NullPointerException, rethrows in Java
+ return ::android::BAD_VALUE;
+ case Status::EX_ILLEGAL_STATE: // Java IllegalStateException, rethrows in Java
+ case Status::EX_UNSUPPORTED_OPERATION: // Java UnsupportedOperationException, rethrows
+ return ::android::INVALID_OPERATION;
+ case Status::EX_HAS_REPLY_HEADER: // Native strictmode violation
+ case Status::EX_PARCELABLE: // Java bootclass loader (not standard exception), rethrows
+ case Status::EX_NETWORK_MAIN_THREAD: // Java NetworkOnMainThreadException, rethrows
+ case Status::EX_TRANSACTION_FAILED: // Native - see error code
+ case Status::EX_SERVICE_SPECIFIC: // Java ServiceSpecificException,
+ // rethrows in Java with integer error code
+ return ::android::UNKNOWN_ERROR;
+ }
+ return ::android::UNKNOWN_ERROR;
+}
+
+/**
+ * Return the equivalent Android ::android::status_t from a binder status.
+ *
+ * Used to handle errors from an AIDL method declaration
+ *
+ * [oneway] void method(type0 param0, ...)
+ *
+ * or the following (where return_type is not a status_t)
+ *
+ * return_type method(type0 param0, ...)
+ */
+static inline ::android::status_t statusTFromBinderStatus(const ::android::binder::Status &status) {
+ return status.isOk() ? ::android::OK // check ::android::OK,
+ : status.serviceSpecificErrorCode() // service-side error, not standard Java exception
+ // (fromServiceSpecificError)
+ ?: status.transactionError() // a native binder transaction error (fromStatusT)
+ ?: statusTFromExceptionCode(status.exceptionCode()); // a service-side error with a
+ // standard Java exception (fromExceptionCode)
+}
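A small sketch using only the mappings defined above (C++ backend spelling):

    // An exception-only Status (no transaction or service-specific error) falls through to
    // statusTFromExceptionCode(); EX_ILLEGAL_ARGUMENT therefore maps to ::android::BAD_VALUE.
    const ::android::binder::Status st = ::android::binder::Status::fromExceptionCode(
            ::android::binder::Status::EX_ILLEGAL_ARGUMENT);
    const ::android::status_t result = ::android::aidl_utils::statusTFromBinderStatus(st);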
+
+#if defined(BACKEND_NDK_IMPL)
+static inline ::android::status_t statusTFromBinderStatus(const ::ndk::ScopedAStatus &status) {
+ // What we want to do is to 'return statusTFromBinderStatus(status.get()->get())'
+ // However, since the definition of AStatus is not exposed, we have to do the same
+ // via methods of ScopedAStatus:
+ return status.isOk() ? ::android::OK // check ::android::OK,
+ : status.getServiceSpecificError() // service-side error, not standard Java exception
+ // (fromServiceSpecificError)
+ ?: status.getStatus() // a native binder transaction error (fromStatusT)
+ ?: statusTFromExceptionCode(status.getExceptionCode()); // a service-side error with a
+ // standard Java exception (fromExceptionCode)
+}
+#endif
+
+/**
+ * Return a binder::Status from native service status.
+ *
+ * This is used for methods not returning an explicit status_t,
+ * where Java callers expect an exception, not an integer return value.
+ */
+static inline ::android::binder::Status binderStatusFromStatusT(
+ ::android::status_t status, const char *optionalMessage = nullptr) {
+ const char * const emptyIfNull = optionalMessage == nullptr ? "" : optionalMessage;
+ // From binder::Status instructions:
+ // Prefer a generic exception code when possible, then a service specific
+ // code, and finally a ::android::status_t for low level failures or legacy support.
+ // Exception codes and service specific errors map to nicer exceptions for
+ // Java clients.
+
+ using namespace ::android::binder;
+ switch (status) {
+ case ::android::OK:
+ return Status::ok();
+ case ::android::PERMISSION_DENIED: // throw SecurityException on Java side
+ return Status::fromExceptionCode(Status::EX_SECURITY, emptyIfNull);
+ case ::android::BAD_VALUE: // throw IllegalArgumentException on Java side
+ return Status::fromExceptionCode(Status::EX_ILLEGAL_ARGUMENT, emptyIfNull);
+ case ::android::INVALID_OPERATION: // throw IllegalStateException on Java side
+ return Status::fromExceptionCode(Status::EX_ILLEGAL_STATE, emptyIfNull);
+ }
+
+ // A service specific error will not show on status.transactionError() so
+ // be sure to use statusTFromBinderStatus() for reliable error handling.
+
+ // throw a ServiceSpecificException.
+ return Status::fromServiceSpecificError(status, emptyIfNull);
+}
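For instance (a sketch; the message text is arbitrary):

    // INVALID_OPERATION is one of the explicitly mapped codes and surfaces to Java callers
    // as an IllegalStateException; codes outside the switch become ServiceSpecificExceptions.
    const ::android::binder::Status st = ::android::aidl_utils::binderStatusFromStatusT(
            ::android::INVALID_OPERATION, "stream not configured");
    // st.exceptionCode() == ::android::binder::Status::EX_ILLEGAL_STATE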
+
+} // namespace aidl_utils
+
+} // namespace android
+
+#if defined(BACKEND_NDK_IMPL)
+} // namespace aidl
+#endif
+
+// (defined(BACKEND_NDK_IMPL) && !defined(AUDIO_AIDL_CONVERSION_AIDL_CONVERSION_UTIL_NDK)) || \
+// (!defined(BACKEND_NDK_IMPL) && !defined(AUDIO_AIDL_CONVERSION_AIDL_CONVERSION_UTIL_CPP))
+#endif
diff --git a/media/audioaidlconversion/include/media/AidlConversionUtil.h b/media/audioaidlconversion/include/media/AidlConversionUtil.h
index 8b2e0de..b846436 100644
--- a/media/audioaidlconversion/include/media/AidlConversionUtil.h
+++ b/media/audioaidlconversion/include/media/AidlConversionUtil.h
@@ -16,407 +16,26 @@
#pragma once
-#include <limits>
-#include <type_traits>
-#include <utility>
-
-#include <android-base/expected.h>
-#include <binder/Status.h>
#include <error/Result.h>
-#if defined(BACKEND_NDK)
-#include <android/binder_auto_utils.h>
-#include <android/binder_enums.h>
-#include <android/binder_status.h>
-
-namespace aidl {
-#else
-#include <binder/Enums.h>
-#endif
-
+namespace android {
+// `ConversionResult` is always defined in the `::android` namespace,
+// so that it can be found from any nested namespace.
+// See below for the convenience alias specific to the NDK backend.
template <typename T>
using ConversionResult = ::android::error::Result<T>;
-
-namespace android {
-/**
- * A generic template to safely cast between integral types, respecting limits of the destination
- * type.
- */
-template<typename To, typename From>
-ConversionResult<To> convertIntegral(From from) {
- // Special handling is required for signed / vs. unsigned comparisons, since otherwise we may
- // have the signed converted to unsigned and produce wrong results.
- if (std::is_signed_v<From> && !std::is_signed_v<To>) {
- if (from < 0 || from > std::numeric_limits<To>::max()) {
- return ::android::base::unexpected(::android::BAD_VALUE);
- }
- } else if (std::is_signed_v<To> && !std::is_signed_v<From>) {
- if (from > std::numeric_limits<To>::max()) {
- return ::android::base::unexpected(::android::BAD_VALUE);
- }
- } else {
- if (from < std::numeric_limits<To>::min() || from > std::numeric_limits<To>::max()) {
- return ::android::base::unexpected(::android::BAD_VALUE);
- }
- }
- return static_cast<To>(from);
-}
-
-/**
- * A generic template to safely cast between types, that are intended to be the same size, but
- * interpreted differently.
- */
-template<typename To, typename From>
-ConversionResult<To> convertReinterpret(From from) {
- static_assert(sizeof(From) == sizeof(To));
- return static_cast<To>(from);
-}
-
-/**
- * A generic template that helps convert containers of convertible types, using iterators.
- */
-template<typename InputIterator, typename OutputIterator, typename Func>
-::android::status_t convertRange(InputIterator start,
- InputIterator end,
- OutputIterator out,
- const Func& itemConversion) {
- for (InputIterator iter = start; iter != end; ++iter, ++out) {
- *out = VALUE_OR_RETURN_STATUS(itemConversion(*iter));
- }
- return ::android::OK;
-}
-
-/**
- * A generic template that helps convert containers of convertible types, using iterators.
- * Uses a limit as maximum conversion items.
- */
-template<typename InputIterator, typename OutputIterator, typename Func>
-::android::status_t convertRangeWithLimit(InputIterator start,
- InputIterator end,
- OutputIterator out,
- const Func& itemConversion,
- const size_t limit) {
- InputIterator last = end;
- if (end - start > limit) {
- last = start + limit;
- }
- for (InputIterator iter = start; (iter != last); ++iter, ++out) {
- *out = VALUE_OR_RETURN_STATUS(itemConversion(*iter));
- }
- return ::android::OK;
-}
-
-/**
- * A generic template that helps convert containers of convertible types.
- */
-template<typename OutputContainer, typename InputContainer, typename Func>
-ConversionResult<OutputContainer>
-convertContainer(const InputContainer& input, const Func& itemConversion) {
- OutputContainer output;
- auto ins = std::inserter(output, output.begin());
- for (const auto& item : input) {
- *ins = VALUE_OR_RETURN(itemConversion(item));
- }
- return output;
-}
-
-/**
- * A generic template that helps convert containers of convertible types
- * using an item conversion function with an additional parameter.
- */
-template<typename OutputContainer, typename InputContainer, typename Func, typename Parameter>
-ConversionResult<OutputContainer>
-convertContainer(const InputContainer& input, const Func& itemConversion, const Parameter& param) {
- OutputContainer output;
- auto ins = std::inserter(output, output.begin());
- for (const auto& item : input) {
- *ins = VALUE_OR_RETURN(itemConversion(item, param));
- }
- return output;
-}
-
-/**
- * A generic template that helps to "zip" two input containers of the same size
- * into a single vector of converted types. The conversion function must
- * thus accept two arguments.
- */
-template<typename OutputContainer, typename InputContainer1,
- typename InputContainer2, typename Func>
-ConversionResult<OutputContainer>
-convertContainers(const InputContainer1& input1, const InputContainer2& input2,
- const Func& itemConversion) {
- auto iter2 = input2.begin();
- OutputContainer output;
- auto ins = std::inserter(output, output.begin());
- for (const auto& item1 : input1) {
- RETURN_IF_ERROR(iter2 != input2.end() ? ::android::OK : ::android::BAD_VALUE);
- *ins = VALUE_OR_RETURN(itemConversion(item1, *iter2++));
- }
- return output;
-}
-
-/**
- * A generic template that helps to "unzip" a per-element conversion into
- * a pair of elements into a pair of containers. The conversion function
- * must emit a pair of elements.
- */
-template<typename OutputContainer1, typename OutputContainer2,
- typename InputContainer, typename Func>
-ConversionResult<std::pair<OutputContainer1, OutputContainer2>>
-convertContainerSplit(const InputContainer& input, const Func& itemConversion) {
- OutputContainer1 output1;
- OutputContainer2 output2;
- auto ins1 = std::inserter(output1, output1.begin());
- auto ins2 = std::inserter(output2, output2.begin());
- for (const auto& item : input) {
- auto out_pair = VALUE_OR_RETURN(itemConversion(item));
- *ins1 = out_pair.first;
- *ins2 = out_pair.second;
- }
- return std::make_pair(output1, output2);
-}
-
-////////////////////////////////////////////////////////////////////////////////////////////////////
-// The code below establishes:
-// IntegralTypeOf<T>, which works for either integral types (in which case it evaluates to T), or
-// enum types (in which case it evaluates to std::underlying_type_T<T>).
-
-template<typename T, typename = std::enable_if_t<std::is_integral_v<T> || std::is_enum_v<T>>>
-struct IntegralTypeOfStruct {
- using Type = T;
-};
-
-template<typename T>
-struct IntegralTypeOfStruct<T, std::enable_if_t<std::is_enum_v<T>>> {
- using Type = std::underlying_type_t<T>;
-};
-
-template<typename T>
-using IntegralTypeOf = typename IntegralTypeOfStruct<T>::Type;
-
-////////////////////////////////////////////////////////////////////////////////////////////////////
-// Utilities for handling bitmasks.
-
-template<typename Enum>
-Enum indexToEnum_index(int index) {
- static_assert(std::is_enum_v<Enum> || std::is_integral_v<Enum>);
- return static_cast<Enum>(index);
-}
-
-template<typename Enum>
-Enum indexToEnum_bitmask(int index) {
- static_assert(std::is_enum_v<Enum> || std::is_integral_v<Enum>);
- return static_cast<Enum>(1 << index);
-}
-
-template<typename Mask, typename Enum>
-Mask enumToMask_bitmask(Enum e) {
- static_assert(std::is_enum_v<Enum> || std::is_integral_v<Enum>);
- static_assert(std::is_enum_v<Mask> || std::is_integral_v<Mask>);
- return static_cast<Mask>(e);
-}
-
-template<typename Mask, typename Enum>
-Mask enumToMask_index(Enum e) {
- static_assert(std::is_enum_v<Enum> || std::is_integral_v<Enum>);
- static_assert(std::is_enum_v<Mask> || std::is_integral_v<Mask>);
- return static_cast<Mask>(static_cast<std::make_unsigned_t<IntegralTypeOf<Mask>>>(1)
- << static_cast<int>(e));
-}
-
-template<typename DestMask, typename SrcMask, typename DestEnum, typename SrcEnum>
-ConversionResult<DestMask> convertBitmask(
- SrcMask src, const std::function<ConversionResult<DestEnum>(SrcEnum)>& enumConversion,
- const std::function<SrcEnum(int)>& srcIndexToEnum,
- const std::function<DestMask(DestEnum)>& destEnumToMask) {
- using UnsignedDestMask = std::make_unsigned_t<IntegralTypeOf<DestMask>>;
- using UnsignedSrcMask = std::make_unsigned_t<IntegralTypeOf<SrcMask>>;
-
- UnsignedDestMask dest = static_cast<UnsignedDestMask>(0);
- UnsignedSrcMask usrc = static_cast<UnsignedSrcMask>(src);
-
- int srcBitIndex = 0;
- while (usrc != 0) {
- if (usrc & 1) {
- SrcEnum srcEnum = srcIndexToEnum(srcBitIndex);
- DestEnum destEnum = VALUE_OR_RETURN(enumConversion(srcEnum));
- DestMask destMask = destEnumToMask(destEnum);
- dest |= destMask;
- }
- ++srcBitIndex;
- usrc >>= 1;
- }
- return static_cast<DestMask>(dest);
-}
-
-template<typename Mask, typename Enum>
-bool bitmaskIsSet(Mask mask, Enum index) {
- return (mask & enumToMask_index<Mask, Enum>(index)) != 0;
-}
-
-////////////////////////////////////////////////////////////////////////////////////////////////////
-// Utilities for working with AIDL unions.
-// UNION_GET(obj, fieldname) returns a ConversionResult<T> containing either the strongly-typed
-// value of the respective field, or ::android::BAD_VALUE if the union is not set to the requested
-// field.
-// UNION_SET(obj, fieldname, value) sets the requested field to the given value.
-
-template<typename T, typename T::Tag tag>
-using UnionFieldType = std::decay_t<decltype(std::declval<T>().template get<tag>())>;
-
-template<typename T, typename T::Tag tag>
-ConversionResult<UnionFieldType<T, tag>> unionGetField(const T& u) {
- if (u.getTag() != tag) {
- return ::android::base::unexpected(::android::BAD_VALUE);
- }
- return u.template get<tag>();
-}
-
-#define UNION_GET(u, field) \
- unionGetField<std::decay_t<decltype(u)>, std::decay_t<decltype(u)>::Tag::field>(u)
-
-#define UNION_SET(u, field, value) \
- (u).set<std::decay_t<decltype(u)>::Tag::field>(value)
-
-#define UNION_MAKE(u, field, value) u::make<u::Tag::field>(value)
-
-namespace aidl_utils {
-
-/**
- * Return true if the value is valid for the AIDL enumeration.
- */
-template <typename T>
-bool isValidEnum(T value) {
-#if defined(BACKEND_NDK)
- constexpr ndk::enum_range<T> er{};
-#else
- constexpr ::android::enum_range<T> er{};
-#endif
- return std::find(er.begin(), er.end(), value) != er.end();
-}
-
-// T is a "container" of enum binder types with a toString().
-template <typename T>
-std::string enumsToString(const T& t) {
- std::string s;
- for (const auto item : t) {
- if (s.empty()) {
- s = toString(item);
- } else {
- s.append("|").append(toString(item));
- }
- }
- return s;
-}
-
-/**
- * Return the equivalent Android ::android::status_t from a binder exception code.
- *
- * Generally one should use statusTFromBinderStatus() instead.
- *
- * Exception codes can be generated from a remote Java service exception, translate
- * them for use on the Native side.
- *
- * Note: for EX_TRANSACTION_FAILED and EX_SERVICE_SPECIFIC a more detailed error code
- * can be found from transactionError() or serviceSpecificErrorCode().
- */
-static inline ::android::status_t statusTFromExceptionCode(int32_t exceptionCode) {
- using namespace ::android::binder;
- switch (exceptionCode) {
- case Status::EX_NONE:
- return ::android::OK;
- case Status::EX_SECURITY: // Java SecurityException, rethrows locally in Java
- return ::android::PERMISSION_DENIED;
- case Status::EX_BAD_PARCELABLE: // Java BadParcelableException, rethrows in Java
- case Status::EX_ILLEGAL_ARGUMENT: // Java IllegalArgumentException, rethrows in Java
- case Status::EX_NULL_POINTER: // Java NullPointerException, rethrows in Java
- return ::android::BAD_VALUE;
- case Status::EX_ILLEGAL_STATE: // Java IllegalStateException, rethrows in Java
- case Status::EX_UNSUPPORTED_OPERATION: // Java UnsupportedOperationException, rethrows
- return ::android::INVALID_OPERATION;
- case Status::EX_HAS_REPLY_HEADER: // Native strictmode violation
- case Status::EX_PARCELABLE: // Java bootclass loader (not standard exception), rethrows
- case Status::EX_NETWORK_MAIN_THREAD: // Java NetworkOnMainThreadException, rethrows
- case Status::EX_TRANSACTION_FAILED: // Native - see error code
- case Status::EX_SERVICE_SPECIFIC: // Java ServiceSpecificException,
- // rethrows in Java with integer error code
- return ::android::UNKNOWN_ERROR;
- }
- return ::android::UNKNOWN_ERROR;
-}
-
-/**
- * Return the equivalent Android ::android::status_t from a binder status.
- *
- * Used to handle errors from a AIDL method declaration
- *
- * [oneway] void method(type0 param0, ...)
- *
- * or the following (where return_type is not a status_t)
- *
- * return_type method(type0 param0, ...)
- */
-static inline ::android::status_t statusTFromBinderStatus(const ::android::binder::Status &status) {
- return status.isOk() ? ::android::OK // check ::android::OK,
- : status.serviceSpecificErrorCode() // service-side error, not standard Java exception
- // (fromServiceSpecificError)
- ?: status.transactionError() // a native binder transaction error (fromStatusT)
- ?: statusTFromExceptionCode(status.exceptionCode()); // a service-side error with a
- // standard Java exception (fromExceptionCode)
-}
-
-#if defined(BACKEND_NDK)
-static inline ::android::status_t statusTFromBinderStatus(const ::ndk::ScopedAStatus &status) {
- // What we want to do is to 'return statusTFromBinderStatus(status.get()->get())'
- // However, since the definition of AStatus is not exposed, we have to do the same
- // via methods of ScopedAStatus:
- return status.isOk() ? ::android::OK // check ::android::OK,
- : status.getServiceSpecificError() // service-side error, not standard Java exception
- // (fromServiceSpecificError)
- ?: status.getStatus() // a native binder transaction error (fromStatusT)
- ?: statusTFromExceptionCode(status.getExceptionCode()); // a service-side error with a
- // standard Java exception (fromExceptionCode)
-}
-#endif
-
-/**
- * Return a binder::Status from native service status.
- *
- * This is used for methods not returning an explicit status_t,
- * where Java callers expect an exception, not an integer return value.
- */
-static inline ::android::binder::Status binderStatusFromStatusT(
- ::android::status_t status, const char *optionalMessage = nullptr) {
- const char * const emptyIfNull = optionalMessage == nullptr ? "" : optionalMessage;
- // From binder::Status instructions:
- // Prefer a generic exception code when possible, then a service specific
- // code, and finally a ::android::status_t for low level failures or legacy support.
- // Exception codes and service specific errors map to nicer exceptions for
- // Java clients.
-
- using namespace ::android::binder;
- switch (status) {
- case ::android::OK:
- return Status::ok();
- case ::android::PERMISSION_DENIED: // throw SecurityException on Java side
- return Status::fromExceptionCode(Status::EX_SECURITY, emptyIfNull);
- case ::android::BAD_VALUE: // throw IllegalArgumentException on Java side
- return Status::fromExceptionCode(Status::EX_ILLEGAL_ARGUMENT, emptyIfNull);
- case ::android::INVALID_OPERATION: // throw IllegalStateException on Java side
- return Status::fromExceptionCode(Status::EX_ILLEGAL_STATE, emptyIfNull);
- }
-
- // A service specific error will not show on status.transactionError() so
- // be sure to use statusTFromBinderStatus() for reliable error handling.
-
- // throw a ServiceSpecificException.
- return Status::fromServiceSpecificError(status, emptyIfNull);
-}
-
-} // namespace aidl_utils
-
} // namespace android
-#if defined(BACKEND_NDK)
-} // namespace aidl
+// Include 'AidlConversionUtil-impl.h' once if 'BACKEND_NDK' is defined
+// or if no 'BACKEND_*' macro is defined (C++ backend). Include it twice if
+// 'BACKEND_CPP_NDK' is defined: once with 'BACKEND_NDK_IMPL' defined, once without.
+
+#if defined(BACKEND_CPP_NDK) || defined(BACKEND_NDK)
+#define BACKEND_NDK_IMPL
+#include <media/AidlConversionUtil-impl.h>
+#undef BACKEND_NDK_IMPL
+#endif
+
+#if defined(BACKEND_CPP_NDK) || !defined(BACKEND_NDK)
+#include <media/AidlConversionUtil-impl.h>
#endif
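
The conditional includes above determine how the shared implementation header is pulled in. A minimal usage sketch under those macros (illustrative only, not part of the change):

// C++ backend (no BACKEND_* macro defined): the impl header is included once,
// without BACKEND_NDK_IMPL.
//     #include <media/AidlConversionUtil.h>
// NDK backend: the impl header is included once, with BACKEND_NDK_IMPL defined.
//     #define BACKEND_NDK
//     #include <media/AidlConversionUtil.h>
// Both backends in one translation unit: the impl header is included twice,
// once with BACKEND_NDK_IMPL and once without.
//     #define BACKEND_CPP_NDK
//     #include <media/AidlConversionUtil.h>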
diff --git a/media/audioaidlconversion/tests/Android.bp b/media/audioaidlconversion/tests/Android.bp
new file mode 100644
index 0000000..de7c8a2
--- /dev/null
+++ b/media/audioaidlconversion/tests/Android.bp
@@ -0,0 +1,46 @@
+package {
+ // See: http://go/android-license-faq
+ // A large-scale-change added 'default_applicable_licenses' to import
+ // all of the 'license_kinds' from "frameworks_av_license"
+ // to get the below license kinds:
+ // SPDX-license-identifier-Apache-2.0
+ default_applicable_licenses: ["frameworks_av_license"],
+}
+
+cc_defaults {
+ name: "libaudio_aidl_conversion_tests_defaults",
+ test_suites: ["device-tests"],
+ cflags: [
+ "-Wall",
+ "-Werror",
+ ],
+ sanitize: {
+ misc_undefined: [
+ "unsigned-integer-overflow",
+ "signed-integer-overflow",
+ ],
+ },
+}
+
+cc_test {
+ name: "audio_aidl_ndk_conversion_tests",
+
+ defaults: [
+ "latest_android_media_audio_common_types_ndk_static",
+ "latest_android_hardware_audio_common_ndk_static",
+ "libaudio_aidl_conversion_tests_defaults",
+ ],
+ srcs: ["audio_aidl_ndk_conversion_tests.cpp"],
+ shared_libs: [
+ "libbinder",
+ "libcutils",
+ "liblog",
+ "libutils",
+ ],
+ static_libs: [
+ "libaudio_aidl_conversion_common_ndk",
+ ],
+ cflags: [
+ "-DBACKEND_NDK",
+ ],
+}
diff --git a/media/audioaidlconversion/tests/audio_aidl_ndk_conversion_tests.cpp b/media/audioaidlconversion/tests/audio_aidl_ndk_conversion_tests.cpp
new file mode 100644
index 0000000..c505e60
--- /dev/null
+++ b/media/audioaidlconversion/tests/audio_aidl_ndk_conversion_tests.cpp
@@ -0,0 +1,91 @@
+/*
+ * Copyright (C) 2023 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <iostream>
+#include <type_traits>
+
+#include <gtest/gtest.h>
+
+#include <media/AidlConversionNdk.h>
+
+namespace {
+template<typename> struct mf_traits {};
+template<class T, class U> struct mf_traits<U T::*> {
+ using member_type = U;
+};
+} // namespace
+
+// Provide value printers for types generated from AIDL
+// They need to be in the same namespace as the types we intend to print
+namespace aidl::android::hardware::audio::common {
+ template <typename P>
+ std::enable_if_t<std::is_function_v<typename mf_traits<decltype(&P::toString)>::member_type>,
+ std::ostream&> operator<<(std::ostream& os, const P& p) {
+ return os << p.toString();
+ }
+ template <typename E>
+ std::enable_if_t<std::is_enum_v<E>, std::ostream&> operator<<(std::ostream& os, const E& e) {
+ return os << toString(e);
+ }
+} // namespace aidl::android::hardware::audio::common
+
+using aidl::android::hardware::audio::common::PlaybackTrackMetadata;
+using aidl::android::hardware::audio::common::RecordTrackMetadata;
+using aidl::android::media::audio::common::AudioSource;
+using aidl::android::media::audio::common::AudioUsage;
+using namespace aidl::android; // for conversion functions
+
+TEST(AudioPlaybackTrackMetadata, Aidl2Legacy2Aidl) {
+ const PlaybackTrackMetadata initial{ .usage = AudioUsage::UNKNOWN };
+ auto conv = aidl2legacy_PlaybackTrackMetadata_playback_track_metadata_v7(initial);
+ ASSERT_TRUE(conv.ok());
+ auto convBack = legacy2aidl_playback_track_metadata_v7_PlaybackTrackMetadata(conv.value());
+ ASSERT_TRUE(convBack.ok());
+ EXPECT_EQ(initial, convBack.value());
+}
+
+TEST(AudioPlaybackTrackMetadata, NonVendorTags) {
+ PlaybackTrackMetadata initial{ .usage = AudioUsage::UNKNOWN };
+ initial.tags.emplace_back("random string"); // Must be filtered out.
+ initial.tags.emplace_back("VX_GOOGLE_42");
+ auto conv = aidl2legacy_PlaybackTrackMetadata_playback_track_metadata_v7(initial);
+ ASSERT_TRUE(conv.ok());
+ auto convBack = legacy2aidl_playback_track_metadata_v7_PlaybackTrackMetadata(conv.value());
+ ASSERT_TRUE(convBack.ok());
+ ASSERT_EQ(1, convBack.value().tags.size());
+ EXPECT_EQ(initial.tags[1], convBack.value().tags[0]);
+}
+
+TEST(AudioRecordTrackMetadata, Aidl2Legacy2Aidl) {
+ const RecordTrackMetadata initial{ .source = AudioSource::DEFAULT };
+ auto conv = aidl2legacy_RecordTrackMetadata_record_track_metadata_v7(initial);
+ ASSERT_TRUE(conv.ok());
+ auto convBack = legacy2aidl_record_track_metadata_v7_RecordTrackMetadata(conv.value());
+ ASSERT_TRUE(convBack.ok());
+ EXPECT_EQ(initial, convBack.value());
+}
+
+TEST(AudioRecordTrackMetadata, NonVendorTags) {
+ RecordTrackMetadata initial{ .source = AudioSource::DEFAULT };
+ initial.tags.emplace_back("random string"); // Must be filtered out.
+ initial.tags.emplace_back("VX_GOOGLE_42");
+ auto conv = aidl2legacy_RecordTrackMetadata_record_track_metadata_v7(initial);
+ ASSERT_TRUE(conv.ok());
+ auto convBack = legacy2aidl_record_track_metadata_v7_RecordTrackMetadata(conv.value());
+ ASSERT_TRUE(convBack.ok());
+ ASSERT_EQ(1, convBack.value().tags.size());
+ EXPECT_EQ(initial.tags[1], convBack.value().tags[0]);
+}
diff --git a/media/codec2/TEST_MAPPING b/media/codec2/TEST_MAPPING
index 90bb054..8a894f3 100644
--- a/media/codec2/TEST_MAPPING
+++ b/media/codec2/TEST_MAPPING
@@ -8,17 +8,6 @@
],
"presubmit-large": [
{
- "name": "CtsMediaMiscTestCases",
- "options": [
- {
- "include-annotation": "android.platform.test.annotations.Presubmit"
- },
- {
- "exclude-annotation": "android.platform.test.annotations.RequiresDevice"
- }
- ]
- },
- {
"name": "CtsMediaAudioTestCases",
"options": [
{
@@ -35,50 +24,6 @@
"exclude-filter": "android.media.audio.cts.AudioRecordTest"
}
]
- },
- {
- "name": "CtsMediaDecoderTestCases",
- "options": [
- {
- "include-annotation": "android.platform.test.annotations.Presubmit"
- },
- {
- "exclude-annotation": "android.platform.test.annotations.RequiresDevice"
- }
- ]
- },
- {
- "name": "CtsMediaEncoderTestCases",
- "options": [
- {
- "include-annotation": "android.platform.test.annotations.Presubmit"
- },
- {
- "exclude-annotation": "android.platform.test.annotations.RequiresDevice"
- }
- ]
- },
- {
- "name": "CtsMediaCodecTestCases",
- "options": [
- {
- "include-annotation": "android.platform.test.annotations.Presubmit"
- },
- {
- "exclude-annotation": "android.platform.test.annotations.RequiresDevice"
- }
- ]
- },
- {
- "name": "CtsMediaPlayerTestCases",
- "options": [
- {
- "include-annotation": "android.platform.test.annotations.Presubmit"
- },
- {
- "exclude-annotation": "android.platform.test.annotations.RequiresDevice"
- }
- ]
}
]
}
diff --git a/media/codec2/components/aom/C2SoftAomEnc.cpp b/media/codec2/components/aom/C2SoftAomEnc.cpp
index d147fb4..59cad9d 100644
--- a/media/codec2/components/aom/C2SoftAomEnc.cpp
+++ b/media/codec2/components/aom/C2SoftAomEnc.cpp
@@ -88,6 +88,12 @@
.withSetter(BitrateSetter)
.build());
+ addParameter(DefineParam(mComplexity, C2_PARAMKEY_COMPLEXITY)
+ .withDefault(new C2StreamComplexityTuning::output(0u, 0))
+ .withFields({C2F(mComplexity, value).inRange(0, 5)})
+ .withSetter(Setter<decltype(*mComplexity)>::NonStrictValueWithNoDeps)
+ .build());
+
addParameter(DefineParam(mQuality, C2_PARAMKEY_QUALITY)
.withDefault(new C2StreamQualityTuning::output(0u, 80))
.withFields({C2F(mQuality, value).inRange(0, 100)})
@@ -306,10 +312,20 @@
return 15 + 35 * (100 - c2Quality) / 100;
}
+static int MapC2ComplexityToAOMSpeed (int c2Complexity) {
+ int mapping[6] = {10, 9, 8, 7, 6, 6};
+ if (c2Complexity > 5 || c2Complexity < 0) {
+ ALOGW("Wrong complexity setting. Falling back to speed 10");
+ return 10;
+ }
+ return mapping[c2Complexity];
+}
+
aom_codec_err_t C2SoftAomEnc::setupCodecParameters() {
aom_codec_err_t codec_return = AOM_CODEC_OK;
- codec_return = aom_codec_control(mCodecContext, AOME_SET_CPUUSED, DEFAULT_SPEED);
+ codec_return = aom_codec_control(mCodecContext, AOME_SET_CPUUSED,
+ MapC2ComplexityToAOMSpeed(mComplexity->value));
if (codec_return != AOM_CODEC_OK) goto BailOut;
codec_return = aom_codec_control(mCodecContext, AV1E_SET_ROW_MT, 1);
@@ -461,6 +477,7 @@
mRequestSync = mIntf->getRequestSync_l();
mColorAspects = mIntf->getCodedColorAspects_l();
mQuality = mIntf->getQuality_l();
+ mComplexity = mIntf->getComplexity_l();
}
@@ -481,9 +498,9 @@
mCodecInterface = aom_codec_av1_cx();
if (!mCodecInterface) goto CleanUp;
- ALOGD("AOM: initEncoder. BRMode: %u. KF: %u. QP: %u - %u, 10Bit: %d",
+ ALOGD("AOM: initEncoder. BRMode: %u. KF: %u. QP: %u - %u, 10Bit: %d, comlexity %d",
(uint32_t)mBitrateControlMode,
- mIntf->getSyncFramePeriod(), mMinQuantizer, mMaxQuantizer, mIs10Bit);
+ mIntf->getSyncFramePeriod(), mMinQuantizer, mMaxQuantizer, mIs10Bit, mComplexity->value);
mCodecConfiguration = new aom_codec_enc_cfg_t;
if (!mCodecConfiguration) goto CleanUp;
@@ -799,6 +816,28 @@
}
break;
}
+ case C2PlanarLayout::TYPE_YUVA: {
+ if (mConversionBuffer.size() >= stride * vstride * 3) {
+ uint16_t *dstY, *dstU, *dstV;
+ dstY = (uint16_t*)mConversionBuffer.data();
+ dstU = dstY + stride * vstride;
+ dstV = dstU + (stride * vstride) / 4;
+ convertRGBA1010102ToYUV420Planar16(dstY, dstU, dstV, (uint32_t*)(rView->data()[0]),
+ layout.planes[layout.PLANE_Y].rowInc / 4, stride,
+ vstride, mColorAspects->matrix,
+ mColorAspects->range);
+ aom_img_wrap(&raw_frame, AOM_IMG_FMT_I42016, stride, vstride, mStrideAlign,
+ mConversionBuffer.data());
+ aom_img_set_rect(&raw_frame, 0, 0, width, height, 0);
+ } else {
+ ALOGE("Conversion buffer is too small: %u x %u for %zu", stride, vstride,
+ mConversionBuffer.size());
+ work->result = C2_BAD_VALUE;
+ return;
+ }
+ break;
+ }
+
default:
ALOGE("Unrecognized plane type: %d", layout.type);
work->result = C2_BAD_VALUE;
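
For reference, a minimal sketch of how a caller might request the complexity tuning that MapC2ComplexityToAOMSpeed() consumes. The helper function and the interface handle are hypothetical; C2StreamComplexityTuning and the C2ComponentInterface::config_vb() entry point are the standard Codec2 pieces assumed here.

#include <memory>
#include <vector>

#include <C2Component.h>
#include <C2Config.h>

// Hypothetical helper: ask the AV1 encoder for complexity 2, which the mapping
// above translates to AOM speed 8 (0 -> 10 ... 5 -> 6).
c2_status_t requestComplexity2(const std::shared_ptr<C2ComponentInterface> &intf) {
    C2StreamComplexityTuning::output complexity(0u /* stream */, 2 /* value */);
    std::vector<std::unique_ptr<C2SettingResult>> failures;
    return intf->config_vb({&complexity}, C2_MAY_BLOCK, &failures);
}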
diff --git a/media/codec2/components/aom/C2SoftAomEnc.h b/media/codec2/components/aom/C2SoftAomEnc.h
index d7832dd..3067735 100644
--- a/media/codec2/components/aom/C2SoftAomEnc.h
+++ b/media/codec2/components/aom/C2SoftAomEnc.h
@@ -103,6 +103,7 @@
std::shared_ptr<C2StreamFrameRateInfo::output> mFrameRate;
std::shared_ptr<C2StreamBitrateInfo::output> mBitrate;
std::shared_ptr<C2StreamQualityTuning::output> mQuality;
+ std::shared_ptr<C2StreamComplexityTuning::output> mComplexity;
std::shared_ptr<C2StreamBitrateModeTuning::output> mBitrateMode;
std::shared_ptr<C2StreamRequestSyncFrameTuning::output> mRequestSync;
std::shared_ptr<C2StreamColorAspectsInfo::output> mColorAspects;
@@ -129,6 +130,9 @@
std::shared_ptr<C2StreamFrameRateInfo::output> getFrameRate_l() const { return mFrameRate; }
std::shared_ptr<C2StreamBitrateInfo::output> getBitrate_l() const { return mBitrate; }
std::shared_ptr<C2StreamQualityTuning::output> getQuality_l() const { return mQuality; }
+ std::shared_ptr<C2StreamComplexityTuning::output> getComplexity_l() const {
+ return mComplexity;
+ }
std::shared_ptr<C2StreamBitrateModeTuning::output> getBitrateMode_l() const {
return mBitrateMode;
}
@@ -155,6 +159,7 @@
std::shared_ptr<C2StreamSyncFrameIntervalTuning::output> mSyncFramePeriod;
std::shared_ptr<C2StreamBitrateInfo::output> mBitrate;
std::shared_ptr<C2StreamQualityTuning::output> mQuality;
+ std::shared_ptr<C2StreamComplexityTuning::output> mComplexity;
std::shared_ptr<C2StreamBitrateModeTuning::output> mBitrateMode;
std::shared_ptr<C2StreamProfileLevelInfo::output> mProfileLevel;
std::shared_ptr<C2StreamColorAspectsInfo::input> mColorAspects;
diff --git a/media/codec2/components/avc/C2SoftAvcEnc.cpp b/media/codec2/components/avc/C2SoftAvcEnc.cpp
index 5d2856a..9c054f0 100644
--- a/media/codec2/components/avc/C2SoftAvcEnc.cpp
+++ b/media/codec2/components/avc/C2SoftAvcEnc.cpp
@@ -356,7 +356,7 @@
needsUpdate = true;
}
}
- if (!found) {
+ if (!found || me.v.level > LEVEL_AVC_5) {
// We set to the highest supported level.
me.set().level = LEVEL_AVC_5;
}
diff --git a/media/codec2/components/base/SimpleC2Component.cpp b/media/codec2/components/base/SimpleC2Component.cpp
index 32f8fa8..55a1164 100644
--- a/media/codec2/components/base/SimpleC2Component.cpp
+++ b/media/codec2/components/base/SimpleC2Component.cpp
@@ -452,6 +452,60 @@
}
}
+static const int16_t bt709Matrix_10bit[2][3][3] = {
+ { { 218, 732, 74 }, { -117, -395, 512 }, { 512, -465, -47 } }, /* RANGE_FULL */
+ { { 186, 627, 63 }, { -103, -345, 448 }, { 448, -407, -41 } }, /* RANGE_LIMITED */
+};
+
+static const int16_t bt2020Matrix_10bit[2][3][3] = {
+ { { 269, 694, 61 }, { -143, -369, 512 }, { 512, -471, -41 } }, /* RANGE_FULL */
+ { { 230, 594, 52 }, { -125, -323, 448 }, { 448, -412, -36 } }, /* RANGE_LIMITED */
+};
+
+void convertRGBA1010102ToYUV420Planar16(uint16_t* dstY, uint16_t* dstU, uint16_t* dstV,
+ const uint32_t* srcRGBA, size_t srcRGBStride, size_t width,
+ size_t height, C2Color::matrix_t colorMatrix,
+ C2Color::range_t colorRange) {
+ uint16_t r, g, b;
+ int32_t i32Y, i32U, i32V;
+ uint16_t zeroLvl = colorRange == C2Color::RANGE_FULL ? 0 : 64;
+ uint16_t maxLvlLuma = colorRange == C2Color::RANGE_FULL ? 1023 : 940;
+ uint16_t maxLvlChroma = colorRange == C2Color::RANGE_FULL ? 1023 : 960;
+ // default to limited range unless full range is specified
+ if (colorRange != C2Color::RANGE_FULL) {
+ colorRange = C2Color::RANGE_LIMITED;
+ }
+ const int16_t(*weights)[3] = (colorMatrix == C2Color::MATRIX_BT709)
+ ? bt709Matrix_10bit[colorRange - 1]
+ : bt2020Matrix_10bit[colorRange - 1];
+
+ for (size_t y = 0; y < height; ++y) {
+ for (size_t x = 0; x < width; ++x) {
+ b = (srcRGBA[x] >> 20) & 0x3FF;
+ g = (srcRGBA[x] >> 10) & 0x3FF;
+ r = srcRGBA[x] & 0x3FF;
+
+ i32Y = ((r * weights[0][0] + g * weights[0][1] + b * weights[0][2] + 512) >> 10) +
+ zeroLvl;
+ dstY[x] = CLIP3(zeroLvl, i32Y, maxLvlLuma);
+ if (y % 2 == 0 && x % 2 == 0) {
+ i32U = ((r * weights[1][0] + g * weights[1][1] + b * weights[1][2] + 512) >> 10) +
+ 512;
+ i32V = ((r * weights[2][0] + g * weights[2][1] + b * weights[2][2] + 512) >> 10) +
+ 512;
+ dstU[x >> 1] = CLIP3(zeroLvl, i32U, maxLvlChroma);
+ dstV[x >> 1] = CLIP3(zeroLvl, i32V, maxLvlChroma);
+ }
+ }
+ srcRGBA += srcRGBStride;
+ dstY += width;
+ if (y % 2 == 0) {
+ dstU += width / 2;
+ dstV += width / 2;
+ }
+ }
+}
+
std::unique_ptr<C2Work> SimpleC2Component::WorkQueue::pop_front() {
std::unique_ptr<C2Work> work = std::move(mQueue.front().work);
mQueue.pop_front();
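
A quick numerical sanity check on the limited-range BT.709 path of convertRGBA1010102ToYUV420Planar16() above (illustrative only, not part of the change):

// Full-scale white, r = g = b = 1023, with the limited-range BT.709 row {186, 627, 63}:
//   i32Y = ((1023*186 + 1023*627 + 1023*63 + 512) >> 10) + 64 = 875 + 64 = 939,
// i.e. just below the nominal 10-bit limited-range luma peak of 940 (maxLvlLuma).
static_assert(((1023 * 186 + 1023 * 627 + 1023 * 63 + 512) >> 10) + 64 == 939,
              "white maps just under the 10-bit limited-range peak");
// For any grey pixel the chroma rows sum to zero (-103 - 345 + 448 == 0), so
// i32U = i32V = 512, the chroma mid-point.
static_assert(-103 - 345 + 448 == 0, "chroma weights cancel for grey input");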
diff --git a/media/codec2/components/base/include/SimpleC2Component.h b/media/codec2/components/base/include/SimpleC2Component.h
index 051f798..bc27474 100644
--- a/media/codec2/components/base/include/SimpleC2Component.h
+++ b/media/codec2/components/base/include/SimpleC2Component.h
@@ -21,6 +21,7 @@
#include <unordered_map>
#include <C2Component.h>
+#include <C2Config.h>
#include <media/stagefright/foundation/AHandler.h>
#include <media/stagefright/foundation/ALooper.h>
@@ -61,6 +62,11 @@
size_t dstUStride, size_t dstVStride, size_t width,
size_t height, bool isMonochrome = false);
+void convertRGBA1010102ToYUV420Planar16(uint16_t* dstY, uint16_t* dstU, uint16_t* dstV,
+ const uint32_t* srcRGBA, size_t srcRGBStride, size_t width,
+ size_t height, C2Color::matrix_t colorMatrix,
+ C2Color::range_t colorRange);
+
class SimpleC2Component
: public C2Component, public std::enable_shared_from_this<SimpleC2Component> {
public:
diff --git a/media/codec2/components/hevc/C2SoftHevcEnc.cpp b/media/codec2/components/hevc/C2SoftHevcEnc.cpp
index 9c26c02..56e6e8a 100644
--- a/media/codec2/components/hevc/C2SoftHevcEnc.cpp
+++ b/media/codec2/components/hevc/C2SoftHevcEnc.cpp
@@ -362,7 +362,7 @@
needsUpdate = true;
}
}
- if (!found) {
+ if (!found || me.v.level > LEVEL_HEVC_MAIN_5_2) {
// We set to the highest supported level.
me.set().level = LEVEL_HEVC_MAIN_5_2;
}
diff --git a/media/codec2/components/mpeg4_h263/C2SoftMpeg4Enc.cpp b/media/codec2/components/mpeg4_h263/C2SoftMpeg4Enc.cpp
index d5e8c56..95610fa 100644
--- a/media/codec2/components/mpeg4_h263/C2SoftMpeg4Enc.cpp
+++ b/media/codec2/components/mpeg4_h263/C2SoftMpeg4Enc.cpp
@@ -49,6 +49,8 @@
const char *MEDIA_MIMETYPE_VIDEO = MEDIA_MIMETYPE_VIDEO_H263;
#endif
+constexpr float VBV_DELAY = 5.0f;
+
} // namespace
class C2SoftMpeg4Enc::IntfImpl : public SimpleInterface<void>::BaseParams {
@@ -131,7 +133,7 @@
C2Config::LEVEL_MP4V_1,
C2Config::LEVEL_MP4V_2})
})
- .withSetter(ProfileLevelSetter)
+ .withSetter(ProfileLevelSetter, mSize, mFrameRate, mBitrate)
.build());
#else
addParameter(
@@ -148,7 +150,7 @@
C2Config::LEVEL_H263_40,
C2Config::LEVEL_H263_45})
})
- .withSetter(ProfileLevelSetter)
+ .withSetter(ProfileLevelSetter, mSize, mFrameRate, mBitrate)
.build());
#endif
}
@@ -179,7 +181,10 @@
static C2R ProfileLevelSetter(
bool mayBlock,
- C2P<C2StreamProfileLevelInfo::output> &me) {
+ C2P<C2StreamProfileLevelInfo::output> &me,
+ const C2P<C2StreamPictureSizeInfo::input> &size,
+ const C2P<C2StreamFrameRateInfo::output> &frameRate,
+ const C2P<C2StreamBitrateInfo::output> &bitrate) {
(void)mayBlock;
if (!me.F(me.v.profile).supportsAtAll(me.v.profile)) {
#ifdef MPEG4
@@ -188,11 +193,84 @@
me.set().profile = PROFILE_H263_BASELINE;
#endif
}
- if (!me.F(me.v.level).supportsAtAll(me.v.level)) {
+
+ struct LevelLimits {
+ C2Config::level_t level;
+ uint32_t sampleRate;
+ uint32_t width;
+ uint32_t height;
+ uint32_t frameRate;
+ uint32_t bitrate;
+ uint32_t vbvSize;
+ };
+
+ constexpr LevelLimits kLimits[] = {
+#ifdef MPEG4
+ { LEVEL_MP4V_0, 380160, 176, 144, 15, 64000, 163840 },
+ // { LEVEL_MP4V_0B, 380160, 176, 144, 15, 128000, 163840 },
+ { LEVEL_MP4V_1, 380160, 176, 144, 30, 64000, 163840 },
+ { LEVEL_MP4V_2, 1520640, 352, 288, 30, 128000, 655360 },
+#else
+ // HRD Buffer Size = B + BPPmaxKb * 1024 bits,
+ // where (BPPmaxKb * 1024) is the maximum number of bits per picture
+ // negotiated for use in the bitstream (Sec 3.6 of T-Rec-H.263),
+ // and B = 4 * Rmax / PCF, with Rmax the maximum bit rate and PCF the
+ // picture clock frequency.
+ { LEVEL_H263_10, 380160, 176, 144, 15, 64000, 74077 },
+ { LEVEL_H263_45, 380160, 176, 144, 15, 128000, 82619 },
+ { LEVEL_H263_20, 1520640, 352, 288, 30, 128000, 279227 },
+ { LEVEL_H263_30, 3041280, 352, 288, 30, 384000, 313395 },
+ { LEVEL_H263_40, 3041280, 352, 288, 30, 2048000, 535483 },
+ // { LEVEL_H263_50, 5068800, 352, 288, 60, 4096000, 808823 },
+#endif
+ };
+
+ auto mbs = ((size.v.width + 15) / 16) * ((size.v.height + 15) / 16);
+ auto sampleRate = mbs * frameRate.v.value * 16 * 16;
+ auto vbvSize = bitrate.v.value * VBV_DELAY;
+
+ // Check if the supplied level meets the MB / bitrate requirements. If
+ // not, update the level with the lowest level meeting the requirements.
+ bool found = false;
+
+ // needsUpdate starts out false, covering the case where the supplied level
+ // already meets the requirements.
+ bool needsUpdate = false;
+#ifdef MPEG4
+ // For Level 0b, we want to update the level anyway, as library does not
+ // seem to accept this value.
+ if (me.v.level == LEVEL_MP4V_0B) {
+ needsUpdate = true;
+ }
+#endif
+ for (const LevelLimits &limit : kLimits) {
+ if (sampleRate <= limit.sampleRate && size.v.width <= limit.width &&
+ vbvSize <= limit.vbvSize && size.v.height <= limit.height &&
+ bitrate.v.value <= limit.bitrate && frameRate.v.value <= limit.frameRate) {
+ // This is the lowest level that meets the requirements, and if
+ // we haven't seen the supplied level yet, that means we don't
+ // need the update.
+ if (needsUpdate) {
+ ALOGD("Given level %x does not cover current configuration: "
+ "adjusting to %x", me.v.level, limit.level);
+ me.set().level = limit.level;
+ }
+ found = true;
+ break;
+ }
+ if (me.v.level == limit.level) {
+ // We break out of the loop when the lowest feasible level is
+ // found. The fact that we're here means that our level doesn't
+ // meet the requirement and needs to be updated.
+ needsUpdate = true;
+ }
+ }
+ // If not found, set to the highest supported level.
+ if (!found) {
#ifdef MPEG4
me.set().level = LEVEL_MP4V_2;
#else
- me.set().level = LEVEL_H263_45;
+ me.set().level = LEVEL_H263_40;
#endif
}
return C2R::Ok();
@@ -210,6 +288,18 @@
return (uint32_t)c2_max(c2_min(period + 0.5, double(UINT32_MAX)), 1.);
}
+ ProfileLevelType getProfileLevel_l() const {
+#ifdef MPEG4
+ if (mProfileLevel->level == LEVEL_MP4V_0) return SIMPLE_PROFILE_LEVEL0;
+ else if (mProfileLevel->level == LEVEL_MP4V_1) return SIMPLE_PROFILE_LEVEL1;
+ return SIMPLE_PROFILE_LEVEL2; // level == LEVEL_MP4V_2
+#else
+ // The library does not export H.263-specific levels, so there is no way to map
+ // C2 enums to library-specific constants. Return the max supported level.
+ return CORE_PROFILE_LEVEL2;
+#endif
+ }
+
private:
std::shared_ptr<C2StreamUsageTuning::input> mUsage;
std::shared_ptr<C2StreamPictureSizeInfo::input> mSize;
@@ -325,8 +415,8 @@
mEncParams->encHeight[0] = mSize->height;
mEncParams->encFrameRate[0] = mFrameRate->value + 0.5;
mEncParams->rcType = VBR_1;
- mEncParams->vbvDelay = 5.0f;
- mEncParams->profile_level = CORE_PROFILE_LEVEL2;
+ mEncParams->vbvDelay = VBV_DELAY;
+ mEncParams->profile_level = mProfileLevel;
mEncParams->packetSize = 32;
mEncParams->rvlcEnable = PV_OFF;
mEncParams->numLayers = 1;
@@ -367,6 +457,7 @@
mSize = mIntf->getSize_l();
mBitrate = mIntf->getBitrate_l();
mFrameRate = mIntf->getFrameRate_l();
+ mProfileLevel = mIntf->getProfileLevel_l();
}
c2_status_t err = initEncParams();
if (C2_OK != err) {
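
As a cross-check of the H.263 vbvSize entries in kLimits above, a short sketch assuming BPPmaxKb = 64 kbit for QCIF and a picture clock frequency of 30000/1001 (both constants are inferred, not stated in the change):

// HRD size = BPPmaxKb * 1024 + B, with B = 4 * Rmax / PCF (see the comment in kLimits).
static_assert(64 * 1024 + 4 *  64000 * 1001 / 30000 == 74077, "LEVEL_H263_10 vbvSize");
static_assert(64 * 1024 + 4 * 128000 * 1001 / 30000 == 82619, "LEVEL_H263_45 vbvSize");
// The setter compares these limits against bitrate.v.value * VBV_DELAY,
// i.e. five seconds of data at the configured bitrate.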
diff --git a/media/codec2/components/mpeg4_h263/C2SoftMpeg4Enc.h b/media/codec2/components/mpeg4_h263/C2SoftMpeg4Enc.h
index 43461fc..e5c8ea6 100644
--- a/media/codec2/components/mpeg4_h263/C2SoftMpeg4Enc.h
+++ b/media/codec2/components/mpeg4_h263/C2SoftMpeg4Enc.h
@@ -65,6 +65,7 @@
std::shared_ptr<C2StreamPictureSizeInfo::input> mSize;
std::shared_ptr<C2StreamFrameRateInfo::output> mFrameRate;
std::shared_ptr<C2StreamBitrateInfo::output> mBitrate;
+ ProfileLevelType mProfileLevel;
int64_t mNumInputFrames;
MP4EncodingMode mEncodeMode;
diff --git a/media/codec2/core/include/C2Config.h b/media/codec2/core/include/C2Config.h
index 6ff3dbc..417b261 100644
--- a/media/codec2/core/include/C2Config.h
+++ b/media/codec2/core/include/C2Config.h
@@ -2503,7 +2503,8 @@
* Note: This parameter allows a decoder to ignore the video peek machinery and
* to revert to its preferred behavior.
*/
-typedef C2StreamParam<C2Tuning, C2EasyEnum<C2PlatformConfig::tunnel_peek_mode_t>,
+typedef C2StreamParam<C2Tuning,
+ C2SimpleValueStruct<C2EasyEnum<C2PlatformConfig::tunnel_peek_mode_t>>,
kParamIndexTunnelPeekMode> C2StreamTunnelPeekModeTuning;
constexpr char C2_PARAMKEY_TUNNEL_PEEK_MODE[] =
"output.tunnel-peek-mode";
diff --git a/media/codec2/hal/client/client.cpp b/media/codec2/hal/client/client.cpp
index 7f75a91..9359e29 100644
--- a/media/codec2/hal/client/client.cpp
+++ b/media/codec2/hal/client/client.cpp
@@ -24,7 +24,6 @@
#include <C2Config.h> // for C2StreamUsageTuning
#include <C2PlatformSupport.h>
-#include <android/binder_auto_utils.h>
#include <android/hardware/media/bufferpool/2.0/IClientManager.h>
#include <android/hardware/media/c2/1.0/IComponent.h>
#include <android/hardware/media/c2/1.0/IComponentInterface.h>
@@ -48,6 +47,7 @@
#include <system/window.h> // for NATIVE_WINDOW_QUERY_*
#include <media/stagefright/foundation/ADebug.h> // for asString(status_t)
+
#include <deque>
#include <iterator>
#include <limits>
@@ -65,6 +65,11 @@
using ::android::hardware::Return;
using ::android::hardware::Void;
+using namespace ::android::hardware::media::c2::V1_1;
+using namespace ::android::hardware::media::c2::V1_1::utils;
+using namespace ::android::hardware::media::bufferpool::V2_0;
+using namespace ::android::hardware::media::bufferpool::V2_0::implementation;
+
using HGraphicBufferProducer1 = ::android::hardware::graphics::bufferqueue::
V1_0::IGraphicBufferProducer;
using HGraphicBufferProducer2 = ::android::hardware::graphics::bufferqueue::
@@ -75,12 +80,6 @@
V2_0::utils::H2BGraphicBufferProducer;
using ::android::hardware::media::c2::V1_2::SurfaceSyncObj;
-namespace bufferpool_hidl = ::android::hardware::media::bufferpool::V2_0;
-namespace c2_hidl_base = ::android::hardware::media::c2;
-namespace c2_hidl = ::android::hardware::media::c2::V1_2;
-
-using c2_hidl::utils::operator<<;
-
namespace /* unnamed */ {
// c2_status_t value that corresponds to hwbinder transaction failure.
@@ -255,43 +254,15 @@
return sCaches;
}
};
-// Codec2ConfigurableClient::HidlImpl
-struct Codec2ConfigurableClient::HidlImpl : public Codec2ConfigurableClient::ImplBase {
- typedef c2_hidl::IConfigurable Base;
+// Codec2ConfigurableClient
- // base cannot be null.
- explicit HidlImpl(const sp<Base>& base);
+const C2String& Codec2ConfigurableClient::getName() const {
+ return mName;
+}
- const C2String& getName() const override {
- return mName;
- }
-
- c2_status_t query(
- const std::vector<C2Param*>& stackParams,
- const std::vector<C2Param::Index> &heapParamIndices,
- c2_blocking_t mayBlock,
- std::vector<std::unique_ptr<C2Param>>* const heapParams) const override;
-
- c2_status_t config(
- const std::vector<C2Param*> &params,
- c2_blocking_t mayBlock,
- std::vector<std::unique_ptr<C2SettingResult>>* const failures) override;
-
- c2_status_t querySupportedParams(
- std::vector<std::shared_ptr<C2ParamDescriptor>>* const params
- ) const override;
-
- c2_status_t querySupportedValues(
- std::vector<C2FieldSupportedValuesQuery>& fields,
- c2_blocking_t mayBlock) const override;
-
-private:
- sp<Base> mBase;
- const C2String mName;
-};
-
-Codec2ConfigurableClient::HidlImpl::HidlImpl(const sp<Base>& base)
+Codec2ConfigurableClient::Codec2ConfigurableClient(
+ const sp<IConfigurable>& base)
: mBase{base},
mName{[base]() -> C2String {
C2String outName;
@@ -303,12 +274,12 @@
}()} {
}
-c2_status_t Codec2ConfigurableClient::HidlImpl::query(
+c2_status_t Codec2ConfigurableClient::query(
const std::vector<C2Param*> &stackParams,
const std::vector<C2Param::Index> &heapParamIndices,
c2_blocking_t mayBlock,
std::vector<std::unique_ptr<C2Param>>* const heapParams) const {
- hidl_vec<c2_hidl::ParamIndex> indices(
+ hidl_vec<ParamIndex> indices(
stackParams.size() + heapParamIndices.size());
size_t numIndices = 0;
for (C2Param* const& stackParam : stackParams) {
@@ -316,12 +287,12 @@
LOG(WARNING) << "query -- null stack param encountered.";
continue;
}
- indices[numIndices++] = static_cast<c2_hidl::ParamIndex>(stackParam->index());
+ indices[numIndices++] = static_cast<ParamIndex>(stackParam->index());
}
size_t numStackIndices = numIndices;
for (const C2Param::Index& index : heapParamIndices) {
indices[numIndices++] =
- static_cast<c2_hidl::ParamIndex>(static_cast<uint32_t>(index));
+ static_cast<ParamIndex>(static_cast<uint32_t>(index));
}
indices.resize(numIndices);
if (heapParams) {
@@ -332,7 +303,7 @@
indices,
mayBlock == C2_MAY_BLOCK,
[&status, &numStackIndices, &stackParams, heapParams](
- c2_hidl::Status s, const c2_hidl::Params& p) {
+ Status s, const Params& p) {
status = static_cast<c2_status_t>(s);
if (status != C2_OK && status != C2_BAD_INDEX) {
LOG(DEBUG) << "query -- call failed: "
@@ -340,7 +311,7 @@
return;
}
std::vector<C2Param*> paramPointers;
- if (!c2_hidl::utils::parseParamsBlob(&paramPointers, p)) {
+ if (!parseParamsBlob(&paramPointers, p)) {
LOG(ERROR) << "query -- error while parsing params.";
status = C2_CORRUPTED;
return;
@@ -400,12 +371,12 @@
return status;
}
-c2_status_t Codec2ConfigurableClient::HidlImpl::config(
+c2_status_t Codec2ConfigurableClient::config(
const std::vector<C2Param*> &params,
c2_blocking_t mayBlock,
std::vector<std::unique_ptr<C2SettingResult>>* const failures) {
- c2_hidl::Params hidlParams;
- if (!c2_hidl::utils::createParamsBlob(&hidlParams, params)) {
+ Params hidlParams;
+ if (!createParamsBlob(&hidlParams, params)) {
LOG(ERROR) << "config -- bad input.";
return C2_TRANSACTION_FAILED;
}
@@ -414,9 +385,9 @@
hidlParams,
mayBlock == C2_MAY_BLOCK,
[&status, &params, failures](
- c2_hidl::Status s,
- const hidl_vec<c2_hidl::SettingResult> f,
- const c2_hidl::Params& o) {
+ Status s,
+ const hidl_vec<SettingResult> f,
+ const Params& o) {
status = static_cast<c2_status_t>(s);
if (status != C2_OK && status != C2_BAD_INDEX) {
LOG(DEBUG) << "config -- call failed: "
@@ -424,14 +395,14 @@
}
size_t i = failures->size();
failures->resize(i + f.size());
- for (const c2_hidl::SettingResult& sf : f) {
- if (!c2_hidl::utils::objcpy(&(*failures)[i++], sf)) {
+ for (const SettingResult& sf : f) {
+ if (!objcpy(&(*failures)[i++], sf)) {
LOG(ERROR) << "config -- "
<< "invalid SettingResult returned.";
return;
}
}
- if (!c2_hidl::utils::updateParamsFromBlob(params, o)) {
+ if (!updateParamsFromBlob(params, o)) {
LOG(ERROR) << "config -- "
<< "failed to parse returned params.";
status = C2_CORRUPTED;
@@ -444,7 +415,7 @@
return status;
}
-c2_status_t Codec2ConfigurableClient::HidlImpl::querySupportedParams(
+c2_status_t Codec2ConfigurableClient::querySupportedParams(
std::vector<std::shared_ptr<C2ParamDescriptor>>* const params) const {
// TODO: Cache and query properly!
c2_status_t status;
@@ -452,8 +423,8 @@
std::numeric_limits<uint32_t>::min(),
std::numeric_limits<uint32_t>::max(),
[&status, params](
- c2_hidl::Status s,
- const hidl_vec<c2_hidl::ParamDescriptor>& p) {
+ Status s,
+ const hidl_vec<ParamDescriptor>& p) {
status = static_cast<c2_status_t>(s);
if (status != C2_OK) {
LOG(DEBUG) << "querySupportedParams -- call failed: "
@@ -462,8 +433,8 @@
}
size_t i = params->size();
params->resize(i + p.size());
- for (const c2_hidl::ParamDescriptor& sp : p) {
- if (!c2_hidl::utils::objcpy(&(*params)[i++], sp)) {
+ for (const ParamDescriptor& sp : p) {
+ if (!objcpy(&(*params)[i++], sp)) {
LOG(ERROR) << "querySupportedParams -- "
<< "invalid returned ParamDescriptor.";
return;
@@ -477,12 +448,12 @@
return status;
}
-c2_status_t Codec2ConfigurableClient::HidlImpl::querySupportedValues(
+c2_status_t Codec2ConfigurableClient::querySupportedValues(
std::vector<C2FieldSupportedValuesQuery>& fields,
c2_blocking_t mayBlock) const {
- hidl_vec<c2_hidl::FieldSupportedValuesQuery> inFields(fields.size());
+ hidl_vec<FieldSupportedValuesQuery> inFields(fields.size());
for (size_t i = 0; i < fields.size(); ++i) {
- if (!c2_hidl::utils::objcpy(&inFields[i], fields[i])) {
+ if (!objcpy(&inFields[i], fields[i])) {
LOG(ERROR) << "querySupportedValues -- bad input";
return C2_TRANSACTION_FAILED;
}
@@ -493,8 +464,8 @@
inFields,
mayBlock == C2_MAY_BLOCK,
[&status, &inFields, &fields](
- c2_hidl::Status s,
- const hidl_vec<c2_hidl::FieldSupportedValuesQueryResult>& r) {
+ Status s,
+ const hidl_vec<FieldSupportedValuesQueryResult>& r) {
status = static_cast<c2_status_t>(s);
if (status != C2_OK) {
LOG(DEBUG) << "querySupportedValues -- call failed: "
@@ -509,7 +480,7 @@
return;
}
for (size_t i = 0; i < fields.size(); ++i) {
- if (!c2_hidl::utils::objcpy(&fields[i], inFields[i], r[i])) {
+ if (!objcpy(&fields[i], inFields[i], r[i])) {
LOG(ERROR) << "querySupportedValues -- "
"invalid returned value.";
status = C2_CORRUPTED;
@@ -524,131 +495,14 @@
return status;
}
-// Codec2ConfigurableClient::AidlImpl
-
-struct Codec2ConfigurableClient::AidlImpl : public Codec2ConfigurableClient::ImplBase {
- // TODO: C2AIDL was not landed yet, use c2_aidl when it is landed.
- typedef c2_hidl::IConfigurable Base;
-
- // base cannot be null.
- explicit AidlImpl(const std::shared_ptr<Base>& base);
-
- const C2String& getName() const override {
- return mName;
- }
-
- c2_status_t query(
- const std::vector<C2Param*>& stackParams,
- const std::vector<C2Param::Index> &heapParamIndices,
- c2_blocking_t mayBlock,
- std::vector<std::unique_ptr<C2Param>>* const heapParams) const override;
-
- c2_status_t config(
- const std::vector<C2Param*> &params,
- c2_blocking_t mayBlock,
- std::vector<std::unique_ptr<C2SettingResult>>* const failures) override;
-
- c2_status_t querySupportedParams(
- std::vector<std::shared_ptr<C2ParamDescriptor>>* const params
- ) const override;
-
- c2_status_t querySupportedValues(
- std::vector<C2FieldSupportedValuesQuery>& fields,
- c2_blocking_t mayBlock) const override;
-
-private:
- std::shared_ptr<Base> mBase;
- const C2String mName;
-};
-
-Codec2ConfigurableClient::AidlImpl::AidlImpl(const std::shared_ptr<Base>& base)
- : mBase{base},
- mName{[base]() -> C2String {
- // TODO: implementation
- (void)base;
- return "";
- }()} {
-}
-
-c2_status_t Codec2ConfigurableClient::AidlImpl::query(
- const std::vector<C2Param*> &stackParams,
- const std::vector<C2Param::Index> &heapParamIndices,
- c2_blocking_t mayBlock,
- std::vector<std::unique_ptr<C2Param>>* const heapParams) const {
- (void)stackParams, (void)heapParamIndices, (void)mayBlock, (void)heapParams;
- // TODO: implementation
- return C2_OMITTED;
-}
-
-c2_status_t Codec2ConfigurableClient::AidlImpl::config(
- const std::vector<C2Param*> &params,
- c2_blocking_t mayBlock,
- std::vector<std::unique_ptr<C2SettingResult>>* const failures) {
- (void)params, (void)mayBlock, (void)failures;
- // TODO: implementation
- return C2_OMITTED;
-}
-
-c2_status_t Codec2ConfigurableClient::AidlImpl::querySupportedParams(
- std::vector<std::shared_ptr<C2ParamDescriptor>>* const params) const {
- (void)params;
- // TODO: implementation
- return C2_OMITTED;
-}
-
-c2_status_t Codec2ConfigurableClient::AidlImpl::querySupportedValues(
- std::vector<C2FieldSupportedValuesQuery>& fields,
- c2_blocking_t mayBlock) const {
- (void)fields, (void)mayBlock;
- // TODO: implementation
- return C2_OMITTED;
-}
-
-// Codec2ConfigurableClient
-
-Codec2ConfigurableClient::Codec2ConfigurableClient(const sp<HidlBase> &hidlBase)
- : mImpl(new Codec2ConfigurableClient::HidlImpl(hidlBase)) {
-}
-
-const C2String& Codec2ConfigurableClient::getName() const {
- return mImpl->getName();
-}
-
-c2_status_t Codec2ConfigurableClient::query(
- const std::vector<C2Param*>& stackParams,
- const std::vector<C2Param::Index> &heapParamIndices,
- c2_blocking_t mayBlock,
- std::vector<std::unique_ptr<C2Param>>* const heapParams) const {
- return mImpl->query(stackParams, heapParamIndices, mayBlock, heapParams);
-}
-
-c2_status_t Codec2ConfigurableClient::config(
- const std::vector<C2Param*> &params,
- c2_blocking_t mayBlock,
- std::vector<std::unique_ptr<C2SettingResult>>* const failures) {
- return mImpl->config(params, mayBlock, failures);
-}
-
-c2_status_t Codec2ConfigurableClient::querySupportedParams(
- std::vector<std::shared_ptr<C2ParamDescriptor>>* const params) const {
- return mImpl->querySupportedParams(params);
-}
-
-c2_status_t Codec2ConfigurableClient::querySupportedValues(
- std::vector<C2FieldSupportedValuesQuery>& fields,
- c2_blocking_t mayBlock) const {
- return mImpl->querySupportedValues(fields, mayBlock);
-}
-
-
// Codec2Client::Component::HidlListener
-struct Codec2Client::Component::HidlListener : public c2_hidl::IComponentListener {
+struct Codec2Client::Component::HidlListener : public IComponentListener {
std::weak_ptr<Component> component;
std::weak_ptr<Listener> base;
- virtual Return<void> onWorkDone(const c2_hidl::WorkBundle& workBundle) override {
+ virtual Return<void> onWorkDone(const WorkBundle& workBundle) override {
std::list<std::unique_ptr<C2Work>> workItems;
- if (!c2_hidl::utils::objcpy(&workItems, workBundle)) {
+ if (!objcpy(&workItems, workBundle)) {
LOG(DEBUG) << "onWorkDone -- received corrupted WorkBundle.";
return Void();
}
@@ -667,12 +521,12 @@
}
virtual Return<void> onTripped(
- const hidl_vec<c2_hidl::SettingResult>& settingResults) override {
+ const hidl_vec<SettingResult>& settingResults) override {
std::vector<std::shared_ptr<C2SettingResult>> c2SettingResults(
settingResults.size());
for (size_t i = 0; i < settingResults.size(); ++i) {
std::unique_ptr<C2SettingResult> c2SettingResult;
- if (!c2_hidl::utils::objcpy(&c2SettingResult, settingResults[i])) {
+ if (!objcpy(&c2SettingResult, settingResults[i])) {
LOG(DEBUG) << "onTripped -- received corrupted SettingResult.";
return Void();
}
@@ -686,13 +540,13 @@
return Void();
}
- virtual Return<void> onError(c2_hidl::Status s, uint32_t errorCode) override {
+ virtual Return<void> onError(Status s, uint32_t errorCode) override {
LOG(DEBUG) << "onError --"
<< " status = " << s
<< ", errorCode = " << errorCode
<< ".";
if (std::shared_ptr<Listener> listener = base.lock()) {
- listener->onError(component, s == c2_hidl::Status::OK ?
+ listener->onError(component, s == Status::OK ?
errorCode : static_cast<c2_status_t>(s));
} else {
LOG(DEBUG) << "onError -- listener died.";
@@ -758,11 +612,11 @@
Codec2Client::Codec2Client(sp<Base> const& base,
size_t serviceIndex)
: Configurable{
- [base]() -> sp<c2_hidl::IConfigurable> {
- Return<sp<c2_hidl::IConfigurable>> transResult =
+ [base]() -> sp<IConfigurable> {
+ Return<sp<IConfigurable>> transResult =
base->getConfigurable();
return transResult.isOk() ?
- static_cast<sp<c2_hidl::IConfigurable>>(transResult) :
+ static_cast<sp<IConfigurable>>(transResult) :
nullptr;
}()
},
@@ -770,11 +624,11 @@
mBase1_1{Base1_1::castFrom(base)},
mBase1_2{Base1_2::castFrom(base)},
mServiceIndex{serviceIndex} {
- Return<sp<bufferpool_hidl::IClientManager>> transResult = base->getPoolClientManager();
+ Return<sp<IClientManager>> transResult = base->getPoolClientManager();
if (!transResult.isOk()) {
LOG(ERROR) << "getPoolClientManager -- transaction failed.";
} else {
- mHostPoolManager = static_cast<sp<bufferpool_hidl::IClientManager>>(transResult);
+ mHostPoolManager = static_cast<sp<IClientManager>>(transResult);
}
}
@@ -811,10 +665,10 @@
transStatus = mBase1_2->createComponent_1_2(
name,
hidlListener,
- bufferpool_hidl::implementation::ClientManager::getInstance(),
+ ClientManager::getInstance(),
[&status, component, hidlListener](
- c2_hidl::Status s,
- const sp<c2_hidl::IComponent>& c) {
+ Status s,
+ const sp<IComponent>& c) {
status = static_cast<c2_status_t>(s);
if (status != C2_OK) {
return;
@@ -827,10 +681,10 @@
transStatus = mBase1_1->createComponent_1_1(
name,
hidlListener,
- bufferpool_hidl::implementation::ClientManager::getInstance(),
+ ClientManager::getInstance(),
[&status, component, hidlListener](
- c2_hidl::Status s,
- const sp<c2_hidl_base::V1_1::IComponent>& c) {
+ Status s,
+ const sp<IComponent>& c) {
status = static_cast<c2_status_t>(s);
if (status != C2_OK) {
return;
@@ -842,10 +696,10 @@
transStatus = mBase1_0->createComponent(
name,
hidlListener,
- bufferpool_hidl::implementation::ClientManager::getInstance(),
+ ClientManager::getInstance(),
[&status, component, hidlListener](
- c2_hidl::Status s,
- const sp<c2_hidl_base::V1_0::IComponent>& c) {
+ Status s,
+ const sp<hardware::media::c2::V1_0::IComponent>& c) {
status = static_cast<c2_status_t>(s);
if (status != C2_OK) {
return;
@@ -893,8 +747,8 @@
Return<void> transStatus = mBase1_0->createInterface(
name,
[&status, interface](
- c2_hidl::Status s,
- const sp<c2_hidl::IComponentInterface>& i) {
+ Status s,
+ const sp<IComponentInterface>& i) {
status = static_cast<c2_status_t>(s);
if (status != C2_OK) {
return;
@@ -924,8 +778,8 @@
c2_status_t status;
Return<void> transStatus = mBase1_0->createInputSurface(
[&status, inputSurface](
- c2_hidl::Status s,
- const sp<c2_hidl::IInputSurface>& i) {
+ Status s,
+ const sp<IInputSurface>& i) {
status = static_cast<c2_status_t>(s);
if (status != C2_OK) {
return;
@@ -951,16 +805,16 @@
std::vector<C2Component::Traits> traits;
std::string const& serviceName = getServiceName();
Return<void> transStatus = mBase1_0->listComponents(
- [&traits, &serviceName](c2_hidl::Status s,
- const hidl_vec<c2_hidl::IComponentStore::ComponentTraits>& t) {
- if (s != c2_hidl::Status::OK) {
+ [&traits, &serviceName](Status s,
+ const hidl_vec<IComponentStore::ComponentTraits>& t) {
+ if (s != Status::OK) {
LOG(DEBUG) << "_listComponents -- call failed: "
<< static_cast<c2_status_t>(s) << ".";
return;
}
traits.resize(t.size());
for (size_t i = 0; i < t.size(); ++i) {
- if (!c2_hidl::utils::objcpy(&traits[i], t[i])) {
+ if (!objcpy(&traits[i], t[i])) {
LOG(ERROR) << "_listComponents -- corrupted output.";
return;
}
@@ -992,14 +846,14 @@
// should reflect the HAL API.
struct SimpleParamReflector : public C2ParamReflector {
virtual std::unique_ptr<C2StructDescriptor> describe(C2Param::CoreIndex coreIndex) const {
- hidl_vec<c2_hidl::ParamIndex> indices(1);
- indices[0] = static_cast<c2_hidl::ParamIndex>(coreIndex.coreIndex());
+ hidl_vec<ParamIndex> indices(1);
+ indices[0] = static_cast<ParamIndex>(coreIndex.coreIndex());
std::unique_ptr<C2StructDescriptor> descriptor;
Return<void> transStatus = mBase->getStructDescriptors(
indices,
[&descriptor](
- c2_hidl::Status s,
- const hidl_vec<c2_hidl::StructDescriptor>& sd) {
+ Status s,
+ const hidl_vec<StructDescriptor>& sd) {
c2_status_t status = static_cast<c2_status_t>(s);
if (status != C2_OK) {
LOG(DEBUG) << "SimpleParamReflector -- "
@@ -1017,7 +871,7 @@
descriptor.reset();
return;
}
- if (!c2_hidl::utils::objcpy(&descriptor, sd[0])) {
+ if (!objcpy(&descriptor, sd[0])) {
LOG(DEBUG) << "SimpleParamReflector -- "
"getStructDescriptors() returned "
"corrupted data.";
@@ -1345,11 +1199,11 @@
// Codec2Client::Interface
Codec2Client::Interface::Interface(const sp<Base>& base)
: Configurable{
- [base]() -> sp<c2_hidl::IConfigurable> {
- Return<sp<c2_hidl::IConfigurable>> transResult =
+ [base]() -> sp<IConfigurable> {
+ Return<sp<IConfigurable>> transResult =
base->getConfigurable();
return transResult.isOk() ?
- static_cast<sp<c2_hidl::IConfigurable>>(transResult) :
+ static_cast<sp<IConfigurable>>(transResult) :
nullptr;
}()
},
@@ -1359,17 +1213,17 @@
// Codec2Client::Component
Codec2Client::Component::Component(const sp<Base>& base)
: Configurable{
- [base]() -> sp<c2_hidl::IConfigurable> {
- Return<sp<c2_hidl::IComponentInterface>> transResult1 =
+ [base]() -> sp<IConfigurable> {
+ Return<sp<IComponentInterface>> transResult1 =
base->getInterface();
if (!transResult1.isOk()) {
return nullptr;
}
- Return<sp<c2_hidl::IConfigurable>> transResult2 =
- static_cast<sp<c2_hidl::IComponentInterface>>(transResult1)->
+ Return<sp<IConfigurable>> transResult2 =
+ static_cast<sp<IComponentInterface>>(transResult1)->
getConfigurable();
return transResult2.isOk() ?
- static_cast<sp<c2_hidl::IConfigurable>>(transResult2) :
+ static_cast<sp<IConfigurable>>(transResult2) :
nullptr;
}()
},
@@ -1382,17 +1236,17 @@
Codec2Client::Component::Component(const sp<Base1_1>& base)
: Configurable{
- [base]() -> sp<c2_hidl::IConfigurable> {
- Return<sp<c2_hidl::IComponentInterface>> transResult1 =
+ [base]() -> sp<IConfigurable> {
+ Return<sp<IComponentInterface>> transResult1 =
base->getInterface();
if (!transResult1.isOk()) {
return nullptr;
}
- Return<sp<c2_hidl::IConfigurable>> transResult2 =
- static_cast<sp<c2_hidl::IComponentInterface>>(transResult1)->
+ Return<sp<IConfigurable>> transResult2 =
+ static_cast<sp<IComponentInterface>>(transResult1)->
getConfigurable();
return transResult2.isOk() ?
- static_cast<sp<c2_hidl::IConfigurable>>(transResult2) :
+ static_cast<sp<IConfigurable>>(transResult2) :
nullptr;
}()
},
@@ -1405,17 +1259,17 @@
Codec2Client::Component::Component(const sp<Base1_2>& base)
: Configurable{
- [base]() -> sp<c2_hidl::IConfigurable> {
- Return<sp<c2_hidl::IComponentInterface>> transResult1 =
+ [base]() -> sp<IConfigurable> {
+ Return<sp<IComponentInterface>> transResult1 =
base->getInterface();
if (!transResult1.isOk()) {
return nullptr;
}
- Return<sp<c2_hidl::IConfigurable>> transResult2 =
- static_cast<sp<c2_hidl::IComponentInterface>>(transResult1)->
+ Return<sp<IConfigurable>> transResult2 =
+ static_cast<sp<IComponentInterface>>(transResult1)->
getConfigurable();
return transResult2.isOk() ?
- static_cast<sp<c2_hidl::IConfigurable>>(transResult2) :
+ static_cast<sp<IConfigurable>>(transResult2) :
nullptr;
}()
},
@@ -1437,9 +1291,9 @@
Return<void> transStatus = mBase1_0->createBlockPool(
static_cast<uint32_t>(id),
[&status, blockPoolId, configurable](
- c2_hidl::Status s,
+ Status s,
uint64_t pId,
- const sp<c2_hidl::IConfigurable>& c) {
+ const sp<IConfigurable>& c) {
status = static_cast<c2_status_t>(s);
configurable->reset();
if (status != C2_OK) {
@@ -1459,13 +1313,13 @@
c2_status_t Codec2Client::Component::destroyBlockPool(
C2BlockPool::local_id_t localId) {
- Return<c2_hidl::Status> transResult = mBase1_0->destroyBlockPool(
+ Return<Status> transResult = mBase1_0->destroyBlockPool(
static_cast<uint64_t>(localId));
if (!transResult.isOk()) {
LOG(ERROR) << "destroyBlockPool -- transaction failed.";
return C2_TRANSACTION_FAILED;
}
- return static_cast<c2_status_t>(static_cast<c2_hidl::Status>(transResult));
+ return static_cast<c2_status_t>(static_cast<Status>(transResult));
}
void Codec2Client::Component::handleOnWorkDone(
@@ -1476,18 +1330,18 @@
c2_status_t Codec2Client::Component::queue(
std::list<std::unique_ptr<C2Work>>* const items) {
- c2_hidl::WorkBundle workBundle;
+ WorkBundle workBundle;
if (!objcpy(&workBundle, *items, mBufferPoolSender.get())) {
LOG(ERROR) << "queue -- bad input.";
return C2_TRANSACTION_FAILED;
}
- Return<c2_hidl::Status> transStatus = mBase1_0->queue(workBundle);
+ Return<Status> transStatus = mBase1_0->queue(workBundle);
if (!transStatus.isOk()) {
LOG(ERROR) << "queue -- transaction failed.";
return C2_TRANSACTION_FAILED;
}
c2_status_t status =
- static_cast<c2_status_t>(static_cast<c2_hidl::Status>(transStatus));
+ static_cast<c2_status_t>(static_cast<Status>(transStatus));
if (status != C2_OK) {
LOG(DEBUG) << "queue -- call failed: " << status << ".";
}
@@ -1501,13 +1355,13 @@
c2_status_t status;
Return<void> transStatus = mBase1_0->flush(
[&status, flushedWork](
- c2_hidl::Status s, const c2_hidl::WorkBundle& wb) {
+ Status s, const WorkBundle& wb) {
status = static_cast<c2_status_t>(s);
if (status != C2_OK) {
LOG(DEBUG) << "flush -- call failed: " << status << ".";
return;
}
- if (!c2_hidl::utils::objcpy(flushedWork, wb)) {
+ if (!objcpy(flushedWork, wb)) {
status = C2_CORRUPTED;
} else {
status = C2_OK;
@@ -1540,14 +1394,14 @@
}
c2_status_t Codec2Client::Component::drain(C2Component::drain_mode_t mode) {
- Return<c2_hidl::Status> transStatus = mBase1_0->drain(
+ Return<Status> transStatus = mBase1_0->drain(
mode == C2Component::DRAIN_COMPONENT_WITH_EOS);
if (!transStatus.isOk()) {
LOG(ERROR) << "drain -- transaction failed.";
return C2_TRANSACTION_FAILED;
}
c2_status_t status =
- static_cast<c2_status_t>(static_cast<c2_hidl::Status>(transStatus));
+ static_cast<c2_status_t>(static_cast<Status>(transStatus));
if (status != C2_OK) {
LOG(DEBUG) << "drain -- call failed: " << status << ".";
}
@@ -1555,13 +1409,13 @@
}
c2_status_t Codec2Client::Component::start() {
- Return<c2_hidl::Status> transStatus = mBase1_0->start();
+ Return<Status> transStatus = mBase1_0->start();
if (!transStatus.isOk()) {
LOG(ERROR) << "start -- transaction failed.";
return C2_TRANSACTION_FAILED;
}
c2_status_t status =
- static_cast<c2_status_t>(static_cast<c2_hidl::Status>(transStatus));
+ static_cast<c2_status_t>(static_cast<Status>(transStatus));
if (status != C2_OK) {
LOG(DEBUG) << "start -- call failed: " << status << ".";
}
@@ -1569,13 +1423,13 @@
}
c2_status_t Codec2Client::Component::stop() {
- Return<c2_hidl::Status> transStatus = mBase1_0->stop();
+ Return<Status> transStatus = mBase1_0->stop();
if (!transStatus.isOk()) {
LOG(ERROR) << "stop -- transaction failed.";
return C2_TRANSACTION_FAILED;
}
c2_status_t status =
- static_cast<c2_status_t>(static_cast<c2_hidl::Status>(transStatus));
+ static_cast<c2_status_t>(static_cast<Status>(transStatus));
if (status != C2_OK) {
LOG(DEBUG) << "stop -- call failed: " << status << ".";
}
@@ -1583,13 +1437,13 @@
}
c2_status_t Codec2Client::Component::reset() {
- Return<c2_hidl::Status> transStatus = mBase1_0->reset();
+ Return<Status> transStatus = mBase1_0->reset();
if (!transStatus.isOk()) {
LOG(ERROR) << "reset -- transaction failed.";
return C2_TRANSACTION_FAILED;
}
c2_status_t status =
- static_cast<c2_status_t>(static_cast<c2_hidl::Status>(transStatus));
+ static_cast<c2_status_t>(static_cast<Status>(transStatus));
if (status != C2_OK) {
LOG(DEBUG) << "reset -- call failed: " << status << ".";
}
@@ -1597,13 +1451,13 @@
}
c2_status_t Codec2Client::Component::release() {
- Return<c2_hidl::Status> transStatus = mBase1_0->release();
+ Return<Status> transStatus = mBase1_0->release();
if (!transStatus.isOk()) {
LOG(ERROR) << "release -- transaction failed.";
return C2_TRANSACTION_FAILED;
}
c2_status_t status =
- static_cast<c2_status_t>(static_cast<c2_hidl::Status>(transStatus));
+ static_cast<c2_status_t>(static_cast<Status>(transStatus));
if (status != C2_OK) {
LOG(DEBUG) << "release -- call failed: " << status << ".";
}
@@ -1620,7 +1474,7 @@
c2_status_t status{};
Return<void> transStatus = mBase1_1->configureVideoTunnel(avSyncHwId,
[&status, sidebandHandle](
- c2_hidl::Status s, hardware::hidl_handle const& h) {
+ Status s, hardware::hidl_handle const& h) {
status = static_cast<c2_status_t>(s);
if (h.getNativeHandle()) {
*sidebandHandle = native_handle_clone(h.getNativeHandle());
@@ -1700,7 +1554,7 @@
ALOGD("setOutputSurface -- generation=%u consumer usage=%#llx%s",
generation, (long long)consumerUsage, syncObj ? " sync" : "");
- Return<c2_hidl::Status> transStatus = syncObj ?
+ Return<Status> transStatus = syncObj ?
mBase1_2->setOutputSurfaceWithSyncObj(
static_cast<uint64_t>(blockPoolId),
bqId == 0 ? nullHgbp : igbp, *syncObj) :
@@ -1708,14 +1562,12 @@
static_cast<uint64_t>(blockPoolId),
bqId == 0 ? nullHgbp : igbp);
- mOutputBufferQueue->expireOldWaiters();
-
if (!transStatus.isOk()) {
LOG(ERROR) << "setOutputSurface -- transaction failed.";
return C2_TRANSACTION_FAILED;
}
c2_status_t status =
- static_cast<c2_status_t>(static_cast<c2_hidl::Status>(transStatus));
+ static_cast<c2_status_t>(static_cast<Status>(transStatus));
if (status != C2_OK) {
LOG(DEBUG) << "setOutputSurface -- call failed: " << status << ".";
}
@@ -1730,6 +1582,10 @@
return mOutputBufferQueue->outputBuffer(block, input, output);
}
+void Codec2Client::Component::pollForRenderedFrames(FrameEventHistoryDelta* delta) {
+ mOutputBufferQueue->pollForRenderedFrames(delta);
+}
+
void Codec2Client::Component::setOutputSurfaceMaxDequeueCount(
int maxDequeueCount) {
mOutputBufferQueue->updateMaxDequeueBufferCount(maxDequeueCount);
@@ -1739,19 +1595,18 @@
C2BlockPool::local_id_t blockPoolId) {
std::scoped_lock lock(mOutputMutex);
mOutputBufferQueue->stop();
- Return<c2_hidl::Status> transStatus = mBase1_0->setOutputSurface(
+ Return<Status> transStatus = mBase1_0->setOutputSurface(
static_cast<uint64_t>(blockPoolId), nullptr);
if (!transStatus.isOk()) {
LOG(ERROR) << "setOutputSurface(stopUsingOutputSurface) -- transaction failed.";
} else {
c2_status_t status =
- static_cast<c2_status_t>(static_cast<c2_hidl::Status>(transStatus));
+ static_cast<c2_status_t>(static_cast<Status>(transStatus));
if (status != C2_OK) {
LOG(DEBUG) << "setOutputSurface(stopUsingOutputSurface) -- call failed: "
<< status << ".";
}
}
- mOutputBufferQueue->expireOldWaiters();
}
c2_status_t Codec2Client::Component::connectToInputSurface(
@@ -1761,7 +1616,7 @@
Return<void> transStatus = mBase1_0->connectToInputSurface(
inputSurface->mBase,
[&status, connection](
- c2_hidl::Status s, const sp<c2_hidl::IInputSurfaceConnection>& c) {
+ Status s, const sp<IInputSurfaceConnection>& c) {
status = static_cast<c2_status_t>(s);
if (status != C2_OK) {
LOG(DEBUG) << "connectToInputSurface -- call failed: "
@@ -1785,7 +1640,7 @@
Return<void> transStatus = mBase1_0->connectToOmxInputSurface(
producer, source,
[&status, connection](
- c2_hidl::Status s, const sp<c2_hidl::IInputSurfaceConnection>& c) {
+ Status s, const sp<IInputSurfaceConnection>& c) {
status = static_cast<c2_status_t>(s);
if (status != C2_OK) {
LOG(DEBUG) << "connectToOmxInputSurface -- call failed: "
@@ -1802,13 +1657,13 @@
}
c2_status_t Codec2Client::Component::disconnectFromInputSurface() {
- Return<c2_hidl::Status> transStatus = mBase1_0->disconnectFromInputSurface();
+ Return<Status> transStatus = mBase1_0->disconnectFromInputSurface();
if (!transStatus.isOk()) {
LOG(ERROR) << "disconnectToInputSurface -- transaction failed.";
return C2_TRANSACTION_FAILED;
}
c2_status_t status =
- static_cast<c2_status_t>(static_cast<c2_hidl::Status>(transStatus));
+ static_cast<c2_status_t>(static_cast<Status>(transStatus));
if (status != C2_OK) {
LOG(DEBUG) << "disconnectFromInputSurface -- call failed: "
<< status << ".";
@@ -1855,13 +1710,13 @@
}
// Codec2Client::InputSurface
-Codec2Client::InputSurface::InputSurface(const sp<c2_hidl::IInputSurface>& base)
+Codec2Client::InputSurface::InputSurface(const sp<IInputSurface>& base)
: Configurable{
- [base]() -> sp<c2_hidl::IConfigurable> {
- Return<sp<c2_hidl::IConfigurable>> transResult =
+ [base]() -> sp<IConfigurable> {
+ Return<sp<IConfigurable>> transResult =
base->getConfigurable();
return transResult.isOk() ?
- static_cast<sp<c2_hidl::IConfigurable>>(transResult) :
+ static_cast<sp<IConfigurable>>(transResult) :
nullptr;
}()
},
@@ -1881,19 +1736,19 @@
return mGraphicBufferProducer;
}
-sp<c2_hidl::IInputSurface> Codec2Client::InputSurface::getHalInterface() const {
+sp<IInputSurface> Codec2Client::InputSurface::getHalInterface() const {
return mBase;
}
// Codec2Client::InputSurfaceConnection
Codec2Client::InputSurfaceConnection::InputSurfaceConnection(
- const sp<c2_hidl::IInputSurfaceConnection>& base)
+ const sp<IInputSurfaceConnection>& base)
: Configurable{
- [base]() -> sp<c2_hidl::IConfigurable> {
- Return<sp<c2_hidl::IConfigurable>> transResult =
+ [base]() -> sp<IConfigurable> {
+ Return<sp<IConfigurable>> transResult =
base->getConfigurable();
return transResult.isOk() ?
- static_cast<sp<c2_hidl::IConfigurable>>(transResult) :
+ static_cast<sp<IConfigurable>>(transResult) :
nullptr;
}()
},
@@ -1901,8 +1756,8 @@
}
c2_status_t Codec2Client::InputSurfaceConnection::disconnect() {
- Return<c2_hidl::Status> transResult = mBase->disconnect();
- return static_cast<c2_status_t>(static_cast<c2_hidl::Status>(transResult));
+ Return<Status> transResult = mBase->disconnect();
+ return static_cast<c2_status_t>(static_cast<Status>(transResult));
}
} // namespace android
diff --git a/media/codec2/hal/client/include/codec2/hidl/client.h b/media/codec2/hal/client/include/codec2/hidl/client.h
index 6a71f91..efbf179 100644
--- a/media/codec2/hal/client/include/codec2/hidl/client.h
+++ b/media/codec2/hal/client/include/codec2/hidl/client.h
@@ -23,6 +23,7 @@
#include <C2Param.h>
#include <C2.h>
+#include <gui/FrameTimestamps.h>
#include <gui/IGraphicBufferProducer.h>
#include <hidl/HidlSupport.h>
#include <utils/StrongPointer.h>
@@ -83,13 +84,6 @@
struct IComponentStore;
} // namespace android::hardware::media::c2::V1_2
-namespace aidl::android::hardware::media::c2 {
-class IComponent;
-class IComponentInterface;
-class IComponentStore;
-class IConfigurable;
-} // namespace aidl::android::hardware::media::c2
-
namespace android::hardware::media::bufferpool::V2_0 {
struct IClientManager;
} // namespace android::hardware::media::bufferpool::V2_0
@@ -112,34 +106,7 @@
// declaration of an inner class is not possible.
struct Codec2ConfigurableClient {
- typedef ::android::hardware::media::c2::V1_0::IConfigurable HidlBase;
-
- struct ImplBase {
- virtual ~ImplBase() = default;
-
- virtual const C2String& getName() const = 0;
-
- virtual c2_status_t query(
- const std::vector<C2Param*>& stackParams,
- const std::vector<C2Param::Index> &heapParamIndices,
- c2_blocking_t mayBlock,
- std::vector<std::unique_ptr<C2Param>>* const heapParams) const = 0;
-
- virtual c2_status_t config(
- const std::vector<C2Param*> ¶ms,
- c2_blocking_t mayBlock,
- std::vector<std::unique_ptr<C2SettingResult>>* const failures) = 0;
-
- virtual c2_status_t querySupportedParams(
- std::vector<std::shared_ptr<C2ParamDescriptor>>* const params
- ) const = 0;
-
- virtual c2_status_t querySupportedValues(
- std::vector<C2FieldSupportedValuesQuery>& fields,
- c2_blocking_t mayBlock) const = 0;
- };
-
- explicit Codec2ConfigurableClient(const sp<HidlBase> &hidlBase);
+ typedef ::android::hardware::media::c2::V1_0::IConfigurable Base;
const C2String& getName() const;
@@ -161,11 +128,15 @@
c2_status_t querySupportedValues(
std::vector<C2FieldSupportedValuesQuery>& fields,
c2_blocking_t mayBlock) const;
-private:
- struct HidlImpl;
- struct AidlImpl;
- const std::unique_ptr<ImplBase> mImpl;
+ // base cannot be null.
+ Codec2ConfigurableClient(const sp<Base>& base);
+
+protected:
+ sp<Base> mBase;
+ C2String mName;
+
+ friend struct Codec2Client;
};
struct Codec2Client : public Codec2ConfigurableClient {
@@ -438,6 +409,9 @@
const QueueBufferInput& input,
QueueBufferOutput* output);
+ // Retrieve frame event history from the output surface.
+ void pollForRenderedFrames(FrameEventHistoryDelta* delta);
+
// Set max dequeue count for output surface.
void setOutputSurfaceMaxDequeueCount(int maxDequeueCount);
@@ -538,4 +512,3 @@
} // namespace android
#endif // CODEC2_HIDL_CLIENT_H
-
diff --git a/media/codec2/hal/client/include/codec2/hidl/output.h b/media/codec2/hal/client/include/codec2/hidl/output.h
index c208df0..35a0224 100644
--- a/media/codec2/hal/client/include/codec2/hidl/output.h
+++ b/media/codec2/hal/client/include/codec2/hidl/output.h
@@ -17,6 +17,7 @@
#ifndef CODEC2_HIDL_V1_0_UTILS_OUTPUT_BUFFER_QUEUE
#define CODEC2_HIDL_V1_0_UTILS_OUTPUT_BUFFER_QUEUE
+#include <gui/FrameTimestamps.h>
#include <gui/IGraphicBufferProducer.h>
#include <codec2/hidl/1.0/types.h>
#include <codec2/hidl/1.2/types.h>
@@ -50,10 +51,6 @@
int maxDequeueBufferCount,
std::shared_ptr<V1_2::SurfaceSyncObj> *syncObj);
- // If there are waiters to allocate from the old surface, wake up and expire
- // them.
- void expireOldWaiters();
-
// Stop using the current output surface. Pending buffer opeations will not
// perform anymore.
void stop();
@@ -64,6 +61,9 @@
const BnGraphicBufferProducer::QueueBufferInput& input,
BnGraphicBufferProducer::QueueBufferOutput* output);
+ // Retrieve frame event history from the output surface.
+ void pollForRenderedFrames(FrameEventHistoryDelta* delta);
+
// Call holdBufferQueueBlock() on output blocks in the given workList.
// The OutputBufferQueue will take the ownership of output blocks.
//
@@ -90,8 +90,6 @@
std::weak_ptr<_C2BlockPoolData> mPoolDatas[BufferQueueDefs::NUM_BUFFER_SLOTS];
std::shared_ptr<C2SurfaceSyncMemory> mSyncMem;
bool mStopped;
- std::mutex mOldMutex;
- std::shared_ptr<C2SurfaceSyncMemory> mOldMem;
bool registerBuffer(const C2ConstGraphicBlock& block);
};
diff --git a/media/codec2/hal/client/output.cpp b/media/codec2/hal/client/output.cpp
index 6aaf9ab..ce706cc 100644
--- a/media/codec2/hal/client/output.cpp
+++ b/media/codec2/hal/client/output.cpp
@@ -217,7 +217,6 @@
sp<GraphicBuffer> buffers[BufferQueueDefs::NUM_BUFFER_SLOTS];
std::weak_ptr<_C2BlockPoolData>
poolDatas[BufferQueueDefs::NUM_BUFFER_SLOTS];
- std::shared_ptr<C2SurfaceSyncMemory> oldMem;
{
std::scoped_lock<std::mutex> l(mMutex);
bool stopped = mStopped;
@@ -239,7 +238,7 @@
}
return false;
}
- oldMem = mSyncMem;
+ std::shared_ptr<C2SurfaceSyncMemory> oldMem = mSyncMem;
C2SyncVariables *oldSync = mSyncMem ? mSyncMem->mem() : nullptr;
if (oldSync) {
oldSync->lock();
@@ -315,26 +314,11 @@
newSync->unlock();
}
}
- {
- std::scoped_lock<std::mutex> l(mOldMutex);
- mOldMem = oldMem;
- }
ALOGD("remote graphic buffer migration %zu/%zu",
success, tryNum);
return true;
}
-void OutputBufferQueue::expireOldWaiters() {
- std::scoped_lock<std::mutex> l(mOldMutex);
- if (mOldMem) {
- C2SyncVariables *oldSync = mOldMem->mem();
- if (oldSync) {
- oldSync->notifyAll();
- }
- mOldMem.reset();
- }
-}
-
void OutputBufferQueue::stop() {
std::scoped_lock<std::mutex> l(mMutex);
mStopped = true;
@@ -492,6 +476,12 @@
return OK;
}
+void OutputBufferQueue::pollForRenderedFrames(FrameEventHistoryDelta* delta) {
+ if (mIgbp) {
+ mIgbp->getFrameTimestamps(delta);
+ }
+}
+
void OutputBufferQueue::holdBufferQueueBlocks(
const std::list<std::unique_ptr<C2Work>>& workList) {
forEachBlock(workList,
@@ -516,4 +506,3 @@
} // namespace media
} // namespace hardware
} // namespace android
-
diff --git a/media/codec2/hal/hidl/1.0/utils/InputSurfaceConnection.cpp b/media/codec2/hal/hidl/1.0/utils/InputSurfaceConnection.cpp
index 7c2e014..d3fdd6b 100644
--- a/media/codec2/hal/hidl/1.0/utils/InputSurfaceConnection.cpp
+++ b/media/codec2/hal/hidl/1.0/utils/InputSurfaceConnection.cpp
@@ -145,7 +145,7 @@
// C2AndroidMemoryUsage(C2MemoryUsage(usage.value)).
// asGrallocUsage();
- uint32_t grallocUsage =
+ uint64_t grallocUsage =
mSinkName.compare(0, 11, "c2.android.") == 0 ?
GRALLOC_USAGE_SW_READ_OFTEN :
GRALLOC_USAGE_HW_VIDEO_ENCODER;
diff --git a/media/codec2/sfplugin/C2OMXNode.cpp b/media/codec2/sfplugin/C2OMXNode.cpp
index ed7d69c..92cfe31 100644
--- a/media/codec2/sfplugin/C2OMXNode.cpp
+++ b/media/codec2/sfplugin/C2OMXNode.cpp
@@ -230,6 +230,12 @@
err = OK;
break;
}
+ case OMX_IndexParamConsumerUsageBits64: {
+ OMX_U64 *usage = (OMX_U64 *)params;
+ *usage = mUsage;
+ err = OK;
+ break;
+ }
case OMX_IndexParamPortDefinition: {
if (size < sizeof(OMX_PARAM_PORTDEFINITIONTYPE)) {
return BAD_VALUE;
@@ -293,6 +299,13 @@
}
mUsage = *((OMX_U32 *)params);
return OK;
+
+ case OMX_IndexParamConsumerUsageBits64:
+ if (size != sizeof(OMX_U64)) {
+ return BAD_VALUE;
+ }
+ mUsage = *((OMX_U64 *)params);
+ return OK;
}
return ERROR_UNSUPPORTED;
}
diff --git a/media/codec2/sfplugin/CCodec.cpp b/media/codec2/sfplugin/CCodec.cpp
index f258bff..eb1b4b5 100644
--- a/media/codec2/sfplugin/CCodec.cpp
+++ b/media/codec2/sfplugin/CCodec.cpp
@@ -206,12 +206,19 @@
mNode = new C2OMXNode(comp);
mOmxNode = new hardware::media::omx::V1_0::utils::TWOmxNode(mNode);
mNode->setFrameSize(mWidth, mHeight);
-
// Usage is queried during configure(), so setting it beforehand.
- OMX_U32 usage = mConfig.mUsage & 0xFFFFFFFF;
- (void)mNode->setParameter(
- (OMX_INDEXTYPE)OMX_IndexParamConsumerUsageBits,
- &usage, sizeof(usage));
+ // The 64-bit setParameter index exists only in C2OMXNode.
+ OMX_U64 usage64 = mConfig.mUsage;
+ status_t res = mNode->setParameter(
+ (OMX_INDEXTYPE)OMX_IndexParamConsumerUsageBits64,
+ &usage64, sizeof(usage64));
+
+ if (res != OK) {
+ OMX_U32 usage = mConfig.mUsage & 0xFFFFFFFF;
+ (void)mNode->setParameter(
+ (OMX_INDEXTYPE)OMX_IndexParamConsumerUsageBits,
+ &usage, sizeof(usage));
+ }
return GetStatus(mSource->configure(
mOmxNode, static_cast<hardware::graphics::common::V1_0::Dataspace>(mDataSpace)));
@@ -877,6 +884,16 @@
if (msg->findInt32(KEY_PUSH_BLANK_BUFFERS_ON_STOP, &pushBlankBuffersOnStop)) {
config->mPushBlankBuffersOnStop = pushBlankBuffersOnStop == 1;
}
+ // secure components and protected content default to the
+ // "push-blank-buffers-on-shutdown" behavior
+ if (!config->mPushBlankBuffersOnStop) {
+ int32_t usageProtected;
+ if (comp->getName().find(".secure") != std::string::npos) {
+ config->mPushBlankBuffersOnStop = true;
+ } else if (msg->findInt32("protected", &usageProtected) && usageProtected) {
+ config->mPushBlankBuffersOnStop = true;
+ }
+ }
}
}
setSurface(surface);
@@ -2540,17 +2557,6 @@
}
void CCodec::initiateReleaseIfStuck() {
- std::string name;
- bool pendingDeadline = false;
- {
- Mutexed<NamedTimePoint>::Locked deadline(mDeadline);
- if (deadline->get() < std::chrono::steady_clock::now()) {
- name = deadline->getName();
- }
- if (deadline->get() != TimePoint::max()) {
- pendingDeadline = true;
- }
- }
bool tunneled = false;
bool isMediaTypeKnown = false;
{
@@ -2588,6 +2594,17 @@
tunneled = config->mTunneled;
isMediaTypeKnown = (kKnownMediaTypes.count(config->mCodingMediaType) != 0);
}
+ std::string name;
+ bool pendingDeadline = false;
+ {
+ Mutexed<NamedTimePoint>::Locked deadline(mDeadline);
+ if (deadline->get() < std::chrono::steady_clock::now()) {
+ name = deadline->getName();
+ }
+ if (deadline->get() != TimePoint::max()) {
+ pendingDeadline = true;
+ }
+ }
if (!tunneled && isMediaTypeKnown && name.empty()) {
constexpr std::chrono::steady_clock::duration kWorkDurationThreshold = 3s;
std::chrono::steady_clock::duration elapsed = mChannel->elapsed();
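
A minimal standalone sketch (not part of the change itself) of the consumer-usage negotiation added above in CCodec.cpp: try the 64-bit index first and fall back to the 32-bit one when the node rejects it. FakeNode, kUsageBits64 and the return codes are hypothetical stand-ins, not real OMX/Codec2 API.

    // Sketch only; assumes a node that rejects parameter indices it does not know.
    #include <cstddef>
    #include <cstdint>
    #include <cstdio>

    enum ParamIndex { kUsageBits32, kUsageBits64 };

    struct FakeNode {
        bool supports64;             // whether the node understands the 64-bit index
        uint64_t storedUsage = 0;

        int setParameter(ParamIndex index, const void* data, std::size_t size) {
            if (index == kUsageBits64) {
                if (!supports64 || size != sizeof(uint64_t)) return -1;  // rejected
                storedUsage = *static_cast<const uint64_t*>(data);
                return 0;
            }
            if (size != sizeof(uint32_t)) return -1;
            storedUsage = *static_cast<const uint32_t*>(data);           // high bits lost
            return 0;
        }
    };

    // Mirrors the configure() flow: prefer the wide index, fall back to 32 bits.
    void configureUsage(FakeNode& node, uint64_t usage) {
        uint64_t usage64 = usage;
        if (node.setParameter(kUsageBits64, &usage64, sizeof(usage64)) != 0) {
            uint32_t usage32 = static_cast<uint32_t>(usage & 0xFFFFFFFF);
            (void)node.setParameter(kUsageBits32, &usage32, sizeof(usage32));
        }
    }

    int main() {
        FakeNode oldNode{false}, newNode{true};
        uint64_t usage = (1ULL << 32) | 0x4;    // one flag above bit 31 plus a low bit
        configureUsage(oldNode, usage);
        configureUsage(newNode, usage);
        std::printf("old=0x%llx new=0x%llx\n",
                    (unsigned long long)oldNode.storedUsage,
                    (unsigned long long)newNode.storedUsage);
        return 0;
    }

On nodes without the 64-bit index only the low 32 bits of the usage survive, which is exactly the behavior the fallback branch preserves.
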
diff --git a/media/codec2/sfplugin/CCodecBufferChannel.cpp b/media/codec2/sfplugin/CCodecBufferChannel.cpp
index fff008b..d2df4f3 100644
--- a/media/codec2/sfplugin/CCodecBufferChannel.cpp
+++ b/media/codec2/sfplugin/CCodecBufferChannel.cpp
@@ -426,7 +426,8 @@
size_t offset,
const CryptoPlugin::SubSample *subSamples,
size_t numSubSamples,
- const sp<MediaCodecBuffer> &buffer) {
+ const sp<MediaCodecBuffer> &buffer,
+ AString* errorDetailMsg) {
static const C2MemoryUsage kSecureUsage{C2MemoryUsage::READ_PROTECTED, 0};
static const C2MemoryUsage kDefaultReadWriteUsage{
C2MemoryUsage::CPU_READ, C2MemoryUsage::CPU_WRITE};
@@ -456,7 +457,6 @@
ssize_t result = -1;
ssize_t codecDataOffset = 0;
if (mCrypto) {
- AString errorDetailMsg;
int32_t heapSeqNum = getHeapSeqNum(memory);
hardware::drm::V1_0::SharedBuffer src{(uint32_t)heapSeqNum, offset, size};
hardware::drm::V1_0::DestinationBuffer dst;
@@ -470,7 +470,7 @@
}
result = mCrypto->decrypt(
key, iv, mode, pattern, src, 0, subSamples, numSubSamples,
- dst, &errorDetailMsg);
+ dst, errorDetailMsg);
if (result < 0) {
ALOGI("[%s] attachEncryptedBuffer: decrypt failed: result = %zd", mName, result);
return result;
@@ -515,7 +515,9 @@
result = (ssize_t)_bytesWritten;
detailedError = _detailedError;
});
-
+ if (errorDetailMsg) {
+ errorDetailMsg->setTo(detailedError.c_str(), detailedError.size());
+ }
if (!returnVoid.isOk() || status != CasStatus::OK || result < 0) {
ALOGI("[%s] descramble failed, trans=%s, status=%d, result=%zd",
mName, returnVoid.description().c_str(), status, result);
@@ -902,7 +904,7 @@
}
// TODO: revisit this after C2Fence implementation.
- android::IGraphicBufferProducer::QueueBufferInput qbi(
+ IGraphicBufferProducer::QueueBufferInput qbi(
timestampNs,
false, // droppable
dataSpace,
@@ -966,9 +968,9 @@
}
SetMetadataToGralloc4Handle(dataSpace, hdrStaticInfo, hdrDynamicInfo, block.handle());
- // we don't have dirty regions
- qbi.setSurfaceDamage(Region::INVALID_REGION);
- android::IGraphicBufferProducer::QueueBufferOutput qbo;
+ qbi.setSurfaceDamage(Region::INVALID_REGION); // we don't have dirty regions
+ qbi.getFrameTimestamps = true; // we need to know when a frame is rendered
+ IGraphicBufferProducer::QueueBufferOutput qbo;
status_t result = mComponent->queueToOutputSurface(block, qbi, &qbo);
if (result != OK) {
ALOGI("[%s] queueBuffer failed: %d", mName, result);
@@ -986,11 +988,107 @@
int64_t mediaTimeUs = 0;
(void)buffer->meta()->findInt64("timeUs", &mediaTimeUs);
- mCCodecCallback->onOutputFramesRendered(mediaTimeUs, timestampNs);
+ trackReleasedFrame(qbo, mediaTimeUs, timestampNs);
+ processRenderedFrames(qbo.frameTimestamps);
return OK;
}
+void CCodecBufferChannel::initializeFrameTrackingFor(ANativeWindow * window) {
+ int hasPresentFenceTimes = 0;
+ window->query(window, NATIVE_WINDOW_FRAME_TIMESTAMPS_SUPPORTS_PRESENT, &hasPresentFenceTimes);
+ mHasPresentFenceTimes = hasPresentFenceTimes == 1;
+ if (mHasPresentFenceTimes) {
+ ALOGI("Using latch times for frame rendered signals - present fences not supported");
+ }
+ mTrackedFrames.clear();
+}
+
+void CCodecBufferChannel::trackReleasedFrame(const IGraphicBufferProducer::QueueBufferOutput& qbo,
+ int64_t mediaTimeUs, int64_t desiredRenderTimeNs) {
+ // If the render time is earlier than now, then we're suggesting it should be rendered ASAP,
+ // so track the frame as if the desired render time is now.
+ int64_t nowNs = systemTime(SYSTEM_TIME_MONOTONIC);
+ if (desiredRenderTimeNs < nowNs) {
+ desiredRenderTimeNs = nowNs;
+ }
+ // We've just queued a frame to the surface, so keep track of it and later check to see if it is
+ // actually rendered.
+ TrackedFrame frame;
+ frame.number = qbo.nextFrameNumber - 1;
+ frame.mediaTimeUs = mediaTimeUs;
+ frame.desiredRenderTimeNs = desiredRenderTimeNs;
+ frame.latchTime = -1;
+ frame.presentFence = nullptr;
+ mTrackedFrames.push_back(frame);
+}
+
+void CCodecBufferChannel::processRenderedFrames(const FrameEventHistoryDelta& deltas) {
+ // Grab the latch times and present fences from the frame event deltas
+ for (const auto& delta : deltas) {
+ for (auto& frame : mTrackedFrames) {
+ if (delta.getFrameNumber() == frame.number) {
+ delta.getLatchTime(&frame.latchTime);
+ delta.getDisplayPresentFence(&frame.presentFence);
+ }
+ }
+ }
+
+ // Scan all frames and check to see if the frames that SHOULD have been rendered by now, have,
+ // in fact, been rendered.
+ int64_t nowNs = systemTime(SYSTEM_TIME_MONOTONIC);
+ while (!mTrackedFrames.empty()) {
+ TrackedFrame & frame = mTrackedFrames.front();
+ // Frames that should have been rendered at least 100ms in the past are checked
+ if (frame.desiredRenderTimeNs > nowNs - 100*1000*1000LL) {
+ break;
+ }
+
+ // If we don't have a render time by now, then consider the frame as dropped
+ int64_t renderTimeNs = getRenderTimeNs(frame);
+ if (renderTimeNs != -1) {
+ mCCodecCallback->onOutputFramesRendered(frame.mediaTimeUs, renderTimeNs);
+ }
+ mTrackedFrames.pop_front();
+ }
+}
+
+int64_t CCodecBufferChannel::getRenderTimeNs(const TrackedFrame& frame) {
+ // If the device doesn't have accurate present fence times, then use the latch time as a proxy
+ if (!mHasPresentFenceTimes) {
+ if (frame.latchTime == -1) {
+ ALOGD("no latch time for frame %d", (int) frame.number);
+ return -1;
+ }
+ return frame.latchTime;
+ }
+
+ if (frame.presentFence == nullptr) {
+ ALOGW("no present fence for frame %d", (int) frame.number);
+ return -1;
+ }
+
+ nsecs_t actualRenderTimeNs = frame.presentFence->getSignalTime();
+
+ if (actualRenderTimeNs == Fence::SIGNAL_TIME_INVALID) {
+ ALOGW("invalid signal time for frame %d", (int) frame.number);
+ return -1;
+ }
+
+ if (actualRenderTimeNs == Fence::SIGNAL_TIME_PENDING) {
+ ALOGD("present fence has not fired for frame %d", (int) frame.number);
+ return -1;
+ }
+
+ return actualRenderTimeNs;
+}
+
+void CCodecBufferChannel::pollForRenderedBuffers() {
+ FrameEventHistoryDelta delta;
+ mComponent->pollForRenderedFrames(&delta);
+ processRenderedFrames(delta);
+}
+
status_t CCodecBufferChannel::discardBuffer(const sp<MediaCodecBuffer> &buffer) {
ALOGV("[%s] discardBuffer: %p", mName, buffer.get());
bool released = false;
@@ -1615,6 +1713,8 @@
Mutexed<Output>::Locked output(mOutput);
output->buffers.reset();
}
+ // reset the frames that are being tracked for onFrameRendered callbacks
+ mTrackedFrames.clear();
}
void CCodecBufferChannel::release() {
@@ -1683,6 +1783,8 @@
output->buffers->flushStash();
}
}
+ // reset the frames that are being tracked for onFrameRendered callbacks
+ mTrackedFrames.clear();
}
void CCodecBufferChannel::onWorkDone(
@@ -2162,6 +2264,7 @@
Mutexed<OutputSurface>::Locked output(mOutputSurface);
output->surface = newSurface;
output->generation = generation;
+ initializeFrameTrackingFor(static_cast<ANativeWindow *>(newSurface.get()));
}
if (oldSurface && pushBlankBuffer) {
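
A minimal sketch (not part of the change itself) of the frame-finalization rule that processRenderedFrames() applies above, reduced to plain data: frames whose desired render time is more than 100 ms in the past are either reported with their resolved render time or treated as dropped. The Sketch-suffixed names are illustrative only.

    #include <cstdint>
    #include <deque>
    #include <vector>

    struct TrackedFrameSketch {
        uint64_t number;
        int64_t mediaTimeUs;
        int64_t desiredRenderTimeNs;
        int64_t renderTimeNs;   // -1 until a latch time or present fence resolves it
    };

    struct RenderedSketch { int64_t mediaTimeUs; int64_t renderTimeNs; };

    // Returns the frames to report via onOutputFramesRendered(); everything
    // popped without a render time is considered dropped, as in the patch.
    std::vector<RenderedSketch> finalizeFrames(std::deque<TrackedFrameSketch>& frames,
                                               int64_t nowNs) {
        constexpr int64_t kGraceNs = 100'000'000;   // 100 ms, matching the patch
        std::vector<RenderedSketch> rendered;
        while (!frames.empty()) {
            const TrackedFrameSketch& f = frames.front();
            if (f.desiredRenderTimeNs > nowNs - kGraceNs) {
                break;                              // too recent to judge yet
            }
            if (f.renderTimeNs != -1) {
                rendered.push_back({f.mediaTimeUs, f.renderTimeNs});
            }
            frames.pop_front();
        }
        return rendered;
    }

Because the deque is in queue order and the scan stops at the first frame inside the grace window, each frame is inspected a bounded number of times even when pollForRenderedBuffers() is called frequently.
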
diff --git a/media/codec2/sfplugin/CCodecBufferChannel.h b/media/codec2/sfplugin/CCodecBufferChannel.h
index a52d4dc..20dca2b 100644
--- a/media/codec2/sfplugin/CCodecBufferChannel.h
+++ b/media/codec2/sfplugin/CCodecBufferChannel.h
@@ -18,6 +18,7 @@
#define CCODEC_BUFFER_CHANNEL_H_
+#include <deque>
#include <map>
#include <memory>
#include <vector>
@@ -85,9 +86,11 @@
size_t offset,
const CryptoPlugin::SubSample *subSamples,
size_t numSubSamples,
- const sp<MediaCodecBuffer> &buffer) override;
+ const sp<MediaCodecBuffer> &buffer,
+ AString* errorDetailMsg) override;
virtual status_t renderOutputBuffer(
const sp<MediaCodecBuffer> &buffer, int64_t timestampNs) override;
+ virtual void pollForRenderedBuffers() override;
virtual status_t discardBuffer(const sp<MediaCodecBuffer> &buffer) override;
virtual void getInputBufferArray(Vector<sp<MediaCodecBuffer>> *array) override;
virtual void getOutputBufferArray(Vector<sp<MediaCodecBuffer>> *array) override;
@@ -260,6 +263,14 @@
bool mRunning;
};
+ struct TrackedFrame {
+ uint64_t number;
+ int64_t mediaTimeUs;
+ int64_t desiredRenderTimeNs;
+ nsecs_t latchTime;
+ sp<Fence> presentFence;
+ };
+
void feedInputBufferIfAvailable();
void feedInputBufferIfAvailableInternal();
status_t queueInputBufferInternal(sp<MediaCodecBuffer> buffer,
@@ -272,6 +283,12 @@
void ensureDecryptDestination(size_t size);
int32_t getHeapSeqNum(const sp<hardware::HidlMemory> &memory);
+ void initializeFrameTrackingFor(ANativeWindow * window);
+ void trackReleasedFrame(const IGraphicBufferProducer::QueueBufferOutput& qbo,
+ int64_t mediaTimeUs, int64_t desiredRenderTimeNs);
+ void processRenderedFrames(const FrameEventHistoryDelta& delta);
+ int64_t getRenderTimeNs(const TrackedFrame& frame);
+
QueueSync mSync;
sp<MemoryDealer> mDealer;
sp<IMemory> mDecryptDestination;
@@ -313,6 +330,9 @@
sp<MemoryDealer> makeMemoryDealer(size_t heapSize);
+ std::deque<TrackedFrame> mTrackedFrames;
+ bool mHasPresentFenceTimes;
+
struct OutputSurface {
sp<Surface> surface;
uint32_t generation;
diff --git a/media/codec2/sfplugin/CCodecConfig.cpp b/media/codec2/sfplugin/CCodecConfig.cpp
index cfadc95..a893bc0 100644
--- a/media/codec2/sfplugin/CCodecConfig.cpp
+++ b/media/codec2/sfplugin/CCodecConfig.cpp
@@ -285,6 +285,12 @@
}
}
+ // Updates or adds the config mappers for an SdkKey.
+ void updateConfigMappersForKey(const SdkKey& key,
+ const std::vector<ConfigMapper>& vec_cm) {
+ mConfigMappers.insert_or_assign(key, vec_cm);
+ }
+
/**
* Returns all paths for a specific domain.
*
@@ -1914,6 +1920,67 @@
const sp<AMessage> &sdkParams, Domain configDomain,
c2_blocking_t blocking,
std::vector<std::unique_ptr<C2Param>> *configUpdate) const {
+ // Update the mappers if we know more about this format:
+ // an AV1 10-bit or 8-bit encoding request.
+ AString mime;
+ int32_t requestedSdkProfile = -1;
+ if ((mDomain == (IS_VIDEO | IS_ENCODER)) &&
+ sdkParams->findString(KEY_MIME, &mime) &&
+ mime == MIMETYPE_VIDEO_AV1) {
+
+ sdkParams->findInt32(KEY_PROFILE, &requestedSdkProfile);
+ bool is10bAv1EncodeRequested = (requestedSdkProfile == AV1ProfileMain10);
+
+ int32_t bitDepth = (is10bAv1EncodeRequested) ? 10 : 8;
+ // we always initialize with an 8-bit mapper. Update this only if needed.
+ if (bitDepth != 8) {
+ std::shared_ptr<C2Mapper::ProfileLevelMapper> mapper =
+ C2Mapper::GetBitDepthProfileLevelMapper(mCodingMediaType, bitDepth);
+ mStandardParams->updateConfigMappersForKey(StandardParams::SdkKey(KEY_PROFILE),
+ {
+ ConfigMapper(KEY_PROFILE, C2_PARAMKEY_PROFILE_LEVEL, "profile")
+ .limitTo(Domain::CODED)
+ .withMappers([mapper](C2Value v) -> C2Value {
+ C2Config::profile_t c2 = PROFILE_UNUSED;
+ int32_t sdk;
+ if (mapper && v.get(&sdk) && mapper->mapProfile(sdk, &c2)) {
+ return c2;
+ }
+ return PROFILE_UNUSED;
+ }, [mapper](C2Value v) -> C2Value {
+ C2Config::profile_t c2;
+ int32_t sdk;
+ using C2ValueType =
+ typename _c2_reduce_enum_to_underlying_type<decltype(c2)>::type;
+ if (mapper && v.get((C2ValueType*)&c2) && mapper->mapProfile(c2, &sdk)) {
+ return sdk;
+ }
+ return C2Value();
+ })});
+ mStandardParams->updateConfigMappersForKey(StandardParams::SdkKey(KEY_LEVEL),
+ {
+ ConfigMapper(KEY_LEVEL, C2_PARAMKEY_PROFILE_LEVEL, "level")
+ .limitTo(Domain::CODED)
+ .withMappers([mapper](C2Value v) -> C2Value {
+ C2Config::level_t c2 = LEVEL_UNUSED;
+ int32_t sdk;
+ if (mapper && v.get(&sdk) && mapper->mapLevel(sdk, &c2)) {
+ return c2;
+ }
+ return LEVEL_UNUSED;
+ }, [mapper](C2Value v) -> C2Value {
+ C2Config::level_t c2;
+ int32_t sdk;
+ using C2ValueType =
+ typename _c2_reduce_enum_to_underlying_type<decltype(c2)>::type;
+ if (mapper && v.get((C2ValueType*)&c2) && mapper->mapLevel(c2, &sdk)) {
+ return sdk;
+ }
+ return C2Value();
+ })});
+ }
+ }
+
ReflectedParamUpdater::Dict params = getReflectedFormat(sdkParams, configDomain);
std::vector<C2Param::Index> indices;
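
A minimal sketch (not part of the change itself) of the two mapping directions installed above for 10-bit AV1. BitDepthMapperSketch stands in for C2Mapper::GetBitDepthProfileLevelMapper(); the SDK constants assume AV1ProfileMain8 == 0x01 and AV1ProfileMain10 == 0x02, and the C2 profile enum is reduced to a toy type.

    #include <cstdint>
    #include <functional>
    #include <optional>

    enum class C2ProfileSketch { kUnused, kAv1Main8, kAv1Main10 };

    // Stand-in for the bit-depth-aware profile mapper selected in the patch.
    struct BitDepthMapperSketch {
        int bitDepth;
        bool mapProfile(int32_t sdk, C2ProfileSketch* c2) const {
            if (bitDepth == 10 && sdk == 0x02) { *c2 = C2ProfileSketch::kAv1Main10; return true; }
            if (bitDepth == 8  && sdk == 0x01) { *c2 = C2ProfileSketch::kAv1Main8;  return true; }
            return false;
        }
        bool mapProfile(C2ProfileSketch c2, int32_t* sdk) const {
            if (c2 == C2ProfileSketch::kAv1Main10) { *sdk = 0x02; return true; }
            if (c2 == C2ProfileSketch::kAv1Main8)  { *sdk = 0x01; return true; }
            return false;
        }
    };

    // The two lambdas mirror the withMappers(sdkToC2, c2ToSdk) pair in the patch.
    std::function<C2ProfileSketch(int32_t)> makeSdkToC2(BitDepthMapperSketch mapper) {
        return [mapper](int32_t sdk) {
            C2ProfileSketch c2 = C2ProfileSketch::kUnused;
            return mapper.mapProfile(sdk, &c2) ? c2 : C2ProfileSketch::kUnused;
        };
    }

    std::function<std::optional<int32_t>(C2ProfileSketch)> makeC2ToSdk(BitDepthMapperSketch mapper) {
        return [mapper](C2ProfileSketch c2) -> std::optional<int32_t> {
            int32_t sdk = 0;
            if (mapper.mapProfile(c2, &sdk)) return sdk;
            return std::nullopt;
        };
    }
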
diff --git a/media/codec2/vndk/C2Store.cpp b/media/codec2/vndk/C2Store.cpp
index d7a9764..f6f97da 100644
--- a/media/codec2/vndk/C2Store.cpp
+++ b/media/codec2/vndk/C2Store.cpp
@@ -15,7 +15,7 @@
*/
#define LOG_TAG "C2Store"
-#define LOG_NDEBUG 0
+// #define LOG_NDEBUG 0
#include <utils/Log.h>
#include <C2AllocatorBlob.h>
diff --git a/media/codec2/vndk/internal/C2BlockInternal.h b/media/codec2/vndk/internal/C2BlockInternal.h
index fe5390a..1eefd87 100644
--- a/media/codec2/vndk/internal/C2BlockInternal.h
+++ b/media/codec2/vndk/internal/C2BlockInternal.h
@@ -286,7 +286,7 @@
* - Local migration on blockpool side will be done automatically by
* blockpool.
* - Before attachBuffer(), BeginAttachBlockToBufferQueue() should be called
- * to test eligiblity.
+ * to test eligibility.
* - After attachBuffer() is called, EndAttachBlockToBufferQueue() should
* be called. This will set "held" status to true. If it returned
* false, cancelBuffer() should be called.
diff --git a/media/libaaudio/include/aaudio/AAudio.h b/media/libaaudio/include/aaudio/AAudio.h
index 13e430a..7648c76 100644
--- a/media/libaaudio/include/aaudio/AAudio.h
+++ b/media/libaaudio/include/aaudio/AAudio.h
@@ -134,7 +134,11 @@
* The call was successful.
*/
AAUDIO_OK,
- AAUDIO_ERROR_BASE = -900, // TODO review
+
+ /**
+ * Reserved. This should not be returned.
+ */
+ AAUDIO_ERROR_BASE = -900,
/**
* The audio device was disconnected. This could occur, for example, when headphones
@@ -150,6 +154,10 @@
*/
AAUDIO_ERROR_ILLEGAL_ARGUMENT,
// reserved
+
+ /**
+ * An internal error occurred.
+ */
AAUDIO_ERROR_INTERNAL = AAUDIO_ERROR_ILLEGAL_ARGUMENT + 2,
/**
@@ -158,7 +166,9 @@
AAUDIO_ERROR_INVALID_STATE,
// reserved
// reserved
- /* The server rejected the handle used to identify the stream.
+
+ /**
+ * The server rejected the handle used to identify the stream.
*/
AAUDIO_ERROR_INVALID_HANDLE = AAUDIO_ERROR_INVALID_STATE + 3,
// reserved
@@ -174,6 +184,10 @@
* or a timestamp is not available.
*/
AAUDIO_ERROR_UNAVAILABLE,
+
+ /**
+ * Reserved. This should not be returned.
+ */
AAUDIO_ERROR_NO_FREE_HANDLES,
/**
@@ -191,6 +205,10 @@
* An operation took longer than expected.
*/
AAUDIO_ERROR_TIMEOUT,
+
+ /**
+ * A queue is full. The operation would block.
+ */
AAUDIO_ERROR_WOULD_BLOCK,
/**
diff --git a/media/libaaudio/src/binding/AAudioBinderAdapter.cpp b/media/libaaudio/src/binding/AAudioBinderAdapter.cpp
index 42d81ca..ee7480b 100644
--- a/media/libaaudio/src/binding/AAudioBinderAdapter.cpp
+++ b/media/libaaudio/src/binding/AAudioBinderAdapter.cpp
@@ -23,15 +23,16 @@
using android::aidl_utils::statusTFromBinderStatus;
using android::binder::Status;
-AAudioBinderAdapter::AAudioBinderAdapter(IAAudioService* delegate)
- : mDelegate(delegate) {}
+AAudioBinderAdapter::AAudioBinderAdapter(IAAudioService* delegate,
+ int32_t serviceLifetimeId)
+ : mDelegate(delegate), mServiceLifetimeId(serviceLifetimeId) {}
void AAudioBinderAdapter::registerClient(const android::sp<IAAudioClient>& client) {
mDelegate->registerClient(client);
}
-aaudio_handle_t AAudioBinderAdapter::openStream(const AAudioStreamRequest& request,
- AAudioStreamConfiguration& config) {
+AAudioHandleInfo AAudioBinderAdapter::openStream(const AAudioStreamRequest& request,
+ AAudioStreamConfiguration& config) {
aaudio_handle_t result;
StreamParameters params;
Status status = mDelegate->openStream(request.parcelable(),
@@ -41,23 +42,29 @@
result = AAudioConvert_androidToAAudioResult(statusTFromBinderStatus(status));
}
config = params;
- return result;
+ return {mServiceLifetimeId, result};
}
-aaudio_result_t AAudioBinderAdapter::closeStream(aaudio_handle_t streamHandle) {
+aaudio_result_t AAudioBinderAdapter::closeStream(const AAudioHandleInfo& streamHandleInfo) {
+ if (streamHandleInfo.getServiceLifetimeId() != mServiceLifetimeId) {
+ return AAUDIO_ERROR_DISCONNECTED;
+ }
aaudio_result_t result;
- Status status = mDelegate->closeStream(streamHandle, &result);
+ Status status = mDelegate->closeStream(streamHandleInfo.getHandle(), &result);
if (!status.isOk()) {
result = AAudioConvert_androidToAAudioResult(statusTFromBinderStatus(status));
}
return result;
}
-aaudio_result_t AAudioBinderAdapter::getStreamDescription(aaudio_handle_t streamHandle,
+aaudio_result_t AAudioBinderAdapter::getStreamDescription(const AAudioHandleInfo& streamHandleInfo,
AudioEndpointParcelable& endpointOut) {
+ if (streamHandleInfo.getServiceLifetimeId() != mServiceLifetimeId) {
+ return AAUDIO_ERROR_DISCONNECTED;
+ }
aaudio_result_t result;
Endpoint endpoint;
- Status status = mDelegate->getStreamDescription(streamHandle,
+ Status status = mDelegate->getStreamDescription(streamHandleInfo.getHandle(),
&endpoint,
&result);
if (!status.isOk()) {
@@ -67,68 +74,91 @@
return result;
}
-aaudio_result_t AAudioBinderAdapter::startStream(aaudio_handle_t streamHandle) {
+aaudio_result_t AAudioBinderAdapter::startStream(const AAudioHandleInfo& streamHandleInfo) {
+ if (streamHandleInfo.getServiceLifetimeId() != mServiceLifetimeId) {
+ return AAUDIO_ERROR_DISCONNECTED;
+ }
aaudio_result_t result;
- Status status = mDelegate->startStream(streamHandle, &result);
+ Status status = mDelegate->startStream(streamHandleInfo.getHandle(), &result);
if (!status.isOk()) {
result = AAudioConvert_androidToAAudioResult(statusTFromBinderStatus(status));
}
return result;
}
-aaudio_result_t AAudioBinderAdapter::pauseStream(aaudio_handle_t streamHandle) {
+aaudio_result_t AAudioBinderAdapter::pauseStream(const AAudioHandleInfo& streamHandleInfo) {
+ if (streamHandleInfo.getServiceLifetimeId() != mServiceLifetimeId) {
+ return AAUDIO_ERROR_DISCONNECTED;
+ }
aaudio_result_t result;
- Status status = mDelegate->pauseStream(streamHandle, &result);
+ Status status = mDelegate->pauseStream(streamHandleInfo.getHandle(), &result);
if (!status.isOk()) {
result = AAudioConvert_androidToAAudioResult(statusTFromBinderStatus(status));
}
return result;
}
-aaudio_result_t AAudioBinderAdapter::stopStream(aaudio_handle_t streamHandle) {
+aaudio_result_t AAudioBinderAdapter::stopStream(const AAudioHandleInfo& streamHandleInfo) {
+ if (streamHandleInfo.getServiceLifetimeId() != mServiceLifetimeId) {
+ return AAUDIO_ERROR_DISCONNECTED;
+ }
aaudio_result_t result;
- Status status = mDelegate->stopStream(streamHandle, &result);
+ Status status = mDelegate->stopStream(streamHandleInfo.getHandle(), &result);
if (!status.isOk()) {
result = AAudioConvert_androidToAAudioResult(statusTFromBinderStatus(status));
}
return result;
}
-aaudio_result_t AAudioBinderAdapter::flushStream(aaudio_handle_t streamHandle) {
+aaudio_result_t AAudioBinderAdapter::flushStream(const AAudioHandleInfo& streamHandleInfo) {
+ if (streamHandleInfo.getServiceLifetimeId() != mServiceLifetimeId) {
+ return AAUDIO_ERROR_DISCONNECTED;
+ }
aaudio_result_t result;
- Status status = mDelegate->flushStream(streamHandle, &result);
+ Status status = mDelegate->flushStream(streamHandleInfo.getHandle(), &result);
if (!status.isOk()) {
result = AAudioConvert_androidToAAudioResult(statusTFromBinderStatus(status));
}
return result;
}
-aaudio_result_t AAudioBinderAdapter::registerAudioThread(aaudio_handle_t streamHandle,
+aaudio_result_t AAudioBinderAdapter::registerAudioThread(const AAudioHandleInfo& streamHandleInfo,
pid_t clientThreadId,
int64_t periodNanoseconds) {
+ if (streamHandleInfo.getServiceLifetimeId() != mServiceLifetimeId) {
+ return AAUDIO_ERROR_DISCONNECTED;
+ }
aaudio_result_t result;
- Status status = mDelegate->registerAudioThread(streamHandle, clientThreadId, periodNanoseconds, &result);
+ Status status = mDelegate->registerAudioThread(
+ streamHandleInfo.getHandle(), clientThreadId, periodNanoseconds, &result);
if (!status.isOk()) {
result = AAudioConvert_androidToAAudioResult(statusTFromBinderStatus(status));
}
return result;
}
-aaudio_result_t AAudioBinderAdapter::unregisterAudioThread(aaudio_handle_t streamHandle,
+aaudio_result_t AAudioBinderAdapter::unregisterAudioThread(const AAudioHandleInfo& streamHandleInfo,
pid_t clientThreadId) {
+ if (streamHandleInfo.getServiceLifetimeId() != mServiceLifetimeId) {
+ return AAUDIO_ERROR_DISCONNECTED;
+ }
aaudio_result_t result;
- Status status = mDelegate->unregisterAudioThread(streamHandle, clientThreadId, &result);
+ Status status = mDelegate->unregisterAudioThread(
+ streamHandleInfo.getHandle(), clientThreadId, &result);
if (!status.isOk()) {
result = AAudioConvert_androidToAAudioResult(statusTFromBinderStatus(status));
}
return result;
}
-aaudio_result_t AAudioBinderAdapter::exitStandby(aaudio_handle_t streamHandle,
+aaudio_result_t AAudioBinderAdapter::exitStandby(const AAudioHandleInfo& streamHandleInfo,
AudioEndpointParcelable &endpointOut) {
+ if (streamHandleInfo.getServiceLifetimeId() != mServiceLifetimeId) {
+ return AAUDIO_ERROR_DISCONNECTED;
+ }
aaudio_result_t result;
Endpoint endpoint;
- Status status = mDelegate->exitStandby(streamHandle, &endpoint, &result);
+ Status status = mDelegate->exitStandby(streamHandleInfo.getHandle(), &endpoint, &result);
if (!status.isOk()) {
result = AAudioConvert_androidToAAudioResult(statusTFromBinderStatus(status));
}
diff --git a/media/libaaudio/src/binding/AAudioBinderAdapter.h b/media/libaaudio/src/binding/AAudioBinderAdapter.h
index d170783..301150f 100644
--- a/media/libaaudio/src/binding/AAudioBinderAdapter.h
+++ b/media/libaaudio/src/binding/AAudioBinderAdapter.h
@@ -30,38 +30,40 @@
*/
class AAudioBinderAdapter : public AAudioServiceInterface {
public:
- explicit AAudioBinderAdapter(IAAudioService* delegate);
+ AAudioBinderAdapter(IAAudioService* delegate, int32_t serviceLifetimeId);
void registerClient(const android::sp<IAAudioClient>& client) override;
- aaudio_handle_t openStream(const AAudioStreamRequest& request,
- AAudioStreamConfiguration& configuration) override;
+ AAudioHandleInfo openStream(const AAudioStreamRequest& request,
+ AAudioStreamConfiguration& configuration) override;
- aaudio_result_t closeStream(aaudio_handle_t streamHandle) override;
+ aaudio_result_t closeStream(const AAudioHandleInfo& streamHandleInfo) override;
- aaudio_result_t getStreamDescription(aaudio_handle_t streamHandle,
+ aaudio_result_t getStreamDescription(const AAudioHandleInfo& streamHandleInfo,
AudioEndpointParcelable& endpoint) override;
- aaudio_result_t startStream(aaudio_handle_t streamHandle) override;
+ aaudio_result_t startStream(const AAudioHandleInfo& streamHandleInfo) override;
- aaudio_result_t pauseStream(aaudio_handle_t streamHandle) override;
+ aaudio_result_t pauseStream(const AAudioHandleInfo& streamHandleInfo) override;
- aaudio_result_t stopStream(aaudio_handle_t streamHandle) override;
+ aaudio_result_t stopStream(const AAudioHandleInfo& streamHandleInfo) override;
- aaudio_result_t flushStream(aaudio_handle_t streamHandle) override;
+ aaudio_result_t flushStream(const AAudioHandleInfo& streamHandleInfo) override;
- aaudio_result_t registerAudioThread(aaudio_handle_t streamHandle,
+ aaudio_result_t registerAudioThread(const AAudioHandleInfo& streamHandleInfo,
pid_t clientThreadId,
int64_t periodNanoseconds) override;
- aaudio_result_t unregisterAudioThread(aaudio_handle_t streamHandle,
+ aaudio_result_t unregisterAudioThread(const AAudioHandleInfo& streamHandleInfo,
pid_t clientThreadId) override;
- aaudio_result_t exitStandby(aaudio_handle_t streamHandle,
+ aaudio_result_t exitStandby(const AAudioHandleInfo& streamHandleInfo,
AudioEndpointParcelable &parcelable) override;
private:
IAAudioService* const mDelegate;
+ // A unique id to recognize the service that the adapter connected to.
+ const int32_t mServiceLifetimeId;
};
} // namespace aaudio
diff --git a/media/libaaudio/src/binding/AAudioBinderClient.cpp b/media/libaaudio/src/binding/AAudioBinderClient.cpp
index 8e5facc..5f34a75 100644
--- a/media/libaaudio/src/binding/AAudioBinderClient.cpp
+++ b/media/libaaudio/src/binding/AAudioBinderClient.cpp
@@ -90,7 +90,8 @@
ALOGE("%s() - linkToDeath() returned %d", __func__, status);
}
aaudioService = interface_cast<IAAudioService>(binder);
- mAdapter = std::make_shared<Adapter>(aaudioService, mAAudioClient);
+ mAdapter = std::make_shared<Adapter>(
+ aaudioService, mAAudioClient, mAAudioClient->getServiceLifetimeId());
needToRegister = true;
// Make sure callbacks can be received by mAAudioClient
ProcessState::self()->startThreadPool();
@@ -115,97 +116,101 @@
/**
* @param request info needed to create the stream
* @param configuration contains information about the created stream
-* @return handle to the stream or a negative error
+* @return an AAudioHandleInfo object containing the lifetime id of the connected
+* aaudio service and the aaudio handle identifying the stream. If an error
+* occurs, the handle is set to the negative error code.
*/
-aaudio_handle_t AAudioBinderClient::openStream(const AAudioStreamRequest &request,
- AAudioStreamConfiguration &configuration) {
- aaudio_handle_t stream;
+AAudioHandleInfo AAudioBinderClient::openStream(const AAudioStreamRequest &request,
+ AAudioStreamConfiguration &configuration) {
for (int i = 0; i < 2; i++) {
std::shared_ptr<AAudioServiceInterface> service = getAAudioService();
- if (service.get() == nullptr) return AAUDIO_ERROR_NO_SERVICE;
+ if (service.get() == nullptr) {
+ return {};
+ }
- stream = service->openStream(request, configuration);
+ AAudioHandleInfo handleInfo = service->openStream(request, configuration);
- if (stream == AAUDIO_ERROR_NO_SERVICE) {
+ if (handleInfo.getHandle() == AAUDIO_ERROR_NO_SERVICE) {
ALOGE("openStream lost connection to AAudioService.");
dropAAudioService(); // force a reconnect
} else {
- break;
+ return handleInfo;
}
}
- return stream;
+ return {};
}
-aaudio_result_t AAudioBinderClient::closeStream(aaudio_handle_t streamHandle) {
+aaudio_result_t AAudioBinderClient::closeStream(const AAudioHandleInfo& streamHandleInfo) {
std::shared_ptr<AAudioServiceInterface> service = getAAudioService();
if (service.get() == nullptr) return AAUDIO_ERROR_NO_SERVICE;
- return service->closeStream(streamHandle);
+ return service->closeStream(streamHandleInfo);
}
/* Get an immutable description of the in-memory queues
* used to communicate with the underlying HAL or Service.
*/
-aaudio_result_t AAudioBinderClient::getStreamDescription(aaudio_handle_t streamHandle,
+aaudio_result_t AAudioBinderClient::getStreamDescription(const AAudioHandleInfo& streamHandleInfo,
AudioEndpointParcelable& endpointOut) {
std::shared_ptr<AAudioServiceInterface> service = getAAudioService();
if (service.get() == nullptr) return AAUDIO_ERROR_NO_SERVICE;
- return service->getStreamDescription(streamHandle, endpointOut);
+ return service->getStreamDescription(streamHandleInfo, endpointOut);
}
-aaudio_result_t AAudioBinderClient::startStream(aaudio_handle_t streamHandle) {
+aaudio_result_t AAudioBinderClient::startStream(const AAudioHandleInfo& streamHandleInfo) {
std::shared_ptr<AAudioServiceInterface> service = getAAudioService();
if (service.get() == nullptr) return AAUDIO_ERROR_NO_SERVICE;
- return service->startStream(streamHandle);
+ return service->startStream(streamHandleInfo);
}
-aaudio_result_t AAudioBinderClient::pauseStream(aaudio_handle_t streamHandle) {
+aaudio_result_t AAudioBinderClient::pauseStream(const AAudioHandleInfo& streamHandleInfo) {
std::shared_ptr<AAudioServiceInterface> service = getAAudioService();
if (service.get() == nullptr) return AAUDIO_ERROR_NO_SERVICE;
- return service->pauseStream(streamHandle);
+ return service->pauseStream(streamHandleInfo);
}
-aaudio_result_t AAudioBinderClient::stopStream(aaudio_handle_t streamHandle) {
+aaudio_result_t AAudioBinderClient::stopStream(const AAudioHandleInfo& streamHandleInfo) {
std::shared_ptr<AAudioServiceInterface> service = getAAudioService();
if (service.get() == nullptr) return AAUDIO_ERROR_NO_SERVICE;
- return service->stopStream(streamHandle);
+ return service->stopStream(streamHandleInfo);
}
-aaudio_result_t AAudioBinderClient::flushStream(aaudio_handle_t streamHandle) {
+aaudio_result_t AAudioBinderClient::flushStream(const AAudioHandleInfo& streamHandleInfo) {
std::shared_ptr<AAudioServiceInterface> service = getAAudioService();
if (service.get() == nullptr) return AAUDIO_ERROR_NO_SERVICE;
- return service->flushStream(streamHandle);
+ return service->flushStream(streamHandleInfo);
}
/**
* Manage the specified thread as a low latency audio thread.
*/
-aaudio_result_t AAudioBinderClient::registerAudioThread(aaudio_handle_t streamHandle,
+aaudio_result_t AAudioBinderClient::registerAudioThread(const AAudioHandleInfo& streamHandleInfo,
pid_t clientThreadId,
int64_t periodNanoseconds) {
std::shared_ptr<AAudioServiceInterface> service = getAAudioService();
if (service.get() == nullptr) return AAUDIO_ERROR_NO_SERVICE;
- return service->registerAudioThread(streamHandle, clientThreadId, periodNanoseconds);
+ return service->registerAudioThread(streamHandleInfo, clientThreadId, periodNanoseconds);
}
-aaudio_result_t AAudioBinderClient::unregisterAudioThread(aaudio_handle_t streamHandle,
+aaudio_result_t AAudioBinderClient::unregisterAudioThread(const AAudioHandleInfo& streamHandleInfo,
pid_t clientThreadId) {
std::shared_ptr<AAudioServiceInterface> service = getAAudioService();
if (service.get() == nullptr) return AAUDIO_ERROR_NO_SERVICE;
- return service->unregisterAudioThread(streamHandle, clientThreadId);
+ return service->unregisterAudioThread(streamHandleInfo, clientThreadId);
}
-aaudio_result_t AAudioBinderClient::exitStandby(aaudio_handle_t streamHandle,
+aaudio_result_t AAudioBinderClient::exitStandby(const AAudioHandleInfo& streamHandleInfo,
AudioEndpointParcelable &endpointOut) {
std::shared_ptr<AAudioServiceInterface> service = getAAudioService();
if (service.get() == nullptr) return AAUDIO_ERROR_NO_SERVICE;
- return service->exitStandby(streamHandle, endpointOut);
+ return service->exitStandby(streamHandleInfo, endpointOut);
}
diff --git a/media/libaaudio/src/binding/AAudioBinderClient.h b/media/libaaudio/src/binding/AAudioBinderClient.h
index 0968f4c..8faf6e8 100644
--- a/media/libaaudio/src/binding/AAudioBinderClient.h
+++ b/media/libaaudio/src/binding/AAudioBinderClient.h
@@ -17,6 +17,8 @@
#ifndef ANDROID_AAUDIO_AAUDIO_BINDER_CLIENT_H
#define ANDROID_AAUDIO_AAUDIO_BINDER_CLIENT_H
+#include <mutex>
+
#include <utils/RefBase.h>
#include <utils/Singleton.h>
@@ -52,63 +54,66 @@
/**
* @param request info needed to create the stream
* @param configuration contains resulting information about the created stream
- * @return handle to the stream or a negative error
+ * @return an AAudioHandleInfo object containing the lifetime id of the connected
+ * aaudio service and the aaudio handle identifying the stream. If an error
+ * occurs, the handle is set to the negative error code.
*/
- aaudio_handle_t openStream(const AAudioStreamRequest &request,
- AAudioStreamConfiguration &configurationOutput) override;
+ AAudioHandleInfo openStream(const AAudioStreamRequest &request,
+ AAudioStreamConfiguration &configurationOutput) override;
- aaudio_result_t closeStream(aaudio_handle_t streamHandle) override;
+ aaudio_result_t closeStream(const AAudioHandleInfo& streamHandleInfo) override;
/* Get an immutable description of the in-memory queues
* used to communicate with the underlying HAL or Service.
*/
- aaudio_result_t getStreamDescription(aaudio_handle_t streamHandle,
+ aaudio_result_t getStreamDescription(const AAudioHandleInfo& streamHandleInfo,
AudioEndpointParcelable &endpointOut) override;
/**
* Start the flow of data.
* This is asynchronous. When complete, the service will send a STARTED event.
*/
- aaudio_result_t startStream(aaudio_handle_t streamHandle) override;
+ aaudio_result_t startStream(const AAudioHandleInfo& streamHandleInfo) override;
/**
* Stop the flow of data such that start() can resume without loss of data.
* This is asynchronous. When complete, the service will send a PAUSED event.
*/
- aaudio_result_t pauseStream(aaudio_handle_t streamHandle) override;
+ aaudio_result_t pauseStream(const AAudioHandleInfo& streamHandleInfo) override;
- aaudio_result_t stopStream(aaudio_handle_t streamHandle) override;
+ aaudio_result_t stopStream(const AAudioHandleInfo& streamHandleInfo) override;
/**
* Discard any data held by the underlying HAL or Service.
* This is asynchronous. When complete, the service will send a FLUSHED event.
*/
- aaudio_result_t flushStream(aaudio_handle_t streamHandle) override;
+ aaudio_result_t flushStream(const AAudioHandleInfo& streamHandleInfo) override;
/**
* Manage the specified thread as a low latency audio thread.
* TODO Consider passing this information as part of the startStream() call.
*/
- aaudio_result_t registerAudioThread(aaudio_handle_t streamHandle,
- pid_t clientThreadId,
- int64_t periodNanoseconds) override;
+ aaudio_result_t registerAudioThread(const AAudioHandleInfo& streamHandleInfo,
+ pid_t clientThreadId,
+ int64_t periodNanoseconds) override;
- aaudio_result_t unregisterAudioThread(aaudio_handle_t streamHandle,
- pid_t clientThreadId) override;
+ aaudio_result_t unregisterAudioThread(const AAudioHandleInfo& streamHandleInfo,
+ pid_t clientThreadId) override;
- aaudio_result_t startClient(aaudio_handle_t streamHandle __unused,
+ aaudio_result_t startClient(const AAudioHandleInfo& streamHandleInfo __unused,
const android::AudioClient& client __unused,
const audio_attributes_t *attr __unused,
audio_port_handle_t *clientHandle __unused) override {
return AAUDIO_ERROR_UNAVAILABLE;
}
- aaudio_result_t stopClient(aaudio_handle_t streamHandle __unused,
+ aaudio_result_t stopClient(const AAudioHandleInfo& streamHandleInfo __unused,
audio_port_handle_t clientHandle __unused) override {
return AAUDIO_ERROR_UNAVAILABLE;
}
- aaudio_result_t exitStandby(aaudio_handle_t streamHandle,
+ aaudio_result_t exitStandby(const AAudioHandleInfo& streamHandleInfo,
AudioEndpointParcelable &endpointOut) override;
void onStreamChange(aaudio_handle_t /*handle*/, int32_t /*opcode*/, int32_t /*value*/) {
@@ -117,6 +122,10 @@
ALOGW("onStreamChange called!");
}
+ int32_t getServiceLifetimeId() const {
+ return mAAudioClient->getServiceLifetimeId();
+ }
+
class AAudioClient : public android::IBinder::DeathRecipient, public BnAAudioClient {
public:
explicit AAudioClient(const android::wp<AAudioBinderClient>& aaudioBinderClient)
@@ -125,6 +134,7 @@
// implement DeathRecipient
virtual void binderDied(const android::wp<android::IBinder>& who __unused) {
+ mServiceLifetimeId++;
android::sp<AAudioBinderClient> client = mBinderClient.promote();
if (client.get() != nullptr) {
client->dropAAudioService();
@@ -141,8 +151,13 @@
}
return android::binder::Status::ok();
}
+
+ int32_t getServiceLifetimeId() const {
+ return mServiceLifetimeId.load();
+ }
private:
android::wp<AAudioBinderClient> mBinderClient;
+ std::atomic_int mServiceLifetimeId{0};
};
// This adapter is used to convert the binder interface (delegate) to the AudioServiceInterface
@@ -153,8 +168,9 @@
class Adapter : public AAudioBinderAdapter {
public:
Adapter(const android::sp<IAAudioService>& delegate,
- android::sp<AAudioClient> aaudioClient)
- : AAudioBinderAdapter(delegate.get()),
+ android::sp<AAudioClient> aaudioClient,
+ int32_t serviceLifetimeId)
+ : AAudioBinderAdapter(delegate.get(), serviceLifetimeId),
mDelegate(delegate),
mAAudioClient(std::move(aaudioClient)) {}
@@ -165,7 +181,7 @@
}
// This should never be called (call is rejected at the AudioBinderClient level).
- aaudio_result_t startClient(aaudio_handle_t streamHandle __unused,
+ aaudio_result_t startClient(const AAudioHandleInfo& streamHandle __unused,
const android::AudioClient& client __unused,
const audio_attributes_t* attr __unused,
audio_port_handle_t* clientHandle __unused) override {
@@ -174,7 +190,7 @@
}
// This should never be called (call is rejected at the AudioBinderClient level).
- aaudio_result_t stopClient(aaudio_handle_t streamHandle __unused,
+ aaudio_result_t stopClient(const AAudioHandleInfo& streamHandle __unused,
audio_port_handle_t clientHandle __unused) override {
LOG_ALWAYS_FATAL("Shouldn't get here");
return AAUDIO_ERROR_UNAVAILABLE;
diff --git a/media/libaaudio/src/binding/AAudioServiceDefinitions.h b/media/libaaudio/src/binding/AAudioServiceDefinitions.h
index 8a2303c..7b8978f 100644
--- a/media/libaaudio/src/binding/AAudioServiceDefinitions.h
+++ b/media/libaaudio/src/binding/AAudioServiceDefinitions.h
@@ -85,6 +85,23 @@
RingBufferDescriptor dataQueueDescriptor; // playback or capture
} EndpointDescriptor;
+static constexpr int32_t AAUDIO_SERVICE_LIFETIME_ID_INVALID = -1;
+
+class AAudioHandleInfo {
+public:
+ AAudioHandleInfo()
+ : AAudioHandleInfo(AAUDIO_SERVICE_LIFETIME_ID_INVALID, AAUDIO_HANDLE_INVALID) {}
+ AAudioHandleInfo(int32_t serviceLifetimeId, aaudio_handle_t handle)
+ : mServiceLifetimeId(serviceLifetimeId), mHandle(handle) {}
+
+ int32_t getServiceLifetimeId() const { return mServiceLifetimeId; }
+ aaudio_handle_t getHandle() const { return mHandle; }
+
+private:
+ int32_t mServiceLifetimeId;
+ aaudio_handle_t mHandle;
+};
+
} // namespace aaudio
#endif //BINDING_AAUDIOSERVICEDEFINITIONS_H
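
A minimal sketch (not part of the change itself) of what the lifetime id in AAudioHandleInfo buys: a handle minted before a binder death is rejected by the adapter created after the restart instead of being forwarded with a now-meaningless raw value. AdapterSketch and kErrorDisconnected are stand-ins for AAudioBinderAdapter and AAUDIO_ERROR_DISCONNECTED.

    #include <atomic>
    #include <cstdint>
    #include <cstdio>

    using aaudio_handle_sketch_t = int32_t;
    constexpr int32_t kErrorDisconnected = -899;   // stand-in error code

    struct HandleInfoSketch {
        int32_t serviceLifetimeId;
        aaudio_handle_sketch_t handle;
    };

    struct AdapterSketch {
        int32_t serviceLifetimeId;   // fixed at construction, as in AAudioBinderAdapter
        int32_t startStream(const HandleInfoSketch& h) const {
            if (h.serviceLifetimeId != serviceLifetimeId) {
                return kErrorDisconnected;   // stale handle from a previous service instance
            }
            return 0;                        // would forward h.handle over binder here
        }
    };

    int main() {
        std::atomic_int lifetime{0};                        // bumped in binderDied()
        AdapterSketch adapterBeforeCrash{lifetime.load()};
        HandleInfoSketch stream{lifetime.load(), /*handle=*/7};

        lifetime++;                                         // service died and restarted
        AdapterSketch adapterAfterCrash{lifetime.load()};

        std::printf("%d %d\n",
                    adapterBeforeCrash.startStream(stream), // 0: same lifetime, forwarded
                    adapterAfterCrash.startStream(stream)); // rejected as disconnected
        return 0;
    }
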
diff --git a/media/libaaudio/src/binding/AAudioServiceInterface.h b/media/libaaudio/src/binding/AAudioServiceInterface.h
index e901767..79f498b 100644
--- a/media/libaaudio/src/binding/AAudioServiceInterface.h
+++ b/media/libaaudio/src/binding/AAudioServiceInterface.h
@@ -45,55 +45,58 @@
/**
* @param request info needed to create the stream
* @param configuration contains information about the created stream
- * @return handle to the stream or a negative error
+ * @return an AAudioHandleInfo object containing the lifetime id of the connected
+ * aaudio service and the aaudio handle identifying the stream. If an error
+ * occurs, the handle is set to the negative error code.
*/
- virtual aaudio_handle_t openStream(const AAudioStreamRequest &request,
- AAudioStreamConfiguration &configuration) = 0;
+ virtual AAudioHandleInfo openStream(const AAudioStreamRequest &request,
+ AAudioStreamConfiguration &configuration) = 0;
- virtual aaudio_result_t closeStream(aaudio_handle_t streamHandle) = 0;
+ virtual aaudio_result_t closeStream(const AAudioHandleInfo& streamHandleInfo) = 0;
/* Get an immutable description of the in-memory queues
* used to communicate with the underlying HAL or Service.
*/
- virtual aaudio_result_t getStreamDescription(aaudio_handle_t streamHandle,
+ virtual aaudio_result_t getStreamDescription(const AAudioHandleInfo& streamHandleInfo,
AudioEndpointParcelable &parcelable) = 0;
/**
* Start the flow of data.
*/
- virtual aaudio_result_t startStream(aaudio_handle_t streamHandle) = 0;
+ virtual aaudio_result_t startStream(const AAudioHandleInfo& streamHandleInfo) = 0;
/**
* Stop the flow of data such that start() can resume without loss of data.
*/
- virtual aaudio_result_t pauseStream(aaudio_handle_t streamHandle) = 0;
+ virtual aaudio_result_t pauseStream(const AAudioHandleInfo& streamHandleInfo) = 0;
/**
- * Stop the flow of data after data currently inthe buffer has played.
+ * Stop the flow of data after data currently in the buffer has played.
*/
- virtual aaudio_result_t stopStream(aaudio_handle_t streamHandle) = 0;
+ virtual aaudio_result_t stopStream(const AAudioHandleInfo& streamHandleInfo) = 0;
/**
* Discard any data held by the underlying HAL or Service.
*/
- virtual aaudio_result_t flushStream(aaudio_handle_t streamHandle) = 0;
+ virtual aaudio_result_t flushStream(const AAudioHandleInfo& streamHandleInfo) = 0;
/**
* Manage the specified thread as a low latency audio thread.
*/
- virtual aaudio_result_t registerAudioThread(aaudio_handle_t streamHandle,
+ virtual aaudio_result_t registerAudioThread(const AAudioHandleInfo& streamHandleInfo,
pid_t clientThreadId,
int64_t periodNanoseconds) = 0;
- virtual aaudio_result_t unregisterAudioThread(aaudio_handle_t streamHandle,
+ virtual aaudio_result_t unregisterAudioThread(const AAudioHandleInfo& streamHandleInfo,
pid_t clientThreadId) = 0;
- virtual aaudio_result_t startClient(aaudio_handle_t streamHandle,
+ virtual aaudio_result_t startClient(const AAudioHandleInfo& streamHandleInfo,
const android::AudioClient& client,
const audio_attributes_t *attr,
audio_port_handle_t *clientHandle) = 0;
- virtual aaudio_result_t stopClient(aaudio_handle_t streamHandle,
+ virtual aaudio_result_t stopClient(const AAudioHandleInfo& streamHandleInfo,
audio_port_handle_t clientHandle) = 0;
/**
@@ -103,7 +106,7 @@
* @param parcelable contains new data queue information
* @return the result of the execution
*/
- virtual aaudio_result_t exitStandby(aaudio_handle_t streamHandle,
+ virtual aaudio_result_t exitStandby(const AAudioHandleInfo& streamHandleInfo,
AudioEndpointParcelable &parcelable) = 0;
};
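A minimal sketch (not part of the change) of how a caller might use the new openStream() return type at a call site; `service`, `request`, and `configuration` stand for an AAudioServiceInterface implementation and its usual arguments, and the helper name is hypothetical:

    // Hypothetical call site, following the @return contract documented above.
    aaudio_result_t openAndRemember(aaudio::AAudioServiceInterface& service,
                                    const aaudio::AAudioStreamRequest& request,
                                    aaudio::AAudioStreamConfiguration& configuration,
                                    aaudio::AAudioHandleInfo* outInfo) {
        aaudio::AAudioHandleInfo info = service.openStream(request, configuration);
        if (info.getHandle() < 0) {
            return info.getHandle();   // a negative handle carries the error code
        }
        *outInfo = info;               // keep both the handle and the lifetime id
        return AAUDIO_OK;
    }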
diff --git a/media/libaaudio/src/binding/AudioEndpointParcelable.cpp b/media/libaaudio/src/binding/AudioEndpointParcelable.cpp
index b1262df..d15d2fa 100644
--- a/media/libaaudio/src/binding/AudioEndpointParcelable.cpp
+++ b/media/libaaudio/src/binding/AudioEndpointParcelable.cpp
@@ -18,6 +18,7 @@
//#define LOG_NDEBUG 0
#include <utils/Log.h>
+#include <map>
#include <stdint.h>
#include <binder/Parcel.h>
@@ -37,8 +38,7 @@
: mUpMessageQueueParcelable(parcelable.upMessageQueueParcelable),
mDownMessageQueueParcelable(parcelable.downMessageQueueParcelable),
mUpDataQueueParcelable(parcelable.upDataQueueParcelable),
- mDownDataQueueParcelable(parcelable.downDataQueueParcelable),
- mNumSharedMemories(parcelable.sharedMemories.size()) {
+ mDownDataQueueParcelable(parcelable.downDataQueueParcelable) {
for (size_t i = 0; i < parcelable.sharedMemories.size() && i < MAX_SHARED_MEMORIES; ++i) {
// Re-construct.
mSharedMemories[i].~SharedMemoryParcelable();
@@ -52,15 +52,48 @@
return *this;
}
+namespace {
+
+void updateSharedMemoryIndex(SharedRegion* sharedRegion, int oldIndex, int newIndex) {
+ if (sharedRegion->sharedMemoryIndex == oldIndex) {
+ sharedRegion->sharedMemoryIndex = newIndex;
+ }
+}
+
+void updateSharedMemoryIndex(RingBuffer* ringBuffer, int oldIndex, int newIndex) {
+ updateSharedMemoryIndex(&ringBuffer->readCounterParcelable, oldIndex, newIndex);
+ updateSharedMemoryIndex(&ringBuffer->writeCounterParcelable, oldIndex, newIndex);
+ updateSharedMemoryIndex(&ringBuffer->dataParcelable, oldIndex, newIndex);
+}
+
+void updateSharedMemoryIndex(Endpoint* endpoint, int oldIndex, int newIndex) {
+ updateSharedMemoryIndex(&endpoint->upMessageQueueParcelable, oldIndex, newIndex);
+ updateSharedMemoryIndex(&endpoint->downMessageQueueParcelable, oldIndex, newIndex);
+ updateSharedMemoryIndex(&endpoint->upDataQueueParcelable, oldIndex, newIndex);
+ updateSharedMemoryIndex(&endpoint->downDataQueueParcelable, oldIndex, newIndex);
+}
+
+} // namespace
+
Endpoint AudioEndpointParcelable::parcelable()&& {
Endpoint result;
result.upMessageQueueParcelable = mUpMessageQueueParcelable.parcelable();
result.downMessageQueueParcelable = mDownMessageQueueParcelable.parcelable();
result.upDataQueueParcelable = mUpDataQueueParcelable.parcelable();
result.downDataQueueParcelable = mDownDataQueueParcelable.parcelable();
- result.sharedMemories.reserve(std::min(mNumSharedMemories, MAX_SHARED_MEMORIES));
- for (size_t i = 0; i < mNumSharedMemories && i < MAX_SHARED_MEMORIES; ++i) {
- result.sharedMemories.emplace_back(std::move(mSharedMemories[i]).parcelable());
+ // Only valid/in-use shared memory can be transferred through binder. By design, the
+ // shared memories that are currently in use may not be placed contiguously from
+ // position 0. However, when marshalling the shared memories into the Endpoint, they
+ // are re-indexed from 0. Therefore, when placing a shared memory, the corresponding
+ // cached indexes must be updated.
+ for (int i = 0; i < MAX_SHARED_MEMORIES; ++i) {
+ if (mSharedMemories[i].isInUse()) {
+ const int index = result.sharedMemories.size();
+ result.sharedMemories.emplace_back(std::move(mSharedMemories[i]).parcelable());
+ // Update every SharedRegion that uses `i` as its shared memory index to the
+ // new index, which is `result.sharedMemories.size() - 1`.
+ updateSharedMemoryIndex(&result, i, index);
+ }
}
return result;
}
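The loop above packs only the in-use shared memories and rewrites every cached index through an old-to-new mapping. A self-contained toy sketch of that idea (the in-use pattern and indexes are made up, and a plain vector stands in for the parcelable array):

    #include <cstdio>
    #include <map>
    #include <vector>

    int main() {
        // Hypothetical data: slots 1 and 3 are in use, slots 0 and 2 are free.
        const bool inUse[4] = {false, true, false, true};
        std::map<int, int> oldToNew;   // old slot index -> packed index
        std::vector<int> packed;       // stands in for Endpoint::sharedMemories
        for (int i = 0; i < 4; ++i) {
            if (inUse[i]) {
                oldToNew[i] = static_cast<int>(packed.size());
                packed.push_back(i);   // stands in for moving the SharedMemoryParcelable
            }
        }
        for (const auto& [oldIndex, newIndex] : oldToNew) {
            std::printf("old index %d -> new index %d\n", oldIndex, newIndex);  // 1->0, 3->1
        }
        return 0;
    }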
@@ -71,28 +104,50 @@
*/
int32_t AudioEndpointParcelable::addFileDescriptor(const unique_fd& fd,
int32_t sizeInBytes) {
- if (mNumSharedMemories >= MAX_SHARED_MEMORIES) {
+ const int32_t index = getNextAvailableSharedMemoryPosition();
+ if (index < 0) {
return AAUDIO_ERROR_OUT_OF_RANGE;
}
- int32_t index = mNumSharedMemories++;
mSharedMemories[index].setup(fd, sizeInBytes);
return index;
}
void AudioEndpointParcelable::closeDataFileDescriptor() {
- const int32_t curDataMemoryIndex = mDownDataQueueParcelable.getSharedMemoryIndex();
- mSharedMemories[curDataMemoryIndex].closeAndReleaseFd();
+ for (const int32_t memoryIndex : std::set{mDownDataQueueParcelable.getDataSharedMemoryIndex(),
+ mDownDataQueueParcelable.getReadCounterSharedMemoryIndex(),
+ mDownDataQueueParcelable.getWriteCounterSharedMemoryIndex()}) {
+ mSharedMemories[memoryIndex].closeAndReleaseFd();
+ }
}
-void AudioEndpointParcelable::updateDataFileDescriptor(
+aaudio_result_t AudioEndpointParcelable::updateDataFileDescriptor(
AudioEndpointParcelable* endpointParcelable) {
- const int32_t curDataMemoryIndex = mDownDataQueueParcelable.getSharedMemoryIndex();
- const int32_t newDataMemoryIndex =
- endpointParcelable->mDownDataQueueParcelable.getSharedMemoryIndex();
- mSharedMemories[curDataMemoryIndex].close();
- mSharedMemories[curDataMemoryIndex].setup(
- endpointParcelable->mSharedMemories[newDataMemoryIndex]);
- mDownDataQueueParcelable.updateMemory(endpointParcelable->mDownDataQueueParcelable);
+ // Before updating data file descriptor, close the old shared memories.
+ closeDataFileDescriptor();
+ // The given endpoint parcelable and this one are different objects, so the indexes
+ // into `mSharedMemories` used by `mDownDataQueueParcelable` can differ between them.
+ // An index map, from the indexes in the given endpoint parcelable to the indexes in
+ // this one, is therefore required when updating the shared memory.
+ std::map<int32_t, int32_t> memoryIndexMap;
+ auto& downDataQueueParcelable = endpointParcelable->mDownDataQueueParcelable;
+ for (const int32_t memoryIndex : {downDataQueueParcelable.getDataSharedMemoryIndex(),
+ downDataQueueParcelable.getReadCounterSharedMemoryIndex(),
+ downDataQueueParcelable.getWriteCounterSharedMemoryIndex()}) {
+ if (memoryIndexMap.find(memoryIndex) != memoryIndexMap.end()) {
+ // This shared memory has been set up in this endpoint parcelable.
+ continue;
+ }
+ // Set up the memory in the next available shared memory position.
+ const int index = getNextAvailableSharedMemoryPosition();
+ if (index < 0) {
+ return AAUDIO_ERROR_OUT_OF_RANGE;
+ }
+ mSharedMemories[index].setup(endpointParcelable->mSharedMemories[memoryIndex]);
+ memoryIndexMap.emplace(memoryIndex, index);
+ }
+ mDownDataQueueParcelable.updateMemory(
+ endpointParcelable->mDownDataQueueParcelable, memoryIndexMap);
+ return AAUDIO_OK;
}
aaudio_result_t AudioEndpointParcelable::resolve(EndpointDescriptor *descriptor) {
@@ -114,26 +169,29 @@
aaudio_result_t AudioEndpointParcelable::close() {
int err = 0;
- for (int i = 0; i < mNumSharedMemories; i++) {
- int lastErr = mSharedMemories[i].close();
+ for (auto& sharedMemory : mSharedMemories) {
+ const int lastErr = sharedMemory.close();
if (lastErr < 0) err = lastErr;
}
return AAudioConvert_androidToAAudioResult(err);
}
-aaudio_result_t AudioEndpointParcelable::validate() const {
- if (mNumSharedMemories < 0 || mNumSharedMemories >= MAX_SHARED_MEMORIES) {
- ALOGE("invalid mNumSharedMemories = %d", mNumSharedMemories);
- return AAUDIO_ERROR_INTERNAL;
+int32_t AudioEndpointParcelable::getNextAvailableSharedMemoryPosition() const {
+ for (int i = 0; i < MAX_SHARED_MEMORIES; ++i) {
+ if (!mSharedMemories[i].isInUse()) {
+ return i;
+ }
}
- return AAUDIO_OK;
+ return -1;
}
void AudioEndpointParcelable::dump() {
ALOGD("======================================= BEGIN");
- ALOGD("mNumSharedMemories = %d", mNumSharedMemories);
- for (int i = 0; i < mNumSharedMemories; i++) {
- mSharedMemories[i].dump();
+ for (int i = 0; i < MAX_SHARED_MEMORIES; ++i) {
+ if (mSharedMemories[i].isInUse()) {
+ ALOGD("Shared memory index=%d", i);
+ mSharedMemories[i].dump();
+ }
}
ALOGD("mUpMessageQueueParcelable =========");
mUpMessageQueueParcelable.dump();
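In updateDataFileDescriptor() above, the data queue and its read/write counters may refer to the same incoming shared memory index, so the memoryIndexMap ensures each incoming memory is set up only once locally. A toy, self-contained sketch of that de-duplication (the incoming indexes are invented for illustration):

    #include <cstdio>
    #include <map>

    int main() {
        // Hypothetical incoming indexes: data queue and read counter share memory 2,
        // the write counter lives in memory 0.
        const int incomingIndexes[3] = {2, 2, 0};
        std::map<int, int> memoryIndexMap;   // incoming index -> local index
        int nextFreeLocalSlot = 0;
        for (int incoming : incomingIndexes) {
            if (memoryIndexMap.count(incoming) == 0) {
                // Set up the incoming memory once, in the next free local slot.
                memoryIndexMap.emplace(incoming, nextFreeLocalSlot++);
            }
        }
        std::printf("distinct shared memories set up: %zu\n", memoryIndexMap.size());  // 2
        return 0;
    }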
diff --git a/media/libaaudio/src/binding/AudioEndpointParcelable.h b/media/libaaudio/src/binding/AudioEndpointParcelable.h
index 5d2c38f..722dd14 100644
--- a/media/libaaudio/src/binding/AudioEndpointParcelable.h
+++ b/media/libaaudio/src/binding/AudioEndpointParcelable.h
@@ -61,8 +61,10 @@
* Update current data file descriptor with given endpoint parcelable.
* @param endpointParcelable an endpoint parcelable that contains new data file
* descriptor information
+ * @return AAUDIO_OK if the data file descriptor is updated successfully, or
+ * AAUDIO_ERROR_OUT_OF_RANGE if there is not enough space for the shared memory.
*/
- void updateDataFileDescriptor(AudioEndpointParcelable* endpointParcelable);
+ aaudio_result_t updateDataFileDescriptor(AudioEndpointParcelable* endpointParcelable);
aaudio_result_t resolve(EndpointDescriptor *descriptor);
aaudio_result_t resolveDataQueue(RingBufferDescriptor *descriptor);
@@ -84,9 +86,10 @@
RingBufferParcelable mDownDataQueueParcelable; // eg. playback
private:
- aaudio_result_t validate() const;
+ // Return the first available shared memory position. Return -1 if all shared memories are
+ // in use.
+ int32_t getNextAvailableSharedMemoryPosition() const;
- int32_t mNumSharedMemories = 0;
SharedMemoryParcelable mSharedMemories[MAX_SHARED_MEMORIES];
};
diff --git a/media/libaaudio/src/binding/RingBufferParcelable.cpp b/media/libaaudio/src/binding/RingBufferParcelable.cpp
index 3bc51d0..f8d748e 100644
--- a/media/libaaudio/src/binding/RingBufferParcelable.cpp
+++ b/media/libaaudio/src/binding/RingBufferParcelable.cpp
@@ -33,7 +33,6 @@
: mReadCounterParcelable(parcelable.readCounterParcelable),
mWriteCounterParcelable(parcelable.writeCounterParcelable),
mDataParcelable(parcelable.dataParcelable),
- mSharedMemoryIndex(parcelable.sharedMemoryIndex),
mBytesPerFrame(parcelable.bytesPerFrame),
mFramesPerBurst(parcelable.framesPerBurst),
mCapacityInFrames(parcelable.capacityInFrames),
@@ -46,7 +45,6 @@
result.readCounterParcelable = mReadCounterParcelable.parcelable();
result.writeCounterParcelable = mWriteCounterParcelable.parcelable();
result.dataParcelable = mDataParcelable.parcelable();
- result.sharedMemoryIndex = mSharedMemoryIndex;
result.bytesPerFrame = mBytesPerFrame;
result.framesPerBurst = mFramesPerBurst;
result.capacityInFrames = mCapacityInFrames;
@@ -62,19 +60,26 @@
int32_t readCounterOffset,
int32_t writeCounterOffset,
int32_t counterSizeBytes) {
- mSharedMemoryIndex = sharedMemoryIndex;
- mReadCounterParcelable.setup(sharedMemoryIndex, readCounterOffset, counterSizeBytes);
- mWriteCounterParcelable.setup(sharedMemoryIndex, writeCounterOffset, counterSizeBytes);
- mDataParcelable.setup(sharedMemoryIndex, dataMemoryOffset, dataSizeInBytes);
+ mReadCounterParcelable.setup({sharedMemoryIndex, readCounterOffset, counterSizeBytes});
+ mWriteCounterParcelable.setup({sharedMemoryIndex, writeCounterOffset, counterSizeBytes});
+ mDataParcelable.setup({sharedMemoryIndex, dataMemoryOffset, dataSizeInBytes});
}
void RingBufferParcelable::setupMemory(int32_t sharedMemoryIndex,
int32_t dataMemoryOffset,
int32_t dataSizeInBytes) {
- mSharedMemoryIndex = sharedMemoryIndex;
- mReadCounterParcelable.setup(sharedMemoryIndex, 0, 0);
- mWriteCounterParcelable.setup(sharedMemoryIndex, 0, 0);
- mDataParcelable.setup(sharedMemoryIndex, dataMemoryOffset, dataSizeInBytes);
+ mReadCounterParcelable.setup({sharedMemoryIndex, 0, 0});
+ mWriteCounterParcelable.setup({sharedMemoryIndex, 0, 0});
+ mDataParcelable.setup({sharedMemoryIndex, dataMemoryOffset, dataSizeInBytes});
+}
+
+void RingBufferParcelable::setupMemory(
+ const SharedRegionParcelable::MemoryInfoTuple& dataMemoryInfo,
+ const SharedRegionParcelable::MemoryInfoTuple& readCounterInfo,
+ const SharedRegionParcelable::MemoryInfoTuple& writeCounterInfo) {
+ mReadCounterParcelable.setup(readCounterInfo);
+ mWriteCounterParcelable.setup(writeCounterInfo);
+ mDataParcelable.setup(dataMemoryInfo);
}
int32_t RingBufferParcelable::getBytesPerFrame() const {
@@ -128,9 +133,11 @@
return AAUDIO_OK;
}
-void RingBufferParcelable::updateMemory(const RingBufferParcelable& parcelable) {
- setupMemory(mSharedMemoryIndex, 0,
- parcelable.getCapacityInFrames() * parcelable.getBytesPerFrame());
+void RingBufferParcelable::updateMemory(const RingBufferParcelable& parcelable,
+ const std::map<int32_t, int32_t>& memoryIndexMap) {
+ setupMemory(parcelable.mDataParcelable.getMemoryInfo(&memoryIndexMap),
+ parcelable.mReadCounterParcelable.getMemoryInfo(&memoryIndexMap),
+ parcelable.mWriteCounterParcelable.getMemoryInfo(&memoryIndexMap));
setBytesPerFrame(parcelable.getBytesPerFrame());
setFramesPerBurst(parcelable.getFramesPerBurst());
setCapacityInFrames(parcelable.getCapacityInFrames());
@@ -152,7 +159,6 @@
return AAUDIO_OK;
}
-
void RingBufferParcelable::dump() {
ALOGD("mCapacityInFrames = %d ---------", mCapacityInFrames);
if (mCapacityInFrames > 0) {
diff --git a/media/libaaudio/src/binding/RingBufferParcelable.h b/media/libaaudio/src/binding/RingBufferParcelable.h
index 29d0d86..4363191 100644
--- a/media/libaaudio/src/binding/RingBufferParcelable.h
+++ b/media/libaaudio/src/binding/RingBufferParcelable.h
@@ -17,6 +17,7 @@
#ifndef ANDROID_AAUDIO_RINGBUFFER_PARCELABLE_H
#define ANDROID_AAUDIO_RINGBUFFER_PARCELABLE_H
+#include <map>
#include <stdint.h>
#include <aaudio/RingBuffer.h>
@@ -46,6 +47,22 @@
int32_t dataMemoryOffset,
int32_t dataSizeInBytes);
+ /**
+ * Set up memory for the RingBufferParcelable.
+ *
+ * This function takes three MemoryInfoTuple parameters to set up memory. Each
+ * MemoryInfoTuple contains the shared memory index, the offset within that shared
+ * memory, and the size of the object. This allows the read counter, write counter and
+ * data memory to be set up even when they are located in different shared memory blocks.
+ *
+ * @param dataMemoryInfo memory info for the audio data region
+ * @param readCounterInfo memory info for the read counter
+ * @param writeCounterInfo memory info for the write counter
+ */
+ void setupMemory(const SharedRegionParcelable::MemoryInfoTuple& dataMemoryInfo,
+ const SharedRegionParcelable::MemoryInfoTuple& readCounterInfo,
+ const SharedRegionParcelable::MemoryInfoTuple& writeCounterInfo);
+
int32_t getBytesPerFrame() const;
void setBytesPerFrame(int32_t bytesPerFrame);
@@ -62,10 +79,24 @@
aaudio_result_t resolve(SharedMemoryParcelable *memoryParcels, RingBufferDescriptor *descriptor);
- void updateMemory(const RingBufferParcelable& parcelable);
+ /**
+ * Update this ring buffer with the given ring buffer.
+ *
+ * @param parcelable the ring buffer to be used to update this ring buffer.
+ * @param memoryIndexMap a map from the shared memory indexes used by the given ring buffer
+ * to the shared memory indexes used by this ring buffer.
+ */
+ void updateMemory(const RingBufferParcelable& parcelable,
+ const std::map<int32_t, int32_t>& memoryIndexMap);
- int32_t getSharedMemoryIndex() const {
- return mSharedMemoryIndex;
+ int32_t getReadCounterSharedMemoryIndex() const {
+ return mReadCounterParcelable.getSharedMemoryIndex();
+ }
+ int32_t getWriteCounterSharedMemoryIndex() const {
+ return mWriteCounterParcelable.getSharedMemoryIndex();
+ }
+ int32_t getDataSharedMemoryIndex() const {
+ return mDataParcelable.getSharedMemoryIndex();
}
void dump();
@@ -77,7 +108,6 @@
SharedRegionParcelable mReadCounterParcelable;
SharedRegionParcelable mWriteCounterParcelable;
SharedRegionParcelable mDataParcelable;
- int32_t mSharedMemoryIndex = -1;
int32_t mBytesPerFrame = 0; // index is in frames
int32_t mFramesPerBurst = 0; // for ISOCHRONOUS queues
int32_t mCapacityInFrames = 0; // zero if unused
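A short, hypothetical example of the new tuple-based setupMemory() overload documented above, assuming a default-constructed RingBufferParcelable, counters in one shared memory block and the audio data in another; all indexes, offsets and sizes are made up:

    // Hypothetical: read/write counters in shared memory 0, audio data in shared memory 1.
    aaudio::RingBufferParcelable ringBuffer;
    ringBuffer.setupMemory(/*dataMemoryInfo=*/   {1, 0, 8192},
                           /*readCounterInfo=*/  {0, 0, 8},
                           /*writeCounterInfo=*/ {0, 64, 8});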
diff --git a/media/libaaudio/src/binding/SharedMemoryParcelable.cpp b/media/libaaudio/src/binding/SharedMemoryParcelable.cpp
index 741aefc..c360a1f 100644
--- a/media/libaaudio/src/binding/SharedMemoryParcelable.cpp
+++ b/media/libaaudio/src/binding/SharedMemoryParcelable.cpp
@@ -146,7 +146,7 @@
return AAUDIO_OK;
}
-void SharedMemoryParcelable::dump() {
+void SharedMemoryParcelable::dump() const {
ALOGD("mFd = %d", mFd.get());
ALOGD("mSizeInBytes = %" PRId64, mSizeInBytes);
}
diff --git a/media/libaaudio/src/binding/SharedMemoryParcelable.h b/media/libaaudio/src/binding/SharedMemoryParcelable.h
index 7762fef..909f3a6 100644
--- a/media/libaaudio/src/binding/SharedMemoryParcelable.h
+++ b/media/libaaudio/src/binding/SharedMemoryParcelable.h
@@ -64,7 +64,9 @@
int32_t getSizeInBytes();
- void dump();
+ bool isInUse() const { return mFd.get() != -1; }
+
+ void dump() const;
// Extract a parcelable representation of this object.
// Since we own a unique FD, move semantics are provided to avoid the need to dupe.
diff --git a/media/libaaudio/src/binding/SharedRegionParcelable.cpp b/media/libaaudio/src/binding/SharedRegionParcelable.cpp
index 6fa109b..fd69ef1 100644
--- a/media/libaaudio/src/binding/SharedRegionParcelable.cpp
+++ b/media/libaaudio/src/binding/SharedRegionParcelable.cpp
@@ -46,12 +46,17 @@
return result;
}
-void SharedRegionParcelable::setup(int32_t sharedMemoryIndex,
- int32_t offsetInBytes,
- int32_t sizeInBytes) {
- mSharedMemoryIndex = sharedMemoryIndex;
- mOffsetInBytes = offsetInBytes;
- mSizeInBytes = sizeInBytes;
+void SharedRegionParcelable::setup(MemoryInfoTuple memoryInfoTuple) {
+ mSharedMemoryIndex = std::get<MEMORY_INDEX>(memoryInfoTuple);
+ mOffsetInBytes = std::get<OFFSET>(memoryInfoTuple);
+ mSizeInBytes = std::get<SIZE>(memoryInfoTuple);
+}
+
+SharedRegionParcelable::MemoryInfoTuple SharedRegionParcelable::getMemoryInfo(
+ const std::map<int32_t, int32_t>* memoryIndexMap) const {
+ return {memoryIndexMap == nullptr ? mSharedMemoryIndex : memoryIndexMap->at(mSharedMemoryIndex),
+ mOffsetInBytes,
+ mSizeInBytes};
}
aaudio_result_t SharedRegionParcelable::resolve(SharedMemoryParcelable *memoryParcels,
diff --git a/media/libaaudio/src/binding/SharedRegionParcelable.h b/media/libaaudio/src/binding/SharedRegionParcelable.h
index c15fc30..74ea75d 100644
--- a/media/libaaudio/src/binding/SharedRegionParcelable.h
+++ b/media/libaaudio/src/binding/SharedRegionParcelable.h
@@ -37,12 +37,36 @@
// Construct based on a parcelable representation.
explicit SharedRegionParcelable(const SharedRegion& parcelable);
- void setup(int32_t sharedMemoryIndex, int32_t offsetInBytes, int32_t sizeInBytes);
+ // A tuple that contains information for setting up shared memory.
+ // The information in the tuple is <shared memory index, offset, size in bytes>.
+ using MemoryInfoTuple = std::tuple<int, int, int>;
+ // Enum values used as indexes when querying a MemoryInfoTuple.
+ enum {
+ MEMORY_INDEX = 0,
+ OFFSET = 1,
+ SIZE = 2,
+ };
+ void setup(MemoryInfoTuple memoryInfoTuple);
aaudio_result_t resolve(SharedMemoryParcelable *memoryParcels, void **regionAddressPtr);
bool isFileDescriptorSafe(SharedMemoryParcelable *memoryParcels);
+ int32_t getSharedMemoryIndex() const { return mSharedMemoryIndex; }
+
+ /**
+ * Get the memory information of this SharedRegionParcelable.
+ *
+ * If `memoryIndexMap` is not null, it indicates that the caller uses a different
+ * indexing for the shared memory. In that case, `memoryIndexMap` must map the shared
+ * memory indexes used by this object to the caller's shared memory indexes.
+ *
+ * @param memoryIndexMap a pointer to a map of memory indexes, mapping each current
+ * shared memory index to a new shared memory index.
+ * @return the memory information of this object as a MemoryInfoTuple.
+ */
+ MemoryInfoTuple getMemoryInfo(const std::map<int32_t, int32_t>* memoryIndexMap) const;
+
void dump();
// Extract a parcelable representation of this object.
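A small sketch of how getMemoryInfo() interacts with a caller-side index map, using the MEMORY_INDEX/OFFSET/SIZE enums declared above; the region values and the map contents are illustrative only:

    // Hypothetical usage of setup()/getMemoryInfo() from the header above.
    aaudio::SharedRegionParcelable region;
    region.setup({/*shared memory index*/ 2, /*offset*/ 64, /*size*/ 16});

    // With no map, the stored index is returned unchanged.
    auto raw = region.getMemoryInfo(/*memoryIndexMap=*/ nullptr);
    int index = std::get<aaudio::SharedRegionParcelable::MEMORY_INDEX>(raw);   // 2

    // With a map, the index is translated into the caller's numbering.
    std::map<int32_t, int32_t> indexMap{{2, 0}};
    auto remapped = region.getMemoryInfo(&indexMap);                           // {0, 64, 16}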
diff --git a/media/libaaudio/src/binding/aidl/aaudio/RingBuffer.aidl b/media/libaaudio/src/binding/aidl/aaudio/RingBuffer.aidl
index dd64493..998fc66 100644
--- a/media/libaaudio/src/binding/aidl/aaudio/RingBuffer.aidl
+++ b/media/libaaudio/src/binding/aidl/aaudio/RingBuffer.aidl
@@ -26,5 +26,4 @@
int framesPerBurst; // for ISOCHRONOUS queues
int capacityInFrames; // zero if unused
int /* RingbufferFlags */ flags; // = RingbufferFlags::NONE;
- int sharedMemoryIndex;
-}
\ No newline at end of file
+}
diff --git a/media/libaaudio/src/client/AudioStreamInternal.cpp b/media/libaaudio/src/client/AudioStreamInternal.cpp
index 8fe8569..84c715f 100644
--- a/media/libaaudio/src/client/AudioStreamInternal.cpp
+++ b/media/libaaudio/src/client/AudioStreamInternal.cpp
@@ -33,6 +33,7 @@
#include <utils/Trace.h>
#include "AudioEndpointParcelable.h"
+#include "binding/AAudioBinderClient.h"
#include "binding/AAudioStreamRequest.h"
#include "binding/AAudioStreamConfiguration.h"
#include "binding/AAudioServiceMessage.h"
@@ -65,13 +66,13 @@
AudioStreamInternal::AudioStreamInternal(AAudioServiceInterface &serviceInterface, bool inService)
: AudioStream()
, mClockModel()
- , mServiceStreamHandle(AAUDIO_HANDLE_INVALID)
, mInService(inService)
, mServiceInterface(serviceInterface)
, mAtomicInternalTimestamp()
, mWakeupDelayNanos(AAudioProperty_getWakeupDelayMicros() * AAUDIO_NANOS_PER_MICROSECOND)
, mMinimumSleepNanos(AAudioProperty_getMinimumSleepMicros() * AAUDIO_NANOS_PER_MICROSECOND)
{
+
}
AudioStreamInternal::~AudioStreamInternal() {
@@ -137,8 +138,8 @@
mDeviceChannelCount = getSamplesPerFrame(); // Assume it will be the same. Update if not.
- mServiceStreamHandle = mServiceInterface.openStream(request, configurationOutput);
- if (mServiceStreamHandle < 0
+ mServiceStreamHandleInfo = mServiceInterface.openStream(request, configurationOutput);
+ if (getServiceHandle() < 0
&& (request.getConfiguration().getSamplesPerFrame() == 1
|| request.getConfiguration().getChannelMask() == AAUDIO_CHANNEL_MONO)
&& getDirection() == AAUDIO_DIRECTION_OUTPUT
@@ -147,12 +148,12 @@
// Only do this in the client. Otherwise we end up with a mono mixer in the service
// that writes to a stereo MMAP stream.
ALOGD("%s() - openStream() returned %d, try switching from MONO to STEREO",
- __func__, mServiceStreamHandle);
+ __func__, getServiceHandle());
request.getConfiguration().setChannelMask(AAUDIO_CHANNEL_STEREO);
- mServiceStreamHandle = mServiceInterface.openStream(request, configurationOutput);
+ mServiceStreamHandleInfo = mServiceInterface.openStream(request, configurationOutput);
}
- if (mServiceStreamHandle < 0) {
- return mServiceStreamHandle;
+ if (getServiceHandle() < 0) {
+ return getServiceHandle();
}
// This must match the key generated in oboeservice/AAudioServiceStreamBase.cpp
@@ -160,7 +161,7 @@
if (!mInService) {
// No need to log if it is from service side.
mMetricsId = std::string(AMEDIAMETRICS_KEY_PREFIX_AUDIO_STREAM)
- + std::to_string(mServiceStreamHandle);
+ + std::to_string(getServiceHandle());
}
android::mediametrics::LogItem(mMetricsId)
@@ -200,7 +201,7 @@
setHardwareSampleRate(configurationOutput.getHardwareSampleRate());
setHardwareFormat(configurationOutput.getHardwareFormat());
- result = mServiceInterface.getStreamDescription(mServiceStreamHandle, mEndPointParcelable);
+ result = mServiceInterface.getStreamDescription(mServiceStreamHandleInfo, mEndPointParcelable);
if (result != AAUDIO_OK) {
goto error;
}
@@ -321,8 +322,8 @@
// This must be called under mStreamLock.
aaudio_result_t AudioStreamInternal::release_l() {
aaudio_result_t result = AAUDIO_OK;
- ALOGD("%s(): mServiceStreamHandle = 0x%08X", __func__, mServiceStreamHandle);
- if (mServiceStreamHandle != AAUDIO_HANDLE_INVALID) {
+ ALOGD("%s(): mServiceStreamHandle = 0x%08X", __func__, getServiceHandle());
+ if (getServiceHandle() != AAUDIO_HANDLE_INVALID) {
// Don't release a stream while it is running. Stop it first.
// If DISCONNECTED then we should still try to stop in case the
// error callback is still running.
@@ -333,10 +334,10 @@
logReleaseBufferState();
setState(AAUDIO_STREAM_STATE_CLOSING);
- aaudio_handle_t serviceStreamHandle = mServiceStreamHandle;
- mServiceStreamHandle = AAUDIO_HANDLE_INVALID;
+ auto serviceStreamHandleInfo = mServiceStreamHandleInfo;
+ mServiceStreamHandleInfo = AAudioHandleInfo();
- mServiceInterface.closeStream(serviceStreamHandle);
+ mServiceInterface.closeStream(serviceStreamHandleInfo);
mCallbackBuffer.reset();
// Update local frame counters so we can query them after releasing the endpoint.
@@ -378,13 +379,17 @@
mAudioEndpoint->read(buffer, getBufferCapacity());
mEndPointParcelable.closeDataFileDescriptor();
aaudio_result_t result = mServiceInterface.exitStandby(
- mServiceStreamHandle, endpointParcelable);
+ mServiceStreamHandleInfo, endpointParcelable);
if (result != AAUDIO_OK) {
ALOGE("Failed to exit standby, error=%d", result);
goto exit;
}
// Reconstruct data queue descriptor using new shared file descriptor.
- mEndPointParcelable.updateDataFileDescriptor(&endpointParcelable);
+ result = mEndPointParcelable.updateDataFileDescriptor(&endpointParcelable);
+ if (result != AAUDIO_OK) {
+ ALOGE("%s failed to update data file descriptor, error=%d", __func__, result);
+ goto exit;
+ }
result = mEndPointParcelable.resolveDataQueue(&mEndpointDescriptor.dataQueueDescriptor);
if (result != AAUDIO_OK) {
ALOGE("Failed to resolve data queue after exiting standby, error=%d", result);
@@ -430,7 +435,7 @@
aaudio_result_t AudioStreamInternal::requestStart_l()
{
int64_t startTime;
- if (mServiceStreamHandle == AAUDIO_HANDLE_INVALID) {
+ if (getServiceHandle() == AAUDIO_HANDLE_INVALID) {
ALOGD("requestStart() mServiceStreamHandle invalid");
return AAUDIO_ERROR_INVALID_STATE;
}
@@ -451,12 +456,12 @@
prepareBuffersForStart(); // tell subclasses to get ready
- aaudio_result_t result = mServiceInterface.startStream(mServiceStreamHandle);
+ aaudio_result_t result = mServiceInterface.startStream(mServiceStreamHandleInfo);
if (result == AAUDIO_ERROR_STANDBY) {
// The stream is at standby mode. Need to exit standby before starting the stream.
result = exitStandby_l();
if (result == AAUDIO_OK) {
- result = mServiceInterface.startStream(mServiceStreamHandle);
+ result = mServiceInterface.startStream(mServiceStreamHandleInfo);
}
}
if (result != AAUDIO_OK) {
@@ -535,9 +540,9 @@
return AAUDIO_OK;
}
- if (mServiceStreamHandle == AAUDIO_HANDLE_INVALID) {
+ if (getServiceHandle() == AAUDIO_HANDLE_INVALID) {
ALOGW("%s() mServiceStreamHandle invalid = 0x%08X",
- __func__, mServiceStreamHandle);
+ __func__, getServiceHandle());
return AAUDIO_ERROR_INVALID_STATE;
}
@@ -545,7 +550,7 @@
setState(AAUDIO_STREAM_STATE_STOPPING);
mAtomicInternalTimestamp.clear();
- result = mServiceInterface.stopStream(mServiceStreamHandle);
+ result = mServiceInterface.stopStream(mServiceStreamHandleInfo);
if (result == AAUDIO_ERROR_INVALID_HANDLE) {
ALOGD("%s() INVALID_HANDLE, stream was probably stolen", __func__);
result = AAUDIO_OK;
@@ -554,31 +559,31 @@
}
aaudio_result_t AudioStreamInternal::registerThread() {
- if (mServiceStreamHandle == AAUDIO_HANDLE_INVALID) {
+ if (getServiceHandle() == AAUDIO_HANDLE_INVALID) {
ALOGW("%s() mServiceStreamHandle invalid", __func__);
return AAUDIO_ERROR_INVALID_STATE;
}
- return mServiceInterface.registerAudioThread(mServiceStreamHandle,
- gettid(),
- getPeriodNanoseconds());
+ return mServiceInterface.registerAudioThread(mServiceStreamHandleInfo,
+ gettid(),
+ getPeriodNanoseconds());
}
aaudio_result_t AudioStreamInternal::unregisterThread() {
- if (mServiceStreamHandle == AAUDIO_HANDLE_INVALID) {
+ if (getServiceHandle() == AAUDIO_HANDLE_INVALID) {
ALOGW("%s() mServiceStreamHandle invalid", __func__);
return AAUDIO_ERROR_INVALID_STATE;
}
- return mServiceInterface.unregisterAudioThread(mServiceStreamHandle, gettid());
+ return mServiceInterface.unregisterAudioThread(mServiceStreamHandleInfo, gettid());
}
aaudio_result_t AudioStreamInternal::startClient(const android::AudioClient& client,
const audio_attributes_t *attr,
audio_port_handle_t *portHandle) {
ALOGV("%s() called", __func__);
- if (mServiceStreamHandle == AAUDIO_HANDLE_INVALID) {
+ if (getServiceHandle() == AAUDIO_HANDLE_INVALID) {
return AAUDIO_ERROR_INVALID_STATE;
}
- aaudio_result_t result = mServiceInterface.startClient(mServiceStreamHandle,
+ aaudio_result_t result = mServiceInterface.startClient(mServiceStreamHandleInfo,
client, attr, portHandle);
ALOGV("%s(%d) returning %d", __func__, *portHandle, result);
return result;
@@ -586,10 +591,10 @@
aaudio_result_t AudioStreamInternal::stopClient(audio_port_handle_t portHandle) {
ALOGV("%s(%d) called", __func__, portHandle);
- if (mServiceStreamHandle == AAUDIO_HANDLE_INVALID) {
+ if (getServiceHandle() == AAUDIO_HANDLE_INVALID) {
return AAUDIO_ERROR_INVALID_STATE;
}
- aaudio_result_t result = mServiceInterface.stopClient(mServiceStreamHandle, portHandle);
+ aaudio_result_t result = mServiceInterface.stopClient(mServiceStreamHandleInfo, portHandle);
ALOGV("%s(%d) returning %d", __func__, portHandle, result);
return result;
}
@@ -766,6 +771,22 @@
aaudio_result_t AudioStreamInternal::processData(void *buffer, int32_t numFrames,
int64_t timeoutNanoseconds)
{
+ if (isDisconnected()) {
+ return AAUDIO_ERROR_DISCONNECTED;
+ }
+ if (!mInService &&
+ AAudioBinderClient::getInstance().getServiceLifetimeId() != getServiceLifetimeId()) {
+ // The service lifetime id changes whenever the binder dies. If the service
+ // lifetime id from AAudioBinderClient differs from the cached one, return
+ // AAUDIO_ERROR_DISCONNECTED.
+ // Note that the service lifetime id is only compared when not in service, as all
+ // streams in the service are gone when the aaudio service dies.
+ mClockModel.stop(AudioClock::getNanoseconds());
+ // Mark the stream as disconnected, since the service lifetime id only changes
+ // when the binder dies.
+ setDisconnected();
+ return AAUDIO_ERROR_DISCONNECTED;
+ }
const char * traceName = "aaProc";
const char * fifoName = "aaRdy";
ATRACE_BEGIN(traceName);
diff --git a/media/libaaudio/src/client/AudioStreamInternal.h b/media/libaaudio/src/client/AudioStreamInternal.h
index 4ea61d2..9c06121 100644
--- a/media/libaaudio/src/client/AudioStreamInternal.h
+++ b/media/libaaudio/src/client/AudioStreamInternal.h
@@ -83,7 +83,11 @@
aaudio_result_t stopClient(audio_port_handle_t clientHandle);
aaudio_handle_t getServiceHandle() const {
- return mServiceStreamHandle;
+ return mServiceStreamHandleInfo.getHandle();
+ }
+
+ int32_t getServiceLifetimeId() const {
+ return mServiceStreamHandleInfo.getServiceLifetimeId();
}
protected:
@@ -148,7 +152,8 @@
std::unique_ptr<AudioEndpoint> mAudioEndpoint; // source for reads or sink for writes
- aaudio_handle_t mServiceStreamHandle; // opaque handle returned from service
+ // opaque handle returned from service
+ AAudioHandleInfo mServiceStreamHandleInfo;
int32_t mXRunCount = 0; // how many underrun events?
diff --git a/media/libaaudio/src/client/AudioStreamInternalPlay.cpp b/media/libaaudio/src/client/AudioStreamInternalPlay.cpp
index 7c7a969..89dd8ff 100644
--- a/media/libaaudio/src/client/AudioStreamInternalPlay.cpp
+++ b/media/libaaudio/src/client/AudioStreamInternalPlay.cpp
@@ -74,7 +74,7 @@
if (result != AAUDIO_OK) {
return result;
}
- if (mServiceStreamHandle == AAUDIO_HANDLE_INVALID) {
+ if (getServiceHandle() == AAUDIO_HANDLE_INVALID) {
ALOGW("%s() mServiceStreamHandle invalid", __func__);
return AAUDIO_ERROR_INVALID_STATE;
}
@@ -82,17 +82,17 @@
mClockModel.stop(AudioClock::getNanoseconds());
setState(AAUDIO_STREAM_STATE_PAUSING);
mAtomicInternalTimestamp.clear();
- return mServiceInterface.pauseStream(mServiceStreamHandle);
+ return mServiceInterface.pauseStream(mServiceStreamHandleInfo);
}
aaudio_result_t AudioStreamInternalPlay::requestFlush_l() {
- if (mServiceStreamHandle == AAUDIO_HANDLE_INVALID) {
+ if (getServiceHandle() == AAUDIO_HANDLE_INVALID) {
ALOGW("%s() mServiceStreamHandle invalid", __func__);
return AAUDIO_ERROR_INVALID_STATE;
}
setState(AAUDIO_STREAM_STATE_FLUSHING);
- return mServiceInterface.flushStream(mServiceStreamHandle);
+ return mServiceInterface.flushStream(mServiceStreamHandleInfo);
}
void AudioStreamInternalPlay::prepareBuffersForStart() {
diff --git a/media/libaaudio/src/client/IsochronousClockModel.cpp b/media/libaaudio/src/client/IsochronousClockModel.cpp
index 6921271..a39e90e 100644
--- a/media/libaaudio/src/client/IsochronousClockModel.cpp
+++ b/media/libaaudio/src/client/IsochronousClockModel.cpp
@@ -43,12 +43,12 @@
// and dumped to the log when the stream is stopped.
IsochronousClockModel::IsochronousClockModel()
- : mLatenessForDriftNanos(kInitialLatenessForDriftNanos)
{
if ((AAudioProperty_getLogMask() & AAUDIO_LOG_CLOCK_MODEL_HISTOGRAM) != 0) {
mHistogramMicros = std::make_unique<Histogram>(kHistogramBinCount,
kHistogramBinWidthMicros);
}
+ update();
}
void IsochronousClockModel::setPositionAndTime(int64_t framePosition, int64_t nanoTime) {
@@ -61,15 +61,19 @@
ALOGV("start(nanos = %lld)\n", (long long) nanoTime);
mMarkerNanoTime = nanoTime;
mState = STATE_STARTING;
+ mConsecutiveVeryLateCount = 0;
+ mDspStallCount = 0;
if (mHistogramMicros) {
mHistogramMicros->clear();
}
}
void IsochronousClockModel::stop(int64_t nanoTime) {
- ALOGD("stop(nanos = %lld) max lateness = %d micros\n",
- (long long) nanoTime,
- (int) (mMaxMeasuredLatenessNanos / 1000));
+ ALOGD("stop(nanos = %lld) max lateness = %d micros, DSP stalled %d times",
+ (long long) nanoTime,
+ (int) (mMaxMeasuredLatenessNanos / 1000),
+ mDspStallCount
+ );
setPositionAndTime(convertTimeToPosition(nanoTime), nanoTime);
// TODO should we set position?
mState = STATE_STOPPED;
@@ -108,7 +112,9 @@
// ALOGD("processTimestamp() - mSampleRate = %d", mSampleRate);
// ALOGD("processTimestamp() - mState = %d", mState);
+ // Lateness relative to the start of the window.
int64_t latenessNanos = nanosDelta - expectedNanosDelta;
+ int32_t nextConsecutiveVeryLateCount = 0;
switch (mState) {
case STATE_STOPPED:
break;
@@ -137,58 +143,94 @@
// Or we may be drifting due to a fast HW clock.
setPositionAndTime(framePosition, nanoTime);
#if ICM_LOG_DRIFT
- int earlyDeltaMicros = (int) ((expectedNanosDelta - nanosDelta)/ 1000);
- ALOGD("%s() - STATE_RUNNING - #%d, %4d micros EARLY",
+ int earlyDeltaMicros = (int) ((expectedNanosDelta - nanosDelta)
+ / AAUDIO_NANOS_PER_MICROSECOND);
+ ALOGD("%s() - STATE_RUNNING - #%d, %5d micros EARLY",
__func__, mTimestampCount, earlyDeltaMicros);
#endif
- } else if (latenessNanos > mLatenessForDriftNanos) {
- // When we are on the late side, it may be because of preemption in the kernel,
- // or timing jitter caused by resampling in the DSP,
- // or we may be drifting due to a slow HW clock.
- // We add slight drift value just in case there is actual long term drift
- // forward caused by a slower clock.
- // If the clock is faster than the model will get pushed earlier
- // by the code in the earlier branch.
- // The two opposing forces should allow the model to track the real clock
- // over a long time.
- int64_t driftingTime = mMarkerNanoTime + expectedNanosDelta + kDriftNanos;
- setPositionAndTime(framePosition, driftingTime);
-#if ICM_LOG_DRIFT
- ALOGD("%s() - STATE_RUNNING - #%d, DRIFT, lateness = %d micros",
+ } else if (latenessNanos > mLatenessForJumpNanos) {
+ ALOGD("%s() - STATE_RUNNING - #%d, %5d micros VERY LATE, %d times",
__func__,
mTimestampCount,
- (int) (latenessNanos / 1000));
-#endif
+ (int) (latenessNanos / AAUDIO_NANOS_PER_MICROSECOND),
+ mConsecutiveVeryLateCount
+ );
+ // A lateness this large is probably due to a stall in the DSP.
+ // A pause causes a persistent lateness so we can detect it by counting
+ // consecutive late timestamps.
+ if (mConsecutiveVeryLateCount >= kVeryLateCountsNeededToTriggerJump) {
+ // Assume the timestamp is valid and let subsequent EARLY timestamps
+ // move the window quickly to the correct place.
+ setPositionAndTime(framePosition, nanoTime); // JUMP!
+ mDspStallCount++;
+ // Throttle the warnings but do not silence them.
+ // They indicate a bug that needs to be fixed!
+ if ((nanoTime - mLastJumpWarningTimeNanos) > AAUDIO_NANOS_PER_SECOND) {
+ ALOGW("%s() - STATE_RUNNING - #%d, %5d micros VERY LATE! Force window jump"
+ ", mDspStallCount = %d",
+ __func__,
+ mTimestampCount,
+ (int) (latenessNanos / AAUDIO_NANOS_PER_MICROSECOND),
+ mDspStallCount
+ );
+ mLastJumpWarningTimeNanos = nanoTime;
+ }
+ } else {
+ nextConsecutiveVeryLateCount = mConsecutiveVeryLateCount + 1;
+ driftForward(latenessNanos, expectedNanosDelta, framePosition);
+ }
+ } else if (latenessNanos > mLatenessForDriftNanos) {
+ driftForward(latenessNanos, expectedNanosDelta, framePosition);
}
+ mConsecutiveVeryLateCount = nextConsecutiveVeryLateCount;
// Modify mMaxMeasuredLatenessNanos.
// This affects the "late" side of the window, which controls input glitches.
if (latenessNanos > mMaxMeasuredLatenessNanos) { // increase
#if ICM_LOG_DRIFT
- ALOGD("%s() - STATE_RUNNING - #%d, newmax %d - oldmax %d = %4d micros LATE",
+ ALOGD("%s() - STATE_RUNNING - #%d, newmax %d, oldmax %d micros LATE",
__func__,
mTimestampCount,
- (int) (latenessNanos / 1000),
- mMaxMeasuredLatenessNanos / 1000,
- (int) ((latenessNanos - mMaxMeasuredLatenessNanos) / 1000)
+ (int) (latenessNanos / AAUDIO_NANOS_PER_MICROSECOND),
+ (int) (mMaxMeasuredLatenessNanos / AAUDIO_NANOS_PER_MICROSECOND)
);
#endif
mMaxMeasuredLatenessNanos = (int32_t) latenessNanos;
- // Calculate upper region that will trigger a drift forwards.
- mLatenessForDriftNanos = mMaxMeasuredLatenessNanos - (mMaxMeasuredLatenessNanos >> 4);
- } else { // decrease
- // If this is an outlier in lateness then mMaxMeasuredLatenessNanos can go high
- // and stay there. So we slowly reduce mMaxMeasuredLatenessNanos for better
- // long term stability. The two opposing forces will keep mMaxMeasuredLatenessNanos
- // within a reasonable range.
- mMaxMeasuredLatenessNanos -= kDriftNanos;
}
+
break;
default:
break;
}
}
+// When we are on the late side, it may be because of preemption in the kernel,
+// or timing jitter caused by resampling in the DSP,
+// or we may be drifting due to a slow HW clock.
+// We add a slight drift value just in case there is actual long term drift
+// forward caused by a slower clock.
+// If the clock is faster, then the model will get pushed earlier
+// by the code in the earlier branch.
+// The two opposing forces should allow the model to track the real clock
+// over a long time.
+void IsochronousClockModel::driftForward(int64_t latenessNanos,
+ int64_t expectedNanosDelta,
+ int64_t framePosition) {
+ const int64_t driftNanos = (latenessNanos - mLatenessForDriftNanos) >> kShifterForDrift;
+ const int64_t minDriftNanos = std::min(driftNanos, kMaxDriftNanos);
+ const int64_t expectedMarkerNanoTime = mMarkerNanoTime + expectedNanosDelta;
+ const int64_t driftedTime = expectedMarkerNanoTime + minDriftNanos;
+ setPositionAndTime(framePosition, driftedTime);
+#if ICM_LOG_DRIFT
+ ALOGD("%s() - STATE_RUNNING - #%d, %5d micros LATE, nudge window forward by %d micros",
+ __func__,
+ mTimestampCount,
+ (int) (latenessNanos / AAUDIO_NANOS_PER_MICROSECOND),
+ (int) (minDriftNanos / AAUDIO_NANOS_PER_MICROSECOND)
+ );
+#endif
+}
+
void IsochronousClockModel::setSampleRate(int32_t sampleRate) {
mSampleRate = sampleRate;
update();
@@ -197,11 +239,18 @@
void IsochronousClockModel::setFramesPerBurst(int32_t framesPerBurst) {
mFramesPerBurst = framesPerBurst;
update();
+ ALOGD("%s() - mFramesPerBurst = %d - mBurstPeriodNanos = %" PRId64,
+ __func__,
+ mFramesPerBurst,
+ mBurstPeriodNanos
+ );
}
// Update expected lateness based on sampleRate and framesPerBurst
void IsochronousClockModel::update() {
- mBurstPeriodNanos = convertDeltaPositionToTime(mFramesPerBurst); // uses mSampleRate
+ mBurstPeriodNanos = convertDeltaPositionToTime(mFramesPerBurst);
+ mLatenessForDriftNanos = mBurstPeriodNanos + kLatenessMarginForSchedulingJitter;
+ mLatenessForJumpNanos = mLatenessForDriftNanos * kScalerForJumpLateness;
}
int64_t IsochronousClockModel::convertDeltaPositionToTime(int64_t framesDelta) const {
@@ -257,11 +306,11 @@
}
void IsochronousClockModel::dump() const {
- ALOGD("mMarkerFramePosition = %" PRIu64, mMarkerFramePosition);
- ALOGD("mMarkerNanoTime = %" PRIu64, mMarkerNanoTime);
+ ALOGD("mMarkerFramePosition = %" PRId64, mMarkerFramePosition);
+ ALOGD("mMarkerNanoTime = %" PRId64, mMarkerNanoTime);
ALOGD("mSampleRate = %6d", mSampleRate);
ALOGD("mFramesPerBurst = %6d", mFramesPerBurst);
- ALOGD("mMaxMeasuredLatenessNanos = %6d", mMaxMeasuredLatenessNanos);
+ ALOGD("mMaxMeasuredLatenessNanos = %6" PRId64, mMaxMeasuredLatenessNanos);
ALOGD("mState = %6d", mState);
}
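A worked numeric example of the drift nudge in driftForward() above, assuming 48 frames per burst at 48 kHz (so a 1 ms burst period) and the constants introduced in this change; the lateness value itself is made up:

    #include <algorithm>
    #include <cstdint>
    #include <cstdio>

    int main() {
        // Assumed configuration: 48 frames per burst at 48 kHz, as in the defaults above.
        const int64_t burstPeriodNanos = 1'000'000;                         // 1 ms per burst
        const int64_t latenessForDriftNanos = burstPeriodNanos + 1'000'000; // + jitter margin
        const int64_t kMaxDriftNanos = 10'000;                              // 10 microsecond cap
        const int kShifterForDrift = 6;                                     // divide by 2^6

        const int64_t latenessNanos = 2'640'000;                            // 2.64 ms late
        const int64_t driftNanos = (latenessNanos - latenessForDriftNanos) >> kShifterForDrift;
        const int64_t nudgeNanos = std::min(driftNanos, kMaxDriftNanos);
        std::printf("window nudged forward by %lld ns\n", (long long) nudgeNanos);  // 10000 ns
        return 0;
    }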
diff --git a/media/libaaudio/src/client/IsochronousClockModel.h b/media/libaaudio/src/client/IsochronousClockModel.h
index 3007237..5be745e 100644
--- a/media/libaaudio/src/client/IsochronousClockModel.h
+++ b/media/libaaudio/src/client/IsochronousClockModel.h
@@ -129,6 +129,9 @@
private:
+ void driftForward(int64_t latenessNanos,
+ int64_t expectedNanosDelta,
+ int64_t framePosition);
int32_t getLateTimeOffsetNanos() const;
void update();
@@ -139,28 +142,44 @@
STATE_RUNNING
};
- // Amount of time to drift forward when we get a late timestamp.
- static constexpr int32_t kDriftNanos = 1 * 1000;
+ // Maximum amount of time to drift forward when we get a late timestamp.
+ static constexpr int64_t kMaxDriftNanos = 10 * AAUDIO_NANOS_PER_MICROSECOND;
// Safety margin to add to the late edge of the timestamp window.
- static constexpr int32_t kExtraLatenessNanos = 100 * 1000;
- // Initial small threshold for causing a drift later in time.
- static constexpr int32_t kInitialLatenessForDriftNanos = 10 * 1000;
+ static constexpr int32_t kExtraLatenessNanos = 100 * AAUDIO_NANOS_PER_MICROSECOND;
+ // Predicted lateness due to scheduling jitter in the HAL timestamp collection.
+ static constexpr int32_t kLatenessMarginForSchedulingJitter
+ = 1000 * AAUDIO_NANOS_PER_MICROSECOND;
+ // Amount we multiply mLatenessForDriftNanos to get mLatenessForJumpNanos.
+ // This determines when we go from thinking the clock is drifting to
+ // when it has actually paused briefly.
+ static constexpr int32_t kScalerForJumpLateness = 5;
+ // Amount to divide lateness past the expected burst window to generate
+ // the drift value for the window. This is meant to be a very slight nudge forward.
+ static constexpr int32_t kShifterForDrift = 6; // divide by 2^N
+ static constexpr int32_t kVeryLateCountsNeededToTriggerJump = 2;
static constexpr int32_t kHistogramBinWidthMicros = 50;
- static constexpr int32_t kHistogramBinCount = 128;
+ static constexpr int32_t kHistogramBinCount = 128;
int64_t mMarkerFramePosition{0}; // Estimated HW position.
int64_t mMarkerNanoTime{0}; // Estimated HW time.
+ int64_t mBurstPeriodNanos{0}; // Time between HW bursts.
+ // Includes mBurstPeriodNanos because we sample randomly over time.
+ int64_t mMaxMeasuredLatenessNanos{0};
+ // Threshold for lateness that triggers a drift later in time.
+ int64_t mLatenessForDriftNanos{0}; // Set in update()
+ // Based on the observed lateness when the DSP is paused for playing a touch sound.
+ int64_t mLatenessForJumpNanos{0}; // Set in update()
+ int64_t mLastJumpWarningTimeNanos{0}; // For throttling warnings.
+
int32_t mSampleRate{48000};
int32_t mFramesPerBurst{48}; // number of frames transferred at one time.
- int32_t mBurstPeriodNanos{0}; // Time between HW bursts.
- // Includes mBurstPeriodNanos because we sample randomly over time.
- int32_t mMaxMeasuredLatenessNanos{0};
- // Threshold for lateness that triggers a drift later in time.
- int32_t mLatenessForDriftNanos;
+ int32_t mConsecutiveVeryLateCount{0}; // To detect persistent DSP lateness.
+
clock_model_state_t mState{STATE_STOPPED}; // State machine handles startup sequence.
int32_t mTimestampCount = 0; // For logging.
+ int32_t mDspStallCount = 0; // For logging.
// distribution of timestamps relative to earliest
std::unique_ptr<android::audio_utils::Histogram> mHistogramMicros;
diff --git a/media/libaaudio/tests/test_clock_model.cpp b/media/libaaudio/tests/test_clock_model.cpp
index 7f7abbd..e455768 100644
--- a/media/libaaudio/tests/test_clock_model.cpp
+++ b/media/libaaudio/tests/test_clock_model.cpp
@@ -30,7 +30,8 @@
// We can use arbitrary values here because we are not opening a real audio stream.
#define SAMPLE_RATE 48000
#define HW_FRAMES_PER_BURST 48
-#define NANOS_PER_BURST (NANOS_PER_SECOND * HW_FRAMES_PER_BURST / SAMPLE_RATE)
+// Sometimes we need a (double) value to avoid misguided build warnings.
+#define NANOS_PER_BURST ((double) NANOS_PER_SECOND * HW_FRAMES_PER_BURST / SAMPLE_RATE)
class ClockModelTestFixture: public ::testing::Test {
public:
@@ -49,10 +50,20 @@
// cleanup any pending stuff, but no exceptions allowed
}
- // Test processing of timestamps when the hardware may be slightly off from
- // the expected sample rate.
- void checkDriftingClock(double hardwareFramesPerSecond, int numLoops) {
+ /** Test processing of timestamps when the hardware may be slightly off from
+ * the expected sample rate.
+ * @param hardwareFramesPerSecond sample rate that may be slightly off
+ * @param numLoops number of iterations
+ * @param hardwarePauseTime number of seconds to jump forward at halfway point
+ */
+ void checkDriftingClock(double hardwareFramesPerSecond,
+ int numLoops,
+ double hardwarePauseTime = 0.0) {
+ int checksToSkip = 0;
const int64_t startTimeNanos = 500000000; // arbitrary
+ int64_t jumpOffsetNanos = 0;
+
+ srand48(123456); // arbitrary seed for repeatable test results
model.start(startTimeNanos);
const int64_t startPositionFrames = HW_FRAMES_PER_BURST; // hardware
@@ -64,7 +75,7 @@
model.processTimestamp(startPositionFrames, markerTime);
ASSERT_EQ(startPositionFrames, model.convertTimeToPosition(markerTime));
- double elapsedTimeSeconds = startTimeNanos / (double) NANOS_PER_SECOND;
+ double elapsedTimeSeconds = 0.0;
for (int i = 0; i < numLoops; i++) {
// Calculate random delay over several bursts.
const double timeDelaySeconds = 10.0 * drand48() * NANOS_PER_BURST / NANOS_PER_SECOND;
@@ -75,12 +86,37 @@
const int64_t currentTimeFrames = startPositionFrames +
(int64_t)(hardwareFramesPerSecond * elapsedTimeSeconds);
const int64_t numBursts = currentTimeFrames / HW_FRAMES_PER_BURST;
- const int64_t alignedPosition = startPositionFrames + (numBursts * HW_FRAMES_PER_BURST);
+ const int64_t hardwarePosition = startPositionFrames
+ + (numBursts * HW_FRAMES_PER_BURST);
- // Apply drifting timestamp.
- model.processTimestamp(alignedPosition, currentTimeNanos);
+ // Simulate a pause in the DSP where the position freezes for a length of time.
+ if (i == numLoops / 2) {
+ jumpOffsetNanos = (int64_t)(hardwarePauseTime * NANOS_PER_SECOND);
+ checksToSkip = 5; // Give the model some time to catch up.
+ }
- ASSERT_EQ(alignedPosition, model.convertTimeToPosition(currentTimeNanos));
+ // Apply drifting timestamp. Add a random time to simulate the
+ // random sampling of the clock that occurs when polling the DSP clock.
+ int64_t sampledTimeNanos = (int64_t) (currentTimeNanos
+ + jumpOffsetNanos
+ + (drand48() * NANOS_PER_BURST));
+ model.processTimestamp(hardwarePosition, sampledTimeNanos);
+
+ if (checksToSkip > 0) {
+ checksToSkip--;
+ } else {
+ // When the model is drifting it may be pushed forward or backward.
+ const int64_t modelPosition = model.convertTimeToPosition(sampledTimeNanos);
+ if (hardwareFramesPerSecond >= SAMPLE_RATE) { // fast hardware
+ ASSERT_LE(hardwarePosition - HW_FRAMES_PER_BURST, modelPosition);
+ ASSERT_GE(hardwarePosition + HW_FRAMES_PER_BURST, modelPosition);
+ } else {
+ // Slow hardware. If this fails then the model may be drifting
+ // forward in time too slowly. Increase kMaxDriftNanos.
+ ASSERT_LE(hardwarePosition, modelPosition);
+ ASSERT_GE(hardwarePosition + (2 * HW_FRAMES_PER_BURST), modelPosition);
+ }
+ }
}
}
@@ -144,23 +180,31 @@
EXPECT_EQ(position, model.convertTimeToPosition(markerTime + (73 * NANOS_PER_MICROSECOND)));
// convertPositionToTime rounds up
- EXPECT_EQ(markerTime + NANOS_PER_BURST, model.convertPositionToTime(position + 17));
+ EXPECT_EQ(markerTime + (int64_t)NANOS_PER_BURST, model.convertPositionToTime(position + 17));
}
-#define NUM_LOOPS_DRIFT 10000
+#define NUM_LOOPS_DRIFT 200000
-// test nudging the window by using a drifting HW clock
TEST_F(ClockModelTestFixture, clock_no_drift) {
checkDriftingClock(SAMPLE_RATE, NUM_LOOPS_DRIFT);
}
-// These slow drift rates caused errors when I disabled the code that handles
-// drifting in the clock model. So I think the test is valid.
+// Test drifting hardware clocks.
// It is unlikely that real hardware would be off by more than this amount.
+
+// Test a slow clock. This will cause the times to be later than expected.
+// This will push the clock model window forward and cause it to drift.
TEST_F(ClockModelTestFixture, clock_slow_drift) {
- checkDriftingClock(0.998 * SAMPLE_RATE, NUM_LOOPS_DRIFT);
+ checkDriftingClock(0.99998 * SAMPLE_RATE, NUM_LOOPS_DRIFT);
}
+// Test a fast hardware clock. This will cause the times to be earlier
+// than expected. This will cause the clock model to jump backwards quickly.
TEST_F(ClockModelTestFixture, clock_fast_drift) {
- checkDriftingClock(1.002 * SAMPLE_RATE, NUM_LOOPS_DRIFT);
-}
\ No newline at end of file
+ checkDriftingClock(1.00002 * SAMPLE_RATE, NUM_LOOPS_DRIFT);
+}
+
+// Simulate a pause in the DSP, which can occur if the DSP reroutes the audio.
+TEST_F(ClockModelTestFixture, clock_jump_forward_500) {
+ checkDriftingClock(SAMPLE_RATE, NUM_LOOPS_DRIFT, 0.500);
+}
diff --git a/media/libaaudio/tests/test_marshalling.cpp b/media/libaaudio/tests/test_marshalling.cpp
index 49213dc..dfb1620 100644
--- a/media/libaaudio/tests/test_marshalling.cpp
+++ b/media/libaaudio/tests/test_marshalling.cpp
@@ -109,7 +109,7 @@
sharedMemories[0].setup(fd, memSizeBytes);
int32_t regionOffset1 = 32;
int32_t regionSize1 = 16;
- sharedRegionA.setup(0, regionOffset1, regionSize1);
+ sharedRegionA.setup({0, regionOffset1, regionSize1});
void *region1;
EXPECT_EQ(AAUDIO_OK, sharedRegionA.resolve(sharedMemories, &region1));
diff --git a/media/libaudioclient/AudioSystem.cpp b/media/libaudioclient/AudioSystem.cpp
index 28d76d7..b731702 100644
--- a/media/libaudioclient/AudioSystem.cpp
+++ b/media/libaudioclient/AudioSystem.cpp
@@ -264,6 +264,12 @@
return af->setMode(mode);
}
+status_t AudioSystem::setSimulateDeviceConnections(bool enabled) {
+ const sp<IAudioFlinger>& af = AudioSystem::get_audio_flinger();
+ if (af == 0) return PERMISSION_DENIED;
+ return af->setSimulateDeviceConnections(enabled);
+}
+
status_t AudioSystem::setParameters(audio_io_handle_t ioHandle, const String8& keyValuePairs) {
const sp<IAudioFlinger>& af = AudioSystem::get_audio_flinger();
if (af == 0) return PERMISSION_DENIED;
@@ -1576,6 +1582,15 @@
return OK;
}
+status_t AudioSystem::listDeclaredDevicePorts(media::AudioPortRole role,
+ std::vector<media::AudioPortFw>* result) {
+ if (result == nullptr) return BAD_VALUE;
+ const sp<IAudioPolicyService>& aps = AudioSystem::get_audio_policy_service();
+ if (aps == 0) return PERMISSION_DENIED;
+ RETURN_STATUS_IF_ERROR(statusTFromBinderStatus(aps->listDeclaredDevicePorts(role, result)));
+ return OK;
+}
+
status_t AudioSystem::getAudioPort(struct audio_port_v7* port) {
if (port == nullptr) {
return BAD_VALUE;
diff --git a/media/libaudioclient/AudioTrack.cpp b/media/libaudioclient/AudioTrack.cpp
index 9386b9b..d8219a8 100644
--- a/media/libaudioclient/AudioTrack.cpp
+++ b/media/libaudioclient/AudioTrack.cpp
@@ -1702,13 +1702,21 @@
status_t AudioTrack::setOutputDevice(audio_port_handle_t deviceId) {
AutoMutex lock(mLock);
- ALOGV("%s(%d): deviceId=%d mSelectedDeviceId=%d",
- __func__, mPortId, deviceId, mSelectedDeviceId);
+ ALOGV("%s(%d): deviceId=%d mSelectedDeviceId=%d mRoutedDeviceId %d",
+ __func__, mPortId, deviceId, mSelectedDeviceId, mRoutedDeviceId);
if (mSelectedDeviceId != deviceId) {
mSelectedDeviceId = deviceId;
- if (mStatus == NO_ERROR) {
- android_atomic_or(CBLK_INVALID, &mCblk->mFlags);
- mProxy->interrupt();
+ if (mStatus == NO_ERROR && mSelectedDeviceId != mRoutedDeviceId) {
+ if (isPlaying_l()) {
+ android_atomic_or(CBLK_INVALID, &mCblk->mFlags);
+ mProxy->interrupt();
+ } else {
+ // if the track is idle, try to restore now and
+ // defer to next start if not possible
+ if (restoreTrack_l("setOutputDevice") != OK) {
+ android_atomic_or(CBLK_INVALID, &mCblk->mFlags);
+ }
+ }
}
}
return NO_ERROR;
@@ -2185,7 +2193,6 @@
// obtainBuffer() is called with mutex unlocked, so keep extra references to these fields to
// keep them from going away if another thread re-creates the track during obtainBuffer()
sp<AudioTrackClientProxy> proxy;
- sp<IMemory> iMem;
{ // start of lock scope
AutoMutex lock(mLock);
@@ -2211,8 +2218,9 @@
}
// Keep the extra references
+ mProxyObtainBufferRef = mProxy;
proxy = mProxy;
- iMem = mCblkMemory;
+ mCblkMemoryObtainBufferRef = mCblkMemory;
if (mState == STATE_STOPPING) {
status = -EINTR;
@@ -2260,6 +2268,8 @@
buffer.mFrameCount = stepCount;
buffer.mRaw = audioBuffer->raw;
+ sp<IMemory> tempMemory;
+ sp<AudioTrackClientProxy> tempProxy;
AutoMutex lock(mLock);
if (audioBuffer->sequence != mSequence) {
// This Buffer came from a different IAudioTrack instance, so ignore the releaseBuffer
@@ -2269,7 +2279,12 @@
}
mReleased += stepCount;
mInUnderrun = false;
- mProxy->releaseBuffer(&buffer);
+ mProxyObtainBufferRef->releaseBuffer(&buffer);
+ // The extra references to the shared memory and proxy taken in `obtainBuffer` are no
+ // longer needed after calling `releaseBuffer`. Move them into temporary strong
+ // pointers so that they are cleared outside `releaseBuffer`.
+ tempMemory = std::move(mCblkMemoryObtainBufferRef);
+ tempProxy = std::move(mProxyObtainBufferRef);
// restart track if it was disabled by audioflinger due to previous underrun
restartIfDisabled();
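A toy, self-contained sketch of the "release outside the lock" pattern used above; std::shared_ptr and std::mutex stand in for the sp<> members and the track mutex, and all names are invented for illustration:

    #include <memory>
    #include <mutex>

    // Toy stand-ins for the AudioTrack members involved in the pattern above.
    std::shared_ptr<int> gExtraRef = std::make_shared<int>(42);
    std::mutex gLock;

    void releaseOutsideLock() {
        std::shared_ptr<int> temp;
        {
            std::lock_guard<std::mutex> guard(gLock);
            temp = std::move(gExtraRef);   // ownership leaves the member while locked
        }                                  // the lock is released here
        // temp is destroyed at the end of this function, after the lock, so the last
        // reference is never dropped while the mutex is held.
    }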
@@ -2507,11 +2522,22 @@
timeout.tv_sec = WAIT_STREAM_END_TIMEOUT_SEC;
timeout.tv_nsec = 0;
+ // Use timestamp progress as a safeguard so we don't falsely time out.
+ AudioTimestamp timestamp{};
+ const bool isTimestampValid = getTimestamp(timestamp) == OK;
+ const auto frameCount = isTimestampValid ? timestamp.mPosition : 0;
+
status_t status = proxy->waitStreamEndDone(&timeout);
switch (status) {
+ case TIMED_OUT:
+ if (isTimestampValid
+ && getTimestamp(timestamp) == OK && frameCount != timestamp.mPosition) {
+ ALOGD("%s: waitStreamEndDone retrying", __func__);
+ break; // we retry again (and recheck possible state change).
+ }
+ [[fallthrough]];
case NO_ERROR:
case DEAD_OBJECT:
- case TIMED_OUT:
if (status != DEAD_OBJECT) {
// for DEAD_OBJECT, we do not send a EVENT_STREAM_END after stop();
// instead, the application should handle the EVENT_NEW_IAUDIOTRACK.
@@ -2529,6 +2555,7 @@
}
}
if (waitStreamEnd && status != DEAD_OBJECT) {
+ ALOGV("%s: waitStreamEndDone complete", __func__);
return NS_INACTIVE;
}
break;
diff --git a/media/libaudioclient/IAudioFlinger.cpp b/media/libaudioclient/IAudioFlinger.cpp
index bbc39e8..620cdc2 100644
--- a/media/libaudioclient/IAudioFlinger.cpp
+++ b/media/libaudioclient/IAudioFlinger.cpp
@@ -816,6 +816,10 @@
return statusTFromBinderStatus(mDelegate->setDeviceConnectedState(aidlPort, connected));
}
+status_t AudioFlingerClientAdapter::setSimulateDeviceConnections(bool enabled) {
+ return statusTFromBinderStatus(mDelegate->setSimulateDeviceConnections(enabled));
+}
+
status_t AudioFlingerClientAdapter::setRequestedLatencyMode(
audio_io_handle_t output, audio_latency_mode_t mode) {
int32_t outputAidl = VALUE_OR_RETURN_STATUS(legacy2aidl_audio_io_handle_t_int32_t(output));
@@ -1370,6 +1374,10 @@
return Status::fromStatusT(mDelegate->setDeviceConnectedState(&portLegacy, connected));
}
+Status AudioFlingerServerAdapter::setSimulateDeviceConnections(bool enabled) {
+ return Status::fromStatusT(mDelegate->setSimulateDeviceConnections(enabled));
+}
+
Status AudioFlingerServerAdapter::setRequestedLatencyMode(
int32_t output, media::audio::common::AudioLatencyMode modeAidl) {
audio_io_handle_t outputLegacy = VALUE_OR_RETURN_BINDER(
diff --git a/media/libaudioclient/aidl/android/media/IAudioFlingerService.aidl b/media/libaudioclient/aidl/android/media/IAudioFlingerService.aidl
index 568c865..4d9fef4 100644
--- a/media/libaudioclient/aidl/android/media/IAudioFlingerService.aidl
+++ b/media/libaudioclient/aidl/android/media/IAudioFlingerService.aidl
@@ -229,6 +229,9 @@
void setDeviceConnectedState(in AudioPortFw devicePort, boolean connected);
+ // Used for tests only. Requires AIDL HAL to work.
+ void setSimulateDeviceConnections(boolean enabled);
+
/**
* Requests a given latency mode (See AudioLatencyMode.aidl) on an output stream.
* This can be used when some use case on a given mixer/stream can only be enabled
diff --git a/media/libaudioclient/aidl/android/media/IAudioPolicyService.aidl b/media/libaudioclient/aidl/android/media/IAudioPolicyService.aidl
index fa6c733..90ede8b 100644
--- a/media/libaudioclient/aidl/android/media/IAudioPolicyService.aidl
+++ b/media/libaudioclient/aidl/android/media/IAudioPolicyService.aidl
@@ -203,7 +203,9 @@
in AudioAttributesInternal attributes);
/**
- * List available audio ports and their attributes. Returns the generation.
+ * List currently attached audio ports and their attributes. Returns the generation.
+ * The generation is incremented each time anything changes in the port
+ * configuration.
*
* On input, count represents the maximum length of the returned array.
* On output, count is the total number of elements, which may be larger than the array size.
@@ -215,6 +217,13 @@
inout Int count,
out AudioPortFw[] ports);
+ /**
+ * List all device ports declared in the configuration (including currently detached ones)
+ * 'role' can be 'NONE' to get both input and output devices,
+ * 'SINK' for output devices, and 'SOURCE' for input devices.
+ */
+ AudioPortFw[] listDeclaredDevicePorts(AudioPortRole role);
+
/** Get attributes for the audio port with the given id (AudioPort.hal.id field). */
AudioPortFw getAudioPort(int /* audio_port_handle_t */ portId);
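A hypothetical client-side use of the generation counter described above (listPorts and Port are placeholder names, not the actual IAudioPolicyService client API): query twice and accept the result only when the generation did not change in between.

    #include <cstdint>
    #include <utility>
    #include <vector>

    struct Port { int32_t id; };

    // Placeholder for a query returning the ports together with the generation
    // that was current when they were captured.
    std::pair<uint32_t, std::vector<Port>> listPorts();

    std::vector<Port> snapshotPorts() {
        auto [gen, ports] = listPorts();
        for (;;) {
            auto [newGen, again] = listPorts();
            if (newGen == gen) {
                return ports;          // nothing changed between the two calls: consistent snapshot
            }
            gen = newGen;              // the configuration changed while reading; retry
            ports = std::move(again);
        }
    }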
diff --git a/media/libaudioclient/aidl/android/media/ISoundDose.aidl b/media/libaudioclient/aidl/android/media/ISoundDose.aidl
index 69f9a1f..0e2a5ab 100644
--- a/media/libaudioclient/aidl/android/media/ISoundDose.aidl
+++ b/media/libaudioclient/aidl/android/media/ISoundDose.aidl
@@ -23,8 +23,8 @@
* AudioService#SoundDoseHelper to the audio server
*/
interface ISoundDose {
- /** Set a new RS2 value used for momentary exposure warnings. */
- oneway void setOutputRs2(float rs2Value);
+ /** Set a new RS2 upper bound used for momentary exposure warnings. */
+ oneway void setOutputRs2UpperBound(float rs2Value);
/**
* Resets the native CSD values. This can happen after a crash in the
@@ -48,11 +48,25 @@
*/
oneway void updateAttenuation(float attenuationDB, int device);
+ /**
+ * Disable the calculation of sound dose. This has the effect that no MEL
+ * values will be computed on the framework side. The MEL returned from
+ * the IHalSoundDoseCallbacks will be ignored.
+ * Should only be called once at startup if the AudioService does not
+ * support CSD.
+ */
+ oneway void disableCsd();
+
/* -------------------------- Test API methods --------------------------
- /** Get the currently used RS2 value. */
- float getOutputRs2();
+ /** Get the currently used RS2 upper bound. */
+ float getOutputRs2UpperBound();
/** Get the current CSD from audioserver. */
float getCsd();
+ /**
+ * Returns true if the HAL supports the ISoundDose interface, either as part
+ * of IModule or as a standalone sound dose HAL.
+ */
+ boolean isSoundDoseHalSupported();
/** Enables/Disables MEL computations from framework. */
oneway void forceUseFrameworkMel(boolean useFrameworkMel);
/** Enables/Disables the computation of CSD on all devices. */
diff --git a/media/libaudioclient/aidl/android/media/ISpatializer.aidl b/media/libaudioclient/aidl/android/media/ISpatializer.aidl
index a61ad58..250c450 100644
--- a/media/libaudioclient/aidl/android/media/ISpatializer.aidl
+++ b/media/libaudioclient/aidl/android/media/ISpatializer.aidl
@@ -96,17 +96,33 @@
/**
* Sets the display orientation.
+ *
+ * This is the rotation of the displayed content relative to its natural orientation.
+ *
* Orientation is expressed in the angle of rotation from the physical "up" side of the screen
to the logical "up" side of the content displayed on the screen. Counterclockwise angles, as
* viewed while facing the screen are positive.
+ *
+ * Note: DisplayManager currently only returns this in increments of 90 degrees,
+ * so the values will be 0, PI/2, PI, 3PI/2.
*/
void setDisplayOrientation(float physicalToLogicalAngle);
/**
* Sets the hinge angle for foldable devices.
+ *
+ * Per the hinge angle sensor, the angle ranges from 0 to 2PI.
+ * The value of 0 is considered closed, and PI is considered flat open.
*/
void setHingeAngle(float hingeAngle);
+ /**
+ * Sets whether a foldable is considered "folded" or not.
+ *
+ * The fold state may affect which physical screen is active for display.
+ */
+ void setFoldState(boolean folded);
+
/** Reports the list of supported spatialization modes (see SpatializationMode.aidl).
* The list should never be empty if an ISpatializer interface was successfully
* retrieved with IAudioPolicyService.getSpatializer().
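For reference, a tiny sketch (assuming the caller already has the display rotation as a quarter-turn count, which is the granularity DisplayManager reports) of why the angle passed to setDisplayOrientation ends up as one of 0, PI/2, PI, 3PI/2:

    #include <cmath>

    // quarterTurns is 0..3 (0, 90, 180, 270 degrees counterclockwise).
    float rotationToPhysicalToLogicalAngle(int quarterTurns) {
        return static_cast<float>(quarterTurns) * static_cast<float>(M_PI) / 2.0f;
    }
    // rotationToPhysicalToLogicalAngle(0..3) -> 0, PI/2, PI, 3*PI/2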
diff --git a/media/libaudioclient/fuzzer/Android.bp b/media/libaudioclient/fuzzer/Android.bp
index b1feb60..6080314 100644
--- a/media/libaudioclient/fuzzer/Android.bp
+++ b/media/libaudioclient/fuzzer/Android.bp
@@ -80,5 +80,13 @@
"android-media-fuzzing-reports@google.com",
],
componentid: 155276,
+ hotlists: [
+ "4593311",
+ ],
+ description: "The fuzzer targets the APIs of libaudioflinger",
+ vector: "local_no_privileges_required",
+ service_privilege: "privileged",
+ users: "multi_user",
+ fuzzed_code_usage: "shipped",
},
}
diff --git a/media/libaudioclient/include/media/AudioSystem.h b/media/libaudioclient/include/media/AudioSystem.h
index d033d4f..1bfe34d 100644
--- a/media/libaudioclient/include/media/AudioSystem.h
+++ b/media/libaudioclient/include/media/AudioSystem.h
@@ -23,6 +23,7 @@
#include <vector>
#include <android/content/AttributionSourceState.h>
+#include <android/media/AudioPortFw.h>
#include <android/media/AudioVibratorInfo.h>
#include <android/media/BnAudioFlingerClient.h>
#include <android/media/BnAudioPolicyServiceClient.h>
@@ -126,6 +127,9 @@
// set audio mode in audio hardware
static status_t setMode(audio_mode_t mode);
+ // test API: switch HALs into the mode which simulates external device connections
+ static status_t setSimulateDeviceConnections(bool enabled);
+
// returns true in *state if tracks are active on the specified stream or have been active
// in the past inPastMs milliseconds
static status_t isStreamActive(audio_stream_type_t stream, bool *state, uint32_t inPastMs);
@@ -425,6 +429,9 @@
struct audio_port_v7 *ports,
unsigned int *generation);
+ static status_t listDeclaredDevicePorts(media::AudioPortRole role,
+ std::vector<media::AudioPortFw>* result);
+
/* Get attributes for a given audio port. On input, the port
* only needs the 'id' field to be filled in. */
static status_t getAudioPort(struct audio_port_v7 *port);
diff --git a/media/libaudioclient/include/media/AudioTrack.h b/media/libaudioclient/include/media/AudioTrack.h
index 31f81be..8f712db 100644
--- a/media/libaudioclient/include/media/AudioTrack.h
+++ b/media/libaudioclient/include/media/AudioTrack.h
@@ -1126,6 +1126,9 @@
bool isPlaying() {
AutoMutex lock(mLock);
+ return isPlaying_l();
+ }
+ bool isPlaying_l() {
return mState == STATE_ACTIVE || mState == STATE_STOPPING;
}
@@ -1262,6 +1265,11 @@
audio_track_cblk_t* mCblk; // re-load after mLock.unlock()
audio_io_handle_t mOutput = AUDIO_IO_HANDLE_NONE; // from AudioSystem::getOutputForAttr()
+ // Extra references to the shared memory and proxy, held between obtainBuffer and
+ // releaseBuffer to keep the shared memory valid while the data is being processed.
+ sp<IMemory> mCblkMemoryObtainBufferRef GUARDED_BY(mLock);
+ sp<AudioTrackClientProxy> mProxyObtainBufferRef GUARDED_BY(mLock);
+
sp<AudioTrackThread> mAudioTrackThread;
bool mThreadCanCallJava;
diff --git a/media/libaudioclient/include/media/IAudioFlinger.h b/media/libaudioclient/include/media/IAudioFlinger.h
index 02d0511..1803862 100644
--- a/media/libaudioclient/include/media/IAudioFlinger.h
+++ b/media/libaudioclient/include/media/IAudioFlinger.h
@@ -363,6 +363,8 @@
virtual status_t setDeviceConnectedState(const struct audio_port_v7 *port, bool connected) = 0;
+ virtual status_t setSimulateDeviceConnections(bool enabled) = 0;
+
virtual status_t setRequestedLatencyMode(
audio_io_handle_t output, audio_latency_mode_t mode) = 0;
@@ -480,6 +482,7 @@
int32_t getAAudioMixerBurstCount() override;
int32_t getAAudioHardwareBurstMinUsec() override;
status_t setDeviceConnectedState(const struct audio_port_v7 *port, bool connected) override;
+ status_t setSimulateDeviceConnections(bool enabled) override;
status_t setRequestedLatencyMode(audio_io_handle_t output,
audio_latency_mode_t mode) override;
status_t getSupportedLatencyModes(
@@ -578,6 +581,7 @@
GET_AAUDIO_MIXER_BURST_COUNT = media::BnAudioFlingerService::TRANSACTION_getAAudioMixerBurstCount,
GET_AAUDIO_HARDWARE_BURST_MIN_USEC = media::BnAudioFlingerService::TRANSACTION_getAAudioHardwareBurstMinUsec,
SET_DEVICE_CONNECTED_STATE = media::BnAudioFlingerService::TRANSACTION_setDeviceConnectedState,
+ SET_SIMULATE_DEVICE_CONNECTIONS = media::BnAudioFlingerService::TRANSACTION_setSimulateDeviceConnections,
SET_REQUESTED_LATENCY_MODE = media::BnAudioFlingerService::TRANSACTION_setRequestedLatencyMode,
GET_SUPPORTED_LATENCY_MODES = media::BnAudioFlingerService::TRANSACTION_getSupportedLatencyModes,
SET_BLUETOOTH_VARIABLE_LATENCY_ENABLED =
@@ -708,6 +712,7 @@
Status getAAudioMixerBurstCount(int32_t* _aidl_return) override;
Status getAAudioHardwareBurstMinUsec(int32_t* _aidl_return) override;
Status setDeviceConnectedState(const media::AudioPortFw& port, bool connected) override;
+ Status setSimulateDeviceConnections(bool enabled) override;
Status setRequestedLatencyMode(
int output, media::audio::common::AudioLatencyMode mode) override;
Status getSupportedLatencyModes(int output,
diff --git a/media/libaudioclient/tests/Android.bp b/media/libaudioclient/tests/Android.bp
index 2189521..1e8dcca 100644
--- a/media/libaudioclient/tests/Android.bp
+++ b/media/libaudioclient/tests/Android.bp
@@ -14,6 +14,12 @@
"-Wall",
"-Werror",
],
+ shared_libs: [
+ "libbinder",
+ "libcutils",
+ "liblog",
+ "libutils",
+ ],
sanitize: {
misc_undefined: [
"unsigned-integer-overflow",
@@ -22,37 +28,35 @@
},
}
-cc_test {
- name: "audio_aidl_conversion_tests",
+cc_defaults {
+ name: "audio_aidl_conversion_test_defaults",
defaults: [
"libaudioclient_tests_defaults",
"latest_android_media_audio_common_types_cpp_static",
],
- srcs: ["audio_aidl_legacy_conversion_tests.cpp"],
- shared_libs: [
- "libbinder",
- "libcutils",
- "liblog",
- "libutils",
- ],
static_libs: [
- "libaudioclient_aidl_conversion",
- "libaudio_aidl_conversion_common_cpp",
"audioclient-types-aidl-cpp",
"av-types-aidl-cpp",
+ "libaudio_aidl_conversion_common_cpp",
+ "libaudioclient_aidl_conversion",
"libstagefright_foundation",
],
}
cc_test {
+ name: "audio_aidl_conversion_tests",
+ defaults: [
+ "audio_aidl_conversion_test_defaults",
+ ],
+ srcs: ["audio_aidl_legacy_conversion_tests.cpp"],
+}
+
+cc_test {
name: "audio_aidl_status_tests",
defaults: ["libaudioclient_tests_defaults"],
srcs: ["audio_aidl_status_tests.cpp"],
shared_libs: [
"libaudioclient_aidl_conversion",
- "libbinder",
- "libcutils",
- "libutils",
],
}
@@ -70,9 +74,6 @@
shared_libs: [
"framework-permission-aidl-cpp",
"libaudioclient",
- "libbinder",
- "libcutils",
- "libutils",
],
data: ["track_test_input_*.txt"],
}
@@ -89,35 +90,23 @@
"libmediametrics_headers",
],
shared_libs: [
- "libaudioclient",
- "libbinder",
- "libcutils",
- "libutils",
"framework-permission-aidl-cpp",
+ "libaudioclient",
],
data: ["record_test_input_*.txt"],
}
cc_defaults {
name: "libaudioclient_gtests_defaults",
- cflags: [
- "-Wall",
- "-Werror",
- ],
defaults: [
- "latest_android_media_audio_common_types_cpp_static",
+ "audio_aidl_conversion_test_defaults",
],
shared_libs: [
"capture_state_listener-aidl-cpp",
"framework-permission-aidl-cpp",
- "libaudioclient_aidl_conversion",
- "libaudio_aidl_conversion_common_cpp",
"libbase",
- "libbinder",
"libcgrouprc",
- "libcutils",
"libdl",
- "liblog",
"libmedia",
"libmediametrics",
"libmediautils",
@@ -125,8 +114,6 @@
"libnblog",
"libprocessgroup",
"libshmemcompat",
- "libstagefright_foundation",
- "libutils",
"libxml2",
"mediametricsservice-aidl-cpp",
"packagemanager_aidl-cpp",
@@ -148,7 +135,6 @@
],
data: ["bbb*.raw"],
test_config_template: "audio_test_template.xml",
- test_suites: ["device-tests"],
}
cc_test {
diff --git a/media/libaudioclient/tests/audio_aidl_legacy_conversion_tests.cpp b/media/libaudioclient/tests/audio_aidl_legacy_conversion_tests.cpp
index f651a37..0d12f9d 100644
--- a/media/libaudioclient/tests/audio_aidl_legacy_conversion_tests.cpp
+++ b/media/libaudioclient/tests/audio_aidl_legacy_conversion_tests.cpp
@@ -15,6 +15,7 @@
*/
#include <iostream>
+#include <string>
#include <gtest/gtest.h>
@@ -32,6 +33,7 @@
using media::AudioPortType;
using media::audio::common::AudioChannelLayout;
using media::audio::common::AudioDevice;
+using media::audio::common::AudioDeviceAddress;
using media::audio::common::AudioDeviceDescription;
using media::audio::common::AudioDeviceType;
using media::audio::common::AudioEncapsulationMetadataType;
@@ -131,6 +133,14 @@
return make_AudioDeviceDescription(AudioDeviceType::IN_DEFAULT);
}
+AudioDeviceDescription make_ADD_MicIn() {
+ return make_AudioDeviceDescription(AudioDeviceType::IN_MICROPHONE);
+}
+
+AudioDeviceDescription make_ADD_RSubmixIn() {
+ return make_AudioDeviceDescription(AudioDeviceType::IN_SUBMIX);
+}
+
AudioDeviceDescription make_ADD_DefaultOut() {
return make_AudioDeviceDescription(AudioDeviceType::OUT_DEFAULT);
}
@@ -145,6 +155,39 @@
AudioDeviceDescription::CONNECTION_BT_SCO());
}
+AudioDeviceDescription make_ADD_BtA2dpHeadphone() {
+ return make_AudioDeviceDescription(AudioDeviceType::OUT_HEADPHONE,
+ AudioDeviceDescription::CONNECTION_BT_A2DP());
+}
+
+AudioDeviceDescription make_ADD_BtLeHeadset() {
+ return make_AudioDeviceDescription(AudioDeviceType::OUT_HEADSET,
+ AudioDeviceDescription::CONNECTION_BT_LE());
+}
+
+AudioDeviceDescription make_ADD_BtLeBroadcast() {
+ return make_AudioDeviceDescription(AudioDeviceType::OUT_BROADCAST,
+ AudioDeviceDescription::CONNECTION_BT_LE());
+}
+
+AudioDeviceDescription make_ADD_IpV4Device() {
+ return make_AudioDeviceDescription(AudioDeviceType::OUT_DEVICE,
+ AudioDeviceDescription::CONNECTION_IP_V4());
+}
+
+AudioDeviceDescription make_ADD_UsbHeadset() {
+ return make_AudioDeviceDescription(AudioDeviceType::OUT_HEADSET,
+ AudioDeviceDescription::CONNECTION_USB());
+}
+
+AudioDevice make_AudioDevice(const AudioDeviceDescription& type,
+ const AudioDeviceAddress& address) {
+ AudioDevice result;
+ result.type = type;
+ result.address = address;
+ return result;
+}
+
AudioFormatDescription make_AudioFormatDescription(AudioFormatType type) {
AudioFormatDescription result;
result.type = type;
@@ -390,6 +433,48 @@
make_ADD_DefaultOut(), make_ADD_WiredHeadset(),
make_ADD_BtScoHeadset()));
+class AudioDeviceRoundTripTest : public testing::TestWithParam<AudioDevice> {};
+TEST_P(AudioDeviceRoundTripTest, Aidl2Legacy2Aidl) {
+ const auto initial = GetParam();
+ audio_devices_t legacyType;
+ String8 legacyAddress;
+ status_t status = aidl2legacy_AudioDevice_audio_device(initial, &legacyType, &legacyAddress);
+ ASSERT_EQ(OK, status);
+ auto convBack = legacy2aidl_audio_device_AudioDevice(legacyType, legacyAddress);
+ ASSERT_TRUE(convBack.ok());
+ EXPECT_EQ(initial, convBack.value());
+}
+INSTANTIATE_TEST_SUITE_P(
+ AudioDeviceRoundTrip, AudioDeviceRoundTripTest,
+ testing::Values(
+ make_AudioDevice(make_ADD_MicIn(),
+ AudioDeviceAddress::make<AudioDeviceAddress::Tag::id>("bottom")),
+ make_AudioDevice(make_ADD_RSubmixIn(),
+ AudioDeviceAddress::make<AudioDeviceAddress::Tag::id>("1:2-in-3")),
+ // The case of a "blueprint" device port for an external device.
+ make_AudioDevice(make_ADD_BtScoHeadset(),
+ AudioDeviceAddress::make<AudioDeviceAddress::Tag::id>("")),
+ make_AudioDevice(make_ADD_BtScoHeadset(),
+ AudioDeviceAddress::make<AudioDeviceAddress::Tag::mac>(
+ std::vector<uint8_t>{1, 2, 3, 4, 5, 6})),
+ // Another "blueprint"
+ make_AudioDevice(make_ADD_BtA2dpHeadphone(),
+ AudioDeviceAddress::make<AudioDeviceAddress::Tag::id>("")),
+ make_AudioDevice(make_ADD_BtA2dpHeadphone(),
+ AudioDeviceAddress::make<AudioDeviceAddress::Tag::mac>(
+ std::vector<uint8_t>{1, 2, 3, 4, 5, 6})),
+ make_AudioDevice(make_ADD_BtLeHeadset(),
+ AudioDeviceAddress::make<AudioDeviceAddress::Tag::mac>(
+ std::vector<uint8_t>{1, 2, 3, 4, 5, 6})),
+ make_AudioDevice(make_ADD_BtLeBroadcast(),
+ AudioDeviceAddress::make<AudioDeviceAddress::Tag::id>("42")),
+ make_AudioDevice(make_ADD_IpV4Device(),
+ AudioDeviceAddress::make<AudioDeviceAddress::Tag::ipv4>(
+ std::vector<uint8_t>{192, 168, 0, 1})),
+ make_AudioDevice(make_ADD_UsbHeadset(),
+ AudioDeviceAddress::make<AudioDeviceAddress::Tag::alsa>(
+ std::vector<int32_t>{1, 2}))));
+
class AudioFormatDescriptionRoundTripTest : public testing::TestWithParam<AudioFormatDescription> {
};
TEST_P(AudioFormatDescriptionRoundTripTest, Aidl2Legacy2Aidl) {
diff --git a/media/libaudioclient/tests/audiosystem_tests.cpp b/media/libaudioclient/tests/audiosystem_tests.cpp
index 2e6915a..45baa94 100644
--- a/media/libaudioclient/tests/audiosystem_tests.cpp
+++ b/media/libaudioclient/tests/audiosystem_tests.cpp
@@ -18,12 +18,19 @@
#include <string.h>
+#include <set>
+
#include <gtest/gtest.h>
+#include <media/AidlConversionCppNdk.h>
#include <media/IAudioFlinger.h>
#include <utils/Log.h>
#include "audio_test_utils.h"
+using android::media::audio::common::AudioDeviceAddress;
+using android::media::audio::common::AudioDeviceDescription;
+using android::media::audio::common::AudioDeviceType;
+using android::media::audio::common::AudioPortExt;
using namespace android;
void anyPatchContainsInputDevice(audio_port_handle_t deviceId, bool& res) {
@@ -214,8 +221,11 @@
GTEST_SKIP() << "No ports returned by the audio system";
}
+ bool sourceFound = false;
for (const auto& port : ports) {
if (port.role != AUDIO_PORT_ROLE_SOURCE || port.type != AUDIO_PORT_TYPE_DEVICE) continue;
+ if (port.ext.device.type != AUDIO_DEVICE_IN_FM_TUNER) continue;
+ sourceFound = true;
sourcePortConfig = port.active_config;
bool patchFound;
@@ -223,8 +233,9 @@
// start audio source.
status_t ret =
AudioSystem::startAudioSource(&sourcePortConfig, &attributes, &sourcePortHandle);
- EXPECT_EQ(OK, ret) << "AudioSystem::startAudioSource for source " << port.ext.device.address
- << " failed";
+ EXPECT_EQ(OK, ret) << "AudioSystem::startAudioSource for source "
+ << audio_device_to_string(port.ext.device.type) << " failed";
+ if (ret != OK) continue;
// verify that patch is established by the source port.
ASSERT_NO_FATAL_FAILURE(anyPatchContainsInputDevice(port.id, patchFound));
@@ -233,13 +244,17 @@
if (sourcePortHandle != AUDIO_PORT_HANDLE_NONE) {
ret = AudioSystem::stopAudioSource(sourcePortHandle);
- EXPECT_EQ(OK, ret) << "AudioSystem::stopAudioSource for handle failed";
+ EXPECT_EQ(OK, ret) << "AudioSystem::stopAudioSource failed for handle "
+ << sourcePortHandle;
}
// verify that no source port patch exists.
ASSERT_NO_FATAL_FAILURE(anyPatchContainsInputDevice(port.id, patchFound));
EXPECT_EQ(false, patchFound);
}
+ if (!sourceFound) {
+ GTEST_SKIP() << "No ports suitable for testing";
+ }
}
TEST_F(AudioSystemTest, CreateAndReleaseAudioPatch) {
@@ -571,3 +586,106 @@
EXPECT_EQ(NO_ERROR, AudioSystem::setUserIdDeviceAffinities(userId, outputDevices));
EXPECT_EQ(NO_ERROR, AudioSystem::removeUserIdDeviceAffinities(userId));
}
+
+namespace {
+
+class WithSimulatedDeviceConnections {
+ public:
+ WithSimulatedDeviceConnections()
+ : mIsSupported(AudioSystem::setSimulateDeviceConnections(true) == OK) {}
+ ~WithSimulatedDeviceConnections() {
+ if (mIsSupported) {
+ if (status_t status = AudioSystem::setSimulateDeviceConnections(false); status != OK) {
+ ALOGE("Error restoring device connections simulation state: %d", status);
+ }
+ }
+ }
+ bool isSupported() const { return mIsSupported; }
+
+ private:
+ const bool mIsSupported;
+};
+
+android::media::audio::common::AudioPort GenerateUniqueDeviceAddress(
+ const android::media::audio::common::AudioPort& port) {
+ static int nextId = 0;
+ using Tag = AudioDeviceAddress::Tag;
+ AudioDeviceAddress address;
+ switch (suggestDeviceAddressTag(port.ext.get<AudioPortExt::Tag::device>().device.type)) {
+ case Tag::id:
+ address = AudioDeviceAddress::make<Tag::id>(std::to_string(++nextId));
+ break;
+ case Tag::mac:
+ address = AudioDeviceAddress::make<Tag::mac>(
+ std::vector<uint8_t>{1, 2, 3, 4, 5, static_cast<uint8_t>(++nextId & 0xff)});
+ break;
+ case Tag::ipv4:
+ address = AudioDeviceAddress::make<Tag::ipv4>(
+ std::vector<uint8_t>{192, 168, 0, static_cast<uint8_t>(++nextId & 0xff)});
+ break;
+ case Tag::ipv6:
+ address = AudioDeviceAddress::make<Tag::ipv6>(std::vector<int32_t>{
+ 0xfc00, 0x0123, 0x4567, 0x89ab, 0xcdef, 0, 0, ++nextId & 0xffff});
+ break;
+ case Tag::alsa:
+ address = AudioDeviceAddress::make<Tag::alsa>(std::vector<int32_t>{1, ++nextId});
+ break;
+ }
+ android::media::audio::common::AudioPort result = port;
+ result.ext.get<AudioPortExt::Tag::device>().device.address = std::move(address);
+ return result;
+}
+
+} // namespace
+
+TEST_F(AudioSystemTest, SetDeviceConnectedState) {
+ WithSimulatedDeviceConnections connSim;
+ if (!connSim.isSupported()) {
+ GTEST_SKIP() << "Simulation of external device connections not supported";
+ }
+ std::vector<media::AudioPortFw> ports;
+ ASSERT_EQ(OK, AudioSystem::listDeclaredDevicePorts(media::AudioPortRole::NONE, &ports));
+ if (ports.empty()) {
+ GTEST_SKIP() << "No ports returned by the audio system";
+ }
+ const std::set<AudioDeviceType> typesToUse{
+ AudioDeviceType::IN_DEVICE, AudioDeviceType::IN_HEADSET,
+ AudioDeviceType::IN_MICROPHONE, AudioDeviceType::OUT_DEVICE,
+ AudioDeviceType::OUT_HEADPHONE, AudioDeviceType::OUT_HEADSET,
+ AudioDeviceType::OUT_HEARING_AID, AudioDeviceType::OUT_SPEAKER};
+ std::vector<media::AudioPortFw> externalDevicePorts;
+ for (const auto& port : ports) {
+ if (const auto& device = port.hal.ext.get<AudioPortExt::device>().device;
+ !device.type.connection.empty() && typesToUse.count(device.type.type)) {
+ externalDevicePorts.push_back(port);
+ }
+ }
+ if (externalDevicePorts.empty()) {
+ GTEST_SKIP() << "No ports for considered non-attached devices";
+ }
+ for (auto& port : externalDevicePorts) {
+ android::media::audio::common::AudioPort aidlPort = GenerateUniqueDeviceAddress(port.hal);
+ SCOPED_TRACE(aidlPort.toString());
+ audio_devices_t type;
+ char address[AUDIO_DEVICE_MAX_ADDRESS_LEN];
+ status_t status = aidl2legacy_AudioDevice_audio_device(
+ aidlPort.ext.get<AudioPortExt::Tag::device>().device, &type, address);
+ ASSERT_EQ(OK, status);
+ audio_policy_dev_state_t deviceState = AudioSystem::getDeviceConnectionState(type, address);
+ EXPECT_EQ(AUDIO_POLICY_DEVICE_STATE_UNAVAILABLE, deviceState);
+ if (deviceState != AUDIO_POLICY_DEVICE_STATE_UNAVAILABLE) continue;
+ // !!! Instead of the default format, use each format from 'ext.encodedFormats'
+ // !!! if they are not empty
+ status = AudioSystem::setDeviceConnectionState(AUDIO_POLICY_DEVICE_STATE_AVAILABLE,
+ aidlPort, AUDIO_FORMAT_DEFAULT);
+ EXPECT_EQ(OK, status);
+ if (status != OK) continue;
+ deviceState = AudioSystem::getDeviceConnectionState(type, address);
+ EXPECT_EQ(AUDIO_POLICY_DEVICE_STATE_AVAILABLE, deviceState);
+ status = AudioSystem::setDeviceConnectionState(AUDIO_POLICY_DEVICE_STATE_UNAVAILABLE,
+ aidlPort, AUDIO_FORMAT_DEFAULT);
+ EXPECT_EQ(OK, status);
+ deviceState = AudioSystem::getDeviceConnectionState(type, address);
+ EXPECT_EQ(AUDIO_POLICY_DEVICE_STATE_UNAVAILABLE, deviceState);
+ }
+}
diff --git a/media/libaudiohal/Android.bp b/media/libaudiohal/Android.bp
index f47dd0b..1dbcb86 100644
--- a/media/libaudiohal/Android.bp
+++ b/media/libaudiohal/Android.bp
@@ -76,3 +76,11 @@
export_include_dirs: ["include"],
}
+
+cc_library_headers {
+ name: "libaudiohalimpl_headers",
+
+ header_libs: ["libaudiohal_headers"],
+ export_header_lib_headers: ["libaudiohal_headers"],
+ export_include_dirs: ["impl"],
+}
diff --git a/media/libaudiohal/impl/Android.bp b/media/libaudiohal/impl/Android.bp
index 0e98856..15726ff 100644
--- a/media/libaudiohal/impl/Android.bp
+++ b/media/libaudiohal/impl/Android.bp
@@ -262,6 +262,7 @@
"EffectBufferHalAidl.cpp",
"EffectHalAidl.cpp",
"effectsAidlConversion/AidlConversionAec.cpp",
+ "effectsAidlConversion/AidlConversionAgc1.cpp",
"effectsAidlConversion/AidlConversionAgc2.cpp",
"effectsAidlConversion/AidlConversionBassBoost.cpp",
"effectsAidlConversion/AidlConversionDownmix.cpp",
@@ -279,6 +280,7 @@
"EffectsFactoryHalAidl.cpp",
"EffectsFactoryHalEntry.cpp",
"StreamHalAidl.cpp",
+ ":audio_effectproxy_src_files"
],
static_libs: [
"android.hardware.common-V2-ndk",
@@ -286,6 +288,7 @@
],
shared_libs: [
"libbinder_ndk",
+ "libaudio_aidl_conversion_common_cpp",
"libaudio_aidl_conversion_common_ndk",
"libaudio_aidl_conversion_effect_ndk",
"libaudioaidlcommon",
@@ -298,6 +301,11 @@
"-Wextra",
"-Werror",
"-Wthread-safety",
- "-DBACKEND_NDK",
+ "-DBACKEND_CPP_NDK",
],
}
+
+filegroup {
+ name: "audio_effectproxy_src_files",
+ srcs: ["EffectProxy.cpp"],
+}
diff --git a/media/libaudiohal/impl/DeviceHalAidl.cpp b/media/libaudiohal/impl/DeviceHalAidl.cpp
index 21e1a32..25ee61a 100644
--- a/media/libaudiohal/impl/DeviceHalAidl.cpp
+++ b/media/libaudiohal/impl/DeviceHalAidl.cpp
@@ -23,6 +23,8 @@
#include <aidl/android/hardware/audio/core/BnStreamCallback.h>
#include <aidl/android/hardware/audio/core/BnStreamOutEventCallback.h>
#include <aidl/android/hardware/audio/core/StreamDescriptor.h>
+#include <android/binder_enums.h>
+#include <binder/Enums.h>
#include <error/expected_utils.h>
#include <media/AidlConversionCppNdk.h>
#include <media/AidlConversionUtil.h>
@@ -34,31 +36,43 @@
#include "StreamHalAidl.h"
using aidl::android::aidl_utils::statusTFromBinderStatus;
+using aidl::android::media::audio::common::AudioChannelLayout;
using aidl::android::media::audio::common::AudioConfig;
using aidl::android::media::audio::common::AudioDevice;
+using aidl::android::media::audio::common::AudioDeviceAddress;
using aidl::android::media::audio::common::AudioDeviceType;
+using aidl::android::media::audio::common::AudioFormatType;
using aidl::android::media::audio::common::AudioInputFlags;
using aidl::android::media::audio::common::AudioIoFlags;
using aidl::android::media::audio::common::AudioLatencyMode;
+using aidl::android::media::audio::common::AudioMMapPolicy;
+using aidl::android::media::audio::common::AudioMMapPolicyInfo;
+using aidl::android::media::audio::common::AudioMMapPolicyType;
using aidl::android::media::audio::common::AudioMode;
using aidl::android::media::audio::common::AudioOutputFlags;
using aidl::android::media::audio::common::AudioPort;
using aidl::android::media::audio::common::AudioPortConfig;
using aidl::android::media::audio::common::AudioPortDeviceExt;
-using aidl::android::media::audio::common::AudioPortMixExt;
using aidl::android::media::audio::common::AudioPortExt;
+using aidl::android::media::audio::common::AudioPortMixExt;
+using aidl::android::media::audio::common::AudioPortMixExtUseCase;
+using aidl::android::media::audio::common::AudioProfile;
using aidl::android::media::audio::common::AudioSource;
-using aidl::android::media::audio::common::Int;
using aidl::android::media::audio::common::Float;
+using aidl::android::media::audio::common::Int;
+using aidl::android::media::audio::common::MicrophoneDynamicInfo;
+using aidl::android::media::audio::common::MicrophoneInfo;
+using aidl::android::hardware::audio::common::getFrameSizeInBytes;
+using aidl::android::hardware::audio::common::isBitPositionFlagSet;
+using aidl::android::hardware::audio::common::isDefaultAudioFormat;
+using aidl::android::hardware::audio::common::makeBitPositionFlagMask;
using aidl::android::hardware::audio::common::RecordTrackMetadata;
using aidl::android::hardware::audio::core::AudioPatch;
+using aidl::android::hardware::audio::core::AudioRoute;
using aidl::android::hardware::audio::core::IModule;
using aidl::android::hardware::audio::core::ITelephony;
+using aidl::android::hardware::audio::core::ModuleDebug;
using aidl::android::hardware::audio::core::StreamDescriptor;
-using aidl::android::hardware::audio::core::sounddose::ISoundDose;
-using android::hardware::audio::common::getFrameSizeInBytes;
-using android::hardware::audio::common::isBitPositionFlagSet;
-using android::hardware::audio::common::makeBitPositionFlagMask;
namespace android {
@@ -82,6 +96,75 @@
portConfig->format = config.base.format;
}
+template<typename OutEnum, typename OutEnumRange, typename InEnum>
+ConversionResult<OutEnum> convertEnum(const OutEnumRange& range, InEnum e) {
+ using InIntType = std::underlying_type_t<InEnum>;
+ static_assert(std::is_same_v<InIntType, std::underlying_type_t<OutEnum>>);
+
+ InIntType inEnumIndex = static_cast<InIntType>(e);
+ OutEnum outEnum = static_cast<OutEnum>(inEnumIndex);
+ if (std::find(range.begin(), range.end(), outEnum) == range.end()) {
+ return ::android::base::unexpected(BAD_VALUE);
+ }
+ return outEnum;
+}
+
+template<typename NdkEnum, typename CppEnum>
+ConversionResult<NdkEnum> cpp2ndk_Enum(CppEnum e) {
+ return convertEnum<NdkEnum>(::ndk::enum_range<NdkEnum>(), e);
+}
+
+template<typename CppEnum, typename NdkEnum>
+ConversionResult<CppEnum> ndk2cpp_Enum(NdkEnum e) {
+ return convertEnum<CppEnum>(::android::enum_range<CppEnum>(), e);
+}
+
+ConversionResult<android::media::audio::common::AudioDeviceAddress>
+ndk2cpp_AudioDeviceAddress(const AudioDeviceAddress& ndk) {
+ using CppTag = android::media::audio::common::AudioDeviceAddress::Tag;
+ using NdkTag = AudioDeviceAddress::Tag;
+
+ CppTag cppTag = VALUE_OR_RETURN(ndk2cpp_Enum<CppTag>(ndk.getTag()));
+
+ switch (cppTag) {
+ case CppTag::id:
+ return android::media::audio::common::AudioDeviceAddress::make<CppTag::id>(
+ ndk.get<NdkTag::id>());
+ case CppTag::mac:
+ return android::media::audio::common::AudioDeviceAddress::make<CppTag::mac>(
+ ndk.get<NdkTag::mac>());
+ case CppTag::ipv4:
+ return android::media::audio::common::AudioDeviceAddress::make<CppTag::ipv4>(
+ ndk.get<NdkTag::ipv4>());
+ case CppTag::ipv6:
+ return android::media::audio::common::AudioDeviceAddress::make<CppTag::ipv6>(
+ ndk.get<NdkTag::ipv6>());
+ case CppTag::alsa:
+ return android::media::audio::common::AudioDeviceAddress::make<CppTag::alsa>(
+ ndk.get<NdkTag::alsa>());
+ }
+
+ return ::android::base::unexpected(BAD_VALUE);
+}
+
+ConversionResult<media::audio::common::AudioDevice> ndk2cpp_AudioDevice(const AudioDevice& ndk) {
+ media::audio::common::AudioDevice cpp;
+ cpp.type.type = VALUE_OR_RETURN(
+ ndk2cpp_Enum<media::audio::common::AudioDeviceType>(ndk.type.type));
+ cpp.type.connection = ndk.type.connection;
+ cpp.address = VALUE_OR_RETURN(ndk2cpp_AudioDeviceAddress(ndk.address));
+ return cpp;
+}
+
+ConversionResult<media::audio::common::AudioMMapPolicyInfo>
+ndk2cpp_AudioMMapPolicyInfo(const AudioMMapPolicyInfo& ndk) {
+ media::audio::common::AudioMMapPolicyInfo cpp;
+ cpp.device = VALUE_OR_RETURN(ndk2cpp_AudioDevice(ndk.device));
+ cpp.mmapPolicy = VALUE_OR_RETURN(
+ ndk2cpp_Enum<media::audio::common::AudioMMapPolicy>(ndk.mmapPolicy));
+ return cpp;
+}
+
} // namespace
status_t DeviceHalAidl::getSupportedDevices(uint32_t*) {
@@ -114,6 +197,7 @@
}
ALOGI("%s: module %s default port ids: input %d, output %d",
__func__, mInstance.c_str(), mDefaultInputPortId, mDefaultOutputPortId);
+ RETURN_STATUS_IF_ERROR(updateRoutes());
std::vector<AudioPortConfig> portConfigs;
RETURN_STATUS_IF_ERROR(
statusTFromBinderStatus(mModule->getAudioPortConfigs(&portConfigs))); // OK if empty
@@ -250,13 +334,14 @@
::aidl::android::legacy2aidl_audio_config_t_AudioConfig(*config, true /*isInput*/));
AudioDevice aidlDevice;
aidlDevice.type.type = AudioDeviceType::IN_DEFAULT;
+ AudioSource aidlSource = AudioSource::DEFAULT;
AudioIoFlags aidlFlags = AudioIoFlags::make<AudioIoFlags::Tag::input>(0);
AudioPortConfig mixPortConfig;
Cleanups cleanups;
audio_config writableConfig = *config;
- int32_t nominalLatency;
- RETURN_STATUS_IF_ERROR(prepareToOpenStream(0 /*handle*/, aidlDevice, aidlFlags, &writableConfig,
- &cleanups, &aidlConfig, &mixPortConfig, &nominalLatency));
+ AudioPatch aidlPatch;
+ RETURN_STATUS_IF_ERROR(prepareToOpenStream(0 /*handle*/, aidlDevice, aidlFlags, aidlSource,
+ &writableConfig, &cleanups, &aidlConfig, &mixPortConfig, &aidlPatch));
*size = aidlConfig.frameCount *
getFrameSizeInBytes(aidlConfig.base.format, aidlConfig.base.channelMask);
// Do not disarm cleanups to release temporary port configs.
@@ -265,9 +350,13 @@
status_t DeviceHalAidl::prepareToOpenStream(
int32_t aidlHandle, const AudioDevice& aidlDevice, const AudioIoFlags& aidlFlags,
- struct audio_config* config,
+ AudioSource aidlSource, struct audio_config* config,
Cleanups* cleanups, AudioConfig* aidlConfig, AudioPortConfig* mixPortConfig,
- int32_t* nominalLatency) {
+ AudioPatch* aidlPatch) {
+ ALOGD("%p %s::%s: handle %d, device %s, flags %s, source %s, config %s, mix port config %s",
+ this, getClassName().c_str(), __func__, aidlHandle, aidlDevice.toString().c_str(),
+ aidlFlags.toString().c_str(), toString(aidlSource).c_str(),
+ aidlConfig->toString().c_str(), mixPortConfig->toString().c_str());
const bool isInput = aidlFlags.getTag() == AudioIoFlags::Tag::input;
// Find / create AudioPortConfigs for the device port and the mix port,
// then find / create a patch between them, and open a stream on the mix port.
@@ -277,26 +366,24 @@
if (created) {
cleanups->emplace_front(this, &DeviceHalAidl::resetPortConfig, devicePortConfig.id);
}
- RETURN_STATUS_IF_ERROR(findOrCreatePortConfig(*aidlConfig, aidlFlags, aidlHandle,
- mixPortConfig, &created));
+ RETURN_STATUS_IF_ERROR(findOrCreatePortConfig(*aidlConfig, aidlFlags, aidlHandle, aidlSource,
+ std::set<int32_t>{devicePortConfig.portId}, mixPortConfig, &created));
if (created) {
cleanups->emplace_front(this, &DeviceHalAidl::resetPortConfig, mixPortConfig->id);
}
setConfigFromPortConfig(aidlConfig, *mixPortConfig);
- AudioPatch patch;
if (isInput) {
RETURN_STATUS_IF_ERROR(findOrCreatePatch(
- {devicePortConfig.id}, {mixPortConfig->id}, &patch, &created));
+ {devicePortConfig.id}, {mixPortConfig->id}, aidlPatch, &created));
} else {
RETURN_STATUS_IF_ERROR(findOrCreatePatch(
- {mixPortConfig->id}, {devicePortConfig.id}, &patch, &created));
+ {mixPortConfig->id}, {devicePortConfig.id}, aidlPatch, &created));
}
if (created) {
- cleanups->emplace_front(this, &DeviceHalAidl::resetPatch, patch.id);
+ cleanups->emplace_front(this, &DeviceHalAidl::resetPatch, aidlPatch->id);
}
- *nominalLatency = patch.latenciesMs[0];
if (aidlConfig->frameCount <= 0) {
- aidlConfig->frameCount = patch.minimumStreamBufferSizeFrames;
+ aidlConfig->frameCount = aidlPatch->minimumStreamBufferSizeFrames;
}
*config = VALUE_OR_RETURN_STATUS(
::aidl::android::aidl2legacy_AudioConfig_audio_config_t(*aidlConfig, isInput));
@@ -442,9 +529,10 @@
AudioIoFlags aidlFlags = AudioIoFlags::make<AudioIoFlags::Tag::output>(aidlOutputFlags);
AudioPortConfig mixPortConfig;
Cleanups cleanups;
- int32_t nominalLatency;
- RETURN_STATUS_IF_ERROR(prepareToOpenStream(aidlHandle, aidlDevice, aidlFlags, config,
- &cleanups, &aidlConfig, &mixPortConfig, &nominalLatency));
+ AudioPatch aidlPatch;
+ RETURN_STATUS_IF_ERROR(prepareToOpenStream(aidlHandle, aidlDevice, aidlFlags,
+ AudioSource::SYS_RESERVED_INVALID /*only needed for input*/,
+ config, &cleanups, &aidlConfig, &mixPortConfig, &aidlPatch));
::aidl::android::hardware::audio::core::IModule::OpenOutputStreamArguments args;
args.portConfigId = mixPortConfig.id;
const bool isOffload = isBitPositionFlagSet(
@@ -468,8 +556,9 @@
__func__, ret.desc.toString().c_str());
return NO_INIT;
}
- *outStream = sp<StreamOutHalAidl>::make(*config, std::move(context), nominalLatency,
+ *outStream = sp<StreamOutHalAidl>::make(*config, std::move(context), aidlPatch.latenciesMs[0],
std::move(ret.stream), this /*callbackBroker*/);
+ mStreams.insert(std::pair(*outStream, aidlPatch.id));
void* cbCookie = (*outStream).get();
{
std::lock_guard l(mLock);
@@ -506,9 +595,9 @@
::aidl::android::legacy2aidl_audio_source_t_AudioSource(source));
AudioPortConfig mixPortConfig;
Cleanups cleanups;
- int32_t nominalLatency;
- RETURN_STATUS_IF_ERROR(prepareToOpenStream(aidlHandle, aidlDevice, aidlFlags, config,
- &cleanups, &aidlConfig, &mixPortConfig, &nominalLatency));
+ AudioPatch aidlPatch;
+ RETURN_STATUS_IF_ERROR(prepareToOpenStream(aidlHandle, aidlDevice, aidlFlags, aidlSource,
+ config, &cleanups, &aidlConfig, &mixPortConfig, &aidlPatch));
::aidl::android::hardware::audio::core::IModule::OpenInputStreamArguments args;
args.portConfigId = mixPortConfig.id;
RecordTrackMetadata aidlTrackMetadata{
@@ -528,8 +617,9 @@
__func__, ret.desc.toString().c_str());
return NO_INIT;
}
- *inStream = sp<StreamInHalAidl>::make(*config, std::move(context), nominalLatency,
- std::move(ret.stream));
+ *inStream = sp<StreamInHalAidl>::make(*config, std::move(context), aidlPatch.latenciesMs[0],
+ std::move(ret.stream), this /*micInfoProvider*/);
+ mStreams.insert(std::pair(*inStream, aidlPatch.id));
cleanups.disarmAll();
return OK;
}
@@ -597,20 +687,41 @@
__func__, ::android::internal::ToString(aidlSources).c_str(),
::android::internal::ToString(aidlSinks).c_str());
auto fillPortConfigs = [&](
- const std::vector<AudioPortConfig>& configs, std::vector<int32_t>* ids) -> status_t {
+ const std::vector<AudioPortConfig>& configs,
+ const std::set<int32_t>& destinationPortIds,
+ std::vector<int32_t>* ids, std::set<int32_t>* portIds) -> status_t {
for (const auto& s : configs) {
AudioPortConfig portConfig;
bool created = false;
- RETURN_STATUS_IF_ERROR(findOrCreatePortConfig(s, &portConfig, &created));
+ RETURN_STATUS_IF_ERROR(findOrCreatePortConfig(
+ s, destinationPortIds, &portConfig, &created));
if (created) {
cleanups.emplace_front(this, &DeviceHalAidl::resetPortConfig, portConfig.id);
}
ids->push_back(portConfig.id);
+ if (portIds != nullptr) {
+ portIds->insert(portConfig.portId);
+ }
}
return OK;
};
- RETURN_STATUS_IF_ERROR(fillPortConfigs(aidlSources, &aidlPatch.sourcePortConfigIds));
- RETURN_STATUS_IF_ERROR(fillPortConfigs(aidlSinks, &aidlPatch.sinkPortConfigIds));
+ // When looking up port configs, the destinationPortId is only used for mix ports.
+ // Thus, we process device port configs first, and look up the destination port ID from them.
+ bool sourceIsDevice = std::any_of(aidlSources.begin(), aidlSources.end(),
+ [](const auto& config) { return config.ext.getTag() == AudioPortExt::device; });
+ const std::vector<AudioPortConfig>& devicePortConfigs =
+ sourceIsDevice ? aidlSources : aidlSinks;
+ std::vector<int32_t>* devicePortConfigIds =
+ sourceIsDevice ? &aidlPatch.sourcePortConfigIds : &aidlPatch.sinkPortConfigIds;
+ const std::vector<AudioPortConfig>& mixPortConfigs =
+ sourceIsDevice ? aidlSinks : aidlSources;
+ std::vector<int32_t>* mixPortConfigIds =
+ sourceIsDevice ? &aidlPatch.sinkPortConfigIds : &aidlPatch.sourcePortConfigIds;
+ std::set<int32_t> devicePortIds;
+ RETURN_STATUS_IF_ERROR(fillPortConfigs(
+ devicePortConfigs, std::set<int32_t>(), devicePortConfigIds, &devicePortIds));
+ RETURN_STATUS_IF_ERROR(fillPortConfigs(
+ mixPortConfigs, devicePortIds, mixPortConfigIds, nullptr));
if (existingPatchIt != mPatches.end()) {
RETURN_STATUS_IF_ERROR(statusTFromBinderStatus(
mModule->setAudioPatch(aidlPatch, &aidlPatch)));
@@ -645,30 +756,110 @@
return OK;
}
-status_t DeviceHalAidl::getAudioPort(struct audio_port* port __unused) {
- TIME_CHECK();
- ALOGE("%s not implemented yet", __func__);
- return INVALID_OPERATION;
-}
-
-status_t DeviceHalAidl::getAudioPort(struct audio_port_v7 *port __unused) {
- TIME_CHECK();
- ALOGE("%s not implemented yet", __func__);
- return INVALID_OPERATION;
-}
-
-status_t DeviceHalAidl::setAudioPortConfig(const struct audio_port_config* config __unused) {
+status_t DeviceHalAidl::getAudioPort(struct audio_port* port) {
+ ALOGD("%p %s::%s", this, getClassName().c_str(), __func__);
TIME_CHECK();
if (!mModule) return NO_INIT;
- ALOGE("%s not implemented yet", __func__);
+ if (port == nullptr) {
+ return BAD_VALUE;
+ }
+ audio_port_v7 portV7;
+ audio_populate_audio_port_v7(port, &portV7);
+ RETURN_STATUS_IF_ERROR(getAudioPort(&portV7));
+ return audio_populate_audio_port(&portV7, port) ? OK : BAD_VALUE;
+}
+
+status_t DeviceHalAidl::getAudioPort(struct audio_port_v7 *port) {
+ ALOGD("%p %s::%s", this, getClassName().c_str(), __func__);
+ TIME_CHECK();
+ if (!mModule) return NO_INIT;
+ if (port == nullptr) {
+ return BAD_VALUE;
+ }
+ bool isInput = VALUE_OR_RETURN_STATUS(::aidl::android::portDirection(port->role, port->type)) ==
+ ::aidl::android::AudioPortDirection::INPUT;
+ auto aidlPort = VALUE_OR_RETURN_STATUS(
+ ::aidl::android::legacy2aidl_audio_port_v7_AudioPort(*port, isInput));
+ if (aidlPort.ext.getTag() != AudioPortExt::device) {
+ ALOGE("%s: provided port is not a device port (module %s): %s",
+ __func__, mInstance.c_str(), aidlPort.toString().c_str());
+ return BAD_VALUE;
+ }
+ const auto& matchDevice = aidlPort.ext.get<AudioPortExt::device>().device;
+ // It seems that we don't have to call HAL since all valid ports have been added either
+ // during initialization, or while handling connection of an external device.
+ auto portsIt = findPort(matchDevice);
+ if (portsIt == mPorts.end()) {
+ ALOGE("%s: device port for device %s is not found in the module %s",
+ __func__, matchDevice.toString().c_str(), mInstance.c_str());
+ return BAD_VALUE;
+ }
+ const int32_t fwkId = aidlPort.id;
+ aidlPort = portsIt->second;
+ aidlPort.id = fwkId;
+ *port = VALUE_OR_RETURN_STATUS(::aidl::android::aidl2legacy_AudioPort_audio_port_v7(
+ aidlPort, isInput));
return OK;
}
-status_t DeviceHalAidl::getMicrophones(
- std::vector<audio_microphone_characteristic_t>* microphones __unused) {
+status_t DeviceHalAidl::setAudioPortConfig(const struct audio_port_config* config) {
+ ALOGD("%p %s::%s", this, getClassName().c_str(), __func__);
TIME_CHECK();
if (!mModule) return NO_INIT;
- ALOGE("%s not implemented yet", __func__);
+ if (config == nullptr) {
+ return BAD_VALUE;
+ }
+ bool isInput = VALUE_OR_RETURN_STATUS(::aidl::android::portDirection(
+ config->role, config->type)) == ::aidl::android::AudioPortDirection::INPUT;
+ AudioPortConfig requestedPortConfig = VALUE_OR_RETURN_STATUS(
+ ::aidl::android::legacy2aidl_audio_port_config_AudioPortConfig(
+ *config, isInput, 0 /*portId*/));
+ AudioPortConfig portConfig;
+ bool created = false;
+ RETURN_STATUS_IF_ERROR(findOrCreatePortConfig(
+ requestedPortConfig, std::set<int32_t>(), &portConfig, &created));
+ return OK;
+}
+
+MicrophoneInfoProvider::Info const* DeviceHalAidl::getMicrophoneInfo() {
+ if (mMicrophones.status == Microphones::Status::UNKNOWN) {
+ TIME_CHECK();
+ std::vector<MicrophoneInfo> aidlInfo;
+ status_t status = statusTFromBinderStatus(mModule->getMicrophones(&aidlInfo));
+ if (status == OK) {
+ mMicrophones.status = Microphones::Status::QUERIED;
+ mMicrophones.info = std::move(aidlInfo);
+ } else if (status == INVALID_OPERATION) {
+ mMicrophones.status = Microphones::Status::NOT_SUPPORTED;
+ } else {
+ ALOGE("%s: Unexpected status from 'IModule.getMicrophones': %d", __func__, status);
+ return {};
+ }
+ }
+ if (mMicrophones.status == Microphones::Status::QUERIED) {
+ return &mMicrophones.info;
+ }
+ return {}; // NOT_SUPPORTED
+}
+
+status_t DeviceHalAidl::getMicrophones(
+ std::vector<audio_microphone_characteristic_t>* microphones) {
+ if (!microphones) {
+ return BAD_VALUE;
+ }
+ TIME_CHECK();
+ if (!mModule) return NO_INIT;
+ auto staticInfo = getMicrophoneInfo();
+ if (!staticInfo) return INVALID_OPERATION;
+ std::vector<MicrophoneDynamicInfo> emptyDynamicInfo;
+ emptyDynamicInfo.reserve(staticInfo->size());
+ std::transform(staticInfo->begin(), staticInfo->end(), std::back_inserter(emptyDynamicInfo),
+ [](const auto& info) { return MicrophoneDynamicInfo{ .id = info.id }; });
+ *microphones = VALUE_OR_RETURN_STATUS(
+ ::aidl::android::convertContainers<std::vector<audio_microphone_characteristic_t>>(
+ *staticInfo, emptyDynamicInfo,
+ ::aidl::android::aidl2legacy_MicrophoneInfos_audio_microphone_characteristic_t)
+ );
return OK;
}
@@ -694,36 +885,57 @@
}
status_t DeviceHalAidl::getMmapPolicyInfos(
- media::audio::common::AudioMMapPolicyType policyType __unused,
- std::vector<media::audio::common::AudioMMapPolicyInfo>* policyInfos __unused) {
+ media::audio::common::AudioMMapPolicyType policyType,
+ std::vector<media::audio::common::AudioMMapPolicyInfo>* policyInfos) {
TIME_CHECK();
- ALOGE("%s not implemented yet", __func__);
+ AudioMMapPolicyType mmapPolicyType =
+ VALUE_OR_RETURN_STATUS(cpp2ndk_Enum<AudioMMapPolicyType>(policyType));
+
+ std::vector<AudioMMapPolicyInfo> mmapPolicyInfos;
+
+ if (status_t status = statusTFromBinderStatus(
+ mModule->getMmapPolicyInfos(mmapPolicyType, &mmapPolicyInfos)); status != OK) {
+ return status;
+ }
+
+ *policyInfos = VALUE_OR_RETURN_STATUS(
+ convertContainer<std::vector<media::audio::common::AudioMMapPolicyInfo>>(
+ mmapPolicyInfos, ndk2cpp_AudioMMapPolicyInfo));
return OK;
}
int32_t DeviceHalAidl::getAAudioMixerBurstCount() {
TIME_CHECK();
- ALOGE("%s not implemented yet", __func__);
- return OK;
+ int32_t mixerBurstCount = 0;
+ if (mModule->getAAudioMixerBurstCount(&mixerBurstCount).isOk()) {
+ return mixerBurstCount;
+ }
+ return 0;
}
int32_t DeviceHalAidl::getAAudioHardwareBurstMinUsec() {
TIME_CHECK();
- ALOGE("%s not implemented yet", __func__);
- return OK;
+ int32_t hardwareBurstMinUsec = 0;
+ if (mModule->getAAudioHardwareBurstMinUsec(&hardwareBurstMinUsec).isOk()) {
+ return hardwareBurstMinUsec;
+ }
+ return 0;
}
error::Result<audio_hw_sync_t> DeviceHalAidl::getHwAvSync() {
TIME_CHECK();
- ALOGE("%s not implemented yet", __func__);
- return base::unexpected(INVALID_OPERATION);
+ if (!mModule) return NO_INIT;
+ int32_t aidlHwAvSync;
+ RETURN_STATUS_IF_ERROR(statusTFromBinderStatus(mModule->generateHwAvSyncId(&aidlHwAvSync)));
+ return VALUE_OR_RETURN_STATUS(
+ ::aidl::android::aidl2legacy_int32_t_audio_hw_sync_t(aidlHwAvSync));
}
status_t DeviceHalAidl::dump(int fd, const Vector<String16>& args) {
TIME_CHECK();
if (!mModule) return NO_INIT;
return mModule->dump(fd, Args(args).args(), args.size());
-};
+}
int32_t DeviceHalAidl::supportsBluetoothVariableLatency(bool* supports __unused) {
TIME_CHECK();
@@ -751,6 +963,73 @@
return OK;
}
+status_t DeviceHalAidl::setConnectedState(const struct audio_port_v7 *port, bool connected) {
+ TIME_CHECK();
+ if (!mModule) return NO_INIT;
+ if (port == nullptr) {
+ return BAD_VALUE;
+ }
+ bool isInput = VALUE_OR_RETURN_STATUS(::aidl::android::portDirection(port->role, port->type)) ==
+ ::aidl::android::AudioPortDirection::INPUT;
+ AudioPort aidlPort = VALUE_OR_RETURN_STATUS(
+ ::aidl::android::legacy2aidl_audio_port_v7_AudioPort(*port, isInput));
+ if (aidlPort.ext.getTag() != AudioPortExt::device) {
+ ALOGE("%s: provided port is not a device port (module %s): %s",
+ __func__, mInstance.c_str(), aidlPort.toString().c_str());
+ return BAD_VALUE;
+ }
+ if (connected) {
+ AudioDevice matchDevice = aidlPort.ext.get<AudioPortExt::device>().device;
+ // Reset the device address to find the "template" port.
+ matchDevice.address = AudioDeviceAddress::make<AudioDeviceAddress::id>();
+ auto portsIt = findPort(matchDevice);
+ if (portsIt == mPorts.end()) {
+ ALOGW("%s: device port for device %s is not found in the module %s",
+ __func__, matchDevice.toString().c_str(), mInstance.c_str());
+ return BAD_VALUE;
+ }
+ // Use the ID of the "template" port, but take all other information from the provided port.
+ aidlPort.id = portsIt->first;
+ AudioPort connectedPort;
+ RETURN_STATUS_IF_ERROR(statusTFromBinderStatus(mModule->connectExternalDevice(
+ aidlPort, &connectedPort)));
+ const auto [it, inserted] = mPorts.insert(std::make_pair(connectedPort.id, connectedPort));
+ LOG_ALWAYS_FATAL_IF(!inserted,
+ "%s: module %s, duplicate port ID received from HAL: %s, existing port: %s",
+ __func__, mInstance.c_str(), connectedPort.toString().c_str(),
+ it->second.toString().c_str());
+ } else { // !connected
+ AudioDevice matchDevice = aidlPort.ext.get<AudioPortExt::device>().device;
+ auto portsIt = findPort(matchDevice);
+ if (portsIt == mPorts.end()) {
+ ALOGW("%s: device port for device %s is not found in the module %s",
+ __func__, matchDevice.toString().c_str(), mInstance.c_str());
+ return BAD_VALUE;
+ }
+ // Any streams opened on the external device must be closed by this time,
+ // thus we can clean up patches and port configs that were created for them.
+ resetUnusedPatchesAndPortConfigs();
+ RETURN_STATUS_IF_ERROR(statusTFromBinderStatus(mModule->disconnectExternalDevice(
+ portsIt->second.id)));
+ mPorts.erase(portsIt);
+ }
+ return updateRoutes();
+}
+
+status_t DeviceHalAidl::setSimulateDeviceConnections(bool enabled) {
+ TIME_CHECK();
+ if (!mModule) return NO_INIT;
+ ModuleDebug debug{ .simulateDeviceConnections = enabled };
+ status_t status = statusTFromBinderStatus(mModule->setModuleDebug(debug));
+ // This is important to log as it affects HAL behavior.
+ if (status == OK) {
+ ALOGI("%s: set enabled: %d", __func__, enabled);
+ } else {
+ ALOGW("%s: set enabled to %d failed: %d", __func__, enabled, status);
+ }
+ return status;
+}
+
bool DeviceHalAidl::audioDeviceMatches(const AudioDevice& device, const AudioPort& p) {
if (p.ext.getTag() != AudioPortExt::Tag::device) return false;
return p.ext.get<AudioPortExt::Tag::device>().device == device;
@@ -766,8 +1045,8 @@
return p.ext.get<AudioPortExt::Tag::device>().device == device;
}
-status_t DeviceHalAidl::createPortConfig(
- const AudioPortConfig& requestedPortConfig, PortConfigs::iterator* result) {
+status_t DeviceHalAidl::createOrUpdatePortConfig(
+ const AudioPortConfig& requestedPortConfig, PortConfigs::iterator* result, bool* created) {
TIME_CHECK();
AudioPortConfig appliedPortConfig;
bool applied = false;
@@ -782,11 +1061,17 @@
return NO_INIT;
}
}
- auto id = appliedPortConfig.id;
- auto [it, inserted] = mPortConfigs.emplace(std::move(id), std::move(appliedPortConfig));
- LOG_ALWAYS_FATAL_IF(!inserted, "%s: port config with id %d already exists",
- __func__, it->first);
+
+ int32_t id = appliedPortConfig.id;
+ if (requestedPortConfig.id != 0 && requestedPortConfig.id != id) {
+ LOG_ALWAYS_FATAL("%s: requested port config id %d changed to %d", __func__,
+ requestedPortConfig.id, id);
+ }
+
+ auto [it, inserted] = mPortConfigs.insert_or_assign(std::move(id),
+ std::move(appliedPortConfig));
*result = it;
+ *created = inserted;
return OK;
}
@@ -833,8 +1118,8 @@
}
AudioPortConfig requestedPortConfig;
requestedPortConfig.portId = portsIt->first;
- RETURN_STATUS_IF_ERROR(createPortConfig(requestedPortConfig, &portConfigIt));
- *created = true;
+ RETURN_STATUS_IF_ERROR(createOrUpdatePortConfig(requestedPortConfig, &portConfigIt,
+ created));
} else {
*created = false;
}
@@ -844,6 +1129,7 @@
status_t DeviceHalAidl::findOrCreatePortConfig(
const AudioConfig& config, const std::optional<AudioIoFlags>& flags, int32_t ioHandle,
+ AudioSource source, const std::set<int32_t>& destinationPortIds,
AudioPortConfig* portConfig, bool* created) {
// These flags get removed one by one in this order when retrying port finding.
static const std::vector<AudioInputFlags> kOptionalInputFlags{
@@ -852,7 +1138,7 @@
if (portConfigIt == mPortConfigs.end() && flags.has_value()) {
auto optionalInputFlagsIt = kOptionalInputFlags.begin();
AudioIoFlags matchFlags = flags.value();
- auto portsIt = findPort(config, matchFlags);
+ auto portsIt = findPort(config, matchFlags, destinationPortIds);
while (portsIt == mPorts.end() && matchFlags.getTag() == AudioIoFlags::Tag::input
&& optionalInputFlagsIt != kOptionalInputFlags.end()) {
if (!isBitPositionFlagSet(
@@ -862,7 +1148,7 @@
}
matchFlags.set<AudioIoFlags::Tag::input>(matchFlags.get<AudioIoFlags::Tag::input>() &
~makeBitPositionFlagMask(*optionalInputFlagsIt++));
- portsIt = findPort(config, matchFlags);
+ portsIt = findPort(config, matchFlags, destinationPortIds);
ALOGI("%s: mix port for config %s, flags %s was not found in the module %s, "
"retried with flags %s", __func__, config.toString().c_str(),
flags.value().toString().c_str(), mInstance.c_str(),
@@ -878,22 +1164,42 @@
requestedPortConfig.portId = portsIt->first;
setPortConfigFromConfig(&requestedPortConfig, config);
requestedPortConfig.ext = AudioPortMixExt{ .handle = ioHandle };
- RETURN_STATUS_IF_ERROR(createPortConfig(requestedPortConfig, &portConfigIt));
- *created = true;
+ if (matchFlags.getTag() == AudioIoFlags::Tag::input
+ && source != AudioSource::SYS_RESERVED_INVALID) {
+ requestedPortConfig.ext.get<AudioPortExt::Tag::mix>().usecase =
+ AudioPortMixExtUseCase::make<AudioPortMixExtUseCase::Tag::source>(source);
+ }
+ RETURN_STATUS_IF_ERROR(createOrUpdatePortConfig(requestedPortConfig, &portConfigIt,
+ created));
} else if (!flags.has_value()) {
ALOGW("%s: mix port config for %s, handle %d not found in the module %s, "
"and was not created as flags are not specified",
__func__, config.toString().c_str(), ioHandle, mInstance.c_str());
return BAD_VALUE;
} else {
- *created = false;
+ AudioPortConfig requestedPortConfig = portConfigIt->second;
+ if (requestedPortConfig.ext.getTag() == AudioPortExt::Tag::mix) {
+ AudioPortMixExt& mixExt = requestedPortConfig.ext.get<AudioPortExt::Tag::mix>();
+ if (mixExt.usecase.getTag() == AudioPortMixExtUseCase::Tag::source &&
+ source != AudioSource::SYS_RESERVED_INVALID) {
+ mixExt.usecase.get<AudioPortMixExtUseCase::Tag::source>() = source;
+ }
+ }
+
+ if (requestedPortConfig != portConfigIt->second) {
+ RETURN_STATUS_IF_ERROR(createOrUpdatePortConfig(requestedPortConfig, &portConfigIt,
+ created));
+ } else {
+ *created = false;
+ }
}
*portConfig = portConfigIt->second;
return OK;
}
status_t DeviceHalAidl::findOrCreatePortConfig(
- const AudioPortConfig& requestedPortConfig, AudioPortConfig* portConfig, bool* created) {
+ const AudioPortConfig& requestedPortConfig, const std::set<int32_t>& destinationPortIds,
+ AudioPortConfig* portConfig, bool* created) {
using Tag = AudioPortExt::Tag;
if (requestedPortConfig.ext.getTag() == Tag::mix) {
if (const auto& p = requestedPortConfig;
@@ -905,8 +1211,13 @@
}
AudioConfig config;
setConfigFromPortConfig(&config, requestedPortConfig);
+ AudioSource source = requestedPortConfig.ext.get<Tag::mix>().usecase.getTag() ==
+ AudioPortMixExtUseCase::Tag::source ?
+ requestedPortConfig.ext.get<Tag::mix>().usecase.
+ get<AudioPortMixExtUseCase::Tag::source>() : AudioSource::SYS_RESERVED_INVALID;
return findOrCreatePortConfig(config, requestedPortConfig.flags,
- requestedPortConfig.ext.get<Tag::mix>().handle, portConfig, created);
+ requestedPortConfig.ext.get<Tag::mix>().handle, source, destinationPortIds,
+ portConfig, created);
} else if (requestedPortConfig.ext.getTag() == Tag::device) {
return findOrCreatePortConfig(
requestedPortConfig.ext.get<Tag::device>().device, portConfig, created);
@@ -938,20 +1249,30 @@
[&](const auto& pair) { return audioDeviceMatches(device, pair.second); });
}
+
DeviceHalAidl::Ports::iterator DeviceHalAidl::findPort(
- const AudioConfig& config, const AudioIoFlags& flags) {
+ const AudioConfig& config, const AudioIoFlags& flags,
+ const std::set<int32_t>& destinationPortIds) {
+ auto belongsToProfile = [&config](const AudioProfile& prof) {
+ return (isDefaultAudioFormat(config.base.format) || prof.format == config.base.format) &&
+ (config.base.channelMask.getTag() == AudioChannelLayout::none ||
+ std::find(prof.channelMasks.begin(), prof.channelMasks.end(),
+ config.base.channelMask) != prof.channelMasks.end()) &&
+ (config.base.sampleRate == 0 ||
+ std::find(prof.sampleRates.begin(), prof.sampleRates.end(),
+ config.base.sampleRate) != prof.sampleRates.end());
+ };
auto matcher = [&](const auto& pair) {
const auto& p = pair.second;
return p.ext.getTag() == AudioPortExt::Tag::mix &&
p.flags == flags &&
- std::find_if(p.profiles.begin(), p.profiles.end(),
- [&](const auto& prof) {
- return prof.format == config.base.format &&
- std::find(prof.channelMasks.begin(), prof.channelMasks.end(),
- config.base.channelMask) != prof.channelMasks.end() &&
- std::find(prof.sampleRates.begin(), prof.sampleRates.end(),
- config.base.sampleRate) != prof.sampleRates.end();
- }) != p.profiles.end(); };
+ (destinationPortIds.empty() ||
+ std::any_of(destinationPortIds.begin(), destinationPortIds.end(),
+ [&](const int32_t destId) { return mRoutingMatrix.count(
+ std::make_pair(p.id, destId)) != 0; })) &&
+ (p.profiles.empty() ||
+ std::find_if(p.profiles.begin(), p.profiles.end(), belongsToProfile) !=
+ p.profiles.end()); };
return std::find_if(mPorts.begin(), mPorts.end(), matcher);
}
@@ -976,34 +1297,7 @@
(!flags.has_value() || p.flags.value() == flags.value()) &&
p.ext.template get<Tag::mix>().handle == ioHandle; });
}
-/*
-DeviceHalAidl::PortConfigs::iterator DeviceHalAidl::findPortConfig(
- const AudioPortConfig& portConfig) {
- using Tag = AudioPortExt::Tag;
- if (portConfig.ext.getTag() == Tag::mix) {
- return std::find_if(mPortConfigs.begin(), mPortConfigs.end(),
- [&](const auto& pair) {
- const auto& p = pair.second;
- LOG_ALWAYS_FATAL_IF(p.ext.getTag() == Tag::mix &&
- !p.sampleRate.has_value() || !p.channelMask.has_value() ||
- !p.format.has_value() || !p.flags.has_value(),
- "%s: stored mix port config is not fully specified: %s",
- __func__, p.toString().c_str());
- return p.ext.getTag() == Tag::mix &&
- (!portConfig.sampleRate.has_value() ||
- p.sampleRate == portConfig.sampleRate) &&
- (!portConfig.channelMask.has_value() ||
- p.channelMask == portConfig.channelMask) &&
- (!portConfig.format.has_value() || p.format == portConfig.format) &&
- (!portConfig.flags.has_value() || p.flags == portConfig.flags) &&
- p.ext.template get<Tag::mix>().handle ==
- portConfig.ext.template get<Tag::mix>().handle; });
- } else if (portConfig.ext.getTag() == Tag::device) {
- return findPortConfig(portConfig.ext.get<Tag::device>().device);
- }
- return mPortConfigs.end();
-}
-*/
+
void DeviceHalAidl::resetPatch(int32_t patchId) {
if (auto it = mPatches.find(patchId); it != mPatches.end()) {
mPatches.erase(it);
@@ -1031,6 +1325,56 @@
ALOGE("%s: port config id %d not found", __func__, portConfigId);
}
+void DeviceHalAidl::resetUnusedPatches() {
+ // Since patches can be created independently of streams via 'createAudioPatch',
+ // here we only clean up patches for released streams.
+ for (auto it = mStreams.begin(); it != mStreams.end(); ) {
+ if (auto streamSp = it->first.promote(); streamSp) {
+ ++it;
+ } else {
+ resetPatch(it->second);
+ it = mStreams.erase(it);
+ }
+ }
+}
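
A generic sketch of the weak-reference sweep performed above: entries whose stream can still be promoted are kept, the rest get their patch released and are erased. std::weak_ptr and the helper name stand in for the android::wp-based types used in the patch:

    #include <cstdint>
    #include <functional>
    #include <map>
    #include <memory>

    // Erases map entries whose weak key has expired, invoking 'releasePatch' on the
    // associated patch ID before removal.
    void sweepExpiredStreams(std::map<std::weak_ptr<void>, int32_t,
                                      std::owner_less<std::weak_ptr<void>>>& streams,
                             const std::function<void(int32_t)>& releasePatch) {
        for (auto it = streams.begin(); it != streams.end();) {
            if (it->first.lock()) {
                ++it;
            } else {
                releasePatch(it->second);
                it = streams.erase(it);
            }
        }
    }
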
+
+void DeviceHalAidl::resetUnusedPatchesAndPortConfigs() {
+ resetUnusedPatches();
+ resetUnusedPortConfigs();
+}
+
+void DeviceHalAidl::resetUnusedPortConfigs() {
+ // The assumption is that port configs are used to create patches
+ // (or to open streams, but that involves creation of patches, too). Thus,
+ // orphaned port configs can and should be reset.
+ std::set<int32_t> portConfigIds;
+ std::transform(mPortConfigs.begin(), mPortConfigs.end(),
+ std::inserter(portConfigIds, portConfigIds.end()),
+ [](const auto& pcPair) { return pcPair.first; });
+ for (const auto& p : mPatches) {
+ for (int32_t id : p.second.sourcePortConfigIds) portConfigIds.erase(id);
+ for (int32_t id : p.second.sinkPortConfigIds) portConfigIds.erase(id);
+ }
+ for (int32_t id : portConfigIds) resetPortConfig(id);
+}
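
A simplified sketch of the orphan detection used above: start from every known port config ID, remove those referenced by any patch, and whatever remains is unused. Patch and findOrphanedPortConfigs are illustrative stand-ins:

    #include <cstdint>
    #include <map>
    #include <set>
    #include <vector>

    struct Patch {
        std::vector<int32_t> sourcePortConfigIds;
        std::vector<int32_t> sinkPortConfigIds;
    };

    // Returns the IDs of port configs that no patch references.
    std::set<int32_t> findOrphanedPortConfigs(const std::set<int32_t>& allPortConfigIds,
                                              const std::map<int32_t, Patch>& patches) {
        std::set<int32_t> orphans = allPortConfigIds;
        for (const auto& [patchId, patch] : patches) {
            for (int32_t id : patch.sourcePortConfigIds) orphans.erase(id);
            for (int32_t id : patch.sinkPortConfigIds) orphans.erase(id);
        }
        return orphans;
    }
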
+
+status_t DeviceHalAidl::updateRoutes() {
+ TIME_CHECK();
+ std::vector<AudioRoute> routes;
+ RETURN_STATUS_IF_ERROR(
+ statusTFromBinderStatus(mModule->getAudioRoutes(&routes)));
+ ALOGW_IF(routes.empty(), "%s: module %s returned an empty list of audio routes",
+ __func__, mInstance.c_str());
+ mRoutingMatrix.clear();
+ for (const auto& r : routes) {
+ for (auto portId : r.sourcePortIds) {
+ mRoutingMatrix.emplace(r.sinkPortId, portId);
+ mRoutingMatrix.emplace(portId, r.sinkPortId);
+ }
+ }
+ return OK;
+}
+
void DeviceHalAidl::clearCallbacks(void* cookie) {
std::lock_guard l(mLock);
mCallbacks.erase(cookie);
diff --git a/media/libaudiohal/impl/DeviceHalAidl.h b/media/libaudiohal/impl/DeviceHalAidl.h
index 0a56514..e4d5ec6 100644
--- a/media/libaudiohal/impl/DeviceHalAidl.h
+++ b/media/libaudiohal/impl/DeviceHalAidl.h
@@ -25,6 +25,7 @@
#include <android-base/thread_annotations.h>
#include <media/audiohal/DeviceHalInterface.h>
#include <media/audiohal/EffectHalInterface.h>
+#include <media/audiohal/StreamHalInterface.h>
#include "ConversionHelperAidl.h"
@@ -57,8 +58,16 @@
void* cookie, const sp<StreamOutHalInterfaceLatencyModeCallback>&) = 0;
};
+class MicrophoneInfoProvider : public virtual RefBase {
+ public:
+ using Info = std::vector<::aidl::android::media::audio::common::MicrophoneInfo>;
+ virtual ~MicrophoneInfoProvider() = default;
+ // Returns nullptr if the HAL does not support microphone info retrieval.
+ virtual Info const* getMicrophoneInfo() = 0;
+};
+
class DeviceHalAidl : public DeviceHalInterface, public ConversionHelperAidl,
- public CallbackBroker {
+ public CallbackBroker, public MicrophoneInfoProvider {
public:
// Sets the value of 'devices' to a bitmask of 1 or more values of audio_devices_t.
status_t getSupportedDevices(uint32_t *devices) override;
@@ -131,7 +140,7 @@
status_t setAudioPortConfig(const struct audio_port_config* config) override;
// List microphones
- status_t getMicrophones(std::vector<audio_microphone_characteristic_t>* microphones);
+ status_t getMicrophones(std::vector<audio_microphone_characteristic_t>* microphones) override;
status_t addDeviceEffect(audio_port_handle_t device, sp<EffectHalInterface> effect) override;
@@ -147,13 +156,17 @@
error::Result<audio_hw_sync_t> getHwAvSync() override;
- status_t dump(int __unused, const Vector<String16>& __unused) override;
-
int32_t supportsBluetoothVariableLatency(bool* supports __unused) override;
status_t getSoundDoseInterface(const std::string& module,
::ndk::SpAIBinder* soundDoseBinder) override;
+ status_t setConnectedState(const struct audio_port_v7 *port, bool connected) override;
+
+ status_t setSimulateDeviceConnections(bool enabled) override;
+
+ status_t dump(int __unused, const Vector<String16>& __unused) override;
+
private:
friend class sp<DeviceHalAidl>;
@@ -162,11 +175,20 @@
wp<StreamOutHalInterfaceEventCallback> event;
wp<StreamOutHalInterfaceLatencyModeCallback> latency;
};
+ struct Microphones {
+ enum Status { UNKNOWN, NOT_SUPPORTED, QUERIED };
+ Status status = Status::UNKNOWN;
+ MicrophoneInfoProvider::Info info;
+ };
using Patches = std::map<int32_t /*patch ID*/,
::aidl::android::hardware::audio::core::AudioPatch>;
using PortConfigs = std::map<int32_t /*port config ID*/,
::aidl::android::media::audio::common::AudioPortConfig>;
using Ports = std::map<int32_t /*port ID*/, ::aidl::android::media::audio::common::AudioPort>;
+ // Answers the question: is port ID 'first' reachable from port ID 'second'?
+ // It's not a map because both port IDs are known when querying. The matrix is symmetric.
+ using RoutingMatrix = std::set<std::pair<int32_t, int32_t>>;
+ using Streams = std::map<wp<StreamHalInterface>, int32_t /*patch ID*/>;
class Cleanups;
// Must not be constructed directly by clients.
@@ -181,9 +203,9 @@
const ::aidl::android::media::audio::common::AudioPort& p);
bool audioDeviceMatches(const ::aidl::android::media::audio::common::AudioDevice& device,
const ::aidl::android::media::audio::common::AudioPortConfig& p);
- status_t createPortConfig(
+ status_t createOrUpdatePortConfig(
const ::aidl::android::media::audio::common::AudioPortConfig& requestedPortConfig,
- PortConfigs::iterator* result);
+ PortConfigs::iterator* result, bool *created);
status_t findOrCreatePatch(
const std::set<int32_t>& sourcePortConfigIds,
const std::set<int32_t>& sinkPortConfigIds,
@@ -199,36 +221,42 @@
const ::aidl::android::media::audio::common::AudioConfig& config,
const std::optional<::aidl::android::media::audio::common::AudioIoFlags>& flags,
int32_t ioHandle,
+ ::aidl::android::media::audio::common::AudioSource aidlSource,
+ const std::set<int32_t>& destinationPortIds,
::aidl::android::media::audio::common::AudioPortConfig* portConfig, bool* created);
status_t findOrCreatePortConfig(
const ::aidl::android::media::audio::common::AudioPortConfig& requestedPortConfig,
+ const std::set<int32_t>& destinationPortIds,
::aidl::android::media::audio::common::AudioPortConfig* portConfig, bool* created);
Patches::iterator findPatch(const std::set<int32_t>& sourcePortConfigIds,
const std::set<int32_t>& sinkPortConfigIds);
Ports::iterator findPort(const ::aidl::android::media::audio::common::AudioDevice& device);
Ports::iterator findPort(
const ::aidl::android::media::audio::common::AudioConfig& config,
- const ::aidl::android::media::audio::common::AudioIoFlags& flags);
+ const ::aidl::android::media::audio::common::AudioIoFlags& flags,
+ const std::set<int32_t>& destinationPortIds);
PortConfigs::iterator findPortConfig(
const ::aidl::android::media::audio::common::AudioDevice& device);
PortConfigs::iterator findPortConfig(
const ::aidl::android::media::audio::common::AudioConfig& config,
const std::optional<::aidl::android::media::audio::common::AudioIoFlags>& flags,
int32_t ioHandle);
- // Currently unused but may be useful for implementing setAudioPortConfig
- // PortConfigs::iterator findPortConfig(
- // const ::aidl::android::media::audio::common::AudioPortConfig& portConfig);
status_t prepareToOpenStream(
int32_t aidlHandle,
const ::aidl::android::media::audio::common::AudioDevice& aidlDevice,
const ::aidl::android::media::audio::common::AudioIoFlags& aidlFlags,
+ ::aidl::android::media::audio::common::AudioSource aidlSource,
struct audio_config* config,
Cleanups* cleanups,
::aidl::android::media::audio::common::AudioConfig* aidlConfig,
::aidl::android::media::audio::common::AudioPortConfig* mixPortConfig,
- int32_t* nominalLatency);
+ ::aidl::android::hardware::audio::core::AudioPatch* aidlPatch);
void resetPatch(int32_t patchId);
void resetPortConfig(int32_t portConfigId);
+ void resetUnusedPatches();
+ void resetUnusedPatchesAndPortConfigs();
+ void resetUnusedPortConfigs();
+ status_t updateRoutes();
// CallbackBroker implementation
void clearCallbacks(void* cookie) override;
@@ -245,6 +273,9 @@
template<class C> sp<C> getCallbackImpl(void* cookie, wp<C> Callbacks::* field);
template<class C> void setCallbackImpl(void* cookie, wp<C> Callbacks::* field, const sp<C>& cb);
+ // MicrophoneInfoProvider implementation
+ MicrophoneInfoProvider::Info const* getMicrophoneInfo() override;
+
const std::string mInstance;
const std::shared_ptr<::aidl::android::hardware::audio::core::IModule> mModule;
std::shared_ptr<::aidl::android::hardware::audio::core::sounddose::ISoundDose>
@@ -254,6 +285,9 @@
int32_t mDefaultOutputPortId = -1;
PortConfigs mPortConfigs;
Patches mPatches;
+ RoutingMatrix mRoutingMatrix;
+ Streams mStreams;
+ Microphones mMicrophones;
std::mutex mLock;
std::map<void*, Callbacks> mCallbacks GUARDED_BY(mLock);
};
diff --git a/media/libaudiohal/impl/DeviceHalHidl.h b/media/libaudiohal/impl/DeviceHalHidl.h
index 30fbd6d..afaad51 100644
--- a/media/libaudiohal/impl/DeviceHalHidl.h
+++ b/media/libaudiohal/impl/DeviceHalHidl.h
@@ -126,6 +126,11 @@
status_t setConnectedState(const struct audio_port_v7 *port, bool connected) override;
+ status_t setSimulateDeviceConnections(bool enabled __unused) override {
+ // Only supported by AIDL HALs.
+ return INVALID_OPERATION;
+ }
+
error::Result<audio_hw_sync_t> getHwAvSync() override;
status_t dump(int fd, const Vector<String16>& args) override;
diff --git a/media/libaudiohal/impl/DevicesFactoryHalAidl.cpp b/media/libaudiohal/impl/DevicesFactoryHalAidl.cpp
index b452fa3..2eaaf5d 100644
--- a/media/libaudiohal/impl/DevicesFactoryHalAidl.cpp
+++ b/media/libaudiohal/impl/DevicesFactoryHalAidl.cpp
@@ -48,7 +48,7 @@
// however currently we still get the list of module names from the config.
// Since the example service does not have all modules, the SM will wait
// for the missing ones forever.
- if (strcmp(name, "primary") == 0 || strcmp(name, "r_submix") == 0) {
+ if (strcmp(name, "primary") == 0 || strcmp(name, "r_submix") == 0 || strcmp(name, "usb") == 0) {
if (strcmp(name, "primary") == 0) name = "default";
auto serviceName = std::string(IModule::descriptor) + "/" + name;
service = IModule::fromBinder(
diff --git a/media/libaudiohal/impl/EffectConversionHelperAidl.cpp b/media/libaudiohal/impl/EffectConversionHelperAidl.cpp
index dc47d67..5ab7c84 100644
--- a/media/libaudiohal/impl/EffectConversionHelperAidl.cpp
+++ b/media/libaudiohal/impl/EffectConversionHelperAidl.cpp
@@ -29,6 +29,7 @@
#include <utils/Log.h>
#include "EffectConversionHelperAidl.h"
+#include "EffectProxy.h"
namespace android {
namespace effect {
@@ -36,7 +37,10 @@
using ::aidl::android::aidl_utils::statusTFromBinderStatus;
using ::aidl::android::hardware::audio::effect::CommandId;
using ::aidl::android::hardware::audio::effect::Descriptor;
+using ::aidl::android::hardware::audio::effect::Flags;
+using ::aidl::android::hardware::audio::effect::IEffect;
using ::aidl::android::hardware::audio::effect::Parameter;
+using ::aidl::android::hardware::audio::effect::State;
using ::aidl::android::media::audio::common::AudioDeviceDescription;
using ::aidl::android::media::audio::common::AudioMode;
using ::aidl::android::media::audio::common::AudioSource;
@@ -60,15 +64,20 @@
{EFFECT_CMD_SET_INPUT_DEVICE, &EffectConversionHelperAidl::handleSetDevice},
{EFFECT_CMD_SET_VOLUME, &EffectConversionHelperAidl::handleSetVolume},
{EFFECT_CMD_OFFLOAD, &EffectConversionHelperAidl::handleSetOffload},
- {EFFECT_CMD_FIRST_PROPRIETARY, &EffectConversionHelperAidl::handleFirstPriority},
- // Only visualizer support these commands
+ // Only the visualizer supports these commands (reusing EFFECT_CMD_FIRST_PROPRIETARY)
{VISUALIZER_CMD_CAPTURE, &EffectConversionHelperAidl::handleVisualizerCapture},
{VISUALIZER_CMD_MEASURE, &EffectConversionHelperAidl::handleVisualizerMeasure}};
EffectConversionHelperAidl::EffectConversionHelperAidl(
std::shared_ptr<::aidl::android::hardware::audio::effect::IEffect> effect,
int32_t sessionId, int32_t ioId, const Descriptor& desc)
- : mSessionId(sessionId), mIoId(ioId), mDesc(desc), mEffect(std::move(effect)) {
+ : mSessionId(sessionId),
+ mIoId(ioId),
+ mDesc(desc),
+ mEffect(std::move(effect)),
+ mIsInputStream(mDesc.common.flags.type == Flags::Type::PRE_PROC),
+ mIsProxyEffect(mDesc.common.id.proxy.has_value() &&
+ mDesc.common.id.proxy.value() == mDesc.common.id.uuid) {
mCommon.session = sessionId;
mCommon.ioHandle = ioId;
mCommon.input = mCommon.output = kDefaultAudioConfig;
@@ -92,8 +101,8 @@
return BAD_VALUE;
}
- return *(status_t*)pReplyData =
- statusTFromBinderStatus(mEffect->open(mCommon, std::nullopt, &mOpenReturn));
+ // Do nothing for EFFECT_CMD_INIT; IEffect.open() is deferred until EFFECT_CMD_SET_CONFIG
+ return *(status_t*)pReplyData = OK;
}
status_t EffectConversionHelperAidl::handleSetParameter(uint32_t cmdSize, const void* pCmdData,
@@ -140,15 +149,64 @@
return ret;
}
-status_t EffectConversionHelperAidl::handleSetConfig(uint32_t cmdSize,
- const void* pCmdData __unused,
+status_t EffectConversionHelperAidl::handleSetConfig(uint32_t cmdSize, const void* pCmdData,
uint32_t* replySize, void* pReplyData) {
if (!replySize || *replySize != sizeof(int) || !pReplyData ||
cmdSize != sizeof(effect_config_t)) {
+ ALOGE("%s parameter invalid %u %p %p %p", __func__, cmdSize, pCmdData, replySize,
+ pReplyData);
return BAD_VALUE;
}
- // TODO: need to implement setConfig with setParameter(common)
+ effect_config_t* config = (effect_config_t*)pCmdData;
+ Parameter::Common common = {
+ .input =
+ VALUE_OR_RETURN_STATUS(::aidl::android::legacy2aidl_buffer_config_t_AudioConfig(
+ config->inputCfg, mIsInputStream)),
+ .output =
+ VALUE_OR_RETURN_STATUS(::aidl::android::legacy2aidl_buffer_config_t_AudioConfig(
+ config->outputCfg, mIsInputStream)),
+ .session = mCommon.session,
+ .ioHandle = mCommon.ioHandle};
+
+ State state;
+ RETURN_STATUS_IF_ERROR(statusTFromBinderStatus(mEffect->getState(&state)));
+ // if the buffer/ioHandle of an already opened effect is reconfigured, close it and re-open
+ if (state != State::INIT && mCommon != common) {
+ ALOGI("%s at state %s, closing effect", __func__,
+ android::internal::ToString(state).c_str());
+ RETURN_STATUS_IF_ERROR(statusTFromBinderStatus(mEffect->close()));
+ RETURN_STATUS_IF_ERROR(statusTFromBinderStatus(mEffect->getState(&state)));
+ mStatusQ.reset();
+ mInputQ.reset();
+ mOutputQ.reset();
+ }
+
+ if (state == State::INIT) {
+ ALOGI("%s at state %s, opening effect", __func__,
+ android::internal::ToString(state).c_str());
+ IEffect::OpenEffectReturn openReturn;
+ RETURN_STATUS_IF_ERROR(
+ statusTFromBinderStatus(mEffect->open(common, std::nullopt, &openReturn)));
+
+ if (mIsProxyEffect) {
+ const auto& ret =
+ std::static_pointer_cast<EffectProxy>(mEffect)->getEffectReturnParam();
+ mStatusQ = std::make_shared<StatusMQ>(ret->statusMQ);
+ mInputQ = std::make_shared<DataMQ>(ret->inputDataMQ);
+ mOutputQ = std::make_shared<DataMQ>(ret->outputDataMQ);
+ } else {
+ mStatusQ = std::make_shared<StatusMQ>(openReturn.statusMQ);
+ mInputQ = std::make_shared<DataMQ>(openReturn.inputDataMQ);
+ mOutputQ = std::make_shared<DataMQ>(openReturn.outputDataMQ);
+ }
+ mCommon = common;
+ } else if (mCommon != common) {
+ ALOGI("%s at state %s, setParameter", __func__, android::internal::ToString(state).c_str());
+ Parameter aidlParam = UNION_MAKE(Parameter, common, mCommon);
+ RETURN_STATUS_IF_ERROR(statusTFromBinderStatus(mEffect->setParameter(aidlParam)));
+ }
+
return *static_cast<int32_t*>(pReplyData) = OK;
}
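
A hedged, self-contained sketch of the reconfiguration decision implemented in handleSetConfig() above, with simple enums and a toy Config standing in for the AIDL State and Parameter::Common:

    #include <cstdint>

    enum class State { INIT, IDLE, PROCESSING };
    enum class Action { NONE, CLOSE_THEN_OPEN, OPEN, SET_PARAMETER };

    struct Config {
        int32_t sampleRate = 0;
        int32_t frameCount = 0;
        bool operator==(const Config& o) const {
            return sampleRate == o.sampleRate && frameCount == o.frameCount;
        }
    };

    // Mirrors the flow above: a live effect with a new config is closed and re-opened
    // (which also recreates the FMQs); an effect still in INIT is opened; otherwise a
    // changed config is pushed with setParameter(common).
    Action decideOnSetConfig(State state, const Config& current, const Config& requested) {
        if (state != State::INIT && !(current == requested)) return Action::CLOSE_THEN_OPEN;
        if (state == State::INIT) return Action::OPEN;
        if (!(current == requested)) return Action::SET_PARAMETER;
        return Action::NONE;
    }
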
@@ -167,11 +225,9 @@
const auto& common = param.get<Parameter::common>();
effect_config_t* pConfig = (effect_config_t*)pReplyData;
pConfig->inputCfg = VALUE_OR_RETURN_STATUS(
- ::aidl::android::aidl2legacy_AudioConfigBase_buffer_config_t(common.input.base, true));
- pConfig->outputCfg =
- VALUE_OR_RETURN_STATUS(::aidl::android::aidl2legacy_AudioConfigBase_buffer_config_t(
- common.output.base, false));
- mCommon = common;
+ ::aidl::android::aidl2legacy_AudioConfig_buffer_config_t(common.input, true));
+ pConfig->outputCfg = VALUE_OR_RETURN_STATUS(
+ ::aidl::android::aidl2legacy_AudioConfig_buffer_config_t(common.output, false));
return OK;
}
@@ -254,17 +310,17 @@
return *static_cast<int32_t*>(pReplyData) = OK;
}
status_t EffectConversionHelperAidl::handleSetVolume(uint32_t cmdSize, const void* pCmdData,
- uint32_t* replySize, void* pReplyData) {
- if (cmdSize != 2 * sizeof(uint32_t) || !pCmdData || !replySize || !pReplyData) {
- ALOGE("%s parameter invalid %u %p %p %p", __func__, cmdSize, pCmdData, replySize,
- pReplyData);
+ uint32_t* replySize __unused,
+ void* pReplyData __unused) {
+ if (cmdSize != 2 * sizeof(uint32_t) || !pCmdData) {
+ ALOGE("%s parameter invalid %u %p", __func__, cmdSize, pCmdData);
return BAD_VALUE;
}
Parameter::VolumeStereo volume = {.left = (float)(*(uint32_t*)pCmdData) / (1 << 24),
.right = (float)(*(uint32_t*)pCmdData + 1) / (1 << 24)};
RETURN_STATUS_IF_ERROR(statusTFromBinderStatus(
mEffect->setParameter(Parameter::make<Parameter::volumeStereo>(volume))));
- return *static_cast<int32_t*>(pReplyData) = OK;
+ return OK;
}
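
For reference, a small sketch of the fixed-point conversion used for the volume command: EFFECT_CMD_SET_VOLUME carries unsigned 8.24 values, and 1 << 24 corresponds to unity gain (the helper name is made up):

    #include <cstdint>

    // Converts an unsigned 8.24 fixed-point gain to float, where 1 << 24 maps to 1.0f.
    float q8_24ToFloat(uint32_t q8_24) {
        return static_cast<float>(q8_24) / static_cast<float>(1 << 24);
    }
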
status_t EffectConversionHelperAidl::handleSetOffload(uint32_t cmdSize, const void* pCmdData,
@@ -274,20 +330,21 @@
pReplyData);
return BAD_VALUE;
}
- // TODO: handle this after effectproxy implemented in libaudiohal
- return *static_cast<int32_t*>(pReplyData) = OK;
-}
-
-status_t EffectConversionHelperAidl::handleFirstPriority(uint32_t cmdSize __unused,
- const void* pCmdData __unused,
- uint32_t* replySize, void* pReplyData) {
- if (!replySize || !pReplyData) {
- ALOGE("%s parameter invalid %p %p", __func__, replySize, pReplyData);
- return BAD_VALUE;
+ effect_offload_param_t* offload = (effect_offload_param_t*)pCmdData;
+ // send to proxy to update active sub-effect
+ if (mIsProxyEffect) {
+ ALOGI("%s offload param offload %s ioHandle %d", __func__,
+ offload->isOffload ? "true" : "false", offload->ioHandle);
+ mCommon.ioHandle = offload->ioHandle;
+ RETURN_STATUS_IF_ERROR(statusTFromBinderStatus(
+ std::static_pointer_cast<EffectProxy>(mEffect)->setOffloadParam(offload)));
+ // update FMQs
+ const auto& ret = std::static_pointer_cast<EffectProxy>(mEffect)->getEffectReturnParam();
+ mStatusQ = std::make_shared<StatusMQ>(ret->statusMQ);
+ mInputQ = std::make_shared<DataMQ>(ret->inputDataMQ);
+ mOutputQ = std::make_shared<DataMQ>(ret->outputDataMQ);
}
-
- // TODO to be implemented
- return OK;
+ return *static_cast<int32_t*>(pReplyData) = OK;
}
status_t EffectConversionHelperAidl::handleVisualizerCapture(uint32_t cmdSize __unused,
diff --git a/media/libaudiohal/impl/EffectConversionHelperAidl.h b/media/libaudiohal/impl/EffectConversionHelperAidl.h
index f9a4e49..1200264 100644
--- a/media/libaudiohal/impl/EffectConversionHelperAidl.h
+++ b/media/libaudiohal/impl/EffectConversionHelperAidl.h
@@ -19,6 +19,7 @@
#include <utils/Errors.h>
#include <aidl/android/hardware/audio/effect/BpEffect.h>
+#include <fmq/AidlMessageQueue.h>
#include <system/audio_effect.h>
#include <system/audio_effects/audio_effects_utils.h>
@@ -30,17 +31,23 @@
status_t handleCommand(uint32_t cmdCode, uint32_t cmdSize, void* pCmdData, uint32_t* replySize,
void* pReplyData);
virtual ~EffectConversionHelperAidl() {}
- const ::aidl::android::hardware::audio::effect::IEffect::OpenEffectReturn&
- getEffectReturnParam() const {
- return mOpenReturn;
- }
+
+ using StatusMQ = ::android::AidlMessageQueue<
+ ::aidl::android::hardware::audio::effect::IEffect::Status,
+ ::aidl::android::hardware::common::fmq::SynchronizedReadWrite>;
+ using DataMQ = ::android::AidlMessageQueue<
+ float, ::aidl::android::hardware::common::fmq::SynchronizedReadWrite>;
+ std::shared_ptr<StatusMQ> getStatusMQ() { return mStatusQ; }
+ std::shared_ptr<DataMQ> getInputMQ() { return mInputQ; }
+ std::shared_ptr<DataMQ> getOutputMQ() { return mOutputQ; }
protected:
const int32_t mSessionId;
const int32_t mIoId;
const ::aidl::android::hardware::audio::effect::Descriptor mDesc;
const std::shared_ptr<::aidl::android::hardware::audio::effect::IEffect> mEffect;
- ::aidl::android::hardware::audio::effect::IEffect::OpenEffectReturn mOpenReturn;
+ // whether the effect is instantiated on an input stream
+ const bool mIsInputStream;
::aidl::android::hardware::audio::effect::Parameter::Common mCommon;
EffectConversionHelperAidl(
@@ -57,6 +64,7 @@
const aidl::android::media::audio::common::AudioFormatDescription kDefaultFormatDescription = {
.type = aidl::android::media::audio::common::AudioFormatType::PCM,
.pcm = aidl::android::media::audio::common::PcmType::FLOAT_32_BIT};
+ const bool mIsProxyEffect;
static constexpr int kDefaultframeCount = 0x100;
@@ -73,6 +81,9 @@
uint32_t* /* replySize */,
void* /* pReplyData */);
static const std::map<uint32_t /* effect_command_e */, CommandHandler> mCommandHandlerMap;
+ // data and status FMQ
+ std::shared_ptr<StatusMQ> mStatusQ = nullptr;
+ std::shared_ptr<DataMQ> mInputQ = nullptr, mOutputQ = nullptr;
status_t handleInit(uint32_t cmdSize, const void* pCmdData, uint32_t* replySize,
void* pReplyData);
@@ -96,8 +107,6 @@
void* pReplyData);
status_t handleSetOffload(uint32_t cmdSize, const void* pCmdData, uint32_t* replySize,
void* pReplyData);
- status_t handleFirstPriority(uint32_t cmdSize, const void* pCmdData, uint32_t* replySize,
- void* pReplyData);
status_t handleVisualizerCapture(uint32_t cmdSize, const void* pCmdData, uint32_t* replySize,
void* pReplyData);
status_t handleVisualizerMeasure(uint32_t cmdSize, const void* pCmdData, uint32_t* replySize,
diff --git a/media/libaudiohal/impl/EffectHalAidl.cpp b/media/libaudiohal/impl/EffectHalAidl.cpp
index a684dee..d6135af 100644
--- a/media/libaudiohal/impl/EffectHalAidl.cpp
+++ b/media/libaudiohal/impl/EffectHalAidl.cpp
@@ -24,17 +24,19 @@
#include <media/AidlConversionCppNdk.h>
#include <media/AidlConversionEffect.h>
#include <media/AidlConversionUtil.h>
-#include <media/audiohal/AudioEffectUuid.h>
#include <media/EffectsFactoryApi.h>
#include <mediautils/TimeCheck.h>
#include <system/audio.h>
+#include <system/audio_effects/effect_uuid.h>
#include <utils/Log.h>
#include "EffectHalAidl.h"
+#include "EffectProxy.h"
#include <aidl/android/hardware/audio/effect/IEffect.h>
#include "effectsAidlConversion/AidlConversionAec.h"
+#include "effectsAidlConversion/AidlConversionAgc1.h"
#include "effectsAidlConversion/AidlConversionAgc2.h"
#include "effectsAidlConversion/AidlConversionBassBoost.h"
#include "effectsAidlConversion/AidlConversionDownmix.h"
@@ -51,81 +53,94 @@
#include "effectsAidlConversion/AidlConversionVisualizer.h"
using ::aidl::android::aidl_utils::statusTFromBinderStatus;
-using ::aidl::android::hardware::audio::effect::CommandId;
using ::aidl::android::hardware::audio::effect::Descriptor;
using ::aidl::android::hardware::audio::effect::IEffect;
using ::aidl::android::hardware::audio::effect::IFactory;
-using ::aidl::android::hardware::audio::effect::Parameter;
namespace android {
namespace effect {
-EffectHalAidl::EffectHalAidl(
- const std::shared_ptr<::aidl::android::hardware::audio::effect::IFactory>& factory,
- const std::shared_ptr<::aidl::android::hardware::audio::effect::IEffect>& effect,
- uint64_t effectId, int32_t sessionId, int32_t ioId,
- const ::aidl::android::hardware::audio::effect::Descriptor& desc)
+EffectHalAidl::EffectHalAidl(const std::shared_ptr<IFactory>& factory,
+ const std::shared_ptr<IEffect>& effect, uint64_t effectId,
+ int32_t sessionId, int32_t ioId, const Descriptor& desc,
+ bool isProxyEffect)
: mFactory(factory),
mEffect(effect),
mEffectId(effectId),
mSessionId(sessionId),
mIoId(ioId),
- mDesc(desc) {
+ mDesc(desc),
+ mIsProxyEffect(isProxyEffect) {
createAidlConversion(effect, sessionId, ioId, desc);
}
EffectHalAidl::~EffectHalAidl() {
- if (mFactory) {
- mFactory->destroyEffect(mEffect);
+ if (mEffect) {
+ mIsProxyEffect ? std::static_pointer_cast<EffectProxy>(mEffect)->destroy()
+ : mFactory->destroyEffect(mEffect);
}
}
status_t EffectHalAidl::createAidlConversion(
- std::shared_ptr<::aidl::android::hardware::audio::effect::IEffect> effect,
+ std::shared_ptr<IEffect> effect,
int32_t sessionId, int32_t ioId,
- const ::aidl::android::hardware::audio::effect::Descriptor& desc) {
+ const Descriptor& desc) {
const auto& typeUuid = desc.common.id.type;
ALOGI("%s create UUID %s", __func__, typeUuid.toString().c_str());
- if (typeUuid == kAcousticEchoCancelerTypeUUID) {
+ if (typeUuid ==
+ ::aidl::android::hardware::audio::effect::getEffectTypeUuidAcousticEchoCanceler()) {
mConversion =
std::make_unique<android::effect::AidlConversionAec>(effect, sessionId, ioId, desc);
- } else if (typeUuid == kAutomaticGainControl2TypeUUID) {
+ } else if (typeUuid == ::aidl::android::hardware::audio::effect::
+ getEffectTypeUuidAutomaticGainControlV1()) {
+ mConversion = std::make_unique<android::effect::AidlConversionAgc1>(effect, sessionId, ioId,
+ desc);
+ } else if (typeUuid == ::aidl::android::hardware::audio::effect::
+ getEffectTypeUuidAutomaticGainControlV2()) {
mConversion = std::make_unique<android::effect::AidlConversionAgc2>(effect, sessionId, ioId,
desc);
- } else if (typeUuid == kBassBoostTypeUUID) {
+ } else if (typeUuid == ::aidl::android::hardware::audio::effect::getEffectTypeUuidBassBoost()) {
mConversion = std::make_unique<android::effect::AidlConversionBassBoost>(effect, sessionId,
ioId, desc);
- } else if (typeUuid == kDownmixTypeUUID) {
+ } else if (typeUuid == ::aidl::android::hardware::audio::effect::getEffectTypeUuidDownmix()) {
mConversion = std::make_unique<android::effect::AidlConversionDownmix>(effect, sessionId,
ioId, desc);
- } else if (typeUuid == kDynamicsProcessingTypeUUID) {
+ } else if (typeUuid ==
+ ::aidl::android::hardware::audio::effect::getEffectTypeUuidDynamicsProcessing()) {
mConversion =
std::make_unique<android::effect::AidlConversionDp>(effect, sessionId, ioId, desc);
- } else if (typeUuid == kEnvReverbTypeUUID) {
+ } else if (typeUuid == ::aidl::android::hardware::audio::effect::getEffectTypeUuidEnvReverb()) {
mConversion = std::make_unique<android::effect::AidlConversionEnvReverb>(effect, sessionId,
ioId, desc);
- } else if (typeUuid == kEqualizerTypeUUID) {
+ } else if (typeUuid == ::aidl::android::hardware::audio::effect::getEffectTypeUuidEqualizer()) {
mConversion =
std::make_unique<android::effect::AidlConversionEq>(effect, sessionId, ioId, desc);
- } else if (typeUuid == kHapticGeneratorTypeUUID) {
+ } else if (typeUuid ==
+ ::aidl::android::hardware::audio::effect::getEffectTypeUuidHapticGenerator()) {
mConversion = std::make_unique<android::effect::AidlConversionHapticGenerator>(
effect, sessionId, ioId, desc);
- } else if (typeUuid == kLoudnessEnhancerTypeUUID) {
+ } else if (typeUuid ==
+ ::aidl::android::hardware::audio::effect::getEffectTypeUuidLoudnessEnhancer()) {
mConversion = std::make_unique<android::effect::AidlConversionLoudnessEnhancer>(
effect, sessionId, ioId, desc);
- } else if (typeUuid == kNoiseSuppressionTypeUUID) {
+ } else if (typeUuid ==
+ ::aidl::android::hardware::audio::effect::getEffectTypeUuidNoiseSuppression()) {
mConversion = std::make_unique<android::effect::AidlConversionNoiseSuppression>(
effect, sessionId, ioId, desc);
- } else if (typeUuid == kPresetReverbTypeUUID) {
+ } else if (typeUuid ==
+ ::aidl::android::hardware::audio::effect::getEffectTypeUuidPresetReverb()) {
mConversion = std::make_unique<android::effect::AidlConversionPresetReverb>(
effect, sessionId, ioId, desc);
- } else if (typeUuid == kSpatializerTypeUUID) {
+ } else if (typeUuid ==
+ ::aidl::android::hardware::audio::effect::getEffectTypeUuidSpatializer()) {
mConversion = std::make_unique<android::effect::AidlConversionSpatializer>(
effect, sessionId, ioId, desc);
- } else if (typeUuid == kVirtualizerTypeUUID) {
+ } else if (typeUuid ==
+ ::aidl::android::hardware::audio::effect::getEffectTypeUuidVirtualizer()) {
mConversion = std::make_unique<android::effect::AidlConversionVirtualizer>(
effect, sessionId, ioId, desc);
- } else if (typeUuid == kVisualizerTypeUUID) {
+ } else if (typeUuid ==
+ ::aidl::android::hardware::audio::effect::getEffectTypeUuidVisualizer()) {
mConversion = std::make_unique<android::effect::AidlConversionVisualizer>(effect, sessionId,
ioId, desc);
} else {
@@ -149,34 +164,49 @@
// write to input FMQ here, wait for statusMQ STATUS_OK, and read from output FMQ
status_t EffectHalAidl::process() {
- size_t available = mInputQ->availableToWrite();
+ auto statusQ = mConversion->getStatusMQ();
+ auto inputQ = mConversion->getInputMQ();
+ auto outputQ = mConversion->getOutputMQ();
+ if (!statusQ || !statusQ->isValid() || !inputQ || !inputQ->isValid() || !outputQ ||
+ !outputQ->isValid()) {
+ ALOGE("%s invalid FMQ [Status %d I %d O %d]", __func__, statusQ ? statusQ->isValid() : 0,
+ inputQ ? inputQ->isValid() : 0, outputQ ? outputQ->isValid() : 0);
+ return INVALID_OPERATION;
+ }
+
+ size_t available = inputQ->availableToWrite();
size_t floatsToWrite = std::min(available, mInBuffer->getSize() / sizeof(float));
if (floatsToWrite == 0) {
- ALOGW("%s not able to write, floats in buffer %zu, space in FMQ %zu", __func__,
+ ALOGE("%s not able to write, floats in buffer %zu, space in FMQ %zu", __func__,
mInBuffer->getSize() / sizeof(float), available);
return INVALID_OPERATION;
}
- if (!mInputQ->write((float*)mInBuffer->ptr(), floatsToWrite)) {
- ALOGW("%s failed to write %zu into inputQ", __func__, floatsToWrite);
+ if (!mInBuffer->audioBuffer() ||
+ !inputQ->write((float*)mInBuffer->audioBuffer()->f32, floatsToWrite)) {
+ ALOGE("%s failed to write %zu floats from audiobuffer %p to inputQ [avail %zu]", __func__,
+ floatsToWrite, mInBuffer->audioBuffer(), inputQ->availableToWrite());
return INVALID_OPERATION;
}
IEffect::Status retStatus{};
- if (!mStatusQ->readBlocking(&retStatus, 1) || retStatus.status != OK ||
+ if (!statusQ->readBlocking(&retStatus, 1) || retStatus.status != OK ||
(size_t)retStatus.fmqConsumed != floatsToWrite || retStatus.fmqProduced == 0) {
- ALOGW("%s read status failed: %s", __func__, retStatus.toString().c_str());
+ ALOGE("%s read status failed: %s", __func__, retStatus.toString().c_str());
return INVALID_OPERATION;
}
- available = mOutputQ->availableToRead();
+ available = outputQ->availableToRead();
size_t floatsToRead = std::min(available, mOutBuffer->getSize() / sizeof(float));
if (floatsToRead == 0) {
- ALOGW("%s not able to read, buffer space %zu, floats in FMQ %zu", __func__,
+ ALOGE("%s not able to read, buffer space %zu, floats in FMQ %zu", __func__,
mOutBuffer->getSize() / sizeof(float), available);
return INVALID_OPERATION;
}
- if (!mOutputQ->read((float*)mOutBuffer->ptr(), floatsToRead)) {
- ALOGW("%s failed to read %zu from outputQ", __func__, floatsToRead);
+ // always read floating point data for AIDL
+ if (!mOutBuffer->audioBuffer() ||
+ !outputQ->read(mOutBuffer->audioBuffer()->f32, floatsToRead)) {
+ ALOGE("%s failed to read %zu from outputQ to audioBuffer %p", __func__, floatsToRead,
+ mOutBuffer->audioBuffer());
return INVALID_OPERATION;
}
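
A condensed, standalone sketch of the FMQ round trip that process() performs above; the queue types are template parameters so this compiles without the FMQ headers, and Status only mirrors the fields checked here:

    #include <algorithm>
    #include <cstddef>

    struct Status {       // mirrors the IEffect::Status fields checked by process()
        int status;       // 0 == OK
        int fmqConsumed;
        int fmqProduced;
    };

    // Writes 'frames' floats into the input queue, waits for the effect to report
    // completion on the status queue, then reads the produced floats back out.
    template <typename StatusQ, typename DataQ>
    bool processOnce(StatusQ& statusQ, DataQ& inputQ, DataQ& outputQ,
                     const float* in, float* out, size_t frames) {
        if (!inputQ.write(in, frames)) return false;

        Status ret{};
        if (!statusQ.readBlocking(&ret, 1) || ret.status != 0 ||
            static_cast<size_t>(ret.fmqConsumed) != frames || ret.fmqProduced == 0) {
            return false;
        }
        const size_t toRead = std::min(static_cast<size_t>(ret.fmqProduced), frames);
        return outputQ.read(out, toRead);
    }
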
@@ -199,20 +229,7 @@
return INVALID_OPERATION;
}
- status_t ret = mConversion->handleCommand(cmdCode, cmdSize, pCmdData, replySize, pReplyData);
- // update FMQs when effect open successfully
- if (ret == OK && cmdCode == EFFECT_CMD_INIT) {
- const auto& retParam = mConversion->getEffectReturnParam();
- mStatusQ = std::make_unique<StatusMQ>(retParam.statusMQ);
- mInputQ = std::make_unique<DataMQ>(retParam.inputDataMQ);
- mOutputQ = std::make_unique<DataMQ>(retParam.outputDataMQ);
- if (!mStatusQ->isValid() || !mInputQ->isValid() || !mOutputQ->isValid()) {
- ALOGE("%s return with invalid FMQ", __func__);
- return NO_INIT;
- }
- }
-
- return ret;
+ return mConversion->handleCommand(cmdCode, cmdSize, pCmdData, replySize, pReplyData);
}
status_t EffectHalAidl::getDescriptor(effect_descriptor_t* pDescriptor) {
diff --git a/media/libaudiohal/impl/EffectHalAidl.h b/media/libaudiohal/impl/EffectHalAidl.h
index 194150d..8966363 100644
--- a/media/libaudiohal/impl/EffectHalAidl.h
+++ b/media/libaudiohal/impl/EffectHalAidl.h
@@ -31,11 +31,6 @@
class EffectHalAidl : public EffectHalInterface {
public:
- using StatusMQ = ::android::AidlMessageQueue<
- ::aidl::android::hardware::audio::effect::IEffect::Status,
- ::aidl::android::hardware::common::fmq::SynchronizedReadWrite>;
- using DataMQ = ::android::AidlMessageQueue<
- float, ::aidl::android::hardware::common::fmq::SynchronizedReadWrite>;
// Set the input buffer.
status_t setInBuffer(const sp<EffectBufferHalInterface>& buffer) override;
@@ -83,12 +78,11 @@
const int32_t mSessionId;
const int32_t mIoId;
const ::aidl::android::hardware::audio::effect::Descriptor mDesc;
+ const bool mIsProxyEffect;
+
std::unique_ptr<EffectConversionHelperAidl> mConversion;
- std::unique_ptr<StatusMQ> mStatusQ;
- std::unique_ptr<DataMQ> mInputQ, mOutputQ;
sp<EffectBufferHalInterface> mInBuffer, mOutBuffer;
- effect_config_t mConfig;
status_t createAidlConversion(
std::shared_ptr<::aidl::android::hardware::audio::effect::IEffect> effect,
@@ -99,8 +93,10 @@
const std::shared_ptr<::aidl::android::hardware::audio::effect::IFactory>& factory,
const std::shared_ptr<::aidl::android::hardware::audio::effect::IEffect>& effect,
uint64_t effectId, int32_t sessionId, int32_t ioId,
- const ::aidl::android::hardware::audio::effect::Descriptor& desc);
+ const ::aidl::android::hardware::audio::effect::Descriptor& desc,
+ bool isProxyEffect);
bool setEffectReverse(bool reverse);
+ bool needUpdateReturnParam(uint32_t cmdCode);
// The destructor automatically releases the effect.
virtual ~EffectHalAidl();
diff --git a/media/libaudiohal/impl/EffectProxy.cpp b/media/libaudiohal/impl/EffectProxy.cpp
new file mode 100644
index 0000000..b61532d
--- /dev/null
+++ b/media/libaudiohal/impl/EffectProxy.cpp
@@ -0,0 +1,291 @@
+/*
+ * Copyright (C) 2023 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <algorithm>
+#include <memory>
+#define LOG_TAG "EffectProxy"
+//#define LOG_NDEBUG 0
+
+#include <fmq/AidlMessageQueue.h>
+#include <utils/Log.h>
+
+#include "EffectProxy.h"
+
+using ::aidl::android::hardware::audio::effect::CommandId;
+using ::aidl::android::hardware::audio::effect::Descriptor;
+using ::aidl::android::hardware::audio::effect::Flags;
+using ::aidl::android::hardware::audio::effect::IEffect;
+using ::aidl::android::hardware::audio::effect::IFactory;
+using ::aidl::android::hardware::audio::effect::Parameter;
+using ::aidl::android::hardware::audio::effect::State;
+using ::aidl::android::media::audio::common::AudioUuid;
+
+namespace android {
+namespace effect {
+
+EffectProxy::EffectProxy(const Descriptor::Identity& id, const std::shared_ptr<IFactory>& factory)
+ : mIdentity([](const Descriptor::Identity& subId) {
+ // update EffectProxy implementation UUID to the sub-effect proxy UUID
+ ALOG_ASSERT(subId.proxy.has_value(), "Sub-effect Identity must have valid proxy UUID");
+ Descriptor::Identity tempId = subId;
+ tempId.uuid = subId.proxy.value();
+ return tempId;
+ }(id)),
+ mFactory(factory) {}
+
+EffectProxy::~EffectProxy() {
+ close();
+ destroy();
+ mSubEffects.clear();
+}
+
+// A sub-effect must have the same proxy UUID as the EffectProxy, and its type UUID must match.
+ndk::ScopedAStatus EffectProxy::addSubEffect(const Descriptor& sub) {
+ ALOGV("%s: %s", __func__, mIdentity.type.toString().c_str());
+ if (0 != mSubEffects.count(sub.common.id) || !sub.common.id.proxy.has_value() ||
+ sub.common.id.proxy.value() != mIdentity.uuid) {
+ ALOGE("%s sub effect already exist or mismatch %s", __func__, sub.toString().c_str());
+ return ndk::ScopedAStatus::fromExceptionCodeWithMessage(EX_ILLEGAL_ARGUMENT,
+ "illegalSubEffect");
+ }
+
+ // the sub-effect is not created yet
+ std::get<SubEffectTupleIndex::HANDLE>(mSubEffects[sub.common.id]) = nullptr;
+ std::get<SubEffectTupleIndex::DESCRIPTOR>(mSubEffects[sub.common.id]) = sub;
+ // set the last added sub-effect to active before setOffloadParam()
+ mActiveSub = sub.common.id;
+ ALOGI("%s add %s to proxy %s flag %s", __func__, mActiveSub.toString().c_str(),
+ mIdentity.toString().c_str(), sub.common.flags.toString().c_str());
+
+ if (sub.common.flags.hwAcceleratorMode == Flags::HardwareAccelerator::TUNNEL) {
+ mSubFlags.hwAcceleratorMode = Flags::HardwareAccelerator::TUNNEL;
+ }
+
+ // initial flag values before we know which sub-effect to activate (via setOffloadParam)
+ // same as HIDL EffectProxy flags
+ mSubFlags.type = Flags::Type::INSERT;
+ mSubFlags.insert = Flags::Insert::LAST;
+ mSubFlags.volume = Flags::Volume::CTRL;
+
+ // set indication if any sub-effect indication was set
+ mSubFlags.offloadIndication |= sub.common.flags.offloadIndication;
+ mSubFlags.deviceIndication |= sub.common.flags.deviceIndication;
+ mSubFlags.audioModeIndication |= sub.common.flags.audioModeIndication;
+ mSubFlags.audioSourceIndication |= sub.common.flags.audioSourceIndication;
+
+ // set bypass when all sub-effects are bypassing
+ mSubFlags.bypass &= sub.common.flags.bypass;
+ return ndk::ScopedAStatus::ok();
+}
+
+ndk::ScopedAStatus EffectProxy::create() {
+ ALOGV("%s: %s", __func__, mIdentity.type.toString().c_str());
+ ndk::ScopedAStatus status = ndk::ScopedAStatus::ok();
+
+ for (auto& sub : mSubEffects) {
+ auto& effectHandle = std::get<SubEffectTupleIndex::HANDLE>(sub.second);
+ ALOGI("%s sub-effect %s", __func__, sub.first.uuid.toString().c_str());
+ status = mFactory->createEffect(sub.first.uuid, &effectHandle);
+ if (!status.isOk() || !effectHandle) {
+ ALOGE("%s sub-effect failed %s", __func__, sub.first.uuid.toString().c_str());
+ break;
+ }
+ }
+
+ // destroy all created effects on failure
+ if (!status.isOk()) {
+ destroy();
+ }
+ return status;
+}
+
+ndk::ScopedAStatus EffectProxy::destroy() {
+ ALOGV("%s: %s", __func__, mIdentity.type.toString().c_str());
+ return runWithAllSubEffects([&](std::shared_ptr<IEffect>& effect) {
+ ndk::ScopedAStatus status = mFactory->destroyEffect(effect);
+ if (status.isOk()) {
+ effect.reset();
+ }
+ return status;
+ });
+}
+
+const IEffect::OpenEffectReturn* EffectProxy::getEffectReturnParam() {
+ return &std::get<SubEffectTupleIndex::RETURN>(mSubEffects[mActiveSub]);
+}
+
+ndk::ScopedAStatus EffectProxy::setOffloadParam(const effect_offload_param_t* offload) {
+ const auto& itor = std::find_if(mSubEffects.begin(), mSubEffects.end(), [&](const auto& sub) {
+ const auto& desc = std::get<SubEffectTupleIndex::DESCRIPTOR>(sub.second);
+ ALOGI("%s: isOffload %d sub-effect: %s, flags %s", __func__, offload->isOffload,
+ desc.common.id.uuid.toString().c_str(), desc.common.flags.toString().c_str());
+ return offload->isOffload ==
+ (desc.common.flags.hwAcceleratorMode == Flags::HardwareAccelerator::TUNNEL);
+ });
+ if (itor == mSubEffects.end()) {
+ ALOGE("%s no %soffload sub-effect found", __func__, offload->isOffload ? "" : "non-");
+ return ndk::ScopedAStatus::fromExceptionCodeWithMessage(EX_NULL_POINTER,
+ "noActiveEffctFound");
+ }
+
+ mActiveSub = itor->first;
+ ALOGI("%s: active %soffload sub-effect: %s, flags %s", __func__,
+ offload->isOffload ? "" : "non-", mActiveSub.uuid.toString().c_str(),
+ std::get<SubEffectTupleIndex::DESCRIPTOR>(itor->second).common.flags.toString().c_str());
+ return ndk::ScopedAStatus::ok();
+}
+
+// EffectProxy iterates over the sub-effects and calls their IEffect interfaces
+ndk::ScopedAStatus EffectProxy::open(const Parameter::Common& common,
+ const std::optional<Parameter::Specific>& specific,
+ IEffect::OpenEffectReturn* ret __unused) {
+ ALOGV("%s: %s", __func__, mIdentity.type.toString().c_str());
+ ndk::ScopedAStatus status = ndk::ScopedAStatus::fromExceptionCodeWithMessage(
+ EX_ILLEGAL_ARGUMENT, "nullEffectHandle");
+ for (auto& sub : mSubEffects) {
+ auto& effect = std::get<SubEffectTupleIndex::HANDLE>(sub.second);
+ auto& openRet = std::get<SubEffectTupleIndex::RETURN>(sub.second);
+ if (!effect || !(status = effect->open(common, specific, &openRet)).isOk()) {
+ ALOGE("%s: failed to open UUID %s", __func__, sub.first.uuid.toString().c_str());
+ break;
+ }
+ }
+
+ // close all opened effects on failure
+ if (!status.isOk()) {
+ close();
+ }
+
+ return status;
+}
+
+ndk::ScopedAStatus EffectProxy::close() {
+ ALOGV("%s: %s", __func__, mIdentity.type.toString().c_str());
+ return runWithAllSubEffects([&](std::shared_ptr<IEffect>& effect) {
+ return effect->close();
+ });
+}
+
+ndk::ScopedAStatus EffectProxy::getDescriptor(Descriptor* desc) {
+ if (!desc) {
+ ALOGE("%s: nuull descriptor pointer", __func__);
+ return ndk::ScopedAStatus::fromExceptionCodeWithMessage(EX_NULL_POINTER, "nullptr");
+ }
+
+ auto& activeSubEffect = std::get<SubEffectTupleIndex::HANDLE>(mSubEffects[mActiveSub]);
+ // return the initial descriptor if no active sub-effect exists
+ if (!activeSubEffect) {
+ desc->common.id = mIdentity;
+ desc->common.flags = mSubFlags;
+ desc->common.name = "Proxy";
+ desc->common.implementor = "AOSP";
+ } else {
+ *desc = std::get<SubEffectTupleIndex::DESCRIPTOR>(mSubEffects[mActiveSub]);
+ desc->common.id = mIdentity;
+ }
+
+ ALOGI("%s with %s", __func__, desc->toString().c_str());
+ return ndk::ScopedAStatus::ok();
+}
+
+// Handle the active sub-effect first; only send to the other sub-effects on success
+ndk::ScopedAStatus EffectProxy::command(CommandId id) {
+ ALOGV("%s: %s, command %s", __func__, mIdentity.type.toString().c_str(),
+ android::internal::ToString(id).c_str());
+ return runWithActiveSubEffectThenOthers(
+ [&](const std::shared_ptr<IEffect>& effect) -> ndk::ScopedAStatus {
+ return effect->command(id);
+ });
+}
+
+// Return the active sub-effect state
+ndk::ScopedAStatus EffectProxy::getState(State* state) {
+ return runWithActiveSubEffect(
+ [&](const std::shared_ptr<IEffect>& effect) -> ndk::ScopedAStatus {
+ return effect->getState(state);
+ });
+}
+
+// Handle the active sub-effect first; only send to the other sub-effects on success
+ndk::ScopedAStatus EffectProxy::setParameter(const Parameter& param) {
+ return runWithActiveSubEffectThenOthers(
+ [&](const std::shared_ptr<IEffect>& effect) -> ndk::ScopedAStatus {
+ return effect->setParameter(param);
+ });
+}
+
+// Return the active sub-effect parameter
+ndk::ScopedAStatus EffectProxy::getParameter(const Parameter::Id& id, Parameter* param) {
+ return runWithActiveSubEffect(
+ [&](const std::shared_ptr<IEffect>& effect) -> ndk::ScopedAStatus {
+ return effect->getParameter(id, param);
+ });
+}
+
+ndk::ScopedAStatus EffectProxy::runWithActiveSubEffectThenOthers(
+ std::function<ndk::ScopedAStatus(const std::shared_ptr<IEffect>&)> const& func) {
+ ndk::ScopedAStatus status = runWithActiveSubEffect(func);
+ if (!status.isOk()) {
+ return status;
+ }
+
+ // proceed with the others only if the active sub-effect succeeded
+ for (const auto& sub : mSubEffects) {
+ auto& effect = std::get<SubEffectTupleIndex::HANDLE>(sub.second);
+ if (sub.first != mActiveSub) {
+ if (!effect) {
+ ALOGE("%s null sub-effect interface for %s", __func__,
+ sub.first.toString().c_str());
+ continue;
+ }
+ func(effect);
+ }
+ }
+ return status;
+}
+
+ndk::ScopedAStatus EffectProxy::runWithActiveSubEffect(
+ std::function<ndk::ScopedAStatus(const std::shared_ptr<IEffect>&)> const& func) {
+ auto& effect = std::get<SubEffectTupleIndex::HANDLE>(mSubEffects[mActiveSub]);
+ if (!effect) {
+ ALOGE("%s null active sub-effect interface, active %s", __func__,
+ mActiveSub.toString().c_str());
+ return ndk::ScopedAStatus::fromExceptionCodeWithMessage(EX_NULL_POINTER,
+ "activeSubEffectNull");
+ }
+ return func(effect);
+}
+
+ndk::ScopedAStatus EffectProxy::runWithAllSubEffects(
+ std::function<ndk::ScopedAStatus(std::shared_ptr<IEffect>&)> const& func) {
+ ndk::ScopedAStatus status = ndk::ScopedAStatus::ok();
+ // run on every sub-effect; remember the last failure status but keep going
+ for (auto& sub : mSubEffects) {
+ auto& effect = std::get<SubEffectTupleIndex::HANDLE>(sub.second);
+ if (!effect) {
+ ALOGW("%s null sub-effect interface for %s", __func__, sub.first.toString().c_str());
+ continue;
+ }
+ ndk::ScopedAStatus temp = func(effect);
+ if (!temp.isOk()) {
+ status = ndk::ScopedAStatus::fromStatus(temp.getStatus());
+ }
+ }
+ return status;
+}
+
+} // namespace effect
+} // namespace android
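
A minimal standalone sketch of the "active sub-effect first, then the rest" dispatch used by command() and setParameter() above, with plain std types standing in for the AIDL handles:

    #include <cstdint>
    #include <functional>
    #include <map>
    #include <memory>

    struct SubEffect {};  // stand-in for the IEffect handle
    using SubId = int32_t;

    // Applies 'func' to the active sub-effect first; only if that succeeds is the same
    // call fanned out to the remaining sub-effects (whose results are ignored).
    bool runActiveThenOthers(std::map<SubId, std::shared_ptr<SubEffect>>& subs, SubId active,
                             const std::function<bool(SubEffect&)>& func) {
        auto it = subs.find(active);
        if (it == subs.end() || !it->second || !func(*it->second)) return false;
        for (auto& [id, sub] : subs) {
            if (id != active && sub) func(*sub);
        }
        return true;
    }
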
diff --git a/media/libaudiohal/impl/EffectProxy.h b/media/libaudiohal/impl/EffectProxy.h
new file mode 100644
index 0000000..ffb8a19
--- /dev/null
+++ b/media/libaudiohal/impl/EffectProxy.h
@@ -0,0 +1,130 @@
+/*
+ * Copyright (C) 2023 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#pragma once
+
+#include <functional>
+#include <map>
+#include <memory>
+
+#include <aidl/android/hardware/audio/effect/BnEffect.h>
+#include <aidl/android/hardware/audio/effect/BnFactory.h>
+#include <fmq/AidlMessageQueue.h>
+#include <system/audio_effect.h>
+
+namespace android {
+namespace effect {
+
+/**
+ * EffectProxy is the proxy for one or more effect AIDL implementations (sub-effects) of the same
+ * type. The audio framework uses EffectProxy as a composite implementation of all sub-effect
+ * implementations.
+ *
+ * At any given time, only one active sub-effect consumes and produces data for each proxy. All
+ * setter commands and parameters (except the legacy EFFECT_CMD_OFFLOAD, which is handled by the
+ * audio framework directly) are passed through to all sub-effects; getter commands and parameters
+ * are passed through only to the active sub-effect.
+ *
+ */
+class EffectProxy final : public ::aidl::android::hardware::audio::effect::BnEffect {
+ public:
+ EffectProxy(const ::aidl::android::hardware::audio::effect::Descriptor::Identity& id,
+ const std::shared_ptr<::aidl::android::hardware::audio::effect::IFactory>& factory);
+
+ /**
+ * Add a sub-effect to the proxy. The descriptor of the candidate sub-effect must have the same
+ * proxy UUID as the proxy's UUID (mIdentity.uuid).
+ */
+ ndk::ScopedAStatus addSubEffect(
+ const ::aidl::android::hardware::audio::effect::Descriptor& sub);
+
+ /**
+ * Create all sub-effects via the AIDL IFactory. Call create() only after all sub-effects have
+ * been added successfully with addSubEffect.
+ */
+ ndk::ScopedAStatus create();
+
+ /**
+ * Destroy all sub-effects via the AIDL IFactory.
+ */
+ ndk::ScopedAStatus destroy();
+
+ /**
+ * Handle offload parameter setting from framework.
+ */
+ ndk::ScopedAStatus setOffloadParam(const effect_offload_param_t* offload);
+
+ /**
+ * Get a pointer to the active sub-effect's open return parameters.
+ * Always use this interface to get the effect open return parameters (FMQs) after a
+ * successful setOffloadParam() call.
+ */
+ const ::aidl::android::hardware::audio::effect::IEffect::OpenEffectReturn*
+ getEffectReturnParam();
+
+ // IEffect interfaces override
+ ndk::ScopedAStatus open(
+ const ::aidl::android::hardware::audio::effect::Parameter::Common& common,
+ const std::optional<::aidl::android::hardware::audio::effect::Parameter::Specific>&
+ specific,
+ ::aidl::android::hardware::audio::effect::IEffect::OpenEffectReturn* ret) override;
+ ndk::ScopedAStatus close() override;
+ ndk::ScopedAStatus getDescriptor(
+ ::aidl::android::hardware::audio::effect::Descriptor* desc) override;
+ ndk::ScopedAStatus command(::aidl::android::hardware::audio::effect::CommandId id) override;
+ ndk::ScopedAStatus getState(::aidl::android::hardware::audio::effect::State* state) override;
+ ndk::ScopedAStatus setParameter(
+ const ::aidl::android::hardware::audio::effect::Parameter& param) override;
+ ndk::ScopedAStatus getParameter(
+ const ::aidl::android::hardware::audio::effect::Parameter::Id& id,
+ ::aidl::android::hardware::audio::effect::Parameter* param) override;
+
+ private:
+ // Proxy identity, copied from one sub-effect, with the implementation UUID replaced by the proxy UUID
+ const ::aidl::android::hardware::audio::effect::Descriptor::Identity mIdentity;
+ const std::shared_ptr<::aidl::android::hardware::audio::effect::IFactory> mFactory;
+
+ // A map from sub-effect identity to its IEffect handle, descriptor, and open return (FMQs)
+ enum SubEffectTupleIndex { HANDLE, DESCRIPTOR, RETURN };
+ using EffectProxySub =
+ std::tuple<std::shared_ptr<::aidl::android::hardware::audio::effect::IEffect>,
+ ::aidl::android::hardware::audio::effect::Descriptor,
+ ::aidl::android::hardware::audio::effect::IEffect::OpenEffectReturn>;
+ std::map<const ::aidl::android::hardware::audio::effect::Descriptor::Identity, EffectProxySub>
+ mSubEffects;
+
+ // Identity of the only active sub-effect in the mSubEffects map
+ ::aidl::android::hardware::audio::effect::Descriptor::Identity mActiveSub;
+
+ // combined flags of all sub-effects
+ ::aidl::android::hardware::audio::effect::Flags mSubFlags;
+
+ ndk::ScopedAStatus runWithActiveSubEffectThenOthers(
+ std::function<ndk::ScopedAStatus(
+ const std::shared_ptr<
+ ::aidl::android::hardware::audio::effect::IEffect>&)> const& func);
+
+ ndk::ScopedAStatus runWithActiveSubEffect(
+ std::function<ndk::ScopedAStatus(
+ const std::shared_ptr<
+ ::aidl::android::hardware::audio::effect::IEffect>&)> const& func);
+
+ ndk::ScopedAStatus runWithAllSubEffects(
+ std::function<ndk::ScopedAStatus(std::shared_ptr<IEffect>&)> const& func);
+
+ // close and release all sub-effects
+ ~EffectProxy();
+};
+
+} // namespace effect
+} // namespace android
diff --git a/media/libaudiohal/impl/EffectsFactoryHalAidl.cpp b/media/libaudiohal/impl/EffectsFactoryHalAidl.cpp
index b418b6c..bc05aa0 100644
--- a/media/libaudiohal/impl/EffectsFactoryHalAidl.cpp
+++ b/media/libaudiohal/impl/EffectsFactoryHalAidl.cpp
@@ -15,7 +15,9 @@
*/
#include <algorithm>
+#include <cstddef>
#include <cstdint>
+#include <iterator>
#include <memory>
#define LOG_TAG "EffectsFactoryHalAidl"
//#define LOG_NDEBUG 0
@@ -29,10 +31,12 @@
#include "EffectBufferHalAidl.h"
#include "EffectHalAidl.h"
+#include "EffectProxy.h"
#include "EffectsFactoryHalAidl.h"
using ::aidl::android::legacy2aidl_audio_uuid_t_AudioUuid;
using aidl::android::aidl_utils::statusTFromBinderStatus;
+using aidl::android::hardware::audio::effect::Descriptor;
using aidl::android::hardware::audio::effect::IFactory;
using aidl::android::media::audio::common::AudioUuid;
using android::detail::AudioHalVersionInfo;
@@ -42,12 +46,56 @@
EffectsFactoryHalAidl::EffectsFactoryHalAidl(std::shared_ptr<IFactory> effectsFactory)
: mFactory(effectsFactory),
- mHalVersion(AudioHalVersionInfo(AudioHalVersionInfo::Type::AIDL, [this]() {
- int32_t majorVersion = 0;
- return (mFactory && mFactory->getInterfaceVersion(&majorVersion).isOk()) ? majorVersion
- : 0;
- }())) {
- ALOG_ASSERT(effectsFactory != nullptr, "Provided IEffectsFactory service is NULL");
+ mHalVersion(AudioHalVersionInfo(
+ AudioHalVersionInfo::Type::AIDL,
+ [this]() {
+ int32_t majorVersion = 0;
+ return (mFactory && mFactory->getInterfaceVersion(&majorVersion).isOk())
+ ? majorVersion
+ : 0;
+ }())),
+ mHalDescList([this]() {
+ std::vector<Descriptor> list;
+ if (mFactory) {
+ mFactory->queryEffects(std::nullopt, std::nullopt, std::nullopt, &list).isOk();
+ }
+ return list;
+ }()),
+ mUuidProxyMap([this]() {
+ std::map<AudioUuid, std::shared_ptr<EffectProxy>> proxyMap;
+ for (const auto& desc : mHalDescList) {
+ // create EffectProxy
+ if (desc.common.id.proxy.has_value()) {
+ const auto& uuid = desc.common.id.proxy.value();
+ if (0 == proxyMap.count(uuid)) {
+ proxyMap.insert({uuid, ndk::SharedRefBase::make<EffectProxy>(desc.common.id,
+ mFactory)});
+ }
+ proxyMap[uuid]->addSubEffect(desc);
+ ALOGI("%s addSubEffect %s", __func__, desc.common.toString().c_str());
+ }
+ }
+ return proxyMap;
+ }()),
+ mProxyDescList([this]() {
+ std::vector<Descriptor> list;
+ for (const auto& proxy : mUuidProxyMap) {
+ if (Descriptor desc; proxy.second && proxy.second->getDescriptor(&desc).isOk()) {
+ list.emplace_back(std::move(desc));
+ }
+ }
+ return list;
+ }()),
+ mNonProxyDescList([this]() {
+ std::vector<Descriptor> list;
+ std::copy_if(mHalDescList.begin(), mHalDescList.end(), std::back_inserter(list),
+ [](const Descriptor& desc) { return !desc.common.id.proxy.has_value(); });
+ return list;
+ }()),
+ mEffectCount(mNonProxyDescList.size() + mProxyDescList.size()) {
+ ALOG_ASSERT(mFactory != nullptr, "Provided IEffectsFactory service is NULL");
+ ALOGI("%s with %zu nonProxyEffects and %zu proxyEffects", __func__, mNonProxyDescList.size(),
+ mProxyDescList.size());
}
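
For illustration, a standalone sketch of the proxy/non-proxy partition that the new constructor performs over the queried descriptor list; a plain struct stands in for the AIDL Descriptor:

    #include <algorithm>
    #include <iterator>
    #include <optional>
    #include <string>
    #include <utility>
    #include <vector>

    // Stand-in for the AIDL Descriptor: only the proxy UUID matters for the split.
    struct Desc {
        std::string uuid;
        std::optional<std::string> proxyUuid;
    };

    // Splits the queried HAL descriptor list into proxy members and standalone effects,
    // mirroring how mNonProxyDescList is derived above.
    std::pair<std::vector<Desc>, std::vector<Desc>> partitionByProxy(const std::vector<Desc>& all) {
        std::vector<Desc> proxyMembers, nonProxy;
        std::partition_copy(all.begin(), all.end(), std::back_inserter(proxyMembers),
                            std::back_inserter(nonProxy),
                            [](const Desc& d) { return d.proxyUuid.has_value(); });
        return {proxyMembers, nonProxy};
    }
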
status_t EffectsFactoryHalAidl::queryNumberEffects(uint32_t *pNumEffects) {
@@ -55,11 +103,7 @@
return BAD_VALUE;
}
- {
- std::lock_guard lg(mLock);
- RETURN_STATUS_IF_ERROR(queryEffectList_l());
- *pNumEffects = mDescList->size();
- }
+ *pNumEffects = mEffectCount;
ALOGI("%s %d", __func__, *pNumEffects);
return OK;
}
@@ -69,40 +113,43 @@
return BAD_VALUE;
}
- std::lock_guard lg(mLock);
- RETURN_STATUS_IF_ERROR(queryEffectList_l());
-
- auto listSize = mDescList->size();
- if (index >= listSize) {
- ALOGE("%s index %d exceed size DescList %zd", __func__, index, listSize);
+ if (index >= mEffectCount) {
+ ALOGE("%s index %d exceed max number %zu", __func__, index, mEffectCount);
return INVALID_OPERATION;
}
- *pDescriptor = VALUE_OR_RETURN_STATUS(
- ::aidl::android::aidl2legacy_Descriptor_effect_descriptor(mDescList->at(index)));
+ if (index >= mNonProxyDescList.size()) {
+ *pDescriptor =
+ VALUE_OR_RETURN_STATUS(::aidl::android::aidl2legacy_Descriptor_effect_descriptor(
+ mProxyDescList.at(index - mNonProxyDescList.size())));
+ } else {
+ *pDescriptor =
+ VALUE_OR_RETURN_STATUS(::aidl::android::aidl2legacy_Descriptor_effect_descriptor(
+ mNonProxyDescList.at(index)));
+ }
return OK;
}
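
A small self-contained sketch of the combined-index lookup above: indices below the non-proxy count address the non-proxy list, and the remainder addresses the proxy list (plain strings stand in for descriptors):

    #include <cstddef>
    #include <optional>
    #include <string>
    #include <vector>

    // Returns the descriptor at 'index' in the virtual concatenation
    // [nonProxy..., proxy...], or std::nullopt when out of range.
    std::optional<std::string> descriptorAt(const std::vector<std::string>& nonProxy,
                                            const std::vector<std::string>& proxy, size_t index) {
        if (index >= nonProxy.size() + proxy.size()) return std::nullopt;
        return index < nonProxy.size() ? nonProxy[index] : proxy[index - nonProxy.size()];
    }
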
status_t EffectsFactoryHalAidl::getDescriptor(const effect_uuid_t* halUuid,
effect_descriptor_t* pDescriptor) {
- if (halUuid == nullptr || pDescriptor == nullptr) {
+ if (halUuid == nullptr) {
return BAD_VALUE;
}
- AudioUuid uuid = VALUE_OR_RETURN_STATUS(legacy2aidl_audio_uuid_t_AudioUuid(*halUuid));
- std::lock_guard lg(mLock);
- return getHalDescriptorWithImplUuid_l(uuid, pDescriptor);
+ AudioUuid uuid =
+ VALUE_OR_RETURN_STATUS(::aidl::android::legacy2aidl_audio_uuid_t_AudioUuid(*halUuid));
+ return getHalDescriptorWithImplUuid(uuid, pDescriptor);
}
status_t EffectsFactoryHalAidl::getDescriptors(const effect_uuid_t* halType,
std::vector<effect_descriptor_t>* descriptors) {
- if (halType == nullptr || descriptors == nullptr) {
+ if (halType == nullptr) {
return BAD_VALUE;
}
- AudioUuid type = VALUE_OR_RETURN_STATUS(legacy2aidl_audio_uuid_t_AudioUuid(*halType));
- std::lock_guard lg(mLock);
- return getHalDescriptorWithTypeUuid_l(type, descriptors);
+ AudioUuid type =
+ VALUE_OR_RETURN_STATUS(::aidl::android::legacy2aidl_audio_uuid_t_AudioUuid(*halType));
+ return getHalDescriptorWithTypeUuid(type, descriptors);
}
status_t EffectsFactoryHalAidl::createEffect(const effect_uuid_t* uuid, int32_t sessionId,
@@ -114,17 +161,25 @@
if (sessionId == AUDIO_SESSION_DEVICE && ioId == AUDIO_IO_HANDLE_NONE) {
return INVALID_OPERATION;
}
-
ALOGI("%s session %d ioId %d", __func__, sessionId, ioId);
- AudioUuid aidlUuid = VALUE_OR_RETURN_STATUS(legacy2aidl_audio_uuid_t_AudioUuid(*uuid));
+ AudioUuid aidlUuid =
+ VALUE_OR_RETURN_STATUS(::aidl::android::legacy2aidl_audio_uuid_t_AudioUuid(*uuid));
std::shared_ptr<IEffect> aidlEffect;
- Descriptor desc;
- RETURN_STATUS_IF_ERROR(statusTFromBinderStatus(mFactory->createEffect(aidlUuid, &aidlEffect)));
+ // Use the EffectProxy interface instead of IFactory to create the effect
+ const bool isProxy = isProxyEffect(aidlUuid);
+ if (isProxy) {
+ aidlEffect = mUuidProxyMap.at(aidlUuid);
+ RETURN_STATUS_IF_ERROR(statusTFromBinderStatus(mUuidProxyMap.at(aidlUuid)->create()));
+ } else {
+ RETURN_STATUS_IF_ERROR(
+ statusTFromBinderStatus(mFactory->createEffect(aidlUuid, &aidlEffect)));
+ }
if (aidlEffect == nullptr) {
- ALOGE("%s IFactory::createFactory failed UUID %s", __func__, aidlUuid.toString().c_str());
+ ALOGE("%s failed to create effect with UUID: %s", __func__, aidlUuid.toString().c_str());
return NAME_NOT_FOUND;
}
+ Descriptor desc;
RETURN_STATUS_IF_ERROR(statusTFromBinderStatus(aidlEffect->getDescriptor(&desc)));
uint64_t effectId;
@@ -133,13 +188,23 @@
effectId = ++mEffectIdCounter;
}
- *effect = sp<EffectHalAidl>::make(mFactory, aidlEffect, effectId, sessionId, ioId, desc);
+ *effect =
+ sp<EffectHalAidl>::make(mFactory, aidlEffect, effectId, sessionId, ioId, desc, isProxy);
return OK;
}
status_t EffectsFactoryHalAidl::dumpEffects(int fd) {
- // TODO: add proxy dump here because AIDL service EffectFactory doesn't have proxy handle
- return mFactory->dump(fd, nullptr, 0);
+ status_t ret = OK;
+ // Record the error and continue dumping as many effects as possible
+ for (const auto& proxy : mUuidProxyMap) {
+ if (proxy.second) {
+ if (status_t temp = proxy.second->dump(fd, nullptr, 0); temp != OK) {
+ ret = temp;
+ }
+ }
+ }
+ RETURN_STATUS_IF_ERROR(mFactory->dump(fd, nullptr, 0));
+ return ret;
}
status_t EffectsFactoryHalAidl::allocateBuffer(size_t size, sp<EffectBufferHalInterface>* buffer) {
@@ -157,56 +222,42 @@
return mHalVersion;
}
-status_t EffectsFactoryHalAidl::queryEffectList_l() {
- if (!mDescList) {
- std::vector<Descriptor> list;
- auto status = mFactory->queryEffects(std::nullopt, std::nullopt, std::nullopt, &list);
- if (!status.isOk()) {
- ALOGE("%s IFactory::queryEffects failed %s", __func__, status.getDescription().c_str());
- return status.getStatus();
- }
-
- mDescList = std::make_unique<std::vector<Descriptor>>(list);
- }
- return OK;
-}
-
-status_t EffectsFactoryHalAidl::getHalDescriptorWithImplUuid_l(const AudioUuid& uuid,
- effect_descriptor_t* pDescriptor) {
+status_t EffectsFactoryHalAidl::getHalDescriptorWithImplUuid(const AudioUuid& uuid,
+ effect_descriptor_t* pDescriptor) {
if (pDescriptor == nullptr) {
return BAD_VALUE;
}
- if (!mDescList) {
- RETURN_STATUS_IF_ERROR(queryEffectList_l());
- }
- auto matchIt = std::find_if(mDescList->begin(), mDescList->end(),
- [&](const auto& desc) { return desc.common.id.uuid == uuid; });
- if (matchIt == mDescList->end()) {
- ALOGE("%s UUID %s not found", __func__, uuid.toString().c_str());
+ const auto& list = isProxyEffect(uuid) ? mProxyDescList : mNonProxyDescList;
+ auto matchIt = std::find_if(list.begin(), list.end(),
+ [&](const auto& desc) { return desc.common.id.uuid == uuid; });
+ if (matchIt == list.end()) {
+ ALOGE("%s UUID not found in HAL and proxy list %s", __func__, uuid.toString().c_str());
return BAD_VALUE;
}
+ ALOGI("%s UUID impl found %s", __func__, uuid.toString().c_str());
*pDescriptor = VALUE_OR_RETURN_STATUS(
::aidl::android::aidl2legacy_Descriptor_effect_descriptor(*matchIt));
return OK;
}
-status_t EffectsFactoryHalAidl::getHalDescriptorWithTypeUuid_l(
+status_t EffectsFactoryHalAidl::getHalDescriptorWithTypeUuid(
const AudioUuid& type, std::vector<effect_descriptor_t>* descriptors) {
if (descriptors == nullptr) {
return BAD_VALUE;
}
- if (!mDescList) {
- RETURN_STATUS_IF_ERROR(queryEffectList_l());
- }
+
std::vector<Descriptor> result;
- std::copy_if(mDescList->begin(), mDescList->end(), std::back_inserter(result),
+ std::copy_if(mNonProxyDescList.begin(), mNonProxyDescList.end(), std::back_inserter(result),
[&](auto& desc) { return desc.common.id.type == type; });
- if (result.size() == 0) {
- ALOGE("%s type UUID %s not found", __func__, type.toString().c_str());
+ std::copy_if(mProxyDescList.begin(), mProxyDescList.end(), std::back_inserter(result),
+ [&](auto& desc) { return desc.common.id.type == type; });
+ if (result.empty()) {
+ ALOGW("%s UUID type not found in HAL and proxy list %s", __func__, type.toString().c_str());
return BAD_VALUE;
}
+ ALOGI("%s UUID type found %zu \n %s", __func__, result.size(), type.toString().c_str());
*descriptors = VALUE_OR_RETURN_STATUS(
aidl::android::convertContainer<std::vector<effect_descriptor_t>>(
@@ -214,6 +265,10 @@
return OK;
}
+bool EffectsFactoryHalAidl::isProxyEffect(const AudioUuid& uuid) const {
+ return 0 != mUuidProxyMap.count(uuid);
+}
+
} // namespace effect
// When a shared library is built from a static library, even explicit
diff --git a/media/libaudiohal/impl/EffectsFactoryHalAidl.h b/media/libaudiohal/impl/EffectsFactoryHalAidl.h
index 9c3643b..debfacf 100644
--- a/media/libaudiohal/impl/EffectsFactoryHalAidl.h
+++ b/media/libaudiohal/impl/EffectsFactoryHalAidl.h
@@ -25,6 +25,8 @@
#include <media/audiohal/EffectsFactoryHalInterface.h>
#include <system/thread_defs.h>
+#include "EffectProxy.h"
+
namespace android {
namespace effect {
@@ -60,24 +62,35 @@
detail::AudioHalVersionInfo getHalVersion() const override;
- // for TIME_CHECK
- const std::string getClassName() const { return "EffectHalAidl"; }
-
private:
- std::mutex mLock;
const std::shared_ptr<IFactory> mFactory;
- uint64_t mEffectIdCounter GUARDED_BY(mLock) = 0; // Align with HIDL (0 is INVALID_ID)
- std::unique_ptr<std::vector<Descriptor>> mDescList GUARDED_BY(mLock) = nullptr;
const detail::AudioHalVersionInfo mHalVersion;
+ // Full list of HAL effect descriptors
+ const std::vector<Descriptor> mHalDescList;
+ // Map of proxy UUID (key) to the proxy object
+ const std::map<::aidl::android::media::audio::common::AudioUuid /* proxy impl UUID */,
+ std::shared_ptr<EffectProxy>>
+ mUuidProxyMap;
+ // List of effect proxies; initialized after mUuidProxyMap because it needs all sub-effects
+ const std::vector<Descriptor> mProxyDescList;
+ // List of non-proxy effects
+ const std::vector<Descriptor> mNonProxyDescList;
+ // total number of effects including proxy effects
+ const size_t mEffectCount;
+
+ std::mutex mLock;
+ uint64_t mEffectIdCounter GUARDED_BY(mLock) = 0; // Align with HIDL (0 is INVALID_ID)
virtual ~EffectsFactoryHalAidl() = default;
- status_t queryEffectList_l() REQUIRES(mLock);
- status_t getHalDescriptorWithImplUuid_l(
+ status_t getHalDescriptorWithImplUuid(
const aidl::android::media::audio::common::AudioUuid& uuid,
- effect_descriptor_t* pDescriptor) REQUIRES(mLock);
- status_t getHalDescriptorWithTypeUuid_l(
+ effect_descriptor_t* pDescriptor);
+
+ status_t getHalDescriptorWithTypeUuid(
const aidl::android::media::audio::common::AudioUuid& type,
- std::vector<effect_descriptor_t>* descriptors) REQUIRES(mLock);
+ std::vector<effect_descriptor_t>* descriptors);
+
+ bool isProxyEffect(const aidl::android::media::audio::common::AudioUuid& uuid) const;
};
} // namespace effect
diff --git a/media/libaudiohal/impl/StreamHalAidl.cpp b/media/libaudiohal/impl/StreamHalAidl.cpp
index 9d67b67..3048580 100644
--- a/media/libaudiohal/impl/StreamHalAidl.cpp
+++ b/media/libaudiohal/impl/StreamHalAidl.cpp
@@ -21,18 +21,27 @@
#include <cstdint>
#include <audio_utils/clock.h>
+#include <media/AidlConversion.h>
+#include <media/AidlConversionCppNdk.h>
+#include <media/AidlConversionNdk.h>
#include <media/AidlConversionUtil.h>
+#include <media/AudioParameter.h>
#include <mediautils/TimeCheck.h>
+#include <system/audio.h>
#include <utils/Log.h>
#include "DeviceHalAidl.h"
#include "StreamHalAidl.h"
using ::aidl::android::aidl_utils::statusTFromBinderStatus;
+using ::aidl::android::hardware::audio::common::PlaybackTrackMetadata;
+using ::aidl::android::hardware::audio::common::RecordTrackMetadata;
using ::aidl::android::hardware::audio::core::IStreamCommon;
using ::aidl::android::hardware::audio::core::IStreamIn;
using ::aidl::android::hardware::audio::core::IStreamOut;
using ::aidl::android::hardware::audio::core::StreamDescriptor;
+using ::aidl::android::hardware::audio::core::MmapBufferDescriptor;
+using ::aidl::android::media::audio::common::MicrophoneDynamicInfo;
namespace android {
@@ -112,11 +121,45 @@
return OK;
}
-status_t StreamHalAidl::setParameters(const String8& kvPairs __unused) {
- ALOGD("%p %s::%s", this, getClassName().c_str(), __func__);
+namespace {
+
+// 'action' must accept a value of type 'T' and return 'status_t'.
+// The function returns 'true' if the parameter was found and the action succeeded.
+// The function returns 'false' if the parameter was not found.
+// Any errors get propagated; an error implies the parameter was found.
+template<typename T, typename F>
+error::Result<bool> filterOutAndProcessParameter(
+ AudioParameter& parameters, const String8& key, const F& action) {
+ if (parameters.containsKey(key)) {
+ T value;
+ status_t status = parameters.get(key, value);
+ if (status == OK) {
+ parameters.remove(key);
+ status = action(value);
+ if (status == OK) return true;
+ }
+ return base::unexpected(status);
+ }
+ return false;
+}
+
+} // namespace
+
+status_t StreamHalAidl::setParameters(const String8& kvPairs) {
TIME_CHECK();
if (!mStream) return NO_INIT;
- ALOGE("%s not implemented yet", __func__);
+
+ AudioParameter parameters(kvPairs);
+ ALOGD("%s: parameters: %s", __func__, parameters.toString().c_str());
+
+ (void)VALUE_OR_RETURN_STATUS(filterOutAndProcessParameter<int>(
+ parameters, String8(AudioParameter::keyStreamHwAvSync),
+ [&](int hwAvSyncId) {
+ return statusTFromBinderStatus(mStream->updateHwAvSyncId(hwAvSyncId));
+ }));
+
+ ALOGW_IF(parameters.size() != 0, "%s: unknown parameters, ignored: %s",
+ __func__, parameters.toString().c_str());
return OK;
}
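Illustrative sketch (not part of this change): the tri-state contract of filterOutAndProcessParameter is easiest to see from a caller's perspective; the key name and handler below are hypothetical:

    AudioParameter params(String8("my_vendor_key=42"));
    error::Result<bool> handled = filterOutAndProcessParameter<int>(
            params, String8("my_vendor_key"),
            [](int value) { ALOGD("my_vendor_key=%d", value); return OK; });
    if (!handled.has_value()) {
        // Key was present but reading or applying it failed; propagate handled.error().
    } else if (handled.value()) {
        // Key was found, handled, and removed from 'params'.
    } else {
        // Key was absent; 'params' is left untouched.
    }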
@@ -213,16 +256,23 @@
ALOGD("%p %s::%s", this, getClassName().c_str(), __func__);
TIME_CHECK();
if (!mStream) return NO_INIT;
- ALOGE("%s not implemented yet", __func__);
- return OK;
+ const auto state = getState();
+ StreamDescriptor::Reply reply;
+ if (state == StreamDescriptor::State::STANDBY) {
+ if (status_t status = sendCommand(makeHalCommand<HalCommand::Tag::start>(), &reply, true);
+ status != OK) {
+ return status;
+ }
+ return sendCommand(makeHalCommand<HalCommand::Tag::burst>(0), &reply, true);
+ }
+
+ return INVALID_OPERATION;
}
status_t StreamHalAidl::stop() {
ALOGD("%p %s::%s", this, getClassName().c_str(), __func__);
- TIME_CHECK();
if (!mStream) return NO_INIT;
- ALOGE("%s not implemented yet", __func__);
- return OK;
+ return standby();
}
status_t StreamHalAidl::getLatency(uint32_t *latency) {
@@ -248,6 +298,20 @@
return OK;
}
+status_t StreamHalAidl::getHardwarePosition(int64_t *frames, int64_t *timestamp) {
+ ALOGV("%p %s::%s", this, getClassName().c_str(), __func__);
+ if (!mStream) return NO_INIT;
+ StreamDescriptor::Reply reply;
+ // TODO: switch to updateCountersIfNeeded once we sort out mWorkerTid initialization
+ if (status_t status = sendCommand(makeHalCommand<HalCommand::Tag::getStatus>(), &reply, true);
+ status != OK) {
+ return status;
+ }
+ *frames = reply.hardware.frames;
+ *timestamp = reply.hardware.timeNs;
+ return OK;
+}
+
status_t StreamHalAidl::getXruns(int32_t *frames) {
ALOGV("%p %s::%s", this, getClassName().c_str(), __func__);
if (!mStream) return NO_INIT;
@@ -377,19 +441,35 @@
}
status_t StreamHalAidl::createMmapBuffer(int32_t minSizeFrames __unused,
- struct audio_mmap_buffer_info *info __unused) {
+ struct audio_mmap_buffer_info *info) {
ALOGD("%p %s::%s", this, getClassName().c_str(), __func__);
TIME_CHECK();
if (!mStream) return NO_INIT;
- ALOGE("%s not implemented yet", __func__);
+ if (!mContext.isMmapped()) {
+ return BAD_VALUE;
+ }
+ const MmapBufferDescriptor& bufferDescriptor = mContext.getMmapBufferDescriptor();
+ info->shared_memory_fd = bufferDescriptor.sharedMemory.fd.get();
+ info->buffer_size_frames = mContext.getBufferSizeFrames();
+ info->burst_size_frames = bufferDescriptor.burstSizeFrames;
+ info->flags = static_cast<audio_mmap_buffer_flag>(bufferDescriptor.flags);
+
return OK;
}
-status_t StreamHalAidl::getMmapPosition(struct audio_mmap_position *position __unused) {
- ALOGD("%p %s::%s", this, getClassName().c_str(), __func__);
+status_t StreamHalAidl::getMmapPosition(struct audio_mmap_position *position) {
TIME_CHECK();
if (!mStream) return NO_INIT;
- ALOGE("%s not implemented yet", __func__);
+ if (!mContext.isMmapped()) {
+ return BAD_VALUE;
+ }
+ int64_t aidlPosition = 0, aidlTimestamp = 0;
+ if (status_t status = getHardwarePosition(&aidlPosition, &aidlTimestamp); status != OK) {
+ return status;
+ }
+
+ position->time_nanoseconds = aidlTimestamp;
+ position->position_frames = static_cast<int32_t>(aidlPosition);
return OK;
}
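Illustrative sketch (not part of this change): a framework-side caller would typically obtain the MMAP buffer once and then poll the hardware position to pace its reads or writes; the flow below only relies on the fields populated above:

    struct audio_mmap_buffer_info info{};
    if (stream->createMmapBuffer(0 /*minSizeFrames*/, &info) == OK) {
        // info.shared_memory_fd, info.buffer_size_frames and info.burst_size_frames are valid here.
        struct audio_mmap_position pos{};
        if (stream->getMmapPosition(&pos) == OK) {
            ALOGD("hw position: %d frames at %lld ns",
                  pos.position_frames, (long long)pos.time_nanoseconds);
        }
    }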
@@ -478,12 +558,30 @@
return OK;
}
+// static
+ConversionResult<::aidl::android::hardware::audio::common::SourceMetadata>
+StreamOutHalAidl::legacy2aidl_SourceMetadata(const StreamOutHalInterface::SourceMetadata& legacy) {
+ ::aidl::android::hardware::audio::common::SourceMetadata aidl;
+ aidl.tracks = VALUE_OR_RETURN(
+ ::aidl::android::convertContainer<std::vector<PlaybackTrackMetadata>>(
+ legacy.tracks,
+ ::aidl::android::legacy2aidl_playback_track_metadata_v7_PlaybackTrackMetadata));
+ return aidl;
+}
+
StreamOutHalAidl::StreamOutHalAidl(
const audio_config& config, StreamContextAidl&& context, int32_t nominalLatency,
const std::shared_ptr<IStreamOut>& stream, const sp<CallbackBroker>& callbackBroker)
: StreamHalAidl("StreamOutHalAidl", false /*isInput*/, config, nominalLatency,
std::move(context), getStreamCommon(stream)),
- mStream(stream), mCallbackBroker(callbackBroker) {}
+ mStream(stream), mCallbackBroker(callbackBroker) {
+ // Initialize the offload metadata
+ mOffloadMetadata.sampleRate = static_cast<int32_t>(config.sample_rate);
+ mOffloadMetadata.channelMask = VALUE_OR_FATAL(
+ ::aidl::android::legacy2aidl_audio_channel_mask_t_AudioChannelLayout(
+ config.channel_mask, false));
+ mOffloadMetadata.averageBitRatePerSecond = static_cast<int32_t>(config.offload_info.bit_rate);
+}
StreamOutHalAidl::~StreamOutHalAidl() {
if (auto broker = mCallbackBroker.promote(); broker != nullptr) {
@@ -491,6 +589,19 @@
}
}
+status_t StreamOutHalAidl::setParameters(const String8& kvPairs) {
+ if (!mStream) return NO_INIT;
+
+ AudioParameter parameters(kvPairs);
+ ALOGD("%s parameters: %s", __func__, parameters.toString().c_str());
+
+ if (status_t status = filterAndUpdateOffloadMetadata(parameters); status != OK) {
+ ALOGW("%s filtering or updating offload metadata failed: %d", __func__, status);
+ }
+
+ return StreamHalAidl::setParameters(parameters.toString());
+}
+
status_t StreamOutHalAidl::getLatency(uint32_t *latency) {
return StreamHalAidl::getLatency(latency);
}
@@ -603,11 +714,12 @@
}
status_t StreamOutHalAidl::updateSourceMetadata(
- const StreamOutHalInterface::SourceMetadata& sourceMetadata __unused) {
+ const StreamOutHalInterface::SourceMetadata& sourceMetadata) {
TIME_CHECK();
if (!mStream) return NO_INIT;
- ALOGE("%s not implemented yet", __func__);
- return OK;
+ ::aidl::android::hardware::audio::common::SourceMetadata aidlMetadata =
+ VALUE_OR_RETURN_STATUS(legacy2aidl_SourceMetadata(sourceMetadata));
+ return statusTFromBinderStatus(mStream->updateMetadata(aidlMetadata));
}
status_t StreamOutHalAidl::getDualMonoMode(audio_dual_mono_mode_t* mode __unused) {
@@ -693,12 +805,83 @@
return StreamHalAidl::exit();
}
+status_t StreamOutHalAidl::filterAndUpdateOffloadMetadata(AudioParameter &parameters) {
+ TIME_CHECK();
+ bool updateMetadata = false;
+ if (VALUE_OR_RETURN_STATUS(filterOutAndProcessParameter<int>(
+ parameters, String8(AudioParameter::keyOffloadCodecAverageBitRate),
+ [&](int value) {
+ return value > 0 ?
+ mOffloadMetadata.averageBitRatePerSecond = value, OK : BAD_VALUE;
+ }))) {
+ updateMetadata = true;
+ }
+ if (VALUE_OR_RETURN_STATUS(filterOutAndProcessParameter<int>(
+ parameters, String8(AudioParameter::keyOffloadCodecSampleRate),
+ [&](int value) {
+ return value > 0 ? mOffloadMetadata.sampleRate = value, OK : BAD_VALUE;
+ }))) {
+ updateMetadata = true;
+ }
+ if (VALUE_OR_RETURN_STATUS(filterOutAndProcessParameter<int>(
+ parameters, String8(AudioParameter::keyOffloadCodecChannels),
+ [&](int value) -> status_t {
+ if (value > 0) {
+ audio_channel_mask_t channel_mask = audio_channel_out_mask_from_count(
+ static_cast<uint32_t>(value));
+ if (channel_mask == AUDIO_CHANNEL_INVALID) return BAD_VALUE;
+ mOffloadMetadata.channelMask = VALUE_OR_RETURN_STATUS(
+ ::aidl::android::legacy2aidl_audio_channel_mask_t_AudioChannelLayout(
+ channel_mask, false /*isInput*/));
+ // Channel mask applied successfully; report OK so the metadata update below is sent.
+ return OK;
+ }
+ return BAD_VALUE;
+ }))) {
+ updateMetadata = true;
+ }
+ if (VALUE_OR_RETURN_STATUS(filterOutAndProcessParameter<int>(
+ parameters, String8(AudioParameter::keyOffloadCodecDelaySamples),
+ [&](int value) {
+ // The legacy keys are misnamed; the value is in frames.
+ return value > 0 ? mOffloadMetadata.delayFrames = value, OK : BAD_VALUE;
+ }))) {
+ updateMetadata = true;
+ }
+ if (VALUE_OR_RETURN_STATUS(filterOutAndProcessParameter<int>(
+ parameters, String8(AudioParameter::keyOffloadCodecPaddingSamples),
+ [&](int value) {
+ // The legacy keys are misnamed; the value is in frames.
+ return value > 0 ? mOffloadMetadata.paddingFrames = value, OK : BAD_VALUE;
+ }))) {
+ updateMetadata = true;
+ }
+ if (updateMetadata) {
+ ALOGD("%s set offload metadata %s", __func__, mOffloadMetadata.toString().c_str());
+ if (status_t status = statusTFromBinderStatus(
+ mStream->updateOffloadMetadata(mOffloadMetadata)); status != OK) {
+ ALOGE("%s: updateOffloadMetadata failed %d", __func__, status);
+ return status;
+ }
+ }
+ return OK;
+}
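Illustrative sketch (not part of this change): callers deliver the offload codec properties through the legacy key/value interface; the keys are the AudioParameter constants referenced above, while the concrete values here are made up:

    AudioParameter p;
    p.addInt(String8(AudioParameter::keyOffloadCodecAverageBitRate), 128000);
    p.addInt(String8(AudioParameter::keyOffloadCodecSampleRate), 48000);
    p.addInt(String8(AudioParameter::keyOffloadCodecChannels), 2);
    p.addInt(String8(AudioParameter::keyOffloadCodecDelaySamples), 512);   // value is in frames
    p.addInt(String8(AudioParameter::keyOffloadCodecPaddingSamples), 256); // value is in frames
    streamOut->setParameters(p.toString());  // routed through filterAndUpdateOffloadMetadata()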
+
+// static
+ConversionResult<::aidl::android::hardware::audio::common::SinkMetadata>
+StreamInHalAidl::legacy2aidl_SinkMetadata(const StreamInHalInterface::SinkMetadata& legacy) {
+ ::aidl::android::hardware::audio::common::SinkMetadata aidl;
+ aidl.tracks = VALUE_OR_RETURN(
+ ::aidl::android::convertContainer<std::vector<RecordTrackMetadata>>(
+ legacy.tracks,
+ ::aidl::android::legacy2aidl_record_track_metadata_v7_RecordTrackMetadata));
+ return aidl;
+}
+
StreamInHalAidl::StreamInHalAidl(
const audio_config& config, StreamContextAidl&& context, int32_t nominalLatency,
- const std::shared_ptr<IStreamIn>& stream)
+ const std::shared_ptr<IStreamIn>& stream, const sp<MicrophoneInfoProvider>& micInfoProvider)
: StreamHalAidl("StreamInHalAidl", true /*isInput*/, config, nominalLatency,
std::move(context), getStreamCommon(stream)),
- mStream(stream) {}
+ mStream(stream), mMicInfoProvider(micInfoProvider) {}
status_t StreamInHalAidl::setGain(float gain __unused) {
TIME_CHECK();
@@ -733,20 +916,48 @@
return getObservablePosition(frames, time);
}
-status_t StreamInHalAidl::getActiveMicrophones(
- std::vector<media::MicrophoneInfoFw> *microphones __unused) {
+status_t StreamInHalAidl::getActiveMicrophones(std::vector<media::MicrophoneInfoFw> *microphones) {
+ if (!microphones) {
+ return BAD_VALUE;
+ }
TIME_CHECK();
if (!mStream) return NO_INIT;
- ALOGE("%s not implemented yet", __func__);
+ sp<MicrophoneInfoProvider> micInfoProvider = mMicInfoProvider.promote();
+ if (!micInfoProvider) return NO_INIT;
+ auto staticInfo = micInfoProvider->getMicrophoneInfo();
+ if (!staticInfo) return INVALID_OPERATION;
+ std::vector<MicrophoneDynamicInfo> dynamicInfo;
+ RETURN_STATUS_IF_ERROR(statusTFromBinderStatus(mStream->getActiveMicrophones(&dynamicInfo)));
+ std::vector<media::MicrophoneInfoFw> result;
+ result.reserve(dynamicInfo.size());
+ for (const auto& d : dynamicInfo) {
+ const auto staticInfoIt = std::find_if(staticInfo->begin(), staticInfo->end(),
+ [&](const auto& s) { return s.id == d.id; });
+ if (staticInfoIt != staticInfo->end()) {
+ // Convert from the NDK backend type into the C++ backend type via the legacy structure.
+ audio_microphone_characteristic_t legacy = VALUE_OR_RETURN_STATUS(
+ ::aidl::android::aidl2legacy_MicrophoneInfos_audio_microphone_characteristic_t(
+ *staticInfoIt, d));
+ media::MicrophoneInfoFw info = VALUE_OR_RETURN_STATUS(
+ ::android::legacy2aidl_audio_microphone_characteristic_t_MicrophoneInfoFw(
+ legacy));
+ // Note: info.portId is not filled in because it is framework-side information.
+ result.push_back(std::move(info));
+ } else {
+ ALOGE("%s: no static info for active microphone with id '%s'", __func__, d.id.c_str());
+ }
+ }
+ *microphones = std::move(result);
return OK;
}
status_t StreamInHalAidl::updateSinkMetadata(
- const StreamInHalInterface::SinkMetadata& sinkMetadata __unused) {
+ const StreamInHalInterface::SinkMetadata& sinkMetadata) {
TIME_CHECK();
if (!mStream) return NO_INIT;
- ALOGE("%s not implemented yet", __func__);
- return OK;
+ ::aidl::android::hardware::audio::common::SinkMetadata aidlMetadata =
+ VALUE_OR_RETURN_STATUS(legacy2aidl_SinkMetadata(sinkMetadata));
+ return statusTFromBinderStatus(mStream->updateMetadata(aidlMetadata));
}
status_t StreamInHalAidl::setPreferredMicrophoneDirection(
diff --git a/media/libaudiohal/impl/StreamHalAidl.h b/media/libaudiohal/impl/StreamHalAidl.h
index d00774c..147c131 100644
--- a/media/libaudiohal/impl/StreamHalAidl.h
+++ b/media/libaudiohal/impl/StreamHalAidl.h
@@ -21,16 +21,22 @@
#include <mutex>
#include <string_view>
+#include <aidl/android/hardware/audio/common/AudioOffloadMetadata.h>
#include <aidl/android/hardware/audio/core/BpStreamCommon.h>
#include <aidl/android/hardware/audio/core/BpStreamIn.h>
#include <aidl/android/hardware/audio/core/BpStreamOut.h>
+#include <aidl/android/hardware/audio/core/MmapBufferDescriptor.h>
#include <fmq/AidlMessageQueue.h>
#include <media/audiohal/EffectHalInterface.h>
#include <media/audiohal/StreamHalInterface.h>
+#include <media/AudioParameter.h>
#include "ConversionHelperAidl.h"
#include "StreamPowerLog.h"
+using ::aidl::android::hardware::audio::common::AudioOffloadMetadata;
+using ::aidl::android::hardware::audio::core::MmapBufferDescriptor;
+
namespace android {
class StreamContextAidl {
@@ -43,21 +49,25 @@
::aidl::android::hardware::common::fmq::SynchronizedReadWrite> DataMQ;
explicit StreamContextAidl(
- const ::aidl::android::hardware::audio::core::StreamDescriptor& descriptor,
+ ::aidl::android::hardware::audio::core::StreamDescriptor& descriptor,
bool isAsynchronous)
: mFrameSizeBytes(descriptor.frameSizeBytes),
mCommandMQ(new CommandMQ(descriptor.command)),
mReplyMQ(new ReplyMQ(descriptor.reply)),
mBufferSizeFrames(descriptor.bufferSizeFrames),
mDataMQ(maybeCreateDataMQ(descriptor)),
- mIsAsynchronous(isAsynchronous) {}
+ mIsAsynchronous(isAsynchronous),
+ mIsMmapped(isMmapped(descriptor)),
+ mMmapBufferDescriptor(maybeGetMmapBuffer(descriptor)) {}
StreamContextAidl(StreamContextAidl&& other) :
mFrameSizeBytes(other.mFrameSizeBytes),
mCommandMQ(std::move(other.mCommandMQ)),
mReplyMQ(std::move(other.mReplyMQ)),
mBufferSizeFrames(other.mBufferSizeFrames),
mDataMQ(std::move(other.mDataMQ)),
- mIsAsynchronous(other.mIsAsynchronous) {}
+ mIsAsynchronous(other.mIsAsynchronous),
+ mIsMmapped(other.mIsMmapped),
+ mMmapBufferDescriptor(std::move(other.mMmapBufferDescriptor)) {}
StreamContextAidl& operator=(StreamContextAidl&& other) {
mFrameSizeBytes = other.mFrameSizeBytes;
mCommandMQ = std::move(other.mCommandMQ);
@@ -65,16 +75,19 @@
mBufferSizeFrames = other.mBufferSizeFrames;
mDataMQ = std::move(other.mDataMQ);
mIsAsynchronous = other.mIsAsynchronous;
+ mIsMmapped = other.mIsMmapped;
+ mMmapBufferDescriptor = std::move(other.mMmapBufferDescriptor);
return *this;
}
bool isValid() const {
return mFrameSizeBytes != 0 &&
mCommandMQ != nullptr && mCommandMQ->isValid() &&
mReplyMQ != nullptr && mReplyMQ->isValid() &&
- (mDataMQ != nullptr || (
+ (mDataMQ == nullptr || (
mDataMQ->isValid() &&
mDataMQ->getQuantumCount() * mDataMQ->getQuantumSize() >=
- mFrameSizeBytes * mBufferSizeFrames));
+ mFrameSizeBytes * mBufferSizeFrames)) &&
+ (!mIsMmapped || mMmapBufferDescriptor.sharedMemory.fd.get() >= 0);
}
size_t getBufferSizeBytes() const { return mFrameSizeBytes * mBufferSizeFrames; }
size_t getBufferSizeFrames() const { return mBufferSizeFrames; }
@@ -83,6 +96,8 @@
size_t getFrameSizeBytes() const { return mFrameSizeBytes; }
ReplyMQ* getReplyMQ() const { return mReplyMQ.get(); }
bool isAsynchronous() const { return mIsAsynchronous; }
+ bool isMmapped() const { return mIsMmapped; }
+ const MmapBufferDescriptor& getMmapBufferDescriptor() const { return mMmapBufferDescriptor; }
private:
static std::unique_ptr<DataMQ> maybeCreateDataMQ(
@@ -93,6 +108,19 @@
}
return nullptr;
}
+ static bool isMmapped(
+ const ::aidl::android::hardware::audio::core::StreamDescriptor& descriptor) {
+ using Tag = ::aidl::android::hardware::audio::core::StreamDescriptor::AudioBuffer::Tag;
+ return descriptor.audio.getTag() == Tag::mmap;
+ }
+ static MmapBufferDescriptor maybeGetMmapBuffer(
+ ::aidl::android::hardware::audio::core::StreamDescriptor& descriptor) {
+ using Tag = ::aidl::android::hardware::audio::core::StreamDescriptor::AudioBuffer::Tag;
+ if (descriptor.audio.getTag() == Tag::mmap) {
+ return std::move(descriptor.audio.get<Tag::mmap>());
+ }
+ return {};
+ }
size_t mFrameSizeBytes;
std::unique_ptr<CommandMQ> mCommandMQ;
@@ -100,6 +128,8 @@
size_t mBufferSizeFrames;
std::unique_ptr<DataMQ> mDataMQ;
bool mIsAsynchronous;
+ bool mIsMmapped;
+ MmapBufferDescriptor mMmapBufferDescriptor;
};
class StreamHalAidl : public virtual StreamHalInterface, public ConversionHelperAidl {
@@ -177,6 +207,8 @@
status_t getObservablePosition(int64_t *frames, int64_t *timestamp);
+ status_t getHardwarePosition(int64_t *frames, int64_t *timestamp);
+
status_t getXruns(int32_t *frames);
status_t transfer(void *buffer, size_t bytes, size_t *transferred);
@@ -230,6 +262,9 @@
class StreamOutHalAidl : public StreamOutHalInterface, public StreamHalAidl {
public:
+ // Extract the output stream parameters and set them via the AIDL APIs.
+ status_t setParameters(const String8& kvPairs) override;
+
// Return the audio hardware driver estimated latency in milliseconds.
status_t getLatency(uint32_t *latency) override;
@@ -306,9 +341,14 @@
private:
friend class sp<StreamOutHalAidl>;
+ static ConversionResult<::aidl::android::hardware::audio::common::SourceMetadata>
+ legacy2aidl_SourceMetadata(const StreamOutHalInterface::SourceMetadata& legacy);
+
const std::shared_ptr<::aidl::android::hardware::audio::core::IStreamOut> mStream;
const wp<CallbackBroker> mCallbackBroker;
+ AudioOffloadMetadata mOffloadMetadata;
+
// Can not be constructed directly by clients.
StreamOutHalAidl(
const audio_config& config, StreamContextAidl&& context, int32_t nominalLatency,
@@ -316,8 +356,14 @@
const sp<CallbackBroker>& callbackBroker);
~StreamOutHalAidl() override;
+
+ // Filter and update the offload metadata. Parameters related to the offload metadata
+ // are removed from the incoming set after filtering.
+ status_t filterAndUpdateOffloadMetadata(AudioParameter &parameters);
};
+class MicrophoneInfoProvider;
+
class StreamInHalAidl : public StreamInHalInterface, public StreamHalAidl {
public:
// Set the input gain for the audio driver.
@@ -349,12 +395,17 @@
private:
friend class sp<StreamInHalAidl>;
+ static ConversionResult<::aidl::android::hardware::audio::common::SinkMetadata>
+ legacy2aidl_SinkMetadata(const StreamInHalInterface::SinkMetadata& legacy);
+
const std::shared_ptr<::aidl::android::hardware::audio::core::IStreamIn> mStream;
+ const wp<MicrophoneInfoProvider> mMicInfoProvider;
// Can not be constructed directly by clients.
StreamInHalAidl(
const audio_config& config, StreamContextAidl&& context, int32_t nominalLatency,
- const std::shared_ptr<::aidl::android::hardware::audio::core::IStreamIn>& stream);
+ const std::shared_ptr<::aidl::android::hardware::audio::core::IStreamIn>& stream,
+ const sp<MicrophoneInfoProvider>& micInfoProvider);
~StreamInHalAidl() override = default;
};
diff --git a/media/libaudiohal/impl/effectsAidlConversion/AidlConversionAec.cpp b/media/libaudiohal/impl/effectsAidlConversion/AidlConversionAec.cpp
index 15768b3..92b77d8 100644
--- a/media/libaudiohal/impl/effectsAidlConversion/AidlConversionAec.cpp
+++ b/media/libaudiohal/impl/effectsAidlConversion/AidlConversionAec.cpp
@@ -23,7 +23,6 @@
#include <error/expected_utils.h>
#include <media/AidlConversionNdk.h>
#include <media/AidlConversionEffect.h>
-#include <media/audiohal/AudioEffectUuid.h>
#include <system/audio_effects/effect_aec.h>
#include <utils/Log.h>
@@ -33,9 +32,11 @@
namespace android {
namespace effect {
+using ::aidl::android::getParameterSpecificField;
using ::aidl::android::aidl_utils::statusTFromBinderStatus;
using ::aidl::android::hardware::audio::effect::AcousticEchoCanceler;
using ::aidl::android::hardware::audio::effect::Parameter;
+using ::aidl::android::hardware::audio::effect::VendorExtension;
using ::android::status_t;
using utils::EffectParamReader;
using utils::EffectParamWriter;
@@ -64,8 +65,13 @@
break;
}
default: {
- ALOGW("%s unknown param %s", __func__, param.toString().c_str());
- return BAD_VALUE;
+ // For a vendor extension, copy the data area into DefaultExtension; the parameter is ignored
+ VendorExtension ext = VALUE_OR_RETURN_STATUS(
+ aidl::android::legacy2aidl_EffectParameterReader_Data_VendorExtension(param));
+ aidlParam = MAKE_SPECIFIC_PARAMETER(AcousticEchoCanceler, acousticEchoCanceler, vendor,
+ ext);
+ RETURN_STATUS_IF_ERROR(statusTFromBinderStatus(mEffect->setParameter(aidlParam)));
+ break;
}
}
@@ -73,7 +79,7 @@
}
status_t AidlConversionAec::getParameter(EffectParamWriter& param) {
- uint32_t type = 0, value = 0;
+ uint32_t type = 0;
if (!param.validateParamValueSize(sizeof(uint32_t), sizeof(uint32_t)) ||
OK != param.readFromParameter(&type)) {
param.setStatus(BAD_VALUE);
@@ -85,29 +91,30 @@
case AEC_PARAM_ECHO_DELAY:
FALLTHROUGH_INTENDED;
case AEC_PARAM_PROPERTIES: {
+ int32_t delay = 0;
Parameter::Id id =
MAKE_SPECIFIC_PARAMETER_ID(AcousticEchoCanceler, acousticEchoCancelerTag,
AcousticEchoCanceler::echoDelayUs);
RETURN_STATUS_IF_ERROR(statusTFromBinderStatus(mEffect->getParameter(id, &aidlParam)));
- value = VALUE_OR_RETURN_STATUS(
+ delay = VALUE_OR_RETURN_STATUS(
aidl::android::aidl2legacy_Parameter_aec_uint32_echoDelay(aidlParam));
- break;
+ return param.writeToValue(&delay);
}
case AEC_PARAM_MOBILE_MODE: {
+ int32_t mode = 0;
Parameter::Id id =
MAKE_SPECIFIC_PARAMETER_ID(AcousticEchoCanceler, acousticEchoCancelerTag,
AcousticEchoCanceler::mobileMode);
RETURN_STATUS_IF_ERROR(statusTFromBinderStatus(mEffect->getParameter(id, &aidlParam)));
- value = VALUE_OR_RETURN_STATUS(
+ mode = VALUE_OR_RETURN_STATUS(
aidl::android::aidl2legacy_Parameter_aec_uint32_mobileMode(aidlParam));
- break;
+ return param.writeToValue(&mode);
}
- default:
- // use vendor extension implementation
- ALOGW("%s unknown param %s", __func__, param.toString().c_str());
- return BAD_VALUE;
+ default: {
+ // Use the vendor extension implementation; the first 32 bits (param type) are not passed to the HAL
+ VENDOR_EXTENSION_GET_AND_RETURN(AcousticEchoCanceler, acousticEchoCanceler, param);
+ }
}
- return param.writeToValue(&value);
}
} // namespace effect
diff --git a/media/libaudiohal/impl/effectsAidlConversion/AidlConversionAgc1.cpp b/media/libaudiohal/impl/effectsAidlConversion/AidlConversionAgc1.cpp
new file mode 100644
index 0000000..1363ba4
--- /dev/null
+++ b/media/libaudiohal/impl/effectsAidlConversion/AidlConversionAgc1.cpp
@@ -0,0 +1,163 @@
+/*
+ * Copyright (C) 2023 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <cstdint>
+#include <cstring>
+#include <optional>
+#define LOG_TAG "AidlConversionAgc1"
+//#define LOG_NDEBUG 0
+
+#include <error/expected_utils.h>
+#include <media/AidlConversionNdk.h>
+#include <media/AidlConversionEffect.h>
+#include <system/audio_effects/effect_agc.h>
+
+#include <utils/Log.h>
+
+#include "AidlConversionAgc1.h"
+
+namespace android {
+namespace effect {
+
+using ::aidl::android::getParameterSpecificField;
+using ::aidl::android::aidl_utils::statusTFromBinderStatus;
+using ::aidl::android::hardware::audio::effect::AutomaticGainControlV1;
+using ::aidl::android::hardware::audio::effect::Parameter;
+using ::aidl::android::hardware::audio::effect::VendorExtension;
+using ::android::status_t;
+using utils::EffectParamReader;
+using utils::EffectParamWriter;
+
+status_t AidlConversionAgc1::setParameterLevel(EffectParamReader& param) {
+ int16_t level;
+ RETURN_STATUS_IF_ERROR(param.readFromValue(&level));
+ Parameter aidlParam = MAKE_SPECIFIC_PARAMETER(AutomaticGainControlV1, automaticGainControlV1,
+ targetPeakLevelDbFs, level);
+ return statusTFromBinderStatus(mEffect->setParameter(aidlParam));
+}
+
+status_t AidlConversionAgc1::setParameterGain(EffectParamReader& param) {
+ int16_t gain;
+ RETURN_STATUS_IF_ERROR(param.readFromValue(&gain));
+ Parameter aidlParam = MAKE_SPECIFIC_PARAMETER(AutomaticGainControlV1, automaticGainControlV1,
+ maxCompressionGainDb, gain);
+ return statusTFromBinderStatus(mEffect->setParameter(aidlParam));
+}
+
+status_t AidlConversionAgc1::setParameterLimiterEnable(EffectParamReader& param) {
+ bool enable;
+ RETURN_STATUS_IF_ERROR(param.readFromValue(&enable));
+ Parameter aidlParam = MAKE_SPECIFIC_PARAMETER(AutomaticGainControlV1, automaticGainControlV1,
+ enableLimiter, enable);
+ return statusTFromBinderStatus(mEffect->setParameter(aidlParam));
+}
+
+status_t AidlConversionAgc1::setParameter(EffectParamReader& param) {
+ uint32_t type = 0;
+ if (OK != param.readFromParameter(&type)) {
+ ALOGE("%s invalid param %s", __func__, param.toString().c_str());
+ return BAD_VALUE;
+ }
+ switch (type) {
+ case AGC_PARAM_TARGET_LEVEL: {
+ return setParameterLevel(param);
+ }
+ case AGC_PARAM_COMP_GAIN: {
+ return setParameterGain(param);
+ }
+ case AGC_PARAM_LIMITER_ENA: {
+ return setParameterLimiterEnable(param);
+ }
+ case AGC_PARAM_PROPERTIES: {
+ RETURN_STATUS_IF_ERROR(setParameterLevel(param));
+ RETURN_STATUS_IF_ERROR(setParameterGain(param));
+ RETURN_STATUS_IF_ERROR(setParameterLimiterEnable(param));
+ return OK;
+ }
+ default: {
+ // For a vendor extension, copy the data area into DefaultExtension; the parameter is ignored
+ VendorExtension ext = VALUE_OR_RETURN_STATUS(
+ aidl::android::legacy2aidl_EffectParameterReader_Data_VendorExtension(param));
+ Parameter aidlParam = MAKE_SPECIFIC_PARAMETER(AutomaticGainControlV1,
+ automaticGainControlV1, vendor, ext);
+ return statusTFromBinderStatus(mEffect->setParameter(aidlParam));
+ }
+ }
+}
+
+status_t AidlConversionAgc1::getParameterLevel(EffectParamWriter& param) {
+ Parameter::Id id = MAKE_SPECIFIC_PARAMETER_ID(AutomaticGainControlV1, automaticGainControlV1Tag,
+ AutomaticGainControlV1::targetPeakLevelDbFs);
+ Parameter aidlParam;
+ RETURN_STATUS_IF_ERROR(statusTFromBinderStatus(mEffect->getParameter(id, &aidlParam)));
+ int32_t level = VALUE_OR_RETURN_STATUS(
+ GET_PARAMETER_SPECIFIC_FIELD(aidlParam, AutomaticGainControlV1, automaticGainControlV1,
+ AutomaticGainControlV1::targetPeakLevelDbFs, int32_t));
+ return param.writeToValue(&level);
+}
+
+status_t AidlConversionAgc1::getParameterGain(EffectParamWriter& param) {
+ Parameter::Id id = MAKE_SPECIFIC_PARAMETER_ID(AutomaticGainControlV1, automaticGainControlV1Tag,
+ AutomaticGainControlV1::maxCompressionGainDb);
+ Parameter aidlParam;
+ RETURN_STATUS_IF_ERROR(statusTFromBinderStatus(mEffect->getParameter(id, &aidlParam)));
+ int32_t gain = VALUE_OR_RETURN_STATUS(
+ GET_PARAMETER_SPECIFIC_FIELD(aidlParam, AutomaticGainControlV1, automaticGainControlV1,
+ AutomaticGainControlV1::maxCompressionGainDb, int32_t));
+ return param.writeToValue(&gain);
+}
+
+status_t AidlConversionAgc1::getParameterLimiterEnable(EffectParamWriter& param) {
+ Parameter::Id id = MAKE_SPECIFIC_PARAMETER_ID(AutomaticGainControlV1, automaticGainControlV1Tag,
+ AutomaticGainControlV1::enableLimiter);
+ Parameter aidlParam;
+ RETURN_STATUS_IF_ERROR(statusTFromBinderStatus(mEffect->getParameter(id, &aidlParam)));
+ bool enable = VALUE_OR_RETURN_STATUS(
+ GET_PARAMETER_SPECIFIC_FIELD(aidlParam, AutomaticGainControlV1, automaticGainControlV1,
+ AutomaticGainControlV1::enableLimiter, bool));
+ return param.writeToValue(&enable);
+}
+
+status_t AidlConversionAgc1::getParameter(EffectParamWriter& param) {
+ uint32_t type = 0;
+ if (OK != param.readFromParameter(&type)) {
+ ALOGE("%s invalid param %s", __func__, param.toString().c_str());
+ return BAD_VALUE;
+ }
+ switch (type) {
+ case AGC_PARAM_TARGET_LEVEL: {
+ return getParameterLevel(param);
+ }
+ case AGC_PARAM_COMP_GAIN: {
+ return getParameterGain(param);
+ }
+ case AGC_PARAM_LIMITER_ENA: {
+ return getParameterLimiterEnable(param);
+ }
+ case AGC_PARAM_PROPERTIES: {
+ RETURN_STATUS_IF_ERROR(getParameterLevel(param));
+ RETURN_STATUS_IF_ERROR(getParameterGain(param));
+ RETURN_STATUS_IF_ERROR(getParameterLimiterEnable(param));
+ return OK;
+ }
+ default: {
+ VENDOR_EXTENSION_GET_AND_RETURN(AutomaticGainControlV1, automaticGainControlV1, param);
+ }
+ }
+}
+
+} // namespace effect
+} // namespace android
diff --git a/media/libaudiohal/impl/effectsAidlConversion/AidlConversionAgc1.h b/media/libaudiohal/impl/effectsAidlConversion/AidlConversionAgc1.h
new file mode 100644
index 0000000..b0509fd
--- /dev/null
+++ b/media/libaudiohal/impl/effectsAidlConversion/AidlConversionAgc1.h
@@ -0,0 +1,46 @@
+/*
+ * Copyright (C) 2023 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#pragma once
+
+#include <aidl/android/hardware/audio/effect/IEffect.h>
+#include "EffectConversionHelperAidl.h"
+
+namespace android {
+namespace effect {
+
+class AidlConversionAgc1 : public EffectConversionHelperAidl {
+ public:
+ AidlConversionAgc1(std::shared_ptr<::aidl::android::hardware::audio::effect::IEffect> effect,
+ int32_t sessionId, int32_t ioId,
+ const ::aidl::android::hardware::audio::effect::Descriptor& desc)
+ : EffectConversionHelperAidl(effect, sessionId, ioId, desc) {}
+ ~AidlConversionAgc1() {}
+
+ private:
+ status_t setParameterLevel(utils::EffectParamReader& param);
+ status_t setParameterGain(utils::EffectParamReader& param);
+ status_t setParameterLimiterEnable(utils::EffectParamReader& param);
+ status_t setParameter(utils::EffectParamReader& param) override;
+
+ status_t getParameterLevel(utils::EffectParamWriter& param);
+ status_t getParameterGain(utils::EffectParamWriter& param);
+ status_t getParameterLimiterEnable(utils::EffectParamWriter& param);
+ status_t getParameter(utils::EffectParamWriter& param) override;
+};
+
+} // namespace effect
+} // namespace android
diff --git a/media/libaudiohal/impl/effectsAidlConversion/AidlConversionAgc2.cpp b/media/libaudiohal/impl/effectsAidlConversion/AidlConversionAgc2.cpp
index b736936..b35a1c6 100644
--- a/media/libaudiohal/impl/effectsAidlConversion/AidlConversionAgc2.cpp
+++ b/media/libaudiohal/impl/effectsAidlConversion/AidlConversionAgc2.cpp
@@ -23,7 +23,6 @@
#include <error/expected_utils.h>
#include <media/AidlConversionNdk.h>
#include <media/AidlConversionEffect.h>
-#include <media/audiohal/AudioEffectUuid.h>
#include <system/audio_effects/effect_agc2.h>
#include <utils/Log.h>
@@ -33,9 +32,11 @@
namespace android {
namespace effect {
+using ::aidl::android::getParameterSpecificField;
using ::aidl::android::aidl_utils::statusTFromBinderStatus;
using ::aidl::android::hardware::audio::effect::AutomaticGainControlV2;
using ::aidl::android::hardware::audio::effect::Parameter;
+using ::aidl::android::hardware::audio::effect::VendorExtension;
using ::android::status_t;
using utils::EffectParamReader;
using utils::EffectParamWriter;
@@ -65,8 +66,12 @@
break;
}
default: {
- ALOGW("%s unknown param %s", __func__, param.toString().c_str());
- return BAD_VALUE;
+ // For a vendor extension, copy the data area into DefaultExtension; the parameter is ignored
+ VendorExtension ext = VALUE_OR_RETURN_STATUS(
+ aidl::android::legacy2aidl_EffectParameterReader_Data_VendorExtension(param));
+ aidlParam = MAKE_SPECIFIC_PARAMETER(AutomaticGainControlV2, automaticGainControlV2,
+ vendor, ext);
+ break;
}
}
@@ -110,8 +115,7 @@
break;
}
default: {
- ALOGW("%s unknown param %s", __func__, param.toString().c_str());
- return BAD_VALUE;
+ VENDOR_EXTENSION_GET_AND_RETURN(AutomaticGainControlV2, automaticGainControlV2, param);
}
}
diff --git a/media/libaudiohal/impl/effectsAidlConversion/AidlConversionBassBoost.cpp b/media/libaudiohal/impl/effectsAidlConversion/AidlConversionBassBoost.cpp
index 9ec593f..7c6a5a2 100644
--- a/media/libaudiohal/impl/effectsAidlConversion/AidlConversionBassBoost.cpp
+++ b/media/libaudiohal/impl/effectsAidlConversion/AidlConversionBassBoost.cpp
@@ -23,7 +23,6 @@
#include <error/expected_utils.h>
#include <media/AidlConversionNdk.h>
#include <media/AidlConversionEffect.h>
-#include <media/audiohal/AudioEffectUuid.h>
#include <system/audio_effects/aidl_effects_utils.h>
#include <system/audio_effects/effect_bassboost.h>
@@ -35,10 +34,12 @@
namespace effect {
using ::aidl::android::convertIntegral;
+using ::aidl::android::getParameterSpecificField;
using ::aidl::android::aidl_utils::statusTFromBinderStatus;
using ::aidl::android::hardware::audio::effect::BassBoost;
using ::aidl::android::hardware::audio::effect::Parameter;
using ::aidl::android::hardware::audio::effect::Range;
+using ::aidl::android::hardware::audio::effect::VendorExtension;
using ::android::status_t;
using utils::EffectParamReader;
using utils::EffectParamWriter;
@@ -63,8 +64,11 @@
return BAD_VALUE;
}
default: {
- ALOGW("%s unknown param %s", __func__, param.toString().c_str());
- return BAD_VALUE;
+ // For a vendor extension, copy the data area into DefaultExtension; the parameter is ignored
+ VendorExtension ext = VALUE_OR_RETURN_STATUS(
+ aidl::android::legacy2aidl_EffectParameterReader_Data_VendorExtension(param));
+ aidlParam = MAKE_SPECIFIC_PARAMETER(BassBoost, bassBoost, vendor, ext);
+ break;
}
}
@@ -98,8 +102,7 @@
return param.writeToValue(&value);
}
default: {
- ALOGW("%s unknown param %s", __func__, param.toString().c_str());
- return BAD_VALUE;
+ VENDOR_EXTENSION_GET_AND_RETURN(BassBoost, bassBoost, param);
}
}
}
diff --git a/media/libaudiohal/impl/effectsAidlConversion/AidlConversionDownmix.cpp b/media/libaudiohal/impl/effectsAidlConversion/AidlConversionDownmix.cpp
index 17cedf7..b57971c 100644
--- a/media/libaudiohal/impl/effectsAidlConversion/AidlConversionDownmix.cpp
+++ b/media/libaudiohal/impl/effectsAidlConversion/AidlConversionDownmix.cpp
@@ -23,7 +23,6 @@
#include <error/expected_utils.h>
#include <media/AidlConversionNdk.h>
#include <media/AidlConversionEffect.h>
-#include <media/audiohal/AudioEffectUuid.h>
#include <system/audio_effects/effect_downmix.h>
#include <system/audio_effect.h>
@@ -34,9 +33,11 @@
namespace android {
namespace effect {
+using ::aidl::android::getParameterSpecificField;
using ::aidl::android::aidl_utils::statusTFromBinderStatus;
using ::aidl::android::hardware::audio::effect::Downmix;
using ::aidl::android::hardware::audio::effect::Parameter;
+using ::aidl::android::hardware::audio::effect::VendorExtension;
using ::android::status_t;
using utils::EffectParamReader;
using utils::EffectParamWriter;
@@ -57,8 +58,10 @@
break;
}
default: {
- ALOGW("%s unknown param %s", __func__, param.toString().c_str());
- return BAD_VALUE;
+ // For a vendor extension, copy the data area into DefaultExtension; the parameter is ignored
+ VendorExtension ext = VALUE_OR_RETURN_STATUS(
+ aidl::android::legacy2aidl_EffectParameterReader_Data_VendorExtension(param));
+ aidlParam = MAKE_SPECIFIC_PARAMETER(Downmix, downmix, vendor, ext);
}
}
@@ -83,8 +86,7 @@
break;
}
default: {
- ALOGW("%s unknown param %s", __func__, param.toString().c_str());
- return BAD_VALUE;
+ VENDOR_EXTENSION_GET_AND_RETURN(Downmix, downmix, param);
}
}
diff --git a/media/libaudiohal/impl/effectsAidlConversion/AidlConversionDynamicsProcessing.cpp b/media/libaudiohal/impl/effectsAidlConversion/AidlConversionDynamicsProcessing.cpp
index 4555c9f..fe845ab 100644
--- a/media/libaudiohal/impl/effectsAidlConversion/AidlConversionDynamicsProcessing.cpp
+++ b/media/libaudiohal/impl/effectsAidlConversion/AidlConversionDynamicsProcessing.cpp
@@ -24,10 +24,9 @@
#include <media/AidlConversionCppNdk.h>
#include <media/AidlConversionNdk.h>
#include <media/AidlConversionEffect.h>
-#include <media/audiohal/AudioEffectUuid.h>
#include <system/audio_effect.h>
#include <system/audio_effects/effect_dynamicsprocessing.h>
-
+#include <Utils.h>
#include <utils/Log.h>
#include "AidlConversionDynamicsProcessing.h"
@@ -36,30 +35,26 @@
namespace effect {
using ::aidl::android::convertIntegral;
+using ::aidl::android::getParameterSpecificField;
using ::aidl::android::aidl_utils::statusTFromBinderStatus;
using ::aidl::android::hardware::audio::effect::Capability;
using ::aidl::android::hardware::audio::effect::DynamicsProcessing;
using ::aidl::android::hardware::audio::effect::Parameter;
using ::aidl::android::hardware::audio::effect::toString;
+using ::aidl::android::hardware::audio::effect::VendorExtension;
using ::android::status_t;
using utils::EffectParamReader;
using utils::EffectParamWriter;
status_t AidlConversionDp::setParameter(EffectParamReader& param) {
uint32_t type = 0;
- if (OK != param.readFromParameter(&type)) {
- ALOGE("%s invalid param %s", __func__, param.toString().c_str());
- return BAD_VALUE;
- }
+ RETURN_STATUS_IF_ERROR(param.readFromParameter(&type));
Parameter aidlParam;
switch (type) {
case DP_PARAM_INPUT_GAIN: {
DynamicsProcessing::InputGain inputGainAidl;
- if (OK != param.readFromParameter(&inputGainAidl.channel) ||
- OK != param.readFromValue(&inputGainAidl.gainDb)) {
- ALOGE("%s invalid inputGain %s", __func__, param.toString().c_str());
- return BAD_VALUE;
- }
+ RETURN_STATUS_IF_ERROR(param.readFromParameter(&inputGainAidl.channel));
+ RETURN_STATUS_IF_ERROR(param.readFromValue(&inputGainAidl.gainDb));
aidlParam = MAKE_SPECIFIC_PARAMETER(DynamicsProcessing, dynamicsProcessing, inputGain,
{inputGainAidl});
break;
@@ -122,8 +117,12 @@
break;
}
default: {
- ALOGW("%s unknown param %s", __func__, param.toString().c_str());
- return BAD_VALUE;
+ // For a vendor extension, copy the data area into DefaultExtension; the parameter is ignored
+ VendorExtension ext = VALUE_OR_RETURN_STATUS(
+ aidl::android::legacy2aidl_EffectParameterReader_Data_VendorExtension(param));
+ aidlParam =
+ MAKE_SPECIFIC_PARAMETER(DynamicsProcessing, dynamicsProcessing, vendor, ext);
+ break;
}
}
@@ -132,17 +131,12 @@
status_t AidlConversionDp::getParameter(EffectParamWriter& param) {
uint32_t type = 0;
- if (OK != param.readFromParameter(&type)) {
- ALOGE("%s invalid param %s", __func__, param.toString().c_str());
- }
+ RETURN_STATUS_IF_ERROR(param.readFromParameter(&type));
Parameter aidlParam;
switch (type) {
case DP_PARAM_INPUT_GAIN: {
int32_t channel;
- if (OK != param.readFromParameter(&channel)) {
- ALOGE("%s invalid inputGain %s", __func__, param.toString().c_str());
- return BAD_VALUE;
- }
+ RETURN_STATUS_IF_ERROR(param.readFromParameter(&channel));
Parameter::Id id = MAKE_SPECIFIC_PARAMETER_ID(DynamicsProcessing, dynamicsProcessingTag,
DynamicsProcessing::inputGain);
RETURN_STATUS_IF_ERROR(statusTFromBinderStatus(mEffect->getParameter(id, &aidlParam)));
@@ -161,11 +155,6 @@
return BAD_VALUE;
}
case DP_PARAM_ENGINE_ARCHITECTURE: {
- int32_t channel;
- if (OK != param.readFromParameter(&channel)) {
- ALOGE("%s invalid inputGain %s", __func__, param.toString().c_str());
- return BAD_VALUE;
- }
Parameter::Id id = MAKE_SPECIFIC_PARAMETER_ID(DynamicsProcessing, dynamicsProcessingTag,
DynamicsProcessing::engineArchitecture);
RETURN_STATUS_IF_ERROR(statusTFromBinderStatus(mEffect->getParameter(id, &aidlParam)));
@@ -186,18 +175,15 @@
VALUE_OR_RETURN_STATUS(convertIntegral<int32_t>(engine.postEqStage.inUse));
int32_t limiterInUse =
VALUE_OR_RETURN_STATUS(convertIntegral<int32_t>(engine.limiterInUse));
- if (OK != param.writeToValue(&resolution) ||
- OK != param.writeToValue(&engine.preferredProcessingDurationMs) ||
- OK != param.writeToValue(&preEqInUse) ||
- OK != param.writeToValue(&engine.preEqStage.bandCount) ||
- OK != param.writeToValue(&mbcInUse) ||
- OK != param.writeToValue(&engine.mbcStage.bandCount) ||
- OK != param.writeToValue(&postEqInUse) ||
- OK != param.writeToValue(&engine.postEqStage.bandCount) ||
- OK != param.writeToValue(&limiterInUse)) {
- ALOGE("%s invalid engineArchitecture %s", __func__, param.toString().c_str());
- return BAD_VALUE;
- }
+ RETURN_STATUS_IF_ERROR(param.writeToValue(&resolution));
+ RETURN_STATUS_IF_ERROR(param.writeToValue(&engine.preferredProcessingDurationMs));
+ RETURN_STATUS_IF_ERROR(param.writeToValue(&preEqInUse));
+ RETURN_STATUS_IF_ERROR(param.writeToValue(&engine.preEqStage.bandCount));
+ RETURN_STATUS_IF_ERROR(param.writeToValue(&mbcInUse));
+ RETURN_STATUS_IF_ERROR(param.writeToValue(&engine.mbcStage.bandCount));
+ RETURN_STATUS_IF_ERROR(param.writeToValue(&postEqInUse));
+ RETURN_STATUS_IF_ERROR(param.writeToValue(&engine.postEqStage.bandCount));
+ RETURN_STATUS_IF_ERROR(param.writeToValue(&limiterInUse));
mEngine = engine;
return OK;
}
@@ -223,110 +209,94 @@
return getLimiterConfig(param);
}
case DP_PARAM_GET_CHANNEL_COUNT: {
- uint32_t channel = VALUE_OR_RETURN_STATUS(
- aidl::android::aidl2legacy_AudioChannelLayout_audio_channel_mask_t(
- mCommon.input.base.channelMask, true /* input */));
- if (OK != param.writeToValue(&channel)) {
- ALOGE("%s write channel number %d to param failed %s", __func__, channel,
- param.toString().c_str());
- return BAD_VALUE;
- }
+ uint32_t channel = ::aidl::android::hardware::audio::common::getChannelCount(
+ mCommon.input.base.channelMask);
+ RETURN_STATUS_IF_ERROR(param.writeToValue(&channel));
return OK;
}
default: {
- ALOGW("%s unknown param %s", __func__, param.toString().c_str());
- return BAD_VALUE;
+ VENDOR_EXTENSION_GET_AND_RETURN(DynamicsProcessing, dynamicsProcessing, param);
}
}
}
-aidl::ConversionResult<DynamicsProcessing::ChannelConfig>
+ConversionResult<DynamicsProcessing::ChannelConfig>
AidlConversionDp::readChannelConfigFromParam(EffectParamReader& param) {
int32_t enable, channel;
- if (OK != param.readFromParameter(&channel) || OK != param.readFromValue(&enable)) {
- ALOGE("%s invalid channel config param %s", __func__, param.toString().c_str());
- return ::android::base::unexpected(::android::BAD_VALUE);
- }
+ RETURN_IF_ERROR(param.readFromParameter(&channel));
+ RETURN_IF_ERROR(param.readFromValue(&enable));
+
return DynamicsProcessing::ChannelConfig(
{.enable = VALUE_OR_RETURN(convertIntegral<bool>(enable)), .channel = channel});
}
-aidl::ConversionResult<DynamicsProcessing::EqBandConfig>
+ConversionResult<DynamicsProcessing::EqBandConfig>
AidlConversionDp::readEqBandConfigFromParam(EffectParamReader& param) {
DynamicsProcessing::EqBandConfig config;
int32_t enable;
- if (OK != param.readFromParameter(&config.channel) ||
- OK != param.readFromParameter(&config.band) ||
- OK != param.readFromValue(&enable) ||
- OK != param.readFromValue(&config.cutoffFrequencyHz) ||
- OK != param.readFromValue(&config.gainDb)) {
- ALOGE("%s invalid eq band param %s", __func__, param.toString().c_str());
- return ::android::base::unexpected(::android::BAD_VALUE);
- }
+ RETURN_IF_ERROR(param.readFromParameter(&config.channel));
+ RETURN_IF_ERROR(param.readFromParameter(&config.band));
+ RETURN_IF_ERROR(param.readFromValue(&enable));
+ RETURN_IF_ERROR(param.readFromValue(&config.cutoffFrequencyHz));
+ RETURN_IF_ERROR(param.readFromValue(&config.gainDb));
+
config.enable = VALUE_OR_RETURN(convertIntegral<bool>(enable));
return config;
}
-aidl::ConversionResult<DynamicsProcessing::MbcBandConfig>
+ConversionResult<DynamicsProcessing::MbcBandConfig>
AidlConversionDp::readMbcBandConfigFromParam(EffectParamReader& param) {
DynamicsProcessing::MbcBandConfig config;
int32_t enable;
- if (OK != param.readFromParameter(&config.channel) ||
- OK != param.readFromParameter(&config.band) ||
- OK != param.readFromValue(&enable) ||
- OK != param.readFromValue(&config.cutoffFrequencyHz) ||
- OK != param.readFromValue(&config.attackTimeMs) ||
- OK != param.readFromValue(&config.releaseTimeMs) ||
- OK != param.readFromValue(&config.ratio) ||
- OK != param.readFromValue(&config.thresholdDb) ||
- OK != param.readFromValue(&config.kneeWidthDb) ||
- OK != param.readFromValue(&config.noiseGateThresholdDb) ||
- OK != param.readFromValue(&config.expanderRatio) ||
- OK != param.readFromValue(&config.preGainDb) ||
- OK != param.readFromValue(&config.postGainDb)) {
- ALOGE("%s invalid mbc band config param %s", __func__, param.toString().c_str());
- return ::android::base::unexpected(::android::BAD_VALUE);
- }
+ RETURN_IF_ERROR(param.readFromParameter(&config.channel));
+ RETURN_IF_ERROR(param.readFromParameter(&config.band));
+ RETURN_IF_ERROR(param.readFromValue(&enable));
+ RETURN_IF_ERROR(param.readFromValue(&config.cutoffFrequencyHz));
+ RETURN_IF_ERROR(param.readFromValue(&config.attackTimeMs));
+ RETURN_IF_ERROR(param.readFromValue(&config.releaseTimeMs));
+ RETURN_IF_ERROR(param.readFromValue(&config.ratio));
+ RETURN_IF_ERROR(param.readFromValue(&config.thresholdDb));
+ RETURN_IF_ERROR(param.readFromValue(&config.kneeWidthDb));
+ RETURN_IF_ERROR(param.readFromValue(&config.noiseGateThresholdDb));
+ RETURN_IF_ERROR(param.readFromValue(&config.expanderRatio));
+ RETURN_IF_ERROR(param.readFromValue(&config.preGainDb));
+ RETURN_IF_ERROR(param.readFromValue(&config.postGainDb));
+
config.enable = VALUE_OR_RETURN(convertIntegral<bool>(enable));
return config;
}
-aidl::ConversionResult<DynamicsProcessing::LimiterConfig>
+ConversionResult<DynamicsProcessing::LimiterConfig>
AidlConversionDp::readLimiterConfigFromParam(EffectParamReader& param) {
DynamicsProcessing::LimiterConfig config;
int32_t enable, inUse;
- if (OK != param.readFromParameter(&config.channel) ||
- OK != param.readFromValue(&inUse) ||
- OK != param.readFromValue(&enable) ||
- OK != param.readFromValue(&config.linkGroup) ||
- OK != param.readFromValue(&config.attackTimeMs) ||
- OK != param.readFromValue(&config.releaseTimeMs) ||
- OK != param.readFromValue(&config.ratio) ||
- OK != param.readFromValue(&config.thresholdDb) ||
- OK != param.readFromValue(&config.postGainDb)) {
- ALOGE("%s invalid limiter config param %s", __func__, param.toString().c_str());
- return ::android::base::unexpected(::android::BAD_VALUE);
- }
+ RETURN_IF_ERROR(param.readFromParameter(&config.channel));
+ RETURN_IF_ERROR(param.readFromValue(&inUse));
+ RETURN_IF_ERROR(param.readFromValue(&enable));
+ RETURN_IF_ERROR(param.readFromValue(&config.linkGroup));
+ RETURN_IF_ERROR(param.readFromValue(&config.attackTimeMs));
+ RETURN_IF_ERROR(param.readFromValue(&config.releaseTimeMs));
+ RETURN_IF_ERROR(param.readFromValue(&config.ratio));
+ RETURN_IF_ERROR(param.readFromValue(&config.thresholdDb));
+ RETURN_IF_ERROR(param.readFromValue(&config.postGainDb));
+
config.enable = VALUE_OR_RETURN(convertIntegral<bool>(enable));
return config;
}
-aidl::ConversionResult<DynamicsProcessing::EngineArchitecture>
+ConversionResult<DynamicsProcessing::EngineArchitecture>
AidlConversionDp::readEngineArchitectureFromParam(EffectParamReader& param) {
DynamicsProcessing::EngineArchitecture engine;
int32_t variant, preEqInUse, mbcInUse, postEqInUse, limiterInUse;
- if (OK != param.readFromValue(&variant) &&
- OK != param.readFromValue(&engine.preferredProcessingDurationMs) &&
- OK != param.readFromValue(&preEqInUse) &&
- OK != param.readFromValue(&engine.preEqStage.bandCount) &&
- OK != param.readFromValue(&mbcInUse) &&
- OK != param.readFromValue(&engine.mbcStage.bandCount) &&
- OK != param.readFromValue(&postEqInUse) &&
- OK != param.readFromValue(&engine.postEqStage.bandCount) &&
- OK != param.readFromValue(&limiterInUse)) {
- ALOGE("%s invalid engineArchitecture %s", __func__, param.toString().c_str());
- return ::android::base::unexpected(::android::BAD_VALUE);
- }
+ RETURN_IF_ERROR(param.readFromValue(&variant));
+ RETURN_IF_ERROR(param.readFromValue(&engine.preferredProcessingDurationMs));
+ RETURN_IF_ERROR(param.readFromValue(&preEqInUse));
+ RETURN_IF_ERROR(param.readFromValue(&engine.preEqStage.bandCount));
+ RETURN_IF_ERROR(param.readFromValue(&mbcInUse));
+ RETURN_IF_ERROR(param.readFromValue(&engine.mbcStage.bandCount));
+ RETURN_IF_ERROR(param.readFromValue(&postEqInUse));
+ RETURN_IF_ERROR(param.readFromValue(&engine.postEqStage.bandCount));
+ RETURN_IF_ERROR(param.readFromValue(&limiterInUse));
engine.resolutionPreference = VALUE_OR_RETURN(
aidl::android::legacy2aidl_int32_DynamicsProcessing_ResolutionPreference(variant));
@@ -339,10 +309,7 @@
status_t AidlConversionDp::getChannelConfig(DynamicsProcessing::Tag tag, EffectParamWriter& param) {
int32_t channel;
- if (OK != param.readFromParameter(&channel)) {
- ALOGE("%s invalid parameter %s", __func__, param.toString().c_str());
- return BAD_VALUE;
- }
+ RETURN_STATUS_IF_ERROR(param.readFromParameter(&channel));
Parameter aidlParam;
Parameter::Id id = MAKE_SPECIFIC_PARAMETER_ID(DynamicsProcessing, dynamicsProcessingTag, tag);
@@ -384,13 +351,9 @@
for (const auto& ch : channels) {
if (ch.channel == channel) {
int32_t enable = ch.enable;
- if (OK != param.writeToValue(&inUse) ||
- OK != param.writeToValue(&enable) ||
- OK != param.writeToValue(&bandCount)) {
- ALOGE("%s failed to write into param value %s", __func__,
- param.toString().c_str());
- return BAD_VALUE;
- }
+ RETURN_STATUS_IF_ERROR(param.writeToValue(&inUse));
+ RETURN_STATUS_IF_ERROR(param.writeToValue(&enable));
+ RETURN_STATUS_IF_ERROR(param.writeToValue(&bandCount));
return OK;
}
}
@@ -400,10 +363,8 @@
status_t AidlConversionDp::getEqBandConfig(DynamicsProcessing::Tag tag, EffectParamWriter& param) {
int32_t channel, band;
- if (OK != param.readFromParameter(&channel) || OK != param.readFromParameter(&band)) {
- ALOGE("%s invalid parameter %s", __func__, param.toString().c_str());
- return BAD_VALUE;
- }
+ RETURN_STATUS_IF_ERROR(param.readFromParameter(&channel));
+ RETURN_STATUS_IF_ERROR(param.readFromParameter(&band));
Parameter aidlParam;
Parameter::Id id = MAKE_SPECIFIC_PARAMETER_ID(DynamicsProcessing, dynamicsProcessingTag, tag);
@@ -425,12 +386,9 @@
for (const auto& bandIt : bands) {
if (bandIt.channel == channel && bandIt.band == band) {
int32_t enable = bandIt.enable;
- if (OK != param.writeToValue(&enable) ||
- OK != param.writeToValue(&bandIt.cutoffFrequencyHz) ||
- OK != param.writeToValue(&bandIt.gainDb)) {
- ALOGE("%s failed to write into param value %s", __func__, param.toString().c_str());
- return BAD_VALUE;
- }
+ RETURN_STATUS_IF_ERROR(param.writeToValue(&enable));
+ RETURN_STATUS_IF_ERROR(param.writeToValue(&bandIt.cutoffFrequencyHz));
+ RETURN_STATUS_IF_ERROR(param.writeToValue(&bandIt.gainDb));
return OK;
}
}
@@ -440,10 +398,8 @@
status_t AidlConversionDp::getMbcBandConfig(EffectParamWriter& param) {
int32_t channel, band;
- if (OK != param.readFromParameter(&channel) || OK != param.readFromParameter(&band)) {
- ALOGE("%s invalid parameter %s", __func__, param.toString().c_str());
- return BAD_VALUE;
- }
+ RETURN_STATUS_IF_ERROR(param.readFromParameter(&channel));
+ RETURN_STATUS_IF_ERROR(param.readFromParameter(&band));
Parameter aidlParam;
Parameter::Id id = MAKE_SPECIFIC_PARAMETER_ID(DynamicsProcessing, dynamicsProcessingTag,
DynamicsProcessing::mbcBand);
@@ -457,20 +413,17 @@
for (const auto& bandIt : bands) {
if (bandIt.channel == channel && bandIt.band == band) {
int32_t enable = bandIt.enable;
- if (OK != param.writeToValue(&enable) ||
- OK != param.writeToValue(&bandIt.cutoffFrequencyHz) ||
- OK != param.writeToValue(&bandIt.attackTimeMs) ||
- OK != param.writeToValue(&bandIt.releaseTimeMs) ||
- OK != param.writeToValue(&bandIt.ratio) ||
- OK != param.writeToValue(&bandIt.thresholdDb) ||
- OK != param.writeToValue(&bandIt.kneeWidthDb) ||
- OK != param.writeToValue(&bandIt.noiseGateThresholdDb) ||
- OK != param.writeToValue(&bandIt.expanderRatio) ||
- OK != param.writeToValue(&bandIt.preGainDb) ||
- OK != param.writeToValue(&bandIt.postGainDb)) {
- ALOGE("%s failed to write into param value %s", __func__, param.toString().c_str());
- return BAD_VALUE;
- }
+ RETURN_STATUS_IF_ERROR(param.writeToValue(&enable));
+ RETURN_STATUS_IF_ERROR(param.writeToValue(&bandIt.cutoffFrequencyHz));
+ RETURN_STATUS_IF_ERROR(param.writeToValue(&bandIt.attackTimeMs));
+ RETURN_STATUS_IF_ERROR(param.writeToValue(&bandIt.releaseTimeMs));
+ RETURN_STATUS_IF_ERROR(param.writeToValue(&bandIt.ratio));
+ RETURN_STATUS_IF_ERROR(param.writeToValue(&bandIt.thresholdDb));
+ RETURN_STATUS_IF_ERROR(param.writeToValue(&bandIt.kneeWidthDb));
+ RETURN_STATUS_IF_ERROR(param.writeToValue(&bandIt.noiseGateThresholdDb));
+ RETURN_STATUS_IF_ERROR(param.writeToValue(&bandIt.expanderRatio));
+ RETURN_STATUS_IF_ERROR(param.writeToValue(&bandIt.preGainDb));
+ RETURN_STATUS_IF_ERROR(param.writeToValue(&bandIt.postGainDb));
return OK;
}
}
@@ -480,10 +433,7 @@
status_t AidlConversionDp::getLimiterConfig(EffectParamWriter& param) {
int32_t channel;
- if (OK != param.readFromParameter(&channel)) {
- ALOGE("%s invalid parameter %s", __func__, param.toString().c_str());
- return BAD_VALUE;
- }
+ RETURN_STATUS_IF_ERROR(param.readFromParameter(&channel));
Parameter aidlParam;
Parameter::Id id = MAKE_SPECIFIC_PARAMETER_ID(DynamicsProcessing, dynamicsProcessingTag,
DynamicsProcessing::limiter);
@@ -498,17 +448,14 @@
if (config.channel == channel) {
int32_t inUse = mEngine.limiterInUse;
int32_t enable = config.enable;
- if (OK != param.writeToValue(&inUse) ||
- OK != param.writeToValue(&enable) ||
- OK != param.writeToValue(&config.linkGroup) ||
- OK != param.writeToValue(&config.attackTimeMs) ||
- OK != param.writeToValue(&config.releaseTimeMs) ||
- OK != param.writeToValue(&config.ratio) ||
- OK != param.writeToValue(&config.thresholdDb) ||
- OK != param.writeToValue(&config.postGainDb)) {
- ALOGE("%s failed to write into param value %s", __func__, param.toString().c_str());
- return BAD_VALUE;
- }
+ RETURN_STATUS_IF_ERROR(param.writeToValue(&inUse));
+ RETURN_STATUS_IF_ERROR(param.writeToValue(&enable));
+ RETURN_STATUS_IF_ERROR(param.writeToValue(&config.linkGroup));
+ RETURN_STATUS_IF_ERROR(param.writeToValue(&config.attackTimeMs));
+ RETURN_STATUS_IF_ERROR(param.writeToValue(&config.releaseTimeMs));
+ RETURN_STATUS_IF_ERROR(param.writeToValue(&config.ratio));
+ RETURN_STATUS_IF_ERROR(param.writeToValue(&config.thresholdDb));
+ RETURN_STATUS_IF_ERROR(param.writeToValue(&config.postGainDb));
return OK;
}
}
diff --git a/media/libaudiohal/impl/effectsAidlConversion/AidlConversionDynamicsProcessing.h b/media/libaudiohal/impl/effectsAidlConversion/AidlConversionDynamicsProcessing.h
index 6bab18d..c5d5a54 100644
--- a/media/libaudiohal/impl/effectsAidlConversion/AidlConversionDynamicsProcessing.h
+++ b/media/libaudiohal/impl/effectsAidlConversion/AidlConversionDynamicsProcessing.h
@@ -36,18 +36,18 @@
status_t setParameter(utils::EffectParamReader& param) override;
status_t getParameter(utils::EffectParamWriter& param) override;
- aidl::ConversionResult<
+ ConversionResult<
aidl::android::hardware::audio::effect::DynamicsProcessing::ChannelConfig>
readChannelConfigFromParam(utils::EffectParamReader& param);
- aidl::ConversionResult<aidl::android::hardware::audio::effect::DynamicsProcessing::EqBandConfig>
+ ConversionResult<aidl::android::hardware::audio::effect::DynamicsProcessing::EqBandConfig>
readEqBandConfigFromParam(utils::EffectParamReader& param);
- aidl::ConversionResult<
+ ConversionResult<
aidl::android::hardware::audio::effect::DynamicsProcessing::MbcBandConfig>
readMbcBandConfigFromParam(utils::EffectParamReader& param);
- aidl::ConversionResult<
+ ConversionResult<
aidl::android::hardware::audio::effect::DynamicsProcessing::LimiterConfig>
readLimiterConfigFromParam(utils::EffectParamReader& param);
- aidl::ConversionResult<
+ ConversionResult<
aidl::android::hardware::audio::effect::DynamicsProcessing::EngineArchitecture>
readEngineArchitectureFromParam(utils::EffectParamReader& param);
diff --git a/media/libaudiohal/impl/effectsAidlConversion/AidlConversionEnvReverb.cpp b/media/libaudiohal/impl/effectsAidlConversion/AidlConversionEnvReverb.cpp
index 0544e3f..754da43 100644
--- a/media/libaudiohal/impl/effectsAidlConversion/AidlConversionEnvReverb.cpp
+++ b/media/libaudiohal/impl/effectsAidlConversion/AidlConversionEnvReverb.cpp
@@ -24,7 +24,6 @@
#include <media/AidlConversionCppNdk.h>
#include <media/AidlConversionNdk.h>
#include <media/AidlConversionEffect.h>
-#include <media/audiohal/AudioEffectUuid.h>
#include <system/audio_effects/effect_environmentalreverb.h>
#include <utils/Log.h>
@@ -39,6 +38,7 @@
using ::aidl::android::aidl_utils::statusTFromBinderStatus;
using ::aidl::android::hardware::audio::effect::EnvironmentalReverb;
using ::aidl::android::hardware::audio::effect::Parameter;
+using ::aidl::android::hardware::audio::effect::VendorExtension;
using ::android::status_t;
using utils::EffectParamReader;
using utils::EffectParamWriter;
@@ -166,7 +166,13 @@
break;
}
default: {
- // TODO: handle with vendor extension
+ // for vendor extension, copy data area to the DefaultExtension, parameter ignored
+ VendorExtension ext = VALUE_OR_RETURN_STATUS(
+ aidl::android::legacy2aidl_EffectParameterReader_Data_VendorExtension(param));
+ Parameter aidlParam = MAKE_SPECIFIC_PARAMETER(EnvironmentalReverb,
+ environmentalReverb, vendor, ext);
+ RETURN_STATUS_IF_ERROR(statusTFromBinderStatus(mEffect->setParameter(aidlParam)));
+ break;
}
}
return OK;
@@ -240,8 +246,7 @@
break;
}
default: {
- // TODO: handle with vendor extension
- return BAD_VALUE;
+ VENDOR_EXTENSION_GET_AND_RETURN(EnvironmentalReverb, environmentalReverb, param);
}
}
return OK;
diff --git a/media/libaudiohal/impl/effectsAidlConversion/AidlConversionEq.cpp b/media/libaudiohal/impl/effectsAidlConversion/AidlConversionEq.cpp
index 916ed40..45b98a1 100644
--- a/media/libaudiohal/impl/effectsAidlConversion/AidlConversionEq.cpp
+++ b/media/libaudiohal/impl/effectsAidlConversion/AidlConversionEq.cpp
@@ -23,7 +23,6 @@
#include <error/expected_utils.h>
#include <media/AidlConversionNdk.h>
#include <media/AidlConversionEffect.h>
-#include <media/audiohal/AudioEffectUuid.h>
#include <system/audio_effects/effect_equalizer.h>
#include <utils/Log.h>
@@ -38,6 +37,7 @@
using ::aidl::android::hardware::audio::effect::Equalizer;
using ::aidl::android::hardware::audio::effect::Parameter;
using ::aidl::android::hardware::audio::effect::Range;
+using ::aidl::android::hardware::audio::effect::VendorExtension;
using ::android::base::unexpected;
using ::android::status_t;
using utils::EffectParamReader;
@@ -59,7 +59,7 @@
return BAD_VALUE;
}
aidlParam = MAKE_SPECIFIC_PARAMETER(Equalizer, equalizer, preset, (int)value);
- return statusTFromBinderStatus(mEffect->setParameter(aidlParam));
+ break;
}
case EQ_PARAM_BAND_LEVEL: {
int32_t band;
@@ -70,7 +70,7 @@
}
std::vector<Equalizer::BandLevel> bandLevels = {{.index = band, .levelMb = level}};
aidlParam = MAKE_SPECIFIC_PARAMETER(Equalizer, equalizer, bandLevels, bandLevels);
- return statusTFromBinderStatus(mEffect->setParameter(aidlParam));
+ break;
}
case EQ_PARAM_PROPERTIES: {
int16_t num;
@@ -81,7 +81,7 @@
// set preset if it's valid
if (num >= 0) {
aidlParam = MAKE_SPECIFIC_PARAMETER(Equalizer, equalizer, preset, (int)num);
- return statusTFromBinderStatus(mEffect->setParameter(aidlParam));
+ break;
}
// set bandLevel if no preset was set
if (OK != param.readFromValue(&num)) {
@@ -98,30 +98,34 @@
bandLevels.push_back(level);
}
aidlParam = MAKE_SPECIFIC_PARAMETER(Equalizer, equalizer, bandLevels, bandLevels);
- return statusTFromBinderStatus(mEffect->setParameter(aidlParam));
+ break;
}
default: {
- // TODO: implement vendor extension parameters
- ALOGW("%s unknown param %s", __func__, param.toString().c_str());
- return BAD_VALUE;
+ // for vendor extension, copy data area to the DefaultExtension, parameter ignored
+ VendorExtension ext = VALUE_OR_RETURN_STATUS(
+ aidl::android::legacy2aidl_EffectParameterReader_Data_VendorExtension(param));
+ aidlParam = MAKE_SPECIFIC_PARAMETER(Equalizer, equalizer, vendor, ext);
+ break;
}
}
+
+ return statusTFromBinderStatus(mEffect->setParameter(aidlParam));
}
-aidl::ConversionResult<Parameter> AidlConversionEq::getAidlParameter(Equalizer::Tag tag) {
+ConversionResult<Parameter> AidlConversionEq::getAidlParameter(Equalizer::Tag tag) {
Parameter aidlParam;
Parameter::Id id = MAKE_SPECIFIC_PARAMETER_ID(Equalizer, equalizerTag, tag);
RETURN_IF_ERROR(statusTFromBinderStatus(mEffect->getParameter(id, &aidlParam)));
return aidlParam;
}
-aidl::ConversionResult<int32_t> AidlConversionEq::getParameterPreset() {
+ConversionResult<int32_t> AidlConversionEq::getParameterPreset() {
Parameter aidlParam = VALUE_OR_RETURN_STATUS(getAidlParameter(Equalizer::preset));
return VALUE_OR_RETURN_STATUS(GET_PARAMETER_SPECIFIC_FIELD(aidlParam, Equalizer, equalizer,
Equalizer::preset, int32_t));
}
-aidl::ConversionResult<std::string> AidlConversionEq::getParameterPresetName(
+ConversionResult<std::string> AidlConversionEq::getParameterPresetName(
EffectParamWriter& param) {
int32_t presetIdx;
if (OK != param.readFromParameter(&presetIdx)) {
@@ -289,8 +293,7 @@
return OK;
}
default: {
- ALOGW("%s unknown param %s", __func__, param.toString().c_str());
- return BAD_VALUE;
+ VENDOR_EXTENSION_GET_AND_RETURN(Equalizer, equalizer, param);
}
}
diff --git a/media/libaudiohal/impl/effectsAidlConversion/AidlConversionEq.h b/media/libaudiohal/impl/effectsAidlConversion/AidlConversionEq.h
index 2509c20..f94556c 100644
--- a/media/libaudiohal/impl/effectsAidlConversion/AidlConversionEq.h
+++ b/media/libaudiohal/impl/effectsAidlConversion/AidlConversionEq.h
@@ -33,10 +33,10 @@
private:
status_t setParameter(utils::EffectParamReader& param) override;
status_t getParameter(utils::EffectParamWriter& param) override;
- aidl::ConversionResult<::aidl::android::hardware::audio::effect::Parameter> getAidlParameter(
+ ConversionResult<::aidl::android::hardware::audio::effect::Parameter> getAidlParameter(
::aidl::android::hardware::audio::effect::Equalizer::Tag tag);
- aidl::ConversionResult<int32_t> getParameterPreset();
- aidl::ConversionResult<std::string> getParameterPresetName(utils::EffectParamWriter& param);
+ ConversionResult<int32_t> getParameterPreset();
+ ConversionResult<std::string> getParameterPresetName(utils::EffectParamWriter& param);
};
} // namespace effect
diff --git a/media/libaudiohal/impl/effectsAidlConversion/AidlConversionHapticGenerator.cpp b/media/libaudiohal/impl/effectsAidlConversion/AidlConversionHapticGenerator.cpp
index 9575e7d..73430ba 100644
--- a/media/libaudiohal/impl/effectsAidlConversion/AidlConversionHapticGenerator.cpp
+++ b/media/libaudiohal/impl/effectsAidlConversion/AidlConversionHapticGenerator.cpp
@@ -23,7 +23,6 @@
#include <error/expected_utils.h>
#include <media/AidlConversionNdk.h>
#include <media/AidlConversionEffect.h>
-#include <media/audiohal/AudioEffectUuid.h>
#include <system/audio_effects/effect_hapticgenerator.h>
#include <utils/Log.h>
@@ -33,9 +32,11 @@
namespace android {
namespace effect {
+using ::aidl::android::getParameterSpecificField;
using ::aidl::android::aidl_utils::statusTFromBinderStatus;
using ::aidl::android::hardware::audio::effect::HapticGenerator;
using ::aidl::android::hardware::audio::effect::Parameter;
+using ::aidl::android::hardware::audio::effect::VendorExtension;
using ::android::status_t;
using utils::EffectParamReader;
using utils::EffectParamWriter;
@@ -76,9 +77,11 @@
break;
}
default: {
- // TODO: implement vendor extension parameters
- ALOGW("%s unknown param %s", __func__, param.toString().c_str());
- return BAD_VALUE;
+ // for vendor extension, copy data area to the DefaultExtension, parameter ignored
+ VendorExtension ext = VALUE_OR_RETURN_STATUS(
+ aidl::android::legacy2aidl_EffectParameterReader_Data_VendorExtension(param));
+ aidlParam = MAKE_SPECIFIC_PARAMETER(HapticGenerator, hapticGenerator, vendor, ext);
+ break;
}
}
@@ -86,8 +89,8 @@
}
// No parameter to get for HapticGenerator
-status_t AidlConversionHapticGenerator::getParameter(EffectParamWriter& param __unused) {
- return OK;
+status_t AidlConversionHapticGenerator::getParameter(EffectParamWriter& param) {
+ VENDOR_EXTENSION_GET_AND_RETURN(HapticGenerator, hapticGenerator, param);
}
} // namespace effect
diff --git a/media/libaudiohal/impl/effectsAidlConversion/AidlConversionLoudnessEnhancer.cpp b/media/libaudiohal/impl/effectsAidlConversion/AidlConversionLoudnessEnhancer.cpp
index e3c898f..31eec65 100644
--- a/media/libaudiohal/impl/effectsAidlConversion/AidlConversionLoudnessEnhancer.cpp
+++ b/media/libaudiohal/impl/effectsAidlConversion/AidlConversionLoudnessEnhancer.cpp
@@ -23,7 +23,6 @@
#include <error/expected_utils.h>
#include <media/AidlConversionNdk.h>
#include <media/AidlConversionEffect.h>
-#include <media/audiohal/AudioEffectUuid.h>
#include <system/audio_effects/effect_loudnessenhancer.h>
#include <utils/Log.h>
@@ -37,6 +36,7 @@
using ::aidl::android::getParameterSpecificField;
using ::aidl::android::hardware::audio::effect::LoudnessEnhancer;
using ::aidl::android::hardware::audio::effect::Parameter;
+using ::aidl::android::hardware::audio::effect::VendorExtension;
using ::android::status_t;
using utils::EffectParamReader;
using utils::EffectParamWriter;
@@ -56,9 +56,11 @@
break;
}
default: {
- // TODO: implement vendor extension parameters
- ALOGW("%s unknown param %s", __func__, param.toString().c_str());
- return BAD_VALUE;
+ // for vendor extension, copy data area to the DefaultExtension, parameter ignored
+ VendorExtension ext = VALUE_OR_RETURN_STATUS(
+ aidl::android::legacy2aidl_EffectParameterReader_Data_VendorExtension(param));
+ aidlParam = MAKE_SPECIFIC_PARAMETER(LoudnessEnhancer, loudnessEnhancer, vendor, ext);
+ break;
}
}
return statusTFromBinderStatus(mEffect->setParameter(aidlParam));
@@ -84,9 +86,7 @@
return param.writeToValue(&gain);
}
default: {
- // TODO: implement vendor extension parameters
- ALOGW("%s unknown param %s", __func__, param.toString().c_str());
- return BAD_VALUE;
+ VENDOR_EXTENSION_GET_AND_RETURN(LoudnessEnhancer, loudnessEnhancer, param);
}
}
}
diff --git a/media/libaudiohal/impl/effectsAidlConversion/AidlConversionNoiseSuppression.cpp b/media/libaudiohal/impl/effectsAidlConversion/AidlConversionNoiseSuppression.cpp
index 69184cf..7c34ed7 100644
--- a/media/libaudiohal/impl/effectsAidlConversion/AidlConversionNoiseSuppression.cpp
+++ b/media/libaudiohal/impl/effectsAidlConversion/AidlConversionNoiseSuppression.cpp
@@ -23,7 +23,6 @@
#include <error/expected_utils.h>
#include <media/AidlConversionNdk.h>
#include <media/AidlConversionEffect.h>
-#include <media/audiohal/AudioEffectUuid.h>
#include <system/audio_effects/effect_ns.h>
#include <utils/Log.h>
@@ -33,10 +32,11 @@
namespace android {
namespace effect {
-using ::aidl::android::aidl_utils::statusTFromBinderStatus;
using ::aidl::android::getParameterSpecificField;
-using ::aidl::android::hardware::audio::effect::Parameter;
+using ::aidl::android::aidl_utils::statusTFromBinderStatus;
using ::aidl::android::hardware::audio::effect::NoiseSuppression;
+using ::aidl::android::hardware::audio::effect::Parameter;
+using ::aidl::android::hardware::audio::effect::VendorExtension;
using ::android::status_t;
using utils::EffectParamReader;
using utils::EffectParamWriter;
@@ -61,9 +61,11 @@
break;
}
default: {
- // TODO: implement vendor extension parameters
- ALOGW("%s unknown param %s", __func__, param.toString().c_str());
- return BAD_VALUE;
+ // for vendor extension, copy data area to the DefaultExtension, parameter ignored
+ VendorExtension ext = VALUE_OR_RETURN_STATUS(
+ aidl::android::legacy2aidl_EffectParameterReader_Data_VendorExtension(param));
+ aidlParam = MAKE_SPECIFIC_PARAMETER(NoiseSuppression, noiseSuppression, vendor, ext);
+ break;
}
}
return statusTFromBinderStatus(mEffect->setParameter(aidlParam));
@@ -100,9 +102,7 @@
break;
}
default: {
- // TODO: implement vendor extension parameters
- ALOGW("%s unknown param %s", __func__, param.toString().c_str());
- return BAD_VALUE;
+ VENDOR_EXTENSION_GET_AND_RETURN(NoiseSuppression, noiseSuppression, param);
}
}
return param.writeToValue(&value);
diff --git a/media/libaudiohal/impl/effectsAidlConversion/AidlConversionPresetReverb.cpp b/media/libaudiohal/impl/effectsAidlConversion/AidlConversionPresetReverb.cpp
index 3e9bf4b..e936aef 100644
--- a/media/libaudiohal/impl/effectsAidlConversion/AidlConversionPresetReverb.cpp
+++ b/media/libaudiohal/impl/effectsAidlConversion/AidlConversionPresetReverb.cpp
@@ -23,7 +23,6 @@
#include <error/expected_utils.h>
#include <media/AidlConversionNdk.h>
#include <media/AidlConversionEffect.h>
-#include <media/audiohal/AudioEffectUuid.h>
#include <system/audio_effects/effect_presetreverb.h>
#include <utils/Log.h>
@@ -38,6 +37,7 @@
using ::aidl::android::aidl_utils::statusTFromBinderStatus;
using ::aidl::android::hardware::audio::effect::Parameter;
using ::aidl::android::hardware::audio::effect::PresetReverb;
+using ::aidl::android::hardware::audio::effect::VendorExtension;
using ::android::status_t;
using utils::EffectParamReader;
using utils::EffectParamWriter;
@@ -59,7 +59,10 @@
aidlParam = MAKE_SPECIFIC_PARAMETER(PresetReverb, presetReverb, preset,
static_cast<PresetReverb::Presets>(value));
} else {
- // handle vendor extension
+ // for vendor extension, copy data area to the DefaultExtension, parameter ignored
+ VendorExtension ext = VALUE_OR_RETURN_STATUS(
+ aidl::android::legacy2aidl_EffectParameterReader_Data_VendorExtension(param));
+ aidlParam = MAKE_SPECIFIC_PARAMETER(PresetReverb, presetReverb, vendor, ext);
}
return statusTFromBinderStatus(mEffect->setParameter(aidlParam));
@@ -86,6 +89,7 @@
value = static_cast<uint16_t>(aidlPreset);
} else {
// handle vendor extension
+ VENDOR_EXTENSION_GET_AND_RETURN(PresetReverb, presetReverb, param);
}
return param.writeToValue(&value);
}
diff --git a/media/libaudiohal/impl/effectsAidlConversion/AidlConversionSpatializer.cpp b/media/libaudiohal/impl/effectsAidlConversion/AidlConversionSpatializer.cpp
index d2a94e4..eadd6c3 100644
--- a/media/libaudiohal/impl/effectsAidlConversion/AidlConversionSpatializer.cpp
+++ b/media/libaudiohal/impl/effectsAidlConversion/AidlConversionSpatializer.cpp
@@ -20,10 +20,11 @@
#define LOG_TAG "AidlConversionSpatializer"
//#define LOG_NDEBUG 0
+#include <aidl/android/hardware/audio/effect/DefaultExtension.h>
+#include <aidl/android/hardware/audio/effect/VendorExtension.h>
#include <error/expected_utils.h>
#include <media/AidlConversionNdk.h>
#include <media/AidlConversionEffect.h>
-#include <media/audiohal/AudioEffectUuid.h>
#include <system/audio_effects/effect_spatializer.h>
#include <utils/Log.h>
@@ -34,7 +35,9 @@
namespace effect {
using ::aidl::android::aidl_utils::statusTFromBinderStatus;
+using ::aidl::android::hardware::audio::effect::DefaultExtension;
using ::aidl::android::hardware::audio::effect::Parameter;
+using ::aidl::android::hardware::audio::effect::VendorExtension;
using ::android::status_t;
using utils::EffectParamReader;
using utils::EffectParamWriter;
@@ -46,18 +49,23 @@
}
status_t AidlConversionSpatializer::getParameter(EffectParamWriter& param) {
- Parameter aidlParam;
- Parameter::Id id = UNION_MAKE(Parameter::Id, vendorEffectTag, 0 /* no tag */);
- RETURN_STATUS_IF_ERROR(statusTFromBinderStatus(mEffect->getParameter(id, &aidlParam)));
- const auto& extBytes = VALUE_OR_RETURN_STATUS(
- ::aidl::android::aidl2legacy_ParameterExtension_vector_uint8(aidlParam));
- if (param.getValueSize() < extBytes.size()) {
- ALOGE("%s extension return data %zu exceed vsize %zu", __func__, extBytes.size(),
- param.getValueSize());
+ DefaultExtension defaultExt;
+ // read parameters into DefaultExtension vector<uint8_t>
+ if (OK != param.readFromParameter(defaultExt.bytes.data(), param.getParameterSize())) {
+ ALOGE("%s invalid param %s", __func__, param.toString().c_str());
param.setStatus(BAD_VALUE);
return BAD_VALUE;
}
- return param.writeToValue(extBytes.data(), extBytes.size());
+
+ VendorExtension idTag;
+ idTag.extension.setParcelable(defaultExt);
+ Parameter::Id id = UNION_MAKE(Parameter::Id, vendorEffectTag, idTag);
+ Parameter aidlParam;
+ RETURN_STATUS_IF_ERROR(statusTFromBinderStatus(mEffect->getParameter(id, &aidlParam)));
+ // copy the AIDL extension data back to effect_param_t
+ return VALUE_OR_RETURN_STATUS(
+ ::aidl::android::aidl2legacy_ParameterExtension_EffectParameterWriter(aidlParam,
+ param));
}
} // namespace effect
diff --git a/media/libaudiohal/impl/effectsAidlConversion/AidlConversionVendorExtension.cpp b/media/libaudiohal/impl/effectsAidlConversion/AidlConversionVendorExtension.cpp
index 584b60e..488d5cd 100644
--- a/media/libaudiohal/impl/effectsAidlConversion/AidlConversionVendorExtension.cpp
+++ b/media/libaudiohal/impl/effectsAidlConversion/AidlConversionVendorExtension.cpp
@@ -22,6 +22,7 @@
//#define LOG_NDEBUG 0
#include <aidl/android/hardware/audio/effect/DefaultExtension.h>
+#include <aidl/android/hardware/audio/effect/VendorExtension.h>
#include <error/expected_utils.h>
#include <media/AidlConversionNdk.h>
#include <media/AidlConversionEffect.h>
@@ -56,17 +57,11 @@
}
status_t AidlConversionVendorExtension::getParameter(EffectParamWriter& param) {
- int32_t tag;
- if (OK != param.readFromParameter(&tag)) {
- ALOGE("%s invalid param %s", __func__, param.toString().c_str());
- param.setStatus(BAD_VALUE);
- return BAD_VALUE;
- }
-
+ VendorExtension extId = VALUE_OR_RETURN_STATUS(
+ aidl::android::legacy2aidl_EffectParameterReader_Param_VendorExtension(param));
+ Parameter::Id id = UNION_MAKE(Parameter::Id, vendorEffectTag, extId);
Parameter aidlParam;
- Parameter::Id id = UNION_MAKE(Parameter::Id, vendorEffectTag, tag /* parameter tag */);
RETURN_STATUS_IF_ERROR(statusTFromBinderStatus(mEffect->getParameter(id, &aidlParam)));
-
// copy the AIDL extension data back to effect_param_t
return VALUE_OR_RETURN_STATUS(
::aidl::android::aidl2legacy_ParameterExtension_EffectParameterWriter(aidlParam,
diff --git a/media/libaudiohal/impl/effectsAidlConversion/AidlConversionVirtualizer.cpp b/media/libaudiohal/impl/effectsAidlConversion/AidlConversionVirtualizer.cpp
index fe74c8b..c95c3a9 100644
--- a/media/libaudiohal/impl/effectsAidlConversion/AidlConversionVirtualizer.cpp
+++ b/media/libaudiohal/impl/effectsAidlConversion/AidlConversionVirtualizer.cpp
@@ -24,7 +24,6 @@
#include <media/AidlConversionCppNdk.h>
#include <media/AidlConversionNdk.h>
#include <media/AidlConversionEffect.h>
-#include <media/audiohal/AudioEffectUuid.h>
#include <system/audio_effects/aidl_effects_utils.h>
#include <system/audio_effects/effect_virtualizer.h>
@@ -40,6 +39,7 @@
using ::aidl::android::hardware::audio::effect::Parameter;
using ::aidl::android::hardware::audio::effect::Range;
using ::aidl::android::hardware::audio::effect::Virtualizer;
+using ::aidl::android::hardware::audio::effect::VendorExtension;
using ::aidl::android::media::audio::common::AudioDeviceDescription;
using ::android::status_t;
using utils::EffectParamReader;
@@ -75,9 +75,11 @@
break;
}
default: {
- // TODO: implement vendor extension parameters
- ALOGW("%s unknown param %s", __func__, param.toString().c_str());
- return BAD_VALUE;
+ // for vendor extension, copy data area to the DefaultExtension, parameter ignored
+ VendorExtension ext = VALUE_OR_RETURN_STATUS(
+ aidl::android::legacy2aidl_EffectParameterReader_Data_VendorExtension(param));
+ aidlParam = MAKE_SPECIFIC_PARAMETER(Virtualizer, virtualizer, vendor, ext);
+ break;
}
}
return statusTFromBinderStatus(mEffect->setParameter(aidlParam));
@@ -153,9 +155,7 @@
return param.writeToValue(&deviceType);
}
default: {
- // TODO: implement vendor extension parameters
- ALOGW("%s unknown param %s", __func__, param.toString().c_str());
- return BAD_VALUE;
+ VENDOR_EXTENSION_GET_AND_RETURN(Virtualizer, virtualizer, param);
}
}
}
diff --git a/media/libaudiohal/impl/effectsAidlConversion/AidlConversionVisualizer.cpp b/media/libaudiohal/impl/effectsAidlConversion/AidlConversionVisualizer.cpp
index 7e1e6d7..2d5af59 100644
--- a/media/libaudiohal/impl/effectsAidlConversion/AidlConversionVisualizer.cpp
+++ b/media/libaudiohal/impl/effectsAidlConversion/AidlConversionVisualizer.cpp
@@ -24,7 +24,6 @@
#include <error/expected_utils.h>
#include <media/AidlConversionNdk.h>
#include <media/AidlConversionEffect.h>
-#include <media/audiohal/AudioEffectUuid.h>
#include <system/audio_effects/effect_visualizer.h>
#include <utils/Log.h>
@@ -34,9 +33,10 @@
namespace android {
namespace effect {
-using ::aidl::android::aidl_utils::statusTFromBinderStatus;
using ::aidl::android::getParameterSpecificField;
+using ::aidl::android::aidl_utils::statusTFromBinderStatus;
using ::aidl::android::hardware::audio::effect::Parameter;
+using ::aidl::android::hardware::audio::effect::VendorExtension;
using ::aidl::android::hardware::audio::effect::Visualizer;
using ::android::status_t;
using utils::EffectParamReader;
@@ -72,9 +72,11 @@
break;
}
default: {
- // TODO: implement vendor extension parameters
- ALOGW("%s unknown param %s", __func__, param.toString().c_str());
- return BAD_VALUE;
+ // for vendor extension, copy data area to the DefaultExtension, parameter ignored
+ VendorExtension ext = VALUE_OR_RETURN_STATUS(
+ aidl::android::legacy2aidl_EffectParameterReader_Data_VendorExtension(param));
+ aidlParam = MAKE_SPECIFIC_PARAMETER(Visualizer, visualizer, vendor, ext);
+ break;
}
}
return statusTFromBinderStatus(mEffect->setParameter(aidlParam));
@@ -130,9 +132,7 @@
return param.writeToValue(&value);
}
default: {
- // TODO: implement vendor extension parameters
- ALOGW("%s unknown param %s", __func__, param.toString().c_str());
- return BAD_VALUE;
+ VENDOR_EXTENSION_GET_AND_RETURN(Visualizer, visualizer, param);
}
}
}
diff --git a/media/libaudiohal/include/media/audiohal/AudioEffectUuid.h b/media/libaudiohal/include/media/audiohal/AudioEffectUuid.h
deleted file mode 100644
index 3b8076f..0000000
--- a/media/libaudiohal/include/media/audiohal/AudioEffectUuid.h
+++ /dev/null
@@ -1,118 +0,0 @@
-/*
- * Copyright (C) 2023 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#pragma once
-
-#include <aidl/android/media/audio/common/AudioUuid.h>
-
-namespace android {
-namespace effect {
-
-using ::aidl::android::media::audio::common::AudioUuid;
-
-// 7b491460-8d4d-11e0-bd61-0002a5d5c51b.
-static const AudioUuid kAcousticEchoCancelerTypeUUID = {static_cast<int32_t>(0x7b491460),
- 0x8d4d,
- 0x11e0,
- 0xbd61,
- {0x00, 0x02, 0xa5, 0xd5, 0xc5, 0x1b}};
-// 0xae3c653b-be18-4ab8-8938-418f0a7f06ac
-static const AudioUuid kAutomaticGainControl2TypeUUID = {static_cast<int32_t>(0xae3c653b),
- 0xbe18,
- 0x4ab8,
- 0x8938,
- {0x41, 0x8f, 0x0a, 0x7f, 0x06, 0xac}};
-// 0634f220-ddd4-11db-a0fc-0002a5d5c51b
-static const AudioUuid kBassBoostTypeUUID = {static_cast<int32_t>(0x0634f220),
- 0xddd4,
- 0x11db,
- 0xa0fc,
- {0x00, 0x02, 0xa5, 0xd5, 0xc5, 0x1b}};
-// fa81862a-588b-11ed-9b6a-0242ac120002
-static const AudioUuid kDownmixTypeUUID = {static_cast<int32_t>(0x381e49cc),
- 0xa858,
- 0x4aa2,
- 0x87f6,
- {0xe8, 0x38, 0x8e, 0x76, 0x01, 0xb2}};
-// 7261676f-6d75-7369-6364-28e2fd3ac39e
-static const AudioUuid kDynamicsProcessingTypeUUID = {static_cast<int32_t>(0x7261676f),
- 0x6d75,
- 0x7369,
- 0x6364,
- {0x28, 0xe2, 0xfd, 0x3a, 0xc3, 0x9e}};
-// 0bed4300-ddd6-11db-8f34-0002a5d5c51b.
-static const AudioUuid kEqualizerTypeUUID = {static_cast<int32_t>(0x0bed4300),
- 0xddd6,
- 0x11db,
- 0x8f34,
- {0x00, 0x02, 0xa5, 0xd5, 0xc5, 0x1b}};
-// 1411e6d6-aecd-4021-a1cf-a6aceb0d71e5
-static const AudioUuid kHapticGeneratorTypeUUID = {static_cast<int32_t>(0x1411e6d6),
- 0xaecd,
- 0x4021,
- 0xa1cf,
- {0xa6, 0xac, 0xeb, 0x0d, 0x71, 0xe5}};
-// fe3199be-aed0-413f-87bb-11260eb63cf1
-static const AudioUuid kLoudnessEnhancerTypeUUID = {static_cast<int32_t>(0xfe3199be),
- 0xaed0,
- 0x413f,
- 0x87bb,
- {0x11, 0x26, 0x0e, 0xb6, 0x3c, 0xf1}};
-// c2e5d5f0-94bd-4763-9cac-4e234d06839e
-static const AudioUuid kEnvReverbTypeUUID = {static_cast<int32_t>(0xc2e5d5f0),
- 0x94bd,
- 0x4763,
- 0x9cac,
- {0x4e, 0x23, 0x4d, 0x06, 0x83, 0x9e}};
-// 58b4b260-8e06-11e0-aa8e-0002a5d5c51b
-static const AudioUuid kNoiseSuppressionTypeUUID = {static_cast<int32_t>(0x58b4b260),
- 0x8e06,
- 0x11e0,
- 0xaa8e,
- {0x00, 0x02, 0xa5, 0xd5, 0xc5, 0x1b}};
-// 47382d60-ddd8-11db-bf3a-0002a5d5c51b
-static const AudioUuid kPresetReverbTypeUUID = {static_cast<int32_t>(0x47382d60),
- 0xddd8,
- 0x11db,
- 0xbf3a,
- {0x00, 0x02, 0xa5, 0xd5, 0xc5, 0x1b}};
-// ccd4cf09-a79d-46c2-9aae-06a1698d6c8f
-static const AudioUuid kSpatializerTypeUUID = {static_cast<int32_t>(0xccd4cf09),
- 0xa79d,
- 0x46c2,
- 0x9aae,
- {0x06, 0xa1, 0x69, 0x8d, 0x6c, 0x8f}};
-// 37cc2c00-dddd-11db-8577-0002a5d5c51b
-static const AudioUuid kVirtualizerTypeUUID = {static_cast<int32_t>(0x37cc2c00),
- 0xdddd,
- 0x11db,
- 0x8577,
- {0x00, 0x02, 0xa5, 0xd5, 0xc5, 0x1b}};
-// e46b26a0-dddd-11db-8afd-0002a5d5c51b
-static const AudioUuid kVisualizerTypeUUID = {static_cast<int32_t>(0xe46b26a0),
- 0xdddd,
- 0x11db,
- 0x8afd,
- {0x00, 0x02, 0xa5, 0xd5, 0xc5, 0x1b}};
-// fa81a2b8-588b-11ed-9b6a-0242ac120002
-static const AudioUuid kVolumeTypeUUID = {static_cast<int32_t>(0xfa81a2b8),
- 0x588b,
- 0x11ed,
- 0x9b6a,
- {0x02, 0x42, 0xac, 0x12, 0x00, 0x02}};
-
-} // namespace effect
-} // namespace android
diff --git a/media/libaudiohal/include/media/audiohal/DeviceHalInterface.h b/media/libaudiohal/include/media/audiohal/DeviceHalInterface.h
index 2df2f5d..e8d8998 100644
--- a/media/libaudiohal/include/media/audiohal/DeviceHalInterface.h
+++ b/media/libaudiohal/include/media/audiohal/DeviceHalInterface.h
@@ -132,13 +132,14 @@
std::vector<media::audio::common::AudioMMapPolicyInfo> *policyInfos) = 0;
virtual int32_t getAAudioMixerBurstCount() = 0;
virtual int32_t getAAudioHardwareBurstMinUsec() = 0;
+
virtual int32_t supportsBluetoothVariableLatency(bool* supports) = 0;
// Update the connection status of an external device.
- virtual status_t setConnectedState(const struct audio_port_v7* port, bool connected) {
- ALOGE("%s override me port %p connected %d", __func__, port, connected);
- return OK;
- }
+ virtual status_t setConnectedState(const struct audio_port_v7* port, bool connected) = 0;
+
+    // Enable simulation of external device connections at the HAL level.
+ virtual status_t setSimulateDeviceConnections(bool enabled) = 0;
virtual error::Result<audio_hw_sync_t> getHwAvSync() = 0;
diff --git a/media/libaudiohal/tests/Android.bp b/media/libaudiohal/tests/Android.bp
index 2f78dd0..8210f7d 100644
--- a/media/libaudiohal/tests/Android.bp
+++ b/media/libaudiohal/tests/Android.bp
@@ -20,18 +20,12 @@
default_applicable_licenses: ["frameworks_av_license"],
}
-cc_test {
- name: "EffectsFactoryHalInterfaceTest",
+cc_defaults {
+ name: "AudioHalTestDefaults",
test_suites: ["device-tests"],
-
- srcs: [
- "EffectsFactoryHalInterface_test.cpp",
- ],
-
defaults: [
"latest_android_media_audio_common_types_ndk_shared",
],
-
cflags: [
"-Wall",
"-Wextra",
@@ -48,8 +42,31 @@
"libutils",
"libvibrator",
],
+}
- header_libs: [
- "libaudiohal_headers",
+cc_test {
+ name: "EffectsFactoryHalInterfaceTest",
+ srcs: ["EffectsFactoryHalInterface_test.cpp"],
+ defaults: ["AudioHalTestDefaults"],
+ header_libs: ["libaudiohal_headers"],
+}
+
+cc_test {
+ name: "EffectProxyTest",
+ srcs: [
+ "EffectProxy_test.cpp",
+ ":audio_effectproxy_src_files",
],
+ defaults: [
+ "AudioHalTestDefaults",
+ "latest_android_hardware_audio_effect_ndk_shared",
+ "libaudiohal_default",
+ "use_libaidlvintf_gtest_helper_static",
+ ],
+ shared_libs: [
+ "android.hardware.common.fmq-V1-ndk",
+ "libbinder_ndk",
+ "libfmq",
+ ],
+ header_libs: ["libaudiohalimpl_headers"],
}
diff --git a/media/libaudiohal/tests/EffectProxy_test.cpp b/media/libaudiohal/tests/EffectProxy_test.cpp
new file mode 100644
index 0000000..92e3dce
--- /dev/null
+++ b/media/libaudiohal/tests/EffectProxy_test.cpp
@@ -0,0 +1,357 @@
+/*
+ * Copyright 2023 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+//#define LOG_NDEBUG 0
+#include <cstddef>
+#include <cstdint>
+#include <memory>
+#include <utility>
+#define LOG_TAG "EffectProxyTest"
+
+#include <aidl/android/media/audio/common/AudioUuid.h>
+#include <aidl/Vintf.h>
+#include <android/binder_manager.h>
+#include <gtest/gtest.h>
+#include <utils/RefBase.h>
+
+#include "EffectProxy.h"
+
+/**
+ * This test suite depends on the audio effect AIDL service.
+ */
+namespace android {
+
+using ::aidl::android::hardware::audio::effect::CommandId;
+using ::aidl::android::hardware::audio::effect::Descriptor;
+using ::aidl::android::hardware::audio::effect::Flags;
+using ::aidl::android::hardware::audio::effect::IEffect;
+using ::aidl::android::hardware::audio::effect::IFactory;
+using ::aidl::android::hardware::audio::effect::Parameter;
+using ::aidl::android::hardware::audio::effect::State;
+using ::aidl::android::media::audio::common::AudioChannelLayout;
+using ::aidl::android::media::audio::common::AudioFormatDescription;
+using ::aidl::android::media::audio::common::AudioFormatType;
+using ::aidl::android::media::audio::common::AudioUuid;
+using ::aidl::android::media::audio::common::PcmType;
+using ::android::effect::EffectProxy;
+
+class EffectProxyTest : public testing::Test {
+ public:
+ void SetUp() override {
+ auto serviceName = android::getAidlHalInstanceNames(IFactory::descriptor);
+        // only test against the first instance in case more than one EffectFactory service exists
+ ASSERT_NE(0ul, serviceName.size());
+ mFactory = IFactory::fromBinder(
+ ndk::SpAIBinder(AServiceManager_waitForService(serviceName[0].c_str())));
+ ASSERT_NE(nullptr, mFactory);
+ mFactory->queryEffects(std::nullopt, std::nullopt, std::nullopt, &mDescs);
+ for (const auto& desc : mDescs) {
+ if (desc.common.id.proxy.has_value()) {
+ mProxyDescs.insert({desc.common.id, desc});
+ }
+ }
+ }
+
+ void TearDown() override {}
+
+ const AudioFormatDescription kDefaultFormatDescription = {
+ .type = AudioFormatType::PCM, .pcm = PcmType::FLOAT_32_BIT, .encoding = ""};
+
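+    // Helper to build a Parameter::Common with matching input/output streams (48 kHz stereo float by default).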
+ Parameter::Common createParamCommon(
+ int session = 0, int ioHandle = -1, int iSampleRate = 48000, int oSampleRate = 48000,
+ long iFrameCount = 0x100, long oFrameCount = 0x100,
+ AudioChannelLayout inputChannelLayout =
+ AudioChannelLayout::make<AudioChannelLayout::layoutMask>(
+ AudioChannelLayout::LAYOUT_STEREO),
+ AudioChannelLayout outputChannelLayout =
+ AudioChannelLayout::make<AudioChannelLayout::layoutMask>(
+ AudioChannelLayout::LAYOUT_STEREO)) {
+ Parameter::Common common;
+ common.session = session;
+ common.ioHandle = ioHandle;
+
+ auto& input = common.input;
+ auto& output = common.output;
+ input.base.sampleRate = iSampleRate;
+ input.base.channelMask = inputChannelLayout;
+ input.base.format = kDefaultFormatDescription;
+ input.frameCount = iFrameCount;
+ output.base.sampleRate = oSampleRate;
+ output.base.channelMask = outputChannelLayout;
+ output.base.format = kDefaultFormatDescription;
+ output.frameCount = oFrameCount;
+ return common;
+ }
+
+ static bool isFlagSet(const ::aidl::android::hardware::audio::effect::Descriptor& desc,
+ Flags::HardwareAccelerator flag) {
+ return desc.common.flags.hwAcceleratorMode == flag;
+ }
+
+ enum TupleIndex { HANDLE, DESCRIPTOR };
+ using EffectProxyTuple = std::tuple<std::shared_ptr<EffectProxy>, std::vector<Descriptor>>;
+
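+    // Create one EffectProxy per unique proxy UUID found in the queried descriptors.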
+ std::map<AudioUuid, EffectProxyTuple> createAllProxies() {
+ std::map<AudioUuid, EffectProxyTuple> proxyMap;
+ for (const auto& itor : mProxyDescs) {
+ const auto& uuid = itor.first.proxy.value();
+ if (proxyMap.end() == proxyMap.find(uuid)) {
+ std::get<TupleIndex::HANDLE>(proxyMap[uuid]) =
+ ndk::SharedRefBase::make<EffectProxy>(itor.first, mFactory);
+ }
+ }
+ return proxyMap;
+ }
+
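+    // Add every sub-effect descriptor to its matching proxy; returns false if a proxy is missing or addSubEffect fails.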
+ bool addAllSubEffects(std::map<AudioUuid, EffectProxyTuple> proxyMap) {
+ for (auto& itor : mProxyDescs) {
+ const auto& uuid = itor.first.proxy.value();
+ if (proxyMap.end() == proxyMap.find(uuid)) {
+ return false;
+ }
+ auto& proxy = std::get<TupleIndex::HANDLE>(proxyMap[uuid]);
+ if (!proxy->addSubEffect(itor.second).isOk()) {
+ return false;
+ }
+ std::get<TupleIndex::DESCRIPTOR>(proxyMap[uuid]).emplace_back(itor.second);
+ }
+ return true;
+ }
+
+ std::shared_ptr<IFactory> mFactory;
+ std::vector<Descriptor> mDescs;
+ std::map<Descriptor::Identity, Descriptor> mProxyDescs;
+};
+
+TEST_F(EffectProxyTest, createProxy) {
+ auto proxyMap = createAllProxies();
+    // if any descriptor defines a proxy, then proxyMap cannot be empty
+ EXPECT_EQ(mProxyDescs.size() == 0, proxyMap.size() == 0);
+}
+
+TEST_F(EffectProxyTest, addSubEffectsCreateAndDestroy) {
+ auto proxyMap = createAllProxies();
+ ASSERT_TRUE(addAllSubEffects(proxyMap));
+
+ for (const auto& itor : proxyMap) {
+ auto& proxy = std::get<TupleIndex::HANDLE>(itor.second);
+ EXPECT_TRUE(proxy->create().isOk());
+ EXPECT_TRUE(proxy->destroy().isOk());
+ }
+}
+
+TEST_F(EffectProxyTest, addSubEffectsCreateOpenCloseDestroy) {
+ auto proxyMap = createAllProxies();
+ EXPECT_TRUE(addAllSubEffects(proxyMap));
+
+ Parameter::Common common = createParamCommon();
+ IEffect::OpenEffectReturn ret;
+ for (const auto& itor : proxyMap) {
+ auto& proxy = std::get<TupleIndex::HANDLE>(itor.second);
+ EXPECT_TRUE(proxy->create().isOk());
+ EXPECT_TRUE(proxy->open(common, std::nullopt, &ret).isOk());
+ EXPECT_TRUE(proxy->close().isOk());
+ EXPECT_TRUE(proxy->destroy().isOk());
+ }
+}
+
+// Add sub-effects, then select the active sub-effect with different offload settings
+TEST_F(EffectProxyTest, setOffloadParam) {
+ auto proxyMap = createAllProxies();
+ EXPECT_TRUE(addAllSubEffects(proxyMap));
+
+    // If any matching HardwareAccelerator flag exists, setOffloadParam should succeed
+ bool isNoneExist = false, isSimpleExist = false, isTunnelExist = false;
+ for (const auto& itor : mProxyDescs) {
+ isNoneExist = isNoneExist || isFlagSet(itor.second, Flags::HardwareAccelerator::NONE);
+ isSimpleExist = isSimpleExist || isFlagSet(itor.second, Flags::HardwareAccelerator::SIMPLE);
+ isTunnelExist = isTunnelExist || isFlagSet(itor.second, Flags::HardwareAccelerator::TUNNEL);
+ }
+
+ Parameter::Common common = createParamCommon();
+ IEffect::OpenEffectReturn ret;
+ for (const auto& itor : proxyMap) {
+ auto& proxy = std::get<TupleIndex::HANDLE>(itor.second);
+ EXPECT_TRUE(proxy->create().isOk());
+ EXPECT_TRUE(proxy->open(common, std::nullopt, &ret).isOk());
+ effect_offload_param_t offloadParam{false, 0};
+ EXPECT_EQ(isNoneExist || isSimpleExist, proxy->setOffloadParam(&offloadParam).isOk());
+ offloadParam.isOffload = true;
+ EXPECT_EQ(isTunnelExist, proxy->setOffloadParam(&offloadParam).isOk());
+ EXPECT_TRUE(proxy->close().isOk());
+ EXPECT_TRUE(proxy->destroy().isOk());
+ }
+}
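+
+// Destroying a proxy without a prior create() should still succeed.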
+TEST_F(EffectProxyTest, destroyWithoutCreate) {
+ auto proxyMap = createAllProxies();
+ ASSERT_TRUE(addAllSubEffects(proxyMap));
+
+ for (const auto& itor : proxyMap) {
+ auto& proxy = std::get<TupleIndex::HANDLE>(itor.second);
+ EXPECT_TRUE(proxy->destroy().isOk());
+ }
+}
+
+TEST_F(EffectProxyTest, closeWithoutOpen) {
+ auto proxyMap = createAllProxies();
+ ASSERT_TRUE(addAllSubEffects(proxyMap));
+
+ for (const auto& itor : proxyMap) {
+ auto& proxy = std::get<TupleIndex::HANDLE>(itor.second);
+ EXPECT_TRUE(proxy->create().isOk());
+
+ EXPECT_TRUE(proxy->close().isOk());
+ EXPECT_TRUE(proxy->destroy().isOk());
+ }
+}
+
+// Add sub-effects, set the active sub-effect, create, open, and send commands; expect every step to succeed
+TEST_F(EffectProxyTest, normalSequency) {
+ auto proxyMap = createAllProxies();
+ ASSERT_TRUE(addAllSubEffects(proxyMap));
+
+ bool isTunnelExist = [&]() {
+ for (const auto& itor : mProxyDescs) {
+ if (isFlagSet(itor.second, Flags::HardwareAccelerator::TUNNEL)) {
+ return true;
+ }
+ }
+ return false;
+ }();
+
+ Parameter::Common common = createParamCommon();
+ IEffect::OpenEffectReturn ret;
+ Parameter::VolumeStereo volumeStereo({.left = .1f, .right = -0.8f});
+ Parameter param = Parameter::make<Parameter::volumeStereo>(volumeStereo);
+ Parameter::Id id = Parameter::Id::make<Parameter::Id::commonTag>(Parameter::volumeStereo);
+ State state;
+ for (const auto& itor : proxyMap) {
+ Parameter expect;
+ auto& proxy = std::get<TupleIndex::HANDLE>(itor.second);
+ effect_offload_param_t offloadParam{true, 0};
+ EXPECT_EQ(isTunnelExist, proxy->setOffloadParam(&offloadParam).isOk());
+
+ EXPECT_TRUE(proxy->create().isOk());
+ EXPECT_TRUE(proxy->open(common, std::nullopt, &ret).isOk());
+
+ EXPECT_TRUE(proxy->setParameter(param).isOk());
+ EXPECT_TRUE(proxy->getParameter(id, &expect).isOk());
+ EXPECT_EQ(expect, param);
+
+ EXPECT_TRUE(proxy->command(CommandId::START).isOk());
+ EXPECT_TRUE(proxy->getState(&state).isOk());
+ EXPECT_EQ(State::PROCESSING, state);
+
+ EXPECT_TRUE(proxy->command(CommandId::STOP).isOk());
+ EXPECT_TRUE(proxy->getState(&state).isOk());
+ EXPECT_EQ(State::IDLE, state);
+
+ EXPECT_TRUE(proxy->close().isOk());
+ EXPECT_TRUE(proxy->destroy().isOk());
+ }
+}
+
+// setParameter, change the active sub-effect, then verify with getParameter
+TEST_F(EffectProxyTest, changeActiveSubAndVerifyParameter) {
+ auto proxyMap = createAllProxies();
+ EXPECT_TRUE(addAllSubEffects(proxyMap));
+
+ bool isNoneExist = false, isSimpleExist = false, isTunnelExist = false;
+ for (const auto& itor : mProxyDescs) {
+ isNoneExist = isNoneExist || isFlagSet(itor.second, Flags::HardwareAccelerator::NONE);
+ isSimpleExist = isSimpleExist || isFlagSet(itor.second, Flags::HardwareAccelerator::SIMPLE);
+ isTunnelExist = isTunnelExist || isFlagSet(itor.second, Flags::HardwareAccelerator::TUNNEL);
+ }
+
+ Parameter::Common common = createParamCommon();
+ IEffect::OpenEffectReturn ret;
+ Parameter::VolumeStereo volumeStereo({.left = .5f, .right = .8f});
+ Parameter param = Parameter::make<Parameter::volumeStereo>(volumeStereo);
+ Parameter::Id id = Parameter::Id::make<Parameter::Id::commonTag>(Parameter::volumeStereo);
+ for (const auto& itor : proxyMap) {
+ Parameter expect;
+ auto& proxy = std::get<TupleIndex::HANDLE>(itor.second);
+ EXPECT_TRUE(proxy->create().isOk());
+ EXPECT_TRUE(proxy->open(common, std::nullopt, &ret).isOk());
+ EXPECT_TRUE(proxy->setParameter(param).isOk());
+ EXPECT_TRUE(proxy->getParameter(id, &expect).isOk());
+ EXPECT_EQ(expect, param);
+
+ effect_offload_param_t offloadParam{false, 0};
+ EXPECT_EQ(isNoneExist || isSimpleExist, proxy->setOffloadParam(&offloadParam).isOk());
+ EXPECT_TRUE(proxy->getParameter(id, &expect).isOk());
+ EXPECT_EQ(expect, param);
+
+ offloadParam.isOffload = true;
+ EXPECT_EQ(isTunnelExist, proxy->setOffloadParam(&offloadParam).isOk());
+ EXPECT_TRUE(proxy->getParameter(id, &expect).isOk());
+ EXPECT_EQ(expect, param);
+
+ EXPECT_TRUE(proxy->close().isOk());
+ EXPECT_TRUE(proxy->destroy().isOk());
+ }
+}
+
+// Send commands, change the active sub-effect, then verify the state with getState
+TEST_F(EffectProxyTest, changeActiveSubAndVerifyState) {
+ auto proxyMap = createAllProxies();
+ ASSERT_TRUE(addAllSubEffects(proxyMap));
+
+ bool isNoneExist = false, isSimpleExist = false, isTunnelExist = false;
+ for (const auto& itor : mProxyDescs) {
+ isNoneExist = isNoneExist || isFlagSet(itor.second, Flags::HardwareAccelerator::NONE);
+ isSimpleExist = isSimpleExist || isFlagSet(itor.second, Flags::HardwareAccelerator::SIMPLE);
+ isTunnelExist = isTunnelExist || isFlagSet(itor.second, Flags::HardwareAccelerator::TUNNEL);
+ }
+
+ Parameter::Common common = createParamCommon();
+ IEffect::OpenEffectReturn ret;
+ State state;
+ for (const auto& itor : proxyMap) {
+ Parameter expect;
+ auto& proxy = std::get<TupleIndex::HANDLE>(itor.second);
+ EXPECT_TRUE(proxy->create().isOk());
+ EXPECT_TRUE(proxy->getState(&state).isOk());
+ EXPECT_EQ(State::INIT, state);
+ EXPECT_TRUE(proxy->open(common, std::nullopt, &ret).isOk());
+ EXPECT_TRUE(proxy->getState(&state).isOk());
+ EXPECT_EQ(State::IDLE, state);
+ EXPECT_TRUE(proxy->command(CommandId::START).isOk());
+ EXPECT_TRUE(proxy->getState(&state).isOk());
+ EXPECT_EQ(State::PROCESSING, state);
+
+ effect_offload_param_t offloadParam{false, 0};
+ EXPECT_EQ(isNoneExist || isSimpleExist, proxy->setOffloadParam(&offloadParam).isOk());
+ EXPECT_TRUE(proxy->getState(&state).isOk());
+ EXPECT_EQ(State::PROCESSING, state);
+
+ offloadParam.isOffload = true;
+ EXPECT_EQ(isTunnelExist, proxy->setOffloadParam(&offloadParam).isOk());
+ EXPECT_TRUE(proxy->getState(&state).isOk());
+ EXPECT_EQ(State::PROCESSING, state);
+
+ EXPECT_TRUE(proxy->command(CommandId::STOP).isOk());
+ EXPECT_TRUE(proxy->getState(&state).isOk());
+ EXPECT_EQ(State::IDLE, state);
+
+ EXPECT_TRUE(proxy->close().isOk());
+ EXPECT_TRUE(proxy->getState(&state).isOk());
+ EXPECT_EQ(State::INIT, state);
+ EXPECT_TRUE(proxy->destroy().isOk());
+ }
+}
+
+} // namespace android
diff --git a/media/libaudiohal/tests/EffectsFactoryHalInterface_test.cpp b/media/libaudiohal/tests/EffectsFactoryHalInterface_test.cpp
index a8843d6..c076ccc 100644
--- a/media/libaudiohal/tests/EffectsFactoryHalInterface_test.cpp
+++ b/media/libaudiohal/tests/EffectsFactoryHalInterface_test.cpp
@@ -27,6 +27,7 @@
#include <media/audiohal/EffectsFactoryHalInterface.h>
#include <system/audio_effects/audio_effects_utils.h>
#include <system/audio_effects/effect_aec.h>
+#include <system/audio_effects/effect_agc.h>
#include <system/audio_effects/effect_agc2.h>
#include <system/audio_effects/effect_bassboost.h>
#include <system/audio_effects/effect_downmix.h>
@@ -157,6 +158,9 @@
std::make_tuple(FX_IID_AEC,
createEffectParamCombination(AEC_PARAM_ECHO_DELAY, 0xff /* echoDelayMs */,
sizeof(int32_t) /* returnValueSize */)),
+ std::make_tuple(FX_IID_AGC,
+ createEffectParamCombination(AGC_PARAM_TARGET_LEVEL, 20 /* targetLevel */,
+ sizeof(int16_t) /* returnValueSize */)),
std::make_tuple(FX_IID_AGC2, createEffectParamCombination(
AGC2_PARAM_FIXED_DIGITAL_GAIN, 15 /* digitalGainDb */,
sizeof(int32_t) /* returnValueSize */)),
diff --git a/media/libeffects/downmix/aidl/DownmixContext.cpp b/media/libeffects/downmix/aidl/DownmixContext.cpp
index 43bfeed..ac893d8 100644
--- a/media/libeffects/downmix/aidl/DownmixContext.cpp
+++ b/media/libeffects/downmix/aidl/DownmixContext.cpp
@@ -21,8 +21,8 @@
#include "DownmixContext.h"
using aidl::android::hardware::audio::effect::IEffect;
-using ::aidl::android::media::audio::common::AudioChannelLayout;
-using ::android::hardware::audio::common::getChannelCount;
+using aidl::android::hardware::audio::common::getChannelCount;
+using aidl::android::media::audio::common::AudioChannelLayout;
namespace aidl::android::hardware::audio::effect {
diff --git a/media/libeffects/downmix/aidl/EffectDownmix.cpp b/media/libeffects/downmix/aidl/EffectDownmix.cpp
index 17d0736..7068c5c 100644
--- a/media/libeffects/downmix/aidl/EffectDownmix.cpp
+++ b/media/libeffects/downmix/aidl/EffectDownmix.cpp
@@ -17,19 +17,20 @@
#define LOG_TAG "AHAL_DownmixImpl"
#include <android-base/logging.h>
+#include <system/audio_effects/effect_uuid.h>
#include "EffectDownmix.h"
using aidl::android::hardware::audio::effect::Descriptor;
using aidl::android::hardware::audio::effect::DownmixImpl;
+using aidl::android::hardware::audio::effect::getEffectImplUuidDownmix;
+using aidl::android::hardware::audio::effect::getEffectTypeUuidDownmix;
using aidl::android::hardware::audio::effect::IEffect;
-using aidl::android::hardware::audio::effect::kDownmixImplUUID;
-using aidl::android::hardware::audio::effect::kDownmixTypeUUID;
using aidl::android::media::audio::common::AudioUuid;
extern "C" binder_exception_t createEffect(const AudioUuid* in_impl_uuid,
std::shared_ptr<IEffect>* instanceSpp) {
- if (!in_impl_uuid || *in_impl_uuid != kDownmixImplUUID) {
+ if (!in_impl_uuid || *in_impl_uuid != getEffectImplUuidDownmix()) {
LOG(ERROR) << __func__ << "uuid not supported";
return EX_ILLEGAL_ARGUMENT;
}
@@ -44,7 +45,7 @@
}
extern "C" binder_exception_t queryEffect(const AudioUuid* in_impl_uuid, Descriptor* _aidl_return) {
- if (!in_impl_uuid || *in_impl_uuid != kDownmixImplUUID) {
+ if (!in_impl_uuid || *in_impl_uuid != getEffectImplUuidDownmix()) {
LOG(ERROR) << __func__ << "uuid not supported";
return EX_ILLEGAL_ARGUMENT;
}
@@ -56,11 +57,12 @@
const std::string DownmixImpl::kEffectName = "Multichannel Downmix To Stereo";
const Descriptor DownmixImpl::kDescriptor = {
- .common = {
- .id = {.type = kDownmixTypeUUID, .uuid = kDownmixImplUUID, .proxy = std::nullopt},
- .flags = {.type = Flags::Type::INSERT, .insert = Flags::Insert::FIRST},
- .name = DownmixImpl::kEffectName,
- .implementor = "The Android Open Source Project"}};
+ .common = {.id = {.type = getEffectTypeUuidDownmix(),
+ .uuid = getEffectImplUuidDownmix(),
+ .proxy = std::nullopt},
+ .flags = {.type = Flags::Type::INSERT, .insert = Flags::Insert::FIRST},
+ .name = DownmixImpl::kEffectName,
+ .implementor = "The Android Open Source Project"}};
ndk::ScopedAStatus DownmixImpl::getDescriptor(Descriptor* _aidl_return) {
RETURN_IF(!_aidl_return, EX_ILLEGAL_ARGUMENT, "Parameter:nullptr");
diff --git a/media/libeffects/downmix/aidl/EffectDownmix.h b/media/libeffects/downmix/aidl/EffectDownmix.h
index d590133..812d26b 100644
--- a/media/libeffects/downmix/aidl/EffectDownmix.h
+++ b/media/libeffects/downmix/aidl/EffectDownmix.h
@@ -21,7 +21,6 @@
#include "DownmixContext.h"
#include "effect-impl/EffectImpl.h"
-#include "effect-impl/EffectUUID.h"
namespace aidl::android::hardware::audio::effect {
diff --git a/media/libeffects/dynamicsproc/Android.bp b/media/libeffects/dynamicsproc/Android.bp
index 736a086..7838117 100644
--- a/media/libeffects/dynamicsproc/Android.bp
+++ b/media/libeffects/dynamicsproc/Android.bp
@@ -92,6 +92,10 @@
"dynamicsprocessingdefaults",
],
+ static_libs: [
+ "libaudioaidlranges",
+ ],
+
visibility: [
"//hardware/interfaces/audio/aidl/default",
],
diff --git a/media/libeffects/dynamicsproc/aidl/DynamicsProcessing.cpp b/media/libeffects/dynamicsproc/aidl/DynamicsProcessing.cpp
index 4af5fd8..f1619a8 100644
--- a/media/libeffects/dynamicsproc/aidl/DynamicsProcessing.cpp
+++ b/media/libeffects/dynamicsproc/aidl/DynamicsProcessing.cpp
@@ -17,6 +17,7 @@
#define LOG_TAG "AHAL_DynamicsProcessingLibEffects"
#include <android-base/logging.h>
+#include <system/audio_effects/effect_uuid.h>
#include "DynamicsProcessing.h"
@@ -25,15 +26,16 @@
using aidl::android::hardware::audio::effect::Descriptor;
using aidl::android::hardware::audio::effect::DynamicsProcessingImpl;
+using aidl::android::hardware::audio::effect::getEffectImplUuidDynamicsProcessing;
+using aidl::android::hardware::audio::effect::getEffectTypeUuidDynamicsProcessing;
using aidl::android::hardware::audio::effect::IEffect;
-using aidl::android::hardware::audio::effect::kDynamicsProcessingImplUUID;
using aidl::android::hardware::audio::effect::State;
using aidl::android::media::audio::common::AudioUuid;
using aidl::android::media::audio::common::PcmType;
extern "C" binder_exception_t createEffect(const AudioUuid* in_impl_uuid,
std::shared_ptr<IEffect>* instanceSpp) {
- if (!in_impl_uuid || *in_impl_uuid != kDynamicsProcessingImplUUID) {
+ if (!in_impl_uuid || *in_impl_uuid != getEffectImplUuidDynamicsProcessing()) {
LOG(ERROR) << __func__ << "uuid not supported";
return EX_ILLEGAL_ARGUMENT;
}
@@ -48,7 +50,7 @@
}
extern "C" binder_exception_t queryEffect(const AudioUuid* in_impl_uuid, Descriptor* _aidl_return) {
- if (!in_impl_uuid || *in_impl_uuid != kDynamicsProcessingImplUUID) {
+ if (!in_impl_uuid || *in_impl_uuid != getEffectImplUuidDynamicsProcessing()) {
LOG(ERROR) << __func__ << "uuid not supported";
return EX_ILLEGAL_ARGUMENT;
}
@@ -60,36 +62,139 @@
const std::string DynamicsProcessingImpl::kEffectName = "DynamicsProcessing";
-const DynamicsProcessing::EqBandConfig DynamicsProcessingImpl::kEqBandConfigMin =
+static const Range::DynamicsProcessingRange kEngineConfigRange = {
+ .min = DynamicsProcessing::make<
+ DynamicsProcessing::engineArchitecture>(DynamicsProcessing::EngineArchitecture(
+ {.resolutionPreference =
+ DynamicsProcessing::ResolutionPreference::FAVOR_FREQUENCY_RESOLUTION,
+ .preferredProcessingDurationMs = 0,
+ .preEqStage = {.inUse = false, .bandCount = 0},
+ .postEqStage = {.inUse = false, .bandCount = 0},
+ .mbcStage = {.inUse = false, .bandCount = 0},
+ .limiterInUse = false})),
+ .max = DynamicsProcessing::make<
+ DynamicsProcessing::engineArchitecture>(DynamicsProcessing::EngineArchitecture(
+ {.resolutionPreference =
+ DynamicsProcessing::ResolutionPreference::FAVOR_FREQUENCY_RESOLUTION,
+ .preferredProcessingDurationMs = std::numeric_limits<float>::max(),
+ .preEqStage = {.inUse = true, .bandCount = std::numeric_limits<int>::max()},
+ .postEqStage = {.inUse = true, .bandCount = std::numeric_limits<int>::max()},
+ .mbcStage = {.inUse = true, .bandCount = std::numeric_limits<int>::max()},
+ .limiterInUse = true}))};
+
+static const DynamicsProcessing::ChannelConfig kChannelConfigMin =
+ DynamicsProcessing::ChannelConfig({.channel = 0, .enable = false});
+
+static const DynamicsProcessing::ChannelConfig kChannelConfigMax =
+ DynamicsProcessing::ChannelConfig(
+ {.channel = std::numeric_limits<int>::max(), .enable = true});
+
+static const Range::DynamicsProcessingRange kPreEqChannelConfigRange = {
+ .min = DynamicsProcessing::make<DynamicsProcessing::preEq>({kChannelConfigMin}),
+ .max = DynamicsProcessing::make<DynamicsProcessing::preEq>({kChannelConfigMax})};
+
+static const Range::DynamicsProcessingRange kPostEqChannelConfigRange = {
+ .min = DynamicsProcessing::make<DynamicsProcessing::postEq>({kChannelConfigMin}),
+ .max = DynamicsProcessing::make<DynamicsProcessing::postEq>({kChannelConfigMax})};
+
+static const Range::DynamicsProcessingRange kMbcChannelConfigRange = {
+ .min = DynamicsProcessing::make<DynamicsProcessing::mbc>({kChannelConfigMin}),
+ .max = DynamicsProcessing::make<DynamicsProcessing::mbc>({kChannelConfigMax})};
+
+static const DynamicsProcessing::EqBandConfig kEqBandConfigMin =
DynamicsProcessing::EqBandConfig({.channel = 0,
.band = 0,
.enable = false,
.cutoffFrequencyHz = 220,
- .gainDb = std::numeric_limits<float>::min()});
-const DynamicsProcessing::EqBandConfig DynamicsProcessingImpl::kEqBandConfigMax =
+ .gainDb = std::numeric_limits<float>::lowest()});
+
+static const DynamicsProcessing::EqBandConfig kEqBandConfigMax =
DynamicsProcessing::EqBandConfig({.channel = std::numeric_limits<int>::max(),
.band = std::numeric_limits<int>::max(),
.enable = true,
.cutoffFrequencyHz = 20000,
.gainDb = std::numeric_limits<float>::max()});
-const Range::DynamicsProcessingRange DynamicsProcessingImpl::kPreEqBandRange = {
- .min = DynamicsProcessing::make<DynamicsProcessing::preEqBand>(
- {DynamicsProcessingImpl::kEqBandConfigMin}),
- .max = DynamicsProcessing::make<DynamicsProcessing::preEqBand>(
- {DynamicsProcessingImpl::kEqBandConfigMax})};
-const Range::DynamicsProcessingRange DynamicsProcessingImpl::kPostEqBandRange = {
- .min = DynamicsProcessing::make<DynamicsProcessing::postEqBand>(
- {DynamicsProcessingImpl::kEqBandConfigMin}),
- .max = DynamicsProcessing::make<DynamicsProcessing::postEqBand>(
- {DynamicsProcessingImpl::kEqBandConfigMax})};
-const Range DynamicsProcessingImpl::kRange =
- Range::make<Range::dynamicsProcessing>({DynamicsProcessingImpl::kPreEqBandRange});
-const Capability DynamicsProcessingImpl::kCapability = {.range = {DynamicsProcessingImpl::kRange}};
+static const Range::DynamicsProcessingRange kPreEqBandConfigRange = {
+ .min = DynamicsProcessing::make<DynamicsProcessing::preEqBand>({kEqBandConfigMin}),
+ .max = DynamicsProcessing::make<DynamicsProcessing::preEqBand>({kEqBandConfigMax})};
+
+static const Range::DynamicsProcessingRange kPostEqBandConfigRange = {
+ .min = DynamicsProcessing::make<DynamicsProcessing::postEqBand>({kEqBandConfigMin}),
+ .max = DynamicsProcessing::make<DynamicsProcessing::postEqBand>({kEqBandConfigMax})};
+
+static const Range::DynamicsProcessingRange kMbcBandConfigRange = {
+ .min = DynamicsProcessing::make<DynamicsProcessing::mbcBand>(
+ {DynamicsProcessing::MbcBandConfig(
+ {.channel = 0,
+ .band = 0,
+ .enable = false,
+ .cutoffFrequencyHz = 220,
+ .attackTimeMs = 0,
+ .releaseTimeMs = 0,
+ .ratio = 0,
+ .thresholdDb = std::numeric_limits<float>::lowest(),
+ .kneeWidthDb = 0,
+ .noiseGateThresholdDb = std::numeric_limits<float>::lowest(),
+ .expanderRatio = 0,
+ .preGainDb = std::numeric_limits<float>::lowest(),
+ .postGainDb = std::numeric_limits<float>::lowest()})}),
+ .max = DynamicsProcessing::make<DynamicsProcessing::mbcBand>(
+ {DynamicsProcessing::MbcBandConfig(
+ {.channel = std::numeric_limits<int>::max(),
+ .band = std::numeric_limits<int>::max(),
+ .enable = true,
+ .cutoffFrequencyHz = 20000,
+ .attackTimeMs = std::numeric_limits<float>::max(),
+ .releaseTimeMs = std::numeric_limits<float>::max(),
+ .ratio = std::numeric_limits<float>::max(),
+ .thresholdDb = 0,
+ .kneeWidthDb = std::numeric_limits<float>::max(),
+ .noiseGateThresholdDb = 0,
+ .expanderRatio = std::numeric_limits<float>::max(),
+ .preGainDb = std::numeric_limits<float>::max(),
+ .postGainDb = std::numeric_limits<float>::max()})})};
+
+static const Range::DynamicsProcessingRange kInputGainRange = {
+ .min = DynamicsProcessing::make<DynamicsProcessing::inputGain>(
+ {DynamicsProcessing::InputGain(
+ {.channel = 0, .gainDb = std::numeric_limits<float>::lowest()})}),
+ .max = DynamicsProcessing::make<DynamicsProcessing::inputGain>(
+ {DynamicsProcessing::InputGain({.channel = std::numeric_limits<int>::max(),
+ .gainDb = std::numeric_limits<float>::max()})})};
+
+static const Range::DynamicsProcessingRange kLimiterRange = {
+ .min = DynamicsProcessing::make<DynamicsProcessing::limiter>(
+ {DynamicsProcessing::LimiterConfig(
+ {.channel = 0,
+ .enable = false,
+ .linkGroup = std::numeric_limits<int>::min(),
+ .attackTimeMs = 0,
+ .releaseTimeMs = 0,
+ .ratio = 0,
+ .thresholdDb = std::numeric_limits<float>::min(),
+ .postGainDb = std::numeric_limits<float>::min()})}),
+ .max = DynamicsProcessing::make<DynamicsProcessing::limiter>(
+ {DynamicsProcessing::LimiterConfig(
+ {.channel = std::numeric_limits<int>::max(),
+ .enable = true,
+ .linkGroup = std::numeric_limits<int>::max(),
+ .attackTimeMs = std::numeric_limits<float>::max(),
+ .releaseTimeMs = std::numeric_limits<float>::max(),
+ .ratio = std::numeric_limits<float>::max(),
+ .thresholdDb = 0,
+ .postGainDb = std::numeric_limits<float>::max()})})};
+
+const std::vector<Range::DynamicsProcessingRange> kRanges = {
+ kEngineConfigRange, kPreEqChannelConfigRange, kPostEqChannelConfigRange,
+ kMbcChannelConfigRange, kPreEqBandConfigRange, kPostEqBandConfigRange,
+ kMbcBandConfigRange, kInputGainRange, kLimiterRange};
+
+const Capability DynamicsProcessingImpl::kCapability = {.range = kRanges};
const Descriptor DynamicsProcessingImpl::kDescriptor = {
- .common = {.id = {.type = kDynamicsProcessingTypeUUID,
- .uuid = kDynamicsProcessingImplUUID,
+ .common = {.id = {.type = getEffectTypeUuidDynamicsProcessing(),
+ .uuid = getEffectImplUuidDynamicsProcessing(),
.proxy = std::nullopt},
.flags = {.type = Flags::Type::INSERT,
.insert = Flags::Insert::LAST,
@@ -156,14 +261,19 @@
}
}
+bool DynamicsProcessingImpl::isParamInRange(const Parameter::Specific& specific) {
+ auto& dp = specific.get<Parameter::Specific::dynamicsProcessing>();
+ return DynamicsProcessingRanges::isParamInRange(dp, kRanges);
+}
+
ndk::ScopedAStatus DynamicsProcessingImpl::setParameterSpecific(
const Parameter::Specific& specific) {
RETURN_IF(Parameter::Specific::dynamicsProcessing != specific.getTag(), EX_ILLEGAL_ARGUMENT,
"EffectNotSupported");
RETURN_IF(!mContext, EX_NULL_POINTER, "nullContext");
+ RETURN_IF(!isParamInRange(specific), EX_ILLEGAL_ARGUMENT, "outOfRange");
auto& param = specific.get<Parameter::Specific::dynamicsProcessing>();
- // TODO: check range here, dynamicsProcessing need customized method for nested parameters.
auto tag = param.getTag();
switch (tag) {
@@ -221,7 +331,7 @@
EX_ILLEGAL_ARGUMENT, "setInputGainFailed");
return ndk::ScopedAStatus::ok();
}
- case DynamicsProcessing::vendorExtension: {
+ case DynamicsProcessing::vendor: {
LOG(ERROR) << __func__ << " unsupported tag: " << toString(tag);
return ndk::ScopedAStatus::fromExceptionCodeWithMessage(
EX_ILLEGAL_ARGUMENT, "DPVendorExtensionTagNotSupported");
@@ -301,7 +411,7 @@
mContext->getInputGain()));
return ndk::ScopedAStatus::ok();
}
- case DynamicsProcessing::vendorExtension: {
+ case DynamicsProcessing::vendor: {
LOG(ERROR) << __func__ << " wrong vendor tag in CommonTag: " << toString(tag);
return ndk::ScopedAStatus::fromExceptionCodeWithMessage(
EX_ILLEGAL_ARGUMENT, "DPVendorExtensionTagInWrongId");
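The DynamicsProcessing capability now advertises a min/max pair per parameter tag (kRanges above) instead of the single pre-EQ band range, and setParameterSpecific() rejects out-of-range values up front through DynamicsProcessingRanges::isParamInRange(). A rough sketch of the scalar check such a min/max pair enables; the struct and helper below are hypothetical and are not the DynamicsProcessingRanges implementation:

    // Rough sketch only: compare one scalar field against the min/max carried by a
    // range entry. Names here are hypothetical.
    struct FloatRange {
        float min;
        float max;
    };

    static bool inRange(float value, const FloatRange& r) {
        return value >= r.min && value <= r.max;
    }

    // e.g. per kEngineConfigRange above, preferredProcessingDurationMs must fall in
    // [0, std::numeric_limits<float>::max()].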
diff --git a/media/libeffects/dynamicsproc/aidl/DynamicsProcessing.h b/media/libeffects/dynamicsproc/aidl/DynamicsProcessing.h
index 26b6ead..1e1e72e 100644
--- a/media/libeffects/dynamicsproc/aidl/DynamicsProcessing.h
+++ b/media/libeffects/dynamicsproc/aidl/DynamicsProcessing.h
@@ -18,9 +18,9 @@
#include <aidl/android/hardware/audio/effect/BnEffect.h>
-#include "effect-impl/EffectImpl.h"
-#include "effect-impl/EffectUUID.h"
#include "DynamicsProcessingContext.h"
+#include "EffectRangeSpecific.h"
+#include "effect-impl/EffectImpl.h"
namespace aidl::android::hardware::audio::effect {
@@ -52,14 +52,10 @@
std::string getEffectName() override { return kEffectName; }
private:
- static const DynamicsProcessing::EqBandConfig kEqBandConfigMin;
- static const DynamicsProcessing::EqBandConfig kEqBandConfigMax;
- static const Range::DynamicsProcessingRange kPreEqBandRange;
- static const Range::DynamicsProcessingRange kPostEqBandRange;
- static const Range kRange;
std::shared_ptr<DynamicsProcessingContext> mContext;
ndk::ScopedAStatus getParameterDynamicsProcessing(const DynamicsProcessing::Tag& tag,
Parameter::Specific* specific);
+ bool isParamInRange(const Parameter::Specific& specific);
};
} // namespace aidl::android::hardware::audio::effect
diff --git a/media/libeffects/dynamicsproc/aidl/DynamicsProcessingContext.cpp b/media/libeffects/dynamicsproc/aidl/DynamicsProcessingContext.cpp
index 7978cc5..9d77135 100644
--- a/media/libeffects/dynamicsproc/aidl/DynamicsProcessingContext.cpp
+++ b/media/libeffects/dynamicsproc/aidl/DynamicsProcessingContext.cpp
@@ -16,11 +16,11 @@
#define LOG_TAG "AHAL_DPLibEffectsContext"
-#include "DynamicsProcessing.h"
#include "DynamicsProcessingContext.h"
+#include "DynamicsProcessing.h"
-#include <functional>
#include <sys/param.h>
+#include <functional>
#include <unordered_set>
namespace aidl::android::hardware::audio::effect {
@@ -64,6 +64,7 @@
RetCode DynamicsProcessingContext::setCommon(const Parameter::Common& common) {
mCommon = common;
init();
+ LOG(INFO) << __func__ << common.toString();
return RetCode::SUCCESS;
}
@@ -82,7 +83,7 @@
if (block < minBlockSize) {
block = minBlockSize;
} else if (!powerof2(block)) {
- //find next highest power of 2.
+ // find next highest power of 2.
block = 1 << (32 - __builtin_clz(block));
}
mDpFreq->configure(block, block >> 1, sampleRate);
@@ -90,9 +91,6 @@
RetCode DynamicsProcessingContext::setEngineArchitecture(
const DynamicsProcessing::EngineArchitecture& engineArchitecture) {
- RETURN_VALUE_IF(!validateEngineConfig(engineArchitecture), RetCode::ERROR_ILLEGAL_PARAMETER,
- "illegalEngineConfig");
-
std::lock_guard lg(mMutex);
if (!mEngineInited || mEngineArchitecture != engineArchitecture) {
if (engineArchitecture.resolutionPreference ==
@@ -133,10 +131,12 @@
RetCode DynamicsProcessingContext::setPreEqBand(
const std::vector<DynamicsProcessing::EqBandConfig>& bands) {
std::lock_guard lg(mMutex);
- RETURN_VALUE_IF(!mEngineArchitecture.postEqStage.inUse, RetCode::ERROR_ILLEGAL_PARAMETER,
- "postEqNotInUse");
- return setBands_l<DynamicsProcessing::EqBandConfig>(
- bands, mEngineArchitecture.preEqStage.bandCount, StageType::PREEQ);
+ RETURN_VALUE_IF(!mEngineArchitecture.preEqStage.inUse, RetCode::ERROR_ILLEGAL_PARAMETER,
+ "preEqNotInUse");
+ RETURN_VALUE_IF(
+ !validateBandConfig(bands, mChannelCount, mEngineArchitecture.preEqStage.bandCount),
+ RetCode::ERROR_ILLEGAL_PARAMETER, "eqBandNotValid");
+ return setBands_l<DynamicsProcessing::EqBandConfig>(bands, StageType::PREEQ);
}
RetCode DynamicsProcessingContext::setPostEqBand(
@@ -144,8 +144,10 @@
std::lock_guard lg(mMutex);
RETURN_VALUE_IF(!mEngineArchitecture.postEqStage.inUse, RetCode::ERROR_ILLEGAL_PARAMETER,
"postEqNotInUse");
- return setBands_l<DynamicsProcessing::EqBandConfig>(
- bands, mEngineArchitecture.postEqStage.bandCount, StageType::POSTEQ);
+ RETURN_VALUE_IF(
+ !validateBandConfig(bands, mChannelCount, mEngineArchitecture.postEqStage.bandCount),
+ RetCode::ERROR_ILLEGAL_PARAMETER, "eqBandNotValid");
+ return setBands_l<DynamicsProcessing::EqBandConfig>(bands, StageType::POSTEQ);
}
RetCode DynamicsProcessingContext::setMbcBand(
@@ -153,8 +155,10 @@
std::lock_guard lg(mMutex);
RETURN_VALUE_IF(!mEngineArchitecture.mbcStage.inUse, RetCode::ERROR_ILLEGAL_PARAMETER,
"mbcNotInUse");
- return setBands_l<DynamicsProcessing::MbcBandConfig>(
- bands, mEngineArchitecture.preEqStage.bandCount, StageType::MBC);
+ RETURN_VALUE_IF(
+ !validateBandConfig(bands, mChannelCount, mEngineArchitecture.mbcStage.bandCount),
+ RetCode::ERROR_ILLEGAL_PARAMETER, "eqBandNotValid");
+ return setBands_l<DynamicsProcessing::MbcBandConfig>(bands, StageType::MBC);
}
RetCode DynamicsProcessingContext::setLimiter(
@@ -162,13 +166,17 @@
std::lock_guard lg(mMutex);
RETURN_VALUE_IF(!mEngineArchitecture.limiterInUse, RetCode::ERROR_ILLEGAL_PARAMETER,
"limiterNotInUse");
- return setBands_l<DynamicsProcessing::LimiterConfig>(limiters, -1, StageType::LIMITER);
+ RETURN_VALUE_IF(!validateLimiterConfig(limiters, mChannelCount),
+ RetCode::ERROR_ILLEGAL_PARAMETER, "limiterConfigNotValid");
+ return setBands_l<DynamicsProcessing::LimiterConfig>(limiters, StageType::LIMITER);
}
RetCode DynamicsProcessingContext::setInputGain(
const std::vector<DynamicsProcessing::InputGain>& inputGains) {
std::lock_guard lg(mMutex);
- return setBands_l<DynamicsProcessing::InputGain>(inputGains, -1, StageType::INPUTGAIN);
+ RETURN_VALUE_IF(!validateInputGainConfig(inputGains, mChannelCount),
+ RetCode::ERROR_ILLEGAL_PARAMETER, "inputGainNotValid");
+ return setBands_l<DynamicsProcessing::InputGain>(inputGains, StageType::INPUTGAIN);
}
DynamicsProcessing::EngineArchitecture DynamicsProcessingContext::getEngineArchitecture() {
@@ -287,8 +295,8 @@
void DynamicsProcessingContext::init() {
std::lock_guard lg(mMutex);
mState = DYNAMICS_PROCESSING_STATE_INITIALIZED;
- mChannelCount =
- ::android::hardware::audio::common::getChannelCount(mCommon.input.base.channelMask);
+ mChannelCount = ::aidl::android::hardware::audio::common::getChannelCount(
+ mCommon.input.base.channelMask);
}
dp_fx::DPChannel* DynamicsProcessingContext::getChannel_l(int channel) {
@@ -405,45 +413,33 @@
return eqBands;
}
-/**
- * When StageEnablement is in use, bandCount needs to be positive.
- */
-bool DynamicsProcessingContext::validateStageEnablement(
- const DynamicsProcessing::StageEnablement& enablement) {
- return !enablement.inUse || (enablement.inUse && enablement.bandCount > 0);
-}
-
-bool DynamicsProcessingContext::validateEngineConfig(
- const DynamicsProcessing::EngineArchitecture& engine) {
- return engine.preferredProcessingDurationMs >= 0 &&
- validateStageEnablement(engine.preEqStage) &&
- validateStageEnablement(engine.postEqStage) && validateStageEnablement(engine.mbcStage);
-}
-
-bool DynamicsProcessingContext::validateEqBandConfig(const DynamicsProcessing::EqBandConfig& band,
- int maxChannel, int maxBand) {
- return validateChannel(band.channel, maxChannel) && validateBand(band.band, maxBand);
-}
-
-bool DynamicsProcessingContext::validateMbcBandConfig(const DynamicsProcessing::MbcBandConfig& band,
- int maxChannel, int maxBand) {
- return validateChannel(band.channel, maxChannel) && validateBand(band.band, maxBand) &&
- validateTime(band.attackTimeMs) && validateTime(band.releaseTimeMs) &&
- validateRatio(band.ratio) && validateBandDb(band.thresholdDb) &&
- validateBandDb(band.kneeWidthDb) && validateBandDb(band.noiseGateThresholdDb) &&
- validateRatio(band.expanderRatio);
+template <typename T>
+bool DynamicsProcessingContext::validateBandConfig(const std::vector<T>& bands, int maxChannel,
+ int maxBand) {
+ std::vector<float> freqs(bands.size(), -1);
+ for (auto band : bands) {
+ if (!validateChannel(band.channel, maxChannel)) return false;
+ if (!validateBand(band.band, maxBand)) return false;
+ freqs[band.band] = band.cutoffFrequencyHz;
+ }
+ if (std::count(freqs.begin(), freqs.end(), -1)) return false;
+ return std::is_sorted(freqs.begin(), freqs.end());
}
bool DynamicsProcessingContext::validateLimiterConfig(
- const DynamicsProcessing::LimiterConfig& limiter, int maxChannel) {
- return validateChannel(limiter.channel, maxChannel) && validateTime(limiter.attackTimeMs) &&
- validateTime(limiter.releaseTimeMs) && validateRatio(limiter.ratio) &&
- validateBandDb(limiter.thresholdDb);
+ const std::vector<DynamicsProcessing::LimiterConfig>& cfgs, int maxChannel) {
+ for (auto cfg : cfgs) {
+ if (!validateChannel(cfg.channel, maxChannel)) return false;
+ }
+ return true;
}
-bool DynamicsProcessingContext::validateInputGainConfig(const DynamicsProcessing::InputGain& gain,
- int maxChannel) {
- return validateChannel(gain.channel, maxChannel);
+bool DynamicsProcessingContext::validateInputGainConfig(
+ const std::vector<DynamicsProcessing::InputGain>& cfgs, int maxChannel) {
+ for (auto cfg : cfgs) {
+ if (!validateChannel(cfg.channel, maxChannel)) return false;
+ }
+ return true;
}
template <typename D>
@@ -482,7 +478,6 @@
}
RetCode DynamicsProcessingContext::setDpChannelBand_l(const std::any& anyConfig, StageType type,
- int maxCh, int maxBand,
std::set<std::pair<int, int>>& chBandSet) {
RETURN_VALUE_IF(!anyConfig.has_value(), RetCode::ERROR_ILLEGAL_PARAMETER, "bandInvalid");
RetCode ret = RetCode::SUCCESS;
@@ -493,8 +488,6 @@
case StageType::POSTEQ: {
dp_fx::DPEq* dp;
const auto& config = std::any_cast<DynamicsProcessing::EqBandConfig>(anyConfig);
- RETURN_VALUE_IF(!validateEqBandConfig(config, maxCh, maxBand),
- RetCode::ERROR_ILLEGAL_PARAMETER, "eqBandNotValid");
RETURN_VALUE_IF(
nullptr == (dp = getEqWithType_l(type, config.channel)) || !dp->isEnabled(),
RetCode::ERROR_ILLEGAL_PARAMETER, "dpEqNotExist");
@@ -507,8 +500,6 @@
case StageType::MBC: {
dp_fx::DPMbc* dp;
const auto& config = std::any_cast<DynamicsProcessing::MbcBandConfig>(anyConfig);
- RETURN_VALUE_IF(!validateMbcBandConfig(config, maxCh, maxBand),
- RetCode::ERROR_ILLEGAL_PARAMETER, "mbcBandNotValid");
RETURN_VALUE_IF(nullptr == (dp = getMbc_l(config.channel)) || !dp->isEnabled(),
RetCode::ERROR_ILLEGAL_PARAMETER, "dpMbcNotExist");
dp_fx::DPMbcBand band;
@@ -523,8 +514,6 @@
case StageType::LIMITER: {
dp_fx::DPChannel* dp;
const auto& config = std::any_cast<DynamicsProcessing::LimiterConfig>(anyConfig);
- RETURN_VALUE_IF(!validateLimiterConfig(config, maxCh),
- RetCode::ERROR_ILLEGAL_PARAMETER, "limiterBandNotValid");
RETURN_VALUE_IF(nullptr == (dp = getChannel_l(config.channel)),
RetCode::ERROR_ILLEGAL_PARAMETER, "dpChNotExist");
dp_fx::DPLimiter limiter;
@@ -538,8 +527,6 @@
case StageType::INPUTGAIN: {
dp_fx::DPChannel* dp;
const auto& config = std::any_cast<DynamicsProcessing::InputGain>(anyConfig);
- RETURN_VALUE_IF(!validateInputGainConfig(config, maxCh),
- RetCode::ERROR_ILLEGAL_PARAMETER, "inputGainNotValid");
RETURN_VALUE_IF(nullptr == (dp = getChannel_l(config.channel)),
RetCode::ERROR_ILLEGAL_PARAMETER, "dpChNotExist");
dp->setInputGain(config.gainDb);
@@ -554,14 +541,12 @@
}
template <typename T /* BandConfig */>
-RetCode DynamicsProcessingContext::setBands_l(
- const std::vector<T>& bands, int maxBand, StageType type) {
+RetCode DynamicsProcessingContext::setBands_l(const std::vector<T>& bands, StageType type) {
RetCode ret = RetCode::SUCCESS;
std::set<std::pair<int /* channel */, int /* band */>> bandSet;
for (const auto& it : bands) {
- if (RetCode::SUCCESS !=
- setDpChannelBand_l(std::make_any<T>(it), type, mChannelCount, maxBand, bandSet)) {
+ if (RetCode::SUCCESS != setDpChannelBand_l(std::make_any<T>(it), type, bandSet)) {
LOG(WARNING) << __func__ << " skipping band " << it.toString();
ret = RetCode::ERROR_ILLEGAL_PARAMETER;
continue;
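validateBandConfig() above folds the old per-band checks into one pass over the whole vector: every band's channel and band index must be in range, every band index up to the stage's band count must appear, and the cutoff frequencies must be non-decreasing in band order. A standalone sketch of that invariant follows; note that it sizes the frequency vector by the expected band count, which is an assumption on my part and purely illustrative:

    // Illustrative sketch of the band-config invariant checked above; not the
    // context implementation. Sizing by bandCount (assumed) keeps the index
    // in bounds when fewer bands than bandCount are supplied.
    #include <algorithm>
    #include <vector>

    struct Band { int channel; int band; float cutoffFrequencyHz; };

    static bool bandsValid(const std::vector<Band>& bands, int channelCount, int bandCount) {
        std::vector<float> freqs(bandCount, -1.0f);
        for (const auto& b : bands) {
            if (b.channel < 0 || b.channel >= channelCount) return false;
            if (b.band < 0 || b.band >= bandCount) return false;
            freqs[b.band] = b.cutoffFrequencyHz;  // remember each band's cutoff
        }
        if (std::count(freqs.begin(), freqs.end(), -1.0f)) return false;  // every band present
        return std::is_sorted(freqs.begin(), freqs.end());                // monotonic cutoffs
    }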
diff --git a/media/libeffects/dynamicsproc/aidl/DynamicsProcessingContext.h b/media/libeffects/dynamicsproc/aidl/DynamicsProcessingContext.h
index 8be784e..b8539f6 100644
--- a/media/libeffects/dynamicsproc/aidl/DynamicsProcessingContext.h
+++ b/media/libeffects/dynamicsproc/aidl/DynamicsProcessingContext.h
@@ -103,28 +103,22 @@
RetCode setDpChannels_l(const std::vector<DynamicsProcessing::ChannelConfig>& channels,
bool stageInUse, StageType type) REQUIRES(mMutex);
template <typename T /* BandConfig */>
- RetCode setBands_l(const std::vector<T>& bands, int maxBand, StageType type) REQUIRES(mMutex);
- RetCode setDpChannelBand_l(const std::any& anyConfig, StageType type, int maxCh, int maxBand,
+ RetCode setBands_l(const std::vector<T>& bands, StageType type) REQUIRES(mMutex);
+ RetCode setDpChannelBand_l(const std::any& anyConfig, StageType type,
std::set<std::pair<int, int>>& chBandSet) REQUIRES(mMutex);
std::vector<DynamicsProcessing::EqBandConfig> getEqBandConfigs(StageType type);
std::vector<DynamicsProcessing::ChannelConfig> getChannelConfig(StageType type);
- bool validateStageEnablement(const DynamicsProcessing::StageEnablement& enablement);
- bool validateEngineConfig(const DynamicsProcessing::EngineArchitecture& engine);
- bool validateEqBandConfig(const DynamicsProcessing::EqBandConfig& band, int maxChannel,
- int maxBand);
- bool validateMbcBandConfig(const DynamicsProcessing::MbcBandConfig& band, int maxChannel,
- int maxBand);
- bool validateLimiterConfig(const DynamicsProcessing::LimiterConfig& limiter, int maxChannel);
- bool validateInputGainConfig(const DynamicsProcessing::InputGain& gain, int maxChannel);
+ template <typename T /* BandConfig */>
+ bool validateBandConfig(const std::vector<T>& bands, int maxChannel, int maxBand);
+ bool validateLimiterConfig(const std::vector<DynamicsProcessing::LimiterConfig>& cfgs,
+ int maxChannel);
+ bool validateInputGainConfig(const std::vector<DynamicsProcessing::InputGain>& cfgs,
+ int maxChannel);
- inline bool validateCutoffFrequency(float freq);
inline bool validateChannel(int ch, int maxCh) { return ch >= 0 && ch < maxCh; }
inline bool validateBand(int band, int maxBand) { return band >= 0 && band < maxBand; }
- inline bool validateTime(int time) { return time >= 0; }
- inline bool validateRatio(int ratio) { return ratio >= 0; }
- inline bool validateBandDb(int db) { return db <= 0; }
};
} // namespace aidl::android::hardware::audio::effect
\ No newline at end of file
diff --git a/media/libeffects/hapticgenerator/aidl/EffectHapticGenerator.cpp b/media/libeffects/hapticgenerator/aidl/EffectHapticGenerator.cpp
index 7e22482..031477f 100644
--- a/media/libeffects/hapticgenerator/aidl/EffectHapticGenerator.cpp
+++ b/media/libeffects/hapticgenerator/aidl/EffectHapticGenerator.cpp
@@ -16,20 +16,22 @@
#define LOG_TAG "AHAL_HapticGeneratorImpl"
-#include "EffectHapticGenerator.h"
-
#include <android-base/logging.h>
#include <audio_effects/effect_hapticgenerator.h>
+#include <system/audio_effects/effect_uuid.h>
+
+#include "EffectHapticGenerator.h"
using aidl::android::hardware::audio::effect::Descriptor;
+using aidl::android::hardware::audio::effect::getEffectImplUuidHapticGenerator;
+using aidl::android::hardware::audio::effect::getEffectTypeUuidHapticGenerator;
using aidl::android::hardware::audio::effect::HapticGeneratorImpl;
using aidl::android::hardware::audio::effect::IEffect;
-using aidl::android::hardware::audio::effect::kHapticGeneratorImplUUID;
using aidl::android::media::audio::common::AudioUuid;
extern "C" binder_exception_t createEffect(const AudioUuid* in_impl_uuid,
std::shared_ptr<IEffect>* instanceSpp) {
- if (!in_impl_uuid || *in_impl_uuid != kHapticGeneratorImplUUID) {
+ if (!in_impl_uuid || *in_impl_uuid != getEffectImplUuidHapticGenerator()) {
LOG(ERROR) << __func__ << "uuid not supported";
return EX_ILLEGAL_ARGUMENT;
}
@@ -44,7 +46,7 @@
}
extern "C" binder_exception_t queryEffect(const AudioUuid* in_impl_uuid, Descriptor* _aidl_return) {
- if (!in_impl_uuid || *in_impl_uuid != kHapticGeneratorImplUUID) {
+ if (!in_impl_uuid || *in_impl_uuid != getEffectImplUuidHapticGenerator()) {
LOG(ERROR) << __func__ << "uuid not supported";
return EX_ILLEGAL_ARGUMENT;
}
@@ -56,8 +58,8 @@
const std::string HapticGeneratorImpl::kEffectName = "Haptic Generator";
const Descriptor HapticGeneratorImpl::kDescriptor = {
- .common = {.id = {.type = kHapticGeneratorTypeUUID,
- .uuid = kHapticGeneratorImplUUID,
+ .common = {.id = {.type = getEffectTypeUuidHapticGenerator(),
+ .uuid = getEffectImplUuidHapticGenerator(),
.proxy = std::nullopt},
.flags = {.type = Flags::Type::INSERT, .insert = Flags::Insert::FIRST},
.name = HapticGeneratorImpl::kEffectName,
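Like the other AIDL effect libraries touched by this change, the haptic generator exports createEffect()/queryEffect() as extern "C" entry points keyed on the implementation UUID. A hedged sketch of querying the descriptor through that entry point, assuming the symbol is reachable from the caller (illustrative, not part of the change):

    // Illustrative call into the exported entry point; assumes it is linked in or
    // otherwise resolved by the effect loader.
    Descriptor desc;
    const AudioUuid uuid = getEffectImplUuidHapticGenerator();
    binder_exception_t status = queryEffect(&uuid, &desc);
    // status == EX_NONE on success; any other UUID yields EX_ILLEGAL_ARGUMENT.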
diff --git a/media/libeffects/hapticgenerator/aidl/EffectHapticGenerator.h b/media/libeffects/hapticgenerator/aidl/EffectHapticGenerator.h
index 02ca392..fe9616a 100644
--- a/media/libeffects/hapticgenerator/aidl/EffectHapticGenerator.h
+++ b/media/libeffects/hapticgenerator/aidl/EffectHapticGenerator.h
@@ -20,7 +20,6 @@
#include "HapticGeneratorContext.h"
#include "effect-impl/EffectImpl.h"
-#include "effect-impl/EffectUUID.h"
namespace aidl::android::hardware::audio::effect {
diff --git a/media/libeffects/hapticgenerator/aidl/HapticGeneratorContext.cpp b/media/libeffects/hapticgenerator/aidl/HapticGeneratorContext.cpp
index 8ed579b..de44e05 100644
--- a/media/libeffects/hapticgenerator/aidl/HapticGeneratorContext.cpp
+++ b/media/libeffects/hapticgenerator/aidl/HapticGeneratorContext.cpp
@@ -17,6 +17,7 @@
#define LOG_TAG "AHAL_HapticGeneratorContext"
#include <Utils.h>
+#include <android-base/logging.h>
#include <android-base/parsedouble.h>
#include <android-base/properties.h>
@@ -193,9 +194,9 @@
mParams.mVibratorInfo.resonantFrequencyHz = DEFAULT_RESONANT_FREQUENCY;
mParams.mVibratorInfo.qFactor = DEFAULT_BSF_ZERO_Q;
- mParams.mAudioChannelCount = ::android::hardware::audio::common::getChannelCount(
+ mParams.mAudioChannelCount = ::aidl::android::hardware::audio::common::getChannelCount(
inputChMask, ~media::audio::common::AudioChannelLayout::LAYOUT_HAPTIC_AB);
- mParams.mHapticChannelCount = ::android::hardware::audio::common::getChannelCount(
+ mParams.mHapticChannelCount = ::aidl::android::hardware::audio::common::getChannelCount(
outputChMask, media::audio::common::AudioChannelLayout::LAYOUT_HAPTIC_AB);
LOG_ALWAYS_FATAL_IF(mParams.mHapticChannelCount > 2, "haptic channel count is too large");
for (size_t i = 0; i < mParams.mHapticChannelCount; ++i) {
diff --git a/media/libeffects/loudness/aidl/EffectLoudnessEnhancer.cpp b/media/libeffects/loudness/aidl/EffectLoudnessEnhancer.cpp
index 9d8bc80..a7d9282 100644
--- a/media/libeffects/loudness/aidl/EffectLoudnessEnhancer.cpp
+++ b/media/libeffects/loudness/aidl/EffectLoudnessEnhancer.cpp
@@ -17,19 +17,21 @@
#define LOG_TAG "AHAL_LoudnessEnhancerImpl"
#include <android-base/logging.h>
+#include <system/audio_effects/effect_uuid.h>
#include "EffectLoudnessEnhancer.h"
using aidl::android::hardware::audio::effect::Descriptor;
+using aidl::android::hardware::audio::effect::getEffectImplUuidLoudnessEnhancer;
+using aidl::android::hardware::audio::effect::getEffectTypeUuidLoudnessEnhancer;
using aidl::android::hardware::audio::effect::IEffect;
-using aidl::android::hardware::audio::effect::kLoudnessEnhancerImplUUID;
using aidl::android::hardware::audio::effect::LoudnessEnhancerImpl;
using aidl::android::hardware::audio::effect::State;
using aidl::android::media::audio::common::AudioUuid;
extern "C" binder_exception_t createEffect(const AudioUuid* in_impl_uuid,
std::shared_ptr<IEffect>* instanceSpp) {
- if (!in_impl_uuid || *in_impl_uuid != kLoudnessEnhancerImplUUID) {
+ if (!in_impl_uuid || *in_impl_uuid != getEffectImplUuidLoudnessEnhancer()) {
LOG(ERROR) << __func__ << "uuid not supported";
return EX_ILLEGAL_ARGUMENT;
}
@@ -44,7 +46,7 @@
}
extern "C" binder_exception_t queryEffect(const AudioUuid* in_impl_uuid, Descriptor* _aidl_return) {
- if (!in_impl_uuid || *in_impl_uuid != kLoudnessEnhancerImplUUID) {
+ if (!in_impl_uuid || *in_impl_uuid != getEffectImplUuidLoudnessEnhancer()) {
LOG(ERROR) << __func__ << "uuid not supported";
return EX_ILLEGAL_ARGUMENT;
}
@@ -56,8 +58,8 @@
const std::string LoudnessEnhancerImpl::kEffectName = "Loudness Enhancer";
const Descriptor LoudnessEnhancerImpl::kDescriptor = {
- .common = {.id = {.type = kLoudnessEnhancerTypeUUID,
- .uuid = kLoudnessEnhancerImplUUID,
+ .common = {.id = {.type = getEffectTypeUuidLoudnessEnhancer(),
+ .uuid = getEffectImplUuidLoudnessEnhancer(),
.proxy = std::nullopt},
.flags = {.type = Flags::Type::INSERT, .insert = Flags::Insert::FIRST},
.name = LoudnessEnhancerImpl::kEffectName,
diff --git a/media/libeffects/loudness/aidl/EffectLoudnessEnhancer.h b/media/libeffects/loudness/aidl/EffectLoudnessEnhancer.h
index 6402fd2..5b9e924 100644
--- a/media/libeffects/loudness/aidl/EffectLoudnessEnhancer.h
+++ b/media/libeffects/loudness/aidl/EffectLoudnessEnhancer.h
@@ -19,7 +19,6 @@
#include <aidl/android/hardware/audio/effect/BnEffect.h>
#include "effect-impl/EffectImpl.h"
-#include "effect-impl/EffectUUID.h"
#include "LoudnessEnhancerContext.h"
namespace aidl::android::hardware::audio::effect {
diff --git a/media/libeffects/loudness/aidl/LoudnessEnhancerContext.cpp b/media/libeffects/loudness/aidl/LoudnessEnhancerContext.cpp
index 033b222..bc3fa45 100644
--- a/media/libeffects/loudness/aidl/LoudnessEnhancerContext.cpp
+++ b/media/libeffects/loudness/aidl/LoudnessEnhancerContext.cpp
@@ -14,6 +14,10 @@
* limitations under the License.
*/
+#define LOG_TAG "LoudnessEnhancerContext"
+
+#include <Utils.h>
+
#include "LoudnessEnhancerContext.h"
namespace aidl::android::hardware::audio::effect {
@@ -21,17 +25,15 @@
LoudnessEnhancerContext::LoudnessEnhancerContext(int statusDepth, const Parameter::Common& common)
: EffectContext(statusDepth, common) {
LOG(DEBUG) << __func__;
- mState = LOUDNESS_ENHANCER_STATE_UNINITIALIZED;
- mSampleRate = common.input.base.sampleRate;
init_params();
}
LoudnessEnhancerContext::~LoudnessEnhancerContext() {
LOG(DEBUG) << __func__;
- mState = LOUDNESS_ENHANCER_STATE_UNINITIALIZED;
}
RetCode LoudnessEnhancerContext::enable() {
+ std::lock_guard lg(mMutex);
if (mState != LOUDNESS_ENHANCER_STATE_INITIALIZED) {
return RetCode::ERROR_EFFECT_LIB_ERROR;
}
@@ -40,6 +42,7 @@
}
RetCode LoudnessEnhancerContext::disable() {
+ std::lock_guard lg(mMutex);
if (mState != LOUDNESS_ENHANCER_STATE_ACTIVE) {
return RetCode::ERROR_EFFECT_LIB_ERROR;
}
@@ -49,12 +52,10 @@
void LoudnessEnhancerContext::reset() {
float targetAmp = pow(10, mGain / 2000.0f); // mB to linear amplification
- {
- std::lock_guard lg(mMutex);
- if (mCompressor != nullptr) {
- // Get samplingRate from input
- mCompressor->Initialize(targetAmp, mSampleRate);
- }
+ std::lock_guard lg(mMutex);
+ if (mCompressor != nullptr) {
+ // Get samplingRate from input
+ mCompressor->Initialize(targetAmp, mCommon.input.base.sampleRate);
}
}
@@ -75,39 +76,41 @@
auto frameSize = getInputFrameSize();
RETURN_VALUE_IF(0 == frameSize, status, "zeroFrameSize");
+ std::lock_guard lg(mMutex);
+ status = {STATUS_INVALID_OPERATION, 0, 0};
+ RETURN_VALUE_IF(mState != LOUDNESS_ENHANCER_STATE_ACTIVE, status, "stateNotActive");
+
LOG(DEBUG) << __func__ << " start processing";
- {
- std::lock_guard lg(mMutex);
- // PcmType is always expected to be Float 32 bit.
- constexpr float scale = 1 << 15; // power of 2 is lossless conversion to int16_t range
- constexpr float inverseScale = 1.f / scale;
- const float inputAmp = pow(10, mGain / 2000.0f) * scale;
- float leftSample, rightSample;
- if (mCompressor != nullptr) {
- for (int inIdx = 0; inIdx < samples; inIdx += 2) {
- // makeup gain is applied on the input of the compressor
- leftSample = inputAmp * in[inIdx];
- rightSample = inputAmp * in[inIdx + 1];
- mCompressor->Compress(&leftSample, &rightSample);
- in[inIdx] = leftSample * inverseScale;
- in[inIdx + 1] = rightSample * inverseScale;
- }
- } else {
- for (int inIdx = 0; inIdx < samples; inIdx += 2) {
- leftSample = inputAmp * in[inIdx];
- rightSample = inputAmp * in[inIdx + 1];
- in[inIdx] = leftSample * inverseScale;
- in[inIdx + 1] = rightSample * inverseScale;
- }
+ // PcmType is always expected to be Float 32 bit.
+ constexpr float scale = 1 << 15; // power of 2 is lossless conversion to int16_t range
+ constexpr float inverseScale = 1.f / scale;
+ const float inputAmp = pow(10, mGain / 2000.0f) * scale;
+ float leftSample, rightSample;
+
+ if (mCompressor != nullptr) {
+ for (int inIdx = 0; inIdx < samples; inIdx += 2) {
+ // makeup gain is applied on the input of the compressor
+ leftSample = inputAmp * in[inIdx];
+ rightSample = inputAmp * in[inIdx + 1];
+ mCompressor->Compress(&leftSample, &rightSample);
+ in[inIdx] = leftSample * inverseScale;
+ in[inIdx + 1] = rightSample * inverseScale;
}
- bool accumulate = false;
- if (in != out) {
- for (int i = 0; i < samples; i++) {
- if (accumulate) {
- out[i] += in[i];
- } else {
- out[i] = in[i];
- }
+ } else {
+ for (int inIdx = 0; inIdx < samples; inIdx += 2) {
+ leftSample = inputAmp * in[inIdx];
+ rightSample = inputAmp * in[inIdx + 1];
+ in[inIdx] = leftSample * inverseScale;
+ in[inIdx + 1] = rightSample * inverseScale;
+ }
+ }
+ bool accumulate = false;
+ if (in != out) {
+ for (int i = 0; i < samples; i++) {
+ if (accumulate) {
+ out[i] += in[i];
+ } else {
+ out[i] = in[i];
}
}
}
@@ -115,15 +118,17 @@
}
void LoudnessEnhancerContext::init_params() {
+ int channelCount = ::aidl::android::hardware::audio::common::getChannelCount(
+ mCommon.input.base.channelMask);
+ LOG_ALWAYS_FATAL_IF(channelCount != 2, "channel count %d not supported", channelCount);
+
mGain = LOUDNESS_ENHANCER_DEFAULT_TARGET_GAIN_MB;
float targetAmp = pow(10, mGain / 2000.0f); // mB to linear amplification
LOG(DEBUG) << __func__ << "Target gain = " << mGain << "mB <=> factor = " << targetAmp;
- {
- std::lock_guard lg(mMutex);
- mCompressor = std::make_unique<le_fx::AdaptiveDynamicRangeCompression>();
- mCompressor->Initialize(targetAmp, mSampleRate);
- }
+ std::lock_guard lg(mMutex);
+ mCompressor = std::make_unique<le_fx::AdaptiveDynamicRangeCompression>();
+ mCompressor->Initialize(targetAmp, mCommon.input.base.sampleRate);
mState = LOUDNESS_ENHANCER_STATE_INITIALIZED;
}
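The loudness enhancer keeps its gain in millibels throughout: mGain is turned into a linear amplitude with pow(10, mGain / 2000.0f), scaled into the int16 range with 1 << 15 before the compressor, and scaled back with the inverse factor afterwards. A small worked example of that conversion (illustrative arithmetic only):

    // Worked example of the mB-to-linear conversion used above (illustrative).
    // 600 mB = 6 dB; 10^(600 / 2000) = 10^0.3, roughly a 2x amplitude gain.
    #include <cmath>

    constexpr float kGainMb = 600.0f;
    const float linearAmp = powf(10.0f, kGainMb / 2000.0f);  // ~1.995
    constexpr float scale = 1 << 15;                          // lossless scale into int16 range
    const float inputAmp = linearAmp * scale;                 // applied per sample, undone by 1/scale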
diff --git a/media/libeffects/loudness/aidl/LoudnessEnhancerContext.h b/media/libeffects/loudness/aidl/LoudnessEnhancerContext.h
index b478b27..9a1ec4c 100644
--- a/media/libeffects/loudness/aidl/LoudnessEnhancerContext.h
+++ b/media/libeffects/loudness/aidl/LoudnessEnhancerContext.h
@@ -46,9 +46,8 @@
private:
std::mutex mMutex;
- LoudnessEnhancerState mState;
- int mSampleRate;
- int mGain;
+ LoudnessEnhancerState mState GUARDED_BY(mMutex) = LOUDNESS_ENHANCER_STATE_UNINITIALIZED;
+ int mGain = LOUDNESS_ENHANCER_DEFAULT_TARGET_GAIN_MB;
// In this implementation, there is no coupling between the compression on the left and right
// channels
std::unique_ptr<le_fx::AdaptiveDynamicRangeCompression> mCompressor GUARDED_BY(mMutex);
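mState and mCompressor are now guarded by mMutex through clang's thread-safety annotations, matching the std::lock_guard acquisitions added to enable(), disable(), process(), reset(), and init_params(). A minimal sketch of the pattern, assuming the usual GUARDED_BY macro from a thread-annotations header is available (not tied to this class):

    // Minimal GUARDED_BY sketch; assumes the clang thread-safety annotation macros
    // (e.g. GUARDED_BY) are in scope. Illustrative only.
    #include <mutex>

    class Counter {
      public:
        void increment() {
            std::lock_guard lg(mMutex);  // analysis flags any unlocked access to mValue
            ++mValue;
        }

      private:
        std::mutex mMutex;
        int mValue GUARDED_BY(mMutex) = 0;
    };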
diff --git a/media/libeffects/lvm/wrapper/Aidl/BundleContext.cpp b/media/libeffects/lvm/wrapper/Aidl/BundleContext.cpp
index 6124356..d026e2b 100644
--- a/media/libeffects/lvm/wrapper/Aidl/BundleContext.cpp
+++ b/media/libeffects/lvm/wrapper/Aidl/BundleContext.cpp
@@ -15,7 +15,9 @@
*/
#include <cstddef>
+
#define LOG_TAG "BundleContext"
+#include <android-base/logging.h>
#include <Utils.h>
#include "BundleContext.h"
@@ -690,7 +692,7 @@
std::vector<Virtualizer::ChannelAngle> BundleContext::getSpeakerAngles(
const Virtualizer::SpeakerAnglesPayload payload) {
std::vector<Virtualizer::ChannelAngle> angles;
- auto chCount = ::android::hardware::audio::common::getChannelCount(payload.layout);
+ auto chCount = ::aidl::android::hardware::audio::common::getChannelCount(payload.layout);
RETURN_VALUE_IF(!isConfigSupportedVirtualizer(chCount, payload.device), angles,
"payloadNotSupported");
diff --git a/media/libeffects/lvm/wrapper/Aidl/BundleTypes.h b/media/libeffects/lvm/wrapper/Aidl/BundleTypes.h
index 520371b..b3371a3 100644
--- a/media/libeffects/lvm/wrapper/Aidl/BundleTypes.h
+++ b/media/libeffects/lvm/wrapper/Aidl/BundleTypes.h
@@ -18,7 +18,8 @@
#include <array>
#include <aidl/android/hardware/audio/effect/BnEffect.h>
-#include "effect-impl/EffectUUID.h"
+#include <system/audio_effects/effect_uuid.h>
+
#include "effect-impl/EffectTypes.h"
#include "LVM.h"
@@ -82,33 +83,36 @@
MAKE_RANGE(Equalizer, centerFreqMh, std::vector<int>({1}), std::vector<int>({}))};
static const Capability kEqCap = {.range = kEqRanges};
static const std::string kEqualizerEffectName = "EqualizerBundle";
-static const Descriptor kEqualizerDesc = {.common = {.id = {.type = kEqualizerTypeUUID,
- .uuid = kEqualizerBundleImplUUID,
- .proxy = kEqualizerProxyUUID},
- .flags = {.type = Flags::Type::INSERT,
- .insert = Flags::Insert::FIRST,
- .volume = Flags::Volume::CTRL},
- .name = kEqualizerEffectName,
- .implementor = "NXP Software Ltd."},
- .capability = kEqCap};
+static const Descriptor kEqualizerDesc = {
+ .common = {.id = {.type = getEffectTypeUuidEqualizer(),
+ .uuid = getEffectImplUuidEqualizerBundle(),
+ .proxy = getEffectImplUuidEqualizerProxy()},
+
+ .flags = {.type = Flags::Type::INSERT,
+ .insert = Flags::Insert::FIRST,
+ .volume = Flags::Volume::CTRL},
+ .name = kEqualizerEffectName,
+ .implementor = "NXP Software Ltd."},
+ .capability = kEqCap};
static const int mMaxStrengthSupported = 1000;
static const std::vector<Range::BassBoostRange> kBassBoostRanges = {
MAKE_RANGE(BassBoost, strengthPm, 0, mMaxStrengthSupported)};
static const Capability kBassBoostCap = {.range = kBassBoostRanges};
static const std::string kBassBoostEffectName = "Dynamic Bass Boost";
-static const Descriptor kBassBoostDesc = {.common = {.id = {.type = kBassBoostTypeUUID,
- .uuid = kBassBoostBundleImplUUID,
- .proxy = kBassBoostProxyUUID},
- .flags = {.type = Flags::Type::INSERT,
- .insert = Flags::Insert::FIRST,
- .volume = Flags::Volume::CTRL,
- .deviceIndication = true},
- .cpuLoad = BASS_BOOST_CUP_LOAD_ARM9E,
- .memoryUsage = BUNDLE_MEM_USAGE,
- .name = kBassBoostEffectName,
- .implementor = "NXP Software Ltd."},
- .capability = kBassBoostCap};
+static const Descriptor kBassBoostDesc = {
+ .common = {.id = {.type = getEffectTypeUuidBassBoost(),
+ .uuid = getEffectImplUuidBassBoostBundle(),
+ .proxy = getEffectImplUuidBassBoostProxy()},
+ .flags = {.type = Flags::Type::INSERT,
+ .insert = Flags::Insert::FIRST,
+ .volume = Flags::Volume::CTRL,
+ .deviceIndication = true},
+ .cpuLoad = BASS_BOOST_CUP_LOAD_ARM9E,
+ .memoryUsage = BUNDLE_MEM_USAGE,
+ .name = kBassBoostEffectName,
+ .implementor = "NXP Software Ltd."},
+ .capability = kBassBoostCap};
static const std::vector<Range::VirtualizerRange> kVirtualizerRanges = {
MAKE_RANGE(Virtualizer, strengthPm, 0, mMaxStrengthSupported)};
@@ -116,9 +120,9 @@
static const std::string kVirtualizerEffectName = "Virtualizer";
static const Descriptor kVirtualizerDesc = {
- .common = {.id = {.type = kVirtualizerTypeUUID,
- .uuid = kVirtualizerBundleImplUUID,
- .proxy = kVirtualizerProxyUUID},
+ .common = {.id = {.type = getEffectTypeUuidVirtualizer(),
+ .uuid = getEffectImplUuidVirtualizerBundle(),
+ .proxy = getEffectImplUuidVirtualizerProxy()},
.flags = {.type = Flags::Type::INSERT,
.insert = Flags::Insert::LAST,
.volume = Flags::Volume::CTRL,
@@ -133,17 +137,18 @@
MAKE_RANGE(Volume, levelDb, -9600, 0)};
static const Capability kVolumeCap = {.range = kVolumeRanges};
static const std::string kVolumeEffectName = "Volume";
-static const Descriptor kVolumeDesc = {.common = {.id = {.type = kVolumeTypeUUID,
- .uuid = kVolumeBundleImplUUID,
- .proxy = std::nullopt},
- .flags = {.type = Flags::Type::INSERT,
- .insert = Flags::Insert::LAST,
- .volume = Flags::Volume::CTRL},
- .cpuLoad = VOLUME_CUP_LOAD_ARM9E,
- .memoryUsage = BUNDLE_MEM_USAGE,
- .name = kVolumeEffectName,
- .implementor = "NXP Software Ltd."},
- .capability = kVolumeCap};
+static const Descriptor kVolumeDesc = {
+ .common = {.id = {.type = getEffectTypeUuidVolume(),
+ .uuid = getEffectImplUuidVolumeBundle(),
+ .proxy = std::nullopt},
+ .flags = {.type = Flags::Type::INSERT,
+ .insert = Flags::Insert::LAST,
+ .volume = Flags::Volume::CTRL},
+ .cpuLoad = VOLUME_CUP_LOAD_ARM9E,
+ .memoryUsage = BUNDLE_MEM_USAGE,
+ .name = kVolumeEffectName,
+ .implementor = "NXP Software Ltd."},
+ .capability = kVolumeCap};
/* The following tables have been computed using the actual levels measured by the output of
* white noise or pink noise (IEC268-1) for the EQ and BassBoost Effects. These are estimates of
diff --git a/media/libeffects/lvm/wrapper/Aidl/EffectBundleAidl.cpp b/media/libeffects/lvm/wrapper/Aidl/EffectBundleAidl.cpp
index 1678570..cd9fb60 100644
--- a/media/libeffects/lvm/wrapper/Aidl/EffectBundleAidl.cpp
+++ b/media/libeffects/lvm/wrapper/Aidl/EffectBundleAidl.cpp
@@ -30,19 +30,21 @@
#include <LVM.h>
#include <limits.h>
+using aidl::android::hardware::audio::effect::getEffectImplUuidBassBoostBundle;
using aidl::android::hardware::audio::effect::Descriptor;
using aidl::android::hardware::audio::effect::EffectBundleAidl;
+using aidl::android::hardware::audio::effect::getEffectImplUuidEqualizerBundle;
using aidl::android::hardware::audio::effect::IEffect;
-using aidl::android::hardware::audio::effect::kBassBoostBundleImplUUID;
-using aidl::android::hardware::audio::effect::kEqualizerBundleImplUUID;
-using aidl::android::hardware::audio::effect::kVirtualizerBundleImplUUID;
-using aidl::android::hardware::audio::effect::kVolumeBundleImplUUID;
using aidl::android::hardware::audio::effect::State;
+using aidl::android::hardware::audio::effect::getEffectImplUuidVirtualizerBundle;
+using aidl::android::hardware::audio::effect::getEffectImplUuidVolumeBundle;
using aidl::android::media::audio::common::AudioUuid;
bool isUuidSupported(const AudioUuid* uuid) {
- return (*uuid == kEqualizerBundleImplUUID || *uuid == kBassBoostBundleImplUUID ||
- *uuid == kVirtualizerBundleImplUUID || *uuid == kVolumeBundleImplUUID);
+ return (*uuid == getEffectImplUuidBassBoostBundle() ||
+ *uuid == getEffectImplUuidEqualizerBundle() ||
+ *uuid == getEffectImplUuidVirtualizerBundle() ||
+ *uuid == getEffectImplUuidVolumeBundle());
}
extern "C" binder_exception_t createEffect(const AudioUuid* uuid,
@@ -66,13 +68,13 @@
LOG(ERROR) << __func__ << "uuid not supported";
return EX_ILLEGAL_ARGUMENT;
}
- if (*in_impl_uuid == kEqualizerBundleImplUUID) {
+ if (*in_impl_uuid == getEffectImplUuidEqualizerBundle()) {
*_aidl_return = aidl::android::hardware::audio::effect::lvm::kEqualizerDesc;
- } else if (*in_impl_uuid == kBassBoostBundleImplUUID) {
+ } else if (*in_impl_uuid == getEffectImplUuidBassBoostBundle()) {
*_aidl_return = aidl::android::hardware::audio::effect::lvm:: kBassBoostDesc;
- } else if (*in_impl_uuid == kVirtualizerBundleImplUUID) {
+ } else if (*in_impl_uuid == getEffectImplUuidVirtualizerBundle()) {
*_aidl_return = aidl::android::hardware::audio::effect::lvm::kVirtualizerDesc;
- } else if (*in_impl_uuid == kVolumeBundleImplUUID) {
+ } else if (*in_impl_uuid == getEffectImplUuidVolumeBundle()) {
*_aidl_return = aidl::android::hardware::audio::effect::lvm::kVolumeDesc;
}
return EX_NONE;
@@ -82,19 +84,19 @@
EffectBundleAidl::EffectBundleAidl(const AudioUuid& uuid) {
LOG(DEBUG) << __func__ << uuid.toString();
- if (uuid == kEqualizerBundleImplUUID) {
+ if (uuid == getEffectImplUuidEqualizerBundle()) {
mType = lvm::BundleEffectType::EQUALIZER;
mDescriptor = &lvm::kEqualizerDesc;
mEffectName = &lvm::kEqualizerEffectName;
- } else if (uuid == kBassBoostBundleImplUUID) {
+ } else if (uuid == getEffectImplUuidBassBoostBundle()) {
mType = lvm::BundleEffectType::BASS_BOOST;
mDescriptor = &lvm::kBassBoostDesc;
mEffectName = &lvm::kBassBoostEffectName;
- } else if (uuid == kVirtualizerBundleImplUUID) {
+ } else if (uuid == getEffectImplUuidVirtualizerBundle()) {
mType = lvm::BundleEffectType::VIRTUALIZER;
mDescriptor = &lvm::kVirtualizerDesc;
mEffectName = &lvm::kVirtualizerEffectName;
- } else if (uuid == kVolumeBundleImplUUID) {
+ } else if (uuid == getEffectImplUuidVolumeBundle()) {
mType = lvm::BundleEffectType::VOLUME;
mDescriptor = &lvm::kVolumeDesc;
mEffectName = &lvm::kVolumeEffectName;
@@ -308,7 +310,7 @@
eqParam.set<Equalizer::centerFreqMh>(mContext->getEqualizerCenterFreqs());
break;
}
- case Equalizer::vendorExtension: {
+ case Equalizer::vendor: {
LOG(ERROR) << __func__ << " not handled tag: " << toString(tag);
return ndk::ScopedAStatus::fromExceptionCodeWithMessage(
EX_ILLEGAL_ARGUMENT, "unsupportedTag");
@@ -373,8 +375,9 @@
ndk::ScopedAStatus EffectBundleAidl::getParameterVirtualizer(const Virtualizer::Id& id,
Parameter::Specific* specific) {
- RETURN_IF(id.getTag() != Virtualizer::Id::commonTag, EX_ILLEGAL_ARGUMENT,
- "VirtualizerTagNotSupported");
+ RETURN_IF((id.getTag() != Virtualizer::Id::commonTag) &&
+ (id.getTag() != Virtualizer::Id::speakerAnglesPayload),
+ EX_ILLEGAL_ARGUMENT, "VirtualizerTagNotSupported");
RETURN_IF(!mContext, EX_NULL_POINTER, "nullContext");
Virtualizer vrParam;
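getParameterVirtualizer() now accepts both the commonTag and speakerAnglesPayload variants of Virtualizer::Id. The same check pulled out as a helper, for illustration only (the helper itself is hypothetical):

    // Hypothetical helper; the tag names come from the change above, the function
    // itself is only illustrative.
    static bool isSupportedVirtualizerId(const Virtualizer::Id& id) {
        const auto tag = id.getTag();
        return tag == Virtualizer::Id::commonTag ||
               tag == Virtualizer::Id::speakerAnglesPayload;
    }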
diff --git a/media/libeffects/lvm/wrapper/Aidl/EffectBundleAidl.h b/media/libeffects/lvm/wrapper/Aidl/EffectBundleAidl.h
index 0330e5a..ec1abe8 100644
--- a/media/libeffects/lvm/wrapper/Aidl/EffectBundleAidl.h
+++ b/media/libeffects/lvm/wrapper/Aidl/EffectBundleAidl.h
@@ -23,7 +23,6 @@
#include <android-base/logging.h>
#include "effect-impl/EffectImpl.h"
-#include "effect-impl/EffectUUID.h"
#include "BundleContext.h"
#include "BundleTypes.h"
diff --git a/media/libeffects/lvm/wrapper/Reverb/aidl/EffectReverb.cpp b/media/libeffects/lvm/wrapper/Reverb/aidl/EffectReverb.cpp
index e9bdf94..73141b6 100644
--- a/media/libeffects/lvm/wrapper/Reverb/aidl/EffectReverb.cpp
+++ b/media/libeffects/lvm/wrapper/Reverb/aidl/EffectReverb.cpp
@@ -31,17 +31,19 @@
using aidl::android::hardware::audio::effect::Descriptor;
using aidl::android::hardware::audio::effect::EffectReverb;
+using aidl::android::hardware::audio::effect::getEffectImplUuidAuxEnvReverb;
+using aidl::android::hardware::audio::effect::getEffectImplUuidAuxPresetReverb;
+using aidl::android::hardware::audio::effect::getEffectImplUuidInsertEnvReverb;
+using aidl::android::hardware::audio::effect::getEffectImplUuidInsertPresetReverb;
using aidl::android::hardware::audio::effect::IEffect;
-using aidl::android::hardware::audio::effect::kAuxEnvReverbImplUUID;
-using aidl::android::hardware::audio::effect::kAuxPresetReverbImplUUID;
-using aidl::android::hardware::audio::effect::kInsertEnvReverbImplUUID;
-using aidl::android::hardware::audio::effect::kInsertPresetReverbImplUUID;
using aidl::android::hardware::audio::effect::State;
using aidl::android::media::audio::common::AudioUuid;
bool isReverbUuidSupported(const AudioUuid* uuid) {
- return (*uuid == kAuxEnvReverbImplUUID || *uuid == kInsertEnvReverbImplUUID ||
- *uuid == kAuxPresetReverbImplUUID || *uuid == kInsertPresetReverbImplUUID);
+ return (*uuid == getEffectImplUuidAuxEnvReverb() ||
+ *uuid == getEffectImplUuidAuxPresetReverb() ||
+ *uuid == getEffectImplUuidInsertEnvReverb() ||
+ *uuid == getEffectImplUuidInsertPresetReverb());
}
extern "C" binder_exception_t createEffect(const AudioUuid* uuid,
@@ -61,19 +63,18 @@
}
extern "C" binder_exception_t queryEffect(const AudioUuid* in_impl_uuid, Descriptor* _aidl_return) {
- if (!in_impl_uuid || !isReverbUuidSupported(in_impl_uuid)) {
+ if (*in_impl_uuid == getEffectImplUuidAuxEnvReverb()) {
+ *_aidl_return = aidl::android::hardware::audio::effect::lvm::kAuxEnvReverbDesc;
+ } else if (*in_impl_uuid == getEffectImplUuidInsertEnvReverb()) {
+ *_aidl_return = aidl::android::hardware::audio::effect::lvm::kInsertEnvReverbDesc;
+ } else if (*in_impl_uuid == getEffectImplUuidAuxPresetReverb()) {
+ *_aidl_return = aidl::android::hardware::audio::effect::lvm::kAuxPresetReverbDesc;
+ } else if (*in_impl_uuid == getEffectImplUuidInsertPresetReverb()) {
+ *_aidl_return = aidl::android::hardware::audio::effect::lvm::kInsertPresetReverbDesc;
+ } else {
LOG(ERROR) << __func__ << "uuid not supported";
return EX_ILLEGAL_ARGUMENT;
}
- if (*in_impl_uuid == kAuxEnvReverbImplUUID) {
- *_aidl_return = aidl::android::hardware::audio::effect::lvm::kAuxEnvReverbDesc;
- } else if (*in_impl_uuid == kInsertEnvReverbImplUUID) {
- *_aidl_return = aidl::android::hardware::audio::effect::lvm::kInsertEnvReverbDesc;
- } else if (*in_impl_uuid == kAuxPresetReverbImplUUID) {
- *_aidl_return = aidl::android::hardware::audio::effect::lvm::kAuxPresetReverbDesc;
- } else if (*in_impl_uuid == kInsertPresetReverbImplUUID) {
- *_aidl_return = aidl::android::hardware::audio::effect::lvm::kInsertPresetReverbDesc;
- }
return EX_NONE;
}
@@ -81,19 +82,19 @@
EffectReverb::EffectReverb(const AudioUuid& uuid) {
LOG(DEBUG) << __func__ << uuid.toString();
- if (uuid == kAuxEnvReverbImplUUID) {
+ if (uuid == getEffectImplUuidAuxEnvReverb()) {
mType = lvm::ReverbEffectType::AUX_ENV;
mDescriptor = &lvm::kAuxEnvReverbDesc;
mEffectName = &lvm::kAuxEnvReverbEffectName;
- } else if (uuid == kInsertEnvReverbImplUUID) {
+ } else if (uuid == getEffectImplUuidInsertEnvReverb()) {
mType = lvm::ReverbEffectType::INSERT_ENV;
mDescriptor = &lvm::kInsertEnvReverbDesc;
mEffectName = &lvm::kInsertEnvReverbEffectName;
- } else if (uuid == kAuxPresetReverbImplUUID) {
+ } else if (uuid == getEffectImplUuidAuxPresetReverb()) {
mType = lvm::ReverbEffectType::AUX_PRESET;
mDescriptor = &lvm::kAuxPresetReverbDesc;
mEffectName = &lvm::kAuxPresetReverbEffectName;
- } else if (uuid == kInsertPresetReverbImplUUID) {
+ } else if (uuid == getEffectImplUuidInsertPresetReverb()) {
mType = lvm::ReverbEffectType::INSERT_PRESET;
mDescriptor = &lvm::kInsertPresetReverbDesc;
mEffectName = &lvm::kInsertPresetReverbEffectName;
diff --git a/media/libeffects/lvm/wrapper/Reverb/aidl/ReverbContext.cpp b/media/libeffects/lvm/wrapper/Reverb/aidl/ReverbContext.cpp
index 87aa12b..79e67f2 100644
--- a/media/libeffects/lvm/wrapper/Reverb/aidl/ReverbContext.cpp
+++ b/media/libeffects/lvm/wrapper/Reverb/aidl/ReverbContext.cpp
@@ -15,7 +15,9 @@
*/
#include <cstddef>
+
#define LOG_TAG "ReverbContext"
+#include <android-base/logging.h>
#include <Utils.h>
#include "ReverbContext.h"
@@ -301,7 +303,7 @@
/* General parameters */
params.OperatingMode = LVM_MODE_ON;
params.SampleRate = LVM_FS_44100;
- params.SourceFormat = (::android::hardware::audio::common::getChannelCount(
+ params.SourceFormat = (::aidl::android::hardware::audio::common::getChannelCount(
mCommon.input.base.channelMask) == 1
? LVM_MONO
: LVM_STEREO);
@@ -363,10 +365,10 @@
LOG(DEBUG) << __func__ << " start processing";
std::lock_guard lg(mMutex);
- int channels =
- ::android::hardware::audio::common::getChannelCount(mCommon.input.base.channelMask);
- int outChannels =
- ::android::hardware::audio::common::getChannelCount(mCommon.output.base.channelMask);
+ int channels = ::aidl::android::hardware::audio::common::getChannelCount(
+ mCommon.input.base.channelMask);
+ int outChannels = ::aidl::android::hardware::audio::common::getChannelCount(
+ mCommon.output.base.channelMask);
int frameCount = mCommon.input.frameCount;
// Reverb only effects the stereo channels in multichannel source.
diff --git a/media/libeffects/lvm/wrapper/Reverb/aidl/ReverbTypes.h b/media/libeffects/lvm/wrapper/Reverb/aidl/ReverbTypes.h
index 8dcda87..37f9287 100644
--- a/media/libeffects/lvm/wrapper/Reverb/aidl/ReverbTypes.h
+++ b/media/libeffects/lvm/wrapper/Reverb/aidl/ReverbTypes.h
@@ -20,7 +20,8 @@
#include <android/binder_enums.h>
#include <audio_effects/effect_environmentalreverb.h>
#include <audio_effects/effect_presetreverb.h>
-#include "effect-impl/EffectUUID.h"
+#include <system/audio_effects/effect_uuid.h>
+
#include "effect-impl/EffectTypes.h"
// from Reverb/lib
#include "LVREV.h"
@@ -50,29 +51,31 @@
// NXP SW auxiliary environmental reverb
static const std::string kAuxEnvReverbEffectName = "Auxiliary Environmental Reverb";
-static const Descriptor kAuxEnvReverbDesc = {.common = {.id = {.type = kEnvReverbTypeUUID,
- .uuid = kAuxEnvReverbImplUUID,
- .proxy = std::nullopt},
- .flags = {.type = Flags::Type::AUXILIARY},
- .cpuLoad = kCpuLoadARM9E,
- .memoryUsage = kMemUsage,
- .name = kAuxEnvReverbEffectName,
- .implementor = "NXP Software Ltd."},
- .capability = kEnvReverbCap};
+static const Descriptor kAuxEnvReverbDesc = {
+ .common = {.id = {.type = getEffectTypeUuidEnvReverb(),
+ .uuid = getEffectImplUuidAuxEnvReverb(),
+ .proxy = std::nullopt},
+ .flags = {.type = Flags::Type::AUXILIARY},
+ .cpuLoad = kCpuLoadARM9E,
+ .memoryUsage = kMemUsage,
+ .name = kAuxEnvReverbEffectName,
+ .implementor = "NXP Software Ltd."},
+ .capability = kEnvReverbCap};
// NXP SW insert environmental reverb
static const std::string kInsertEnvReverbEffectName = "Insert Environmental Reverb";
-static const Descriptor kInsertEnvReverbDesc = {.common = {.id = {.type = kEnvReverbTypeUUID,
- .uuid = kInsertEnvReverbImplUUID,
- .proxy = std::nullopt},
- .flags = {.type = Flags::Type::INSERT,
- .insert = Flags::Insert::FIRST,
- .volume = Flags::Volume::CTRL},
- .cpuLoad = kCpuLoadARM9E,
- .memoryUsage = kMemUsage,
- .name = kInsertEnvReverbEffectName,
- .implementor = "NXP Software Ltd."},
- .capability = kEnvReverbCap};
+static const Descriptor kInsertEnvReverbDesc = {
+ .common = {.id = {.type = getEffectTypeUuidEnvReverb(),
+ .uuid = getEffectImplUuidInsertEnvReverb(),
+ .proxy = std::nullopt},
+ .flags = {.type = Flags::Type::INSERT,
+ .insert = Flags::Insert::FIRST,
+ .volume = Flags::Volume::CTRL},
+ .cpuLoad = kCpuLoadARM9E,
+ .memoryUsage = kMemUsage,
+ .name = kInsertEnvReverbEffectName,
+ .implementor = "NXP Software Ltd."},
+ .capability = kEnvReverbCap};
static const std::vector<PresetReverb::Presets> kSupportedPresets{
ndk::enum_range<PresetReverb::Presets>().begin(),
@@ -85,8 +88,8 @@
// NXP SW auxiliary preset reverb
static const std::string kAuxPresetReverbEffectName = "Auxiliary Preset Reverb";
static const Descriptor kAuxPresetReverbDesc = {
- .common = {.id = {.type = kPresetReverbTypeUUID,
- .uuid = kAuxPresetReverbImplUUID,
+ .common = {.id = {.type = getEffectTypeUuidPresetReverb(),
+ .uuid = getEffectImplUuidAuxPresetReverb(),
.proxy = std::nullopt},
.flags = {.type = Flags::Type::AUXILIARY},
.cpuLoad = kCpuLoadARM9E,
@@ -98,8 +101,8 @@
// NXP SW insert preset reverb
static const std::string kInsertPresetReverbEffectName = "Insert Preset Reverb";
static const Descriptor kInsertPresetReverbDesc = {
- .common = {.id = {.type = kPresetReverbTypeUUID,
- .uuid = kInsertPresetReverbImplUUID,
+ .common = {.id = {.type = getEffectTypeUuidPresetReverb(),
+ .uuid = getEffectImplUuidInsertPresetReverb(),
.proxy = std::nullopt},
.flags = {.type = Flags::Type::INSERT,
.insert = Flags::Insert::FIRST,
diff --git a/media/libeffects/preprocessing/aidl/EffectPreProcessing.cpp b/media/libeffects/preprocessing/aidl/EffectPreProcessing.cpp
index b9df915..e8ae8b3 100644
--- a/media/libeffects/preprocessing/aidl/EffectPreProcessing.cpp
+++ b/media/libeffects/preprocessing/aidl/EffectPreProcessing.cpp
@@ -24,19 +24,22 @@
#include "EffectPreProcessing.h"
+using aidl::android::hardware::audio::effect::getEffectImplUuidAcousticEchoCancelerSw;
+using aidl::android::hardware::audio::effect::getEffectImplUuidAutomaticGainControlV1Sw;
+using aidl::android::hardware::audio::effect::getEffectImplUuidAutomaticGainControlV2Sw;
+using aidl::android::hardware::audio::effect::getEffectImplUuidNoiseSuppressionSw;
+
using aidl::android::hardware::audio::effect::Descriptor;
using aidl::android::hardware::audio::effect::EffectPreProcessing;
using aidl::android::hardware::audio::effect::IEffect;
-using aidl::android::hardware::audio::effect::kAcousticEchoCancelerSwImplUUID;
-using aidl::android::hardware::audio::effect::kAutomaticGainControlV1SwImplUUID;
-using aidl::android::hardware::audio::effect::kAutomaticGainControlV2SwImplUUID;
-using aidl::android::hardware::audio::effect::kNoiseSuppressionSwImplUUID;
using aidl::android::hardware::audio::effect::State;
using aidl::android::media::audio::common::AudioUuid;
bool isPreProcessingUuidSupported(const AudioUuid& uuid) {
- return (uuid == kAcousticEchoCancelerSwImplUUID || uuid == kAutomaticGainControlV1SwImplUUID ||
- uuid == kAutomaticGainControlV2SwImplUUID || uuid == kNoiseSuppressionSwImplUUID);
+ return uuid == getEffectImplUuidAcousticEchoCancelerSw() ||
+ uuid == getEffectImplUuidAutomaticGainControlV1Sw() ||
+ uuid == getEffectImplUuidAutomaticGainControlV2Sw() ||
+ uuid == getEffectImplUuidNoiseSuppressionSw();
}
extern "C" binder_exception_t createEffect(const AudioUuid* uuid,
@@ -60,13 +63,13 @@
LOG(ERROR) << __func__ << "uuid not supported";
return EX_ILLEGAL_ARGUMENT;
}
- if (*in_impl_uuid == kAcousticEchoCancelerSwImplUUID) {
+ if (*in_impl_uuid == getEffectImplUuidAcousticEchoCancelerSw()) {
*_aidl_return = aidl::android::hardware::audio::effect::kAcousticEchoCancelerDesc;
- } else if (*in_impl_uuid == kAutomaticGainControlV1SwImplUUID) {
+ } else if (*in_impl_uuid == getEffectImplUuidAutomaticGainControlV1Sw()) {
*_aidl_return = aidl::android::hardware::audio::effect::kAutomaticGainControlV1Desc;
- } else if (*in_impl_uuid == kAutomaticGainControlV2SwImplUUID) {
+ } else if (*in_impl_uuid == getEffectImplUuidAutomaticGainControlV2Sw()) {
*_aidl_return = aidl::android::hardware::audio::effect::kAutomaticGainControlV2Desc;
- } else if (*in_impl_uuid == kNoiseSuppressionSwImplUUID) {
+ } else if (*in_impl_uuid == getEffectImplUuidNoiseSuppressionSw()) {
*_aidl_return = aidl::android::hardware::audio::effect::kNoiseSuppressionDesc;
}
return EX_NONE;
@@ -76,19 +79,19 @@
EffectPreProcessing::EffectPreProcessing(const AudioUuid& uuid) {
LOG(DEBUG) << __func__ << uuid.toString();
- if (uuid == kAcousticEchoCancelerSwImplUUID) {
+ if (uuid == getEffectImplUuidAcousticEchoCancelerSw()) {
mType = PreProcessingEffectType::ACOUSTIC_ECHO_CANCELLATION;
mDescriptor = &kAcousticEchoCancelerDesc;
mEffectName = &kAcousticEchoCancelerEffectName;
- } else if (uuid == kAutomaticGainControlV1SwImplUUID) {
+ } else if (uuid == getEffectImplUuidAutomaticGainControlV1Sw()) {
mType = PreProcessingEffectType::AUTOMATIC_GAIN_CONTROL_V1;
mDescriptor = &kAutomaticGainControlV1Desc;
mEffectName = &kAutomaticGainControlV1EffectName;
- } else if (uuid == kAutomaticGainControlV2SwImplUUID) {
+ } else if (uuid == getEffectImplUuidAutomaticGainControlV2Sw()) {
mType = PreProcessingEffectType::AUTOMATIC_GAIN_CONTROL_V2;
mDescriptor = &kAutomaticGainControlV2Desc;
mEffectName = &kAutomaticGainControlV2EffectName;
- } else if (uuid == kNoiseSuppressionSwImplUUID) {
+ } else if (uuid == getEffectImplUuidNoiseSuppressionSw()) {
mType = PreProcessingEffectType::NOISE_SUPPRESSION;
mDescriptor = &kNoiseSuppressionDesc;
mEffectName = &kNoiseSuppressionEffectName;
diff --git a/media/libeffects/preprocessing/aidl/PreProcessingContext.cpp b/media/libeffects/preprocessing/aidl/PreProcessingContext.cpp
index 104277e..c1e4eda 100644
--- a/media/libeffects/preprocessing/aidl/PreProcessingContext.cpp
+++ b/media/libeffects/preprocessing/aidl/PreProcessingContext.cpp
@@ -148,11 +148,11 @@
void PreProcessingContext::updateConfigs(const Parameter::Common& common) {
mInputConfig.set_sample_rate_hz(common.input.base.sampleRate);
- mInputConfig.set_num_channels(
- ::android::hardware::audio::common::getChannelCount(common.input.base.channelMask));
+ mInputConfig.set_num_channels(::aidl::android::hardware::audio::common::getChannelCount(
+ common.input.base.channelMask));
mOutputConfig.set_sample_rate_hz(common.input.base.sampleRate);
- mOutputConfig.set_num_channels(
- ::android::hardware::audio::common::getChannelCount(common.output.base.channelMask));
+ mOutputConfig.set_num_channels(::aidl::android::hardware::audio::common::getChannelCount(
+ common.output.base.channelMask));
}
RetCode PreProcessingContext::setAcousticEchoCancelerEchoDelay(int echoDelayUs) {
diff --git a/media/libeffects/preprocessing/aidl/PreProcessingTypes.h b/media/libeffects/preprocessing/aidl/PreProcessingTypes.h
index 2c880d4..4c2b8ba 100644
--- a/media/libeffects/preprocessing/aidl/PreProcessingTypes.h
+++ b/media/libeffects/preprocessing/aidl/PreProcessingTypes.h
@@ -16,15 +16,17 @@
#pragma once
+#include <optional>
+
#include <aidl/android/hardware/audio/effect/BnEffect.h>
#include <audio_effects/effect_aec.h>
#include <audio_effects/effect_agc.h>
#include <audio_effects/effect_agc2.h>
#include <audio_effects/effect_ns.h>
+#include <system/audio_effects/effect_uuid.h>
#include "effect-impl/EffectTypes.h"
-#include "effect-impl/EffectUUID.h"
namespace aidl::android::hardware::audio::effect {
@@ -34,9 +36,9 @@
MAKE_RANGE(AcousticEchoCanceler, AcousticEchoCanceler::echoDelayUs, 0, 500)};
static const Capability kAcousticEchoCancelerCap = {.range = kAcousticEchoCancelerRanges};
static const Descriptor kAcousticEchoCancelerDesc = {
- .common = {.id = {.type = kAcousticEchoCancelerTypeUUID,
- .uuid = kAcousticEchoCancelerSwImplUUID,
- .proxy = kEffectNullUuid},
+ .common = {.id = {.type = getEffectTypeUuidAcousticEchoCanceler(),
+ .uuid = getEffectImplUuidAcousticEchoCancelerSw(),
+ .proxy = std::nullopt},
.flags = {.type = Flags::Type::PRE_PROC, .deviceIndication = true},
.name = kAcousticEchoCancelerEffectName,
.implementor = "The Android Open Source Project"},
@@ -49,9 +51,9 @@
MAKE_RANGE(AutomaticGainControlV1, AutomaticGainControlV1::maxCompressionGainDb, 0, 9000)};
static const Capability kAutomaticGainControlV1Cap = {.range = kAutomaticGainControlV1Ranges};
static const Descriptor kAutomaticGainControlV1Desc = {
- .common = {.id = {.type = kAutomaticGainControlV1TypeUUID,
- .uuid = kAutomaticGainControlV1SwImplUUID,
- .proxy = kEffectNullUuid},
+ .common = {.id = {.type = getEffectTypeUuidAutomaticGainControlV1(),
+ .uuid = getEffectImplUuidAutomaticGainControlV1Sw(),
+ .proxy = std::nullopt},
.flags = {.type = Flags::Type::PRE_PROC, .deviceIndication = true},
.name = kAutomaticGainControlV1EffectName,
.implementor = "The Android Open Source Project"},
@@ -69,9 +71,9 @@
AutomaticGainControlV2::LevelEstimator::RMS)};
static const Capability kAutomaticGainControlV2Cap = {.range = kAutomaticGainControlV2Ranges};
static const Descriptor kAutomaticGainControlV2Desc = {
- .common = {.id = {.type = kAutomaticGainControlV2TypeUUID,
- .uuid = kAutomaticGainControlV2SwImplUUID,
- .proxy = kEffectNullUuid},
+ .common = {.id = {.type = getEffectTypeUuidAutomaticGainControlV2(),
+ .uuid = getEffectImplUuidAutomaticGainControlV2Sw(),
+ .proxy = std::nullopt},
.flags = {.type = Flags::Type::PRE_PROC, .deviceIndication = true},
.name = kAutomaticGainControlV2EffectName,
.implementor = "The Android Open Source Project"},
@@ -80,9 +82,9 @@
// Noise suppression
static const std::string kNoiseSuppressionEffectName = "Noise Suppression";
static const Descriptor kNoiseSuppressionDesc = {
- .common = {.id = {.type = kNoiseSuppressionTypeUUID,
- .uuid = kNoiseSuppressionSwImplUUID,
- .proxy = kEffectNullUuid},
+ .common = {.id = {.type = getEffectTypeUuidNoiseSuppression(),
+ .uuid = getEffectImplUuidNoiseSuppressionSw(),
+ .proxy = std::nullopt},
.flags = {.type = Flags::Type::PRE_PROC, .deviceIndication = true},
.name = kNoiseSuppressionEffectName,
.implementor = "The Android Open Source Project"}};
diff --git a/media/libeffects/visualizer/aidl/Visualizer.cpp b/media/libeffects/visualizer/aidl/Visualizer.cpp
index 6e7833c..53bfb41 100644
--- a/media/libeffects/visualizer/aidl/Visualizer.cpp
+++ b/media/libeffects/visualizer/aidl/Visualizer.cpp
@@ -17,18 +17,21 @@
#define LOG_TAG "AHAL_VisualizerLibEffects"
#include <android-base/logging.h>
+#include <system/audio_effects/effect_uuid.h>
+
#include "Visualizer.h"
using aidl::android::hardware::audio::effect::Descriptor;
+using aidl::android::hardware::audio::effect::getEffectImplUuidVisualizer;
+using aidl::android::hardware::audio::effect::getEffectTypeUuidVisualizer;
using aidl::android::hardware::audio::effect::IEffect;
-using aidl::android::hardware::audio::effect::VisualizerImpl;
-using aidl::android::hardware::audio::effect::kVisualizerImplUUID;
using aidl::android::hardware::audio::effect::State;
+using aidl::android::hardware::audio::effect::VisualizerImpl;
using aidl::android::media::audio::common::AudioUuid;
extern "C" binder_exception_t createEffect(const AudioUuid* in_impl_uuid,
std::shared_ptr<IEffect>* instanceSpp) {
- if (!in_impl_uuid || *in_impl_uuid != kVisualizerImplUUID) {
+ if (!in_impl_uuid || *in_impl_uuid != getEffectImplUuidVisualizer()) {
LOG(ERROR) << __func__ << "uuid not supported";
return EX_ILLEGAL_ARGUMENT;
}
@@ -43,7 +46,7 @@
}
extern "C" binder_exception_t queryEffect(const AudioUuid* in_impl_uuid, Descriptor* _aidl_return) {
- if (!in_impl_uuid || *in_impl_uuid != kVisualizerImplUUID) {
+ if (!in_impl_uuid || *in_impl_uuid != getEffectImplUuidVisualizer()) {
LOG(ERROR) << __func__ << "uuid not supported";
return EX_ILLEGAL_ARGUMENT;
}
@@ -65,8 +68,8 @@
const Capability VisualizerImpl::kCapability = {
.range = Range::make<Range::visualizer>(VisualizerImpl::kRanges)};
const Descriptor VisualizerImpl::kDescriptor = {
- .common = {.id = {.type = kVisualizerTypeUUID,
- .uuid = kVisualizerImplUUID,
+ .common = {.id = {.type = getEffectTypeUuidVisualizer(),
+ .uuid = getEffectImplUuidVisualizer(),
.proxy = std::nullopt},
.flags = {.type = Flags::Type::INSERT,
.insert = Flags::Insert::LAST,
diff --git a/media/libeffects/visualizer/aidl/Visualizer.h b/media/libeffects/visualizer/aidl/Visualizer.h
index f6e1d6d..ec725db 100644
--- a/media/libeffects/visualizer/aidl/Visualizer.h
+++ b/media/libeffects/visualizer/aidl/Visualizer.h
@@ -19,7 +19,6 @@
#include <aidl/android/hardware/audio/effect/BnEffect.h>
#include "effect-impl/EffectImpl.h"
-#include "effect-impl/EffectUUID.h"
#include "VisualizerContext.h"
diff --git a/media/libeffects/visualizer/aidl/VisualizerContext.cpp b/media/libeffects/visualizer/aidl/VisualizerContext.cpp
index 4405407..5d0d08d 100644
--- a/media/libeffects/visualizer/aidl/VisualizerContext.cpp
+++ b/media/libeffects/visualizer/aidl/VisualizerContext.cpp
@@ -17,18 +17,19 @@
#include "VisualizerContext.h"
#include <algorithm>
+#include <math.h>
+#include <time.h>
+
#include <android/binder_status.h>
#include <audio_utils/primitives.h>
-#include <math.h>
#include <system/audio.h>
-#include <time.h>
#include <Utils.h>
#ifndef BUILD_FLOAT
#error AIDL Visualizer only support float 32bits, make sure add cflags -DBUILD_FLOAT,
#endif
-using android::hardware::audio::common::getChannelCount;
+using aidl::android::hardware::audio::common::getChannelCount;
namespace aidl::android::hardware::audio::effect {
@@ -191,9 +192,15 @@
std::vector<uint8_t> VisualizerContext::capture() {
std::vector<uint8_t> result;
std::lock_guard lg(mMutex);
- RETURN_VALUE_IF(mState != State::ACTIVE, result, "illegalState");
- const uint32_t deltaMs = getDeltaTimeMsFromUpdatedTime_l();
+ // cts android.media.audio.cts.VisualizerTest expects silence data when the effect is not running
+ // RETURN_VALUE_IF(mState != State::ACTIVE, result, "illegalState");
+ if (mState != State::ACTIVE) {
+ result.resize(mCaptureSamples);
+ memset(result.data(), 0x80, mCaptureSamples);
+ return result;
+ }
+ const uint32_t deltaMs = getDeltaTimeMsFromUpdatedTime_l();
// if audio framework has stopped playing audio although the effect is still active we must
// clear the capture buffer to return silence
if ((mLastCaptureIdx == mCaptureIdx) && (mBufferUpdateTime.tv_sec != 0) &&
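A minimal standalone sketch (not part of the patch) of why the capture buffer above is filled with 0x80 when the effect is not active: unsigned 8-bit PCM is offset-binary, so 0x80 is the zero-amplitude (silence) level that the CTS test expects.

    #include <cstdint>
    #include <cstdio>

    int main() {
        const uint8_t sample = 0x80;
        // Subtracting the 0x80 bias converts offset-binary to a signed amplitude;
        // a buffer filled with 0x80 bytes therefore decodes to silence.
        const int amplitude = static_cast<int>(sample) - 0x80;
        std::printf("amplitude = %d\n", amplitude);  // prints 0
        return 0;
    }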
diff --git a/media/libeffects/visualizer/aidl/VisualizerContext.h b/media/libeffects/visualizer/aidl/VisualizerContext.h
index 3cb711e..958035f 100644
--- a/media/libeffects/visualizer/aidl/VisualizerContext.h
+++ b/media/libeffects/visualizer/aidl/VisualizerContext.h
@@ -83,7 +83,7 @@
uint32_t mLastCaptureIdx GUARDED_BY(mMutex) = 0;
Visualizer::ScalingMode mScalingMode GUARDED_BY(mMutex) = Visualizer::ScalingMode::NORMALIZED;
struct timespec mBufferUpdateTime GUARDED_BY(mMutex);
- // capture buf with 8 bits PCM
+ // capture buf with 8-bit mono PCM samples
std::array<uint8_t, kMaxCaptureBufSize> mCaptureBuf GUARDED_BY(mMutex);
uint32_t mDownstreamLatency GUARDED_BY(mMutex) = 0;
uint32_t mCaptureSamples GUARDED_BY(mMutex) = kMaxCaptureBufSize;
diff --git a/media/libheadtracking/Android.bp b/media/libheadtracking/Android.bp
index 9636949..9955862 100644
--- a/media/libheadtracking/Android.bp
+++ b/media/libheadtracking/Android.bp
@@ -16,6 +16,7 @@
"Pose.cpp",
"PoseBias.cpp",
"PoseDriftCompensator.cpp",
+ "PosePredictor.cpp",
"PoseRateLimiter.cpp",
"QuaternionUtil.cpp",
"ScreenHeadFusion.cpp",
@@ -39,6 +40,12 @@
cflags: [
"-Wthread-safety",
],
+ product_variables: {
+ debuggable: {
+ // enable experiments only in userdebug and eng builds
+ cflags: ["-DENABLE_VERIFICATION"],
+ },
+ },
}
cc_library {
@@ -80,6 +87,7 @@
"Pose-test.cpp",
"PoseBias-test.cpp",
"PoseDriftCompensator-test.cpp",
+ "PosePredictor.cpp",
"PoseRateLimiter-test.cpp",
"QuaternionUtil-test.cpp",
"ScreenHeadFusion-test.cpp",
diff --git a/media/libheadtracking/HeadTrackingProcessor-test.cpp b/media/libheadtracking/HeadTrackingProcessor-test.cpp
index b9dd0b8..5190f52 100644
--- a/media/libheadtracking/HeadTrackingProcessor-test.cpp
+++ b/media/libheadtracking/HeadTrackingProcessor-test.cpp
@@ -82,6 +82,8 @@
std::unique_ptr<HeadTrackingProcessor> processor = createHeadTrackingProcessor(
Options{.predictionDuration = 2.f}, HeadTrackingMode::WORLD_RELATIVE);
+ processor->setPosePredictorType(PosePredictorType::TWIST);
+
// Establish a baseline for the drift compensators.
processor->setWorldToHeadPose(0, Pose3f(), Twist3f());
processor->setWorldToScreenPose(0, Pose3f());
diff --git a/media/libheadtracking/HeadTrackingProcessor.cpp b/media/libheadtracking/HeadTrackingProcessor.cpp
index 9db4afa..54d08d2 100644
--- a/media/libheadtracking/HeadTrackingProcessor.cpp
+++ b/media/libheadtracking/HeadTrackingProcessor.cpp
@@ -22,6 +22,7 @@
#include "ModeSelector.h"
#include "PoseBias.h"
+#include "PosePredictor.h"
#include "ScreenHeadFusion.h"
#include "StillnessDetector.h"
@@ -59,8 +60,8 @@
void setWorldToHeadPose(int64_t timestamp, const Pose3f& worldToHead,
const Twist3f& headTwist) override {
- Pose3f predictedWorldToHead =
- worldToHead * integrate(headTwist, mOptions.predictionDuration);
+ const Pose3f predictedWorldToHead = mPosePredictor.predict(
+ timestamp, worldToHead, headTwist, mOptions.predictionDuration);
mHeadPoseBias.setInput(predictedWorldToHead);
mHeadStillnessDetector.setInput(timestamp, predictedWorldToHead);
mWorldToHeadTimestamp = timestamp;
@@ -161,6 +162,10 @@
}
}
+ void setPosePredictorType(PosePredictorType type) override {
+ mPosePredictor.setPosePredictorType(type);
+ }
+
std::string toString_l(unsigned level) const override {
std::string prefixSpace(level, ' ');
std::string ss = prefixSpace + "HeadTrackingProcessor:\n";
@@ -186,6 +191,7 @@
prefixSpace.c_str(), mOptions.screenStillnessRotationalThreshold);
ss += mModeSelector.toString(level + 1);
ss += mRateLimiter.toString(level + 1);
+ ss += mPosePredictor.toString(level + 1);
ss.append(prefixSpace + "ReCenterHistory:\n");
ss += mLocalLog.dumpToString((prefixSpace + " ").c_str(), mMaxLocalLogLine);
return ss;
@@ -207,6 +213,7 @@
ScreenHeadFusion mScreenHeadFusion;
ModeSelector mModeSelector;
PoseRateLimiter mRateLimiter;
+ PosePredictor mPosePredictor;
static constexpr std::size_t mMaxLocalLogLine = 10;
SimpleLog mLocalLog{mMaxLocalLogLine};
};
@@ -230,5 +237,26 @@
return "EnumNotImplemented";
};
+std::string toString(PosePredictorType posePredictorType) {
+ switch (posePredictorType) {
+ case PosePredictorType::AUTO: return "AUTO";
+ case PosePredictorType::LAST: return "LAST";
+ case PosePredictorType::TWIST: return "TWIST";
+ case PosePredictorType::LEAST_SQUARES: return "LEAST_SQUARES";
+ }
+ return "UNKNOWN" + std::to_string((int)posePredictorType);
+}
+
+bool isValidPosePredictorType(PosePredictorType posePredictorType) {
+ switch (posePredictorType) {
+ case PosePredictorType::AUTO:
+ case PosePredictorType::LAST:
+ case PosePredictorType::TWIST:
+ case PosePredictorType::LEAST_SQUARES:
+ return true;
+ }
+ return false;
+}
+
} // namespace media
} // namespace android
diff --git a/media/libheadtracking/ModeSelector.cpp b/media/libheadtracking/ModeSelector.cpp
index 6277090..7ee21b3 100644
--- a/media/libheadtracking/ModeSelector.cpp
+++ b/media/libheadtracking/ModeSelector.cpp
@@ -117,10 +117,12 @@
std::string ModeSelector::toString(unsigned level) const {
std::string prefixSpace(level, ' ');
std::string ss(prefixSpace);
- StringAppendF(&ss, "ModeSelector: ScreenToStage %s\n",
- mScreenToStage.toString().c_str());
- ss.append(prefixSpace + "Mode downgrade history:\n");
- ss += mLocalLog.dumpToString((prefixSpace + " ").c_str(), sMaxLocalLogLine);
+ ss.append("ModeSelector: ScreenToStage ")
+ .append(mScreenToStage.toString())
+ .append("\n")
+ .append(prefixSpace)
+ .append("Mode change history:\n")
+ .append(mLocalLog.dumpToString((prefixSpace + " ").c_str(), sMaxLocalLogLine));
return ss;
}
diff --git a/media/libheadtracking/PosePredictor.cpp b/media/libheadtracking/PosePredictor.cpp
new file mode 100644
index 0000000..5209d54
--- /dev/null
+++ b/media/libheadtracking/PosePredictor.cpp
@@ -0,0 +1,246 @@
+/*
+ * Copyright (C) 2023 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include "PosePredictor.h"
+
+namespace android::media {
+
+namespace {
+#ifdef ENABLE_VERIFICATION
+constexpr bool kEnableVerification = true;
+constexpr std::array<int, 3> kLookAheadMs{ 50, 100, 200 };
+#else
+constexpr bool kEnableVerification = false;
+constexpr std::array<int, 0> kLookAheadMs{};
+#endif
+
+} // namespace
+
+void LeastSquaresPredictor::add(int64_t atNs, const Pose3f& pose, const Twist3f& twist)
+{
+ (void)twist;
+ mLastAtNs = atNs;
+ mLastPose = pose;
+ const auto q = pose.rotation();
+ const double datNs = static_cast<double>(atNs);
+ mRw.add({datNs, q.w()});
+ mRx.add({datNs, q.x()});
+ mRy.add({datNs, q.y()});
+ mRz.add({datNs, q.z()});
+}
+
+Pose3f LeastSquaresPredictor::predict(int64_t atNs) const
+{
+ if (mRw.getN() < kMinimumSamplesForPrediction) return mLastPose;
+
+ /*
+ * Using parametric form, we have q(t) = { w(t), x(t), y(t), z(t) }.
+ * We compute the least squares prediction of w, x, y, z.
+ */
+ const double dLookahead = static_cast<double>(atNs);
+ Eigen::Quaternionf lsq(
+ mRw.getYFromX(dLookahead),
+ mRx.getYFromX(dLookahead),
+ mRy.getYFromX(dLookahead),
+ mRz.getYFromX(dLookahead));
+
+ /*
+ * We cheat here, since the result lsq is the least squares prediction
+ * in H (arbitrary quaternion), not the least squares prediction in
+ * SO(3) (unit quaternion).
+ *
+ * In other words, the result for lsq is most likely not a unit quaternion.
+ * To solve this, we normalize, thereby selecting the closest unit quaternion
+ * in SO(3) to the prediction in H.
+ */
+ lsq.normalize();
+ return Pose3f(lsq);
+}
+
+void LeastSquaresPredictor::reset() {
+ mLastAtNs = {};
+ mLastPose = {};
+ mRw.reset();
+ mRx.reset();
+ mRy.reset();
+ mRz.reset();
+}
+
+std::string LeastSquaresPredictor::toString(size_t index) const {
+ std::string s(index, ' ');
+ s.append("LeastSquaresPredictor using alpha: ")
+ .append(std::to_string(mAlpha))
+ .append(" last pose: ")
+ .append(mLastPose.toString())
+ .append("\n");
+ return s;
+}
+
+// Formatting
+static inline std::vector<size_t> createDelimiterIdx(size_t predictors, size_t lookaheads) {
+ if (lookaheads == 0) return {};
+ --lookaheads;
+ std::vector<size_t> delimiterIdx(lookaheads);
+ for (size_t i = 0; i < lookaheads; ++i) {
+ delimiterIdx[i] = (i + 1) * predictors;
+ }
+ return delimiterIdx;
+}
+
+PosePredictor::PosePredictor()
+ : mPredictors{
+ // The order of the first predictors must match the switch in getCurrentPredictor().
+ std::make_shared<LastPredictor>(),
+ std::make_shared<TwistPredictor>(),
+ std::make_shared<LeastSquaresPredictor>(),
+ // After this, additional predictors can be placed here for comparison, such as
+ // std::make_shared<LeastSquaresPredictor>(0.25),
+ }
+ , mLookaheadMs(kLookAheadMs.begin(), kLookAheadMs.end())
+ , mVerifiers(std::size(mLookaheadMs) * std::size(mPredictors))
+ , mDelimiterIdx(createDelimiterIdx(std::size(mPredictors), std::size(mLookaheadMs)))
+ , mPredictionRecorder(
+ std::size(mVerifiers) /* vectorSize */, std::chrono::seconds(1), 10 /* maxLogLine */,
+ mDelimiterIdx)
+ , mPredictionDurableRecorder(
+ std::size(mVerifiers) /* vectorSize */, std::chrono::minutes(1), 10 /* maxLogLine */,
+ mDelimiterIdx)
+ {
+}
+
+Pose3f PosePredictor::predict(
+ int64_t timestampNs, const Pose3f& pose, const Twist3f& twist, float predictionDurationNs)
+{
+ if (timestampNs - mLastTimestampNs > kMaximumSampleIntervalBeforeResetNs) {
+ for (const auto& predictor : mPredictors) {
+ predictor->reset();
+ }
+ ++mResets;
+ }
+ mLastTimestampNs = timestampNs;
+
+ auto selectedPredictor = getCurrentPredictor();
+ if constexpr (kEnableVerification) {
+ // Update all Predictors
+ for (const auto& predictor : mPredictors) {
+ predictor->add(timestampNs, pose, twist);
+ }
+
+ // Update Verifiers and calculate errors
+ std::vector<float> error(std::size(mVerifiers));
+ for (size_t i = 0; i < mLookaheadMs.size(); ++i) {
+ constexpr float RADIAN_TO_DEGREES = 180 / M_PI;
+ const int64_t atNs =
+ timestampNs + mLookaheadMs[i] * PosePredictorVerifier::kMillisToNanos;
+
+ for (size_t j = 0; j < mPredictors.size(); ++j) {
+ const size_t idx = i * std::size(mPredictors) + j;
+ mVerifiers[idx].verifyActualPose(timestampNs, pose);
+ mVerifiers[idx].addPredictedPose(atNs, mPredictors[j]->predict(atNs));
+ error[idx] = RADIAN_TO_DEGREES * mVerifiers[idx].lastError();
+ }
+ }
+ // Record errors
+ mPredictionRecorder.record(error);
+ mPredictionDurableRecorder.record(error);
+ } else /* constexpr */ {
+ selectedPredictor->add(timestampNs, pose, twist);
+ }
+
+ // Deliver prediction
+ const int64_t predictionTimeNs = timestampNs + (int64_t)predictionDurationNs;
+ return selectedPredictor->predict(predictionTimeNs);
+}
+
+void PosePredictor::setPosePredictorType(PosePredictorType type) {
+ if (!isValidPosePredictorType(type)) return;
+ if (type == mSetType) return;
+ mSetType = type;
+ if (type == android::media::PosePredictorType::AUTO) {
+ type = android::media::PosePredictorType::LEAST_SQUARES;
+ }
+ if (type != mCurrentType) {
+ mCurrentType = type;
+ if constexpr (!kEnableVerification) {
+ // Verification keeps all predictors up-to-date.
+ // If we don't enable verification, we must reset the current predictor.
+ getCurrentPredictor()->reset();
+ }
+ }
+}
+
+std::string PosePredictor::toString(size_t index) const {
+ std::string prefixSpace(index, ' ');
+ std::string ss(prefixSpace);
+ ss.append("PosePredictor:\n")
+ .append(prefixSpace)
+ .append(" Current Prediction Type: ")
+ .append(android::media::toString(mCurrentType))
+ .append("\n")
+ .append(prefixSpace)
+ .append(" Resets: ")
+ .append(std::to_string(mResets))
+ .append("\n")
+ .append(getCurrentPredictor()->toString(index + 1));
+ if constexpr (kEnableVerification) {
+ // dump verification
+ ss.append(prefixSpace)
+ .append(" Prediction abs error (L1) degrees [ type (");
+ for (size_t i = 0; i < mPredictors.size(); ++i) {
+ if (i > 0) ss.append(" , ");
+ ss.append(mPredictors[i]->name());
+ }
+ ss.append(" ) x ( ");
+ for (size_t i = 0; i < mLookaheadMs.size(); ++i) {
+ if (i > 0) ss.append(" : ");
+ ss.append(std::to_string(mLookaheadMs[i]));
+ }
+ std::vector<float> cumulativeAverageErrors(std::size(mVerifiers));
+ for (size_t i = 0; i < cumulativeAverageErrors.size(); ++i) {
+ cumulativeAverageErrors[i] = mVerifiers[i].cumulativeAverageError();
+ }
+ ss.append(" ) ms ]\n")
+ .append(prefixSpace)
+ .append(" Cumulative Average Error:\n")
+ .append(prefixSpace)
+ .append(" ")
+ .append(VectorRecorder::toString(cumulativeAverageErrors, mDelimiterIdx, "%.3g"))
+ .append("\n")
+ .append(prefixSpace)
+ .append(" PerMinuteHistory:\n")
+ .append(mPredictionDurableRecorder.toString(index + 3))
+ .append(prefixSpace)
+ .append(" PerSecondHistory:\n")
+ .append(mPredictionRecorder.toString(index + 3));
+ }
+ return ss;
+}
+
+std::shared_ptr<PredictorBase> PosePredictor::getCurrentPredictor() const {
+ // We don't use a map here; we look up the predictor directly by type.
+ switch (mCurrentType) {
+ default:
+ case android::media::PosePredictorType::LAST:
+ return mPredictors[0];
+ case android::media::PosePredictorType::TWIST:
+ return mPredictors[1];
+ case android::media::PosePredictorType::AUTO: // shouldn't occur here.
+ case android::media::PosePredictorType::LEAST_SQUARES:
+ return mPredictors[2];
+ }
+}
+
+} // namespace android::media
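A minimal standalone sketch (not part of the patch) of the normalization step described in LeastSquaresPredictor::predict() above: extrapolating each quaternion component independently yields a quaternion that is generally not unit length, and Eigen's normalize() projects it onto the closest unit quaternion in SO(3). Assumes Eigen is available on the include path.

    #include <Eigen/Geometry>
    #include <cstdio>

    int main() {
        // Hypothetical component-wise extrapolation result; not a unit quaternion.
        Eigen::Quaternionf lsq(0.98f /*w*/, 0.05f /*x*/, 0.10f /*y*/, 0.02f /*z*/);
        std::printf("norm before: %f\n", lsq.norm());  // slightly below 1
        lsq.normalize();                               // closest unit quaternion
        std::printf("norm after:  %f\n", lsq.norm());  // 1.0
        return 0;
    }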
diff --git a/media/libheadtracking/PosePredictor.h b/media/libheadtracking/PosePredictor.h
new file mode 100644
index 0000000..53211e3
--- /dev/null
+++ b/media/libheadtracking/PosePredictor.h
@@ -0,0 +1,215 @@
+/*
+ * Copyright (C) 2023 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#pragma once
+
+#include "PosePredictorVerifier.h"
+#include <memory>
+#include <audio_utils/Statistics.h>
+#include <media/PosePredictorType.h>
+#include <media/Twist.h>
+#include <media/VectorRecorder.h>
+
+namespace android::media {
+
+// Interface for generic pose predictors
+class PredictorBase {
+public:
+ virtual ~PredictorBase() = default;
+ virtual void add(int64_t atNs, const Pose3f& pose, const Twist3f& twist) = 0;
+ virtual Pose3f predict(int64_t atNs) const = 0;
+ virtual void reset() = 0;
+ virtual std::string name() const = 0;
+ virtual std::string toString(size_t index) const = 0;
+};
+
+/**
+ * LastPredictor uses the last sample Pose for prediction
+ *
+ * This class is not thread-safe.
+ */
+class LastPredictor : public PredictorBase {
+public:
+ void add(int64_t atNs, const Pose3f& pose, const Twist3f& twist) override {
+ (void)atNs;
+ (void)twist;
+ mLastPose = pose;
+ }
+
+ Pose3f predict(int64_t atNs) const override {
+ (void)atNs;
+ return mLastPose;
+ }
+
+ void reset() override {
+ mLastPose = {};
+ }
+
+ std::string name() const override {
+ return "LAST";
+ }
+
+ std::string toString(size_t index) const override {
+ std::string s(index, ' ');
+ s.append("LastPredictor using last pose: ")
+ .append(mLastPose.toString())
+ .append("\n");
+ return s;
+ }
+
+private:
+ Pose3f mLastPose;
+};
+
+/**
+ * TwistPredictor uses the last sample Twist and Pose for prediction
+ *
+ * This class is not thread-safe.
+ */
+class TwistPredictor : public PredictorBase {
+public:
+ void add(int64_t atNs, const Pose3f& pose, const Twist3f& twist) override {
+ mLastAtNs = atNs;
+ mLastPose = pose;
+ mLastTwist = twist;
+ }
+
+ Pose3f predict(int64_t atNs) const override {
+ return mLastPose * integrate(mLastTwist, atNs - mLastAtNs);
+ }
+
+ void reset() override {
+ mLastAtNs = {};
+ mLastPose = {};
+ mLastTwist = {};
+ }
+
+ std::string name() const override {
+ return "TWIST";
+ }
+
+ std::string toString(size_t index) const override {
+ std::string s(index, ' ');
+ s.append("TwistPredictor using last pose: ")
+ .append(mLastPose.toString())
+ .append(" last twist: ")
+ .append(mLastTwist.toString())
+ .append("\n");
+ return s;
+ }
+
+private:
+ int64_t mLastAtNs{};
+ Pose3f mLastPose;
+ Twist3f mLastTwist;
+};
+
+
+/**
+ * LeastSquaresPredictor uses the Pose history for prediction.
+ *
+ * An exponentially weighted least squares fit is used.
+ *
+ * This class is not thread-safe.
+ */
+class LeastSquaresPredictor : public PredictorBase {
+public:
+ // alpha is the exponential decay.
+ LeastSquaresPredictor(double alpha = kDefaultAlphaEstimator)
+ : mAlpha(alpha)
+ , mRw(alpha)
+ , mRx(alpha)
+ , mRy(alpha)
+ , mRz(alpha)
+ {}
+
+ void add(int64_t atNs, const Pose3f& pose, const Twist3f& twist) override;
+ Pose3f predict(int64_t atNs) const override;
+ void reset() override;
+ std::string name() const override {
+ return "LEAST_SQUARES(" + std::to_string(mAlpha) + ")";
+ }
+ std::string toString(size_t index) const override;
+
+private:
+ const double mAlpha;
+ int64_t mLastAtNs{};
+ Pose3f mLastPose;
+ static constexpr double kDefaultAlphaEstimator = 0.2;
+ static constexpr size_t kMinimumSamplesForPrediction = 4;
+ audio_utils::LinearLeastSquaresFit<double> mRw;
+ audio_utils::LinearLeastSquaresFit<double> mRx;
+ audio_utils::LinearLeastSquaresFit<double> mRy;
+ audio_utils::LinearLeastSquaresFit<double> mRz;
+};
+
+/*
+ * PosePredictor predicts the pose given sensor input at a time in the future.
+ *
+ * This class is not thread-safe.
+ */
+class PosePredictor {
+public:
+ PosePredictor();
+
+ Pose3f predict(int64_t timestampNs, const Pose3f& pose, const Twist3f& twist,
+ float predictionDurationNs);
+
+ void setPosePredictorType(PosePredictorType type);
+
+ // convert predictions to a printable string
+ std::string toString(size_t index) const;
+
+private:
+ static constexpr int64_t kMaximumSampleIntervalBeforeResetNs =
+ 300'000'000; // 300 ms without a new sample resets the predictors.
+
+ // Predictors
+ const std::vector<std::shared_ptr<PredictorBase>> mPredictors;
+
+ // Verifiers: one per (predictor, lookahead) pair, used to compare prediction accuracy.
+ const std::vector<int> mLookaheadMs;
+
+ std::vector<PosePredictorVerifier> mVerifiers;
+
+ const std::vector<size_t> mDelimiterIdx;
+
+ // Recorders
+ media::VectorRecorder mPredictionRecorder{
+ std::size(mVerifiers) /* vectorSize */, std::chrono::seconds(1), 10 /* maxLogLine */,
+ mDelimiterIdx};
+ media::VectorRecorder mPredictionDurableRecorder{
+ std::size(mVerifiers) /* vectorSize */, std::chrono::minutes(1), 10 /* maxLogLine */,
+ mDelimiterIdx};
+
+ // Status
+
+ // SetType is the externally set predictor type. It may include AUTO.
+ PosePredictorType mSetType = PosePredictorType::LEAST_SQUARES;
+
+ // CurrentType is the actual predictor type used by this class.
+ // It does not include AUTO because that metatype means the class
+ // chooses the best predictor type based on sensor statistics.
+ PosePredictorType mCurrentType = PosePredictorType::LEAST_SQUARES;
+
+ int64_t mResets{};
+ int64_t mLastTimestampNs{};
+
+ // Returns current predictor
+ std::shared_ptr<PredictorBase> getCurrentPredictor() const;
+};
+
+} // namespace android::media
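A small sketch (not part of the patch) of the verifier bookkeeping used by PosePredictor when verification is enabled: errors are stored lookahead-major and predictor-minor, and createDelimiterIdx() marks the lookahead boundaries so the recorder prints ':' between groups. The sizes below mirror the three built-in predictors and the 50/100/200 ms lookaheads.

    #include <cstdio>
    #include <vector>

    static std::vector<size_t> createDelimiterIdx(size_t predictors, size_t lookaheads) {
        if (lookaheads == 0) return {};
        --lookaheads;
        std::vector<size_t> delimiterIdx(lookaheads);
        for (size_t i = 0; i < lookaheads; ++i) delimiterIdx[i] = (i + 1) * predictors;
        return delimiterIdx;
    }

    int main() {
        const size_t predictors = 3;  // LAST, TWIST, LEAST_SQUARES
        const size_t lookaheads = 3;  // e.g. 50, 100, 200 ms
        // Flat index of (lookahead i, predictor j) is i * predictors + j.
        std::printf("idx(i=1, j=2) = %zu\n", size_t{1} * predictors + 2);  // 5
        for (size_t d : createDelimiterIdx(predictors, lookaheads)) {
            std::printf("group boundary at %zu\n", d);  // 3 and 6
        }
        return 0;
    }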
diff --git a/media/libheadtracking/PosePredictorVerifier.h b/media/libheadtracking/PosePredictorVerifier.h
new file mode 100644
index 0000000..6b4a357
--- /dev/null
+++ b/media/libheadtracking/PosePredictorVerifier.h
@@ -0,0 +1,75 @@
+/*
+ * Copyright (C) 2023 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#pragma once
+
+#include <string>
+
+#include <audio_utils/Statistics.h>
+#include <media/Pose.h>
+
+namespace android::media {
+
+/**
+ * PosePredictorVerifier is used to validate predictions
+ *
+ * This class is not thread-safe
+ */
+class PosePredictorVerifier {
+public:
+ std::string toString() const {
+ return mErrorStats.toString();
+ }
+
+ static constexpr int64_t kMillisToNanos = 1000000;
+
+ void verifyActualPose(int64_t timestampNs, const Pose3f& pose) {
+ for (auto it = mPredictions.begin(); it != mPredictions.end();) {
+ if (it->first < timestampNs) {
+ it = mPredictions.erase(it);
+ } else {
+ int64_t dt = it->first - timestampNs;
+ if (std::abs(dt) < 10 * kMillisToNanos) {
+ const float angle = pose.rotation().angularDistance(it->second.rotation());
+ const float error = std::abs(angle); // L1 (absolute difference) here.
+ mLastError = error;
+ mErrorStats.add(error);
+ }
+ break;
+ }
+ }
+ }
+
+ void addPredictedPose(int64_t atNs, const Pose3f& pose) {
+ mPredictions.emplace_back(atNs, pose);
+ }
+
+ float lastError() const {
+ return mLastError;
+ }
+
+ float cumulativeAverageError() const {
+ return mErrorStats.getMean();
+ }
+
+private:
+ static constexpr double kCumulativeErrorAlpha = 0.999;
+ std::deque<std::pair<int64_t, Pose3f>> mPredictions;
+ float mLastError{};
+ android::audio_utils::Statistics<double> mErrorStats{kCumulativeErrorAlpha};
+};
+
+} // namespace android::media
diff --git a/media/libheadtracking/Twist.cpp b/media/libheadtracking/Twist.cpp
index 63b9e69..fdec694 100644
--- a/media/libheadtracking/Twist.cpp
+++ b/media/libheadtracking/Twist.cpp
@@ -15,7 +15,7 @@
*/
#include "media/Twist.h"
-
+#include <android-base/stringprintf.h>
#include "media/QuaternionUtil.h"
namespace android {
@@ -39,5 +39,11 @@
return os;
}
+std::string Twist3f::toString() const {
+ return base::StringPrintf("[%0.2f, %0.2f, %0.2f, %0.2f, %0.2f, %0.2f]",
+ mTranslationalVelocity[0], mTranslationalVelocity[1], mTranslationalVelocity[2],
+ mRotationalVelocity[0], mRotationalVelocity[1], mRotationalVelocity[2]);
+}
+
} // namespace media
} // namespace android
diff --git a/media/libheadtracking/VectorRecorder.cpp b/media/libheadtracking/VectorRecorder.cpp
index 5d0588e..5c87d05 100644
--- a/media/libheadtracking/VectorRecorder.cpp
+++ b/media/libheadtracking/VectorRecorder.cpp
@@ -21,7 +21,7 @@
// Convert data to string with level indentation.
// No need for a lock as the SimpleLog is thread-safe.
std::string VectorRecorder::toString(size_t indent) const {
- return mRecordLog.dumpToString(std::string(indent + 1, ' ').c_str(), mMaxLocalLogLine);
+ return mRecordLog.dumpToString(std::string(indent, ' ').c_str(), mMaxLocalLogLine);
}
// Record into local log when it is time.
@@ -36,9 +36,9 @@
sumToAverage_l();
mRecordLog.log(
"mean: %s, min: %s, max %s, calculated %zu samples in %0.4f second(s)",
- toString(mSum).c_str(),
- toString(mMin).c_str(),
- toString(mMax).c_str(),
+ toString(mSum, mDelimiterIdx, mFormatString.c_str()).c_str(),
+ toString(mMin, mDelimiterIdx, mFormatString.c_str()).c_str(),
+ toString(mMax, mDelimiterIdx, mFormatString.c_str()).c_str(),
mNumberOfSamples,
mNumberOfSecondsSinceFirstSample.count());
resetRecord_l();
diff --git a/media/libheadtracking/include/media/HeadTrackingProcessor.h b/media/libheadtracking/include/media/HeadTrackingProcessor.h
index b4c78a0..d2b78f2 100644
--- a/media/libheadtracking/include/media/HeadTrackingProcessor.h
+++ b/media/libheadtracking/include/media/HeadTrackingProcessor.h
@@ -19,6 +19,7 @@
#include "HeadTrackingMode.h"
#include "Pose.h"
+#include "PosePredictorType.h"
#include "Twist.h"
namespace android {
@@ -99,6 +100,11 @@
bool recenterHead = true, bool recenterScreen = true, std::string source = "") = 0;
/**
+ * Set the predictor type.
+ */
+ virtual void setPosePredictorType(PosePredictorType type) = 0;
+
+ /**
* Dump HeadTrackingProcessor parameters under caller lock.
*/
virtual std::string toString_l(unsigned level) const = 0;
diff --git a/media/libheadtracking/include/media/PosePredictorType.h b/media/libheadtracking/include/media/PosePredictorType.h
new file mode 100644
index 0000000..aa76d5d
--- /dev/null
+++ b/media/libheadtracking/include/media/PosePredictorType.h
@@ -0,0 +1,39 @@
+/*
+ * Copyright (C) 2023 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+#pragma once
+
+#include <string>
+
+namespace android::media {
+
+enum class PosePredictorType {
+ /** Use best predictor determined from sensor input */
+ AUTO,
+
+ /** Use last pose for future prediction */
+ LAST,
+
+ /** Use twist angular velocity for future prediction */
+ TWIST,
+
+ /** Use weighted least squares history of prior poses (ignoring twist) */
+ LEAST_SQUARES,
+};
+
+std::string toString(PosePredictorType posePredictorType);
+bool isValidPosePredictorType(PosePredictorType posePredictorType);
+
+} // namespace android::media
diff --git a/media/libheadtracking/include/media/Twist.h b/media/libheadtracking/include/media/Twist.h
index 291cea3..51b83d8 100644
--- a/media/libheadtracking/include/media/Twist.h
+++ b/media/libheadtracking/include/media/Twist.h
@@ -66,6 +66,9 @@
return Twist3f(mTranslationalVelocity / s, mRotationalVelocity / s);
}
+ // Convert instance to a string representation.
+ std::string toString() const;
+
private:
Eigen::Vector3f mTranslationalVelocity;
Eigen::Vector3f mRotationalVelocity;
diff --git a/media/libheadtracking/include/media/VectorRecorder.h b/media/libheadtracking/include/media/VectorRecorder.h
index 1fb7521..4103a7d 100644
--- a/media/libheadtracking/include/media/VectorRecorder.h
+++ b/media/libheadtracking/include/media/VectorRecorder.h
@@ -34,9 +34,25 @@
*/
class VectorRecorder {
public:
+ /**
+ * @param vectorSize is the size of the vector input.
+ * If the input does not match this size, it is ignored.
+ * @param threshold is the time interval we bucket for averaging.
+ * @param maxLogLine is the number of lines we log. At this
+ * threshold, the oldest line will expire when the new line comes in.
+ * @param delimiterIdx is an optional array of delimiter indices that
+ * replace the ',' with a ':'. For example if delimiterIdx = { 3 } then
+ * the above example would format as [0.00, 0.00, 0.00 : -1.29, -0.50, 15.27].
+ * @param formatString is the sprintf format string used for the double-converted data.
+ */
VectorRecorder(
- size_t vectorSize, std::chrono::duration<double> threshold, int maxLogLine)
+ size_t vectorSize, std::chrono::duration<double> threshold, int maxLogLine,
+ std::vector<size_t> delimiterIdx = {},
+ const std::string_view formatString = {})
: mVectorSize(vectorSize)
+ , mDelimiterIdx(std::move(delimiterIdx))
+ , mFormatString(formatString)
, mRecordLog(maxLogLine)
, mRecordThreshold(threshold)
{
@@ -55,19 +71,38 @@
/**
* Format vector to a string, [0.00, 0.00, 0.00, -1.29, -0.50, 15.27].
+ *
+ * @param delimiterIdx is an optional array of delimiter indices that
+ * replace the ',' with a ':'. For example if delimiterIdx = { 3 } then
+ * the above example would format as [0.00, 0.00, 0.00 : -1.29, -0.50, 15.27].
+ * @param formatString is the sprintf format string used for the double-converted data.
*/
template <typename T>
- static std::string toString(const std::vector<T>& record) {
+ static std::string toString(const std::vector<T>& record,
+ const std::vector<size_t>& delimiterIdx = {},
+ const char * const formatString = nullptr) {
if (record.size() == 0) {
return "[]";
}
std::string ss = "[";
+ auto nextDelimiter = delimiterIdx.begin();
for (size_t i = 0; i < record.size(); ++i) {
if (i > 0) {
- ss.append(", ");
+ if (nextDelimiter != delimiterIdx.end()
+ && *nextDelimiter <= i) {
+ ss.append(" : ");
+ ++nextDelimiter;
+ } else {
+ ss.append(", ");
+ }
}
- base::StringAppendF(&ss, "%0.2lf", static_cast<double>(record[i]));
+ if (formatString != nullptr && *formatString) {
+ base::StringAppendF(&ss, formatString, static_cast<double>(record[i]));
+ } else {
+ base::StringAppendF(&ss, "%5.2lf", static_cast<double>(record[i]));
+ }
}
ss.append("]");
return ss;
@@ -77,6 +112,8 @@
static constexpr int mMaxLocalLogLine = 10;
const size_t mVectorSize;
+ const std::vector<size_t> mDelimiterIdx;
+ const std::string mFormatString;
// Local log for historical vector data.
// Locked internally, so does not need mutex below.
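A standalone sketch (not part of the patch) that reimplements the delimiter behaviour of the static toString() helper above, to show what a dumped vector looks like with delimiterIdx = {3}.

    #include <cstdio>
    #include <string>
    #include <vector>

    static std::string format(const std::vector<double>& v, const std::vector<size_t>& delim) {
        std::string out = "[";
        auto next = delim.begin();
        for (size_t i = 0; i < v.size(); ++i) {
            if (i > 0) {
                // A delimiter index replaces the usual ", " separator with " : ".
                if (next != delim.end() && *next <= i) { out += " : "; ++next; }
                else { out += ", "; }
            }
            char buf[32];
            std::snprintf(buf, sizeof(buf), "%5.2lf", v[i]);
            out += buf;
        }
        return out + "]";
    }

    int main() {
        std::printf("%s\n", format({0, 0, 0, -1.29, -0.50, 15.27}, {3}).c_str());
        // Prints: [ 0.00,  0.00,  0.00 : -1.29, -0.50, 15.27]
        return 0;
    }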
diff --git a/media/libheif/HeifDecoderImpl.cpp b/media/libheif/HeifDecoderImpl.cpp
index 7c78900..2ba1fc3 100644
--- a/media/libheif/HeifDecoderImpl.cpp
+++ b/media/libheif/HeifDecoderImpl.cpp
@@ -26,7 +26,6 @@
#include <binder/IMemory.h>
#include <binder/MemoryDealer.h>
#include <drm/drm_framework_common.h>
-#include <log/log.h>
#include <media/mediametadataretriever.h>
#include <media/stagefright/MediaSource.h>
#include <media/stagefright/foundation/ADebug.h>
diff --git a/media/libmedia/IMediaMetadataRetriever.cpp b/media/libmedia/IMediaMetadataRetriever.cpp
index 8a3b84e..86427ed 100644
--- a/media/libmedia/IMediaMetadataRetriever.cpp
+++ b/media/libmedia/IMediaMetadataRetriever.cpp
@@ -27,40 +27,6 @@
#include <utils/String8.h>
#include <utils/KeyedVector.h>
-// The binder is supposed to propagate the scheduler group across
-// the binder interface so that remote calls are executed with
-// the same priority as local calls. This is currently not working
-// so this change puts in a temporary hack to fix the issue with
-// metadata retrieval which can be a huge CPU hit if done on a
-// foreground thread.
-#ifndef DISABLE_GROUP_SCHEDULE_HACK
-
-#undef LOG_TAG
-#define LOG_TAG "IMediaMetadataRetriever"
-#include <utils/Log.h>
-#include <cutils/sched_policy.h>
-
-namespace android {
-
-static void sendSchedPolicy(Parcel& data)
-{
- SchedPolicy policy;
- get_sched_policy(gettid(), &policy);
- data.writeInt32(policy);
-}
-
-static void setSchedPolicy(const Parcel& data)
-{
- SchedPolicy policy = (SchedPolicy) data.readInt32();
- set_sched_policy(gettid(), policy);
-}
-static void restoreSchedPolicy()
-{
- set_sched_policy(gettid(), SP_FOREGROUND);
-}
-}; // end namespace android
-#endif
-
namespace android {
enum {
@@ -157,9 +123,6 @@
data.writeInt32(option);
data.writeInt32(colorFormat);
data.writeInt32(metaOnly);
-#ifndef DISABLE_GROUP_SCHEDULE_HACK
- sendSchedPolicy(data);
-#endif
remote()->transact(GET_FRAME_AT_TIME, data, &reply);
status_t ret = reply.readInt32();
if (ret != NO_ERROR) {
@@ -178,9 +141,6 @@
data.writeInt32(colorFormat);
data.writeInt32(metaOnly);
data.writeInt32(thumbnail);
-#ifndef DISABLE_GROUP_SCHEDULE_HACK
- sendSchedPolicy(data);
-#endif
remote()->transact(GET_IMAGE_AT_INDEX, data, &reply);
status_t ret = reply.readInt32();
if (ret != NO_ERROR) {
@@ -202,9 +162,6 @@
data.writeInt32(top);
data.writeInt32(right);
data.writeInt32(bottom);
-#ifndef DISABLE_GROUP_SCHEDULE_HACK
- sendSchedPolicy(data);
-#endif
remote()->transact(GET_IMAGE_RECT_AT_INDEX, data, &reply);
status_t ret = reply.readInt32();
if (ret != NO_ERROR) {
@@ -223,9 +180,6 @@
data.writeInt32(index);
data.writeInt32(colorFormat);
data.writeInt32(metaOnly);
-#ifndef DISABLE_GROUP_SCHEDULE_HACK
- sendSchedPolicy(data);
-#endif
remote()->transact(GET_FRAME_AT_INDEX, data, &reply);
status_t ret = reply.readInt32();
if (ret != NO_ERROR) {
@@ -238,9 +192,6 @@
{
Parcel data, reply;
data.writeInterfaceToken(IMediaMetadataRetriever::getInterfaceDescriptor());
-#ifndef DISABLE_GROUP_SCHEDULE_HACK
- sendSchedPolicy(data);
-#endif
remote()->transact(EXTRACT_ALBUM_ART, data, &reply);
status_t ret = reply.readInt32();
if (ret != NO_ERROR) {
@@ -253,9 +204,6 @@
{
Parcel data, reply;
data.writeInterfaceToken(IMediaMetadataRetriever::getInterfaceDescriptor());
-#ifndef DISABLE_GROUP_SCHEDULE_HACK
- sendSchedPolicy(data);
-#endif
data.writeInt32(keyCode);
remote()->transact(EXTRACT_METADATA, data, &reply);
status_t ret = reply.readInt32();
@@ -366,9 +314,6 @@
bool metaOnly = (data.readInt32() != 0);
ALOGV("getTimeAtTime: time(%" PRId64 " us), option(%d), colorFormat(%d), metaOnly(%d)",
timeUs, option, colorFormat, metaOnly);
-#ifndef DISABLE_GROUP_SCHEDULE_HACK
- setSchedPolicy(data);
-#endif
sp<IMemory> bitmap = getFrameAtTime(timeUs, option, colorFormat, metaOnly);
if (bitmap != 0) { // Don't send NULL across the binder interface
reply->writeInt32(NO_ERROR);
@@ -376,9 +321,6 @@
} else {
reply->writeInt32(UNKNOWN_ERROR);
}
-#ifndef DISABLE_GROUP_SCHEDULE_HACK
- restoreSchedPolicy();
-#endif
return NO_ERROR;
} break;
case GET_IMAGE_AT_INDEX: {
@@ -389,9 +331,6 @@
bool thumbnail = (data.readInt32() != 0);
ALOGV("getImageAtIndex: index(%d), colorFormat(%d), metaOnly(%d), thumbnail(%d)",
index, colorFormat, metaOnly, thumbnail);
-#ifndef DISABLE_GROUP_SCHEDULE_HACK
- setSchedPolicy(data);
-#endif
sp<IMemory> bitmap = getImageAtIndex(index, colorFormat, metaOnly, thumbnail);
if (bitmap != 0) { // Don't send NULL across the binder interface
reply->writeInt32(NO_ERROR);
@@ -399,9 +338,6 @@
} else {
reply->writeInt32(UNKNOWN_ERROR);
}
-#ifndef DISABLE_GROUP_SCHEDULE_HACK
- restoreSchedPolicy();
-#endif
return NO_ERROR;
} break;
@@ -415,9 +351,6 @@
int bottom = data.readInt32();
ALOGV("getImageRectAtIndex: index(%d), colorFormat(%d), rect {%d, %d, %d, %d}",
index, colorFormat, left, top, right, bottom);
-#ifndef DISABLE_GROUP_SCHEDULE_HACK
- setSchedPolicy(data);
-#endif
sp<IMemory> bitmap = getImageRectAtIndex(
index, colorFormat, left, top, right, bottom);
if (bitmap != 0) { // Don't send NULL across the binder interface
@@ -426,9 +359,6 @@
} else {
reply->writeInt32(UNKNOWN_ERROR);
}
-#ifndef DISABLE_GROUP_SCHEDULE_HACK
- restoreSchedPolicy();
-#endif
return NO_ERROR;
} break;
@@ -439,9 +369,6 @@
bool metaOnly = (data.readInt32() != 0);
ALOGV("getFrameAtIndex: index(%d), colorFormat(%d), metaOnly(%d)",
index, colorFormat, metaOnly);
-#ifndef DISABLE_GROUP_SCHEDULE_HACK
- setSchedPolicy(data);
-#endif
sp<IMemory> frame = getFrameAtIndex(index, colorFormat, metaOnly);
if (frame != nullptr) { // Don't send NULL across the binder interface
reply->writeInt32(NO_ERROR);
@@ -449,16 +376,10 @@
} else {
reply->writeInt32(UNKNOWN_ERROR);
}
-#ifndef DISABLE_GROUP_SCHEDULE_HACK
- restoreSchedPolicy();
-#endif
return NO_ERROR;
} break;
case EXTRACT_ALBUM_ART: {
CHECK_INTERFACE(IMediaMetadataRetriever, data, reply);
-#ifndef DISABLE_GROUP_SCHEDULE_HACK
- setSchedPolicy(data);
-#endif
sp<IMemory> albumArt = extractAlbumArt();
if (albumArt != 0) { // Don't send NULL across the binder interface
reply->writeInt32(NO_ERROR);
@@ -466,16 +387,10 @@
} else {
reply->writeInt32(UNKNOWN_ERROR);
}
-#ifndef DISABLE_GROUP_SCHEDULE_HACK
- restoreSchedPolicy();
-#endif
return NO_ERROR;
} break;
case EXTRACT_METADATA: {
CHECK_INTERFACE(IMediaMetadataRetriever, data, reply);
-#ifndef DISABLE_GROUP_SCHEDULE_HACK
- setSchedPolicy(data);
-#endif
int keyCode = data.readInt32();
const char* value = extractMetadata(keyCode);
if (value != NULL) { // Don't send NULL across the binder interface
@@ -484,9 +399,6 @@
} else {
reply->writeInt32(UNKNOWN_ERROR);
}
-#ifndef DISABLE_GROUP_SCHEDULE_HACK
- restoreSchedPolicy();
-#endif
return NO_ERROR;
} break;
default:
diff --git a/media/libmedia/include/media/omx/1.0/Conversion.h b/media/libmedia/include/media/omx/1.0/Conversion.h
index 811936b..37cb059 100644
--- a/media/libmedia/include/media/omx/1.0/Conversion.h
+++ b/media/libmedia/include/media/omx/1.0/Conversion.h
@@ -606,8 +606,16 @@
t->attr.height = l.getHeight();
t->attr.stride = l.getStride();
t->attr.format = static_cast<PixelFormat>(l.getPixelFormat());
- t->attr.layerCount = l.getLayerCount();
- t->attr.usage = l.getUsage();
+ // HACK
+ // anwBuffer.layerCount 8 bytes : GraphicBuffer::layerCount 4 bytes
+ // anwBuffer.usage 4 bytes : GraphicBuffer::usage 8 bytes
+ // We would like to retain the high part of usage by carrying it in the high part of layerCount.
+ uint64_t usage = l.getUsage();
+ uint32_t usageHigh = (usage >> 32);
+ uint32_t usageLow = (0xFFFFFFFF & usage);
+ uint32_t layerLow = l.getLayerCount();
+ t->attr.layerCount = ((uint64_t(usageHigh) << 32) | layerLow);
+ t->attr.usage = usageLow;
t->attr.id = l.getId();
t->attr.generationNumber = l.getGenerationNumber();
t->nativeHandle = hidl_handle(l.handle);
@@ -637,30 +645,37 @@
}
}
- size_t const numInts = 12 + (handle ? handle->numInts : 0);
+ size_t const numInts = 13 + (handle ? handle->numInts : 0);
int32_t* ints = new int32_t[numInts];
size_t numFds = static_cast<size_t>(handle ? handle->numFds : 0);
int* fds = new int[numFds];
- ints[0] = 'GBFR';
+ ints[0] = 'GB01';
ints[1] = static_cast<int32_t>(t.attr.width);
ints[2] = static_cast<int32_t>(t.attr.height);
ints[3] = static_cast<int32_t>(t.attr.stride);
ints[4] = static_cast<int32_t>(t.attr.format);
- ints[5] = static_cast<int32_t>(t.attr.layerCount);
+ // HACK
+ // anwBuffer.layerCount 8 bytes : GraphicBuffer::layerCount 4 bytes
+ // anwBuffer.usage 4 bytes : GraphicBuffer::usage 8 bytes
+ // We would like to retain the high part of usage by carrying it in the high part of layerCount.
+ uint32_t layer = (0xFFFFFFFF & t.attr.layerCount);
+ uint32_t usageHigh = (t.attr.layerCount >> 32);
+ ints[5] = layer;
ints[6] = static_cast<int32_t>(t.attr.usage);
ints[7] = static_cast<int32_t>(t.attr.id >> 32);
ints[8] = static_cast<int32_t>(t.attr.id & 0xFFFFFFFF);
ints[9] = static_cast<int32_t>(t.attr.generationNumber);
ints[10] = 0;
ints[11] = 0;
+ ints[12] = usageHigh;
if (handle) {
ints[10] = static_cast<int32_t>(handle->numFds);
ints[11] = static_cast<int32_t>(handle->numInts);
int* intsStart = handle->data + handle->numFds;
std::copy(handle->data, intsStart, fds);
- std::copy(intsStart, intsStart + handle->numInts, &ints[12]);
+ std::copy(intsStart, intsStart + handle->numInts, &ints[13]);
}
void const* constBuffer = static_cast<void const*>(ints);
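A self-contained sketch (not part of the patch) of the bit packing described by the HACK comments above: the high 32 bits of the 64-bit usage are carried in the high half of the 64-bit layerCount field, so nothing is lost across the narrower fields, and the receiving side can restore both values.

    #include <cassert>
    #include <cstdint>

    int main() {
        const uint64_t usage = 0x0000000300000001ULL;  // 64-bit GraphicBuffer usage
        const uint32_t layerCount = 1;                 // 32-bit GraphicBuffer layerCount

        // Pack: high 32 bits of usage ride in the high half of the 64-bit layerCount field.
        const uint32_t usageHigh = static_cast<uint32_t>(usage >> 32);
        const uint32_t usageLow = static_cast<uint32_t>(usage & 0xFFFFFFFF);
        const uint64_t packedLayerCount = (uint64_t(usageHigh) << 32) | layerCount;
        const uint32_t packedUsage = usageLow;

        // Unpack on the receiving side.
        const uint64_t restoredUsage = ((packedLayerCount >> 32) << 32) | packedUsage;
        const uint32_t restoredLayerCount = static_cast<uint32_t>(packedLayerCount & 0xFFFFFFFF);

        assert(restoredUsage == usage);
        assert(restoredLayerCount == layerCount);
        return 0;
    }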
diff --git a/media/libmedia/tests/codeclist/Android.bp b/media/libmedia/tests/codeclist/Android.bp
index d4494f6..6f3010c 100644
--- a/media/libmedia/tests/codeclist/Android.bp
+++ b/media/libmedia/tests/codeclist/Android.bp
@@ -25,7 +25,7 @@
cc_test {
name: "CodecListTest",
- test_suites: ["device-tests", "mts-media"],
+ test_suites: ["device-tests"],
gtest: true,
// Support multilib variants (using different suffix per sub-architecture), which is needed on
diff --git a/media/libmediahelper/AudioParameter.cpp b/media/libmediahelper/AudioParameter.cpp
index 382a920..9a8156e 100644
--- a/media/libmediahelper/AudioParameter.cpp
+++ b/media/libmediahelper/AudioParameter.cpp
@@ -61,6 +61,12 @@
AUDIO_PARAMETER_DEVICE_ADDITIONAL_OUTPUT_DELAY;
const char * const AudioParameter::keyMaxAdditionalOutputDeviceDelay =
AUDIO_PARAMETER_DEVICE_MAX_ADDITIONAL_OUTPUT_DELAY;
+const char * const AudioParameter::keyOffloadCodecAverageBitRate = AUDIO_OFFLOAD_CODEC_AVG_BIT_RATE;
+const char * const AudioParameter::keyOffloadCodecSampleRate = AUDIO_OFFLOAD_CODEC_SAMPLE_RATE;
+const char * const AudioParameter::keyOffloadCodecChannels = AUDIO_OFFLOAD_CODEC_NUM_CHANNEL;
+const char * const AudioParameter::keyOffloadCodecDelaySamples = AUDIO_OFFLOAD_CODEC_DELAY_SAMPLES;
+const char * const AudioParameter::keyOffloadCodecPaddingSamples =
+ AUDIO_OFFLOAD_CODEC_PADDING_SAMPLES;
AudioParameter::AudioParameter(const String8& keyValuePairs)
{
@@ -226,4 +232,9 @@
}
}
+bool AudioParameter::containsKey(const String8& key) const
+{
+ return mParameters.indexOfKey(key) >= 0;
+}
+
} // namespace android
diff --git a/media/libmediahelper/include/media/AudioParameter.h b/media/libmediahelper/include/media/AudioParameter.h
index 9a6ca8a..41aff7c 100644
--- a/media/libmediahelper/include/media/AudioParameter.h
+++ b/media/libmediahelper/include/media/AudioParameter.h
@@ -107,6 +107,12 @@
static const char * const keyAdditionalOutputDeviceDelay;
static const char * const keyMaxAdditionalOutputDeviceDelay;
+ static const char * const keyOffloadCodecAverageBitRate;
+ static const char * const keyOffloadCodecSampleRate;
+ static const char * const keyOffloadCodecChannels;
+ static const char * const keyOffloadCodecDelaySamples;
+ static const char * const keyOffloadCodecPaddingSamples;
+
String8 toString() const { return toStringImpl(true); }
String8 keysToString() const { return toStringImpl(false); }
@@ -117,6 +123,12 @@
status_t remove(const String8& key);
+ status_t get(const String8& key, int& value) const {
+ return getInt(key, value);
+ }
+ status_t get(const String8& key, float& value) const {
+ return getFloat(key, value);
+ }
status_t get(const String8& key, String8& value) const;
status_t getInt(const String8& key, int& value) const;
status_t getFloat(const String8& key, float& value) const;
@@ -125,6 +137,7 @@
size_t size() const { return mParameters.size(); }
+ bool containsKey(const String8& key) const;
private:
String8 mKeyValuePairs;
KeyedVector <String8, String8> mParameters;
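A usage sketch (not part of the patch; the key/value string is hypothetical) for the containsKey() helper and the new get() overloads added above; get(key, int&) dispatches to getInt().

    #include <media/AudioParameter.h>

    using android::AudioParameter;
    using android::String8;

    void readOffloadBitRate() {
        // Hypothetical key/value pair; the actual key literal comes from
        // AUDIO_OFFLOAD_CODEC_AVG_BIT_RATE in the system audio headers.
        const String8 key(AudioParameter::keyOffloadCodecAverageBitRate);
        AudioParameter params(key + String8("=128000"));
        if (params.containsKey(key)) {
            int bitRate = 0;
            (void)params.get(key, bitRate);  // dispatches to getInt()
        }
    }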
diff --git a/media/libmediaplayerservice/MediaPlayerService.cpp b/media/libmediaplayerservice/MediaPlayerService.cpp
index bdf1cbc..9e9e9d8 100644
--- a/media/libmediaplayerservice/MediaPlayerService.cpp
+++ b/media/libmediaplayerservice/MediaPlayerService.cpp
@@ -1837,7 +1837,6 @@
} else {
mAttributes = NULL;
}
-
setMinBufferCount();
}
diff --git a/media/libmediaplayerservice/fuzzer/Android.bp b/media/libmediaplayerservice/fuzzer/Android.bp
index 5abac81..5e95c87 100644
--- a/media/libmediaplayerservice/fuzzer/Android.bp
+++ b/media/libmediaplayerservice/fuzzer/Android.bp
@@ -46,6 +46,14 @@
"android-media-fuzzing-reports@google.com",
],
componentid: 155276,
+ hotlists: [
+ "4593311",
+ ],
+ description: "The fuzzer targets the APIs of libmediaplayerservice",
+ vector: "remote",
+ service_privilege: "privileged",
+ users: "multi_user",
+ fuzzed_code_usage: "shipped",
},
}
diff --git a/media/libmediaplayerservice/fuzzer/mediarecorder_fuzzer.cpp b/media/libmediaplayerservice/fuzzer/mediarecorder_fuzzer.cpp
index b197042..fdac1a1 100644
--- a/media/libmediaplayerservice/fuzzer/mediarecorder_fuzzer.cpp
+++ b/media/libmediaplayerservice/fuzzer/mediarecorder_fuzzer.cpp
@@ -21,7 +21,7 @@
#include <AudioFlinger.h>
#include <MediaPlayerService.h>
#include <ResourceManagerService.h>
-#include <ServiceManager.h>
+#include <fakeservicemanager/FakeServiceManager.h>
#include <StagefrightRecorder.h>
#include <camera/Camera.h>
#include <camera/android/hardware/ICamera.h>
@@ -315,7 +315,7 @@
* Initializing a FakeServiceManager and adding the instances
* of all the required services
*/
- sp<IServiceManager> fakeServiceManager = new ServiceManager();
+ sp<IServiceManager> fakeServiceManager = new FakeServiceManager();
setDefaultServiceManager(fakeServiceManager);
MediaPlayerService::instantiate();
AudioFlinger::instantiate();
diff --git a/media/libmediaplayerservice/nuplayer/AWakeLock.cpp b/media/libmediaplayerservice/nuplayer/AWakeLock.cpp
index 25a8ae4..366956c 100644
--- a/media/libmediaplayerservice/nuplayer/AWakeLock.cpp
+++ b/media/libmediaplayerservice/nuplayer/AWakeLock.cpp
@@ -67,6 +67,7 @@
if (status.isOk()) {
mWakeLockToken = binder;
mWakeLockCount++;
+ ALOGI("AwakeLock acquired");
return true;
}
}
@@ -93,6 +94,7 @@
IPCThreadState::self()->restoreCallingIdentity(token);
}
mWakeLockToken.clear();
+ ALOGI("AwakeLock released");
}
}
diff --git a/media/libmediaplayerservice/nuplayer/NuPlayer.cpp b/media/libmediaplayerservice/nuplayer/NuPlayer.cpp
index 5c6c5fd..e5f2b2b 100644
--- a/media/libmediaplayerservice/nuplayer/NuPlayer.cpp
+++ b/media/libmediaplayerservice/nuplayer/NuPlayer.cpp
@@ -3032,6 +3032,16 @@
}
}
+ void NuPlayer::dump(AString& logString) {
+ logString.append("renderer(");
+ if (mRenderer != nullptr) {
+ mRenderer->dump(logString);
+ } else {
+ logString.append("null");
+ }
+ logString.append(")");
+ }
+
// Modular DRM begin
status_t NuPlayer::prepareDrm(const uint8_t uuid[16], const Vector<uint8_t> &drmSessionId)
{
diff --git a/media/libmediaplayerservice/nuplayer/NuPlayerDecoder.cpp b/media/libmediaplayerservice/nuplayer/NuPlayerDecoder.cpp
index 52b2041..8da09c4 100644
--- a/media/libmediaplayerservice/nuplayer/NuPlayerDecoder.cpp
+++ b/media/libmediaplayerservice/nuplayer/NuPlayerDecoder.cpp
@@ -1104,14 +1104,14 @@
static_cast<MediaBufferHolder*>(holder.get())->mediaBuffer() : nullptr;
}
if (mediaBuf != NULL) {
- if (mediaBuf->size() > codecBuffer->capacity()) {
+ if (mediaBuf->range_length() > codecBuffer->capacity()) {
handleError(ERROR_BUFFER_TOO_SMALL);
mDequeuedInputBuffers.push_back(bufferIx);
return false;
}
- codecBuffer->setRange(0, mediaBuf->size());
- memcpy(codecBuffer->data(), mediaBuf->data(), mediaBuf->size());
+ codecBuffer->setRange(0, mediaBuf->range_length());
+ memcpy(codecBuffer->data(), mediaBuf->data(), mediaBuf->range_length());
MetaDataBase &meta_data = mediaBuf->meta_data();
cryptInfo = NuPlayerDrm::getSampleCryptoInfo(meta_data);
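A minimal sketch (not part of the patch, using a hypothetical stand-in type) of why the copy above is sized by range_length() rather than size(): the valid payload of a media buffer is a window over a larger allocation, and copying the full allocation size can overflow the codec buffer.

    #include <cassert>
    #include <cstddef>
    #include <cstring>

    // Hypothetical stand-in for a buffer with MediaBuffer-style range accounting.
    struct RangedBuffer {
        unsigned char storage[256];
        size_t rangeLength = 0;                        // bytes of valid payload
        size_t size() const { return sizeof(storage); }
        size_t range_length() const { return rangeLength; }
    };

    int main() {
        RangedBuffer in;
        in.rangeLength = 64;                           // 64 valid bytes in a 256-byte allocation
        unsigned char codecBuffer[128];

        // Sizing the copy by size() (256) would overflow codecBuffer; range_length() fits.
        assert(in.range_length() <= sizeof(codecBuffer));
        std::memcpy(codecBuffer, in.storage, in.range_length());
        return 0;
    }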
diff --git a/media/libmediaplayerservice/nuplayer/NuPlayerDriver.cpp b/media/libmediaplayerservice/nuplayer/NuPlayerDriver.cpp
index ceea2f4..c6595ba 100644
--- a/media/libmediaplayerservice/nuplayer/NuPlayerDriver.cpp
+++ b/media/libmediaplayerservice/nuplayer/NuPlayerDriver.cpp
@@ -969,13 +969,16 @@
}
if (locked) {
- snprintf(buf, sizeof(buf), " state(%d), atEOS(%d), looping(%d), autoLoop(%d)\n",
+ snprintf(buf, sizeof(buf), " state(%d), atEOS(%d), looping(%d), autoLoop(%d), ",
mState, mAtEOS, mLooping, mAutoLoop);
+ logString.append(buf);
+ mPlayer->dump(logString);
+ logString.append("\n");
mLock.unlock();
} else {
snprintf(buf, sizeof(buf), " NPD(%p) lock is taken\n", this);
+ logString.append(buf);
}
- logString.append(buf);
for (size_t i = 0; i < trackStats.size(); ++i) {
const sp<AMessage> &stats = trackStats.itemAt(i);
diff --git a/media/libmediaplayerservice/nuplayer/NuPlayerRenderer.cpp b/media/libmediaplayerservice/nuplayer/NuPlayerRenderer.cpp
index 0382df3..9dae16e 100644
--- a/media/libmediaplayerservice/nuplayer/NuPlayerRenderer.cpp
+++ b/media/libmediaplayerservice/nuplayer/NuPlayerRenderer.cpp
@@ -478,6 +478,23 @@
msg->postAndAwaitResponse(&response);
}
+void NuPlayer::Renderer::dump(AString& logString) {
+ Mutex::Autolock autoLock(mLock);
+ logString.append("paused(");
+ logString.append(mPaused);
+ logString.append("), offloading(");
+ logString.append(offloadingAudio());
+ logString.append("), wakelock(acquired=");
+ mWakelockAcquireEvent.dump(logString);
+ logString.append(", timeout=");
+ mWakelockTimeoutEvent.dump(logString);
+ logString.append(", release=");
+ mWakelockReleaseEvent.dump(logString);
+ logString.append(", cancel=");
+ mWakelockCancelEvent.dump(logString);
+ logString.append(")");
+}
+
void NuPlayer::Renderer::changeAudioFormat(
const sp<AMessage> &format,
bool offloadOnly,
@@ -792,6 +809,10 @@
{
int32_t generation;
CHECK(msg->findInt32("drainGeneration", &generation));
+ mWakelockTimeoutEvent.updateValues(
+ uptimeMillis(),
+ generation,
+ mAudioOffloadPauseTimeoutGeneration);
if (generation != mAudioOffloadPauseTimeoutGeneration) {
break;
}
@@ -807,6 +828,10 @@
{
int32_t generation;
CHECK(msg->findInt32("drainGeneration", &generation));
+ mWakelockReleaseEvent.updateValues(
+ uptimeMillis(),
+ generation,
+ mAudioOffloadPauseTimeoutGeneration);
if (generation != mAudioOffloadPauseTimeoutGeneration) {
break;
}
@@ -1914,6 +1939,9 @@
void NuPlayer::Renderer::startAudioOffloadPauseTimeout() {
if (offloadingAudio()) {
mWakeLock->acquire();
+ mWakelockAcquireEvent.updateValues(uptimeMillis(),
+ mAudioOffloadPauseTimeoutGeneration,
+ mAudioOffloadPauseTimeoutGeneration);
sp<AMessage> msg = new AMessage(kWhatAudioOffloadPauseTimeout, this);
msg->setInt32("drainGeneration", mAudioOffloadPauseTimeoutGeneration);
msg->post(kOffloadPauseMaxUs);
@@ -1930,6 +1958,9 @@
// Note: The acquired wakelock prevents the device from suspending
// immediately after offload pause (in case a resume happens shortly thereafter).
mWakeLock->release(true);
+ mWakelockCancelEvent.updateValues(uptimeMillis(),
+ mAudioOffloadPauseTimeoutGeneration,
+ mAudioOffloadPauseTimeoutGeneration);
++mAudioOffloadPauseTimeoutGeneration;
}
@@ -2165,4 +2196,14 @@
notify->post();
}
+void NuPlayer::Renderer::WakeLockEvent::dump(AString& logString) {
+ logString.append("[");
+ logString.append(mTimeMs);
+ logString.append(",");
+ logString.append(mEventTimeoutGeneration);
+ logString.append(",");
+ logString.append(mRendererTimeoutGeneration);
+ logString.append("]");
+}
+
} // namespace android
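Chained together, NuPlayerDriver::dump() -> NuPlayer::dump() -> Renderer::dump() -> WakeLockEvent::dump() now emit a single nested line per player in dumpsys. A hypothetical example of the resulting text, with made-up values, would be:

    state(4), atEOS(0), looping(0), autoLoop(0), renderer(paused(1), offloading(1),
    wakelock(acquired=[5120345,7,7], timeout=[0,0,0], release=[5123900,7,8], cancel=[0,0,0]))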
diff --git a/media/libmediaplayerservice/nuplayer/include/nuplayer/NuPlayer.h b/media/libmediaplayerservice/nuplayer/include/nuplayer/NuPlayer.h
index adb7075..7dc97ea 100644
--- a/media/libmediaplayerservice/nuplayer/include/nuplayer/NuPlayer.h
+++ b/media/libmediaplayerservice/nuplayer/include/nuplayer/NuPlayer.h
@@ -104,6 +104,8 @@
void setTargetBitrate(int bitrate /* bps */);
+ void dump(AString& logString);
+
protected:
virtual ~NuPlayer();
diff --git a/media/libmediaplayerservice/nuplayer/include/nuplayer/NuPlayerRenderer.h b/media/libmediaplayerservice/nuplayer/include/nuplayer/NuPlayerRenderer.h
index 3640678..2ca040f 100644
--- a/media/libmediaplayerservice/nuplayer/include/nuplayer/NuPlayerRenderer.h
+++ b/media/libmediaplayerservice/nuplayer/include/nuplayer/NuPlayerRenderer.h
@@ -83,6 +83,8 @@
bool isStreaming);
void closeAudioSink();
+ void dump(AString& logString);
+
// re-open audio sink after all pending audio buffers played.
void changeAudioFormat(
const sp<AMessage> &format,
@@ -235,6 +237,32 @@
status_t getCurrentPositionFromAnchor(
int64_t *mediaUs, int64_t nowUs, bool allowPastQueuedVideo = false);
+ struct WakeLockEvent {
+ int64_t mTimeMs;
+ int32_t mEventTimeoutGeneration;
+ int32_t mRendererTimeoutGeneration;
+
+ WakeLockEvent():
+ mTimeMs(0),
+ mEventTimeoutGeneration(0),
+ mRendererTimeoutGeneration(0) {}
+
+ void updateValues(int64_t timeMs,
+ int32_t eventGeneration,
+ int32_t rendererGeneration) {
+ mTimeMs = timeMs;
+ mEventTimeoutGeneration = eventGeneration;
+ mRendererTimeoutGeneration = rendererGeneration;
+ }
+
+ void dump(AString& logString);
+ };
+
+ WakeLockEvent mWakelockAcquireEvent;
+ WakeLockEvent mWakelockTimeoutEvent;
+ WakeLockEvent mWakelockReleaseEvent;
+ WakeLockEvent mWakelockCancelEvent;
+
void notifyEOSCallback();
size_t fillAudioBuffer(void *buffer, size_t size);
diff --git a/media/libnbaio/Android.bp b/media/libnbaio/Android.bp
index e9422cc..89e9806 100644
--- a/media/libnbaio/Android.bp
+++ b/media/libnbaio/Android.bp
@@ -68,6 +68,10 @@
// ],
// static_libs: ["libsndfile"],
+ shared_libs: [
+ "libmediautils",
+ ],
+
header_libs: ["libaudiohal_headers"],
export_include_dirs: ["include"],
diff --git a/media/libnbaio/AudioStreamOutSink.cpp b/media/libnbaio/AudioStreamOutSink.cpp
index 581867f..0ab5874 100644
--- a/media/libnbaio/AudioStreamOutSink.cpp
+++ b/media/libnbaio/AudioStreamOutSink.cpp
@@ -50,6 +50,14 @@
mFormat = Format_from_SR_C(config.sample_rate,
audio_channel_count_from_out_mask(config.channel_mask), config.format);
mFrameSize = Format_frameSize(mFormat);
+
+ // update format for MEL computation
+ auto processor = mMelProcessor.load();
+ if (processor) {
+ processor->updateAudioFormat(config.sample_rate,
+ audio_channel_count_from_out_mask(config.channel_mask),
+ config.format);
+ }
}
return NBAIO_Sink::negotiate(offers, numOffers, counterOffers, numCounterOffers);
}
@@ -63,8 +71,15 @@
size_t written;
status_t ret = mStream->write(buffer, count * mFrameSize, &written);
if (ret == OK && written > 0) {
+ // Send to MelProcessor for sound dose measurement.
+ auto processor = mMelProcessor.load();
+ if (processor) {
+ processor->process(buffer, written);
+ }
+
written /= mFrameSize;
mFramesWritten += written;
+
return written;
} else {
// FIXME verify HAL implementations are returning the correct error codes e.g. WOULD_BLOCK
@@ -85,4 +100,28 @@
return OK;
}
+void AudioStreamOutSink::startMelComputation(const sp<audio_utils::MelProcessor>& processor)
+{
+ ALOGV("%s start mel computation for device %d", __func__, processor->getDeviceId());
+
+ mMelProcessor.store(processor);
+ if (processor) {
+ // update format for MEL computation
+ processor->updateAudioFormat(mFormat.mSampleRate,
+ mFormat.mChannelCount,
+ mFormat.mFormat);
+ processor->resume();
+ }
+
+}
+
+void AudioStreamOutSink::stopMelComputation()
+{
+ auto melProcessor = mMelProcessor.load();
+ if (melProcessor != nullptr) {
+ ALOGV("%s pause mel computation for device %d", __func__, melProcessor->getDeviceId());
+ melProcessor->pause();
+ }
+}
+
} // namespace android
diff --git a/media/libnbaio/include/media/nbaio/AudioStreamOutSink.h b/media/libnbaio/include/media/nbaio/AudioStreamOutSink.h
index 635f67f..7b5aa06 100644
--- a/media/libnbaio/include/media/nbaio/AudioStreamOutSink.h
+++ b/media/libnbaio/include/media/nbaio/AudioStreamOutSink.h
@@ -17,7 +17,9 @@
#ifndef ANDROID_AUDIO_STREAM_OUT_SINK_H
#define ANDROID_AUDIO_STREAM_OUT_SINK_H
+#include <audio_utils/MelProcessor.h>
#include <media/nbaio/NBAIO.h>
+#include <mediautils/Synchronization.h>
namespace android {
@@ -48,6 +50,10 @@
// NBAIO_Sink end
+ void startMelComputation(const sp<audio_utils::MelProcessor>& processor);
+
+ void stopMelComputation();
+
#if 0 // until necessary
sp<StreamOutHalInterface> stream() const { return mStream; }
#endif
@@ -55,6 +61,7 @@
private:
sp<StreamOutHalInterface> mStream;
size_t mStreamBufferSizeBytes; // as reported by get_buffer_size()
+ mediautils::atomic_sp<audio_utils::MelProcessor> mMelProcessor;
};
} // namespace android
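A rough usage sketch for the new hooks, assuming the MelProcessor instance is constructed elsewhere (its construction is not part of this change) and that the sink has already negotiated a format:

    // given: sp<AudioStreamOutSink> sink; sp<audio_utils::MelProcessor> processor;
    sink->startMelComputation(processor);  // stores the processor, syncs its format, resumes it
    // ...normal playback: every successful write() now also runs processor->process()...
    sink->stopMelComputation();            // pauses the processor; it stays attached to the sink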
diff --git a/media/libstagefright/ACodec.cpp b/media/libstagefright/ACodec.cpp
index 4a5524d..505775b 100644
--- a/media/libstagefright/ACodec.cpp
+++ b/media/libstagefright/ACodec.cpp
@@ -6793,6 +6793,8 @@
info->checkReadFence("onOutputBufferDrained before queueBuffer");
err = mCodec->mNativeWindow->queueBuffer(
mCodec->mNativeWindow.get(), info->mGraphicBuffer.get(), info->mFenceFd);
+ // TODO(b/266211548): Poll the native window for rendered buffers here, since the frame
+ // event history delta is retrieved when buffers are queued.
info->mFenceFd = -1;
if (err == OK) {
info->mStatus = BufferInfo::OWNED_BY_NATIVE_WINDOW;
diff --git a/media/libstagefright/ACodecBufferChannel.cpp b/media/libstagefright/ACodecBufferChannel.cpp
index c5a59ff..8f2bed2 100644
--- a/media/libstagefright/ACodecBufferChannel.cpp
+++ b/media/libstagefright/ACodecBufferChannel.cpp
@@ -347,7 +347,8 @@
size_t offset,
const CryptoPlugin::SubSample *subSamples,
size_t numSubSamples,
- const sp<MediaCodecBuffer> &buffer) {
+ const sp<MediaCodecBuffer> &buffer,
+ AString* errorDetailMsg) {
std::shared_ptr<const std::vector<const BufferInfo>> array(
std::atomic_load(&mInputBuffers));
BufferInfoIterator it = findClientBuffer(array, buffer);
@@ -371,7 +372,6 @@
ssize_t result = -1;
ssize_t codecDataOffset = 0;
if (mCrypto != NULL) {
- AString errorDetailMsg;
hardware::drm::V1_0::DestinationBuffer destination;
if (secure) {
destination.type = DrmBufferType::NATIVE_HANDLE;
@@ -387,7 +387,7 @@
result = mCrypto->decrypt(key, iv, mode, pattern,
source, it->mClientBuffer->offset(),
- subSamples, numSubSamples, destination, &errorDetailMsg);
+ subSamples, numSubSamples, destination, errorDetailMsg);
if (result < 0) {
return result;
@@ -441,7 +441,9 @@
result = (ssize_t)_bytesWritten;
detailedError = _detailedError;
});
-
+ if (errorDetailMsg) {
+ errorDetailMsg->setTo(detailedError.c_str(), detailedError.size());
+ }
if (!returnVoid.isOk() || status != Status::OK || result < 0) {
ALOGE("descramble failed, trans=%s, status=%d, result=%zd",
returnVoid.description().c_str(), status, result);
@@ -485,6 +487,10 @@
return OK;
}
+void ACodecBufferChannel::pollForRenderedBuffers() {
+ // TODO(b/266211548): Poll the native window for rendered buffers.
+}
+
status_t ACodecBufferChannel::discardBuffer(const sp<MediaCodecBuffer> &buffer) {
std::shared_ptr<const std::vector<const BufferInfo>> array(
std::atomic_load(&mInputBuffers));
diff --git a/media/libstagefright/Android.bp b/media/libstagefright/Android.bp
index ddc0f2f..569a25f 100644
--- a/media/libstagefright/Android.bp
+++ b/media/libstagefright/Android.bp
@@ -238,6 +238,7 @@
"CallbackMediaSource.cpp",
"CameraSource.cpp",
"CameraSourceTimeLapse.cpp",
+ "CodecErrorLog.cpp",
"CryptoAsync.cpp",
"FrameDecoder.cpp",
"HevcUtils.cpp",
diff --git a/media/libstagefright/CameraSource.cpp b/media/libstagefright/CameraSource.cpp
index 842327d..967c316 100644
--- a/media/libstagefright/CameraSource.cpp
+++ b/media/libstagefright/CameraSource.cpp
@@ -150,7 +150,8 @@
if (camera == 0) {
mCamera = Camera::connect(cameraId, clientName, clientUid, clientPid,
- /*targetSdkVersion*/__ANDROID_API_FUTURE__, /*overrideToPortrait*/true);
+ /*targetSdkVersion*/__ANDROID_API_FUTURE__, /*overrideToPortrait*/false,
+ /*forceSlowJpegMode*/false);
if (mCamera == 0) return -EBUSY;
mCameraFlags &= ~FLAGS_HOT_CAMERA;
} else {
diff --git a/media/libstagefright/CodecErrorLog.cpp b/media/libstagefright/CodecErrorLog.cpp
new file mode 100644
index 0000000..9785623
--- /dev/null
+++ b/media/libstagefright/CodecErrorLog.cpp
@@ -0,0 +1,47 @@
+/*
+ * Copyright (C) 2023 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "CodecErrorLog"
+
+#include <log/log.h>
+#include <media/stagefright/CodecErrorLog.h>
+
+namespace android {
+
+void CodecErrorLog::log(const char *tag, const char *message) {
+ std::unique_lock lock(mLock);
+ ALOG(LOG_ERROR, tag, "%s", message);
+ mStream << message << std::endl;
+}
+
+void CodecErrorLog::log(const char *tag, const std::string &message) {
+ log(tag, message.c_str());
+}
+
+std::string CodecErrorLog::extract() {
+ std::unique_lock lock(mLock);
+ std::string msg = mStream.str();
+ mStream.str("");
+ return msg;
+}
+
+void CodecErrorLog::clear() {
+ std::unique_lock lock(mLock);
+ mStream.str("");
+}
+
+} // namespace android
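The matching header is not shown in this patch; a minimal declaration consistent with the implementation above might look like the sketch below (member names mirror the .cpp, everything else is an assumption). MediaCodec's new mErrorLog member, used throughout MediaCodec.cpp further down, presumably exposes this same interface, with extract() feeding the diagnostic text attached to codec errors.

    // Sketch of a CodecErrorLog declaration (hypothetical header layout):
    #include <mutex>
    #include <sstream>
    #include <string>

    namespace android {

    // Accumulates recent codec error messages so they can be surfaced to the app
    // alongside logcat output.
    class CodecErrorLog {
    public:
        void log(const char *tag, const char *message);
        void log(const char *tag, const std::string &message);
        std::string extract();  // returns the accumulated text and clears it
        void clear();
    private:
        std::mutex mLock;
        std::stringstream mStream;
    };

    }  // namespace android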
diff --git a/media/libstagefright/CryptoAsync.cpp b/media/libstagefright/CryptoAsync.cpp
index 32fd3be..8b5c8ed 100644
--- a/media/libstagefright/CryptoAsync.cpp
+++ b/media/libstagefright/CryptoAsync.cpp
@@ -153,7 +153,7 @@
// attach buffer
err = channel->attachEncryptedBuffer(
memory, secure, key, iv, mode, pattern,
- offset, subSamples, numSubSamples, buffer);
+ offset, subSamples, numSubSamples, buffer, &errorDetailMsg);
// a generic error
auto handleError = [this, &err, &msg]() {
diff --git a/media/libstagefright/FrameDecoder.cpp b/media/libstagefright/FrameDecoder.cpp
index 2370a7b..b5bd975 100644
--- a/media/libstagefright/FrameDecoder.cpp
+++ b/media/libstagefright/FrameDecoder.cpp
@@ -240,6 +240,9 @@
sp<IMemory> metaMem =
allocMetaFrame(trackMeta, width, height, tileWidth, tileHeight, dstBpp, bitDepth);
+ if (metaMem == nullptr) {
+ return NULL;
+ }
// try to fill sequence meta's duration based on average frame rate,
// default to 33ms if frame rate is unavailable.
@@ -542,7 +545,7 @@
if (dstFormat() == COLOR_Format32bitABGR2101010) {
videoFormat->setInt32("color-format", COLOR_FormatYUVP010);
} else {
- videoFormat->setInt32("color-format", OMX_COLOR_FormatYUV420Planar);
+ videoFormat->setInt32("color-format", COLOR_FormatYUV420Flexible);
}
// For the thumbnail extraction case, try to allocate single buffer in both
@@ -685,7 +688,6 @@
if (mCaptureLayer != nullptr) {
return captureSurface();
}
-
ColorConverter converter((OMX_COLOR_FORMATTYPE)srcFormat, dstFormat());
uint32_t standard, range, transfer;
@@ -698,8 +700,18 @@
if (!outputFormat->findInt32("color-transfer", (int32_t*)&transfer)) {
transfer = 0;
}
+ sp<ABuffer> imgObj;
+ if (videoFrameBuffer->meta()->findBuffer("image-data", &imgObj)) {
+ MediaImage2 *imageData = nullptr;
+ imageData = (MediaImage2 *)(imgObj.get()->data());
+ if (imageData != nullptr) {
+ converter.setSrcMediaImage2(*imageData);
+ }
+ }
+ if (srcFormat == COLOR_FormatYUV420Flexible && imgObj.get() == nullptr) {
+ return ERROR_UNSUPPORTED;
+ }
converter.setSrcColorSpace(standard, range, transfer);
-
if (converter.isValid()) {
converter.convert(
(const uint8_t *)videoFrameBuffer->data(),
@@ -864,7 +876,7 @@
if (dstFormat() == COLOR_Format32bitABGR2101010) {
videoFormat->setInt32("color-format", COLOR_FormatYUVP010);
} else {
- videoFormat->setInt32("color-format", OMX_COLOR_FormatYUV420Planar);
+ videoFormat->setInt32("color-format", COLOR_FormatYUV420Flexible);
}
if ((mGridRows == 1) && (mGridCols == 1)) {
@@ -967,6 +979,17 @@
if (!outputFormat->findInt32("color-transfer", (int32_t*)&transfer)) {
transfer = 0;
}
+ sp<ABuffer> imgObj;
+ if (videoFrameBuffer->meta()->findBuffer("image-data", &imgObj)) {
+ MediaImage2 *imageData = nullptr;
+ imageData = (MediaImage2 *)(imgObj.get()->data());
+ if (imageData != nullptr) {
+ converter.setSrcMediaImage2(*imageData);
+ }
+ }
+ if (srcFormat == COLOR_FormatYUV420Flexible && imgObj.get() == nullptr) {
+ return ERROR_UNSUPPORTED;
+ }
converter.setSrcColorSpace(standard, range, transfer);
int32_t crop_left, crop_top, crop_right, crop_bottom;
diff --git a/media/libstagefright/MediaCodec.cpp b/media/libstagefright/MediaCodec.cpp
index e799490..c9287e5 100644
--- a/media/libstagefright/MediaCodec.cpp
+++ b/media/libstagefright/MediaCodec.cpp
@@ -20,6 +20,7 @@
#include <utils/Log.h>
#include <set>
+#include <random>
#include <stdlib.h>
#include <inttypes.h>
@@ -41,6 +42,7 @@
#include <android/binder_ibinder.h>
#include <android/binder_manager.h>
#include <android/dlext.h>
+#include <android-base/stringprintf.h>
#include <binder/IMemory.h>
#include <binder/IServiceManager.h>
#include <binder/MemoryDealer.h>
@@ -99,6 +101,7 @@
// These must be kept synchronized with the constants there.
static const char *kCodecLogSessionId = "android.media.mediacodec.log-session-id";
static const char *kCodecCodec = "android.media.mediacodec.codec"; /* e.g. OMX.google.aac.decoder */
+static const char *kCodecId = "android.media.mediacodec.id";
static const char *kCodecMime = "android.media.mediacodec.mime"; /* e.g. audio/mime */
static const char *kCodecMode = "android.media.mediacodec.mode"; /* audio, video */
static const char *kCodecModeVideo = "video"; /* values returned for kCodecMode */
@@ -218,7 +221,7 @@
sp<MediaCodec> codec = mMediaCodec.promote();
if (codec == NULL) {
// Codec is already gone, so remove the resources as well
- ::ndk::SpAIBinder binder(AServiceManager_getService("media.resource_manager"));
+ ::ndk::SpAIBinder binder(AServiceManager_waitForService("media.resource_manager"));
std::shared_ptr<IResourceManagerService> service =
IResourceManagerService::fromBinder(binder);
if (service == nullptr) {
@@ -290,6 +293,9 @@
void removeClient();
void markClientForPendingRemoval();
bool reclaimResource(const std::vector<MediaResourceParcel> &resources);
+ void notifyClientCreated();
+ void notifyClientStarted(ClientConfigParcel& clientConfig);
+ void notifyClientStopped(ClientConfigParcel& clientConfig);
inline void setCodecName(const char* name) {
mCodecName = name;
@@ -331,7 +337,7 @@
}
status_t MediaCodec::ResourceManagerServiceProxy::init() {
- ::ndk::SpAIBinder binder(AServiceManager_getService("media.resource_manager"));
+ ::ndk::SpAIBinder binder(AServiceManager_waitForService("media.resource_manager"));
mService = IResourceManagerService::fromBinder(binder);
if (mService == nullptr) {
ALOGE("Failed to get ResourceManagerService");
@@ -468,6 +474,32 @@
return status.isOk() && success;
}
+void MediaCodec::ResourceManagerServiceProxy::notifyClientCreated() {
+ ClientInfoParcel clientInfo{.pid = static_cast<int32_t>(mPid),
+ .uid = static_cast<int32_t>(mUid),
+ .id = getId(mClient),
+ .name = mCodecName};
+ mService->notifyClientCreated(clientInfo);
+}
+
+void MediaCodec::ResourceManagerServiceProxy::notifyClientStarted(
+ ClientConfigParcel& clientConfig) {
+ clientConfig.clientInfo.pid = static_cast<int32_t>(mPid);
+ clientConfig.clientInfo.uid = static_cast<int32_t>(mUid);
+ clientConfig.clientInfo.id = getId(mClient);
+ clientConfig.clientInfo.name = mCodecName;
+ mService->notifyClientStarted(clientConfig);
+}
+
+void MediaCodec::ResourceManagerServiceProxy::notifyClientStopped(
+ ClientConfigParcel& clientConfig) {
+ clientConfig.clientInfo.pid = static_cast<int32_t>(mPid);
+ clientConfig.clientInfo.uid = static_cast<int32_t>(mUid);
+ clientConfig.clientInfo.id = getId(mClient);
+ clientConfig.clientInfo.name = mCodecName;
+ mService->notifyClientStopped(clientConfig);
+}
+
////////////////////////////////////////////////////////////////////////////////
MediaCodec::BufferInfo::BufferInfo() : mOwnedByClient(false) {}
@@ -535,6 +567,7 @@
kWhatOutputFramesRendered = 'outR',
kWhatOutputBuffersChanged = 'outC',
kWhatFirstTunnelFrameReady = 'ftfR',
+ kWhatPollForRenderedBuffers = 'plrb',
};
class CryptoAsyncCallback : public CryptoAsync::CryptoAsyncCallback {
@@ -859,6 +892,23 @@
return new PersistentSurface(bufferProducer, bufferSource);
}
+// GenerateCodecId generates a 64-bit random ID for each codec that is created.
+// The codec ID is composed of:
+// - a process-unique random value in the high 32 bits
+// - an atomic sequence counter in the low 32 bits
+//
+static uint64_t GenerateCodecId() {
+ static std::atomic_uint64_t sId = [] {
+ std::random_device rd;
+ std::mt19937 gen(rd());
+ std::uniform_int_distribution<uint32_t> distrib(0, UINT32_MAX);
+ uint32_t randomID = distrib(gen);
+ uint64_t id = randomID;
+ return id << 32;
+ }();
+ return sId++;
+}
+
MediaCodec::MediaCodec(
const sp<ALooper> &looper, pid_t pid, uid_t uid,
std::function<sp<CodecBase>(const AString &, const char *)> getCodecBase,
@@ -901,6 +951,7 @@
mInputBufferCounter(0),
mGetCodecBase(getCodecBase),
mGetCodecInfo(getCodecInfo) {
+ mCodecId = GenerateCodecId();
mResourceManagerProxy = new ResourceManagerServiceProxy(pid, uid,
::ndk::SharedRefBase::make<ResourceManagerClient>(this, pid, uid));
if (!mGetCodecBase) {
@@ -909,10 +960,12 @@
};
}
if (!mGetCodecInfo) {
- mGetCodecInfo = [](const AString &name, sp<MediaCodecInfo> *info) -> status_t {
+ mGetCodecInfo = [&log = mErrorLog](const AString &name,
+ sp<MediaCodecInfo> *info) -> status_t {
*info = nullptr;
const sp<IMediaCodecList> mcl = MediaCodecList::getInstance();
if (!mcl) {
+ log.log(LOG_TAG, "Fatal error: failed to initialize MediaCodecList");
return NO_INIT; // if called from Java should raise IOException
}
AString tmp = name;
@@ -927,6 +980,8 @@
*info = mcl->getCodecInfo(codecIdx);
return OK;
}
+ log.log(LOG_TAG, base::StringPrintf("Codec with name '%s' is not found on the device.",
+ name.c_str()));
return NAME_NOT_FOUND;
};
}
@@ -982,6 +1037,7 @@
void MediaCodec::updateMediametrics() {
if (mMetricsHandle == 0) {
+ ALOGW("no metrics handle found");
return;
}
@@ -1234,12 +1290,14 @@
// ensure mutex while we do our own work
Mutex::Autolock _lock(mMetricsLock);
if (mMetricsHandle != 0) {
- if (mediametrics_count(mMetricsHandle) > 0) {
+ if (mMetricsToUpload && mediametrics_count(mMetricsHandle) > 0) {
mediametrics_selfRecord(mMetricsHandle);
}
mediametrics_delete(mMetricsHandle);
mMetricsHandle = 0;
}
+ // we no longer have anything pending upload
+ mMetricsToUpload = false;
}
void MediaCodec::updateLowLatency(const sp<AMessage> &msg) {
@@ -1675,6 +1733,8 @@
status_t MediaCodec::init(const AString &name) {
status_t err = mResourceManagerProxy->init();
if (err != OK) {
+ mErrorLog.log(LOG_TAG, base::StringPrintf(
+ "Fatal error: failed to initialize ResourceManager (err=%d)", err));
mCodec = NULL; // remove the codec
return err;
}
@@ -1694,11 +1754,14 @@
if (!name.startsWith("android.filter.")) {
err = mGetCodecInfo(name, &mCodecInfo);
if (err != OK) {
+ mErrorLog.log(LOG_TAG, base::StringPrintf(
+ "Getting codec info with name '%s' failed (err=%d)", name.c_str(), err));
mCodec = NULL; // remove the codec.
return err;
}
if (mCodecInfo == nullptr) {
- ALOGE("Getting codec info with name '%s' failed", name.c_str());
+ mErrorLog.log(LOG_TAG, base::StringPrintf(
+ "Getting codec info with name '%s' failed", name.c_str()));
return NAME_NOT_FOUND;
}
secureCodec = name.endsWith(".secure");
@@ -1721,7 +1784,8 @@
mCodec = mGetCodecBase(name, owner);
if (mCodec == NULL) {
- ALOGE("Getting codec base with name '%s' (owner='%s') failed", name.c_str(), owner);
+ mErrorLog.log(LOG_TAG, base::StringPrintf(
+ "Getting codec base with name '%s' (from '%s' HAL) failed", name.c_str(), owner));
return NAME_NOT_FOUND;
}
@@ -1733,7 +1797,7 @@
mCodecLooper->setName("CodecLooper");
err = mCodecLooper->start(false, false, ANDROID_PRIORITY_AUDIO);
if (OK != err) {
- ALOGE("Codec Looper failed to start");
+ mErrorLog.log(LOG_TAG, "Fatal error: codec looper failed to start");
return err;
}
}
@@ -1792,6 +1856,12 @@
break;
}
}
+
+ if (OK == err) {
+ // Notify the ResourceManager that this codec has been created
+ // (initialized) successfully.
+ mResourceManagerProxy->notifyClientCreated();
+ }
return err;
}
@@ -1838,6 +1908,7 @@
const sp<ICrypto> &crypto,
const sp<IDescrambler> &descrambler,
uint32_t flags) {
+
sp<AMessage> msg = new AMessage(kWhatConfigure, this);
mediametrics_handle_t nextMetricsHandle = mediametrics_create(kCodecKeyName);
@@ -1845,6 +1916,7 @@
format->findString("log-session-id", &mLogSessionId);
if (nextMetricsHandle != 0) {
+ mediametrics_setInt64(nextMetricsHandle, kCodecId, mCodecId);
int32_t profile = 0;
if (format->findInt32("profile", &profile)) {
mediametrics_setInt32(nextMetricsHandle, kCodecProfile, profile);
@@ -1908,7 +1980,9 @@
// Prevent possible integer overflow in downstream code.
if (mWidth < 0 || mHeight < 0 ||
(uint64_t)mWidth * mHeight > (uint64_t)INT32_MAX / 4) {
- ALOGE("Invalid size(s), width=%d, height=%d", mWidth, mHeight);
+ mErrorLog.log(LOG_TAG, base::StringPrintf(
+ "Invalid size(s), width=%d, height=%d", mWidth, mHeight));
+ mediametrics_delete(nextMetricsHandle);
return BAD_VALUE;
}
@@ -3078,17 +3152,17 @@
sp<MediaCodecBuffer> *buffer, sp<AMessage> *format) {
// use mutex instead of a context switch
if (mReleasedByResourceManager) {
- ALOGE("getBufferAndFormat - resource already released");
+ mErrorLog.log(LOG_TAG, "resource already released");
return DEAD_OBJECT;
}
if (buffer == NULL) {
- ALOGE("getBufferAndFormat - null MediaCodecBuffer");
+ mErrorLog.log(LOG_TAG, "null buffer");
return INVALID_OPERATION;
}
if (format == NULL) {
- ALOGE("getBufferAndFormat - null AMessage");
+ mErrorLog.log(LOG_TAG, "null format");
return INVALID_OPERATION;
}
@@ -3096,7 +3170,9 @@
format->clear();
if (!isExecuting()) {
- ALOGE("getBufferAndFormat - not executing");
+ mErrorLog.log(LOG_TAG, base::StringPrintf(
+ "Invalid to call %s; only valid in Executing states",
+ apiStateString().c_str()));
return INVALID_OPERATION;
}
@@ -3108,6 +3184,7 @@
if (index >= buffers.size()) {
ALOGE("getBufferAndFormat - trying to get buffer with "
"bad index (index=%zu buffer_size=%zu)", index, buffers.size());
+ mErrorLog.log(LOG_TAG, base::StringPrintf("Bad index (index=%zu)", index));
return INVALID_OPERATION;
}
@@ -3115,6 +3192,7 @@
if (!info.mOwnedByClient) {
ALOGE("getBufferAndFormat - invalid operation "
"(the index %zu is not owned by client)", index);
+ mErrorLog.log(LOG_TAG, base::StringPrintf("index %zu is not owned by client", index));
return INVALID_OPERATION;
}
@@ -3242,6 +3320,7 @@
void MediaCodec::cancelPendingDequeueOperations() {
if (mFlags & kFlagDequeueInputPending) {
+ mErrorLog.log(LOG_TAG, "Pending dequeue input buffer request cancelled");
PostReplyWithError(mDequeueInputReplyID, INVALID_OPERATION);
++mDequeueInputTimeoutGeneration;
@@ -3250,6 +3329,7 @@
}
if (mFlags & kFlagDequeueOutputPending) {
+ mErrorLog.log(LOG_TAG, "Pending dequeue output buffer request cancelled");
PostReplyWithError(mDequeueOutputReplyID, INVALID_OPERATION);
++mDequeueOutputTimeoutGeneration;
@@ -3259,8 +3339,16 @@
}
bool MediaCodec::handleDequeueInputBuffer(const sp<AReplyToken> &replyID, bool newRequest) {
- if (!isExecuting() || (mFlags & kFlagIsAsync)
- || (newRequest && (mFlags & kFlagDequeueInputPending))) {
+ if (!isExecuting()) {
+ mErrorLog.log(LOG_TAG, base::StringPrintf(
+ "Invalid to call %s; only valid in executing state",
+ apiStateString().c_str()));
+ PostReplyWithError(replyID, INVALID_OPERATION);
+ } else if (mFlags & kFlagIsAsync) {
+ mErrorLog.log(LOG_TAG, "Invalid to call in async mode");
+ PostReplyWithError(replyID, INVALID_OPERATION);
+ } else if (newRequest && (mFlags & kFlagDequeueInputPending)) {
+ mErrorLog.log(LOG_TAG, "Invalid to call while another dequeue input request is pending");
PostReplyWithError(replyID, INVALID_OPERATION);
return true;
} else if (mFlags & kFlagStickyError) {
@@ -3284,8 +3372,16 @@
MediaCodec::DequeueOutputResult MediaCodec::handleDequeueOutputBuffer(
const sp<AReplyToken> &replyID, bool newRequest) {
- if (!isExecuting() || (mFlags & kFlagIsAsync)
- || (newRequest && (mFlags & kFlagDequeueOutputPending))) {
+ if (!isExecuting()) {
+ mErrorLog.log(LOG_TAG, base::StringPrintf(
+ "Invalid to call %s; only valid in executing state",
+ apiStateString().c_str()));
+ PostReplyWithError(replyID, INVALID_OPERATION);
+ } else if (mFlags & kFlagIsAsync) {
+ mErrorLog.log(LOG_TAG, "Invalid to call in async mode");
+ PostReplyWithError(replyID, INVALID_OPERATION);
+ } else if (newRequest && (mFlags & kFlagDequeueOutputPending)) {
+ mErrorLog.log(LOG_TAG, "Invalid to call while another dequeue output request is pending");
PostReplyWithError(replyID, INVALID_OPERATION);
} else if (mFlags & kFlagStickyError) {
PostReplyWithError(replyID, getStickyError());
@@ -3338,6 +3434,17 @@
return DequeueOutputResult::kRepliedWithError;
}
+
+inline void MediaCodec::initClientConfigParcel(ClientConfigParcel& clientConfig) {
+ clientConfig.codecType = toMediaResourceSubType(mDomain);
+ clientConfig.isEncoder = mFlags & kFlagIsEncoder;
+ clientConfig.isHardware = !MediaCodecList::isSoftwareCodec(mComponentName);
+ clientConfig.width = mWidth;
+ clientConfig.height = mHeight;
+ clientConfig.timeStamp = systemTime(SYSTEM_TIME_MONOTONIC) / 1000LL;
+ clientConfig.id = mCodecId;
+}
+
void MediaCodec::onMessageReceived(const sp<AMessage> &msg) {
switch (msg->what()) {
case kWhatCodecNotify:
@@ -3584,14 +3691,8 @@
mediametrics_setInt32(mMetricsHandle, kCodecSecure, 0);
}
- MediaCodecInfo::Attributes attr = mCodecInfo
- ? mCodecInfo->getAttributes()
- : MediaCodecInfo::Attributes(0);
- if (mDomain == DOMAIN_VIDEO || !(attr & MediaCodecInfo::kFlagIsSoftwareOnly)) {
- // software audio codecs are currently ignored.
- mResourceManagerProxy->addResource(MediaResource::CodecResource(
+ mResourceManagerProxy->addResource(MediaResource::CodecResource(
mFlags & kFlagIsSecure, toMediaResourceSubType(mDomain)));
- }
postPendingRepliesAndDeferredMessages("kWhatComponentAllocated");
break;
@@ -3761,6 +3862,11 @@
mResourceManagerProxy->addResource(
MediaResource::GraphicMemoryResource(getGraphicBufferSize()));
}
+ // Notify the RM that the codec is in use (has been started).
+ ClientConfigParcel clientConfig;
+ initClientConfigParcel(clientConfig);
+ mResourceManagerProxy->notifyClientStarted(clientConfig);
+
setState(STARTED);
postPendingRepliesAndDeferredMessages("kWhatStartCompleted");
@@ -3991,6 +4097,11 @@
mState, stateString(mState).c_str());
break;
}
+ // Notify the RM that the codec has been stopped.
+ ClientConfigParcel clientConfig;
+ initClientConfigParcel(clientConfig);
+ mResourceManagerProxy->notifyClientStopped(clientConfig);
+
setState(INITIALIZED);
if (mReplyID) {
postPendingRepliesAndDeferredMessages("kWhatStopCompleted");
@@ -4113,6 +4224,9 @@
// callback can't be set after codec is executing,
// or before it's initialized (as the callback
// will be cleared when it goes to INITIALIZED)
+ mErrorLog.log(LOG_TAG, base::StringPrintf(
+ "Invalid to call %s; only valid at Initialized state",
+ apiStateString().c_str()));
PostReplyWithError(replyID, INVALID_OPERATION);
break;
}
@@ -4144,6 +4258,9 @@
case kWhatConfigure:
{
if (mState != INITIALIZED) {
+ mErrorLog.log(LOG_TAG, base::StringPrintf(
+ "configure() is valid only at Initialized state; currently %s",
+ apiStateString().c_str()));
PostReplyWithError(msg, INVALID_OPERATION);
break;
}
@@ -4173,6 +4290,10 @@
initMediametrics();
}
+ // from this point forward, in this configure/use/release lifecycle, we want to
+ // upload our data
+ mMetricsToUpload = true;
+
int32_t push;
if (msg->findInt32("push-blank-buffers-on-shutdown", &push) && push != 0) {
mFlags |= kFlagPushBlankBuffersOnShutdown;
@@ -4202,7 +4323,8 @@
if (flags & CONFIGURE_FLAG_USE_BLOCK_MODEL ||
flags & CONFIGURE_FLAG_USE_CRYPTO_ASYNC) {
if (!(mFlags & kFlagIsAsync)) {
- ALOGE("Error: configuration requires async operation");
+ mErrorLog.log(
+ LOG_TAG, "Block model is only valid with callback set (async mode)");
PostReplyWithError(replyID, INVALID_OPERATION);
break;
}
@@ -4210,11 +4332,10 @@
mFlags |= kFlagUseBlockModel;
}
if (flags & CONFIGURE_FLAG_USE_CRYPTO_ASYNC) {
- // silently disable crytoasync with blockmodel
- if (!(mFlags & kFlagUseBlockModel)) {
- mFlags |= kFlagUseCryptoAsync;
- } else {
- ALOGW("CrytoAsync not yet enabled for block model, falling back to normal");
+ mFlags |= kFlagUseCryptoAsync;
+ if ((mFlags & kFlagUseBlockModel)) {
+ ALOGW("CrytoAsync not yet enabled for block model,\
+ falling back to normal");
}
}
}
@@ -4244,17 +4365,23 @@
mBufferChannel->setDescrambler(mDescrambler);
if ((mFlags & kFlagUseCryptoAsync) &&
mCrypto && (mDomain == DOMAIN_VIDEO)) {
- mCryptoAsync = new CryptoAsync(mBufferChannel);
- mCryptoAsync->setCallback(
- std::make_unique<CryptoAsyncCallback>(new AMessage(kWhatCodecNotify, this)));
- mCryptoLooper = new ALooper();
- mCryptoLooper->setName("CryptoAsyncLooper");
- mCryptoLooper->registerHandler(mCryptoAsync);
- status_t err = mCryptoLooper->start();
- if (err != OK) {
- ALOGE("Crypto Looper failed to start");
- mCryptoAsync = nullptr;
- mCryptoLooper = nullptr;
+ // set kFlagUseCryptoAsync but do not use it for block model;
+ // this is to propagate the error in onCryptoError()
+ // TODO (b/274628160): Enable use of CONFIGURE_FLAG_USE_CRYPTO_ASYNC
+ // with CONFIGURE_FLAG_USE_BLOCK_MODEL
+ if (!(mFlags & kFlagUseBlockModel)) {
+ mCryptoAsync = new CryptoAsync(mBufferChannel);
+ mCryptoAsync->setCallback(
+ std::make_unique<CryptoAsyncCallback>(new AMessage(kWhatCodecNotify, this)));
+ mCryptoLooper = new ALooper();
+ mCryptoLooper->setName("CryptoAsyncLooper");
+ mCryptoLooper->registerHandler(mCryptoAsync);
+ status_t err = mCryptoLooper->start();
+ if (err != OK) {
+ ALOGE("Crypto Looper failed to start");
+ mCryptoAsync = nullptr;
+ mCryptoLooper = nullptr;
+ }
}
}
@@ -4300,9 +4427,13 @@
sp<Surface> surface = static_cast<Surface *>(obj.get());
if (mSurface == NULL) {
// do not support setting surface if it was not set
+ mErrorLog.log(LOG_TAG,
+ "Cannot set surface if the codec is not configured with "
+ "a surface already");
err = INVALID_OPERATION;
} else if (obj == NULL) {
// do not support unsetting surface
+ mErrorLog.log(LOG_TAG, "Unsetting surface is not supported");
err = BAD_VALUE;
} else {
err = connectToSurface(surface);
@@ -4333,6 +4464,9 @@
}
default:
+ mErrorLog.log(LOG_TAG, base::StringPrintf(
+ "setSurface() is valid only at Executing states; currently %s",
+ apiStateString().c_str()));
err = INVALID_OPERATION;
break;
}
@@ -4346,6 +4480,9 @@
{
// Must be configured, but can't have been started yet.
if (mState != CONFIGURED) {
+ mErrorLog.log(LOG_TAG, base::StringPrintf(
+ "setInputSurface() is valid only at Configured state; currently %s",
+ apiStateString().c_str()));
PostReplyWithError(msg, INVALID_OPERATION);
break;
}
@@ -4381,6 +4518,9 @@
PostReplyWithError(msg, OK);
break;
} else if (mState != CONFIGURED) {
+ mErrorLog.log(LOG_TAG, base::StringPrintf(
+ "start() is valid only at Configured state; currently %s",
+ apiStateString().c_str()));
PostReplyWithError(msg, INVALID_OPERATION);
break;
}
@@ -4460,6 +4600,7 @@
if (mFlags & kFlagIsAsync) {
onError(DEAD_OBJECT, ACTION_CODE_FATAL);
}
+ mErrorLog.log(LOG_TAG, "Released by resource manager");
mReleasedByResourceManager = true;
}
@@ -4496,6 +4637,7 @@
// the previous stop/release completes and then reply with OK.
status_t err = mState == targetState ? OK : INVALID_OPERATION;
response->setInt32("err", err);
+ // TODO: mErrorLog
if (err == OK && targetState == UNINITIALIZED) {
mComponentName.clear();
}
@@ -4604,13 +4746,13 @@
CHECK(msg->senderAwaitsResponse(&replyID));
if (mFlags & kFlagIsAsync) {
- ALOGE("dequeueInputBuffer can't be used in async mode");
+ mErrorLog.log(LOG_TAG, "dequeueInputBuffer can't be used in async mode");
PostReplyWithError(replyID, INVALID_OPERATION);
break;
}
if (mHaveInputSurface) {
- ALOGE("dequeueInputBuffer can't be used with input surface");
+ mErrorLog.log(LOG_TAG, "dequeueInputBuffer can't be used with input surface");
PostReplyWithError(replyID, INVALID_OPERATION);
break;
}
@@ -4665,6 +4807,9 @@
CHECK(msg->senderAwaitsResponse(&replyID));
if (!isExecuting()) {
+ mErrorLog.log(LOG_TAG, base::StringPrintf(
+ "queueInputBuffer() is valid only at Executing states; currently %s",
+ apiStateString().c_str()));
PostReplyWithError(replyID, INVALID_OPERATION);
break;
} else if (mFlags & kFlagStickyError) {
@@ -4692,7 +4837,7 @@
CHECK(msg->senderAwaitsResponse(&replyID));
if (mFlags & kFlagIsAsync) {
- ALOGE("dequeueOutputBuffer can't be used in async mode");
+ mErrorLog.log(LOG_TAG, "dequeueOutputBuffer can't be used in async mode");
PostReplyWithError(replyID, INVALID_OPERATION);
break;
}
@@ -4759,6 +4904,9 @@
CHECK(msg->senderAwaitsResponse(&replyID));
if (!isExecuting()) {
+ mErrorLog.log(LOG_TAG, base::StringPrintf(
+ "releaseOutputBuffer() is valid only at Executing states; currently %s",
+ apiStateString().c_str()));
PostReplyWithError(replyID, INVALID_OPERATION);
break;
} else if (mFlags & kFlagStickyError) {
@@ -4772,9 +4920,25 @@
break;
}
+ case kWhatPollForRenderedBuffers:
+ {
+ if (isExecuting()) {
+ mBufferChannel->pollForRenderedBuffers();
+ }
+ break;
+ }
+
case kWhatSignalEndOfInputStream:
{
- if (!isExecuting() || !mHaveInputSurface) {
+ if (!isExecuting()) {
+ mErrorLog.log(LOG_TAG, base::StringPrintf(
+ "signalEndOfInputStream() is valid only at Executing states; currently %s",
+ apiStateString().c_str()));
+ PostReplyWithError(msg, INVALID_OPERATION);
+ break;
+ } else if (!mHaveInputSurface) {
+ mErrorLog.log(
+ LOG_TAG, "signalEndOfInputStream() called without an input surface set");
PostReplyWithError(msg, INVALID_OPERATION);
break;
} else if (mFlags & kFlagStickyError) {
@@ -4798,7 +4962,14 @@
{
sp<AReplyToken> replyID;
CHECK(msg->senderAwaitsResponse(&replyID));
- if (!isExecuting() || (mFlags & kFlagIsAsync)) {
+ if (!isExecuting()) {
+ mErrorLog.log(LOG_TAG, base::StringPrintf(
+ "getInput/OutputBuffers() is valid only at Executing states; currently %s",
+ apiStateString().c_str()));
+ PostReplyWithError(replyID, INVALID_OPERATION);
+ break;
+ } else if (mFlags & kFlagIsAsync) {
+ mErrorLog.log(LOG_TAG, "getInput/OutputBuffers() is not supported with callbacks");
PostReplyWithError(replyID, INVALID_OPERATION);
break;
} else if (mFlags & kFlagStickyError) {
@@ -4831,6 +5002,9 @@
case kWhatFlush:
{
if (!isExecuting()) {
+ mErrorLog.log(LOG_TAG, base::StringPrintf(
+ "flush() is valid only at Executing states; currently %s",
+ apiStateString().c_str()));
PostReplyWithError(msg, INVALID_OPERATION);
break;
} else if (mFlags & kFlagStickyError) {
@@ -4874,10 +5048,17 @@
sp<AReplyToken> replyID;
CHECK(msg->senderAwaitsResponse(&replyID));
- if ((mState != CONFIGURED && mState != STARTING &&
- mState != STARTED && mState != FLUSHING &&
- mState != FLUSHED)
- || format == NULL) {
+ if (mState != CONFIGURED && mState != STARTING &&
+ mState != STARTED && mState != FLUSHING &&
+ mState != FLUSHED) {
+ mErrorLog.log(LOG_TAG, base::StringPrintf(
+ "getInput/OutputFormat() is valid at Executing states "
+ "and Configured state; currently %s",
+ apiStateString().c_str()));
+ PostReplyWithError(replyID, INVALID_OPERATION);
+ break;
+ } else if (format == NULL) {
+ mErrorLog.log(LOG_TAG, "Fatal error: format is not initialized");
PostReplyWithError(replyID, INVALID_OPERATION);
break;
} else if (mFlags & kFlagStickyError) {
@@ -4912,6 +5093,7 @@
CHECK(msg->senderAwaitsResponse(&replyID));
if (mComponentName.empty()) {
+ mErrorLog.log(LOG_TAG, "Fatal error: name is not set");
PostReplyWithError(replyID, INVALID_OPERATION);
break;
}
@@ -5079,7 +5261,7 @@
size_t i = 0;
for (;;) {
sp<ABuffer> csd;
- if (!format->findBuffer(AStringPrintf("csd-%u", i).c_str(), &csd)) {
+ if (!format->findBuffer(base::StringPrintf("csd-%zu", i).c_str(), &csd)) {
break;
}
if (csd->size() == 0) {
@@ -5114,7 +5296,7 @@
}
sDealer = new MemoryDealer(
newDealerCapacity,
- AStringPrintf("CSD(%dMB)", newDealerCapacity / 1048576).c_str());
+ base::StringPrintf("CSD(%zuMB)", newDealerCapacity / 1048576).c_str());
mem = sDealer->allocate(csd->size());
}
memcpy(mem->unsecurePointer(), csd->data(), csd->size());
@@ -5125,9 +5307,14 @@
FetchLinearBlock(csd->size(), {std::string{mComponentName.c_str()}});
C2WriteView view{block->map().get()};
if (view.error() != C2_OK) {
+ mErrorLog.log(LOG_TAG, "Fatal error: failed to allocate and map a block");
return -EINVAL;
}
if (csd->size() > view.capacity()) {
+ mErrorLog.log(LOG_TAG, base::StringPrintf(
+ "Fatal error: allocated block is too small "
+ "(csd size %zu; block cap %u)",
+ csd->size(), view.capacity()));
return -EINVAL;
}
memcpy(view.base(), csd->data(), csd->size());
@@ -5138,10 +5325,16 @@
const sp<MediaCodecBuffer> &codecInputData = info.mData;
if (csd->size() > codecInputData->capacity()) {
+ mErrorLog.log(LOG_TAG, base::StringPrintf(
+ "CSD is too large to fit in input buffer "
+ "(csd size %zu; buffer cap %zu)",
+ csd->size(), codecInputData->capacity()));
return -EINVAL;
}
if (codecInputData->data() == NULL) {
ALOGV("Input buffer %zu is not properly allocated", bufferIndex);
+ mErrorLog.log(LOG_TAG, base::StringPrintf(
+ "Fatal error: input buffer %zu is not properly allocated", bufferIndex));
return -EINVAL;
}
@@ -5194,6 +5387,7 @@
mActivityNotify.clear();
mCallback.clear();
+ mErrorLog.clear();
}
if (newState == UNINITIALIZED) {
@@ -5314,6 +5508,7 @@
if (!hasCryptoOrDescrambler()) {
ALOGE("[%s] queuing secure buffer without mCrypto or mDescrambler!",
mComponentName.c_str());
+ mErrorLog.log(LOG_TAG, "queuing secure buffer without mCrypto or mDescrambler!");
return -EINVAL;
}
CHECK(msg->findPointer("subSamples", (void **)&subSamples));
@@ -5336,12 +5531,21 @@
}
if (index >= mPortBuffers[kPortIndexInput].size()) {
+ mErrorLog.log(LOG_TAG, base::StringPrintf(
+ "index out of range (index=%zu)", mPortBuffers[kPortIndexInput].size()));
return -ERANGE;
}
BufferInfo *info = &mPortBuffers[kPortIndexInput][index];
sp<MediaCodecBuffer> buffer = info->mData;
- if (buffer == nullptr || !info->mOwnedByClient) {
+ if (buffer == nullptr) {
+ mErrorLog.log(LOG_TAG, base::StringPrintf(
+ "Fatal error: failed to fetch buffer for index %zu", index));
+ return -EACCES;
+ }
+ if (!info->mOwnedByClient) {
+ mErrorLog.log(LOG_TAG, base::StringPrintf(
+ "client does not own the buffer #%zu", index));
return -EACCES;
}
auto setInputBufferParams = [this, &buffer]
@@ -5422,10 +5626,26 @@
if (c2Buffer) {
err = mBufferChannel->attachBuffer(c2Buffer, buffer);
} else if (memory) {
+ AString errorDetailMsg;
err = mBufferChannel->attachEncryptedBuffer(
memory, (mFlags & kFlagIsSecure), key, iv, mode, pattern,
- offset, subSamples, numSubSamples, buffer);
+ offset, subSamples, numSubSamples, buffer, &errorDetailMsg);
+ if (err != OK && hasCryptoOrDescrambler()
+ && (mFlags & kFlagUseCryptoAsync)) {
+ // build the crypto error using the detail returned by attachEncryptedBuffer
+ sp<AMessage> cryptoErrorInfo = new AMessage();
+ buildCryptoInfoAMessage(cryptoErrorInfo, CryptoAsync::kActionDecrypt);
+ cryptoErrorInfo->setInt32("err", err);
+ cryptoErrorInfo->setInt32("actionCode", ACTION_CODE_FATAL);
+ cryptoErrorInfo->setString("errorDetail", errorDetailMsg);
+ onCryptoError(cryptoErrorInfo);
+ // we want cryptoError to be in the callback
+ // but Codec IllegalStateException to be triggered.
+ err = INVALID_OPERATION;
+ }
} else {
+ mErrorLog.log(LOG_TAG, "Fatal error: invalid queue request without a buffer");
err = UNKNOWN_ERROR;
}
if (err == OK && !buffer->asC2Buffer()
@@ -5446,12 +5666,17 @@
offset = buffer->offset();
size = buffer->size();
if (err != OK) {
- ALOGI("block model buffer attach failed: err = %s (%d)",
- StrMediaError(err).c_str(), err);
+ ALOGE("block model buffer attach failed: err = %s (%d)",
+ StrMediaError(err).c_str(), err);
return err;
}
}
+
if (offset + size > buffer->capacity()) {
+ mErrorLog.log(LOG_TAG, base::StringPrintf(
+ "buffer offset and size goes beyond the capacity: "
+ "offset=%zu, size=%zu, cap=%zu",
+ offset, size, buffer->capacity()));
return -EINVAL;
}
buffer->setRange(offset, size);
@@ -5530,14 +5755,13 @@
size_t MediaCodec::CreateFramesRenderedMessage(
const std::list<FrameRenderTracker::Info> &done, sp<AMessage> &msg) {
size_t index = 0;
-
for (std::list<FrameRenderTracker::Info>::const_iterator it = done.cbegin();
it != done.cend(); ++it) {
if (it->getRenderTimeNs() < 0) {
continue; // dropped frame from tracking
}
- msg->setInt64(AStringPrintf("%zu-media-time-us", index).c_str(), it->getMediaTimeUs());
- msg->setInt64(AStringPrintf("%zu-system-nano", index).c_str(), it->getRenderTimeNs());
+ msg->setInt64(base::StringPrintf("%zu-media-time-us", index).c_str(), it->getMediaTimeUs());
+ msg->setInt64(base::StringPrintf("%zu-system-nano", index).c_str(), it->getRenderTimeNs());
++index;
}
return index;
@@ -5553,16 +5777,28 @@
}
if (!isExecuting()) {
+ mErrorLog.log(LOG_TAG, base::StringPrintf(
+ "releaseOutputBuffer() is valid at Executing states; currently %s",
+ apiStateString().c_str()));
return -EINVAL;
}
if (index >= mPortBuffers[kPortIndexOutput].size()) {
+ mErrorLog.log(LOG_TAG, base::StringPrintf(
+ "index out of range (index=%zu)", mPortBuffers[kPortIndexOutput].size()));
return -ERANGE;
}
BufferInfo *info = &mPortBuffers[kPortIndexOutput][index];
- if (info->mData == nullptr || !info->mOwnedByClient) {
+ if (!info->mOwnedByClient) {
+ mErrorLog.log(LOG_TAG, base::StringPrintf(
+ "client does not own the buffer #%zu", index));
+ return -EACCES;
+ }
+ if (info->mData == nullptr) {
+ mErrorLog.log(LOG_TAG, base::StringPrintf(
+ "Fatal error: null buffer for index %zu", index));
return -EACCES;
}
@@ -5579,11 +5815,13 @@
int64_t mediaTimeUs = -1;
buffer->meta()->findInt64("timeUs", &mediaTimeUs);
+ bool noRenderTime = false;
int64_t renderTimeNs = 0;
if (!msg->findInt64("timestampNs", &renderTimeNs)) {
// use media timestamp if client did not request a specific render timestamp
ALOGV("using buffer PTS of %lld", (long long)mediaTimeUs);
renderTimeNs = mediaTimeUs * 1000;
+ noRenderTime = true;
}
if (mSoftRenderer != NULL) {
@@ -5601,10 +5839,33 @@
}
}
}
+
+ // If rendering to the screen, then schedule a time in the future to poll to see if this
+ // frame was ever rendered to seed onFrameRendered callbacks.
+ if (mIsSurfaceToScreen) {
+ // can't initialize this in the constructor because the Looper parent class needs to be
+ // initialized first
+ if (mMsgPollForRenderedBuffers == nullptr) {
+ mMsgPollForRenderedBuffers = new AMessage(kWhatPollForRenderedBuffers, this);
+ }
+ // Schedule the poll to occur 100ms after the render time - should be safe for
+ // determining if the frame was ever rendered. If no render time was specified, the
+ // presentation timestamp is used instead, which almost certainly occurs in the past,
+ // since it's almost always a zero-based offset from the start of the stream. In these
+ // scenarios, we expect the frame to be rendered with no delay.
+ int64_t delayUs = noRenderTime ? 0 : renderTimeNs / 1000 - ALooper::GetNowUs();
+ delayUs += 100 * 1000; /* 100ms in microseconds */
+ status_t err =
+ mMsgPollForRenderedBuffers->postUnique(/* token= */ mMsgPollForRenderedBuffers,
+ delayUs);
+ if (err != OK) {
+ ALOGE("unexpected failure to post pollForRenderedBuffers: %d", err);
+ }
+ }
status_t err = mBufferChannel->renderOutputBuffer(buffer, renderTimeNs);
if (err == NO_INIT) {
- ALOGE("rendering to non-initilized(obsolete) surface");
+ mErrorLog.log(LOG_TAG, "rendering to non-initialized(obsolete) surface");
return err;
}
if (err != OK) {
@@ -5852,6 +6113,9 @@
}
status_t MediaCodec::onSetParameters(const sp<AMessage> ¶ms) {
+ if (mState == UNINITIALIZED || mState == INITIALIZING) {
+ return NO_INIT;
+ }
updateLowLatency(params);
mapFormat(mComponentName, params, nullptr, false);
updateTunnelPeek(params);
@@ -5884,12 +6148,14 @@
memcpy(csd->data() + 4, nalStart, nalSize);
mOutputFormat->setBuffer(
- AStringPrintf("csd-%u", csdIndex).c_str(), csd);
+ base::StringPrintf("csd-%u", csdIndex).c_str(), csd);
++csdIndex;
}
if (csdIndex != 2) {
+ mErrorLog.log(LOG_TAG, base::StringPrintf(
+ "codec config data contains %u NAL units; expected 2.", csdIndex));
return ERROR_MALFORMED;
}
} else {
@@ -5931,6 +6197,32 @@
mDeferredMessages.clear();
}
+std::string MediaCodec::apiStateString() {
+ const char *rval = NULL;
+ char rawbuffer[16]; // room for "%d"
+
+ switch (mState) {
+ case UNINITIALIZED:
+ rval = (mFlags & kFlagStickyError) ? "at Error state" : "at Released state";
+ break;
+ case INITIALIZING: rval = "while constructing"; break;
+ case INITIALIZED: rval = "at Uninitialized state"; break;
+ case CONFIGURING: rval = "during configure()"; break;
+ case CONFIGURED: rval = "at Configured state"; break;
+ case STARTING: rval = "during start()"; break;
+ case STARTED: rval = "at Running state"; break;
+ case FLUSHING: rval = "during flush()"; break;
+ case FLUSHED: rval = "at Flushed state"; break;
+ case STOPPING: rval = "during stop()"; break;
+ case RELEASING: rval = "during release()"; break;
+ default:
+ snprintf(rawbuffer, sizeof(rawbuffer), "at %d", mState);
+ rval = rawbuffer;
+ break;
+ }
+ return rval;
+}
+
std::string MediaCodec::stateString(State state) {
const char *rval = NULL;
char rawbuffer[16]; // room for "%d"
diff --git a/media/libstagefright/MediaCodecList.cpp b/media/libstagefright/MediaCodecList.cpp
index 78b7288..4ad3276 100644
--- a/media/libstagefright/MediaCodecList.cpp
+++ b/media/libstagefright/MediaCodecList.cpp
@@ -31,6 +31,7 @@
#include <media/stagefright/xmlparser/MediaCodecsXmlParser.h>
#include <media/stagefright/CCodec.h>
#include <media/stagefright/Codec2InfoBuilder.h>
+#include <media/stagefright/MediaCodecConstants.h>
#include <media/stagefright/MediaCodecList.h>
#include <media/stagefright/MediaCodecListOverrides.h>
#include <media/stagefright/MediaErrors.h>
@@ -356,17 +357,6 @@
void MediaCodecList::findMatchingCodecs(
const char *mime, bool encoder, uint32_t flags, const sp<AMessage> &format,
Vector<AString> *matches) {
- findMatchingCodecs(mime, encoder, flags, format, matches, /* checkProfile= */ true);
- if (matches->empty()) {
- ALOGV("no matching codec found, retrying without profile check");
- findMatchingCodecs(mime, encoder, flags, format, matches, /* checkProfile= */ false);
- }
-}
-
-//static
-void MediaCodecList::findMatchingCodecs(
- const char *mime, bool encoder, uint32_t flags, const sp<AMessage> &format,
- Vector<AString> *matches, bool checkProfile) {
matches->clear();
const sp<IMediaCodecList> list = getInstance();
@@ -390,7 +380,7 @@
AString componentName = info->getCodecName();
- if (!codecHandlesFormat(mime, info, format, checkProfile)) {
+ if (!codecHandlesFormat(mime, info, format)) {
ALOGV("skipping codec '%s' which doesn't satisfy format %s",
componentName.c_str(), format->debugString(2).c_str());
continue;
@@ -409,12 +399,23 @@
property_get_bool("debug.stagefright.swcodec", false)) {
matches->sort(compareSoftwareCodecsFirst);
}
+
+ // if we did NOT find anything, maybe it's because of a profile mismatch.
+ // let's recurse after trimming the profile from the format to see if that yields
+ // a suitable codec.
+ //
+ int profile = -1;
+ if (matches->empty() && format != nullptr && format->findInt32(KEY_PROFILE, &profile)) {
+ ALOGV("no matching codec found, retrying without profile");
+ sp<AMessage> formatNoProfile = format->dup();
+ formatNoProfile->removeEntryByName(KEY_PROFILE);
+ findMatchingCodecs(mime, encoder, flags, formatNoProfile, matches);
+ }
}
// static
bool MediaCodecList::codecHandlesFormat(
- const char *mime, const sp<MediaCodecInfo> &info, const sp<AMessage> &format,
- bool checkProfile) {
+ const char *mime, const sp<MediaCodecInfo> &info, const sp<AMessage> &format) {
if (format == nullptr) {
ALOGD("codecHandlesFormat: no format, so no extra checks");
@@ -522,7 +523,7 @@
}
int32_t profile = -1;
- if (checkProfile && format->findInt32("profile", &profile)) {
+ if (format->findInt32(KEY_PROFILE, &profile)) {
Vector<MediaCodecInfo::ProfileLevel> profileLevels;
capabilities->getSupportedProfileLevels(&profileLevels);
auto it = profileLevels.begin();
diff --git a/media/libstagefright/NuMediaExtractor.cpp b/media/libstagefright/NuMediaExtractor.cpp
index 0536f2a..d736734 100644
--- a/media/libstagefright/NuMediaExtractor.cpp
+++ b/media/libstagefright/NuMediaExtractor.cpp
@@ -639,9 +639,11 @@
numPageSamples = -1;
}
+ // insert, including accounting for the space used.
memcpy((uint8_t *)buffer->data() + mbuf->range_length(),
&numPageSamples,
sizeof(numPageSamples));
+ buffer->setRange(buffer->offset(), buffer->size() + sizeof(numPageSamples));
uint32_t type;
const void *data;
@@ -690,6 +692,8 @@
ssize_t minIndex = fetchAllTrackSamples();
+ buffer->setRange(0, 0); // start with an empty buffer
+
if (minIndex < 0) {
return ERROR_END_OF_STREAM;
}
@@ -705,25 +709,25 @@
sampleSize += sizeof(int32_t);
}
+ // capacity() is ok since we cleared out the buffer
if (buffer->capacity() < sampleSize) {
return -ENOMEM;
}
+ const size_t srclen = it->mBuffer->range_length();
const uint8_t *src =
(const uint8_t *)it->mBuffer->data()
+ it->mBuffer->range_offset();
- memcpy((uint8_t *)buffer->data(), src, it->mBuffer->range_length());
+ memcpy((uint8_t *)buffer->data(), src, srclen);
+ buffer->setRange(0, srclen);
status_t err = OK;
if (info->mTrackFlags & kIsVorbis) {
+ // adjusts range when it inserts the extra bits
err = appendVorbisNumPageSamples(it->mBuffer, buffer);
}
- if (err == OK) {
- buffer->setRange(0, sampleSize);
- }
-
return err;
}
diff --git a/media/libstagefright/OWNERS b/media/libstagefright/OWNERS
index e67496e..f02e168 100644
--- a/media/libstagefright/OWNERS
+++ b/media/libstagefright/OWNERS
@@ -7,3 +7,5 @@
# go/android-fwk-media-solutions for info on areas of ownership.
include platform/frameworks/av:/media/janitors/media_solutions_OWNERS
+
+per-file Camera*.cpp = file:/camera/OWNERS
diff --git a/media/libstagefright/Utils.cpp b/media/libstagefright/Utils.cpp
index c5b5199..863177d 100644
--- a/media/libstagefright/Utils.cpp
+++ b/media/libstagefright/Utils.cpp
@@ -798,6 +798,8 @@
{ "dvb-audio-description", kKeyDvbAudioDescription},
{ "dvb-teletext-magazine-number", kKeyDvbTeletextMagazineNumber},
{ "dvb-teletext-page-number", kKeyDvbTeletextPageNumber},
+ { "profile", kKeyAudioProfile },
+ { "level", kKeyAudioLevel },
}
};
diff --git a/media/libstagefright/colorconversion/ColorConverter.cpp b/media/libstagefright/colorconversion/ColorConverter.cpp
index 5e7a4c4..9d2568e 100644
--- a/media/libstagefright/colorconversion/ColorConverter.cpp
+++ b/media/libstagefright/colorconversion/ColorConverter.cpp
@@ -33,10 +33,8 @@
#include <functional>
#include <sys/time.h>
-#define USE_LIBYUV
#define PERF_PROFILING 0
-
#if defined(__aarch64__) || defined(__ARM_NEON__)
#define USE_NEON_Y410 1
#else
@@ -48,6 +46,48 @@
#endif
namespace android {
+typedef const struct libyuv::YuvConstants LibyuvConstants;
+
+struct LibyuvConstPair {
+ const LibyuvConstants *yuv;
+ const LibyuvConstants *yvu;
+};
+
+// Function to resolve YUV Matrices defined in libyuv
+static LibyuvConstPair getLibYUVMatrix(
+ const ColorConverter::ColorSpace &colorSpace, bool is10Bit) {
+ LibyuvConstPair matrix = {nullptr, nullptr};
+ const bool isFullRange = (colorSpace.mRange == ColorUtils::kColorRangeFull);
+ if (colorSpace.isI601()) {
+ matrix.yuv = &libyuv::kYuvI601Constants;
+ matrix.yvu = &libyuv::kYvuI601Constants;
+ } else if (colorSpace.isJ601()) {
+ matrix.yuv = &libyuv::kYuvJPEGConstants;
+ matrix.yvu = &libyuv::kYvuJPEGConstants;
+ } else if (colorSpace.isH709()) {
+ matrix.yuv = &libyuv::kYuvH709Constants;
+ matrix.yvu = &libyuv::kYvuH709Constants;
+ } else if (colorSpace.isF709()) {
+ matrix.yuv = &libyuv::kYuvF709Constants;
+ matrix.yvu = &libyuv::kYvuF709Constants;
+ } else if (colorSpace.isBt2020()) {
+ matrix.yuv = &libyuv::kYuv2020Constants;
+ matrix.yvu = &libyuv::kYvu2020Constants;
+ } else if (colorSpace.isBtV2020()) {
+ matrix.yuv = &libyuv::kYuvV2020Constants;
+ matrix.yvu = &libyuv::kYvuV2020Constants;
+ } else {
+ // unspecified
+ if (isFullRange) {
+ matrix.yuv = is10Bit ? &libyuv::kYuvV2020Constants : &libyuv::kYuvJPEGConstants;
+ matrix.yvu = is10Bit ? &libyuv::kYvuV2020Constants : &libyuv::kYvuJPEGConstants;
+ } else {
+ matrix.yuv = is10Bit ? &libyuv::kYuv2020Constants : &libyuv::kYuvI601Constants;
+ matrix.yvu = is10Bit ? &libyuv::kYvu2020Constants : &libyuv::kYvuI601Constants;
+ }
+ }
+ return matrix;
+}
static bool isRGB(OMX_COLOR_FORMATTYPE colorFormat) {
return colorFormat == OMX_COLOR_Format16bitRGB565
@@ -56,28 +96,234 @@
|| colorFormat == COLOR_Format32bitABGR2101010;
}
-bool ColorConverter::ColorSpace::isBt2020() const {
- return (mStandard == ColorUtils::kColorStandardBT2020);
+// check for limited Range
+bool ColorConverter::ColorSpace::isLimitedRange() const {
+ return mRange == ColorUtils::kColorRangeLimited;
}
-bool ColorConverter::ColorSpace::isH420() const {
+// BT.2020 limited range YUV to RGB
+bool ColorConverter::ColorSpace::isBt2020() const {
+ return (mStandard == ColorUtils::kColorStandardBT2020
+ && mRange == ColorUtils::kColorRangeLimited);
+}
+
+// BT.2020 full range YUV to RGB
+bool ColorConverter::ColorSpace::isBtV2020() const {
+ return (mStandard == ColorUtils::kColorStandardBT2020
+ && mRange == ColorUtils::kColorRangeFull);
+}
+
+// BT.709 full range YUV to RGB
+bool ColorConverter::ColorSpace::isF709() const {
+ return (mStandard == ColorUtils::kColorStandardBT709
+ && mRange == ColorUtils::kColorRangeFull);
+}
+
+// BT.709 limited range YUV to RGB
+bool ColorConverter::ColorSpace::isH709() const {
return (mStandard == ColorUtils::kColorStandardBT709)
&& (mRange == ColorUtils::kColorRangeLimited);
}
+// BT.601 limited range YUV to RGB
// the matrix coefficients are the same for both 601.625 and 601.525 standards
-bool ColorConverter::ColorSpace::isI420() const {
+bool ColorConverter::ColorSpace::isI601() const {
return ((mStandard == ColorUtils::kColorStandardBT601_625)
|| (mStandard == ColorUtils::kColorStandardBT601_525))
&& (mRange == ColorUtils::kColorRangeLimited);
}
-bool ColorConverter::ColorSpace::isJ420() const {
+// BT.601 full range YUV to RGB
+bool ColorConverter::ColorSpace::isJ601() const {
return ((mStandard == ColorUtils::kColorStandardBT601_625)
|| (mStandard == ColorUtils::kColorStandardBT601_525))
&& (mRange == ColorUtils::kColorRangeFull);
}
+// Utility functions for MediaImage2
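+// The planar helper below describes the conventional layout: a full-resolution
+// Y plane of stride x vstride samples, followed by half-resolution U and V
+// planes at byte offsets stride * vstride and stride * vstride * 5 / 4.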
+static MediaImage2 CreateYUV420PlanarMediaImage2(
+ uint32_t width, uint32_t height, uint32_t stride,
+ uint32_t vstride, uint32_t bitDepth) {
+ const uint32_t componentBytes = (bitDepth + 7) / 8;
+ return MediaImage2 {
+ .mType = MediaImage2::MEDIA_IMAGE_TYPE_YUV,
+ .mNumPlanes = 3,
+ .mWidth = width,
+ .mHeight = height,
+ .mBitDepth = bitDepth,
+ .mBitDepthAllocated = componentBytes * 8,
+ .mPlane = {
+ {
+ .mOffset = 0,
+ .mColInc = static_cast<int32_t>(componentBytes),
+ .mRowInc = static_cast<int32_t>(stride),
+ .mHorizSubsampling = 1,
+ .mVertSubsampling = 1,
+ },
+ {
+ .mOffset = stride * vstride,
+ .mColInc = static_cast<int32_t>(componentBytes),
+ .mRowInc = static_cast<int32_t>(stride / 2),
+ .mHorizSubsampling = 2,
+ .mVertSubsampling = 2,
+ },
+ {
+ .mOffset = stride * vstride * 5 / 4,
+ .mColInc = static_cast<int32_t>(componentBytes),
+ .mRowInc = static_cast<int32_t>(stride / 2),
+ .mHorizSubsampling = 2,
+ .mVertSubsampling = 2,
+ }
+ },
+ };
+}
+
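+// The semi-planar helper below describes a full Y plane followed by a single
+// interleaved chroma plane; uv == true places U first (NV12), uv == false
+// places V first (NV21).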
+static MediaImage2 CreateYUV420SemiPlanarMediaImage2(
+ uint32_t width, uint32_t height, uint32_t stride,
+ uint32_t vstride, uint32_t bitDepth, bool uv = true /*nv12 or not*/) {
+ const uint32_t componentBytes = (bitDepth + 7) / 8;
+ return MediaImage2 {
+ .mType = MediaImage2::MEDIA_IMAGE_TYPE_YUV,
+ .mNumPlanes = 3,
+ .mWidth = width,
+ .mHeight = height,
+ .mBitDepth = bitDepth,
+ .mBitDepthAllocated = componentBytes * 8,
+ .mPlane = {
+ {
+ .mOffset = 0,
+ .mColInc = static_cast<int32_t>(componentBytes),
+ .mRowInc = static_cast<int32_t>(stride),
+ .mHorizSubsampling = 1,
+ .mVertSubsampling = 1,
+ },
+ {
+ .mOffset = stride * vstride + (uv ? 0 : componentBytes),
+ .mColInc = static_cast<int32_t>(2 * componentBytes),
+ .mRowInc = static_cast<int32_t>(stride),
+ .mHorizSubsampling = 2,
+ .mVertSubsampling = 2,
+ },
+ {
+ .mOffset = stride * vstride + (uv ? componentBytes : 0),
+ .mColInc = static_cast<int32_t>(2 * componentBytes),
+ .mRowInc = static_cast<int32_t>(stride),
+ .mHorizSubsampling = 2,
+ .mVertSubsampling = 2,
+ }
+ },
+ };
+}
+
+ColorConverter::Image::Image(const MediaImage2& img)
+ :mImage(img),
+ mLayout(ImageLayoutUnknown),
+ mSampling(ImageSamplingUnknown) {
+ const MediaImage2::PlaneInfo &yPlane =
+ img.mPlane[MediaImage2::PlaneIndex::Y];
+ const MediaImage2::PlaneInfo &uPlane =
+ img.mPlane[MediaImage2::PlaneIndex::U];
+ const MediaImage2::PlaneInfo &vPlane =
+ img.mPlane[MediaImage2::PlaneIndex::V];
+
+ if (mImage.mNumPlanes != 3) {
+ ALOGE("Conversion error: MediaImage2 mNumPlanes != 3");
+ mLayout = ImageLayoutUnknown;
+ mSampling = ImageSamplingUnknown;
+ mBitDepth = ImageBitDepthInvalid;
+ return;
+ }
+
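+ // Classify the layout from the plane descriptors: a column increment of 1 on
+ // every plane indicates planar data, while a chroma column increment of 2 with
+ // U and V offsets one byte apart indicates semi-planar data.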
+ if (mImage.mBitDepth == 8
+ && yPlane.mColInc == 1
+ && uPlane.mColInc == 1
+ && vPlane.mColInc == 1
+ && yPlane.mVertSubsampling == 1
+ && uPlane.mVertSubsampling == 2
+ && vPlane.mVertSubsampling == 2) {
+ mLayout = ImageLayout420Planar;
+ mSampling = ImageSamplingYUV420;
+ } else if (mImage.mBitDepth == 8
+ && yPlane.mColInc == 1
+ && uPlane.mColInc == 2
+ && vPlane.mColInc == 2
+ && yPlane.mVertSubsampling == 1
+ && uPlane.mVertSubsampling == 2
+ && vPlane.mVertSubsampling == 2
+ && ((vPlane.mOffset == uPlane.mOffset + 1) ||
+ (uPlane.mOffset == vPlane.mOffset + 1))) {
+ mLayout = ImageLayout420SemiPlanar;
+ mSampling = ImageSamplingYUV420;
+ }
+
+ mBitDepth = ImageBitDepthInvalid;
+ switch (img.mBitDepth) {
+ case 8:
+ mBitDepth = ImageBitDepth8;
+ break;
+
+ case 10:
+ case 12:
+ case 16:
+ default:
+ // TODO: Implement 10b, 12b and 16b using MediaImage2
+ mBitDepth = ImageBitDepthInvalid;
+ }
+
+}
+
+status_t ColorConverter::Image::getYUVPlaneOffsetAndStride(
+ const BitmapParams &src,
+ uint32_t *y_offset,
+ uint32_t *u_offset,
+ uint32_t *v_offset,
+ size_t *y_stride,
+ size_t *u_stride,
+ size_t *v_stride) const {
+
+ if (y_offset == nullptr || u_offset == nullptr || v_offset == nullptr
+ || y_stride == nullptr || u_stride == nullptr || v_stride == nullptr) {
+ return ERROR_UNSUPPORTED;
+ }
+
+ if (mImage.mNumPlanes != 3) {
+ return ERROR_UNSUPPORTED;
+ }
+
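+ // Each plane's offset starts at its base offset and advances by the crop
+ // origin, scaled down by that plane's subsampling factors.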
+ const MediaImage2::PlaneInfo &yPlane = mImage.mPlane[MediaImage2::PlaneIndex::Y];
+ *y_offset = yPlane.mOffset
+ + src.mCropTop * yPlane.mRowInc
+ + src.mCropLeft * yPlane.mColInc;
+
+ const MediaImage2::PlaneInfo &uPlane = mImage.mPlane[MediaImage2::PlaneIndex::U];
+ *u_offset = uPlane.mOffset
+ + (src.mCropTop / uPlane.mVertSubsampling) * uPlane.mRowInc
+ + (src.mCropLeft / uPlane.mHorizSubsampling) * uPlane.mColInc;
+
+ const MediaImage2::PlaneInfo &vPlane = mImage.mPlane[MediaImage2::PlaneIndex::V];
+ *v_offset = vPlane.mOffset
+ + (src.mCropTop / vPlane.mVertSubsampling) * vPlane.mRowInc
+ + (src.mCropLeft / vPlane.mHorizSubsampling) * vPlane.mColInc;
+
+ *y_stride = yPlane.mRowInc;
+ *u_stride = uPlane.mRowInc;
+ *v_stride = vPlane.mRowInc;
+
+ return OK;
+}
+
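+// NV21 stores V before U in the interleaved chroma plane, so the U offset is
+// exactly one chroma component past the V offset.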
+bool ColorConverter::Image::isNV21() const {
+ if (getLayout() == ImageLayout420SemiPlanar) {
+ const MediaImage2::PlaneInfo &uPlane = mImage.mPlane[MediaImage2::PlaneIndex::U];
+ const MediaImage2::PlaneInfo &vPlane = mImage.mPlane[MediaImage2::PlaneIndex::V];
+
+ int componentBytes = (mImage.mBitDepthAllocated) / 8;
+
+ return (((vPlane.mOffset + componentBytes) == uPlane.mOffset));
+ }
+ return false;
+}
+
/**
* This class approximates the standard YUV to RGB conversions by factoring the matrix
* coefficients to 1/256th-s (as dividing by 256 is easy to do with right shift). The chosen value
@@ -227,8 +473,42 @@
mClip10Bit = NULL;
}
+// Set the source MediaImage2 (used for flexible YUV formats)
+void ColorConverter::setSrcMediaImage2(MediaImage2 img) {
+ mSrcImage = Image(img);
+ }
+
+bool ColorConverter::isValidForMediaImage2() const {
+
+ if (!mSrcImage
+ || mSrcImage->getMediaImage2().mType != MediaImage2::MEDIA_IMAGE_TYPE_YUV) {
+ // TODO: support Yonly or RGB etc?
+ return false;
+ }
+ // try to identify the src format
+
+ BitDepth_t srcBitDepth = mSrcImage->getBitDepth();
+
+ //TODO: support 12b and 16b ?
+ if (srcBitDepth == ImageBitDepthInvalid) {
+ return false;
+ }
+
+ return ((srcBitDepth == ImageBitDepth8 &&
+ (mDstFormat == OMX_COLOR_Format16bitRGB565
+ || mDstFormat == OMX_COLOR_Format32BitRGBA8888
+ || mDstFormat == OMX_COLOR_Format32bitBGRA8888))
+
+ || (srcBitDepth == ImageBitDepth10
+ && (mDstFormat == COLOR_Format32bitABGR2101010)));
+}
+
bool ColorConverter::isValid() const {
switch ((int32_t)mSrcFormat) {
+ case COLOR_FormatYUV420Flexible:
+ return isValidForMediaImage2();
+ break;
+
case OMX_COLOR_FormatYUV420Planar16:
if (mDstFormat == OMX_COLOR_FormatYUV444Y410) {
return true;
@@ -240,22 +520,23 @@
|| mDstFormat == OMX_COLOR_Format32bitBGRA8888;
case OMX_COLOR_FormatCbYCrY:
- case OMX_QCOM_COLOR_FormatYVU420SemiPlanar:
- case OMX_TI_COLOR_FormatYUV420PackedSemiPlanar:
return mDstFormat == OMX_COLOR_Format16bitRGB565;
case OMX_COLOR_FormatYUV420SemiPlanar:
-#ifdef USE_LIBYUV
+ case OMX_QCOM_COLOR_FormatYVU420SemiPlanar:
+ case OMX_TI_COLOR_FormatYUV420PackedSemiPlanar:
+ if (mSrcImage) {
+ return isValidForMediaImage2();
+ }
return mDstFormat == OMX_COLOR_Format16bitRGB565
|| mDstFormat == OMX_COLOR_Format32BitRGBA8888
|| mDstFormat == OMX_COLOR_Format32bitBGRA8888;
-#else
- return mDstFormat == OMX_COLOR_Format16bitRGB565;
-#endif
+
case COLOR_FormatYUVP010:
return mDstFormat == COLOR_Format32bitABGR2101010;
default:
+ //TODO: Should this be enabled for MediaImage2?
return false;
}
}
@@ -320,6 +601,13 @@
mStride = mWidth;
break;
+ case COLOR_FormatYUV420Flexible:
+ // MediaImage2 should be used.
+ mBpp = 1;
+ mStride = mWidth;
+
+ break;
+
default:
ALOGE("Unsupported color format %d", mColorFormat);
mBpp = 1;
@@ -340,6 +628,14 @@
return mCropBottom - mCropTop + 1;
}
+bool ColorConverter::BitmapParams::isValid() const {
+ if (!((mStride & 1) == 0 // stride must be even
+ && mStride >= mBpp * cropWidth())) {
+ return false;
+ }
+ return true;
+}
+
status_t ColorConverter::convert(
const void *srcBits,
size_t srcWidth, size_t srcHeight, size_t srcStride,
@@ -352,83 +648,83 @@
BitmapParams src(
const_cast<void *>(srcBits),
srcWidth, srcHeight, srcStride,
- srcCropLeft, srcCropTop, srcCropRight, srcCropBottom, mSrcFormat);
+ srcCropLeft, srcCropTop, srcCropRight, srcCropBottom,
+ mSrcFormat);
BitmapParams dst(
dstBits,
dstWidth, dstHeight, dstStride,
dstCropLeft, dstCropTop, dstCropRight, dstCropBottom, mDstFormat);
- if (!((src.mCropLeft & 1) == 0
- && src.cropWidth() == dst.cropWidth()
- && src.cropHeight() == dst.cropHeight())) {
+ if (!(src.isValid()
+ && dst.isValid()
+ && (src.mCropLeft & 1) == 0
+ && src.cropWidth() == dst.cropWidth()
+ && src.cropHeight() == dst.cropHeight())) {
return ERROR_UNSUPPORTED;
}
-
- status_t err;
-
- switch ((int32_t)mSrcFormat) {
- case OMX_COLOR_FormatYUV420Planar:
-#ifdef USE_LIBYUV
- err = convertYUV420PlanarUseLibYUV(src, dst);
-#else
- err = convertYUV420Planar(src, dst);
+#if PERF_PROFILING
+ int64_t startTimeUs = ALooper::GetNowUs();
#endif
+ status_t err;
+ switch ((int32_t)mSrcFormat) {
+ case COLOR_FormatYUV420Flexible:
+ err = convertYUVMediaImage(src, dst);
+ break;
+
+ case OMX_COLOR_FormatYUV420Planar:
+ if (!mSrcImage) {
+ mSrcImage = Image(CreateYUV420PlanarMediaImage2(
+ srcWidth, srcHeight, srcStride, srcHeight, 8 /*bitDepth*/));
+ }
+ err = convertYUVMediaImage(src, dst);
+
break;
case OMX_COLOR_FormatYUV420Planar16:
- {
-#if PERF_PROFILING
- int64_t startTimeUs = ALooper::GetNowUs();
-#endif
err = convertYUV420Planar16(src, dst);
-#if PERF_PROFILING
- int64_t endTimeUs = ALooper::GetNowUs();
- ALOGD("convertYUV420Planar16 took %lld us", (long long) (endTimeUs - startTimeUs));
-#endif
break;
- }
case COLOR_FormatYUVP010:
- {
-#if PERF_PROFILING
- int64_t startTimeUs = ALooper::GetNowUs();
-#endif
err = convertYUVP010(src, dst);
-#if PERF_PROFILING
- int64_t endTimeUs = ALooper::GetNowUs();
- ALOGD("convertYUVP010 took %lld us", (long long) (endTimeUs - startTimeUs));
-#endif
+
break;
- }
case OMX_COLOR_FormatCbYCrY:
err = convertCbYCrY(src, dst);
break;
case OMX_QCOM_COLOR_FormatYVU420SemiPlanar:
- err = convertQCOMYUV420SemiPlanar(src, dst);
+ if (!mSrcImage) {
+ mSrcImage = Image(CreateYUV420SemiPlanarMediaImage2(
+ srcWidth, srcHeight, srcStride, srcHeight, 8 /*bitDepth*/, false));
+ }
+ err = convertYUVMediaImage(src, dst);
+
break;
case OMX_COLOR_FormatYUV420SemiPlanar:
-#ifdef USE_LIBYUV
- err = convertYUV420SemiPlanarUseLibYUV(src, dst);
-#else
- err = convertYUV420SemiPlanar(src, dst);
-#endif
- break;
-
case OMX_TI_COLOR_FormatYUV420PackedSemiPlanar:
- err = convertTIYUV420PackedSemiPlanar(src, dst);
+ if (!mSrcImage) {
+ mSrcImage = Image(CreateYUV420SemiPlanarMediaImage2(
+ srcWidth, srcHeight, srcStride, srcHeight, 8 /*bitDepth*/));
+ }
+ err = convertYUVMediaImage(src, dst);
+
break;
default:
- {
+
CHECK(!"Should not be here. Unknown color conversion.");
break;
- }
}
+#if PERF_PROFILING
+ int64_t endTimeUs = ALooper::GetNowUs();
+ ALOGD("%s image took %lld us", asString_ColorFormat(mSrcFormat,"Unknown"),
+ (long long) (endTimeUs - startTimeUs));
+#endif
+
return err;
}
@@ -466,6 +762,7 @@
}
}
+// Interleaved YUV 422 CbYCrY to RGB565
status_t ColorConverter::convertCbYCrY(
const BitmapParams &src, const BitmapParams &dst) {
// XXX Untested
@@ -488,10 +785,10 @@
+ dst.mCropTop * dst.mWidth + dst.mCropLeft;
const uint8_t *src_ptr = (const uint8_t *)src.mBits
- + (src.mCropTop * dst.mWidth + src.mCropLeft) * 2;
+ + (src.mCropTop * src.mWidth + src.mCropLeft) * 2;
for (size_t y = 0; y < src.cropHeight(); ++y) {
- for (size_t x = 0; x < src.cropWidth(); x += 2) {
+ for (size_t x = 0; x < src.cropWidth() - 1; x += 2) {
signed y1 = (signed)src_ptr[2 * x + 1] - _c16;
signed y2 = (signed)src_ptr[2 * x + 3] - _c16;
signed u = (signed)src_ptr[2 * x] - 128;
@@ -536,67 +833,103 @@
return OK;
}
+status_t ColorConverter::getSrcYUVPlaneOffsetAndStride(
+ const BitmapParams &src,
+ uint32_t *y_offset, uint32_t *u_offset, uint32_t *v_offset,
+ size_t *y_stride, size_t *u_stride, size_t *v_stride) const {
+ if (y_offset == nullptr || u_offset == nullptr || v_offset == nullptr
+ || y_stride == nullptr || u_stride == nullptr || v_stride == nullptr) {
+ ALOGE("nullptrs given for yuv source offset / stride");
+ return ERROR_MALFORMED;
+ }
+
+ if (mSrcImage) {
+ // if we have a MediaImage2, get the info from it
+ return mSrcImage->getYUVPlaneOffsetAndStride(src, y_offset, u_offset, v_offset,
+ y_stride, u_stride, v_stride);
+ }
+ return ERROR_UNSUPPORTED;
+}
/*
libyuv supports the following color spaces:
- I420: BT.601 limited range
- J420: BT.601 full range (jpeg)
- H420: BT.709 limited range
+ I601: BT.601 limited range
+ J601: BT.601 full range (jpeg)
+ H709: BT.709 limited range
+ F709: BT.709 Full range
+ 2020: BT.2020 limited range
+ V2020: BT.2020 Full range
*/
-#define DECLARE_YUV2RGBFUNC(func, rgb) int (*func)( \
- const uint8_t*, int, const uint8_t*, int, \
- const uint8_t*, int, uint8_t*, int, int, int) \
- = mSrcColorSpace.isH420() ? libyuv::H420To##rgb \
- : mSrcColorSpace.isJ420() ? libyuv::J420To##rgb \
- : libyuv::I420To##rgb
-
status_t ColorConverter::convertYUV420PlanarUseLibYUV(
const BitmapParams &src, const BitmapParams &dst) {
- // Fall back to our conversion if libyuv does not support the color space.
- // I420 (BT.601 limited) is default, so don't fall back if we end up using it anyway.
- if (!mSrcColorSpace.isH420() && !mSrcColorSpace.isJ420()
- // && !mSrcColorSpace.isI420() /* same as line below */
- && getMatrix() != &BT601_LIMITED) {
- return convertYUV420Planar(src, dst);
+ LibyuvConstPair yuvConstants =
+ getLibYUVMatrix(mSrcColorSpace, false);
+
+ uint32_t y_offset = 0, u_offset = 0, v_offset = 0;
+ size_t src_stride_y = 0, src_stride_u = 0, src_stride_v = 0;
+ if (getSrcYUVPlaneOffsetAndStride(src, &y_offset, &u_offset, &v_offset,
+ &src_stride_y, &src_stride_u, &src_stride_v) != OK) {
+ return ERROR_UNSUPPORTED;
}
uint8_t *dst_ptr = (uint8_t *)dst.mBits
+ dst.mCropTop * dst.mStride + dst.mCropLeft * dst.mBpp;
- const uint8_t *src_y =
- (const uint8_t *)src.mBits + src.mCropTop * src.mStride + src.mCropLeft;
+ const uint8_t *src_y = (const uint8_t *)src.mBits + y_offset;
- const uint8_t *src_u =
- (const uint8_t *)src.mBits + src.mStride * src.mHeight
- + (src.mCropTop / 2) * (src.mStride / 2) + (src.mCropLeft / 2);
+ const uint8_t *src_u = (const uint8_t *)src.mBits + u_offset;
- const uint8_t *src_v =
- src_u + (src.mStride / 2) * (src.mHeight / 2);
+ const uint8_t *src_v = (const uint8_t *)src.mBits + v_offset;
switch (mDstFormat) {
case OMX_COLOR_Format16bitRGB565:
{
- DECLARE_YUV2RGBFUNC(func, RGB565);
- (*func)(src_y, src.mStride, src_u, src.mStride / 2, src_v, src.mStride / 2,
- (uint8_t *)dst_ptr, dst.mStride, src.cropWidth(), src.cropHeight());
- break;
- }
+ libyuv::I420ToRGB565Matrix(src_y,
+ src_stride_y,
+ src_u,
+ src_stride_u,
+ src_v,
+ src_stride_v,
+ dst_ptr,
+ dst.mStride,
+ yuvConstants.yuv,
+ src.cropWidth(),
+ src.cropHeight());
- case OMX_COLOR_Format32BitRGBA8888:
- {
- DECLARE_YUV2RGBFUNC(func, ABGR);
- (*func)(src_y, src.mStride, src_u, src.mStride / 2, src_v, src.mStride / 2,
- (uint8_t *)dst_ptr, dst.mStride, src.cropWidth(), src.cropHeight());
break;
}
case OMX_COLOR_Format32bitBGRA8888:
{
- DECLARE_YUV2RGBFUNC(func, ARGB);
- (*func)(src_y, src.mStride, src_u, src.mStride / 2, src_v, src.mStride / 2,
- (uint8_t *)dst_ptr, dst.mStride, src.cropWidth(), src.cropHeight());
+ libyuv::I420ToARGBMatrix(src_y,
+ src_stride_y,
+ src_u,
+ src_stride_u,
+ src_v,
+ src_stride_v,
+ (uint8_t*)dst_ptr,
+ dst.mStride,
+ yuvConstants.yuv,
+ src.cropWidth(),
+ src.cropHeight());
+ break;
+ }
+
+ case OMX_COLOR_Format32BitRGBA8888:
+ {
+ libyuv::I420ToARGBMatrix(src_y,
+ src_stride_y,
+ src_v,
+ src_stride_v,
+ src_u,
+ src_stride_u,
+ (uint8_t*)dst_ptr,
+ dst.mStride,
+ yuvConstants.yvu,
+ src.cropWidth(),
+ src.cropHeight());
break;
}
@@ -609,38 +942,90 @@
status_t ColorConverter::convertYUV420SemiPlanarUseLibYUV(
const BitmapParams &src, const BitmapParams &dst) {
- // Fall back to our conversion if libyuv does not support the color space.
- // libyuv only supports BT.601 limited range NV12. Don't fall back if we end up using it anyway.
- if (// !mSrcColorSpace.isI420() && /* same as below */
- getMatrix() != &BT601_LIMITED) {
- return convertYUV420SemiPlanar(src, dst);
- }
+ LibyuvConstPair yuvConstants =
+ getLibYUVMatrix(mSrcColorSpace, false);
+ uint32_t y_offset = 0, u_offset = 0, v_offset = 0;
+ size_t src_stride_y = 0, src_stride_u = 0, src_stride_v = 0;
+ if (getSrcYUVPlaneOffsetAndStride(src, &y_offset, &u_offset, &v_offset,
+ &src_stride_y, &src_stride_u, &src_stride_v) != OK) {
+ return ERROR_UNSUPPORTED;
+ }
+ (void)v_offset;
uint8_t *dst_ptr = (uint8_t *)dst.mBits
+ dst.mCropTop * dst.mStride + dst.mCropLeft * dst.mBpp;
- const uint8_t *src_y =
- (const uint8_t *)src.mBits + src.mCropTop * src.mStride + src.mCropLeft;
+ const uint8_t *src_y = (const uint8_t *)src.mBits + y_offset;
- const uint8_t *src_u =
- (const uint8_t *)src.mBits + src.mStride * src.mHeight
- + (src.mCropTop / 2) * src.mStride + src.mCropLeft;
+ const uint8_t *src_u = (const uint8_t *)src.mBits + u_offset;
+
+ const uint8_t *src_v = (const uint8_t *)src.mBits + v_offset;
+
+ bool isNV21 = (u_offset == (v_offset + 1));
+
+ // libyuv function signature for semi-planar formats
+ std::function<int(const uint8_t*, int,
+ const uint8_t*, int, uint8_t *, int,
+ LibyuvConstants *, int, int)> libyuvFunc;
switch (mDstFormat) {
case OMX_COLOR_Format16bitRGB565:
- libyuv::NV12ToRGB565(src_y, src.mStride, src_u, src.mStride, (uint8_t *)dst_ptr,
- dst.mStride, src.cropWidth(), src.cropHeight());
+ {
+ // Note: libyuv does not appear to provide a similar function for NV21
+ libyuv::NV12ToRGB565Matrix(src_y,
+ src_stride_y,
+ src_u,
+ src_stride_u,
+ (uint8_t*)dst_ptr,
+ dst.mStride,
+ yuvConstants.yuv,
+ src.cropWidth(),
+ src.cropHeight());
break;
-
+ }
case OMX_COLOR_Format32bitBGRA8888:
- libyuv::NV12ToARGB(src_y, src.mStride, src_u, src.mStride, (uint8_t *)dst_ptr,
- dst.mStride, src.cropWidth(), src.cropHeight());
+ {
+ if (src_stride_u != src_stride_v) {
+ return ERROR_UNSUPPORTED;
+ }
+
+ libyuvFunc = isNV21 ? libyuv::NV21ToARGBMatrix : libyuv::NV12ToARGBMatrix;
+
+ libyuvFunc(src_y,
+ src_stride_y,
+ isNV21 ? src_v : src_u,
+ // src_stride_v should equal src_stride_u, but the
+ // selection is written out explicitly for readability
+ isNV21 ? src_stride_v : src_stride_u,
+ (uint8_t*)dst_ptr,
+ dst.mStride,
+ yuvConstants.yuv,
+ src.cropWidth(),
+ src.cropHeight());
break;
+ }
case OMX_COLOR_Format32BitRGBA8888:
- libyuv::NV12ToABGR(src_y, src.mStride, src_u, src.mStride, (uint8_t *)dst_ptr,
- dst.mStride, src.cropWidth(), src.cropHeight());
+ {
+
+ if (src_stride_u != src_stride_v) {
+ return ERROR_UNSUPPORTED;
+ }
+
+ libyuvFunc = isNV21 ? libyuv::NV12ToARGBMatrix : libyuv::NV21ToARGBMatrix;
+
+ libyuvFunc(src_y,
+ src_stride_y,
+ isNV21 ? src_v : src_u,
+ // src_stride_v should be equal to src_stride_u
+ isNV21 ? src_stride_v : src_stride_u,
+ (uint8_t*)dst_ptr,
+ dst.mStride,
+ yuvConstants.yvu,
+ src.cropWidth(),
+ src.cropHeight());
break;
+ }
default:
return ERROR_UNSUPPORTED;
@@ -650,27 +1035,75 @@
}
std::function<void (void *, void *, void *, size_t,
- signed *, signed *, signed *, signed *)>
-getReadFromSrc(OMX_COLOR_FORMATTYPE srcFormat) {
- switch(srcFormat) {
- case OMX_COLOR_FormatYUV420Planar:
- return [](void *src_y, void *src_u, void *src_v, size_t x,
- signed *y1, signed *y2, signed *u, signed *v) {
- *y1 = ((uint8_t*)src_y)[x];
- *y2 = ((uint8_t*)src_y)[x + 1];
- *u = ((uint8_t*)src_u)[x / 2] - 128;
- *v = ((uint8_t*)src_v)[x / 2] - 128;
- };
- case OMX_COLOR_FormatYUV420Planar16:
+ signed *, signed *, signed *, signed *)>
+getReadFromChromaHorizSubsampled2Image8b(std::optional<MediaImage2> image,
+ OMX_COLOR_FORMATTYPE srcFormat) {
+ // This function reads the source only when both chroma planes
+ // are horizontally subsampled by 2; it returns two luma samples
+ // for each chroma sample.
+ if (image) {
+ uint32_t uColInc =
+ image->mPlane[MediaImage2::PlaneIndex::U].mColInc;
+ uint32_t vColInc =
+ image->mPlane[MediaImage2::PlaneIndex::V].mColInc;
+ uint32_t uHorizSubsampling =
+ image->mPlane[MediaImage2::PlaneIndex::U].mHorizSubsampling;
+ uint32_t vHorizSubsampling =
+ image->mPlane[MediaImage2::PlaneIndex::V].mHorizSubsampling;
+
+ if (!(uHorizSubsampling == 2 && vHorizSubsampling == 2)) {
+ return nullptr;
+ }
+
+ if (image->mBitDepthAllocated == 8) {
+
+ return [uColInc, vColInc, uHorizSubsampling, vHorizSubsampling]
+ (void *src_y, void *src_u, void *src_v, size_t x,
+ signed *y1, signed *y2, signed *u, signed *v) {
+ *y1 = ((uint8_t *)src_y)[x];
+ *y2 = ((uint8_t *)src_y)[x + 1];
+ *u = ((uint8_t *)src_u)[(x / uHorizSubsampling) * uColInc] - 128;
+ *v = ((uint8_t *)src_v)[(x / vHorizSubsampling) * vColInc] - 128;
+ };
+ }
+ }
+ if (srcFormat == OMX_COLOR_FormatYUV420Planar16) {
+ // OMX_COLOR_FormatYUV420Planar16
return [](void *src_y, void *src_u, void *src_v, size_t x,
signed *y1, signed *y2, signed *u, signed *v) {
- *y1 = (signed)(((uint16_t*)src_y)[x] >> 2);
- *y2 = (signed)(((uint16_t*)src_y)[x + 1] >> 2);
- *u = (signed)(((uint16_t*)src_u)[x / 2] >> 2) - 128;
- *v = (signed)(((uint16_t*)src_v)[x / 2] >> 2) - 128;
+ *y1 = (uint8_t)(((uint16_t*)src_y)[x] >> 2);
+ *y2 = (uint8_t)(((uint16_t*)src_y)[x + 1] >> 2);
+ *u = (uint8_t)(((uint16_t*)src_u)[x / 2] >> 2) - 128;
+ *v = (uint8_t)(((uint16_t*)src_v)[x / 2] >> 2) - 128;
};
- default:
- TRESPASS();
+ }
+ return nullptr;
+}
+
+std::function<void (void *, void *, void *, size_t,
+ signed *, signed *, signed *)>
+getReadFromImage(std::optional<MediaImage2> image, OMX_COLOR_FORMATTYPE &srcFormat) {
+ (void)srcFormat;
+ if (image) {
+ uint32_t uColInc =
+ image->mPlane[MediaImage2::PlaneIndex::U].mColInc;
+ uint32_t vColInc =
+ image->mPlane[MediaImage2::PlaneIndex::V].mColInc;
+ uint32_t uHorizSubsampling =
+ image->mPlane[MediaImage2::PlaneIndex::U].mHorizSubsampling;
+ uint32_t vHorizSubsampling =
+ image->mPlane[MediaImage2::PlaneIndex::V].mHorizSubsampling;
+
+ if (image->mBitDepthAllocated == 8) {
+
+ return [uColInc, vColInc, uHorizSubsampling, vHorizSubsampling]
+ (void *src_y, void *src_u, void *src_v, size_t x,
+ signed *y1, signed *u, signed *v) {
+ *y1 = ((uint8_t *)src_y)[x];
+ *u = ((uint8_t *)src_u)[(x / uHorizSubsampling) * uColInc] - 128;
+ *v = ((uint8_t *)src_v)[(x / vHorizSubsampling) * vColInc] - 128;
+ };
+ }
}
return nullptr;
}
@@ -769,8 +1202,178 @@
return nullptr;
}
-status_t ColorConverter::convertYUV420Planar(
+status_t ColorConverter::convertYUVMediaImage(
const BitmapParams &src, const BitmapParams &dst) {
+ // First see if this can be handled as 8-bit 420 planar or semi-planar data
+
+ if (!mSrcImage ||
+ mSrcImage->getMediaImage2().mType != MediaImage2::MEDIA_IMAGE_TYPE_YUV
+ || mSrcImage->getMediaImage2().mNumPlanes != 3) {
+ ALOGE("Cannot convert without MediaImage2 or MediaImage is not Valid YUV");
+ return ERROR_UNSUPPORTED;
+ }
+ if (mSrcImage->getBitDepth() == ImageBitDepth8
+ && mSrcImage->getSampling() == ImageSamplingYUV420) {
+ Layout_t layout = mSrcImage->getLayout();
+ switch (layout) {
+ case Layout_t::ImageLayout420Planar:
+ {
+ return convertYUV420PlanarUseLibYUV(src, dst);
+ break;
+ }
+
+ case Layout_t::ImageLayout420SemiPlanar:
+ {
+ // Note: libyuv doesn't support NV21 -> RGB565
+ if (!(mSrcImage->isNV21() && mDstFormat == OMX_COLOR_Format16bitRGB565)) {
+ status_t ret = convertYUV420SemiPlanarUseLibYUV(src, dst);
+ // This may fail when specific conditions for semi-planar formats
+ // are not met (e.g. strideU != strideV). In that case it returns
+ // before attempting the conversion, so no extra memcpy is done,
+ // and we fall through to the pixel-based processing below.
+ if (ret == OK) {
+ return ret;
+ }
+ }
+ break;
+ }
+ default:
+ // we will handle this case below.
+ break;
+ }
+ }
+ const struct Coeffs *matrix = getMatrix();
+ if (!matrix) {
+ return ERROR_UNSUPPORTED;
+ }
+
+ signed _b_u = matrix->_b_u;
+ signed _neg_g_u = -matrix->_g_u;
+ signed _neg_g_v = -matrix->_g_v;
+ signed _r_v = matrix->_r_v;
+ signed _y = matrix->_y;
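+ // The coefficients are pre-scaled by 256 (see the Coeffs comment above), so each
+ // component below is computed as (luma * _y + chroma terms + 128) / 256, where
+ // the +128 rounds before the divide.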
+
+ uint8_t *dst_ptr = (uint8_t *)dst.mBits
+ + dst.mCropTop * dst.mStride + dst.mCropLeft * dst.mBpp;
+
+
+ uint32_t y_offset = 0, u_offset = 0, v_offset = 0;
+ size_t src_stride_y = 0, src_stride_u = 0, src_stride_v = 0;
+ if (getSrcYUVPlaneOffsetAndStride(src, &y_offset, &u_offset, &v_offset,
+ &src_stride_y, &src_stride_u, &src_stride_v) != OK) {
+ return ERROR_UNSUPPORTED;
+ }
+ uint32_t uVertSubsampling =
+ mSrcImage->getMediaImage2().mPlane[MediaImage2::PlaneIndex::U].mVertSubsampling;
+ uint32_t vVertSubsampling =
+ mSrcImage->getMediaImage2().mPlane[MediaImage2::PlaneIndex::V].mVertSubsampling;
+
+ //TODO: optimize for chroma sampling, reading and writing multiple pixels
+ // within the same loop
+ signed _c16 = 0;
+ void *kAdjustedClip = nullptr;
+ if (mSrcImage->getBitDepth() != ImageBitDepth8) {
+ ALOGE("BitDepth != 8 for MediaImage2");
+ return ERROR_UNSUPPORTED;
+ }
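+ // Limited-range sources code luma with a +16 offset, which is subtracted before
+ // applying the matrix; full-range sources use the full 0-255 range.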
+ _c16 = mSrcColorSpace.mRange == ColorUtils::kColorRangeLimited ? 16 : 0;
+ kAdjustedClip = initClip();
+
+ auto writeToDst = getWriteToDst(mDstFormat, (void *)kAdjustedClip);
+ uint8_t *src_y = (uint8_t *)src.mBits + y_offset;
+ uint8_t *src_u = (uint8_t *)src.mBits + u_offset;
+ uint8_t *src_v = (uint8_t *)src.mBits + v_offset;
+
+ switch (mSrcImage->getSampling()) {
+
+ case ImageSamplingYUV420:
+ {
+ // get a read function for images whose chroma planes
+ // are horizontally subsampled by 2
+ auto readFromSrcImage = getReadFromChromaHorizSubsampled2Image8b(
+ mSrcImage->getMediaImage2(), mSrcFormat);
+ if (readFromSrcImage == nullptr) {
+ ALOGE("Cannot get a read function for this MediaImage2");
+ return ERROR_UNSUPPORTED;
+ }
+ for (size_t y = 0; y < src.cropHeight(); ++y) {
+ for (size_t x = 0; x < src.cropWidth(); x += 2) {
+ signed y1, y2, u, v;
+ readFromSrcImage(src_y, src_u, src_v, x, &y1, &y2, &u, &v);
+
+ signed u_b = u * _b_u;
+ signed u_g = u * _neg_g_u;
+ signed v_g = v * _neg_g_v;
+ signed v_r = v * _r_v;
+
+ y1 = y1 - _c16;
+ signed tmp1 = y1 * _y + 128;
+ signed b1 = (tmp1 + u_b) / 256;
+ signed g1 = (tmp1 + v_g + u_g) / 256;
+ signed r1 = (tmp1 + v_r) / 256;
+
+ y2 = y2 - _c16;
+ signed tmp2 = y2 * _y + 128;
+ signed b2 = (tmp2 + u_b) / 256;
+ signed g2 = (tmp2 + v_g + u_g) / 256;
+ signed r2 = (tmp2 + v_r) / 256;
+
+ bool uncropped = x + 1 < src.cropWidth();
+ writeToDst(dst_ptr + x * dst.mBpp, uncropped, r1, g1, b1, r2, g2, b2);
+ }
+ src_y += src_stride_y;
+ src_u += (((y + 1) % uVertSubsampling) == 0) ? src_stride_u : 0;
+ src_v += (((y + 1) % vVertSubsampling) == 0) ? src_stride_v : 0;
+
+ dst_ptr += dst.mStride;
+ }
+ break;
+ }
+
+ default:
+ {
+ // Interleaved or any other formats.
+ auto readFromSrcImage = getReadFromImage(mSrcImage->getMediaImage2(), mSrcFormat);
+ if (readFromSrcImage == nullptr) {
+ ALOGE("Cannot get a read function for this MediaImage2");
+ return ERROR_UNSUPPORTED;
+ }
+ for (size_t y = 0; y < src.cropHeight(); ++y) {
+ for (size_t x = 0; x < src.cropWidth(); x += 1) {
+ signed y1, y2, u, v;
+ readFromSrcImage(src_y, src_u, src_v, x, &y1, &u, &v);
+
+ signed u_b = u * _b_u;
+ signed u_g = u * _neg_g_u;
+ signed v_g = v * _neg_g_v;
+ signed v_r = v * _r_v;
+
+ y1 = y1 - _c16;
+ signed tmp1 = y1 * _y + 128;
+ signed b1 = (tmp1 + u_b) / 256;
+ signed g1 = (tmp1 + v_g + u_g) / 256;
+ signed r1 = (tmp1 + v_r) / 256;
+
+ writeToDst(dst_ptr + x * dst.mBpp, false, r1, g1, b1, 0, 0, 0);
+ }
+ src_y += src_stride_y;
+ src_u += (((y + 1) % uVertSubsampling) == 0) ? src_stride_u : 0;
+ src_v += (((y + 1) % vVertSubsampling) == 0) ? src_stride_v : 0;
+
+ dst_ptr += dst.mStride;
+ }
+ }
+ }
+ return OK;
+}
+
+status_t ColorConverter::convertYUV420Planar16(
+ const BitmapParams &src, const BitmapParams &dst) {
+ if (mDstFormat == OMX_COLOR_FormatYUV444Y410) {
+ return convertYUV420Planar16ToY410(src, dst);
+ }
+
const struct Coeffs *matrix = getMatrix();
if (!matrix) {
return ERROR_UNSUPPORTED;
@@ -785,7 +1388,7 @@
uint8_t *kAdjustedClip = initClip();
- auto readFromSrc = getReadFromSrc(mSrcFormat);
+ auto readFromSrc = getReadFromChromaHorizSubsampled2Image8b(std::nullopt, mSrcFormat);
auto writeToDst = getWriteToDst(mDstFormat, (void *)kAdjustedClip);
uint8_t *dst_ptr = (uint8_t *)dst.mBits
@@ -832,19 +1435,9 @@
dst_ptr += dst.mStride;
}
-
return OK;
}
-status_t ColorConverter::convertYUV420Planar16(
- const BitmapParams &src, const BitmapParams &dst) {
- if (mDstFormat == OMX_COLOR_FormatYUV444Y410) {
- return convertYUV420Planar16ToY410(src, dst);
- }
-
- return convertYUV420Planar(src, dst);
-}
-
status_t ColorConverter::convertYUVP010(
const BitmapParams &src, const BitmapParams &dst) {
if (mDstFormat == COLOR_Format32bitABGR2101010) {
@@ -1123,117 +1716,6 @@
#endif // USE_NEON_Y410
-status_t ColorConverter::convertQCOMYUV420SemiPlanar(
- const BitmapParams &src, const BitmapParams &dst) {
- const uint8_t *src_y =
- (const uint8_t *)src.mBits + src.mCropTop * src.mWidth + src.mCropLeft;
-
- const uint8_t *src_u =
- (const uint8_t *)src_y + src.mWidth * src.mHeight
- + src.mCropTop * src.mWidth + src.mCropLeft;
-
- /* QCOMYUV420SemiPlanar is NV21, while MediaCodec uses NV12 */
- return convertYUV420SemiPlanarBase(
- src, dst, src_y, src_u, src.mWidth /* row_inc */, true /* isNV21 */);
-}
-
-status_t ColorConverter::convertTIYUV420PackedSemiPlanar(
- const BitmapParams &src, const BitmapParams &dst) {
- const uint8_t *src_y =
- (const uint8_t *)src.mBits + src.mCropTop * src.mWidth + src.mCropLeft;
-
- const uint8_t *src_u =
- (const uint8_t *)src_y + src.mWidth * (src.mHeight - src.mCropTop / 2);
-
- return convertYUV420SemiPlanarBase(
- src, dst, src_y, src_u, src.mWidth /* row_inc */);
-}
-
-status_t ColorConverter::convertYUV420SemiPlanar(
- const BitmapParams &src, const BitmapParams &dst) {
- const uint8_t *src_y =
- (const uint8_t *)src.mBits + src.mCropTop * src.mStride + src.mCropLeft;
-
- const uint8_t *src_u =
- (const uint8_t *)src.mBits + src.mHeight * src.mStride +
- (src.mCropTop / 2) * src.mStride + src.mCropLeft;
-
- return convertYUV420SemiPlanarBase(
- src, dst, src_y, src_u, src.mStride /* row_inc */);
-}
-
-status_t ColorConverter::convertYUV420SemiPlanarBase(
- const BitmapParams &src, const BitmapParams &dst,
- const uint8_t *src_y, const uint8_t *src_u, size_t row_inc, bool isNV21) {
- const struct Coeffs *matrix = getMatrix();
- if (!matrix) {
- return ERROR_UNSUPPORTED;
- }
-
- signed _b_u = matrix->_b_u;
- signed _neg_g_u = -matrix->_g_u;
- signed _neg_g_v = -matrix->_g_v;
- signed _r_v = matrix->_r_v;
- signed _y = matrix->_y;
- signed _c16 = mSrcColorSpace.mRange == ColorUtils::kColorRangeLimited ? 16 : 0;
-
- uint8_t *kAdjustedClip = initClip();
-
- uint16_t *dst_ptr = (uint16_t *)((uint8_t *)
- dst.mBits + dst.mCropTop * dst.mStride + dst.mCropLeft * dst.mBpp);
-
- for (size_t y = 0; y < src.cropHeight(); ++y) {
- for (size_t x = 0; x < src.cropWidth(); x += 2) {
- signed y1 = (signed)src_y[x] - _c16;
- signed y2 = (signed)src_y[x + 1] - _c16;
-
- signed u = (signed)src_u[(x & ~1) + isNV21] - 128;
- signed v = (signed)src_u[(x & ~1) + !isNV21] - 128;
-
- signed u_b = u * _b_u;
- signed u_g = u * _neg_g_u;
- signed v_g = v * _neg_g_v;
- signed v_r = v * _r_v;
-
- signed tmp1 = y1 * _y + 128;
- signed b1 = (tmp1 + u_b) / 256;
- signed g1 = (tmp1 + v_g + u_g) / 256;
- signed r1 = (tmp1 + v_r) / 256;
-
- signed tmp2 = y2 * _y + 128;
- signed b2 = (tmp2 + u_b) / 256;
- signed g2 = (tmp2 + v_g + u_g) / 256;
- signed r2 = (tmp2 + v_r) / 256;
-
- uint32_t rgb1 =
- ((kAdjustedClip[r1] >> 3) << 11)
- | ((kAdjustedClip[g1] >> 2) << 5)
- | (kAdjustedClip[b1] >> 3);
-
- uint32_t rgb2 =
- ((kAdjustedClip[r2] >> 3) << 11)
- | ((kAdjustedClip[g2] >> 2) << 5)
- | (kAdjustedClip[b2] >> 3);
-
- if (x + 1 < src.cropWidth()) {
- *(uint32_t *)(&dst_ptr[x]) = (rgb2 << 16) | rgb1;
- } else {
- dst_ptr[x] = rgb1;
- }
- }
-
- src_y += row_inc;
-
- if (y & 1) {
- src_u += row_inc;
- }
-
- dst_ptr = (uint16_t*)((uint8_t*)dst_ptr + dst.mStride);
- }
-
- return OK;
-}
-
uint8_t *ColorConverter::initClip() {
if (mClip == NULL) {
mClip = new uint8_t[CLIP_RANGE_MAX_8BIT - CLIP_RANGE_MIN_8BIT + 1];
diff --git a/media/libstagefright/colorconversion/fuzzer/color_conversion_fuzzer.cpp b/media/libstagefright/colorconversion/fuzzer/color_conversion_fuzzer.cpp
index 7c2bfe5..b91f7dc 100644
--- a/media/libstagefright/colorconversion/fuzzer/color_conversion_fuzzer.cpp
+++ b/media/libstagefright/colorconversion/fuzzer/color_conversion_fuzzer.cpp
@@ -53,6 +53,7 @@
int32_t height) {
int32_t frameSize;
switch ((int32_t)colorFormat) {
+ case OMX_COLOR_FormatCbYCrY: // Interleaved YUV422
case OMX_COLOR_Format16bitRGB565: {
frameSize = 2 * stride * height;
break;
@@ -71,7 +72,6 @@
}
case OMX_COLOR_FormatYUV420Planar:
case OMX_COLOR_FormatYUV420SemiPlanar:
- case OMX_COLOR_FormatCbYCrY:
case OMX_QCOM_COLOR_FormatYVU420SemiPlanar:
case OMX_TI_COLOR_FormatYUV420PackedSemiPlanar:
default: {
diff --git a/media/libstagefright/data/media_codecs_sw.xml b/media/libstagefright/data/media_codecs_sw.xml
index cd801b8..d3fd790 100644
--- a/media/libstagefright/data/media_codecs_sw.xml
+++ b/media/libstagefright/data/media_codecs_sw.xml
@@ -360,8 +360,7 @@
<Feature name="bitrate-modes" value="VBR,CBR" />
<Attribute name="software-codec" />
</MediaCodec>
- <MediaCodec name="c2.android.av1.encoder" type="video/av01" variant="slow-cpu,!slow-cpu">
- <!-- TODO: implement a mechanism to prevent AV1 Encoder usage on pre-U devices -->
+ <MediaCodec name="c2.android.av1.encoder" type="video/av01" enabled="false" minsdk="34" variant="slow-cpu,!slow-cpu">
<Limit name="alignment" value="2x2" />
<Limit name="block-size" value="16x16" />
<Variant name="!slow-cpu">
@@ -375,6 +374,7 @@
<Limit name="bitrate" range="1-5000000" />
</Variant>
<Limit name="quality" range="0-100" default="80" />
+ <Limit name="complexity" range="0-5" default="0" />
<Feature name="bitrate-modes" value="VBR,CBR,CQ" />
<Attribute name="software-codec" />
</MediaCodec>
diff --git a/media/libstagefright/httplive/fuzzer/Android.bp b/media/libstagefright/httplive/fuzzer/Android.bp
index 85fd8b7..dd49714 100644
--- a/media/libstagefright/httplive/fuzzer/Android.bp
+++ b/media/libstagefright/httplive/fuzzer/Android.bp
@@ -62,5 +62,13 @@
"android-media-fuzzing-reports@google.com",
],
componentid: 155276,
+ hotlists: [
+ "4593311",
+ ],
+ description: "The fuzzer targets the APIs of libstagefright_httplive",
+ vector: "remote",
+ service_privilege: "privileged",
+ users: "multi_user",
+ fuzzed_code_usage: "shipped",
},
}
diff --git a/media/libstagefright/include/ACodecBufferChannel.h b/media/libstagefright/include/ACodecBufferChannel.h
index da962d1..903280f 100644
--- a/media/libstagefright/include/ACodecBufferChannel.h
+++ b/media/libstagefright/include/ACodecBufferChannel.h
@@ -94,9 +94,11 @@
size_t offset,
const CryptoPlugin::SubSample *subSamples,
size_t numSubSamples,
- const sp<MediaCodecBuffer> &buffer) override;
+ const sp<MediaCodecBuffer> &buffer,
+ AString* errorDetailMsg) override;
virtual status_t renderOutputBuffer(
const sp<MediaCodecBuffer> &buffer, int64_t timestampNs) override;
+ virtual void pollForRenderedBuffers() override;
virtual status_t discardBuffer(const sp<MediaCodecBuffer> &buffer) override;
virtual void getInputBufferArray(Vector<sp<MediaCodecBuffer>> *array) override;
virtual void getOutputBufferArray(Vector<sp<MediaCodecBuffer>> *array) override;
diff --git a/media/libstagefright/include/media/stagefright/CodecBase.h b/media/libstagefright/include/media/stagefright/CodecBase.h
index 48721ec..916d41e 100644
--- a/media/libstagefright/include/media/stagefright/CodecBase.h
+++ b/media/libstagefright/include/media/stagefright/CodecBase.h
@@ -385,7 +385,8 @@
size_t offset,
const CryptoPlugin::SubSample *subSamples,
size_t numSubSamples,
- const sp<MediaCodecBuffer> &buffer) {
+ const sp<MediaCodecBuffer> &buffer,
+ AString* errorDetailMsg) {
(void)memory;
(void)secure;
(void)key;
@@ -396,6 +397,7 @@
(void)subSamples;
(void)numSubSamples;
(void)buffer;
+ (void)errorDetailMsg;
return -ENOSYS;
}
/**
@@ -407,6 +409,14 @@
*/
virtual status_t renderOutputBuffer(
const sp<MediaCodecBuffer> &buffer, int64_t timestampNs) = 0;
+
+ /**
+ * Poll for updates about rendered buffers.
+ *
+ * Triggers callbacks to CodecCallback::onOutputFramesRendered.
+ */
+ virtual void pollForRenderedBuffers() = 0;
+
/**
* Discard a buffer to the underlying CodecBase object.
*
diff --git a/media/libstagefright/include/media/stagefright/CodecErrorLog.h b/media/libstagefright/include/media/stagefright/CodecErrorLog.h
new file mode 100644
index 0000000..673117a
--- /dev/null
+++ b/media/libstagefright/include/media/stagefright/CodecErrorLog.h
@@ -0,0 +1,64 @@
+/*
+ * Copyright 2023, The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef CODEC_ERROR_LOG_H_
+
+#define CODEC_ERROR_LOG_H_
+
+#include <sstream>
+#include <string>
+
+#include <android-base/thread_annotations.h>
+
+#include <media/stagefright/foundation/AString.h>
+
+namespace android {
+
+/**
+ * CodecErrorLog gathers what happened during codec failures, and makes it
+ * available to clients for debugging purposes.
+ */
+class CodecErrorLog {
+public:
+ CodecErrorLog() = default;
+
+ /**
+ * Log a line of message.
+ *
+ * \note the message should be readable to developers who may not be
+ * familiar with MediaCodec internals
+ */
+ void log(const char *tag, const char *message);
+ void log(const char *tag, const std::string &message);
+
+ /**
+ * Extract the accumulated log as string. This operation clears the log.
+ */
+ std::string extract();
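+
+ // Illustrative usage (the call sites are examples, not part of this header):
+ // errorLog.log("MediaCodec", "configure() failed: invalid surface");
+ // std::string report = errorLog.extract(); // returns and clears the log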
+
+ /**
+ * Clears the previous log.
+ */
+ void clear();
+
+private:
+ mutable std::mutex mLock;
+ std::stringstream mStream GUARDED_BY(mLock);
+};
+
+} // namespace android
+
+#endif // CODEC_ERROR_LOG_H_
diff --git a/media/libstagefright/include/media/stagefright/ColorConverter.h b/media/libstagefright/include/media/stagefright/ColorConverter.h
index 7a05f00..e8b89c7 100644
--- a/media/libstagefright/include/media/stagefright/ColorConverter.h
+++ b/media/libstagefright/include/media/stagefright/ColorConverter.h
@@ -23,7 +23,10 @@
#include <stdint.h>
#include <utils/Errors.h>
+#include <optional>
+
#include <OMX_Video.h>
+#include <media/hardware/VideoAPI.h>
namespace android {
@@ -35,6 +38,8 @@
bool isDstRGB() const;
+ void setSrcMediaImage2(MediaImage2 img);
+
void setSrcColorSpace(uint32_t standard, uint32_t range, uint32_t transfer);
status_t convert(
@@ -49,18 +54,91 @@
struct Coeffs; // matrix coefficients
-private:
struct ColorSpace {
uint32_t mStandard;
uint32_t mRange;
uint32_t mTransfer;
- bool isBt2020() const;
-
+ bool isLimitedRange() const;
// libyuv helper methods
- bool isH420() const;
- bool isI420() const;
- bool isJ420() const;
+ // BT.2020 limited Range
+ bool isBt2020() const;
+ // BT.2020 full range
+ bool isBtV2020() const;
+ // 709 limited range
+ bool isH709() const;
+ // 709 full range
+ bool isF709() const;
+ // 601 limited range
+ bool isI601() const;
+ // 601 full range
+ // also called "JPEG" in libyuv
+ bool isJ601() const;
+ };
+
+private:
+
+ typedef enum : uint8_t {
+ ImageLayoutUnknown = 0x0,
+ ImageLayout420SemiPlanar = 0x1,
+ ImageLayout420Planar = 0x2
+ } Layout_t;
+
+ typedef enum : uint8_t {
+ ImageSamplingUnknown = 0x0,
+ ImageSamplingYUV420 = 0x1,
+ } Sampling_t;
+
+ // this is the actual usable bit depth
+ typedef enum : uint8_t {
+ ImageBitDepthInvalid = 0x0,
+ ImageBitDepth8 = 0x1,
+ ImageBitDepth10 = 0x2,
+ ImageBitDepth12 = 0x3,
+ ImageBitDepth16 = 0x4
+ } BitDepth_t;
+
+ struct BitmapParams;
+
+
+ class Image {
+ public:
+ Image(const MediaImage2& img);
+ virtual ~Image() {}
+
+ const MediaImage2 getMediaImage2() const {
+ return mImage;
+ }
+
+ Layout_t getLayout() const {
+ return mLayout;
+ }
+ Sampling_t getSampling() const {
+ return mSampling;
+ }
+ BitDepth_t getBitDepth() const {
+ return mBitDepth;
+ }
+
+ // Returns the plane offsets and strides for this image
+ // after accounting for the src crop offsets
+ status_t getYUVPlaneOffsetAndStride(
+ const BitmapParams &src,
+ uint32_t *y_offset,
+ uint32_t *u_offset,
+ uint32_t *v_offset,
+ size_t *y_stride,
+ size_t *u_stride,
+ size_t *v_stride
+ ) const;
+
+ bool isNV21() const;
+
+ private:
+ MediaImage2 mImage;
+ Layout_t mLayout;
+ Sampling_t mSampling;
+ BitDepth_t mBitDepth;
};
struct BitmapParams {
@@ -74,6 +152,8 @@
size_t cropWidth() const;
size_t cropHeight() const;
+ bool isValid() const;
+
void *mBits;
OMX_COLOR_FORMATTYPE mColorFormat;
size_t mWidth, mHeight;
@@ -82,6 +162,7 @@
};
OMX_COLOR_FORMATTYPE mSrcFormat, mDstFormat;
+ std::optional<Image> mSrcImage;
ColorSpace mSrcColorSpace;
uint8_t *mClip;
uint16_t *mClip10Bit;
@@ -89,14 +170,30 @@
uint8_t *initClip();
uint16_t *initClip10Bit();
+ // resolve YUVFormat from YUV420Flexible
+ bool isValidForMediaImage2() const;
+
+ // get plane offsets from Formats
+ status_t getSrcYUVPlaneOffsetAndStride(
+ const BitmapParams &src,
+ uint32_t *y_offset,
+ uint32_t *u_offset,
+ uint32_t *v_offset,
+ size_t *y_stride,
+ size_t *u_stride,
+ size_t *v_stride) const;
+
+ status_t convertYUVMediaImage(
+ const BitmapParams &src, const BitmapParams &dst);
+
// returns the YUV2RGB matrix coefficients according to the color aspects and bit depth
const struct Coeffs *getMatrix() const;
status_t convertCbYCrY(
const BitmapParams &src, const BitmapParams &dst);
- status_t convertYUV420Planar(
- const BitmapParams &src, const BitmapParams &dst);
+ // status_t convertYUV420Planar(
+ // const BitmapParams &src, const BitmapParams &dst);
status_t convertYUV420PlanarUseLibYUV(
const BitmapParams &src, const BitmapParams &dst);
@@ -113,19 +210,6 @@
status_t convertYUV420Planar16ToRGB(
const BitmapParams &src, const BitmapParams &dst);
- status_t convertQCOMYUV420SemiPlanar(
- const BitmapParams &src, const BitmapParams &dst);
-
- status_t convertYUV420SemiPlanar(
- const BitmapParams &src, const BitmapParams &dst);
-
- status_t convertYUV420SemiPlanarBase(
- const BitmapParams &src, const BitmapParams &dst,
- const uint8_t *src_y, const uint8_t *src_u, size_t row_inc, bool isNV21 = false);
-
- status_t convertTIYUV420PackedSemiPlanar(
- const BitmapParams &src, const BitmapParams &dst);
-
status_t convertYUVP010(
const BitmapParams &src, const BitmapParams &dst);
@@ -133,6 +217,7 @@
const BitmapParams &src, const BitmapParams &dst);
ColorConverter(const ColorConverter &);
+
ColorConverter &operator=(const ColorConverter &);
};
diff --git a/media/libstagefright/include/media/stagefright/MediaCodec.h b/media/libstagefright/include/media/stagefright/MediaCodec.h
index 65d9f7d..1cc281b 100644
--- a/media/libstagefright/include/media/stagefright/MediaCodec.h
+++ b/media/libstagefright/include/media/stagefright/MediaCodec.h
@@ -28,6 +28,7 @@
#include <media/MediaMetrics.h>
#include <media/MediaProfiles.h>
#include <media/stagefright/foundation/AHandler.h>
+#include <media/stagefright/CodecErrorLog.h>
#include <media/stagefright/FrameRenderTracker.h>
#include <utils/Vector.h>
@@ -39,6 +40,7 @@
namespace android {
namespace media {
class MediaResourceParcel;
+class ClientConfigParcel;
} // media
} // android
} // aidl
@@ -71,6 +73,7 @@
using hardware::cas::native::V1_0::IDescrambler;
using aidl::android::media::MediaResourceParcel;
+using aidl::android::media::ClientConfigParcel;
struct MediaCodec : public AHandler {
enum Domain {
@@ -301,6 +304,8 @@
T value;
};
+ inline CodecErrorLog &getErrorLog() { return mErrorLog; }
+
protected:
virtual ~MediaCodec();
virtual void onMessageReceived(const sp<AMessage> &msg);
@@ -325,6 +330,7 @@
RELEASING,
};
std::string stateString(State state);
+ std::string apiStateString();
enum {
kPortIndexInput = 0,
@@ -442,6 +448,7 @@
Mutex mMetricsLock;
mediametrics_handle_t mMetricsHandle = 0;
+ bool mMetricsToUpload = false;
nsecs_t mLifetimeStartNs = 0;
void initMediametrics();
void updateMediametrics();
@@ -453,6 +460,8 @@
void updateTunnelPeek(const sp<AMessage> &msg);
void updatePlaybackDuration(const sp<AMessage> &msg);
+ inline void initClientConfigParcel(ClientConfigParcel& clientConfig);
+
sp<AMessage> mOutputFormat;
sp<AMessage> mInputFormat;
sp<AMessage> mCallback;
@@ -646,6 +655,9 @@
// when low latency is on
int64_t mInputBufferCounter; // number of input buffers queued since last reset/flush
+ // A reschedulable message that periodically polls for rendered buffers
+ sp<AMessage> mMsgPollForRenderedBuffers;
+
class ReleaseSurface;
std::unique_ptr<ReleaseSurface> mReleaseSurface;
@@ -702,11 +714,15 @@
};
Histogram mLatencyHist;
+ // A unique ID for the codec, used by the metrics.
+ uint64_t mCodecId = 0;
std::function<sp<CodecBase>(const AString &, const char *)> mGetCodecBase;
std::function<status_t(const AString &, sp<MediaCodecInfo> *)> mGetCodecInfo;
friend class MediaTestHelper;
+ CodecErrorLog mErrorLog;
+
DISALLOW_EVIL_CONSTRUCTORS(MediaCodec);
};
diff --git a/media/libstagefright/include/media/stagefright/MediaCodecConstants.h b/media/libstagefright/include/media/stagefright/MediaCodecConstants.h
index 4e9623b..7334639 100644
--- a/media/libstagefright/include/media/stagefright/MediaCodecConstants.h
+++ b/media/libstagefright/include/media/stagefright/MediaCodecConstants.h
@@ -21,15 +21,15 @@
namespace {
// from MediaCodecInfo.java
-constexpr int32_t AVCProfileBaseline = 0x01;
-constexpr int32_t AVCProfileMain = 0x02;
-constexpr int32_t AVCProfileExtended = 0x04;
-constexpr int32_t AVCProfileHigh = 0x08;
-constexpr int32_t AVCProfileHigh10 = 0x10;
-constexpr int32_t AVCProfileHigh422 = 0x20;
-constexpr int32_t AVCProfileHigh444 = 0x40;
-constexpr int32_t AVCProfileConstrainedBaseline = 0x10000;
-constexpr int32_t AVCProfileConstrainedHigh = 0x80000;
+inline constexpr int32_t AVCProfileBaseline = 0x01;
+inline constexpr int32_t AVCProfileMain = 0x02;
+inline constexpr int32_t AVCProfileExtended = 0x04;
+inline constexpr int32_t AVCProfileHigh = 0x08;
+inline constexpr int32_t AVCProfileHigh10 = 0x10;
+inline constexpr int32_t AVCProfileHigh422 = 0x20;
+inline constexpr int32_t AVCProfileHigh444 = 0x40;
+inline constexpr int32_t AVCProfileConstrainedBaseline = 0x10000;
+inline constexpr int32_t AVCProfileConstrainedHigh = 0x80000;
inline static const char *asString_AVCProfile(int32_t i, const char *def = "??") {
switch (i) {
@@ -46,26 +46,26 @@
}
}
-constexpr int32_t AVCLevel1 = 0x01;
-constexpr int32_t AVCLevel1b = 0x02;
-constexpr int32_t AVCLevel11 = 0x04;
-constexpr int32_t AVCLevel12 = 0x08;
-constexpr int32_t AVCLevel13 = 0x10;
-constexpr int32_t AVCLevel2 = 0x20;
-constexpr int32_t AVCLevel21 = 0x40;
-constexpr int32_t AVCLevel22 = 0x80;
-constexpr int32_t AVCLevel3 = 0x100;
-constexpr int32_t AVCLevel31 = 0x200;
-constexpr int32_t AVCLevel32 = 0x400;
-constexpr int32_t AVCLevel4 = 0x800;
-constexpr int32_t AVCLevel41 = 0x1000;
-constexpr int32_t AVCLevel42 = 0x2000;
-constexpr int32_t AVCLevel5 = 0x4000;
-constexpr int32_t AVCLevel51 = 0x8000;
-constexpr int32_t AVCLevel52 = 0x10000;
-constexpr int32_t AVCLevel6 = 0x20000;
-constexpr int32_t AVCLevel61 = 0x40000;
-constexpr int32_t AVCLevel62 = 0x80000;
+inline constexpr int32_t AVCLevel1 = 0x01;
+inline constexpr int32_t AVCLevel1b = 0x02;
+inline constexpr int32_t AVCLevel11 = 0x04;
+inline constexpr int32_t AVCLevel12 = 0x08;
+inline constexpr int32_t AVCLevel13 = 0x10;
+inline constexpr int32_t AVCLevel2 = 0x20;
+inline constexpr int32_t AVCLevel21 = 0x40;
+inline constexpr int32_t AVCLevel22 = 0x80;
+inline constexpr int32_t AVCLevel3 = 0x100;
+inline constexpr int32_t AVCLevel31 = 0x200;
+inline constexpr int32_t AVCLevel32 = 0x400;
+inline constexpr int32_t AVCLevel4 = 0x800;
+inline constexpr int32_t AVCLevel41 = 0x1000;
+inline constexpr int32_t AVCLevel42 = 0x2000;
+inline constexpr int32_t AVCLevel5 = 0x4000;
+inline constexpr int32_t AVCLevel51 = 0x8000;
+inline constexpr int32_t AVCLevel52 = 0x10000;
+inline constexpr int32_t AVCLevel6 = 0x20000;
+inline constexpr int32_t AVCLevel61 = 0x40000;
+inline constexpr int32_t AVCLevel62 = 0x80000;
inline static const char *asString_AVCLevel(int32_t i, const char *def = "??") {
switch (i) {
@@ -93,15 +93,15 @@
}
}
-constexpr int32_t H263ProfileBaseline = 0x01;
-constexpr int32_t H263ProfileH320Coding = 0x02;
-constexpr int32_t H263ProfileBackwardCompatible = 0x04;
-constexpr int32_t H263ProfileISWV2 = 0x08;
-constexpr int32_t H263ProfileISWV3 = 0x10;
-constexpr int32_t H263ProfileHighCompression = 0x20;
-constexpr int32_t H263ProfileInternet = 0x40;
-constexpr int32_t H263ProfileInterlace = 0x80;
-constexpr int32_t H263ProfileHighLatency = 0x100;
+inline constexpr int32_t H263ProfileBaseline = 0x01;
+inline constexpr int32_t H263ProfileH320Coding = 0x02;
+inline constexpr int32_t H263ProfileBackwardCompatible = 0x04;
+inline constexpr int32_t H263ProfileISWV2 = 0x08;
+inline constexpr int32_t H263ProfileISWV3 = 0x10;
+inline constexpr int32_t H263ProfileHighCompression = 0x20;
+inline constexpr int32_t H263ProfileInternet = 0x40;
+inline constexpr int32_t H263ProfileInterlace = 0x80;
+inline constexpr int32_t H263ProfileHighLatency = 0x100;
inline static const char *asString_H263Profile(int32_t i, const char *def = "??") {
switch (i) {
@@ -118,14 +118,14 @@
}
}
-constexpr int32_t H263Level10 = 0x01;
-constexpr int32_t H263Level20 = 0x02;
-constexpr int32_t H263Level30 = 0x04;
-constexpr int32_t H263Level40 = 0x08;
-constexpr int32_t H263Level45 = 0x10;
-constexpr int32_t H263Level50 = 0x20;
-constexpr int32_t H263Level60 = 0x40;
-constexpr int32_t H263Level70 = 0x80;
+inline constexpr int32_t H263Level10 = 0x01;
+inline constexpr int32_t H263Level20 = 0x02;
+inline constexpr int32_t H263Level30 = 0x04;
+inline constexpr int32_t H263Level40 = 0x08;
+inline constexpr int32_t H263Level45 = 0x10;
+inline constexpr int32_t H263Level50 = 0x20;
+inline constexpr int32_t H263Level60 = 0x40;
+inline constexpr int32_t H263Level70 = 0x80;
inline static const char *asString_H263Level(int32_t i, const char *def = "??") {
switch (i) {
@@ -141,22 +141,22 @@
}
}
-constexpr int32_t MPEG4ProfileSimple = 0x01;
-constexpr int32_t MPEG4ProfileSimpleScalable = 0x02;
-constexpr int32_t MPEG4ProfileCore = 0x04;
-constexpr int32_t MPEG4ProfileMain = 0x08;
-constexpr int32_t MPEG4ProfileNbit = 0x10;
-constexpr int32_t MPEG4ProfileScalableTexture = 0x20;
-constexpr int32_t MPEG4ProfileSimpleFace = 0x40;
-constexpr int32_t MPEG4ProfileSimpleFBA = 0x80;
-constexpr int32_t MPEG4ProfileBasicAnimated = 0x100;
-constexpr int32_t MPEG4ProfileHybrid = 0x200;
-constexpr int32_t MPEG4ProfileAdvancedRealTime = 0x400;
-constexpr int32_t MPEG4ProfileCoreScalable = 0x800;
-constexpr int32_t MPEG4ProfileAdvancedCoding = 0x1000;
-constexpr int32_t MPEG4ProfileAdvancedCore = 0x2000;
-constexpr int32_t MPEG4ProfileAdvancedScalable = 0x4000;
-constexpr int32_t MPEG4ProfileAdvancedSimple = 0x8000;
+inline constexpr int32_t MPEG4ProfileSimple = 0x01;
+inline constexpr int32_t MPEG4ProfileSimpleScalable = 0x02;
+inline constexpr int32_t MPEG4ProfileCore = 0x04;
+inline constexpr int32_t MPEG4ProfileMain = 0x08;
+inline constexpr int32_t MPEG4ProfileNbit = 0x10;
+inline constexpr int32_t MPEG4ProfileScalableTexture = 0x20;
+inline constexpr int32_t MPEG4ProfileSimpleFace = 0x40;
+inline constexpr int32_t MPEG4ProfileSimpleFBA = 0x80;
+inline constexpr int32_t MPEG4ProfileBasicAnimated = 0x100;
+inline constexpr int32_t MPEG4ProfileHybrid = 0x200;
+inline constexpr int32_t MPEG4ProfileAdvancedRealTime = 0x400;
+inline constexpr int32_t MPEG4ProfileCoreScalable = 0x800;
+inline constexpr int32_t MPEG4ProfileAdvancedCoding = 0x1000;
+inline constexpr int32_t MPEG4ProfileAdvancedCore = 0x2000;
+inline constexpr int32_t MPEG4ProfileAdvancedScalable = 0x4000;
+inline constexpr int32_t MPEG4ProfileAdvancedSimple = 0x8000;
inline static const char *asString_MPEG4Profile(int32_t i, const char *def = "??") {
switch (i) {
@@ -180,16 +180,16 @@
}
}
-constexpr int32_t MPEG4Level0 = 0x01;
-constexpr int32_t MPEG4Level0b = 0x02;
-constexpr int32_t MPEG4Level1 = 0x04;
-constexpr int32_t MPEG4Level2 = 0x08;
-constexpr int32_t MPEG4Level3 = 0x10;
-constexpr int32_t MPEG4Level3b = 0x18;
-constexpr int32_t MPEG4Level4 = 0x20;
-constexpr int32_t MPEG4Level4a = 0x40;
-constexpr int32_t MPEG4Level5 = 0x80;
-constexpr int32_t MPEG4Level6 = 0x100;
+inline constexpr int32_t MPEG4Level0 = 0x01;
+inline constexpr int32_t MPEG4Level0b = 0x02;
+inline constexpr int32_t MPEG4Level1 = 0x04;
+inline constexpr int32_t MPEG4Level2 = 0x08;
+inline constexpr int32_t MPEG4Level3 = 0x10;
+inline constexpr int32_t MPEG4Level3b = 0x18;
+inline constexpr int32_t MPEG4Level4 = 0x20;
+inline constexpr int32_t MPEG4Level4a = 0x40;
+inline constexpr int32_t MPEG4Level5 = 0x80;
+inline constexpr int32_t MPEG4Level6 = 0x100;
inline static const char *asString_MPEG4Level(int32_t i, const char *def = "??") {
switch (i) {
@@ -207,12 +207,12 @@
}
}
-constexpr int32_t MPEG2ProfileSimple = 0x00;
-constexpr int32_t MPEG2ProfileMain = 0x01;
-constexpr int32_t MPEG2Profile422 = 0x02;
-constexpr int32_t MPEG2ProfileSNR = 0x03;
-constexpr int32_t MPEG2ProfileSpatial = 0x04;
-constexpr int32_t MPEG2ProfileHigh = 0x05;
+inline constexpr int32_t MPEG2ProfileSimple = 0x00;
+inline constexpr int32_t MPEG2ProfileMain = 0x01;
+inline constexpr int32_t MPEG2Profile422 = 0x02;
+inline constexpr int32_t MPEG2ProfileSNR = 0x03;
+inline constexpr int32_t MPEG2ProfileSpatial = 0x04;
+inline constexpr int32_t MPEG2ProfileHigh = 0x05;
inline static const char *asString_MPEG2Profile(int32_t i, const char *def = "??") {
switch (i) {
@@ -226,11 +226,11 @@
}
}
-constexpr int32_t MPEG2LevelLL = 0x00;
-constexpr int32_t MPEG2LevelML = 0x01;
-constexpr int32_t MPEG2LevelH14 = 0x02;
-constexpr int32_t MPEG2LevelHL = 0x03;
-constexpr int32_t MPEG2LevelHP = 0x04;
+inline constexpr int32_t MPEG2LevelLL = 0x00;
+inline constexpr int32_t MPEG2LevelML = 0x01;
+inline constexpr int32_t MPEG2LevelH14 = 0x02;
+inline constexpr int32_t MPEG2LevelHL = 0x03;
+inline constexpr int32_t MPEG2LevelHP = 0x04;
inline static const char *asString_MPEG2Level(int32_t i, const char *def = "??") {
switch (i) {
@@ -243,18 +243,18 @@
}
}
-constexpr int32_t AACObjectMain = 1;
-constexpr int32_t AACObjectLC = 2;
-constexpr int32_t AACObjectSSR = 3;
-constexpr int32_t AACObjectLTP = 4;
-constexpr int32_t AACObjectHE = 5;
-constexpr int32_t AACObjectScalable = 6;
-constexpr int32_t AACObjectERLC = 17;
-constexpr int32_t AACObjectERScalable = 20;
-constexpr int32_t AACObjectLD = 23;
-constexpr int32_t AACObjectHE_PS = 29;
-constexpr int32_t AACObjectELD = 39;
-constexpr int32_t AACObjectXHE = 42;
+inline constexpr int32_t AACObjectMain = 1;
+inline constexpr int32_t AACObjectLC = 2;
+inline constexpr int32_t AACObjectSSR = 3;
+inline constexpr int32_t AACObjectLTP = 4;
+inline constexpr int32_t AACObjectHE = 5;
+inline constexpr int32_t AACObjectScalable = 6;
+inline constexpr int32_t AACObjectERLC = 17;
+inline constexpr int32_t AACObjectERScalable = 20;
+inline constexpr int32_t AACObjectLD = 23;
+inline constexpr int32_t AACObjectHE_PS = 29;
+inline constexpr int32_t AACObjectELD = 39;
+inline constexpr int32_t AACObjectXHE = 42;
inline static const char *asString_AACObject(int32_t i, const char *def = "??") {
switch (i) {
@@ -274,10 +274,10 @@
}
}
-constexpr int32_t VP8Level_Version0 = 0x01;
-constexpr int32_t VP8Level_Version1 = 0x02;
-constexpr int32_t VP8Level_Version2 = 0x04;
-constexpr int32_t VP8Level_Version3 = 0x08;
+inline constexpr int32_t VP8Level_Version0 = 0x01;
+inline constexpr int32_t VP8Level_Version1 = 0x02;
+inline constexpr int32_t VP8Level_Version2 = 0x04;
+inline constexpr int32_t VP8Level_Version3 = 0x08;
inline static const char *asString_VP8Level(int32_t i, const char *def = "??") {
switch (i) {
@@ -289,7 +289,7 @@
}
}
-constexpr int32_t VP8ProfileMain = 0x01;
+inline constexpr int32_t VP8ProfileMain = 0x01;
inline static const char *asString_VP8Profile(int32_t i, const char *def = "??") {
switch (i) {
@@ -298,14 +298,14 @@
}
}
-constexpr int32_t VP9Profile0 = 0x01;
-constexpr int32_t VP9Profile1 = 0x02;
-constexpr int32_t VP9Profile2 = 0x04;
-constexpr int32_t VP9Profile3 = 0x08;
-constexpr int32_t VP9Profile2HDR = 0x1000;
-constexpr int32_t VP9Profile3HDR = 0x2000;
-constexpr int32_t VP9Profile2HDR10Plus = 0x4000;
-constexpr int32_t VP9Profile3HDR10Plus = 0x8000;
+inline constexpr int32_t VP9Profile0 = 0x01;
+inline constexpr int32_t VP9Profile1 = 0x02;
+inline constexpr int32_t VP9Profile2 = 0x04;
+inline constexpr int32_t VP9Profile3 = 0x08;
+inline constexpr int32_t VP9Profile2HDR = 0x1000;
+inline constexpr int32_t VP9Profile3HDR = 0x2000;
+inline constexpr int32_t VP9Profile2HDR10Plus = 0x4000;
+inline constexpr int32_t VP9Profile3HDR10Plus = 0x8000;
inline static const char *asString_VP9Profile(int32_t i, const char *def = "??") {
switch (i) {
@@ -321,20 +321,20 @@
}
}
-constexpr int32_t VP9Level1 = 0x1;
-constexpr int32_t VP9Level11 = 0x2;
-constexpr int32_t VP9Level2 = 0x4;
-constexpr int32_t VP9Level21 = 0x8;
-constexpr int32_t VP9Level3 = 0x10;
-constexpr int32_t VP9Level31 = 0x20;
-constexpr int32_t VP9Level4 = 0x40;
-constexpr int32_t VP9Level41 = 0x80;
-constexpr int32_t VP9Level5 = 0x100;
-constexpr int32_t VP9Level51 = 0x200;
-constexpr int32_t VP9Level52 = 0x400;
-constexpr int32_t VP9Level6 = 0x800;
-constexpr int32_t VP9Level61 = 0x1000;
-constexpr int32_t VP9Level62 = 0x2000;
+inline constexpr int32_t VP9Level1 = 0x1;
+inline constexpr int32_t VP9Level11 = 0x2;
+inline constexpr int32_t VP9Level2 = 0x4;
+inline constexpr int32_t VP9Level21 = 0x8;
+inline constexpr int32_t VP9Level3 = 0x10;
+inline constexpr int32_t VP9Level31 = 0x20;
+inline constexpr int32_t VP9Level4 = 0x40;
+inline constexpr int32_t VP9Level41 = 0x80;
+inline constexpr int32_t VP9Level5 = 0x100;
+inline constexpr int32_t VP9Level51 = 0x200;
+inline constexpr int32_t VP9Level52 = 0x400;
+inline constexpr int32_t VP9Level6 = 0x800;
+inline constexpr int32_t VP9Level61 = 0x1000;
+inline constexpr int32_t VP9Level62 = 0x2000;
inline static const char *asString_VP9Level(int32_t i, const char *def = "??") {
switch (i) {
@@ -356,10 +356,10 @@
}
}
-constexpr int32_t AV1ProfileMain8 = 0x1;
-constexpr int32_t AV1ProfileMain10 = 0x2;
-constexpr int32_t AV1ProfileMain10HDR10 = 0x1000;
-constexpr int32_t AV1ProfileMain10HDR10Plus = 0x2000;
+inline constexpr int32_t AV1ProfileMain8 = 0x1;
+inline constexpr int32_t AV1ProfileMain10 = 0x2;
+inline constexpr int32_t AV1ProfileMain10HDR10 = 0x1000;
+inline constexpr int32_t AV1ProfileMain10HDR10Plus = 0x2000;
inline static const char *asString_AV1Profile(int32_t i, const char *def = "??") {
switch (i) {
@@ -371,30 +371,30 @@
}
}
-constexpr int32_t AV1Level2 = 0x1;
-constexpr int32_t AV1Level21 = 0x2;
-constexpr int32_t AV1Level22 = 0x4;
-constexpr int32_t AV1Level23 = 0x8;
-constexpr int32_t AV1Level3 = 0x10;
-constexpr int32_t AV1Level31 = 0x20;
-constexpr int32_t AV1Level32 = 0x40;
-constexpr int32_t AV1Level33 = 0x80;
-constexpr int32_t AV1Level4 = 0x100;
-constexpr int32_t AV1Level41 = 0x200;
-constexpr int32_t AV1Level42 = 0x400;
-constexpr int32_t AV1Level43 = 0x800;
-constexpr int32_t AV1Level5 = 0x1000;
-constexpr int32_t AV1Level51 = 0x2000;
-constexpr int32_t AV1Level52 = 0x4000;
-constexpr int32_t AV1Level53 = 0x8000;
-constexpr int32_t AV1Level6 = 0x10000;
-constexpr int32_t AV1Level61 = 0x20000;
-constexpr int32_t AV1Level62 = 0x40000;
-constexpr int32_t AV1Level63 = 0x80000;
-constexpr int32_t AV1Level7 = 0x100000;
-constexpr int32_t AV1Level71 = 0x200000;
-constexpr int32_t AV1Level72 = 0x400000;
-constexpr int32_t AV1Level73 = 0x800000;
+inline constexpr int32_t AV1Level2 = 0x1;
+inline constexpr int32_t AV1Level21 = 0x2;
+inline constexpr int32_t AV1Level22 = 0x4;
+inline constexpr int32_t AV1Level23 = 0x8;
+inline constexpr int32_t AV1Level3 = 0x10;
+inline constexpr int32_t AV1Level31 = 0x20;
+inline constexpr int32_t AV1Level32 = 0x40;
+inline constexpr int32_t AV1Level33 = 0x80;
+inline constexpr int32_t AV1Level4 = 0x100;
+inline constexpr int32_t AV1Level41 = 0x200;
+inline constexpr int32_t AV1Level42 = 0x400;
+inline constexpr int32_t AV1Level43 = 0x800;
+inline constexpr int32_t AV1Level5 = 0x1000;
+inline constexpr int32_t AV1Level51 = 0x2000;
+inline constexpr int32_t AV1Level52 = 0x4000;
+inline constexpr int32_t AV1Level53 = 0x8000;
+inline constexpr int32_t AV1Level6 = 0x10000;
+inline constexpr int32_t AV1Level61 = 0x20000;
+inline constexpr int32_t AV1Level62 = 0x40000;
+inline constexpr int32_t AV1Level63 = 0x80000;
+inline constexpr int32_t AV1Level7 = 0x100000;
+inline constexpr int32_t AV1Level71 = 0x200000;
+inline constexpr int32_t AV1Level72 = 0x400000;
+inline constexpr int32_t AV1Level73 = 0x800000;
inline static const char *asString_AV1Level(int32_t i, const char *def = "??") {
switch (i) {
@@ -426,11 +426,11 @@
}
}
-constexpr int32_t HEVCProfileMain = 0x01;
-constexpr int32_t HEVCProfileMain10 = 0x02;
-constexpr int32_t HEVCProfileMainStill = 0x04;
-constexpr int32_t HEVCProfileMain10HDR10 = 0x1000;
-constexpr int32_t HEVCProfileMain10HDR10Plus = 0x2000;
+inline constexpr int32_t HEVCProfileMain = 0x01;
+inline constexpr int32_t HEVCProfileMain10 = 0x02;
+inline constexpr int32_t HEVCProfileMainStill = 0x04;
+inline constexpr int32_t HEVCProfileMain10HDR10 = 0x1000;
+inline constexpr int32_t HEVCProfileMain10HDR10Plus = 0x2000;
inline static const char *asString_HEVCProfile(int32_t i, const char *def = "??") {
switch (i) {
@@ -443,32 +443,32 @@
}
}
-constexpr int32_t HEVCMainTierLevel1 = 0x1;
-constexpr int32_t HEVCHighTierLevel1 = 0x2;
-constexpr int32_t HEVCMainTierLevel2 = 0x4;
-constexpr int32_t HEVCHighTierLevel2 = 0x8;
-constexpr int32_t HEVCMainTierLevel21 = 0x10;
-constexpr int32_t HEVCHighTierLevel21 = 0x20;
-constexpr int32_t HEVCMainTierLevel3 = 0x40;
-constexpr int32_t HEVCHighTierLevel3 = 0x80;
-constexpr int32_t HEVCMainTierLevel31 = 0x100;
-constexpr int32_t HEVCHighTierLevel31 = 0x200;
-constexpr int32_t HEVCMainTierLevel4 = 0x400;
-constexpr int32_t HEVCHighTierLevel4 = 0x800;
-constexpr int32_t HEVCMainTierLevel41 = 0x1000;
-constexpr int32_t HEVCHighTierLevel41 = 0x2000;
-constexpr int32_t HEVCMainTierLevel5 = 0x4000;
-constexpr int32_t HEVCHighTierLevel5 = 0x8000;
-constexpr int32_t HEVCMainTierLevel51 = 0x10000;
-constexpr int32_t HEVCHighTierLevel51 = 0x20000;
-constexpr int32_t HEVCMainTierLevel52 = 0x40000;
-constexpr int32_t HEVCHighTierLevel52 = 0x80000;
-constexpr int32_t HEVCMainTierLevel6 = 0x100000;
-constexpr int32_t HEVCHighTierLevel6 = 0x200000;
-constexpr int32_t HEVCMainTierLevel61 = 0x400000;
-constexpr int32_t HEVCHighTierLevel61 = 0x800000;
-constexpr int32_t HEVCMainTierLevel62 = 0x1000000;
-constexpr int32_t HEVCHighTierLevel62 = 0x2000000;
+inline constexpr int32_t HEVCMainTierLevel1 = 0x1;
+inline constexpr int32_t HEVCHighTierLevel1 = 0x2;
+inline constexpr int32_t HEVCMainTierLevel2 = 0x4;
+inline constexpr int32_t HEVCHighTierLevel2 = 0x8;
+inline constexpr int32_t HEVCMainTierLevel21 = 0x10;
+inline constexpr int32_t HEVCHighTierLevel21 = 0x20;
+inline constexpr int32_t HEVCMainTierLevel3 = 0x40;
+inline constexpr int32_t HEVCHighTierLevel3 = 0x80;
+inline constexpr int32_t HEVCMainTierLevel31 = 0x100;
+inline constexpr int32_t HEVCHighTierLevel31 = 0x200;
+inline constexpr int32_t HEVCMainTierLevel4 = 0x400;
+inline constexpr int32_t HEVCHighTierLevel4 = 0x800;
+inline constexpr int32_t HEVCMainTierLevel41 = 0x1000;
+inline constexpr int32_t HEVCHighTierLevel41 = 0x2000;
+inline constexpr int32_t HEVCMainTierLevel5 = 0x4000;
+inline constexpr int32_t HEVCHighTierLevel5 = 0x8000;
+inline constexpr int32_t HEVCMainTierLevel51 = 0x10000;
+inline constexpr int32_t HEVCHighTierLevel51 = 0x20000;
+inline constexpr int32_t HEVCMainTierLevel52 = 0x40000;
+inline constexpr int32_t HEVCHighTierLevel52 = 0x80000;
+inline constexpr int32_t HEVCMainTierLevel6 = 0x100000;
+inline constexpr int32_t HEVCHighTierLevel6 = 0x200000;
+inline constexpr int32_t HEVCMainTierLevel61 = 0x400000;
+inline constexpr int32_t HEVCHighTierLevel61 = 0x800000;
+inline constexpr int32_t HEVCMainTierLevel62 = 0x1000000;
+inline constexpr int32_t HEVCHighTierLevel62 = 0x2000000;
inline static const char *asString_HEVCTierLevel(int32_t i, const char *def = "??") {
switch (i) {
@@ -502,17 +502,17 @@
}
}
-constexpr int32_t DolbyVisionProfileDvavPer = 0x1;
-constexpr int32_t DolbyVisionProfileDvavPen = 0x2;
-constexpr int32_t DolbyVisionProfileDvheDer = 0x4;
-constexpr int32_t DolbyVisionProfileDvheDen = 0x8;
-constexpr int32_t DolbyVisionProfileDvheDtr = 0x10;
-constexpr int32_t DolbyVisionProfileDvheStn = 0x20;
-constexpr int32_t DolbyVisionProfileDvheDth = 0x40;
-constexpr int32_t DolbyVisionProfileDvheDtb = 0x80;
-constexpr int32_t DolbyVisionProfileDvheSt = 0x100;
-constexpr int32_t DolbyVisionProfileDvavSe = 0x200;
-constexpr int32_t DolbyVisionProfileDvav110 = 0x400;
+inline constexpr int32_t DolbyVisionProfileDvavPer = 0x1;
+inline constexpr int32_t DolbyVisionProfileDvavPen = 0x2;
+inline constexpr int32_t DolbyVisionProfileDvheDer = 0x4;
+inline constexpr int32_t DolbyVisionProfileDvheDen = 0x8;
+inline constexpr int32_t DolbyVisionProfileDvheDtr = 0x10;
+inline constexpr int32_t DolbyVisionProfileDvheStn = 0x20;
+inline constexpr int32_t DolbyVisionProfileDvheDth = 0x40;
+inline constexpr int32_t DolbyVisionProfileDvheDtb = 0x80;
+inline constexpr int32_t DolbyVisionProfileDvheSt = 0x100;
+inline constexpr int32_t DolbyVisionProfileDvavSe = 0x200;
+inline constexpr int32_t DolbyVisionProfileDvav110 = 0x400;
inline static const char *asString_DolbyVisionProfile(int32_t i, const char *def = "??") {
switch (i) {
@@ -531,18 +531,18 @@
}
}
-constexpr int32_t DolbyVisionLevelHd24 = 0x1;
-constexpr int32_t DolbyVisionLevelHd30 = 0x2;
-constexpr int32_t DolbyVisionLevelFhd24 = 0x4;
-constexpr int32_t DolbyVisionLevelFhd30 = 0x8;
-constexpr int32_t DolbyVisionLevelFhd60 = 0x10;
-constexpr int32_t DolbyVisionLevelUhd24 = 0x20;
-constexpr int32_t DolbyVisionLevelUhd30 = 0x40;
-constexpr int32_t DolbyVisionLevelUhd48 = 0x80;
-constexpr int32_t DolbyVisionLevelUhd60 = 0x100;
-constexpr int32_t DolbyVisionLevelUhd120 = 0x200;
-constexpr int32_t DolbyVisionLevel8k30 = 0x400;
-constexpr int32_t DolbyVisionLevel8k60 = 0x800;
+inline constexpr int32_t DolbyVisionLevelHd24 = 0x1;
+inline constexpr int32_t DolbyVisionLevelHd30 = 0x2;
+inline constexpr int32_t DolbyVisionLevelFhd24 = 0x4;
+inline constexpr int32_t DolbyVisionLevelFhd30 = 0x8;
+inline constexpr int32_t DolbyVisionLevelFhd60 = 0x10;
+inline constexpr int32_t DolbyVisionLevelUhd24 = 0x20;
+inline constexpr int32_t DolbyVisionLevelUhd30 = 0x40;
+inline constexpr int32_t DolbyVisionLevelUhd48 = 0x80;
+inline constexpr int32_t DolbyVisionLevelUhd60 = 0x100;
+inline constexpr int32_t DolbyVisionLevelUhd120 = 0x200;
+inline constexpr int32_t DolbyVisionLevel8k30 = 0x400;
+inline constexpr int32_t DolbyVisionLevel8k60 = 0x800;
inline static const char *asString_DolbyVisionLevel(int32_t i, const char *def = "??") {
switch (i) {
@@ -562,10 +562,10 @@
}
}
-constexpr int32_t BITRATE_MODE_CBR = 2;
-constexpr int32_t BITRATE_MODE_CBR_FD = 3;
-constexpr int32_t BITRATE_MODE_CQ = 0;
-constexpr int32_t BITRATE_MODE_VBR = 1;
+inline constexpr int32_t BITRATE_MODE_CBR = 2;
+inline constexpr int32_t BITRATE_MODE_CBR_FD = 3;
+inline constexpr int32_t BITRATE_MODE_CQ = 0;
+inline constexpr int32_t BITRATE_MODE_VBR = 1;
inline static const char *asString_BitrateMode(int32_t i, const char *def = "??") {
switch (i) {
@@ -577,61 +577,61 @@
}
}
-constexpr int32_t COLOR_Format12bitRGB444 = 3;
-constexpr int32_t COLOR_Format16bitARGB1555 = 5;
-constexpr int32_t COLOR_Format16bitARGB4444 = 4;
-constexpr int32_t COLOR_Format16bitBGR565 = 7;
-constexpr int32_t COLOR_Format16bitRGB565 = 6;
-constexpr int32_t COLOR_Format18bitARGB1665 = 9;
-constexpr int32_t COLOR_Format18BitBGR666 = 41;
-constexpr int32_t COLOR_Format18bitRGB666 = 8;
-constexpr int32_t COLOR_Format19bitARGB1666 = 10;
-constexpr int32_t COLOR_Format24BitABGR6666 = 43;
-constexpr int32_t COLOR_Format24bitARGB1887 = 13;
-constexpr int32_t COLOR_Format24BitARGB6666 = 42;
-constexpr int32_t COLOR_Format24bitBGR888 = 12;
-constexpr int32_t COLOR_Format24bitRGB888 = 11;
-constexpr int32_t COLOR_Format25bitARGB1888 = 14;
-constexpr int32_t COLOR_Format32bitABGR2101010 = 0x7F00AAA2;
-constexpr int32_t COLOR_Format32bitABGR8888 = 0x7F00A000;
-constexpr int32_t COLOR_Format32bitARGB8888 = 16;
-constexpr int32_t COLOR_Format32bitBGRA8888 = 15;
-constexpr int32_t COLOR_Format64bitABGRFloat = 0x7F000F16;
-constexpr int32_t COLOR_Format8bitRGB332 = 2;
-constexpr int32_t COLOR_FormatCbYCrY = 27;
-constexpr int32_t COLOR_FormatCrYCbY = 28;
-constexpr int32_t COLOR_FormatL16 = 36;
-constexpr int32_t COLOR_FormatL2 = 33;
-constexpr int32_t COLOR_FormatL24 = 37;
-constexpr int32_t COLOR_FormatL32 = 38;
-constexpr int32_t COLOR_FormatL4 = 34;
-constexpr int32_t COLOR_FormatL8 = 35;
-constexpr int32_t COLOR_FormatMonochrome = 1;
-constexpr int32_t COLOR_FormatRawBayer10bit = 31;
-constexpr int32_t COLOR_FormatRawBayer8bit = 30;
-constexpr int32_t COLOR_FormatRawBayer8bitcompressed = 32;
-constexpr int32_t COLOR_FormatRGBAFlexible = 0x7F36A888;
-constexpr int32_t COLOR_FormatRGBFlexible = 0x7F36B888;
-constexpr int32_t COLOR_FormatSurface = 0x7F000789;
-constexpr int32_t COLOR_FormatYCbYCr = 25;
-constexpr int32_t COLOR_FormatYCrYCb = 26;
-constexpr int32_t COLOR_FormatYUV411PackedPlanar = 18;
-constexpr int32_t COLOR_FormatYUV411Planar = 17;
-constexpr int32_t COLOR_FormatYUV420Flexible = 0x7F420888;
-constexpr int32_t COLOR_FormatYUV420PackedPlanar = 20;
-constexpr int32_t COLOR_FormatYUV420PackedSemiPlanar = 39;
-constexpr int32_t COLOR_FormatYUV420Planar = 19;
-constexpr int32_t COLOR_FormatYUV420SemiPlanar = 21;
-constexpr int32_t COLOR_FormatYUV422Flexible = 0x7F422888;
-constexpr int32_t COLOR_FormatYUV422PackedPlanar = 23;
-constexpr int32_t COLOR_FormatYUV422PackedSemiPlanar = 40;
-constexpr int32_t COLOR_FormatYUV422Planar = 22;
-constexpr int32_t COLOR_FormatYUV422SemiPlanar = 24;
-constexpr int32_t COLOR_FormatYUV444Flexible = 0x7F444888;
-constexpr int32_t COLOR_FormatYUV444Interleaved = 29;
-constexpr int32_t COLOR_FormatYUVP010 = 54;
-constexpr int32_t COLOR_QCOM_FormatYUV420SemiPlanar = 0x7fa30c00;
-constexpr int32_t COLOR_TI_FormatYUV420PackedSemiPlanar = 0x7f000100;
+inline constexpr int32_t COLOR_Format12bitRGB444 = 3;
+inline constexpr int32_t COLOR_Format16bitARGB1555 = 5;
+inline constexpr int32_t COLOR_Format16bitARGB4444 = 4;
+inline constexpr int32_t COLOR_Format16bitBGR565 = 7;
+inline constexpr int32_t COLOR_Format16bitRGB565 = 6;
+inline constexpr int32_t COLOR_Format18bitARGB1665 = 9;
+inline constexpr int32_t COLOR_Format18BitBGR666 = 41;
+inline constexpr int32_t COLOR_Format18bitRGB666 = 8;
+inline constexpr int32_t COLOR_Format19bitARGB1666 = 10;
+inline constexpr int32_t COLOR_Format24BitABGR6666 = 43;
+inline constexpr int32_t COLOR_Format24bitARGB1887 = 13;
+inline constexpr int32_t COLOR_Format24BitARGB6666 = 42;
+inline constexpr int32_t COLOR_Format24bitBGR888 = 12;
+inline constexpr int32_t COLOR_Format24bitRGB888 = 11;
+inline constexpr int32_t COLOR_Format25bitARGB1888 = 14;
+inline constexpr int32_t COLOR_Format32bitABGR2101010 = 0x7F00AAA2;
+inline constexpr int32_t COLOR_Format32bitABGR8888 = 0x7F00A000;
+inline constexpr int32_t COLOR_Format32bitARGB8888 = 16;
+inline constexpr int32_t COLOR_Format32bitBGRA8888 = 15;
+inline constexpr int32_t COLOR_Format64bitABGRFloat = 0x7F000F16;
+inline constexpr int32_t COLOR_Format8bitRGB332 = 2;
+inline constexpr int32_t COLOR_FormatCbYCrY = 27;
+inline constexpr int32_t COLOR_FormatCrYCbY = 28;
+inline constexpr int32_t COLOR_FormatL16 = 36;
+inline constexpr int32_t COLOR_FormatL2 = 33;
+inline constexpr int32_t COLOR_FormatL24 = 37;
+inline constexpr int32_t COLOR_FormatL32 = 38;
+inline constexpr int32_t COLOR_FormatL4 = 34;
+inline constexpr int32_t COLOR_FormatL8 = 35;
+inline constexpr int32_t COLOR_FormatMonochrome = 1;
+inline constexpr int32_t COLOR_FormatRawBayer10bit = 31;
+inline constexpr int32_t COLOR_FormatRawBayer8bit = 30;
+inline constexpr int32_t COLOR_FormatRawBayer8bitcompressed = 32;
+inline constexpr int32_t COLOR_FormatRGBAFlexible = 0x7F36A888;
+inline constexpr int32_t COLOR_FormatRGBFlexible = 0x7F36B888;
+inline constexpr int32_t COLOR_FormatSurface = 0x7F000789;
+inline constexpr int32_t COLOR_FormatYCbYCr = 25;
+inline constexpr int32_t COLOR_FormatYCrYCb = 26;
+inline constexpr int32_t COLOR_FormatYUV411PackedPlanar = 18;
+inline constexpr int32_t COLOR_FormatYUV411Planar = 17;
+inline constexpr int32_t COLOR_FormatYUV420Flexible = 0x7F420888;
+inline constexpr int32_t COLOR_FormatYUV420PackedPlanar = 20;
+inline constexpr int32_t COLOR_FormatYUV420PackedSemiPlanar = 39;
+inline constexpr int32_t COLOR_FormatYUV420Planar = 19;
+inline constexpr int32_t COLOR_FormatYUV420SemiPlanar = 21;
+inline constexpr int32_t COLOR_FormatYUV422Flexible = 0x7F422888;
+inline constexpr int32_t COLOR_FormatYUV422PackedPlanar = 23;
+inline constexpr int32_t COLOR_FormatYUV422PackedSemiPlanar = 40;
+inline constexpr int32_t COLOR_FormatYUV422Planar = 22;
+inline constexpr int32_t COLOR_FormatYUV422SemiPlanar = 24;
+inline constexpr int32_t COLOR_FormatYUV444Flexible = 0x7F444888;
+inline constexpr int32_t COLOR_FormatYUV444Interleaved = 29;
+inline constexpr int32_t COLOR_FormatYUVP010 = 54;
+inline constexpr int32_t COLOR_QCOM_FormatYUV420SemiPlanar = 0x7fa30c00;
+inline constexpr int32_t COLOR_TI_FormatYUV420PackedSemiPlanar = 0x7f000100;
inline static const char *asString_ColorFormat(int32_t i, const char *def = "??") {
switch (i) {
@@ -694,199 +694,200 @@
}
}
-constexpr char FEATURE_AdaptivePlayback[] = "adaptive-playback";
-constexpr char FEATURE_EncodingStatistics[] = "encoding-statistics";
-constexpr char FEATURE_IntraRefresh[] = "intra-refresh";
-constexpr char FEATURE_PartialFrame[] = "partial-frame";
-constexpr char FEATURE_QpBounds[] = "qp-bounds";
-constexpr char FEATURE_SecurePlayback[] = "secure-playback";
-constexpr char FEATURE_TunneledPlayback[] = "tunneled-playback";
+inline constexpr char FEATURE_AdaptivePlayback[] = "adaptive-playback";
+inline constexpr char FEATURE_EncodingStatistics[] = "encoding-statistics";
+inline constexpr char FEATURE_IntraRefresh[] = "intra-refresh";
+inline constexpr char FEATURE_PartialFrame[] = "partial-frame";
+inline constexpr char FEATURE_QpBounds[] = "qp-bounds";
+inline constexpr char FEATURE_SecurePlayback[] = "secure-playback";
+inline constexpr char FEATURE_TunneledPlayback[] = "tunneled-playback";
// from MediaFormat.java
-constexpr char MIMETYPE_VIDEO_VP8[] = "video/x-vnd.on2.vp8";
-constexpr char MIMETYPE_VIDEO_VP9[] = "video/x-vnd.on2.vp9";
-constexpr char MIMETYPE_VIDEO_AV1[] = "video/av01";
-constexpr char MIMETYPE_VIDEO_AVC[] = "video/avc";
-constexpr char MIMETYPE_VIDEO_HEVC[] = "video/hevc";
-constexpr char MIMETYPE_VIDEO_MPEG4[] = "video/mp4v-es";
-constexpr char MIMETYPE_VIDEO_H263[] = "video/3gpp";
-constexpr char MIMETYPE_VIDEO_MPEG2[] = "video/mpeg2";
-constexpr char MIMETYPE_VIDEO_RAW[] = "video/raw";
-constexpr char MIMETYPE_VIDEO_DOLBY_VISION[] = "video/dolby-vision";
-constexpr char MIMETYPE_VIDEO_SCRAMBLED[] = "video/scrambled";
+inline constexpr char MIMETYPE_VIDEO_VP8[] = "video/x-vnd.on2.vp8";
+inline constexpr char MIMETYPE_VIDEO_VP9[] = "video/x-vnd.on2.vp9";
+inline constexpr char MIMETYPE_VIDEO_AV1[] = "video/av01";
+inline constexpr char MIMETYPE_VIDEO_AVC[] = "video/avc";
+inline constexpr char MIMETYPE_VIDEO_HEVC[] = "video/hevc";
+inline constexpr char MIMETYPE_VIDEO_MPEG4[] = "video/mp4v-es";
+inline constexpr char MIMETYPE_VIDEO_H263[] = "video/3gpp";
+inline constexpr char MIMETYPE_VIDEO_MPEG2[] = "video/mpeg2";
+inline constexpr char MIMETYPE_VIDEO_RAW[] = "video/raw";
+inline constexpr char MIMETYPE_VIDEO_DOLBY_VISION[] = "video/dolby-vision";
+inline constexpr char MIMETYPE_VIDEO_SCRAMBLED[] = "video/scrambled";
-constexpr char MIMETYPE_AUDIO_AMR_NB[] = "audio/3gpp";
-constexpr char MIMETYPE_AUDIO_AMR_WB[] = "audio/amr-wb";
-constexpr char MIMETYPE_AUDIO_MPEG[] = "audio/mpeg";
-constexpr char MIMETYPE_AUDIO_AAC[] = "audio/mp4a-latm";
-constexpr char MIMETYPE_AUDIO_QCELP[] = "audio/qcelp";
-constexpr char MIMETYPE_AUDIO_VORBIS[] = "audio/vorbis";
-constexpr char MIMETYPE_AUDIO_OPUS[] = "audio/opus";
-constexpr char MIMETYPE_AUDIO_G711_ALAW[] = "audio/g711-alaw";
-constexpr char MIMETYPE_AUDIO_G711_MLAW[] = "audio/g711-mlaw";
-constexpr char MIMETYPE_AUDIO_RAW[] = "audio/raw";
-constexpr char MIMETYPE_AUDIO_FLAC[] = "audio/flac";
-constexpr char MIMETYPE_AUDIO_MSGSM[] = "audio/gsm";
-constexpr char MIMETYPE_AUDIO_AC3[] = "audio/ac3";
-constexpr char MIMETYPE_AUDIO_EAC3[] = "audio/eac3";
-constexpr char MIMETYPE_AUDIO_SCRAMBLED[] = "audio/scrambled";
+inline constexpr char MIMETYPE_AUDIO_AMR_NB[] = "audio/3gpp";
+inline constexpr char MIMETYPE_AUDIO_AMR_WB[] = "audio/amr-wb";
+inline constexpr char MIMETYPE_AUDIO_MPEG[] = "audio/mpeg";
+inline constexpr char MIMETYPE_AUDIO_AAC[] = "audio/mp4a-latm";
+inline constexpr char MIMETYPE_AUDIO_QCELP[] = "audio/qcelp";
+inline constexpr char MIMETYPE_AUDIO_VORBIS[] = "audio/vorbis";
+inline constexpr char MIMETYPE_AUDIO_OPUS[] = "audio/opus";
+inline constexpr char MIMETYPE_AUDIO_G711_ALAW[] = "audio/g711-alaw";
+inline constexpr char MIMETYPE_AUDIO_G711_MLAW[] = "audio/g711-mlaw";
+inline constexpr char MIMETYPE_AUDIO_RAW[] = "audio/raw";
+inline constexpr char MIMETYPE_AUDIO_FLAC[] = "audio/flac";
+inline constexpr char MIMETYPE_AUDIO_MSGSM[] = "audio/gsm";
+inline constexpr char MIMETYPE_AUDIO_AC3[] = "audio/ac3";
+inline constexpr char MIMETYPE_AUDIO_EAC3[] = "audio/eac3";
+inline constexpr char MIMETYPE_AUDIO_SCRAMBLED[] = "audio/scrambled";
-constexpr char MIMETYPE_IMAGE_ANDROID_HEIC[] = "image/vnd.android.heic";
+inline constexpr char MIMETYPE_IMAGE_ANDROID_HEIC[] = "image/vnd.android.heic";
-constexpr char MIMETYPE_TEXT_CEA_608[] = "text/cea-608";
-constexpr char MIMETYPE_TEXT_CEA_708[] = "text/cea-708";
-constexpr char MIMETYPE_TEXT_SUBRIP[] = "application/x-subrip";
-constexpr char MIMETYPE_TEXT_VTT[] = "text/vtt";
+inline constexpr char MIMETYPE_TEXT_CEA_608[] = "text/cea-608";
+inline constexpr char MIMETYPE_TEXT_CEA_708[] = "text/cea-708";
+inline constexpr char MIMETYPE_TEXT_SUBRIP[] = "application/x-subrip";
+inline constexpr char MIMETYPE_TEXT_VTT[] = "text/vtt";
-constexpr int32_t COLOR_RANGE_FULL = 1;
-constexpr int32_t COLOR_RANGE_LIMITED = 2;
-constexpr int32_t COLOR_STANDARD_BT2020 = 6;
-constexpr int32_t COLOR_STANDARD_BT601_NTSC = 4;
-constexpr int32_t COLOR_STANDARD_BT601_PAL = 2;
-constexpr int32_t COLOR_STANDARD_BT709 = 1;
-constexpr int32_t COLOR_TRANSFER_HLG = 7;
-constexpr int32_t COLOR_TRANSFER_LINEAR = 1;
-constexpr int32_t COLOR_TRANSFER_SDR_VIDEO = 3;
-constexpr int32_t COLOR_TRANSFER_ST2084 = 6;
+inline constexpr int32_t COLOR_RANGE_FULL = 1;
+inline constexpr int32_t COLOR_RANGE_LIMITED = 2;
+inline constexpr int32_t COLOR_STANDARD_BT2020 = 6;
+inline constexpr int32_t COLOR_STANDARD_BT601_NTSC = 4;
+inline constexpr int32_t COLOR_STANDARD_BT601_PAL = 2;
+inline constexpr int32_t COLOR_STANDARD_BT709 = 1;
+inline constexpr int32_t COLOR_TRANSFER_HLG = 7;
+inline constexpr int32_t COLOR_TRANSFER_LINEAR = 1;
+inline constexpr int32_t COLOR_TRANSFER_SDR_VIDEO = 3;
+inline constexpr int32_t COLOR_TRANSFER_ST2084 = 6;
-constexpr int32_t PICTURE_TYPE_I = 1;
-constexpr int32_t PICTURE_TYPE_P = 2;
-constexpr int32_t PICTURE_TYPE_B = 3;
-constexpr int32_t PICTURE_TYPE_UNKNOWN = 0;
+inline constexpr int32_t PICTURE_TYPE_I = 1;
+inline constexpr int32_t PICTURE_TYPE_P = 2;
+inline constexpr int32_t PICTURE_TYPE_B = 3;
+inline constexpr int32_t PICTURE_TYPE_UNKNOWN = 0;
-constexpr int32_t VIDEO_ENCODING_STATISTICS_LEVEL_1 = 1;
-constexpr int32_t VIDEO_ENCODING_STATISTICS_LEVEL_NONE = 0;
+inline constexpr int32_t VIDEO_ENCODING_STATISTICS_LEVEL_1 = 1;
+inline constexpr int32_t VIDEO_ENCODING_STATISTICS_LEVEL_NONE = 0;
-constexpr char KEY_AAC_DRC_ALBUM_MODE[] = "aac-drc-album-mode";
-constexpr char KEY_AAC_DRC_ATTENUATION_FACTOR[] = "aac-drc-cut-level";
-constexpr char KEY_AAC_DRC_BOOST_FACTOR[] = "aac-drc-boost-level";
-constexpr char KEY_AAC_DRC_EFFECT_TYPE[] = "aac-drc-effect-type";
-constexpr char KEY_AAC_DRC_HEAVY_COMPRESSION[] = "aac-drc-heavy-compression";
-constexpr char KEY_AAC_DRC_OUTPUT_LOUDNESS[] = "aac-drc-output-loudness";
-constexpr char KEY_AAC_DRC_TARGET_REFERENCE_LEVEL[] = "aac-target-ref-level";
-constexpr char KEY_AAC_ENCODED_TARGET_LEVEL[] = "aac-encoded-target-level";
-constexpr char KEY_AAC_MAX_OUTPUT_CHANNEL_COUNT[] = "aac-max-output-channel_count";
-constexpr char KEY_AAC_PROFILE[] = "aac-profile";
-constexpr char KEY_AAC_SBR_MODE[] = "aac-sbr-mode";
-constexpr char KEY_ALLOW_FRAME_DROP[] = "allow-frame-drop";
-constexpr char KEY_AUDIO_SESSION_ID[] = "audio-session-id";
-constexpr char KEY_BIT_RATE[] = "bitrate";
-constexpr char KEY_BITRATE_MODE[] = "bitrate-mode";
-constexpr char KEY_CA_SESSION_ID[] = "ca-session-id";
-constexpr char KEY_CA_SYSTEM_ID[] = "ca-system-id";
-constexpr char KEY_CA_PRIVATE_DATA[] = "ca-private-data";
-constexpr char KEY_CAPTURE_RATE[] = "capture-rate";
-constexpr char KEY_CHANNEL_COUNT[] = "channel-count"; // value N, eq to range 1..N
-constexpr char KEY_CHANNEL_MASK[] = "channel-mask";
-constexpr char KEY_COLOR_FORMAT[] = "color-format";
-constexpr char KEY_COLOR_RANGE[] = "color-range";
-constexpr char KEY_COLOR_STANDARD[] = "color-standard";
-constexpr char KEY_COLOR_TRANSFER[] = "color-transfer";
-constexpr char KEY_COMPLEXITY[] = "complexity";
-constexpr char KEY_CREATE_INPUT_SURFACE_SUSPENDED[] = "create-input-buffers-suspended";
-constexpr char KEY_DURATION[] = "durationUs";
-constexpr char KEY_FEATURE_[] = "feature-";
-constexpr char KEY_FLAC_COMPRESSION_LEVEL[] = "flac-compression-level";
-constexpr char KEY_FRAME_RATE[] = "frame-rate";
-constexpr char KEY_GRID_COLUMNS[] = "grid-cols";
-constexpr char KEY_GRID_ROWS[] = "grid-rows";
-constexpr char KEY_HDR_STATIC_INFO[] = "hdr-static-info";
-constexpr char KEY_HDR10_PLUS_INFO[] = "hdr10-plus-info";
-constexpr char KEY_HEIGHT[] = "height";
-constexpr char KEY_I_FRAME_INTERVAL[] = "i-frame-interval";
-constexpr char KEY_INTRA_REFRESH_PERIOD[] = "intra-refresh-period";
-constexpr char KEY_IS_ADTS[] = "is-adts";
-constexpr char KEY_IS_AUTOSELECT[] = "is-autoselect";
-constexpr char KEY_IS_DEFAULT[] = "is-default";
-constexpr char KEY_IS_FORCED_SUBTITLE[] = "is-forced-subtitle";
-constexpr char KEY_IS_TIMED_TEXT[] = "is-timed-text";
-constexpr char KEY_LANGUAGE[] = "language";
-constexpr char KEY_LATENCY[] = "latency";
-constexpr char KEY_LEVEL[] = "level";
-constexpr char KEY_LOW_LATENCY[] = "low-latency";
-constexpr char KEY_MAX_B_FRAMES[] = "max-bframes";
-constexpr char KEY_MAX_BIT_RATE[] = "max-bitrate";
-constexpr char KEY_MAX_FPS_TO_ENCODER[] = "max-fps-to-encoder";
-constexpr char KEY_MAX_HEIGHT[] = "max-height";
-constexpr char KEY_MAX_INPUT_SIZE[] = "max-input-size";
-constexpr char KEY_MAX_OUTPUT_CHANNEL_COUNT[] = "max-output-channel-count";
-constexpr char KEY_MAX_PTS_GAP_TO_ENCODER[] = "max-pts-gap-to-encoder";
-constexpr char KEY_MAX_WIDTH[] = "max-width";
-constexpr char KEY_MIME[] = "mime";
-constexpr char KEY_OPERATING_RATE[] = "operating-rate";
-constexpr char KEY_OUTPUT_REORDER_DEPTH[] = "output-reorder-depth";
-constexpr char KEY_PCM_ENCODING[] = "pcm-encoding";
-constexpr char KEY_PICTURE_TYPE[] = "picture-type";
-constexpr char KEY_PIXEL_ASPECT_RATIO_HEIGHT[] = "sar-height";
-constexpr char KEY_PIXEL_ASPECT_RATIO_WIDTH[] = "sar-width";
-constexpr char KEY_PREPEND_HEADER_TO_SYNC_FRAMES[] = "prepend-sps-pps-to-idr-frames";
-constexpr char KEY_PRIORITY[] = "priority";
-constexpr char KEY_PROFILE[] = "profile";
-constexpr char KEY_PUSH_BLANK_BUFFERS_ON_STOP[] = "push-blank-buffers-on-shutdown";
-constexpr char KEY_QUALITY[] = "quality";
-constexpr char KEY_REPEAT_PREVIOUS_FRAME_AFTER[] = "repeat-previous-frame-after";
-constexpr char KEY_ROTATION[] = "rotation-degrees";
-constexpr char KEY_SAMPLE_RATE[] = "sample-rate";
-constexpr char KEY_SLICE_HEIGHT[] = "slice-height";
-constexpr char KEY_STRIDE[] = "stride";
-constexpr char KEY_TEMPORAL_LAYERING[] = "ts-schema";
-constexpr char KEY_TILE_HEIGHT[] = "tile-height";
-constexpr char KEY_TILE_WIDTH[] = "tile-width";
-constexpr char KEY_TRACK_ID[] = "track-id";
-constexpr char KEY_VIDEO_ENCODING_STATISTICS_LEVEL[] = "video-encoding-statistics-level";
-constexpr char KEY_VIDEO_QP_AVERAGE[] = "video-qp-average";
-constexpr char KEY_VIDEO_QP_B_MAX[] = "video-qp-b-max";
-constexpr char KEY_VIDEO_QP_B_MIN[] = "video-qp-b-min";
-constexpr char KEY_VIDEO_QP_I_MAX[] = "video-qp-i-max";
-constexpr char KEY_VIDEO_QP_I_MIN[] = "video-qp-i-min";
-constexpr char KEY_VIDEO_QP_MAX[] = "video-qp-max";
-constexpr char KEY_VIDEO_QP_MIN[] = "video-qp-min";
-constexpr char KEY_VIDEO_QP_P_MAX[] = "video-qp-p-max";
-constexpr char KEY_VIDEO_QP_P_MIN[] = "video-qp-p-min";
-constexpr char KEY_WIDTH[] = "width";
+inline constexpr char KEY_AAC_DRC_ALBUM_MODE[] = "aac-drc-album-mode";
+inline constexpr char KEY_AAC_DRC_ATTENUATION_FACTOR[] = "aac-drc-cut-level";
+inline constexpr char KEY_AAC_DRC_BOOST_FACTOR[] = "aac-drc-boost-level";
+inline constexpr char KEY_AAC_DRC_EFFECT_TYPE[] = "aac-drc-effect-type";
+inline constexpr char KEY_AAC_DRC_HEAVY_COMPRESSION[] = "aac-drc-heavy-compression";
+inline constexpr char KEY_AAC_DRC_OUTPUT_LOUDNESS[] = "aac-drc-output-loudness";
+inline constexpr char KEY_AAC_DRC_TARGET_REFERENCE_LEVEL[] = "aac-target-ref-level";
+inline constexpr char KEY_AAC_ENCODED_TARGET_LEVEL[] = "aac-encoded-target-level";
+inline constexpr char KEY_AAC_MAX_OUTPUT_CHANNEL_COUNT[] = "aac-max-output-channel_count";
+inline constexpr char KEY_AAC_PROFILE[] = "aac-profile";
+inline constexpr char KEY_AAC_SBR_MODE[] = "aac-sbr-mode";
+inline constexpr char KEY_ALLOW_FRAME_DROP[] = "allow-frame-drop";
+inline constexpr char KEY_AUDIO_SESSION_ID[] = "audio-session-id";
+inline constexpr char KEY_BIT_RATE[] = "bitrate";
+inline constexpr char KEY_BITRATE_MODE[] = "bitrate-mode";
+inline constexpr char KEY_CA_SESSION_ID[] = "ca-session-id";
+inline constexpr char KEY_CA_SYSTEM_ID[] = "ca-system-id";
+inline constexpr char KEY_CA_PRIVATE_DATA[] = "ca-private-data";
+inline constexpr char KEY_CAPTURE_RATE[] = "capture-rate";
+inline constexpr char KEY_CHANNEL_COUNT[] = "channel-count"; // value N, eq to range 1..N
+inline constexpr char KEY_CHANNEL_MASK[] = "channel-mask";
+inline constexpr char KEY_COLOR_FORMAT[] = "color-format";
+inline constexpr char KEY_COLOR_RANGE[] = "color-range";
+inline constexpr char KEY_COLOR_STANDARD[] = "color-standard";
+inline constexpr char KEY_COLOR_TRANSFER[] = "color-transfer";
+inline constexpr char KEY_COMPLEXITY[] = "complexity";
+inline constexpr char KEY_CREATE_INPUT_SURFACE_SUSPENDED[] = "create-input-buffers-suspended";
+inline constexpr char KEY_DURATION[] = "durationUs";
+inline constexpr char KEY_FEATURE_[] = "feature-";
+inline constexpr char KEY_FLAC_COMPRESSION_LEVEL[] = "flac-compression-level";
+inline constexpr char KEY_FRAME_RATE[] = "frame-rate";
+inline constexpr char KEY_GRID_COLUMNS[] = "grid-cols";
+inline constexpr char KEY_GRID_ROWS[] = "grid-rows";
+inline constexpr char KEY_HDR_STATIC_INFO[] = "hdr-static-info";
+inline constexpr char KEY_HDR10_PLUS_INFO[] = "hdr10-plus-info";
+inline constexpr char KEY_HEIGHT[] = "height";
+inline constexpr char KEY_I_FRAME_INTERVAL[] = "i-frame-interval";
+inline constexpr char KEY_INTRA_REFRESH_PERIOD[] = "intra-refresh-period";
+inline constexpr char KEY_IS_ADTS[] = "is-adts";
+inline constexpr char KEY_IS_AUTOSELECT[] = "is-autoselect";
+inline constexpr char KEY_IS_DEFAULT[] = "is-default";
+inline constexpr char KEY_IS_FORCED_SUBTITLE[] = "is-forced-subtitle";
+inline constexpr char KEY_IS_TIMED_TEXT[] = "is-timed-text";
+inline constexpr char KEY_LANGUAGE[] = "language";
+inline constexpr char KEY_LATENCY[] = "latency";
+inline constexpr char KEY_LEVEL[] = "level";
+inline constexpr char KEY_LOW_LATENCY[] = "low-latency";
+inline constexpr char KEY_MAX_B_FRAMES[] = "max-bframes";
+inline constexpr char KEY_MAX_BIT_RATE[] = "max-bitrate";
+inline constexpr char KEY_MAX_FPS_TO_ENCODER[] = "max-fps-to-encoder";
+inline constexpr char KEY_MAX_HEIGHT[] = "max-height";
+inline constexpr char KEY_MAX_INPUT_SIZE[] = "max-input-size";
+inline constexpr char KEY_MAX_OUTPUT_CHANNEL_COUNT[] = "max-output-channel-count";
+inline constexpr char KEY_MAX_PTS_GAP_TO_ENCODER[] = "max-pts-gap-to-encoder";
+inline constexpr char KEY_MAX_WIDTH[] = "max-width";
+inline constexpr char KEY_MIME[] = "mime";
+inline constexpr char KEY_OPERATING_RATE[] = "operating-rate";
+inline constexpr char KEY_OUTPUT_REORDER_DEPTH[] = "output-reorder-depth";
+inline constexpr char KEY_PCM_ENCODING[] = "pcm-encoding";
+inline constexpr char KEY_PICTURE_TYPE[] = "picture-type";
+inline constexpr char KEY_PIXEL_ASPECT_RATIO_HEIGHT[] = "sar-height";
+inline constexpr char KEY_PIXEL_ASPECT_RATIO_WIDTH[] = "sar-width";
+inline constexpr char KEY_PREPEND_HEADER_TO_SYNC_FRAMES[] = "prepend-sps-pps-to-idr-frames";
+inline constexpr char KEY_PRIORITY[] = "priority";
+inline constexpr char KEY_PROFILE[] = "profile";
+inline constexpr char KEY_PUSH_BLANK_BUFFERS_ON_STOP[] = "push-blank-buffers-on-shutdown";
+inline constexpr char KEY_QUALITY[] = "quality";
+inline constexpr char KEY_REPEAT_PREVIOUS_FRAME_AFTER[] = "repeat-previous-frame-after";
+inline constexpr char KEY_ROTATION[] = "rotation-degrees";
+inline constexpr char KEY_SAMPLE_RATE[] = "sample-rate";
+inline constexpr char KEY_SLICE_HEIGHT[] = "slice-height";
+inline constexpr char KEY_STRIDE[] = "stride";
+inline constexpr char KEY_TEMPORAL_LAYERING[] = "ts-schema";
+inline constexpr char KEY_TILE_HEIGHT[] = "tile-height";
+inline constexpr char KEY_TILE_WIDTH[] = "tile-width";
+inline constexpr char KEY_TRACK_ID[] = "track-id";
+inline constexpr char KEY_VIDEO_ENCODING_STATISTICS_LEVEL[] = "video-encoding-statistics-level";
+inline constexpr char KEY_VIDEO_QP_AVERAGE[] = "video-qp-average";
+inline constexpr char KEY_VIDEO_QP_B_MAX[] = "video-qp-b-max";
+inline constexpr char KEY_VIDEO_QP_B_MIN[] = "video-qp-b-min";
+inline constexpr char KEY_VIDEO_QP_I_MAX[] = "video-qp-i-max";
+inline constexpr char KEY_VIDEO_QP_I_MIN[] = "video-qp-i-min";
+inline constexpr char KEY_VIDEO_QP_MAX[] = "video-qp-max";
+inline constexpr char KEY_VIDEO_QP_MIN[] = "video-qp-min";
+inline constexpr char KEY_VIDEO_QP_P_MAX[] = "video-qp-p-max";
+inline constexpr char KEY_VIDEO_QP_P_MIN[] = "video-qp-p-min";
+inline constexpr char KEY_WIDTH[] = "width";
// from MediaCodec.java
-constexpr int32_t ERROR_INSUFFICIENT_OUTPUT_PROTECTION = 4;
-constexpr int32_t ERROR_INSUFFICIENT_RESOURCE = 1100;
-constexpr int32_t ERROR_KEY_EXPIRED = 2;
-constexpr int32_t ERROR_NO_KEY = 1;
-constexpr int32_t ERROR_RECLAIMED = 1101;
-constexpr int32_t ERROR_RESOURCE_BUSY = 3;
-constexpr int32_t ERROR_SESSION_NOT_OPENED = 5;
-constexpr int32_t ERROR_UNSUPPORTED_OPERATION = 6;
-constexpr char CODEC[] = "android.media.mediacodec.codec";
-constexpr char ENCODER[] = "android.media.mediacodec.encoder";
-constexpr char HEIGHT[] = "android.media.mediacodec.height";
-constexpr char MIME_TYPE[] = "android.media.mediacodec.mime";
-constexpr char MODE[] = "android.media.mediacodec.mode";
-constexpr char MODE_AUDIO[] = "audio";
-constexpr char MODE_VIDEO[] = "video";
-constexpr char ROTATION[] = "android.media.mediacodec.rotation";
-constexpr char SECURE[] = "android.media.mediacodec.secure";
-constexpr char WIDTH[] = "android.media.mediacodec.width";
+inline constexpr int32_t ERROR_INSUFFICIENT_OUTPUT_PROTECTION = 4;
+inline constexpr int32_t ERROR_INSUFFICIENT_RESOURCE = 1100;
+inline constexpr int32_t ERROR_KEY_EXPIRED = 2;
+inline constexpr int32_t ERROR_NO_KEY = 1;
+inline constexpr int32_t ERROR_RECLAIMED = 1101;
+inline constexpr int32_t ERROR_RESOURCE_BUSY = 3;
+inline constexpr int32_t ERROR_SESSION_NOT_OPENED = 5;
+inline constexpr int32_t ERROR_UNSUPPORTED_OPERATION = 6;
+inline constexpr char CODEC[] = "android.media.mediacodec.codec";
+inline constexpr char ENCODER[] = "android.media.mediacodec.encoder";
+inline constexpr char HEIGHT[] = "android.media.mediacodec.height";
+inline constexpr char MIME_TYPE[] = "android.media.mediacodec.mime";
+inline constexpr char MODE[] = "android.media.mediacodec.mode";
+inline constexpr char MODE_AUDIO[] = "audio";
+inline constexpr char MODE_VIDEO[] = "video";
+inline constexpr char ROTATION[] = "android.media.mediacodec.rotation";
+inline constexpr char SECURE[] = "android.media.mediacodec.secure";
+inline constexpr char WIDTH[] = "android.media.mediacodec.width";
-constexpr int32_t BUFFER_FLAG_CODEC_CONFIG = 2;
-constexpr int32_t BUFFER_FLAG_END_OF_STREAM = 4;
-constexpr int32_t BUFFER_FLAG_KEY_FRAME = 1;
-constexpr int32_t BUFFER_FLAG_PARTIAL_FRAME = 8;
-constexpr int32_t BUFFER_FLAG_DECODE_ONLY = 32;
-constexpr int32_t BUFFER_FLAG_SYNC_FRAME = 1;
-constexpr int32_t CONFIGURE_FLAG_ENCODE = 1;
-constexpr int32_t CONFIGURE_FLAG_USE_BLOCK_MODEL = 2;
-constexpr int32_t CRYPTO_MODE_AES_CBC = 2;
-constexpr int32_t CRYPTO_MODE_AES_CTR = 1;
-constexpr int32_t CRYPTO_MODE_UNENCRYPTED = 0;
-constexpr int32_t INFO_OUTPUT_BUFFERS_CHANGED = -3;
-constexpr int32_t INFO_OUTPUT_FORMAT_CHANGED = -2;
-constexpr int32_t INFO_TRY_AGAIN_LATER = -1;
-constexpr int32_t VIDEO_SCALING_MODE_SCALE_TO_FIT = 1;
-constexpr int32_t VIDEO_SCALING_MODE_SCALE_TO_FIT_WITH_CROPPING = 2;
-constexpr char PARAMETER_KEY_OFFSET_TIME[] = "time-offset-us";
-constexpr char PARAMETER_KEY_REQUEST_SYNC_FRAME[] = "request-sync";
-constexpr char PARAMETER_KEY_SUSPEND[] = "drop-input-frames";
-constexpr char PARAMETER_KEY_SUSPEND_TIME[] = "drop-start-time-us";
-constexpr char PARAMETER_KEY_TUNNEL_PEEK[] = "tunnel-peek";
-constexpr char PARAMETER_KEY_VIDEO_BITRATE[] = "video-bitrate";
+inline constexpr int32_t BUFFER_FLAG_CODEC_CONFIG = 2;
+inline constexpr int32_t BUFFER_FLAG_DECODE_ONLY = 32;
+inline constexpr int32_t BUFFER_FLAG_END_OF_STREAM = 4;
+inline constexpr int32_t BUFFER_FLAG_KEY_FRAME = 1;
+inline constexpr int32_t BUFFER_FLAG_MUXER_DATA = 16;
+inline constexpr int32_t BUFFER_FLAG_PARTIAL_FRAME = 8;
+inline constexpr int32_t BUFFER_FLAG_SYNC_FRAME = 1;
+inline constexpr int32_t CONFIGURE_FLAG_ENCODE = 1;
+inline constexpr int32_t CONFIGURE_FLAG_USE_BLOCK_MODEL = 2;
+inline constexpr int32_t CRYPTO_MODE_AES_CBC = 2;
+inline constexpr int32_t CRYPTO_MODE_AES_CTR = 1;
+inline constexpr int32_t CRYPTO_MODE_UNENCRYPTED = 0;
+inline constexpr int32_t INFO_OUTPUT_BUFFERS_CHANGED = -3;
+inline constexpr int32_t INFO_OUTPUT_FORMAT_CHANGED = -2;
+inline constexpr int32_t INFO_TRY_AGAIN_LATER = -1;
+inline constexpr int32_t VIDEO_SCALING_MODE_SCALE_TO_FIT = 1;
+inline constexpr int32_t VIDEO_SCALING_MODE_SCALE_TO_FIT_WITH_CROPPING = 2;
+inline constexpr char PARAMETER_KEY_OFFSET_TIME[] = "time-offset-us";
+inline constexpr char PARAMETER_KEY_REQUEST_SYNC_FRAME[] = "request-sync";
+inline constexpr char PARAMETER_KEY_SUSPEND[] = "drop-input-frames";
+inline constexpr char PARAMETER_KEY_SUSPEND_TIME[] = "drop-start-time-us";
+inline constexpr char PARAMETER_KEY_TUNNEL_PEEK[] = "tunnel-peek";
+inline constexpr char PARAMETER_KEY_VIDEO_BITRATE[] = "video-bitrate";
}
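
Editor's note: switching these header-scope constants from plain constexpr to inline constexpr gives each one a single program-wide definition instead of an internal-linkage copy in every translation unit that includes the header. A minimal sketch of the difference, using a hypothetical header and constant name rather than anything from the hunk above:

    // sketch.h -- hypothetical header, not part of the change above
    #pragma once
    #include <cstdint>

    // Plain 'constexpr' at namespace scope has internal linkage: every .cpp that
    // includes this header gets its own copy, so taking the address in two files
    // yields two distinct objects (and can bloat or confuse ODR-sensitive code).
    // constexpr int32_t kExampleLevel = 0x10;

    // 'inline constexpr' (C++17) keeps the value usable in constant expressions
    // but gives the variable one shared definition across all translation units,
    // which is what the header change above relies on.
    inline constexpr int32_t kExampleLevel = 0x10;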
diff --git a/media/libstagefright/include/media/stagefright/MediaCodecList.h b/media/libstagefright/include/media/stagefright/MediaCodecList.h
index 56c6a45..08a5324 100644
--- a/media/libstagefright/include/media/stagefright/MediaCodecList.h
+++ b/media/libstagefright/include/media/stagefright/MediaCodecList.h
@@ -114,19 +114,10 @@
MediaCodecList(const MediaCodecList&) = delete;
MediaCodecList& operator=(const MediaCodecList&) = delete;
- static void findMatchingCodecs(
- const char *mime,
- bool createEncoder,
- uint32_t flags,
- const sp<AMessage> &format,
- Vector<AString> *matchingCodecs,
- bool checkProfile);
-
static bool codecHandlesFormat(
const char *mime,
const sp<MediaCodecInfo> &info,
- const sp<AMessage> &format,
- bool checkProfile);
+ const sp<AMessage> &format);
};
} // namespace android
diff --git a/media/libstagefright/include/media/stagefright/MetaDataBase.h b/media/libstagefright/include/media/stagefright/MetaDataBase.h
index 2ca0e33..a7d2eb9 100644
--- a/media/libstagefright/include/media/stagefright/MetaDataBase.h
+++ b/media/libstagefright/include/media/stagefright/MetaDataBase.h
@@ -117,6 +117,12 @@
kKeyVideoProfile = 'vprf', // int32_t
kKeyVideoLevel = 'vlev', // int32_t
+ // audio profile and level
+ // The codec framework doesn't distinguish between video and audio profiles,
+ // so there is no need to define a separate key
+ kKeyAudioProfile = 'vprf', // int32_t
+ kKeyAudioLevel = 'vlev', // int32_t
+
kKey2ByteNalLength = '2NAL', // int32_t (bool)
// Identify the file output format for authoring
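
Editor's note: because kKeyAudioProfile and kKeyAudioLevel deliberately reuse the 'vprf'/'vlev' FourCCs, audio and video tracks store profile/level under the same underlying key; the new names only document intent. A hedged sketch of how a caller might use the aliases, assuming the patched MetaDataBase.h and its usual setInt32/findInt32 accessors:

    #include <media/stagefright/MetaDataBase.h>

    // Sketch only: 'meta' is any MetaDataBase-backed metadata object.
    void tagAudioProfile(android::MetaDataBase &meta, int32_t profile, int32_t level) {
        meta.setInt32(android::kKeyAudioProfile, profile);  // same FourCC as kKeyVideoProfile
        meta.setInt32(android::kKeyAudioLevel, level);      // same FourCC as kKeyVideoLevel
    }

    bool readAudioProfile(const android::MetaDataBase &meta, int32_t *profile) {
        // Reading back through the video key would return the same value.
        return meta.findInt32(android::kKeyAudioProfile, profile);
    }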
diff --git a/media/libstagefright/omx/1.0/WGraphicBufferSource.cpp b/media/libstagefright/omx/1.0/WGraphicBufferSource.cpp
index f7bf3ba..f4ccaba 100644
--- a/media/libstagefright/omx/1.0/WGraphicBufferSource.cpp
+++ b/media/libstagefright/omx/1.0/WGraphicBufferSource.cpp
@@ -143,7 +143,7 @@
// use consumer usage bits queried from encoder, but always add
// HW_VIDEO_ENCODER for backward compatibility.
- uint32_t consumerUsage;
+ uint64_t consumerUsage;
void *_params = &consumerUsage;
uint8_t *params = static_cast<uint8_t*>(_params);
fnStatus = UNKNOWN_ERROR;
@@ -155,15 +155,32 @@
outParams.data() + outParams.size(),
params);
});
+
+ // try 64 bit consumer usage first
auto transStatus = omxNode->getParameter(
- static_cast<uint32_t>(OMX_IndexParamConsumerUsageBits),
+ static_cast<uint32_t>(OMX_IndexParamConsumerUsageBits64),
inHidlBytes(&consumerUsage, sizeof(consumerUsage)),
_hidl_cb);
if (!transStatus.isOk()) {
return toStatus(FAILED_TRANSACTION);
}
if (fnStatus != OK) {
- consumerUsage = 0;
+ // try 32 bit consumer usage upon failure
+ uint32_t usage;
+ _params = &usage;
+ params = static_cast<uint8_t*>(_params);
+ transStatus = omxNode->getParameter(
+ static_cast<uint32_t>(OMX_IndexParamConsumerUsageBits),
+ inHidlBytes(&usage, sizeof(usage)),
+ _hidl_cb);
+ if (!transStatus.isOk()) {
+ return toStatus(FAILED_TRANSACTION);
+ }
+ if (fnStatus != OK) {
+ consumerUsage = 0;
+ } else {
+ consumerUsage = usage;
+ }
}
OMX_PARAM_PORTDEFINITIONTYPE def;
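
Editor's note: the fallback above prefers the 64-bit usage index and only retries the legacy 32-bit index when that query fails, zero-extending the narrower value. A self-contained, hedged sketch of the same widen-or-fall-back pattern outside the OMX/HIDL plumbing (the query functions are illustrative stubs, not real OMX calls):

    #include <cstdint>
    #include <optional>

    // Stand-ins for the two parameter queries so the sketch compiles on its own.
    static std::optional<uint64_t> queryUsage64() { return std::nullopt; }
    static std::optional<uint32_t> queryUsage32() { return 0x10u; }

    static uint64_t consumerUsageOrZero() {
        if (auto wide = queryUsage64()) {
            return *wide;                              // prefer the 64-bit parameter
        }
        if (auto narrow = queryUsage32()) {
            return static_cast<uint64_t>(*narrow);     // zero-extend the legacy 32-bit value
        }
        return 0;                                      // neither supported: no extra usage bits
    }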
diff --git a/media/libstagefright/omx/OmxGraphicBufferSource.cpp b/media/libstagefright/omx/OmxGraphicBufferSource.cpp
index 9484046..33481e3 100644
--- a/media/libstagefright/omx/OmxGraphicBufferSource.cpp
+++ b/media/libstagefright/omx/OmxGraphicBufferSource.cpp
@@ -85,7 +85,7 @@
int32_t bufferCount,
uint32_t frameWidth,
uint32_t frameHeight,
- uint32_t consumerUsage) {
+ uint64_t consumerUsage) {
if (omxNode == NULL) {
return BAD_VALUE;
}
diff --git a/media/libstagefright/omx/include/media/stagefright/omx/1.0/Conversion.h b/media/libstagefright/omx/include/media/stagefright/omx/1.0/Conversion.h
index 264c01d..1c3cb4e 100644
--- a/media/libstagefright/omx/include/media/stagefright/omx/1.0/Conversion.h
+++ b/media/libstagefright/omx/include/media/stagefright/omx/1.0/Conversion.h
@@ -425,8 +425,16 @@
t->attr.anwBuffer.stride = graphicBuffer->getStride();
t->attr.anwBuffer.format = static_cast<PixelFormat>(
graphicBuffer->getPixelFormat());
- t->attr.anwBuffer.layerCount = graphicBuffer->getLayerCount();
- t->attr.anwBuffer.usage = graphicBuffer->getUsage();
+ // HACK
+ // anwBuffer.layerCount 8 bytes : GraphicBuffer::layerCount 4 bytes
+ // anwBuffer.usage 4 bytes : GraphicBuffer::usage 8 bytes
+ // We would like to retain the high part of usage in the high part of layerCount
+ uint64_t usage = graphicBuffer->getUsage();
+ uint32_t usageHigh = (usage >> 32);
+ uint32_t usageLow = (0xFFFFFFFF & usage);
+ uint32_t layerLow = graphicBuffer->getLayerCount();
+ t->attr.anwBuffer.layerCount = ((uint64_t(usageHigh) << 32) | layerLow);
+ t->attr.anwBuffer.usage = usageLow;
t->nativeHandle = graphicBuffer->handle;
return t;
}
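
Editor's note: the packing above pushes a 64-bit gralloc usage through HIDL fields sized 8 and 4 bytes by parking the high usage word in the upper half of layerCount. A hedged sketch of the inverse step a receiver would need; the struct is a stand-in for the HIDL attr fields named in the comment above, not a real type:

    #include <cstdint>

    struct AnwBufferAttrSketch {
        uint64_t layerCount;   // low 32 bits: real layer count, high 32 bits: usage high word
        uint32_t usage;        // low 32 bits of the original 64-bit usage
    };

    // Reconstruct the values that were split by the conversion above.
    static void unpack(const AnwBufferAttrSketch &attr, uint64_t *usage, uint32_t *layerCount) {
        const uint32_t usageHigh = static_cast<uint32_t>(attr.layerCount >> 32);
        *layerCount = static_cast<uint32_t>(attr.layerCount & 0xFFFFFFFF);
        *usage = (static_cast<uint64_t>(usageHigh) << 32) | attr.usage;
    }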
diff --git a/media/libstagefright/omx/include/media/stagefright/omx/OmxGraphicBufferSource.h b/media/libstagefright/omx/include/media/stagefright/omx/OmxGraphicBufferSource.h
index e576d75..a23efac 100644
--- a/media/libstagefright/omx/include/media/stagefright/omx/OmxGraphicBufferSource.h
+++ b/media/libstagefright/omx/include/media/stagefright/omx/OmxGraphicBufferSource.h
@@ -70,7 +70,7 @@
int32_t bufferCount,
uint32_t frameWidth,
uint32_t frameHeight,
- uint32_t consumerUsage);
+ uint64_t consumerUsage);
// Rest of the interface in GraphicBufferSource.
diff --git a/media/libstagefright/tests/mediacodec/MediaCodecTest.cpp b/media/libstagefright/tests/mediacodec/MediaCodecTest.cpp
index a8e64b6..71ddbe5 100644
--- a/media/libstagefright/tests/mediacodec/MediaCodecTest.cpp
+++ b/media/libstagefright/tests/mediacodec/MediaCodecTest.cpp
@@ -62,7 +62,8 @@
size_t offset,
const CryptoPlugin::SubSample *subSamples,
size_t numSubSamples,
- const sp<MediaCodecBuffer> &buffer),
+ const sp<MediaCodecBuffer> &buffer,
+ AString* errorDetailMsg),
(override));
MOCK_METHOD(status_t, renderOutputBuffer,
(const sp<MediaCodecBuffer> &buffer, int64_t timestampNs),
@@ -70,6 +71,7 @@
MOCK_METHOD(status_t, discardBuffer, (const sp<MediaCodecBuffer> &buffer), (override));
MOCK_METHOD(void, getInputBufferArray, (Vector<sp<MediaCodecBuffer>> *array), (override));
MOCK_METHOD(void, getOutputBufferArray, (Vector<sp<MediaCodecBuffer>> *array), (override));
+ MOCK_METHOD(void, pollForRenderedBuffers, (), (override));
};
class MockCodec : public CodecBase {
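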
diff --git a/media/libstagefright/webm/WebmFrameThread.cpp b/media/libstagefright/webm/WebmFrameThread.cpp
index cdbd745..7d1442b 100644
--- a/media/libstagefright/webm/WebmFrameThread.cpp
+++ b/media/libstagefright/webm/WebmFrameThread.cpp
@@ -336,7 +336,6 @@
}
void WebmFrameMediaSourceThread::run() {
- int32_t count = 0;
int64_t timestampUs = 0xdeadbeef;
int64_t lastTimestampUs = 0; // Previous sample time stamp
int64_t lastDurationUs = 0; // Previous sample duration
@@ -367,7 +366,6 @@
buffer = NULL;
continue;
}
- ++count;
// adjust time-stamps after pause/resume
if (mResumed) {
diff --git a/media/libstagefright/writer_fuzzers/Android.bp b/media/libstagefright/writer_fuzzers/Android.bp
index b81f27e..58aa7cd 100644
--- a/media/libstagefright/writer_fuzzers/Android.bp
+++ b/media/libstagefright/writer_fuzzers/Android.bp
@@ -57,6 +57,14 @@
"android-media-fuzzing-reports@google.com",
],
componentid: 155276,
+ hotlists: [
+ "4593311",
+ ],
+ description: "The fuzzers target the APIs of all the various writers",
+ vector: "local_no_privileges_required",
+ service_privilege: "constrained",
+ users: "multi_user",
+ fuzzed_code_usage: "shipped",
},
}
diff --git a/media/libstagefright/xmlparser/MediaCodecsXmlParser.cpp b/media/libstagefright/xmlparser/MediaCodecsXmlParser.cpp
index 67c6102..8c1ef3b 100644
--- a/media/libstagefright/xmlparser/MediaCodecsXmlParser.cpp
+++ b/media/libstagefright/xmlparser/MediaCodecsXmlParser.cpp
@@ -19,6 +19,8 @@
#include <media/stagefright/xmlparser/MediaCodecsXmlParser.h>
+#include <android/api-level.h>
+
#include <android-base/logging.h>
#include <android-base/macros.h>
#include <android-base/properties.h>
@@ -30,6 +32,7 @@
#include <expat.h>
#include <stdio.h>
+#include <stdlib.h>
#include <string.h>
#include <sys/stat.h>
@@ -360,7 +363,7 @@
status_t updateMediaCodec(
const char *rank, const StringSet &domain, const StringSet &variants,
- const char *enabled);
+ const char *enabled, const char *minsdk);
};
status_t parseXmlFilesInSearchDirs(
@@ -493,6 +496,9 @@
}
}
+// current SDK for this device; filled in when initializing the parser.
+static int mysdk = 0;
+
MediaCodecsXmlParser::Impl::Parser::Parser(State *state, std::string path)
: mState(state),
mPath(path),
@@ -502,6 +508,20 @@
if (end != std::string::npos) {
mHrefBase = path.substr(0, end + 1);
}
+
+#if defined(__ANDROID_API_U__)
+ // this SDK calculation is intended only for devices >= U
+ static std::once_flag sCheckOnce;
+
+ std::call_once(sCheckOnce, [&](){
+ mysdk = android_get_device_api_level();
+
+ // work around main development branch being on same SDK as the last dessert release.
+ if (__ANDROID_API__ == __ANDROID_API_FUTURE__) {
+ mysdk++;
+ }
+ });
+#endif // __ANDROID_API_U__
}
void MediaCodecsXmlParser::Impl::Parser::parseXmlFile() {
@@ -930,6 +950,7 @@
const char *a_domain = nullptr;
const char *a_variant = nullptr;
const char *a_enabled = nullptr;
+ const char *a_minsdk = nullptr;
size_t i = 0;
while (attrs[i] != nullptr) {
@@ -953,6 +974,8 @@
a_variant = attrs[++i];
} else if (strEq(attrs[i], "enabled")) {
a_enabled = attrs[++i];
+ } else if (strEq(attrs[i], "minsdk")) {
+ a_minsdk = attrs[++i];
} else {
PLOGD("MediaCodec: ignoring unrecognized attribute '%s'", attrs[i]);
++i;
@@ -981,7 +1004,7 @@
return updateMediaCodec(
a_rank, parseCommaSeparatedStringSet(a_domain),
- parseCommaSeparatedStringSet(a_variant), a_enabled);
+ parseCommaSeparatedStringSet(a_variant), a_enabled, a_minsdk);
}
MediaCodecsXmlParser::Impl::Result
@@ -1035,7 +1058,7 @@
status_t MediaCodecsXmlParser::Impl::Parser::updateMediaCodec(
const char *rank, const StringSet &domains, const StringSet &variants,
- const char *enabled) {
+ const char *enabled, const char *minsdk) {
CHECK(mState->inCodec());
CodecProperties &codec = mState->codec();
@@ -1048,6 +1071,7 @@
codec.variantSet = variants;
+ // we allow sets of domains...
for (const std::string &domain : domains) {
if (domain.size() && domain.at(0) == '!') {
codec.domainSet.erase(domain.substr(1));
@@ -1065,6 +1089,49 @@
ALOGD("disabling %s", mState->codecName().c_str());
}
}
+
+ // evaluate against passed minsdk, with lots of logging to explain the logic
+ //
+ // if current sdk >= minsdk, we want to enable the codec
+ // this OVERRIDES any enabled="true|false" setting on the codec.
+ // (enabled=true minsdk=35 on a sdk 34 device results in a disabled codec)
+ //
+ // Although minsdk is not parsed before Android U, media_codecs.xml files that use it
+ // can still be carried to earlier devices (e.g. as part of mainline). An example is appropriate.
+ //
+ // we have a codec that we want enabled in Android V (sdk=35), so we use:
+ // <MediaCodec ..... enabled="false" minsdk="35" >
+ //
+ // on Q/R/S/T: it sees enabled=false, but ignores the unrecognized minsdk
+ // so the codec will be disabled
+ // on U: it sees enabled=false, and sees minsdk=35, but U==34 and 34 < 35
+ // so the codec will be disabled
+ // on V: it sees enabled=false, and sees minsdk=35, V==35 and 35 >= 35
+ // so the codec will be enabled
+ //
+ // if we know the XML files will be used only on devices >= U, we can skip the enabled=false
+ // piece. Android mainline's support horizons say we will be using the enabled=false for
+ // another 4-5 years after U.
+ //
+ if (minsdk != nullptr) {
+ char *p = nullptr;
+ int sdk = strtol(minsdk, &p, 0);
+ if (p == minsdk || sdk < 0) {
+ ALOGE("minsdk parsing '%s' yielded %d, mapping to 0", minsdk, sdk);
+ sdk = 0;
+ }
+ // minsdk="#" means: "enable if sdk is >= #, disable otherwise"
+ if (mysdk < sdk) {
+ ALOGI("codec %s disabled, device sdk %d < required %d",
+ mState->codecName().c_str(), mysdk, sdk);
+ codec.quirkSet.emplace("attribute::disabled");
+ } else {
+ ALOGI("codec %s enabled, device sdk %d >= required %d",
+ mState->codecName().c_str(), mysdk, sdk);
+ codec.quirkSet.erase("attribute::disabled");
+ }
+ }
+
return OK;
}
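
Editor's note: the minsdk handling above boils down to parsing the attribute with strtol, clamping unparsable or negative input to 0, and comparing the result against the device SDK computed at parser construction. A minimal standalone sketch of that decision; the helper name and free-function form are assumptions, the real logic lives in updateMediaCodec as shown:

    #include <cstdlib>

    // Mirrors the evaluation above: when a minsdk attribute is present, a satisfied
    // minimum enables the codec and an unsatisfied one disables it, overriding any
    // enabled="true|false" setting.
    static bool minsdkSatisfied(const char *minsdk, int deviceSdk) {
        char *end = nullptr;
        long required = strtol(minsdk, &end, 0);
        if (end == minsdk || required < 0) {
            required = 0;   // unparsable or negative values map to 0, as in the hunk above
        }
        return deviceSdk >= required;
    }

With enabled="false" minsdk="35", for example, an SDK 34 device keeps the codec disabled while an SDK 35 device enables it, matching the worked example in the comment block above.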
diff --git a/media/libstagefright/xmlparser/api/current.txt b/media/libstagefright/xmlparser/api/current.txt
index ecfd85e..95c347a 100644
--- a/media/libstagefright/xmlparser/api/current.txt
+++ b/media/libstagefright/xmlparser/api/current.txt
@@ -84,6 +84,7 @@
method public java.util.List<media.codecs.Feature> getFeature_optional();
method public java.util.List<media.codecs.Limit> getLimit_optional();
method public java.util.List<media.codecs.Mapping> getMapping_optional();
+ method public String getMinsdk();
method public String getName();
method public java.util.List<media.codecs.Quirk> getQuirk_optional();
method public String getRank();
@@ -95,6 +96,7 @@
method public java.util.List<media.codecs.Variant> getVariant_optional();
method public void setDomain(String);
method public void setEnabled(String);
+ method public void setMinsdk(String);
method public void setName(String);
method public void setRank(String);
method public void setType(String);
diff --git a/media/libstagefright/xmlparser/media_codecs.xsd b/media/libstagefright/xmlparser/media_codecs.xsd
index c9a7efc..33f3a27 100644
--- a/media/libstagefright/xmlparser/media_codecs.xsd
+++ b/media/libstagefright/xmlparser/media_codecs.xsd
@@ -74,6 +74,7 @@
<xs:attribute name="domain" type="xs:string"/>
<xs:attribute name="variant" type="xs:string"/>
<xs:attribute name="enabled" type="xs:string"/>
+ <xs:attribute name="minsdk" type="xs:string"/>
</xs:complexType>
<xs:complexType name="Quirk">
<xs:attribute name="name" type="xs:string"/>
diff --git a/media/libstagefright/xmlparser/test/XMLParserTest.cpp b/media/libstagefright/xmlparser/test/XMLParserTest.cpp
index 7629d97..2c5821e 100644
--- a/media/libstagefright/xmlparser/test/XMLParserTest.cpp
+++ b/media/libstagefright/xmlparser/test/XMLParserTest.cpp
@@ -145,6 +145,33 @@
},
{}, "");
+ // minsdk
+ setCodecProperties("test12.encoder", true, 12, {"attribute::disabled"}, {}, {}, "video/t12",
+ {
+ pair<string, string>("tuning-enable-goal", "no"),
+ },
+ {}, "");
+ setCodecProperties("test13.encoder", true, 13, {"attribute::disabled"}, {}, {}, "video/t13",
+ {
+ pair<string, string>("tuning-enable-goal", "no"),
+ },
+ {}, "");
+ setCodecProperties("test14.encoder", true, 14, {"attribute::disabled"}, {}, {}, "video/t14",
+ {
+ pair<string, string>("tuning-enable-goal", "no"),
+ },
+ {}, "");
+ setCodecProperties("test15.encoder", true, 15, {}, {}, {}, "video/t15",
+ {
+ pair<string, string>("tuning-enable-goal", "yes"),
+ },
+ {}, "");
+ setCodecProperties("test16.encoder", true, 16, {}, {}, {}, "video/t16",
+ {
+ pair<string, string>("tuning-enable-goal", "yes"),
+ },
+ {}, "");
+
setRoleProperties("audio_decoder.mp3", false, 1, "audio/mpeg", "test1.decoder",
{pair<string, string>("attribute::disabled", "present"),
pair<string, string>("rank", "4")});
@@ -191,6 +218,22 @@
pair<string, string>("tuning-pi", "3.1415")
});
+ // minsdk
+ setRoleProperties("video_encoder.t12", true, 12, "video/t12", "test12.encoder",
+ {pair<string, string>("tuning-enable-goal", "no"),
+ pair<string, string>("attribute::disabled", "present") });
+ setRoleProperties("video_encoder.t13", true, 13, "video/t13", "test13.encoder",
+ {pair<string, string>("tuning-enable-goal", "no"),
+ pair<string, string>("attribute::disabled", "present") });
+ setRoleProperties("video_encoder.t14", true, 14, "video/t14", "test14.encoder",
+ {pair<string, string>("tuning-enable-goal", "no"),
+ pair<string, string>("attribute::disabled", "present") });
+ setRoleProperties("video_encoder.t15", true, 15, "video/t15", "test15.encoder",
+ {pair<string, string>("tuning-enable-goal", "yes")});
+ setRoleProperties("video_encoder.t16", true, 16, "video/t16", "test16.encoder",
+ {pair<string, string>("tuning-enable-goal", "yes")});
+
+
setServiceAttribute(
{pair<string, string>("domain-telephony", "0"), pair<string, string>("domain-tv", "0"),
pair<string, string>("setting2", "0"), pair<string, string>("variant-variant1", "0")});
diff --git a/media/libstagefright/xmlparser/test/testdata/media_codecs_unit_test.xml b/media/libstagefright/xmlparser/test/testdata/media_codecs_unit_test.xml
index 8cae423..e066927 100644
--- a/media/libstagefright/xmlparser/test/testdata/media_codecs_unit_test.xml
+++ b/media/libstagefright/xmlparser/test/testdata/media_codecs_unit_test.xml
@@ -88,5 +88,21 @@
<Tuning name="hungry" value="yes"/>
<Tuning name="pi" value="3.1415"/>
</MediaCodec>
+ <!-- test minsdk -->
+ <MediaCodec name="test12.encoder" type="video/t12" minsdk="100">
+ <Tuning name="enable-goal" value="no"/>
+ </MediaCodec>
+ <MediaCodec name="test13.encoder" type="video/t13" enabled="false" minsdk="100">
+ <Tuning name="enable-goal" value="no"/>
+ </MediaCodec>
+ <MediaCodec name="test14.encoder" type="video/t14" enabled="true" minsdk="100">
+ <Tuning name="enable-goal" value="no"/>
+ </MediaCodec>
+ <MediaCodec name="test15.encoder" type="video/t15" minsdk="34">
+ <Tuning name="enable-goal" value="yes"/>
+ </MediaCodec>
+ <MediaCodec name="test16.encoder" type="video/t16" enabled="false" minsdk="34">
+ <Tuning name="enable-goal" value="yes"/>
+ </MediaCodec>
</Encoders>
</Included>
diff --git a/media/module/bqhelper/GraphicBufferSource.cpp b/media/module/bqhelper/GraphicBufferSource.cpp
index cff14ac..3202cc5 100644
--- a/media/module/bqhelper/GraphicBufferSource.cpp
+++ b/media/module/bqhelper/GraphicBufferSource.cpp
@@ -1150,7 +1150,7 @@
int32_t bufferCount,
uint32_t frameWidth,
uint32_t frameHeight,
- uint32_t consumerUsage) {
+ uint64_t consumerUsage) {
if (component == NULL) {
return BAD_VALUE;
}
diff --git a/media/module/bqhelper/include/media/stagefright/bqhelper/GraphicBufferSource.h b/media/module/bqhelper/include/media/stagefright/bqhelper/GraphicBufferSource.h
index fe6bcce..4e4fbfd 100644
--- a/media/module/bqhelper/include/media/stagefright/bqhelper/GraphicBufferSource.h
+++ b/media/module/bqhelper/include/media/stagefright/bqhelper/GraphicBufferSource.h
@@ -129,7 +129,7 @@
int32_t bufferCount,
uint32_t frameWidth,
uint32_t frameHeight,
- uint32_t consumerUsage);
+ uint64_t consumerUsage);
// This is called after the last input frame has been submitted or buffer
// timestamp is greater or equal than stopTimeUs. We need to submit an empty
diff --git a/media/module/codecs/amrnb/enc/fuzzer/Android.bp b/media/module/codecs/amrnb/enc/fuzzer/Android.bp
index 2c041b7..bcbcee2 100644
--- a/media/module/codecs/amrnb/enc/fuzzer/Android.bp
+++ b/media/module/codecs/amrnb/enc/fuzzer/Android.bp
@@ -48,5 +48,13 @@
"android-media-fuzzing-reports@google.com",
],
componentid: 155276,
+ hotlists: [
+ "4593311",
+ ],
+ description: "The fuzzer targets the APIs of libstagefright_amrnbenc library",
+ vector: "local_no_privileges_required",
+ service_privilege: "constrained",
+ users: "multi_user",
+ fuzzed_code_usage: "shipped",
},
}
diff --git a/media/module/codecs/amrnb/fuzzer/Android.bp b/media/module/codecs/amrnb/fuzzer/Android.bp
index 833a7ba..3f29267 100644
--- a/media/module/codecs/amrnb/fuzzer/Android.bp
+++ b/media/module/codecs/amrnb/fuzzer/Android.bp
@@ -48,5 +48,13 @@
"android-media-fuzzing-reports@google.com",
],
componentid: 155276,
+ hotlists: [
+ "4593311",
+ ],
+ description: "The fuzzer targets the APIs of libstagefright_amrnbdec library",
+ vector: "remote",
+ service_privilege: "constrained",
+ users: "multi_user",
+ fuzzed_code_usage: "shipped",
},
}
diff --git a/media/module/codecs/amrwb/dec/fuzzer/Android.bp b/media/module/codecs/amrwb/dec/fuzzer/Android.bp
index 16f08fa..31a20ff 100644
--- a/media/module/codecs/amrwb/dec/fuzzer/Android.bp
+++ b/media/module/codecs/amrwb/dec/fuzzer/Android.bp
@@ -48,5 +48,13 @@
"android-media-fuzzing-reports@google.com",
],
componentid: 155276,
+ hotlists: [
+ "4593311",
+ ],
+ description: "The fuzzer targets the APIs of libstagefright_amrwbdec library",
+ vector: "remote",
+ service_privilege: "constrained",
+ users: "multi_user",
+ fuzzed_code_usage: "shipped",
},
}
diff --git a/media/module/codecs/amrwb/enc/fuzzer/Android.bp b/media/module/codecs/amrwb/enc/fuzzer/Android.bp
index f74fa4f..c2c13e1 100644
--- a/media/module/codecs/amrwb/enc/fuzzer/Android.bp
+++ b/media/module/codecs/amrwb/enc/fuzzer/Android.bp
@@ -48,5 +48,13 @@
"android-media-fuzzing-reports@google.com",
],
componentid: 155276,
+ hotlists: [
+ "4593311",
+ ],
+ description: "The fuzzer targets the APIs of libstagefright_amrwbenc library",
+ vector: "local_no_privileges_required",
+ service_privilege: "constrained",
+ users: "multi_user",
+ fuzzed_code_usage: "shipped",
},
}
diff --git a/media/module/codecs/g711/fuzzer/Android.bp b/media/module/codecs/g711/fuzzer/Android.bp
index 376cce7..397fb9a 100644
--- a/media/module/codecs/g711/fuzzer/Android.bp
+++ b/media/module/codecs/g711/fuzzer/Android.bp
@@ -44,6 +44,14 @@
"android-media-fuzzing-reports@google.com",
],
componentid: 155276,
+ hotlists: [
+ "4593311",
+ ],
+ description: "The fuzzer targets the APIs of codecs_g711dec library with a special focus on Alaw APIs",
+ vector: "remote",
+ service_privilege: "constrained",
+ users: "multi_user",
+ fuzzed_code_usage: "shipped",
},
}
@@ -61,5 +69,13 @@
"android-media-fuzzing-reports@google.com",
],
componentid: 155276,
+ hotlists: [
+ "4593311",
+ ],
+ description: "The fuzzer targets the APIs of codecs_g711dec library with a special focus on Mlaw APIs",
+ vector: "remote",
+ service_privilege: "constrained",
+ users: "multi_user",
+ fuzzed_code_usage: "shipped",
},
}
diff --git a/media/module/codecs/m4v_h263/fuzzer/Android.bp b/media/module/codecs/m4v_h263/fuzzer/Android.bp
index a052c11..4d0ed18 100644
--- a/media/module/codecs/m4v_h263/fuzzer/Android.bp
+++ b/media/module/codecs/m4v_h263/fuzzer/Android.bp
@@ -50,6 +50,14 @@
"android-media-fuzzing-reports@google.com",
],
componentid: 155276,
+ hotlists: [
+ "4593311",
+ ],
+ description: "The fuzzers target the APIs of libstagefright_m4vh263dec library",
+ vector: "remote",
+ service_privilege: "constrained",
+ users: "multi_user",
+ fuzzed_code_usage: "shipped",
},
}
@@ -98,6 +106,14 @@
"android-media-fuzzing-reports@google.com",
],
componentid: 155276,
+ hotlists: [
+ "4593311",
+ ],
+ description: "The fuzzers target the APIs of libstagefright_m4vh263enc library",
+ vector: "local_no_privileges_required",
+ service_privilege: "constrained",
+ users: "multi_user",
+ fuzzed_code_usage: "shipped",
},
}
diff --git a/media/module/codecs/mp3dec/fuzzer/Android.bp b/media/module/codecs/mp3dec/fuzzer/Android.bp
index 514a8a8..c5e0b1f 100644
--- a/media/module/codecs/mp3dec/fuzzer/Android.bp
+++ b/media/module/codecs/mp3dec/fuzzer/Android.bp
@@ -44,5 +44,13 @@
"android-media-fuzzing-reports@google.com",
],
componentid: 155276,
+ hotlists: [
+ "4593311",
+ ],
+ description: "The fuzzer targets the APIs of libstagefright_mp3dec",
+ vector: "remote",
+ service_privilege: "constrained",
+ users: "multi_user",
+ fuzzed_code_usage: "shipped",
},
}
diff --git a/media/module/codecserviceregistrant/fuzzer/Android.bp b/media/module/codecserviceregistrant/fuzzer/Android.bp
index 0b9affd..1cb8c2b 100644
--- a/media/module/codecserviceregistrant/fuzzer/Android.bp
+++ b/media/module/codecserviceregistrant/fuzzer/Android.bp
@@ -41,5 +41,13 @@
"android-media-fuzzing-reports@google.com",
],
componentid: 155276,
+ hotlists: [
+ "4593311",
+ ],
+ description: "The fuzzer targets the APIs of libmedia_codecserviceregistrant",
+ vector: "local_no_privileges_required",
+ service_privilege: "constrained",
+ users: "multi_user",
+ fuzzed_code_usage: "shipped",
},
}
diff --git a/media/module/extractors/fuzzers/Android.bp b/media/module/extractors/fuzzers/Android.bp
index b3e34d2..91ca7b1 100644
--- a/media/module/extractors/fuzzers/Android.bp
+++ b/media/module/extractors/fuzzers/Android.bp
@@ -72,6 +72,14 @@
"android-media-fuzzing-reports@google.com",
],
componentid: 155276,
+ hotlists: [
+ "4593311",
+ ],
+ description: "The fuzzers targets the APIs of all the various extractors",
+ vector: "remote",
+ service_privilege: "constrained",
+ users: "multi_user",
+ fuzzed_code_usage: "shipped",
},
}
diff --git a/media/module/foundation/AHandler.cpp b/media/module/foundation/AHandler.cpp
index 7dbbe54..d8b0aaf 100644
--- a/media/module/foundation/AHandler.cpp
+++ b/media/module/foundation/AHandler.cpp
@@ -24,8 +24,10 @@
namespace android {
void AHandler::deliverMessage(const sp<AMessage> &msg) {
+ setDeliveryStatus(true, msg->what(), ALooper::GetNowUs());
onMessageReceived(msg);
mMessageCounter++;
+ setDeliveryStatus(false, 0, 0);
if (mVerboseStats) {
uint32_t what = msg->what();
@@ -38,4 +40,19 @@
}
}
+void AHandler::setDeliveryStatus(bool delivering, uint32_t what, int64_t startUs) {
+ AutoMutex autoLock(mLock);
+ mDeliveringMessage = delivering;
+ mCurrentMessageWhat = what;
+ mCurrentMessageStartTimeUs = startUs;
+}
+
+void AHandler::getDeliveryStatus(bool& delivering, uint32_t& what, int64_t& durationUs) {
+ AutoMutex autoLock(mLock);
+ delivering = mDeliveringMessage;
+ what = mCurrentMessageWhat;
+ durationUs = mCurrentMessageStartTimeUs == 0 ?
+ 0 : ALooper::GetNowUs() - mCurrentMessageStartTimeUs;
+}
+
} // namespace android
diff --git a/media/module/foundation/ALooper.cpp b/media/module/foundation/ALooper.cpp
index a276722..61bac02 100644
--- a/media/module/foundation/ALooper.cpp
+++ b/media/module/foundation/ALooper.cpp
@@ -69,6 +69,10 @@
return systemTime(SYSTEM_TIME_MONOTONIC) / 1000LL;
}
+int64_t ALooper::getNowUs() {
+ return GetNowUs();
+}
+
ALooper::ALooper()
: mRunningLocally(false) {
// clean up stale AHandlers. Doing it here instead of in the destructor avoids
@@ -170,11 +174,11 @@
int64_t whenUs;
if (delayUs > 0) {
- int64_t nowUs = GetNowUs();
+ int64_t nowUs = getNowUs();
whenUs = (delayUs > INT64_MAX - nowUs ? INT64_MAX : nowUs + delayUs);
} else {
- whenUs = GetNowUs();
+ whenUs = getNowUs();
}
List<Event>::iterator it = mEventQueue.begin();
@@ -185,6 +189,7 @@
Event event;
event.mWhenUs = whenUs;
event.mMessage = msg;
+ event.mToken = nullptr;
if (it == mEventQueue.begin()) {
mQueueChangedCondition.signal();
@@ -193,7 +198,57 @@
mEventQueue.insert(it, event);
}
+status_t ALooper::postUnique(const sp<AMessage> &msg, const sp<RefBase> &token, int64_t delayUs) {
+ if (token == nullptr) {
+ return -EINVAL;
+ }
+ Mutex::Autolock autoLock(mLock);
+
+ int64_t whenUs;
+ if (delayUs > 0) {
+ int64_t nowUs = getNowUs();
+ whenUs = (delayUs > INT64_MAX - nowUs ? INT64_MAX : nowUs + delayUs);
+ } else {
+ whenUs = getNowUs();
+ }
+
+ // We only need to wake the loop up if we're rescheduling to the earliest event in the queue.
+ // This needs to be checked now, before we reschedule the message, in case this message is
+ // already at the beginning of the queue.
+ bool shouldAwakeLoop = mEventQueue.empty() || whenUs < mEventQueue.begin()->mWhenUs;
+
+ // Erase any previously-posted event with this token.
+ for (auto i = mEventQueue.begin(); i != mEventQueue.end();) {
+ if (i->mToken == token) {
+ i = mEventQueue.erase(i);
+ } else {
+ ++i;
+ }
+ }
+
+ // Find the insertion point for the rescheduled message.
+ List<Event>::iterator i = mEventQueue.begin();
+ while (i != mEventQueue.end() && i->mWhenUs <= whenUs) {
+ ++i;
+ }
+
+ Event event;
+ event.mWhenUs = whenUs;
+ event.mMessage = msg;
+ event.mToken = token;
+ mEventQueue.insert(i, event);
+
+ // If we rescheduled the event to be earlier than the first event, then we need to wake up the
+ // looper earlier than it was previously scheduled to be woken up. Otherwise, it can sleep until
+ // the previous wake-up time and then go to sleep again if needed.
+ if (shouldAwakeLoop) {
+ mQueueChangedCondition.signal();
+ }
+ return OK;
+}
+
bool ALooper::loop() {
+
Event event;
{
@@ -206,7 +261,7 @@
return true;
}
int64_t whenUs = (*mEventQueue.begin()).mWhenUs;
- int64_t nowUs = GetNowUs();
+ int64_t nowUs = getNowUs();
if (whenUs > nowUs) {
int64_t delayUs = whenUs - nowUs;
diff --git a/media/module/foundation/ALooperRoster.cpp b/media/module/foundation/ALooperRoster.cpp
index 4334f1e..5625c7f 100644
--- a/media/module/foundation/ALooperRoster.cpp
+++ b/media/module/foundation/ALooperRoster.cpp
@@ -143,8 +143,20 @@
s.append(looper->getName());
sp<AHandler> handler = info.mHandler.promote();
if (handler != NULL) {
+ bool deliveringMessages;
+ uint32_t currentMessageWhat;
+ int64_t currentDeliveryDurationUs;
+ handler->getDeliveryStatus(deliveringMessages,
+ currentMessageWhat,
+ currentDeliveryDurationUs);
handler->mVerboseStats = verboseStats;
- s.appendFormat(": %" PRIu64 " messages processed", handler->mMessageCounter);
+ s.appendFormat(": %" PRIu64 " messages processed, delivering "
+ "%d, current msg %" PRIu32 ", current msg "
+ "durationUs %" PRIu64 "",
+ handler->mMessageCounter,
+ deliveringMessages,
+ currentMessageWhat,
+ currentDeliveryDurationUs);
if (verboseStats) {
for (size_t j = 0; j < handler->mMessages.size(); j++) {
char fourcc[15];
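The dump above now surfaces the per-handler delivery status added in AHandler. A hypothetical extension of the same loop (ALooperRoster is already a friend of AHandler, so getDeliveryStatus() is reachable here) could flag a handler stuck inside onMessageReceived(); the one-second threshold is only an illustration, not part of this change:

bool delivering;
uint32_t what;
int64_t durationUs;
handler->getDeliveryStatus(delivering, what, durationUs);
if (delivering && durationUs > 1000000) {  // busy on one message for more than 1 s
    s.appendFormat(" (handler busy on msg %" PRIu32 " for %" PRId64 " us)", what, durationUs);
}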
diff --git a/media/module/foundation/AMessage.cpp b/media/module/foundation/AMessage.cpp
index 5c99cc9..7cc7c41 100644
--- a/media/module/foundation/AMessage.cpp
+++ b/media/module/foundation/AMessage.cpp
@@ -430,6 +430,17 @@
return OK;
}
+status_t AMessage::postUnique(const sp<RefBase> &token, int64_t delayUs) {
+ sp<ALooper> looper = mLooper.promote();
+ if (looper == NULL) {
+ ALOGW("failed to post message as target looper for handler %d is gone.",
+ mTarget);
+ return -ENOENT;
+ }
+
+ return looper->postUnique(this, token, delayUs);
+}
+
status_t AMessage::postAndAwaitResponse(sp<AMessage> *response) {
sp<ALooper> looper = mLooper.promote();
if (looper == NULL) {
@@ -950,6 +961,11 @@
return mItems.size();
}
+/* static */
+size_t AMessage::maxAllowedEntries() {
+ return kMaxNumItems;
+}
+
const char *AMessage::getEntryNameAt(size_t index, Type *type) const {
if (index >= mItems.size()) {
*type = kTypeInt32;
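A minimal usage sketch of the new postUnique() path, mirroring the tests later in this change where the message itself serves as the dedup token; kWhatPoll and mHandler are placeholder names and the 100 ms delay is illustrative:

sp<AMessage> msg = new AMessage(kWhatPoll, mHandler);
// Re-posting with the same token replaces any still-pending copy, so at most one
// instance of this message is queued on the target looper at any time.
msg->postUnique(msg, 100000 /* 100 ms */);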
diff --git a/media/module/foundation/MediaDefs.cpp b/media/module/foundation/MediaDefs.cpp
index 4a75f90..7abab63 100644
--- a/media/module/foundation/MediaDefs.cpp
+++ b/media/module/foundation/MediaDefs.cpp
@@ -72,6 +72,7 @@
const char *MEDIA_MIMETYPE_AUDIO_DTS = "audio/vnd.dts";
const char *MEDIA_MIMETYPE_AUDIO_DTS_HD = "audio/vnd.dts.hd";
const char *MEDIA_MIMETYPE_AUDIO_DTS_HD_MA = "audio/vnd.dts.hd;profile=dtsma";
+const char *MEDIA_MIMETYPE_AUDIO_DTS_UHD = "audio/vnd.dts.uhd";
const char *MEDIA_MIMETYPE_AUDIO_DTS_UHD_P1 = "audio/vnd.dts.uhd;profile=p1";
const char *MEDIA_MIMETYPE_AUDIO_DTS_UHD_P2 = "audio/vnd.dts.uhd;profile=p2";
const char *MEDIA_MIMETYPE_AUDIO_EVRC = "audio/evrc";
diff --git a/media/module/foundation/include/media/stagefright/foundation/AHandler.h b/media/module/foundation/include/media/stagefright/foundation/AHandler.h
index 337460a..c9e4f69 100644
--- a/media/module/foundation/include/media/stagefright/foundation/AHandler.h
+++ b/media/module/foundation/include/media/stagefright/foundation/AHandler.h
@@ -30,7 +30,10 @@
AHandler()
: mID(0),
mVerboseStats(false),
- mMessageCounter(0) {
+ mMessageCounter(0),
+ mDeliveringMessage(false),
+ mCurrentMessageWhat(0),
+ mCurrentMessageStartTimeUs(0) {
}
ALooper::handler_id id() const {
@@ -69,8 +72,17 @@
uint64_t mMessageCounter;
KeyedVector<uint32_t, uint32_t> mMessages;
+ Mutex mLock;
+ bool mDeliveringMessage;
+ uint32_t mCurrentMessageWhat;
+ int64_t mCurrentMessageStartTimeUs;
+
void deliverMessage(const sp<AMessage> &msg);
+ void setDeliveryStatus(bool, uint32_t, int64_t);
+ void getDeliveryStatus(bool&, uint32_t&, int64_t&);
+
+
DISALLOW_EVIL_CONSTRUCTORS(AHandler);
};
diff --git a/media/module/foundation/include/media/stagefright/foundation/ALooper.h b/media/module/foundation/include/media/stagefright/foundation/ALooper.h
index 09c469b..60bda1f 100644
--- a/media/module/foundation/include/media/stagefright/foundation/ALooper.h
+++ b/media/module/foundation/include/media/stagefright/foundation/ALooper.h
@@ -59,6 +59,9 @@
}
protected:
+ // overridable by test harness
+ virtual int64_t getNowUs();
+
virtual ~ALooper();
private:
@@ -67,6 +70,7 @@
struct Event {
int64_t mWhenUs;
sp<AMessage> mMessage;
+ sp<RefBase> mToken;
};
Mutex mLock;
@@ -87,9 +91,14 @@
// START --- methods used only by AMessage
- // posts a message on this looper with the given timeout
+ // Posts a message on this looper with the given timeout.
void post(const sp<AMessage> &msg, int64_t delayUs);
+ // Post a message uniquely on this looper with the given timeout.
+ // This method ensures that there is exactly one message with the same token pending on
+ // this looper after the call returns. A null token will result in an EINVAL error status.
+ status_t postUnique(const sp<AMessage> &msg, const sp<RefBase> &token, int64_t delayUs);
+
// creates a reply token to be used with this looper
sp<AReplyToken> createReplyToken();
// waits for a response for the reply token. If status is OK, the response
diff --git a/media/module/foundation/include/media/stagefright/foundation/AMessage.h b/media/module/foundation/include/media/stagefright/foundation/AMessage.h
index 960212a..7594565 100644
--- a/media/module/foundation/include/media/stagefright/foundation/AMessage.h
+++ b/media/module/foundation/include/media/stagefright/foundation/AMessage.h
@@ -141,6 +141,11 @@
status_t post(int64_t delayUs = 0);
+ // Post a message uniquely to its target with the given timeout.
+ // This method ensures that there is exactly one message with the same token pending on its
+ // target after the call returns. A null token will result in an EINVAL error status.
+ status_t postUnique(const sp<RefBase> &token, int64_t delayUs = 0);
+
// Posts the message to its target and waits for a response (or error)
// before returning.
status_t postAndAwaitResponse(sp<AMessage> *response);
@@ -194,6 +199,7 @@
};
size_t countEntries() const;
+ static size_t maxAllowedEntries();
const char *getEntryNameAt(size_t index, Type *type) const;
/**
diff --git a/media/module/foundation/include/media/stagefright/foundation/MediaDefs.h b/media/module/foundation/include/media/stagefright/foundation/MediaDefs.h
index 740336a..05ee7fc 100644
--- a/media/module/foundation/include/media/stagefright/foundation/MediaDefs.h
+++ b/media/module/foundation/include/media/stagefright/foundation/MediaDefs.h
@@ -74,6 +74,7 @@
extern const char *MEDIA_MIMETYPE_AUDIO_DTS;
extern const char *MEDIA_MIMETYPE_AUDIO_DTS_HD;
extern const char *MEDIA_MIMETYPE_AUDIO_DTS_HD_MA;
+extern const char *MEDIA_MIMETYPE_AUDIO_DTS_UHD;
extern const char *MEDIA_MIMETYPE_AUDIO_DTS_UHD_P1;
extern const char *MEDIA_MIMETYPE_AUDIO_DTS_UHD_P2;
extern const char *MEDIA_MIMETYPE_AUDIO_EVRC;
diff --git a/media/module/foundation/tests/AMessage_test.cpp b/media/module/foundation/tests/AMessage_test.cpp
index 2b11326..08062e5 100644
--- a/media/module/foundation/tests/AMessage_test.cpp
+++ b/media/module/foundation/tests/AMessage_test.cpp
@@ -17,18 +17,65 @@
//#define LOG_NDEBUG 0
#define LOG_TAG "AData_test"
+#include <gmock/gmock.h>
#include <gtest/gtest.h>
#include <utils/RefBase.h>
#include <media/stagefright/foundation/AMessage.h>
+#include <media/stagefright/foundation/AHandler.h>
+#include <media/stagefright/foundation/ALooper.h>
using namespace android;
-class AMessageTest : public ::testing::Test {
+using ::testing::InSequence;
+using ::testing::NiceMock;
+
+class LooperWithSettableClock : public ALooper {
+public:
+ LooperWithSettableClock() : mClockUs(0) {}
+
+ void setClockUs(int64_t nowUs) {
+ mClockUs = nowUs;
+ }
+
+ int64_t getNowUs() override {
+ return mClockUs;
+ }
+
+private:
+ int64_t mClockUs;
};
+timespec millis100 = {0, 100L*1000*1000};
-TEST(AMessage_tests, item_manipulation) {
+class MockHandler : public AHandler {
+public:
+ MOCK_METHOD(void, onMessageReceived, (const sp<AMessage>&), (override));
+};
+
+TEST(AMessage_tests, countsAndLimits) {
+ sp<AMessage> m1 = new AMessage();
+
+ // clear, countEntries, maxAllowedEntries
+
+ EXPECT_EQ(0, m1->countEntries());
+
+ m1->setInt32("smaller", INT32_MAX - 2);
+ m1->setInt64("big", INT64_MAX);
+ m1->setString("bigBallOfString", "whatever");
+ EXPECT_EQ(3, m1->countEntries());
+
+ m1->clear();
+ EXPECT_EQ(0, m1->countEntries());
+
+ EXPECT_TRUE(m1->maxAllowedEntries() > 0);
+ EXPECT_TRUE(AMessage::maxAllowedEntries() > 0);
+
+ // static function, make sure we get a consistent answer
+ EXPECT_EQ(m1->maxAllowedEntries(), AMessage::maxAllowedEntries());
+}
+
+TEST(AMessage_tests, settersAndGetters) {
sp<AMessage> m1 = new AMessage();
m1->setInt32("value", 2);
@@ -120,6 +167,171 @@
EXPECT_TRUE(m1->findInt32("alittlelonger", &i32));
EXPECT_NE(OK, m1->removeEntryByName("notpresent"));
-
}
+TEST(AMessage_tests, deliversMultipleMessagesInOrderImmediately) {
+ sp<NiceMock<MockHandler>> mockHandler = new NiceMock<MockHandler>;
+ sp<LooperWithSettableClock> looper = new LooperWithSettableClock();
+ looper->registerHandler(mockHandler);
+
+ sp<AMessage> msgNow1 = new AMessage(0, mockHandler);
+ msgNow1->post();
+ sp<AMessage> msgNow2 = new AMessage(0, mockHandler);
+ msgNow2->post();
+
+ {
+ InSequence inSequence;
+ EXPECT_CALL(*mockHandler, onMessageReceived(msgNow1)).Times(1);
+ EXPECT_CALL(*mockHandler, onMessageReceived(msgNow2)).Times(1);
+ }
+ looper->start();
+ nanosleep(&millis100, nullptr); // just enough time for the looper thread to run
+}
+
+TEST(AMessage_tests, doesNotDeliverDelayedMessageImmediately) {
+ sp<NiceMock<MockHandler>> mockHandler = new NiceMock<MockHandler>;
+ sp<LooperWithSettableClock> looper = new LooperWithSettableClock();
+ looper->registerHandler(mockHandler);
+
+ sp<AMessage> msgNow = new AMessage(0, mockHandler);
+ msgNow->post();
+ sp<AMessage> msgDelayed = new AMessage(0, mockHandler);
+ msgDelayed->post(100);
+
+ EXPECT_CALL(*mockHandler, onMessageReceived(msgNow)).Times(1);
+ // note: never called
+ EXPECT_CALL(*mockHandler, onMessageReceived(msgDelayed)).Times(0);
+ looper->start();
+ nanosleep(&millis100, nullptr); // just enough time for the looper thread to run
+}
+
+TEST(AMessage_tests, deliversDelayedMessagesInSequence) {
+ sp<NiceMock<MockHandler>> mockHandler = new NiceMock<MockHandler>;
+ sp<LooperWithSettableClock> looper = new LooperWithSettableClock();
+ looper->registerHandler(mockHandler);
+
+ sp<AMessage> msgIn500 = new AMessage(0, mockHandler);
+ msgIn500->post(500);
+ sp<AMessage> msgNow = new AMessage(0, mockHandler);
+ msgNow->post();
+ sp<AMessage> msgIn100 = new AMessage(0, mockHandler);
+ msgIn100->post(100);
+ // not expected to be received
+ sp<AMessage> msgIn1000 = new AMessage(0, mockHandler);
+ msgIn1000->post(1000);
+
+ looper->setClockUs(500);
+ {
+ InSequence inSequence;
+
+ EXPECT_CALL(*mockHandler, onMessageReceived(msgNow)).Times(1);
+ EXPECT_CALL(*mockHandler, onMessageReceived(msgIn100)).Times(1);
+ EXPECT_CALL(*mockHandler, onMessageReceived(msgIn500)).Times(1);
+ }
+ // note: never called
+ EXPECT_CALL(*mockHandler, onMessageReceived(msgIn1000)).Times(0);
+ looper->start();
+ nanosleep(&millis100, nullptr); // just enough time for the looper thread to run
+}
+
+TEST(AMessage_tests, deliversDelayedUniqueMessage) {
+ sp<NiceMock<MockHandler>> mockHandler = new NiceMock<MockHandler>;
+ sp<LooperWithSettableClock> looper = new LooperWithSettableClock();
+ looper->registerHandler(mockHandler);
+
+ sp<AMessage> msg = new AMessage(0, mockHandler);
+ msg->postUnique(msg, 50);
+
+ looper->setClockUs(50);
+ EXPECT_CALL(*mockHandler, onMessageReceived(msg)).Times(1);
+ looper->start();
+ nanosleep(&millis100, nullptr); // just enough time for the looper thread to run
+}
+
+TEST(AMessage_tests, deliversImmediateUniqueMessage) {
+ sp<NiceMock<MockHandler>> mockHandler = new NiceMock<MockHandler>;
+ // note: we don't need to set the clock, but we do want a stable clock that doesn't advance
+ sp<LooperWithSettableClock> looper = new LooperWithSettableClock();
+ looper->registerHandler(mockHandler);
+
+ sp<AMessage> msg = new AMessage(0, mockHandler);
+ msg->postUnique(msg, 0);
+
+ EXPECT_CALL(*mockHandler, onMessageReceived(msg)).Times(1);
+ looper->start();
+ nanosleep(&millis100, nullptr); // just enough time for the looper thread to run
+}
+
+TEST(AMessage_tests, doesNotDeliverUniqueMessageAfterRescheduleLater) {
+ sp<NiceMock<MockHandler>> mockHandler = new NiceMock<MockHandler>;
+ sp<LooperWithSettableClock> looper = new LooperWithSettableClock();
+ looper->registerHandler(mockHandler);
+
+ sp<AMessage> msg = new AMessage(0, mockHandler);
+ msg->postUnique(msg, 50);
+ msg->postUnique(msg, 100); // reschedule for later
+
+ looper->setClockUs(50); // if the message is correctly rescheduled, it should not be delivered
+ // Never called because the message was rescheduled to a later point in time
+ EXPECT_CALL(*mockHandler, onMessageReceived(msg)).Times(0);
+ looper->start();
+ nanosleep(&millis100, nullptr); // just enough time for the looper thread to run
+}
+
+TEST(AMessage_tests, deliversUniqueMessageAfterRescheduleEarlier) {
+ sp<NiceMock<MockHandler>> mockHandler = new NiceMock<MockHandler>;
+ sp<LooperWithSettableClock> looper = new LooperWithSettableClock();
+ looper->registerHandler(mockHandler);
+
+ sp<AMessage> msg = new AMessage(0, mockHandler);
+ msg->postUnique(msg, 100);
+ msg->postUnique(msg, 50); // reschedule to fire earlier
+
+ looper->setClockUs(50); // if the message is rescheduled correctly, it should be delivered
+ EXPECT_CALL(*mockHandler, onMessageReceived(msg)).Times(1);
+ looper->start();
+ nanosleep(&millis100, nullptr); // just enough time for the looper thread to run
+}
+
+TEST(AMessage_tests, deliversSameMessageTwice) {
+ sp<NiceMock<MockHandler>> mockHandler = new NiceMock<MockHandler>;
+ sp<LooperWithSettableClock> looper = new LooperWithSettableClock();
+ looper->registerHandler(mockHandler);
+
+ sp<AMessage> msg = new AMessage(0, mockHandler);
+ msg->post(50);
+ msg->post(100);
+
+ looper->setClockUs(100);
+ EXPECT_CALL(*mockHandler, onMessageReceived(msg)).Times(2);
+ looper->start();
+ nanosleep(&millis100, nullptr); // just enough time for the looper thread to run
+}
+
+// When two messages are posted with the same token, only the most recently posted one is
+// delivered.
+TEST(AMessage_tests, deliversUniqueMessageOnce) {
+ sp<NiceMock<MockHandler>> mockHandler = new NiceMock<MockHandler>;
+ sp<LooperWithSettableClock> looper = new LooperWithSettableClock();
+ looper->registerHandler(mockHandler);
+
+ sp<AMessage> msg1 = new AMessage(0, mockHandler);
+ msg1->postUnique(msg1, 50);
+ sp<AMessage> msg2 = new AMessage(0, mockHandler);
+ msg2->postUnique(msg1, 75); // note, using the same token as msg1
+
+ looper->setClockUs(100);
+ EXPECT_CALL(*mockHandler, onMessageReceived(msg1)).Times(0);
+ EXPECT_CALL(*mockHandler, onMessageReceived(msg2)).Times(1);
+ looper->start();
+ nanosleep(&millis100, nullptr); // just enough time for the looper thread to run
+}
+
+TEST(AMessage_tests, postUnique_withNullToken_returnsInvalidArgument) {
+ sp<NiceMock<MockHandler>> mockHandler = new NiceMock<MockHandler>;
+ sp<ALooper> looper = new ALooper();
+ looper->registerHandler(mockHandler);
+
+ sp<AMessage> msg = new AMessage(0, mockHandler);
+ EXPECT_EQ(msg->postUnique(nullptr, 0), -EINVAL);
+}
diff --git a/media/module/foundation/tests/Android.bp b/media/module/foundation/tests/Android.bp
index e72ce43..c409dd2 100644
--- a/media/module/foundation/tests/Android.bp
+++ b/media/module/foundation/tests/Android.bp
@@ -20,10 +20,14 @@
shared_libs: [
"liblog",
- "libstagefright_foundation",
"libutils",
],
+ static_libs: [
+ "libstagefright_foundation",
+ "libgmock",
+ ],
+
srcs: [
"AData_test.cpp",
"AMessage_test.cpp",
diff --git a/media/module/mpeg2ts/ATSParser.cpp b/media/module/mpeg2ts/ATSParser.cpp
index 1482072..6aeea3b 100644
--- a/media/module/mpeg2ts/ATSParser.cpp
+++ b/media/module/mpeg2ts/ATSParser.cpp
@@ -556,7 +556,15 @@
if (descriptor_length > ES_info_length) {
return ERROR_MALFORMED;
}
- if (descriptor_tag == DESCRIPTOR_CA && descriptor_length >= 4) {
+
+ // The DTS descriptor is used in the PSI PMT to identify streams which carry
+ // DTS audio (core only). If a DTS descriptor is present, DTS-HD or DTS-UHD
+ // descriptors shall not be present in the same ES_info descriptor loop.
+ if (descriptor_tag == DESCRIPTOR_DTS) {
+ info.mType = STREAMTYPE_DTS;
+ ES_info_length -= descriptor_length;
+ br->skipBits(descriptor_length * 8);
+ } else if (descriptor_tag == DESCRIPTOR_CA && descriptor_length >= 4) {
hasStreamCA = true;
streamCA.mSystemID = br->getBits(16);
streamCA.mPID = br->getBits(16) & 0x1fff;
@@ -575,6 +583,16 @@
if (descTagExt == EXT_DESCRIPTOR_DVB_AC4) {
info.mTypeExt = EXT_DESCRIPTOR_DVB_AC4;
br->skipBits(descriptor_length * 8);
+ } else if (descTagExt == EXT_DESCRIPTOR_DVB_DTS_HD) {
+ // DTS-HD extended descriptor, which can accommodate core-only formats
+ // as well as extension-only and core + extension combinations.
+ info.mTypeExt = EXT_DESCRIPTOR_DVB_DTS_HD;
+ br->skipBits(descriptor_length * 8);
+ } else if (descTagExt == EXT_DESCRIPTOR_DVB_DTS_UHD) {
+ // The DTS-UHD descriptor is used in the PSI PMT to identify streams
+ // which carry DTS-UHD audio
+ info.mTypeExt = EXT_DESCRIPTOR_DVB_DTS_UHD;
+ br->skipBits(descriptor_length * 8);
} else if (descTagExt == EXT_DESCRIPTOR_DVB_AUDIO_PRESELECTION &&
descriptor_length >= 1) {
// DVB BlueBook A038 Table 110
@@ -920,9 +938,17 @@
mode = ElementaryStreamQueue::EAC3;
break;
+ case STREAMTYPE_DTS:
+ mode = ElementaryStreamQueue::DTS;
+ break;
+
case STREAMTYPE_PES_PRIVATE_DATA:
if (mStreamTypeExt == EXT_DESCRIPTOR_DVB_AC4) {
mode = ElementaryStreamQueue::AC4;
+ } else if (mStreamTypeExt == EXT_DESCRIPTOR_DVB_DTS_HD) {
+ mode = ElementaryStreamQueue::DTS_HD;
+ } else if (mStreamTypeExt == EXT_DESCRIPTOR_DVB_DTS_UHD) {
+ mode = ElementaryStreamQueue::DTS_UHD;
}
break;
@@ -1158,9 +1184,12 @@
case STREAMTYPE_EAC3:
case STREAMTYPE_AAC_ENCRYPTED:
case STREAMTYPE_AC3_ENCRYPTED:
+ case STREAMTYPE_DTS:
return true;
case STREAMTYPE_PES_PRIVATE_DATA:
- return mStreamTypeExt == EXT_DESCRIPTOR_DVB_AC4;
+ return (mStreamTypeExt == EXT_DESCRIPTOR_DVB_AC4
+ || mStreamTypeExt == EXT_DESCRIPTOR_DVB_DTS_HD
+ || mStreamTypeExt == EXT_DESCRIPTOR_DVB_DTS_UHD);
default:
return false;
diff --git a/media/module/mpeg2ts/ESQueue.cpp b/media/module/mpeg2ts/ESQueue.cpp
index 192ba77..2dc7b0a 100644
--- a/media/module/mpeg2ts/ESQueue.cpp
+++ b/media/module/mpeg2ts/ESQueue.cpp
@@ -362,6 +362,436 @@
return OK;
}
+#define RETURN_ERROR_IF_NOT_ENOUGH_BYTES_LEFT(bitstream, size) \
+ do { \
+ if ((bitstream).numBitsLeft() < (size)) { \
+ ALOGE("Not enough bits left for further parsing"); \
+ return ERROR_MALFORMED; } \
+ } while (0)
+
+// Parse DTS Digital Surround and DTS Express(LBR) stream header
+static status_t parseDTSHDSyncFrame(
+ const uint8_t *ptr, size_t size, unsigned &frameSize, sp<MetaData> *metaData) {
+ static const unsigned channelCountTable[] = {1, 2, 2, 2, 2, 3, 3, 4,
+ 4, 5, 6, 6, 6, 7, 8, 8};
+ static const unsigned samplingRateTableCoreSS[] = {0, 8000, 16000, 32000, 0, 0, 11025, 22050,
+ 44100, 0, 0, 12000, 24000, 48000, 0, 0};
+ static const unsigned samplingRateTableExtSS[] = {8000, 16000, 32000, 64000, 128000,
+ 22050, 44100, 88200, 176400, 352800,
+ 12000, 24000, 48000, 96000, 192000, 384000};
+
+ const uint32_t DTSHD_SYNC_CORE_16BIT_BE = 0x7ffe8001;
+ const uint32_t DTSHD_SYNC_EXSS_16BIT_BE = 0x64582025;
+
+ uint32_t numChannels = 0, samplingRate = 0;
+ bool isLBR = false;
+
+ ABitReader bits(ptr, size);
+
+ RETURN_ERROR_IF_NOT_ENOUGH_BYTES_LEFT(bits, 32);
+ uint32_t dtshdSyncWord = bits.getBits(32);
+
+ // Expecting DTS Digital Surround or DTS Express(LBR) streams only
+ if (dtshdSyncWord == DTSHD_SYNC_CORE_16BIT_BE) { // DTS Digital Surround Header
+ RETURN_ERROR_IF_NOT_ENOUGH_BYTES_LEFT(bits, (1 + 5 + 1 + 7 + 14 + 6 + 4 + 15 + 2));
+
+ // FTYPE, SHORT, CRC, NBLKS
+ bits.skipBits(1 + 5 + 1 + 7);
+
+ frameSize = bits.getBits(14) + 1;
+ uint32_t amode = bits.getBits(6);
+ uint32_t freqIndex = bits.getBits(4);
+
+ // RATE, FIXEDBIT, DYNF, TIMEF, AUXF, HDCD, EXT_AUDIO_ID, EXT_AUDIO, ASPF
+ bits.skipBits(5 + 1 + 1 + 1 + 1 + 1 + 3 + 1 + 1);
+
+ uint32_t lfeFlag = bits.getBits(2);
+ numChannels = (amode <= 15) ? channelCountTable[amode] : 0;
+ numChannels += ((lfeFlag == 1) || (lfeFlag == 2)) ? 1 : 0;
+ samplingRate = (freqIndex <= 15) ? samplingRateTableCoreSS[freqIndex] : 0;
+
+ isLBR = false;
+ } else if (dtshdSyncWord == DTSHD_SYNC_EXSS_16BIT_BE) { // DTS Express(LBR) Header
+ RETURN_ERROR_IF_NOT_ENOUGH_BYTES_LEFT(bits, (8 + 2 + 1));
+
+ uint32_t extHeadersize, extSSFsize;
+ uint32_t numAudioPresent = 1, numAssets = 1;
+ uint32_t nuActiveExSSMask[8];
+
+ // userDefinedBits
+ bits.skipBits(8);
+
+ uint32_t extSSIndex = bits.getBits(2);
+ uint32_t headerSizeType = bits.getBits(1);
+
+ if (headerSizeType == 0) {
+ RETURN_ERROR_IF_NOT_ENOUGH_BYTES_LEFT(bits, (8 + 16));
+
+ extHeadersize = bits.getBits(8) + 1;
+ extSSFsize = bits.getBits(16) + 1;
+ } else {
+ RETURN_ERROR_IF_NOT_ENOUGH_BYTES_LEFT(bits, (12 + 20));
+
+ extHeadersize = bits.getBits(12) + 1;
+ extSSFsize = bits.getBits(20) + 1;
+ }
+
+ RETURN_ERROR_IF_NOT_ENOUGH_BYTES_LEFT(bits, (1));
+
+ uint32_t staticFieldsPresent = bits.getBits(1);
+
+ if (staticFieldsPresent) {
+ RETURN_ERROR_IF_NOT_ENOUGH_BYTES_LEFT(bits, (2 + 3 + 1));
+
+ // nuRefClockCode, nuExSSFrameDurationCode
+ bits.skipBits(2 + 3);
+
+ if (bits.getBits(1)) {
+ RETURN_ERROR_IF_NOT_ENOUGH_BYTES_LEFT(bits, (32 + 4));
+
+ bits.skipBits(32 + 4);
+ }
+
+ RETURN_ERROR_IF_NOT_ENOUGH_BYTES_LEFT(bits, (3 + 3));
+
+ // numAudioPresent, numAssets
+ bits.skipBits(3 + 3);
+
+ for (uint32_t nAuPr = 0; nAuPr < numAudioPresent; nAuPr++) {
+ RETURN_ERROR_IF_NOT_ENOUGH_BYTES_LEFT(bits, (extSSIndex + 1));
+
+ nuActiveExSSMask[nAuPr] = bits.getBits(extSSIndex + 1);
+ }
+
+ for (uint32_t nAuPr = 0; nAuPr < numAudioPresent; nAuPr++) {
+ for (uint32_t nSS = 0; nSS < extSSIndex + 1; nSS++) {
+ if (((nuActiveExSSMask[nAuPr] >> nSS) & 0x1) == 1) {
+ RETURN_ERROR_IF_NOT_ENOUGH_BYTES_LEFT(bits, 8);
+
+ // nuActiveAssetMask
+ bits.skipBits(8);
+ }
+ }
+ }
+
+ RETURN_ERROR_IF_NOT_ENOUGH_BYTES_LEFT(bits, 1);
+
+ // bMixMetadataEnbl
+ if (bits.getBits(1)) {
+ RETURN_ERROR_IF_NOT_ENOUGH_BYTES_LEFT(bits, (2 + 2 + 2));
+
+ // nuMixMetadataAdjLevel
+ bits.skipBits(2);
+
+ uint32_t bits4MixOutMask = (bits.getBits(2) + 1) << 2;
+ uint32_t numMixOutConfigs = bits.getBits(2) + 1;
+
+ for (int ns = 0; ns < numMixOutConfigs; ns++) {
+ RETURN_ERROR_IF_NOT_ENOUGH_BYTES_LEFT(bits, bits4MixOutMask);
+
+ // nuMixOutChMask
+ bits.skipBits(bits4MixOutMask);
+ }
+ }
+ }
+
+ for (int nAst = 0; nAst < numAssets; nAst++) {
+ int bits4ExSSFsize = (headerSizeType == 0) ? 16 : 20;
+
+ RETURN_ERROR_IF_NOT_ENOUGH_BYTES_LEFT(bits, bits4ExSSFsize);
+
+ bits.skipBits(bits4ExSSFsize);
+ }
+
+ /* Asset descriptor */
+ for (int nAst = 0; nAst < numAssets; nAst++) {
+ RETURN_ERROR_IF_NOT_ENOUGH_BYTES_LEFT(bits, (9 + 3));
+
+ // nuAssetDescriptFsize, nuAssetIndex
+ bits.skipBits(9 + 3);
+
+ if (staticFieldsPresent) {
+ RETURN_ERROR_IF_NOT_ENOUGH_BYTES_LEFT(bits, 1);
+
+ // bAssetTypeDescrPresent
+ if (bits.getBits(1)) {
+ RETURN_ERROR_IF_NOT_ENOUGH_BYTES_LEFT(bits, 4);
+
+ // nuAssetTypeDescriptor
+ bits.skipBits(4);
+ }
+
+ RETURN_ERROR_IF_NOT_ENOUGH_BYTES_LEFT(bits, 1);
+
+ // bLanguageDescrPresent
+ if (bits.getBits(1)) {
+ RETURN_ERROR_IF_NOT_ENOUGH_BYTES_LEFT(bits, 24);
+
+ // LanguageDescriptor
+ bits.skipBits(24);
+ }
+
+ RETURN_ERROR_IF_NOT_ENOUGH_BYTES_LEFT(bits, 1);
+
+ // bInfoTextPresent
+ if (bits.getBits(1)) {
+ RETURN_ERROR_IF_NOT_ENOUGH_BYTES_LEFT(bits, 10);
+
+ uint32_t nuInfoTextByteSize = bits.getBits(10) + 1;
+
+ RETURN_ERROR_IF_NOT_ENOUGH_BYTES_LEFT(bits, (nuInfoTextByteSize * 8));
+
+ // InfoTextString
+ bits.skipBits(nuInfoTextByteSize * 8);
+ }
+
+ RETURN_ERROR_IF_NOT_ENOUGH_BYTES_LEFT(bits, (5 + 4 + 8));
+
+ // nuBitResolution
+ bits.skipBits(5);
+
+ samplingRate = samplingRateTableExtSS[bits.getBits(4)];
+ numChannels = bits.getBits(8) + 1;
+ }
+ }
+
+ frameSize = extHeadersize + extSSFsize;
+ isLBR = true;
+ } else {
+ ALOGE("No valid sync word in DTS/DTSHD header");
+ return ERROR_MALFORMED;
+ }
+
+ if (metaData != NULL) {
+ if (isLBR) {
+ (*metaData)->setCString(kKeyMIMEType, MEDIA_MIMETYPE_AUDIO_DTS_HD);
+ (*metaData)->setInt32(kKeyAudioProfile, 0x2); // CodecProfileLevel.DTS_HDProfileLBR
+ } else {
+ (*metaData)->setCString(kKeyMIMEType, MEDIA_MIMETYPE_AUDIO_DTS);
+ }
+ (*metaData)->setInt32(kKeyChannelCount, numChannels);
+ (*metaData)->setInt32(kKeySampleRate, samplingRate);
+ }
+ return OK;
+}
+
+static status_t extractVarLenBitFields(
+ ABitReader *bits, size_t *bitsUsed, uint32_t *value,
+ unsigned ucTable[], bool extractAndAddFlag) {
+
+ static const unsigned bitsUsedTbl[8] = {1, 1, 1, 1, 2, 2, 3, 3}; // prefix code lengths
+ static const unsigned indexTbl[8] = {0, 0, 0, 0, 1, 1, 2, 3}; // code to prefix code index map
+
+ /* Clone the bitstream */
+ ABitReader bitStream(bits->data(), bits->numBitsLeft() / 8);
+ ABitReader bitstreamClone(bits->data(), bits->numBitsLeft() / 8);
+
+ RETURN_ERROR_IF_NOT_ENOUGH_BYTES_LEFT(bitstreamClone, 3);
+
+ unsigned code = bitstreamClone.getBits(3);
+ unsigned totalBitsUsed = bitsUsedTbl[code];
+ unsigned unIndex = indexTbl[code];
+
+ RETURN_ERROR_IF_NOT_ENOUGH_BYTES_LEFT(bitStream, totalBitsUsed);
+
+ bitStream.skipBits(totalBitsUsed);
+
+ uint32_t unValue = 0;
+ if (ucTable[unIndex] > 0) {
+ if (extractAndAddFlag) {
+ for (unsigned un = 0; un < unIndex; un++) {
+ unValue += (1 << ucTable[un]);
+ }
+
+ RETURN_ERROR_IF_NOT_ENOUGH_BYTES_LEFT(bitStream, ucTable[unIndex]);
+
+ unValue += bitStream.getBits(ucTable[unIndex]);
+ totalBitsUsed += ucTable[unIndex];
+ } else {
+ RETURN_ERROR_IF_NOT_ENOUGH_BYTES_LEFT(bitStream, ucTable[unIndex]);
+
+ unValue += bitStream.getBits(ucTable[unIndex]);
+ totalBitsUsed += ucTable[unIndex];
+ }
+ }
+
+ *bitsUsed = (size_t)totalBitsUsed;
+ *value = unValue;
+ return OK;
+}
+
+// Parse DTS UHD Profile-2 stream header
+static status_t parseDTSUHDSyncFrame(
+ const uint8_t *ptr, size_t size, unsigned &frameSize, sp<MetaData> *metaData) {
+
+ static const uint32_t DTSUHD_SYNC_CORE_16BIT_BE = 0x40411BF2;
+ static const uint32_t DTSUHD_NONSYNC_CORE_16BIT_BE = 0x71C442E8;
+
+ unsigned audioSamplRate = 0;
+
+ ABitReader bits(ptr, size);
+
+ RETURN_ERROR_IF_NOT_ENOUGH_BYTES_LEFT(bits, 32);
+
+ uint32_t syncWord = bits.getBits(32);
+
+ bool isSyncFrameFlag = false;
+ switch (syncWord) {
+ case DTSUHD_SYNC_CORE_16BIT_BE:
+ isSyncFrameFlag = true;
+ break;
+ case DTSUHD_NONSYNC_CORE_16BIT_BE:
+ isSyncFrameFlag = false;
+ break;
+ default:
+ ALOGE("No valid sync word in DTSUHD header");
+ return ERROR_MALFORMED; // invalid sync word
+ }
+
+ unsigned uctable1[4] = { 5, 8, 10, 12 };
+ uint32_t sizeOfFTOCPayload = 0;
+ size_t nuBitsUsed = 0;
+ status_t status = OK;
+
+ status = extractVarLenBitFields(&bits, &nuBitsUsed, &sizeOfFTOCPayload, uctable1, true);
+
+ if (status != OK) {
+ ALOGE("Failed to extractVarLenBitFields from DTSUHD header");
+ return ERROR_MALFORMED;
+ }
+
+ bits.skipBits(nuBitsUsed);
+
+ if (isSyncFrameFlag) {
+ RETURN_ERROR_IF_NOT_ENOUGH_BYTES_LEFT(bits, (1 + 2 + 3 + 2 + 1));
+
+ // FullChannelBasedMixFlag, ETSI TS 103 491 V1.2.1, Section 6.4.6.1
+ if (!(bits.getBits(1))) {
+ // This implementation only supports full channel mask-based
+ // audio presentation (i.e. 2.0, 5.1, 11.1 mix without objects)
+ ALOGE("Objects not supported, only DTSUHD full channel mask-based mix");
+ return ERROR_MALFORMED;
+ }
+
+ // BaseDuration, FrameDuration
+ bits.skipBits(2 + 3);
+
+ unsigned clockRateIndex = bits.getBits(2);
+ unsigned clockRateHertz = 0;
+
+ switch (clockRateIndex) {
+ case 0:
+ clockRateHertz = 32000;
+ break;
+ case 1:
+ clockRateHertz = 44100;
+ break;
+ case 2:
+ clockRateHertz = 48000;
+ break;
+ default:
+ ALOGE("Invalid clockRateIndex in DTSUHD header");
+ return ERROR_MALFORMED;
+ }
+
+ if (bits.getBits(1)) {
+ RETURN_ERROR_IF_NOT_ENOUGH_BYTES_LEFT(bits, (32 + 4));
+
+ bits.skipBits(32 + 4);
+ }
+
+ RETURN_ERROR_IF_NOT_ENOUGH_BYTES_LEFT(bits, 2);
+
+ unsigned samplRateMultiplier = (1 << bits.getBits(2));
+ audioSamplRate = clockRateHertz * samplRateMultiplier;
+ }
+
+ uint32_t chunkPayloadBytes = 0;
+ int numOfMDChunks = isSyncFrameFlag ? 1 : 0; // Metadata chunks
+ for (int nmdc = 0; nmdc < numOfMDChunks; nmdc++) {
+ unsigned uctable2[4] = {6, 9, 12, 15};
+ uint32_t nuMDChunkSize = 0;
+ nuBitsUsed = 0;
+
+ status = extractVarLenBitFields(&bits, &nuBitsUsed, &nuMDChunkSize, uctable2, true);
+ if (status != OK) {
+ ALOGE("Failed to extractVarLenBitFields from DTSUHD header");
+ return ERROR_MALFORMED;
+ }
+
+ bits.skipBits(nuBitsUsed);
+
+ if (nuMDChunkSize > 32767) {
+ ALOGE("Unsupported number of metadata chunks in DTSUHD header");
+ return ERROR_MALFORMED;
+ }
+ chunkPayloadBytes += nuMDChunkSize;
+ }
+
+ // Only one audio chunk is supported
+ int numAudioChunks = 1;
+ for (int nac = 0; nac < numAudioChunks; nac++) {
+ uint32_t acID = 256, nuAudioChunkSize = 0;
+
+ // isSyncFrameFlag means that ACID is present
+ if (isSyncFrameFlag) {
+ unsigned uctable3[4] = {2, 4, 6, 8};
+ nuBitsUsed = 0;
+
+ status = extractVarLenBitFields(&bits, &nuBitsUsed, &acID, uctable3, true);
+
+ if (status != OK) {
+ ALOGE("Failed to extractVarLenBitFields from DTSUHD header");
+ return ERROR_MALFORMED;
+ }
+
+ bits.skipBits(nuBitsUsed);
+ }
+
+ nuBitsUsed = 0;
+ if (acID == 0) {
+ nuAudioChunkSize = 0;
+ } else {
+ unsigned uctable4[4] = {9, 11, 13, 16};
+
+ status = extractVarLenBitFields(&bits, &nuBitsUsed, &nuAudioChunkSize, uctable4, true);
+
+ if (status != OK) {
+ ALOGE("Failed to extractVarLenBitFields from DTSUHD header");
+ return ERROR_MALFORMED;
+ }
+ }
+
+ if (nuAudioChunkSize > 65535) {
+ ALOGE("Unsupported number of audio chunks in DTSUHD header");
+ return ERROR_MALFORMED;
+ }
+
+ chunkPayloadBytes += nuAudioChunkSize;
+ }
+
+ frameSize = (sizeOfFTOCPayload + 1) + chunkPayloadBytes;
+
+ if (metaData != NULL) {
+ (*metaData)->setCString(kKeyMIMEType, MEDIA_MIMETYPE_AUDIO_DTS_UHD);
+ (*metaData)->setInt32(kKeyAudioProfile, 0x2); // CodecProfileLevel.DTS_UHDProfileP2
+ (*metaData)->setInt32(kKeyChannelCount, 2); // Setting default channel count as stereo
+ (*metaData)->setInt32(kKeySampleRate, audioSamplRate);
+ }
+
+ return OK;
+}
+
+static status_t isSeeminglyValidDTSHDHeader(const uint8_t *ptr, size_t size, unsigned &frameSize)
+{
+ return parseDTSHDSyncFrame(ptr, size, frameSize, NULL);
+}
+
+static status_t isSeeminglyValidDTSUHDHeader(const uint8_t *ptr, size_t size, unsigned &frameSize)
+{
+ return parseDTSUHDSyncFrame(ptr, size, frameSize, NULL);
+}
+
static status_t IsSeeminglyValidAC4Header(const uint8_t *ptr, size_t size, unsigned &frameSize) {
return parseAC4SyncFrame(ptr, size, frameSize, NULL);
}
@@ -655,6 +1085,70 @@
break;
}
+ case DTS: // Checking for DTS or DTS-HD syncword
+ case DTS_HD:
+ {
+ uint8_t *ptr = (uint8_t *)data;
+ unsigned frameSize = 0;
+ ssize_t startOffset = -1;
+
+ for (size_t i = 0; i < size; ++i) {
+ if (isSeeminglyValidDTSHDHeader(&ptr[i], size - i, frameSize) == OK) {
+ startOffset = i;
+ break;
+ }
+ }
+
+ if (startOffset < 0) {
+ return ERROR_MALFORMED;
+ }
+ if (startOffset > 0) {
+ ALOGI("found something resembling a DTS-HD syncword at "
+ "offset %zd",
+ startOffset);
+ }
+
+ if (frameSize != size - startOffset) {
+ ALOGV("DTS-HD frame size is %u bytes, while the buffer size is %zd bytes.",
+ frameSize, size - startOffset);
+ }
+
+ data = &ptr[startOffset];
+ size -= startOffset;
+ break;
+ }
+
+ case DTS_UHD:
+ {
+ uint8_t *ptr = (uint8_t *)data;
+ ssize_t startOffset = -1;
+ unsigned frameSize = 0;
+
+ for (size_t i = 0; i < size; ++i) {
+ if (isSeeminglyValidDTSUHDHeader(&ptr[i], size - i, frameSize) == OK) {
+ startOffset = i;
+ break;
+ }
+ }
+
+ if (startOffset < 0) {
+ return ERROR_MALFORMED;
+ }
+ if (startOffset > 0) {
+ ALOGI("found something resembling a DTS-UHD syncword at "
+ "offset %zd",
+ startOffset);
+ }
+
+ if (frameSize != size - startOffset) {
+ ALOGV("DTS-UHD frame size is %u bytes, while the buffer size is %zd bytes.",
+ frameSize, size - startOffset);
+ }
+ data = &ptr[startOffset];
+ size -= startOffset;
+ break;
+ }
+
case PCM_AUDIO:
case METADATA:
{
@@ -928,6 +1422,11 @@
return dequeueAccessUnitPCMAudio();
case METADATA:
return dequeueAccessUnitMetadata();
+ case DTS: // Using same dequeue function for both DTS and DTS-HD types.
+ case DTS_HD:
+ return dequeueAccessUnitDTSOrDTSHD();
+ case DTS_UHD:
+ return dequeueAccessUnitDTSUHD();
default:
if (mMode != MPEG_AUDIO) {
ALOGE("Unknown mode");
@@ -937,6 +1436,113 @@
}
}
+sp<ABuffer> ElementaryStreamQueue::dequeueAccessUnitDTSOrDTSHD() {
+ unsigned syncStartPos = 0; // in bytes
+ unsigned payloadSize = 0;
+ sp<MetaData> format = new MetaData;
+
+ ALOGV("dequeueAccessUnitDTSOrDTSHD[%d]: mBuffer %p(%zu)", mAUIndex,
+ mBuffer->data(), mBuffer->size());
+
+ while (true) {
+ if (syncStartPos + 4 >= mBuffer->size()) {
+ return NULL;
+ }
+ uint8_t *ptr = mBuffer->data() + syncStartPos;
+ size_t size = mBuffer->size() - syncStartPos;
+ status_t status = parseDTSHDSyncFrame(ptr, size, payloadSize, &format);
+ if (status == 0) {
+ break;
+ }
+ ++syncStartPos;
+ }
+
+ if (mBuffer->size() < syncStartPos + payloadSize) {
+ ALOGV("Not enough buffer size for DTS/DTS-HD");
+ return NULL;
+ }
+
+ if (mFormat == NULL) {
+ mFormat = format;
+ }
+
+ int64_t timeUs = fetchTimestamp(syncStartPos + payloadSize);
+ if (timeUs < 0LL) {
+ ALOGE("negative timeUs");
+ return NULL;
+ }
+ mAUIndex++;
+
+ sp<ABuffer> accessUnit = new ABuffer(syncStartPos + payloadSize);
+ memcpy(accessUnit->data(), mBuffer->data(), syncStartPos + payloadSize);
+
+ accessUnit->meta()->setInt64("timeUs", timeUs);
+ accessUnit->meta()->setInt32("isSync", 1);
+
+ memmove(
+ mBuffer->data(),
+ mBuffer->data() + syncStartPos + payloadSize,
+ mBuffer->size() - syncStartPos - payloadSize);
+
+ mBuffer->setRange(0, mBuffer->size() - syncStartPos - payloadSize);
+
+ return accessUnit;
+}
+
+sp<ABuffer> ElementaryStreamQueue::dequeueAccessUnitDTSUHD()
+{
+ unsigned syncStartPos = 0; // in bytes
+ unsigned payloadSize = 0;
+ sp<MetaData> format = new MetaData;
+
+ ALOGV("dequeueAccessUnitDTSUHD[%d]: mBuffer %p(%zu)", mAUIndex,
+ mBuffer->data(), mBuffer->size());
+
+ while (true) {
+ if (syncStartPos + 4 >= mBuffer->size()) {
+ return NULL;
+ }
+ uint8_t *ptr = mBuffer->data() + syncStartPos;
+ size_t size = mBuffer->size() - syncStartPos;
+ status_t status = parseDTSUHDSyncFrame(ptr, size, payloadSize, &format);
+ if (status == 0) {
+ break;
+ }
+ ++syncStartPos;
+ }
+
+ if (mBuffer->size() < syncStartPos + payloadSize) {
+ ALOGV("Not enough buffer size for DTS-UHD");
+ return NULL;
+ }
+
+ if (mFormat == NULL) {
+ mFormat = format;
+ }
+
+ int64_t timeUs = fetchTimestamp(syncStartPos + payloadSize);
+ if (timeUs < 0LL) {
+ ALOGE("negative timeUs");
+ return NULL;
+ }
+ mAUIndex++;
+
+ sp<ABuffer> accessUnit = new ABuffer(syncStartPos + payloadSize);
+ memcpy(accessUnit->data(), mBuffer->data(), syncStartPos + payloadSize);
+
+ accessUnit->meta()->setInt64("timeUs", timeUs);
+ accessUnit->meta()->setInt32("isSync", 1);
+
+ memmove(
+ mBuffer->data(),
+ mBuffer->data() + syncStartPos + payloadSize,
+ mBuffer->size() - syncStartPos - payloadSize);
+
+ mBuffer->setRange(0, mBuffer->size() - syncStartPos - payloadSize);
+
+ return accessUnit;
+}
+
sp<ABuffer> ElementaryStreamQueue::dequeueAccessUnitEAC3() {
unsigned syncStartPos = 0; // in bytes
unsigned payloadSize = 0;
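For callers, the new DTS modes behave like the existing audio modes of ElementaryStreamQueue: raw DTS/DTS-HD or DTS-UHD elementary-stream bytes go in through appendData() and framed access units come back out. A hedged sketch, where payload, payloadSize and timeUs stand in for data delivered by the TS demuxer:

ElementaryStreamQueue queue(ElementaryStreamQueue::DTS_HD);
queue.appendData(payload, payloadSize, timeUs);
sp<ABuffer> accessUnit = queue.dequeueAccessUnit(); // one DTS/DTS-HD frame, or NULL if incomplete
sp<MetaData> format = queue.getFormat();            // mime, channel count and sample rate from the sync frame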
diff --git a/media/module/mpeg2ts/include/mpeg2ts/ATSParser.h b/media/module/mpeg2ts/include/mpeg2ts/ATSParser.h
index 49578d3..b658c5a 100644
--- a/media/module/mpeg2ts/include/mpeg2ts/ATSParser.h
+++ b/media/module/mpeg2ts/include/mpeg2ts/ATSParser.h
@@ -157,6 +157,9 @@
STREAMTYPE_LPCM_AC3 = 0x83,
STREAMTYPE_EAC3 = 0x87,
+ // DTS audio stream type which contains only Core substream
+ STREAMTYPE_DTS = 0x8A,
+
//Sample Encrypted types
STREAMTYPE_H264_ENCRYPTED = 0xDB,
STREAMTYPE_AAC_ENCRYPTED = 0xCF,
@@ -168,6 +171,7 @@
DESCRIPTOR_CA = 0x09,
// DVB BlueBook A038 Table 12
+ DESCRIPTOR_DTS = 0x7B,
DESCRIPTOR_DVB_EXTENSION = 0x7F,
};
@@ -175,6 +179,8 @@
enum {
EXT_DESCRIPTOR_DVB_AC4 = 0x15,
EXT_DESCRIPTOR_DVB_AUDIO_PRESELECTION = 0x19,
+ EXT_DESCRIPTOR_DVB_DTS_HD = 0x0E,
+ EXT_DESCRIPTOR_DVB_DTS_UHD = 0x21,
EXT_DESCRIPTOR_DVB_RESERVED_MAX = 0x7F,
};
diff --git a/media/module/mpeg2ts/include/mpeg2ts/ESQueue.h b/media/module/mpeg2ts/include/mpeg2ts/ESQueue.h
index a06bd6a..550a0e4 100644
--- a/media/module/mpeg2ts/include/mpeg2ts/ESQueue.h
+++ b/media/module/mpeg2ts/include/mpeg2ts/ESQueue.h
@@ -45,6 +45,9 @@
MPEG4_VIDEO,
PCM_AUDIO,
METADATA,
+ DTS,
+ DTS_HD,
+ DTS_UHD,
};
enum Flags {
@@ -125,6 +128,8 @@
sp<ABuffer> dequeueAccessUnitMPEG4Video();
sp<ABuffer> dequeueAccessUnitPCMAudio();
sp<ABuffer> dequeueAccessUnitMetadata();
+ sp<ABuffer> dequeueAccessUnitDTSOrDTSHD();
+ sp<ABuffer> dequeueAccessUnitDTSUHD();
// consume a logical (compressed) access unit of size "size",
// returns its timestamp in us (or -1 if no time information).
diff --git a/media/mtp/tests/MtpFuzzer/mtp_handle_fuzzer.cpp b/media/mtp/tests/MtpFuzzer/mtp_handle_fuzzer.cpp
index 676345a..7dcdc3f 100644
--- a/media/mtp/tests/MtpFuzzer/mtp_handle_fuzzer.cpp
+++ b/media/mtp/tests/MtpFuzzer/mtp_handle_fuzzer.cpp
@@ -128,10 +128,10 @@
std::unique_ptr<IMtpHandle> handle;
if (mFdp.ConsumeBool()) {
std::unique_ptr<IMtpHandle> mtpCompactHandle(new MtpFfsCompatHandle(controlFd));
- handle = move(mtpCompactHandle);
+ handle = std::move(mtpCompactHandle);
} else {
std::unique_ptr<IMtpHandle> mtpHandle(new MtpFfsHandle(controlFd));
- handle = move(mtpHandle);
+ handle = std::move(mtpHandle);
}
int32_t mtpHandle = mFdp.ConsumeIntegralInRange<size_t>(kMinAPICase, kMaxMtpHandleAPI);
diff --git a/media/ndk/NdkMediaFormat.cpp b/media/ndk/NdkMediaFormat.cpp
index c0de4e4..66b5dec 100644
--- a/media/ndk/NdkMediaFormat.cpp
+++ b/media/ndk/NdkMediaFormat.cpp
@@ -368,6 +368,7 @@
EXPORT const char* AMEDIAFORMAT_KEY_ALBUM = "album";
EXPORT const char* AMEDIAFORMAT_KEY_ALBUMART = "albumart";
EXPORT const char* AMEDIAFORMAT_KEY_ALBUMARTIST = "albumartist";
+EXPORT const char* AMEDIAFORMAT_KEY_ALLOW_FRAME_DROP = "allow-frame-drop";
EXPORT const char* AMEDIAFORMAT_KEY_ARTIST = "artist";
EXPORT const char* AMEDIAFORMAT_KEY_AUDIO_PRESENTATION_INFO = "audio-presentation-info";
EXPORT const char* AMEDIAFORMAT_KEY_AUDIO_PRESENTATION_PRESENTATION_ID =
@@ -444,6 +445,7 @@
EXPORT const char* AMEDIAFORMAT_KEY_LYRICIST = "lyricist";
EXPORT const char* AMEDIAFORMAT_KEY_MANUFACTURER = "manufacturer";
EXPORT const char* AMEDIAFORMAT_KEY_MAX_BIT_RATE = "max-bitrate";
+EXPORT const char* AMEDIAFORMAT_KEY_MAX_B_FRAMES = "max-bframes";
EXPORT const char* AMEDIAFORMAT_KEY_MAX_FPS_TO_ENCODER = "max-fps-to-encoder";
EXPORT const char* AMEDIAFORMAT_KEY_MAX_HEIGHT = "max-height";
EXPORT const char* AMEDIAFORMAT_KEY_MAX_INPUT_SIZE = "max-input-size";
diff --git a/media/ndk/include/media/NdkMediaCodec.h b/media/ndk/include/media/NdkMediaCodec.h
index 4938f76..598beb7 100644
--- a/media/ndk/include/media/NdkMediaCodec.h
+++ b/media/ndk/include/media/NdkMediaCodec.h
@@ -63,11 +63,41 @@
typedef struct AMediaCodecBufferInfo AMediaCodecBufferInfo;
typedef struct AMediaCodecCryptoInfo AMediaCodecCryptoInfo;
+
+/**
+ * Definitions of per-buffer flags for operation with NdkMediaCodec.
+ *
+ * The semantics of these enums match those of the same name
+ * in {@link android.media.MediaCodec}.
+ */
enum {
+ /**
+ * This indicates that the (encoded) buffer marked as such contains
+ * the data for a key frame.
+ *
+ * Semantics are the same as {@link android.media.MediaCodec#BUFFER_FLAG_KEY_FRAME}
+ */
+ AMEDIACODEC_BUFFER_FLAG_KEY_FRAME = 1, // introduced in API 34
AMEDIACODEC_BUFFER_FLAG_CODEC_CONFIG = 2,
AMEDIACODEC_BUFFER_FLAG_END_OF_STREAM = 4,
AMEDIACODEC_BUFFER_FLAG_PARTIAL_FRAME = 8,
+ /**
+ * This indicates that the buffer contains non-media data for the
+ * muxer to process.
+ *
+ * Semantics are the same as {@link android.media.MediaCodec#BUFFER_FLAG_MUXER_DATA}
+ */
+ AMEDIACODEC_BUFFER_FLAG_MUXER_DATA = 16, // introduced in API 34
+ /**
+ * This indicates that the buffer is decoded and updates the internal state of the decoder,
+ * but does not produce any output buffer.
+ *
+ * Semantics are the same as {@link android.media.MediaCodec#BUFFER_FLAG_DECODE_ONLY}
+ */
+ AMEDIACODEC_BUFFER_FLAG_DECODE_ONLY = 32, // introduced in API 34
+};
+enum {
AMEDIACODEC_CONFIGURE_FLAG_ENCODE = 1,
AMEDIACODEC_INFO_OUTPUT_BUFFERS_CHANGED = -3,
AMEDIACODEC_INFO_OUTPUT_FORMAT_CHANGED = -2,
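A hedged sketch of applying the new decode-only flag when queuing an input buffer, for example while pre-rolling to a seek target; codec, size and ptsUs are placeholders and the timeout value is arbitrary:

ssize_t idx = AMediaCodec_dequeueInputBuffer(codec, 2000 /* timeoutUs */);
if (idx >= 0) {
    // The buffer contents are assumed to have been filled from the extractor already.
    AMediaCodec_queueInputBuffer(codec, idx, 0 /* offset */, size, ptsUs,
                                 AMEDIACODEC_BUFFER_FLAG_DECODE_ONLY);
}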
diff --git a/media/ndk/include/media/NdkMediaFormat.h b/media/ndk/include/media/NdkMediaFormat.h
index 2195657..b2cdf8d 100644
--- a/media/ndk/include/media/NdkMediaFormat.h
+++ b/media/ndk/include/media/NdkMediaFormat.h
@@ -135,6 +135,14 @@
extern const char* AMEDIAFORMAT_KEY_AAC_MAX_OUTPUT_CHANNEL_COUNT __INTRODUCED_IN(28);
extern const char* AMEDIAFORMAT_KEY_AAC_PROFILE __INTRODUCED_IN(21);
extern const char* AMEDIAFORMAT_KEY_AAC_SBR_MODE __INTRODUCED_IN(28);
+/**
+ * A key for applications to opt out of allowing
+ * a Surface to discard undisplayed/unconsumed frames
+ * as means to catch up after falling behind.
+ *
+ * Semantics match those of {@link android.media.MediaFormat#KEY_ALLOW_FRAME_DROP}
+ */
+extern const char* AMEDIAFORMAT_KEY_ALLOW_FRAME_DROP __INTRODUCED_IN(34);
extern const char* AMEDIAFORMAT_KEY_AUDIO_SESSION_ID __INTRODUCED_IN(28);
extern const char* AMEDIAFORMAT_KEY_BITRATE_MODE __INTRODUCED_IN(28);
extern const char* AMEDIAFORMAT_KEY_BIT_RATE __INTRODUCED_IN(21);
@@ -169,6 +177,13 @@
extern const char* AMEDIAFORMAT_KEY_LANGUAGE __INTRODUCED_IN(21);
extern const char* AMEDIAFORMAT_KEY_LATENCY __INTRODUCED_IN(28);
extern const char* AMEDIAFORMAT_KEY_LEVEL __INTRODUCED_IN(28);
+/**
+ * A key describing the maximum number of B frames between I or P frames,
+ * to be used by a video encoder.
+ *
+ * Semantics match those of {@link android.media.MediaFormat#KEY_MAX_B_FRAMES}
+ */
+extern const char* AMEDIAFORMAT_KEY_MAX_B_FRAMES __INTRODUCED_IN(34);
extern const char* AMEDIAFORMAT_KEY_MAX_HEIGHT __INTRODUCED_IN(21);
extern const char* AMEDIAFORMAT_KEY_MAX_INPUT_SIZE __INTRODUCED_IN(21);
extern const char* AMEDIAFORMAT_KEY_MAX_WIDTH __INTRODUCED_IN(21);
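A short sketch of setting the two new keys from native code; format is a placeholder AMediaFormat* and the values are examples only:

AMediaFormat_setInt32(format, AMEDIAFORMAT_KEY_ALLOW_FRAME_DROP, 0); // opt out of frame dropping
AMediaFormat_setInt32(format, AMEDIAFORMAT_KEY_MAX_B_FRAMES, 2);     // at most 2 B frames between I/P frames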
diff --git a/media/ndk/include/media/NdkMediaMuxer.h b/media/ndk/include/media/NdkMediaMuxer.h
index d7eccb8..1674ffa 100644
--- a/media/ndk/include/media/NdkMediaMuxer.h
+++ b/media/ndk/include/media/NdkMediaMuxer.h
@@ -48,10 +48,22 @@
struct AMediaMuxer;
typedef struct AMediaMuxer AMediaMuxer;
+/**
+ * Defines the output format. These constants are used with the constructor.
+ *
+ * These enums match the ones used in {@link android.media.MediaMuxer.OutputFormat}
+ */
typedef enum {
+ /** MPEG4 media file format*/
AMEDIAMUXER_OUTPUT_FORMAT_MPEG_4 = 0,
- AMEDIAMUXER_OUTPUT_FORMAT_WEBM = 1,
- AMEDIAMUXER_OUTPUT_FORMAT_THREE_GPP = 2,
+ /** WEBM media file format*/
+ AMEDIAMUXER_OUTPUT_FORMAT_WEBM = 1,
+ /** 3GPP media file format*/
+ AMEDIAMUXER_OUTPUT_FORMAT_THREE_GPP = 2,
+ /** HEIF media file format*/
+ AMEDIAMUXER_OUTPUT_FORMAT_HEIF = 3, // introduced in API 34
+ /** Ogg media file format*/
+ AMEDIAMUXER_OUTPUT_FORMAT_OGG = 4, // introduced in API 34
} OutputFormat;
typedef enum {
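A hedged sketch of selecting one of the newly exposed container formats; fd is a placeholder file descriptor already opened for writing:

AMediaMuxer* muxer = AMediaMuxer_new(fd, AMEDIAMUXER_OUTPUT_FORMAT_OGG);
// ... add tracks, start, write sample data, stop ...
AMediaMuxer_delete(muxer);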
diff --git a/media/ndk/libmediandk.map.txt b/media/ndk/libmediandk.map.txt
index 2b5bacf..4dd81ab 100644
--- a/media/ndk/libmediandk.map.txt
+++ b/media/ndk/libmediandk.map.txt
@@ -47,6 +47,7 @@
AMEDIAFORMAT_KEY_ALBUM; # var introduced=29
AMEDIAFORMAT_KEY_ALBUMART; # var introduced=29
AMEDIAFORMAT_KEY_ALBUMARTIST; # var introduced=29
+ AMEDIAFORMAT_KEY_ALLOW_FRAME_DROP; # var introduced=34
AMEDIAFORMAT_KEY_ARTIST; # var introduced=29
AMEDIAFORMAT_KEY_AUDIO_PRESENTATION_INFO; # var introduced=29
AMEDIAFORMAT_KEY_AUDIO_SESSION_ID; # var introduced=28
@@ -119,6 +120,7 @@
AMEDIAFORMAT_KEY_LOW_LATENCY; # var introduced=30
AMEDIAFORMAT_KEY_LYRICIST; # var introduced=29
AMEDIAFORMAT_KEY_MANUFACTURER; # var introduced=29
+ AMEDIAFORMAT_KEY_MAX_B_FRAMES; # var introduced=34
AMEDIAFORMAT_KEY_MAX_BIT_RATE; # var introduced=29
AMEDIAFORMAT_KEY_MAX_FPS_TO_ENCODER; # var introduced=29
AMEDIAFORMAT_KEY_MAX_HEIGHT; # var introduced=21
diff --git a/media/utils/ProcessInfo.cpp b/media/utils/ProcessInfo.cpp
index 13f16b1..6296351 100644
--- a/media/utils/ProcessInfo.cpp
+++ b/media/utils/ProcessInfo.cpp
@@ -30,10 +30,64 @@
static constexpr int32_t INVALID_ADJ = -10000;
static constexpr int32_t NATIVE_ADJ = -1000;
+/* Make sure this matches with ActivityManager::PROCESS_STATE_NONEXISTENT
+ * #include <binder/ActivityManager.h>
+ * using ActivityManager::PROCESS_STATE_NONEXISTENT;
+ */
+static constexpr int32_t PROCESS_STATE_NONEXISTENT = 20;
+
ProcessInfo::ProcessInfo() {}
+/*
+ * Checks whether the processes with the given pids exist or not.
+ *
+ * Arguments:
+ * - pids (input): list of pids whose existence should be checked.
+ * - existent (output): boolean vector corresponding to the existence state of each pid.
+ *
+ * On successful return:
+ * - existent[i] is true if pids[i] is still active, and
+ * - existent[i] is false if pids[i] has already terminated (nonexistent).
+ * On unsuccessful return, the output argument existent is invalid.
+ */
+bool ProcessInfo::checkProcessExistent(const std::vector<int32_t>& pids,
+ std::vector<bool>* existent) {
+ sp<IBinder> binder = defaultServiceManager()->waitForService(String16("processinfo"));
+ sp<IProcessInfoService> service = interface_cast<IProcessInfoService>(binder);
+
+ // Get the process state of the applications managed/tracked by the ActivityManagerService.
+ // Don't have to look into the native processes.
+ // If we really need the state of a native process, we can look it up in mOverrideMap.
+ size_t count = pids.size();
+ std::vector<int32_t> states(count, PROCESS_STATE_NONEXISTENT);
+ status_t err = service->getProcessStatesFromPids(count,
+ const_cast<int32_t*>(pids.data()),
+ states.data());
+ if (err != OK) {
+ ALOGE("%s: IProcessInfoService::getProcessStatesFromPids failed with %d",
+ __func__, err);
+ return false;
+ }
+
+ existent->clear();
+ for (size_t index = 0; index < states.size(); index++) {
+ // If this process is not tracked by ActivityManagerService, look for overrides.
+ if (states[index] == PROCESS_STATE_NONEXISTENT) {
+ std::scoped_lock lock{mOverrideLock};
+ std::map<int, ProcessInfoOverride>::iterator it =
+ mOverrideMap.find(pids[index]);
+ if (it != mOverrideMap.end()) {
+ states[index] = it->second.procState;
+ }
+ }
+ existent->push_back(states[index] != PROCESS_STATE_NONEXISTENT);
+ }
+
+ return true;
+}
+
bool ProcessInfo::getPriority(int pid, int* priority) {
- sp<IBinder> binder = defaultServiceManager()->getService(String16("processinfo"));
+ sp<IBinder> binder = defaultServiceManager()->waitForService(String16("processinfo"));
sp<IProcessInfoService> service = interface_cast<IProcessInfoService>(binder);
size_t length = 1;
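(Aside, not part of the patch: a caller-side sketch of the new batch API with hypothetical pids, showing how a media service could reclaim state for clients that have exited.)

    #include <vector>
    #include <mediautils/ProcessInfo.h>

    using android::ProcessInfo;
    using android::sp;

    static void pruneDeadClients() {
        std::vector<int32_t> pids = {1234, 5678};  // hypothetical client pids
        std::vector<bool> existent;
        sp<ProcessInfo> info = new ProcessInfo();
        if (!info->checkProcessExistent(pids, &existent)) {
            return;  // the output vector is not valid on failure
        }
        for (size_t i = 0; i < pids.size(); ++i) {
            if (!existent[i]) {
                // pids[i] has terminated; release any resources held on its behalf.
            }
        }
    }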
diff --git a/media/utils/include/mediautils/MethodStatistics.h b/media/utils/include/mediautils/MethodStatistics.h
index 6d7e990..c8b36d8 100644
--- a/media/utils/include/mediautils/MethodStatistics.h
+++ b/media/utils/include/mediautils/MethodStatistics.h
@@ -59,7 +59,7 @@
void event(C&& code, FloatType executeMs) {
std::lock_guard lg(mLock);
auto it = mStatisticsMap.lower_bound(code);
- if (it != mStatisticsMap.end() && it->first == code) {
+ if (it != mStatisticsMap.end() && it->first == static_cast<Code>(code)) {
it->second.add(executeMs);
} else {
// StatsType ctor takes an optional array of data for initialization.
diff --git a/media/utils/include/mediautils/ProcessInfo.h b/media/utils/include/mediautils/ProcessInfo.h
index 9afa3df..c27c939 100644
--- a/media/utils/include/mediautils/ProcessInfo.h
+++ b/media/utils/include/mediautils/ProcessInfo.h
@@ -33,6 +33,8 @@
virtual bool isPidUidTrusted(int pid, int uid);
virtual bool overrideProcessInfo(int pid, int procState, int oomScore);
virtual void removeProcessInfoOverride(int pid);
+ bool checkProcessExistent(const std::vector<int32_t>& pids,
+ std::vector<bool>* existent) override;
protected:
virtual ~ProcessInfo();
diff --git a/media/utils/include/mediautils/ProcessInfoInterface.h b/media/utils/include/mediautils/ProcessInfoInterface.h
index b6529fc..e3384ba 100644
--- a/media/utils/include/mediautils/ProcessInfoInterface.h
+++ b/media/utils/include/mediautils/ProcessInfoInterface.h
@@ -17,16 +17,73 @@
#ifndef PROCESS_INFO_INTERFACE_H_
#define PROCESS_INFO_INTERFACE_H_
+#include <vector>
#include <utils/RefBase.h>
namespace android {
struct ProcessInfoInterface : public RefBase {
+ /*
+     * Gets the priority of the process (with the given pid) as an oom score.
+ *
+ * @param[in] pid pid of the process.
+     * @param[out] priority priority of the process, expressed as an oom score.
+ *
+ * @return true for successful return and false otherwise.
+ */
virtual bool getPriority(int pid, int* priority) = 0;
+ /*
+     * Checks whether the given pid is trusted.
+ *
+ * @param[in] pid pid of the process.
+ *
+ * @return true for trusted process and false otherwise.
+ */
virtual bool isPidTrusted(int pid) = 0;
+ /*
+     * Checks whether the given pid and uid combination is trusted.
+ *
+ * @param[in] pid pid of the process.
+ * @param[in] uid uid of the process.
+ *
+ * @return true for trusted process and false otherwise.
+ */
virtual bool isPidUidTrusted(int pid, int uid) = 0;
+ /*
+ * Override process state and oom score of the pid.
+ *
+ * @param[in] pid pid of the process.
+ * @param[in] procState new state of the process to override with.
+ * @param[in] oomScore new oom score of the process to override with.
+ *
+ * @return true upon success and false otherwise.
+ */
virtual bool overrideProcessInfo(int pid, int procState, int oomScore) = 0;
+ /*
+ * Remove the override info of the given process.
+ *
+ * @param[in] pid pid of the process.
+ */
virtual void removeProcessInfoOverride(int pid) = 0;
+ /*
+     * Checks whether the processes with the given pids still exist.
+     *
+     * @param[in] pids list of pids whose existence is to be checked.
+     * @param[out] existent boolean vector with one entry per pid.
+     *
+     * @return true for successful return and false otherwise.
+     * On successful return:
+     *     - existent[i] == true means pids[i] is still active, and
+     *     - existent[i] == false means pids[i] has already terminated (nonexistent).
+     * On unsuccessful return, the output argument existent is invalid.
+ */
+ virtual bool checkProcessExistent(const std::vector<int32_t>& pids,
+ std::vector<bool>* existent) {
+ // A default implementation.
+ (void)pids;
+ (void)existent;
+ return false;
+ }
protected:
virtual ~ProcessInfoInterface() {}
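(Aside: because the new method has a default implementation that returns false, existing implementations keep compiling; a purely hypothetical test fake would only override what it needs, for example:)

    #include <vector>
    #include <mediautils/ProcessInfoInterface.h>

    // Illustrative fake, not part of the patch; all names are made up.
    struct FakeProcessInfo : public android::ProcessInfoInterface {
        bool getPriority(int, int* priority) override { *priority = 0; return true; }
        bool isPidTrusted(int) override { return true; }
        bool isPidUidTrusted(int, int) override { return true; }
        bool overrideProcessInfo(int, int, int) override { return true; }
        void removeProcessInfoOverride(int) override {}
        bool checkProcessExistent(const std::vector<int32_t>& pids,
                                  std::vector<bool>* existent) override {
            existent->assign(pids.size(), true);  // pretend every pid is alive
            return true;
        }
    };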
diff --git a/services/audioflinger/Android.bp b/services/audioflinger/Android.bp
index 43ef311..663df69 100644
--- a/services/audioflinger/Android.bp
+++ b/services/audioflinger/Android.bp
@@ -19,12 +19,128 @@
],
}
+tidy_errors = [
+ // https://clang.llvm.org/extra/clang-tidy/checks/list.html
+ // For many categories, the checks are too many to specify individually.
+ // Feel free to disable as needed - as warnings are generally ignored,
+ // we treat warnings as errors.
+ "android-*",
+ "bugprone-*",
+ "cert-*",
+ "clang-analyzer-security*",
+ "google-*",
+ "misc-*",
+    //"modernize-*", // explicitly list the modernize checks as they can be subjective.
+ "modernize-avoid-bind",
+ //"modernize-avoid-c-arrays", // std::array<> can be verbose
+ "modernize-concat-nested-namespaces",
+ //"modernize-deprecated-headers", // C headers still ok even if there is C++ equivalent.
+ "modernize-deprecated-ios-base-aliases",
+ "modernize-loop-convert",
+ "modernize-make-shared",
+ "modernize-make-unique",
+ // "modernize-pass-by-value",
+ "modernize-raw-string-literal",
+ "modernize-redundant-void-arg",
+ "modernize-replace-auto-ptr",
+ "modernize-replace-random-shuffle",
+ "modernize-return-braced-init-list",
+ "modernize-shrink-to-fit",
+ "modernize-unary-static-assert",
+ // "modernize-use-auto", // found in MediaMetricsService.h, debatable - auto can obscure type
+ "modernize-use-bool-literals",
+ "modernize-use-default-member-init",
+ "modernize-use-emplace",
+ "modernize-use-equals-default",
+ "modernize-use-equals-delete",
+ // "modernize-use-nodiscard",
+ "modernize-use-noexcept",
+ "modernize-use-nullptr",
+ "modernize-use-override",
+ //"modernize-use-trailing-return-type", // not necessarily more readable
+ "modernize-use-transparent-functors",
+ "modernize-use-uncaught-exceptions",
+ "modernize-use-using",
+ "performance-*",
+
+ // Remove some pedantic stylistic requirements.
+ "-google-readability-casting", // C++ casts not always necessary and may be verbose
+ "-google-readability-todo", // do not require TODO(info)
+
+ "-bugprone-unhandled-self-assignment",
+ "-bugprone-suspicious-string-compare",
+ "-cert-oop54-cpp", // found in TransactionLog.h
+ "-bugprone-narrowing-conversions", // b/182410845
+
+ // TODO(b/275642749) Reenable these warnings
+ "-bugprone-assignment-in-if-condition",
+ "-bugprone-forward-declaration-namespace",
+ "-bugprone-parent-virtual-call",
+ "-cert-dcl59-cpp",
+ "-cert-err34-c",
+ "-google-build-namespaces",
+ "-google-build-using-namespace",
+ "-google-default-arguments",
+ "-google-runtime-int",
+ "-misc-const-correctness",
+ "-misc-non-private-member-variables-in-classes",
+ "-modernize-concat-nested-namespaces",
+ "-modernize-loop-convert",
+ "-modernize-use-default-member-init",
+ "-modernize-use-equals-default",
+ "-modernize-use-nullptr",
+ "-modernize-use-override",
+ "-modernize-use-using",
+ "-performance-no-int-to-ptr",
+]
+
+// Eventually use common tidy defaults
+cc_defaults {
+ name: "audioflinger_flags_defaults",
+ // https://clang.llvm.org/docs/UsersManual.html#command-line-options
+ // https://clang.llvm.org/docs/DiagnosticsReference.html
+ cflags: [
+ "-Wall",
+ "-Wdeprecated",
+ "-Werror",
+ "-Werror=implicit-fallthrough",
+ "-Werror=sometimes-uninitialized",
+ "-Werror=conditional-uninitialized",
+ "-Wextra",
+
+ // suppress some warning chatter.
+ "-Wno-deprecated-copy-with-dtor",
+ "-Wno-deprecated-copy-with-user-provided-dtor",
+
+ "-Wredundant-decls",
+ "-Wshadow",
+ "-Wstrict-aliasing",
+ "-fstrict-aliasing",
+ "-Wthread-safety",
+ //"-Wthread-safety-negative", // experimental - looks broken in R.
+ "-Wunreachable-code",
+ "-Wunreachable-code-break",
+ "-Wunreachable-code-return",
+ "-Wunused",
+ "-Wused-but-marked-unused",
+ "-D_LIBCPP_ENABLE_THREAD_SAFETY_ANNOTATIONS",
+ ],
+ // https://clang.llvm.org/extra/clang-tidy/
+ tidy: true,
+ tidy_checks: tidy_errors,
+ tidy_checks_as_errors: tidy_errors,
+ tidy_flags: [
+ "-format-style=file",
+ ],
+}
+
cc_library_shared {
name: "libaudioflinger",
defaults: [
"latest_android_media_audio_common_types_cpp_shared",
"latest_android_hardware_audio_core_sounddose_ndk_shared",
+ "audioflinger_flags_defaults",
],
srcs: [
@@ -67,6 +183,7 @@
"av-types-aidl-cpp",
"effect-aidl-cpp",
"libaudioclient_aidl_conversion",
+ "libactivitymanager_aidl",
"libaudioflinger_timing",
"libaudiofoundation",
"libaudiohal",
diff --git a/services/audioflinger/AudioFlinger.cpp b/services/audioflinger/AudioFlinger.cpp
index 3b73333..3c0f8f3 100644
--- a/services/audioflinger/AudioFlinger.cpp
+++ b/services/audioflinger/AudioFlinger.cpp
@@ -229,6 +229,7 @@
BINDER_METHOD_ENTRY(getAAudioMixerBurstCount) \
BINDER_METHOD_ENTRY(getAAudioHardwareBurstMinUsec) \
BINDER_METHOD_ENTRY(setDeviceConnectedState) \
+BINDER_METHOD_ENTRY(setSimulateDeviceConnections) \
BINDER_METHOD_ENTRY(setRequestedLatencyMode) \
BINDER_METHOD_ENTRY(getSupportedLatencyModes) \
BINDER_METHOD_ENTRY(setBluetoothVariableLatencyEnabled) \
@@ -502,6 +503,25 @@
return final_result;
}
+status_t AudioFlinger::setSimulateDeviceConnections(bool enabled) {
+ bool at_least_one_succeeded = false;
+ status_t last_error = INVALID_OPERATION;
+ Mutex::Autolock _l(mLock);
+ AutoMutex lock(mHardwareLock);
+ mHardwareStatus = AUDIO_HW_SET_SIMULATE_CONNECTIONS;
+ for (size_t i = 0; i < mAudioHwDevs.size(); i++) {
+ sp<DeviceHalInterface> dev = mAudioHwDevs.valueAt(i)->hwDevice();
+ status_t result = dev->setSimulateDeviceConnections(enabled);
+ if (result == OK) {
+ at_least_one_succeeded = true;
+ } else {
+ last_error = result;
+ }
+ }
+ mHardwareStatus = AUDIO_HW_IDLE;
+ return at_least_one_succeeded ? OK : last_error;
+}
+
// getDefaultVibratorInfo_l must be called with AudioFlinger lock held.
std::optional<media::AudioVibratorInfo> AudioFlinger::getDefaultVibratorInfo_l() {
if (mAudioVibratorInfos.empty()) {
@@ -706,7 +726,7 @@
}
status_t AudioFlinger::addEffectToHal(audio_port_handle_t deviceId,
- audio_module_handle_t hwModuleId, sp<EffectHalInterface> effect) {
+ audio_module_handle_t hwModuleId, const sp<EffectHalInterface>& effect) {
AutoMutex lock(mHardwareLock);
AudioHwDevice *audioHwDevice = mAudioHwDevs.valueFor(hwModuleId);
if (audioHwDevice == nullptr) {
@@ -716,7 +736,7 @@
}
status_t AudioFlinger::removeEffectFromHal(audio_port_handle_t deviceId,
- audio_module_handle_t hwModuleId, sp<EffectHalInterface> effect) {
+ audio_module_handle_t hwModuleId, const sp<EffectHalInterface>& effect) {
AutoMutex lock(mHardwareLock);
AudioHwDevice *audioHwDevice = mAudioHwDevs.valueFor(hwModuleId);
if (audioHwDevice == nullptr) {
@@ -838,6 +858,7 @@
}
status_t AudioFlinger::dump(int fd, const Vector<String16>& args)
+NO_THREAD_SAFETY_ANALYSIS // conditional try lock
{
if (!dumpAllowed()) {
dumpPermissionDenial(fd, args);
@@ -943,7 +964,6 @@
// to lookup the service if it's not running, as it will block for a second
if (sMediaLogServiceAsBinder != 0) {
dprintf(fd, "\nmedia.log:\n");
- Vector<String16> args;
sMediaLogServiceAsBinder->dump(fd, args);
}
@@ -1445,8 +1465,9 @@
if (NO_ERROR == ret) {
Mutex::Autolock _l(mLock);
mMode = mode;
- for (size_t i = 0; i < mPlaybackThreads.size(); i++)
+ for (size_t i = 0; i < mPlaybackThreads.size(); i++) {
mPlaybackThreads.valueAt(i)->setMode(mode);
+ }
}
mediametrics::LogItem(mMetricsId)
@@ -1794,7 +1815,7 @@
// forwardAudioHwSyncToDownstreamPatches_l() must be called with AudioFlinger::mLock held
void AudioFlinger::forwardParametersToDownstreamPatches_l(
audio_io_handle_t upStream, const String8& keyValuePairs,
- std::function<bool(const sp<PlaybackThread>&)> useThread)
+ const std::function<bool(const sp<PlaybackThread>&)>& useThread)
{
std::vector<PatchPanel::SoftwarePatch> swPatches;
if (mPatchPanel.getDownstreamSoftwarePatches(upStream, &swPatches) != OK) return;
@@ -1810,7 +1831,7 @@
// Update downstream patches for all playback threads attached to an MSD module
void AudioFlinger::updateDownStreamPatches_l(const struct audio_patch *patch,
- const std::set<audio_io_handle_t> streams)
+ const std::set<audio_io_handle_t>& streams)
{
for (const audio_io_handle_t stream : streams) {
PlaybackThread *playbackThread = checkPlaybackThread_l(stream);
@@ -2016,24 +2037,29 @@
mHardwareStatus = AUDIO_HW_GET_INPUT_BUFFER_SIZE;
sp<DeviceHalInterface> dev = mPrimaryHardwareDev->hwDevice();
+
std::vector<audio_channel_mask_t> channelMasks = {channelMask};
- if (channelMask != AUDIO_CHANNEL_IN_MONO)
+ if (channelMask != AUDIO_CHANNEL_IN_MONO) {
channelMasks.push_back(AUDIO_CHANNEL_IN_MONO);
- if (channelMask != AUDIO_CHANNEL_IN_STEREO)
+ }
+ if (channelMask != AUDIO_CHANNEL_IN_STEREO) {
channelMasks.push_back(AUDIO_CHANNEL_IN_STEREO);
+ }
std::vector<audio_format_t> formats = {format};
- if (format != AUDIO_FORMAT_PCM_16_BIT)
+ if (format != AUDIO_FORMAT_PCM_16_BIT) {
formats.push_back(AUDIO_FORMAT_PCM_16_BIT);
+ }
std::vector<uint32_t> sampleRates = {sampleRate};
static const uint32_t SR_44100 = 44100;
static const uint32_t SR_48000 = 48000;
-
- if (sampleRate != SR_48000)
+ if (sampleRate != SR_48000) {
sampleRates.push_back(SR_48000);
- if (sampleRate != SR_44100)
+ }
+ if (sampleRate != SR_44100) {
sampleRates.push_back(SR_44100);
+ }
mHardwareStatus = AUDIO_HW_IDLE;
@@ -2504,7 +2530,7 @@
// session and move it to this thread.
sp<EffectChain> chain = getOrphanEffectChain_l(sessionId);
if (chain != 0) {
- Mutex::Autolock _l(thread->mLock);
+ Mutex::Autolock _l2(thread->mLock);
thread->addEffectChain_l(chain);
}
break;
@@ -2906,7 +2932,7 @@
sp<AudioFlinger::ThreadBase> AudioFlinger::openOutput_l(audio_module_handle_t module,
audio_io_handle_t *output,
audio_config_t *halConfig,
- audio_config_base_t *mixerConfig __unused,
+ audio_config_base_t *mixerConfig,
audio_devices_t deviceType,
const String8& address,
audio_output_flags_t flags)
@@ -3032,7 +3058,6 @@
aidl2legacy_int32_t_audio_output_flags_t_mask(request.flags));
audio_io_handle_t output;
- uint32_t latencyMs;
ALOGI("openOutput() this %p, module %d Device %s, SamplingRate %d, Format %#08x, "
"Channels %#x, flags %#x",
@@ -3055,6 +3080,7 @@
sp<ThreadBase> thread = openOutput_l(module, &output, &halConfig,
&mixerConfig, deviceType, address, flags);
if (thread != 0) {
+ uint32_t latencyMs = 0;
if ((flags & AUDIO_OUTPUT_FLAG_MMAP_NOIRQ) == 0) {
PlaybackThread *playbackThread = (PlaybackThread *)thread.get();
latencyMs = playbackThread->latency();
@@ -3403,7 +3429,7 @@
continue;
}
if (t->hasAudioSession(chain->sessionId()) != 0) {
- Mutex::Autolock _l(t->mLock);
+ Mutex::Autolock _l2(t->mLock);
ALOGV("closeInput() found thread %d for effect session %d",
t->id(), chain->sessionId());
t->addEffectChain_l(chain);
@@ -3614,7 +3640,8 @@
}
for (size_t i = 0; i < chains.size(); i++) {
- sp<EffectChain> ec = chains[i];
+ // clang-tidy suggests const ref
+ sp<EffectChain> ec = chains[i]; // NOLINT(performance-unnecessary-copy-initialization)
int sessionid = ec->sessionId();
sp<ThreadBase> t = ec->thread().promote();
if (t == 0) {
@@ -3679,6 +3706,20 @@
return thread;
}
+// checkOutputThread_l() must be called with AudioFlinger::mLock held
+sp<AudioFlinger::ThreadBase> AudioFlinger::checkOutputThread_l(audio_io_handle_t ioHandle) const
+{
+ if (audio_unique_id_get_use(ioHandle) != AUDIO_UNIQUE_ID_USE_OUTPUT) {
+ return nullptr;
+ }
+
+ sp<AudioFlinger::ThreadBase> thread = mPlaybackThreads.valueFor(ioHandle);
+ if (thread == nullptr) {
+ thread = mMmapThreads.valueFor(ioHandle);
+ }
+ return thread;
+}
+
// checkPlaybackThread_l() must be called with AudioFlinger::mLock held
AudioFlinger::PlaybackThread *AudioFlinger::checkPlaybackThread_l(audio_io_handle_t output) const
{
@@ -3783,7 +3824,7 @@
PlaybackThread *thread = primaryPlaybackThread_l();
if (thread == NULL) {
- return DeviceTypeSet();
+ return {};
}
return thread->outDeviceTypes();
@@ -4295,7 +4336,7 @@
// session and used it instead of creating a new one.
sp<EffectChain> chain = getOrphanEffectChain_l(sessionId);
if (chain != 0) {
- Mutex::Autolock _l(thread->mLock);
+ Mutex::Autolock _l2(thread->mLock);
thread->addEffectChain_l(chain);
}
}
@@ -4413,6 +4454,7 @@
status_t AudioFlinger::moveEffectChain_l(audio_session_t sessionId,
AudioFlinger::PlaybackThread *srcThread,
AudioFlinger::PlaybackThread *dstThread)
+NO_THREAD_SAFETY_ANALYSIS // requires srcThread and dstThread locks
{
ALOGV("moveEffectChain_l() session %d from thread %p to thread %p",
sessionId, srcThread, dstThread);
@@ -4442,11 +4484,12 @@
// transfer all effects one by one so that new effect chain is created on new thread with
// correct buffer sizes and audio parameters and effect engines reconfigured accordingly
sp<EffectChain> dstChain;
- sp<EffectModule> effect = chain->getEffectFromId_l(0);
Vector< sp<EffectModule> > removed;
status_t status = NO_ERROR;
std::string errorString;
- while (effect != nullptr) {
+ // process effects one by one.
+ for (sp<EffectModule> effect = chain->getEffectFromId_l(0); effect != nullptr;
+ effect = chain->getEffectFromId_l(0)) {
srcThread->removeEffect_l(effect);
removed.add(effect);
status = dstThread->addEffect_l(effect);
@@ -4467,7 +4510,6 @@
break;
}
}
- effect = chain->getEffectFromId_l(0);
}
size_t restored = 0;
@@ -4571,6 +4613,7 @@
}
bool AudioFlinger::isNonOffloadableGlobalEffectEnabled_l()
+NO_THREAD_SAFETY_ANALYSIS // thread lock for getEffectChain_l.
{
if (mGlobalEffectEnableTime != 0 &&
((systemTime() - mGlobalEffectEnableTime) < kMinGlobalEffectEnabletimeNs)) {
@@ -4653,12 +4696,9 @@
// ----------------------------------------------------------------------------
status_t AudioFlinger::onTransactWrapper(TransactionCode code,
- const Parcel& data,
- uint32_t flags,
+ [[maybe_unused]] const Parcel& data,
+ [[maybe_unused]] uint32_t flags,
const std::function<status_t()>& delegate) {
- (void) data;
- (void) flags;
-
// make sure transactions reserved to AudioPolicyManager do not come from other processes
switch (code) {
case TransactionCode::SET_STREAM_VOLUME:
diff --git a/services/audioflinger/AudioFlinger.h b/services/audioflinger/AudioFlinger.h
index 9fc503b..077fa26 100644
--- a/services/audioflinger/AudioFlinger.h
+++ b/services/audioflinger/AudioFlinger.h
@@ -124,7 +124,6 @@
class EffectsFactoryHalInterface;
class FastMixer;
class IAudioManager;
-class ISoundDoseCallback;
class PassthruBufferProvider;
class RecordBufferConverter;
class ServerProxy;
@@ -300,6 +299,8 @@
virtual status_t setDeviceConnectedState(const struct audio_port_v7 *port, bool connected);
+ virtual status_t setSimulateDeviceConnections(bool enabled);
+
virtual status_t setRequestedLatencyMode(
audio_io_handle_t output, audio_latency_mode_t mode);
@@ -341,12 +342,12 @@
static void onExternalVibrationStop(const sp<os::ExternalVibration>& externalVibration);
status_t addEffectToHal(audio_port_handle_t deviceId,
- audio_module_handle_t hwModuleId, sp<EffectHalInterface> effect);
+ audio_module_handle_t hwModuleId, const sp<EffectHalInterface>& effect);
status_t removeEffectFromHal(audio_port_handle_t deviceId,
- audio_module_handle_t hwModuleId, sp<EffectHalInterface> effect);
+ audio_module_handle_t hwModuleId, const sp<EffectHalInterface>& effect);
void updateDownStreamPatches_l(const struct audio_patch *patch,
- const std::set<audio_io_handle_t> streams);
+ const std::set<audio_io_handle_t>& streams);
std::optional<media::AudioVibratorInfo> getDefaultVibratorInfo_l();
@@ -389,7 +390,7 @@
audio_session_t triggerSession,
audio_session_t listenerSession,
sync_event_callback_t callBack,
- wp<RefBase> cookie)
+ const wp<RefBase>& cookie)
: mType(type), mTriggerSession(triggerSession), mListenerSession(listenerSession),
mCallback(callBack), mCookie(cookie)
{}
@@ -757,12 +758,15 @@
audio_port_handle_t *handle);
virtual status_t stop(audio_port_handle_t handle);
virtual status_t standby();
+ status_t reportData(const void* buffer, size_t frameCount) override;
private:
const sp<MmapThread> mThread;
};
ThreadBase *checkThread_l(audio_io_handle_t ioHandle) const;
+ sp<AudioFlinger::ThreadBase> checkOutputThread_l(audio_io_handle_t ioHandle) const
+ REQUIRES(mLock);
PlaybackThread *checkPlaybackThread_l(audio_io_handle_t output) const;
MixerThread *checkMixerThread_l(audio_io_handle_t output) const;
RecordThread *checkRecordThread_l(audio_io_handle_t input) const;
@@ -866,7 +870,7 @@
void updateOutDevicesForRecordThreads_l(const DeviceDescriptorBaseVector& devices);
void forwardParametersToDownstreamPatches_l(
audio_io_handle_t upStream, const String8& keyValuePairs,
- std::function<bool(const sp<PlaybackThread>&)> useThread = nullptr);
+ const std::function<bool(const sp<PlaybackThread>&)>& useThread = nullptr);
// AudioStreamIn is immutable, so their fields are const.
// For emphasis, we could also make all pointers to them be "const *",
@@ -879,7 +883,8 @@
sp<DeviceHalInterface> hwDev() const { return audioHwDev->hwDevice(); }
- AudioStreamIn(AudioHwDevice *dev, sp<StreamInHalInterface> in, audio_input_flags_t flags) :
+ AudioStreamIn(AudioHwDevice *dev, const sp<StreamInHalInterface>& in,
+ audio_input_flags_t flags) :
audioHwDev(dev), stream(in), flags(flags) {}
status_t read(void *buffer, size_t bytes, size_t *read) override {
return stream->read(buffer, bytes, read);
@@ -950,6 +955,7 @@
AUDIO_HW_GET_MASTER_MUTE, // get_master_mute
AUDIO_HW_GET_MICROPHONES, // getMicrophones
AUDIO_HW_SET_CONNECTED_STATE, // setConnectedState
+ AUDIO_HW_SET_SIMULATE_CONNECTIONS, // setSimulateDeviceConnections
};
mutable hardware_call_state mHardwareStatus; // for dump only
diff --git a/services/audioflinger/AudioHwDevice.h b/services/audioflinger/AudioHwDevice.h
index 1749f3f..d071922 100644
--- a/services/audioflinger/AudioHwDevice.h
+++ b/services/audioflinger/AudioHwDevice.h
@@ -46,7 +46,7 @@
AudioHwDevice(audio_module_handle_t handle,
const char *moduleName,
- sp<DeviceHalInterface> hwDevice,
+ const sp<DeviceHalInterface>& hwDevice,
Flags flags)
: mHandle(handle)
, mModuleName(strdup(moduleName))
diff --git a/services/audioflinger/AutoPark.h b/services/audioflinger/AutoPark.h
index 9ac7b65..83f6b7d 100644
--- a/services/audioflinger/AutoPark.h
+++ b/services/audioflinger/AutoPark.h
@@ -58,4 +58,4 @@
FastThreadState::Command mPreviousCommand;
}; // class AutoPark
-} // namespace
+} // namespace android
diff --git a/services/audioflinger/DeviceEffectManager.cpp b/services/audioflinger/DeviceEffectManager.cpp
index 9105500..2f61a01 100644
--- a/services/audioflinger/DeviceEffectManager.cpp
+++ b/services/audioflinger/DeviceEffectManager.cpp
@@ -145,7 +145,9 @@
return status;
}
-void AudioFlinger::DeviceEffectManager::dump(int fd) {
+void AudioFlinger::DeviceEffectManager::dump(int fd)
+NO_THREAD_SAFETY_ANALYSIS // conditional try lock
+{
const bool locked = dumpTryLock(mLock);
if (!locked) {
String8 result("DeviceEffectManager may be deadlocked\n");
diff --git a/services/audioflinger/DeviceEffectManager.h b/services/audioflinger/DeviceEffectManager.h
index 7602f12..395781d 100644
--- a/services/audioflinger/DeviceEffectManager.h
+++ b/services/audioflinger/DeviceEffectManager.h
@@ -45,11 +45,11 @@
int32_t sessionId, int32_t deviceId,
sp<EffectHalInterface> *effect);
status_t addEffectToHal(audio_port_handle_t deviceId, audio_module_handle_t hwModuleId,
- sp<EffectHalInterface> effect) {
+ const sp<EffectHalInterface>& effect) {
return mAudioFlinger.addEffectToHal(deviceId, hwModuleId, effect);
};
status_t removeEffectFromHal(audio_port_handle_t deviceId, audio_module_handle_t hwModuleId,
- sp<EffectHalInterface> effect) {
+ const sp<EffectHalInterface>& effect) {
return mAudioFlinger.removeEffectFromHal(deviceId, hwModuleId, effect);
};
@@ -74,7 +74,7 @@
class DeviceEffectManagerCallback : public EffectCallbackInterface {
public:
- DeviceEffectManagerCallback(DeviceEffectManager& manager)
+ explicit DeviceEffectManagerCallback(DeviceEffectManager& manager)
: mManager(manager) {}
status_t createEffectHal(const effect_uuid_t *pEffectUuid,
@@ -105,10 +105,10 @@
size_t frameCount() const override { return 0; }
uint32_t latency() const override { return 0; }
- status_t addEffectToHal(sp<EffectHalInterface> effect __unused) override {
+ status_t addEffectToHal(const sp<EffectHalInterface>& /* effect */) override {
return NO_ERROR;
}
- status_t removeEffectFromHal(sp<EffectHalInterface> effect __unused) override {
+ status_t removeEffectFromHal(const sp<EffectHalInterface>& /* effect */) override {
return NO_ERROR;
}
@@ -133,11 +133,11 @@
int newEffectId() { return mManager.audioFlinger().nextUniqueId(AUDIO_UNIQUE_ID_USE_EFFECT); }
status_t addEffectToHal(audio_port_handle_t deviceId,
- audio_module_handle_t hwModuleId, sp<EffectHalInterface> effect) {
+ audio_module_handle_t hwModuleId, const sp<EffectHalInterface>& effect) {
return mManager.addEffectToHal(deviceId, hwModuleId, effect);
}
status_t removeEffectFromHal(audio_port_handle_t deviceId,
- audio_module_handle_t hwModuleId, sp<EffectHalInterface> effect) {
+ audio_module_handle_t hwModuleId, const sp<EffectHalInterface>& effect) {
return mManager.removeEffectFromHal(deviceId, hwModuleId, effect);
}
private:
diff --git a/services/audioflinger/Effects.cpp b/services/audioflinger/Effects.cpp
index 84b9c40..19e4151 100644
--- a/services/audioflinger/Effects.cpp
+++ b/services/audioflinger/Effects.cpp
@@ -498,6 +498,7 @@
}
void AudioFlinger::EffectBase::dump(int fd, const Vector<String16>& args __unused)
+NO_THREAD_SAFETY_ANALYSIS // conditional try lock
{
String8 result;
@@ -569,7 +570,6 @@
mMaxDisableWaitCnt(1), // set by configure(), should be >= 1
mDisableWaitCnt(0), // set by process() and updateState()
mOffloaded(false),
- mAddedToHal(false),
mIsOutput(false)
#ifdef FLOAT_EFFECT_CHAIN
, mSupportsFloat(false)
@@ -1103,12 +1103,12 @@
{
if ((mDescriptor.flags & EFFECT_FLAG_TYPE_MASK) == EFFECT_FLAG_TYPE_PRE_PROC ||
(mDescriptor.flags & EFFECT_FLAG_TYPE_MASK) == EFFECT_FLAG_TYPE_POST_PROC) {
- if (mAddedToHal) {
+ if (mCurrentHalStream == getCallback()->io()) {
return;
}
(void)getCallback()->addEffectToHal(mEffectInterface);
- mAddedToHal = true;
+ mCurrentHalStream = getCallback()->io();
}
}
@@ -1204,12 +1204,11 @@
{
if ((mDescriptor.flags & EFFECT_FLAG_TYPE_MASK) == EFFECT_FLAG_TYPE_PRE_PROC ||
(mDescriptor.flags & EFFECT_FLAG_TYPE_MASK) == EFFECT_FLAG_TYPE_POST_PROC) {
- if (!mAddedToHal) {
- return NO_ERROR;
+ if (mCurrentHalStream != getCallback()->io()) {
+ return (mCurrentHalStream == AUDIO_IO_HANDLE_NONE) ? NO_ERROR : INVALID_OPERATION;
}
-
getCallback()->removeEffectFromHal(mEffectInterface);
- mAddedToHal = false;
+ mCurrentHalStream = AUDIO_IO_HANDLE_NONE;
}
return NO_ERROR;
}
@@ -1249,13 +1248,13 @@
return -EINVAL;
}
if (cmdCode == EFFECT_CMD_GET_PARAM &&
- (maxReplySize < sizeof(effect_param_t) ||
+ (maxReplySize < static_cast<signed>(sizeof(effect_param_t)) ||
param->psize > maxReplySize - sizeof(effect_param_t))) {
android_errorWriteLog(0x534e4554, "29251553");
return -EINVAL;
}
if (cmdCode == EFFECT_CMD_GET_PARAM &&
- (sizeof(effect_param_t) > maxReplySize
+ (static_cast<signed>(sizeof(effect_param_t)) > maxReplySize
|| param->psize > maxReplySize - sizeof(effect_param_t)
|| param->vsize > maxReplySize - sizeof(effect_param_t)
- param->psize
@@ -1685,6 +1684,7 @@
}
void AudioFlinger::EffectModule::dump(int fd, const Vector<String16>& args)
+NO_THREAD_SAFETY_ANALYSIS // conditional try lock
{
EffectBase::dump(fd, args);
@@ -1946,7 +1946,7 @@
}
mCblkMemory.clear(); // free the shared memory before releasing the heap it belongs to
// Client destructor must run with AudioFlinger client mutex locked
- Mutex::Autolock _l(mClient->audioFlinger()->mClientLock);
+ Mutex::Autolock _l2(mClient->audioFlinger()->mClientLock);
mClient.clear();
}
}
@@ -2010,14 +2010,14 @@
}
if (cmdCode == EFFECT_CMD_ENABLE) {
- if (maxResponseSize < sizeof(int)) {
+ if (maxResponseSize < static_cast<signed>(sizeof(int))) {
android_errorWriteLog(0x534e4554, "32095713");
RETURN(BAD_VALUE);
}
writeToBuffer(NO_ERROR, response);
return enable(_aidl_return);
} else if (cmdCode == EFFECT_CMD_DISABLE) {
- if (maxResponseSize < sizeof(int)) {
+ if (maxResponseSize < static_cast<signed>(sizeof(int))) {
android_errorWriteLog(0x534e4554, "32095713");
RETURN(BAD_VALUE);
}
@@ -2041,7 +2041,7 @@
RETURN(INVALID_OPERATION);
}
- if (maxResponseSize < sizeof(int)) {
+ if (maxResponseSize < (signed)sizeof(int)) {
android_errorWriteLog(0x534e4554, "32095713");
RETURN(BAD_VALUE);
}
@@ -2050,7 +2050,7 @@
// No need to trylock() here as this function is executed in the binder thread serving a
// particular client process: no risk to block the whole media server process or mixer
// threads if we are stuck here
- Mutex::Autolock _l(mCblk->lock);
+ Mutex::Autolock _l2(mCblk->lock);
// keep local copy of index in case of client corruption b/32220769
const uint32_t clientIndex = mCblk->clientIndex;
const uint32_t serverIndex = mCblk->serverIndex;
@@ -2153,6 +2153,7 @@
}
void AudioFlinger::EffectHandle::dumpToBuffer(char* buffer, size_t size)
+NO_THREAD_SAFETY_ANALYSIS // conditional try lock
{
bool locked = mCblk != NULL && AudioFlinger::dumpTryLock(mCblk->lock);
@@ -2407,7 +2408,7 @@
}
} else {
effect->setInBuffer(mInBuffer);
- if (idx_insert == previousSize) {
+ if (idx_insert == static_cast<ssize_t>(previousSize)) {
if (idx_insert != 0) {
mEffects[idx_insert-1]->configure();
mEffects[idx_insert-1]->setOutBuffer(mInBuffer);
@@ -2467,7 +2468,7 @@
}
// remember position of first insert effect and by default
// select this as insert position for new effect
- if (idx_insert == size) {
+ if (idx_insert == static_cast<ssize_t>(size)) {
idx_insert = i;
}
// remember position of last insert effect claiming
@@ -2697,6 +2698,7 @@
}
void AudioFlinger::EffectChain::dump(int fd, const Vector<String16>& args)
+NO_THREAD_SAFETY_ANALYSIS // conditional try lock
{
String8 result;
@@ -3049,7 +3051,7 @@
}
status_t AudioFlinger::EffectChain::EffectCallback::addEffectToHal(
- sp<EffectHalInterface> effect) {
+ const sp<EffectHalInterface>& effect) {
status_t result = NO_INIT;
sp<ThreadBase> t = thread().promote();
if (t == nullptr) {
@@ -3065,7 +3067,7 @@
}
status_t AudioFlinger::EffectChain::EffectCallback::removeEffectFromHal(
- sp<EffectHalInterface> effect) {
+ const sp<EffectHalInterface>& effect) {
status_t result = NO_INIT;
sp<ThreadBase> t = thread().promote();
if (t == nullptr) {
@@ -3207,15 +3209,20 @@
return t->frameCount();
}
-uint32_t AudioFlinger::EffectChain::EffectCallback::latency() const {
+uint32_t AudioFlinger::EffectChain::EffectCallback::latency() const
+NO_THREAD_SAFETY_ANALYSIS // latency_l() access
+{
sp<ThreadBase> t = thread().promote();
if (t == nullptr) {
return 0;
}
+ // TODO(b/275956781) - this requires the thread lock.
return t->latency_l();
}
-void AudioFlinger::EffectChain::EffectCallback::setVolumeForOutput(float left, float right) const {
+void AudioFlinger::EffectChain::EffectCallback::setVolumeForOutput(float left, float right) const
+NO_THREAD_SAFETY_ANALYSIS // setVolumeForOutput_l() access
+{
sp<ThreadBase> t = thread().promote();
if (t == nullptr) {
return;
@@ -3459,7 +3466,7 @@
}
status_t AudioFlinger::DeviceEffectProxy::addEffectToHal(
- sp<EffectHalInterface> effect) {
+ const sp<EffectHalInterface>& effect) {
if (mHalEffect == nullptr) {
return NO_INIT;
}
@@ -3468,7 +3475,7 @@
}
status_t AudioFlinger::DeviceEffectProxy::removeEffectFromHal(
- sp<EffectHalInterface> effect) {
+ const sp<EffectHalInterface>& effect) {
if (mHalEffect == nullptr) {
return NO_INIT;
}
@@ -3506,7 +3513,9 @@
return audio_channel_count_from_in_mask(channelMask());
}
-void AudioFlinger::DeviceEffectProxy::dump(int fd, int spaces) {
+void AudioFlinger::DeviceEffectProxy::dump(int fd, int spaces)
+NO_THREAD_SAFETY_ANALYSIS // conditional try lock
+{
const Vector<String16> args;
EffectBase::dump(fd, args);
@@ -3585,7 +3594,7 @@
}
status_t AudioFlinger::DeviceEffectProxy::ProxyCallback::addEffectToHal(
- sp<EffectHalInterface> effect) {
+ const sp<EffectHalInterface>& effect) {
sp<DeviceEffectProxy> proxy = mProxy.promote();
if (proxy == nullptr) {
return NO_INIT;
@@ -3594,7 +3603,7 @@
}
status_t AudioFlinger::DeviceEffectProxy::ProxyCallback::removeEffectFromHal(
- sp<EffectHalInterface> effect) {
+ const sp<EffectHalInterface>& effect) {
sp<DeviceEffectProxy> proxy = mProxy.promote();
if (proxy == nullptr) {
return NO_INIT;
diff --git a/services/audioflinger/Effects.h b/services/audioflinger/Effects.h
index 7b71a85..885d3e5 100644
--- a/services/audioflinger/Effects.h
+++ b/services/audioflinger/Effects.h
@@ -45,8 +45,8 @@
// Non trivial methods usually implemented with help from ThreadBase:
// pay attention to mutex locking order
virtual uint32_t latency() const { return 0; }
- virtual status_t addEffectToHal(sp<EffectHalInterface> effect) = 0;
- virtual status_t removeEffectFromHal(sp<EffectHalInterface> effect) = 0;
+ virtual status_t addEffectToHal(const sp<EffectHalInterface>& effect) = 0;
+ virtual status_t removeEffectFromHal(const sp<EffectHalInterface>& effect) = 0;
virtual void setVolumeForOutput(float left, float right) const = 0;
virtual bool disconnectEffectHandle(EffectHandle *handle, bool unpinIfLast) = 0;
virtual void checkSuspendOnEffectEnabled(const sp<EffectBase>& effect,
@@ -159,8 +159,8 @@
bool isPinned() const { return mPinned; }
void unPin() { mPinned = false; }
- void lock() { mLock.lock(); }
- void unlock() { mLock.unlock(); }
+ void lock() ACQUIRE(mLock) { mLock.lock(); }
+ void unlock() RELEASE(mLock) { mLock.unlock(); }
status_t updatePolicyState();
@@ -319,7 +319,8 @@
// sending disable command.
uint32_t mDisableWaitCnt; // current process() calls count during disable period.
bool mOffloaded; // effect is currently offloaded to the audio DSP
- bool mAddedToHal; // effect has been added to the audio HAL
+ // effect has been added to this HAL input stream
+ audio_io_handle_t mCurrentHalStream = AUDIO_IO_HANDLE_NONE;
bool mIsOutput; // direction of the AF thread
#ifdef FLOAT_EFFECT_CHAIN
@@ -459,10 +460,10 @@
void process_l();
- void lock() {
+ void lock() ACQUIRE(mLock) {
mLock.lock();
}
- void unlock() {
+ void unlock() RELEASE(mLock) {
mLock.unlock();
}
@@ -608,8 +609,8 @@
size_t frameCount() const override;
uint32_t latency() const override;
- status_t addEffectToHal(sp<EffectHalInterface> effect) override;
- status_t removeEffectFromHal(sp<EffectHalInterface> effect) override;
+ status_t addEffectToHal(const sp<EffectHalInterface>& effect) override;
+ status_t removeEffectFromHal(const sp<EffectHalInterface>& effect) override;
bool disconnectEffectHandle(EffectHandle *handle, bool unpinIfLast) override;
void setVolumeForOutput(float left, float right) const override;
@@ -724,8 +725,8 @@
size_t removeEffect(const sp<EffectModule>& effect);
- status_t addEffectToHal(sp<EffectHalInterface> effect);
- status_t removeEffectFromHal(sp<EffectHalInterface> effect);
+ status_t addEffectToHal(const sp<EffectHalInterface>& effect);
+ status_t removeEffectFromHal(const sp<EffectHalInterface>& effect);
const AudioDeviceTypeAddr& device() { return mDevice; };
bool isOutput() const;
@@ -769,8 +770,8 @@
size_t frameCount() const override { return 0; }
uint32_t latency() const override { return 0; }
- status_t addEffectToHal(sp<EffectHalInterface> effect) override;
- status_t removeEffectFromHal(sp<EffectHalInterface> effect) override;
+ status_t addEffectToHal(const sp<EffectHalInterface>& effect) override;
+ status_t removeEffectFromHal(const sp<EffectHalInterface>& effect) override;
bool disconnectEffectHandle(EffectHandle *handle, bool unpinIfLast) override;
void setVolumeForOutput(float left __unused, float right __unused) const override {}
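(Aside: the ACQUIRE/RELEASE annotations added above are what let the -Wthread-safety flags in the new audioflinger_flags_defaults follow these helper lock methods. A minimal illustrative sketch, assuming the libbase thread annotation macros and the _LIBCPP_ENABLE_THREAD_SAFETY_ANNOTATIONS define from the cflags:)

    #include <mutex>
    #include <android-base/thread_annotations.h>

    class Counter {
      public:
        void lock() ACQUIRE(mMutex) { mMutex.lock(); }
        void unlock() RELEASE(mMutex) { mMutex.unlock(); }
        void increment() {
            lock();
            ++mValue;   // OK: the analysis knows mMutex is held here
            unlock();
        }
      private:
        std::mutex mMutex;
        int mValue GUARDED_BY(mMutex) = 0;
    };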
diff --git a/services/audioflinger/FastCaptureDumpState.cpp b/services/audioflinger/FastCaptureDumpState.cpp
index b8b3866..243dfa5 100644
--- a/services/audioflinger/FastCaptureDumpState.cpp
+++ b/services/audioflinger/FastCaptureDumpState.cpp
@@ -51,4 +51,4 @@
periodSec * 1e3, mSilenced ? "true" : "false");
}
-} // android
+} // namespace android
diff --git a/services/audioflinger/FastCaptureDumpState.h b/services/audioflinger/FastCaptureDumpState.h
index a1b8706..34ce456 100644
--- a/services/audioflinger/FastCaptureDumpState.h
+++ b/services/audioflinger/FastCaptureDumpState.h
@@ -38,6 +38,6 @@
bool mSilenced = false; // capture is silenced
};
-} // android
+} // namespace android
#endif // ANDROID_AUDIO_FAST_CAPTURE_DUMP_STATE_H
diff --git a/services/audioflinger/FastCaptureState.cpp b/services/audioflinger/FastCaptureState.cpp
index c4d5e45..918ba9c 100644
--- a/services/audioflinger/FastCaptureState.cpp
+++ b/services/audioflinger/FastCaptureState.cpp
@@ -42,4 +42,4 @@
LOG_ALWAYS_FATAL("%s", __func__);
}
-} // android
+} // namespace android
diff --git a/services/audioflinger/FastMixer.cpp b/services/audioflinger/FastMixer.cpp
index 26bd92d..61dd3f2 100644
--- a/services/audioflinger/FastMixer.cpp
+++ b/services/audioflinger/FastMixer.cpp
@@ -79,8 +79,6 @@
mMasterMono(false),
mThreadIoHandle(parentIoHandle)
{
- (void)mThreadIoHandle; // prevent unused warning, see C++17 [[maybe_unused]]
-
// FIXME pass sInitial as parameter to base class constructor, and make it static local
mPrevious = &sInitial;
mCurrent = &sInitial;
diff --git a/services/audioflinger/FastMixer.h b/services/audioflinger/FastMixer.h
index 97ab635..d71519f 100644
--- a/services/audioflinger/FastMixer.h
+++ b/services/audioflinger/FastMixer.h
@@ -107,7 +107,8 @@
std::atomic<float> mMasterBalance{};
std::atomic_int_fast64_t mBoottimeOffset;
- const audio_io_handle_t mThreadIoHandle; // parent thread id for debugging purposes
+ // parent thread id for debugging purposes
+ [[maybe_unused]] const audio_io_handle_t mThreadIoHandle;
#ifdef TEE_SINK
NBAIO_Tee mTee;
#endif
diff --git a/services/audioflinger/FastMixerDumpState.cpp b/services/audioflinger/FastMixerDumpState.cpp
index 3f20282..d041882 100644
--- a/services/audioflinger/FastMixerDumpState.cpp
+++ b/services/audioflinger/FastMixerDumpState.cpp
@@ -203,4 +203,4 @@
}
}
-} // android
+} // namespace android
diff --git a/services/audioflinger/FastMixerDumpState.h b/services/audioflinger/FastMixerDumpState.h
index 9b91cbc..294ef78 100644
--- a/services/audioflinger/FastMixerDumpState.h
+++ b/services/audioflinger/FastMixerDumpState.h
@@ -81,6 +81,6 @@
TimestampVerifier<int64_t /* frame count */, int64_t /* time ns */> mTimestampVerifier;
};
-} // android
+} // namespace android
#endif // ANDROID_AUDIO_FAST_MIXER_DUMP_STATE_H
diff --git a/services/audioflinger/FastThread.h b/services/audioflinger/FastThread.h
index 3f6b206..2f0f73f 100644
--- a/services/audioflinger/FastThread.h
+++ b/services/audioflinger/FastThread.h
@@ -92,6 +92,6 @@
}; // class FastThread
-} // android
+} // namespace android
#endif // ANDROID_AUDIO_FAST_THREAD_H
diff --git a/services/audioflinger/FastThreadDumpState.cpp b/services/audioflinger/FastThreadDumpState.cpp
index 964a725..e91073f 100644
--- a/services/audioflinger/FastThreadDumpState.cpp
+++ b/services/audioflinger/FastThreadDumpState.cpp
@@ -56,4 +56,4 @@
}
#endif
-} // android
+} // namespace android
diff --git a/services/audioflinger/FastThreadDumpState.h b/services/audioflinger/FastThreadDumpState.h
index 1ce0914..0b20e55 100644
--- a/services/audioflinger/FastThreadDumpState.h
+++ b/services/audioflinger/FastThreadDumpState.h
@@ -67,6 +67,6 @@
}; // struct FastThreadDumpState
-} // android
+} // namespace android
#endif // ANDROID_AUDIO_FAST_THREAD_DUMP_STATE_H
diff --git a/services/audioflinger/FastThreadState.h b/services/audioflinger/FastThreadState.h
index 54c0dc6..9fb4e06 100644
--- a/services/audioflinger/FastThreadState.h
+++ b/services/audioflinger/FastThreadState.h
@@ -50,6 +50,6 @@
static const char *commandToString(Command command);
}; // struct FastThreadState
-} // android
+} // namespace android
#endif // ANDROID_AUDIO_FAST_THREAD_STATE_H
diff --git a/services/audioflinger/MelReporter.cpp b/services/audioflinger/MelReporter.cpp
index cfa02bb..3d5aae2 100644
--- a/services/audioflinger/MelReporter.cpp
+++ b/services/audioflinger/MelReporter.cpp
@@ -81,6 +81,10 @@
}
bool AudioFlinger::MelReporter::shouldComputeMelForDeviceType(audio_devices_t device) {
+ if (mSoundDoseManager->isCsdDisabled()) {
+ ALOGV("%s csd is disabled", __func__);
+ return false;
+ }
if (mSoundDoseManager->forceComputeCsdOnAllDevices()) {
return true;
}
@@ -102,6 +106,11 @@
void AudioFlinger::MelReporter::updateMetadataForCsd(audio_io_handle_t streamHandle,
const std::vector<playback_track_metadata_v7_t>& metadataVec) {
+ if (mSoundDoseManager->isCsdDisabled()) {
+ ALOGV("%s csd is disabled", __func__);
+ return;
+ }
+
std::lock_guard _laf(mAudioFlinger.mLock);
std::lock_guard _l(mLock);
auto activeMelPatchId = activePatchStreamHandle_l(streamHandle);
@@ -133,6 +142,10 @@
void AudioFlinger::MelReporter::onCreateAudioPatch(audio_patch_handle_t handle,
const PatchPanel::Patch& patch) {
+ if (mSoundDoseManager->isCsdDisabled()) {
+ ALOGV("%s csd is disabled", __func__);
+ return;
+ }
if (useHalSoundDoseInterface()) {
ALOGV("%s using HAL sound dose, ignore new patch", __func__);
return;
@@ -150,7 +163,7 @@
audio_io_handle_t streamHandle = patch.mAudioPatch.sources[0].ext.mix.handle;
ActiveMelPatch newPatch;
newPatch.streamHandle = streamHandle;
- for (int i = 0; i < patch.mAudioPatch.num_sinks; ++ i) {
+ for (size_t i = 0; i < patch.mAudioPatch.num_sinks; ++i) {
if (patch.mAudioPatch.sinks[i].type == AUDIO_PORT_TYPE_DEVICE
&& shouldComputeMelForDeviceType(patch.mAudioPatch.sinks[i].ext.device.type)) {
audio_port_handle_t deviceId = patch.mAudioPatch.sinks[i].id;
@@ -161,17 +174,21 @@
}
}
- std::lock_guard _afl(mAudioFlinger.mLock);
- std::lock_guard _l(mLock);
- ALOGV("%s add patch handle %d to active devices", __func__, handle);
- startMelComputationForActivePatch_l(newPatch);
- newPatch.csdActive = true;
- mActiveMelPatches[handle] = newPatch;
+ if (!newPatch.deviceHandles.empty()) {
+ std::lock_guard _afl(mAudioFlinger.mLock);
+ std::lock_guard _l(mLock);
+ ALOGV("%s add patch handle %d to active devices", __func__, handle);
+ startMelComputationForActivePatch_l(newPatch);
+ newPatch.csdActive = true;
+ mActiveMelPatches[handle] = newPatch;
+ }
}
-void AudioFlinger::MelReporter::startMelComputationForActivePatch_l(const ActiveMelPatch& patch) {
- auto thread = mAudioFlinger.checkPlaybackThread_l(patch.streamHandle);
- if (thread == nullptr) {
+void AudioFlinger::MelReporter::startMelComputationForActivePatch_l(const ActiveMelPatch& patch)
+NO_THREAD_SAFETY_ANALYSIS // access of AudioFlinger::checkOutputThread_l
+{
+ auto outputThread = mAudioFlinger.checkOutputThread_l(patch.streamHandle);
+ if (outputThread == nullptr) {
ALOGE("%s cannot find thread for stream handle %d", __func__, patch.streamHandle);
return;
}
@@ -180,16 +197,24 @@
++mActiveDevices[deviceHandle];
ALOGI("%s add stream %d that uses device %d for CSD, nr of streams: %d", __func__,
patch.streamHandle, deviceHandle, mActiveDevices[deviceHandle]);
- thread->startMelComputation(mSoundDoseManager->getOrCreateProcessorForDevice(
- deviceHandle,
- patch.streamHandle,
- thread->mSampleRate,
- thread->mChannelCount,
- thread->mFormat));
+
+ if (outputThread != nullptr) {
+ outputThread->startMelComputation_l(mSoundDoseManager->getOrCreateProcessorForDevice(
+ deviceHandle,
+ patch.streamHandle,
+ outputThread->mSampleRate,
+ outputThread->mChannelCount,
+ outputThread->mFormat));
}
+ }
}
void AudioFlinger::MelReporter::onReleaseAudioPatch(audio_patch_handle_t handle) {
+ if (mSoundDoseManager->isCsdDisabled()) {
+ ALOGV("%s csd is disabled", __func__);
+ return;
+ }
+
ActiveMelPatch melPatch;
{
std::lock_guard _l(mLock);
@@ -223,13 +248,16 @@
mUseHalSoundDoseInterface = true;
}
-void AudioFlinger::MelReporter::stopMelComputationForPatch_l(const ActiveMelPatch& patch) {
+void AudioFlinger::MelReporter::stopMelComputationForPatch_l(const ActiveMelPatch& patch)
+NO_THREAD_SAFETY_ANALYSIS // access of AudioFlinger::checkOutputThread_l
+{
if (!patch.csdActive) {
// no need to stop CSD inactive patches
return;
}
- auto thread = mAudioFlinger.checkPlaybackThread_l(patch.streamHandle);
+ auto outputThread = mAudioFlinger.checkOutputThread_l(patch.streamHandle);
+
ALOGV("%s: stop MEL for stream id: %d", __func__, patch.streamHandle);
for (const auto& deviceId : patch.deviceHandles) {
if (mActiveDevices[deviceId] > 0) {
@@ -242,9 +270,8 @@
}
}
- mSoundDoseManager->removeStreamProcessor(patch.streamHandle);
- if (thread != nullptr) {
- thread->stopMelComputation();
+ if (outputThread != nullptr) {
+ outputThread->stopMelComputation_l();
}
}
diff --git a/services/audioflinger/MelReporter.h b/services/audioflinger/MelReporter.h
index c1b291f..81a307a 100644
--- a/services/audioflinger/MelReporter.h
+++ b/services/audioflinger/MelReporter.h
@@ -90,12 +90,13 @@
void stopInternalMelComputation();
/** Should be called with the following order of locks: mAudioFlinger.mLock -> mLock. */
- void stopMelComputationForPatch_l(const ActiveMelPatch& patch);
+ void stopMelComputationForPatch_l(const ActiveMelPatch& patch) REQUIRES(mLock);
/** Should be called with the following order of locks: mAudioFlinger.mLock -> mLock. */
- void startMelComputationForActivePatch_l(const ActiveMelPatch& patch);
+ void startMelComputationForActivePatch_l(const ActiveMelPatch& patch) REQUIRES(mLock);
- std::optional<audio_patch_handle_t> activePatchStreamHandle_l(audio_io_handle_t streamHandle);
+ std::optional<audio_patch_handle_t>
+ activePatchStreamHandle_l(audio_io_handle_t streamHandle) REQUIRES(mLock);
bool useHalSoundDoseInterface();
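(Aside: a small sketch of the REQUIRES(mLock) contract used above, assuming the libbase thread annotation macros; the callee states which lock must already be held and -Wthread-safety verifies each caller.)

    #include <mutex>
    #include <android-base/thread_annotations.h>

    class Reporter {
      public:
        void stop() {
            std::lock_guard _l(mLock);
            stopPatch_l();         // OK: mLock is held at the call site
        }
      private:
        void stopPatch_l() REQUIRES(mLock) { ++mStopped; }
        std::mutex mLock;
        int mStopped GUARDED_BY(mLock) = 0;
    };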
diff --git a/services/audioflinger/PatchCommandThread.cpp b/services/audioflinger/PatchCommandThread.cpp
index c3cb7e7..f4aab1f 100644
--- a/services/audioflinger/PatchCommandThread.cpp
+++ b/services/audioflinger/PatchCommandThread.cpp
@@ -56,7 +56,9 @@
releaseAudioPatchCommand(handle);
}
-bool AudioFlinger::PatchCommandThread::threadLoop() {
+bool AudioFlinger::PatchCommandThread::threadLoop()
+NO_THREAD_SAFETY_ANALYSIS // bug in clang compiler.
+{
std::unique_lock _l(mLock);
while (!exitPending()) {
diff --git a/services/audioflinger/PatchCommandThread.h b/services/audioflinger/PatchCommandThread.h
index b7853f0..b52e0a9 100644
--- a/services/audioflinger/PatchCommandThread.h
+++ b/services/audioflinger/PatchCommandThread.h
@@ -84,7 +84,7 @@
class ReleaseAudioPatchData : public CommandData {
public:
- ReleaseAudioPatchData(audio_patch_handle_t handle)
+ explicit ReleaseAudioPatchData(audio_patch_handle_t handle)
: mHandle(handle) {}
audio_patch_handle_t mHandle;
diff --git a/services/audioflinger/PatchPanel.h b/services/audioflinger/PatchPanel.h
index 68a3800..5555766 100644
--- a/services/audioflinger/PatchPanel.h
+++ b/services/audioflinger/PatchPanel.h
@@ -199,7 +199,7 @@
return mRecord.handle() != AUDIO_PATCH_HANDLE_NONE ||
mPlayback.handle() != AUDIO_PATCH_HANDLE_NONE; }
- void setThread(sp<ThreadBase> thread) { mThread = thread; }
+ void setThread(const sp<ThreadBase>& thread) { mThread = thread; }
wp<ThreadBase> thread() const { return mThread; }
// returns the latency of the patch (from record to playback).
diff --git a/services/audioflinger/StateQueue.cpp b/services/audioflinger/StateQueue.cpp
index 9d4188f..38ce2c2 100644
--- a/services/audioflinger/StateQueue.cpp
+++ b/services/audioflinger/StateQueue.cpp
@@ -187,7 +187,9 @@
} // namespace android
-// hack for gcc
+// Hack to avoid explicit template instantiation of
+// template class StateQueue<FastCaptureState>;
+// template class StateQueue<FastMixerState>;
#ifdef STATE_QUEUE_INSTANTIATIONS
-#include STATE_QUEUE_INSTANTIATIONS
+#include STATE_QUEUE_INSTANTIATIONS // NOLINT(bugprone-suspicious-include)
#endif
diff --git a/services/audioflinger/Threads.cpp b/services/audioflinger/Threads.cpp
index 163e2a0..76c9ad8 100644
--- a/services/audioflinger/Threads.cpp
+++ b/services/audioflinger/Threads.cpp
@@ -376,7 +376,7 @@
// try three times to get the clock offset, choose the one
// with the minimum gap in measurements.
const int tries = 3;
- nsecs_t bestGap, measured;
+ nsecs_t bestGap = 0, measured = 0; // not required, initialized for clang-tidy
for (int i = 0; i < tries; ++i) {
const nsecs_t tmono = systemTime(SYSTEM_TIME_MONOTONIC);
const nsecs_t tbase = systemTime(clockbase);
@@ -627,6 +627,7 @@
// sendConfigEvent_l() must be called with ThreadBase::mLock held
// Can temporarily release the lock if waiting for a reply from processConfigEvents_l().
status_t AudioFlinger::ThreadBase::sendConfigEvent_l(sp<ConfigEvent>& event)
+NO_THREAD_SAFETY_ANALYSIS // condition variable
{
status_t status = NO_ERROR;
@@ -942,6 +943,7 @@
}
void AudioFlinger::ThreadBase::dump(int fd, const Vector<String16>& args)
+NO_THREAD_SAFETY_ANALYSIS // conditional try lock
{
dprintf(fd, "\n%s thread %p, name %s, tid %d, type %d (%s):\n", isOutput() ? "Output" : "Input",
this, mThreadName, getTid(), type(), threadTypeToString(type()));
@@ -1310,7 +1312,9 @@
void AudioFlinger::ThreadBase::checkSuspendOnEffectEnabled(bool enabled,
audio_session_t sessionId,
- bool threadLocked) {
+ bool threadLocked)
+NO_THREAD_SAFETY_ANALYSIS // manual locking
+{
if (!threadLocked) {
mLock.lock();
}
@@ -1794,6 +1798,7 @@
void AudioFlinger::ThreadBase::lockEffectChains_l(
Vector< sp<AudioFlinger::EffectChain> >& effectChains)
+NO_THREAD_SAFETY_ANALYSIS // calls EffectChain::lock()
{
effectChains = mEffectChains;
for (size_t i = 0; i < mEffectChains.size(); i++) {
@@ -1803,6 +1808,7 @@
void AudioFlinger::ThreadBase::unlockEffectChains(
const Vector< sp<AudioFlinger::EffectChain> >& effectChains)
+NO_THREAD_SAFETY_ANALYSIS // calls EffectChain::unlock()
{
for (size_t i = 0; i < effectChains.size(); i++) {
effectChains[i]->unlock();
@@ -1910,7 +1916,7 @@
template <typename T>
void AudioFlinger::ThreadBase::ActiveTracks<T>::updatePowerState(
- sp<ThreadBase> thread, bool force) {
+ const sp<ThreadBase>& thread, bool force) {
// Updates ActiveTracks client uids to the thread wakelock.
if (mActiveTracksGeneration != mLastActiveTracksGeneration || force) {
thread->updateWakeLockUids_l(getWakeLockUids());
@@ -2045,6 +2051,21 @@
return AudioSystem::getStrategyForStream(stream);
}
+// startMelComputation_l() must be called with AudioFlinger::mLock held
+void AudioFlinger::ThreadBase::startMelComputation_l(
+ const sp<audio_utils::MelProcessor>& /*processor*/)
+{
+ // Do nothing
+ ALOGW("%s: ThreadBase does not support CSD", __func__);
+}
+
+// stopMelComputation_l() must be called with AudioFlinger::mLock held
+void AudioFlinger::ThreadBase::stopMelComputation_l()
+{
+ // Do nothing
+ ALOGW("%s: ThreadBase does not support CSD", __func__);
+}
+
// ----------------------------------------------------------------------------
// Playback
// ----------------------------------------------------------------------------
@@ -2773,6 +2794,7 @@
// addTrack_l() must be called with ThreadBase::mLock held
status_t AudioFlinger::PlaybackThread::addTrack_l(const sp<Track>& track)
+NO_THREAD_SAFETY_ANALYSIS // release and re-acquire mLock
{
status_t status = ALREADY_EXISTS;
@@ -2886,6 +2908,9 @@
if (!trackActive) {
removeTrack_l(track);
} else if (track->isFastTrack() || track->isOffloaded() || track->isDirect()) {
+ if (track->isPausePending()) {
+ track->pauseAck();
+ }
track->mState = TrackBase::STOPPING_1;
}
@@ -2926,7 +2951,7 @@
if (initCheck() == NO_ERROR && mOutput->stream->getParameters(keys, &out_s8) == OK) {
return out_s8;
}
- return String8();
+ return {};
}
status_t AudioFlinger::DirectOutputThread::selectPresentation(int presentationId, int programId) {
@@ -3357,7 +3382,7 @@
}
void AudioFlinger::PlaybackThread::threadLoop_removeTracks(
- const Vector< sp<Track> >& tracksToRemove)
+ [[maybe_unused]] const Vector< sp<Track> >& tracksToRemove)
{
// Miscellaneous track cleanup when removed from the active list,
// called without Thread lock but synchronized with threadLoop processing.
@@ -3368,8 +3393,6 @@
addBatteryData(IMediaPlayerService::kBatteryDataAudioFlingerStop);
}
}
-#else
- (void)tracksToRemove; // suppress unused warning
#endif
}
@@ -3428,12 +3451,6 @@
if (framesWritten > 0) {
bytesWritten = framesWritten * mFrameSize;
- // Send to MelProcessor for sound dose measurement.
- auto processor = mMelProcessor.load();
- if (processor) {
- processor->process((char *)mSinkBuffer + offset, bytesWritten);
- }
-
#ifdef TEE_SINK
mTee.write((char *)mSinkBuffer + offset, framesWritten);
#endif
@@ -3476,17 +3493,22 @@
return bytesWritten;
}
-void AudioFlinger::PlaybackThread::startMelComputation(
+// startMelComputation_l() must be called with AudioFlinger::mLock held
+void AudioFlinger::PlaybackThread::startMelComputation_l(
const sp<audio_utils::MelProcessor>& processor)
{
- ALOGV("%s: starting mel processor for thread %d", __func__, id());
- mMelProcessor = processor;
+ auto outputSink = static_cast<AudioStreamOutSink*>(mOutputSink.get());
+ if (outputSink != nullptr) {
+ outputSink->startMelComputation(processor);
+ }
}
-void AudioFlinger::PlaybackThread::stopMelComputation() {
- if (mMelProcessor.load() != nullptr) {
- ALOGV("%s: stopping mel processor for thread %d", __func__, id());
- mMelProcessor = nullptr;
+// stopMelComputation_l() must be called with AudioFlinger::mLock held
+void AudioFlinger::PlaybackThread::stopMelComputation_l()
+{
+ auto outputSink = static_cast<AudioStreamOutSink*>(mOutputSink.get());
+ if (outputSink != nullptr) {
+ outputSink->stopMelComputation();
}
}
@@ -3691,10 +3713,10 @@
size_t numSamples = mNormalFrameCount
* (audio_channel_count_from_out_mask(mMixerChannelMask)
+ mHapticChannelCount);
- status_t result = mAudioFlinger->mEffectsFactoryHal->allocateBuffer(
+ const status_t allocateStatus = mAudioFlinger->mEffectsFactoryHal->allocateBuffer(
numSamples * sizeof(effect_buffer_t),
&halInBuffer);
- if (result != OK) return result;
+ if (allocateStatus != OK) return allocateStatus;
#ifdef FLOAT_EFFECT_CHAIN
buffer = halInBuffer ? halInBuffer->audioBuffer()->f32 : buffer;
#else
@@ -3780,8 +3802,8 @@
}
// detach all tracks with same session ID from this chain
- for (size_t i = 0; i < mTracks.size(); ++i) {
- sp<Track> track = mTracks[i];
+ for (size_t j = 0; j < mTracks.size(); ++j) {
+ sp<Track> track = mTracks[j];
if (session == track->sessionId()) {
track->setMainBuffer(reinterpret_cast<effect_buffer_t*>(mSinkBuffer));
chain->decTrackCnt();
@@ -3834,6 +3856,7 @@
}
bool AudioFlinger::PlaybackThread::threadLoop()
+NO_THREAD_SAFETY_ANALYSIS // manual locking of AudioFlinger
{
tlNBLogWriter = mNBLogWriter.get();
@@ -3902,7 +3925,7 @@
// is more informational.
if (mAudioFlinger->mLock.tryLock() == NO_ERROR) {
std::vector<PatchPanel::SoftwarePatch> swPatches;
- double latencyMs;
+ double latencyMs = 0.; // not required; initialized for clang-tidy
status_t status = INVALID_OPERATION;
audio_patch_handle_t downstreamPatchHandle = AUDIO_PATCH_HANDLE_NONE;
if (mAudioFlinger->mPatchPanel.getDownstreamSoftwarePatches(id(), &swPatches) == OK
@@ -3922,8 +3945,7 @@
ALOGVV("new downstream latency %lf ms", latencyMs);
} else {
ALOGD("out of range downstream latency %lf ms", latencyMs);
- if (latencyMs < minLatency) latencyMs = minLatency;
- else if (latencyMs > maxLatency) latencyMs = maxLatency;
+ latencyMs = std::clamp(latencyMs, minLatency, maxLatency);
}
mDownstreamLatencyStatMs.add(latencyMs);
}
@@ -4612,6 +4634,7 @@
// removeTracks_l() must be called with ThreadBase::mLock held
void AudioFlinger::PlaybackThread::removeTracks_l(const Vector< sp<Track> >& tracksToRemove)
+NO_THREAD_SAFETY_ANALYSIS // release and re-acquire mLock
{
for (const auto& track : tracksToRemove) {
mActiveTracks.remove(track);
@@ -4734,8 +4757,8 @@
"as it does not support audio patches",
patch->sinks[i].ext.device.type);
type = static_cast<audio_devices_t>(type | patch->sinks[i].ext.device.type);
- deviceTypeAddrs.push_back(AudioDeviceTypeAddr(patch->sinks[i].ext.device.type,
- patch->sinks[i].ext.device.address));
+ deviceTypeAddrs.emplace_back(patch->sinks[i].ext.device.type,
+ patch->sinks[i].ext.device.address);
}
audio_port_handle_t sinkPortId = patch->sinks[0].id;
@@ -4950,14 +4973,15 @@
// When it wakes up after a maximum latency, it runs a few cycles quickly before
// finally blocking. Note the pipe implementation rounds up the request to a power of 2.
MonoPipe *monoPipe = new MonoPipe(mNormalFrameCount * 4, format, true /*writeCanBlock*/);
- const NBAIO_Format offers[1] = {format};
- size_t numCounterOffers = 0;
+ const NBAIO_Format offersFast[1] = {format};
+ size_t numCounterOffersFast = 0;
#if !LOG_NDEBUG
ssize_t index =
#else
(void)
#endif
- monoPipe->negotiate(offers, 1, NULL, numCounterOffers);
+ monoPipe->negotiate(offersFast, std::size(offersFast),
+ nullptr /* counterOffers */, numCounterOffersFast);
ALOG_ASSERT(index == 0);
monoPipe->setAvgFrames((mScreenState & 1) ?
(monoPipe->maxFrames() * 7) / 8 : mNormalFrameCount * 2);
@@ -5384,7 +5408,7 @@
// tallyUnderrunFrames() is called to update the track counters
// with the number of underrun frames for a particular mixer period.
// We defer tallying until we know the final mixer status.
- void tallyUnderrunFrames(sp<Track> track, size_t underrunFrames) {
+ void tallyUnderrunFrames(const sp<Track>& track, size_t underrunFrames) {
mUnderrunFrames.emplace_back(track, underrunFrames);
}
@@ -5644,7 +5668,7 @@
// during last round
size_t desiredFrames;
const uint32_t sampleRate = track->mAudioTrackServerProxy->getSampleRate();
- AudioPlaybackRate playbackRate = track->mAudioTrackServerProxy->getPlaybackRate();
+ const AudioPlaybackRate playbackRate = track->mAudioTrackServerProxy->getPlaybackRate();
desiredFrames = sourceFramesNeededWithTimestretch(
sampleRate, mNormalFrameCount, mSampleRate, playbackRate.mSpeed);
@@ -5837,12 +5861,12 @@
AudioMixer::SAMPLE_RATE,
(void *)(uintptr_t)reqSampleRate);
- AudioPlaybackRate playbackRate = proxy->getPlaybackRate();
mAudioMixer->setParameter(
trackId,
AudioMixer::TIMESTRETCH,
AudioMixer::PLAYBACK_RATE,
- &playbackRate);
+ // cast away constness for this generic API.
+ const_cast<void *>(reinterpret_cast<const void *>(&playbackRate)));
/*
* Select the appropriate output buffer for the track.
@@ -5994,7 +6018,7 @@
}
// Push the new FastMixer state if necessary
- bool pauseAudioWatchdog = false;
+ [[maybe_unused]] bool pauseAudioWatchdog = false;
if (didModify) {
state->mFastTracksGen++;
// if the fast mixer was active, but now there are no fast tracks, then put it in cold idle
@@ -6226,12 +6250,12 @@
mAudioMixer = new AudioMixer(mNormalFrameCount, mSampleRate);
for (const auto &track : mTracks) {
const int trackId = track->id();
- status_t status = mAudioMixer->create(
+ const status_t createStatus = mAudioMixer->create(
trackId,
track->mChannelMask,
track->mFormat,
track->mSessionId);
- ALOGW_IF(status != NO_ERROR,
+ ALOGW_IF(createStatus != NO_ERROR,
"%s(): AudioMixer cannot create track(%d)"
" mask %#x, format %#x, sessionId %d",
__func__,
@@ -6471,9 +6495,13 @@
if (right > GAIN_FLOAT_UNITY) {
right = GAIN_FLOAT_UNITY;
}
-
- left *= v * mMasterBalanceLeft; // DirectOutputThread balance applied as track volume
- right *= v * mMasterBalanceRight;
+ left *= v;
+ right *= v;
+ if (mAudioFlinger->getMode() != AUDIO_MODE_IN_COMMUNICATION
+ || audio_channel_count_from_out_mask(mChannelMask) > 1) {
+ left *= mMasterBalanceLeft; // DirectOutputThread balance applied as track volume
+ right *= mMasterBalanceRight;
+ }
}
track->processMuteEvent_l(mAudioFlinger->getOrCreateAudioManager(),
@@ -7402,7 +7430,7 @@
void AudioFlinger::DuplicatingThread::threadLoop_mix()
{
// mix buffers...
- if (outputsReady(outputTracks)) {
+ if (outputsReady()) {
mAudioMixer->process();
} else {
if (mMixerBufferValid) {
@@ -7473,7 +7501,7 @@
}
}
-void AudioFlinger::DuplicatingThread::dumpInternals_l(int fd, const Vector<String16>& args __unused)
+void AudioFlinger::DuplicatingThread::dumpInternals_l(int fd, const Vector<String16>& args)
{
MixerThread::dumpInternals_l(fd, args);
@@ -7577,9 +7605,7 @@
}
}
-
-bool AudioFlinger::DuplicatingThread::outputsReady(
- const SortedVector< sp<OutputTrack> > &outputTracks)
+bool AudioFlinger::DuplicatingThread::outputsReady()
{
for (size_t i = 0; i < outputTracks.size(); i++) {
sp<ThreadBase> thread = outputTracks[i]->thread().promote();
@@ -7797,7 +7823,7 @@
size_t numCounterOffers = 0;
const NBAIO_Format offers[1] = {Format_from_SR_C(mSampleRate, mChannelCount, mFormat)};
#if !LOG_NDEBUG
- ssize_t index =
+ [[maybe_unused]] ssize_t index =
#else
(void)
#endif
@@ -7846,14 +7872,16 @@
// pipe will be shared directly with fast clients, so clear to avoid leaking old information
memset(pipeBuffer, 0, pipeSize);
Pipe *pipe = new Pipe(pipeFramesP2, format, pipeBuffer);
- const NBAIO_Format offers[1] = {format};
- size_t numCounterOffers = 0;
- ssize_t index = pipe->negotiate(offers, 1, NULL, numCounterOffers);
+ const NBAIO_Format offersFast[1] = {format};
+ size_t numCounterOffersFast = 0;
+ [[maybe_unused]] ssize_t index = pipe->negotiate(offersFast, std::size(offersFast),
+ nullptr /* counterOffers */, numCounterOffersFast);
ALOG_ASSERT(index == 0);
mPipeSink = pipe;
PipeReader *pipeReader = new PipeReader(*pipe);
- numCounterOffers = 0;
- index = pipeReader->negotiate(offers, 1, NULL, numCounterOffers);
+ numCounterOffersFast = 0;
+ index = pipeReader->negotiate(offersFast, std::size(offersFast),
+ nullptr /* counterOffers */, numCounterOffersFast);
ALOG_ASSERT(index == 0);
mPipeSource = pipeReader;
mPipeFramesP2 = pipeFramesP2;
@@ -8201,7 +8229,7 @@
// copy to the right place. Permitted because mRsmpInBuffer was over-allocated.
int32_t rear = mRsmpInRear & (mRsmpInFramesP2 - 1);
- ssize_t framesRead;
+ ssize_t framesRead = 0; // not needed, remove clang-tidy warning.
const int64_t lastIoBeginNs = systemTime(); // start IO timing
// If an NBAIO source is present, use it to read the normal capture's data
@@ -8394,8 +8422,9 @@
// straight from RecordThread buffer to RecordTrack buffer.
AudioBufferProvider::Buffer buffer;
buffer.frameCount = framesOut;
- status_t status = activeTrack->mResamplerBufferProvider->getNextBuffer(&buffer);
- if (status == OK && buffer.frameCount != 0) {
+ const status_t getNextBufferStatus =
+ activeTrack->mResamplerBufferProvider->getNextBuffer(&buffer);
+ if (getNextBufferStatus == OK && buffer.frameCount != 0) {
ALOGV_IF(buffer.frameCount != framesOut,
"%s() read less than expected (%zu vs %zu)",
__func__, buffer.frameCount, framesOut);
@@ -8405,7 +8434,7 @@
} else {
framesOut = 0;
ALOGE("%s() cannot fill request, status: %d, frameCount: %zu",
- __func__, status, buffer.frameCount);
+ __func__, getNextBufferStatus, buffer.frameCount);
}
} else {
// process frames from the RecordThread buffer provider to the RecordTrack
@@ -8825,7 +8854,6 @@
// or using a separate command thread
recordTrack->mState = TrackBase::STARTING_1;
mActiveTracks.add(recordTrack);
- status_t status = NO_ERROR;
if (recordTrack->isExternalTrack()) {
mLock.unlock();
status = AudioSystem::startInput(recordTrack->portId());
@@ -9016,7 +9044,7 @@
// "best effort" behavior of the API.
if (sharedOffset < 0) {
sharedAudioStartFrames = mRsmpInRear;
- } else if (sharedOffset > mRsmpInFrames) {
+ } else if (sharedOffset > static_cast<signed>(mRsmpInFrames)) {
sharedAudioStartFrames =
audio_utils::safe_sub_overflow(mRsmpInRear, (int32_t)mRsmpInFrames);
}
@@ -9307,7 +9335,8 @@
audio_format_t reqFormat = mFormat;
uint32_t samplingRate = mSampleRate;
// TODO this may change if we want to support capture from HDMI PCM multi channel (e.g on TVs).
- audio_channel_mask_t channelMask = audio_channel_in_mask_from_count(mChannelCount);
+ [[maybe_unused]] audio_channel_mask_t channelMask =
+ audio_channel_in_mask_from_count(mChannelCount);
AudioParameter param = AudioParameter(keyValuePair);
int value;
@@ -9393,7 +9422,7 @@
return out_s8;
}
}
- return String8();
+ return {};
}
void AudioFlinger::RecordThread::ioConfigChanged(audio_io_config_event_t event, pid_t pid,
@@ -9634,7 +9663,7 @@
maxFilled = filled;
}
}
- if (maxFilled > mRsmpInFrames) {
+ if (maxFilled > static_cast<signed>(mRsmpInFrames)) {
(void)__builtin_sub_overflow(mRsmpInRear, mRsmpInFrames, &oldestFront);
}
return oldestFront;
@@ -9823,10 +9852,14 @@
return mThread->standby();
}
+status_t AudioFlinger::MmapThreadHandle::reportData(const void* buffer, size_t frameCount) {
+ return mThread->reportData(buffer, frameCount);
+}
+
AudioFlinger::MmapThread::MmapThread(
const sp<AudioFlinger>& audioFlinger, audio_io_handle_t id,
- AudioHwDevice *hwDev, sp<StreamHalInterface> stream, bool systemReady, bool isOut)
+ AudioHwDevice *hwDev, const sp<StreamHalInterface>& stream, bool systemReady, bool isOut)
: ThreadBase(audioFlinger, id, (isOut ? MMAP_PLAYBACK : MMAP_CAPTURE), systemReady, isOut),
mSessionId(AUDIO_SESSION_NONE),
mPortId(AUDIO_PORT_HANDLE_NONE),
@@ -10032,8 +10065,10 @@
mHalVolFloat = -1.0f;
} else if (!track->isSilenced_l()) {
for (const sp<MmapTrack> &t : mActiveTracks) {
- if (t->isSilenced_l() && t->uid() != client.attributionSource.uid)
+ if (t->isSilenced_l()
+ && t->uid() != static_cast<uid_t>(client.attributionSource.uid)) {
t->invalidate();
+ }
}
}
@@ -10133,6 +10168,10 @@
return NO_ERROR;
}
+status_t AudioFlinger::MmapThread::reportData(const void* /*buffer*/, size_t /*frameCount*/) {
+ // This is a stub implementation. The MmapPlaybackThread overrides this function.
+ return INVALID_OPERATION;
+}
void AudioFlinger::MmapThread::readHalParameters_l()
{
@@ -10266,7 +10305,7 @@
if (initCheck() == NO_ERROR && mHalStream->getParameters(keys, &out_s8) == OK) {
return out_s8;
}
- return String8();
+ return {};
}
void AudioFlinger::MmapThread::ioConfigChanged(audio_io_config_event_t event, pid_t pid,
@@ -10296,6 +10335,7 @@
status_t AudioFlinger::MmapThread::createAudioPatch_l(const struct audio_patch *patch,
audio_patch_handle_t *handle)
+NO_THREAD_SAFETY_ANALYSIS // release and re-acquire mLock
{
status_t status = NO_ERROR;
@@ -10313,8 +10353,8 @@
"as it does not support audio patches",
patch->sinks[i].ext.device.type);
type = static_cast<audio_devices_t>(type | patch->sinks[i].ext.device.type);
- sinkDeviceTypeAddrs.push_back(AudioDeviceTypeAddr(patch->sinks[i].ext.device.type,
- patch->sinks[i].ext.device.address));
+ sinkDeviceTypeAddrs.emplace_back(patch->sinks[i].ext.device.type,
+ patch->sinks[i].ext.device.address);
}
deviceId = patch->sinks[0].id;
numDevices = mPatch.num_sinks;
@@ -10523,6 +10563,7 @@
}
void AudioFlinger::MmapThread::checkInvalidTracks_l()
+NO_THREAD_SAFETY_ANALYSIS // release and re-acquire mLock
{
sp<MmapStreamCallback> callback;
for (const sp<MmapTrack> &track : mActiveTracks) {
@@ -10696,6 +10737,7 @@
}
void AudioFlinger::MmapPlaybackThread::processVolume_l()
+NO_THREAD_SAFETY_ANALYSIS // access of track->processMuteEvent_l
{
float volume;
@@ -10815,6 +10857,40 @@
return status;
}
+status_t AudioFlinger::MmapPlaybackThread::reportData(const void* buffer, size_t frameCount) {
+ // Send to MelProcessor for sound dose measurement.
+ auto processor = mMelProcessor.load();
+ if (processor) {
+ processor->process(buffer, frameCount * mFrameSize);
+ }
+
+ return NO_ERROR;
+}
+
+// startMelComputation_l() must be called with AudioFlinger::mLock held
+void AudioFlinger::MmapPlaybackThread::startMelComputation_l(
+ const sp<audio_utils::MelProcessor>& processor)
+{
+ ALOGV("%s: starting mel processor for thread %d", __func__, id());
+ mMelProcessor.store(processor);
+ if (processor) {
+ processor->resume();
+ }
+
+    // no need to update the output format for MmapPlaybackThread since it is
+    // fixed per thread
+}
+
+// stopMelComputation_l() must be called with AudioFlinger::mLock held
+void AudioFlinger::MmapPlaybackThread::stopMelComputation_l()
+{
+ ALOGV("%s: pausing mel processor for thread %d", __func__, id());
+ auto melProcessor = mMelProcessor.load();
+ if (melProcessor != nullptr) {
+ melProcessor->pause();
+ }
+}
+
void AudioFlinger::MmapPlaybackThread::dumpInternals_l(int fd, const Vector<String16>& args)
{
MmapThread::dumpInternals_l(fd, args);
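The Threads.cpp hunks above attach NO_THREAD_SAFETY_ANALYSIS to functions such as threadLoop(), removeTracks_l() and checkInvalidTracks_l() because they manually drop and re-take mLock, which clang's -Wthread-safety analysis cannot model. A minimal sketch of that pattern, using std::mutex and illustrative names (Registry, updateWithCallback) rather than the real AudioFlinger types:

    #include <mutex>

    struct Registry {
        std::mutex mLock;
        int mValue = 0;

        // The caller already holds mLock; the lock is released around a potentially
        // blocking callback and re-acquired before returning. A static analyzer that
        // tracks lock capabilities cannot verify this, hence the escape hatch above.
        void updateWithCallback(int v, void (*blockingCb)()) {
            mValue = v;       // still holding mLock here
            mLock.unlock();   // avoid holding the lock across the callback
            blockingCb();     // may re-enter this object and take mLock itself
            mLock.lock();     // restore the locking invariant expected by the caller
        }
    };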
diff --git a/services/audioflinger/Threads.h b/services/audioflinger/Threads.h
index ddae7ae..7b4c150 100644
--- a/services/audioflinger/Threads.h
+++ b/services/audioflinger/Threads.h
@@ -164,7 +164,7 @@
class SetParameterConfigEventData : public ConfigEventData {
public:
- explicit SetParameterConfigEventData(String8 keyValuePairs) :
+ explicit SetParameterConfigEventData(const String8& keyValuePairs) :
mKeyValuePairs(keyValuePairs) {}
virtual void dump(char *buffer, size_t size) {
@@ -176,7 +176,7 @@
class SetParameterConfigEvent : public ConfigEvent {
public:
- explicit SetParameterConfigEvent(String8 keyValuePairs) :
+ explicit SetParameterConfigEvent(const String8& keyValuePairs) :
ConfigEvent(CFG_EVENT_SET_PARAMETER) {
mData = new SetParameterConfigEventData(keyValuePairs);
mWaitStatus = true;
@@ -576,6 +576,9 @@
virtual bool isStreamInitialized() = 0;
+ virtual void startMelComputation_l(const sp<audio_utils::MelProcessor>& processor);
+ virtual void stopMelComputation_l();
+
protected:
// entry describing an effect being suspended in mSuspendedSessions keyed vector
@@ -799,7 +802,7 @@
// ThreadBase thread.
void clear();
// periodically called in the threadLoop() to update power state uids.
- void updatePowerState(sp<ThreadBase> thread, bool force = false);
+ void updatePowerState(const sp<ThreadBase>& thread, bool force = false);
/** @return true if one or move active tracks was added or removed since the
* last time this function was called or the vector was created.
@@ -1110,8 +1113,8 @@
return INVALID_OPERATION;
}
- void startMelComputation(const sp<audio_utils::MelProcessor>& processor);
- void stopMelComputation();
+ void startMelComputation_l(const sp<audio_utils::MelProcessor>& processor) override;
+ void stopMelComputation_l() override;
protected:
// updated by readOutputParameters_l()
@@ -1215,8 +1218,6 @@
audio_channel_mask_t mMixerChannelMask = AUDIO_CHANNEL_NONE;
private:
- mediautils::atomic_sp<audio_utils::MelProcessor> mMelProcessor;
-
// mMasterMute is in both PlaybackThread and in AudioFlinger. When a
// PlaybackThread needs to find out if master-muted, it checks it's local
// copy rather than the one in AudioFlinger. This optimization saves a lock.
@@ -1290,7 +1291,7 @@
template <typename T>
class Tracks {
public:
- Tracks(bool saveDeletedTrackIds) :
+ explicit Tracks(bool saveDeletedTrackIds) :
mSaveDeletedTrackIds(saveDeletedTrackIds) { }
// SortedVector methods
@@ -1319,7 +1320,7 @@
return mTracks.end();
}
- size_t processDeletedTrackIds(std::function<void(int)> f) {
+ size_t processDeletedTrackIds(const std::function<void(int)>& f) {
for (const int trackId : mDeletedTrackIds) {
f(trackId);
}
@@ -1441,7 +1442,7 @@
class IsTimestampAdvancing {
public:
// The timestamp will not be checked any faster than the specified time.
- IsTimestampAdvancing(nsecs_t minimumTimeBetweenChecksNs)
+ explicit IsTimestampAdvancing(nsecs_t minimumTimeBetweenChecksNs)
: mMinimumTimeBetweenChecksNs(minimumTimeBetweenChecksNs)
{
clear();
@@ -1758,7 +1759,7 @@
void dumpInternals_l(int fd, const Vector<String16>& args) override;
private:
- bool outputsReady(const SortedVector< sp<OutputTrack> > &outputTracks);
+ bool outputsReady();
protected:
// threadLoop snippets
virtual void threadLoop_mix();
@@ -2107,7 +2108,7 @@
#include "MmapTracks.h"
MmapThread(const sp<AudioFlinger>& audioFlinger, audio_io_handle_t id,
- AudioHwDevice *hwDev, sp<StreamHalInterface> stream, bool systemReady,
+ AudioHwDevice *hwDev, const sp<StreamHalInterface>& stream, bool systemReady,
bool isOut);
virtual ~MmapThread();
@@ -2130,6 +2131,7 @@
status_t stop(audio_port_handle_t handle);
status_t standby();
virtual status_t getExternalPosition(uint64_t *position, int64_t *timeNaos) = 0;
+ virtual status_t reportData(const void* buffer, size_t frameCount);
// RefBase
virtual void onFirstRef();
@@ -2272,6 +2274,11 @@
return !(mOutput == nullptr || mOutput->stream == nullptr);
}
+ status_t reportData(const void* buffer, size_t frameCount) override;
+
+ void startMelComputation_l(const sp<audio_utils::MelProcessor>& processor) override;
+ void stopMelComputation_l() override;
+
protected:
void dumpInternals_l(int fd, const Vector<String16>& args) override;
@@ -2281,6 +2288,8 @@
bool mMasterMute;
bool mStreamMute;
AudioStreamOut* mOutput;
+
+ mediautils::atomic_sp<audio_utils::MelProcessor> mMelProcessor;
};
class MmapCaptureThread : public MmapThread
diff --git a/services/audioflinger/TrackBase.h b/services/audioflinger/TrackBase.h
index f305aa8..254fb91 100644
--- a/services/audioflinger/TrackBase.h
+++ b/services/audioflinger/TrackBase.h
@@ -124,7 +124,7 @@
* This may be called without the thread lock.
*/
virtual double bufferLatencyMs() const {
- return mServerProxy->framesReadySafe() * 1000 / sampleRate();
+ return mServerProxy->framesReadySafe() * 1000. / sampleRate();
}
/** returns whether the track supports server latency computation.
@@ -432,7 +432,7 @@
{
public:
using Timeout = std::optional<std::chrono::nanoseconds>;
- PatchTrackBase(sp<ClientProxy> proxy, const ThreadBase& thread,
+ PatchTrackBase(const sp<ClientProxy>& proxy, const ThreadBase& thread,
const Timeout& timeout);
void setPeerTimeout(std::chrono::nanoseconds timeout);
template <typename T>
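The bufferLatencyMs() change above replaces the integer constant 1000 with 1000. so the expression is evaluated in double rather than truncated by integer division. A small worked example with made-up numbers (not taken from the patch):

    #include <cstdio>

    int main() {
        const unsigned framesReady = 100;    // illustrative values only
        const unsigned sampleRate = 48000;
        const double truncatedMs = framesReady * 1000 / sampleRate;  // 2.0 - integer division first
        const double exactMs = framesReady * 1000. / sampleRate;     // ~2.083 - promoted to double
        std::printf("truncated=%.3f exact=%.3f\n", truncatedMs, exactMs);
        return 0;
    }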
diff --git a/services/audioflinger/TrackMetrics.h b/services/audioflinger/TrackMetrics.h
index 6fc70d6..f3425df 100644
--- a/services/audioflinger/TrackMetrics.h
+++ b/services/audioflinger/TrackMetrics.h
@@ -17,6 +17,9 @@
#ifndef ANDROID_AUDIO_TRACKMETRICS_H
#define ANDROID_AUDIO_TRACKMETRICS_H
+#include <binder/IActivityManager.h>
+#include <binder/IPCThreadState.h>
+#include <binder/IServiceManager.h>
#include <mutex>
namespace android {
@@ -38,10 +41,13 @@
* We currently deliver metrics based on an AudioIntervalGroup.
*/
class TrackMetrics final {
+
+
public:
- TrackMetrics(std::string metricsId, bool isOut)
+ TrackMetrics(std::string metricsId, bool isOut, int clientUid)
: mMetricsId(std::move(metricsId))
, mIsOut(isOut)
+ , mUid(clientUid)
{} // we don't log a constructor item, we wait for more info in logConstructor().
~TrackMetrics() {
@@ -64,6 +70,18 @@
AMEDIAMETRICS_PROP_EVENT_VALUE_BEGINAUDIOINTERVALGROUP, devices.c_str());
}
++mIntervalCount;
+ const auto& mActivityManager = getActivityManager();
+ if (mActivityManager) {
+ if (mIsOut) {
+ mActivityManager->logFgsApiBegin(AUDIO_API,
+ mUid,
+ IPCThreadState::self() -> getCallingPid());
+ } else {
+ mActivityManager->logFgsApiBegin(MICROPHONE_API,
+ mUid,
+ IPCThreadState::self() -> getCallingPid());
+ }
+ }
}
void logConstructor(pid_t creatorPid, uid_t creatorUid, int32_t internalTrackId,
@@ -93,6 +111,18 @@
logVolume_l(mVolume); // flush out the last volume.
mLastVolumeChangeTimeNs = 0;
}
+ const auto& mActivityManager = getActivityManager();
+ if (mActivityManager) {
+ if (mIsOut) {
+ mActivityManager->logFgsApiEnd(AUDIO_API,
+ mUid,
+ IPCThreadState::self() -> getCallingPid());
+ } else {
+ mActivityManager->logFgsApiEnd(MICROPHONE_API,
+ mUid,
+ IPCThreadState::self() -> getCallingPid());
+ }
+ }
}
void logInvalidate() const {
@@ -113,7 +143,8 @@
mDeviceStartupMs.add(startupMs);
}
- void updateMinMaxVolume(int64_t durationNs, double deviceVolume) {
+ void updateMinMaxVolume_l(int64_t durationNs, double deviceVolume)
+ REQUIRES(mLock) {
if (deviceVolume > mMaxVolume) {
mMaxVolume = deviceVolume;
mMaxVolumeDurationNs = durationNs;
@@ -165,7 +196,7 @@
mDeviceTimeNs += durationNs;
mCumulativeTimeNs += durationNs;
}
- updateMinMaxVolume(durationNs, mVolume); // always update.
+ updateMinMaxVolume_l(durationNs, mVolume); // always update.
mVolume = volume;
mLastVolumeChangeTimeNs = timeNs;
}
@@ -221,9 +252,25 @@
// do not reset mUnderrunCount - it keeps continuously running for tracks.
}
+ // Meyer's singleton is thread-safe.
+ static const sp<IActivityManager>& getActivityManager() {
+ static const auto activityManager = []() -> sp<IActivityManager> {
+ const sp<IServiceManager> sm(defaultServiceManager());
+ if (sm != nullptr) {
+ return interface_cast<IActivityManager>(sm->checkService(String16("activity")));
+ }
+ return nullptr;
+ }();
+ return activityManager;
+ }
+
const std::string mMetricsId;
    const bool mIsOut;          // if true, then a playback track; otherwise used for record.
+ static constexpr int AUDIO_API = 5;
+ static constexpr int MICROPHONE_API = 6;
+ const int mUid;
+
mutable std::mutex mLock;
// Devices in the interval group.
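The TrackMetrics change caches the activity manager binder in a function-local static, relying on the C++11 guarantee that such initialization runs exactly once and is thread-safe (a Meyer's singleton). A rough sketch of the same idiom with placeholder types; FakeService and getFakeService are illustrative and not part of the patch:

    #include <memory>
    #include <string>

    struct FakeService { std::string name; };

    static const std::shared_ptr<FakeService>& getFakeService() {
        // Initialized once, on first use, with thread-safe static initialization;
        // no explicit mutex is needed for the lookup itself.
        static const auto service = []() -> std::shared_ptr<FakeService> {
            // The lookup may fail, so callers must tolerate a null result, just as
            // logBeginInterval()/logEndInterval() above check getActivityManager().
            return std::make_shared<FakeService>(FakeService{"activity"});
        }();
        return service;
    }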
diff --git a/services/audioflinger/Tracks.cpp b/services/audioflinger/Tracks.cpp
index 1fbf720..8faaffe 100644
--- a/services/audioflinger/Tracks.cpp
+++ b/services/audioflinger/Tracks.cpp
@@ -118,7 +118,7 @@
mThreadIoHandle(thread ? thread->id() : AUDIO_IO_HANDLE_NONE),
mPortId(portId),
mIsInvalid(false),
- mTrackMetrics(std::move(metricsId), isOut),
+ mTrackMetrics(std::move(metricsId), isOut, clientUid),
mCreatorPid(creatorPid)
{
const uid_t callingUid = IPCThreadState::self()->getCallingUid();
@@ -304,7 +304,7 @@
return NO_ERROR;
}
-AudioFlinger::ThreadBase::PatchTrackBase::PatchTrackBase(sp<ClientProxy> proxy,
+AudioFlinger::ThreadBase::PatchTrackBase::PatchTrackBase(const sp<ClientProxy>& proxy,
const ThreadBase& thread,
const Timeout& timeout)
: mProxy(proxy)
@@ -1319,8 +1319,9 @@
// must be called with thread lock held
void AudioFlinger::PlaybackThread::Track::flushAck()
{
- if (!isOffloaded() && !isDirect())
+ if (!isOffloaded() && !isDirect()) {
return;
+ }
// Clear the client ring buffer so that the app can prime the buffer while paused.
// Otherwise it might not get cleared until playback is resumed and obtainBuffer() is called.
@@ -1851,23 +1852,23 @@
//To be called with thread lock held
bool AudioFlinger::PlaybackThread::Track::isResumePending() {
-
- if (mState == RESUMING)
+ if (mState == RESUMING) {
return true;
+ }
/* Resume is pending if track was stopping before pause was called */
if (mState == STOPPING_1 &&
- mResumeToStopping)
+ mResumeToStopping) {
return true;
+ }
return false;
}
//To be called with thread lock held
void AudioFlinger::PlaybackThread::Track::resumeAck() {
-
-
- if (mState == RESUMING)
+ if (mState == RESUMING) {
mState = ACTIVE;
+ }
// Other possibility of pending resume is stopping_1 state
// Do not update the state from stopping as this prevents
@@ -2135,7 +2136,10 @@
if (thread != 0 && !thread->standby()) {
if (mBufferQueue.size() < kMaxOverFlowBuffers) {
pInBuffer = new Buffer;
- pInBuffer->mBuffer = malloc(inBuffer.frameCount * mFrameSize);
+ const size_t bufferSize = inBuffer.frameCount * mFrameSize;
+ pInBuffer->mBuffer = malloc(bufferSize);
+ LOG_ALWAYS_FATAL_IF(pInBuffer->mBuffer == nullptr,
+ "%s: Unable to malloc size %zu", __func__, bufferSize);
pInBuffer->frameCount = inBuffer.frameCount;
pInBuffer->raw = pInBuffer->mBuffer;
memcpy(pInBuffer->raw, inBuffer.raw, inBuffer.frameCount * mFrameSize);
@@ -2299,7 +2303,7 @@
buf.mFrameCount = buffer->frameCount;
buf.mRaw = buffer->raw;
mPeerProxy->releaseBuffer(&buf);
- TrackBase::releaseBuffer(buffer);
+ TrackBase::releaseBuffer(buffer); // Note: this is the base class.
}
status_t AudioFlinger::PlaybackThread::PatchTrack::obtainBuffer(Proxy::Buffer* buffer,
@@ -2913,7 +2917,7 @@
{
void *ptr = nullptr;
(void)posix_memalign(&ptr, alignment, size);
- return std::unique_ptr<void, decltype(free)*>(ptr, free);
+ return {ptr, free};
}
AudioFlinger::RecordThread::PassthruPatchRecord::PassthruPatchRecord(
diff --git a/services/audioflinger/sounddose/Android.bp b/services/audioflinger/sounddose/Android.bp
index 6d9a0cc..0a8c8be 100644
--- a/services/audioflinger/sounddose/Android.bp
+++ b/services/audioflinger/sounddose/Android.bp
@@ -41,6 +41,7 @@
cflags: [
"-Wall",
"-Werror",
+ "-DBACKEND_NDK",
],
}
diff --git a/services/audioflinger/sounddose/SoundDoseManager.cpp b/services/audioflinger/sounddose/SoundDoseManager.cpp
index 03a14d0..827f7d4 100644
--- a/services/audioflinger/sounddose/SoundDoseManager.cpp
+++ b/services/audioflinger/sounddose/SoundDoseManager.cpp
@@ -20,15 +20,11 @@
#include "SoundDoseManager.h"
-#if !defined(BACKEND_NDK)
-#define BACKEND_NDK
-#endif
-
#include "android/media/SoundDoseRecord.h"
#include <android-base/stringprintf.h>
#include <media/AidlConversionCppNdk.h>
#include <cinttypes>
-#include <time.h>
+#include <ctime>
#include <utils/Log.h>
namespace android {
@@ -54,34 +50,37 @@
size_t channelCount, audio_format_t format) {
std::lock_guard _l(mLock);
- if (mHalSoundDose != nullptr) {
- ALOGW("%s: using HAL MEL computation, no MelProcessor needed.", __func__);
+ if (mHalSoundDose != nullptr && !mDisableCsd) {
+ ALOGD("%s: using HAL MEL computation, no MelProcessor needed.", __func__);
return nullptr;
}
auto streamProcessor = mActiveProcessors.find(streamHandle);
- sp<audio_utils::MelProcessor> processor;
- if (streamProcessor != mActiveProcessors.end() &&
- (processor = streamProcessor->second.promote())) {
- ALOGV("%s: found callback for stream id %d", __func__, streamHandle);
- const auto activeTypeIt = mActiveDeviceTypes.find(deviceId);
- if (activeTypeIt != mActiveDeviceTypes.end()) {
- processor->setAttenuation(mMelAttenuationDB[activeTypeIt->second]);
+ if (streamProcessor != mActiveProcessors.end()) {
+ auto processor = streamProcessor->second.promote();
+ // if processor is nullptr it means it was removed by the playback
+ // thread and can be replaced in the mActiveProcessors map
+ if (processor != nullptr) {
+ ALOGV("%s: found callback for stream id %d", __func__, streamHandle);
+ const auto activeTypeIt = mActiveDeviceTypes.find(deviceId);
+ if (activeTypeIt != mActiveDeviceTypes.end()) {
+ processor->setAttenuation(mMelAttenuationDB[activeTypeIt->second]);
+ }
+ processor->setDeviceId(deviceId);
+ processor->setOutputRs2UpperBound(mRs2UpperBound);
+ return processor;
}
- processor->setDeviceId(deviceId);
- processor->setOutputRs2(mRs2Value);
- return processor;
- } else {
- ALOGV("%s: creating new callback for stream id %d", __func__, streamHandle);
- sp<audio_utils::MelProcessor> melProcessor = sp<audio_utils::MelProcessor>::make(
- sampleRate, channelCount, format, *this, deviceId, mRs2Value);
- const auto activeTypeIt = mActiveDeviceTypes.find(deviceId);
- if (activeTypeIt != mActiveDeviceTypes.end()) {
- melProcessor->setAttenuation(mMelAttenuationDB[activeTypeIt->second]);
- }
- mActiveProcessors[streamHandle] = melProcessor;
- return melProcessor;
}
+
+ ALOGV("%s: creating new callback for stream id %d", __func__, streamHandle);
+ sp<audio_utils::MelProcessor> melProcessor = sp<audio_utils::MelProcessor>::make(
+ sampleRate, channelCount, format, this, deviceId, mRs2UpperBound);
+ const auto activeTypeIt = mActiveDeviceTypes.find(deviceId);
+ if (activeTypeIt != mActiveDeviceTypes.end()) {
+ melProcessor->setAttenuation(mMelAttenuationDB[activeTypeIt->second]);
+ }
+ mActiveProcessors[streamHandle] = melProcessor;
+ return melProcessor;
}
bool SoundDoseManager::setHalSoundDoseInterface(const std::shared_ptr<ISoundDose>& halSoundDose) {
@@ -96,10 +95,10 @@
return false;
}
- if (!mHalSoundDose->setOutputRs2(mRs2Value).isOk()) {
+ if (!mHalSoundDose->setOutputRs2UpperBound(mRs2UpperBound).isOk()) {
ALOGW("%s: Cannot set RS2 value for momentary exposure %f",
__func__,
- mRs2Value);
+ mRs2UpperBound);
}
// initialize the HAL sound dose callback lazily
@@ -120,30 +119,30 @@
return true;
}
-void SoundDoseManager::setOutputRs2(float rs2Value) {
+void SoundDoseManager::setOutputRs2UpperBound(float rs2Value) {
ALOGV("%s", __func__);
std::lock_guard _l(mLock);
if (mHalSoundDose != nullptr) {
// using the HAL sound dose interface
- if (!mHalSoundDose->setOutputRs2(rs2Value).isOk()) {
+ if (!mHalSoundDose->setOutputRs2UpperBound(rs2Value).isOk()) {
ALOGE("%s: Cannot set RS2 value for momentary exposure %f", __func__, rs2Value);
return;
}
- mRs2Value = rs2Value;
+ mRs2UpperBound = rs2Value;
return;
}
for (auto& streamProcessor : mActiveProcessors) {
sp<audio_utils::MelProcessor> processor = streamProcessor.second.promote();
if (processor != nullptr) {
- status_t result = processor->setOutputRs2(rs2Value);
+ status_t result = processor->setOutputRs2UpperBound(rs2Value);
if (result != NO_ERROR) {
- ALOGW("%s: could not set RS2 value %f for stream %d", __func__, rs2Value,
+ ALOGW("%s: could not set RS2 upper bound %f for stream %d", __func__, rs2Value,
streamProcessor.first);
return;
}
- mRs2Value = rs2Value;
+ mRs2UpperBound = rs2Value;
}
}
}
@@ -263,11 +262,11 @@
}
}
-binder::Status SoundDoseManager::SoundDose::setOutputRs2(float value) {
+binder::Status SoundDoseManager::SoundDose::setOutputRs2UpperBound(float value) {
ALOGV("%s", __func__);
auto soundDoseManager = mSoundDoseManager.promote();
if (soundDoseManager != nullptr) {
- soundDoseManager->setOutputRs2(value);
+ soundDoseManager->setOutputRs2UpperBound(value);
}
return binder::Status::ok();
}
@@ -291,12 +290,21 @@
return binder::Status::ok();
}
-binder::Status SoundDoseManager::SoundDose::getOutputRs2(float* value) {
+binder::Status SoundDoseManager::SoundDose::disableCsd() {
+ ALOGV("%s", __func__);
+ auto soundDoseManager = mSoundDoseManager.promote();
+ if (soundDoseManager != nullptr) {
+ soundDoseManager->disableCsd();
+ }
+ return binder::Status::ok();
+}
+
+binder::Status SoundDoseManager::SoundDose::getOutputRs2UpperBound(float* value) {
ALOGV("%s", __func__);
auto soundDoseManager = mSoundDoseManager.promote();
if (soundDoseManager != nullptr) {
std::lock_guard _l(soundDoseManager->mLock);
- *value = soundDoseManager->mRs2Value;
+ *value = soundDoseManager->mRs2UpperBound;
}
return binder::Status::ok();
}
@@ -329,6 +337,16 @@
return binder::Status::ok();
}
+binder::Status SoundDoseManager::SoundDose::isSoundDoseHalSupported(bool* value) {
+ ALOGV("%s", __func__);
+ *value = false;
+ auto soundDoseManager = mSoundDoseManager.promote();
+ if (soundDoseManager != nullptr) {
+ *value = soundDoseManager->isSoundDoseHalSupported();
+ }
+ return binder::Status::ok();
+}
+
void SoundDoseManager::updateAttenuation(float attenuationDB, audio_devices_t deviceType) {
std::lock_guard _l(mLock);
ALOGV("%s: updating MEL processor attenuation for device type %d to %f",
@@ -347,6 +365,28 @@
}
}
+void SoundDoseManager::disableCsd() {
+ ALOGV("%s", __func__);
+
+ std::lock_guard _l(mLock);
+ mDisableCsd = true;
+
+    // Normally there should be no active MelProcessors when this method is called.
+    // As a defensive mechanism we nevertheless pause every cached MelProcessor to
+    // avoid unnecessary processing.
+ for (auto& activeEntry : mActiveProcessors) {
+ auto melProcessor = activeEntry.second.promote();
+ if (melProcessor != nullptr) {
+ melProcessor->pause();
+ }
+ }
+}
+
+bool SoundDoseManager::isCsdDisabled() {
+ std::lock_guard _l(mLock);
+ return mDisableCsd;
+}
+
void SoundDoseManager::setUseFrameworkMel(bool useFrameworkMel) {
// invalidate any HAL sound dose interface used
setHalSoundDoseInterface(nullptr);
@@ -370,6 +410,19 @@
return mComputeCsdOnAllDevices;
}
+bool SoundDoseManager::isSoundDoseHalSupported() const {
+ if (mDisableCsd) {
+ return false;
+ }
+
+ std::shared_ptr<ISoundDose> halSoundDose;
+ getHalSoundDose(&halSoundDose);
+    if (halSoundDose == nullptr) {
+ return false;
+ }
+ return true;
+}
+
void SoundDoseManager::getHalSoundDose(std::shared_ptr<ISoundDose>* halSoundDose) const {
std::lock_guard _l(mLock);
*halSoundDose = mHalSoundDose;
@@ -396,11 +449,16 @@
audio_port_handle_t deviceId) const {
ALOGV("%s", __func__);
+
sp<media::ISoundDoseCallback> soundDoseCallback;
std::vector<audio_utils::CsdRecord> records;
float currentCsd;
{
std::lock_guard _l(mLock);
+ if (mDisableCsd) {
+ return;
+ }
+
int64_t timestampSec = getMonotonicSecond();
@@ -436,6 +494,13 @@
void SoundDoseManager::onMomentaryExposure(float currentMel, audio_port_handle_t deviceId) const {
ALOGV("%s: Momentary exposure for device %d triggered: %f MEL", __func__, deviceId, currentMel);
+ {
+ std::lock_guard _l(mLock);
+ if (mDisableCsd) {
+ return;
+ }
+ }
+
auto soundDoseCallback = getSoundDoseCallback();
if (soundDoseCallback != nullptr) {
soundDoseCallback->onMomentaryExposure(currentMel, deviceId);
@@ -455,6 +520,14 @@
std::string SoundDoseManager::dump() const {
std::string output;
+ {
+ std::lock_guard _l(mLock);
+ if (mDisableCsd) {
+ base::StringAppendF(&output, "CSD is disabled");
+ return output;
+ }
+ }
+
mMelAggregator->foreachCsd([&output](audio_utils::CsdRecord csdRecord) {
base::StringAppendF(&output,
"CSD %f with average MEL %f in interval [%" PRId64 ", %" PRId64 "]",
diff --git a/services/audioflinger/sounddose/SoundDoseManager.h b/services/audioflinger/sounddose/SoundDoseManager.h
index f31a5d9..5081ce4 100644
--- a/services/audioflinger/sounddose/SoundDoseManager.h
+++ b/services/audioflinger/sounddose/SoundDoseManager.h
@@ -36,12 +36,12 @@
public:
/** CSD is computed with a rolling window of 7 days. */
static constexpr int64_t kCsdWindowSeconds = 604800; // 60s * 60m * 24h * 7d
- /** Default RS2 value in dBA as defined in IEC 62368-1 3rd edition. */
- static constexpr float kDefaultRs2Value = 100.f;
+ /** Default RS2 upper bound in dBA as defined in IEC 62368-1 3rd edition. */
+ static constexpr float kDefaultRs2UpperBound = 100.f;
SoundDoseManager()
: mMelAggregator(sp<audio_utils::MelAggregator>::make(kCsdWindowSeconds)),
- mRs2Value(kDefaultRs2Value) {};
+ mRs2UpperBound(kDefaultRs2UpperBound) {};
/**
* \brief Creates or gets the MelProcessor assigned to the streamHandle
@@ -68,12 +68,12 @@
void removeStreamProcessor(audio_io_handle_t streamHandle);
/**
- * Sets the output RS2 value for momentary exposure warnings. Must not be
+ * Sets the output RS2 upper bound for momentary exposure warnings. Must not be
* higher than 100dBA and not lower than 80dBA.
*
* \param rs2Value value to use for momentary exposure
*/
- void setOutputRs2(float rs2Value);
+ void setOutputRs2UpperBound(float rs2Value);
/**
* \brief Registers the interface for passing callbacks to the AudioService and gets
@@ -101,6 +101,9 @@
/** Clear all map entries with passed audio_port_handle_t. */
void clearMapDeviceIdEntries(audio_port_handle_t deviceId);
+ /** Returns true if CSD is disabled. */
+ bool isCsdDisabled();
+
std::string dump() const;
// used for testing only
@@ -129,14 +132,17 @@
virtual void binderDied(const wp<IBinder>& who);
/** BnSoundDose override */
- binder::Status setOutputRs2(float value) override;
+ binder::Status setOutputRs2UpperBound(float value) override;
binder::Status resetCsd(float currentCsd,
const std::vector<media::SoundDoseRecord>& records) override;
binder::Status updateAttenuation(float attenuationDB, int device) override;
- binder::Status getOutputRs2(float* value) override;
+ binder::Status getOutputRs2UpperBound(float* value) override;
+ binder::Status disableCsd() override;
+
binder::Status getCsd(float* value) override;
binder::Status forceUseFrameworkMel(bool useFrameworkMel) override;
binder::Status forceComputeCsdOnAllDevices(bool computeCsdOnAllDevices) override;
+ binder::Status isSoundDoseHalSupported(bool* value) override;
wp<SoundDoseManager> mSoundDoseManager;
const sp<media::ISoundDoseCallback> mSoundDoseCallback;
@@ -164,8 +170,10 @@
sp<media::ISoundDoseCallback> getSoundDoseCallback() const;
void updateAttenuation(float attenuationDB, audio_devices_t deviceType);
+ void disableCsd();
void setUseFrameworkMel(bool useFrameworkMel);
void setComputeCsdOnAllDevices(bool computeCsdOnAllDevices);
+ bool isSoundDoseHalSupported() const;
/** Returns the HAL sound dose interface or null if internal MEL computation is used. */
void getHalSoundDose(std::shared_ptr<ISoundDose>* halSoundDose) const;
@@ -183,7 +191,7 @@
std::map<AudioDeviceTypeAddr, audio_port_handle_t> mActiveDevices GUARDED_BY(mLock);
std::unordered_map<audio_port_handle_t, audio_devices_t> mActiveDeviceTypes GUARDED_BY(mLock);
- float mRs2Value GUARDED_BY(mLock);
+ float mRs2UpperBound GUARDED_BY(mLock);
std::unordered_map<audio_devices_t, float> mMelAttenuationDB GUARDED_BY(mLock);
sp<SoundDose> mSoundDose GUARDED_BY(mLock);
@@ -191,8 +199,10 @@
std::shared_ptr<ISoundDose> mHalSoundDose GUARDED_BY(mLock);
std::shared_ptr<HalSoundDoseCallback> mHalSoundDoseCallback GUARDED_BY(mLock);
- bool mUseFrameworkMel GUARDED_BY(mLock) = false;
+ bool mUseFrameworkMel GUARDED_BY(mLock) = true;
bool mComputeCsdOnAllDevices GUARDED_BY(mLock) = false;
+
+ bool mDisableCsd GUARDED_BY(mLock) = false;
};
} // namespace android
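Two numeric details in SoundDoseManager.h can be checked mechanically: the seven-day CSD window quoted as 604800 seconds, and the documented requirement that the RS2 upper bound stay between 80 and 100 dBA. The static_assert below verifies the first; sanitizeRs2UpperBound is a hypothetical helper, not part of the patch, showing one way a caller could enforce the second:

    #include <algorithm>

    static_assert(60 * 60 * 24 * 7 == 604800, "CSD rolling window is seven days in seconds");

    // Hypothetical clamp to the documented [80, 100] dBA range for setOutputRs2UpperBound().
    constexpr float sanitizeRs2UpperBound(float rs2Dba) {
        return std::clamp(rs2Dba, 80.f, 100.f);
    }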
diff --git a/services/audioflinger/sounddose/tests/Android.bp b/services/audioflinger/sounddose/tests/Android.bp
index fef73dc..2a2addf 100644
--- a/services/audioflinger/sounddose/tests/Android.bp
+++ b/services/audioflinger/sounddose/tests/Android.bp
@@ -45,9 +45,10 @@
"-Wall",
"-Werror",
"-Wextra",
+ "-DBACKEND_NDK",
],
test_suites: [
"general-tests",
],
-}
\ No newline at end of file
+}
diff --git a/services/audioflinger/sounddose/tests/sounddosemanager_tests.cpp b/services/audioflinger/sounddose/tests/sounddosemanager_tests.cpp
index b18ca50..9fab77d 100644
--- a/services/audioflinger/sounddose/tests/sounddosemanager_tests.cpp
+++ b/services/audioflinger/sounddose/tests/sounddosemanager_tests.cpp
@@ -22,10 +22,6 @@
#include <aidl/android/hardware/audio/core/sounddose/BnSoundDose.h>
#include <gmock/gmock.h>
#include <gtest/gtest.h>
-
-#if !defined(BACKEND_NDK)
-#define BACKEND_NDK
-#endif
#include <media/AidlConversionCppNdk.h>
namespace android {
@@ -37,8 +33,8 @@
class HalSoundDoseMock : public BnSoundDose {
public:
- MOCK_METHOD(ndk::ScopedAStatus, getOutputRs2, (float*), (override));
- MOCK_METHOD(ndk::ScopedAStatus, setOutputRs2, (float), (override));
+ MOCK_METHOD(ndk::ScopedAStatus, getOutputRs2UpperBound, (float*), (override));
+ MOCK_METHOD(ndk::ScopedAStatus, setOutputRs2UpperBound, (float), (override));
MOCK_METHOD(ndk::ScopedAStatus, registerSoundDoseCallback,
(const std::shared_ptr<ISoundDose::IHalSoundDoseCallback>&), (override));
};
@@ -49,7 +45,7 @@
mSoundDoseManager = sp<SoundDoseManager>::make();
mHalSoundDose = ndk::SharedRefBase::make<HalSoundDoseMock>();
- ON_CALL(*mHalSoundDose.get(), setOutputRs2)
+ ON_CALL(*mHalSoundDose.get(), setOutputRs2UpperBound)
.WillByDefault([] (float rs2) {
EXPECT_EQ(rs2, ISoundDose::DEFAULT_MAX_RS2);
return ndk::ScopedAStatus::ok();
@@ -109,7 +105,7 @@
}
TEST_F(SoundDoseManagerTest, SetHalSoundDoseDisablesNewMelProcessorCallbacks) {
- EXPECT_CALL(*mHalSoundDose.get(), setOutputRs2).Times(1);
+ EXPECT_CALL(*mHalSoundDose.get(), setOutputRs2UpperBound).Times(1);
EXPECT_CALL(*mHalSoundDose.get(), registerSoundDoseCallback)
.Times(1)
.WillOnce([&] (const std::shared_ptr<ISoundDose::IHalSoundDoseCallback>& callback) {
@@ -127,7 +123,7 @@
}
TEST_F(SoundDoseManagerTest, SetHalSoundDoseRegistersHalCallbacks) {
- EXPECT_CALL(*mHalSoundDose.get(), setOutputRs2).Times(1);
+ EXPECT_CALL(*mHalSoundDose.get(), setOutputRs2UpperBound).Times(1);
EXPECT_CALL(*mHalSoundDose.get(), registerSoundDoseCallback)
.Times(1)
.WillOnce([&] (const std::shared_ptr<ISoundDose::IHalSoundDoseCallback>& callback) {
@@ -141,7 +137,7 @@
TEST_F(SoundDoseManagerTest, MomentaryExposureFromHalWithNoAddressIllegalArgument) {
std::shared_ptr<ISoundDose::IHalSoundDoseCallback> halCallback;
- EXPECT_CALL(*mHalSoundDose.get(), setOutputRs2).Times(1);
+ EXPECT_CALL(*mHalSoundDose.get(), setOutputRs2UpperBound).Times(1);
EXPECT_CALL(*mHalSoundDose.get(), registerSoundDoseCallback)
.Times(1)
.WillOnce([&] (const std::shared_ptr<ISoundDose::IHalSoundDoseCallback>& callback) {
@@ -162,7 +158,7 @@
TEST_F(SoundDoseManagerTest, MomentaryExposureFromHalAfterInternalSelectedReturnsException) {
std::shared_ptr<ISoundDose::IHalSoundDoseCallback> halCallback;
- EXPECT_CALL(*mHalSoundDose.get(), setOutputRs2).Times(1);
+ EXPECT_CALL(*mHalSoundDose.get(), setOutputRs2UpperBound).Times(1);
EXPECT_CALL(*mHalSoundDose.get(), registerSoundDoseCallback)
.Times(1)
.WillOnce([&] (const std::shared_ptr<ISoundDose::IHalSoundDoseCallback>& callback) {
@@ -184,7 +180,7 @@
TEST_F(SoundDoseManagerTest, OnNewMelValuesFromHalWithNoAddressIllegalArgument) {
std::shared_ptr<ISoundDose::IHalSoundDoseCallback> halCallback;
- EXPECT_CALL(*mHalSoundDose.get(), setOutputRs2).Times(1);
+ EXPECT_CALL(*mHalSoundDose.get(), setOutputRs2UpperBound).Times(1);
EXPECT_CALL(*mHalSoundDose.get(), registerSoundDoseCallback)
.Times(1)
.WillOnce([&] (const std::shared_ptr<ISoundDose::IHalSoundDoseCallback>& callback) {
@@ -243,7 +239,8 @@
}
TEST_F(SoundDoseManagerTest, GetDefaultForceUseFrameworkMel) {
- EXPECT_FALSE(mSoundDoseManager->forceUseFrameworkMel());
+ // TODO: for now dogfooding with internal MEL. Revert to false when using the HAL MELs
+ EXPECT_TRUE(mSoundDoseManager->forceUseFrameworkMel());
}
} // namespace
diff --git a/services/audiopolicy/AudioPolicyInterface.h b/services/audiopolicy/AudioPolicyInterface.h
index 8f9c60b..3d1cf76 100644
--- a/services/audiopolicy/AudioPolicyInterface.h
+++ b/services/audiopolicy/AudioPolicyInterface.h
@@ -246,6 +246,8 @@
unsigned int *num_ports,
struct audio_port_v7 *ports,
unsigned int *generation) = 0;
+ virtual status_t listDeclaredDevicePorts(media::AudioPortRole role,
+ std::vector<media::AudioPortFw>* result) = 0;
virtual status_t getAudioPort(struct audio_port_v7 *port) = 0;
virtual status_t createAudioPatch(const struct audio_patch *patch,
audio_patch_handle_t *handle,
diff --git a/services/audiopolicy/TEST_MAPPING b/services/audiopolicy/TEST_MAPPING
index 2612393..fa3a5d3 100644
--- a/services/audiopolicy/TEST_MAPPING
+++ b/services/audiopolicy/TEST_MAPPING
@@ -11,9 +11,7 @@
"include-filter": "com.google.android.gts.audio.AudioHostTest#testTwoChannelCapturing"
}
]
- }
- ],
- "postsubmit": [
+ },
{
"name": "CtsNativeMediaAAudioTestCases",
"options" : [
diff --git a/services/audiopolicy/common/managerdefinitions/include/AudioOutputDescriptor.h b/services/audiopolicy/common/managerdefinitions/include/AudioOutputDescriptor.h
index 52a000f..876911d 100644
--- a/services/audiopolicy/common/managerdefinitions/include/AudioOutputDescriptor.h
+++ b/services/audiopolicy/common/managerdefinitions/include/AudioOutputDescriptor.h
@@ -301,6 +301,10 @@
return mActiveClients;
}
+    // Returns the number of active clients if they all share the same exclusive
+    // preferred device, and 0 otherwise.
+ size_t sameExclusivePreferredDevicesCount() const;
+
bool useHwGain() const
{
return !devices().isEmpty() ? devices().itemAt(0)->hasGainController() : false;
diff --git a/services/audiopolicy/common/managerdefinitions/src/AudioOutputDescriptor.cpp b/services/audiopolicy/common/managerdefinitions/src/AudioOutputDescriptor.cpp
index a46186b..7ee6566 100644
--- a/services/audiopolicy/common/managerdefinitions/src/AudioOutputDescriptor.cpp
+++ b/services/audiopolicy/common/managerdefinitions/src/AudioOutputDescriptor.cpp
@@ -238,6 +238,27 @@
return clients;
}
+size_t AudioOutputDescriptor::sameExclusivePreferredDevicesCount() const
+{
+ audio_port_handle_t deviceId = AUDIO_PORT_HANDLE_NONE;
+ size_t count = 0;
+ for (const auto &client : getClientIterable()) {
+ if (client->active()) {
+ if (!(client->hasPreferredDevice() &&
+ client->isPreferredDeviceForExclusiveUse())) {
+ return 0;
+ }
+ if (deviceId == AUDIO_PORT_HANDLE_NONE) {
+ deviceId = client->preferredDeviceId();
+ } else if (deviceId != client->preferredDeviceId()) {
+ return 0;
+ }
+ count++;
+ }
+ }
+ return count;
+}
+
bool AudioOutputDescriptor::isAnyActive(VolumeSource volumeSourceToIgnore) const
{
return std::find_if(begin(mActiveClients), end(mActiveClients),
diff --git a/services/audiopolicy/common/managerdefinitions/src/AudioPolicyMix.cpp b/services/audiopolicy/common/managerdefinitions/src/AudioPolicyMix.cpp
index ba5a6a7..4cfdaad 100644
--- a/services/audiopolicy/common/managerdefinitions/src/AudioPolicyMix.cpp
+++ b/services/audiopolicy/common/managerdefinitions/src/AudioPolicyMix.cpp
@@ -278,17 +278,6 @@
mixesDisallowsRequestedDevice = true;
}
- if (!primaryOutputMix && (flags & AUDIO_OUTPUT_FLAG_MMAP_NOIRQ)) {
- // AAudio does not support MMAP_NO_IRQ loopback render, and there is no way with
- // the current MmapStreamInterface::start to reject a specific client added to a shared
- // mmap stream.
- // As a result all MMAP_NOIRQ requests have to be rejected when an loopback render
- // policy is present. That ensures no shared mmap stream is used when an loopback
- // render policy is registered.
- ALOGD("%s: Rejecting MMAP_NOIRQ request due to LOOPBACK|RENDER mix present.", __func__);
- return INVALID_OPERATION;
- }
-
if (primaryOutputMix && primaryMix != nullptr) {
ALOGV("%s: Skiping %zu: Primary output already found", __func__, i);
continue; // Primary output already found
@@ -299,6 +288,13 @@
continue; // skip the mix
}
+ if (flags & AUDIO_OUTPUT_FLAG_MMAP_NOIRQ) {
+ // AAudio MMAP_NOIRQ streams cannot be routed using dynamic audio policy.
+ ALOGD("%s: Rejecting MMAP_NOIRQ request matched to dynamic audio policy mix.",
+ __func__);
+ return INVALID_OPERATION;
+ }
+
if (mixDevice != nullptr && mixDevice->equals(requestedDevice)) {
ALOGV("%s: Mix %zu: requested device mathches", __func__, i);
mixesDisallowsRequestedDevice = false;
diff --git a/services/audiopolicy/engine/common/include/EngineBase.h b/services/audiopolicy/engine/common/include/EngineBase.h
index 8cfa592..bc780f1 100644
--- a/services/audiopolicy/engine/common/include/EngineBase.h
+++ b/services/audiopolicy/engine/common/include/EngineBase.h
@@ -165,6 +165,10 @@
DeviceVector getActiveMediaDevices(const DeviceVector& availableDevices) const override;
+ void initializeDeviceSelectionCache() override;
+
+ void updateDeviceSelectionCache() override;
+
private:
/**
* Get media devices as the given role
@@ -193,6 +197,28 @@
/** current forced use configuration. */
audio_policy_forced_cfg_t mForceUse[AUDIO_POLICY_FORCE_USE_CNT] = {};
+
+protected:
+ /**
+ * Set the device information for a given strategy.
+ *
+ * @param strategy the strategy to set devices information
+ * @param devices the devices selected for the strategy
+ */
+ virtual void setStrategyDevices(const sp<ProductStrategy>& /*strategy*/,
+ const DeviceVector& /*devices*/) {
+ // In EngineBase, do nothing. It is up to the actual engine to decide if it is needed to
+ // set devices information for the given strategy.
+ }
+
+ /**
+ * Get devices that will be used for the given product strategy.
+ *
+ * @param strategy the strategy to query
+ */
+ virtual DeviceVector getDevicesForProductStrategy(product_strategy_t strategy) const = 0;
+
+ DeviceStrategyMap mDevicesForStrategies;
};
} // namespace audio_policy
diff --git a/services/audiopolicy/engine/common/src/EngineBase.cpp b/services/audiopolicy/engine/common/src/EngineBase.cpp
index 8015ae0..471424c 100644
--- a/services/audiopolicy/engine/common/src/EngineBase.cpp
+++ b/services/audiopolicy/engine/common/src/EngineBase.cpp
@@ -685,6 +685,26 @@
return activeDevices;
}
+void EngineBase::initializeDeviceSelectionCache() {
+    // Initializing the device selection cache with the default device is harmless; the cache
+    // is updated again after the audio modules are initialized.
+ auto defaultDevices = DeviceVector(getApmObserver()->getDefaultOutputDevice());
+ for (const auto &iter : getProductStrategies()) {
+ const auto &strategy = iter.second;
+ mDevicesForStrategies[strategy->getId()] = defaultDevices;
+ setStrategyDevices(strategy, defaultDevices);
+ }
+}
+
+void EngineBase::updateDeviceSelectionCache() {
+ for (const auto &iter : getProductStrategies()) {
+ const auto& strategy = iter.second;
+ auto devices = getDevicesForProductStrategy(strategy->getId());
+ mDevicesForStrategies[strategy->getId()] = devices;
+ setStrategyDevices(strategy, devices);
+ }
+}
+
void EngineBase::dumpCapturePresetDevicesRoleMap(String8 *dst, int spaces) const
{
dst->appendFormat("\n%*sDevice role per capture preset dump:", spaces, "");
diff --git a/services/audiopolicy/engine/interface/EngineInterface.h b/services/audiopolicy/engine/interface/EngineInterface.h
index b8e35ed..ea8fc41 100644
--- a/services/audiopolicy/engine/interface/EngineInterface.h
+++ b/services/audiopolicy/engine/interface/EngineInterface.h
@@ -434,6 +434,16 @@
*/
virtual DeviceVector getActiveMediaDevices(const DeviceVector& availableDevices) const = 0;
+ /**
+     * @brief initializeDeviceSelectionCache. Device selection for AudioAttributes / streams is
+     * cached in the engine to speed up the selection process once the audio system is stable.
+     * While the audio system is initializing, not all audio device information is available yet;
+     * in that case this function lets the engine initialize the device selection cache with
+     * default values.
+     * This must only be called while the audio policy manager is initializing.
+ */
+ virtual void initializeDeviceSelectionCache() = 0;
+
virtual void dump(String8 *dst) const = 0;
protected:
diff --git a/services/audiopolicy/engineconfigurable/src/Engine.cpp b/services/audiopolicy/engineconfigurable/src/Engine.cpp
index 9d53017..64f6cb4 100644
--- a/services/audiopolicy/engineconfigurable/src/Engine.cpp
+++ b/services/audiopolicy/engineconfigurable/src/Engine.cpp
@@ -356,14 +356,6 @@
return availableInputDevices.getDevice(deviceType, String8(address.c_str()), AUDIO_FORMAT_DEFAULT);
}
-void Engine::updateDeviceSelectionCache()
-{
- for (const auto &iter : getProductStrategies()) {
- const auto &strategy = iter.second;
- mDevicesForStrategies[strategy->getId()] = getDevicesForProductStrategy(strategy->getId());
- }
-}
-
void Engine::setDeviceAddressForProductStrategy(product_strategy_t strategy,
const std::string &address)
{
diff --git a/services/audiopolicy/engineconfigurable/src/Engine.h b/services/audiopolicy/engineconfigurable/src/Engine.h
index 6ac20cd..d97efc7 100644
--- a/services/audiopolicy/engineconfigurable/src/Engine.h
+++ b/services/audiopolicy/engineconfigurable/src/Engine.h
@@ -67,8 +67,6 @@
sp<AudioPolicyMix> *mix = nullptr)
const override;
- void updateDeviceSelectionCache() override;
-
///
/// from AudioPolicyPluginInterface
///
@@ -123,15 +121,17 @@
status_t loadAudioPolicyEngineConfig();
- DeviceVector getDevicesForProductStrategy(product_strategy_t strategy) const;
DeviceVector getCachedDevices(product_strategy_t ps) const;
+ ///
+ /// from EngineBase
+ ///
+ DeviceVector getDevicesForProductStrategy(product_strategy_t strategy) const override;
+
/**
* Policy Parameter Manager hidden through a wrapper.
*/
ParameterManagerWrapper *mPolicyParameterMgr;
-
- DeviceStrategyMap mDevicesForStrategies;
};
} // namespace audio_policy
diff --git a/services/audiopolicy/enginedefault/src/Engine.cpp b/services/audiopolicy/enginedefault/src/Engine.cpp
index f2df7ac..ea56486 100644
--- a/services/audiopolicy/enginedefault/src/Engine.cpp
+++ b/services/audiopolicy/enginedefault/src/Engine.cpp
@@ -166,8 +166,12 @@
// - cannot route from voice call RX OR
// - audio HAL version is < 3.0 and TX device is on the primary HW module
if (getPhoneState() == AUDIO_MODE_IN_CALL) {
- audio_devices_t txDevice = getDeviceForInputSource(
- AUDIO_SOURCE_VOICE_COMMUNICATION)->type();
+ audio_devices_t txDevice = AUDIO_DEVICE_NONE;
+ sp<DeviceDescriptor> txDeviceDesc =
+ getDeviceForInputSource(AUDIO_SOURCE_VOICE_COMMUNICATION);
+ if (txDeviceDesc != nullptr) {
+ txDevice = txDeviceDesc->type();
+ }
sp<AudioOutputDescriptor> primaryOutput = outputs.getPrimaryOutput();
LOG_ALWAYS_FATAL_IF(primaryOutput == nullptr, "Primary output not found");
DeviceVector availPrimaryInputDevices =
@@ -287,7 +291,8 @@
}));
if (!devices.isEmpty()) break;
devices = availableOutputDevices.getFirstDevicesFromTypes({
- AUDIO_DEVICE_OUT_DGTL_DOCK_HEADSET, AUDIO_DEVICE_OUT_EARPIECE});
+ AUDIO_DEVICE_OUT_DGTL_DOCK_HEADSET, AUDIO_DEVICE_OUT_EARPIECE,
+ AUDIO_DEVICE_OUT_SPEAKER});
} break;
case STRATEGY_SONIFICATION:
@@ -479,6 +484,37 @@
return devices;
}
+DeviceVector Engine::getPreferredAvailableDevicesForInputSource(
+ const DeviceVector& availableInputDevices, audio_source_t inputSource) const {
+ DeviceVector preferredAvailableDevVec = {};
+ AudioDeviceTypeAddrVector preferredDevices;
+ const status_t status = getDevicesForRoleAndCapturePreset(
+ inputSource, DEVICE_ROLE_PREFERRED, preferredDevices);
+ if (status == NO_ERROR) {
+ // Only use preferred devices when they are all available.
+ preferredAvailableDevVec =
+ availableInputDevices.getDevicesFromDeviceTypeAddrVec(preferredDevices);
+ if (preferredAvailableDevVec.size() == preferredDevices.size()) {
+ ALOGVV("%s using pref device %s for source %u",
+ __func__, preferredAvailableDevVec.toString().c_str(), inputSource);
+ return preferredAvailableDevVec;
+ }
+ }
+ return preferredAvailableDevVec;
+}
+
+DeviceVector Engine::getDisabledDevicesForInputSource(
+ const DeviceVector& availableInputDevices, audio_source_t inputSource) const {
+ DeviceVector disabledDevices = {};
+ AudioDeviceTypeAddrVector disabledDevicesTypeAddr;
+ const status_t status = getDevicesForRoleAndCapturePreset(
+ inputSource, DEVICE_ROLE_DISABLED, disabledDevicesTypeAddr);
+ if (status == NO_ERROR) {
+ disabledDevices =
+ availableInputDevices.getDevicesFromDeviceTypeAddrVec(disabledDevicesTypeAddr);
+ }
+ return disabledDevices;
+}
sp<DeviceDescriptor> Engine::getDeviceForInputSource(audio_source_t inputSource) const
{
@@ -510,6 +546,20 @@
}
}
+ // Use the preferred device for the input source if it is available.
+ DeviceVector preferredInputDevices = getPreferredAvailableDevicesForInputSource(
+ availableDevices, inputSource);
+ if (!preferredInputDevices.isEmpty()) {
+ // Currently, only support single device for input. The public JAVA API also only
+        // Currently only a single input device is supported. The public Java API also only
+        // supports setting a single device as the preferred device, so returning the first
+        // device is OK here.
+ }
+ // Remove the disabled device for the input source from the available input device list.
+ DeviceVector disabledInputDevices = getDisabledDevicesForInputSource(
+ availableDevices, inputSource);
+ availableDevices.remove(disabledInputDevices);
+
audio_devices_t commDeviceType =
getPreferredDeviceTypeForLegacyStrategy(availableOutputDevices, STRATEGY_PHONE);
@@ -549,22 +599,26 @@
}
}
switch (commDeviceType) {
- case AUDIO_DEVICE_OUT_BLE_HEADSET:
- device = availableDevices.getDevice(
- AUDIO_DEVICE_IN_BLE_HEADSET, String8(""), AUDIO_FORMAT_DEFAULT);
- break;
case AUDIO_DEVICE_OUT_SPEAKER:
device = availableDevices.getFirstExistingDevice({
AUDIO_DEVICE_IN_BACK_MIC, AUDIO_DEVICE_IN_BUILTIN_MIC,
AUDIO_DEVICE_IN_USB_DEVICE, AUDIO_DEVICE_IN_USB_HEADSET});
break;
+ case AUDIO_DEVICE_OUT_BLE_HEADSET:
+ device = availableDevices.getDevice(
+ AUDIO_DEVICE_IN_BLE_HEADSET, String8(""), AUDIO_FORMAT_DEFAULT);
+ if (device != nullptr) {
+ break;
+ }
+ ALOGE("%s LE Audio selected for communication but input device not available",
+ __func__);
+ FALLTHROUGH_INTENDED;
default: // FORCE_NONE
device = availableDevices.getFirstExistingDevice({
AUDIO_DEVICE_IN_WIRED_HEADSET, AUDIO_DEVICE_IN_USB_HEADSET,
AUDIO_DEVICE_IN_USB_DEVICE, AUDIO_DEVICE_IN_BLUETOOTH_BLE,
AUDIO_DEVICE_IN_BUILTIN_MIC});
break;
-
}
break;
@@ -651,15 +705,9 @@
return device;
}
-void Engine::updateDeviceSelectionCache()
-{
- for (const auto &iter : getProductStrategies()) {
- const auto& strategy = iter.second;
- auto devices = getDevicesForProductStrategy(strategy->getId());
- mDevicesForStrategies[strategy->getId()] = devices;
- strategy->setDeviceTypes(devices.types());
- strategy->setDeviceAddress(devices.getFirstValidAddress().c_str());
- }
+void Engine::setStrategyDevices(const sp<ProductStrategy>& strategy, const DeviceVector &devices) {
+ strategy->setDeviceTypes(devices.types());
+ strategy->setDeviceAddress(devices.getFirstValidAddress().c_str());
}
product_strategy_t Engine::getProductStrategyFromLegacy(legacy_strategy legacyStrategy) const {
diff --git a/services/audiopolicy/enginedefault/src/Engine.h b/services/audiopolicy/enginedefault/src/Engine.h
index ab556ee..714fef8 100644
--- a/services/audiopolicy/enginedefault/src/Engine.h
+++ b/services/audiopolicy/enginedefault/src/Engine.h
@@ -68,7 +68,10 @@
sp<AudioPolicyMix> *mix = nullptr)
const override;
- void updateDeviceSelectionCache() override;
+ void setStrategyDevices(const sp<ProductStrategy>& strategy,
+ const DeviceVector& devices) override;
+
+ DeviceVector getDevicesForProductStrategy(product_strategy_t strategy) const override;
private:
/* Copy facilities are put private to disable copy. */
@@ -88,8 +91,6 @@
DeviceVector availableOutputDevices,
const SwAudioOutputCollection &outputs) const;
- DeviceVector getDevicesForProductStrategy(product_strategy_t strategy) const;
-
sp<DeviceDescriptor> getDeviceForInputSource(audio_source_t inputSource) const;
product_strategy_t getProductStrategyFromLegacy(legacy_strategy legacyStrategy) const;
@@ -99,8 +100,10 @@
const DeviceVector& availableOutputDevices, product_strategy_t strategy) const;
DeviceVector getDisabledDevicesForProductStrategy(
const DeviceVector& availableOutputDevices, product_strategy_t strategy) const;
-
- DeviceStrategyMap mDevicesForStrategies;
+ DeviceVector getPreferredAvailableDevicesForInputSource(
+ const DeviceVector& availableInputDevices, audio_source_t inputSource) const;
+ DeviceVector getDisabledDevicesForInputSource(
+ const DeviceVector& availableInputDevices, audio_source_t inputSource) const;
std::map<product_strategy_t, legacy_strategy> mLegacyStrategyMap;
};
diff --git a/services/audiopolicy/fuzzer/Android.bp b/services/audiopolicy/fuzzer/Android.bp
index 9f6b703..621f643 100644
--- a/services/audiopolicy/fuzzer/Android.bp
+++ b/services/audiopolicy/fuzzer/Android.bp
@@ -63,6 +63,15 @@
],
data: [":audiopolicyfuzzer_configuration_files"],
fuzz_config: {
- cc: ["mnaganov@google.com"],
+ cc: ["mnaganov@google.com"],
+ componentid: 155276,
+ hotlists: [
+ "4593311",
+ ],
+ description: "The fuzzer targets the APIs of libaudiopolicy",
+ vector: "local_no_privileges_required",
+ service_privilege: "privileged",
+ users: "multi_user",
+ fuzzed_code_usage: "shipped",
},
}
diff --git a/services/audiopolicy/managerdefault/AudioPolicyManager.cpp b/services/audiopolicy/managerdefault/AudioPolicyManager.cpp
index 6d7ed75..cc8b1a1 100644
--- a/services/audiopolicy/managerdefault/AudioPolicyManager.cpp
+++ b/services/audiopolicy/managerdefault/AudioPolicyManager.cpp
@@ -671,7 +671,10 @@
audio_attributes_t attr = { .source = AUDIO_SOURCE_VOICE_COMMUNICATION };
auto txSourceDevice = mEngine->getInputDeviceForAttributes(attr);
- ALOG_ASSERT(txSourceDevice != 0, "%s() input selected device not available", __func__);
+ if (txSourceDevice == nullptr) {
+ ALOGE("%s() selected input device not available", __func__);
+ return INVALID_OPERATION;
+ }
ALOGV("%s device rxDevice %s txDevice %s", __func__,
rxDevices.itemAt(0)->toString().c_str(), txSourceDevice->toString().c_str());
@@ -1582,6 +1585,10 @@
if ((*flags & (AUDIO_OUTPUT_FLAG_HW_AV_SYNC | AUDIO_OUTPUT_FLAG_MMAP_NOIRQ)) != 0) {
return AUDIO_IO_HANDLE_NONE;
}
+ // A request for Tuner cannot fallback to a mixed output
+ if ((directConfig.offload_info.content_id || directConfig.offload_info.sync_id)) {
+ return AUDIO_IO_HANDLE_NONE;
+ }
// ignoring channel mask due to downmix capability in mixer
@@ -2221,8 +2228,7 @@
outputDesc->setClientActive(client, true);
if (client->hasPreferredDevice(true)) {
- if (outputDesc->clientsList(true /*activeOnly*/).size() == 1 &&
- client->isPreferredDeviceForExclusiveUse()) {
+ if (outputDesc->sameExclusivePreferredDevicesCount() > 0) {
// Preferred device may be exclusive, use only if no other active clients on this output
devices = DeviceVector(
mAvailableOutputDevices.getDeviceFromId(client->preferredDeviceId()));
@@ -2454,7 +2460,8 @@
}
}
bool forceDeviceUpdate = false;
- if (client->hasPreferredDevice(true)) {
+ if (client->hasPreferredDevice(true) &&
+ outputDesc->sameExclusivePreferredDevicesCount() < 2) {
checkStrategyRoute(client->strategy(), AUDIO_IO_HANDLE_NONE);
forceDeviceUpdate = true;
}
@@ -3752,11 +3759,12 @@
bool AudioPolicyManager::areAllDevicesSupported(
const AudioDeviceTypeAddrVector& devices,
std::function<bool(audio_devices_t)> predicate,
- const char *context) {
+ const char *context,
+ bool matchAddress) {
for (size_t i = 0; i < devices.size(); i++) {
sp<DeviceDescriptor> devDesc = mHwModules.getDeviceDescriptor(
devices[i].mType, devices[i].getAddress(), String8(),
- AUDIO_FORMAT_DEFAULT, false /*allowToCreate*/, true /*matchAddress*/);
+ AUDIO_FORMAT_DEFAULT, false /*allowToCreate*/, matchAddress);
if (devDesc == nullptr || (predicate != nullptr && !predicate(devices[i].mType))) {
ALOGE("%s: device type %#x address %s not supported or not match predicate",
context, devices[i].mType, devices[i].getAddress());
@@ -3895,7 +3903,8 @@
ALOGV("%s() strategy=%d role=%d %s", __func__, strategy, role,
dumpAudioDeviceTypeAddrVector(devices).c_str());
- if (!areAllDevicesSupported(devices, audio_is_output_device, __func__)) {
+ if (!areAllDevicesSupported(
+ devices, audio_is_output_device, __func__, /*matchAddress*/false)) {
return BAD_VALUE;
}
status_t status = mEngine->removeDevicesRoleForStrategy(strategy, role, devices);
@@ -3995,7 +4004,8 @@
ALOGV("%s() audioSource=%d role=%d devices=%s", __func__, audioSource, role,
dumpAudioDeviceTypeAddrVector(devices).c_str());
- if (!areAllDevicesSupported(devices, audio_call_is_input_device, __func__)) {
+ if (!areAllDevicesSupported(
+ devices, audio_call_is_input_device, __func__, /*matchAddress*/false)) {
return BAD_VALUE;
}
@@ -4592,6 +4602,28 @@
return NO_ERROR;
}
+status_t AudioPolicyManager::listDeclaredDevicePorts(media::AudioPortRole role,
+ std::vector<media::AudioPortFw>* _aidl_return) {
+ auto pushPort = [&](const sp<DeviceDescriptor>& dev) -> status_t {
+ audio_port_v7 port;
+ dev->toAudioPort(&port);
+ auto aidlPort = VALUE_OR_RETURN_STATUS(legacy2aidl_audio_port_v7_AudioPortFw(port));
+ _aidl_return->push_back(std::move(aidlPort));
+ return OK;
+ };
+
+ for (const auto& module : mHwModulesAll) {
+ for (const auto& dev : module->getDeclaredDevices()) {
+ if (role == media::AudioPortRole::NONE ||
+ ((role == media::AudioPortRole::SOURCE)
+ == audio_is_input_device(dev->type()))) {
+ RETURN_STATUS_IF_ERROR(pushPort(dev));
+ }
+ }
+ }
+ return OK;
+}
+
status_t AudioPolicyManager::getAudioPort(struct audio_port_v7 *port)
{
if (port == nullptr || port->id == AUDIO_PORT_HANDLE_NONE) {
@@ -5320,7 +5352,11 @@
*session = (audio_session_t)mpClientInterface->newAudioUniqueId(AUDIO_UNIQUE_ID_USE_SESSION);
*ioHandle = (audio_io_handle_t)mpClientInterface->newAudioUniqueId(AUDIO_UNIQUE_ID_USE_INPUT);
audio_attributes_t attr = { .source = AUDIO_SOURCE_HOTWORD };
- *device = mEngine->getInputDeviceForAttributes(attr)->type();
+ sp<DeviceDescriptor> deviceDesc = mEngine->getInputDeviceForAttributes(attr);
+ if (deviceDesc == nullptr) {
+ return INVALID_OPERATION;
+ }
+ *device = deviceDesc->type();
return mSoundTriggerSessions.acquireSession(*session, *ioHandle);
}
@@ -6027,7 +6063,9 @@
}
}
- mEngine->updateDeviceSelectionCache();
+ // The actual device selection cache will be updated when calling `updateDevicesAndOutputs`
+ // at the end of this function.
+ mEngine->initializeDeviceSelectionCache();
mCommunnicationStrategy = mEngine->getProductStrategyForAttributes(
mEngine->getAttributesForStreamType(AUDIO_STREAM_VOICE_CALL));
@@ -7709,7 +7747,8 @@
// if sco and call follow same curves, bypass forceUseForComm
if ((callVolSrc != btScoVolSrc) &&
((isVoiceVolSrc && isScoRequested) ||
- (isBtScoVolSrc && !(isScoRequested || isHAUsed)))) {
+ (isBtScoVolSrc && !(isScoRequested || isHAUsed))) &&
+ !isSingleDeviceType(deviceTypes, AUDIO_DEVICE_OUT_TELEPHONY_TX)) {
ALOGV("%s cannot set volume group %d volume when is%srequested for comm", __func__,
volumeSource, isScoRequested ? " " : " not ");
// Do not return an error here as AudioService will always set both voice call
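The role filter inside the new listDeclaredDevicePorts() keeps every declared device when the role is NONE, input devices for SOURCE, and output devices for SINK. A minimal, self-contained restatement of that predicate follows, with a stand-in enum rather than the real media::AudioPortRole.

#include <cassert>

enum class Role { NONE, SOURCE, SINK };   // stand-in for media::AudioPortRole

// Mirrors: role == NONE || ((role == SOURCE) == audio_is_input_device(dev->type()))
bool keepDeclaredPort(Role role, bool isInputDevice) {
    return role == Role::NONE || ((role == Role::SOURCE) == isInputDevice);
}

int main() {
    assert(keepDeclaredPort(Role::NONE,   true)  && keepDeclaredPort(Role::NONE,   false));
    assert(keepDeclaredPort(Role::SOURCE, true)  && !keepDeclaredPort(Role::SOURCE, false));
    assert(!keepDeclaredPort(Role::SINK,  true)  && keepDeclaredPort(Role::SINK,   false));
    return 0;
}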
diff --git a/services/audiopolicy/managerdefault/AudioPolicyManager.h b/services/audiopolicy/managerdefault/AudioPolicyManager.h
index 3bbcf69..2924ee1 100644
--- a/services/audiopolicy/managerdefault/AudioPolicyManager.h
+++ b/services/audiopolicy/managerdefault/AudioPolicyManager.h
@@ -263,6 +263,8 @@
unsigned int *num_ports,
struct audio_port_v7 *ports,
unsigned int *generation);
+ status_t listDeclaredDevicePorts(media::AudioPortRole role,
+ std::vector<media::AudioPortFw>* result) override;
virtual status_t getAudioPort(struct audio_port_v7 *port);
virtual status_t createAudioPatch(const struct audio_patch *patch,
audio_patch_handle_t *handle,
@@ -1249,7 +1251,8 @@
bool areAllDevicesSupported(
const AudioDeviceTypeAddrVector& devices,
std::function<bool(audio_devices_t)> predicate,
- const char* context);
+ const char* context,
+ bool matchAddress = true);
bool isScoRequestedForComm() const;
diff --git a/services/audiopolicy/service/AudioPolicyInterfaceImpl.cpp b/services/audiopolicy/service/AudioPolicyInterfaceImpl.cpp
index 35411f9..af7be52 100644
--- a/services/audiopolicy/service/AudioPolicyInterfaceImpl.cpp
+++ b/services/audiopolicy/service/AudioPolicyInterfaceImpl.cpp
@@ -301,7 +301,8 @@
audio_stream_type_t stream = VALUE_OR_RETURN_BINDER_STATUS(
aidl2legacy_AudioStreamType_audio_stream_type_t(streamAidl));
- if (uint32_t(stream) >= AUDIO_STREAM_PUBLIC_CNT) {
+ if (uint32_t(stream) >= AUDIO_STREAM_PUBLIC_CNT
+ && stream != AUDIO_STREAM_ASSISTANT && stream != AUDIO_STREAM_CALL_ASSISTANT) {
*_aidl_return = VALUE_OR_RETURN_BINDER_STATUS(
legacy2aidl_audio_io_handle_t_int32_t(AUDIO_IO_HANDLE_NONE));
return Status::ok();
@@ -1540,6 +1541,17 @@
return Status::ok();
}
+Status AudioPolicyService::listDeclaredDevicePorts(media::AudioPortRole role,
+ std::vector<media::AudioPortFw>* _aidl_return) {
+ Mutex::Autolock _l(mLock);
+ if (mAudioPolicyManager == NULL) {
+ return binderStatusFromStatusT(NO_INIT);
+ }
+ AutoCallerClear acc;
+ return binderStatusFromStatusT(mAudioPolicyManager->listDeclaredDevicePorts(
+ role, _aidl_return));
+}
+
Status AudioPolicyService::getAudioPort(int portId,
media::AudioPortFw* _aidl_return) {
audio_port_v7 port{ .id = portId };
diff --git a/services/audiopolicy/service/AudioPolicyService.h b/services/audiopolicy/service/AudioPolicyService.h
index 31d5249..59aabac 100644
--- a/services/audiopolicy/service/AudioPolicyService.h
+++ b/services/audiopolicy/service/AudioPolicyService.h
@@ -175,6 +175,8 @@
binder::Status listAudioPorts(media::AudioPortRole role, media::AudioPortType type,
Int* count, std::vector<media::AudioPortFw>* ports,
int32_t* _aidl_return) override;
+ binder::Status listDeclaredDevicePorts(media::AudioPortRole role,
+ std::vector<media::AudioPortFw>* _aidl_return) override;
binder::Status getAudioPort(int portId,
media::AudioPortFw* _aidl_return) override;
binder::Status createAudioPatch(const media::AudioPatchFw& patch, int32_t handle,
diff --git a/services/audiopolicy/service/Spatializer.cpp b/services/audiopolicy/service/Spatializer.cpp
index fa8d596..95b8e7c 100644
--- a/services/audiopolicy/service/Spatializer.cpp
+++ b/services/audiopolicy/service/Spatializer.cpp
@@ -20,6 +20,7 @@
//#define LOG_NDEBUG 0
#include <utils/Log.h>
+#include <algorithm>
#include <inttypes.h>
#include <limits.h>
#include <stdint.h>
@@ -94,6 +95,16 @@
return record;
}
+template<typename T>
+static constexpr const T& safe_clamp(const T& value, const T& low, const T& high) {
+ if constexpr (std::is_floating_point_v<T>) {
+ return value != value /* constexpr isnan */
+ ? low : std::clamp(value, low, high);
+ } else /* constexpr */ {
+ return std::clamp(value, low, high);
+ }
+}
+
// ---------------------------------------------------------------------------
class Spatializer::EngineCallbackHandler : public AHandler {
@@ -638,28 +649,48 @@
Status Spatializer::setDisplayOrientation(float physicalToLogicalAngle) {
ALOGV("%s physicalToLogicalAngle %f", __func__, physicalToLogicalAngle);
- if (!mSupportsHeadTracking) {
- return binderStatusFromStatusT(INVALID_OPERATION);
- }
- std::lock_guard lock(mLock);
- mDisplayOrientation = physicalToLogicalAngle;
mLocalLog.log("%s with %f", __func__, physicalToLogicalAngle);
+ const float angle = safe_clamp(physicalToLogicalAngle, 0.f, (float)(2. * M_PI));
+ // Due to numerical inaccuracies, the incoming angle may slightly exceed the [0, 2 * M_PI] range.
+ ALOGI_IF(angle != physicalToLogicalAngle,
+ "%s: clamping %f to %f", __func__, physicalToLogicalAngle, angle);
+ std::lock_guard lock(mLock);
+ mDisplayOrientation = angle;
if (mPoseController != nullptr) {
- mPoseController->setDisplayOrientation(mDisplayOrientation);
+ // This turns on the rate-limiter.
+ mPoseController->setDisplayOrientation(angle);
}
if (mEngine != nullptr) {
setEffectParameter_l(
- SPATIALIZER_PARAM_DISPLAY_ORIENTATION, std::vector<float>{physicalToLogicalAngle});
+ SPATIALIZER_PARAM_DISPLAY_ORIENTATION, std::vector<float>{angle});
}
return Status::ok();
}
Status Spatializer::setHingeAngle(float hingeAngle) {
- std::lock_guard lock(mLock);
ALOGV("%s hingeAngle %f", __func__, hingeAngle);
+ mLocalLog.log("%s with %f", __func__, hingeAngle);
+ const float angle = safe_clamp(hingeAngle, 0.f, (float)(2. * M_PI));
+ // Due to numerical inaccuracies, the incoming angle may slightly exceed the [0, 2 * M_PI] range.
+ ALOGI_IF(angle != hingeAngle,
+ "%s: clamping %f to %f", __func__, hingeAngle, angle);
+ std::lock_guard lock(mLock);
+ mHingeAngle = angle;
if (mEngine != nullptr) {
- mLocalLog.log("%s with %f", __func__, hingeAngle);
- setEffectParameter_l(SPATIALIZER_PARAM_HINGE_ANGLE, std::vector<float>{hingeAngle});
+ setEffectParameter_l(SPATIALIZER_PARAM_HINGE_ANGLE, std::vector<float>{angle});
+ }
+ return Status::ok();
+}
+
+Status Spatializer::setFoldState(bool folded) {
+ ALOGV("%s foldState %d", __func__, (int)folded);
+ mLocalLog.log("%s with %d", __func__, (int)folded);
+ std::lock_guard lock(mLock);
+ mFoldedState = folded;
+ if (mEngine != nullptr) {
+ // we don't suppress multiple calls with the same folded state - that's
+ // done at the caller.
+ setEffectParameter_l(SPATIALIZER_PARAM_FOLD_STATE, std::vector<uint8_t>{mFoldedState});
}
return Status::ok();
}
@@ -808,8 +839,7 @@
}
}
callback = mHeadTrackingCallback;
- mLocalLog.log("%s: %s, spatializerMode %s", __func__, media::toString(mode).c_str(),
- media::toString(spatializerMode).c_str());
+ mLocalLog.log("%s: updating mode to %s", __func__, media::toString(mode).c_str());
}
if (callback != nullptr) {
callback->onHeadTrackingModeChanged(spatializerMode);
@@ -864,6 +894,14 @@
checkSensorsState_l();
}
callback = mSpatializerCallback;
+
+ // Restore common effect state.
+ setEffectParameter_l(SPATIALIZER_PARAM_DISPLAY_ORIENTATION,
+ std::vector<float>{mDisplayOrientation});
+ setEffectParameter_l(SPATIALIZER_PARAM_FOLD_STATE,
+ std::vector<uint8_t>{mFoldedState});
+ setEffectParameter_l(SPATIALIZER_PARAM_HINGE_ANGLE,
+ std::vector<float>{mHingeAngle});
}
if (outputChanged && callback != nullptr) {
@@ -1076,13 +1114,13 @@
if (mPoseController != nullptr) {
ss.append(mPoseController->toString(level + 1))
.append(prefixSpace)
- .append("Pose (active stage-to-head) [tx, ty, tz, pitch, roll, yaw]:\n")
+ .append("Pose (active stage-to-head) [tx, ty, tz : pitch, roll, yaw]:\n")
.append(prefixSpace)
.append(" PerMinuteHistory:\n")
- .append(mPoseDurableRecorder.toString(level + 2))
+ .append(mPoseDurableRecorder.toString(level + 3))
.append(prefixSpace)
.append(" PerSecondHistory:\n")
- .append(mPoseRecorder.toString(level + 2));
+ .append(mPoseRecorder.toString(level + 3));
} else {
ss.append(prefixSpace).append("SpatializerPoseController not exist\n");
}
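safe_clamp() above behaves like std::clamp except that a NaN floating-point input maps to the lower bound instead of propagating, which keeps mDisplayOrientation and mHingeAngle inside [0, 2 * M_PI] even for malformed input. A standalone sketch follows; the template body is copied from the hunk, the assertions are illustrative.

#include <algorithm>
#include <cassert>
#include <cmath>
#include <type_traits>

template <typename T>
static constexpr const T& safe_clamp(const T& value, const T& low, const T& high) {
    if constexpr (std::is_floating_point_v<T>) {
        return value != value /* constexpr isnan */ ? low : std::clamp(value, low, high);
    } else /* constexpr */ {
        return std::clamp(value, low, high);
    }
}

int main() {
    const float kTwoPi = 2.f * static_cast<float>(M_PI);
    assert(safe_clamp(1.5f, 0.f, kTwoPi) == 1.5f);           // in range: unchanged
    assert(safe_clamp(7.f, 0.f, kTwoPi) == kTwoPi);          // above range: clamped to the upper bound
    assert(safe_clamp(std::nanf(""), 0.f, kTwoPi) == 0.f);   // NaN: falls back to the low bound
    return 0;
}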
diff --git a/services/audiopolicy/service/Spatializer.h b/services/audiopolicy/service/Spatializer.h
index f54eacd..23de0c0 100644
--- a/services/audiopolicy/service/Spatializer.h
+++ b/services/audiopolicy/service/Spatializer.h
@@ -120,6 +120,7 @@
binder::Status setScreenSensor(int sensorHandle) override;
binder::Status setDisplayOrientation(float physicalToLogicalAngle) override;
binder::Status setHingeAngle(float hingeAngle) override;
+ binder::Status setFoldState(bool folded) override;
binder::Status getSupportedModes(std::vector<media::SpatializationMode>* modes) override;
binder::Status registerHeadTrackingCallback(
const sp<media::ISpatializerHeadTrackingCallback>& callback) override;
@@ -377,8 +378,13 @@
int32_t mScreenSensor GUARDED_BY(mLock) = SpatializerPoseController::INVALID_SENSOR;
/** Last display orientation received */
- static constexpr float kDisplayOrientationInvalid = 1000;
- float mDisplayOrientation GUARDED_BY(mLock) = kDisplayOrientationInvalid;
+ float mDisplayOrientation GUARDED_BY(mLock) = 0.f; // aligned to natural up orientation.
+
+ /** Last folded state */
+ bool mFoldedState GUARDED_BY(mLock) = false; // foldable: true means folded.
+
+ /** Last hinge angle */
+ float mHingeAngle GUARDED_BY(mLock) = 0.f; // foldable: 0.f is closed, M_PI flat open.
std::vector<media::SpatializationLevel> mLevels;
std::vector<media::SpatializerHeadTrackingMode> mHeadTrackingModes;
@@ -407,10 +413,10 @@
*/
// Record one log line per second (up to mMaxLocalLogLine) to capture most recent sensor data.
media::VectorRecorder mPoseRecorder GUARDED_BY(mLock) {
- 6 /* vectorSize */, std::chrono::seconds(1), mMaxLocalLogLine };
+ 6 /* vectorSize */, std::chrono::seconds(1), mMaxLocalLogLine, { 3 } /* delimiterIdx */};
// Record one log line per minute (up to mMaxLocalLogLine) to capture durable sensor data.
media::VectorRecorder mPoseDurableRecorder GUARDED_BY(mLock) {
- 6 /* vectorSize */, std::chrono::minutes(1), mMaxLocalLogLine };
+ 6 /* vectorSize */, std::chrono::minutes(1), mMaxLocalLogLine, { 3 } /* delimiterIdx */};
}; // Spatializer
}; // namespace android
diff --git a/services/audiopolicy/service/SpatializerPoseController.cpp b/services/audiopolicy/service/SpatializerPoseController.cpp
index 2ac2af7..874bde4 100644
--- a/services/audiopolicy/service/SpatializerPoseController.cpp
+++ b/services/audiopolicy/service/SpatializerPoseController.cpp
@@ -22,6 +22,7 @@
#define LOG_TAG "SpatializerPoseController"
//#define LOG_NDEBUG 0
+#include <cutils/properties.h>
#include <sensor/Sensor.h>
#include <media/MediaMetricsItem.h>
#include <media/QuaternionUtil.h>
@@ -47,11 +48,17 @@
// This is how fast, in rad/s, we allow rotation angle to shift during rate-limiting.
constexpr float kMaxRotationalVelocity = 0.8f;
-// This is how far into the future we predict the head pose, using linear extrapolation based on
-// twist (velocity). It should be set to a value that matches the characteristic durations of moving
-// one's head. The higher we set this, the more latency we are able to reduce, but setting this too
-// high will result in high prediction errors whenever the head accelerates (changes velocity).
-constexpr auto kPredictionDuration = 50ms;
+// This is how far into the future we predict the head pose.
+// The prediction duration should be based on the actual latency from
+// head-tracker to audio output, though setting the prediction duration too
+// high may result in higher prediction errors when the head accelerates or
+// decelerates (changes velocity).
+//
+// The head tracking predictor will do a best effort to achieve the requested
+// prediction duration. If the duration is too far in the future based on
+// current sensor variance, the predictor may internally restrict duration to what
+// is achievable with reasonable confidence as the "best prediction".
+constexpr auto kPredictionDuration = 120ms;
// After not getting a pose sample for this long, we would treat the measurement as stale.
// The max connection interval is 50ms, and HT sensor event interval can differ depending on the
@@ -99,7 +106,15 @@
.maxTranslationalVelocity = kMaxTranslationalVelocity / kTicksPerSecond,
.maxRotationalVelocity = kMaxRotationalVelocity / kTicksPerSecond,
.freshnessTimeout = Ticks(kFreshnessTimeout).count(),
- .predictionDuration = Ticks(kPredictionDuration).count(),
+ .predictionDuration = []() -> float {
+ const int duration_ms =
+ property_get_int32("audio.spatializer.prediction_duration_ms", -1);
+ if (duration_ms >= 0) {
+ return duration_ms * 1'000'000LL;
+ } else {
+ return Ticks(kPredictionDuration).count();
+ }
+ }(),
.autoRecenterWindowDuration = Ticks(kAutoRecenterWindowDuration).count(),
.autoRecenterTranslationalThreshold = kAutoRecenterTranslationThreshold,
.autoRecenterRotationalThreshold = kAutoRecenterRotationThreshold,
@@ -147,7 +162,14 @@
mShouldCalculate = false;
}
}
- }) {}
+ }) {
+ const media::PosePredictorType posePredictorType =
+ (media::PosePredictorType)
+ property_get_int32("audio.spatializer.pose_predictor_type", -1);
+ if (isValidPosePredictorType(posePredictorType)) {
+ mProcessor->setPosePredictorType(posePredictorType);
+ }
+ }
SpatializerPoseController::~SpatializerPoseController() {
{
@@ -290,22 +312,29 @@
const float delayMs = (elapsedRealtimeNano() - timestamp) * NANOS_TO_MILLIS; // CLOCK_BOOTTIME
if (sensor == mHeadSensor) {
- std::vector<float> pryxyzdt(8); // pitch, roll, yaw, rot_vel_x, rot_vel_y, rot_vel_z,
+ std::vector<float> pryprydt(8); // pitch, roll, yaw, d_pitch, d_roll, d_yaw,
// discontinuity, timestamp_delay
- media::quaternionToAngles(pose.rotation(), &pryxyzdt[0], &pryxyzdt[1], &pryxyzdt[2]);
+ media::quaternionToAngles(pose.rotation(), &pryprydt[0], &pryprydt[1], &pryprydt[2]);
if (twist) {
const auto rotationalVelocity = twist->rotationalVelocity();
- for (size_t i = 0; i < 3; ++i) {
- pryxyzdt[i + 3] = rotationalVelocity[i];
- }
+ // The rotational velocity is an intrinsic transform (i.e. based on the head
+ // coordinate system, not the world coordinate system). It is a 3 element vector:
+ // axis (d theta / dt).
+ //
+ // We leave rotational velocity relative to the head coordinate system,
+ // as the initial head tracking sensor's world frame is arbitrary.
+ media::quaternionToAngles(media::rotationVectorToQuaternion(rotationalVelocity),
+ &pryprydt[3], &pryprydt[4], &pryprydt[5]);
}
- pryxyzdt[6] = isNewReference;
- pryxyzdt[7] = delayMs;
- for (size_t i = 0; i < 3; ++i) { // pitch, roll, yaw only. rotational velocity in rad/s.
- pryxyzdt[i] *= RAD_TO_DEGREE;
+ pryprydt[6] = isNewReference;
+ pryprydt[7] = delayMs;
+ for (size_t i = 0; i < 6; ++i) {
+ // pitch, roll, yaw in degrees, referenced to the world frame.
+ // d_pitch, d_roll, d_yaw rotational velocity in degrees/s, based on the world frame.
+ pryprydt[i] *= RAD_TO_DEGREE;
}
- mHeadSensorRecorder.record(pryxyzdt);
- mHeadSensorDurableRecorder.record(pryxyzdt);
+ mHeadSensorRecorder.record(pryprydt);
+ mHeadSensorDurableRecorder.record(pryprydt);
mProcessor->setWorldToHeadPose(timestamp, pose,
twist.value_or(Twist3f()) / kTicksPerSecond);
@@ -346,15 +375,16 @@
if (mHeadSensor == INVALID_SENSOR) {
ss += "HeadSensor: INVALID\n";
} else {
- base::StringAppendF(&ss, "HeadSensor: 0x%08x (active world-to-head) "
- "[ pitch, roll, yaw, vx, vy, vz, disc, delay ] "
- "(degrees, rad/s, bool, ms)\n", mHeadSensor);
+ base::StringAppendF(&ss, "HeadSensor: 0x%08x "
+ "(active world-to-head : head-relative velocity) "
+ "[ pitch, roll, yaw : d_pitch, d_roll, d_yaw : disc : delay ] "
+ "(degrees, degrees/s, bool, ms)\n", mHeadSensor);
ss.append(prefixSpace)
.append(" PerMinuteHistory:\n")
- .append(mHeadSensorDurableRecorder.toString(level + 2))
+ .append(mHeadSensorDurableRecorder.toString(level + 3))
.append(prefixSpace)
.append(" PerSecondHistory:\n")
- .append(mHeadSensorRecorder.toString(level + 2));
+ .append(mHeadSensorRecorder.toString(level + 3));
}
ss += prefixSpace;
@@ -362,14 +392,14 @@
ss += "ScreenSensor: INVALID\n";
} else {
base::StringAppendF(&ss, "ScreenSensor: 0x%08x (active world-to-screen) "
- "[ pitch, roll, yaw, delay ] "
+ "[ pitch, roll, yaw : delay ] "
"(degrees, ms)\n", mScreenSensor);
ss.append(prefixSpace)
.append(" PerMinuteHistory:\n")
- .append(mScreenSensorDurableRecorder.toString(level + 2))
+ .append(mScreenSensorDurableRecorder.toString(level + 3))
.append(prefixSpace)
.append(" PerSecondHistory:\n")
- .append(mScreenSensorRecorder.toString(level + 2));
+ .append(mScreenSensorRecorder.toString(level + 3));
}
ss += prefixSpace;
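The predictor now honors the audio.spatializer.prediction_duration_ms system property when it is set to a non-negative value and otherwise falls back to the compiled-in 120 ms default; the ms-to-ns multiplication in the lambda implies the predictor consumes nanoseconds. Below is a minimal sketch with the property read replaced by a plain parameter so it runs anywhere; Ticks here is a local stand-in.

#include <cassert>
#include <chrono>

using Ticks = std::chrono::nanoseconds;                      // stand-in, assumed 1 tick == 1 ns
constexpr auto kPredictionDuration = std::chrono::milliseconds(120);

// overrideMs plays the role of property_get_int32("audio.spatializer.prediction_duration_ms", -1).
float predictionDurationNs(int overrideMs) {
    if (overrideMs >= 0) {
        return overrideMs * 1'000'000LL;                     // property set: ms -> ns
    }
    return Ticks(kPredictionDuration).count();               // property unset: 120 ms default
}

int main() {
    assert(predictionDurationNs(-1) == 120'000'000.f);       // default applies
    assert(predictionDurationNs(50) == 50'000'000.f);        // 50 ms override
    return 0;
}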
diff --git a/services/audiopolicy/service/SpatializerPoseController.h b/services/audiopolicy/service/SpatializerPoseController.h
index ee2c2be..9d78188 100644
--- a/services/audiopolicy/service/SpatializerPoseController.h
+++ b/services/audiopolicy/service/SpatializerPoseController.h
@@ -133,14 +133,18 @@
bool mCalculated = false;
media::VectorRecorder mHeadSensorRecorder{
- 8 /* vectorSize */, std::chrono::seconds(1), 10 /* maxLogLine */};
+ 8 /* vectorSize */, std::chrono::seconds(1), 10 /* maxLogLine */,
+ { 3, 6, 7 } /* delimiterIdx */};
media::VectorRecorder mHeadSensorDurableRecorder{
- 8 /* vectorSize */, std::chrono::minutes(1), 10 /* maxLogLine */};
+ 8 /* vectorSize */, std::chrono::minutes(1), 10 /* maxLogLine */,
+ { 3, 6, 7 } /* delimiterIdx */};
media::VectorRecorder mScreenSensorRecorder{
- 4 /* vectorSize */, std::chrono::seconds(1), 10 /* maxLogLine */};
+ 4 /* vectorSize */, std::chrono::seconds(1), 10 /* maxLogLine */,
+ { 3 } /* delimiterIdx */};
media::VectorRecorder mScreenSensorDurableRecorder{
- 4 /* vectorSize */, std::chrono::minutes(1), 10 /* maxLogLine */};
+ 4 /* vectorSize */, std::chrono::minutes(1), 10 /* maxLogLine */,
+ { 3 } /* delimiterIdx */};
// It's important that mThread is the last variable in this class
// since we starts mThread in initializer list
diff --git a/services/audiopolicy/tests/audiopolicymanager_tests.cpp b/services/audiopolicy/tests/audiopolicymanager_tests.cpp
index ef829e1..412ab19 100644
--- a/services/audiopolicy/tests/audiopolicymanager_tests.cpp
+++ b/services/audiopolicy/tests/audiopolicymanager_tests.cpp
@@ -82,6 +82,14 @@
return criterion;
}
+// TODO b/182392769: use attribution source util
+AttributionSourceState createAttributionSourceState(uid_t uid) {
+ AttributionSourceState attributionSourceState;
+ attributionSourceState.uid = uid;
+ attributionSourceState.token = sp<BBinder>::make();
+ return attributionSourceState;
+}
+
} // namespace
TEST(AudioPolicyManagerTestInit, EngineFailure) {
@@ -271,10 +279,7 @@
AudioPolicyInterface::output_type_t outputType;
bool isSpatialized;
bool isBitPerfectInternal;
- // TODO b/182392769: use attribution source util
- AttributionSourceState attributionSource = AttributionSourceState();
- attributionSource.uid = uid;
- attributionSource.token = sp<BBinder>::make();
+ AttributionSourceState attributionSource = createAttributionSourceState(uid);
ASSERT_EQ(OK, mManager->getOutputForAttr(
&attr, output, session, &stream, attributionSource, &config, &flags,
selectedDeviceId, portId, {}, &outputType, &isSpatialized,
@@ -302,10 +307,7 @@
if (!portId) portId = &localPortId;
*portId = AUDIO_PORT_HANDLE_NONE;
AudioPolicyInterface::input_type_t inputType;
- // TODO b/182392769: use attribution source util
- AttributionSourceState attributionSource = AttributionSourceState();
- attributionSource.uid = 0;
- attributionSource.token = sp<BBinder>::make();
+ AttributionSourceState attributionSource = createAttributionSourceState(/*uid=*/ 0);
ASSERT_EQ(OK, mManager->getInputForAttr(
&attr, &input, riid, session, attributionSource, &config, flags,
selectedDeviceId, &inputType, portId));
@@ -1859,6 +1861,82 @@
/*expected_match=*/ false)
.withSessionId(TEST_SESSION_ID).withUsage(AUDIO_USAGE_MEDIA)));
+struct DPMmapTestParam {
+ DPMmapTestParam(int mixRouteFlags, audio_devices_t deviceType, const std::string& deviceAddress)
+ : mixRouteFlags(mixRouteFlags), deviceType(deviceType), deviceAddress(deviceAddress) {}
+
+ int mixRouteFlags;
+ audio_devices_t deviceType;
+ std::string deviceAddress;
+};
+
+class AudioPolicyManagerTestMMapPlaybackRerouting
+ : public AudioPolicyManagerTestDynamicPolicy,
+ public ::testing::WithParamInterface<DPMmapTestParam> {
+ protected:
+ void SetUp() override {
+ AudioPolicyManagerTestDynamicPolicy::SetUp();
+ audioConfig = AUDIO_CONFIG_INITIALIZER;
+ audioConfig.channel_mask = AUDIO_CHANNEL_OUT_STEREO;
+ audioConfig.format = AUDIO_FORMAT_PCM_16_BIT;
+ audioConfig.sample_rate = k48000SamplingRate;
+ }
+
+ audio_config_t audioConfig;
+ audio_io_handle_t mOutput;
+ audio_stream_type_t mStream = AUDIO_STREAM_DEFAULT;
+ audio_port_handle_t mSelectedDeviceId = AUDIO_PORT_HANDLE_NONE;
+ audio_port_handle_t mPortId;
+ AudioPolicyInterface::output_type_t mOutputType;
+ audio_attributes_t attr = AUDIO_ATTRIBUTES_INITIALIZER;
+ bool mIsSpatialized;
+ bool mIsBitPerfect;
+};
+
+TEST_P(AudioPolicyManagerTestMMapPlaybackRerouting, MmapPlaybackStreamMatchingDapMixFails) {
+ // Add mix matching the test uid.
+ const int testUid = 12345;
+ const auto param = GetParam();
+ status_t ret = addPolicyMix(MIX_TYPE_PLAYERS, param.mixRouteFlags, param.deviceType,
+ param.deviceAddress, audioConfig, {createUidCriterion(testUid)});
+ ASSERT_EQ(NO_ERROR, ret);
+
+ // Getting output for matching uid and mmap-ed stream should fail.
+ audio_output_flags_t outputFlags = AUDIO_OUTPUT_FLAG_MMAP_NOIRQ;
+ ASSERT_EQ(INVALID_OPERATION,
+ mManager->getOutputForAttr(&attr, &mOutput, AUDIO_SESSION_NONE, &mStream,
+ createAttributionSourceState(testUid), &audioConfig,
+ &outputFlags, &mSelectedDeviceId, &mPortId, {},
+ &mOutputType, &mIsSpatialized, &mIsBitPerfect));
+}
+
+TEST_P(AudioPolicyManagerTestMMapPlaybackRerouting, NonMmapPlaybackStreamMatchingDapMixSucceeds) {
+ // Add mix matching the test uid.
+ const int testUid = 12345;
+ const auto param = GetParam();
+ status_t ret = addPolicyMix(MIX_TYPE_PLAYERS, param.mixRouteFlags, param.deviceType,
+ param.deviceAddress, audioConfig, {createUidCriterion(testUid)});
+ ASSERT_EQ(NO_ERROR, ret);
+
+ // Getting output for matching uid should succeed for non-mmap-ed stream.
+ audio_output_flags_t outputFlags = AUDIO_OUTPUT_FLAG_NONE;
+ ASSERT_EQ(NO_ERROR,
+ mManager->getOutputForAttr(&attr, &mOutput, AUDIO_SESSION_NONE, &mStream,
+ createAttributionSourceState(testUid), &audioConfig,
+ &outputFlags, &mSelectedDeviceId, &mPortId, {},
+ &mOutputType, &mIsSpatialized, &mIsBitPerfect));
+}
+
+INSTANTIATE_TEST_SUITE_P(
+ MmapPlaybackRerouting, AudioPolicyManagerTestMMapPlaybackRerouting,
+ testing::Values(DPMmapTestParam(MIX_ROUTE_FLAG_LOOP_BACK, AUDIO_DEVICE_OUT_REMOTE_SUBMIX,
+ /*deviceAddress=*/"remote_submix_media"),
+ DPMmapTestParam(MIX_ROUTE_FLAG_LOOP_BACK_AND_RENDER,
+ AUDIO_DEVICE_OUT_REMOTE_SUBMIX,
+ /*deviceAddress=*/"remote_submix_media"),
+ DPMmapTestParam(MIX_ROUTE_FLAG_RENDER, AUDIO_DEVICE_OUT_SPEAKER,
+ /*deviceAddress=*/"")));
+
class AudioPolicyManagerTestDPMixRecordInjection : public AudioPolicyManagerTestDynamicPolicy,
public testing::WithParamInterface<DPTestParam> {
protected:
@@ -2018,19 +2096,19 @@
// Connecting a valid output device with valid parameters should trigger a routing update
ASSERT_EQ(NO_ERROR, mManager->setDeviceConnectionState(
AUDIO_DEVICE_OUT_BLUETOOTH_SCO, AUDIO_POLICY_DEVICE_STATE_AVAILABLE,
- "a", "b", AUDIO_FORMAT_DEFAULT));
+ "00:11:22:33:44:55", "b", AUDIO_FORMAT_DEFAULT));
ASSERT_EQ(1, mClient->getRoutingUpdatedCounter());
// Disconnecting a connected device should succeed and trigger a routing update
ASSERT_EQ(NO_ERROR, mManager->setDeviceConnectionState(
AUDIO_DEVICE_OUT_BLUETOOTH_SCO, AUDIO_POLICY_DEVICE_STATE_UNAVAILABLE,
- "a", "b", AUDIO_FORMAT_DEFAULT));
+ "00:11:22:33:44:55", "b", AUDIO_FORMAT_DEFAULT));
ASSERT_EQ(2, mClient->getRoutingUpdatedCounter());
// Disconnecting a disconnected device should fail and not trigger a routing update
ASSERT_EQ(INVALID_OPERATION, mManager->setDeviceConnectionState(
AUDIO_DEVICE_OUT_BLUETOOTH_SCO, AUDIO_POLICY_DEVICE_STATE_UNAVAILABLE,
- "a", "b", AUDIO_FORMAT_DEFAULT));
+ "00:11:22:33:44:55", "b", AUDIO_FORMAT_DEFAULT));
ASSERT_EQ(2, mClient->getRoutingUpdatedCounter());
// Changing force use should trigger an update
@@ -2158,9 +2236,9 @@
DeviceConnectionTestParams({AUDIO_DEVICE_OUT_HDMI, "test_out_hdmi",
"audio_policy_test_out_hdmi"}),
DeviceConnectionTestParams({AUDIO_DEVICE_IN_BLUETOOTH_SCO_HEADSET, "bt_hfp_in",
- "hfp_client_in"}),
+ "00:11:22:33:44:55"}),
DeviceConnectionTestParams({AUDIO_DEVICE_OUT_BLUETOOTH_SCO, "bt_hfp_out",
- "hfp_client_out"})
+ "00:11:22:33:44:55"})
)
);
@@ -2833,6 +2911,108 @@
mManager->getDevicesForRoleAndCapturePreset(audioSource, role, devices));
}
+TEST_F(AudioPolicyManagerDevicesRoleForCapturePresetTest, PreferredDeviceUsedForInput) {
+ const audio_source_t source = AUDIO_SOURCE_MIC;
+ const device_role_t role = DEVICE_ROLE_PREFERRED;
+ const std::string address = "card=1;device=0";
+ const std::string deviceName = "randomName";
+
+ ASSERT_EQ(NO_ERROR, mManager->setDeviceConnectionState(
+ AUDIO_DEVICE_IN_USB_DEVICE, AUDIO_POLICY_DEVICE_STATE_AVAILABLE,
+ address.c_str(), deviceName.c_str(), AUDIO_FORMAT_DEFAULT));
+ auto availableDevices = mManager->getAvailableInputDevices();
+ ASSERT_GT(availableDevices.size(), 1);
+
+ audio_attributes_t attr = AUDIO_ATTRIBUTES_INITIALIZER;
+ attr.source = source;
+ audio_port_handle_t selectedDeviceId = AUDIO_PORT_HANDLE_NONE;
+ ASSERT_NO_FATAL_FAILURE(getInputForAttr(attr, AUDIO_SESSION_NONE, 1, &selectedDeviceId,
+ AUDIO_FORMAT_PCM_16_BIT, AUDIO_CHANNEL_IN_STEREO,
+ 48000));
+ auto selectedDevice = availableDevices.getDeviceFromId(selectedDeviceId);
+ ASSERT_NE(nullptr, selectedDevice);
+
+ sp<DeviceDescriptor> preferredDevice = nullptr;
+ for (const auto& device : availableDevices) {
+ if (device != selectedDevice) {
+ preferredDevice = device;
+ break;
+ }
+ }
+ ASSERT_NE(nullptr, preferredDevice);
+ // After setting preferred device for capture preset, the selected device for input should be
+ // the preferred device.
+ ASSERT_EQ(NO_ERROR,
+ mManager->setDevicesRoleForCapturePreset(source, role,
+ {preferredDevice->getDeviceTypeAddr()}));
+ selectedDeviceId = AUDIO_PORT_HANDLE_NONE;
+ ASSERT_NO_FATAL_FAILURE(getInputForAttr(attr, AUDIO_SESSION_NONE, 1, &selectedDeviceId,
+ AUDIO_FORMAT_PCM_16_BIT, AUDIO_CHANNEL_IN_STEREO,
+ 48000));
+ ASSERT_EQ(preferredDevice, availableDevices.getDeviceFromId(selectedDeviceId));
+
+ // After clearing preferred device for capture preset, the selected device for input should be
+ // the same as original one.
+ ASSERT_EQ(NO_ERROR,
+ mManager->clearDevicesRoleForCapturePreset(source, role));
+ selectedDeviceId = AUDIO_PORT_HANDLE_NONE;
+ ASSERT_NO_FATAL_FAILURE(getInputForAttr(attr, AUDIO_SESSION_NONE, 1, &selectedDeviceId,
+ AUDIO_FORMAT_PCM_16_BIT, AUDIO_CHANNEL_IN_STEREO,
+ 48000));
+ ASSERT_EQ(selectedDevice, availableDevices.getDeviceFromId(selectedDeviceId));
+
+ ASSERT_EQ(NO_ERROR, mManager->setDeviceConnectionState(
+ AUDIO_DEVICE_IN_USB_DEVICE, AUDIO_POLICY_DEVICE_STATE_UNAVAILABLE,
+ address.c_str(), deviceName.c_str(), AUDIO_FORMAT_DEFAULT));
+}
+
+TEST_F(AudioPolicyManagerDevicesRoleForCapturePresetTest, DisabledDeviceNotUsedForInput) {
+ const audio_source_t source = AUDIO_SOURCE_MIC;
+ const device_role_t role = DEVICE_ROLE_DISABLED;
+ const std::string address = "card=1;device=0";
+ const std::string deviceName = "randomName";
+
+ ASSERT_EQ(NO_ERROR, mManager->setDeviceConnectionState(
+ AUDIO_DEVICE_IN_USB_DEVICE, AUDIO_POLICY_DEVICE_STATE_AVAILABLE,
+ address.c_str(), deviceName.c_str(), AUDIO_FORMAT_DEFAULT));
+ auto availableDevices = mManager->getAvailableInputDevices();
+ ASSERT_GT(availableDevices.size(), 1);
+
+ audio_attributes_t attr = AUDIO_ATTRIBUTES_INITIALIZER;
+ attr.source = source;
+ audio_port_handle_t selectedDeviceId = AUDIO_PORT_HANDLE_NONE;
+ ASSERT_NO_FATAL_FAILURE(getInputForAttr(attr, AUDIO_SESSION_NONE, 1, &selectedDeviceId,
+ AUDIO_FORMAT_PCM_16_BIT, AUDIO_CHANNEL_IN_STEREO,
+ 48000));
+ auto selectedDevice = availableDevices.getDeviceFromId(selectedDeviceId);
+ ASSERT_NE(nullptr, selectedDevice);
+
+ // After setting disabled device for capture preset, the disabled device must not be
+ // selected for input.
+ ASSERT_EQ(NO_ERROR,
+ mManager->setDevicesRoleForCapturePreset(source, role,
+ {selectedDevice->getDeviceTypeAddr()}));
+ selectedDeviceId = AUDIO_PORT_HANDLE_NONE;
+ ASSERT_NO_FATAL_FAILURE(getInputForAttr(attr, AUDIO_SESSION_NONE, 1, &selectedDeviceId,
+ AUDIO_FORMAT_PCM_16_BIT, AUDIO_CHANNEL_IN_STEREO,
+ 48000));
+ ASSERT_NE(selectedDevice, availableDevices.getDeviceFromId(selectedDeviceId));
+
+ // After clearing disabled device for capture preset, the selected device for input should be
+ // the original one.
+ ASSERT_EQ(NO_ERROR,
+ mManager->clearDevicesRoleForCapturePreset(source, role));
+ selectedDeviceId = AUDIO_PORT_HANDLE_NONE;
+ ASSERT_NO_FATAL_FAILURE(getInputForAttr(attr, AUDIO_SESSION_NONE, 1, &selectedDeviceId,
+ AUDIO_FORMAT_PCM_16_BIT, AUDIO_CHANNEL_IN_STEREO,
+ 48000));
+ ASSERT_EQ(selectedDevice, availableDevices.getDeviceFromId(selectedDeviceId));
+
+ ASSERT_EQ(NO_ERROR, mManager->setDeviceConnectionState(
+ AUDIO_DEVICE_IN_USB_DEVICE, AUDIO_POLICY_DEVICE_STATE_UNAVAILABLE,
+ address.c_str(), deviceName.c_str(), AUDIO_FORMAT_DEFAULT));
+}
+
INSTANTIATE_TEST_CASE_P(
DevicesRoleForCapturePresetOperation,
AudioPolicyManagerDevicesRoleForCapturePresetTest,
diff --git a/services/audiopolicy/tests/resources/test_audio_policy_configuration.xml b/services/audiopolicy/tests/resources/test_audio_policy_configuration.xml
index 2eb771d..50ca26a 100644
--- a/services/audiopolicy/tests/resources/test_audio_policy_configuration.xml
+++ b/services/audiopolicy/tests/resources/test_audio_policy_configuration.xml
@@ -77,12 +77,14 @@
</devicePort>
<devicePort tagName="USB Device Out" type="AUDIO_DEVICE_OUT_USB_DEVICE" role="sink">
</devicePort>
+ <devicePort tagName="USB Device In" type="AUDIO_DEVICE_IN_USB_DEVICE" role="source">
+ </devicePort>
</devicePorts>
<routes>
<route type="mix" sink="Speaker"
sources="primary output,voip_rx"/>
<route type="mix" sink="primary input"
- sources="Built-In Mic,Hdmi-In Mic"/>
+ sources="Built-In Mic,Hdmi-In Mic,USB Device In"/>
<route type="mix" sink="voip_tx"
sources="Built-In Mic"/>
<route type="mix" sink="Hdmi"
diff --git a/services/camera/libcameraservice/Android.bp b/services/camera/libcameraservice/Android.bp
index 1c922ce..e818759 100644
--- a/services/camera/libcameraservice/Android.bp
+++ b/services/camera/libcameraservice/Android.bp
@@ -120,6 +120,7 @@
],
shared_libs: [
+ "libactivitymanager_aidl",
"libbase",
"libdl",
"libexif",
diff --git a/services/camera/libcameraservice/CameraService.cpp b/services/camera/libcameraservice/CameraService.cpp
index c812cd7..1564ff3 100644
--- a/services/camera/libcameraservice/CameraService.cpp
+++ b/services/camera/libcameraservice/CameraService.cpp
@@ -69,6 +69,8 @@
#include <private/android_filesystem_config.h>
#include <system/camera_vendor_tags.h>
#include <system/camera_metadata.h>
+#include <binder/IServiceManager.h>
+#include <binder/IActivityManager.h>
#include <system/camera.h>
@@ -137,6 +139,8 @@
"android.permission.CAMERA_OPEN_CLOSE_LISTENER");
static const String16
sCameraInjectExternalCameraPermission("android.permission.CAMERA_INJECT_EXTERNAL_CAMERA");
+// Constant integer for FGS Logging, used to denote the API type for logger
+static const int LOG_FGS_CAMERA_API = 1;
const char *sFileName = "lastOpenSessionDumpFile";
static constexpr int32_t kSystemNativeClientScore = resource_policy::PERCEPTIBLE_APP_ADJ;
static constexpr int32_t kSystemNativeClientState =
@@ -1024,7 +1028,7 @@
int api1CameraId, int facing, int sensorOrientation, int clientPid, uid_t clientUid,
int servicePid, std::pair<int, IPCTransport> deviceVersionAndTransport,
apiLevel effectiveApiLevel, bool overrideForPerfClass, bool overrideToPortrait,
- /*out*/sp<BasicClient>* client) {
+ bool forceSlowJpegMode, /*out*/sp<BasicClient>* client) {
// For HIDL devices
if (deviceVersionAndTransport.second == IPCTransport::HIDL) {
// Create CameraClient based on device version reported by the HAL.
@@ -1057,9 +1061,10 @@
sp<ICameraClient> tmp = static_cast<ICameraClient*>(cameraCb.get());
*client = new Camera2Client(cameraService, tmp, cameraService->mCameraServiceProxyWrapper,
packageName, featureId, cameraId, api1CameraId, facing, sensorOrientation,
- clientPid, clientUid, servicePid, overrideForPerfClass, overrideToPortrait);
- ALOGI("%s: Camera1 API (legacy), override to portrait %d", __FUNCTION__,
- overrideToPortrait);
+ clientPid, clientUid, servicePid, overrideForPerfClass, overrideToPortrait,
+ forceSlowJpegMode);
+ ALOGI("%s: Camera1 API (legacy), override to portrait %d, forceSlowJpegMode %d",
+ __FUNCTION__, overrideToPortrait, forceSlowJpegMode);
} else { // Camera2 API route
sp<hardware::camera2::ICameraDeviceCallbacks> tmp =
static_cast<hardware::camera2::ICameraDeviceCallbacks*>(cameraCb.get());
@@ -1157,7 +1162,8 @@
sp<ICameraClient>{nullptr}, id, cameraId,
internalPackageName, /*systemNativeClient*/ false, {}, uid, USE_CALLING_PID,
API_1, /*shimUpdateOnly*/ true, /*oomScoreOffset*/ 0,
- /*targetSdkVersion*/ __ANDROID_API_FUTURE__, /*overrideToPortrait*/ true, /*out*/ tmp)
+ /*targetSdkVersion*/ __ANDROID_API_FUTURE__, /*overrideToPortrait*/ true,
+ /*forceSlowJpegMode*/false, /*out*/ tmp)
).isOk()) {
ALOGE("%s: Error initializing shim metadata: %s", __FUNCTION__, ret.toString8().string());
}
@@ -1682,6 +1688,7 @@
int clientPid,
int targetSdkVersion,
bool overrideToPortrait,
+ bool forceSlowJpegMode,
/*out*/
sp<ICamera>* device) {
@@ -1693,7 +1700,7 @@
ret = connectHelper<ICameraClient,Client>(cameraClient, id, api1CameraId,
clientPackageName,/*systemNativeClient*/ false, {}, clientUid, clientPid, API_1,
/*shimUpdateOnly*/ false, /*oomScoreOffset*/ 0, targetSdkVersion,
- overrideToPortrait, /*out*/client);
+ overrideToPortrait, forceSlowJpegMode, /*out*/client);
if(!ret.isOk()) {
logRejected(id, CameraThreadState::getCallingPid(), String8(clientPackageName),
@@ -1702,6 +1709,15 @@
}
*device = client;
+
+ const sp<IServiceManager> sm(defaultServiceManager());
+ const auto& mActivityManager = getActivityManager();
+ if (mActivityManager) {
+ mActivityManager->logFgsApiBegin(LOG_FGS_CAMERA_API,
+ CameraThreadState::getCallingUid(),
+ CameraThreadState::getCallingPid());
+ }
+
return ret;
}
@@ -1823,7 +1839,8 @@
ret = connectHelper<hardware::camera2::ICameraDeviceCallbacks,CameraDeviceClient>(cameraCb, id,
/*api1CameraId*/-1, clientPackageNameAdj, systemNativeClient,clientFeatureId,
clientUid, USE_CALLING_PID, API_2, /*shimUpdateOnly*/ false, oomScoreOffset,
- targetSdkVersion, overrideToPortrait, /*out*/client);
+ targetSdkVersion, overrideToPortrait, /*forceSlowJpegMode*/false,
+ /*out*/client);
if(!ret.isOk()) {
logRejected(id, callingPid, String8(clientPackageNameAdj), ret.toString8());
@@ -1847,6 +1864,13 @@
ALOGE("%s: Error while creating the file: %s", __FUNCTION__, sFileName);
}
}
+ const sp<IServiceManager> sm(defaultServiceManager());
+ const auto& mActivityManager = getActivityManager();
+ if (mActivityManager) {
+ mActivityManager->logFgsApiBegin(LOG_FGS_CAMERA_API,
+ CameraThreadState::getCallingUid(),
+ CameraThreadState::getCallingPid());
+ }
return ret;
}
@@ -1885,7 +1909,8 @@
int api1CameraId, const String16& clientPackageNameMaybe, bool systemNativeClient,
const std::optional<String16>& clientFeatureId, int clientUid, int clientPid,
apiLevel effectiveApiLevel, bool shimUpdateOnly, int oomScoreOffset, int targetSdkVersion,
- bool overrideToPortrait, /*out*/sp<CLIENT>& device) {
+ bool overrideToPortrait, bool forceSlowJpegMode,
+ /*out*/sp<CLIENT>& device) {
binder::Status ret = binder::Status::ok();
bool isNonSystemNdk = false;
@@ -2001,7 +2026,8 @@
clientFeatureId, cameraId, api1CameraId, facing, orientation,
clientPid, clientUid, getpid(),
deviceVersionAndTransport, effectiveApiLevel, overrideForPerfClass,
- overrideToPortrait, /*out*/&tmp)).isOk()) {
+ overrideToPortrait, forceSlowJpegMode,
+ /*out*/&tmp)).isOk()) {
return ret;
}
client = static_cast<CLIENT*>(tmp.get());
@@ -3511,6 +3537,13 @@
// client shouldn't be able to call into us anymore
mClientPid = 0;
+ const auto& mActivityManager = getActivityManager();
+ if (mActivityManager) {
+ mActivityManager->logFgsApiEnd(LOG_FGS_CAMERA_API,
+ CameraThreadState::getCallingUid(),
+ CameraThreadState::getCallingPid());
+ }
+
return res;
}
@@ -3823,8 +3856,7 @@
// ----------------------------------------------------------------------------
void CameraService::Client::notifyError(int32_t errorCode,
- const CaptureResultExtras& resultExtras) {
- (void) resultExtras;
+ [[maybe_unused]] const CaptureResultExtras& resultExtras) {
if (mRemoteCallback != NULL) {
int32_t api1ErrorCode = CAMERA_ERROR_RELEASED;
if (errorCode == hardware::camera2::ICameraDeviceCallbacks::ERROR_CAMERA_DISABLED) {
diff --git a/services/camera/libcameraservice/CameraService.h b/services/camera/libcameraservice/CameraService.h
index 59c5534..d8b14d7 100644
--- a/services/camera/libcameraservice/CameraService.h
+++ b/services/camera/libcameraservice/CameraService.h
@@ -29,6 +29,8 @@
#include <binder/ActivityManager.h>
#include <binder/AppOpsManager.h>
#include <binder/BinderService.h>
+#include <binder/IServiceManager.h>
+#include <binder/IActivityManager.h>
#include <binder/IAppOpsCallback.h>
#include <binder/IUidObserver.h>
#include <hardware/camera.h>
@@ -152,7 +154,7 @@
virtual binder::Status connect(const sp<hardware::ICameraClient>& cameraClient,
int32_t cameraId, const String16& clientPackageName,
int32_t clientUid, int clientPid, int targetSdkVersion,
- bool overrideToPortrait,
+ bool overrideToPortrait, bool forceSlowJpegMode,
/*out*/
sp<hardware::ICamera>* device) override;
@@ -596,6 +598,20 @@
private:
+ // TODO: b/263304156 update this to make use of a death callback for more
+ // robust/fault tolerant logging
+ static const sp<IActivityManager>& getActivityManager() {
+ static const char* kActivityService = "activity";
+ static const auto activityManager = []() -> sp<IActivityManager> {
+ const sp<IServiceManager> sm(defaultServiceManager());
+ if (sm != nullptr) {
+ return interface_cast<IActivityManager>(sm->checkService(String16(kActivityService)));
+ }
+ return nullptr;
+ }();
+ return activityManager;
+ }
+
/**
* Typesafe version of device status, containing both the HAL-layer and the service interface-
* layer values.
@@ -887,7 +903,8 @@
int api1CameraId, const String16& clientPackageNameMaybe, bool systemNativeClient,
const std::optional<String16>& clientFeatureId, int clientUid, int clientPid,
apiLevel effectiveApiLevel, bool shimUpdateOnly, int scoreOffset, int targetSdkVersion,
- bool overrideToPortrait, /*out*/sp<CLIENT>& device);
+ bool overrideToPortrait, bool forceSlowJpegMode,
+ /*out*/sp<CLIENT>& device);
// Lock guarding camera service state
Mutex mServiceLock;
@@ -1340,7 +1357,8 @@
const String8& cameraId, int api1CameraId, int facing, int sensorOrientation,
int clientPid, uid_t clientUid, int servicePid,
std::pair<int, IPCTransport> deviceVersionAndIPCTransport, apiLevel effectiveApiLevel,
- bool overrideForPerfClass, bool overrideToPortrait, /*out*/sp<BasicClient>* client);
+ bool overrideForPerfClass, bool overrideToPortrait, bool forceSlowJpegMode,
+ /*out*/sp<BasicClient>* client);
status_t checkCameraAccess(const String16& opPackageName);
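getActivityManager() above caches the binder lookup in a function-local static initialized by an immediately-invoked lambda, so checkService() runs at most once per process and every caller must still tolerate a null handle. A generic, self-contained sketch of the same pattern follows, with stand-in types rather than libbinder.

#include <cstdio>
#include <memory>
#include <string>

struct Service { std::string name; };

// Stand-in for defaultServiceManager()->checkService(); may legitimately return nullptr.
std::shared_ptr<Service> lookupService(const std::string& name) {
    return std::make_shared<Service>(Service{name});
}

const std::shared_ptr<Service>& getCachedService() {
    // Function-local static + immediately-invoked lambda: the lookup happens once,
    // and the result (even nullptr) is cached for the lifetime of the process.
    static const std::shared_ptr<Service> service = [] { return lookupService("activity"); }();
    return service;
}

int main() {
    if (const auto& s = getCachedService()) {
        std::printf("got service: %s\n", s->name.c_str());   // real code must handle nullptr too
    }
    return 0;
}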
diff --git a/services/camera/libcameraservice/aidl/AidlCameraDeviceUser.cpp b/services/camera/libcameraservice/aidl/AidlCameraDeviceUser.cpp
index b9f1224..402f8a2 100644
--- a/services/camera/libcameraservice/aidl/AidlCameraDeviceUser.cpp
+++ b/services/camera/libcameraservice/aidl/AidlCameraDeviceUser.cpp
@@ -19,6 +19,7 @@
#include "AidlCameraDeviceUser.h"
#include <aidl/AidlUtils.h>
#include <aidl/android/frameworks/cameraservice/device/CaptureMetadataInfo.h>
+#include <android-base/properties.h>
namespace android::frameworks::cameraservice::device::implementation {
@@ -35,7 +36,7 @@
using ::android::hardware::cameraservice::utils::conversion::aidl::cloneToAidl;
using ::android::hardware::cameraservice::utils::conversion::aidl::convertFromAidl;
using ::android::hardware::cameraservice::utils::conversion::aidl::convertToAidl;
-using ::android::hardware::cameraservice::utils::conversion::aidl::convertToAidl;
+using ::android::hardware::cameraservice::utils::conversion::aidl::filterVndkKeys;
using ::ndk::ScopedAStatus;
namespace {
@@ -55,6 +56,7 @@
AidlCameraDeviceUser::AidlCameraDeviceUser(const sp<UICameraDeviceUser>& deviceRemote):
mDeviceRemote(deviceRemote) {
mInitSuccess = initDevice();
+ mVndkVersion = base::GetIntProperty("ro.vndk.version", __ANDROID_API_FUTURE__);
}
bool AidlCameraDeviceUser::initDevice() {
@@ -171,6 +173,13 @@
ALOGE("%s: Failed to create default request: %s", __FUNCTION__, ret.toString8().string());
return fromUStatus(ret);
}
+
+ if (filterVndkKeys(mVndkVersion, metadata, /*isStatic*/false) != OK) {
+ ALOGE("%s: Unable to filter vndk metadata keys for version %d",
+ __FUNCTION__, mVndkVersion);
+ return fromSStatus(SStatus::UNKNOWN_ERROR);
+ }
+
const camera_metadata_t* rawMetadata = metadata.getAndLock();
cloneToAidl(rawMetadata, _aidl_return);
metadata.unlock(rawMetadata);
diff --git a/services/camera/libcameraservice/aidl/AidlCameraDeviceUser.h b/services/camera/libcameraservice/aidl/AidlCameraDeviceUser.h
index afff197..8014951 100644
--- a/services/camera/libcameraservice/aidl/AidlCameraDeviceUser.h
+++ b/services/camera/libcameraservice/aidl/AidlCameraDeviceUser.h
@@ -109,6 +109,7 @@
std::shared_ptr<CaptureResultMetadataQueue> mCaptureResultMetadataQueue = nullptr;
bool mInitSuccess = false;
int32_t mRequestId = REQUEST_ID_NONE;
+ int mVndkVersion = -1;
};
} // namespace android::frameworks::cameraservice::device::implementation
diff --git a/services/camera/libcameraservice/api1/Camera2Client.cpp b/services/camera/libcameraservice/api1/Camera2Client.cpp
index 23a70db..d71462f 100644
--- a/services/camera/libcameraservice/api1/Camera2Client.cpp
+++ b/services/camera/libcameraservice/api1/Camera2Client.cpp
@@ -63,7 +63,8 @@
uid_t clientUid,
int servicePid,
bool overrideForPerfClass,
- bool overrideToPortrait):
+ bool overrideToPortrait,
+ bool forceSlowJpegMode):
Camera2ClientBase(cameraService, cameraClient, cameraServiceProxyWrapper, clientPackageName,
false/*systemNativeClient - since no ndk for api1*/, clientFeatureId,
cameraDeviceId, api1CameraId, cameraFacing, sensorOrientation, clientPid,
@@ -79,6 +80,9 @@
SharedParameters::Lock l(mParameters);
l.mParameters.state = Parameters::DISCONNECTED;
+ if (forceSlowJpegMode) {
+ l.mParameters.isSlowJpegModeForced = true;
+ }
}
status_t Camera2Client::initialize(sp<CameraProviderManager> manager, const String8& monitorTags) {
@@ -1359,21 +1363,18 @@
|| l.mParameters.state == Parameters::VIDEO_SNAPSHOT);
}
-void Camera2Client::releaseRecordingFrame(const sp<IMemory>& mem) {
- (void)mem;
+void Camera2Client::releaseRecordingFrame([[maybe_unused]] const sp<IMemory>& mem) {
ATRACE_CALL();
ALOGW("%s: Not supported in buffer queue mode.", __FUNCTION__);
}
-void Camera2Client::releaseRecordingFrameHandle(native_handle_t *handle) {
- (void)handle;
+void Camera2Client::releaseRecordingFrameHandle([[maybe_unused]] native_handle_t *handle) {
ATRACE_CALL();
ALOGW("%s: Not supported in buffer queue mode.", __FUNCTION__);
}
void Camera2Client::releaseRecordingFrameHandleBatch(
- const std::vector<native_handle_t*>& handles) {
- (void)handles;
+ [[maybe_unused]] const std::vector<native_handle_t*>& handles) {
ATRACE_CALL();
ALOGW("%s: Not supported in buffer queue mode.", __FUNCTION__);
}
diff --git a/services/camera/libcameraservice/api1/Camera2Client.h b/services/camera/libcameraservice/api1/Camera2Client.h
index f035fea..6d7651f 100644
--- a/services/camera/libcameraservice/api1/Camera2Client.h
+++ b/services/camera/libcameraservice/api1/Camera2Client.h
@@ -114,7 +114,8 @@
uid_t clientUid,
int servicePid,
bool overrideForPerfClass,
- bool overrideToPortrait);
+ bool overrideToPortrait,
+ bool forceSlowJpegMode);
virtual ~Camera2Client();
diff --git a/services/camera/libcameraservice/api1/client2/Parameters.cpp b/services/camera/libcameraservice/api1/client2/Parameters.cpp
index 8b2af90..23570c2 100644
--- a/services/camera/libcameraservice/api1/client2/Parameters.cpp
+++ b/services/camera/libcameraservice/api1/client2/Parameters.cpp
@@ -990,9 +990,8 @@
Size maxJpegSize = getMaxSize(getAvailableJpegSizes());
int64_t minFrameDurationNs = getJpegStreamMinFrameDurationNs(maxJpegSize);
- slowJpegMode = false;
- if (minFrameDurationNs > kSlowJpegModeThreshold) {
- slowJpegMode = true;
+ slowJpegMode = isSlowJpegModeForced || minFrameDurationNs > kSlowJpegModeThreshold;
+ if (slowJpegMode) {
// Slow jpeg devices do not support video snapshot without
// slowing down preview.
// TODO: support video size video snapshot only?
@@ -2089,7 +2088,7 @@
paramsFlattened = newParams.flatten();
params = newParams;
- slowJpegMode = false;
+ slowJpegMode = isSlowJpegModeForced;
Size pictureSize = { pictureWidth, pictureHeight };
bool zslFrameRateSupported = false;
int64_t jpegMinFrameDurationNs = getJpegStreamMinFrameDurationNs(pictureSize);
diff --git a/services/camera/libcameraservice/api1/client2/Parameters.h b/services/camera/libcameraservice/api1/client2/Parameters.h
index fd18a5d..2bd3d43 100644
--- a/services/camera/libcameraservice/api1/client2/Parameters.h
+++ b/services/camera/libcameraservice/api1/client2/Parameters.h
@@ -183,6 +183,8 @@
bool isDeviceZslSupported;
// Whether the device supports geometric distortion correction
bool isDistortionCorrectionSupported;
+ // Whether slowJpegMode is forced regardless of jpeg stream FPS
+ bool isSlowJpegModeForced;
// Overall camera state
enum State {
diff --git a/services/camera/libcameraservice/api2/CameraDeviceClient.cpp b/services/camera/libcameraservice/api2/CameraDeviceClient.cpp
index 18b28b8..55b0f03 100644
--- a/services/camera/libcameraservice/api2/CameraDeviceClient.cpp
+++ b/services/camera/libcameraservice/api2/CameraDeviceClient.cpp
@@ -61,7 +61,7 @@
bool systemNativeClient,
const std::optional<String16>& clientFeatureId,
const String8& cameraId,
- int api1CameraId,
+ [[maybe_unused]] int api1CameraId,
int cameraFacing,
int sensorOrientation,
int clientPid,
@@ -81,8 +81,6 @@
servicePid,
overrideToPortrait),
mRemoteCallback(remoteCallback) {
- // We don't need it for API2 clients, but Camera2ClientBase requires it.
- (void) api1CameraId;
}
// Interface used by CameraService
@@ -191,11 +189,11 @@
// Cache physical camera ids corresponding to this device and also the high
// resolution sensors in this device + physical camera ids
mProviderManager->isLogicalCamera(mCameraIdStr.string(), &mPhysicalCameraIds);
- if (isUltraHighResolutionSensor(mCameraIdStr)) {
+ if (supportsUltraHighResolutionCapture(mCameraIdStr)) {
mHighResolutionSensors.insert(mCameraIdStr.string());
}
for (auto &physicalId : mPhysicalCameraIds) {
- if (isUltraHighResolutionSensor(String8(physicalId.c_str()))) {
+ if (supportsUltraHighResolutionCapture(String8(physicalId.c_str()))) {
mHighResolutionSensors.insert(physicalId.c_str());
}
}
@@ -958,7 +956,8 @@
camera3::DepthCompositeStream::isDepthCompositeStream(surfaces[0]);
bool isHeicCompositeStream = camera3::HeicCompositeStream::isHeicCompositeStream(surfaces[0]);
bool isJpegRCompositeStream =
- camera3::JpegRCompositeStream::isJpegRCompositeStream(surfaces[0]);
+ camera3::JpegRCompositeStream::isJpegRCompositeStream(surfaces[0]) &&
+ !mDevice->supportNativeJpegR();
if (isDepthCompositeStream || isHeicCompositeStream || isJpegRCompositeStream) {
sp<CompositeStream> compositeStream;
if (isDepthCompositeStream) {
@@ -1073,7 +1072,7 @@
outputConfiguration.getSensorPixelModesUsed();
if (SessionConfigurationUtils::checkAndOverrideSensorPixelModesUsed(
sensorPixelModesUsed, format, width, height, getStaticInfo(cameraIdUsed),
- /*allowRounding*/ false, &overriddenSensorPixelModesUsed) != OK) {
+ &overriddenSensorPixelModesUsed) != OK) {
return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT,
"sensor pixel modes used not valid for deferred stream");
}
@@ -1843,7 +1842,8 @@
sp<Surface> s = new Surface(gbp, false /*controlledByApp*/);
isCompositeStream = camera3::DepthCompositeStream::isDepthCompositeStream(s) ||
camera3::HeicCompositeStream::isHeicCompositeStream(s) ||
- camera3::JpegRCompositeStream::isJpegRCompositeStream(s);
+ (camera3::JpegRCompositeStream::isJpegRCompositeStream(s) &&
+ !mDevice->supportNativeJpegR());
if (isCompositeStream) {
auto compositeIdx = mCompositeStreamMap.indexOfKey(IInterface::asBinder(gbp));
if (compositeIdx == NAME_NOT_FOUND) {
@@ -2026,8 +2026,20 @@
if (remoteCb != 0) {
remoteCb->onDeviceIdle();
}
+
+ std::vector<hardware::CameraStreamStats> fullStreamStats = streamStats;
+ {
+ Mutex::Autolock l(mCompositeLock);
+ for (size_t i = 0; i < mCompositeStreamMap.size(); i++) {
+ hardware::CameraStreamStats compositeStats;
+ mCompositeStreamMap.valueAt(i)->getStreamStats(&compositeStats);
+ if (compositeStats.mWidth > 0) {
+ fullStreamStats.push_back(compositeStats);
+ }
+ }
+ }
Camera2ClientBase::notifyIdleWithUserTag(requestCount, resultErrorCount, deviceError,
- streamStats, mUserTag, mVideoStabilizationMode);
+ fullStreamStats, mUserTag, mVideoStabilizationMode);
}
void CameraDeviceClient::notifyShutter(const CaptureResultExtras& resultExtras,
@@ -2247,9 +2259,9 @@
return mDevice->infoPhysical(cameraId);
}
-bool CameraDeviceClient::isUltraHighResolutionSensor(const String8 &cameraId) {
+bool CameraDeviceClient::supportsUltraHighResolutionCapture(const String8 &cameraId) {
const CameraMetadata &deviceInfo = getStaticInfo(cameraId);
- return SessionConfigurationUtils::isUltraHighResolutionSensor(deviceInfo);
+ return SessionConfigurationUtils::supportsUltraHighResolutionCapture(deviceInfo);
}
bool CameraDeviceClient::isSensorPixelModeConsistent(
diff --git a/services/camera/libcameraservice/api2/CameraDeviceClient.h b/services/camera/libcameraservice/api2/CameraDeviceClient.h
index 36c627a..c6688a5 100644
--- a/services/camera/libcameraservice/api2/CameraDeviceClient.h
+++ b/services/camera/libcameraservice/api2/CameraDeviceClient.h
@@ -242,7 +242,7 @@
// Calculate the ANativeWindow transform from android.sensor.orientation
status_t getRotationTransformLocked(int mirrorMode, /*out*/int32_t* transform);
- bool isUltraHighResolutionSensor(const String8 &cameraId);
+ bool supportsUltraHighResolutionCapture(const String8 &cameraId);
bool isSensorPixelModeConsistent(const std::list<int> &streamIdList,
const CameraMetadata &settings);
diff --git a/services/camera/libcameraservice/api2/CameraOfflineSessionClient.cpp b/services/camera/libcameraservice/api2/CameraOfflineSessionClient.cpp
index fb4d2f7..29d7e6f 100644
--- a/services/camera/libcameraservice/api2/CameraOfflineSessionClient.cpp
+++ b/services/camera/libcameraservice/api2/CameraOfflineSessionClient.cpp
@@ -106,7 +106,6 @@
void CameraOfflineSessionClient::clearStreamUseCaseOverrides() {
}
-
status_t CameraOfflineSessionClient::dump(int fd, const Vector<String16>& args) {
return BasicClient::dump(fd, args);
}
@@ -325,26 +324,20 @@
finishCameraStreamingOps();
}
-void CameraOfflineSessionClient::notifyAutoFocus(uint8_t newState, int triggerId) {
- (void)newState;
- (void)triggerId;
-
+void CameraOfflineSessionClient::notifyAutoFocus([[maybe_unused]] uint8_t newState,
+ [[maybe_unused]] int triggerId) {
ALOGV("%s: Autofocus state now %d, last trigger %d",
__FUNCTION__, newState, triggerId);
}
-void CameraOfflineSessionClient::notifyAutoExposure(uint8_t newState, int triggerId) {
- (void)newState;
- (void)triggerId;
-
+void CameraOfflineSessionClient::notifyAutoExposure([[maybe_unused]] uint8_t newState,
+ [[maybe_unused]] int triggerId) {
ALOGV("%s: Autoexposure state now %d, last trigger %d",
__FUNCTION__, newState, triggerId);
}
-void CameraOfflineSessionClient::notifyAutoWhitebalance(uint8_t newState, int triggerId) {
- (void)newState;
- (void)triggerId;
-
+void CameraOfflineSessionClient::notifyAutoWhitebalance([[maybe_unused]] uint8_t newState,
+ [[maybe_unused]] int triggerId) {
ALOGV("%s: Auto-whitebalance state now %d, last trigger %d", __FUNCTION__, newState,
triggerId);
}
diff --git a/services/camera/libcameraservice/api2/CompositeStream.cpp b/services/camera/libcameraservice/api2/CompositeStream.cpp
index 503cf23..4ed1c28 100644
--- a/services/camera/libcameraservice/api2/CompositeStream.cpp
+++ b/services/camera/libcameraservice/api2/CompositeStream.cpp
@@ -87,6 +87,7 @@
mCaptureResults.clear();
mFrameNumberMap.clear();
mErrorFrameNumbers.clear();
+ mRequestTimeMap.clear();
}
return deleteInternalStreams();
@@ -97,6 +98,8 @@
Mutex::Autolock l(mMutex);
if (!mErrorState && (streamId == getStreamId())) {
mPendingCaptureResults.emplace(frameNumber, CameraMetadata());
+ auto ts = systemTime();
+ mRequestTimeMap.emplace(frameNumber, ts);
}
}
@@ -111,6 +114,11 @@
void CompositeStream::eraseResult(int64_t frameNumber) {
Mutex::Autolock l(mMutex);
+ auto requestTimeIt = mRequestTimeMap.find(frameNumber);
+ if (requestTimeIt != mRequestTimeMap.end()) {
+ mRequestTimeMap.erase(requestTimeIt);
+ }
+
auto it = mPendingCaptureResults.find(frameNumber);
if (it == mPendingCaptureResults.end()) {
return;
diff --git a/services/camera/libcameraservice/api2/CompositeStream.h b/services/camera/libcameraservice/api2/CompositeStream.h
index c27faba..a551d11 100644
--- a/services/camera/libcameraservice/api2/CompositeStream.h
+++ b/services/camera/libcameraservice/api2/CompositeStream.h
@@ -83,6 +83,9 @@
// Notify when shutter notify is triggered
virtual void onShutter(const CaptureResultExtras& /*resultExtras*/, nsecs_t /*timestamp*/) {}
+ // Get composite stream stats
+ virtual void getStreamStats(hardware::CameraStreamStats* streamStats /*out*/) = 0;
+
void onResultAvailable(const CaptureResult& result);
bool onError(int32_t errorCode, const CaptureResultExtras& resultExtras);
@@ -140,6 +143,9 @@
// Keeps a set buffer/result frame numbers for any errors detected during processing.
std::set<int64_t> mErrorFrameNumbers;
+ // Frame number to request time map
+ std::unordered_map<int64_t, nsecs_t> mRequestTimeMap;
+
};
}; //namespace camera3
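CompositeStream now records a request timestamp per frame number (mRequestTimeMap) when a buffer is requested and drops it again in eraseResult(). A short sketch of how a derived stream can turn that into a capture latency, assuming the systemTime()/ns2ms() utilities used elsewhere in this patch; streamId and mSessionStatsBuilder here stand in for the derived class's own members:

    nsecs_t requestTimeNs = -1;
    {
        Mutex::Autolock l(mMutex);
        auto it = mRequestTimeMap.find(frameNumber);
        if (it != mRequestTimeMap.end()) {
            requestTimeNs = it->second;
            mRequestTimeMap.erase(it);
        }
    }
    if (requestTimeNs != -1) {
        // Milliseconds from request submission to the frame being ready.
        auto captureLatencyMs = ns2ms(systemTime() - requestTimeNs);
        mSessionStatsBuilder.incCounter(streamId, /*dropped*/ false, captureLatencyMs);
    }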
diff --git a/services/camera/libcameraservice/api2/DepthCompositeStream.cpp b/services/camera/libcameraservice/api2/DepthCompositeStream.cpp
index a3547dd..737c2b5 100644
--- a/services/camera/libcameraservice/api2/DepthCompositeStream.cpp
+++ b/services/camera/libcameraservice/api2/DepthCompositeStream.cpp
@@ -98,7 +98,7 @@
}
getSupportedDepthSizes(staticInfo, /*maxResolution*/false, &mSupportedDepthSizes);
- if (SessionConfigurationUtils::isUltraHighResolutionSensor(staticInfo)) {
+ if (SessionConfigurationUtils::supportsUltraHighResolutionCapture(staticInfo)) {
getSupportedDepthSizes(staticInfo, true, &mSupportedDepthSizesMaximumResolution);
}
}
@@ -901,7 +901,7 @@
return BAD_VALUE;
}
- if (SessionConfigurationUtils::isUltraHighResolutionSensor(ch)) {
+ if (SessionConfigurationUtils::supportsUltraHighResolutionCapture(ch)) {
getSupportedDepthSizes(ch, /*maxResolution*/true, &depthSizesMaximumResolution);
if (depthSizesMaximumResolution.empty()) {
ALOGE("%s: No depth stream configurations for maximum resolution present",
diff --git a/services/camera/libcameraservice/api2/DepthCompositeStream.h b/services/camera/libcameraservice/api2/DepthCompositeStream.h
index de0ed67..fbe99dd 100644
--- a/services/camera/libcameraservice/api2/DepthCompositeStream.h
+++ b/services/camera/libcameraservice/api2/DepthCompositeStream.h
@@ -69,6 +69,9 @@
static status_t getCompositeStreamInfo(const OutputStreamInfo &streamInfo,
const CameraMetadata& ch, std::vector<OutputStreamInfo>* compositeOutput /*out*/);
+ // Get composite stream stats
+ void getStreamStats(hardware::CameraStreamStats*) override {};
+
protected:
bool threadLoop() override;
diff --git a/services/camera/libcameraservice/api2/HeicCompositeStream.h b/services/camera/libcameraservice/api2/HeicCompositeStream.h
index 3132183..602a247 100644
--- a/services/camera/libcameraservice/api2/HeicCompositeStream.h
+++ b/services/camera/libcameraservice/api2/HeicCompositeStream.h
@@ -75,6 +75,9 @@
static status_t getCompositeStreamInfo(const OutputStreamInfo &streamInfo,
const CameraMetadata& ch, std::vector<OutputStreamInfo>* compositeOutput /*out*/);
+ // Get composite stream stats
+ void getStreamStats(hardware::CameraStreamStats*) override {};
+
static bool isSizeSupportedByHeifEncoder(int32_t width, int32_t height,
bool* useHeic, bool* useGrid, int64_t* stall, AString* hevcName = nullptr);
static bool isInMemoryTempFileSupported();
diff --git a/services/camera/libcameraservice/api2/JpegRCompositeStream.cpp b/services/camera/libcameraservice/api2/JpegRCompositeStream.cpp
index bb7dd81..5794747 100644
--- a/services/camera/libcameraservice/api2/JpegRCompositeStream.cpp
+++ b/services/camera/libcameraservice/api2/JpegRCompositeStream.cpp
@@ -52,6 +52,8 @@
mP010BufferAcquired(false),
mBlobBufferAcquired(false),
mOutputColorSpace(ANDROID_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP_UNSPECIFIED),
+ mOutputStreamUseCase(0),
+ mFirstRequestLatency(-1),
mProducerListener(new ProducerListener()),
mMaxJpegBufferSize(-1),
mUHRMaxJpegBufferSize(-1),
@@ -152,15 +154,23 @@
// Negative timestamp indicates that something went wrong during the capture result
// collection process.
if (it->first >= 0) {
- mPendingInputFrames[it->first].frameNumber = std::get<0>(it->second);
+ auto frameNumber = std::get<0>(it->second);
+ mPendingInputFrames[it->first].frameNumber = frameNumber;
mPendingInputFrames[it->first].result = std::get<1>(it->second);
+ mSessionStatsBuilder.incResultCounter(false /*dropped*/);
}
mCaptureResults.erase(it);
}
while (!mFrameNumberMap.empty()) {
auto it = mFrameNumberMap.begin();
- mPendingInputFrames[it->second].frameNumber = it->first;
+ auto frameNumber = it->first;
+ mPendingInputFrames[it->second].frameNumber = frameNumber;
+ auto requestTimeIt = mRequestTimeMap.find(frameNumber);
+ if (requestTimeIt != mRequestTimeMap.end()) {
+ mPendingInputFrames[it->second].requestTimeNs = requestTimeIt->second;
+ mRequestTimeMap.erase(requestTimeIt);
+ }
mFrameNumberMap.erase(it);
}
@@ -176,6 +186,8 @@
}
if (frameFound) {
+ mSessionStatsBuilder.incCounter(mP010StreamId, true /*dropped*/,
+ 0 /*captureLatencyMs*/);
it = mErrorFrameNumbers.erase(it);
} else {
ALOGW("%s: Not able to find failing input with frame number: %" PRId64, __FUNCTION__,
@@ -193,6 +205,7 @@
bool newInputAvailable = false;
for (const auto& it : mPendingInputFrames) {
if ((!it.second.error) && (it.second.p010Buffer.data != nullptr) &&
+ (it.second.requestTimeNs != -1) &&
((it.second.jpegBuffer.data != nullptr) || !mSupportInternalJpeg) &&
(it.first < *currentTs)) {
*currentTs = it.first;
@@ -378,6 +391,14 @@
.blobSizeBytes = static_cast<int32_t>(actualJpegRSize)
};
memcpy(header, &blobHeader, sizeof(CameraBlob));
+
+ if (inputFrame.requestTimeNs != -1) {
+ auto captureLatency = ns2ms(systemTime() - inputFrame.requestTimeNs);
+ mSessionStatsBuilder.incCounter(mP010StreamId, false /*dropped*/, captureLatency);
+ if (mFirstRequestLatency == -1) {
+ mFirstRequestLatency = captureLatency;
+ }
+ }
outputANW->queueBuffer(mOutputSurface.get(), anb, /*fence*/ -1);
return res;
@@ -404,6 +425,7 @@
//TODO: Figure out correct requestId
notifyError(inputFrame->frameNumber, -1 /*requestId*/);
inputFrame->errorNotified = true;
+ mSessionStatsBuilder.incCounter(mP010StreamId, true /*dropped*/, 0 /*captureLatencyMs*/);
}
}
@@ -608,6 +630,7 @@
}
mOutputColorSpace = colorSpace;
+ mOutputStreamUseCase = streamUseCase;
mBlobWidth = width;
mBlobHeight = height;
@@ -662,6 +685,8 @@
return res;
}
+ mSessionStatsBuilder.addStream(mP010StreamId);
+
run("JpegRCompositeStreamProc");
return NO_ERROR;
@@ -766,6 +791,7 @@
// characteristics data. The actual result data can be used for the jpeg quality but
// in case it is absent we can default to maximum.
eraseResult(resultExtras.frameNumber);
+ mSessionStatsBuilder.incResultCounter(true /*dropped*/);
}
bool JpegRCompositeStream::onStreamBufferError(const CaptureResultExtras& resultExtras) {
@@ -820,5 +846,31 @@
return NO_ERROR;
}
+void JpegRCompositeStream::getStreamStats(hardware::CameraStreamStats* streamStats) {
+ if ((streamStats == nullptr) || (mFirstRequestLatency == -1)) {
+ return;
+ }
+
+ bool deviceError;
+ std::map<int, StreamStats> stats;
+ mSessionStatsBuilder.buildAndReset(&streamStats->mRequestCount, &streamStats->mErrorCount,
+ &deviceError, &stats);
+ if (stats.find(mP010StreamId) != stats.end()) {
+ streamStats->mWidth = mBlobWidth;
+ streamStats->mHeight = mBlobHeight;
+ streamStats->mFormat = HAL_PIXEL_FORMAT_BLOB;
+ streamStats->mDataSpace = static_cast<int>(kJpegRDataSpace);
+ streamStats->mDynamicRangeProfile = mP010DynamicRange;
+ streamStats->mColorSpace = mOutputColorSpace;
+ streamStats->mStreamUseCase = mOutputStreamUseCase;
+ streamStats->mStartLatencyMs = mFirstRequestLatency;
+ streamStats->mHistogramType = hardware::CameraStreamStats::HISTOGRAM_TYPE_CAPTURE_LATENCY;
+ streamStats->mHistogramBins.assign(stats[mP010StreamId].mCaptureLatencyBins.begin(),
+ stats[mP010StreamId].mCaptureLatencyBins.end());
+ streamStats->mHistogramCounts.assign(stats[mP010StreamId].mCaptureLatencyHistogram.begin(),
+ stats[mP010StreamId].mCaptureLatencyHistogram.end());
+ }
+}
+
}; // namespace camera3
}; // namespace android
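JpegRCompositeStream now keeps its own SessionStatsBuilder and surfaces the result through getStreamStats(). The call sequence below is a sketch of that lifecycle based on the calls visible in this patch, not a verbatim excerpt; captureLatencyMs is a placeholder value:

    mSessionStatsBuilder.addStream(mP010StreamId);                        // at stream creation
    mSessionStatsBuilder.incCounter(mP010StreamId, /*dropped*/ false,
            captureLatencyMs);                                            // frame completed
    mSessionStatsBuilder.incCounter(mP010StreamId, /*dropped*/ true, 0);  // frame failed
    mSessionStatsBuilder.incResultCounter(/*dropped*/ false);             // capture result arrived

    // On notifyIdle(), getStreamStats() drains everything into CameraStreamStats:
    int64_t requestCount = 0, errorCount = 0;
    bool deviceError = false;
    std::map<int, StreamStats> stats;
    mSessionStatsBuilder.buildAndReset(&requestCount, &errorCount, &deviceError, &stats);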
diff --git a/services/camera/libcameraservice/api2/JpegRCompositeStream.h b/services/camera/libcameraservice/api2/JpegRCompositeStream.h
index 4b462b5..3dfed30 100644
--- a/services/camera/libcameraservice/api2/JpegRCompositeStream.h
+++ b/services/camera/libcameraservice/api2/JpegRCompositeStream.h
@@ -22,6 +22,7 @@
#include "system/graphics-base-v1.1.h"
#include "api1/client2/JpegProcessor.h"
+#include "utils/SessionStatsBuilder.h"
#include "CompositeStream.h"
@@ -65,6 +66,9 @@
static status_t getCompositeStreamInfo(const OutputStreamInfo &streamInfo,
const CameraMetadata& ch, std::vector<OutputStreamInfo>* compositeOutput /*out*/);
+ // Get composite stream stats
+ void getStreamStats(hardware::CameraStreamStats* streamStats) override;
+
protected:
bool threadLoop() override;
@@ -80,8 +84,10 @@
bool errorNotified;
int64_t frameNumber;
int32_t requestId;
+ nsecs_t requestTimeNs;
- InputFrame() : error(false), errorNotified(false), frameNumber(-1), requestId(-1) { }
+ InputFrame() : error(false), errorNotified(false), frameNumber(-1), requestId(-1),
+ requestTimeNs(-1) { }
};
status_t processInputFrame(nsecs_t ts, const InputFrame &inputFrame);
@@ -119,6 +125,8 @@
bool mP010BufferAcquired, mBlobBufferAcquired;
sp<Surface> mP010Surface, mBlobSurface, mOutputSurface;
int32_t mOutputColorSpace;
+ int64_t mOutputStreamUseCase;
+ nsecs_t mFirstRequestLatency;
sp<ProducerListener> mProducerListener;
ssize_t mMaxJpegBufferSize;
@@ -137,6 +145,8 @@
std::unordered_map<int64_t, InputFrame> mPendingInputFrames;
const CameraMetadata mStaticInfo;
+
+ SessionStatsBuilder mSessionStatsBuilder;
};
}; //namespace camera3
diff --git a/services/camera/libcameraservice/common/Camera2ClientBase.cpp b/services/camera/libcameraservice/common/Camera2ClientBase.cpp
index 0a2819c..f1fc815 100644
--- a/services/camera/libcameraservice/common/Camera2ClientBase.cpp
+++ b/services/camera/libcameraservice/common/Camera2ClientBase.cpp
@@ -420,50 +420,38 @@
}
template <typename TClientBase>
-void Camera2ClientBase<TClientBase>::notifyShutter(const CaptureResultExtras& resultExtras,
- nsecs_t timestamp) {
- (void)resultExtras;
- (void)timestamp;
-
+void Camera2ClientBase<TClientBase>::notifyShutter(
+ [[maybe_unused]] const CaptureResultExtras& resultExtras,
+ [[maybe_unused]] nsecs_t timestamp) {
ALOGV("%s: Shutter notification for request id %" PRId32 " at time %" PRId64,
__FUNCTION__, resultExtras.requestId, timestamp);
}
template <typename TClientBase>
-void Camera2ClientBase<TClientBase>::notifyAutoFocus(uint8_t newState,
- int triggerId) {
- (void)newState;
- (void)triggerId;
-
+void Camera2ClientBase<TClientBase>::notifyAutoFocus([[maybe_unused]] uint8_t newState,
+ [[maybe_unused]] int triggerId) {
ALOGV("%s: Autofocus state now %d, last trigger %d",
__FUNCTION__, newState, triggerId);
}
template <typename TClientBase>
-void Camera2ClientBase<TClientBase>::notifyAutoExposure(uint8_t newState,
- int triggerId) {
- (void)newState;
- (void)triggerId;
-
+void Camera2ClientBase<TClientBase>::notifyAutoExposure([[maybe_unused]] uint8_t newState,
+ [[maybe_unused]] int triggerId) {
ALOGV("%s: Autoexposure state now %d, last trigger %d",
__FUNCTION__, newState, triggerId);
}
template <typename TClientBase>
-void Camera2ClientBase<TClientBase>::notifyAutoWhitebalance(uint8_t newState,
- int triggerId) {
- (void)newState;
- (void)triggerId;
-
+void Camera2ClientBase<TClientBase>::notifyAutoWhitebalance(
+ [[maybe_unused]] uint8_t newState,
+ [[maybe_unused]] int triggerId) {
ALOGV("%s: Auto-whitebalance state now %d, last trigger %d",
__FUNCTION__, newState, triggerId);
}
template <typename TClientBase>
-void Camera2ClientBase<TClientBase>::notifyPrepared(int streamId) {
- (void)streamId;
-
+void Camera2ClientBase<TClientBase>::notifyPrepared([[maybe_unused]] int streamId) {
ALOGV("%s: Stream %d now prepared",
__FUNCTION__, streamId);
}
@@ -475,9 +463,8 @@
}
template <typename TClientBase>
-void Camera2ClientBase<TClientBase>::notifyRepeatingRequestError(long lastFrameNumber) {
- (void)lastFrameNumber;
-
+void Camera2ClientBase<TClientBase>::notifyRepeatingRequestError(
+ [[maybe_unused]] long lastFrameNumber) {
ALOGV("%s: Repeating request was stopped. Last frame number is %ld",
__FUNCTION__, lastFrameNumber);
}
diff --git a/services/camera/libcameraservice/common/CameraDeviceBase.h b/services/camera/libcameraservice/common/CameraDeviceBase.h
index 6f15653..fd80cc5 100644
--- a/services/camera/libcameraservice/common/CameraDeviceBase.h
+++ b/services/camera/libcameraservice/common/CameraDeviceBase.h
@@ -113,6 +113,8 @@
*/
virtual const CameraMetadata& infoPhysical(const String8& physicalId) const = 0;
+ virtual bool supportNativeJpegR() const { return false; };
+
struct PhysicalCameraSettings {
std::string cameraId;
CameraMetadata metadata;
diff --git a/services/camera/libcameraservice/common/CameraProviderManager.cpp b/services/camera/libcameraservice/common/CameraProviderManager.cpp
index a1a3769..2ebb98a 100644
--- a/services/camera/libcameraservice/common/CameraProviderManager.cpp
+++ b/services/camera/libcameraservice/common/CameraProviderManager.cpp
@@ -316,6 +316,18 @@
return deviceInfo->supportNativeZoomRatio();
}
+bool CameraProviderManager::supportNativeJpegR(const std::string &id) const {
+ std::lock_guard<std::mutex> lock(mInterfaceMutex);
+ return supportNativeJpegRLocked(id);
+}
+
+bool CameraProviderManager::supportNativeJpegRLocked(const std::string &id) const {
+ auto deviceInfo = findDeviceInfoLocked(id);
+ if (deviceInfo == nullptr) return false;
+
+ return deviceInfo->supportNativeJpegR();
+}
+
status_t CameraProviderManager::getResourceCost(const std::string &id,
CameraResourceCost* cost) const {
std::lock_guard<std::mutex> lock(mInterfaceMutex);
@@ -1108,7 +1120,7 @@
}
status_t CameraProviderManager::ProviderInfo::DeviceInfo3::deriveJpegRTags(bool maxResolution) {
- if (kFrameworkJpegRDisabled) {
+ if (kFrameworkJpegRDisabled || mSupportsNativeJpegR) {
return OK;
}
@@ -2072,13 +2084,12 @@
CameraProviderManager::ProviderInfo::ProviderInfo(
const std::string &providerName,
const std::string &providerInstance,
- CameraProviderManager *manager) :
+ [[maybe_unused]] CameraProviderManager *manager) :
mProviderName(providerName),
mProviderInstance(providerInstance),
mProviderTagid(generateVendorTagId(providerName)),
mUniqueDeviceCount(0),
mManager(manager) {
- (void) mManager;
}
const std::string& CameraProviderManager::ProviderInfo::getType() const {
diff --git a/services/camera/libcameraservice/common/CameraProviderManager.h b/services/camera/libcameraservice/common/CameraProviderManager.h
index acf511b..ce4129c 100644
--- a/services/camera/libcameraservice/common/CameraProviderManager.h
+++ b/services/camera/libcameraservice/common/CameraProviderManager.h
@@ -248,6 +248,11 @@
bool supportNativeZoomRatio(const std::string &id) const;
/**
+ * Return true if the camera device has native Jpeg/R support.
+ */
+ bool supportNativeJpegR(const std::string &id) const;
+
+ /**
* Return the resource cost of this camera device
*/
status_t getResourceCost(const std::string &id,
@@ -568,6 +573,7 @@
bool hasFlashUnit() const { return mHasFlashUnit; }
bool supportNativeZoomRatio() const { return mSupportNativeZoomRatio; }
+ bool supportNativeJpegR() const { return mSupportsNativeJpegR; }
virtual status_t setTorchMode(bool enabled) = 0;
virtual status_t turnOnTorchWithStrengthLevel(int32_t torchStrength) = 0;
virtual status_t getTorchStrengthLevel(int32_t *torchStrength) = 0;
@@ -576,17 +582,15 @@
hardware::CameraInfo *info) const = 0;
virtual bool isAPI1Compatible() const = 0;
virtual status_t dumpState(int fd) = 0;
- virtual status_t getCameraCharacteristics(bool overrideForPerfClass,
- CameraMetadata *characteristics, bool overrideToPortrait) {
- (void) overrideForPerfClass;
- (void) characteristics;
- (void) overrideToPortrait;
+ virtual status_t getCameraCharacteristics(
+ [[maybe_unused]] bool overrideForPerfClass,
+ [[maybe_unused]] CameraMetadata *characteristics,
+ [[maybe_unused]] bool overrideToPortrait) {
return INVALID_OPERATION;
}
- virtual status_t getPhysicalCameraCharacteristics(const std::string& physicalCameraId,
- CameraMetadata *characteristics) const {
- (void) physicalCameraId;
- (void) characteristics;
+ virtual status_t getPhysicalCameraCharacteristics(
+ [[maybe_unused]] const std::string& physicalCameraId,
+ [[maybe_unused]] CameraMetadata *characteristics) const {
return INVALID_OPERATION;
}
@@ -611,13 +615,14 @@
mParentProvider(parentProvider), mTorchStrengthLevel(0),
mTorchMaximumStrengthLevel(0), mTorchDefaultStrengthLevel(0),
mHasFlashUnit(false), mSupportNativeZoomRatio(false),
- mPublicCameraIds(publicCameraIds) {}
+ mPublicCameraIds(publicCameraIds), mSupportsNativeJpegR(false) {}
virtual ~DeviceInfo() {}
protected:
bool mHasFlashUnit; // const after constructor
bool mSupportNativeZoomRatio; // const after constructor
const std::vector<std::string>& mPublicCameraIds;
+ bool mSupportsNativeJpegR;
};
std::vector<std::unique_ptr<DeviceInfo>> mDevices;
std::unordered_set<std::string> mUniqueCameraIds;
@@ -806,6 +811,8 @@
// No guarantees on the order of traversal
ProviderInfo::DeviceInfo* findDeviceInfoLocked(const std::string& id) const;
+ bool supportNativeJpegRLocked(const std::string &id) const;
+
// Map external providers to USB devices in order to handle USB hotplug
// events for lazy HALs
std::pair<std::vector<std::string>, sp<ProviderInfo>>
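The new supportNativeJpegR() query is a thin wrapper over a per-device flag. A hedged sketch of both ends of the plumbing, matching the AIDL provider change further below (HIDL providers never set the flag, so it remains false there); providerManager is a hypothetical handle:

    // Provider side: the flag is derived once from the static metadata.
    mSupportsNativeJpegR = mCameraCharacteristics.exists(
            ANDROID_JPEGR_AVAILABLE_JPEG_R_STREAM_CONFIGURATIONS);

    // Consumer side: skip framework Jpeg/R emulation when the HAL is native.
    if (providerManager->supportNativeJpegR(cameraId)) {
        // No derived Jpeg/R tags, no JpegRCompositeStream for this device.
    }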
diff --git a/services/camera/libcameraservice/common/aidl/AidlProviderInfo.cpp b/services/camera/libcameraservice/common/aidl/AidlProviderInfo.cpp
index 30ebd91..64098ea 100644
--- a/services/camera/libcameraservice/common/aidl/AidlProviderInfo.cpp
+++ b/services/camera/libcameraservice/common/aidl/AidlProviderInfo.cpp
@@ -483,6 +483,9 @@
}
}
+ mSupportsNativeJpegR = mCameraCharacteristics.exists(
+ ANDROID_JPEGR_AVAILABLE_JPEG_R_STREAM_CONFIGURATIONS);
+
mSystemCameraKind = getSystemCameraKind();
status_t res = fixupMonochromeTags();
@@ -506,8 +509,8 @@
ALOGE("%s: Unable to derive Jpeg/R tags based on camera and media capabilities: %s (%d)",
__FUNCTION__, strerror(-res), res);
}
-
- if (camera3::SessionConfigurationUtils::isUltraHighResolutionSensor(mCameraCharacteristics)) {
+ using camera3::SessionConfigurationUtils::supportsUltraHighResolutionCapture;
+ if (supportsUltraHighResolutionCapture(mCameraCharacteristics)) {
status_t status = addDynamicDepthTags(/*maxResolution*/true);
if (OK != status) {
ALOGE("%s: Failed appending dynamic depth tags for maximum resolution mode: %s (%d)",
@@ -732,8 +735,8 @@
camera::device::StreamConfiguration streamConfiguration;
bool earlyExit = false;
auto bRes = SessionConfigurationUtils::convertToHALStreamCombination(configuration,
- String8(mId.c_str()), mCameraCharacteristics, getMetadata, mPhysicalIds,
- streamConfiguration, overrideForPerfClass, &earlyExit);
+ String8(mId.c_str()), mCameraCharacteristics, mSupportsNativeJpegR, getMetadata,
+ mPhysicalIds, streamConfiguration, overrideForPerfClass, &earlyExit);
if (!bRes.isOk()) {
return UNKNOWN_ERROR;
@@ -781,7 +784,7 @@
SessionConfigurationUtils::targetPerfClassPrimaryCamera(
perfClassPrimaryCameraIds, cameraId, targetSdkVersion);
res = mManager->getCameraCharacteristicsLocked(cameraId, overrideForPerfClass, &deviceInfo,
- /*overrideToPortrait*/true);
+ /*overrideToPortrait*/false);
if (res != OK) {
return res;
}
@@ -789,7 +792,8 @@
[this](const String8 &id, bool overrideForPerfClass) {
CameraMetadata physicalDeviceInfo;
mManager->getCameraCharacteristicsLocked(id.string(), overrideForPerfClass,
- &physicalDeviceInfo, /*overrideToPortrait*/true);
+ &physicalDeviceInfo,
+ /*overrideToPortrait*/false);
return physicalDeviceInfo;
};
std::vector<std::string> physicalCameraIds;
@@ -797,7 +801,8 @@
bStatus =
SessionConfigurationUtils::convertToHALStreamCombination(
cameraIdAndSessionConfig.mSessionConfiguration,
- String8(cameraId.c_str()), deviceInfo, getMetadata,
+ String8(cameraId.c_str()), deviceInfo,
+ mManager->supportNativeJpegRLocked(cameraId), getMetadata,
physicalCameraIds, streamConfiguration,
overrideForPerfClass, &shouldExit);
if (!bStatus.isOk()) {
diff --git a/services/camera/libcameraservice/common/hidl/HidlProviderInfo.cpp b/services/camera/libcameraservice/common/hidl/HidlProviderInfo.cpp
index 0e83191..a13b937 100644
--- a/services/camera/libcameraservice/common/hidl/HidlProviderInfo.cpp
+++ b/services/camera/libcameraservice/common/hidl/HidlProviderInfo.cpp
@@ -442,8 +442,7 @@
}
void HidlProviderInfo::serviceDied(uint64_t cookie,
- const wp<hidl::base::V1_0::IBase>& who) {
- (void) who;
+ [[maybe_unused]] const wp<hidl::base::V1_0::IBase>& who) {
ALOGI("Camera provider '%s' has died; removing it", mProviderInstance.c_str());
if (cookie != mId) {
ALOGW("%s: Unexpected serviceDied cookie %" PRIu64 ", expected %" PRIu32,
@@ -625,7 +624,7 @@
__FUNCTION__, strerror(-res), res);
}
- if (SessionConfigurationUtils::isUltraHighResolutionSensor(mCameraCharacteristics)) {
+ if (SessionConfigurationUtils::supportsUltraHighResolutionCapture(mCameraCharacteristics)) {
status_t status = addDynamicDepthTags(/*maxResolution*/true);
if (OK != status) {
ALOGE("%s: Failed appending dynamic depth tags for maximum resolution mode: %s (%d)",
@@ -925,7 +924,7 @@
SessionConfigurationUtils::targetPerfClassPrimaryCamera(
perfClassPrimaryCameraIds, cameraId, targetSdkVersion);
res = mManager->getCameraCharacteristicsLocked(cameraId, overrideForPerfClass, &deviceInfo,
- /*overrideToPortrait*/true);
+ /*overrideToPortrait*/false);
if (res != OK) {
return res;
}
@@ -933,7 +932,7 @@
[this](const String8 &id, bool overrideForPerfClass) {
CameraMetadata physicalDeviceInfo;
mManager->getCameraCharacteristicsLocked(id.string(), overrideForPerfClass,
- &physicalDeviceInfo, /*overrideToPortrait*/true);
+ &physicalDeviceInfo, /*overrideToPortrait*/false);
return physicalDeviceInfo;
};
std::vector<std::string> physicalCameraIds;
diff --git a/services/camera/libcameraservice/device3/Camera3BufferManager.cpp b/services/camera/libcameraservice/device3/Camera3BufferManager.cpp
index a556200..2ac38d5 100644
--- a/services/camera/libcameraservice/device3/Camera3BufferManager.cpp
+++ b/services/camera/libcameraservice/device3/Camera3BufferManager.cpp
@@ -451,10 +451,9 @@
return OK;
}
-void Camera3BufferManager::dump(int fd, const Vector<String16>& args) const {
+void Camera3BufferManager::dump(int fd, [[maybe_unused]] const Vector<String16>& args) const {
Mutex::Autolock l(mLock);
- (void) args;
String8 lines;
lines.appendFormat(" Total stream sets: %zu\n", mStreamSetMap.size());
for (size_t i = 0; i < mStreamSetMap.size(); i++) {
diff --git a/services/camera/libcameraservice/device3/Camera3Device.cpp b/services/camera/libcameraservice/device3/Camera3Device.cpp
index 9faea20..153e999 100644
--- a/services/camera/libcameraservice/device3/Camera3Device.cpp
+++ b/services/camera/libcameraservice/device3/Camera3Device.cpp
@@ -53,6 +53,7 @@
#include <android/hardware/camera2/ICameraDeviceUser.h>
#include "CameraService.h"
+#include "aidl/android/hardware/graphics/common/Dataspace.h"
#include "aidl/AidlUtils.h"
#include "device3/Camera3Device.h"
#include "device3/Camera3FakeStream.h"
@@ -80,6 +81,7 @@
mLegacyClient(legacyClient),
mOperatingMode(NO_MODE),
mIsConstrainedHighSpeedConfiguration(false),
+ mSupportNativeJpegR(false),
mStatus(STATUS_UNINITIALIZED),
mStatusWaiters(0),
mUsePartialResult(false),
@@ -98,6 +100,9 @@
mNeedFixupMonochromeTags(false),
mOverrideForPerfClass(overrideForPerfClass),
mOverrideToPortrait(overrideToPortrait),
+ mRotateAndCropOverride(ANDROID_SCALER_ROTATE_AND_CROP_NONE),
+ mComposerOutput(false),
+ mAutoframingOverride(ANDROID_CONTROL_AUTOFRAMING_OFF),
mActivePhysicalId("")
{
ATRACE_CALL();
@@ -218,7 +223,7 @@
mZoomRatioMappers[mId.c_str()] = ZoomRatioMapper(&mDeviceInfo,
mSupportNativeZoomRatio, usePrecorrectArray);
- if (SessionConfigurationUtils::isUltraHighResolutionSensor(mDeviceInfo)) {
+ if (SessionConfigurationUtils::supportsUltraHighResolutionCapture(mDeviceInfo)) {
mUHRCropAndMeteringRegionMappers[mId.c_str()] =
UHRCropAndMeteringRegionMapper(mDeviceInfo, usePrecorrectArray);
}
@@ -406,7 +411,7 @@
// Get max jpeg size (area-wise) for default sensor pixel mode
camera3::Size maxDefaultJpegResolution =
SessionConfigurationUtils::getMaxJpegResolution(info,
- /*isUltraHighResolutionSensor*/false);
+ /*supportsUltraHighResolutionCapture*/false);
// Get max jpeg size (area-wise) for max resolution sensor pixel mode / 0 if
// not ultra high res sensor
camera3::Size uhrMaxJpegResolution =
@@ -499,9 +504,8 @@
return BAD_VALUE;
}
-status_t Camera3Device::dump(int fd, const Vector<String16> &args) {
+status_t Camera3Device::dump(int fd, [[maybe_unused]] const Vector<String16> &args) {
ATRACE_CALL();
- (void)args;
// Try to lock, but continue in case of failure (to avoid blocking in
// deadlocks)
@@ -1364,12 +1368,49 @@
set_camera_metadata_vendor_id(meta, mVendorTagId);
filteredParams.unlock(meta);
if (availableSessionKeys.count > 0) {
+ bool rotateAndCropSessionKey = false;
+ bool autoframingSessionKey = false;
for (size_t i = 0; i < availableSessionKeys.count; i++) {
camera_metadata_ro_entry entry = params.find(
availableSessionKeys.data.i32[i]);
if (entry.count > 0) {
filteredParams.update(entry);
}
+ if (ANDROID_SCALER_ROTATE_AND_CROP == availableSessionKeys.data.i32[i]) {
+ rotateAndCropSessionKey = true;
+ }
+ if (ANDROID_CONTROL_AUTOFRAMING == availableSessionKeys.data.i32[i]) {
+ autoframingSessionKey = true;
+ }
+ }
+
+ if (rotateAndCropSessionKey || autoframingSessionKey) {
+ sp<CaptureRequest> request = new CaptureRequest();
+ PhysicalCameraSettings settingsList;
+ settingsList.metadata = filteredParams;
+ request->mSettingsList.push_back(settingsList);
+
+ if (rotateAndCropSessionKey) {
+ auto rotateAndCropEntry = filteredParams.find(ANDROID_SCALER_ROTATE_AND_CROP);
+ if (rotateAndCropEntry.count > 0 &&
+ rotateAndCropEntry.data.u8[0] == ANDROID_SCALER_ROTATE_AND_CROP_AUTO) {
+ request->mRotateAndCropAuto = true;
+ } else {
+ request->mRotateAndCropAuto = false;
+ }
+
+ overrideAutoRotateAndCrop(request, mOverrideToPortrait, mRotateAndCropOverride);
+ }
+
+ if (autoframingSessionKey) {
+ auto autoframingEntry = filteredParams.find(ANDROID_CONTROL_AUTOFRAMING);
+ if (autoframingEntry.count > 0 &&
+ autoframingEntry.data.u8[0] == ANDROID_CONTROL_AUTOFRAMING_AUTO) {
+ overrideAutoframing(request, mAutoframingOverride);
+ }
+ }
+
+ filteredParams = request->mSettingsList.begin()->metadata;
}
}
@@ -1897,7 +1938,7 @@
streamUseCase = camera3Stream->getStreamUseCase();
}
streamStats.emplace_back(stream->getWidth(), stream->getHeight(),
- stream->getFormat(), streamMaxPreviewFps, stream->getDataSpace(), usage,
+ stream->getOriginalFormat(), streamMaxPreviewFps, stream->getDataSpace(), usage,
stream->getMaxHalBuffers(),
stream->getMaxTotalBuffers() - stream->getMaxHalBuffers(),
stream->getDynamicRangeProfile(), streamUseCase,
@@ -2398,7 +2439,7 @@
}
mGroupIdPhysicalCameraMap.clear();
- bool composerSurfacePresent = false;
+ mComposerOutput = false;
for (size_t i = 0; i < mOutputStreams.size(); i++) {
// Don't configure bidi streams twice, nor add them twice to the list
@@ -2421,7 +2462,10 @@
if (outputStream->format == HAL_PIXEL_FORMAT_BLOB) {
size_t k = i + ((mInputStream != nullptr) ? 1 : 0); // Input stream if present should
// always occupy the initial entry.
- if (outputStream->data_space == HAL_DATASPACE_V0_JFIF) {
+ if ((outputStream->data_space == HAL_DATASPACE_V0_JFIF) ||
+ (outputStream->data_space ==
+ static_cast<android_dataspace_t>(
+ aidl::android::hardware::graphics::common::Dataspace::JPEG_R))) {
bufferSizes[k] = static_cast<uint32_t>(
getJpegBufferSize(infoPhysical(String8(outputStream->physical_camera_id)),
outputStream->width, outputStream->height));
@@ -2441,7 +2485,7 @@
}
if (outputStream->usage & GraphicBuffer::USAGE_HW_COMPOSER) {
- composerSurfacePresent = true;
+ mComposerOutput = true;
}
}
@@ -2451,8 +2495,9 @@
// max_buffers, usage, and priv fields, as well as data_space and format
// fields for IMPLEMENTATION_DEFINED formats.
+ int64_t logId = mCameraServiceProxyWrapper->getCurrentLogIdForCamera(mId);
const camera_metadata_t *sessionBuffer = sessionParams.getAndLock();
- res = mInterface->configureStreams(sessionBuffer, &config, bufferSizes);
+ res = mInterface->configureStreams(sessionBuffer, &config, bufferSizes, logId);
sessionParams.unlock(sessionBuffer);
if (res == BAD_VALUE) {
@@ -2510,7 +2555,7 @@
}
}
- mRequestThread->setComposerSurface(composerSurfacePresent);
+ mRequestThread->setComposerSurface(mComposerOutput);
// Request thread needs to know to avoid using repeat-last-settings protocol
// across configure_streams() calls
@@ -3465,6 +3510,17 @@
latestRequestId = NAME_NOT_FOUND;
}
+ for (size_t i = 0; i < mNextRequests.size(); i++) {
+ auto& nextRequest = mNextRequests.editItemAt(i);
+ sp<CaptureRequest> captureRequest = nextRequest.captureRequest;
+ // Do not override rotate&crop for stream configurations that include
+ // SurfaceViews(HW_COMPOSER) output, unless mOverrideToPortrait is set.
+ // The display rotation there will be compensated by NATIVE_WINDOW_TRANSFORM_INVERSE_DISPLAY
+ captureRequest->mRotateAndCropChanged = (mComposerOutput && !mOverrideToPortrait) ? false :
+ overrideAutoRotateAndCrop(captureRequest);
+ captureRequest->mAutoframingChanged = overrideAutoframing(captureRequest);
+ }
+
// 'mNextRequests' will at this point contain either a set of HFR batched requests
// or a single request from streaming or burst. In either case the first element
// should contain the latest camera settings that we need to check for any session
@@ -3614,19 +3670,15 @@
bool triggersMixedIn = (triggerCount > 0 || mPrevTriggers > 0);
mPrevTriggers = triggerCount;
- // Do not override rotate&crop for stream configurations that include
- // SurfaceViews(HW_COMPOSER) output, unless mOverrideToPortrait is set.
- // The display rotation there will be compensated by NATIVE_WINDOW_TRANSFORM_INVERSE_DISPLAY
- bool rotateAndCropChanged = (mComposerOutput && !mOverrideToPortrait) ? false :
- overrideAutoRotateAndCrop(captureRequest);
- bool autoframingChanged = overrideAutoframing(captureRequest);
bool testPatternChanged = overrideTestPattern(captureRequest);
// If the request is the same as last, or we had triggers now or last time or
// changing overrides this time
bool newRequest =
- (mPrevRequest != captureRequest || triggersMixedIn || rotateAndCropChanged ||
- autoframingChanged || testPatternChanged) &&
+ (mPrevRequest != captureRequest || triggersMixedIn ||
+ captureRequest->mRotateAndCropChanged ||
+ captureRequest->mAutoframingChanged ||
+ testPatternChanged) &&
// Request settings are all the same within one batch, so only treat the first
// request in a batch as new
!(batchedRequest && i > 0);
@@ -4101,9 +4153,6 @@
camera_metadata_enum_android_scaler_rotate_and_crop_t rotateAndCropValue) {
ATRACE_CALL();
Mutex::Autolock l(mTriggerMutex);
- if (rotateAndCropValue == ANDROID_SCALER_ROTATE_AND_CROP_AUTO) {
- return BAD_VALUE;
- }
mRotateAndCropOverride = rotateAndCropValue;
return OK;
}
@@ -4112,9 +4161,6 @@
camera_metadata_enum_android_control_autoframing_t autoframingValue) {
ATRACE_CALL();
Mutex::Autolock l(mTriggerMutex);
- if (autoframingValue == ANDROID_CONTROL_AUTOFRAMING_AUTO) {
- return BAD_VALUE;
- }
mAutoframingOverride = autoframingValue;
return OK;
}
@@ -4702,13 +4748,20 @@
return OK;
}
-bool Camera3Device::RequestThread::overrideAutoRotateAndCrop(
- const sp<CaptureRequest> &request) {
+bool Camera3Device::RequestThread::overrideAutoRotateAndCrop(const sp<CaptureRequest> &request) {
+ ATRACE_CALL();
+ Mutex::Autolock l(mTriggerMutex);
+ return Camera3Device::overrideAutoRotateAndCrop(request, this->mOverrideToPortrait,
+ this->mRotateAndCropOverride);
+}
+
+bool Camera3Device::overrideAutoRotateAndCrop(const sp<CaptureRequest> &request,
+ bool overrideToPortrait,
+ camera_metadata_enum_android_scaler_rotate_and_crop_t rotateAndCropOverride) {
ATRACE_CALL();
- if (mOverrideToPortrait) {
- Mutex::Autolock l(mTriggerMutex);
- uint8_t rotateAndCrop_u8 = mRotateAndCropOverride;
+ if (overrideToPortrait) {
+ uint8_t rotateAndCrop_u8 = rotateAndCropOverride;
CameraMetadata &metadata = request->mSettingsList.begin()->metadata;
metadata.update(ANDROID_SCALER_ROTATE_AND_CROP,
&rotateAndCrop_u8, 1);
@@ -4716,24 +4769,44 @@
}
if (request->mRotateAndCropAuto) {
- Mutex::Autolock l(mTriggerMutex);
CameraMetadata &metadata = request->mSettingsList.begin()->metadata;
auto rotateAndCropEntry = metadata.find(ANDROID_SCALER_ROTATE_AND_CROP);
if (rotateAndCropEntry.count > 0) {
- if (rotateAndCropEntry.data.u8[0] == mRotateAndCropOverride) {
+ if (rotateAndCropEntry.data.u8[0] == rotateAndCropOverride) {
return false;
} else {
- rotateAndCropEntry.data.u8[0] = mRotateAndCropOverride;
+ rotateAndCropEntry.data.u8[0] = rotateAndCropOverride;
return true;
}
} else {
- uint8_t rotateAndCrop_u8 = mRotateAndCropOverride;
- metadata.update(ANDROID_SCALER_ROTATE_AND_CROP,
- &rotateAndCrop_u8, 1);
+ uint8_t rotateAndCrop_u8 = rotateAndCropOverride;
+ metadata.update(ANDROID_SCALER_ROTATE_AND_CROP, &rotateAndCrop_u8, 1);
return true;
}
}
+
+ return false;
+}
+
+bool Camera3Device::overrideAutoframing(const sp<CaptureRequest> &request /*out*/,
+ camera_metadata_enum_android_control_autoframing_t autoframingOverride) {
+ CameraMetadata &metadata = request->mSettingsList.begin()->metadata;
+ auto autoframingEntry = metadata.find(ANDROID_CONTROL_AUTOFRAMING);
+ if (autoframingEntry.count > 0) {
+ if (autoframingEntry.data.u8[0] == autoframingOverride) {
+ return false;
+ } else {
+ autoframingEntry.data.u8[0] = autoframingOverride;
+ return true;
+ }
+ } else {
+ uint8_t autoframing_u8 = autoframingOverride;
+ metadata.update(ANDROID_CONTROL_AUTOFRAMING,
+ &autoframing_u8, 1);
+ return true;
+ }
+
return false;
}
@@ -4742,23 +4815,9 @@
if (request->mAutoframingAuto) {
Mutex::Autolock l(mTriggerMutex);
- CameraMetadata &metadata = request->mSettingsList.begin()->metadata;
-
- auto autoframingEntry = metadata.find(ANDROID_CONTROL_AUTOFRAMING);
- if (autoframingEntry.count > 0) {
- if (autoframingEntry.data.u8[0] == mAutoframingOverride) {
- return false;
- } else {
- autoframingEntry.data.u8[0] = mAutoframingOverride;
- return true;
- }
- } else {
- uint8_t autoframing_u8 = mAutoframingOverride;
- metadata.update(ANDROID_CONTROL_AUTOFRAMING,
- &autoframing_u8, 1);
- return true;
- }
+ return Camera3Device::overrideAutoframing(request, mAutoframingOverride);
}
+
return false;
}
@@ -5246,6 +5305,10 @@
if (mRequestThread == nullptr) {
return INVALID_OPERATION;
}
+ if (rotateAndCropValue == ANDROID_SCALER_ROTATE_AND_CROP_AUTO) {
+ return BAD_VALUE;
+ }
+ mRotateAndCropOverride = rotateAndCropValue;
return mRequestThread->setRotateAndCropAutoBehavior(rotateAndCropValue);
}
@@ -5257,6 +5320,10 @@
if (mRequestThread == nullptr) {
return INVALID_OPERATION;
}
+ if (autoframingValue == ANDROID_CONTROL_AUTOFRAMING_AUTO) {
+ return BAD_VALUE;
+ }
+ mAutoframingOverride = autoframingValue;
return mRequestThread->setAutoframingAutoBehaviour(autoframingValue);
}
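overrideAutoRotateAndCrop() and overrideAutoframing() are now static so the same logic can run both when session parameters are filtered and per capture request. Both follow one pattern, sketched here in generalized form; the helper name, tag, and value are placeholders, not part of the patch:

    // Returns true when the request metadata was actually modified, which is
    // what RequestThread uses to decide the settings must be resent to the HAL.
    static bool overrideU8Entry(CameraMetadata& metadata, uint32_t tag, uint8_t overrideValue) {
        auto entry = metadata.find(tag);
        if (entry.count > 0) {
            if (entry.data.u8[0] == overrideValue) {
                return false;                     // already consistent, nothing to do
            }
            entry.data.u8[0] = overrideValue;     // rewrite in place
            return true;
        }
        metadata.update(tag, &overrideValue, 1);  // entry absent: add it
        return true;
    }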
diff --git a/services/camera/libcameraservice/device3/Camera3Device.h b/services/camera/libcameraservice/device3/Camera3Device.h
index 6985514..e045b98 100644
--- a/services/camera/libcameraservice/device3/Camera3Device.h
+++ b/services/camera/libcameraservice/device3/Camera3Device.h
@@ -119,6 +119,7 @@
status_t dumpWatchedEventsToVector(std::vector<std::string> &out) override;
const CameraMetadata& info() const override;
const CameraMetadata& infoPhysical(const String8& physicalId) const override;
+ bool supportNativeJpegR() const override { return mSupportNativeJpegR; };
// Capture and setStreamingRequest will configure streams if currently in
// idle state
@@ -411,7 +412,7 @@
virtual status_t configureStreams(const camera_metadata_t * sessionParams,
/*inout*/ camera_stream_configuration_t * config,
- const std::vector<uint32_t>& bufferSizes) = 0;
+ const std::vector<uint32_t>& bufferSizes, int64_t logId) = 0;
// The injection camera configures the streams to hal.
virtual status_t configureInjectedStreams(
@@ -543,6 +544,7 @@
CameraMetadata mDeviceInfo;
bool mSupportNativeZoomRatio;
+ bool mSupportNativeJpegR;
std::unordered_map<std::string, CameraMetadata> mPhysicalDeviceInfoMap;
CameraMetadata mRequestTemplateCache[CAMERA_TEMPLATE_COUNT];
@@ -625,9 +627,14 @@
// overriding of ROTATE_AND_CROP value and adjustment of coordinates
// in several other controls in both the request and the result
bool mRotateAndCropAuto;
+ // Indicates that the ROTATE_AND_CROP value within 'mSettingsList' was modified
+ // irrespective of the original value.
+ bool mRotateAndCropChanged = false;
// Whether this request has AUTOFRAMING_AUTO set, so need to override the AUTOFRAMING value
// in the capture request.
bool mAutoframingAuto;
+ // Indicates that the auto framing value within 'mSettingsList' was modified
+ bool mAutoframingChanged = false;
// Whether this capture request has its zoom ratio set to 1.0x before
// the framework overrides it for camera HAL consumption.
@@ -816,6 +823,15 @@
*/
static nsecs_t getMonoToBoottimeOffset();
+ // Override rotate_and_crop control if needed
+ static bool overrideAutoRotateAndCrop(const sp<CaptureRequest> &request /*out*/,
+ bool overrideToPortrait,
+ camera_metadata_enum_android_scaler_rotate_and_crop_t rotateAndCropOverride);
+
+ // Override auto framing control if needed
+ static bool overrideAutoframing(const sp<CaptureRequest> &request /*out*/,
+ camera_metadata_enum_android_control_autoframing_t autoframingOverride);
+
struct RequestTrigger {
// Metadata tag number, e.g. android.control.aePrecaptureTrigger
uint32_t metadataTag;
@@ -973,7 +989,7 @@
status_t addFakeTriggerIds(const sp<CaptureRequest> &request);
// Override rotate_and_crop control if needed; returns true if the current value was changed
- bool overrideAutoRotateAndCrop(const sp<CaptureRequest> &request);
+ bool overrideAutoRotateAndCrop(const sp<CaptureRequest> &request /*out*/);
// Override autoframing control if needed; returns true if the current value was changed
bool overrideAutoframing(const sp<CaptureRequest> &request);
@@ -1417,6 +1433,11 @@
// Whether the camera framework overrides the device characteristics for
// app compatibility reasons.
bool mOverrideToPortrait;
+ camera_metadata_enum_android_scaler_rotate_and_crop_t mRotateAndCropOverride;
+ bool mComposerOutput;
+
+ // Auto framing override value
+ camera_metadata_enum_android_control_autoframing mAutoframingOverride;
// Current active physical id of the logical multi-camera, if any
std::string mActivePhysicalId;
diff --git a/services/camera/libcameraservice/device3/Camera3FakeStream.cpp b/services/camera/libcameraservice/device3/Camera3FakeStream.cpp
index 19afd69..8c0ac71 100644
--- a/services/camera/libcameraservice/device3/Camera3FakeStream.cpp
+++ b/services/camera/libcameraservice/device3/Camera3FakeStream.cpp
@@ -67,8 +67,7 @@
return INVALID_OPERATION;
}
-void Camera3FakeStream::dump(int fd, const Vector<String16> &args) const {
- (void) args;
+void Camera3FakeStream::dump(int fd, [[maybe_unused]] const Vector<String16> &args) const {
String8 lines;
lines.appendFormat(" Stream[%d]: Fake\n", mId);
write(fd, lines.string(), lines.size());
@@ -82,9 +81,8 @@
return OK;
}
-status_t Camera3FakeStream::detachBuffer(sp<GraphicBuffer>* buffer, int* fenceFd) {
- (void) buffer;
- (void) fenceFd;
+status_t Camera3FakeStream::detachBuffer([[maybe_unused]] sp<GraphicBuffer>* buffer,
+ [[maybe_unused]] int* fenceFd) {
// Do nothing
return OK;
}
diff --git a/services/camera/libcameraservice/device3/Camera3IOStreamBase.cpp b/services/camera/libcameraservice/device3/Camera3IOStreamBase.cpp
index a78d01e..fbaaf7b 100644
--- a/services/camera/libcameraservice/device3/Camera3IOStreamBase.cpp
+++ b/services/camera/libcameraservice/device3/Camera3IOStreamBase.cpp
@@ -74,8 +74,7 @@
return false;
}
-void Camera3IOStreamBase::dump(int fd, const Vector<String16> &args) const {
- (void) args;
+void Camera3IOStreamBase::dump(int fd, [[maybe_unused]] const Vector<String16> &args) const {
String8 lines;
uint64_t consumerUsage = 0;
diff --git a/services/camera/libcameraservice/device3/Camera3InputStream.cpp b/services/camera/libcameraservice/device3/Camera3InputStream.cpp
index 9a3f7ed..631bb43 100644
--- a/services/camera/libcameraservice/device3/Camera3InputStream.cpp
+++ b/services/camera/libcameraservice/device3/Camera3InputStream.cpp
@@ -104,17 +104,14 @@
status_t Camera3InputStream::returnBufferCheckedLocked(
const camera_stream_buffer &buffer,
- nsecs_t timestamp,
- nsecs_t readoutTimestamp,
- bool output,
+ [[maybe_unused]] nsecs_t timestamp,
+ [[maybe_unused]] nsecs_t readoutTimestamp,
+ [[maybe_unused]] bool output,
int32_t /*transform*/,
const std::vector<size_t>&,
/*out*/
sp<Fence> *releaseFenceOut) {
- (void)timestamp;
- (void)readoutTimestamp;
- (void)output;
ALOG_ASSERT(!output, "Expected output to be false");
status_t res;
@@ -218,8 +215,7 @@
return OK;
}
-void Camera3InputStream::dump(int fd, const Vector<String16> &args) const {
- (void) args;
+void Camera3InputStream::dump(int fd, [[maybe_unused]] const Vector<String16> &args) const {
String8 lines;
lines.appendFormat(" Stream[%d]: Input\n", mId);
write(fd, lines.string(), lines.size());
diff --git a/services/camera/libcameraservice/device3/Camera3OutputStream.cpp b/services/camera/libcameraservice/device3/Camera3OutputStream.cpp
index 2227232..beef0da 100644
--- a/services/camera/libcameraservice/device3/Camera3OutputStream.cpp
+++ b/services/camera/libcameraservice/device3/Camera3OutputStream.cpp
@@ -24,6 +24,7 @@
#include <aidl/android/hardware/camera/device/CameraBlob.h>
#include <aidl/android/hardware/camera/device/CameraBlobId.h>
+#include "aidl/android/hardware/graphics/common/Dataspace.h"
#include <android-base/unique_fd.h>
#include <cutils/properties.h>
@@ -394,13 +395,12 @@
const camera_stream_buffer &buffer,
nsecs_t timestamp,
nsecs_t readoutTimestamp,
- bool output,
+ [[maybe_unused]] bool output,
int32_t transform,
const std::vector<size_t>& surface_ids,
/*out*/
sp<Fence> *releaseFenceOut) {
- (void)output;
ALOG_ASSERT(output, "Expected output to be true");
status_t res;
@@ -457,7 +457,10 @@
mTraceFirstBuffer = false;
}
// Fix CameraBlob id type discrepancy between HIDL and AIDL, details : http://b/229688810
- if (getFormat() == HAL_PIXEL_FORMAT_BLOB && getDataSpace() == HAL_DATASPACE_V0_JFIF) {
+ if (getFormat() == HAL_PIXEL_FORMAT_BLOB && (getDataSpace() == HAL_DATASPACE_V0_JFIF ||
+ (getDataSpace() ==
+ static_cast<android_dataspace_t>(
+ aidl::android::hardware::graphics::common::Dataspace::JPEG_R)))) {
if (mIPCTransport == IPCTransport::HIDL) {
fixUpHidlJpegBlobHeader(anwBuffer, anwReleaseFence);
}
@@ -519,8 +522,7 @@
return res;
}
-void Camera3OutputStream::dump(int fd, const Vector<String16> &args) const {
- (void) args;
+void Camera3OutputStream::dump(int fd, [[maybe_unused]] const Vector<String16> &args) const {
String8 lines;
lines.appendFormat(" Stream[%d]: Output\n", mId);
lines.appendFormat(" Consumer name: %s\n", mConsumerName.string());
diff --git a/services/camera/libcameraservice/device3/Camera3OutputUtils.cpp b/services/camera/libcameraservice/device3/Camera3OutputUtils.cpp
index 738c314..f742a6d 100644
--- a/services/camera/libcameraservice/device3/Camera3OutputUtils.cpp
+++ b/services/camera/libcameraservice/device3/Camera3OutputUtils.cpp
@@ -639,9 +639,24 @@
if (deviceInfo != states.physicalDeviceInfoMap.end()) {
auto orientation = deviceInfo->second.find(ANDROID_SENSOR_ORIENTATION);
if (orientation.count > 0) {
+ int32_t transform;
ret = CameraUtils::getRotationTransform(deviceInfo->second,
- OutputConfiguration::MIRROR_MODE_AUTO, &request.transform);
- if (ret != OK) {
+ OutputConfiguration::MIRROR_MODE_AUTO, &transform);
+ if (ret == OK) {
+ // It is possible for camera providers to return the capture
+ // results after the processed frames. In that scenario, we will
+ // not be able to set the output transformation before the frames
+ // return to the consumer for the current capture request,
+ // but we could still try to configure it for any future requests
+ // that are still in flight. The assumption is that the physical
+ // device id remains the same for the duration of the pending queue.
+ for (size_t i = 0; i < states.inflightMap.size(); i++) {
+ auto &r = states.inflightMap.editValueAt(i);
+ if (r.requestTimeNs >= request.requestTimeNs) {
+ r.transform = transform;
+ }
+ }
+ } else {
ALOGE("%s: Failed to calculate current stream transformation: %s "
"(%d)", __FUNCTION__, strerror(-ret), ret);
}
diff --git a/services/camera/libcameraservice/device3/Camera3Stream.cpp b/services/camera/libcameraservice/device3/Camera3Stream.cpp
index 4d8495f..4395455 100644
--- a/services/camera/libcameraservice/device3/Camera3Stream.cpp
+++ b/services/camera/libcameraservice/device3/Camera3Stream.cpp
@@ -955,9 +955,8 @@
}
}
-void Camera3Stream::dump(int fd, const Vector<String16> &args) const
+void Camera3Stream::dump(int fd, [[maybe_unused]] const Vector<String16> &args) const
{
- (void)args;
mBufferLimitLatency.dump(fd,
" Latency histogram for wait on max_buffers");
}
diff --git a/services/camera/libcameraservice/device3/DistortionMapper.cpp b/services/camera/libcameraservice/device3/DistortionMapper.cpp
index 15807bf..f0764b4 100644
--- a/services/camera/libcameraservice/device3/DistortionMapper.cpp
+++ b/services/camera/libcameraservice/device3/DistortionMapper.cpp
@@ -67,7 +67,7 @@
return res;
}
- bool mMaxResolution = SessionConfigurationUtils::isUltraHighResolutionSensor(deviceInfo);
+ bool mMaxResolution = SessionConfigurationUtils::supportsUltraHighResolutionCapture(deviceInfo);
if (mMaxResolution) {
res = setupStaticInfoLocked(deviceInfo, /*maxResolution*/true);
}
diff --git a/services/camera/libcameraservice/device3/UHRCropAndMeteringRegionMapper.cpp b/services/camera/libcameraservice/device3/UHRCropAndMeteringRegionMapper.cpp
index c558d91..ce7097a 100644
--- a/services/camera/libcameraservice/device3/UHRCropAndMeteringRegionMapper.cpp
+++ b/services/camera/libcameraservice/device3/UHRCropAndMeteringRegionMapper.cpp
@@ -91,6 +91,8 @@
if (meteringRegionsSetEntry.count == 1 &&
meteringRegionsSetEntry.data.u8[0] == entry.second.second) {
// metering region set by client, doesn't need to be fixed.
+ ALOGV("%s: Metering region %u set by client, they don't need to be fixed",
+ __FUNCTION__, entry.first);
continue;
}
camera_metadata_entry meteringRegionEntry = request->find(entry.first);
@@ -121,6 +123,7 @@
if (cropRegionSetEntry.count == 1 &&
cropRegionSetEntry.data.u8[0] == ANDROID_SCALER_CROP_REGION_SET_TRUE) {
// crop regions set by client, doesn't need to be fixed.
+ ALOGV("%s: crop region set by client, doesn't need to be fixed", __FUNCTION__);
return;
}
camera_metadata_entry_t cropRegionEntry = request->find(ANDROID_SCALER_CROP_REGION);
diff --git a/services/camera/libcameraservice/device3/ZoomRatioMapper.cpp b/services/camera/libcameraservice/device3/ZoomRatioMapper.cpp
index 515259e..aaa1b70 100644
--- a/services/camera/libcameraservice/device3/ZoomRatioMapper.cpp
+++ b/services/camera/libcameraservice/device3/ZoomRatioMapper.cpp
@@ -153,9 +153,9 @@
return;
}
- bool isUltraHighResolutionSensor =
- camera3::SessionConfigurationUtils::isUltraHighResolutionSensor(*deviceInfo);
- if (isUltraHighResolutionSensor) {
+ bool supportsUltraHighResolutionCapture =
+ camera3::SessionConfigurationUtils::supportsUltraHighResolutionCapture(*deviceInfo);
+ if (supportsUltraHighResolutionCapture) {
if (!SessionConfigurationUtils::getArrayWidthAndHeight(deviceInfo,
ANDROID_SENSOR_INFO_PRE_CORRECTION_ACTIVE_ARRAY_SIZE_MAXIMUM_RESOLUTION,
&arrayMaximumResolutionW, &arrayMaximumResolutionH)) {
diff --git a/services/camera/libcameraservice/device3/aidl/AidlCamera3Device.cpp b/services/camera/libcameraservice/device3/aidl/AidlCamera3Device.cpp
index 30f6d18..3b1eba3 100644
--- a/services/camera/libcameraservice/device3/aidl/AidlCamera3Device.cpp
+++ b/services/camera/libcameraservice/device3/aidl/AidlCamera3Device.cpp
@@ -205,6 +205,7 @@
return res;
}
mSupportNativeZoomRatio = manager->supportNativeZoomRatio(mId.string());
+ mSupportNativeJpegR = manager->supportNativeJpegR(mId.string());
std::vector<std::string> physicalCameraIds;
bool isLogical = manager->isLogicalCamera(mId.string(), &physicalCameraIds);
@@ -213,7 +214,7 @@
// Do not override characteristics for physical cameras
res = manager->getCameraCharacteristics(
physicalId, /*overrideForPerfClass*/false, &mPhysicalDeviceInfoMap[physicalId],
- /*overrideToPortrait*/true);
+ mOverrideToPortrait);
if (res != OK) {
SET_ERR_L("Could not retrieve camera %s characteristics: %s (%d)",
physicalId.c_str(), strerror(-res), res);
@@ -238,7 +239,7 @@
&mPhysicalDeviceInfoMap[physicalId],
mSupportNativeZoomRatio, usePrecorrectArray);
- if (SessionConfigurationUtils::isUltraHighResolutionSensor(
+ if (SessionConfigurationUtils::supportsUltraHighResolutionCapture(
mPhysicalDeviceInfoMap[physicalId])) {
mUHRCropAndMeteringRegionMappers[physicalId] =
UHRCropAndMeteringRegionMapper(mPhysicalDeviceInfoMap[physicalId],
@@ -874,8 +875,9 @@
}
status_t AidlCamera3Device::AidlHalInterface::configureStreams(
- const camera_metadata_t *sessionParams,
- camera_stream_configuration *config, const std::vector<uint32_t>& bufferSizes) {
+ const camera_metadata_t *sessionParams,
+ camera_stream_configuration *config, const std::vector<uint32_t>& bufferSizes,
+ int64_t logId) {
using camera::device::StreamType;
using camera::device::StreamConfigurationMode;
@@ -960,6 +962,7 @@
requestedConfiguration.streamConfigCounter = mNextStreamConfigCounter++;
requestedConfiguration.multiResolutionInputImage = config->input_is_multi_resolution;
+ requestedConfiguration.logId = logId;
auto err = mAidlSession->configureStreams(requestedConfiguration, &finalConfiguration);
if (!err.isOk()) {
ALOGE("%s: Transaction error: %s", __FUNCTION__, err.getMessage());
diff --git a/services/camera/libcameraservice/device3/aidl/AidlCamera3Device.h b/services/camera/libcameraservice/device3/aidl/AidlCamera3Device.h
index e61f8f7..8ee5c63 100644
--- a/services/camera/libcameraservice/device3/aidl/AidlCamera3Device.h
+++ b/services/camera/libcameraservice/device3/aidl/AidlCamera3Device.h
@@ -101,7 +101,9 @@
virtual status_t configureStreams(const camera_metadata_t *sessionParams,
/*inout*/ camera_stream_configuration_t *config,
- const std::vector<uint32_t>& bufferSizes) override;
+ const std::vector<uint32_t>& bufferSizes,
+ int64_t logId) override;
+
// The injection camera configures the streams to hal.
virtual status_t configureInjectedStreams(
const camera_metadata_t* sessionParams,
diff --git a/services/camera/libcameraservice/device3/hidl/HidlCamera3Device.cpp b/services/camera/libcameraservice/device3/hidl/HidlCamera3Device.cpp
index 382b287..b367019 100644
--- a/services/camera/libcameraservice/device3/hidl/HidlCamera3Device.cpp
+++ b/services/camera/libcameraservice/device3/hidl/HidlCamera3Device.cpp
@@ -163,7 +163,7 @@
}
res = manager->getCameraCharacteristics(mId.string(), mOverrideForPerfClass, &mDeviceInfo,
- mOverrideToPortrait);
+ /*overrideToPortrait*/false);
if (res != OK) {
SET_ERR_L("Could not retrieve camera characteristics: %s (%d)", strerror(-res), res);
session->close();
@@ -178,7 +178,7 @@
// Do not override characteristics for physical cameras
res = manager->getCameraCharacteristics(
physicalId, /*overrideForPerfClass*/false, &mPhysicalDeviceInfoMap[physicalId],
- /*overrideToPortrait*/true);
+ /*overrideToPortrait*/false);
if (res != OK) {
SET_ERR_L("Could not retrieve camera %s characteristics: %s (%d)",
physicalId.c_str(), strerror(-res), res);
@@ -203,7 +203,7 @@
&mPhysicalDeviceInfoMap[physicalId],
mSupportNativeZoomRatio, usePrecorrectArray);
- if (SessionConfigurationUtils::isUltraHighResolutionSensor(
+ if (SessionConfigurationUtils::supportsUltraHighResolutionCapture(
mPhysicalDeviceInfoMap[physicalId])) {
mUHRCropAndMeteringRegionMappers[physicalId] =
UHRCropAndMeteringRegionMapper(mPhysicalDeviceInfoMap[physicalId],
@@ -880,7 +880,8 @@
status_t HidlCamera3Device::HidlHalInterface::configureStreams(
const camera_metadata_t *sessionParams,
- camera_stream_configuration *config, const std::vector<uint32_t>& bufferSizes) {
+ camera_stream_configuration *config, const std::vector<uint32_t>& bufferSizes,
+ int64_t /*logId*/) {
ATRACE_NAME("CameraHal::configureStreams");
if (!valid()) return INVALID_OPERATION;
status_t res = OK;
diff --git a/services/camera/libcameraservice/device3/hidl/HidlCamera3Device.h b/services/camera/libcameraservice/device3/hidl/HidlCamera3Device.h
index 15bd5ba..7b216b2 100644
--- a/services/camera/libcameraservice/device3/hidl/HidlCamera3Device.h
+++ b/services/camera/libcameraservice/device3/hidl/HidlCamera3Device.h
@@ -110,7 +110,8 @@
virtual status_t configureStreams(const camera_metadata_t *sessionParams,
/*inout*/ camera_stream_configuration_t *config,
- const std::vector<uint32_t>& bufferSizes) override;
+ const std::vector<uint32_t>& bufferSizes,
+ int64_t logId) override;
// The injection camera configures the streams to hal.
virtual status_t configureInjectedStreams(
diff --git a/services/camera/libcameraservice/hidl/HidlCameraDeviceUser.cpp b/services/camera/libcameraservice/hidl/HidlCameraDeviceUser.cpp
index 26e813a..0f7f127 100644
--- a/services/camera/libcameraservice/hidl/HidlCameraDeviceUser.cpp
+++ b/services/camera/libcameraservice/hidl/HidlCameraDeviceUser.cpp
@@ -19,10 +19,12 @@
#include <gui/Surface.h>
#include <gui/bufferqueue/1.0/H2BGraphicBufferProducer.h>
+#include <aidl/AidlUtils.h>
#include <hidl/AidlCameraDeviceCallbacks.h>
#include <hidl/HidlCameraDeviceUser.h>
#include <hidl/Utils.h>
#include <android/hardware/camera/device/3.2/types.h>
+#include <android-base/properties.h>
namespace android {
namespace frameworks {
@@ -31,6 +33,7 @@
namespace V2_1 {
namespace implementation {
+using hardware::cameraservice::utils::conversion::aidl::filterVndkKeys;
using hardware::cameraservice::utils::conversion::convertToHidl;
using hardware::cameraservice::utils::conversion::convertFromHidl;
using hardware::cameraservice::utils::conversion::B2HStatus;
@@ -55,6 +58,7 @@
const sp<hardware::camera2::ICameraDeviceUser> &deviceRemote)
: mDeviceRemote(deviceRemote) {
mInitSuccess = initDevice();
+ mVndkVersion = base::GetIntProperty("ro.vndk.version", __ANDROID_API_FUTURE__);
}
bool HidlCameraDeviceUser::initDevice() {
@@ -235,8 +239,16 @@
android::CameraMetadata cameraMetadata;
binder::Status ret = mDeviceRemote->createDefaultRequest(convertFromHidl(templateId),
&cameraMetadata);
- HStatus hStatus = B2HStatus(ret);
+
HCameraMetadata hidlMetadata;
+ if (filterVndkKeys(mVndkVersion, cameraMetadata, /*isStatic*/false) != OK) {
+ ALOGE("%s: Unable to filter vndk metadata keys for version %d",
+ __FUNCTION__, mVndkVersion);
+ _hidl_cb(HStatus::UNKNOWN_ERROR, hidlMetadata);
+ return Void();
+ }
+
+ HStatus hStatus = B2HStatus(ret);
const camera_metadata_t *rawMetadata = cameraMetadata.getAndLock();
convertToHidl(rawMetadata, &hidlMetadata);
_hidl_cb(hStatus, hidlMetadata);
diff --git a/services/camera/libcameraservice/hidl/HidlCameraDeviceUser.h b/services/camera/libcameraservice/hidl/HidlCameraDeviceUser.h
index 0e2ab3d..a653ca2 100644
--- a/services/camera/libcameraservice/hidl/HidlCameraDeviceUser.h
+++ b/services/camera/libcameraservice/hidl/HidlCameraDeviceUser.h
@@ -127,6 +127,7 @@
std::shared_ptr<CaptureResultMetadataQueue> mCaptureResultMetadataQueue = nullptr;
bool mInitSuccess = false;
int32_t mRequestId = REQUEST_ID_NONE;
+ int mVndkVersion = -1;
};
} // implementation
diff --git a/services/camera/libcameraservice/hidl/HidlCameraService.cpp b/services/camera/libcameraservice/hidl/HidlCameraService.cpp
index 1d5213d..d6910fe 100644
--- a/services/camera/libcameraservice/hidl/HidlCameraService.cpp
+++ b/services/camera/libcameraservice/hidl/HidlCameraService.cpp
@@ -66,7 +66,7 @@
HStatus status = HStatus::NO_ERROR;
binder::Status serviceRet =
mAidlICameraService->getCameraCharacteristics(String16(cameraId.c_str()),
- /*targetSdkVersion*/__ANDROID_API_FUTURE__, /*overrideToPortrait*/true,
+ /*targetSdkVersion*/__ANDROID_API_FUTURE__, /*overrideToPortrait*/false,
&cameraMetadata);
HCameraMetadata hidlMetadata;
if (!serviceRet.isOk()) {
@@ -118,7 +118,7 @@
binder::Status serviceRet = mAidlICameraService->connectDevice(
callbacks, String16(cameraId.c_str()), String16(""), {},
hardware::ICameraService::USE_CALLING_UID, 0/*oomScoreOffset*/,
- /*targetSdkVersion*/__ANDROID_API_FUTURE__, /*overrideToPortrait*/true,
+ /*targetSdkVersion*/__ANDROID_API_FUTURE__, /*overrideToPortrait*/false,
/*out*/&deviceRemote);
HStatus status = HStatus::NO_ERROR;
if (!serviceRet.isOk()) {
diff --git a/services/camera/libcameraservice/libcameraservice_fuzzer/camera_service_fuzzer.cpp b/services/camera/libcameraservice/libcameraservice_fuzzer/camera_service_fuzzer.cpp
index 072fcfb..b397573 100644
--- a/services/camera/libcameraservice/libcameraservice_fuzzer/camera_service_fuzzer.cpp
+++ b/services/camera/libcameraservice/libcameraservice_fuzzer/camera_service_fuzzer.cpp
@@ -346,7 +346,8 @@
android::CameraService::USE_CALLING_UID,
android::CameraService::USE_CALLING_PID,
/*targetSdkVersion*/ __ANDROID_API_FUTURE__,
- /*overrideToPortrait*/true, &cameraDevice);
+ /*overrideToPortrait*/true, /*forceSlowJpegMode*/false,
+ &cameraDevice);
if (!rc.isOk()) {
// camera not connected
return;
diff --git a/services/camera/libcameraservice/tests/CameraProviderManagerTest.cpp b/services/camera/libcameraservice/tests/CameraProviderManagerTest.cpp
index 2f55def..1a6b2e0 100644
--- a/services/camera/libcameraservice/tests/CameraProviderManagerTest.cpp
+++ b/services/camera/libcameraservice/tests/CameraProviderManagerTest.cpp
@@ -185,9 +185,8 @@
using getCameraDeviceInterface_V1_x_cb = std::function<void(Status status,
const sp<device::V1_0::ICameraDevice>& device)>;
virtual hardware::Return<void> getCameraDeviceInterface_V1_x(
- const hardware::hidl_string& cameraDeviceName,
+ [[maybe_unused]] const hardware::hidl_string& cameraDeviceName,
getCameraDeviceInterface_V1_x_cb _hidl_cb) override {
- (void) cameraDeviceName;
_hidl_cb(Status::OK, nullptr); //TODO: impl. of ver. 1.0 device interface
// otherwise enumeration will fail.
return hardware::Void();
@@ -261,9 +260,8 @@
virtual ~TestInteractionProxy() {}
virtual bool registerForNotifications(
- const std::string &serviceName,
+ [[maybe_unused]] const std::string &serviceName,
const sp<hidl::manager::V1_0::IServiceNotification> ¬ification) override {
- (void) serviceName;
mManagerNotificationInterface = notification;
return true;
}
diff --git a/services/camera/libcameraservice/tests/DistortionMapperTest.cpp b/services/camera/libcameraservice/tests/DistortionMapperTest.cpp
index 8331136..b367571 100644
--- a/services/camera/libcameraservice/tests/DistortionMapperTest.cpp
+++ b/services/camera/libcameraservice/tests/DistortionMapperTest.cpp
@@ -355,8 +355,6 @@
#include "DistortionMapperTest_OpenCvData.h"
TEST(DistortionMapperTest, CompareToOpenCV) {
- status_t res;
-
float bigDistortion[] = {0.1, -0.003, 0.004, 0.02, 0.01};
// Expect to match within sqrt(2) radius pixels
@@ -370,7 +368,7 @@
using namespace openCvData;
DistortionMapperInfo *mapperInfo = m.getMapperInfo();
- res = m.mapRawToCorrected(rawCoords.data(), rawCoords.size() / 2, mapperInfo, /*clamp*/false,
+ m.mapRawToCorrected(rawCoords.data(), rawCoords.size() / 2, mapperInfo, /*clamp*/false,
/*simple*/false);
for (size_t i = 0; i < rawCoords.size(); i+=2) {
diff --git a/services/camera/libcameraservice/utils/CameraServiceProxyWrapper.cpp b/services/camera/libcameraservice/utils/CameraServiceProxyWrapper.cpp
index 7aaf6b2..4225366 100644
--- a/services/camera/libcameraservice/utils/CameraServiceProxyWrapper.cpp
+++ b/services/camera/libcameraservice/utils/CameraServiceProxyWrapper.cpp
@@ -101,6 +101,11 @@
mSessionStats.mStreamStats.clear();
}
+int64_t CameraServiceProxyWrapper::CameraSessionStatsWrapper::getLogId() {
+ Mutex::Autolock l(mLock);
+ return mSessionStats.mLogId;
+}
+
/**
* CameraServiceProxyWrapper functions
*/
@@ -248,9 +253,12 @@
apiLevel = CameraSessionStats::CAMERA_API_LEVEL_2;
}
- sessionStats = std::make_shared<CameraSessionStatsWrapper>(String16(id), facing,
- CameraSessionStats::CAMERA_STATE_OPEN, clientPackageName,
- apiLevel, isNdk, latencyMs);
+ // Generate a new log ID for open events
+ int64_t logId = generateLogId(mRandomDevice);
+
+ sessionStats = std::make_shared<CameraSessionStatsWrapper>(
+ String16(id), facing, CameraSessionStats::CAMERA_STATE_OPEN, clientPackageName,
+ apiLevel, isNdk, latencyMs, logId);
mSessionStatsMap.emplace(id, sessionStats);
ALOGV("%s: Adding id %s", __FUNCTION__, id.c_str());
}
@@ -300,4 +308,31 @@
return ret;
}
-}; // namespace android
+int64_t CameraServiceProxyWrapper::getCurrentLogIdForCamera(const String8& cameraId) {
+ std::shared_ptr<CameraSessionStatsWrapper> stats;
+ {
+ Mutex::Autolock _l(mLock);
+ if (mSessionStatsMap.count(cameraId) == 0) {
+ ALOGE("%s: SessionStatsMap should contain camera %s before asking for its logging ID.",
+ __FUNCTION__, cameraId.c_str());
+ return 0;
+ }
+
+ stats = mSessionStatsMap[cameraId];
+ }
+ return stats->getLogId();
+}
+
+int64_t CameraServiceProxyWrapper::generateLogId(std::random_device& randomDevice) {
+ int64_t ret = 0;
+ do {
+ // std::random_device generates 32 bits per call, so we call it twice
+ ret = randomDevice();
+ ret = ret << 32;
+ ret = ret | randomDevice();
+ } while (ret == 0); // 0 is not a valid identifier
+
+ return ret;
+}
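+// For reference, the loop above is equivalent to retrying
+//   id = (int64_t(randomDevice()) << 32) | randomDevice();
+// until id != 0, yielding a random non-zero 64-bit identifier.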
+
+} // namespace android
diff --git a/services/camera/libcameraservice/utils/CameraServiceProxyWrapper.h b/services/camera/libcameraservice/utils/CameraServiceProxyWrapper.h
index f90a841..d47c738 100644
--- a/services/camera/libcameraservice/utils/CameraServiceProxyWrapper.h
+++ b/services/camera/libcameraservice/utils/CameraServiceProxyWrapper.h
@@ -24,6 +24,7 @@
#include <utils/String16.h>
#include <utils/StrongPointer.h>
#include <utils/Timers.h>
+#include <random>
#include <camera/CameraSessionStats.h>
@@ -37,7 +38,7 @@
sp<hardware::ICameraServiceProxy> mCameraServiceProxy;
class CameraSessionStatsWrapper {
- private:
+ private:
hardware::CameraSessionStats mSessionStats;
Mutex mLock; // lock for per camera session stats
@@ -47,11 +48,12 @@
*/
void updateProxyDeviceState(sp<hardware::ICameraServiceProxy>& proxyBinder);
- public:
+ public:
CameraSessionStatsWrapper(const String16& cameraId, int facing, int newCameraState,
- const String16& clientName, int apiLevel, bool isNdk, int32_t latencyMs) :
- mSessionStats(cameraId, facing, newCameraState, clientName, apiLevel, isNdk, latencyMs)
- { }
+ const String16& clientName, int apiLevel, bool isNdk,
+ int32_t latencyMs, int64_t logId)
+ : mSessionStats(cameraId, facing, newCameraState, clientName, apiLevel, isNdk,
+ latencyMs, logId) {}
void onOpen(sp<hardware::ICameraServiceProxy>& proxyBinder);
void onClose(sp<hardware::ICameraServiceProxy>& proxyBinder, int32_t latencyMs,
@@ -62,6 +64,9 @@
int64_t requestCount, int64_t resultErrorCount, bool deviceError,
const std::string& userTag, int32_t videoStabilizationMode,
const std::vector<hardware::CameraStreamStats>& streamStats);
+
+ // Returns the logId associated with this event.
+ int64_t getLogId();
};
// Lock for camera session stats map
@@ -69,8 +74,15 @@
// Map from camera id to the camera's session statistics
std::map<String8, std::shared_ptr<CameraSessionStatsWrapper>> mSessionStatsMap;
+ std::random_device mRandomDevice; // pulls 32-bit random numbers from /dev/urandom
+
sp<hardware::ICameraServiceProxy> getCameraServiceProxy();
+ // Returns a randomly generated ID that is suitable for logging the event. A new identifier
+ // should only be generated for an open event. All other events for the cameraId should use the
+ // ID generated for the open event associated with them.
+ static int64_t generateLogId(std::random_device& randomDevice);
+
public:
CameraServiceProxyWrapper(sp<hardware::ICameraServiceProxy> serviceProxy = nullptr) :
mCameraServiceProxy(serviceProxy)
@@ -110,6 +122,11 @@
// Detect if the camera is disabled by device policy.
bool isCameraDisabled(int userId);
+
+ // Returns the logId currently associated with the given cameraId. See 'mLogId' in
+ // frameworks/av/camera/include/camera/CameraSessionStats.h for more details about this
+ // identifier. Returns a non-0 value on success.
+ int64_t getCurrentLogIdForCamera(const String8& cameraId);
};
} // android
diff --git a/services/camera/libcameraservice/utils/SessionConfigurationUtils.cpp b/services/camera/libcameraservice/utils/SessionConfigurationUtils.cpp
index f786b79..e25f972 100644
--- a/services/camera/libcameraservice/utils/SessionConfigurationUtils.cpp
+++ b/services/camera/libcameraservice/utils/SessionConfigurationUtils.cpp
@@ -161,8 +161,13 @@
getAppropriateModeTag(ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS, maxResolution);
const int32_t heicSizesTag =
getAppropriateModeTag(ANDROID_HEIC_AVAILABLE_HEIC_STREAM_CONFIGURATIONS, maxResolution);
+ const int32_t jpegRSizesTag = getAppropriateModeTag(
+ ANDROID_JPEGR_AVAILABLE_JPEG_R_STREAM_CONFIGURATIONS, maxResolution);
+ bool isJpegRDataSpace = (dataSpace == static_cast<android_dataspace_t>(
+ ::aidl::android::hardware::graphics::common::Dataspace::JPEG_R));
camera_metadata_ro_entry streamConfigs =
+ (isJpegRDataSpace) ? info.find(jpegRSizesTag) :
(dataSpace == HAL_DATASPACE_DEPTH) ? info.find(depthSizesTag) :
(dataSpace == static_cast<android_dataspace>(HAL_DATASPACE_HEIF)) ?
info.find(heicSizesTag) :
@@ -196,6 +201,8 @@
if (bestWidth == -1) {
// Return false if no configurations for this format were listed
+ ALOGE("%s: No configurations for format %d width %d, height %d, maxResolution ? %s",
+ __FUNCTION__, format, width, height, maxResolution ? "true" : "false");
return false;
}
@@ -378,6 +385,23 @@
}
}
+bool dataSpaceFromColorSpace(android_dataspace *dataSpace, int32_t colorSpace) {
+ switch (colorSpace) {
+ case ANDROID_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP_SRGB:
+ *dataSpace = HAL_DATASPACE_V0_SRGB;
+ return true;
+ case ANDROID_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP_DISPLAY_P3:
+ *dataSpace = HAL_DATASPACE_DISPLAY_P3;
+ return true;
+ case ANDROID_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP_BT2020_HLG:
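+            // BT2020_HLG is assigned through an int32_t alias, presumably because the constant
+            // is not a named enumerator of the legacy android_dataspace type and a direct enum
+            // assignment would not compile cleanly.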
+ *(reinterpret_cast<int32_t*>(dataSpace)) = HAL_DATASPACE_BT2020_HLG;
+ return true;
+ default:
+ ALOGE("%s: Unsupported color space %d", __FUNCTION__, colorSpace);
+ return false;
+ }
+}
+
bool isStreamUseCaseSupported(int64_t streamUseCase,
const CameraMetadata &deviceInfo) {
camera_metadata_ro_entry_t availableStreamUseCases =
@@ -470,6 +494,16 @@
return STATUS_ERROR(CameraService::ERROR_INVALID_OPERATION, msg.string());
}
+ if (colorSpace != ANDROID_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP_UNSPECIFIED &&
+ format != HAL_PIXEL_FORMAT_BLOB) {
+ if (!dataSpaceFromColorSpace(&dataSpace, colorSpace)) {
+ String8 msg = String8::format("Camera %s: color space %d not supported, failed to "
+ "convert to data space", logicalCameraId.string(), colorSpace);
+ ALOGE("%s: %s", __FUNCTION__, msg.string());
+ return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT, msg.string());
+ }
+ }
+
// FIXME: remove this override since the default format should be
// IMPLEMENTATION_DEFINED. b/9487482 & b/35317944
if ((format >= HAL_PIXEL_FORMAT_RGBA_8888 && format <= HAL_PIXEL_FORMAT_BGRA_8888) &&
@@ -481,7 +515,7 @@
}
std::unordered_set<int32_t> overriddenSensorPixelModes;
if (checkAndOverrideSensorPixelModesUsed(sensorPixelModesUsed, format, width, height,
- physicalCameraMetadata, flexibleConsumer, &overriddenSensorPixelModes) != OK) {
+ physicalCameraMetadata, &overriddenSensorPixelModes) != OK) {
String8 msg = String8::format("Camera %s: sensor pixel modes for stream with "
"format %#x are not valid",logicalCameraId.string(), format);
ALOGE("%s: %s", __FUNCTION__, msg.string());
@@ -643,7 +677,7 @@
binder::Status
convertToHALStreamCombination(
const SessionConfiguration& sessionConfiguration,
- const String8 &logicalCameraId, const CameraMetadata &deviceInfo,
+ const String8 &logicalCameraId, const CameraMetadata &deviceInfo, bool supportNativeJpegR,
metadataGetter getMetadata, const std::vector<std::string> &physicalCameraIds,
aidl::android::hardware::camera::device::StreamConfiguration &streamConfiguration,
bool overrideForPerfClass, bool *earlyExit) {
@@ -755,7 +789,7 @@
streamInfo.dynamicRangeProfile = it.getDynamicRangeProfile();
if (checkAndOverrideSensorPixelModesUsed(sensorPixelModesUsed,
streamInfo.format, streamInfo.width,
- streamInfo.height, metadataChosen, false /*flexibleConsumer*/,
+ streamInfo.height, metadataChosen,
&streamInfo.sensorPixelModesUsed) != OK) {
ALOGE("%s: Deferred surface sensor pixel modes not valid",
__FUNCTION__);
@@ -787,7 +821,8 @@
bool isHeicCompositeStream =
camera3::HeicCompositeStream::isHeicCompositeStream(surface);
bool isJpegRCompositeStream =
- camera3::JpegRCompositeStream::isJpegRCompositeStream(surface);
+ camera3::JpegRCompositeStream::isJpegRCompositeStream(surface) &&
+ !supportNativeJpegR;
if (isDepthCompositeStream || isHeicCompositeStream || isJpegRCompositeStream) {
// We need to take in to account that composite streams can have
// additional internal camera streams.
@@ -932,15 +967,17 @@
status_t checkAndOverrideSensorPixelModesUsed(
const std::vector<int32_t> &sensorPixelModesUsed, int format, int width, int height,
- const CameraMetadata &staticInfo, bool flexibleConsumer,
+ const CameraMetadata &staticInfo,
std::unordered_set<int32_t> *overriddenSensorPixelModesUsed) {
const std::unordered_set<int32_t> &sensorPixelModesUsedSet =
convertToSet(sensorPixelModesUsed);
- if (!isUltraHighResolutionSensor(staticInfo)) {
+ if (!supportsUltraHighResolutionCapture(staticInfo)) {
if (sensorPixelModesUsedSet.find(ANDROID_SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION) !=
sensorPixelModesUsedSet.end()) {
// invalid value for non ultra high res sensors
+ ALOGE("%s ANDROID_SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION used on a device which doesn't "
+ "support ultra high resolution capture", __FUNCTION__);
return BAD_VALUE;
}
overriddenSensorPixelModesUsed->clear();
@@ -961,35 +998,40 @@
// Case 1: The client has not changed the sensor mode defaults. In this case, we check if the
// size + format of the OutputConfiguration is found exclusively in 1.
// If yes, add that sensorPixelMode to overriddenSensorPixelModes.
- // If no, add 'DEFAULT' to sensorPixelMode. This maintains backwards
- // compatibility.
+ // If no, add 'DEFAULT' and MAXIMUM_RESOLUTION to overriddenSensorPixelModes.
+ // This maintains backwards compatibility and also tells the framework the stream
+ // might be used in either sensor pixel mode.
if (sensorPixelModesUsedSet.size() == 0) {
- // Ambiguous case, default to only 'DEFAULT' mode.
+ // Ambiguous case, override to include both cases.
if (isInDefaultStreamConfigurationMap && isInMaximumResolutionStreamConfigurationMap) {
overriddenSensorPixelModesUsed->insert(ANDROID_SENSOR_PIXEL_MODE_DEFAULT);
- return OK;
- }
- // We don't allow flexible consumer for max resolution mode.
- if (isInMaximumResolutionStreamConfigurationMap) {
overriddenSensorPixelModesUsed->insert(ANDROID_SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION);
return OK;
}
- if (isInDefaultStreamConfigurationMap || (flexibleConsumer && width < ROUNDING_WIDTH_CAP)) {
+ if (isInMaximumResolutionStreamConfigurationMap) {
+ overriddenSensorPixelModesUsed->insert(
+ ANDROID_SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION);
+ } else {
overriddenSensorPixelModesUsed->insert(ANDROID_SENSOR_PIXEL_MODE_DEFAULT);
- return OK;
}
- return BAD_VALUE;
+ return OK;
}
// Case2: The app has set sensorPixelModesUsed, we need to verify that they
// are valid / err out.
if (sensorPixelModesUsedSet.find(ANDROID_SENSOR_PIXEL_MODE_DEFAULT) !=
sensorPixelModesUsedSet.end() && !isInDefaultStreamConfigurationMap) {
+ ALOGE("%s: ANDROID_SENSOR_PIXEL_MODE_DEFAULT set by client, but stream f: %d size %d x %d"
+ " isn't present in default stream configuration map", __FUNCTION__, format, width,
+ height);
return BAD_VALUE;
}
if (sensorPixelModesUsedSet.find(ANDROID_SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION) !=
sensorPixelModesUsedSet.end() && !isInMaximumResolutionStreamConfigurationMap) {
+ ALOGE("%s: ANDROID_SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION set by client, but stream f: "
+ "%d size %d x %d isn't present in default stream configuration map", __FUNCTION__,
+ format, width, height);
return BAD_VALUE;
}
*overriddenSensorPixelModesUsed = sensorPixelModesUsedSet;
diff --git a/services/camera/libcameraservice/utils/SessionConfigurationUtils.h b/services/camera/libcameraservice/utils/SessionConfigurationUtils.h
index b5654ac..d27144e 100644
--- a/services/camera/libcameraservice/utils/SessionConfigurationUtils.h
+++ b/services/camera/libcameraservice/utils/SessionConfigurationUtils.h
@@ -115,6 +115,8 @@
bool isColorSpaceSupported(int32_t colorSpace, int32_t format, android_dataspace dataSpace,
int64_t dynamicRangeProfile, const CameraMetadata& staticMeta);
+bool dataSpaceFromColorSpace(android_dataspace *dataSpace, int32_t colorSpace);
+
bool isStreamUseCaseSupported(int64_t streamUseCase, const CameraMetadata &deviceInfo);
void mapStreamInfo(const OutputStreamInfo &streamInfo,
@@ -136,7 +138,7 @@
binder::Status
convertToHALStreamCombination(
const SessionConfiguration& sessionConfiguration,
- const String8 &logicalCameraId, const CameraMetadata &deviceInfo,
+ const String8 &logicalCameraId, const CameraMetadata &deviceInfo, bool supportNativeJpegR,
metadataGetter getMetadata, const std::vector<std::string> &physicalCameraIds,
aidl::android::hardware::camera::device::StreamConfiguration &streamConfiguration,
bool overrideForPerfClass, bool *earlyExit);
@@ -145,7 +147,7 @@
status_t checkAndOverrideSensorPixelModesUsed(
const std::vector<int32_t> &sensorPixelModesUsed, int format, int width, int height,
- const CameraMetadata &staticInfo, bool flexibleConsumer,
+ const CameraMetadata &staticInfo,
std::unordered_set<int32_t> *overriddenSensorPixelModesUsed);
bool targetPerfClassPrimaryCamera(
diff --git a/services/camera/libcameraservice/utils/SessionConfigurationUtilsHidl.cpp b/services/camera/libcameraservice/utils/SessionConfigurationUtilsHidl.cpp
index 5444f2a..d960024 100644
--- a/services/camera/libcameraservice/utils/SessionConfigurationUtilsHidl.cpp
+++ b/services/camera/libcameraservice/utils/SessionConfigurationUtilsHidl.cpp
@@ -111,8 +111,8 @@
bool overrideForPerfClass, bool *earlyExit) {
aidl::android::hardware::camera::device::StreamConfiguration aidlStreamConfiguration;
auto ret = convertToHALStreamCombination(sessionConfiguration, logicalCameraId, deviceInfo,
- getMetadata, physicalCameraIds, aidlStreamConfiguration, overrideForPerfClass,
- earlyExit);
+ false /*supportNativeJpegR*/, getMetadata, physicalCameraIds, aidlStreamConfiguration,
+ overrideForPerfClass, earlyExit);
if (!ret.isOk()) {
return ret;
}
diff --git a/services/camera/libcameraservice/utils/SessionConfigurationUtilsHost.cpp b/services/camera/libcameraservice/utils/SessionConfigurationUtilsHost.cpp
index 28a22e1..7d344f8 100644
--- a/services/camera/libcameraservice/utils/SessionConfigurationUtilsHost.cpp
+++ b/services/camera/libcameraservice/utils/SessionConfigurationUtilsHost.cpp
@@ -73,7 +73,62 @@
return -1;
}
-bool isUltraHighResolutionSensor(const CameraMetadata &deviceInfo) {
+static bool isKeyPresentWithCount(const CameraMetadata &deviceInfo, uint32_t tag, uint32_t count) {
+ auto countFound = deviceInfo.find(tag).count;
+ return (countFound != 0) && (countFound % count == 0);
+}
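+// Example: size tags such as ANDROID_SENSOR_INFO_PIXEL_ARRAY_SIZE_MAXIMUM_RESOLUTION store
+// width/height pairs and are checked with count == 2, while rectangle and stream configuration
+// tags store 4-tuples and are checked with count == 4; the "% count == 0" check accepts any
+// whole number of such tuples (stream configuration lists contain many 4-tuples).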
+
+static bool supportsKeysForBasicUltraHighResolutionCapture(const CameraMetadata &deviceInfo) {
+ // Check whether the following conditions are satisfied for reduced ultra high
+ // resolution support :
+ // 1) SENSOR_PIXEL_MODE is advertised in ANDROID_REQUEST_AVAILABLE_REQUEST_KEYS
+ // 2) The following keys are present in CameraCharacteristics for basic functionality
+ // a) ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_MAXIMUM_RESOLUTION
+ // b) ANDROID_SCALER_AVAILABLE_MIN_FRAME_DURATIONS_MAXIMUM_RESOLUTION
+ // c) ANDROID_SCALER_AVAILABLE_STALL_DURATIONS_MAXIMUM_RESOLUTION
+ // d) ANDROID_SENSOR_INFO_ACTIVE_ARRAY_SIZE_MAXIMUM_RESOLUTION
+ // e) ANDROID_SENSOR_INFO_PRE_CORRECTION_ACTIVE_ARRAY_SIZE_MAXIMUM_RESOLUTION
+ // f) ANDROID_SENSOR_INFO_PIXEL_ARRAY_SIZE_MAXIMUM_RESOLUTION
+ camera_metadata_ro_entry_t entryChar;
+ entryChar = deviceInfo.find(ANDROID_REQUEST_AVAILABLE_REQUEST_KEYS);
+ bool supportsSensorPixelMode = false;
+ for (size_t i = 0; i < entryChar.count; i++) {
+ int32_t key = entryChar.data.i32[i];
+ if (key == ANDROID_SENSOR_PIXEL_MODE) {
+ supportsSensorPixelMode = true;
+ break;
+ }
+ }
+ if (!supportsSensorPixelMode) {
+ return false;
+ }
+
+ // Basic sensor array size information tags are present
+ if (!isKeyPresentWithCount(deviceInfo, ANDROID_SENSOR_INFO_PIXEL_ARRAY_SIZE_MAXIMUM_RESOLUTION,
+ /*count*/2) ||
+ !isKeyPresentWithCount(deviceInfo,
+ ANDROID_SENSOR_INFO_PRE_CORRECTION_ACTIVE_ARRAY_SIZE_MAXIMUM_RESOLUTION,
+ /*count*/4) ||
+ !isKeyPresentWithCount(deviceInfo,
+ ANDROID_SENSOR_INFO_ACTIVE_ARRAY_SIZE_MAXIMUM_RESOLUTION, /*count*/4) ||
+ !isKeyPresentWithCount(deviceInfo, ANDROID_SENSOR_INFO_BINNING_FACTOR, /*count*/2)) {
+ return false;
+ }
+
+ // Basic stream configuration tags are present
+ if (!isKeyPresentWithCount(deviceInfo,
+ ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_MAXIMUM_RESOLUTION, /*count*/4) ||
+ !isKeyPresentWithCount(deviceInfo,
+ ANDROID_SCALER_AVAILABLE_MIN_FRAME_DURATIONS_MAXIMUM_RESOLUTION, /*count*/4) ||
+ !isKeyPresentWithCount(deviceInfo,
+ ANDROID_SCALER_AVAILABLE_STALL_DURATIONS_MAXIMUM_RESOLUTION, /*count*/ 4)) {
+ return false;
+ }
+
+ return true;
+}
+
+bool supportsUltraHighResolutionCapture(const CameraMetadata &deviceInfo) {
camera_metadata_ro_entry_t entryCap;
entryCap = deviceInfo.find(ANDROID_REQUEST_AVAILABLE_CAPABILITIES);
// Go through the capabilities and check if it has
@@ -84,7 +139,10 @@
return true;
}
}
- return false;
+
+    // If not, then check that the keys which guarantee basic support for
+    // ultra high resolution capture are present.
+ return supportsKeysForBasicUltraHighResolutionCapture(deviceInfo);
}
bool getArrayWidthAndHeight(const CameraMetadata *deviceInfo,
diff --git a/services/camera/libcameraservice/utils/SessionConfigurationUtilsHost.h b/services/camera/libcameraservice/utils/SessionConfigurationUtilsHost.h
index 45b1e91..dac1824 100644
--- a/services/camera/libcameraservice/utils/SessionConfigurationUtilsHost.h
+++ b/services/camera/libcameraservice/utils/SessionConfigurationUtilsHost.h
@@ -22,7 +22,7 @@
namespace camera3 {
namespace SessionConfigurationUtils {
-bool isUltraHighResolutionSensor(const CameraMetadata &deviceInfo);
+bool supportsUltraHighResolutionCapture(const CameraMetadata &deviceInfo);
int32_t getAppropriateModeTag(int32_t defaultTag, bool maxResolution = false);
@@ -33,4 +33,4 @@
} // camera3
} // android
-#endif
\ No newline at end of file
+#endif
diff --git a/services/medialog/fuzzer/Android.bp b/services/medialog/fuzzer/Android.bp
index 9ff0ce4..c96c37b 100644
--- a/services/medialog/fuzzer/Android.bp
+++ b/services/medialog/fuzzer/Android.bp
@@ -38,5 +38,13 @@
"android-media-fuzzing-reports@google.com",
],
componentid: 155276,
+ hotlists: [
+ "4593311",
+ ],
+ description: "The fuzzer targets the APIs of libmedialogservice library",
+ vector: "local_privileges_required",
+ service_privilege: "constrained",
+ users: "multi_user",
+ fuzzed_code_usage: "future_version",
},
}
diff --git a/services/mediametrics/fuzzer/Android.bp b/services/mediametrics/fuzzer/Android.bp
index 8b33f10..20a6378 100644
--- a/services/mediametrics/fuzzer/Android.bp
+++ b/services/mediametrics/fuzzer/Android.bp
@@ -68,5 +68,13 @@
"android-media-fuzzing-reports@google.com",
],
componentid: 155276,
+ hotlists: [
+ "4593311",
+ ],
+ description: "The fuzzer targets the APIs of libmediametricsservice",
+ vector: "local_no_privileges_required",
+ service_privilege: "constrained",
+ users: "multi_user",
+ fuzzed_code_usage: "shipped",
},
}
diff --git a/services/mediametrics/include/mediametricsservice/TimeMachine.h b/services/mediametrics/include/mediametricsservice/TimeMachine.h
index ce579b3..1445c7c 100644
--- a/services/mediametrics/include/mediametricsservice/TimeMachine.h
+++ b/services/mediametrics/include/mediametricsservice/TimeMachine.h
@@ -143,6 +143,7 @@
if (mPropertyMap.size() >= kKeyMaxProperties &&
!mPropertyMap.count(property)) {
ALOGV("%s: too many properties, rejecting %s", __func__, property.c_str());
+ mRejectedPropertiesCount++;
return;
}
auto& timeSequence = mPropertyMap[property];
@@ -172,6 +173,10 @@
ss << s;
}
}
+ if (ll > 0 && mRejectedPropertiesCount > 0) {
+ ss << "Rejected properties: " << mRejectedPropertiesCount << "\n";
+ ll--;
+ }
return { ss.str(), lines - ll };
}
@@ -214,6 +219,7 @@
const uid_t mAllowUid;
const int64_t mCreationTime;
+ unsigned int mRejectedPropertiesCount = 0;
int64_t mLastModificationTime;
std::map<std::string /* property */, PropertyHistory> mPropertyMap;
};
@@ -221,7 +227,7 @@
using History = std::map<std::string /* key */, std::shared_ptr<KeyHistory>>;
static inline constexpr size_t kTimeSequenceMaxElements = 50;
- static inline constexpr size_t kKeyMaxProperties = 50;
+ static inline constexpr size_t kKeyMaxProperties = 128;
static inline constexpr size_t kKeyLowWaterMark = 400;
static inline constexpr size_t kKeyHighWaterMark = 500;
diff --git a/services/mediametrics/statsd_codec.cpp b/services/mediametrics/statsd_codec.cpp
index c5957e9..cb5e783 100644
--- a/services/mediametrics/statsd_codec.cpp
+++ b/services/mediametrics/statsd_codec.cpp
@@ -444,6 +444,12 @@
}
AStatsEvent_writeInt32(event, hdrFormat);
+ int64_t codecId = 0;
+ if (item->getInt64("android.media.mediacodec.id", &codecId)) {
+ metrics_proto.set_codec_id(codecId);
+ }
+ AStatsEvent_writeInt64(event, codecId);
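+    // When the key is absent, codecId stays 0; the value is still written unconditionally so
+    // the positional field order of the stats event is preserved.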
+
int err = AStatsEvent_write(event);
if (err < 0) {
ALOGE("Failed to write codec metrics to statsd (%d)", err);
diff --git a/services/mediametrics/statsd_drm.cpp b/services/mediametrics/statsd_drm.cpp
index 9f08eca..e5f7190 100644
--- a/services/mediametrics/statsd_drm.cpp
+++ b/services/mediametrics/statsd_drm.cpp
@@ -281,9 +281,9 @@
const int32_t uid = IPCThreadState::self()->getCallingUid();
int32_t frontend = 0;
if (!item->getInt32("frontend", &frontend)) return false;
- int32_t requested_security_level = -1;
+ int32_t requested_security_level = 0;
if (!item->getInt32("requested_security_level", &requested_security_level)) return false;
- int32_t opened_security_level = -1;
+ int32_t opened_security_level = 0;
if (!item->getInt32("opened_security_level", &opened_security_level)) return false;
// Optional to be included
@@ -325,12 +325,10 @@
if (!item->getInt32("frontend", &frontend)) return false;
std::string object_nonce = "";
if (!item->getString("object_nonce", &object_nonce)) return false;
- int32_t security_level = -1;
- if (!item->getInt32("security_level", &security_level)) return false;
std::string api_str = "";
if (!item->getString("api", &api_str)) return false;
const int32_t api = MediaDrmStatsdHelper::findDrmApi(api_str);
- int32_t error_code = -1;
+ int32_t error_code = 0;
if (!item->getInt32("error_code", &error_code)) return false;
// Optional to be included
@@ -338,12 +336,14 @@
item->getString("version", &version);
std::string session_nonce = "";
item->getString("session_nonce", &session_nonce);
+ int32_t security_level = 0;
+ item->getInt32("security_level", &security_level);
int32_t cdm_err = 0;
item->getInt32("cdm_err", &cdm_err);
int32_t oem_err = 0;
item->getInt32("oem_err", &oem_err);
- int32_t error_context = -1;
+ int32_t error_context = 0;
item->getInt32("error_context", &error_context);
const int result = stats_write(stats::media_metrics::MEDIA_DRM_ERRORED, scheme, uuid_lsb,
diff --git a/services/mediaresourcemanager/Android.bp b/services/mediaresourcemanager/Android.bp
index 2b8245e..a2bd5e1 100644
--- a/services/mediaresourcemanager/Android.bp
+++ b/services/mediaresourcemanager/Android.bp
@@ -17,6 +17,7 @@
"aidl/android/media/MediaResourceParcel.aidl",
"aidl/android/media/MediaResourcePolicyParcel.aidl",
"aidl/android/media/ClientInfoParcel.aidl",
+ "aidl/android/media/ClientConfigParcel.aidl",
],
path: "aidl",
}
@@ -73,9 +74,11 @@
name: "libresourcemanagerservice",
srcs: [
+ "ResourceManagerMetrics.cpp",
"ResourceManagerService.cpp",
"ResourceObserverService.cpp",
"ServiceLog.cpp",
+ "UidObserver.cpp",
// TODO: convert to AIDL?
"IMediaResourceMonitor.cpp",
@@ -92,6 +95,7 @@
"libstatspull",
"libstatssocket",
"libprotobuf-cpp-lite",
+ "libactivitymanager_aidl",
],
static_libs: [
diff --git a/services/mediaresourcemanager/OWNERS b/services/mediaresourcemanager/OWNERS
index 82abf8f..4fc3728 100644
--- a/services/mediaresourcemanager/OWNERS
+++ b/services/mediaresourcemanager/OWNERS
@@ -1 +1,3 @@
-dwkang@google.com
+girishshetty@google.com
+lajos@google.com
+wonsik@google.com
diff --git a/services/mediaresourcemanager/ResourceManagerMetrics.cpp b/services/mediaresourcemanager/ResourceManagerMetrics.cpp
new file mode 100644
index 0000000..8d591df
--- /dev/null
+++ b/services/mediaresourcemanager/ResourceManagerMetrics.cpp
@@ -0,0 +1,564 @@
+/*
+**
+** Copyright 2023, The Android Open Source Project
+**
+** Licensed under the Apache License, Version 2.0 (the "License");
+** you may not use this file except in compliance with the License.
+** You may obtain a copy of the License at
+**
+** http://www.apache.org/licenses/LICENSE-2.0
+**
+** Unless required by applicable law or agreed to in writing, software
+** distributed under the License is distributed on an "AS IS" BASIS,
+** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+** See the License for the specific language governing permissions and
+** limitations under the License.
+*/
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "ResourceManagerMetrics"
+#include <utils/Log.h>
+#include <mediautils/ProcessInfo.h>
+
+#include <stats_media_metrics.h>
+
+#include "UidObserver.h"
+#include "ResourceManagerMetrics.h"
+
+#include <cmath>
+#include <sstream>
+
+namespace android {
+
+using stats::media_metrics::stats_write;
+using stats::media_metrics::MEDIA_CODEC_STARTED;
+using stats::media_metrics::MEDIA_CODEC_STOPPED;
+// Disabling this for now.
+#ifdef ENABLE_MEDIA_CODEC_CONCURRENT_USAGE_REPORTED
+using stats::media_metrics::MEDIA_CODEC_CONCURRENT_USAGE_REPORTED;
+#endif
+using stats::media_metrics::MEDIA_CODEC_RECLAIM_REQUEST_COMPLETED;
+using stats::media_metrics::MEDIA_CODEC_RECLAIM_REQUEST_COMPLETED__RECLAIM_STATUS__RECLAIM_SUCCESS;
+using stats::media_metrics::\
+ MEDIA_CODEC_RECLAIM_REQUEST_COMPLETED__RECLAIM_STATUS__RECLAIM_FAILED_NO_CLIENTS;
+using stats::media_metrics::\
+ MEDIA_CODEC_RECLAIM_REQUEST_COMPLETED__RECLAIM_STATUS__RECLAIM_FAILED_RECLAIM_RESOURCES;
+
+inline const char* getCodecType(MediaResourceSubType codecType) {
+ switch (codecType) {
+ case MediaResourceSubType::kAudioCodec: return "Audio";
+ case MediaResourceSubType::kVideoCodec: return "Video";
+ case MediaResourceSubType::kImageCodec: return "Image";
+ case MediaResourceSubType::kUnspecifiedSubType:
+ default:
+ return "Unspecified";
+ }
+ return "Unspecified";
+}
+
+static CodecBucket getCodecBucket(bool isHardware,
+ bool isEncoder,
+ MediaResourceSubType codecType) {
+ if (isHardware) {
+ switch (codecType) {
+ case MediaResourceSubType::kAudioCodec:
+ if (isEncoder) return HwAudioEncoder;
+ return HwAudioDecoder;
+ case MediaResourceSubType::kVideoCodec:
+ if (isEncoder) return HwVideoEncoder;
+ return HwVideoDecoder;
+ case MediaResourceSubType::kImageCodec:
+ if (isEncoder) return HwImageEncoder;
+ return HwImageDecoder;
+ case MediaResourceSubType::kUnspecifiedSubType:
+ default:
+ return CodecBucketUnspecified;
+ }
+ } else {
+ switch (codecType) {
+ case MediaResourceSubType::kAudioCodec:
+ if (isEncoder) return SwAudioEncoder;
+ return SwAudioDecoder;
+ case MediaResourceSubType::kVideoCodec:
+ if (isEncoder) return SwVideoEncoder;
+ return SwVideoDecoder;
+ case MediaResourceSubType::kImageCodec:
+ if (isEncoder) return SwImageEncoder;
+ return SwImageDecoder;
+ case MediaResourceSubType::kUnspecifiedSubType:
+ default:
+ return CodecBucketUnspecified;
+ }
+ }
+
+ return CodecBucketUnspecified;
+}
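+// For example, (isHardware=true, isEncoder=false, kVideoCodec) maps to HwVideoDecoder, and
+// (isHardware=false, isEncoder=true, kAudioCodec) maps to SwAudioEncoder.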
+
+static bool getLogMessage(int hwCount, int swCount, std::stringstream& logMsg) {
+ bool update = false;
+    // Reset the buffered contents, not just the stream's error flags, so repeated calls with
+    // the same stringstream do not accumulate text.
+    logMsg.str("");
+    logMsg.clear();
+
+ if (hwCount > 0) {
+ logMsg << " HW: " << hwCount;
+ update = true;
+ }
+ if (swCount > 0) {
+ logMsg << " SW: " << swCount;
+ update = true;
+ }
+
+ if (update) {
+ logMsg << " ] ";
+ }
+ return update;
+}
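+// For example, getLogMessage(2, 1, logMsg) leaves " HW: 2 SW: 1 ] " in logMsg and returns true;
+// the caller prepends the matching "<bucket>[" prefix when building the peak usage log below.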
+
+ResourceManagerMetrics::ResourceManagerMetrics(const sp<ProcessInfoInterface>& processInfo) {
+    // Create a process termination watcher with a polling interval of 5 seconds.
+ mUidObserver = sp<UidObserver>::make(processInfo,
+ [this] (int32_t pid, uid_t uid) {
+ onProcessTerminated(pid, uid);
+ });
+ mUidObserver->start();
+}
+
+ResourceManagerMetrics::~ResourceManagerMetrics() {
+ mUidObserver->stop();
+}
+
+void ResourceManagerMetrics::addPid(int pid, uid_t uid) {
+ if (uid != 0) {
+ std::scoped_lock lock(mLock);
+ mUidObserver->add(pid, uid);
+ }
+}
+
+void ResourceManagerMetrics::notifyClientCreated(const ClientInfoParcel& clientInfo) {
+ std::scoped_lock lock(mLock);
+ // Update the resource instance count.
+ std::map<std::string, int>::iterator found = mConcurrentResourceCountMap.find(clientInfo.name);
+ if (found == mConcurrentResourceCountMap.end()) {
+ mConcurrentResourceCountMap[clientInfo.name] = 1;
+ } else {
+ found->second++;
+ }
+}
+
+void ResourceManagerMetrics::notifyClientReleased(const ClientInfoParcel& clientInfo) {
+ bool stopCalled = true;
+ ClientConfigParcel clientConfig;
+ {
+ std::scoped_lock lock(mLock);
+ ClientConfigMap::iterator found = mClientConfigMap.find(clientInfo.id);
+ if (found != mClientConfigMap.end()) {
+ // Release is called without Stop!
+ stopCalled = false;
+ clientConfig = found->second;
+ // Update the timestamp for stopping the codec.
+ clientConfig.timeStamp = systemTime(SYSTEM_TIME_MONOTONIC) / 1000LL;
+ }
+ }
+ if (!stopCalled) {
+ // call Stop to update the metrics.
+ notifyClientStopped(clientConfig);
+ }
+ {
+ std::scoped_lock lock(mLock);
+ // Update the resource instance count also.
+ std::map<std::string, int>::iterator found =
+ mConcurrentResourceCountMap.find(clientInfo.name);
+ if (found != mConcurrentResourceCountMap.end()) {
+ if (found->second > 0) {
+ found->second--;
+ }
+ }
+ }
+}
+
+void ResourceManagerMetrics::notifyClientStarted(const ClientConfigParcel& clientConfig) {
+ std::scoped_lock lock(mLock);
+ int pid = clientConfig.clientInfo.pid;
+ // We need to observer this process.
+ mUidObserver->add(pid, clientConfig.clientInfo.uid);
+
+    // Update the client config for this client.
+ mClientConfigMap[clientConfig.clientInfo.id] = clientConfig;
+
+ // Update the concurrent codec count for this process.
+ CodecBucket codecBucket = getCodecBucket(clientConfig.isHardware,
+ clientConfig.isEncoder,
+ clientConfig.codecType);
+ increaseConcurrentCodecs(pid, codecBucket);
+
+ if (clientConfig.codecType == MediaResourceSubType::kVideoCodec ||
+ clientConfig.codecType == MediaResourceSubType::kImageCodec) {
+ // Update the pixel count for this process
+ increasePixelCount(pid, clientConfig.width * (long)clientConfig.height);
+ }
+
+ // System concurrent codec usage
+ int systemConcurrentCodecCount = mConcurrentCodecsMap[codecBucket];
+ // Process/Application concurrent codec usage for this type of codec
+ int appConcurrentCodecCount = mProcessConcurrentCodecsMap[pid].mCurrent[codecBucket];
+ // Process/Application's current pixel count.
+ long pixelCount = 0;
+ std::map<int32_t, PixelCount>::iterator it = mProcessPixelsMap.find(pid);
+ if (it != mProcessPixelsMap.end()) {
+ pixelCount = it->second.mCurrent;
+ }
+
+ int result = stats_write(
+ MEDIA_CODEC_STARTED,
+ clientConfig.clientInfo.uid,
+ clientConfig.id,
+ clientConfig.clientInfo.name.c_str(),
+ static_cast<int32_t>(clientConfig.codecType),
+ clientConfig.isEncoder,
+ clientConfig.isHardware,
+ clientConfig.width, clientConfig.height,
+ systemConcurrentCodecCount,
+ appConcurrentCodecCount,
+ pixelCount);
+
+ ALOGV("%s: Pushed MEDIA_CODEC_STARTED atom: "
+ "Process[pid(%d): uid(%d)] "
+ "Codec: [%s: %ju] is %s %s %s "
+ "Timestamp: %jd "
+ "Resolution: %d x %d "
+ "ConcurrentCodec[%d]={System: %d App: %d} "
+ "result: %d",
+ __func__,
+ pid, clientConfig.clientInfo.uid,
+ clientConfig.clientInfo.name.c_str(),
+ clientConfig.id,
+ clientConfig.isHardware? "hardware" : "software",
+ getCodecType(clientConfig.codecType),
+ clientConfig.isEncoder? "encoder" : "decoder",
+ clientConfig.timeStamp,
+ clientConfig.width, clientConfig.height,
+ codecBucket, systemConcurrentCodecCount, appConcurrentCodecCount,
+ result);
+}
+
+void ResourceManagerMetrics::notifyClientStopped(const ClientConfigParcel& clientConfig) {
+ std::scoped_lock lock(mLock);
+ int pid = clientConfig.clientInfo.pid;
+ // Update the concurrent codec count for this process.
+ CodecBucket codecBucket = getCodecBucket(clientConfig.isHardware,
+ clientConfig.isEncoder,
+ clientConfig.codecType);
+ decreaseConcurrentCodecs(pid, codecBucket);
+
+ if (clientConfig.codecType == MediaResourceSubType::kVideoCodec ||
+ clientConfig.codecType == MediaResourceSubType::kImageCodec) {
+ // Update the pixel count for this process
+ decreasePixelCount(pid, clientConfig.width * (long)clientConfig.height);
+ }
+
+ // System concurrent codec usage
+ int systemConcurrentCodecCount = mConcurrentCodecsMap[codecBucket];
+ // Process/Application concurrent codec usage for this type of codec
+ int appConcurrentCodecCount = 0;
+ std::map<int32_t, ConcurrentCodecs>::iterator found = mProcessConcurrentCodecsMap.find(pid);
+ if (found != mProcessConcurrentCodecsMap.end()) {
+ appConcurrentCodecCount = found->second.mCurrent[codecBucket];
+ }
+ // Process/Application's current pixel count.
+ long pixelCount = 0;
+ std::map<int32_t, PixelCount>::iterator it = mProcessPixelsMap.find(pid);
+ if (it != mProcessPixelsMap.end()) {
+ pixelCount = it->second.mCurrent;
+ }
+
+ // calculate the usageTime as:
+ // MediaCodecStopped.clientConfig.timeStamp -
+ // MediaCodecStarted.clientConfig.timeStamp
+ int64_t usageTime = 0;
+ ClientConfigMap::iterator entry = mClientConfigMap.find(clientConfig.clientInfo.id);
+ if (entry != mClientConfigMap.end()) {
+ usageTime = clientConfig.timeStamp - entry->second.timeStamp;
+ // And we can erase this config now.
+ mClientConfigMap.erase(entry);
+ } else {
+ ALOGW("%s: Start Config is missing!", __func__);
+ }
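+    // Timestamps are expected to be microseconds on the monotonic clock (see
+    // notifyClientReleased, which fills timeStamp as systemTime(SYSTEM_TIME_MONOTONIC) / 1000),
+    // so a codec started at t=1'000'000 and stopped at t=4'500'000 reports usageTime=3'500'000.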
+
+ int result = stats_write(
+ MEDIA_CODEC_STOPPED,
+ clientConfig.clientInfo.uid,
+ clientConfig.id,
+ clientConfig.clientInfo.name.c_str(),
+ static_cast<int32_t>(clientConfig.codecType),
+ clientConfig.isEncoder,
+ clientConfig.isHardware,
+ clientConfig.width, clientConfig.height,
+ systemConcurrentCodecCount,
+ appConcurrentCodecCount,
+ pixelCount,
+ usageTime);
+ ALOGV("%s: Pushed MEDIA_CODEC_STOPPED atom: "
+ "Process[pid(%d): uid(%d)] "
+ "Codec: [%s: %ju] is %s %s %s "
+ "Timestamp: %jd Usage time: %jd "
+ "Resolution: %d x %d "
+ "ConcurrentCodec[%d]={System: %d App: %d} "
+ "result: %d",
+ __func__,
+ pid, clientConfig.clientInfo.uid,
+ clientConfig.clientInfo.name.c_str(),
+ clientConfig.id,
+ clientConfig.isHardware? "hardware" : "software",
+ getCodecType(clientConfig.codecType),
+ clientConfig.isEncoder? "encoder" : "decoder",
+ clientConfig.timeStamp, usageTime,
+ clientConfig.width, clientConfig.height,
+ codecBucket, systemConcurrentCodecCount, appConcurrentCodecCount,
+ result);
+}
+
+void ResourceManagerMetrics::onProcessTerminated(int32_t pid, uid_t uid) {
+ std::scoped_lock lock(mLock);
+ // post MediaCodecConcurrentUsageReported for this terminated pid.
+ pushConcurrentUsageReport(pid, uid);
+}
+
+void ResourceManagerMetrics::pushConcurrentUsageReport(int32_t pid, uid_t uid) {
+ // Process/Application peak concurrent codec usage
+ std::map<int32_t, ConcurrentCodecs>::iterator found = mProcessConcurrentCodecsMap.find(pid);
+ if (found == mProcessConcurrentCodecsMap.end()) {
+ ALOGI("%s: No MEDIA_CODEC_CONCURRENT_USAGE_REPORTED atom Entry for: "
+ "Application[pid(%d): uid(%d)]", __func__, pid, uid);
+ return;
+ }
+ const ConcurrentCodecsMap& codecsMap = found->second.mPeak;
+ int peakHwAudioEncoderCount = codecsMap[HwAudioEncoder];
+ int peakHwAudioDecoderCount = codecsMap[HwAudioDecoder];
+ int peakHwVideoEncoderCount = codecsMap[HwVideoEncoder];
+ int peakHwVideoDecoderCount = codecsMap[HwVideoDecoder];
+ int peakHwImageEncoderCount = codecsMap[HwImageEncoder];
+ int peakHwImageDecoderCount = codecsMap[HwImageDecoder];
+ int peakSwAudioEncoderCount = codecsMap[SwAudioEncoder];
+ int peakSwAudioDecoderCount = codecsMap[SwAudioDecoder];
+ int peakSwVideoEncoderCount = codecsMap[SwVideoEncoder];
+ int peakSwVideoDecoderCount = codecsMap[SwVideoDecoder];
+ int peakSwImageEncoderCount = codecsMap[SwImageEncoder];
+ int peakSwImageDecoderCount = codecsMap[SwImageDecoder];
+
+ long peakPixels = 0;
+ std::map<int32_t, PixelCount>::iterator it = mProcessPixelsMap.find(pid);
+ if (it == mProcessPixelsMap.end()) {
+ ALOGI("%s: No Video Codec Entry for Application[pid(%d): uid(%d)]",
+ __func__, pid, uid);
+ } else {
+ peakPixels = it->second.mPeak;
+ }
+ std::string peakPixelsLog("Peak Pixels: " + std::to_string(peakPixels));
+
+ std::stringstream peakCodecLog;
+ peakCodecLog << "Peak { ";
+ std::stringstream logMsg;
+ if (getLogMessage(peakHwAudioEncoderCount, peakSwAudioEncoderCount, logMsg)) {
+ peakCodecLog << "AudioEnc[" << logMsg.str();
+ }
+ if (getLogMessage(peakHwAudioDecoderCount, peakSwAudioDecoderCount, logMsg)) {
+ peakCodecLog << "AudioDec[" << logMsg.str();
+ }
+ if (getLogMessage(peakHwVideoEncoderCount, peakSwVideoEncoderCount, logMsg)) {
+ peakCodecLog << "VideoEnc[" << logMsg.str();
+ }
+ if (getLogMessage(peakHwVideoDecoderCount, peakSwVideoDecoderCount, logMsg)) {
+ peakCodecLog << "VideoDec[" << logMsg.str();
+ }
+ if (getLogMessage(peakHwImageEncoderCount, peakSwImageEncoderCount, logMsg)) {
+ peakCodecLog << "ImageEnc[" << logMsg.str();
+ }
+ if (getLogMessage(peakHwImageDecoderCount, peakSwImageDecoderCount, logMsg)) {
+ peakCodecLog << "ImageDec[" << logMsg.str();
+ }
+ peakCodecLog << "}";
+
+#ifdef ENABLE_MEDIA_CODEC_CONCURRENT_USAGE_REPORTED
+ int result = stats_write(
+ MEDIA_CODEC_CONCURRENT_USAGE_REPORTED,
+ uid,
+ peakHwVideoDecoderCount,
+ peakHwVideoEncoderCount,
+ peakSwVideoDecoderCount,
+ peakSwVideoEncoderCount,
+ peakHwAudioDecoderCount,
+ peakHwAudioEncoderCount,
+ peakSwAudioDecoderCount,
+ peakSwAudioEncoderCount,
+ peakHwImageDecoderCount,
+ peakHwImageEncoderCount,
+ peakSwImageDecoderCount,
+ peakSwImageEncoderCount,
+ peakPixels);
+ ALOGI("%s: Pushed MEDIA_CODEC_CONCURRENT_USAGE_REPORTED atom: "
+ "Process[pid(%d): uid(%d)] %s %s result: %d",
+ __func__, pid, uid, peakCodecLog.str().c_str(), peakPixelsLog.c_str(), result);
+#else
+ ALOGI("%s: Concurrent Codec Usage Report for the Process[pid(%d): uid(%d)] is %s %s",
+ __func__, pid, uid, peakCodecLog.str().c_str(), peakPixelsLog.c_str());
+#endif
+}
+
+void ResourceManagerMetrics::pushReclaimAtom(const ClientInfoParcel& clientInfo,
+ const std::vector<int>& priorities,
+ const Vector<std::shared_ptr<IResourceManagerClient>>& clients,
+ const PidUidVector& idList, bool reclaimed) {
+ // Construct the metrics for codec reclaim as a pushed atom.
+ // 1. Information about the requester.
+ // - UID and the priority (oom score)
+ int32_t callingPid = clientInfo.pid;
+ int32_t requesterUid = clientInfo.uid;
+ std::string clientName = clientInfo.name;
+ int requesterPriority = priorities[0];
+
+ // 2. Information about the codec.
+ // - Name of the codec requested
+ // - Number of concurrent codecs running.
+ int32_t noOfConcurrentCodecs = 0;
+ std::map<std::string, int>::iterator found = mConcurrentResourceCountMap.find(clientName);
+ if (found != mConcurrentResourceCountMap.end()) {
+ noOfConcurrentCodecs = found->second;
+ }
+
+ // 3. Information about the Reclaim:
+ // - Status of reclaim request
+ // - How many codecs are reclaimed
+ // - For each codecs reclaimed, information of the process that it belonged to:
+ // - UID and the Priority (oom score)
+ int32_t reclaimStatus = MEDIA_CODEC_RECLAIM_REQUEST_COMPLETED__RECLAIM_STATUS__RECLAIM_SUCCESS;
+ if (!reclaimed) {
+ if (clients.size() == 0) {
+ // No clients to reclaim from
+ reclaimStatus =
+ MEDIA_CODEC_RECLAIM_REQUEST_COMPLETED__RECLAIM_STATUS__RECLAIM_FAILED_NO_CLIENTS;
+ } else {
+ // Couldn't reclaim resources from the clients
+ reclaimStatus =
+ MEDIA_CODEC_RECLAIM_REQUEST_COMPLETED__RECLAIM_STATUS__RECLAIM_FAILED_RECLAIM_RESOURCES;
+ }
+ }
+ int32_t noOfCodecsReclaimed = clients.size();
+ int32_t targetIndex = 1;
+ for (PidUidVector::const_reference id : idList) {
+ int32_t targetUid = id.second;
+ int targetPriority = priorities[targetIndex];
+ // Post the pushed atom
+ int result = stats_write(
+ MEDIA_CODEC_RECLAIM_REQUEST_COMPLETED,
+ requesterUid,
+ requesterPriority,
+ clientName.c_str(),
+ noOfConcurrentCodecs,
+ reclaimStatus,
+ noOfCodecsReclaimed,
+ targetIndex,
+ targetUid,
+ targetPriority);
+ ALOGI("%s: Pushed MEDIA_CODEC_RECLAIM_REQUEST_COMPLETED atom: "
+ "Requester[pid(%d): uid(%d): priority(%d)] "
+ "Codec: [%s] "
+ "No of concurrent codecs: %d "
+ "Reclaim Status: %d "
+ "No of codecs reclaimed: %d "
+ "Target[%d][pid(%d): uid(%d): priority(%d)] result: %d",
+ __func__, callingPid, requesterUid, requesterPriority,
+ clientName.c_str(), noOfConcurrentCodecs,
+ reclaimStatus, noOfCodecsReclaimed,
+ targetIndex, id.first, targetUid, targetPriority, result);
+ targetIndex++;
+ }
+}
+
+void ResourceManagerMetrics::increaseConcurrentCodecs(int32_t pid,
+ CodecBucket codecBucket) {
+ // Increase the codec usage across the system.
+ mConcurrentCodecsMap[codecBucket]++;
+
+ // Now update the codec usage for this (pid) process.
+ std::map<int32_t, ConcurrentCodecs>::iterator found = mProcessConcurrentCodecsMap.find(pid);
+ if (found == mProcessConcurrentCodecsMap.end()) {
+ ConcurrentCodecs codecs;
+ codecs.mCurrent[codecBucket] = 1;
+ codecs.mPeak[codecBucket] = 1;
+ mProcessConcurrentCodecsMap.emplace(pid, codecs);
+ } else {
+ found->second.mCurrent[codecBucket]++;
+ // Check if it's the peak count for this slot.
+ if (found->second.mPeak[codecBucket] < found->second.mCurrent[codecBucket]) {
+ found->second.mPeak[codecBucket] = found->second.mCurrent[codecBucket];
+ }
+ }
+}
+
+void ResourceManagerMetrics::decreaseConcurrentCodecs(int32_t pid,
+ CodecBucket codecBucket) {
+ // Decrease the codec usage across the system.
+ if (mConcurrentCodecsMap[codecBucket] > 0) {
+ mConcurrentCodecsMap[codecBucket]--;
+ }
+
+ // Now update the codec usage for this (pid) process.
+ std::map<int32_t, ConcurrentCodecs>::iterator found = mProcessConcurrentCodecsMap.find(pid);
+ if (found != mProcessConcurrentCodecsMap.end()) {
+ if (found->second.mCurrent[codecBucket] > 0) {
+ found->second.mCurrent[codecBucket]--;
+ }
+ }
+}
+
+void ResourceManagerMetrics::increasePixelCount(int32_t pid, long pixels) {
+ // Now update the current pixel usage for this (pid) process.
+ std::map<int32_t, PixelCount>::iterator found = mProcessPixelsMap.find(pid);
+ if (found == mProcessPixelsMap.end()) {
+ PixelCount pixelCount {pixels, pixels};
+ mProcessPixelsMap.emplace(pid, pixelCount);
+ } else {
+ if (__builtin_add_overflow(found->second.mCurrent, pixels, &found->second.mCurrent)) {
+ ALOGI("Pixel Count overflow");
+ return;
+ }
+ // Check if it's the peak count for this slot.
+ if (found->second.mPeak < found->second.mCurrent) {
+ found->second.mPeak = found->second.mCurrent;
+ }
+ }
+}
+
+void ResourceManagerMetrics::decreasePixelCount(int32_t pid, long pixels) {
+ // Now update the current pixel usage for this (pid) process.
+ std::map<int32_t, PixelCount>::iterator found = mProcessPixelsMap.find(pid);
+ if (found != mProcessPixelsMap.end()) {
+ if (found->second.mCurrent < pixels) {
+ found->second.mCurrent = 0;
+ } else {
+ if (__builtin_sub_overflow(found->second.mCurrent, pixels, &found->second.mCurrent)) {
+ ALOGI("Pixel Count overflow");
+ return;
+ }
+ }
+ }
+}
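+// Note: decreasePixelCount() clamps mCurrent at 0 rather than letting it go negative, so an
+// unmatched decrease (e.g. a stop without a recorded start) cannot underflow the counter.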
+
+long ResourceManagerMetrics::getPeakConcurrentPixelCount(int pid) const {
+ std::map<int32_t, PixelCount>::const_iterator found = mProcessPixelsMap.find(pid);
+ if (found != mProcessPixelsMap.end()) {
+ return found->second.mPeak;
+ }
+
+ return 0;
+}
+
+long ResourceManagerMetrics::getCurrentConcurrentPixelCount(int pid) const {
+ std::map<int32_t, PixelCount>::const_iterator found = mProcessPixelsMap.find(pid);
+ if (found != mProcessPixelsMap.end()) {
+ return found->second.mCurrent;
+ }
+
+ return 0;
+}
+
+} // namespace android
diff --git a/services/mediaresourcemanager/ResourceManagerMetrics.h b/services/mediaresourcemanager/ResourceManagerMetrics.h
new file mode 100644
index 0000000..b7810e5
--- /dev/null
+++ b/services/mediaresourcemanager/ResourceManagerMetrics.h
@@ -0,0 +1,179 @@
+/*
+**
+** Copyright 2023, The Android Open Source Project
+**
+** Licensed under the Apache License, Version 2.0 (the "License");
+** you may not use this file except in compliance with the License.
+** You may obtain a copy of the License at
+**
+** http://www.apache.org/licenses/LICENSE-2.0
+**
+** Unless required by applicable law or agreed to in writing, software
+** distributed under the License is distributed on an "AS IS" BASIS,
+** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+** See the License for the specific language governing permissions and
+** limitations under the License.
+*/
+
+#ifndef ANDROID_MEDIA_RESOURCEMANAGERMETRICS_H_
+#define ANDROID_MEDIA_RESOURCEMANAGERMETRICS_H_
+
+#include "ResourceManagerService.h"
+
+namespace android {
+
+using ::aidl::android::media::ClientInfoParcel;
+using ::aidl::android::media::ClientConfigParcel;
+using ::aidl::android::media::IResourceManagerClient;
+
+struct ProcessInfoInterface;
+
+class UidObserver;
+
+//
+// Enumeration for Codec bucket based on:
+// - Encoder or Decoder
+// - hardware implementation or not
+// - Audio/Video/Image codec
+//
+enum CodecBucket {
+ CodecBucketUnspecified = 0,
+ HwAudioEncoder = 1,
+ HwAudioDecoder = 2,
+ HwVideoEncoder = 3,
+ HwVideoDecoder = 4,
+ HwImageEncoder = 5,
+ HwImageDecoder = 6,
+ SwAudioEncoder = 7,
+ SwAudioDecoder = 8,
+ SwVideoEncoder = 9,
+ SwVideoDecoder = 10,
+ SwImageEncoder = 11,
+ SwImageDecoder = 12,
+ CodecBucketMaxSize = 13,
+};
+
+// Map from client id to the client configuration recorded when the client was last started.
+typedef std::map<int64_t, ClientConfigParcel> ClientConfigMap;
+
+// Map from pid to uid.
+typedef std::map<int32_t, uid_t> PidUidMap;
+
+// Map of concurrent codecs by codec type bucket.
+struct ConcurrentCodecsMap {
+ int& operator[](CodecBucket index) {
+ return mCodec[index];
+ }
+
+ const int& operator[](CodecBucket index) const {
+ return mCodec[index];
+ }
+
+private:
+ int mCodec[CodecBucketMaxSize] = {0};
+};
+
+// Current and Peak ConcurrentCodecMap for a process.
+struct ConcurrentCodecs {
+ ConcurrentCodecsMap mCurrent;
+ ConcurrentCodecsMap mPeak;
+};
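+// Usage sketch: increaseConcurrentCodecs() bumps mCurrent[bucket] and raises mPeak[bucket] to
+// the new high-water mark; decreaseConcurrentCodecs() only lowers mCurrent[bucket].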
+
+// Current and Peak pixel count for a process.
+struct PixelCount {
+ long mCurrent = 0;
+ long mPeak = 0;
+};
+
+//
+// ResourceManagerMetrics class that maintains concurrent codec counts based on:
+//
+// 1. # of concurrent active codecs (initialized, but not yet released) of a given
+// implementation (by codec name) across the system.
+//
+// 2. # of concurrent codec usage (started, but not stopped yet), which is
+// measured using codec type bucket (CodecBucket) for:
+// - each process/application.
+// - across the system.
+// Also the peak count of the same for each process/application is maintained.
+//
+// 3. # of Peak Concurrent Pixels for each process/application.
+// This should help with understanding the (video) memory usage per
+// application.
+//
+//
+class ResourceManagerMetrics {
+public:
+ ResourceManagerMetrics(const sp<ProcessInfoInterface>& processInfo);
+ ~ResourceManagerMetrics();
+
+ // To be called when a client is created.
+ void notifyClientCreated(const ClientInfoParcel& clientInfo);
+
+ // To be called when a client is released.
+ void notifyClientReleased(const ClientInfoParcel& clientInfo);
+
+ // To be called when a client is started.
+ void notifyClientStarted(const ClientConfigParcel& clientConfig);
+
+ // To be called when a client is stopped.
+ void notifyClientStopped(const ClientConfigParcel& clientConfig);
+
+    // To be called after a reclaim event.
+ void pushReclaimAtom(const ClientInfoParcel& clientInfo,
+ const std::vector<int>& priorities,
+ const Vector<std::shared_ptr<IResourceManagerClient>>& clients,
+ const PidUidVector& idList, bool reclaimed);
+
+    // Add this pid/uid pair to be monitored for process termination.
+ void addPid(int pid, uid_t uid = 0);
+
+ // Get the peak concurrent pixel count (associated with the video codecs) for the process.
+ long getPeakConcurrentPixelCount(int pid) const;
+ // Get the current concurrent pixel count (associated with the video codecs) for the process.
+ long getCurrentConcurrentPixelCount(int pid) const;
+
+private:
+ ResourceManagerMetrics(const ResourceManagerMetrics&) = delete;
+ ResourceManagerMetrics(ResourceManagerMetrics&&) = delete;
+ ResourceManagerMetrics& operator=(const ResourceManagerMetrics&) = delete;
+ ResourceManagerMetrics& operator=(ResourceManagerMetrics&&) = delete;
+
+ // To increase/decrease the concurrent codec usage for a given CodecBucket.
+ void increaseConcurrentCodecs(int32_t pid, CodecBucket codecBucket);
+ void decreaseConcurrentCodecs(int32_t pid, CodecBucket codecBucket);
+
+ // To increase/decrease the concurrent pixels usage for a process.
+ void increasePixelCount(int32_t pid, long pixels);
+ void decreasePixelCount(int32_t pid, long pixels);
+
+ // Issued when the process/application with given pid/uid is terminated.
+ void onProcessTerminated(int32_t pid, uid_t uid);
+
+    // To push concurrent codec usage of a process/application.
+ void pushConcurrentUsageReport(int32_t pid, uid_t uid);
+
+private:
+ std::mutex mLock;
+
+ // Map of client id and the configuration.
+ ClientConfigMap mClientConfigMap;
+
+ // Concurrent and Peak Pixel count for each process/application.
+ std::map<int32_t, PixelCount> mProcessPixelsMap;
+
+ // Map of resources (name) and number of concurrent instances
+ std::map<std::string, int> mConcurrentResourceCountMap;
+
+    // Map of concurrent codecs by CodecBucket across the system.
+ ConcurrentCodecsMap mConcurrentCodecsMap;
+    // Map of current and peak concurrent codecs by CodecBucket for each process/application.
+ std::map<int32_t, ConcurrentCodecs> mProcessConcurrentCodecsMap;
+
+ // Uid Observer to monitor the application termination.
+ sp<UidObserver> mUidObserver;
+};
+
+} // namespace android
+
+#endif // ANDROID_MEDIA_RESOURCEMANAGERMETRICS_H_
diff --git a/services/mediaresourcemanager/ResourceManagerService.cpp b/services/mediaresourcemanager/ResourceManagerService.cpp
index ce910b1..6822b06 100644
--- a/services/mediaresourcemanager/ResourceManagerService.cpp
+++ b/services/mediaresourcemanager/ResourceManagerService.cpp
@@ -35,23 +35,15 @@
#include <sys/stat.h>
#include <sys/time.h>
#include <unistd.h>
-#include <stats_media_metrics.h>
#include "IMediaResourceMonitor.h"
+#include "ResourceManagerMetrics.h"
#include "ResourceManagerService.h"
#include "ResourceObserverService.h"
#include "ServiceLog.h"
namespace android {
-using stats::media_metrics::stats_write;
-using stats::media_metrics::MEDIA_CODEC_RECLAIM_REQUEST_COMPLETED;
-using stats::media_metrics::MEDIA_CODEC_RECLAIM_REQUEST_COMPLETED__RECLAIM_STATUS__RECLAIM_SUCCESS;
-using stats::media_metrics::\
- MEDIA_CODEC_RECLAIM_REQUEST_COMPLETED__RECLAIM_STATUS__RECLAIM_FAILED_NO_CLIENTS;
-using stats::media_metrics::\
- MEDIA_CODEC_RECLAIM_REQUEST_COMPLETED__RECLAIM_STATUS__RECLAIM_FAILED_RECLAIM_RESOURCES;
-
//static
std::mutex ResourceManagerService::sCookieLock;
//static
@@ -61,8 +53,8 @@
class DeathNotifier : public RefBase {
public:
- DeathNotifier(const std::shared_ptr<ResourceManagerService> &service, int pid,
- int64_t clientId);
+ DeathNotifier(const std::shared_ptr<ResourceManagerService> &service,
+ const ClientInfoParcel& clientInfo);
virtual ~DeathNotifier() {}
@@ -72,13 +64,12 @@
protected:
std::weak_ptr<ResourceManagerService> mService;
- int mPid;
- int64_t mClientId;
+ const ClientInfoParcel mClientInfo;
};
DeathNotifier::DeathNotifier(const std::shared_ptr<ResourceManagerService> &service,
- int pid, int64_t clientId)
- : mService(service), mPid(pid), mClientId(clientId) {}
+ const ClientInfoParcel& clientInfo)
+ : mService(service), mClientInfo(clientInfo) {}
//static
void DeathNotifier::BinderDiedCallback(void* cookie) {
@@ -105,16 +96,16 @@
return;
}
- service->overridePid(mPid, -1);
+ service->overridePid(mClientInfo.pid, -1);
// thiz is freed in the call below, so it must be last call referring thiz
- ClientInfoParcel clientInfo{.pid = mPid, .id = mClientId};
- service->removeResource(clientInfo, false /*checkValid*/);
+ service->removeResource(mClientInfo, false /*checkValid*/);
}
class OverrideProcessInfoDeathNotifier : public DeathNotifier {
public:
OverrideProcessInfoDeathNotifier(const std::shared_ptr<ResourceManagerService> &service,
- int pid) : DeathNotifier(service, pid, 0) {}
+ const ClientInfoParcel& clientInfo)
+ : DeathNotifier(service, clientInfo) {}
virtual ~OverrideProcessInfoDeathNotifier() {}
@@ -129,7 +120,7 @@
return;
}
- service->removeProcessInfoOverride(mPid);
+ service->removeProcessInfoOverride(mClientInfo.pid);
}
template <typename T>
@@ -202,7 +193,11 @@
ResourceInfo info;
info.uid = uid;
info.clientId = clientId;
- info.name = name;
+ if (name.empty()) {
+ info.name = "<unknown client>";
+ } else {
+ info.name = name;
+ }
info.client = client;
info.cookie = 0;
info.pendingRemoval = false;
@@ -292,10 +287,7 @@
snprintf(buffer, SIZE, " Id: %lld\n", (long long)infos[j].clientId);
result.append(buffer);
- std::string clientName = "<unknown client>";
- if (infos[j].client != nullptr) {
- clientName = infos[j].name;
- }
+ std::string clientName = infos[j].name;
snprintf(buffer, SIZE, " Name: %s\n", clientName.c_str());
result.append(buffer);
@@ -357,6 +349,8 @@
mCpuBoostCount(0),
mDeathRecipient(AIBinder_DeathRecipient_new(DeathNotifier::BinderDiedCallback)) {
mSystemCB->noteResetVideo();
+ // Create ResourceManagerMetrics that handles all the metrics.
+ mResourceManagerMetrics = std::make_unique<ResourceManagerMetrics>(mProcessInfo);
}
//static
@@ -364,7 +358,7 @@
std::shared_ptr<ResourceManagerService> service =
::ndk::SharedRefBase::make<ResourceManagerService>();
binder_status_t status =
- AServiceManager_addServiceWithFlag(
+ AServiceManager_addServiceWithFlags(
service->asBinder().get(), getServiceName(),
AServiceManager_AddServiceFlag::ADD_SERVICE_ALLOW_ISOLATED);
if (status != STATUS_OK) {
@@ -510,49 +504,16 @@
}
if (info.cookie == 0 && client != nullptr) {
info.cookie = addCookieAndLink_l(client,
- new DeathNotifier(ref<ResourceManagerService>(), pid, clientId));
+ new DeathNotifier(ref<ResourceManagerService>(), clientInfo));
}
if (mObserverService != nullptr && !resourceAdded.empty()) {
mObserverService->onResourceAdded(uid, pid, resourceAdded);
}
notifyResourceGranted(pid, resources);
- // Increase the instance count of the resource associated with this client.
- increaseResourceInstanceCount(clientId, name);
-
return Status::ok();
}
-void ResourceManagerService::increaseResourceInstanceCount(int64_t clientId,
- const std::string& name) {
- // Check whether this client has been looked into already.
- if (mClientIdSet.find(clientId) == mClientIdSet.end()) {
- mClientIdSet.insert(clientId);
- // Update the resource instance count.
- auto found = mConcurrentResourceCountMap.find(name);
- if (found == mConcurrentResourceCountMap.end()) {
- mConcurrentResourceCountMap[name] = 1;
- } else {
- found->second++;
- }
- }
-}
-
-void ResourceManagerService::decreaseResourceInstanceCount(int64_t clientId,
- const std::string& name) {
- // Since this client has been removed, remove it from mClientIdSet
- mClientIdSet.erase(clientId);
- // Update the resource instance count also.
- auto found = mConcurrentResourceCountMap.find(name);
- if (found != mConcurrentResourceCountMap.end()) {
- if (found->second == 1) {
- mConcurrentResourceCountMap.erase(found);
- } else {
- found->second--;
- }
- }
-}
-
Status ResourceManagerService::removeResource(const ClientInfoParcel& clientInfo,
const std::vector<MediaResourceParcel>& resources) {
int32_t pid = clientInfo.pid;
@@ -657,9 +618,8 @@
onLastRemoved(it->second, info);
}
- // Since this client has been removed, decrease the corresponding
- // resources instance count.
- decreaseResourceInstanceCount(clientId, info.name);
+ // Since this client has been removed, update the metrics collector.
+ mResourceManagerMetrics->notifyClientReleased(clientInfo);
removeCookieAndUnlink_l(info.client, info.cookie);
@@ -791,73 +751,19 @@
void ResourceManagerService::pushReclaimAtom(const ClientInfoParcel& clientInfo,
const Vector<std::shared_ptr<IResourceManagerClient>>& clients,
const PidUidVector& idVector, bool reclaimed) {
- // Construct the metrics for codec reclaim as a pushed atom.
- // 1. Information about the requester.
- // - UID and the priority (oom score)
int32_t callingPid = clientInfo.pid;
- int32_t requesterUid = clientInfo.uid;
- std::string clientName = clientInfo.name;
int requesterPriority = -1;
getPriority_l(callingPid, &requesterPriority);
+ std::vector<int> priorities;
+ priorities.push_back(requesterPriority);
- // 2. Information about the codec.
- // - Name of the codec requested
- // - Number of concurrent codecs running.
- int32_t noOfConcurrentCodecs = 0;
- auto found = mConcurrentResourceCountMap.find(clientName);
- if (found != mConcurrentResourceCountMap.end()) {
- noOfConcurrentCodecs = found->second;
- }
-
- // 3. Information about the Reclaim:
- // - Status of reclaim request
- // - How many codecs are reclaimed
- // - For each codecs reclaimed, information of the process that it belonged to:
- // - UID and the Priority (oom score)
- int32_t reclaimStatus = MEDIA_CODEC_RECLAIM_REQUEST_COMPLETED__RECLAIM_STATUS__RECLAIM_SUCCESS;
- if (!reclaimed) {
- if (clients.size() == 0) {
- // No clients to reclaim from
- reclaimStatus =
- MEDIA_CODEC_RECLAIM_REQUEST_COMPLETED__RECLAIM_STATUS__RECLAIM_FAILED_NO_CLIENTS;
- } else {
- // Couldn't reclaim resources from the clients
- reclaimStatus =
- MEDIA_CODEC_RECLAIM_REQUEST_COMPLETED__RECLAIM_STATUS__RECLAIM_FAILED_RECLAIM_RESOURCES;
- }
- }
- int32_t noOfCodecsReclaimed = clients.size();
- int32_t targetIndex = 1;
- for (const auto& id : idVector) {
- int32_t targetUid = id.second;
+ for (PidUidVector::const_reference id : idVector) {
int targetPriority = -1;
getPriority_l(id.first, &targetPriority);
- // Post the pushed atom
- int result = stats_write(
- MEDIA_CODEC_RECLAIM_REQUEST_COMPLETED,
- requesterUid,
- requesterPriority,
- clientName.c_str(),
- noOfConcurrentCodecs,
- reclaimStatus,
- noOfCodecsReclaimed,
- targetIndex,
- targetUid,
- targetPriority);
- ALOGI("%s: Pushed MEDIA_CODEC_RECLAIM_REQUEST_COMPLETED atom: "
- "Requester[pid(%d): uid(%d): priority(%d)] "
- "Codec: [%s] "
- "No of concurrent codecs: %d "
- "Reclaim Status: %d "
- "No of codecs reclaimed: %d "
- "Target[%d][pid(%d): uid(%d): priority(%d)] "
- "Atom Size: %d",
- __func__, callingPid, requesterUid, requesterPriority,
- clientName.c_str(), noOfConcurrentCodecs,
- reclaimStatus, noOfCodecsReclaimed,
- targetIndex, id.first, targetUid, targetPriority, result);
- targetIndex++;
+ priorities.push_back(targetPriority);
}
+ mResourceManagerMetrics->pushReclaimAtom(clientInfo, priorities, clients,
+ idVector, reclaimed);
}
bool ResourceManagerService::reclaimUnconditionallyFrom(
@@ -933,6 +839,7 @@
mOverridePidMap.erase(originalPid);
if (newPid != -1) {
mOverridePidMap.emplace(originalPid, newPid);
+ mResourceManagerMetrics->addPid(newPid);
}
}
@@ -966,8 +873,12 @@
return Status::fromServiceSpecificError(BAD_VALUE);
}
+ ClientInfoParcel clientInfo{.pid = static_cast<int32_t>(pid),
+ .uid = 0,
+ .id = 0,
+ .name = "<unknown client>"};
uintptr_t cookie = addCookieAndLink_l(client,
- new OverrideProcessInfoDeathNotifier(ref<ResourceManagerService>(), pid));
+ new OverrideProcessInfoDeathNotifier(ref<ResourceManagerService>(), clientInfo));
mProcessInfoOverrideMap.emplace(pid, ProcessInfoOverride{cookie, client});
@@ -1282,4 +1193,27 @@
return true;
}
+Status ResourceManagerService::notifyClientCreated(const ClientInfoParcel& clientInfo) {
+ mResourceManagerMetrics->notifyClientCreated(clientInfo);
+ return Status::ok();
+}
+
+Status ResourceManagerService::notifyClientStarted(const ClientConfigParcel& clientConfig) {
+ mResourceManagerMetrics->notifyClientStarted(clientConfig);
+ return Status::ok();
+}
+
+Status ResourceManagerService::notifyClientStopped(const ClientConfigParcel& clientConfig) {
+ mResourceManagerMetrics->notifyClientStopped(clientConfig);
+ return Status::ok();
+}
+
+long ResourceManagerService::getPeakConcurrentPixelCount(int pid) const {
+ return mResourceManagerMetrics->getPeakConcurrentPixelCount(pid);
+}
+
+long ResourceManagerService::getCurrentConcurrentPixelCount(int pid) const {
+ return mResourceManagerMetrics->getCurrentConcurrentPixelCount(pid);
+}
+
} // namespace android
diff --git a/services/mediaresourcemanager/ResourceManagerService.h b/services/mediaresourcemanager/ResourceManagerService.h
index 0016a19..b9756ae 100644
--- a/services/mediaresourcemanager/ResourceManagerService.h
+++ b/services/mediaresourcemanager/ResourceManagerService.h
@@ -39,6 +39,7 @@
class ResourceObserverService;
class ServiceLog;
struct ProcessInfoInterface;
+class ResourceManagerMetrics;
using Status = ::ndk::ScopedAStatus;
using ::aidl::android::media::IResourceManagerClient;
@@ -46,6 +47,7 @@
using ::aidl::android::media::MediaResourceParcel;
using ::aidl::android::media::MediaResourcePolicyParcel;
using ::aidl::android::media::ClientInfoParcel;
+using ::aidl::android::media::ClientConfigParcel;
typedef std::map<std::tuple<
MediaResource::Type, MediaResource::SubType, std::vector<uint8_t>>,
@@ -61,6 +63,7 @@
bool pendingRemoval{false};
};
+// vector of <PID, UID>
typedef std::vector<std::pair<int32_t, uid_t>> PidUidVector;
// TODO: convert these to std::map
@@ -118,6 +121,12 @@
Status removeResource(const ClientInfoParcel& clientInfo, bool checkValid);
+ Status notifyClientCreated(const ClientInfoParcel& clientInfo) override;
+
+ Status notifyClientStarted(const ClientConfigParcel& clientConfig) override;
+
+ Status notifyClientStopped(const ClientConfigParcel& clientConfig) override;
+
private:
friend class ResourceManagerServiceTest;
friend class DeathNotifier;
@@ -182,15 +191,15 @@
void removeCookieAndUnlink_l(const std::shared_ptr<IResourceManagerClient>& client,
uintptr_t cookie);
- // To increase/decrease the number of instances of a given resource
- // associated with a client.
- void increaseResourceInstanceCount(int64_t clientId, const std::string& name);
- void decreaseResourceInstanceCount(int64_t clientId, const std::string& name);
-
void pushReclaimAtom(const ClientInfoParcel& clientInfo,
const Vector<std::shared_ptr<IResourceManagerClient>>& clients,
const PidUidVector& idList, bool reclaimed);
+ // Get the peak concurrent pixel count (associated with the video codecs) for the process.
+ long getPeakConcurrentPixelCount(int pid) const;
+ // Get the current concurrent pixel count (associated with the video codecs) for the process.
+ long getCurrentConcurrentPixelCount(int pid) const;
+
mutable Mutex mLock;
sp<ProcessInfoInterface> mProcessInfo;
sp<SystemCallbackInterface> mSystemCB;
@@ -211,11 +220,7 @@
static std::map<uintptr_t, sp<DeathNotifier> > sCookieToDeathNotifierMap
GUARDED_BY(sCookieLock);
std::shared_ptr<ResourceObserverService> mObserverService;
-
- // List of active clients
- std::set<int64_t> mClientIdSet;
- // Map of resources (name) and number of concurrent instances
- std::map<std::string, int> mConcurrentResourceCountMap;
+ std::unique_ptr<ResourceManagerMetrics> mResourceManagerMetrics;
};
// ----------------------------------------------------------------------------
diff --git a/services/mediaresourcemanager/ResourceObserverService.cpp b/services/mediaresourcemanager/ResourceObserverService.cpp
index 415530a..ebe3903 100644
--- a/services/mediaresourcemanager/ResourceObserverService.cpp
+++ b/services/mediaresourcemanager/ResourceObserverService.cpp
@@ -100,7 +100,7 @@
std::shared_ptr<ResourceObserverService> ResourceObserverService::instantiate() {
std::shared_ptr<ResourceObserverService> observerService =
::ndk::SharedRefBase::make<ResourceObserverService>();
- binder_status_t status = AServiceManager_addServiceWithFlag(
+ binder_status_t status = AServiceManager_addServiceWithFlags(
observerService->asBinder().get(),ResourceObserverService::getServiceName(),
AServiceManager_AddServiceFlag::ADD_SERVICE_ALLOW_ISOLATED);
diff --git a/services/mediaresourcemanager/UidObserver.cpp b/services/mediaresourcemanager/UidObserver.cpp
new file mode 100644
index 0000000..f321ebc
--- /dev/null
+++ b/services/mediaresourcemanager/UidObserver.cpp
@@ -0,0 +1,182 @@
+/*
+**
+** Copyright 2023, The Android Open Source Project
+**
+** Licensed under the Apache License, Version 2.0 (the "License");
+** you may not use this file except in compliance with the License.
+** You may obtain a copy of the License at
+**
+** http://www.apache.org/licenses/LICENSE-2.0
+**
+** Unless required by applicable law or agreed to in writing, software
+** distributed under the License is distributed on an "AS IS" BASIS,
+** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+** See the License for the specific language governing permissions and
+** limitations under the License.
+*/
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "ResourceManagerMetrics"
+
+#include <android/binder_process.h>
+#include <mediautils/ProcessInfo.h>
+#include "UidObserver.h"
+
+namespace {
+const char* kActivityServiceName = "activity";
+}; // namespace anonymous
+
+namespace android {
+
+UidObserver::UidObserver(const sp<ProcessInfoInterface>& processInfo,
+ OnProcessTerminated onProcessTerminated) :
+ mRegistered(false),
+ mOnProcessTerminated(std::move(onProcessTerminated)),
+ mProcessInfo(processInfo) {
+}
+
+UidObserver::~UidObserver() {
+ stop();
+}
+
+void UidObserver::start() {
+ // Use checkService() to see whether the activity service is available.
+ // If it is not available, register for notifications instead of blocking
+ // until the service is ready.
+ sp<IServiceManager> sm = defaultServiceManager();
+ sp<IBinder> binder = sm->checkService(String16(kActivityServiceName));
+ if (!binder) {
+ sm->registerForNotifications(String16(kActivityServiceName), this);
+ } else {
+ registerWithActivityManager();
+ }
+}
+
+void UidObserver::stop() {
+ std::scoped_lock lock{mLock};
+
+ if (mRegistered) {
+ // Unregister from ActivityManager.
+ mAm.unregisterUidObserver(this);
+ mAm.unlinkToDeath(this);
+ mRegistered = false;
+ }
+}
+
+void UidObserver::add(int pid, uid_t uid) {
+ bool needToRegister = false;
+ {
+ std::scoped_lock lock(mLock);
+ std::map<uid_t, std::set<int32_t>>::iterator found = mUids.find(uid);
+ if (found != mUids.end()) {
+ found->second.insert(pid);
+ } else {
+ std::set<int32_t> pids{pid};
+ mUids.emplace(uid, std::move(pids));
+ }
+ needToRegister = !mRegistered;
+ }
+ if (needToRegister) {
+ start();
+ }
+}
+
+void UidObserver::registerWithActivityManager() {
+ std::scoped_lock lock{mLock};
+
+ if (mRegistered) {
+ return;
+ }
+ status_t res = mAm.linkToDeath(this);
+ // Register for UID gone.
+ mAm.registerUidObserver(this, ActivityManager::UID_OBSERVER_GONE,
+ ActivityManager::PROCESS_STATE_UNKNOWN,
+ String16("mediaserver"));
+ if (res == OK) {
+ mRegistered = true;
+ ALOGV("UidObserver: Registered with ActivityManager");
+ }
+}
+
+void UidObserver::onServiceRegistration(const String16& name, const sp<IBinder>&) {
+ if (name != String16(kActivityServiceName)) {
+ return;
+ }
+
+ registerWithActivityManager();
+}
+
+void UidObserver::getTerminatedProcesses(const std::vector<int32_t>& pids,
+ std::vector<int32_t>& terminatedPids) {
+ std::vector<bool> existent;
+ terminatedPids.clear();
+ if (mProcessInfo->checkProcessExistent(pids, &existent)) {
+ for (size_t index = 0; index < existent.size(); index++) {
+ if (!existent[index]) {
+ // This process has been terminated already.
+ terminatedPids.push_back(pids[index]);
+ }
+ }
+ }
+}
+
+// This callback will be issued for every UID that is gone/terminated.
+// Since one UID can map to multiple PIDs, this callback can be issued
+// multiple times for the same UID (once per activity/pid).
+// So, we need to check which of the PIDs that share the same UID
+// are actually gone.
+void UidObserver::onUidGone(uid_t uid, bool /*disabled*/) {
+ std::vector<int32_t> terminatedPids;
+ {
+ std::scoped_lock lock{mLock};
+ std::map<uid_t, std::set<int32_t>>::iterator found = mUids.find(uid);
+ if (found != mUids.end()) {
+ if (found->second.size() == 1) {
+ terminatedPids.push_back(*(found->second.begin()));
+ // Only one PID. So we can remove this UID entry.
+ mUids.erase(found);
+ } else {
+ // There are multiple PIDs with the same UID.
+ // Get the list of all terminated PIDs (with the same UID)
+ std::vector<int32_t> pids;
+ std::copy(found->second.begin(), found->second.end(), std::back_inserter(pids));
+ getTerminatedProcesses(pids, terminatedPids);
+ for (int32_t pid : terminatedPids) {
+ // Remove all the terminated PIDs
+ found->second.erase(pid);
+ }
+ // If all PIDs under this UID have terminated, remove this UID entry.
+ if (found->second.size() == 0) {
+ mUids.erase(uid);
+ }
+ }
+ }
+ }
+
+ for (int32_t pid : terminatedPids) {
+ mOnProcessTerminated(pid, uid);
+ }
+}
+
+void UidObserver::onUidActive(uid_t /*uid*/) {
+}
+
+void UidObserver::onUidIdle(uid_t /*uid*/, bool /*disabled*/) {
+}
+
+void UidObserver::onUidStateChanged(uid_t /*uid*/,
+ int32_t /*procState*/,
+ int64_t /*procStateSeq*/,
+ int32_t /*capability*/) {
+}
+
+void UidObserver::onUidProcAdjChanged(uid_t /*uid*/) {
+}
+
+void UidObserver::binderDied(const wp<IBinder>& /*who*/) {
+ std::scoped_lock lock{mLock};
+ ALOGE("UidObserver: ActivityManager has died");
+ mRegistered = false;
+}
+
+} // namespace android
diff --git a/services/mediaresourcemanager/UidObserver.h b/services/mediaresourcemanager/UidObserver.h
new file mode 100644
index 0000000..ed76839
--- /dev/null
+++ b/services/mediaresourcemanager/UidObserver.h
@@ -0,0 +1,116 @@
+/*
+**
+** Copyright 2023, The Android Open Source Project
+**
+** Licensed under the Apache License, Version 2.0 (the "License");
+** you may not use this file except in compliance with the License.
+** You may obtain a copy of the License at
+**
+** http://www.apache.org/licenses/LICENSE-2.0
+**
+** Unless required by applicable law or agreed to in writing, software
+** distributed under the License is distributed on an "AS IS" BASIS,
+** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+** See the License for the specific language governing permissions and
+** limitations under the License.
+*/
+
+#ifndef ANDROID_MEDIA_UIDOBSERVER_H_
+#define ANDROID_MEDIA_UIDOBSERVER_H_
+
+#include <map>
+#include <set>
+#include <mutex>
+#include <functional>
+#include <binder/ActivityManager.h>
+#include <binder/IUidObserver.h>
+#include <binder/BinderService.h>
+
+namespace android {
+
+using OnProcessTerminated = std::function<void(int32_t pid, uid_t)>;
+
+struct ProcessInfoInterface;
+
+//
+// UidObserver class
+//
+// This class implements a callback mechanism to notify about the termination
+// of processes/applications that are registered with this class.
+//
+// It uses ActivityManager to get notified when a UID no longer exists.
+// Since one UID could have multiple PIDs, it uses ActivityManager
+// (through ProcessInfoInterface) to query the process/application
+// state for those pids.
+//
+class UidObserver :
+ public BnUidObserver,
+ public virtual IBinder::DeathRecipient,
+ public virtual IServiceManager::LocalRegistrationCallback {
+public:
+ explicit UidObserver(const sp<ProcessInfoInterface>& processInfo,
+ OnProcessTerminated onProcessTerminated);
+ virtual ~UidObserver();
+
+ // Start registration (with ActivityManager).
+ void start();
+ // Stop registration (with ActivityManager).
+ void stop();
+
+ // Add this pid/uid pair to the set of UIDs to be observed.
+ void add(int pid, uid_t uid);
+
+private:
+ UidObserver() = delete;
+ UidObserver(const UidObserver&) = delete;
+ UidObserver(UidObserver&&) = delete;
+ UidObserver& operator=(const UidObserver&) = delete;
+ UidObserver& operator=(UidObserver&&) = delete;
+
+ // IUidObserver implementation.
+ void onUidGone(uid_t uid, bool disabled) override;
+ void onUidActive(uid_t uid) override;
+ void onUidIdle(uid_t uid, bool disabled) override;
+ void onUidStateChanged(uid_t uid, int32_t procState, int64_t procStateSeq,
+ int32_t capability) override;
+ void onUidProcAdjChanged(uid_t uid) override;
+
+ // IServiceManager::LocalRegistrationCallback implementation.
+ void onServiceRegistration(const String16& name,
+ const sp<IBinder>& binder) override;
+
+ // IBinder::DeathRecipient implementation.
+ void binderDied(const wp<IBinder> &who) override;
+
+ // Registers with ActivityManager for the UID-gone event
+ // to track the termination of applications.
+ void registerWithActivityManager();
+
+ /*
+ * For a list of input pids, checks whether the corresponding
+ * processes have already terminated.
+ *
+ * @param[in] pids List of pids to check for termination.
+ * @param[out] terminatedPids List of pids of terminated processes.
+ *
+ * Upon return, terminatedPids holds all the terminated pids, which will be
+ * a subset of the input pids (in the same order).
+ * If none of the input pids have terminated, terminatedPids will be empty.
+ */
+ void getTerminatedProcesses(const std::vector<int32_t>& pids,
+ std::vector<int32_t>& terminatedPids);
+
+ bool mRegistered = false;
+ std::mutex mLock;
+ ActivityManager mAm;
+ // map of UID and all the PIDs associated with it
+ // as one UID could have multiple PIDs.
+ std::map<uid_t, std::set<int32_t>> mUids;
+ OnProcessTerminated mOnProcessTerminated;
+ sp<ProcessInfoInterface> mProcessInfo;
+};
+
+} // namespace android
+
+#endif //ANDROID_MEDIA_UIDOBSERVER_H_
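A minimal usage sketch for the new UidObserver, assuming the mediautils ProcessInfo implementation of ProcessInfoInterface; the watchProcess() helper is hypothetical, and the returned sp must be kept alive for the callback to fire:

    #define LOG_TAG "UidObserverExample"
    #include <utils/Log.h>
    #include <mediautils/ProcessInfo.h>
    #include "UidObserver.h"

    using namespace android;

    // Illustrative only: watch one pid/uid pair and log when the process goes away.
    sp<UidObserver> watchProcess(int pid, uid_t uid) {
        sp<ProcessInfoInterface> processInfo = new ProcessInfo();
        sp<UidObserver> observer = new UidObserver(
                processInfo,
                [](int32_t terminatedPid, uid_t terminatedUid) {
                    ALOGI("process %d (uid %u) terminated", terminatedPid, terminatedUid);
                });
        // The first add() triggers registration with ActivityManager.
        observer->add(pid, uid);
        return observer;
    }

ResourceManagerMetrics follows the same pattern, wiring its onProcessTerminated() handler into the observer so per-process codec metrics can be finalized when an application dies.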
diff --git a/services/mediaresourcemanager/aidl/android/media/ClientConfigParcel.aidl b/services/mediaresourcemanager/aidl/android/media/ClientConfigParcel.aidl
new file mode 100644
index 0000000..3c9c8c7
--- /dev/null
+++ b/services/mediaresourcemanager/aidl/android/media/ClientConfigParcel.aidl
@@ -0,0 +1,65 @@
+/**
+ * Copyright (c) 2023, The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.media;
+
+import android.media.ClientInfoParcel;
+import android.media.MediaResourceSubType;
+
+/**
+ * Description of a Client(codec) configuration.
+ *
+ * {@hide}
+ */
+parcelable ClientConfigParcel {
+ /**
+ * Client info.
+ */
+ ClientInfoParcel clientInfo;
+
+ /**
+ * Type of codec (Audio/Video/Image).
+ */
+ MediaResourceSubType codecType;
+
+ /**
+ * true if this is an encoder, false if this is a decoder.
+ */
+ boolean isEncoder;
+
+ /**
+ * true if this is a hardware codec, false otherwise.
+ */
+ boolean isHardware;
+
+ /*
+ * Video Resolution of the codec when it was configured, as width and height (in pixels).
+ */
+ int width;
+ int height;
+
+ /*
+ * Timestamp (in microseconds) when this configuration was created.
+ */
+ long timeStamp;
+ /*
+ * ID associated with the Codec.
+ * This is used by the metrics to:
+ * - associate the MediaCodecStarted atom with the MediaCodecStopped atom, and
+ * - correlate with the MediaCodecReported atom for codec configuration parameters.
+ */
+ long id;
+}
diff --git a/services/mediaresourcemanager/aidl/android/media/IResourceManagerService.aidl b/services/mediaresourcemanager/aidl/android/media/IResourceManagerService.aidl
index 30ad41b..fcade38 100644
--- a/services/mediaresourcemanager/aidl/android/media/IResourceManagerService.aidl
+++ b/services/mediaresourcemanager/aidl/android/media/IResourceManagerService.aidl
@@ -20,6 +20,7 @@
import android.media.MediaResourceParcel;
import android.media.MediaResourcePolicyParcel;
import android.media.ClientInfoParcel;
+import android.media.ClientConfigParcel;
/**
* ResourceManagerService interface that keeps track of media resource
@@ -125,4 +126,34 @@
* @param pid pid from which resources will be reclaimed.
*/
void reclaimResourcesFromClientsPendingRemoval(int pid);
+
+ /**
+ * Notify that the client has been created.
+ *
+ * This call is made to collect the (concurrent) metrics about the
+ * resources associated with the Codec (and also DRM sessions).
+ *
+ * @param clientInfo Information of the client.
+ */
+ void notifyClientCreated(in ClientInfoParcel clientInfo);
+
+ /**
+ * Notify that the client has been started.
+ *
+ * This call is made to collect the (concurrent) metrics about the
+ * resources associated with the Codec (and also DRM sessions).
+ *
+ * @param clientConfig Configuration information of the client.
+ */
+ void notifyClientStarted(in ClientConfigParcel clientConfig);
+
+ /**
+ * Notify that the client has been stopped.
+ *
+ * This call is made to collect the (concurrent) metrics about the
+ * resources associated with the Codec (and also DRM sessions).
+ *
+ * @param clientConfig Configuration information of the client.
+ */
+ void notifyClientStopped(in ClientConfigParcel clientConfig);
}
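A hedged, client-side sketch of the new notification calls using the NDK AIDL backend; the helper below and its parameter values are assumptions for illustration, not code from this change:

    #include <memory>

    #include <aidl/android/media/IResourceManagerService.h>
    #include <android/binder_manager.h>

    using ::aidl::android::media::ClientConfigParcel;
    using ::aidl::android::media::ClientInfoParcel;
    using ::aidl::android::media::IResourceManagerService;
    using ::aidl::android::media::MediaResourceSubType;

    // Illustrative only: tell the resource manager a HW video decoder started.
    void notifyVideoDecoderStarted(int32_t pid, int32_t uid, int64_t clientId) {
        ::ndk::SpAIBinder binder(AServiceManager_getService("media.resource_manager"));
        std::shared_ptr<IResourceManagerService> service =
                IResourceManagerService::fromBinder(binder);
        if (service == nullptr) {
            return;
        }
        ClientInfoParcel clientInfo{.pid = pid, .uid = uid, .id = clientId,
                                    .name = "example.video.decoder"};
        ClientConfigParcel config;
        config.clientInfo = clientInfo;
        config.codecType = MediaResourceSubType::kVideoCodec;
        config.isEncoder = false;
        config.isHardware = true;
        config.width = 1920;
        config.height = 1080;
        config.id = clientId;  // correlated with the matching notifyClientStopped()
        service->notifyClientStarted(config);
    }

In-process callers such as the ResourceManagerService tests below invoke notifyClientStarted()/notifyClientStopped() directly on the service object instead of going through the binder.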
diff --git a/services/mediaresourcemanager/fuzzer/Android.bp b/services/mediaresourcemanager/fuzzer/Android.bp
index 27d45d5..d98974f 100644
--- a/services/mediaresourcemanager/fuzzer/Android.bp
+++ b/services/mediaresourcemanager/fuzzer/Android.bp
@@ -45,6 +45,7 @@
"libstats_media_metrics",
"libstatspull",
"libstatssocket",
+ "libactivitymanager_aidl",
],
fuzz_config: {
cc: [
diff --git a/services/mediaresourcemanager/test/Android.bp b/services/mediaresourcemanager/test/Android.bp
index 16c5a4c..f903c62 100644
--- a/services/mediaresourcemanager/test/Android.bp
+++ b/services/mediaresourcemanager/test/Android.bp
@@ -23,6 +23,7 @@
"libstats_media_metrics",
"libstatspull",
"libstatssocket",
+ "libactivitymanager_aidl",
],
include_dirs: [
"frameworks/av/include",
@@ -72,6 +73,7 @@
"libstats_media_metrics",
"libstatspull",
"libstatssocket",
+ "libactivitymanager_aidl",
],
include_dirs: [
"frameworks/av/include",
diff --git a/services/mediaresourcemanager/test/ResourceManagerServiceTestUtils.h b/services/mediaresourcemanager/test/ResourceManagerServiceTestUtils.h
index 8fe2505..474ff0f 100644
--- a/services/mediaresourcemanager/test/ResourceManagerServiceTestUtils.h
+++ b/services/mediaresourcemanager/test/ResourceManagerServiceTestUtils.h
@@ -15,6 +15,7 @@
*/
#include <gtest/gtest.h>
+#include <android/binder_process.h>
#include "ResourceManagerService.h"
#include <aidl/android/media/BnResourceManagerClient.h>
@@ -197,13 +198,20 @@
return static_cast<TestClient*>(testClient.get());
}
- ResourceManagerServiceTestBase()
- : mSystemCB(new TestSystemCallback()),
- mService(::ndk::SharedRefBase::make<ResourceManagerService>(
- new TestProcessInfo, mSystemCB)),
- mTestClient1(::ndk::SharedRefBase::make<TestClient>(kTestPid1, kTestUid1, mService)),
- mTestClient2(::ndk::SharedRefBase::make<TestClient>(kTestPid2, kTestUid2, mService)),
- mTestClient3(::ndk::SharedRefBase::make<TestClient>(kTestPid2, kTestUid2, mService)) {
+ ResourceManagerServiceTestBase() {
+ ALOGI("ResourceManagerServiceTestBase created");
+ }
+
+ void SetUp() override {
+ // Need thread pool to receive callbacks, otherwise oneway callbacks are
+ // silently ignored.
+ ABinderProcess_startThreadPool();
+ mSystemCB = new TestSystemCallback();
+ mService = ::ndk::SharedRefBase::make<ResourceManagerService>(
+ new TestProcessInfo, mSystemCB);
+ mTestClient1 = ::ndk::SharedRefBase::make<TestClient>(kTestPid1, kTestUid1, mService);
+ mTestClient2 = ::ndk::SharedRefBase::make<TestClient>(kTestPid2, kTestUid2, mService);
+ mTestClient3 = ::ndk::SharedRefBase::make<TestClient>(kTestPid2, kTestUid2, mService);
}
std::shared_ptr<IResourceManagerClient> createTestClient(int pid, int uid) {
diff --git a/services/mediaresourcemanager/test/ResourceManagerService_test.cpp b/services/mediaresourcemanager/test/ResourceManagerService_test.cpp
index 41cccb8..4e575f0 100644
--- a/services/mediaresourcemanager/test/ResourceManagerService_test.cpp
+++ b/services/mediaresourcemanager/test/ResourceManagerService_test.cpp
@@ -1367,6 +1367,143 @@
// CPU boost is not expected to be reclaimed when marked as pending removal
EXPECT_FALSE(toTestClient(cpuBoostMarkedClient)->checkIfReclaimedAndReset());
}
+
+ inline void initClientConfigParcel(bool encoder, bool hw,
+ int32_t width, int32_t height,
+ int64_t id,
+ const ClientInfoParcel& clientInfo,
+ ClientConfigParcel& clientConfig) {
+ clientConfig.codecType = MediaResource::SubType::kVideoCodec;
+ clientConfig.isEncoder = encoder;
+ clientConfig.isHardware = hw;
+ clientConfig.width = width;
+ clientConfig.height = height;
+ clientConfig.timeStamp = systemTime(SYSTEM_TIME_MONOTONIC) / 1000LL;
+ clientConfig.id = id;
+ clientConfig.clientInfo = clientInfo;
+ }
+
+ void testConcurrentCodecs() {
+ std::shared_ptr<IResourceManagerClient> testClient4 =
+ createTestClient(kTestPid1, kTestUid1);
+ ClientInfoParcel client1Info{.pid = static_cast<int32_t>(kTestPid1),
+ .uid = static_cast<int32_t>(kTestUid1),
+ .id = getId(mTestClient1),
+ .name = "none"};
+ ClientInfoParcel client2Info{.pid = static_cast<int32_t>(kTestPid2),
+ .uid = static_cast<int32_t>(kTestUid2),
+ .id = getId(mTestClient2),
+ .name = "none"};
+ ClientInfoParcel client3Info{.pid = static_cast<int32_t>(kTestPid2),
+ .uid = static_cast<int32_t>(kTestUid2),
+ .id = getId(mTestClient3),
+ .name = "none"};
+ ClientInfoParcel client4Info{.pid = static_cast<int32_t>(kTestPid1),
+ .uid = static_cast<int32_t>(kTestUid1),
+ .id = getId(testClient4),
+ .name = "none"};
+ ClientConfigParcel client1Config;
+ ClientConfigParcel client2Config;
+ ClientConfigParcel client3Config;
+ ClientConfigParcel client4Config;
+
+ // HW Video Encoder @ 1080P.
+ initClientConfigParcel(true, true, 1920, 1080, 11111111,
+ client1Info, client1Config);
+ // HW Video Decoder @ 4K.
+ initClientConfigParcel(false, true, 2160, 3840, 22222222,
+ client2Info, client2Config);
+ // SW Video Encoder @ 1080P.
+ initClientConfigParcel(true, false, 1920, 1080, 33333333,
+ client3Info, client3Config);
+ // SW Video Decoder @ 4K.
+ initClientConfigParcel(false, false, 2160, 3840, 44444444,
+ client4Info, client4Config);
+
+ // Start client1 at 1080P.
+ mService->notifyClientStarted(client1Config);
+ long peakPixelCountP1 = mService->getPeakConcurrentPixelCount(kTestPid1);
+ long currentPixelCountP1 = mService->getCurrentConcurrentPixelCount(kTestPid1);
+ EXPECT_TRUE(peakPixelCountP1 == client1Config.width * client1Config.height);
+ EXPECT_TRUE(currentPixelCountP1 == client1Config.width * client1Config.height);
+
+ // Stop client1.
+ mService->notifyClientStopped(client1Config);
+ peakPixelCountP1 = mService->getPeakConcurrentPixelCount(kTestPid1);
+ currentPixelCountP1 = mService->getCurrentConcurrentPixelCount(kTestPid1);
+ EXPECT_TRUE(peakPixelCountP1 == client1Config.width * client1Config.height);
+ EXPECT_TRUE(currentPixelCountP1 == 0);
+
+ // Start client1 at 1080P.
+ mService->notifyClientStarted(client1Config);
+ // Start client2 at 4K.
+ mService->notifyClientStarted(client2Config);
+
+ // Verify the Peak and Current Concurrent pixel count for both the process
+ // (kTestPid1, kTestPid2)
+ peakPixelCountP1 = mService->getPeakConcurrentPixelCount(kTestPid1);
+ currentPixelCountP1 = mService->getCurrentConcurrentPixelCount(kTestPid1);
+ long peakPixelCountP2 = mService->getPeakConcurrentPixelCount(kTestPid2);
+ long currentPixelCountP2 = mService->getCurrentConcurrentPixelCount(kTestPid2);
+ EXPECT_TRUE(peakPixelCountP1 == client1Config.width * client1Config.height);
+ EXPECT_TRUE(currentPixelCountP1 == client1Config.width * client1Config.height);
+ EXPECT_TRUE(peakPixelCountP2 == client2Config.width * client2Config.height);
+ EXPECT_TRUE(currentPixelCountP2 == client2Config.width * client2Config.height);
+
+ // Start client3 at 1080P.
+ mService->notifyClientStarted(client3Config);
+ // Start client4 at 4K.
+ mService->notifyClientStarted(client4Config);
+
+ // Verify the Peak and Current Concurrent pixel count for both the process
+ // (kTestPid1, kTestPid2)
+ peakPixelCountP1 = mService->getPeakConcurrentPixelCount(kTestPid1);
+ currentPixelCountP1 = mService->getCurrentConcurrentPixelCount(kTestPid1);
+ peakPixelCountP2 = mService->getPeakConcurrentPixelCount(kTestPid2);
+ currentPixelCountP2 = mService->getCurrentConcurrentPixelCount(kTestPid2);
+ EXPECT_TRUE(peakPixelCountP1 ==
+ (client1Config.width * client1Config.height +
+ client4Config.width * client4Config.height));
+ EXPECT_TRUE(currentPixelCountP1 ==
+ (client1Config.width * client1Config.height +
+ client4Config.width * client4Config.height));
+ EXPECT_TRUE(peakPixelCountP2 ==
+ (client2Config.width * client2Config.height +
+ client3Config.width * client3Config.height));
+ EXPECT_TRUE(currentPixelCountP2 ==
+ (client2Config.width * client2Config.height +
+ client3Config.width * client3Config.height));
+
+ // Stop client4
+ mService->notifyClientStopped(client4Config);
+ currentPixelCountP1 = mService->getCurrentConcurrentPixelCount(kTestPid1);
+ EXPECT_TRUE(currentPixelCountP1 == client1Config.width * client1Config.height);
+
+ // Stop client1
+ mService->notifyClientStopped(client1Config);
+
+ // Stop client2
+ mService->notifyClientStopped(client2Config);
+ currentPixelCountP2 = mService->getCurrentConcurrentPixelCount(kTestPid2);
+ EXPECT_TRUE(currentPixelCountP2 == client3Config.width * client3Config.height);
+ // Stop client3
+ mService->notifyClientStopped(client3Config);
+
+ // Verify the Peak and Current Concurrent pixel count for both the process
+ // (kTestPid1, kTestPid2)
+ peakPixelCountP1 = mService->getPeakConcurrentPixelCount(kTestPid1);
+ currentPixelCountP1 = mService->getCurrentConcurrentPixelCount(kTestPid1);
+ peakPixelCountP2 = mService->getPeakConcurrentPixelCount(kTestPid2);
+ currentPixelCountP2 = mService->getCurrentConcurrentPixelCount(kTestPid2);
+ EXPECT_TRUE(peakPixelCountP1 ==
+ (client1Config.width * client1Config.height +
+ client4Config.width * client4Config.height));
+ EXPECT_TRUE(currentPixelCountP1 == 0);
+ EXPECT_TRUE(peakPixelCountP2 ==
+ (client2Config.width * client2Config.height +
+ client3Config.width * client3Config.height));
+ EXPECT_TRUE(currentPixelCountP2 == 0);
+ }
};
TEST_F(ResourceManagerServiceTest, config) {
@@ -1451,4 +1588,8 @@
testReclaimResourcesFromMarkedClients_removesBiggestMarkedClientForSomeResources();
}
+TEST_F(ResourceManagerServiceTest, concurrentCodecs) {
+ testConcurrentCodecs();
+}
+
} // namespace android
diff --git a/services/mediaresourcemanager/test/ResourceObserverService_test.cpp b/services/mediaresourcemanager/test/ResourceObserverService_test.cpp
index a0d728c..85769d5 100644
--- a/services/mediaresourcemanager/test/ResourceObserverService_test.cpp
+++ b/services/mediaresourcemanager/test/ResourceObserverService_test.cpp
@@ -166,11 +166,14 @@
class ResourceObserverServiceTest : public ResourceManagerServiceTestBase {
public:
- ResourceObserverServiceTest() : ResourceManagerServiceTestBase(),
- mObserverService(::ndk::SharedRefBase::make<ResourceObserverService>()),
- mTestObserver1(::ndk::SharedRefBase::make<TestObserver>("observer1")),
- mTestObserver2(::ndk::SharedRefBase::make<TestObserver>("observer2")),
- mTestObserver3(::ndk::SharedRefBase::make<TestObserver>("observer3")) {
+ ResourceObserverServiceTest() : ResourceManagerServiceTestBase() {}
+
+ void SetUp() override {
+ ResourceManagerServiceTestBase::SetUp();
+ mObserverService = ::ndk::SharedRefBase::make<ResourceObserverService>();
+ mTestObserver1 = ::ndk::SharedRefBase::make<TestObserver>("observer1");
+ mTestObserver2 = ::ndk::SharedRefBase::make<TestObserver>("observer2");
+ mTestObserver3 = ::ndk::SharedRefBase::make<TestObserver>("observer3");
mService->setObserverService(mObserverService);
}
diff --git a/services/oboeservice/AAudioClientTracker.cpp b/services/oboeservice/AAudioClientTracker.cpp
index c0dac11..c91ead0 100644
--- a/services/oboeservice/AAudioClientTracker.cpp
+++ b/services/oboeservice/AAudioClientTracker.cpp
@@ -196,7 +196,8 @@
for (const auto& serviceStream : streamsToClose) {
const aaudio_handle_t handle = serviceStream->getHandle();
ALOGW("binderDied() close abandoned stream 0x%08X\n", handle);
- aaudioService->asAAudioServiceInterface().closeStream(handle);
+ AAudioHandleInfo handleInfo(DEFAULT_AAUDIO_SERVICE_ID, handle);
+ aaudioService->asAAudioServiceInterface().closeStream(handleInfo);
}
// mStreams should be empty now
}
diff --git a/services/oboeservice/AAudioService.h b/services/oboeservice/AAudioService.h
index df66f1b..ada3d53 100644
--- a/services/oboeservice/AAudioService.h
+++ b/services/oboeservice/AAudioService.h
@@ -36,6 +36,7 @@
namespace android {
#define AAUDIO_SERVICE_NAME "media.aaudio"
+#define DEFAULT_AAUDIO_SERVICE_ID 0
class AAudioService :
public BinderService<AAudioService>,
@@ -108,20 +109,22 @@
private:
class Adapter : public aaudio::AAudioBinderAdapter {
public:
+ // Always use the default service id on the server side, since the
+ // aaudio service will restart after a crash.
explicit Adapter(AAudioService *service)
- : aaudio::AAudioBinderAdapter(service),
+ : aaudio::AAudioBinderAdapter(service, DEFAULT_AAUDIO_SERVICE_ID),
mService(service) {}
- aaudio_result_t startClient(aaudio::aaudio_handle_t streamHandle,
+ aaudio_result_t startClient(const aaudio::AAudioHandleInfo& streamHandleInfo,
const android::AudioClient &client,
const audio_attributes_t *attr,
audio_port_handle_t *clientHandle) override {
- return mService->startClient(streamHandle, client, attr, clientHandle);
+ return mService->startClient(streamHandleInfo.getHandle(), client, attr, clientHandle);
}
- aaudio_result_t stopClient(aaudio::aaudio_handle_t streamHandle,
+ aaudio_result_t stopClient(const aaudio::AAudioHandleInfo& streamHandleInfo,
audio_port_handle_t clientHandle) override {
- return mService->stopClient(streamHandle, clientHandle);
+ return mService->stopClient(streamHandleInfo.getHandle(), clientHandle);
}
private:
diff --git a/services/oboeservice/AAudioServiceEndpointMMAP.cpp b/services/oboeservice/AAudioServiceEndpointMMAP.cpp
index d4cdb0b..7f228c7 100644
--- a/services/oboeservice/AAudioServiceEndpointMMAP.cpp
+++ b/services/oboeservice/AAudioServiceEndpointMMAP.cpp
@@ -58,7 +58,7 @@
result << " MMAP: framesTransferred = " << mFramesTransferred.get();
result << ", HW nanos = " << mHardwareTimeOffsetNanos;
result << ", port handle = " << mPortHandle;
- result << ", audio data FD = " << mAudioDataFileDescriptor;
+ result << ", audio data FD = " << mAudioDataWrapper->getDataFileDescriptor();
result << "\n";
result << " HW Offset Micros: " <<
@@ -89,6 +89,7 @@
aaudio_result_t AAudioServiceEndpointMMAP::open(const aaudio::AAudioStreamRequest &request) {
aaudio_result_t result = AAUDIO_OK;
+ mAudioDataWrapper = std::make_unique<SharedMemoryWrapper>();
copyFrom(request.getConstantConfiguration());
mRequestedDeviceId = getDeviceId();
@@ -104,7 +105,7 @@
while (true) {
if (formatsTried.find(audioFormat) != formatsTried.end()) {
// APM returning something that has already tried.
- ALOGW("Have already tried to open #x, but failed before");
+ ALOGW("Have already tried to open with format=%#x, but failed before", audioFormat);
break;
}
formatsTried.insert(audioFormat);
@@ -194,7 +195,7 @@
// not match the hardware.
ALOGD("%s() - openMmapStream() returned status=%d, suggested format=%#x, sample_rate=%u, "
"channel_mask=%#x",
- __func__, status, config.format, config.sample_rate, config.format);
+ __func__, status, config.format, config.sample_rate, config.channel_mask);
*nextFormatToTry = config.format != audioFormat ? config.format
: *nextFormatToTry;
return AAUDIO_ERROR_UNAVAILABLE;
@@ -219,7 +220,7 @@
__func__, audioFormat, getDeviceId(), getSessionId());
// Create MMAP/NOIRQ buffer.
- result = createMmapBuffer(&mAudioDataFileDescriptor);
+ result = createMmapBuffer();
if (result != AAUDIO_OK) {
goto error;
}
@@ -243,6 +244,8 @@
mTimestampGracePeriodMs = ((int64_t) kTimestampGraceBurstCount * mFramesPerBurst
* AAUDIO_MILLIS_PER_SECOND) / getSampleRate();
+ mDataReportOffsetNanos = ((int64_t)mTimestampGracePeriodMs) * AAUDIO_NANOS_PER_MILLISECOND;
+
ALOGD("%s() got rate = %d, channels = %d channelMask = %#x, deviceId = %d, capacity = %d\n",
__func__, getSampleRate(), getSamplesPerFrame(), getChannelMask(),
deviceId, getBufferCapacity());
@@ -327,17 +330,10 @@
if (mMmapStream == nullptr) {
return AAUDIO_ERROR_NULL;
}
- mAudioDataFileDescriptor.reset();
- const aaudio_result_t result = createMmapBuffer(&mAudioDataFileDescriptor);
+ mAudioDataWrapper->reset();
+ const aaudio_result_t result = createMmapBuffer();
if (result == AAUDIO_OK) {
- const int32_t bytesPerFrame = calculateBytesPerFrame();
- const int32_t capacityInBytes = getBufferCapacity() * bytesPerFrame;
- const int fdIndex = parcelable->addFileDescriptor(
- mAudioDataFileDescriptor, capacityInBytes);
- parcelable->mDownDataQueueParcelable.setupMemory(fdIndex, 0, capacityInBytes);
- parcelable->mDownDataQueueParcelable.setBytesPerFrame(bytesPerFrame);
- parcelable->mDownDataQueueParcelable.setFramesPerBurst(mFramesPerBurst);
- parcelable->mDownDataQueueParcelable.setCapacityInFrames(getBufferCapacity());
+ getDownDataDescription(parcelable);
}
return result;
}
@@ -427,14 +423,19 @@
aaudio_result_t AAudioServiceEndpointMMAP::getDownDataDescription(
AudioEndpointParcelable* parcelable)
{
+ if (mAudioDataWrapper->setupFifoBuffer(calculateBytesPerFrame(), getBufferCapacity())
+ != AAUDIO_OK) {
+ ALOGE("Failed to setup audio data wrapper, will not be able to "
+ "set data for sound dose computation");
+ // This will not affect the audio processing capability
+ }
// Gather information on the data queue based on HAL info.
- const int32_t bytesPerFrame = calculateBytesPerFrame();
- const int32_t capacityInBytes = getBufferCapacity() * bytesPerFrame;
- const int fdIndex = parcelable->addFileDescriptor(mAudioDataFileDescriptor, capacityInBytes);
- parcelable->mDownDataQueueParcelable.setupMemory(fdIndex, 0, capacityInBytes);
- parcelable->mDownDataQueueParcelable.setBytesPerFrame(bytesPerFrame);
- parcelable->mDownDataQueueParcelable.setFramesPerBurst(mFramesPerBurst);
- parcelable->mDownDataQueueParcelable.setCapacityInFrames(getBufferCapacity());
+ mAudioDataWrapper->fillParcelable(parcelable, parcelable->mDownDataQueueParcelable,
+ calculateBytesPerFrame(), mFramesPerBurst,
+ getBufferCapacity(),
+ getDirection() == AAUDIO_DIRECTION_OUTPUT
+ ? SharedMemoryWrapper::WRITE
+ : SharedMemoryWrapper::NONE);
return AAUDIO_OK;
}
@@ -518,8 +519,7 @@
return mHalExternalPositionStatus;
}
-aaudio_result_t AAudioServiceEndpointMMAP::createMmapBuffer(
- android::base::unique_fd* fileDescriptor)
+aaudio_result_t AAudioServiceEndpointMMAP::createMmapBuffer()
{
memset(&mMmapBufferinfo, 0, sizeof(struct audio_mmap_buffer_info));
int32_t minSizeFrames = getBufferCapacity();
@@ -555,8 +555,9 @@
// AAudio creates a copy of this FD and retains ownership of the copy.
// Assume that AudioFlinger will close the original shared_memory_fd.
- fileDescriptor->reset(dup(mMmapBufferinfo.shared_memory_fd));
- if (fileDescriptor->get() == -1) {
+
+ mAudioDataWrapper->getDataFileDescriptor().reset(dup(mMmapBufferinfo.shared_memory_fd));
+ if (mAudioDataWrapper->getDataFileDescriptor().get() == -1) {
ALOGE("%s() - could not dup shared_memory_fd", __func__);
return AAUDIO_ERROR_INTERNAL;
}
@@ -571,3 +572,31 @@
return AAUDIO_OK;
}
+
+int64_t AAudioServiceEndpointMMAP::nextDataReportTime() {
+ return getDirection() == AAUDIO_DIRECTION_OUTPUT
+ ? AudioClock::getNanoseconds() + mDataReportOffsetNanos
+ : std::numeric_limits<int64_t>::max();
+}
+
+void AAudioServiceEndpointMMAP::reportData() {
+ if (mMmapStream == nullptr) {
+ // This must not happen
+ ALOGE("%s() invalid state, mmap stream is not initialized", __func__);
+ return;
+ }
+ auto fifo = mAudioDataWrapper->getFifoBuffer();
+ if (fifo == nullptr) {
+ ALOGE("%s() fifo buffer is not initialized, cannot report data", __func__);
+ return;
+ }
+
+ WrappingBuffer wrappingBuffer;
+ fifo_frames_t framesAvailable = fifo->getFullDataAvailable(&wrappingBuffer);
+ for (size_t i = 0; i < WrappingBuffer::SIZE; ++i) {
+ if (wrappingBuffer.numFrames[i] > 0) {
+ mMmapStream->reportData(wrappingBuffer.data[i], wrappingBuffer.numFrames[i]);
+ }
+ }
+ fifo->advanceReadIndex(framesAvailable);
+}
diff --git a/services/oboeservice/AAudioServiceEndpointMMAP.h b/services/oboeservice/AAudioServiceEndpointMMAP.h
index 4f77393..38cf0ba 100644
--- a/services/oboeservice/AAudioServiceEndpointMMAP.h
+++ b/services/oboeservice/AAudioServiceEndpointMMAP.h
@@ -30,6 +30,7 @@
#include "AAudioServiceStreamMMAP.h"
#include "AAudioMixer.h"
#include "AAudioService.h"
+#include "SharedMemoryWrapper.h"
namespace aaudio {
@@ -90,11 +91,15 @@
aaudio_result_t getExternalPosition(uint64_t *positionFrames, int64_t *timeNanos);
+ int64_t nextDataReportTime();
+
+ void reportData();
+
private:
aaudio_result_t openWithFormat(audio_format_t audioFormat, audio_format_t* nextFormatToTry);
- aaudio_result_t createMmapBuffer(android::base::unique_fd* fileDescriptor);
+ aaudio_result_t createMmapBuffer();
MonotonicCounter mFramesTransferred;
@@ -107,7 +112,7 @@
android::AAudioService &mAAudioService;
- android::base::unique_fd mAudioDataFileDescriptor;
+ std::unique_ptr<SharedMemoryWrapper> mAudioDataWrapper;
int64_t mHardwareTimeOffsetNanos = 0; // TODO get from HAL
@@ -117,6 +122,7 @@
int32_t mTimestampGracePeriodMs;
int32_t mFrozenPositionCount = 0;
int32_t mFrozenTimestampCount = 0;
+ int64_t mDataReportOffsetNanos = 0;
};
diff --git a/services/oboeservice/AAudioServiceStreamBase.cpp b/services/oboeservice/AAudioServiceStreamBase.cpp
index 8e1e497..65854c8 100644
--- a/services/oboeservice/AAudioServiceStreamBase.cpp
+++ b/services/oboeservice/AAudioServiceStreamBase.cpp
@@ -404,7 +404,8 @@
// Hold onto the ref counted stream until the end.
android::sp<AAudioServiceStreamBase> holdStream(this);
TimestampScheduler timestampScheduler;
- int64_t nextTime;
+ int64_t nextTimestampReportTime;
+ int64_t nextDataReportTime;
int64_t standbyTime = AudioClock::getNanoseconds() + IDLE_TIMEOUT_NANOS;
// Balance the incStrong from when the thread was launched.
holdStream->decStrong(nullptr);
@@ -417,8 +418,18 @@
while (mThreadEnabled.load()) {
loopCount++;
int64_t timeoutNanos = -1;
- if (isRunning() || (isIdle_l() && !isStandby_l())) {
- timeoutNanos = (isRunning() ? nextTime : standbyTime) - AudioClock::getNanoseconds();
+ if (isDisconnected_l()) {
+ if (!isStandby_l()) {
+ // If the stream is disconnected but not in standby mode, wait until standby time.
+ timeoutNanos = standbyTime - AudioClock::getNanoseconds();
+ timeoutNanos = std::max<int64_t>(0, timeoutNanos);
+ } // else {
+ // If the stream is disconnected and in standby mode, keep `timeoutNanos` as
+ // -1 to wait forever until next command as the stream can only be closed.
+ // }
+ } else if (isRunning() || (isIdle_l() && !isStandby_l())) {
+ timeoutNanos = (isRunning() ? std::min(nextTimestampReportTime, nextDataReportTime)
+ : standbyTime) - AudioClock::getNanoseconds();
timeoutNanos = std::max<int64_t>(0, timeoutNanos);
}
@@ -428,16 +439,22 @@
break;
}
- if (isRunning() && AudioClock::getNanoseconds() >= nextTime) {
- // It is time to update timestamp.
- if (sendCurrentTimestamp_l() != AAUDIO_OK) {
- ALOGE("Failed to send current timestamp, stop updating timestamp");
- disconnect_l();
- } else {
- nextTime = timestampScheduler.nextAbsoluteTime();
+ if (isRunning() && !isDisconnected_l()) {
+ auto currentTimestamp = AudioClock::getNanoseconds();
+ if (currentTimestamp >= nextDataReportTime) {
+ reportData_l();
+ nextDataReportTime = nextDataReportTime_l();
+ }
+ if (currentTimestamp >= nextTimestampReportTime) {
+ // It is time to update timestamp.
+ if (sendCurrentTimestamp_l() != AAUDIO_OK) {
+ ALOGE("Failed to send current timestamp, stop updating timestamp");
+ disconnect_l();
+ }
+ nextTimestampReportTime = timestampScheduler.nextAbsoluteTime();
}
}
- if (isIdle_l() && AudioClock::getNanoseconds() >= standbyTime) {
+ if ((isIdle_l() || isDisconnected_l()) && AudioClock::getNanoseconds() >= standbyTime) {
aaudio_result_t result = standby_l();
if (result != AAUDIO_OK) {
// If standby failed because of the function is not implemented, there is no
@@ -456,7 +473,8 @@
command->result = start_l();
timestampScheduler.setBurstPeriod(mFramesPerBurst, getSampleRate());
timestampScheduler.start(AudioClock::getNanoseconds());
- nextTime = timestampScheduler.nextAbsoluteTime();
+ nextTimestampReportTime = timestampScheduler.nextAbsoluteTime();
+ nextDataReportTime = nextDataReportTime_l();
break;
case PAUSE:
command->result = pause_l();
diff --git a/services/oboeservice/AAudioServiceStreamBase.h b/services/oboeservice/AAudioServiceStreamBase.h
index 0f51503..bc7ccde 100644
--- a/services/oboeservice/AAudioServiceStreamBase.h
+++ b/services/oboeservice/AAudioServiceStreamBase.h
@@ -346,6 +346,11 @@
|| mState == AAUDIO_STREAM_STATE_STOPPED;
}
+ virtual int64_t nextDataReportTime_l() REQUIRES(mLock) {
+ return std::numeric_limits<int64_t>::max();
+ }
+ virtual void reportData_l() REQUIRES(mLock) { return; }
+
pid_t mRegisteredClientThread = ILLEGAL_THREAD_ID;
std::mutex mUpMessageQueueLock;
diff --git a/services/oboeservice/AAudioServiceStreamMMAP.cpp b/services/oboeservice/AAudioServiceStreamMMAP.cpp
index ec9b2e2..89f6e33 100644
--- a/services/oboeservice/AAudioServiceStreamMMAP.cpp
+++ b/services/oboeservice/AAudioServiceStreamMMAP.cpp
@@ -238,3 +238,25 @@
static_cast<AAudioServiceEndpointMMAP *>(endpoint.get());
return serviceEndpointMMAP->getDownDataDescription(parcelable);
}
+
+int64_t AAudioServiceStreamMMAP::nextDataReportTime_l() {
+ sp<AAudioServiceEndpoint> endpoint = mServiceEndpointWeak.promote();
+ if (endpoint == nullptr) {
+ ALOGE("%s() has no endpoint", __func__);
+ return std::numeric_limits<int64_t>::max();
+ }
+ sp<AAudioServiceEndpointMMAP> serviceEndpointMMAP =
+ static_cast<AAudioServiceEndpointMMAP *>(endpoint.get());
+ return serviceEndpointMMAP->nextDataReportTime();
+}
+
+void AAudioServiceStreamMMAP::reportData_l() {
+ sp<AAudioServiceEndpoint> endpoint = mServiceEndpointWeak.promote();
+ if (endpoint == nullptr) {
+ ALOGE("%s() has no endpoint", __func__);
+ return;
+ }
+ sp<AAudioServiceEndpointMMAP> serviceEndpointMMAP =
+ static_cast<AAudioServiceEndpointMMAP *>(endpoint.get());
+ return serviceEndpointMMAP->reportData();
+}
diff --git a/services/oboeservice/AAudioServiceStreamMMAP.h b/services/oboeservice/AAudioServiceStreamMMAP.h
index 8b8c5e6..db3c8d0 100644
--- a/services/oboeservice/AAudioServiceStreamMMAP.h
+++ b/services/oboeservice/AAudioServiceStreamMMAP.h
@@ -84,6 +84,10 @@
aaudio_result_t getHardwareTimestamp_l(
int64_t *positionFrames, int64_t *timeNanos) REQUIRES(mLock) override;
+ int64_t nextDataReportTime_l() REQUIRES(mLock) override;
+
+ void reportData_l() REQUIRES(mLock) override;
+
/**
* Device specific startup.
* @return AAUDIO_OK or negative error.
diff --git a/services/oboeservice/Android.bp b/services/oboeservice/Android.bp
index 56c0dc9..c5080a4 100644
--- a/services/oboeservice/Android.bp
+++ b/services/oboeservice/Android.bp
@@ -104,6 +104,7 @@
"AAudioStreamTracker.cpp",
"AAudioThread.cpp",
"SharedMemoryProxy.cpp",
+ "SharedMemoryWrapper.cpp",
"SharedRingBuffer.cpp",
"TimestampScheduler.cpp",
],
diff --git a/services/oboeservice/SharedMemoryWrapper.cpp b/services/oboeservice/SharedMemoryWrapper.cpp
new file mode 100644
index 0000000..c0dcccb
--- /dev/null
+++ b/services/oboeservice/SharedMemoryWrapper.cpp
@@ -0,0 +1,132 @@
+/*
+ * Copyright (C) 2023 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#define LOG_TAG "SharedMemoryWrapper"
+//#define LOG_NDEBUG 0
+#include <utils/Log.h>
+
+#include <iomanip>
+#include <iostream>
+#include <sys/mman.h>
+
+#include "SharedMemoryWrapper.h"
+
+namespace aaudio {
+
+constexpr int COUNTER_SIZE_IN_BYTES = sizeof(android::fifo_counter_t);
+constexpr int WRAPPER_SIZE_IN_BYTES = 2 * COUNTER_SIZE_IN_BYTES;
+
+SharedMemoryWrapper::SharedMemoryWrapper() {
+ mCounterFd.reset(ashmem_create_region("AAudioSharedMemoryWrapper", WRAPPER_SIZE_IN_BYTES));
+ if (mCounterFd.get() == -1) {
+ ALOGE("allocate() ashmem_create_region() failed %d", errno);
+ return;
+ }
+ int err = ashmem_set_prot_region(mCounterFd.get(), PROT_READ|PROT_WRITE);
+ if (err < 0) {
+ ALOGE("allocate() ashmem_set_prot_region() failed %d", errno);
+ mCounterFd.reset();
+ return;
+ }
+ auto tmpPtr = (uint8_t *) mmap(nullptr, WRAPPER_SIZE_IN_BYTES,
+ PROT_READ|PROT_WRITE,
+ MAP_SHARED,
+ mCounterFd.get(), 0);
+ if (tmpPtr == MAP_FAILED) {
+ ALOGE("allocate() mmap() failed %d", errno);
+ mCounterFd.reset();
+ return;
+ }
+ mCounterMemoryAddress = tmpPtr;
+
+ mReadCounterAddress = (android::fifo_counter_t*) mCounterMemoryAddress;
+ mWriteCounterAddress = (android::fifo_counter_t*) &mCounterMemoryAddress[COUNTER_SIZE_IN_BYTES];
+}
+
+SharedMemoryWrapper::~SharedMemoryWrapper()
+{
+ reset();
+ if (mCounterMemoryAddress != nullptr) {
+ munmap(mCounterMemoryAddress, WRAPPER_SIZE_IN_BYTES);
+ mCounterMemoryAddress = nullptr;
+ }
+}
+
+aaudio_result_t SharedMemoryWrapper::setupFifoBuffer(android::fifo_frames_t bytesPerFrame,
+ android::fifo_frames_t capacityInFrames) {
+ if (mDataFd.get() == -1) {
+ ALOGE("%s data file descriptor is not initialized", __func__);
+ return AAUDIO_ERROR_INTERNAL;
+ }
+ if (mCounterMemoryAddress == nullptr) {
+ ALOGE("%s the counter memory is not allocated correctly", __func__);
+ return AAUDIO_ERROR_INTERNAL;
+ }
+ mSharedMemorySizeInBytes = bytesPerFrame * capacityInFrames;
+ auto tmpPtr = (uint8_t *) mmap(nullptr, mSharedMemorySizeInBytes,
+ PROT_READ|PROT_WRITE,
+ MAP_SHARED,
+ mDataFd.get(), 0);
+ if (tmpPtr == MAP_FAILED) {
+ ALOGE("allocate() mmap() failed %d", errno);
+ return AAUDIO_ERROR_INTERNAL;
+ }
+ mSharedMemory = tmpPtr;
+
+ mFifoBuffer = std::make_shared<android::FifoBufferIndirect>(
+ bytesPerFrame, capacityInFrames, mReadCounterAddress,
+ mWriteCounterAddress, mSharedMemory);
+ return AAUDIO_OK;
+}
+
+void SharedMemoryWrapper::reset() {
+ mFifoBuffer.reset();
+ if (mSharedMemory != nullptr) {
+ munmap(mSharedMemory, mSharedMemorySizeInBytes);
+ mSharedMemory = nullptr;
+ }
+ mDataFd.reset();
+}
+
+void SharedMemoryWrapper::fillParcelable(
+ AudioEndpointParcelable* endpointParcelable, RingBufferParcelable &ringBufferParcelable,
+ int32_t bytesPerFrame, int32_t framesPerBurst, int32_t capacityInFrames,
+ CounterFilling counterFilling) {
+ const int capacityInBytes = bytesPerFrame * capacityInFrames;
+ const int dataFdIndex =
+ endpointParcelable->addFileDescriptor(mDataFd, mSharedMemorySizeInBytes);
+ ringBufferParcelable.setBytesPerFrame(bytesPerFrame);
+ ringBufferParcelable.setFramesPerBurst(framesPerBurst);
+ ringBufferParcelable.setCapacityInFrames(capacityInFrames);
+ if (mCounterFd.get() == -1 || counterFilling == NONE) {
+ // Either creating the shared memory for the read/write counters failed,
+ // or the caller requested that no counters be filled.
+ ALOGD("%s no counter is filled, counterFd=%d", __func__, mCounterFd.get());
+ ringBufferParcelable.setupMemory(dataFdIndex, 0, capacityInBytes);
+ } else {
+ int counterFdIndex =
+ endpointParcelable->addFileDescriptor(mCounterFd, WRAPPER_SIZE_IN_BYTES);
+ const int readCounterSize = (counterFilling & READ) == NONE ? 0 : COUNTER_SIZE_IN_BYTES;
+ const int writeCounterSize = (counterFilling & WRITE) == NONE ? 0 : COUNTER_SIZE_IN_BYTES;
+ ALOGD("%s counterFdIndex=%d readCounterSize=%d, writeCounterSize=%d",
+ __func__, counterFdIndex, readCounterSize, writeCounterSize);
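+ // Share three regions: the data memory, and the read/write counters carved out of the
+ // counter ashmem region; a size of zero leaves that counter region empty.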
+ ringBufferParcelable.setupMemory(
+ {dataFdIndex, 0 /*offset*/, capacityInBytes},
+ {counterFdIndex, 0 /*offset*/, readCounterSize},
+ {counterFdIndex, COUNTER_SIZE_IN_BYTES, writeCounterSize});
+ }
+}
+
+} // namespace aaudio
diff --git a/services/oboeservice/SharedMemoryWrapper.h b/services/oboeservice/SharedMemoryWrapper.h
new file mode 100644
index 0000000..323c7f1
--- /dev/null
+++ b/services/oboeservice/SharedMemoryWrapper.h
@@ -0,0 +1,86 @@
+/*
+ * Copyright (C) 2023 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#pragma once
+
+#include <android-base/unique_fd.h>
+#include <cutils/ashmem.h>
+#include <stdint.h>
+#include <string>
+#include <sys/mman.h>
+
+#include "fifo/FifoBuffer.h"
+#include "binding/RingBufferParcelable.h"
+#include "binding/AudioEndpointParcelable.h"
+
+namespace aaudio {
+
+/**
+ * Wraps a data shared memory region together with read and write counters, and provides a
+ * FIFO buffer for accessing the wrapped shared memory.
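+ *
+ * Illustrative usage on the service side (dataFd, the sizes and the WRITE flag below are
+ * placeholders, not taken from any particular caller):
+ *   SharedMemoryWrapper wrapper;
+ *   wrapper.getDataFileDescriptor() = std::move(dataFd);   // fd for the data shared memory
+ *   wrapper.setupFifoBuffer(bytesPerFrame, capacityInFrames);
+ *   wrapper.fillParcelable(&endpointParcelable, ringBufferParcelable,
+ *                          bytesPerFrame, framesPerBurst, capacityInFrames,
+ *                          SharedMemoryWrapper::WRITE);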
+ */
+class SharedMemoryWrapper {
+public:
+ explicit SharedMemoryWrapper();
+
+ virtual ~SharedMemoryWrapper();
+
+ android::base::unique_fd& getDataFileDescriptor() { return mDataFd; }
+
+ aaudio_result_t setupFifoBuffer(android::fifo_frames_t bytesPerFrame,
+ android::fifo_frames_t capacityInFrames);
+
+ void reset();
+
+ enum CounterFilling {
+ NONE = 0,
+ READ = 1,
+ WRITE = 2,
+ };
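+ // These values act as bit flags; fillParcelable() checks the READ and WRITE bits
+ // independently.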
+ /**
+ * Fill the shared memory information into the given parcelables.
+ *
+ * @param endpointParcelable container for ring buffers and shared memories
+ * @param ringBufferParcelable the ring buffer parcelable to populate
+ * @param bytesPerFrame the bytes per frame of the data memory
+ * @param framesPerBurst the frames per burst of the data memory
+ * @param capacityInFrames the capacity in frames of the data memory
+ * @param counterFilling a bit mask controlling which of the wrapper's counters (READ and/or
+ * WRITE) are shared via the parcelable
+ */
+ void fillParcelable(AudioEndpointParcelable* endpointParcelable,
+ RingBufferParcelable &ringBufferParcelable,
+ int32_t bytesPerFrame,
+ int32_t framesPerBurst,
+ int32_t capacityInFrames,
+ CounterFilling counterFilling = NONE);
+
+ std::shared_ptr<android::FifoBuffer> getFifoBuffer() {
+ return mFifoBuffer;
+ }
+
+private:
+ android::base::unique_fd mDataFd;
+ android::base::unique_fd mCounterFd;
+ uint8_t* mCounterMemoryAddress = nullptr;
+ android::fifo_counter_t* mReadCounterAddress = nullptr;
+ android::fifo_counter_t* mWriteCounterAddress = nullptr;
+ std::shared_ptr<android::FifoBufferIndirect> mFifoBuffer;
+ uint8_t* mSharedMemory = nullptr;
+ int32_t mSharedMemorySizeInBytes = 0;
+};
+
+} /* namespace aaudio */
diff --git a/services/oboeservice/fuzzer/oboeservice_fuzzer.cpp b/services/oboeservice/fuzzer/oboeservice_fuzzer.cpp
index 6dc6eff..f047065 100644
--- a/services/oboeservice/fuzzer/oboeservice_fuzzer.cpp
+++ b/services/oboeservice/fuzzer/oboeservice_fuzzer.cpp
@@ -149,41 +149,42 @@
void registerClient(const sp<IAAudioClient> &client UNUSED_PARAM) override {}
- aaudio_handle_t openStream(const AAudioStreamRequest &request,
- AAudioStreamConfiguration &configurationOutput) override;
+ AAudioHandleInfo openStream(const AAudioStreamRequest &request,
+ AAudioStreamConfiguration &configurationOutput) override;
- aaudio_result_t closeStream(aaudio_handle_t streamHandle) override;
+ aaudio_result_t closeStream(const AAudioHandleInfo& streamHandleInfo) override;
- aaudio_result_t getStreamDescription(aaudio_handle_t streamHandle,
+ aaudio_result_t getStreamDescription(const AAudioHandleInfo& streamHandleInfo,
AudioEndpointParcelable &parcelable) override;
- aaudio_result_t startStream(aaudio_handle_t streamHandle) override;
+ aaudio_result_t startStream(const AAudioHandleInfo& streamHandleInfo) override;
- aaudio_result_t pauseStream(aaudio_handle_t streamHandle) override;
+ aaudio_result_t pauseStream(const AAudioHandleInfo& streamHandleInfo) override;
- aaudio_result_t stopStream(aaudio_handle_t streamHandle) override;
+ aaudio_result_t stopStream(const AAudioHandleInfo& streamHandleInfo) override;
- aaudio_result_t flushStream(aaudio_handle_t streamHandle) override;
+ aaudio_result_t flushStream(const AAudioHandleInfo& streamHandleInfo) override;
- aaudio_result_t registerAudioThread(aaudio_handle_t streamHandle, pid_t clientThreadId,
+ aaudio_result_t registerAudioThread(const AAudioHandleInfo& streamHandleInfo,
+ pid_t clientThreadId,
int64_t periodNanoseconds) override;
- aaudio_result_t unregisterAudioThread(aaudio_handle_t streamHandle,
+ aaudio_result_t unregisterAudioThread(const AAudioHandleInfo& streamHandleInfo,
pid_t clientThreadId) override;
- aaudio_result_t startClient(aaudio_handle_t streamHandle UNUSED_PARAM,
+ aaudio_result_t startClient(const AAudioHandleInfo& streamHandleInfo UNUSED_PARAM,
const AudioClient &client UNUSED_PARAM,
const audio_attributes_t *attr UNUSED_PARAM,
audio_port_handle_t *clientHandle UNUSED_PARAM) override {
return AAUDIO_ERROR_UNAVAILABLE;
}
- aaudio_result_t stopClient(aaudio_handle_t streamHandle UNUSED_PARAM,
+ aaudio_result_t stopClient(const AAudioHandleInfo& streamHandleInfo UNUSED_PARAM,
audio_port_handle_t clientHandle UNUSED_PARAM) override {
return AAUDIO_ERROR_UNAVAILABLE;
}
- aaudio_result_t exitStandby(aaudio_handle_t streamHandle UNUSED_PARAM,
+ aaudio_result_t exitStandby(const AAudioHandleInfo& streamHandleInfo UNUSED_PARAM,
AudioEndpointParcelable &parcelable UNUSED_PARAM) override {
return AAUDIO_ERROR_UNAVAILABLE;
}
@@ -250,92 +251,91 @@
mAAudioService.clear();
}
-aaudio_handle_t FuzzAAudioClient::openStream(const AAudioStreamRequest &request,
- AAudioStreamConfiguration &configurationOutput) {
- aaudio_handle_t stream;
+AAudioHandleInfo FuzzAAudioClient::openStream(const AAudioStreamRequest &request,
+ AAudioStreamConfiguration &configurationOutput) {
for (int i = 0; i < 2; ++i) {
AAudioServiceInterface *service = getAAudioService();
if (!service) {
- return AAUDIO_ERROR_NO_SERVICE;
+ return {-1, AAUDIO_ERROR_NO_SERVICE};
}
- stream = service->openStream(request, configurationOutput);
+ auto streamHandleInfo = service->openStream(request, configurationOutput);
- if (stream == AAUDIO_ERROR_NO_SERVICE) {
+ if (streamHandleInfo.getHandle() == AAUDIO_ERROR_NO_SERVICE) {
dropAAudioService();
} else {
- break;
+ return streamHandleInfo;
}
}
- return stream;
+ return {-1, AAUDIO_ERROR_NO_SERVICE};
}
-aaudio_result_t FuzzAAudioClient::closeStream(aaudio_handle_t streamHandle) {
+aaudio_result_t FuzzAAudioClient::closeStream(const AAudioHandleInfo& streamHandleInfo) {
AAudioServiceInterface *service = getAAudioService();
if (!service) {
return AAUDIO_ERROR_NO_SERVICE;
}
- return service->closeStream(streamHandle);
+ return service->closeStream(streamHandleInfo);
}
-aaudio_result_t FuzzAAudioClient::getStreamDescription(aaudio_handle_t streamHandle,
+aaudio_result_t FuzzAAudioClient::getStreamDescription(const AAudioHandleInfo& streamHandleInfo,
AudioEndpointParcelable &parcelable) {
AAudioServiceInterface *service = getAAudioService();
if (!service) {
return AAUDIO_ERROR_NO_SERVICE;
}
- return service->getStreamDescription(streamHandle, parcelable);
+ return service->getStreamDescription(streamHandleInfo, parcelable);
}
-aaudio_result_t FuzzAAudioClient::startStream(aaudio_handle_t streamHandle) {
+aaudio_result_t FuzzAAudioClient::startStream(const AAudioHandleInfo& streamHandleInfo) {
AAudioServiceInterface *service = getAAudioService();
if (!service) {
return AAUDIO_ERROR_NO_SERVICE;
}
- return service->startStream(streamHandle);
+ return service->startStream(streamHandleInfo);
}
-aaudio_result_t FuzzAAudioClient::pauseStream(aaudio_handle_t streamHandle) {
+aaudio_result_t FuzzAAudioClient::pauseStream(const AAudioHandleInfo& streamHandleInfo) {
AAudioServiceInterface *service = getAAudioService();
if (!service) {
return AAUDIO_ERROR_NO_SERVICE;
}
- return service->pauseStream(streamHandle);
+ return service->pauseStream(streamHandleInfo);
}
-aaudio_result_t FuzzAAudioClient::stopStream(aaudio_handle_t streamHandle) {
+aaudio_result_t FuzzAAudioClient::stopStream(const AAudioHandleInfo& streamHandleInfo) {
AAudioServiceInterface *service = getAAudioService();
if (!service) {
return AAUDIO_ERROR_NO_SERVICE;
}
- return service->stopStream(streamHandle);
+ return service->stopStream(streamHandleInfo);
}
-aaudio_result_t FuzzAAudioClient::flushStream(aaudio_handle_t streamHandle) {
+aaudio_result_t FuzzAAudioClient::flushStream(const AAudioHandleInfo& streamHandleInfo) {
AAudioServiceInterface *service = getAAudioService();
if (!service) {
return AAUDIO_ERROR_NO_SERVICE;
}
- return service->flushStream(streamHandle);
+ return service->flushStream(streamHandleInfo);
}
-aaudio_result_t FuzzAAudioClient::registerAudioThread(aaudio_handle_t streamHandle,
+aaudio_result_t FuzzAAudioClient::registerAudioThread(const AAudioHandleInfo& streamHandleInfo,
pid_t clientThreadId,
int64_t periodNanoseconds) {
AAudioServiceInterface *service = getAAudioService();
if (!service) {
return AAUDIO_ERROR_NO_SERVICE;
}
- return service->registerAudioThread(streamHandle, clientThreadId, periodNanoseconds);
+ return service->registerAudioThread(streamHandleInfo, clientThreadId, periodNanoseconds);
}
-aaudio_result_t FuzzAAudioClient::unregisterAudioThread(aaudio_handle_t streamHandle,
+aaudio_result_t FuzzAAudioClient::unregisterAudioThread(const AAudioHandleInfo& streamHandleInfo,
pid_t clientThreadId) {
AAudioServiceInterface *service = getAAudioService();
if (!service) {
return AAUDIO_ERROR_NO_SERVICE;
}
- return service->unregisterAudioThread(streamHandle, clientThreadId);
+ return service->unregisterAudioThread(streamHandleInfo, clientThreadId);
}
class OboeserviceFuzzer {
@@ -410,8 +410,8 @@
? fdp.ConsumeIntegral<int32_t>()
: kAAudioFormats[fdp.ConsumeIntegralInRange<int32_t>(0, kNumAAudioFormats - 1)]));
- aaudio_handle_t stream = mClient->openStream(request, configurationOutput);
- if (stream < 0) {
+ auto streamHandleInfo = mClient->openStream(request, configurationOutput);
+ if (streamHandleInfo.getHandle() < 0) {
// invalid request, stream not opened.
return;
}
@@ -420,23 +420,23 @@
int action = fdp.ConsumeIntegralInRange<int32_t>(0, 4);
switch (action) {
case 0:
- mClient->getStreamDescription(stream, audioEndpointParcelable);
+ mClient->getStreamDescription(streamHandleInfo, audioEndpointParcelable);
break;
case 1:
- mClient->startStream(stream);
+ mClient->startStream(streamHandleInfo);
break;
case 2:
- mClient->pauseStream(stream);
+ mClient->pauseStream(streamHandleInfo);
break;
case 3:
- mClient->stopStream(stream);
+ mClient->stopStream(streamHandleInfo);
break;
case 4:
- mClient->flushStream(stream);
+ mClient->flushStream(streamHandleInfo);
break;
}
}
- mClient->closeStream(stream);
+ mClient->closeStream(streamHandleInfo);
assert(mClient->getDeathCount() == 0);
}
diff --git a/services/tuner/hidl/TunerHidlDvr.cpp b/services/tuner/hidl/TunerHidlDvr.cpp
index 8083a6e..285e32b 100644
--- a/services/tuner/hidl/TunerHidlDvr.cpp
+++ b/services/tuner/hidl/TunerHidlDvr.cpp
@@ -66,7 +66,7 @@
AidlMQDesc aidlMQDesc;
unsafeHidlToAidlMQDescriptor<uint8_t, int8_t, SynchronizedReadWrite>(dvrMQDesc, &aidlMQDesc);
- *_aidl_return = move(aidlMQDesc);
+ *_aidl_return = std::move(aidlMQDesc);
return ::ndk::ScopedAStatus::ok();
}
diff --git a/services/tuner/hidl/TunerHidlFilter.cpp b/services/tuner/hidl/TunerHidlFilter.cpp
index d6a0cae..1789028 100644
--- a/services/tuner/hidl/TunerHidlFilter.cpp
+++ b/services/tuner/hidl/TunerHidlFilter.cpp
@@ -140,7 +140,7 @@
AidlMQDesc aidlMQDesc;
unsafeHidlToAidlMQDescriptor<uint8_t, int8_t, SynchronizedReadWrite>(filterMQDesc, &aidlMQDesc);
- *_aidl_return = move(aidlMQDesc);
+ *_aidl_return = std::move(aidlMQDesc);
return ::ndk::ScopedAStatus::ok();
}
@@ -1020,8 +1020,8 @@
}
DemuxFilterEvent filterEvent;
- filterEvent.set<DemuxFilterEvent::media>(move(media));
- res.push_back(move(filterEvent));
+ filterEvent.set<DemuxFilterEvent::media>(std::move(media));
+ res.push_back(std::move(filterEvent));
}
}
@@ -1037,8 +1037,8 @@
section.dataLength = static_cast<int64_t>(sectionEvent.dataLength);
DemuxFilterEvent filterEvent;
- filterEvent.set<DemuxFilterEvent::section>(move(section));
- res.push_back(move(filterEvent));
+ filterEvent.set<DemuxFilterEvent::section>(std::move(section));
+ res.push_back(std::move(filterEvent));
}
}
@@ -1053,8 +1053,8 @@
pes.mpuSequenceNumber = static_cast<int32_t>(pesEvent.mpuSequenceNumber);
DemuxFilterEvent filterEvent;
- filterEvent.set<DemuxFilterEvent::pes>(move(pes));
- res.push_back(move(filterEvent));
+ filterEvent.set<DemuxFilterEvent::pes>(std::move(pes));
+ res.push_back(std::move(filterEvent));
}
}
@@ -1103,8 +1103,8 @@
}
DemuxFilterEvent filterEvent;
- filterEvent.set<DemuxFilterEvent::tsRecord>(move(tsRecord));
- res.push_back(move(filterEvent));
+ filterEvent.set<DemuxFilterEvent::tsRecord>(std::move(tsRecord));
+ res.push_back(std::move(filterEvent));
}
}
@@ -1130,8 +1130,8 @@
}
DemuxFilterEvent filterEvent;
- filterEvent.set<DemuxFilterEvent::mmtpRecord>(move(mmtpRecord));
- res.push_back(move(filterEvent));
+ filterEvent.set<DemuxFilterEvent::mmtpRecord>(std::move(mmtpRecord));
+ res.push_back(std::move(filterEvent));
}
}
@@ -1149,8 +1149,8 @@
download.dataLength = static_cast<int32_t>(downloadEvent.dataLength);
DemuxFilterEvent filterEvent;
- filterEvent.set<DemuxFilterEvent::download>(move(download));
- res.push_back(move(filterEvent));
+ filterEvent.set<DemuxFilterEvent::download>(std::move(download));
+ res.push_back(std::move(filterEvent));
}
}
@@ -1163,8 +1163,8 @@
ipPayload.dataLength = static_cast<int32_t>(ipPayloadEvent.dataLength);
DemuxFilterEvent filterEvent;
- filterEvent.set<DemuxFilterEvent::ipPayload>(move(ipPayload));
- res.push_back(move(filterEvent));
+ filterEvent.set<DemuxFilterEvent::ipPayload>(std::move(ipPayload));
+ res.push_back(std::move(filterEvent));
}
}
@@ -1181,8 +1181,8 @@
copy(descrData.begin(), descrData.end(), temi.descrData.begin());
DemuxFilterEvent filterEvent;
- filterEvent.set<DemuxFilterEvent::temi>(move(temi));
- res.push_back(move(filterEvent));
+ filterEvent.set<DemuxFilterEvent::temi>(std::move(temi));
+ res.push_back(std::move(filterEvent));
}
}
@@ -1204,15 +1204,15 @@
}
DemuxFilterEvent filterEvent;
- filterEvent.set<DemuxFilterEvent::monitorEvent>(move(monitor));
- res.push_back(move(filterEvent));
+ filterEvent.set<DemuxFilterEvent::monitorEvent>(std::move(monitor));
+ res.push_back(std::move(filterEvent));
}
void TunerHidlFilter::FilterCallback::getRestartEvent(
const vector<HidlDemuxFilterEventExt::Event>& eventsExt, vector<DemuxFilterEvent>& res) {
DemuxFilterEvent filterEvent;
filterEvent.set<DemuxFilterEvent::startId>(static_cast<int32_t>(eventsExt[0].startId()));
- res.push_back(move(filterEvent));
+ res.push_back(std::move(filterEvent));
}
} // namespace tuner