Merge "mediautils::SetviceSingleton : fix missing onServiceDied notification" into main
diff --git a/camera/ndk/include/camera/NdkCameraCaptureSession.h b/camera/ndk/include/camera/NdkCameraCaptureSession.h
index e73222b..06e1d34 100644
--- a/camera/ndk/include/camera/NdkCameraCaptureSession.h
+++ b/camera/ndk/include/camera/NdkCameraCaptureSession.h
@@ -1103,12 +1103,19 @@
/**
* Request continuous streaming of a sequence of images for the shared capture session
* when more than one clients can open the same camera in shared mode by calling
- * {@link ACameraManager_openSharedCamera}. In shared session, only primary clients can create
- * a capture request and change capture parameters. Secondary clients can only request streaming of
- * images by calling this api {@link ACameraCaptureSessionShared_startStreaming}. Calling this api
- * for normal sessions when {@link ACameraManager_openCamera} is used to open the camera will throw
+ * {@link ACameraManager_openSharedCamera}. In shared mode, the highest priority client among all
+ * the clients will be the primary client while the others will be secondary clients. In a shared
+ * capture session, only the primary client can create a capture request and change capture
+ * parameters. Secondary clients can only request streaming of images by calling
+ * {@link ACameraCaptureSessionShared_startStreaming}. Calling this API for normal sessions, when
+ * {@link ACameraManager_openCamera} is used to open the camera, will return
* {@link ACAMERA_ERROR_INVALID_OPERATION}.
*
+ * <p>The priority of client access is determined by considering two factors: its current process
+ * state and its "out of memory" score. Clients operating in the background are assigned a lower
+ * priority. In contrast, clients running in the foreground, along with system-level clients, are
+ * given a higher priority.</p>
+ *
* <p>With this method, the camera device will continually capture images, cycling through the
* settings in the list of {@link ACaptureRequest} specified by the primary client. If primary
* client does not have ongoing repeating request, camera service will use a capture request with
@@ -1145,20 +1152,72 @@
* </ul>
*/
camera_status_t ACameraCaptureSessionShared_startStreaming(
- ACameraCaptureSession* sharedSession, ACameraCaptureSession_captureCallbacksV2 *callbacks,
+ ACameraCaptureSession* sharedSession,
+ /*optional*/ACameraCaptureSession_captureCallbacksV2 *callbacks,
int numOutputWindows, ANativeWindow **window,
- int *captureSequenceId) __INTRODUCED_IN(36);
+ /*optional*/int *captureSequenceId) __INTRODUCED_IN(36);
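For reference, a minimal usage sketch for a secondary client of a shared session; it relies only on the declarations in this header, error handling is abbreviated, and the helper name and setup are illustrative rather than part of this change.

    #include <android/native_window.h>
    #include <camera/NdkCameraCaptureSession.h>

    // Illustrative helper (not part of this patch): a secondary client starts
    // streaming to one preview window and later stops it again.
    static camera_status_t streamBriefly(ACameraCaptureSession *sharedSession,
                                         ANativeWindow *previewWindow) {
        int sequenceId = -1;
        // Callbacks may be NULL; no per-frame notifications are needed here.
        camera_status_t status = ACameraCaptureSessionShared_startStreaming(
                sharedSession, /*callbacks=*/NULL,
                /*numOutputWindows=*/1, &previewWindow, &sequenceId);
        if (status != ACAMERA_OK) {
            return status;  // e.g. ACAMERA_ERROR_INVALID_OPERATION for a non-shared session
        }
        // ... consume frames from previewWindow for a while ...
        return ACameraCaptureSessionShared_stopStreaming(sharedSession);
    }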
/**
* This has the same functionality as ACameraCaptureSessionShared_startStreaming, with added
* support for logical multi-camera where the capture callbacks supports result metadata for
* physical cameras.
+ *
+ * Request continuous streaming of a sequence of images for the shared capture session
+ * when more than one client can open the same camera in shared mode by calling
+ * {@link ACameraManager_openSharedCamera}. In shared mode, the highest priority client among all
+ * the clients will be the primary client while the others will be secondary clients. In a shared
+ * capture session, only the primary client can create a capture request and change capture
+ * parameters. Secondary clients can only request streaming of images by calling
+ * {@link ACameraCaptureSessionShared_logicalCamera_startStreaming}. Calling this API for normal
+ * sessions, when {@link ACameraManager_openCamera} is used to open the camera, will return
+ * {@link ACAMERA_ERROR_INVALID_OPERATION}.
+ *
+ * <p>The priority of client access is determined by considering two factors: its current process
+ * state and its "out of memory" score. Clients operating in the background are assigned a lower
+ * priority. In contrast, clients running in the foreground, along with system-level clients, are
+ * given a higher priority.</p>
+ *
+ * <p>With this method, the camera device will continually capture images, cycling through the
+ * settings in the list of {@link ACaptureRequest} specified by the primary client. If the primary
+ * client does not have an ongoing repeating request, the camera service will use a capture request
+ * with default capture parameters for the preview template.</p>
+ *
+ * <p>To stop the continuous streaming, call {@link ACameraCaptureSessionShared_stopStreaming}.</p>
+ *
+ * <p>Calling this method will replace an existing continuous streaming request.</p>
+ *
+ * @param sharedSession the shared capture session when camera is opened in
+ * shared mode.
+ * @param callbacks the {@link ACameraCaptureSession_logicalCamera_captureCallbacksV2} to be
+ * associated with this capture sequence. No capture callback will be fired if callbacks
+ * is set to NULL.
+ * @param numOutputWindows number of native windows to be used for streaming. Must be at least 1.
+ * @param windows an array of {@link ANativeWindow} to be used for streaming. Length must be at
+ * least numOutputWindows.
+ * @param captureSequenceId the capture sequence ID associated with this capture method invocation
+ * will be stored here if this argument is not NULL and the method call succeeds.
+ * When this argument is set to NULL, the capture sequence ID will not be returned.
+ *
+ * @return <ul>
+ * <li>{@link ACAMERA_OK} if the method succeeds. captureSequenceId will be filled
+ * if it is not NULL.</li>
+ * <li>{@link ACAMERA_ERROR_INVALID_PARAMETER} if sharedSession or windows is NULL, or
+ * if numOutputWindows < 1</li>
+ * <li>{@link ACAMERA_ERROR_SESSION_CLOSED} if the capture session has been closed</li>
+ * <li>{@link ACAMERA_ERROR_CAMERA_DISCONNECTED} if the camera device is closed</li>
+ * <li>{@link ACAMERA_ERROR_CAMERA_DEVICE} if the camera device encounters fatal error</li>
+ * <li>{@link ACAMERA_ERROR_CAMERA_SERVICE} if the camera service encounters fatal error
+ * </li>
+ * <li>{@link ACAMERA_ERROR_INVALID_OPERATION} if the session passed is not a shared
+ * session</li>
+ * <li>{@link ACAMERA_ERROR_UNKNOWN} if the method fails for some other reasons</li>
+ * </ul>
*/
camera_status_t ACameraCaptureSessionShared_logicalCamera_startStreaming(
ACameraCaptureSession* sharedSession,
- ACameraCaptureSession_logicalCamera_captureCallbacksV2 *callbacks,
+ /*optional*/ACameraCaptureSession_logicalCamera_captureCallbacksV2 *callbacks,
int numOutputWindows, ANativeWindow **windows,
- int *captureSequenceId) __INTRODUCED_IN(36);
+ /*optional*/int *captureSequenceId) __INTRODUCED_IN(36);
/**
* Cancel any ongoing streaming started by {@link ACameraCaptureSessionShared_startStreaming}.
@@ -1183,7 +1242,7 @@
* </ul>
*/
camera_status_t ACameraCaptureSessionShared_stopStreaming(
- ACameraCaptureSession *sharedSession
+ ACameraCaptureSession *sharedSession
) __INTRODUCED_IN(36);
__END_DECLS
diff --git a/camera/ndk/include/camera/NdkCameraManager.h b/camera/ndk/include/camera/NdkCameraManager.h
index a9b0174..492c41b 100644
--- a/camera/ndk/include/camera/NdkCameraManager.h
+++ b/camera/ndk/include/camera/NdkCameraManager.h
@@ -299,6 +299,21 @@
* ACameraDevice**)} except that it opens the camera in shared mode so that more
* than one client can access the camera at the same time.
*
+ * <p>When the camera is opened in shared mode, the highest priority client among all the clients
+ * will be the primary client while the others will be secondary clients. The primary client can
+ * create capture requests, modify any capture parameters, and send them to the capture session for
+ * a one-shot capture or as a repeating request.</p>
+ *
+ * <p>Secondary clients cannot create a capture request or modify any capture parameters. They
+ * can, however, start camera streaming to the desired surface targets using
+ * {@link ACameraCaptureSessionShared_startStreaming}. Once streaming has successfully started,
+ * they can stop it using {@link ACameraCaptureSessionShared_stopStreaming}.</p>
+ *
+ * <p>The priority of client access is determined by considering two factors: its current process
+ * state and its "out of memory" score. Clients operating in the background are assigned a lower
+ * priority. In contrast, clients running in the foreground, along with system-level clients, are
+ * given a higher priority.</p>
+ *
* <p>Processes need to have android.permission.SYSTEM_CAMERA in addition to
* android.permission.CAMERA in order to connect to this camera device in shared
* mode.</p>
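A hedged usage sketch of the open path: it assumes the leading parameters mirror ACameraManager_openCamera (manager, camera id, state callbacks, device out-pointer) with the documented isPrimaryClient out-parameter appended, and it uses ACameraManager_isCameraDeviceSharingSupported, described later in this header, to decide whether shared mode is available. Parameter order beyond what this header documents is an assumption.

    #include <camera/NdkCameraManager.h>

    // Illustrative only: open cameraId in shared mode when supported,
    // otherwise fall back to exclusive access.
    static camera_status_t openPreferShared(ACameraManager *manager, const char *cameraId,
                                            ACameraDevice_StateCallbacks *callbacks,
                                            ACameraDevice **deviceOut, bool *isPrimaryOut) {
        bool sharingSupported = false;
        camera_status_t status = ACameraManager_isCameraDeviceSharingSupported(
                manager, cameraId, &sharingSupported);
        if (status != ACAMERA_OK) {
            return status;
        }
        if (!sharingSupported) {
            *isPrimaryOut = true;  // exclusive access behaves like a primary client
            return ACameraManager_openCamera(manager, cameraId, callbacks, deviceOut);
        }
        return ACameraManager_openSharedCamera(
                manager, cameraId, callbacks, deviceOut, isPrimaryOut);
    }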
@@ -308,7 +323,7 @@
* @param callback the {@link ACameraDevice_StateCallbacks} associated with the opened camera
* device.
* @param device the opened {@link ACameraDevice} will be filled here if the method call succeeds.
- * @param primaryClient will return as true if the client is primaryClient.
+ * @param isPrimaryClient will be set to true if the client is the primary client.
*
* @return <ul>
* <li>{@link ACAMERA_OK} if the method call succeeds.</li>
@@ -449,8 +464,8 @@
*
* @return <ul>
* <li>{@link ACAMERA_OK} if the method call succeeds.</li>
- * <li>{@link ACAMERA_ERROR_INVALID_PARAMETER} if any parameter is not
- * valid.</li>
+ * <li>{@link ACAMERA_ERROR_INVALID_PARAMETER} if manager, cameraId, or isSharingSupported
+ * is NULL, or if cameraId does not match any connected camera device.</li>
* </ul>
*/
camera_status_t ACameraManager_isCameraDeviceSharingSupported(
diff --git a/media/codec2/components/apv/C2SoftApvEnc.cpp b/media/codec2/components/apv/C2SoftApvEnc.cpp
index 9d84bc7..9036df1 100644
--- a/media/codec2/components/apv/C2SoftApvEnc.cpp
+++ b/media/codec2/components/apv/C2SoftApvEnc.cpp
@@ -1056,10 +1056,20 @@
input->width(), input->width(), input->width(),
input->width(), input->width(), input->height(),
CONV_FORMAT_I420);
- } else if (IsYUV420(*input)) {
- return C2_BAD_VALUE;
} else if (IsI420(*input)) {
- return C2_BAD_VALUE;
+ uint8_t *srcY = (uint8_t*)input->data()[0];
+ uint8_t *srcU = (uint8_t*)input->data()[1];
+ uint8_t *srcV = (uint8_t*)input->data()[2];
+ uint16_t *dstY = (uint16_t*)inputFrames->frm[0].imgb->a[0];
+ uint16_t *dstUV = (uint16_t*)inputFrames->frm[0].imgb->a[1];
+ convertPlanar8ToP210(dstY, dstUV, srcY, srcU, srcV,
+ layout.planes[C2PlanarLayout::PLANE_Y].rowInc,
+ layout.planes[C2PlanarLayout::PLANE_U].rowInc,
+ layout.planes[C2PlanarLayout::PLANE_V].rowInc,
+ input->width(), input->width(),
+ input->width(), input->height(),
+ CONV_FORMAT_I420);
+
} else {
ALOGE("Not supported color format. %d", mColorFormat);
return C2_BAD_VALUE;
@@ -1317,10 +1327,6 @@
return;
}
- if (work->input.buffers.empty()) {
- return;
- }
-
std::shared_ptr<C2GraphicView> view;
std::shared_ptr<C2Buffer> inputBuffer = nullptr;
if (!work->input.buffers.empty()) {
@@ -1332,7 +1338,19 @@
work->workletsProcessed = 1u;
return;
}
+ } else {
+ ALOGV("Empty input Buffer");
+ uint32_t flags = 0;
+ if (work->input.flags & C2FrameData::FLAG_END_OF_STREAM) {
+ flags |= C2FrameData::FLAG_END_OF_STREAM;
+ }
+ work->worklets.front()->output.flags = (C2FrameData::flags_t)flags;
+ work->worklets.front()->output.buffers.clear();
+ work->worklets.front()->output.ordinal = work->input.ordinal;
+ work->workletsProcessed = 1u;
+ return;
}
+
if (!inputBuffer) {
fillEmptyWork(work);
return;
@@ -1361,6 +1379,7 @@
error = setEncodeArgs(&mInputFrames, view.get(), workIndex);
if (error != C2_OK) {
+ ALOGE("setEncodeArgs has failed. err = %d", error);
mSignalledError = true;
work->result = error;
work->workletsProcessed = 1u;
@@ -1382,6 +1401,7 @@
int32_t status =
oapve_encode(mEncoderId, &mInputFrames, mMetaId, bits.get(), &stat, &mReconFrames);
if (status != C2_OK) {
+ ALOGE("oapve_encode has failed. err = %d", status);
mSignalledError = true;
work->result = C2_CORRUPTED;
work->workletsProcessed = 1u;
diff --git a/media/codec2/components/base/SimpleC2Component.cpp b/media/codec2/components/base/SimpleC2Component.cpp
index a03f24f..ea67bf4 100644
--- a/media/codec2/components/base/SimpleC2Component.cpp
+++ b/media/codec2/components/base/SimpleC2Component.cpp
@@ -713,6 +713,39 @@
}
}
+void convertPlanar8ToP210(uint16_t *dstY, uint16_t *dstUV,
+ const uint8_t *srcY, const uint8_t *srcU, const uint8_t *srcV,
+ size_t srcYStride, size_t srcUStride, size_t srcVStride,
+ size_t dstYStride, size_t dstUVStride,
+ uint32_t width, uint32_t height,
+ CONV_FORMAT_T format) {
+ if (format != CONV_FORMAT_I420) {
+ ALOGE("No support for planar8 to P210. format is %d", format);
+ return;
+ }
+
+ for (int32_t y = 0; y < height; ++y) {
+ for (int32_t x = 0; x < width; ++x) {
+ dstY[x] = ((uint16_t)((double)srcY[x] * 1023 / 255 + 0.5) << 6) & 0xFFC0;
+ }
+ dstY += dstYStride;
+ srcY += srcYStride;
+ }
+
+ for (int32_t y = 0; y < height / 2; ++y) {
+ for (int32_t x = 0; x < width / 2; ++x) {
+ dstUV[x<<1] = dstUV[(x<<1) + dstUVStride] =
+ ((uint16_t)((double)srcU[x] * 1023 / 255 + 0.5) << 6) & 0xFFC0;
+ dstUV[(x<<1) + 1] = dstUV[(x<<1) + dstUVStride + 1] =
+ ((uint16_t)((double)srcV[x] * 1023 / 255 + 0.5) << 6) & 0xFFC0;
+ }
+ dstUV += dstUVStride << 1;
+ srcU += srcUStride;
+ srcV += srcVStride;
+ }
+}
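A standalone check of the per-sample math above, kept separate from the patch: each 8-bit value is rescaled to 10 bits and stored MSB-aligned in a 16-bit word, which is what the shift by 6 and the 0xFFC0 mask express.

    #include <stdint.h>
    #include <stdio.h>

    // Mirrors the per-sample conversion in convertPlanar8ToP210.
    static uint16_t to_p210_sample(uint8_t v) {
        return ((uint16_t)((double)v * 1023 / 255 + 0.5) << 6) & 0xFFC0;
    }

    int main(void) {
        // 0 -> 0x0000, 128 -> 514 << 6 = 0x8080, 255 -> 1023 << 6 = 0xFFC0
        printf("%04X %04X %04X\n", (unsigned)to_p210_sample(0),
               (unsigned)to_p210_sample(128), (unsigned)to_p210_sample(255));
        return 0;
    }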
+
+
std::unique_ptr<C2Work> SimpleC2Component::WorkQueue::pop_front() {
std::unique_ptr<C2Work> work = std::move(mQueue.front().work);
mQueue.pop_front();
diff --git a/media/codec2/components/base/include/SimpleC2Component.h b/media/codec2/components/base/include/SimpleC2Component.h
index 4306e55..5d2e8cd 100644
--- a/media/codec2/components/base/include/SimpleC2Component.h
+++ b/media/codec2/components/base/include/SimpleC2Component.h
@@ -111,6 +111,12 @@
size_t dstYStride, size_t dstUVStride,
uint32_t width, uint32_t height,
CONV_FORMAT_T format);
+void convertPlanar8ToP210(uint16_t *dstY, uint16_t *dstUV,
+ const uint8_t *srcY, const uint8_t *srcU, const uint8_t *srcV,
+ size_t srcYStride, size_t srcUStride, size_t srcVStride,
+ size_t dstYStride, size_t dstUVStride,
+ uint32_t width, uint32_t height,
+ CONV_FORMAT_T format);
class SimpleC2Component
: public C2Component, public std::enable_shared_from_this<SimpleC2Component> {
diff --git a/media/codec2/core/include/C2Config.h b/media/codec2/core/include/C2Config.h
index fa5ce77..fbd1b36 100644
--- a/media/codec2/core/include/C2Config.h
+++ b/media/codec2/core/include/C2Config.h
@@ -269,7 +269,7 @@
kParamIndexSuspendAt, // input-surface, struct
kParamIndexResumeAt, // input-surface, struct
kParamIndexStopAt, // input-surface, struct
- kParamIndexTimeOffset, // input-surface, struct
+ kParamIndexTimeOffset, // input-surface, int64_t
kParamIndexMinFrameRate, // input-surface, float
kParamIndexTimestampGapAdjustment, // input-surface, struct
@@ -299,6 +299,10 @@
// allow tunnel peek behavior to be unspecified for app compatibility
kParamIndexTunnelPeekMode, // tunnel mode, enum
+
+ // input surface
+ kParamIndexCaptureFrameRate, // input-surface, float
+ kParamIndexStopTimeOffset, // input-surface, int64_t
};
}
@@ -2651,6 +2655,14 @@
constexpr char C2_PARAMKEY_INPUT_SURFACE_MIN_FRAME_RATE[] = "input-surface.min-frame-rate";
/**
+ * Maximum frame rate (fps) for the input surface.
+ *
+ * Frames are dropped to meet this rate.
+ */
+typedef C2PortParam<C2Tuning, C2FloatValue, kParamIndexMaxFrameRate> C2PortMaxFrameRateTuning;
+constexpr char C2_PARAMKEY_INPUT_SURFACE_MAX_FRAME_RATE[] = "input-surface.max-frame-rate";
+
+/**
* Timestamp adjustment (override) for input surface buffers. These control the input timestamp
* fed to the codec, but do not impact the output timestamp.
*/
@@ -2680,9 +2692,26 @@
inline C2TimestampGapAdjustmentStruct::C2TimestampGapAdjustmentStruct()
: mode(C2TimestampGapAdjustmentStruct::NONE), value(0) { }
-typedef C2PortParam<C2Tuning, C2TimestampGapAdjustmentStruct> C2PortTimestampGapTuning;
+typedef C2PortParam<C2Tuning, C2TimestampGapAdjustmentStruct, kParamIndexTimestampGapAdjustment>
+ C2PortTimestampGapTuning;
constexpr char C2_PARAMKEY_INPUT_SURFACE_TIMESTAMP_ADJUSTMENT[] = "input-surface.timestamp-adjustment";
+/**
+ * Capture frame rate for the input surface. During timelapse or slow-motion encoding,
+ * this represents the frame rate at which the input surface captures frames.
+ */
+typedef C2PortParam<C2Tuning, C2FloatValue, kParamIndexCaptureFrameRate>
+ C2PortCaptureFrameRateTuning;
+constexpr char C2_PARAMKEY_INPUT_SURFACE_CAPTURE_FRAME_RATE[] = "input-surface.capture-frame-rate";
+
+/**
+ * Stop time offset for the input surface. The stop time offset is the elapsed time
+ * from the timestamp of the last frame to the stop time. It can be returned by
+ * IInputSurface when queried.
+ */
+typedef C2PortParam<C2Tuning, C2Int64Value, kParamIndexStopTimeOffset> C2PortStopTimeOffset;
+constexpr char C2_PARAMKEY_INPUT_SURFACE_STOP_TIME_OFFSET[] = "input-surface.stop-time-offset";
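A sketch of constructing the two new tunings on the client side, assuming the usual C2Param value-forwarding constructors; the values, the choice of input vs. output port, and the unit of the stop-time offset are illustrative assumptions only.

    #include <C2Config.h>

    // Illustrative values only (not part of this change).
    void buildInputSurfaceTunings() {
        // e.g. 120 fps slow-motion capture on the input surface (port choice assumed).
        C2PortCaptureFrameRateTuning::input captureRate(120.f);
        // e.g. stop time 33 ms after the last frame (unit assumed here).
        C2PortStopTimeOffset::output stopOffset(33000000);
        (void)captureRate;
        (void)stopOffset;
    }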
+
/* ===================================== TUNNELED CODEC ==================================== */
/**
diff --git a/media/codec2/hal/aidl/Android.bp b/media/codec2/hal/aidl/Android.bp
index e16e2b1..eaabc33 100644
--- a/media/codec2/hal/aidl/Android.bp
+++ b/media/codec2/hal/aidl/Android.bp
@@ -78,6 +78,8 @@
"Configurable.cpp",
"InputBufferManager.cpp",
"ParamTypes.cpp",
+ "inputsurface/InputSurface.cpp",
+ "inputsurface/InputSurfaceConnection.cpp",
],
header_libs: [
@@ -98,6 +100,7 @@
"libhidlbase",
"liblog",
"libnativewindow",
+ "libmediandk",
"libstagefright_aidl_bufferpool2",
"libstagefright_bufferpool@2.0.1",
"libui",
diff --git a/media/codec2/hal/aidl/include/codec2/aidl/inputsurface/InputSurface.h b/media/codec2/hal/aidl/include/codec2/aidl/inputsurface/InputSurface.h
new file mode 100644
index 0000000..5c2cc2e
--- /dev/null
+++ b/media/codec2/hal/aidl/include/codec2/aidl/inputsurface/InputSurface.h
@@ -0,0 +1,61 @@
+/*
+ * Copyright 2024 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#pragma once
+
+#include <aidl/android/hardware/media/c2/BnInputSurface.h>
+
+#include <codec2/aidl/Configurable.h>
+#include <util/C2InterfaceHelper.h>
+
+#include <C2.h>
+
+#include <memory>
+
+namespace aidl::android::hardware::media::c2::utils {
+
+struct InputSurface : public BnInputSurface {
+ InputSurface();
+ c2_status_t status() const;
+
+ // Methods from IInputSurface follow.
+ ::ndk::ScopedAStatus getSurface(
+ ::aidl::android::view::Surface* surface) override;
+ ::ndk::ScopedAStatus getConfigurable(
+ std::shared_ptr<IConfigurable>* configurable) override;
+ ::ndk::ScopedAStatus connect(
+ const std::shared_ptr<IInputSink>& sink,
+ std::shared_ptr<IInputSurfaceConnection>* connection) override;
+
+protected:
+ class Interface;
+ class ConfigurableIntf;
+
+ c2_status_t mInit;
+ std::shared_ptr<Interface> mIntf;
+ std::shared_ptr<CachedConfigurable> mConfigurable;
+
+ virtual ~InputSurface() override;
+
+
+ ::ndk::ScopedAIBinder_DeathRecipient mDeathRecipient;
+ static void OnBinderDied(void *cookie);
+ static void OnBinderUnlinked(void *cookie);
+ struct DeathContext;
+ DeathContext *mDeathContext;
+};
+
+} // namespace aidl::android::hardware::media::c2::utils
diff --git a/media/codec2/hal/aidl/include/codec2/aidl/inputsurface/InputSurfaceConnection.h b/media/codec2/hal/aidl/include/codec2/aidl/inputsurface/InputSurfaceConnection.h
new file mode 100644
index 0000000..59361e1
--- /dev/null
+++ b/media/codec2/hal/aidl/include/codec2/aidl/inputsurface/InputSurfaceConnection.h
@@ -0,0 +1,56 @@
+/*
+ * Copyright 2024 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#pragma once
+
+#include <aidl/android/hardware/media/c2/BnInputSurfaceConnection.h>
+#include <media/NdkImage.h>
+
+#include <C2.h>
+
+#include <memory>
+
+namespace aidl::android::hardware::media::c2::utils {
+
+struct InputSurfaceConnection : public BnInputSurfaceConnection {
+ InputSurfaceConnection();
+ c2_status_t status() const;
+
+ // Methods from IInputSurfaceConnection follow.
+ ::ndk::ScopedAStatus disconnect() override;
+ ::ndk::ScopedAStatus signalEndOfStream() override;
+
+ // implementation specific interface.
+
+ // Submit a buffer to the connected component.
+ c2_status_t submitBuffer(
+ int32_t bufferId,
+ const AImage *buffer = nullptr,
+ int64_t timestamp = 0,
+ int fenceFd = -1);
+
+ // Submit eos to the connected component.
+ c2_status_t submitEos(int32_t bufferId);
+
+ // notify dataspace being changed to the component.
+ void dispatchDataSpaceChanged(
+ int32_t dataSpace, int32_t aspects, int32_t pixelFormat);
+
+protected:
+ virtual ~InputSurfaceConnection() override;
+};
+
+} // namespace aidl::android::hardware::media::c2::utils
diff --git a/media/codec2/hal/aidl/inputsurface/InputSurface.cpp b/media/codec2/hal/aidl/inputsurface/InputSurface.cpp
new file mode 100644
index 0000000..5f6d176
--- /dev/null
+++ b/media/codec2/hal/aidl/inputsurface/InputSurface.cpp
@@ -0,0 +1,81 @@
+/*
+ * Copyright 2024 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "Codec2-InputSurface"
+#include <android-base/logging.h>
+
+#include <codec2/aidl/inputsurface/InputSurface.h>
+
+namespace aidl::android::hardware::media::c2::utils {
+
+// Derived class of C2InterfaceHelper
+class InputSurface::Interface : public C2InterfaceHelper {
+public:
+ explicit Interface(
+ const std::shared_ptr<C2ReflectorHelper> &helper)
+ : C2InterfaceHelper(helper) {
+
+ setDerivedInstance(this);
+
+ }
+
+private:
+};
+
+class InputSurface::ConfigurableIntf : public ConfigurableC2Intf {
+public:
+};
+
+struct InputSurface::DeathContext {
+ // TODO;
+};
+
+void InputSurface::OnBinderDied(void *cookie) {
+ (void) cookie;
+}
+
+void InputSurface::OnBinderUnlinked(void *cookie) {
+ (void) cookie;
+}
+
+InputSurface::InputSurface() : mDeathContext(nullptr) {
+ mInit = C2_OK;
+}
+
+InputSurface::~InputSurface() {
+}
+
+::ndk::ScopedAStatus InputSurface::getSurface(::aidl::android::view::Surface* surface) {
+ (void) surface;
+ return ::ndk::ScopedAStatus::ok();
+}
+
+::ndk::ScopedAStatus InputSurface::getConfigurable(
+ std::shared_ptr<IConfigurable>* configurable) {
+ *configurable = mConfigurable;
+ return ::ndk::ScopedAStatus::ok();
+}
+
+::ndk::ScopedAStatus InputSurface::connect(
+ const std::shared_ptr<IInputSink>& sink,
+ std::shared_ptr<IInputSurfaceConnection>* connection) {
+ (void) sink;
+ (void) connection;
+ return ::ndk::ScopedAStatus::ok();
+}
+
+} // namespace aidl::android::hardware::media::c2::utils
diff --git a/media/codec2/hal/aidl/inputsurface/InputSurfaceConnection.cpp b/media/codec2/hal/aidl/inputsurface/InputSurfaceConnection.cpp
new file mode 100644
index 0000000..44ca924
--- /dev/null
+++ b/media/codec2/hal/aidl/inputsurface/InputSurfaceConnection.cpp
@@ -0,0 +1,60 @@
+/*
+ * Copyright 2024 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "Codec2-InputSurface"
+#include <android-base/logging.h>
+
+#include <codec2/aidl/inputsurface/InputSurfaceConnection.h>
+
+namespace aidl::android::hardware::media::c2::utils {
+
+InputSurfaceConnection::InputSurfaceConnection() {
+}
+
+InputSurfaceConnection::~InputSurfaceConnection() {
+}
+
+::ndk::ScopedAStatus InputSurfaceConnection::disconnect() {
+ return ::ndk::ScopedAStatus::ok();
+}
+
+::ndk::ScopedAStatus InputSurfaceConnection::signalEndOfStream() {
+ return ::ndk::ScopedAStatus::ok();
+}
+
+c2_status_t InputSurfaceConnection::submitBuffer(
+ int32_t bufferId, const AImage *buffer, int64_t timestamp, int fenceFd) {
+ (void)bufferId;
+ (void)buffer;
+ (void)timestamp;
+ (void)fenceFd;
+ return C2_OK;
+}
+
+c2_status_t InputSurfaceConnection::submitEos(int32_t bufferId) {
+ (void)bufferId;
+ return C2_OK;
+}
+
+void InputSurfaceConnection::dispatchDataSpaceChanged(
+ int32_t dataSpace, int32_t aspects, int32_t pixelFormat) {
+ (void)dataSpace;
+ (void)aspects;
+ (void)pixelFormat;
+}
+
+} // namespace aidl::android::hardware::media::c2::utils
diff --git a/media/codec2/hal/client/GraphicBufferAllocator.cpp b/media/codec2/hal/client/GraphicBufferAllocator.cpp
index 6a6da0f..4055f9b 100644
--- a/media/codec2/hal/client/GraphicBufferAllocator.cpp
+++ b/media/codec2/hal/client/GraphicBufferAllocator.cpp
@@ -96,6 +96,10 @@
mGraphicsTracker->onAttached(generation);
}
+void GraphicBufferAllocator::pollForRenderedFrames(FrameEventHistoryDelta* delta) {
+ mGraphicsTracker->pollForRenderedFrames(delta);
+}
+
c2_status_t GraphicBufferAllocator::allocate(
uint32_t width, uint32_t height, ::android::PixelFormat format, uint64_t usage,
AHardwareBuffer **buf, ::android::sp<::android::Fence> *fence) {
@@ -119,6 +123,10 @@
return mGraphicsTracker->render(block, input, output);
}
+void GraphicBufferAllocator::onRequestStop() {
+ mGraphicsTracker->onRequestStop();
+}
+
GraphicBufferAllocator::~GraphicBufferAllocator() {}
std::shared_ptr<GraphicBufferAllocator> GraphicBufferAllocator::CreateGraphicBufferAllocator(
diff --git a/media/codec2/hal/client/GraphicsTracker.cpp b/media/codec2/hal/client/GraphicsTracker.cpp
index bdfc409..ff356fc 100644
--- a/media/codec2/hal/client/GraphicsTracker.cpp
+++ b/media/codec2/hal/client/GraphicsTracker.cpp
@@ -32,6 +32,9 @@
static constexpr int kMaxDequeueMin = 1;
static constexpr int kMaxDequeueMax = ::android::BufferQueueDefs::NUM_BUFFER_SLOTS - 2;
+// Just some delay for HAL to receive the stop()/release() request.
+static constexpr int kAllocateDirectDelayUs = 16666;
+
c2_status_t retrieveAHardwareBufferId(const C2ConstGraphicBlock &blk, uint64_t *bid) {
std::shared_ptr<const _C2BlockPoolData> bpData = _C2BlockFactory::GetGraphicBlockPoolData(blk);
if (!bpData || bpData->getType() != _C2BlockPoolData::TYPE_AHWBUFFER) {
@@ -177,7 +180,7 @@
mMaxDequeueCommitted{maxDequeueCount},
mDequeueable{maxDequeueCount},
mTotalDequeued{0}, mTotalCancelled{0}, mTotalDropped{0}, mTotalReleased{0},
- mInConfig{false}, mStopped{false} {
+ mInConfig{false}, mStopped{false}, mStopRequested{false}, mAllocAfterStopRequested{0} {
if (maxDequeueCount < kMaxDequeueMin) {
mMaxDequeue = kMaxDequeueMin;
mMaxDequeueCommitted = kMaxDequeueMin;
@@ -490,6 +493,18 @@
}
}
+void GraphicsTracker::onRequestStop() {
+ std::unique_lock<std::mutex> l(mLock);
+ if (mStopped) {
+ return;
+ }
+ if (mStopRequested) {
+ return;
+ }
+ mStopRequested = true;
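+ // Mark plenty of buffers as dequeueable so that any caller blocked waiting for a
+ // dequeueable slot wakes up; with mStopRequested set, allocate() serves further
+ // requests directly via _allocateDirect().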
+ writeIncDequeueableLocked(kMaxDequeueMax - 1);
+}
+
void GraphicsTracker::writeIncDequeueableLocked(int inc) {
CHECK(inc > 0 && inc < kMaxDequeueMax);
thread_local char buf[kMaxDequeueMax];
@@ -544,8 +559,7 @@
return C2_OK;
}
-c2_status_t GraphicsTracker::requestAllocate(std::shared_ptr<BufferCache> *cache) {
- std::lock_guard<std::mutex> l(mLock);
+c2_status_t GraphicsTracker::requestAllocateLocked(std::shared_ptr<BufferCache> *cache) {
if (mDequeueable > 0) {
char buf[1];
int ret = ::read(mReadPipeFd.get(), buf, 1);
@@ -728,6 +742,34 @@
return C2_OK;
}
+c2_status_t GraphicsTracker::_allocateDirect(
+ uint32_t width, uint32_t height, PixelFormat format, uint64_t usage,
+ AHardwareBuffer **buf, sp<Fence> *rFence) {
+ AHardwareBuffer_Desc desc;
+ desc.width = width;
+ desc.height = height;
+ desc.layers = 1u;
+ desc.format = ::android::AHardwareBuffer_convertFromPixelFormat(format);
+ desc.usage = ::android::AHardwareBuffer_convertFromGrallocUsageBits(usage);
+ desc.rfu0 = 0;
+ desc.rfu1 = 0;
+
+ int res = AHardwareBuffer_allocate(&desc, buf);
+ if (res != ::android::OK) {
+ ALOGE("_allocateDirect() failed(%d)", res);
+ if (res == ::android::NO_MEMORY) {
+ return C2_NO_MEMORY;
+ } else {
+ return C2_CORRUPTED;
+ }
+ }
+
+ int alloced = mAllocAfterStopRequested++;
+ *rFence = Fence::NO_FENCE;
+ ALOGD("_allocateDirect() allocated %d buffer", alloced);
+ return C2_OK;
+}
+
c2_status_t GraphicsTracker::allocate(
uint32_t width, uint32_t height, PixelFormat format, uint64_t usage,
AHardwareBuffer **buf, sp<Fence> *rFence) {
@@ -735,10 +777,21 @@
ALOGE("cannot allocate due to being stopped");
return C2_BAD_STATE;
}
+ c2_status_t res = C2_OK;
std::shared_ptr<BufferCache> cache;
- c2_status_t res = requestAllocate(&cache);
- if (res != C2_OK) {
- return res;
+ {
+ std::unique_lock<std::mutex> l(mLock);
+ if (mStopRequested) {
+ l.unlock();
+ res = _allocateDirect(width, height, format, usage, buf, rFence);
+ // Delay a little bit for HAL to receive stop()/release() request.
+ ::usleep(kAllocateDirectDelayUs);
+ return res;
+ }
+ res = requestAllocateLocked(&cache);
+ if (res != C2_OK) {
+ return res;
+ }
}
ALOGV("allocatable or dequeueable");
@@ -1003,6 +1056,19 @@
return C2_OK;
}
+void GraphicsTracker::pollForRenderedFrames(FrameEventHistoryDelta* delta) {
+ sp<IGraphicBufferProducer> igbp;
+ {
+ std::unique_lock<std::mutex> l(mLock);
+ if (mBufferCache) {
+ igbp = mBufferCache->mIgbp;
+ }
+ }
+ if (igbp) {
+ igbp->getFrameTimestamps(delta);
+ }
+}
+
void GraphicsTracker::onReleased(uint32_t generation) {
bool updateDequeue = false;
{
diff --git a/media/codec2/hal/client/client.cpp b/media/codec2/hal/client/client.cpp
index 9ee9b9e..17e5b62 100644
--- a/media/codec2/hal/client/client.cpp
+++ b/media/codec2/hal/client/client.cpp
@@ -3170,6 +3170,11 @@
c2_status_t Codec2Client::Component::stop() {
if (mAidlBase) {
+ std::shared_ptr<AidlGraphicBufferAllocator> gba =
+ mGraphicBufferAllocators->current();
+ if (gba) {
+ gba->onRequestStop();
+ }
::ndk::ScopedAStatus transStatus = mAidlBase->stop();
return GetC2Status(transStatus, "stop");
}
@@ -3220,6 +3225,11 @@
}
}
if (mAidlBase) {
+ std::shared_ptr<AidlGraphicBufferAllocator> gba =
+ mGraphicBufferAllocators->current();
+ if (gba) {
+ gba->onRequestStop();
+ }
::ndk::ScopedAStatus transStatus = mAidlBase->release();
return GetC2Status(transStatus, "release");
}
@@ -3407,7 +3417,11 @@
void Codec2Client::Component::pollForRenderedFrames(FrameEventHistoryDelta* delta) {
if (mAidlBase) {
- // TODO b/311348680
+ std::shared_ptr<AidlGraphicBufferAllocator> gba =
+ mGraphicBufferAllocators->current();
+ if (gba) {
+ gba->pollForRenderedFrames(delta);
+ }
return;
}
mOutputBufferQueue->pollForRenderedFrames(delta);
diff --git a/media/codec2/hal/client/include/codec2/aidl/GraphicBufferAllocator.h b/media/codec2/hal/client/include/codec2/aidl/GraphicBufferAllocator.h
index a797cb7..a70ffef 100644
--- a/media/codec2/hal/client/include/codec2/aidl/GraphicBufferAllocator.h
+++ b/media/codec2/hal/client/include/codec2/aidl/GraphicBufferAllocator.h
@@ -85,6 +85,11 @@
void onBufferAttached(uint32_t generation);
/**
+ * Retrieve frame event history from the crurrent surface if any.
+ */
+ void pollForRenderedFrames(::android::FrameEventHistoryDelta* delta);
+
+ /**
* Allocates a buffer.
*
* @param width width of the requested buffer.
@@ -125,6 +130,11 @@
const ::android::IGraphicBufferProducer::QueueBufferInput& input,
::android::IGraphicBufferProducer::QueueBufferOutput *output);
+ /**
+ * Notifies that a stop()/release() request to the HAL is in progress.
+ */
+ void onRequestStop();
+
~GraphicBufferAllocator();
/**
diff --git a/media/codec2/hal/client/include/codec2/aidl/GraphicsTracker.h b/media/codec2/hal/client/include/codec2/aidl/GraphicsTracker.h
index 9a4fa12..536caaa 100644
--- a/media/codec2/hal/client/include/codec2/aidl/GraphicsTracker.h
+++ b/media/codec2/hal/client/include/codec2/aidl/GraphicsTracker.h
@@ -35,6 +35,7 @@
using ::android::IGraphicBufferProducer;
using ::android::GraphicBuffer;
+using ::android::FrameEventHistoryDelta;
using ::android::Fence;
using ::android::PixelFormat;
using ::android::sp;
@@ -133,6 +134,11 @@
IGraphicBufferProducer::QueueBufferOutput *output);
/**
+ * Retrieve frame event history from the crurrent surface if any.
+ */
+ void pollForRenderedFrames(FrameEventHistoryDelta* delta);
+
+ /**
* Notifies when a Buffer is ready to allocate from Graphics.
* If generation does not match to the current, notifications via the interface
* will be ignored. (In the case, the notifications are from one of the old surfaces
@@ -175,6 +181,14 @@
*/
void stop();
+ /**
+ * Notifies that a stop()/release() request to the HAL is in progress from the client.
+ * The class will never become active again after this request.
+ * Still, allocation requests from the HAL must be served until stop()
+ * is actually called.
+ */
+ void onRequestStop();
+
private:
struct BufferCache;
@@ -290,6 +304,10 @@
std::atomic<bool> mStopped;
+ bool mStopRequested;
+ std::atomic<int> mAllocAfterStopRequested;
+
+
private:
explicit GraphicsTracker(int maxDequeueCount);
@@ -304,7 +322,7 @@
const std::shared_ptr<BufferCache> &cache,
int maxDequeueCommitted);
- c2_status_t requestAllocate(std::shared_ptr<BufferCache> *cache);
+ c2_status_t requestAllocateLocked(std::shared_ptr<BufferCache> *cache);
c2_status_t requestDeallocate(uint64_t bid, const sp<Fence> &fence,
bool *completed, bool *updateDequeue,
std::shared_ptr<BufferCache> *cache, int *slotId,
@@ -334,6 +352,10 @@
bool *cached, int *rSlotId, sp<Fence> *rFence,
std::shared_ptr<BufferItem> *buffer);
+ c2_status_t _allocateDirect(
+ uint32_t width, uint32_t height, PixelFormat format, uint64_t usage,
+ AHardwareBuffer **buf, sp<Fence> *fence);
+
void writeIncDequeueableLocked(int inc);
void drainDequeueableLocked(int dec);
};
diff --git a/media/libaaudio/src/client/AudioStreamInternal.cpp b/media/libaaudio/src/client/AudioStreamInternal.cpp
index 33f152c..e59f0ec 100644
--- a/media/libaaudio/src/client/AudioStreamInternal.cpp
+++ b/media/libaaudio/src/client/AudioStreamInternal.cpp
@@ -134,7 +134,12 @@
request.getConfiguration().setInputPreset(getInputPreset());
request.getConfiguration().setPrivacySensitive(isPrivacySensitive());
- request.getConfiguration().setBufferCapacity(builder.getBufferCapacity());
+ // When sample rate conversion is needed, we use the device sample rate instead of the
+ // requested sample rate to scale the capacity in configureDataInformation().
+ // Thus, we should scale the capacity here to cancel out the (sampleRate / deviceSampleRate)
+ // scaling there.
+ request.getConfiguration().setBufferCapacity(builder.getBufferCapacity()
+ * 48000 / getSampleRate());
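As a concrete illustration of the cancellation described in the comment above (assuming a 48 kHz device rate, which is what the hard-coded constant encodes because the real device rate is not known before the stream is opened): a requested capacity of 960 frames at a 24 kHz app sample rate is sent to the service as 960 * 48000 / 24000 = 1920 frames, and the later sampleRate / deviceSampleRate scaling in configureDataInformation() (24000 / 48000) brings the app-visible capacity back to the requested 960 frames.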
mServiceStreamHandleInfo = mServiceInterface.openStream(request, configurationOutput);
if (getServiceHandle() < 0
diff --git a/media/libaudioclient/AudioTrack.cpp b/media/libaudioclient/AudioTrack.cpp
index 3591fbf..2202539 100644
--- a/media/libaudioclient/AudioTrack.cpp
+++ b/media/libaudioclient/AudioTrack.cpp
@@ -1300,7 +1300,7 @@
if (isAudioPlaybackRateEqual(playbackRate, mPlaybackRate)) {
return NO_ERROR;
}
- if (isOffloadedOrDirect_l()) {
+ if (isAfTrackOffloadedOrDirect_l()) {
const status_t status = statusTFromBinderStatus(mAudioTrack->setPlaybackRateParameters(
VALUE_OR_RETURN_STATUS(
legacy2aidl_audio_playback_rate_t_AudioPlaybackRate(playbackRate))));
diff --git a/media/ndk/Android.bp b/media/ndk/Android.bp
index b250a03..e7fc106 100644
--- a/media/ndk/Android.bp
+++ b/media/ndk/Android.bp
@@ -93,6 +93,8 @@
srcs: [
"NdkJavaVMHelper.cpp",
"NdkMediaCodec.cpp",
+ "NdkMediaCodecInfo.cpp",
+ "NdkMediaCodecStore.cpp",
"NdkMediaCrypto.cpp",
"NdkMediaDataSource.cpp",
"NdkMediaExtractor.cpp",
@@ -131,6 +133,8 @@
"libbase",
"libdatasource",
"libmedia",
+ "libmedia_codeclist",
+ "libmedia_codeclist_capabilities",
"libmediadrm",
"libmedia_omx",
"libmedia_jni_utils",
diff --git a/media/ndk/NdkMediaCodecInfo.cpp b/media/ndk/NdkMediaCodecInfo.cpp
new file mode 100644
index 0000000..82ceb61
--- /dev/null
+++ b/media/ndk/NdkMediaCodecInfo.cpp
@@ -0,0 +1,520 @@
+/*
+ * Copyright (C) 2024 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "NdkMediaCodecInfo"
+
+#include "NdkMediaCodecInfoPriv.h"
+
+#include <media/NdkMediaFormatPriv.h>
+
+using namespace android;
+
+extern "C" {
+
+// Utils
+
+EXPORT
+void AIntRange_delete(AIntRange *range) {
+ free(range);
+}
+
+EXPORT
+void ADoubleRange_delete(ADoubleRange *range) {
+ free(range);
+}
+
+// AMediaCodecInfo
+
+EXPORT
+const char* AMediaCodecInfo_getCanonicalName(const AMediaCodecInfo *info) {
+ if (info == nullptr || info->mInfo == nullptr) {
+ return nullptr;
+ }
+
+ return info->mInfo->getCodecName();
+}
+
+EXPORT
+bool AMediaCodecInfo_isEncoder(const AMediaCodecInfo *info) {
+ return info->mInfo->isEncoder();
+}
+
+EXPORT
+bool AMediaCodecInfo_isVendor(const AMediaCodecInfo *info) {
+ int32_t attributes = info->mInfo->getAttributes();
+ return (attributes & android::MediaCodecInfo::kFlagIsVendor);
+}
+
+EXPORT
+AMediaCodecType AMediaCodecInfo_getMediaCodecInfoType(const AMediaCodecInfo *info) {
+ if (info == nullptr || info->mInfo == nullptr) {
+ return (AMediaCodecType)0;
+ }
+
+ int32_t attributes = info->mInfo->getAttributes();
+
+ if (attributes & android::MediaCodecInfo::kFlagIsSoftwareOnly) {
+ return SOFTWARE_ONLY;
+ }
+ if (attributes & android::MediaCodecInfo::kFlagIsHardwareAccelerated) {
+ return HARDWARE_ACCELERATED;
+ }
+ return SOFTWARE_WITH_DEVICE_ACCESS;
+}
+
+EXPORT
+const char* AMediaCodecInfo_getMediaType(const AMediaCodecInfo *info) {
+ if (info == nullptr || info->mInfo == nullptr) {
+ return nullptr;
+ }
+
+ return info->mMediaType.c_str();
+}
+
+EXPORT
+int32_t AMediaCodecInfo_getMaxSupportedInstances(const AMediaCodecInfo *info) {
+ if (info == nullptr) {
+ return -1;
+ }
+
+ return info->mCodecCaps->getMaxSupportedInstances();
+}
+
+EXPORT
+int32_t AMediaCodecInfo_isFeatureSupported(const AMediaCodecInfo *info, const char *featureName) {
+ if (info == nullptr || info->mCodecCaps == nullptr || featureName == nullptr) {
+ return -1;
+ }
+ return info->mCodecCaps->isFeatureSupported(std::string(featureName));
+}
+
+EXPORT
+int32_t AMediaCodecInfo_isFeatureRequired(const AMediaCodecInfo *info, const char *featureName) {
+ if (info == nullptr || info->mCodecCaps == nullptr || featureName == nullptr) {
+ return -1;
+ }
+ return info->mCodecCaps->isFeatureRequired(std::string(featureName));
+}
+
+EXPORT
+int32_t AMediaCodecInfo_isFormatSupported(const AMediaCodecInfo *info, const AMediaFormat *format) {
+ if (info == nullptr || info->mCodecCaps == nullptr || format == nullptr) {
+ return -1;
+ }
+
+ sp<AMessage> nativeFormat;
+ AMediaFormat_getFormat(format, &nativeFormat);
+
+ return info->mCodecCaps->isFormatSupported(nativeFormat);
+}
+
+EXPORT
+media_status_t AMediaCodecInfo_getAudioCapabilities(const AMediaCodecInfo *info,
+ const ACodecAudioCapabilities **outAudioCaps) {
+ if (info == nullptr || info->mInfo == nullptr) {
+ return AMEDIA_ERROR_INVALID_PARAMETER;
+ }
+
+ *outAudioCaps = info->mAAudioCaps.get();
+
+ if ((*outAudioCaps) == nullptr) {
+ return AMEDIA_ERROR_UNSUPPORTED;
+ }
+
+ return AMEDIA_OK;
+}
+
+EXPORT
+media_status_t AMediaCodecInfo_getVideoCapabilities(const AMediaCodecInfo *info,
+ const ACodecVideoCapabilities **outVideoCaps) {
+ if (info == nullptr || info->mInfo == nullptr) {
+ return AMEDIA_ERROR_INVALID_PARAMETER;
+ }
+
+ *outVideoCaps = info->mAVideoCaps.get();
+
+ if ((*outVideoCaps) == nullptr) {
+ return AMEDIA_ERROR_UNSUPPORTED;
+ }
+
+ return AMEDIA_OK;
+}
+
+EXPORT
+media_status_t AMediaCodecInfo_getEncoderCapabilities(const AMediaCodecInfo *info,
+ const ACodecEncoderCapabilities **outEncoderCaps) {
+ if (info == nullptr || info->mInfo == nullptr) {
+ return AMEDIA_ERROR_INVALID_PARAMETER;
+ }
+
+ *outEncoderCaps = info->mAEncoderCaps.get();
+
+ if ((*outEncoderCaps) == nullptr) {
+ return AMEDIA_ERROR_UNSUPPORTED;
+ }
+
+ return AMEDIA_OK;
+}
+
+// ACodecAudioCapabilities
+
+EXPORT
+media_status_t ACodecAudioCapabilities_getBitrateRange(const ACodecAudioCapabilities *audioCaps,
+ AIntRange *outRange) {
+ if (audioCaps == nullptr || outRange == nullptr) {
+ return AMEDIA_ERROR_INVALID_PARAMETER;
+ }
+
+ const Range<int32_t>& bitrateRange = audioCaps->mAudioCaps->getBitrateRange();
+ outRange->mLower = bitrateRange.lower();
+ outRange->mUpper = bitrateRange.upper();
+
+ return AMEDIA_OK;
+}
+
+EXPORT
+media_status_t ACodecAudioCapabilities_getSupportedSampleRates(
+ const ACodecAudioCapabilities *audioCaps, const int **outArrayPtr, size_t *outCount) {
+ if (audioCaps == nullptr || outArrayPtr == nullptr || outCount == nullptr) {
+ return AMEDIA_ERROR_INVALID_PARAMETER;
+ }
+
+ if (audioCaps->mSampleRates.empty()) {
+ return AMEDIA_ERROR_UNSUPPORTED;
+ }
+
+ *outArrayPtr = audioCaps->mSampleRates.data();
+ *outCount = audioCaps->mSampleRates.size();
+
+ return AMEDIA_OK;
+}
+
+EXPORT
+media_status_t ACodecAudioCapabilities_getSupportedSampleRateRanges(
+ const ACodecAudioCapabilities *audioCaps, const AIntRange **outArrayPtr, size_t *outCount) {
+ if (audioCaps == nullptr || outArrayPtr == nullptr || outCount == nullptr) {
+ return AMEDIA_ERROR_INVALID_PARAMETER;
+ }
+
+ *outArrayPtr = audioCaps->mSampleRateRanges.data();
+ *outCount = audioCaps->mSampleRateRanges.size();
+
+ return AMEDIA_OK;
+}
+
+EXPORT
+int32_t ACodecAudioCapabilities_getMaxInputChannelCount(const ACodecAudioCapabilities *audioCaps) {
+ if (audioCaps == nullptr) {
+ return -1;
+ }
+ return audioCaps->mAudioCaps->getMaxInputChannelCount();
+}
+
+EXPORT
+int32_t ACodecAudioCapabilities_getMinInputChannelCount(const ACodecAudioCapabilities *audioCaps) {
+ if (audioCaps == nullptr) {
+ return -1;
+ }
+ return audioCaps->mAudioCaps->getMinInputChannelCount();
+}
+
+EXPORT
+media_status_t ACodecAudioCapabilities_getInputChannelCountRanges(
+ const ACodecAudioCapabilities *audioCaps, const AIntRange **outArrayPtr, size_t *outCount) {
+ if (audioCaps == nullptr || outArrayPtr == nullptr || outCount == nullptr) {
+ return AMEDIA_ERROR_INVALID_PARAMETER;
+ }
+
+ *outArrayPtr = audioCaps->mInputChannelCountRanges.data();
+ *outCount = audioCaps->mInputChannelCountRanges.size();
+
+ return AMEDIA_OK;
+}
+
+EXPORT
+int32_t ACodecAudioCapabilities_isSampleRateSupported(const ACodecAudioCapabilities *audioCaps,
+ int32_t sampleRate) {
+ if (audioCaps == nullptr) {
+ return -1;
+ }
+ return audioCaps->mAudioCaps->isSampleRateSupported(sampleRate);
+}
+
+// ACodecPerformancePoint
+
+EXPORT
+ACodecPerformancePoint* ACodecPerformancePoint_create(int32_t width, int32_t height,
+ int32_t frameRate) {
+ return new ACodecPerformancePoint(
+ std::make_shared<VideoCapabilities::PerformancePoint>(width, height, frameRate));
+}
+
+EXPORT
+media_status_t ACodecPerformancePoint_delete(ACodecPerformancePoint *performancePoint) {
+ if (performancePoint == nullptr) {
+ return AMEDIA_ERROR_INVALID_PARAMETER;
+ }
+
+ delete performancePoint;
+
+ return AMEDIA_OK;
+}
+
+EXPORT
+bool ACodecPerformancePoint_coversFormat(const ACodecPerformancePoint *performancePoint,
+ const AMediaFormat *format) {
+ sp<AMessage> nativeFormat;
+ AMediaFormat_getFormat(format, &nativeFormat);
+
+ return performancePoint->mPerformancePoint->covers(nativeFormat);
+}
+
+EXPORT
+bool ACodecPerformancePoint_covers(const ACodecPerformancePoint *one,
+ const ACodecPerformancePoint *another) {
+ return one->mPerformancePoint->covers(*(another->mPerformancePoint));
+}
+
+EXPORT
+bool ACodecPerformancePoint_equals(const ACodecPerformancePoint *one,
+ const ACodecPerformancePoint *another) {
+ return one->mPerformancePoint->equals(*(another->mPerformancePoint));
+}
+
+// ACodecVideoCapabilities
+
+EXPORT
+media_status_t ACodecVideoCapabilities_getBitrateRange(const ACodecVideoCapabilities *videoCaps,
+ AIntRange *outRange) {
+ if (videoCaps == nullptr || outRange == nullptr) {
+ return AMEDIA_ERROR_INVALID_PARAMETER;
+ }
+
+ const Range<int32_t>& bitrateRange = videoCaps->mVideoCaps->getBitrateRange();
+ outRange->mLower = bitrateRange.lower();
+ outRange->mUpper = bitrateRange.upper();
+
+ return AMEDIA_OK;
+}
+
+EXPORT
+media_status_t ACodecVideoCapabilities_getSupportedWidths(const ACodecVideoCapabilities *videoCaps,
+ AIntRange *outRange) {
+ if (videoCaps == nullptr || outRange == nullptr) {
+ return AMEDIA_ERROR_INVALID_PARAMETER;
+ }
+
+ const Range<int32_t>& supportedWidths = videoCaps->mVideoCaps->getSupportedWidths();
+ outRange->mLower = supportedWidths.lower();
+ outRange->mUpper = supportedWidths.upper();
+
+ return AMEDIA_OK;
+}
+
+EXPORT
+media_status_t ACodecVideoCapabilities_getSupportedHeights(const ACodecVideoCapabilities *videoCaps,
+ AIntRange *outRange) {
+ if (videoCaps == nullptr || outRange == nullptr) {
+ return AMEDIA_ERROR_INVALID_PARAMETER;
+ }
+
+ const Range<int32_t>& supportedHeights = videoCaps->mVideoCaps->getSupportedHeights();
+ outRange->mLower = supportedHeights.lower();
+ outRange->mUpper = supportedHeights.upper();
+
+ return AMEDIA_OK;
+}
+
+EXPORT
+int32_t ACodecVideoCapabilities_getWidthAlignment(const ACodecVideoCapabilities *videoCaps) {
+ if (videoCaps == nullptr) {
+ return -1;
+ }
+ return videoCaps->mVideoCaps->getWidthAlignment();
+}
+
+EXPORT
+int32_t ACodecVideoCapabilities_getHeightAlignment(const ACodecVideoCapabilities *videoCaps) {
+ if (videoCaps == nullptr) {
+ return -1;
+ }
+ return videoCaps->mVideoCaps->getHeightAlignment();
+}
+
+EXPORT
+media_status_t ACodecVideoCapabilities_getSupportedFrameRates(
+ const ACodecVideoCapabilities *videoCaps, AIntRange *outRange) {
+ if (videoCaps == nullptr || outRange == nullptr) {
+ return AMEDIA_ERROR_INVALID_PARAMETER;
+ }
+
+ const Range<int32_t>& frameRateRange = videoCaps->mVideoCaps->getSupportedFrameRates();
+ outRange->mLower = frameRateRange.lower();
+ outRange->mUpper = frameRateRange.upper();
+
+ return AMEDIA_OK;
+}
+
+EXPORT
+media_status_t ACodecVideoCapabilities_getSupportedWidthsFor(
+ const ACodecVideoCapabilities *videoCaps, int32_t height, AIntRange *outRange) {
+ if (videoCaps == nullptr || outRange == nullptr) {
+ return AMEDIA_ERROR_INVALID_PARAMETER;
+ }
+
+ std::optional<Range<int32_t>> widthRange = videoCaps->mVideoCaps->getSupportedWidthsFor(height);
+ if (!widthRange) {
+ return AMEDIA_ERROR_UNSUPPORTED;
+ }
+
+ outRange->mLower = widthRange.value().lower();
+ outRange->mUpper = widthRange.value().upper();
+
+ return AMEDIA_OK;
+}
+
+EXPORT
+media_status_t ACodecVideoCapabilities_getSupportedHeightsFor(
+ const ACodecVideoCapabilities *videoCaps, int32_t width, AIntRange *outRange) {
+ if (videoCaps == nullptr || outRange == nullptr) {
+ return AMEDIA_ERROR_INVALID_PARAMETER;
+ }
+
+ std::optional<Range<int32_t>> heightRange
+ = videoCaps->mVideoCaps->getSupportedHeightsFor(width);
+ if (!heightRange) {
+ return AMEDIA_ERROR_UNSUPPORTED;
+ }
+
+ outRange->mLower = heightRange.value().lower();
+ outRange->mUpper = heightRange.value().upper();
+
+ return AMEDIA_OK;
+}
+
+EXPORT
+media_status_t ACodecVideoCapabilities_getSupportedFrameRatesFor(
+ const ACodecVideoCapabilities *videoCaps, int32_t width, int32_t height,
+ ADoubleRange *outRange) {
+ if (videoCaps == nullptr || outRange == nullptr) {
+ return AMEDIA_ERROR_INVALID_PARAMETER;
+ }
+
+ std::optional<Range<double>> frameRates
+ = videoCaps->mVideoCaps->getSupportedFrameRatesFor(width, height);
+ if (!frameRates) {
+ return AMEDIA_ERROR_UNSUPPORTED;
+ }
+
+ outRange->mLower = frameRates.value().lower();
+ outRange->mUpper = frameRates.value().upper();
+
+ return AMEDIA_OK;
+}
+
+EXPORT
+media_status_t ACodecVideoCapabilities_getAchievableFrameRatesFor(
+ const ACodecVideoCapabilities *videoCaps, int32_t width, int32_t height,
+ ADoubleRange *outRange) {
+ if (videoCaps == nullptr || outRange == nullptr) {
+ return AMEDIA_ERROR_INVALID_PARAMETER;
+ }
+
+ std::optional<Range<double>> frameRates
+ = videoCaps->mVideoCaps->getAchievableFrameRatesFor(width, height);
+ if (!frameRates) {
+ return AMEDIA_ERROR_UNSUPPORTED;
+ }
+
+ outRange->mLower = frameRates.value().lower();
+ outRange->mUpper = frameRates.value().upper();
+
+ return AMEDIA_OK;
+}
+
+EXPORT
+media_status_t ACodecVideoCapabilities_getSupportedPerformancePoints(
+ const ACodecVideoCapabilities *videoCaps,
+ const ACodecPerformancePoint **outPerformancePointArray, size_t *outCount) {
+ if (videoCaps == nullptr) {
+ return AMEDIA_ERROR_INVALID_PARAMETER;
+ }
+
+ *outPerformancePointArray = videoCaps->mPerformancePoints.data();
+ *outCount = videoCaps->mPerformancePoints.size();
+
+ return AMEDIA_OK;
+}
+
+EXPORT
+int32_t ACodecVideoCapabilities_areSizeAndRateSupported(const ACodecVideoCapabilities *videoCaps,
+ int32_t width, int32_t height, double frameRate) {
+ if (videoCaps == nullptr) {
+ return -1;
+ }
+ return videoCaps->mVideoCaps->areSizeAndRateSupported(width, height, frameRate);
+}
+
+EXPORT
+int32_t ACodecVideoCapabilities_isSizeSupported(const ACodecVideoCapabilities *videoCaps,
+ int32_t width, int32_t height) {
+ if (videoCaps == nullptr) {
+ return -1;
+ }
+ return videoCaps->mVideoCaps->isSizeSupported(width, height);
+}
+
+// ACodecEncoderCapabilities
+
+EXPORT
+media_status_t ACodecEncoderCapabilities_getQualityRange(
+ const ACodecEncoderCapabilities *encoderCaps, AIntRange *outRange) {
+ if (encoderCaps == nullptr || outRange == nullptr) {
+ return AMEDIA_ERROR_INVALID_PARAMETER;
+ }
+
+ const Range<int32_t>& qualityRange = encoderCaps->mEncoderCaps->getQualityRange();
+ outRange->mLower = qualityRange.lower();
+ outRange->mUpper = qualityRange.upper();
+
+ return AMEDIA_OK;
+}
+
+EXPORT
+media_status_t ACodecEncoderCapabilities_getComplexityRange(
+ const ACodecEncoderCapabilities *encoderCaps, AIntRange *outRange) {
+ if (encoderCaps == nullptr || outRange == nullptr) {
+ return AMEDIA_ERROR_INVALID_PARAMETER;
+ }
+
+ const Range<int32_t>& complexityRange = encoderCaps->mEncoderCaps->getComplexityRange();
+ outRange->mLower = complexityRange.lower();
+ outRange->mUpper = complexityRange.upper();
+
+ return AMEDIA_OK;
+}
+
+EXPORT
+int32_t ACodecEncoderCapabilities_isBitrateModeSupported(
+ const ACodecEncoderCapabilities *encoderCaps, ABiterateMode mode) {
+ if (encoderCaps == nullptr) {
+ return -1;
+ }
+ return encoderCaps->mEncoderCaps->isBitrateModeSupported(mode);
+}
+
+
+}
\ No newline at end of file
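A sketch of how a client might combine the queries above, assuming it has already obtained an AMediaCodecInfo from the codec store (that path is outside this excerpt); only functions defined in this file are used, and the helper itself is illustrative.

    #include <media/NdkMediaCodecInfo.h>
    #include <stdio.h>

    // Illustrative helper: print a codec's supported video size range, if it has one.
    static void printVideoCaps(const AMediaCodecInfo *info) {
        const ACodecVideoCapabilities *videoCaps = NULL;
        if (AMediaCodecInfo_getVideoCapabilities(info, &videoCaps) != AMEDIA_OK) {
            return;  // no video capabilities published for this codec
        }
        AIntRange widths, heights;
        if (ACodecVideoCapabilities_getSupportedWidths(videoCaps, &widths) == AMEDIA_OK &&
            ACodecVideoCapabilities_getSupportedHeights(videoCaps, &heights) == AMEDIA_OK) {
            printf("%s (%s): %dx%d .. %dx%d\n",
                   AMediaCodecInfo_getCanonicalName(info),
                   AMediaCodecInfo_getMediaType(info),
                   widths.mLower, heights.mLower, widths.mUpper, heights.mUpper);
        }
        if (ACodecVideoCapabilities_areSizeAndRateSupported(videoCaps, 1920, 1080, 30.0) == 1) {
            printf("  1080p30 supported\n");
        }
    }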
diff --git a/media/ndk/NdkMediaCodecInfoPriv.h b/media/ndk/NdkMediaCodecInfoPriv.h
new file mode 100644
index 0000000..6d9188b
--- /dev/null
+++ b/media/ndk/NdkMediaCodecInfoPriv.h
@@ -0,0 +1,122 @@
+/*
+ * Copyright (C) 2024 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef _NDK_MEDIA_CODEC_INFO_PRIV_H
+#define _NDK_MEDIA_CODEC_INFO_PRIV_H
+
+#include <media/MediaCodecInfo.h>
+#include <media/NdkMediaCodecInfo.h>
+
+struct ACodecAudioCapabilities {
+ std::shared_ptr<android::AudioCapabilities> mAudioCaps;
+
+ std::vector<int> mSampleRates;
+ std::vector<AIntRange> mSampleRateRanges;
+ std::vector<AIntRange> mInputChannelCountRanges;
+
+ void initSampleRates() {
+ mSampleRates = mAudioCaps->getSupportedSampleRates();
+ }
+
+ void initSampleRateRanges() {
+ const std::vector<android::Range<int>>& sampleRateRanges
+ = mAudioCaps->getSupportedSampleRateRanges();
+ for (auto it = sampleRateRanges.begin(); it != sampleRateRanges.end(); it++) {
+ mSampleRateRanges.emplace_back(it->lower(), it->upper());
+ }
+ }
+
+ void initInputChannelCountRanges() {
+ const std::vector<android::Range<int>>& inputChannels
+ = mAudioCaps->getInputChannelCountRanges();
+ for (auto it = inputChannels.begin(); it != inputChannels.end(); it++) {
+ mInputChannelCountRanges.emplace_back(it->lower(), it->upper());
+ }
+ }
+
+ ACodecAudioCapabilities(std::shared_ptr<android::AudioCapabilities> audioCaps)
+ : mAudioCaps(audioCaps) {
+ initSampleRates();
+ initSampleRateRanges();
+ initInputChannelCountRanges();
+ }
+};
+
+struct ACodecPerformancePoint {
+ std::shared_ptr<const android::VideoCapabilities::PerformancePoint> mPerformancePoint;
+
+ ACodecPerformancePoint(std::shared_ptr<const android::VideoCapabilities::PerformancePoint>
+ performancePoint) : mPerformancePoint(performancePoint) {}
+};
+
+struct ACodecVideoCapabilities {
+ std::shared_ptr<android::VideoCapabilities> mVideoCaps;
+
+ std::vector<ACodecPerformancePoint> mPerformancePoints;
+
+ void initPerformancePoints() {
+ const std::vector<android::VideoCapabilities::PerformancePoint>& performancePoints
+ = mVideoCaps->getSupportedPerformancePoints();
+ for (auto it = performancePoints.begin(); it != performancePoints.end(); it++) {
+ // Copy each point so the NDK wrapper owns its own instance instead of holding
+ // a deleting shared_ptr to an element of a vector it does not own.
+ mPerformancePoints.emplace_back(
+ std::make_shared<const android::VideoCapabilities::PerformancePoint>(*it));
+ }
+ }
+
+ ACodecVideoCapabilities(std::shared_ptr<android::VideoCapabilities> videoCaps)
+ : mVideoCaps(videoCaps) {
+ initPerformancePoints();
+ }
+};
+
+struct ACodecEncoderCapabilities {
+ std::shared_ptr<android::EncoderCapabilities> mEncoderCaps;
+
+ ACodecEncoderCapabilities(std::shared_ptr<android::EncoderCapabilities> encoderCaps)
+ : mEncoderCaps(encoderCaps) {}
+};
+
+struct AMediaCodecInfo {
+ std::string mName;
+ android::sp<android::MediaCodecInfo> mInfo;
+ std::string mMediaType;
+ std::shared_ptr<android::CodecCapabilities> mCodecCaps;
+
+ std::shared_ptr<const ACodecAudioCapabilities> mAAudioCaps;
+ std::shared_ptr<const ACodecVideoCapabilities> mAVideoCaps;
+ std::shared_ptr<const ACodecEncoderCapabilities> mAEncoderCaps;
+
+ AMediaCodecInfo(std::string name, android::sp<android::MediaCodecInfo> info,
+ std::shared_ptr<android::CodecCapabilities> codecCaps, std::string mediaType)
+ : mName(name), mInfo(info), mMediaType(mediaType), mCodecCaps(codecCaps) {
+ if (!mName.empty() && mInfo != nullptr && !mMediaType.empty() && mCodecCaps != nullptr) {
+ if (mCodecCaps->getAudioCapabilities() != nullptr) {
+ mAAudioCaps = std::make_shared<const ACodecAudioCapabilities>(
+ mCodecCaps->getAudioCapabilities());
+ }
+ if (mCodecCaps->getVideoCapabilities() != nullptr) {
+ mAVideoCaps = std::make_shared<const ACodecVideoCapabilities>(
+ mCodecCaps->getVideoCapabilities());
+ }
+ if (mCodecCaps->getEncoderCapabilities() != nullptr) {
+ mAEncoderCaps = std::make_shared<const ACodecEncoderCapabilities>(
+ mCodecCaps->getEncoderCapabilities());
+ }
+ }
+ }
+};
+
+#endif //_NDK_MEDIA_CODEC_INFO_PRIV_H
\ No newline at end of file
diff --git a/media/ndk/NdkMediaCodecStore.cpp b/media/ndk/NdkMediaCodecStore.cpp
new file mode 100644
index 0000000..d911593
--- /dev/null
+++ b/media/ndk/NdkMediaCodecStore.cpp
@@ -0,0 +1,254 @@
+/*
+ * Copyright (C) 2024 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "NdkMediaCodecStore"
+
+#include "NdkMediaCodecInfoPriv.h"
+
+#include <media/NdkMediaCodecStore.h>
+#include <media/NdkMediaFormatPriv.h>
+
+#include <media/IMediaCodecList.h>
+
+#include <media/MediaCodecInfo.h>
+#include <media/stagefright/foundation/AMessage.h>
+#include <media/stagefright/foundation/ADebug.h>
+#include <media/stagefright/MediaCodecConstants.h>
+#include <media/stagefright/MediaCodecList.h>
+
+using namespace android;
+
+static sp<IMediaCodecList> sCodecList;
+static std::vector<AMediaCodecSupportedMediaType> sMediaTypes;
+static std::vector<AMediaCodecInfo> sCodecInfos;
+
+static std::map<std::string, AMediaCodecInfo> sNameToInfoMap;
+static std::map<std::string, std::vector<AMediaCodecInfo>> sTypeToInfoList;
+
+static void initMediaTypes() {
+ if (sCodecList == nullptr) {
+ sCodecList = MediaCodecList::getInstance();
+ }
+
+ std::map<std::string, AMediaCodecSupportedMediaType> typesInfoMap;
+ std::vector<std::string> mediaTypes; // Keep the order of media types appearing in sCodecList.
+ for (size_t idx = 0; idx < sCodecList->countCodecs(); idx++) {
+ sp<MediaCodecInfo> codecInfo = sCodecList->getCodecInfo(idx);
+ if (codecInfo == nullptr) {
+ ALOGW("NULL MediaCodecInfo in MediaCodecList");
+ continue;
+ }
+ Vector<AString> codecMediaTypes;
+ codecInfo->getSupportedMediaTypes(&codecMediaTypes);
+ for (AString codecMediaType : codecMediaTypes) {
+ std::string mediaType = std::string(codecMediaType.c_str());
+
+ // Excludes special codecs from NDK
+ const std::shared_ptr<CodecCapabilities> codecCaps
+ = codecInfo->getCodecCapsFor(mediaType.c_str());
+ if (codecCaps->isFeatureSupported(FEATURE_SpecialCodec)) {
+ continue;
+ }
+
+ auto it = typesInfoMap.find(mediaType);
+ if (it == typesInfoMap.end()) {
+ AMediaCodecSupportedMediaType supportedType = { mediaType.c_str(), 0 };
+ it = typesInfoMap.emplace(mediaType, supportedType).first;
+ mediaTypes.push_back(mediaType);
+ }
+ uint32_t &mode = it->second.mMode;
+ mode |= (codecInfo->isEncoder() ? AMediaCodecSupportedMediaType::FLAG_ENCODER
+ : AMediaCodecSupportedMediaType::FLAG_DECODER);
+ }
+ }
+
+    // sMediaTypes keeps the order of media types appearing in sCodecList. Move the name strings
+    // into a static container first so the mMediaType pointers handed out to callers stay valid
+    // after the local strings of this function are destroyed.
+    static std::vector<std::string> sMediaTypeNames;
+    sMediaTypeNames = std::move(mediaTypes);
+    for (const std::string &type : sMediaTypeNames) {
+        AMediaCodecSupportedMediaType supportedType = typesInfoMap.find(type)->second;
+        supportedType.mMediaType = type.c_str();
+        sMediaTypes.push_back(supportedType);
+    }
+}
+
+static void initCodecInfoMap() {
+ if (sCodecList == nullptr) {
+ sCodecList = MediaCodecList::getInstance();
+ }
+
+ for (size_t idx = 0; idx < sCodecList->countCodecs(); idx++) {
+ sp<MediaCodecInfo> codecInfo = sCodecList->getCodecInfo(idx);
+ if (codecInfo == nullptr) {
+ ALOGW("NULL MediaCodecInfo in MediaCodecList");
+ continue;
+ }
+
+ Vector<AString> codecMediaTypes;
+ codecInfo->getSupportedMediaTypes(&codecMediaTypes);
+ bool useTypeSuffix = codecMediaTypes.size() > 1;
+ for (AString codecMediaType : codecMediaTypes) {
+ std::string mediaType = std::string(codecMediaType.c_str());
+
+ // Excludes special codecs from NDK
+ const std::shared_ptr<CodecCapabilities> codecCaps
+ = codecInfo->getCodecCapsFor(mediaType.c_str());
+ if (codecCaps->isFeatureSupported(FEATURE_SpecialCodec)) {
+ continue;
+ }
+
+ // get the type name after the slash. e.g. video/x.on2.vp8
+ size_t slashIx = mediaType.find_last_of('/');
+ if (slashIx == std::string::npos) {
+ slashIx = 0;
+ } else {
+ slashIx++;
+ }
+ std::string ndkBaseName = std::string(codecInfo->getCodecName());
+ if (useTypeSuffix) {
+ // If there are multiple supported media types,
+ // add the type to the end of the name to disambiguate names.
+ ndkBaseName += "." + mediaType.substr(slashIx);
+ }
+
+ int32_t copyIx = 0;
+ std::string ndkName;
+ // if a name is already registered,
+ // add ".1", ".2", ... at the end to disambiguate names.
+ while (true) {
+ ndkName = ndkBaseName;
+ if (copyIx > 0) {
+ ndkName += "." + std::to_string(copyIx);
+ }
+ if (!sNameToInfoMap.contains(ndkName)) {
+ break;
+ }
+ copyIx++;
+ }
+
+ AMediaCodecInfo info = AMediaCodecInfo(ndkName, codecInfo, codecCaps, mediaType);
+ sCodecInfos.push_back(info);
+ sNameToInfoMap.emplace(ndkName, info);
+
+ auto it = sTypeToInfoList.find(mediaType);
+ if (it == sTypeToInfoList.end()) {
+ std::vector<AMediaCodecInfo> infoList;
+ infoList.push_back(info);
+ sTypeToInfoList.emplace(mediaType, infoList);
+ } else {
+ it->second.push_back(info);
+ }
+ }
+ }
+}
+
+static bool codecHandlesFormat(const AMediaCodecInfo &codecInfo,
+ sp<AMessage> format, bool isEncoder) {
+ return codecInfo.mCodecCaps->isEncoder() == isEncoder
+ && codecInfo.mCodecCaps->isFormatSupported(format);
+}
+
+static media_status_t findNextCodecForFormat(
+ const AMediaFormat *format, bool isEncoder, const AMediaCodecInfo **outCodecInfo) {
+ if (outCodecInfo == nullptr) {
+ return AMEDIA_ERROR_INVALID_PARAMETER;
+ }
+
+ if (sCodecInfos.empty()) {
+ initCodecInfoMap();
+ }
+
+    // Use a non-owning pointer here: both sCodecInfos and the vectors stored in
+    // sTypeToInfoList have static storage duration and must not be deleted.
+    const std::vector<AMediaCodecInfo> *infos = nullptr;
+    sp<AMessage> nativeFormat;
+    if (format == nullptr) {
+        infos = &sCodecInfos;
+    } else {
+        AMediaFormat_getFormat(format, &nativeFormat);
+        AString mime;
+        if (!nativeFormat->findString(KEY_MIME, &mime)) {
+            return AMEDIA_ERROR_INVALID_PARAMETER;
+        }
+
+        std::string mediaType = std::string(mime.c_str());
+        auto it = sTypeToInfoList.find(mediaType);
+        if (it == sTypeToInfoList.end()) {
+            return AMEDIA_ERROR_UNSUPPORTED;
+        }
+        infos = &(it->second);
+    }
+
+ bool found = *outCodecInfo == nullptr;
+ for (const AMediaCodecInfo &info : *infos) {
+ if (found && (format == nullptr
+ || codecHandlesFormat(info, nativeFormat, isEncoder))) {
+ *outCodecInfo = &info;
+ return AMEDIA_OK;
+ }
+ if (*outCodecInfo == &info) {
+ found = true;
+        }
+    }
+ *outCodecInfo = nullptr;
+ return AMEDIA_ERROR_UNSUPPORTED;
+}
+
+extern "C" {
+
+EXPORT
+media_status_t AMediaCodecStore_getSupportedMediaTypes(
+ const AMediaCodecSupportedMediaType **outMediaTypes, size_t *outCount) {
+    if (outMediaTypes == nullptr || outCount == nullptr) {
+ return AMEDIA_ERROR_INVALID_PARAMETER;
+ }
+
+ if (sMediaTypes.empty()) {
+ initMediaTypes();
+ }
+
+ *outCount = sMediaTypes.size();
+ *outMediaTypes = sMediaTypes.data();
+
+ return AMEDIA_OK;
+}
+
+EXPORT
+media_status_t AMediaCodecStore_findNextDecoderForFormat(
+ const AMediaFormat *format, const AMediaCodecInfo **outCodecInfo){
+ return findNextCodecForFormat(format, false, outCodecInfo);
+}
+
+EXPORT
+media_status_t AMediaCodecStore_findNextEncoderForFormat(
+ const AMediaFormat *format, const AMediaCodecInfo **outCodecInfo){
+ return findNextCodecForFormat(format, true, outCodecInfo);
+}
+
+EXPORT
+media_status_t AMediaCodecStore_getCodecInfo(
+ const char *name, const AMediaCodecInfo **outCodecInfo) {
+ if (outCodecInfo == nullptr || name == nullptr) {
+ return AMEDIA_ERROR_INVALID_PARAMETER;
+ }
+
+ auto it = sNameToInfoMap.find(std::string(name));
+ if (it == sNameToInfoMap.end()) {
+ *outCodecInfo = nullptr;
+ return AMEDIA_ERROR_UNSUPPORTED;
+ } else {
+ *outCodecInfo = &(it->second);
+ return AMEDIA_OK;
+ }
+}
+
+}
\ No newline at end of file
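For reference, a minimal usage sketch of the iteration contract implemented by findNextCodecForFormat() above (not part of the patch; it assumes the API 36 NDK headers introduced later in this change, uses "video/avc" purely as an example, and trims error handling):

#include <media/NdkMediaCodecInfo.h>
#include <media/NdkMediaCodecStore.h>
#include <media/NdkMediaFormat.h>
#include <cstdio>

// Enumerate every decoder that supports AVC and print its canonical name.
static void listAvcDecoders() {
    AMediaFormat *format = AMediaFormat_new();
    AMediaFormat_setString(format, AMEDIAFORMAT_KEY_MIME, "video/avc");

    // Start with *outCodecInfo == NULL; feed the previous result back in to advance.
    const AMediaCodecInfo *info = nullptr;
    while (AMediaCodecStore_findNextDecoderForFormat(format, &info) == AMEDIA_OK
            && info != nullptr) {
        printf("decoder: %s\n", AMediaCodecInfo_getCanonicalName(info));
    }
    AMediaFormat_delete(format);
}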
diff --git a/media/ndk/include/media/NdkMediaCodecInfo.h b/media/ndk/include/media/NdkMediaCodecInfo.h
new file mode 100644
index 0000000..558e82c
--- /dev/null
+++ b/media/ndk/include/media/NdkMediaCodecInfo.h
@@ -0,0 +1,625 @@
+/*
+ * Copyright (C) 2024 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+/**
+ * @addtogroup Media
+ * @{
+ */
+
+/**
+ * @file NdkMediaCodecInfo.h
+ */
+
+/*
+ * This file defines an NDK API.
+ * Do not remove methods.
+ * Do not change method signatures.
+ * Do not change the value of constants.
+ * Do not change the size of any of the classes defined in here.
+ * Do not reference types that are not part of the NDK.
+ * Do not #include files that aren't part of the NDK.
+ */
+
+#ifndef _NDK_MEDIA_CODEC_INFO_H
+#define _NDK_MEDIA_CODEC_INFO_H
+
+#include "NdkMediaError.h"
+#include "NdkMediaFormat.h"
+
+__BEGIN_DECLS
+
+struct ACodecAudioCapabilities;
+typedef struct ACodecAudioCapabilities ACodecAudioCapabilities;
+struct ACodecPerformancePoint;
+typedef struct ACodecPerformancePoint ACodecPerformancePoint;
+struct ACodecVideoCapabilities;
+typedef struct ACodecVideoCapabilities ACodecVideoCapabilities;
+struct ACodecEncoderCapabilities;
+typedef struct ACodecEncoderCapabilities ACodecEncoderCapabilities;
+struct AMediaCodecInfo;
+typedef struct AMediaCodecInfo AMediaCodecInfo;
+
+/**
+ * A utility structure describing a range of two integer values.
+ */
+typedef struct AIntRange {
+ int32_t mLower;
+ int32_t mUpper;
+} AIntRange;
+
+/**
+ * A utility structure describing a range of two double values.
+ */
+typedef struct ADoubleRange {
+ double mLower;
+ double mUpper;
+} ADoubleRange;
+
+// AMediaCodecInfo
+
+/**
+ * Get the canonical name of a codec.
+ *
+ * \return The char pointer to the canonical name.
+ * It is owned by the framework. No lifetime management needed for users.
+ *
+ * Return NULL if @param info is invalid.
+ */
+const char* AMediaCodecInfo_getCanonicalName(const AMediaCodecInfo *info) __INTRODUCED_IN(36);
+
+/**
+ * Query if the codec is an encoder.
+ */
+bool AMediaCodecInfo_isEncoder(const AMediaCodecInfo *info) __INTRODUCED_IN(36);
+
+/**
+ * Query if the codec is provided by the Android platform (false) or the device manufacturer (true).
+ */
+bool AMediaCodecInfo_isVendor(const AMediaCodecInfo *info) __INTRODUCED_IN(36);
+
+/**
+ * The type of codecs.
+ */
+typedef enum AMediaCodecType : int32_t {
+ /**
+     * Not a codec type. Used to indicate that an invalid operation occurred.
+ */
+ INVALID_CODEC_INFO = 0,
+
+ /**
+ * Software codec.
+ *
+ * Software-only codecs are more secure as they run in a tighter security sandbox.
+ * On the other hand, software-only codecs do not provide any performance guarantees.
+ */
+ SOFTWARE_ONLY = 1,
+
+ /**
+ * Hardware accelerated codec.
+ *
+ * Hardware codecs generally have higher performance or lower power consumption than
+ * software codecs, but since they are specific to each device,
+ * the actual performance details can vary.
+ */
+ HARDWARE_ACCELERATED = 2,
+
+ /**
+     * Software codec that has device access.
+ * Mainly referring to software codecs provided by vendors.
+ */
+ SOFTWARE_WITH_DEVICE_ACCESS = 3,
+} AMediaCodecType;
+
+/**
+ * Query if the codec is SOFTWARE_ONLY, HARDWARE_ACCELERATED or SOFTWARE_WITH_DEVICE_ACCESS.
+ *
+ * Return INVALID_CODEC_INFO if @param info is invalid.
+ */
+AMediaCodecType AMediaCodecInfo_getMediaCodecInfoType(
+ const AMediaCodecInfo *info) __INTRODUCED_IN(36);
+
+/**
+ * Get the supported media type of the codec.
+ *
+ * \return The char pointer to the media type.
+ * It is owned by the framework with infinite lifetime.
+ *
+ * Return NULL if @param info is invalid.
+ */
+const char* AMediaCodecInfo_getMediaType(const AMediaCodecInfo *info) __INTRODUCED_IN(36);
+
+/**
+ * Returns the maximum number of supported concurrent codec instances.
+ *
+ * This is a hint for an upper bound. Applications should not expect to successfully
+ * operate more instances than the returned value, but the actual number of
+ * concurrently operable instances may be less as it depends on the available
+ * resources at time of use.
+ *
+ * Return -1 if @param info is invalid.
+ */
+int32_t AMediaCodecInfo_getMaxSupportedInstances(const AMediaCodecInfo *info) __INTRODUCED_IN(36);
+
+/**
+ * Query codec feature capabilities.
+ *
+ * These are the features supported for use with the codec. They
+ * include optional features that can be turned on, as well as
+ * features that are always on.
+ *
+ * Return 1 if the feature is supported;
+ * Return 0 if the feature is unsupported;
+ * Return -1 if @param featureName is invalid.
+ */
+int32_t AMediaCodecInfo_isFeatureSupported(const AMediaCodecInfo *info,
+ const char *featureName) __INTRODUCED_IN(36);
+
+/**
+ * Query codec feature requirements.
+ *
+ * These features are required to be used by the codec, and as such,
+ * they are always turned on.
+ *
+ * Return 1 if the feature is required;
+ * Return 0 if the feature is not required;
+ * Return -1 if @param featureName is invalid.
+ */
+int32_t AMediaCodecInfo_isFeatureRequired(const AMediaCodecInfo *info,
+ const char *featureName) __INTRODUCED_IN(36);
+
+/**
+ * Query whether codec supports a given @param format.
+ *
+ * Return 1 if the format is supported;
+ * Return 0 if the format is unsupported;
+ * Return -1 if @param format is invalid.
+ */
+int32_t AMediaCodecInfo_isFormatSupported(const AMediaCodecInfo *info,
+ const AMediaFormat *format) __INTRODUCED_IN(36);
+
+/**
+ * Get the ACodecAudioCapabilities from the given AMediaCodecInfo.
+ *
+ * @param outAudioCaps The pointer to the output ACodecAudioCapabilities.
+ * It is owned by the framework and has an infinite lifetime.
+ *
+ * Return AMEDIA_OK if successfully got the ACodecAudioCapabilities.
+ * Return AMEDIA_ERROR_UNSUPPORTED if the codec is not an audio codec.
+ * Return AMEDIA_ERROR_INVALID_PARAMETER if @param info is invalid.
+ */
+media_status_t AMediaCodecInfo_getAudioCapabilities(const AMediaCodecInfo *info,
+ const ACodecAudioCapabilities **outAudioCaps) __INTRODUCED_IN(36);
+
+/**
+ * Get the ACodecVideoCapabilities from the given AMediaCodecInfo.
+ *
+ * @param outVideoCaps The pointer to the output ACodecVideoCapabilities.
+ * It is owned by the framework and has an infinite lifetime.
+ *
+ * Return AMEDIA_OK if successfully got the ACodecVideoCapabilities.
+ * Return AMEDIA_ERROR_UNSUPPORTED if the codec is not a video codec.
+ * Return AMEDIA_ERROR_INVALID_PARAMETER if @param info is invalid.
+ */
+media_status_t AMediaCodecInfo_getVideoCapabilities(const AMediaCodecInfo *info,
+ const ACodecVideoCapabilities **outVideoCaps) __INTRODUCED_IN(36);
+
+/**
+ * Get the ACodecEncoderCapabilities from the given AMediaCodecInfo.
+ *
+ * @param outEncoderCaps The pointer to the output ACodecEncoderCapabilities.
+ * It is owned by the framework and has an infinite lifetime.
+ *
+ * Return AMEDIA_OK if successfully got the ACodecEncoderCapabilities.
+ * Return AMEDIA_ERROR_UNSUPPORTED if the codec is not an encoder.
+ * Return AMEDIA_ERROR_INVALID_PARAMETER if @param info is invalid.
+ */
+media_status_t AMediaCodecInfo_getEncoderCapabilities(const AMediaCodecInfo *info,
+ const ACodecEncoderCapabilities **outEncoderCaps) __INTRODUCED_IN(36);
+
+// ACodecAudioCapabilities
+
+/**
+ * Get the range of supported bitrates in bits/second.
+ *
+ * @param outRange The pointer to the range of supported bitrates.
+ * Users are responsible for allocating a valid AIntRange structure and
+ * managing the lifetime of it.
+ *
+ * Return AMEDIA_OK if got bitrates successfully.
+ * Return AMEDIA_ERROR_INVALID_PARAMETER if any of @param audioCaps and @param outRange is invalid.
+ */
+media_status_t ACodecAudioCapabilities_getBitrateRange(const ACodecAudioCapabilities *audioCaps,
+ AIntRange *outRange) __INTRODUCED_IN(36);
+
+/**
+ * Get the array of supported sample rates.
+ *
+ * The array is sorted in ascending order.
+ *
+ * @param outArrayPtr The pointer to the output sample rates array.
+ * The array is owned by the framework and has an infinite lifetime.
+ * @param outCount The size of the output array.
+ *
+ * Return AMEDIA_OK if the codec supports only discrete sample rate values;
+ * otherwise, return AMEDIA_ERROR_UNSUPPORTED.
+ * Return AMEDIA_ERROR_INVALID_PARAMETER if any of @param audioCaps, @param outArrayPtr
+ * and @param outCount is invalid.
+ */
+media_status_t ACodecAudioCapabilities_getSupportedSampleRates(
+ const ACodecAudioCapabilities *audioCaps, const int **outArrayPtr,
+ size_t *outCount) __INTRODUCED_IN(36);
+
+/**
+ * Get the array of supported sample rate ranges.
+ *
+ * The array is sorted in ascending order, and the ranges are distinct (non-intersecting).
+ *
+ * @param outArrayPtr The pointer to the out sample rate ranges array.
+ * The array is owned by the framework and has an infinite lifetime.
+ * @param outCount The size of the out array.
+ *
+ * Return AMEDIA_OK if got the sample rate ranges successfully.
+ * Return AMEDIA_ERROR_INVALID_PARAMETER if any of @param audioCaps, @param outArrayPtr
+ * and @param outCount is invalid.
+ */
+media_status_t ACodecAudioCapabilities_getSupportedSampleRateRanges(
+ const ACodecAudioCapabilities *audioCaps,
+ const AIntRange **outArrayPtr, size_t *outCount) __INTRODUCED_IN(36);
+
+/**
+ * Return the maximum number of input channels supported.
+ *
+ * Return -1 if @param audioCaps is invalid.
+ */
+int32_t ACodecAudioCapabilities_getMaxInputChannelCount(
+ const ACodecAudioCapabilities *audioCaps) __INTRODUCED_IN(36);
+
+/**
+ * Returns the minimum number of input channels supported.
+ * This is often 1, but does vary for certain mime types.
+ *
+ * Return -1 if @param audioCaps is invalid.
+ */
+int32_t ACodecAudioCapabilities_getMinInputChannelCount(
+ const ACodecAudioCapabilities *audioCaps) __INTRODUCED_IN(36);
+
+/**
+ * Get an array of ranges representing the number of input channels supported.
+ * The codec supports any number of input channels within this range.
+ * For many codecs, this will be a single range [1..N], for some N.
+ *
+ * The array is sorted in ascending order, and the ranges are distinct (non-intersecting).
+ *
+ * @param outArrayPtr The pointer to the output array of input-channels ranges.
+ * The array is owned by the framework and has an infinite lifetime.
+ * @param outCount The size of the output array.
+ *
+ * Return AMEDIA_OK if got the input channel array successfully.
+ * Return AMEDIA_ERROR_INVALID_PARAMETER if @param audioCaps is invalid.
+ */
+media_status_t ACodecAudioCapabilities_getInputChannelCountRanges(
+ const ACodecAudioCapabilities *audioCaps,
+ const AIntRange **outArrayPtr, size_t *outCount) __INTRODUCED_IN(36);
+
+/**
+ * Query whether the sample rate is supported by the codec.
+ *
+ * Return 1 if the sample rate is supported.
+ * Return 0 if the sample rate is unsupported.
+ * Return -1 if @param audioCaps is invalid.
+ */
+int32_t ACodecAudioCapabilities_isSampleRateSupported(const ACodecAudioCapabilities *audioCaps,
+ int32_t sampleRate) __INTRODUCED_IN(36);
+
+// ACodecPerformancePoint
+
+/**
+ * Create a performance point for a given frame size and frame rate.
+ *
+ * Performance points are defined by number of pixels, pixel rate and frame rate.
+ *
+ * Users are responsible for calling
+ * ACodecPerformancePoint_delete(ACodecPerformancePoint *performancePoint) after use.
+ *
+ * @param width width of the frame in pixels
+ * @param height height of the frame in pixels
+ * @param frameRate frame rate in frames per second
+ */
+ACodecPerformancePoint* ACodecPerformancePoint_create(int32_t width, int32_t height,
+ int32_t frameRate) __INTRODUCED_IN(36);
+
+/**
+ * Delete a created performance point.
+ *
+ * Return AMEDIA_OK if it is successfully deleted.
+ * Return AMEDIA_ERROR_INVALID_PARAMETER if @param performancePoint is invalid.
+ */
+media_status_t ACodecPerformancePoint_delete(
+ ACodecPerformancePoint *performancePoint) __INTRODUCED_IN(36);
+
+/**
+ * Checks whether the performance point covers a media format.
+ *
+ * @param format Stream format considered.
+ * Return true if the performance point covers the format.
+ */
+bool ACodecPerformancePoint_coversFormat(const ACodecPerformancePoint *performancePoint,
+ const AMediaFormat *format) __INTRODUCED_IN(36);
+
+/**
+ * Checks whether a performance point covers another performance point.
+ *
+ * Use this method to determine if a performance point advertised by a codec covers the
+ * performance point required. This method can also be used to establish a loose ordering, as it
+ * is transitive.
+ *
+ * A performance point represents an upper bound. This means that
+ * it covers all performance points with a lower pixel count, pixel rate and frame rate.
+ *
+ * Return true if @param one covers @param another.
+ */
+bool ACodecPerformancePoint_covers(const ACodecPerformancePoint *one,
+ const ACodecPerformancePoint *another) __INTRODUCED_IN(36);
+
+/**
+ * Checks whether two performance points are equal.
+ */
+bool ACodecPerformancePoint_equals(const ACodecPerformancePoint *one,
+ const ACodecPerformancePoint *another) __INTRODUCED_IN(36);
+
+// ACodecVideoCapabilities
+
+/**
+ * Get the range of supported bitrates in bits/second.
+ *
+ * @param outRange The pointer to the output range of supported bitrates.
+ * Users are responsible for allocating a valid AIntRange structure and
+ * managing the lifetime of it.
+ *
+ * Return AMEDIA_OK if got the supported bitrates successfully.
+ * Return AMEDIA_ERROR_INVALID_PARAMETER if any of @param videoCaps and @param outRange is invalid.
+ */
+media_status_t ACodecVideoCapabilities_getBitrateRange(const ACodecVideoCapabilities *videoCaps,
+ AIntRange *outRange) __INTRODUCED_IN(36);
+
+/**
+ * Get the range of supported video widths.
+ *
+ * @param outRange The pointer to the output range of supported widths.
+ * Users are responsible for allocating a valid AIntRange structure and
+ * managing the lifetime of it.
+ *
+ * Return AMEDIA_OK if got the supported video widths successfully.
+ * Return AMEDIA_ERROR_INVALID_PARAMETER if any of @param videoCaps and @param outRange is invalid.
+ */
+media_status_t ACodecVideoCapabilities_getSupportedWidths(const ACodecVideoCapabilities *videoCaps,
+ AIntRange *outRange) __INTRODUCED_IN(36);
+
+/**
+ * Get the range of supported video heights.
+ *
+ * @param outRange The pointer to the output range of supported heights.
+ * Users are responsible for allocating a valid AIntRange structure and
+ * managing the lifetime of it.
+ *
+ * Return AMEDIA_OK if got the supported video heights successfully.
+ * Return AMEDIA_ERROR_INVALID_PARAMETER if any of @param videoCaps and @param outRange is invalid.
+ */
+media_status_t ACodecVideoCapabilities_getSupportedHeights(const ACodecVideoCapabilities *videoCaps,
+ AIntRange *outRange) __INTRODUCED_IN(36);
+
+/**
+ * Return the alignment requirement for video width (in pixels).
+ *
+ * This is a power-of-2 value that video width must be a multiple of.
+ *
+ * Return -1 if @param videoCaps is invalid.
+ */
+int32_t ACodecVideoCapabilities_getWidthAlignment(
+ const ACodecVideoCapabilities *videoCaps) __INTRODUCED_IN(36);
+
+/**
+ * Return the alignment requirement for video height (in pixels).
+ *
+ * This is a power-of-2 value that video height must be a multiple of.
+ *
+ * Return -1 if @param videoCaps is invalid.
+ */
+int32_t ACodecVideoCapabilities_getHeightAlignment(
+ const ACodecVideoCapabilities *videoCaps) __INTRODUCED_IN(36);
+
+/**
+ * Get the range of supported frame rates.
+ *
+ * This is not a performance indicator. Rather, it expresses the limits specified in the coding
+ * standard, based on the complexities of encoding material for later playback at a certain
+ * frame rate, or the decoding of such material in non-realtime.
+ *
+ * @param outRange The pointer to the output range of supported frame rates.
+ * Users are responsible for allocating a valid AIntRange structure and
+ * managing the lifetime of it.
+ *
+ * \return AMEDIA_OK if got the frame rate range successfully.
+ * \return AMEDIA_ERROR_INVALID_PARAMETER if any of @param videoCaps and @param outRange is invalid.
+ */
+media_status_t ACodecVideoCapabilities_getSupportedFrameRates(
+ const ACodecVideoCapabilities *videoCaps, AIntRange *outRange) __INTRODUCED_IN(36);
+
+/**
+ * Get the range of supported video widths for a video height.
+ *
+ * @param outRange The pointer to the range of supported widths.
+ * Users are responsible for allocating a valid AIntRange structure and
+ * managing the lifetime of it.
+ *
+ * Return AMEDIA_OK if got the supported video width range successfully.
+ * Return AMEDIA_ERROR_UNSUPPORTED if the height query is not supported.
+ * Return AMEDIA_ERROR_INVALID_PARAMETER if any of @param videoCaps and @param outRange is invalid.
+ */
+media_status_t ACodecVideoCapabilities_getSupportedWidthsFor(
+ const ACodecVideoCapabilities *videoCaps, int32_t height,
+ AIntRange *outRange) __INTRODUCED_IN(36);
+
+/**
+ * Get the range of supported video heights for a video width.
+ *
+ * @param outRange The pointer to the range of supported heights.
+ * Users are responsible for allocating a valid AIntRange structure and
+ * managing the lifetime of it.
+ *
+ * Return AMEDIA_OK if got the supported video height range successfully.
+ * Return AMEDIA_ERROR_UNSUPPORTED if the width query is not supported.
+ * Return AMEDIA_ERROR_INVALID_PARAMETER if any of @param videoCaps and @param outRange is invalid.
+ */
+media_status_t ACodecVideoCapabilities_getSupportedHeightsFor(
+ const ACodecVideoCapabilities *videoCaps, int32_t width,
+ AIntRange *outRange) __INTRODUCED_IN(36);
+
+/**
+ * Get the range of supported video frame rates for a video size.
+ *
+ * This is not a performance indicator. Rather, it expresses the limits specified in the coding
+ * standard, based on the complexities of encoding material of a given size for later playback at
+ * a certain frame rate, or the decoding of such material in non-realtime.
+ *
+ * @param outRange The pointer to the range of frame rates.
+ * Users are responsible for allocating a valid ADoubleRange structure and
+ * managing the lifetime of it.
+ *
+ * Return AMEDIA_OK if got the supported video frame rates successfully.
+ * Return AMEDIA_ERROR_UNSUPPORTED if the size query is not supported.
+ * Return AMEDIA_ERROR_INVALID_PARAMETER if any of @param videoCaps and @param outRange is invalid.
+ */
+media_status_t ACodecVideoCapabilities_getSupportedFrameRatesFor(
+ const ACodecVideoCapabilities *videoCaps, int32_t width, int32_t height,
+ ADoubleRange *outRange) __INTRODUCED_IN(36);
+
+/**
+ * Get the range of achievable video frame rates for a video size.
+ *
+ * This is based on manufacturer's performance measurements for this device and codec.
+ * The measurements may not be available for all codecs or devices.
+ *
+ * @param outRange The pointer to the range of frame rates.
+ * Users are responsible for allocating a valid ADoubleRange structure and
+ * managing the lifetime of it.
+ *
+ * Return AMEDIA_OK if got the achievable video frame rates successfully.
+ * Return AMEDIA_ERROR_UNSUPPORTED if the codec did not publish any measurement data.
+ * Return AMEDIA_ERROR_INVALID_PARAMETER if any of @param videoCaps and @param outRange is invalid.
+ */
+media_status_t ACodecVideoCapabilities_getAchievableFrameRatesFor(
+ const ACodecVideoCapabilities *videoCaps, int32_t width, int32_t height,
+ ADoubleRange *outRange) __INTRODUCED_IN(36);
+
+/**
+ * Get the supported performance points.
+ *
+ * @param outPerformancePointArray The pointer to the output performance points array.
+ * The array is owned by the framework and has an infinite
+ * lifetime.
+ * @param outCount The size of the output array.
+ *
+ * Return AMEDIA_OK if successfully got the performance points.
+ * Return AMEDIA_ERROR_INVALID_PARAMETER if @param videoCaps is invalid.
+ */
+media_status_t ACodecVideoCapabilities_getSupportedPerformancePoints(
+ const ACodecVideoCapabilities *videoCaps,
+ const ACodecPerformancePoint **outPerformancePointArray,
+ size_t *outCount) __INTRODUCED_IN(36);
+
+/**
+ * Return whether a given video size and frameRate combination is supported.
+ *
+ * Return 1 if the size and rate are supported.
+ * Return 0 if they are not supported.
+ * Return -1 if @param videoCaps is invalid.
+ */
+int32_t ACodecVideoCapabilities_areSizeAndRateSupported(const ACodecVideoCapabilities *videoCaps,
+ int32_t width, int32_t height, double frameRate) __INTRODUCED_IN(36);
+
+/**
+ * Return whether a given video size is supported.
+ *
+ * Return 1 if the size is supported.
+ * Return 0 if the size is not supported.
+ * Return -1 if @param videoCaps is invalid.
+ */
+int32_t ACodecVideoCapabilities_isSizeSupported(const ACodecVideoCapabilities *videoCaps,
+ int32_t width, int32_t height) __INTRODUCED_IN(36);
+
+// ACodecEncoderCapabilities
+
+/**
+ * Get the supported range of quality values.
+ *
+ * Quality is implementation-specific. As a general rule, a higher quality
+ * setting results in a better image quality and a lower compression ratio.
+ *
+ * @param outRange The pointer to the range of quality values.
+ * Users are responsible for allocating a valid AIntRange structure and
+ * managing the lifetime of it.
+ *
+ * Return AMEDIA_OK if successfully got the quality range.
+ * Return AMEDIA_ERROR_INVALID_PARAMETER if any of @param encoderCaps and @param outRange is invalid.
+ */
+media_status_t ACodecEncoderCapabilities_getQualityRange(
+ const ACodecEncoderCapabilities *encoderCaps,
+ AIntRange *outRange) __INTRODUCED_IN(36);
+
+/**
+ * Get the supported range of encoder complexity values.
+ *
+ * Some codecs may support multiple complexity levels, where higher complexity values use more
+ * encoder tools (e.g. perform more intensive calculations) to improve the quality or the
+ * compression ratio. Use a lower value to save power and/or time.
+ *
+ * @param outRange The pointer to the range of encoder complexity values.
+ * Users are responsible for allocating a valid AIntRange structure and
+ * managing the lifetime of it.
+ *
+ * Return AMEDIA_OK if successfully got the complexity range.
+ * Return AMEDIA_ERROR_INVALID_PARAMETER if any of @param encoderCaps and @param outRange is invalid.
+ */
+media_status_t ACodecEncoderCapabilities_getComplexityRange(
+ const ACodecEncoderCapabilities *encoderCaps,
+ AIntRange *outRange) __INTRODUCED_IN(36);
+
+/**
+ * Encoder bitrate modes.
+ */
+typedef enum ABiterateMode : int32_t {
+ BITRATE_MODE_CQ = 0,
+ BITRATE_MODE_VBR = 1,
+ BITRATE_MODE_CBR = 2,
+ BITRATE_MODE_CBR_FD = 3
+} ABiterateMode;
+
+/**
+ * Query whether a bitrate mode is supported.
+ *
+ * Return 1 if the bitrate mode is supported.
+ * Return 0 if the bitrate mode is unsupported.
+ * Return -1 if @param encoderCaps is invalid.
+ */
+int32_t ACodecEncoderCapabilities_isBitrateModeSupported(
+ const ACodecEncoderCapabilities *encoderCaps, ABiterateMode mode) __INTRODUCED_IN(36);
+
+__END_DECLS
+
+#endif //_NDK_MEDIA_CODEC_INFO_H
+
+/** @} */
\ No newline at end of file
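As a companion to the header above, a small sketch of the capability-query flow (not part of the patch; the codec name "c2.android.aac.decoder" is only illustrative, real code should use a name obtained from the codec store iteration):

#include <media/NdkMediaCodecInfo.h>
#include <media/NdkMediaCodecStore.h>
#include <cstdio>

// Look up a codec by name and print a few of its audio capabilities.
static void printAacDecoderCaps() {
    const AMediaCodecInfo *info = nullptr;
    if (AMediaCodecStore_getCodecInfo("c2.android.aac.decoder", &info) != AMEDIA_OK) {
        return;  // codec not present on this device
    }
    const ACodecAudioCapabilities *audioCaps = nullptr;
    if (AMediaCodecInfo_getAudioCapabilities(info, &audioCaps) != AMEDIA_OK) {
        return;  // not an audio codec
    }
    AIntRange bitrates;  // caller-allocated, as documented above
    if (ACodecAudioCapabilities_getBitrateRange(audioCaps, &bitrates) == AMEDIA_OK) {
        printf("bitrate range: %d..%d bps\n", (int)bitrates.mLower, (int)bitrates.mUpper);
    }
    printf("44.1 kHz supported: %d\n",
           (int)ACodecAudioCapabilities_isSampleRateSupported(audioCaps, 44100));
}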
diff --git a/media/ndk/include/media/NdkMediaCodecStore.h b/media/ndk/include/media/NdkMediaCodecStore.h
new file mode 100644
index 0000000..aab8689
--- /dev/null
+++ b/media/ndk/include/media/NdkMediaCodecStore.h
@@ -0,0 +1,144 @@
+/*
+ * Copyright (C) 2024 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+/**
+ * @addtogroup Media
+ * @{
+ */
+
+/**
+ * @file NdkMediaCodecStore.h
+ */
+
+/*
+ * This file defines an NDK API.
+ * Do not remove methods.
+ * Do not change method signatures.
+ * Do not change the value of constants.
+ * Do not change the size of any of the classes defined in here.
+ * Do not reference types that are not part of the NDK.
+ * Do not #include files that aren't part of the NDK.
+ */
+
+#ifndef _NDK_MEDIA_CODEC_STORE_H
+#define _NDK_MEDIA_CODEC_STORE_H
+
+#include <stdint.h>
+
+#include "NdkMediaCodecInfo.h"
+#include "NdkMediaError.h"
+#include "NdkMediaFormat.h"
+
+__BEGIN_DECLS
+
+/**
+ * The media type definition with bitfields indicating whether it is
+ * supported by decoders, encoders, or both.
+ */
+typedef struct AMediaCodecSupportedMediaType {
+ enum Mode : uint32_t {
+ FLAG_DECODER = 1 << 0,
+ FLAG_ENCODER = 1 << 1,
+ };
+
+ // The media type.
+ const char *mMediaType;
+ // bitfields for modes.
+ uint32_t mMode;
+} AMediaCodecSupportedMediaType;
+
+/**
+ * Get an array of all the supported media types of a device.
+ *
+ * @param outMediaTypes The pointer to the output AMediaCodecSupportedMediaType array.
+ *                      It is owned by the framework and has an infinite lifetime.
+ *
+ * @param outCount The size of the output array.
+ *
+ * Return AMEDIA_OK if successfully got the supported media types.
+ * Return AMEDIA_ERROR_INVALID_PARAMETER if @param outMediaTypes is invalid.
+ */
+media_status_t AMediaCodecStore_getSupportedMediaTypes(
+ const AMediaCodecSupportedMediaType **outMediaTypes, size_t *outCount) __INTRODUCED_IN(36);
+
+/**
+ * Get the next decoder info that supports the format.
+ *
+ * @param outCodecInfo should be set to NULL to start the iteration.
+ *                     Keep the last codecInfo you got from a previous call to get the next one.
+ *                     *outCodecInfo will be set to NULL once the end is reached.
+ *                     It is owned by the framework and has an infinite lifetime.
+ *
+ * @param format If set as NULL, this API will iterate through all available decoders.
+ *               If NOT NULL, it MUST contain the key "mime" specifying the media type.
+ *
+ * Return AMEDIA_OK if successfully got the info.
+ * Return AMEDIA_ERROR_INVALID_PARAMETER if @param outCodecInfo or @param format is invalid.
+ * Return AMEDIA_ERROR_UNSUPPORTED if there are no more decoders supporting the format.
+ *
+ * It is undefined behavior to call this API with a non-NULL @param outCodecInfo
+ * and a different @param format during an iteration.
+ */
+media_status_t AMediaCodecStore_findNextDecoderForFormat(
+ const AMediaFormat *format, const AMediaCodecInfo **outCodecInfo) __INTRODUCED_IN(36);
+
+/**
+ * Get the next encoder info that supports the format.
+ *
+ * @param outCodecInfo should be set to NULL to start the iteration.
+ *                     Keep the last codecInfo you got from a previous call to get the next one.
+ *                     *outCodecInfo will be set to NULL once the end is reached.
+ *                     It is owned by the framework and has an infinite lifetime.
+ *
+ * @param format If set as NULL, this API will iterate through all available encoders.
+ *               If NOT NULL, it MUST contain the key "mime" specifying the media type.
+ *
+ * Return AMEDIA_OK if successfully got the info.
+ * Return AMEDIA_ERROR_INVALID_PARAMETER if @param outCodecInfo is invalid.
+ * Return AMEDIA_ERROR_UNSUPPORTED if there are no more encoders supporting the format.
+ *
+ * It is undefined behavior to call this API with a non-NULL @param outCodecInfo
+ * and a different @param format during an iteration.
+ *
+ * No secure encoders will be returned.
+ */
+media_status_t AMediaCodecStore_findNextEncoderForFormat(
+ const AMediaFormat* format, const AMediaCodecInfo **outCodecInfo) __INTRODUCED_IN(36);
+
+/**
+ * Get the codecInfo corresponding to a given codec name.
+ *
+ * @param name Media codec name.
+ *             Users can get valid codec names from the AMediaCodecInfo structures
+ *             returned from findNextDecoder|EncoderForFormat methods.
+ *             Note that this name may not correspond to the name of the same codec used
+ *             by the SDK API, but it always will for codec names starting with "c2.".
+ *
+ * @param outCodecInfo Output parameter for the corresponding AMediaCodecInfo structure.
+ * It is owned by the framework and has an infinite lifetime.
+ *
+ * Return AMEDIA_OK if got the codecInfo successfully.
+ * Return AMEDIA_ERROR_UNSUPPORTED if no corresponding codec found.
+ * Return AMEDIA_ERROR_INVALID_PARAMETER if @param outCodecInfo or @param name is invalid.
+ */
+media_status_t AMediaCodecStore_getCodecInfo(
+ const char *name, const AMediaCodecInfo **outCodecInfo) __INTRODUCED_IN(36);
+
+__END_DECLS
+
+#endif //_NDK_MEDIA_CODEC_STORE_H
+
+/** @} */
\ No newline at end of file
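A minimal sketch of enumerating the supported media types through the API declared above (not part of the patch; error handling reduced to the essentials):

#include <media/NdkMediaCodecStore.h>
#include <cstdio>

// Print every media type known to the codec store and whether it can be decoded/encoded.
static void printSupportedMediaTypes() {
    const AMediaCodecSupportedMediaType *types = nullptr;
    size_t count = 0;
    if (AMediaCodecStore_getSupportedMediaTypes(&types, &count) != AMEDIA_OK) {
        return;
    }
    for (size_t i = 0; i < count; ++i) {
        printf("%s decoder=%d encoder=%d\n",
               types[i].mMediaType,
               (types[i].mMode & AMediaCodecSupportedMediaType::FLAG_DECODER) != 0,
               (types[i].mMode & AMediaCodecSupportedMediaType::FLAG_ENCODER) != 0);
    }
}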
diff --git a/media/ndk/libmediandk.map.txt b/media/ndk/libmediandk.map.txt
index 8fb203f..939f151 100644
--- a/media/ndk/libmediandk.map.txt
+++ b/media/ndk/libmediandk.map.txt
@@ -1,5 +1,33 @@
LIBMEDIANDK {
global:
+ ACodecAudioCapabilities_getBitrateRange; # introduced=36
+ ACodecAudioCapabilities_getInputChannelCountRanges; # introduced=36
+ ACodecAudioCapabilities_getMaxInputChannelCount; # introduced=36
+ ACodecAudioCapabilities_getMinInputChannelCount; # introduced=36
+ ACodecAudioCapabilities_getSupportedSampleRates; # introduced=36
+ ACodecAudioCapabilities_getSupportedSampleRateRanges; # introduced=36
+ ACodecAudioCapabilities_isSampleRateSupported; # introduced=36
+ ACodecEncoderCapabilities_getComplexityRange; # introduced=36
+ ACodecEncoderCapabilities_getQualityRange; # introduced=36
+ ACodecEncoderCapabilities_isBitrateModeSupported; # introduced=36
+ ACodecPerformancePoint_create; # introduced=36
+ ACodecPerformancePoint_covers; # introduced=36
+ ACodecPerformancePoint_coversFormat; # introduced=36
+ ACodecPerformancePoint_delete; # introduced=36
+ ACodecPerformancePoint_equals; # introduced=36
+ ACodecVideoCapabilities_areSizeAndRateSupported; # introduced=36
+ ACodecVideoCapabilities_getAchievableFrameRatesFor; # introduced=36
+ ACodecVideoCapabilities_getBitrateRange; # introduced=36
+ ACodecVideoCapabilities_getHeightAlignment; # introduced=36
+ ACodecVideoCapabilities_getSupportedFrameRates; # introduced=36
+ ACodecVideoCapabilities_getSupportedFrameRatesFor; # introduced=36
+ ACodecVideoCapabilities_getSupportedHeights; # introduced=36
+ ACodecVideoCapabilities_getSupportedHeightsFor; # introduced=36
+ ACodecVideoCapabilities_getSupportedPerformancePoints; # introduced=36
+ ACodecVideoCapabilities_getSupportedWidths; # introduced=36
+ ACodecVideoCapabilities_getSupportedWidthsFor; # introduced=36
+ ACodecVideoCapabilities_getWidthAlignment; # introduced=36
+ ACodecVideoCapabilities_isSizeSupported; # introduced=36
AImageReader_acquireLatestImage; # introduced=24
AImageReader_acquireLatestImageAsync; # introduced=26
AImageReader_acquireNextImage; # introduced=24
@@ -217,6 +245,22 @@
AMediaCodec_createPersistentInputSurface; # introduced=26
AMediaCodec_start;
AMediaCodec_stop;
+ AMediaCodecInfo_getAudioCapabilities; # introduced=36
+ AMediaCodecInfo_getEncoderCapabilities; # introduced=36
+ AMediaCodecInfo_getVideoCapabilities; # introduced=36
+ AMediaCodecInfo_getCanonicalName; # introduced=36
+ AMediaCodecInfo_getMaxSupportedInstances; # introduced=36
+ AMediaCodecInfo_getMediaCodecInfoType; # introduced=36
+ AMediaCodecInfo_getMediaType; # introduced=36
+ AMediaCodecInfo_isEncoder; # introduced=36
+ AMediaCodecInfo_isFeatureRequired; # introduced=36
+ AMediaCodecInfo_isFeatureSupported; # introduced=36
+ AMediaCodecInfo_isFormatSupported; # introduced=36
+ AMediaCodecInfo_isVendor; # introduced=36
+ AMediaCodecStore_getCodecInfo; # introduced=36
+ AMediaCodecStore_getSupportedMediaTypes; # introduced=36
+ AMediaCodecStore_findNextDecoderForFormat; # introduced=36
+ AMediaCodecStore_findNextEncoderForFormat; # introduced=36
AMediaCrypto_delete;
AMediaCrypto_isCryptoSchemeSupported;
AMediaCrypto_new;
diff --git a/services/audiopolicy/common/include/policy.h b/services/audiopolicy/common/include/policy.h
index d499222..0c03900 100644
--- a/services/audiopolicy/common/include/policy.h
+++ b/services/audiopolicy/common/include/policy.h
@@ -22,31 +22,77 @@
#include <media/AudioContainers.h>
+#include <string.h>
+
namespace android {
using StreamTypeVector = std::vector<audio_stream_type_t>;
+#define AUDIO_ENUM_QUOTE(x) #x
+#define AUDIO_ENUM_STRINGIFY(x) AUDIO_ENUM_QUOTE(x)
+#define AUDIO_DEFINE_ENUM_SYMBOL_V(symbol, value) symbol = value,
+#define AUDIO_DEFINE_STRINGIFY_CASE_V(symbol, _) case symbol: return AUDIO_ENUM_STRINGIFY(symbol);
+#define AUDIO_DEFINE_PARSE_CASE_V(symbol, _) \
+ if (strcmp(s, AUDIO_ENUM_STRINGIFY(symbol)) == 0) { *t = symbol; return true; } else
+#define AUDIO_DEFINE_MAP_ENTRY_V(symbol, _) { AUDIO_ENUM_STRINGIFY(symbol), symbol },
+
/**
* Legacy audio policy product strategies IDs. These strategies are supported by the default
* policy engine.
* IMPORTANT NOTE: the order of this enum is important as it determines the priority
- * between active strategies for routing decisions: lower enum value => higher prioriy
+ * between active strategies for routing decisions: lower enum value => higher priority
*/
+#define AUDIO_LEGACY_STRATEGY_LIST_DEF(V) \
+ V(STRATEGY_NONE, -1) \
+ V(STRATEGY_PHONE, 0) \
+ V(STRATEGY_SONIFICATION, 1) \
+ V(STRATEGY_ENFORCED_AUDIBLE, 2) \
+ V(STRATEGY_ACCESSIBILITY, 3) \
+ V(STRATEGY_SONIFICATION_RESPECTFUL, 4) \
+ V(STRATEGY_MEDIA, 5) \
+ V(STRATEGY_DTMF, 6) \
+ V(STRATEGY_CALL_ASSISTANT, 7) \
+ V(STRATEGY_TRANSMITTED_THROUGH_SPEAKER, 8) \
+ V(STRATEGY_REROUTING, 9) \
+ V(STRATEGY_PATCH, 10)
+
enum legacy_strategy {
- STRATEGY_NONE = -1,
- STRATEGY_PHONE,
- STRATEGY_SONIFICATION,
- STRATEGY_ENFORCED_AUDIBLE,
- STRATEGY_ACCESSIBILITY,
- STRATEGY_SONIFICATION_RESPECTFUL,
- STRATEGY_MEDIA,
- STRATEGY_DTMF,
- STRATEGY_CALL_ASSISTANT,
- STRATEGY_TRANSMITTED_THROUGH_SPEAKER,
- STRATEGY_REROUTING,
- STRATEGY_PATCH,
+ AUDIO_LEGACY_STRATEGY_LIST_DEF(AUDIO_DEFINE_ENUM_SYMBOL_V)
};
+inline const char* legacy_strategy_to_string(legacy_strategy t) {
+ switch (t) {
+ AUDIO_LEGACY_STRATEGY_LIST_DEF(AUDIO_DEFINE_STRINGIFY_CASE_V)
+ }
+ return "";
+}
+
+inline bool legacy_strategy_from_string(const char* s, legacy_strategy* t) {
+ AUDIO_LEGACY_STRATEGY_LIST_DEF(AUDIO_DEFINE_PARSE_CASE_V)
+ return false;
+}
+
+namespace audio_policy {
+
+struct legacy_strategy_map { const char *name; legacy_strategy id; };
+
+inline std::vector<legacy_strategy_map> getLegacyStrategyMap() {
+ return std::vector<legacy_strategy_map> {
+ AUDIO_LEGACY_STRATEGY_LIST_DEF(AUDIO_DEFINE_MAP_ENTRY_V)
+ };
+}
+
+} // namespace audio_policy
+
+#undef AUDIO_LEGACY_STRATEGY_LIST_DEF
+
+#undef AUDIO_DEFINE_MAP_ENTRY_V
+#undef AUDIO_DEFINE_PARSE_CASE_V
+#undef AUDIO_DEFINE_STRINGIFY_CASE_V
+#undef AUDIO_DEFINE_ENUM_SYMBOL_V
+#undef AUDIO_ENUM_STRINGIFY
+#undef AUDIO_ENUM_QUOTE
+
static const audio_attributes_t defaultAttr = AUDIO_ATTRIBUTES_INITIALIZER;
static const std::set<audio_usage_t > gHighPriorityUseCases = {
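The policy.h change above switches legacy_strategy to an X-macro list so that the enum, its to-string switch, the from-string parser and the name/id map all expand from a single definition. A reduced, self-contained sketch of the same pattern, using hypothetical COLOR names rather than the real strategies:

// One list definition drives every expansion.
#define COLOR_LIST_DEF(V) \
    V(COLOR_RED, 0)       \
    V(COLOR_GREEN, 1)     \
    V(COLOR_BLUE, 2)

#define DEFINE_ENUM_SYMBOL_V(symbol, value) symbol = value,
#define DEFINE_STRINGIFY_CASE_V(symbol, _) case symbol: return #symbol;

// Expands to: COLOR_RED = 0, COLOR_GREEN = 1, COLOR_BLUE = 2,
enum color { COLOR_LIST_DEF(DEFINE_ENUM_SYMBOL_V) };

// Expands to one 'case' per enumerator, so the names can never drift from the enum.
inline const char* color_to_string(color c) {
    switch (c) {
        COLOR_LIST_DEF(DEFINE_STRINGIFY_CASE_V)
    }
    return "";
}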
diff --git a/services/audiopolicy/common/managerdefinitions/include/AudioPolicyMix.h b/services/audiopolicy/common/managerdefinitions/include/AudioPolicyMix.h
index f066c09..b29033e 100644
--- a/services/audiopolicy/common/managerdefinitions/include/AudioPolicyMix.h
+++ b/services/audiopolicy/common/managerdefinitions/include/AudioPolicyMix.h
@@ -147,6 +147,7 @@
private:
bool mixMatch(const AudioMix* mix, size_t mixIndex,
const audio_attributes_t& attributes,
+ const audio_output_flags_t outputFlags,
const audio_config_base_t& config,
uid_t uid,
audio_session_t session);
diff --git a/services/audiopolicy/common/managerdefinitions/src/AudioPolicyMix.cpp b/services/audiopolicy/common/managerdefinitions/src/AudioPolicyMix.cpp
index 3430f4b..ea78a5d 100644
--- a/services/audiopolicy/common/managerdefinitions/src/AudioPolicyMix.cpp
+++ b/services/audiopolicy/common/managerdefinitions/src/AudioPolicyMix.cpp
@@ -361,7 +361,7 @@
continue; // Primary output already found
}
- if(!mixMatch(policyMix.get(), i, attributes, config, uid, session)) {
+ if(!mixMatch(policyMix.get(), i, attributes, flags, config, uid, session)) {
ALOGV("%s: Mix %zu: does not match", __func__, i);
continue; // skip the mix
}
@@ -422,8 +422,8 @@
}
bool AudioPolicyMixCollection::mixMatch(const AudioMix* mix, size_t mixIndex,
- const audio_attributes_t& attributes, const audio_config_base_t& config,
- uid_t uid, audio_session_t session) {
+ const audio_attributes_t& attributes, const audio_output_flags_t outputFlags,
+ const audio_config_base_t& config, uid_t uid, audio_session_t session) {
if (mix->mMixType == MIX_TYPE_PLAYERS) {
// Loopback render mixes are created from a public API and thus restricted
@@ -451,12 +451,17 @@
}
// Permit match only if requested format and mix format are PCM and can be format
- // adapted by the mixer, or are the same (compressed) format.
+ // adapted by the mixer, or are the same format on direct output.
if (!is_mix_loopback(mix->mRouteFlags) &&
- !((audio_is_linear_pcm(config.format) && audio_is_linear_pcm(mix->mFormat.format)) ||
- (config.format == mix->mFormat.format)) &&
- config.format != AUDIO_CONFIG_BASE_INITIALIZER.format) {
- return false;
+ config.format != AUDIO_CONFIG_BASE_INITIALIZER.format) {
+ if (!audio_output_is_mixed_output_flags(outputFlags)) {
+ // Direct output must match format exactly.
+ if (config.format != mix->mFormat.format) return false;
+ } else {
+ // If mixable, both requested and mix format must be linear pcm.
+ if (!audio_is_linear_pcm(config.format) ||
+ !audio_is_linear_pcm(mix->mFormat.format)) return false;
+ }
}
// if there is an address match, prioritize that match
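For readability, the format check that mixMatch() now performs can be restated as a small standalone predicate (a sketch, not the actual code; it assumes AUDIO_FORMAT_DEFAULT is the "unspecified" format carried by AUDIO_CONFIG_BASE_INITIALIZER):

#include <system/audio.h>

// Loopback mixes and unspecified formats always pass. Otherwise, a direct output requires an
// exact format match, while a mixed output only requires both formats to be linear PCM so the
// mixer can adapt them.
static bool mixFormatCompatible(bool isLoopbackMix, bool isMixedOutput,
                                audio_format_t requested, audio_format_t mixFormat) {
    if (isLoopbackMix || requested == AUDIO_FORMAT_DEFAULT) {
        return true;
    }
    if (!isMixedOutput) {
        return requested == mixFormat;
    }
    return audio_is_linear_pcm(requested) && audio_is_linear_pcm(mixFormat);
}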
diff --git a/services/audiopolicy/engine/common/include/EngineBase.h b/services/audiopolicy/engine/common/include/EngineBase.h
index edb2e29..4445b66 100644
--- a/services/audiopolicy/engine/common/include/EngineBase.h
+++ b/services/audiopolicy/engine/common/include/EngineBase.h
@@ -117,9 +117,10 @@
AudioDeviceTypeAddrVector &devices) const override;
engineConfig::ParsingResult loadAudioPolicyEngineConfig(
- const media::audio::common::AudioHalEngineConfig& aidlConfig);
+ const media::audio::common::AudioHalEngineConfig& aidlConfig, bool);
- engineConfig::ParsingResult loadAudioPolicyEngineConfig(const std::string& xmlFilePath = "");
+ engineConfig::ParsingResult loadAudioPolicyEngineConfig(
+ const std::string& xmlFilePath = "", bool isConfigurable = false);
const ProductStrategyMap &getProductStrategies() const { return mProductStrategies; }
diff --git a/services/audiopolicy/engine/common/src/EngineBase.cpp b/services/audiopolicy/engine/common/src/EngineBase.cpp
index fb8379e..0799399 100644
--- a/services/audiopolicy/engine/common/src/EngineBase.cpp
+++ b/services/audiopolicy/engine/common/src/EngineBase.cpp
@@ -126,7 +126,7 @@
}
engineConfig::ParsingResult EngineBase::loadAudioPolicyEngineConfig(
- const media::audio::common::AudioHalEngineConfig& aidlConfig)
+ const media::audio::common::AudioHalEngineConfig& aidlConfig, bool)
{
engineConfig::ParsingResult result = engineConfig::convert(aidlConfig);
if (result.parsedConfig == nullptr) {
@@ -141,7 +141,8 @@
return processParsingResult(std::move(result));
}
-engineConfig::ParsingResult EngineBase::loadAudioPolicyEngineConfig(const std::string& xmlFilePath)
+engineConfig::ParsingResult EngineBase::loadAudioPolicyEngineConfig(
+ const std::string& xmlFilePath, bool isConfigurable)
{
auto fileExists = [](const char* path) {
struct stat fileStat;
@@ -150,7 +151,7 @@
const std::string filePath = xmlFilePath.empty() ? engineConfig::DEFAULT_PATH : xmlFilePath;
engineConfig::ParsingResult result =
fileExists(filePath.c_str()) ?
- engineConfig::parse(filePath.c_str()) : engineConfig::ParsingResult{};
+ engineConfig::parse(filePath.c_str(), isConfigurable) : engineConfig::ParsingResult{};
if (result.parsedConfig == nullptr) {
ALOGD("%s: No configuration found, using default matching phone experience.", __FUNCTION__);
engineConfig::Config config = gDefaultEngineConfig;
diff --git a/services/audiopolicy/engine/config/include/EngineConfig.h b/services/audiopolicy/engine/config/include/EngineConfig.h
index 054bdae..8a4fc88 100644
--- a/services/audiopolicy/engine/config/include/EngineConfig.h
+++ b/services/audiopolicy/engine/config/include/EngineConfig.h
@@ -116,7 +116,7 @@
/** Parses the provided audio policy usage configuration.
* @return audio policy usage @see Config
*/
-ParsingResult parse(const char* path = DEFAULT_PATH);
+ParsingResult parse(const char* path = DEFAULT_PATH, bool isConfigurable = false);
android::status_t parseLegacyVolumes(VolumeGroups &volumeGroups);
ParsingResult convert(const ::android::media::audio::common::AudioHalEngineConfig& aidlConfig);
// Exposed for testing.
diff --git a/services/audiopolicy/engine/config/src/EngineConfig.cpp b/services/audiopolicy/engine/config/src/EngineConfig.cpp
index 714ab78..b8d95ee 100644
--- a/services/audiopolicy/engine/config/src/EngineConfig.cpp
+++ b/services/audiopolicy/engine/config/src/EngineConfig.cpp
@@ -52,6 +52,8 @@
namespace {
+static bool gIsConfigurableEngine = false;
+
ConversionResult<std::string> aidl2legacy_AudioHalProductStrategy_ProductStrategyType(int id) {
using AudioProductStrategyType = media::audio::common::AudioProductStrategyType;
@@ -547,9 +549,16 @@
if (!convertTo(idLiteral, id)) {
return BAD_VALUE;
}
- ALOGV("%s: %s, %s = %d", __FUNCTION__, name.c_str(), Attributes::id, id);
+ } else {
+ legacy_strategy legacyId;
+ if (legacy_strategy_from_string(name.c_str(), &legacyId)) {
+ id = legacyId;
+ } else if (!gIsConfigurableEngine) {
+ return BAD_VALUE;
+ }
+ // With a configurable engine it can be a vendor-provided strategy name.
}
- ALOGV("%s: %s = %s", __FUNCTION__, Attributes::name, name.c_str());
+ ALOGV("%s: %s, %s = %d", __FUNCTION__, name.c_str(), Attributes::id, id);
size_t skipped = 0;
AttributesGroups attrGroups;
@@ -776,7 +785,7 @@
} // namespace
-ParsingResult parse(const char* path) {
+ParsingResult parse(const char* path, bool isConfigurable) {
XmlErrorHandler errorHandler;
auto doc = make_xmlUnique(xmlParseFile(path));
if (doc == NULL) {
@@ -801,6 +810,7 @@
ALOGE("%s: No version found", __func__);
return {nullptr, 0};
}
+ gIsConfigurableEngine = isConfigurable;
size_t nbSkippedElements = 0;
auto config = std::make_unique<Config>();
config->version = std::stof(version);
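The parser change above can be summarized as: a ProductStrategy may now be identified by a built-in legacy strategy name, and unknown names are tolerated only for the configurable engine. A sketch with a hypothetical helper (not the parser itself; it relies on the legacy_strategy_from_string() helper added to policy.h in this change):

#include <string>
// Requires policy.h from this change for legacy_strategy and legacy_strategy_from_string().

// Returns whether the strategy name is acceptable; *id is only updated when the name maps to
// a built-in legacy strategy. Vendor-defined names are accepted only by the configurable engine.
static bool acceptStrategyName(const std::string& name, bool isConfigurableEngine, int* id) {
    legacy_strategy legacyId;
    if (legacy_strategy_from_string(name.c_str(), &legacyId)) {
        *id = legacyId;
        return true;
    }
    return isConfigurableEngine;
}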
diff --git a/services/audiopolicy/engineconfigurable/src/Engine.cpp b/services/audiopolicy/engineconfigurable/src/Engine.cpp
index 45da7b0..ad49b19 100644
--- a/services/audiopolicy/engineconfigurable/src/Engine.cpp
+++ b/services/audiopolicy/engineconfigurable/src/Engine.cpp
@@ -112,7 +112,7 @@
template<typename T>
status_t Engine::loadWithFallback(const T& configSource) {
- auto result = EngineBase::loadAudioPolicyEngineConfig(configSource);
+ auto result = EngineBase::loadAudioPolicyEngineConfig(configSource, true /*isConfigurable*/);
ALOGE_IF(result.nbSkippedElement != 0,
"Policy Engine configuration is partially invalid, skipped %zu elements",
result.nbSkippedElement);
diff --git a/services/audiopolicy/enginedefault/src/Engine.cpp b/services/audiopolicy/enginedefault/src/Engine.cpp
index 1082d31..b140a9d 100644
--- a/services/audiopolicy/enginedefault/src/Engine.cpp
+++ b/services/audiopolicy/enginedefault/src/Engine.cpp
@@ -38,22 +38,8 @@
namespace android::audio_policy {
-struct legacy_strategy_map { const char *name; legacy_strategy id; };
static const std::vector<legacy_strategy_map>& getLegacyStrategy() {
- static const std::vector<legacy_strategy_map> legacyStrategy = {
- { "STRATEGY_NONE", STRATEGY_NONE },
- { "STRATEGY_MEDIA", STRATEGY_MEDIA },
- { "STRATEGY_PHONE", STRATEGY_PHONE },
- { "STRATEGY_SONIFICATION", STRATEGY_SONIFICATION },
- { "STRATEGY_SONIFICATION_RESPECTFUL", STRATEGY_SONIFICATION_RESPECTFUL },
- { "STRATEGY_DTMF", STRATEGY_DTMF },
- { "STRATEGY_ENFORCED_AUDIBLE", STRATEGY_ENFORCED_AUDIBLE },
- { "STRATEGY_TRANSMITTED_THROUGH_SPEAKER", STRATEGY_TRANSMITTED_THROUGH_SPEAKER },
- { "STRATEGY_ACCESSIBILITY", STRATEGY_ACCESSIBILITY },
- { "STRATEGY_REROUTING", STRATEGY_REROUTING },
- { "STRATEGY_PATCH", STRATEGY_PATCH }, // boiler to manage stream patch volume
- { "STRATEGY_CALL_ASSISTANT", STRATEGY_CALL_ASSISTANT },
- };
+ static const std::vector<legacy_strategy_map> legacyStrategy = getLegacyStrategyMap();
return legacyStrategy;
}
@@ -68,7 +54,7 @@
template<typename T>
status_t Engine::loadWithFallback(const T& configSource) {
- auto result = EngineBase::loadAudioPolicyEngineConfig(configSource);
+ auto result = EngineBase::loadAudioPolicyEngineConfig(configSource, false /*isConfigurable*/);
ALOGE_IF(result.nbSkippedElement != 0,
"Policy Engine configuration is partially invalid, skipped %zu elements",
result.nbSkippedElement);
diff --git a/services/audiopolicy/tests/audiopolicymanager_tests.cpp b/services/audiopolicy/tests/audiopolicymanager_tests.cpp
index 82351e8..40e99af 100644
--- a/services/audiopolicy/tests/audiopolicymanager_tests.cpp
+++ b/services/audiopolicy/tests/audiopolicymanager_tests.cpp
@@ -179,6 +179,7 @@
void SetUp() override;
void TearDown() override;
virtual void SetUpManagerConfig();
+ virtual std::string getEngineConfigFilePath() const { return sTestEngineConfig; }
void dumpToLog();
// When explicit routing is needed, selectedDeviceId needs to be set as the wanted port
@@ -217,6 +218,7 @@
const std::string &address, audio_port_v7 *foundPort);
static audio_port_handle_t getDeviceIdFromPatch(const struct audio_patch* patch);
virtual AudioPolicyManagerTestClient* getClient() { return new AudioPolicyManagerTestClient; }
+ void verifyBuiltInStrategyIdsAreValid();
sp<AudioPolicyConfig> mConfig;
std::unique_ptr<AudioPolicyManagerTestClient> mClient;
@@ -231,7 +233,7 @@
void AudioPolicyManagerTest::SetUp() {
mClient.reset(getClient());
ASSERT_NO_FATAL_FAILURE(SetUpManagerConfig()); // Subclasses may want to customize the config.
- mManager.reset(new AudioPolicyTestManager(mConfig, mClient.get(), sTestEngineConfig));
+ mManager.reset(new AudioPolicyTestManager(mConfig, mClient.get(), getEngineConfigFilePath()));
ASSERT_EQ(NO_ERROR, mManager->initialize());
ASSERT_EQ(NO_ERROR, mManager->initCheck());
}
@@ -397,6 +399,16 @@
return AUDIO_PORT_HANDLE_NONE;
}
+void AudioPolicyManagerTest::verifyBuiltInStrategyIdsAreValid() {
+ AudioProductStrategyVector strategies;
+ ASSERT_EQ(NO_ERROR, mManager->listAudioProductStrategies(strategies));
+ for (const auto& strategy : strategies) {
+ // Since ids are unsigned, this will also cover the case when the id is 'NONE' which is -1.
+ EXPECT_LT(strategy.getId(),
+ media::audio::common::AudioHalProductStrategy::VENDOR_STRATEGY_ID_START)
+ << strategy.getName();
+ }
+}
TEST_F(AudioPolicyManagerTest, InitSuccess) {
// SetUp must finish with no assertions.
@@ -454,6 +466,20 @@
// TODO: Add patch creation tests that involve already existing patch
+TEST_F(AudioPolicyManagerTest, BuiltInStrategyIdsAreValid) {
+ verifyBuiltInStrategyIdsAreValid();
+}
+
+class AudioPolicyManagerTestWithDefaultEngineConfig : public AudioPolicyManagerTest {
+ protected:
+ // The APM will use the default engine config from EngineDefaultConfig.h.
+ std::string getEngineConfigFilePath() const override { return ""; }
+};
+
+TEST_F(AudioPolicyManagerTestWithDefaultEngineConfig, BuiltInStrategyIdsAreValid) {
+ verifyBuiltInStrategyIdsAreValid();
+}
+
enum
{
MSD_AUDIO_PATCH_COUNT_NUM_AUDIO_PATCHES_INDEX = 0,