Merge "Fixed LinearBlock thumbnail crash issue" into main am: 5d1e90cd61 am: b281428b5c
Original change: https://android-review.googlesource.com/c/platform/frameworks/av/+/3351159
Change-Id: I5f620040bb6e201d99c57ff2d5bf84f16c166b8b
Signed-off-by: Automerger Merge Worker <android-build-automerger-merge-worker@system.gserviceaccount.com>
diff --git a/aidl/com/android/media/permission/PermissionEnum.aidl b/aidl/com/android/media/permission/PermissionEnum.aidl
index b08db44..7badb87 100644
--- a/aidl/com/android/media/permission/PermissionEnum.aidl
+++ b/aidl/com/android/media/permission/PermissionEnum.aidl
@@ -37,5 +37,6 @@
CAPTURE_TUNER_AUDIO_INPUT = 11,
CAPTURE_VOICE_COMMUNICATION_OUTPUT = 12,
BLUETOOTH_CONNECT = 13,
- ENUM_SIZE = 14, // Not for actual usage, used by Java
+ BYPASS_CONCURRENT_RECORD_AUDIO_RESTRICTION = 14,
+ ENUM_SIZE = 15, // Not for actual usage, used by Java
}
diff --git a/camera/Camera.cpp b/camera/Camera.cpp
index d90f7c9..0bc735f 100644
--- a/camera/Camera.cpp
+++ b/camera/Camera.cpp
@@ -99,23 +99,21 @@
return c->unlock();
}
-// pass the buffered IGraphicBufferProducer to the camera service
-status_t Camera::setPreviewTarget(const sp<IGraphicBufferProducer>& bufferProducer)
-{
- ALOGV("setPreviewTarget(%p)", bufferProducer.get());
- sp <::android::hardware::ICamera> c = mCamera;
+// pass the Surface to the camera service
+status_t Camera::setPreviewTarget(const sp<SurfaceType>& target) {
+ ALOGV("setPreviewTarget(%p)", target.get());
+ sp<::android::hardware::ICamera> c = mCamera;
if (c == 0) return NO_INIT;
- ALOGD_IF(bufferProducer == 0, "app passed NULL surface");
- return c->setPreviewTarget(bufferProducer);
+ ALOGD_IF(target == 0, "app passed NULL surface");
+ return c->setPreviewTarget(target);
}
-status_t Camera::setVideoTarget(const sp<IGraphicBufferProducer>& bufferProducer)
-{
- ALOGV("setVideoTarget(%p)", bufferProducer.get());
- sp <::android::hardware::ICamera> c = mCamera;
+status_t Camera::setVideoTarget(const sp<SurfaceType>& target) {
+ ALOGV("setVideoTarget(%p)", target.get());
+ sp<::android::hardware::ICamera> c = mCamera;
if (c == 0) return NO_INIT;
- ALOGD_IF(bufferProducer == 0, "app passed NULL video surface");
- return c->setVideoTarget(bufferProducer);
+ ALOGD_IF(target == 0, "app passed NULL video surface");
+ return c->setVideoTarget(target);
}
// start preview mode
@@ -272,12 +270,10 @@
c->setPreviewCallbackFlag(flag);
}
-status_t Camera::setPreviewCallbackTarget(
- const sp<IGraphicBufferProducer>& callbackProducer)
-{
- sp <::android::hardware::ICamera> c = mCamera;
+status_t Camera::setPreviewCallbackTarget(const sp<SurfaceType>& target) {
+ sp<::android::hardware::ICamera> c = mCamera;
if (c == 0) return NO_INIT;
- return c->setPreviewCallbackTarget(callbackProducer);
+ return c->setPreviewCallbackTarget(target);
}
status_t Camera::setAudioRestriction(int32_t mode)
diff --git a/camera/CameraUtils.cpp b/camera/CameraUtils.cpp
index 3473780..ebb3305 100644
--- a/camera/CameraUtils.cpp
+++ b/camera/CameraUtils.cpp
@@ -32,7 +32,7 @@
const char *kCameraServiceDisabledProperty = "config.disable_cameraservice";
status_t CameraUtils::getRotationTransform(const CameraMetadata& staticInfo,
- int mirrorMode, /*out*/int32_t* transform) {
+ int mirrorMode, bool enableTransformInverseDisplay, /*out*/int32_t* transform) {
ALOGV("%s", __FUNCTION__);
if (transform == NULL) {
@@ -128,7 +128,9 @@
* aspect ratio, or the preview will end up looking non-uniformly
* stretched.
*/
- flags |= NATIVE_WINDOW_TRANSFORM_INVERSE_DISPLAY;
+ if (enableTransformInverseDisplay) {
+ flags |= NATIVE_WINDOW_TRANSFORM_INVERSE_DISPLAY;
+ }
ALOGV("%s: final transform = 0x%x", __FUNCTION__, flags);
diff --git a/camera/ICamera.cpp b/camera/ICamera.cpp
index b83edf7..0b811d2 100644
--- a/camera/ICamera.cpp
+++ b/camera/ICamera.cpp
@@ -17,16 +17,16 @@
//#define LOG_NDEBUG 0
#define LOG_TAG "ICamera"
-#include <utils/Log.h>
-#include <stdint.h>
-#include <sys/types.h>
-#include <binder/Parcel.h>
-#include <camera/CameraUtils.h>
#include <android/hardware/ICamera.h>
#include <android/hardware/ICameraClient.h>
-#include <gui/IGraphicBufferProducer.h>
+#include <binder/Parcel.h>
+#include <camera/CameraUtils.h>
#include <gui/Surface.h>
+#include <gui/view/Surface.h>
#include <media/hardware/HardwareAPI.h>
+#include <stdint.h>
+#include <sys/types.h>
+#include <utils/Log.h>
namespace android {
namespace hardware {
@@ -34,8 +34,14 @@
enum {
DISCONNECT = IBinder::FIRST_CALL_TRANSACTION,
SET_PREVIEW_TARGET,
+#if WB_LIBCAMERASERVICE_WITH_DEPENDENCIES
+ SET_PREVIEW_TARGET_SURFACE,
+#endif
SET_PREVIEW_CALLBACK_FLAG,
SET_PREVIEW_CALLBACK_TARGET,
+#if WB_LIBCAMERASERVICE_WITH_DEPENDENCIES
+ SET_PREVIEW_CALLBACK_TARGET_SURFACE,
+#endif
START_PREVIEW,
STOP_PREVIEW,
AUTO_FOCUS,
@@ -54,6 +60,9 @@
RELEASE_RECORDING_FRAME,
SET_VIDEO_BUFFER_MODE,
SET_VIDEO_BUFFER_TARGET,
+#if WB_LIBCAMERASERVICE_WITH_DEPENDENCIES
+ SET_VIDEO_BUFFER_TARGET_SURFACE,
+#endif
RELEASE_RECORDING_FRAME_HANDLE,
RELEASE_RECORDING_FRAME_HANDLE_BATCH,
SET_AUDIO_RESTRICTION,
@@ -79,15 +88,20 @@
return binder::Status::ok();
}
- // pass the buffered IGraphicBufferProducer to the camera service
- status_t setPreviewTarget(const sp<IGraphicBufferProducer>& bufferProducer)
- {
+ // pass the Surface to the camera service
+ status_t setPreviewTarget(const sp<SurfaceType>& target) {
ALOGV("setPreviewTarget");
Parcel data, reply;
data.writeInterfaceToken(ICamera::getInterfaceDescriptor());
- sp<IBinder> b(IInterface::asBinder(bufferProducer));
+#if WB_LIBCAMERASERVICE_WITH_DEPENDENCIES
+ view::Surface viewSurfaceProducer = view::Surface::fromSurface(target);
+ data.writeParcelable(viewSurfaceProducer);
+ remote()->transact(SET_PREVIEW_TARGET_SURFACE, data, &reply);
+#else
+ sp<IBinder> b(IInterface::asBinder(target));
data.writeStrongBinder(b);
remote()->transact(SET_PREVIEW_TARGET, data, &reply);
+#endif
return reply.readInt32();
}
@@ -102,15 +116,19 @@
remote()->transact(SET_PREVIEW_CALLBACK_FLAG, data, &reply);
}
- status_t setPreviewCallbackTarget(
- const sp<IGraphicBufferProducer>& callbackProducer)
- {
+ status_t setPreviewCallbackTarget(const sp<SurfaceType>& target) {
ALOGV("setPreviewCallbackTarget");
Parcel data, reply;
data.writeInterfaceToken(ICamera::getInterfaceDescriptor());
- sp<IBinder> b(IInterface::asBinder(callbackProducer));
+#if WB_LIBCAMERASERVICE_WITH_DEPENDENCIES
+ view::Surface viewCallbackProducer = view::Surface::fromSurface(target);
+ data.writeParcelable(viewCallbackProducer);
+ remote()->transact(SET_PREVIEW_CALLBACK_TARGET_SURFACE, data, &reply);
+#else
+ sp<IBinder> b(IInterface::asBinder(target));
data.writeStrongBinder(b);
remote()->transact(SET_PREVIEW_CALLBACK_TARGET, data, &reply);
+#endif
return reply.readInt32();
}
@@ -326,14 +344,19 @@
return reply.readInt32();
}
- status_t setVideoTarget(const sp<IGraphicBufferProducer>& bufferProducer)
- {
+ status_t setVideoTarget(const sp<SurfaceType>& target) {
ALOGV("setVideoTarget");
Parcel data, reply;
data.writeInterfaceToken(ICamera::getInterfaceDescriptor());
- sp<IBinder> b(IInterface::asBinder(bufferProducer));
+#if WB_LIBCAMERASERVICE_WITH_DEPENDENCIES
+ view::Surface viewSurfaceProducer = view::Surface::fromSurface(target);
+ data.writeParcelable(viewSurfaceProducer);
+ remote()->transact(SET_VIDEO_BUFFER_TARGET_SURFACE, data, &reply);
+#else
+ sp<IBinder> b(IInterface::asBinder(target));
data.writeStrongBinder(b);
remote()->transact(SET_VIDEO_BUFFER_TARGET, data, &reply);
+#endif
return reply.readInt32();
}
};
@@ -358,9 +381,25 @@
CHECK_INTERFACE(ICamera, data, reply);
sp<IGraphicBufferProducer> st =
interface_cast<IGraphicBufferProducer>(data.readStrongBinder());
+#if WB_LIBCAMERASERVICE_WITH_DEPENDENCIES
+ sp<Surface> sp = new Surface(st);
+ reply->writeInt32(setPreviewTarget(sp));
+#else
+ reply->writeInt32(setPreviewTarget(st));
+#endif
+ return NO_ERROR;
+ } break;
+#if WB_LIBCAMERASERVICE_WITH_DEPENDENCIES
+ case SET_PREVIEW_TARGET_SURFACE: {
+ ALOGV("SET_PREVIEW_TARGET_SURFACE");
+ CHECK_INTERFACE(ICamera, data, reply);
+ view::Surface viewSurface;
+ data.readParcelable(&viewSurface);
+ sp<Surface> st = viewSurface.toSurface();
reply->writeInt32(setPreviewTarget(st));
return NO_ERROR;
} break;
+#endif
case SET_PREVIEW_CALLBACK_FLAG: {
ALOGV("SET_PREVIEW_CALLBACK_TYPE");
CHECK_INTERFACE(ICamera, data, reply);
@@ -373,9 +412,25 @@
CHECK_INTERFACE(ICamera, data, reply);
sp<IGraphicBufferProducer> cp =
interface_cast<IGraphicBufferProducer>(data.readStrongBinder());
+#if WB_LIBCAMERASERVICE_WITH_DEPENDENCIES
+ sp<Surface> sp = new Surface(cp);
+ reply->writeInt32(setPreviewCallbackTarget(sp));
+#else
+ reply->writeInt32(setPreviewCallbackTarget(cp));
+#endif
+ return NO_ERROR;
+ }
+#if WB_LIBCAMERASERVICE_WITH_DEPENDENCIES
+ case SET_PREVIEW_CALLBACK_TARGET_SURFACE: {
+ ALOGV("SET_PREVIEW_CALLBACK_TARGET_SURFACE");
+ CHECK_INTERFACE(ICamera, data, reply);
+ view::Surface viewSurface;
+ data.readParcelable(&viewSurface);
+ sp<Surface> cp = viewSurface.toSurface();
reply->writeInt32(setPreviewCallbackTarget(cp));
return NO_ERROR;
}
+#endif
case START_PREVIEW: {
ALOGV("START_PREVIEW");
CHECK_INTERFACE(ICamera, data, reply);
@@ -508,9 +563,25 @@
CHECK_INTERFACE(ICamera, data, reply);
sp<IGraphicBufferProducer> st =
interface_cast<IGraphicBufferProducer>(data.readStrongBinder());
+#if WB_LIBCAMERASERVICE_WITH_DEPENDENCIES
+ sp<Surface> sp = new Surface(st);
+ reply->writeInt32(setVideoTarget(sp));
+#else
reply->writeInt32(setVideoTarget(st));
+#endif
return NO_ERROR;
} break;
+#if WB_LIBCAMERASERVICE_WITH_DEPENDENCIES
+ case SET_VIDEO_BUFFER_TARGET_SURFACE: {
+ ALOGV("SET_VIDEO_BUFFER_TARGET_SURFACE");
+ CHECK_INTERFACE(ICamera, data, reply);
+ view::Surface viewSurface;
+ data.readParcelable(&viewSurface);
+ sp<Surface> cp = viewSurface.toSurface();
+ reply->writeInt32(setVideoTarget(cp));
+ return NO_ERROR;
+ } break;
+#endif
case SET_AUDIO_RESTRICTION: {
CHECK_INTERFACE(ICamera, data, reply);
int32_t mode = data.readInt32();
diff --git a/camera/aidl/android/hardware/ICameraService.aidl b/camera/aidl/android/hardware/ICameraService.aidl
index ce6c2d3..6431737 100644
--- a/camera/aidl/android/hardware/ICameraService.aidl
+++ b/camera/aidl/android/hardware/ICameraService.aidl
@@ -157,6 +157,7 @@
* policy for default device context). Only virtual cameras would be exposed
* only for custom policy and only real cameras would be exposed for default
* policy.
+ * @param sharedMode Parameter specifying if the camera should be opened in shared mode.
*/
ICameraDeviceUser connectDevice(ICameraDeviceCallbacks callbacks,
@utf8InCpp String cameraId,
@@ -164,7 +165,8 @@
int targetSdkVersion,
int rotationOverride,
in AttributionSourceState clientAttribution,
- int devicePolicy);
+ int devicePolicy,
+ boolean sharedMode);
/**
* Add listener for changes to camera device and flashlight state.
diff --git a/camera/aidl/android/hardware/ICameraServiceListener.aidl b/camera/aidl/android/hardware/ICameraServiceListener.aidl
index 9c8c88a..c0fd50e 100644
--- a/camera/aidl/android/hardware/ICameraServiceListener.aidl
+++ b/camera/aidl/android/hardware/ICameraServiceListener.aidl
@@ -105,5 +105,6 @@
* will receive such callbacks.
*/
oneway void onCameraOpened(@utf8InCpp String cameraId, @utf8InCpp String clientPackageId, int deviceId);
+ oneway void onCameraOpenedInSharedMode(@utf8InCpp String cameraId, @utf8InCpp String clientPackageId, int deviceId, boolean primaryClient);
oneway void onCameraClosed(@utf8InCpp String cameraId, int deviceId);
}
diff --git a/camera/aidl/android/hardware/camera2/ICameraDeviceCallbacks.aidl b/camera/aidl/android/hardware/camera2/ICameraDeviceCallbacks.aidl
index 58b19a3..49e9920 100644
--- a/camera/aidl/android/hardware/camera2/ICameraDeviceCallbacks.aidl
+++ b/camera/aidl/android/hardware/camera2/ICameraDeviceCallbacks.aidl
@@ -50,4 +50,15 @@
oneway void onRepeatingRequestError(in long lastFrameNumber,
in int repeatingRequestId);
oneway void onRequestQueueEmpty();
+
+ /**
+ * Notify registered clients about client shared access priority changes when the camera device
+ * has been opened in shared mode.
+ *
+ * If the client priority changes from secondary to primary, then it can now
+ * create capture request and change the capture request parameters. If client priority
+ * changes from primary to secondary, that implies that another higher priority client is also
+ * accessing the camera in shared mode and is now the primary client.
+ */
+ oneway void onClientSharedAccessPriorityChanged(boolean primaryClient);
}
diff --git a/camera/aidl/android/hardware/camera2/ICameraDeviceUser.aidl b/camera/aidl/android/hardware/camera2/ICameraDeviceUser.aidl
index 8e1fcc0..c1da126 100644
--- a/camera/aidl/android/hardware/camera2/ICameraDeviceUser.aidl
+++ b/camera/aidl/android/hardware/camera2/ICameraDeviceUser.aidl
@@ -68,6 +68,17 @@
const int CONSTRAINED_HIGH_SPEED_MODE = 1;
/**
+ * The shared operating mode for a camera device.
+ *
+ * <p>
+ * When in shared mode, the camera device can be opened and accessed by multiple applications
+ * simultaneously.
+ * </p>
+ *
+ */
+ const int SHARED_MODE = 2;
+
+ /**
* Start of custom vendor modes
*/
const int VENDOR_MODE_START = 0x8000;
@@ -194,4 +205,12 @@
*/
ICameraOfflineSession switchToOffline(in ICameraDeviceCallbacks callbacks,
in int[] offlineOutputIds);
+
+ /**
+ * Get the client status as primary or secondary when camera is opened in shared mode.
+ *
+ * @return true if this is primary client when camera is opened in shared mode.
+ * false if another higher priority client with primary access is also using the camera.
+ */
+ boolean isPrimaryClient();
}
diff --git a/camera/camera2/OutputConfiguration.cpp b/camera/camera2/OutputConfiguration.cpp
index 2d1af32..a89d7ca 100644
--- a/camera/camera2/OutputConfiguration.cpp
+++ b/camera/camera2/OutputConfiguration.cpp
@@ -22,16 +22,19 @@
#include <camera/camera2/OutputConfiguration.h>
#include <camera/StringUtils.h>
+#include <com_android_internal_camera_flags.h>
#include <binder/Parcel.h>
#include <gui/view/Surface.h>
#include <system/camera_metadata.h>
#include <system/graphics.h>
#include <utils/String8.h>
+namespace flags = com::android::internal::camera::flags;
namespace android {
const int OutputConfiguration::INVALID_ROTATION = -1;
+const int OutputConfiguration::ROTATION_0 = 0;
const int OutputConfiguration::INVALID_SET_ID = -1;
const std::vector<sp<IGraphicBufferProducer>>&
@@ -99,6 +102,30 @@
return mMirrorMode;
}
+int OutputConfiguration::getMirrorMode(sp<IGraphicBufferProducer> surface) const {
+ if (!flags::mirror_mode_shared_surfaces()) {
+ return mMirrorMode;
+ }
+
+ if (mGbps.size() != mMirrorModeForProducers.size()) {
+ ALOGE("%s: mGbps size doesn't match mMirrorModeForProducers: %zu vs %zu",
+ __FUNCTION__, mGbps.size(), mMirrorModeForProducers.size());
+ return mMirrorMode;
+ }
+
+ // Use per-producer mirror mode if available.
+ for (size_t i = 0; i < mGbps.size(); i++) {
+ if (mGbps[i] == surface) {
+ return mMirrorModeForProducers[i];
+ }
+ }
+
+ // For surface that doesn't belong to this output configuration, use
+ // mMirrorMode as default.
+ ALOGW("%s: Surface doesn't belong to this OutputConfiguration!", __FUNCTION__);
+ return mMirrorMode;
+}
+
bool OutputConfiguration::useReadoutTimestamp() const {
return mUseReadoutTimestamp;
}
@@ -142,6 +169,29 @@
mUsage(0) {
}
+OutputConfiguration::OutputConfiguration(int surfaceType, int width, int height, int format,
+ int32_t colorSpace, int mirrorMode, bool useReadoutTimestamp, int timestampBase,
+ int dataspace, int64_t usage, int64_t streamusecase, std::string physicalCamId):
+ mRotation(ROTATION_0),
+ mSurfaceSetID(INVALID_SET_ID),
+ mSurfaceType(surfaceType),
+ mWidth(width),
+ mHeight(height),
+ mIsDeferred(false),
+ mIsShared(false),
+ mPhysicalCameraId(physicalCamId),
+ mIsMultiResolution(false),
+ mDynamicRangeProfile(ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD),
+ mColorSpace(colorSpace),
+ mStreamUseCase(streamusecase),
+ mTimestampBase(timestampBase),
+ mMirrorMode(mirrorMode),
+ mUseReadoutTimestamp(useReadoutTimestamp),
+ mFormat(format),
+ mDataspace(dataspace),
+ mUsage(usage){
+}
+
OutputConfiguration::OutputConfiguration(const android::Parcel& parcel) :
mRotation(INVALID_ROTATION),
mSurfaceSetID(INVALID_SET_ID) {
@@ -251,6 +301,12 @@
return err;
}
+ std::vector<int> mirrorModeForProducers;
+ if ((err = parcel->readInt32Vector(&mirrorModeForProducers)) != OK) {
+ ALOGE("%s: Failed to read mirroring mode for surfaces from parcel", __FUNCTION__);
+ return err;
+ }
+
int useReadoutTimestamp = 0;
if ((err = parcel->readInt32(&useReadoutTimestamp)) != OK) {
ALOGE("%s: Failed to read useReadoutTimestamp flag from parcel", __FUNCTION__);
@@ -286,6 +342,7 @@
mStreamUseCase = streamUseCase;
mTimestampBase = timestampBase;
mMirrorMode = mirrorMode;
+ mMirrorModeForProducers = std::move(mirrorModeForProducers);
mUseReadoutTimestamp = useReadoutTimestamp != 0;
for (auto& surface : surfaceShims) {
ALOGV("%s: OutputConfiguration: %p, name %s", __FUNCTION__,
@@ -327,6 +384,7 @@
mStreamUseCase = ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_DEFAULT;
mTimestampBase = TIMESTAMP_BASE_DEFAULT;
mMirrorMode = MIRROR_MODE_AUTO;
+ mMirrorModeForProducers.push_back(mMirrorMode);
mUseReadoutTimestamp = false;
mFormat = HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED;
mDataspace = 0;
@@ -344,9 +402,9 @@
mColorSpace(ANDROID_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP_UNSPECIFIED),
mStreamUseCase(ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_DEFAULT),
mTimestampBase(TIMESTAMP_BASE_DEFAULT),
- mMirrorMode(MIRROR_MODE_AUTO), mUseReadoutTimestamp(false),
- mFormat(HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED), mDataspace(0),
- mUsage(0) { }
+ mMirrorMode(MIRROR_MODE_AUTO), mMirrorModeForProducers(gbps.size(), mMirrorMode),
+ mUseReadoutTimestamp(false), mFormat(HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED),
+ mDataspace(0), mUsage(0) { }
status_t OutputConfiguration::writeToParcel(android::Parcel* parcel) const {
@@ -409,6 +467,9 @@
err = parcel->writeInt32(mMirrorMode);
if (err != OK) return err;
+ err = parcel->writeInt32Vector(mMirrorModeForProducers);
+ if (err != OK) return err;
+
err = parcel->writeInt32(mUseReadoutTimestamp ? 1 : 0);
if (err != OK) return err;
@@ -438,6 +499,20 @@
return true;
}
+template <typename T>
+static bool simpleVectorsLessThan(T first, T second) {
+ if (first.size() != second.size()) {
+ return first.size() < second.size();
+ }
+
+ for (size_t i = 0; i < first.size(); i++) {
+ if (first[i] != second[i]) {
+ return first[i] < second[i];
+ }
+ }
+ return false;
+}
+
bool OutputConfiguration::gbpsEqual(const OutputConfiguration& other) const {
const std::vector<sp<IGraphicBufferProducer> >& otherGbps =
other.getGraphicBufferProducers();
@@ -449,20 +524,20 @@
return simpleVectorsEqual(othersensorPixelModesUsed, mSensorPixelModesUsed);
}
+bool OutputConfiguration::mirrorModesEqual(const OutputConfiguration& other) const {
+ const std::vector<int>& otherMirrorModes = other.getMirrorModes();
+ return simpleVectorsEqual(otherMirrorModes, mMirrorModeForProducers);
+
+}
+
bool OutputConfiguration::sensorPixelModesUsedLessThan(const OutputConfiguration& other) const {
const std::vector<int32_t>& spms = other.getSensorPixelModesUsed();
+ return simpleVectorsLessThan(mSensorPixelModesUsed, spms);
+}
- if (mSensorPixelModesUsed.size() != spms.size()) {
- return mSensorPixelModesUsed.size() < spms.size();
- }
-
- for (size_t i = 0; i < spms.size(); i++) {
- if (mSensorPixelModesUsed[i] != spms[i]) {
- return mSensorPixelModesUsed[i] < spms[i];
- }
- }
-
- return false;
+bool OutputConfiguration::mirrorModesLessThan(const OutputConfiguration& other) const {
+ const std::vector<int>& otherMirrorModes = other.getMirrorModes();
+ return simpleVectorsLessThan(mMirrorModeForProducers, otherMirrorModes);
}
bool OutputConfiguration::gbpsLessThan(const OutputConfiguration& other) const {
diff --git a/camera/camera_platform.aconfig b/camera/camera_platform.aconfig
index bf1520b..3b199b3 100644
--- a/camera/camera_platform.aconfig
+++ b/camera/camera_platform.aconfig
@@ -3,6 +3,14 @@
flag {
namespace: "camera_platform"
+ name: "camera_heif_gainmap"
+ is_exported: true
+ description: "Extend HEIC/HEIF still capture with HDR gainmap"
+ bug: "362608343"
+}
+
+flag {
+ namespace: "camera_platform"
name: "camera_hsum_permission"
is_exported: true
description: "Camera access by headless system user"
@@ -35,13 +43,6 @@
flag {
namespace: "camera_platform"
- name: "session_hal_buf_manager"
- description: "Enable or disable HAL buffer manager as requested by the camera HAL"
- bug: "311263114"
-}
-
-flag {
- namespace: "camera_platform"
name: "inject_session_params"
description: "Enable session parameter injection via reconfiguration"
bug: "308984721"
@@ -64,13 +65,6 @@
flag {
namespace: "camera_platform"
- name: "use_ro_board_api_level_for_vndk_version"
- description: "Enable using ro.board.api_level instead of ro.vndk.version to get VNDK version"
- bug: "312315580"
-}
-
-flag {
- namespace: "camera_platform"
name: "camera_extensions_characteristics_get"
is_exported: true
description: "Enable get extension specific camera characteristics API"
@@ -79,13 +73,6 @@
flag {
namespace: "camera_platform"
- name: "delay_lazy_hal_instantiation"
- description: "Only trigger lazy HAL instantiation when the HAL is needed for an operation."
- bug: "319735068"
-}
-
-flag {
- namespace: "camera_platform"
name: "return_buffers_outside_locks"
description: "Enable returning graphics buffers to buffer queues without holding the in-flight mutex"
bug: "315526878"
@@ -127,36 +114,6 @@
flag {
namespace: "camera_platform"
- name: "cache_permission_services"
- description: "Cache IPermissionController and IPermissionChecker in CameraService to reduce query latency."
- bug: "326139956"
- metadata {
- purpose: PURPOSE_BUGFIX
- }
-}
-
-flag {
- namespace: "camera_platform"
- name: "check_session_support_before_session_char"
- description: "Validate that a SessionConfiguration is supported before fetching SessionCharacteristics."
- bug: "327008530"
- metadata {
- purpose: PURPOSE_BUGFIX
- }
-}
-
-flag {
- namespace: "camera_platform"
- name: "calculate_perf_override_during_session_support"
- description: "Dynamically calulate whether perf class override should be set in isSessionConfigurationWithParametersSupported."
- bug: "332975108"
- metadata {
- purpose: PURPOSE_BUGFIX
- }
-}
-
-flag {
- namespace: "camera_platform"
name: "analytics_24q3"
description: "Miscellaneous camera platform metrics for 24Q3"
bug: "332557570"
@@ -164,16 +121,6 @@
flag {
namespace: "camera_platform"
- name: "use_system_api_for_vndk_version"
- description: "ro.board.api_level isn't reliable. Use system api to replace ro.vndk.version"
- bug: "312315580"
- metadata {
- purpose: PURPOSE_BUGFIX
- }
-}
-
-flag {
- namespace: "camera_platform"
name: "multi_res_raw_reprocessing"
description: "Allow multi-resolution raw reprocessing without reprocessing capability"
bug: "336922859"
@@ -225,3 +172,94 @@
description: "Enable stream reconfiguration for unchanged streams"
bug: "341740105"
}
+
+flag {
+ namespace: "camera_platform"
+ name: "data_delivery_permission_checks"
+ description: "Pass the full AttributionSource chain to PermissionChecker for data delivery"
+ bug: "190657833"
+ is_fixed_read_only: true
+}
+
+flag {
+ namespace: "camera_platform"
+ name: "depth_jpeg_extensions"
+ description: "Add Depth Jpeg extension output support"
+ bug: "362788689"
+}
+
+flag {
+ namespace: "camera_platform"
+ name: "mirror_mode_shared_surfaces"
+ is_exported: true
+ description: "Support setting and getting mirror mode for shared surfaces"
+ bug: "298899993"
+}
+
+flag {
+ namespace: "camera_platform"
+ is_exported: true
+ name: "multiresolution_imagereader_usage_public"
+ description: "Make constructor for MultiResolutionImageReader with usage public"
+ bug: "338621560"
+}
+
+flag {
+ namespace: "camera_platform"
+ is_exported: true
+ name: "color_temperature"
+ description: "Add keys to manually set color temperature and color tint"
+ bug: "359409044"
+}
+
+flag {
+ namespace: "camera_platform"
+ name: "night_mode_indicator"
+ is_exported: true
+ description: "Indicates when to activate Night Mode Camera Extension"
+ bug: "335902696"
+}
+
+flag {
+ namespace: "camera_platform"
+ name: "zoom_method"
+ is_exported: true
+ description: "Gives apps explicit control on reflects zoom via ZOOM_RATIO capture result"
+ bug: "298899993"
+}
+
+flag {
+ namespace: "camera_platform"
+ is_exported: true
+ name: "ae_priority"
+ description: "Add AE priority modes"
+ bug: "359944765"
+}
+
+flag {
+ namespace: "camera_platform"
+ name: "feature_combination_baklava"
+ description: "Add new feature combination query version for Baklava"
+ bug: "370778206"
+}
+
+flag {
+ namespace: "camera_platform"
+ name: "camera_multi_client"
+ is_exported: true
+ description: "add new feature to allow multiple clients to access the camera in shared mode"
+ bug: "265196098"
+ metadata {
+ purpose: PURPOSE_FEATURE
+ }
+}
+
+flag {
+ namespace: "camera_platform"
+ name: "query_process_state"
+ description: "In opChanged, query the process state from AM instead of relying on mUidPolicy"
+ bug: "378016494"
+ metadata {
+ purpose: PURPOSE_BUGFIX
+ }
+}
diff --git a/camera/cameraserver/manifest_android.frameworks.cameraservice.service.xml b/camera/cameraserver/manifest_android.frameworks.cameraservice.service.xml
index 5d85909..d8264df 100644
--- a/camera/cameraserver/manifest_android.frameworks.cameraservice.service.xml
+++ b/camera/cameraserver/manifest_android.frameworks.cameraservice.service.xml
@@ -11,7 +11,7 @@
<hal format="aidl">
<name>android.frameworks.cameraservice.service</name>
- <version>2</version>
+ <version>3</version>
<interface>
<name>ICameraService</name>
<instance>default</instance>
diff --git a/camera/include/camera/Camera.h b/camera/include/camera/Camera.h
index 646b139..fa84b4e 100644
--- a/camera/include/camera/Camera.h
+++ b/camera/include/camera/Camera.h
@@ -21,13 +21,18 @@
#include <android/hardware/ICameraService.h>
-#include <gui/IGraphicBufferProducer.h>
-#include <system/camera.h>
+#include <camera/CameraBase.h>
+#include <camera/CameraUtils.h>
#include <camera/ICameraRecordingProxy.h>
#include <camera/android/hardware/ICamera.h>
#include <camera/android/hardware/ICameraClient.h>
-#include <camera/CameraBase.h>
-#include <camera/CameraUtils.h>
+#include <gui/Flags.h>
+#if WB_LIBCAMERASERVICE_WITH_DEPENDENCIES
+#include <gui/Surface.h>
+#else
+#include <gui/IGraphicBufferProducer.h>
+#endif
+#include <system/camera.h>
namespace android {
@@ -91,8 +96,8 @@
status_t lock();
status_t unlock();
- // pass the buffered IGraphicBufferProducer to the camera service
- status_t setPreviewTarget(const sp<IGraphicBufferProducer>& bufferProducer);
+ // pass the SurfaceType to the camera service
+ status_t setPreviewTarget(const sp<SurfaceType>& target);
// start preview mode, must call setPreviewTarget first
status_t startPreview();
@@ -148,7 +153,7 @@
// Set the video buffer producer for camera to use in VIDEO_BUFFER_MODE_BUFFER_QUEUE
// mode.
- status_t setVideoTarget(const sp<IGraphicBufferProducer>& bufferProducer);
+ status_t setVideoTarget(const sp<SurfaceType>& target);
void setListener(const sp<CameraListener>& listener);
@@ -158,8 +163,7 @@
// disabled by calling it with CAMERA_FRAME_CALLBACK_FLAG_NOOP, and
// Target by calling it with a NULL interface.
void setPreviewCallbackFlags(int preview_callback_flag);
- status_t setPreviewCallbackTarget(
- const sp<IGraphicBufferProducer>& callbackProducer);
+ status_t setPreviewCallbackTarget(const sp<SurfaceType>& target);
sp<ICameraRecordingProxy> getRecordingProxy();
diff --git a/camera/include/camera/CameraUtils.h b/camera/include/camera/CameraUtils.h
index d358407..766cac1 100644
--- a/camera/include/camera/CameraUtils.h
+++ b/camera/include/camera/CameraUtils.h
@@ -46,7 +46,7 @@
* Returns OK on success, or a negative error code.
*/
static status_t getRotationTransform(const CameraMetadata& staticInfo,
- int mirrorMode, /*out*/int32_t* transform);
+ int mirrorMode, bool enableTransformInverseDisplay, /*out*/int32_t* transform);
/**
* Check if the image data is VideoNativeHandleMetadata, that contains a native handle.
diff --git a/camera/include/camera/android/hardware/ICamera.h b/camera/include/camera/android/hardware/ICamera.h
index ec19e5d..eb887fb 100644
--- a/camera/include/camera/android/hardware/ICamera.h
+++ b/camera/include/camera/android/hardware/ICamera.h
@@ -22,6 +22,7 @@
#include <binder/Parcel.h>
#include <binder/IMemory.h>
#include <binder/Status.h>
+#include <gui/Flags.h>
#include <utils/String8.h>
namespace android {
@@ -61,9 +62,8 @@
// allow other processes to use this ICamera interface
virtual status_t unlock() = 0;
- // pass the buffered IGraphicBufferProducer to the camera service
- virtual status_t setPreviewTarget(
- const sp<IGraphicBufferProducer>& bufferProducer) = 0;
+ // pass the SurfaceType to the camera service
+ virtual status_t setPreviewTarget(const sp<SurfaceType>& bufferProducer) = 0;
// set the preview callback flag to affect how the received frames from
// preview are handled. Enabling preview callback flags disables any active
@@ -73,8 +73,7 @@
// of preview callback buffers. Passing a valid interface here disables any
// active preview callbacks set by setPreviewCallbackFlag(). Passing NULL
// disables the use of the callback target.
- virtual status_t setPreviewCallbackTarget(
- const sp<IGraphicBufferProducer>& callbackProducer) = 0;
+ virtual status_t setPreviewCallbackTarget(const sp<SurfaceType>& callbackProducer) = 0;
// start preview mode, must call setPreviewTarget first
virtual status_t startPreview() = 0;
@@ -138,8 +137,7 @@
virtual status_t setVideoBufferMode(int32_t videoBufferMode) = 0;
// Set the video buffer producer for camera to use in VIDEO_BUFFER_MODE_BUFFER_QUEUE mode.
- virtual status_t setVideoTarget(
- const sp<IGraphicBufferProducer>& bufferProducer) = 0;
+ virtual status_t setVideoTarget(const sp<SurfaceType>& bufferProducer) = 0;
// Set the audio restriction mode
virtual status_t setAudioRestriction(int32_t mode) = 0;
diff --git a/camera/include/camera/camera2/OutputConfiguration.h b/camera/include/camera/camera2/OutputConfiguration.h
index 83ce39d..671d065 100644
--- a/camera/include/camera/camera2/OutputConfiguration.h
+++ b/camera/include/camera/camera2/OutputConfiguration.h
@@ -33,6 +33,7 @@
class OutputConfiguration : public android::Parcelable {
public:
+ static const int ROTATION_0;
static const int INVALID_ROTATION;
static const int INVALID_SET_ID;
enum SurfaceType {
@@ -72,6 +73,7 @@
bool isMultiResolution() const;
int64_t getStreamUseCase() const;
int getTimestampBase() const;
+ int getMirrorMode(sp<IGraphicBufferProducer> surface) const;
int getMirrorMode() const;
bool useReadoutTimestamp() const;
int getFormat() const;
@@ -107,6 +109,9 @@
int surfaceSetID = INVALID_SET_ID,
int surfaceType = SURFACE_TYPE_UNKNOWN, int width = 0,
int height = 0, bool isShared = false);
+ OutputConfiguration(int surfaceType, int width, int height, int format, int32_t colorSpace,
+ int mirrorMode, bool useReadoutTimestamp,int timestampBase, int dataspace,
+ int64_t usage, int64_t streamusecase, std::string physicalCamId);
bool operator == (const OutputConfiguration& other) const {
return ( mRotation == other.mRotation &&
@@ -125,6 +130,7 @@
mStreamUseCase == other.mStreamUseCase &&
mTimestampBase == other.mTimestampBase &&
mMirrorMode == other.mMirrorMode &&
+ mirrorModesEqual(other) &&
mUseReadoutTimestamp == other.mUseReadoutTimestamp &&
mFormat == other.mFormat &&
mDataspace == other.mDataspace &&
@@ -180,6 +186,9 @@
if (mMirrorMode != other.mMirrorMode) {
return mMirrorMode < other.mMirrorMode;
}
+ if (!mirrorModesEqual(other)) {
+ return mirrorModesLessThan(other);
+ }
if (mUseReadoutTimestamp != other.mUseReadoutTimestamp) {
return mUseReadoutTimestamp < other.mUseReadoutTimestamp;
}
@@ -204,6 +213,31 @@
bool sensorPixelModesUsedLessThan(const OutputConfiguration& other) const;
bool gbpsLessThan(const OutputConfiguration& other) const;
void addGraphicProducer(sp<IGraphicBufferProducer> gbp) {mGbps.push_back(gbp);}
+ bool mirrorModesEqual(const OutputConfiguration& other) const;
+ bool mirrorModesLessThan(const OutputConfiguration& other) const;
+ const std::vector<int32_t>& getMirrorModes() const {return mMirrorModeForProducers;}
+ bool sharedConfigEqual(const OutputConfiguration& other) const {
+ return (mRotation == other.mRotation &&
+ mSurfaceSetID == other.mSurfaceSetID &&
+ mSurfaceType == other.mSurfaceType &&
+ mWidth == other.mWidth &&
+ mHeight == other.mHeight &&
+ mIsDeferred == other.mIsDeferred &&
+ mIsShared == other.mIsShared &&
+ mPhysicalCameraId == other.mPhysicalCameraId &&
+ mIsMultiResolution == other.mIsMultiResolution &&
+ sensorPixelModesUsedEqual(other) &&
+ mDynamicRangeProfile == other.mDynamicRangeProfile &&
+ mColorSpace == other.mColorSpace &&
+ mStreamUseCase == other.mStreamUseCase &&
+ mTimestampBase == other.mTimestampBase &&
+ mMirrorMode == other.mMirrorMode &&
+ mUseReadoutTimestamp == other.mUseReadoutTimestamp &&
+ mFormat == other.mFormat &&
+ mDataspace == other.mDataspace &&
+ mUsage == other.mUsage);
+ }
+
private:
std::vector<sp<IGraphicBufferProducer>> mGbps;
int mRotation;
@@ -221,6 +255,7 @@
int64_t mStreamUseCase;
int mTimestampBase;
int mMirrorMode;
+ std::vector<int> mMirrorModeForProducers; // 1:1 mapped with mGbps
bool mUseReadoutTimestamp;
int mFormat;
int mDataspace;
diff --git a/camera/ndk/Android.bp b/camera/ndk/Android.bp
index 379c0b5..508808f 100644
--- a/camera/ndk/Android.bp
+++ b/camera/ndk/Android.bp
@@ -79,6 +79,7 @@
shared_libs: [
"android.companion.virtual.virtualdevice_aidl-cpp",
"android.companion.virtualdevice.flags-aconfig-cc",
+ "camera_platform_flags_c_lib",
"framework-permission-aidl-cpp",
"libandroid_runtime",
"libbinder",
@@ -147,8 +148,8 @@
shared_libs: [
"android.frameworks.cameraservice.common-V1-ndk",
- "android.frameworks.cameraservice.device-V2-ndk",
- "android.frameworks.cameraservice.service-V2-ndk",
+ "android.frameworks.cameraservice.device-V3-ndk",
+ "android.frameworks.cameraservice.service-V3-ndk",
"libbinder_ndk",
"libcamera_metadata",
"libcutils",
diff --git a/camera/ndk/NdkCameraCaptureSession.cpp b/camera/ndk/NdkCameraCaptureSession.cpp
index 92de1e4..58370e5 100644
--- a/camera/ndk/NdkCameraCaptureSession.cpp
+++ b/camera/ndk/NdkCameraCaptureSession.cpp
@@ -177,6 +177,34 @@
}
EXPORT
+camera_status_t ACameraCaptureSessionShared_startStreaming(
+ ACameraCaptureSession* /*session*/, ACameraCaptureSession_captureCallbacksV2* /*callbacks*/,
+ int /*numOutputWindows*/, ANativeWindow** /*window*/,
+ int* /*captureSequenceId*/) {
+ ATRACE_CALL();
+ // TODO: need to add implementation
+ return ACAMERA_OK;
+}
+
+EXPORT
+camera_status_t ACameraCaptureSessionShared_logicalCamera_startStreaming(
+ ACameraCaptureSession* /*session*/,
+ ACameraCaptureSession_logicalCamera_captureCallbacksV2* /*callbacks*/,
+ int /*numOutputWindows*/, ANativeWindow** /*windows*/,
+ int* /*captureSequenceId*/) {
+ ATRACE_CALL();
+ // TODO: need to add implementation
+ return ACAMERA_OK;
+}
+
+EXPORT
+camera_status_t ACameraCaptureSessionShared_stopStreaming(ACameraCaptureSession* /*session*/) {
+ ATRACE_CALL();
+ // TODO: need to add implementation
+ return ACAMERA_OK;
+}
+
+EXPORT
camera_status_t ACameraCaptureSession_updateSharedOutput(ACameraCaptureSession* session,
ACaptureSessionOutput* output) {
ATRACE_CALL();
diff --git a/camera/ndk/NdkCameraManager.cpp b/camera/ndk/NdkCameraManager.cpp
index 1b3343e..28cc9af 100644
--- a/camera/ndk/NdkCameraManager.cpp
+++ b/camera/ndk/NdkCameraManager.cpp
@@ -27,6 +27,8 @@
#include "ndk_vendor/impl/ACameraManager.h"
#else
#include "impl/ACameraManager.h"
+#include <com_android_internal_camera_flags.h>
+namespace flags = com::android::internal::camera::flags;
#endif
#include "impl/ACameraMetadata.h"
@@ -159,6 +161,23 @@
}
EXPORT
+camera_status_t ACameraManager_isCameraDeviceSharingSupported(ACameraManager *mgr,
+ const char *cameraId, bool *isSharingSupported) {
+ ATRACE_CALL();
+ #ifndef __ANDROID_VNDK__
+ if (!flags::camera_multi_client()) {
+ return ACAMERA_ERROR_UNSUPPORTED_OPERATION;
+ }
+ #endif
+ if (mgr == nullptr || cameraId == nullptr || isSharingSupported == nullptr) {
+ ALOGE("%s: invalid argument! mgr %p cameraId %p isSharingSupported %p",
+ __FUNCTION__, mgr, cameraId, isSharingSupported);
+ return ACAMERA_ERROR_INVALID_PARAMETER;
+ }
+ return mgr->isCameraDeviceSharingSupported(cameraId, isSharingSupported);
+}
+
+EXPORT
camera_status_t ACameraManager_getCameraCharacteristics(
ACameraManager* mgr, const char* cameraId, ACameraMetadata** chars){
ATRACE_CALL();
@@ -188,7 +207,27 @@
__FUNCTION__, mgr, cameraId, callback, device);
return ACAMERA_ERROR_INVALID_PARAMETER;
}
- return mgr->openCamera(cameraId, callback, device);
+ bool primaryClient;
+ return mgr->openCamera(cameraId, /*sharedMode*/false, callback, device, &primaryClient);
+}
+
+EXPORT
+camera_status_t ACameraManager_openSharedCamera(
+ ACameraManager* mgr, const char* cameraId, ACameraDevice_StateCallbacks* callback,
+ /*out*/ACameraDevice** device, /*out*/bool* primaryClient) {
+ ATRACE_CALL();
+ #ifndef __ANDROID_VNDK__
+ if (!flags::camera_multi_client()) {
+ return ACAMERA_ERROR_UNSUPPORTED_OPERATION;
+ }
+ #endif
+ if (mgr == nullptr || cameraId == nullptr || callback == nullptr || device == nullptr ||
+ primaryClient == nullptr) {
+ ALOGE("%s: invalid argument! mgr %p cameraId %p callback %p device %p primary %p",
+ __FUNCTION__, mgr, cameraId, callback, device, primaryClient);
+ return ACAMERA_ERROR_INVALID_PARAMETER;
+ }
+ return mgr->openCamera(cameraId, /*sharedMode*/true, callback, device, primaryClient);
}
#ifdef __ANDROID_VNDK__
diff --git a/camera/ndk/impl/ACameraDevice.cpp b/camera/ndk/impl/ACameraDevice.cpp
index 1fa71f4..aed740f 100644
--- a/camera/ndk/impl/ACameraDevice.cpp
+++ b/camera/ndk/impl/ACameraDevice.cpp
@@ -26,6 +26,9 @@
#include "ACameraMetadata.h"
#include "ACaptureRequest.h"
#include "ACameraCaptureSession.h"
+#include <com_android_internal_camera_flags.h>
+
+namespace flags = com::android::internal::camera::flags;
ACameraDevice::~ACameraDevice() {
mDevice->stopLooperAndDisconnect();
@@ -57,12 +60,13 @@
const char* id,
ACameraDevice_StateCallbacks* cb,
sp<ACameraMetadata> chars,
- ACameraDevice* wrapper) :
+ ACameraDevice* wrapper, bool sharedMode) :
mCameraId(id),
mAppCallbacks(*cb),
mChars(chars),
mServiceCallback(new ServiceCallback(this)),
mWrapper(wrapper),
+ mSharedMode(sharedMode),
mInError(false),
mError(ACAMERA_OK),
mIdle(true),
@@ -970,6 +974,7 @@
case kWhatCaptureSeqAbort:
case kWhatCaptureBufferLost:
case kWhatPreparedCb:
+ case kWhatClientSharedAccessPriorityChanged:
ALOGV("%s: Received msg %d", __FUNCTION__, msg->what());
break;
case kWhatCleanUpSessions:
@@ -1007,6 +1012,32 @@
(*onDisconnected)(context, dev);
break;
}
+
+ case kWhatClientSharedAccessPriorityChanged:
+ {
+ if (!flags::camera_multi_client()) {
+ break;
+ }
+ ACameraDevice* dev;
+ found = msg->findPointer(kDeviceKey, (void**) &dev);
+ if (!found || dev == nullptr) {
+ ALOGE("%s: Cannot find device pointer!", __FUNCTION__);
+ return;
+ }
+ ACameraDevice_ClientSharedAccessPriorityChangedCallback
+ onClientSharedAccessPriorityChanged;
+ found = msg->findPointer(kCallbackFpKey, (void**) &onClientSharedAccessPriorityChanged);
+ if (!found) {
+ ALOGE("%s: Cannot find onClientSharedAccessPriorityChanged!", __FUNCTION__);
+ return;
+ }
+ if (onClientSharedAccessPriorityChanged == nullptr) {
+ return;
+ }
+ (*onClientSharedAccessPriorityChanged)(context, dev, dev->isPrimaryClient());
+ break;
+ }
+
case kWhatOnError:
{
ACameraDevice* dev;
@@ -1624,6 +1655,31 @@
}
binder::Status
+CameraDevice::ServiceCallback::onClientSharedAccessPriorityChanged(bool primaryClient) {
+ ALOGV("onClientSharedAccessPriorityChanged received. primaryClient = %d", primaryClient);
+ binder::Status ret = binder::Status::ok();
+ if (!flags::camera_multi_client()) {
+ return ret;
+ }
+ sp<CameraDevice> dev = mDevice.promote();
+ if (dev == nullptr) {
+ return ret; // device has been closed
+ }
+ Mutex::Autolock _l(dev->mDeviceLock);
+ if (dev->isClosed() || dev->mRemote == nullptr) {
+ return ret;
+ }
+ dev->setPrimaryClient(primaryClient);
+ sp<AMessage> msg = new AMessage(kWhatClientSharedAccessPriorityChanged, dev->mHandler);
+ msg->setPointer(kContextKey, dev->mAppCallbacks.context);
+ msg->setPointer(kDeviceKey, (void*) dev->getWrapper());
+ msg->setPointer(kCallbackFpKey, (void*) dev->mAppCallbacks.onClientSharedAccessPriorityChanged);
+ msg->post();
+
+ return binder::Status::ok();
+}
+
+binder::Status
CameraDevice::ServiceCallback::onDeviceIdle() {
ALOGV("Camera is now idle");
binder::Status ret = binder::Status::ok();
diff --git a/camera/ndk/impl/ACameraDevice.h b/camera/ndk/impl/ACameraDevice.h
index 2b9f327..d3aed4b 100644
--- a/camera/ndk/impl/ACameraDevice.h
+++ b/camera/ndk/impl/ACameraDevice.h
@@ -63,7 +63,7 @@
public:
CameraDevice(const char* id, ACameraDevice_StateCallbacks* cb,
sp<ACameraMetadata> chars,
- ACameraDevice* wrapper);
+ ACameraDevice* wrapper, bool sharedMode);
~CameraDevice();
inline const char* getId() const { return mCameraId.c_str(); }
@@ -98,6 +98,7 @@
binder::Status onRequestQueueEmpty() override;
binder::Status onRepeatingRequestError(int64_t lastFrameNumber,
int32_t stoppedSequenceId) override;
+ binder::Status onClientSharedAccessPriorityChanged(bool isPrimaryClient) override;
private:
const wp<CameraDevice> mDevice;
};
@@ -113,6 +114,9 @@
// Stop the looper thread and unregister the handler
void stopLooperAndDisconnect();
+ void setPrimaryClient(bool isPrimary) {mIsPrimaryClient = isPrimary;};
+ bool isPrimaryClient() {return mIsPrimaryClient;};
+
private:
friend ACameraCaptureSession;
camera_status_t checkCameraClosedOrErrorLocked() const;
@@ -186,6 +190,8 @@
const sp<ACameraMetadata> mChars; // Camera characteristics
const sp<ServiceCallback> mServiceCallback;
ACameraDevice* mWrapper;
+ bool mSharedMode;
+ bool mIsPrimaryClient;
// stream id -> pair of (ANW* from application, OutputConfiguration used for camera service)
std::map<int, std::pair<ANativeWindow*, OutputConfiguration>> mConfiguredOutputs;
@@ -227,7 +233,8 @@
kWhatCaptureBufferLost, // onCaptureBufferLost
kWhatPreparedCb, // onWindowPrepared
// Internal cleanup
- kWhatCleanUpSessions // Cleanup cached sp<ACameraCaptureSession>
+ kWhatCleanUpSessions, // Cleanup cached sp<ACameraCaptureSession>
+ kWhatClientSharedAccessPriorityChanged
};
static const char* kContextKey;
static const char* kDeviceKey;
@@ -403,8 +410,8 @@
*/
struct ACameraDevice {
ACameraDevice(const char* id, ACameraDevice_StateCallbacks* cb,
- sp<ACameraMetadata> chars) :
- mDevice(new android::acam::CameraDevice(id, cb, chars, this)) {}
+ sp<ACameraMetadata> chars, bool sharedMode) :
+ mDevice(new android::acam::CameraDevice(id, cb, chars, this, sharedMode)) {}
~ACameraDevice();
@@ -445,6 +452,14 @@
mDevice->setRemoteDevice(remote);
}
+ inline void setPrimaryClient(bool isPrimary) {
+ mDevice->setPrimaryClient(isPrimary);
+ }
+
+ inline bool isPrimaryClient() {
+ return mDevice->isPrimaryClient();
+ }
+
private:
android::sp<android::acam::CameraDevice> mDevice;
};
diff --git a/camera/ndk/impl/ACameraManager.cpp b/camera/ndk/impl/ACameraManager.cpp
index 6d29ef5..f9c1a8a 100644
--- a/camera/ndk/impl/ACameraManager.cpp
+++ b/camera/ndk/impl/ACameraManager.cpp
@@ -28,9 +28,11 @@
#include <memory>
#include "ACameraDevice.h"
#include "ACameraMetadata.h"
+#include <com_android_internal_camera_flags.h>
using namespace android::acam;
namespace vd_flags = android::companion::virtualdevice::flags;
+namespace flags = com::android::internal::camera::flags;
namespace android {
namespace acam {
@@ -170,97 +172,78 @@
}
sp<hardware::ICameraService> CameraManagerGlobal::getCameraServiceLocked() {
- if (mCameraService.get() == nullptr) {
- if (CameraUtils::isCameraServiceDisabled()) {
- return mCameraService;
- }
+ if (mCameraService.get() != nullptr) {
+ return mCameraService;
+ }
+ if (CameraUtils::isCameraServiceDisabled()) {
+ return mCameraService;
+ }
- sp<IServiceManager> sm = defaultServiceManager();
- sp<IBinder> binder;
- binder = sm->checkService(String16(kCameraServiceName));
- if (binder == nullptr) {
- ALOGE("%s: Could not get CameraService instance.", __FUNCTION__);
+ sp<IServiceManager> sm = defaultServiceManager();
+ sp<IBinder> binder;
+ binder = sm->checkService(String16(kCameraServiceName));
+ if (binder == nullptr) {
+ ALOGE("%s: Could not get CameraService instance.", __FUNCTION__);
+ return nullptr;
+ }
+ sp<hardware::ICameraService> cameraService = interface_cast<hardware::ICameraService>(binder);
+ if (mDeathNotifier == nullptr) {
+ mDeathNotifier = new DeathNotifier(this);
+ binder->linkToDeath(mDeathNotifier);
+ }
+
+ // Setup looper thread to perform availability callbacks
+ if (mCbLooper == nullptr) {
+ mCbLooper = new ALooper;
+ mCbLooper->setName("C2N-mgr-looper");
+ status_t err = mCbLooper->start(
+ /*runOnCallingThread*/false,
+ /*canCallJava*/ true,
+ PRIORITY_DEFAULT);
+ if (err != OK) {
+ ALOGE("%s: Unable to start camera service listener looper: %s (%d)",
+ __FUNCTION__, strerror(-err), err);
+ mCbLooper.clear();
return nullptr;
}
- if (mDeathNotifier == nullptr) {
- mDeathNotifier = new DeathNotifier(this);
+ if (mHandler == nullptr) {
+ mHandler = new CallbackHandler(this);
}
- binder->linkToDeath(mDeathNotifier);
- mCameraService = interface_cast<hardware::ICameraService>(binder);
+ mCbLooper->registerHandler(mHandler);
+ }
- // Setup looper thread to perfrom availiability callbacks
- if (mCbLooper == nullptr) {
- mCbLooper = new ALooper;
- mCbLooper->setName("C2N-mgr-looper");
- status_t err = mCbLooper->start(
- /*runOnCallingThread*/false,
- /*canCallJava*/ true,
- PRIORITY_DEFAULT);
- if (err != OK) {
- ALOGE("%s: Unable to start camera service listener looper: %s (%d)",
- __FUNCTION__, strerror(-err), err);
- mCbLooper.clear();
- return nullptr;
- }
- if (mHandler == nullptr) {
- mHandler = new CallbackHandler(this);
- }
- mCbLooper->registerHandler(mHandler);
+ // register ICameraServiceListener
+ std::vector<hardware::CameraStatus> cameraStatuses{};
+ if (mCameraServiceListener == nullptr) {
+ mCameraServiceListener = new CameraServiceListener(this);
+ cameraService->addListener(mCameraServiceListener, &cameraStatuses);
+ }
+
+ for (auto& c : cameraStatuses) {
+ onStatusChangedLocked(c.status, c.deviceId, c.cameraId);
+
+ for (auto& unavailablePhysicalId : c.unavailablePhysicalIds) {
+ onStatusChangedLocked(hardware::ICameraServiceListener::STATUS_NOT_PRESENT,
+ c.deviceId, c.cameraId, unavailablePhysicalId);
}
+ }
+ // setup vendor tags
+ if (!setupVendorTags(cameraService)) {
+ ALOGE("%s: Vendor tag descriptor cache couldn't be set up", __FUNCTION__);
+ return nullptr;
+ }
- // register ICameraServiceListener
- if (mCameraServiceListener == nullptr) {
- mCameraServiceListener = new CameraServiceListener(this);
- }
- std::vector<hardware::CameraStatus> cameraStatuses{};
- mCameraService->addListener(mCameraServiceListener, &cameraStatuses);
- for (auto& c : cameraStatuses) {
- onStatusChangedLocked(c.status, c.deviceId, c.cameraId);
+ mCameraService = cameraService;
+ ALOGE_IF(mCameraService == nullptr, "no CameraService!?");
+ return mCameraService;
+}
- for (auto& unavailablePhysicalId : c.unavailablePhysicalIds) {
- onStatusChangedLocked(hardware::ICameraServiceListener::STATUS_NOT_PRESENT,
- c.deviceId, c.cameraId, unavailablePhysicalId);
- }
- }
-
- // setup vendor tags
- sp<VendorTagDescriptor> desc = new VendorTagDescriptor();
- binder::Status ret = mCameraService->getCameraVendorTagDescriptor(/*out*/desc.get());
-
- if (ret.isOk()) {
- if (0 < desc->getTagCount()) {
- status_t err = VendorTagDescriptor::setAsGlobalVendorTagDescriptor(desc);
- if (err != OK) {
- ALOGE("%s: Failed to set vendor tag descriptors, received error %s (%d)",
- __FUNCTION__, strerror(-err), err);
- }
- } else {
- sp<VendorTagDescriptorCache> cache =
- new VendorTagDescriptorCache();
- binder::Status res =
- mCameraService->getCameraVendorTagCache(
- /*out*/cache.get());
- if (res.serviceSpecificErrorCode() ==
- hardware::ICameraService::ERROR_DISCONNECTED) {
- // No camera module available, not an error on devices with no cameras
- VendorTagDescriptorCache::clearGlobalVendorTagCache();
- } else if (res.isOk()) {
- status_t err =
- VendorTagDescriptorCache::setAsGlobalVendorTagCache(
- cache);
- if (err != OK) {
- ALOGE("%s: Failed to set vendor tag cache,"
- "received error %s (%d)", __FUNCTION__,
- strerror(-err), err);
- }
- } else {
- VendorTagDescriptorCache::clearGlobalVendorTagCache();
- ALOGE("%s: Failed to setup vendor tag cache: %s",
- __FUNCTION__, res.toString8().c_str());
- }
- }
- } else if (ret.serviceSpecificErrorCode() ==
- hardware::ICameraService::ERROR_DEPRECATED_HAL) {
+bool CameraManagerGlobal::setupVendorTags(sp<hardware::ICameraService> &cameraService) {
+ sp<VendorTagDescriptor> desc = new VendorTagDescriptor();
+ binder::Status ret = cameraService->getCameraVendorTagDescriptor(/*out*/desc.get());
+ if (!ret.isOk()) {
+ if (ret.serviceSpecificErrorCode() ==
+ hardware::ICameraService::ERROR_DEPRECATED_HAL) {
ALOGW("%s: Camera HAL too old; does not support vendor tags",
__FUNCTION__);
VendorTagDescriptor::clearGlobalVendorTagDescriptor();
@@ -268,9 +251,45 @@
ALOGE("%s: Failed to get vendor tag descriptors: %s",
__FUNCTION__, ret.toString8().c_str());
}
+ return false;
}
- ALOGE_IF(mCameraService == nullptr, "no CameraService!?");
- return mCameraService;
+
+ if (0 < desc->getTagCount()) {
+ status_t err = VendorTagDescriptor::setAsGlobalVendorTagDescriptor(desc);
+ if (err != OK) {
+ ALOGE("%s: Failed to set vendor tag descriptors, received error %s (%d)",
+ __FUNCTION__, strerror(-err), err);
+ return false;
+ }
+ } else {
+ sp<VendorTagDescriptorCache> cache =
+ new VendorTagDescriptorCache();
+ binder::Status res =
+ cameraService->getCameraVendorTagCache(
+ /*out*/cache.get());
+ if (res.serviceSpecificErrorCode() ==
+ hardware::ICameraService::ERROR_DISCONNECTED) {
+ // No camera module available, not an error on devices with no cameras
+ VendorTagDescriptorCache::clearGlobalVendorTagCache();
+ } else if (res.isOk()) {
+ status_t err =
+ VendorTagDescriptorCache::setAsGlobalVendorTagCache(
+ cache);
+ if (err != OK) {
+ ALOGE("%s: Failed to set vendor tag cache,"
+ "received error %s (%d)", __FUNCTION__,
+ strerror(-err), err);
+ return false;
+ }
+ } else {
+ VendorTagDescriptorCache::clearGlobalVendorTagCache();
+ ALOGE("%s: Failed to setup vendor tag cache: %s",
+ __FUNCTION__, res.toString8().c_str());
+ return false;
+ }
+ }
+
+ return true;
}
void CameraManagerGlobal::DeathNotifier::binderDied(const wp<IBinder>&)
@@ -290,6 +309,8 @@
key.cameraId);
}
cm->mCameraService.clear();
+ cm->mCameraServiceListener.clear();
+ cm->mDeathNotifier.clear();
// TODO: consider adding re-connect call here?
}
}
@@ -398,6 +419,9 @@
bool CameraManagerGlobal::supportsCamera2ApiLocked(const std::string &cameraId) {
bool camera2Support = false;
auto cs = getCameraServiceLocked();
+ if (cs == nullptr) {
+ return false;
+ }
binder::Status serviceRet =
cs->supportsCameraApi(cameraId,
hardware::ICameraService::API_VERSION_2, &camera2Support);
@@ -838,10 +862,36 @@
}
camera_status_t
-ACameraManager::openCamera(
+ACameraManager::isCameraDeviceSharingSupported(
const char* cameraId,
+ /*out*/bool* isSharingSupported) {
+ if (!flags::camera_multi_client()) {
+ return ACAMERA_ERROR_UNSUPPORTED_OPERATION;
+ }
+ sp<ACameraMetadata> spChars;
+ camera_status_t ret = getCameraCharacteristics(cameraId, &spChars);
+ if (ret != ACAMERA_OK) {
+ ALOGE("%s: cannot get camera characteristics for camera %s. err %d",
+ __FUNCTION__, cameraId, ret);
+ return ret;
+ }
+
+ ACameraMetadata* chars = spChars.get();
+ ACameraMetadata_const_entry entry;
+ ret = ACameraMetadata_getConstEntry(chars, ANDROID_SHARED_SESSION_OUTPUT_CONFIGURATIONS,
+ &entry);
+ if (ret != ACAMERA_OK) {
+ return ret;
+ }
+ *isSharingSupported = (entry.count > 0) ? true : false;
+ return ACAMERA_OK;
+}
+
+camera_status_t
+ACameraManager::openCamera(
+ const char* cameraId, bool sharedMode,
ACameraDevice_StateCallbacks* callback,
- /*out*/ACameraDevice** outDevice) {
+ /*out*/ACameraDevice** outDevice, /*out*/bool* primaryClient) {
sp<ACameraMetadata> chars;
camera_status_t ret = getCameraCharacteristics(cameraId, &chars);
Mutex::Autolock _l(mLock);
@@ -851,7 +901,7 @@
return ACAMERA_ERROR_INVALID_PARAMETER;
}
- ACameraDevice* device = new ACameraDevice(cameraId, callback, chars);
+ ACameraDevice* device = new ACameraDevice(cameraId, callback, chars, sharedMode);
sp<hardware::ICameraService> cs = mGlobalManager->getCameraService();
if (cs == nullptr) {
@@ -870,13 +920,14 @@
clientAttribution.deviceId = mDeviceContext.deviceId;
clientAttribution.packageName = "";
clientAttribution.attributionTag = std::nullopt;
+ clientAttribution.token = sp<BBinder>::make();
// No way to get package name from native.
// Send a zero length package name and let camera service figure it out from UID
binder::Status serviceRet = cs->connectDevice(
callbacks, cameraId, /*oomScoreOffset*/0,
targetSdkVersion, /*rotationOverride*/hardware::ICameraService::ROTATION_OVERRIDE_NONE,
- clientAttribution, static_cast<int32_t>(mDeviceContext.policy),
+ clientAttribution, static_cast<int32_t>(mDeviceContext.policy), sharedMode,
/*out*/&deviceRemote);
if (!serviceRet.isOk()) {
@@ -920,6 +971,14 @@
return ACAMERA_ERROR_CAMERA_DISCONNECTED;
}
device->setRemoteDevice(deviceRemote);
+ if (flags::camera_multi_client() && sharedMode) {
+ binder::Status remoteRet = deviceRemote->isPrimaryClient(primaryClient);
+ if (!remoteRet.isOk()) {
+ delete device;
+ return ACAMERA_ERROR_UNKNOWN;
+ }
+ device->setPrimaryClient(*primaryClient);
+ }
*outDevice = device;
return ACAMERA_OK;
}
diff --git a/camera/ndk/impl/ACameraManager.h b/camera/ndk/impl/ACameraManager.h
index f4124ef..fffe037 100644
--- a/camera/ndk/impl/ACameraManager.h
+++ b/camera/ndk/impl/ACameraManager.h
@@ -105,6 +105,8 @@
template <class T>
void registerAvailCallback(const DeviceContext& deviceContext, const T* callback);
+ bool setupVendorTags(sp<hardware::ICameraService> &cameraService);
+
class DeathNotifier : public IBinder::DeathRecipient {
public:
explicit DeathNotifier(CameraManagerGlobal* cm) : mCameraManager(cm) {}
@@ -136,6 +138,10 @@
virtual binder::Status onCameraOpened(const std::string&, const std::string&, int32_t) {
return binder::Status::ok();
}
+ virtual binder::Status onCameraOpenedInSharedMode(const std::string&, const std::string&,
+ int32_t, bool) {
+ return binder::Status::ok();
+ }
virtual binder::Status onCameraClosed(const std::string&, int32_t) {
return binder::Status::ok();
}
@@ -325,16 +331,17 @@
camera_status_t getCameraCharacteristics(
const char* cameraId, android::sp<ACameraMetadata>* characteristics);
- camera_status_t openCamera(const char* cameraId,
+ camera_status_t openCamera(const char* cameraId, bool sharedMode,
ACameraDevice_StateCallbacks* callback,
- /*out*/ACameraDevice** device);
+ /*out*/ACameraDevice** device, /*out*/bool* primaryClient);
void registerAvailabilityCallback(const ACameraManager_AvailabilityCallbacks* callback);
void unregisterAvailabilityCallback(const ACameraManager_AvailabilityCallbacks* callback);
void registerExtendedAvailabilityCallback(
const ACameraManager_ExtendedAvailabilityCallbacks* callback);
void unregisterExtendedAvailabilityCallback(
const ACameraManager_ExtendedAvailabilityCallbacks* callback);
-
+ camera_status_t isCameraDeviceSharingSupported(
+ const char* cameraId, bool* isSharingSupported);
private:
enum {
kCameraIdListNotInit = -1
diff --git a/camera/ndk/impl/ACameraMetadata.cpp b/camera/ndk/impl/ACameraMetadata.cpp
index 69b30f7..32e2f3d 100644
--- a/camera/ndk/impl/ACameraMetadata.cpp
+++ b/camera/ndk/impl/ACameraMetadata.cpp
@@ -536,6 +536,8 @@
case ACAMERA_COLOR_CORRECTION_TRANSFORM:
case ACAMERA_COLOR_CORRECTION_GAINS:
case ACAMERA_COLOR_CORRECTION_ABERRATION_MODE:
+ case ACAMERA_COLOR_CORRECTION_COLOR_TEMPERATURE:
+ case ACAMERA_COLOR_CORRECTION_COLOR_TINT:
case ACAMERA_CONTROL_AE_ANTIBANDING_MODE:
case ACAMERA_CONTROL_AE_EXPOSURE_COMPENSATION:
case ACAMERA_CONTROL_AE_LOCK:
@@ -560,6 +562,8 @@
case ACAMERA_CONTROL_ZOOM_RATIO:
case ACAMERA_CONTROL_SETTINGS_OVERRIDE:
case ACAMERA_CONTROL_AUTOFRAMING:
+ case ACAMERA_CONTROL_ZOOM_METHOD:
+ case ACAMERA_CONTROL_AE_PRIORITY_MODE:
case ACAMERA_EDGE_MODE:
case ACAMERA_FLASH_MODE:
case ACAMERA_FLASH_STRENGTH_LEVEL:
diff --git a/camera/ndk/include/camera/NdkCameraCaptureSession.h b/camera/ndk/include/camera/NdkCameraCaptureSession.h
index 1400121..e73222b 100644
--- a/camera/ndk/include/camera/NdkCameraCaptureSession.h
+++ b/camera/ndk/include/camera/NdkCameraCaptureSession.h
@@ -1099,6 +1099,92 @@
camera_status_t ACameraCaptureSession_prepareWindow(
ACameraCaptureSession* session,
ANativeWindow *window) __INTRODUCED_IN(34);
+
+/**
+ * Request continuous streaming of a sequence of images for the shared capture session
+ * when more than one client can open the same camera in shared mode by calling
+ * {@link ACameraManager_openSharedCamera}. In shared session, only primary clients can create
+ * a capture request and change capture parameters. Secondary clients can only request streaming of
+ * images by calling this API {@link ACameraCaptureSessionShared_startStreaming}. Calling this API
+ * for normal sessions when {@link ACameraManager_openCamera} is used to open the camera will return
+ * {@link ACAMERA_ERROR_INVALID_OPERATION}.
+ *
+ * <p>With this method, the camera device will continually capture images, cycling through the
+ * settings in the list of {@link ACaptureRequest} specified by the primary client. If primary
+ * client does not have ongoing repeating request, camera service will use a capture request with
+ * default capture parameters for preview template.</p>
+ *
+ * <p>To stop the continuous streaming, call {@link ACameraCaptureSessionShared_stopStreaming}.</p>
+ *
+ * <p>Calling this method will replace an existing continuous streaming request.</p>
+ *
+ * @param sharedSession the shared capture session when camera is opened in
+ * shared mode.
+ * @param callbacks the {@link ACameraCaptureSession_captureCallbacks} to be associated with this
+ * capture sequence. No capture callback will be fired if callbacks is set to NULL.
+ * @param numOutputWindows number of native windows to be used for streaming. Must be at least 1.
+ * @param windows an array of {@link ANativeWindow} to be used for streaming. Length must be at
+ * least numOutputWindows.
+ * @param captureSequenceId the capture sequence ID associated with this capture method invocation
+ * will be stored here if this argument is not NULL and the method call succeeds.
+ * When this argument is set to NULL, the capture sequence ID will not be returned.
+ *
+ * @return <ul>
+ * <li>{@link ACAMERA_OK} if the method succeeds. captureSequenceId will be filled
+ * if it is not NULL.</li>
+ * <li>{@link ACAMERA_ERROR_INVALID_PARAMETER} if sharedSession or windows is NULL, or
+ * if numOutputWindows < 1</li>
+ * <li>{@link ACAMERA_ERROR_SESSION_CLOSED} if the capture session has been closed</li>
+ * <li>{@link ACAMERA_ERROR_CAMERA_DISCONNECTED} if the camera device is closed</li>
+ * <li>{@link ACAMERA_ERROR_CAMERA_DEVICE} if the camera device encounters fatal error</li>
+ * <li>{@link ACAMERA_ERROR_CAMERA_SERVICE} if the camera service encounters fatal error
+ * </li>
+ * <li>{@link ACAMERA_ERROR_INVALID_OPERATION} if the session passed is not a shared
+ * session</li>
+ * <li>{@link ACAMERA_ERROR_UNKNOWN} if the method fails for some other reasons</li>
+ * </ul>
+ */
+camera_status_t ACameraCaptureSessionShared_startStreaming(
+ ACameraCaptureSession* sharedSession, ACameraCaptureSession_captureCallbacksV2 *callbacks,
+ int numOutputWindows, ANativeWindow **window,
+ int *captureSequenceId) __INTRODUCED_IN(36);
+
+/**
+ * This has the same functionality as ACameraCaptureSessionShared_startStreaming, with added
+ * support for logical multi-camera where the capture callbacks supports result metadata for
+ * physical cameras.
+ */
+camera_status_t ACameraCaptureSessionShared_logicalCamera_startStreaming(
+ ACameraCaptureSession* sharedSession,
+ ACameraCaptureSession_logicalCamera_captureCallbacksV2 *callbacks,
+ int numOutputWindows, ANativeWindow **windows,
+ int *captureSequenceId) __INTRODUCED_IN(36);
+
+/**
+ * Cancel any ongoing streaming started by {@link ACameraCaptureSessionShared_startStreaming}.
+ * Calling this api does not effect any streaming requests submitted by other clients who have
+ * opened the camera in shared mode. Calling this api for normal sessions when
+ * {@link ACameraManager_openCamera} is used to open the camera will throw
+ * {@link ACAMERA_ERROR_INVALID_OPERATION}.
+ *
+ * @param sharedSession the capture session of interest
+ *
+ * @return <ul>
+ * <li>{@link ACAMERA_OK} if the method succeeds.</li>
+ * <li>{@link ACAMERA_ERROR_INVALID_PARAMETER} if session is NULL.</li>
+ * <li>{@link ACAMERA_ERROR_SESSION_CLOSED} if the capture session has been closed</li>
+ * <li>{@link ACAMERA_ERROR_CAMERA_DISCONNECTED} if the camera device is closed</li>
+ * <li>{@link ACAMERA_ERROR_CAMERA_DEVICE} if the camera device encounters fatal error</li>
+ * <li>{@link ACAMERA_ERROR_CAMERA_SERVICE} if the camera service encounters fatal error
+ * </li>
+ * <li>{@link ACAMERA_ERROR_INVALID_OPERATION} if the session passed is not a shared
+ * session</li>
+ * <li>{@link ACAMERA_ERROR_UNKNOWN} if the method fails for some other reasons</li>
+ * </ul>
+ */
+camera_status_t ACameraCaptureSessionShared_stopStreaming(
+ ACameraCaptureSession *sharedSession
+) __INTRODUCED_IN(36);
__END_DECLS
#endif /* _NDK_CAMERA_CAPTURE_SESSION_H */
diff --git a/camera/ndk/include/camera/NdkCameraDevice.h b/camera/ndk/include/camera/NdkCameraDevice.h
index fbd0ee1..8c7eb1f 100644
--- a/camera/ndk/include/camera/NdkCameraDevice.h
+++ b/camera/ndk/include/camera/NdkCameraDevice.h
@@ -125,6 +125,18 @@
typedef void (*ACameraDevice_ErrorStateCallback)(void* context, ACameraDevice* device, int error);
/**
+ * Client access priorities changed callbacks to be used in {@link ACameraDevice_StateCallbacks}
+ * when camera is opened in shared mode.
+ *
+ * @param context The optional context in {@link ACameraDevice_StateCallbacks} will be passed to
+ * this callback.
+ * @param device The {@link ACameraDevice} whose access priority has changed.
+ * @param isPrimaryClient whether the client is the primary client.
+ */
+typedef void (*ACameraDevice_ClientSharedAccessPriorityChangedCallback)(void* context,
+ ACameraDevice* device, bool isPrimaryClient);
+
+/**
* Applications' callbacks for camera device state changes, register with
* {@link ACameraManager_openCamera}.
*/
@@ -163,6 +175,17 @@
*
*/
ACameraDevice_ErrorStateCallback onError;
+
+ /**
+ * Notify registered clients about client shared access priority changes when the camera device
+ * has been opened in shared mode.
+ *
+ * If the client priority changes from secondary to primary, then it can now
+ * create capture request and change the capture request parameters. If client priority
+ * changes from primary to secondary, that implies that another higher priority client is also
+ * accessing the camera in shared mode and is now the primary client.
+ */
+ ACameraDevice_ClientSharedAccessPriorityChangedCallback onClientSharedAccessPriorityChanged;
} ACameraDevice_StateCallbacks;
/**
@@ -671,7 +694,9 @@
* <li>{@link ACAMERA_OK} if the method call succeeds. The created capture session will be
* filled in session argument.</li>
* <li>{@link ACAMERA_ERROR_INVALID_PARAMETER} if any of device, outputs, callbacks or
- * session is NULL.</li>
+ * session is NULL or if the outputs does not match the predefined
+ * shared session configuration when camera is opened in shared mode.
+ * </li>
* <li>{@link ACAMERA_ERROR_CAMERA_DISCONNECTED} if the camera device is closed.</li>
* <li>{@link ACAMERA_ERROR_CAMERA_DEVICE} if the camera device encounters fatal error.</li>
* <li>{@link ACAMERA_ERROR_CAMERA_SERVICE} if the camera service encounters fatal error.</li>
diff --git a/camera/ndk/include/camera/NdkCameraManager.h b/camera/ndk/include/camera/NdkCameraManager.h
index b4f3bf1..a9b0174 100644
--- a/camera/ndk/include/camera/NdkCameraManager.h
+++ b/camera/ndk/include/camera/NdkCameraManager.h
@@ -293,6 +293,46 @@
/*out*/ACameraDevice** device) __INTRODUCED_IN(24);
/**
+ * Open a shared connection to a camera with the given ID. The opened camera device will be
+ * returned in the `device` parameter. The behavior of this method matches that of
+ * {@link ACameraManager_openCamera(ACameraManager*, const char*, ACameraDevice_StateCallbacks*,
+ * ACameraDevice**)} except that it opens the camera in shared mode so that more
+ * than one client can access the camera at the same time.
+ *
+ * <p>Processes need to have android.permission.SYSTEM_CAMERA in addition to
+ * android.permission.CAMERA in order to connect to this camera device in shared
+ * mode.</p>
+ *
+ * @param manager the {@link ACameraManager} of interest.
+ * @param cameraId the ID string of the camera device to be opened.
+ * @param callback the {@link ACameraDevice_StateCallbacks} associated with the opened camera
+ * device.
+ * @param device the opened {@link ACameraDevice} will be filled here if the method call succeeds.
+ * @param isPrimaryClient will be set to true if the client is the primary client.
+ *
+ * @return <ul>
+ * <li>{@link ACAMERA_OK} if the method call succeeds.</li>
+ * <li>{@link ACAMERA_ERROR_INVALID_PARAMETER} if manager, cameraId, callback, or device
+ * is NULL, or cameraId does not match any camera devices connected.</li>
+ * <li>{@link ACAMERA_ERROR_CAMERA_DISCONNECTED} if connection to camera service fails.</li>
+ * <li>{@link ACAMERA_ERROR_NOT_ENOUGH_MEMORY} if allocating memory fails.</li>
+ * <li>{@link ACAMERA_ERROR_CAMERA_IN_USE} if camera device is being used by a higher
+ * priority camera API client.</li>
+ * <li>{@link ACAMERA_ERROR_MAX_CAMERA_IN_USE} if the system-wide limit for number of open
+ * cameras or camera resources has been reached, and more camera devices cannot be
+ * opened until previous instances are closed.</li>
+ * <li>{@link ACAMERA_ERROR_CAMERA_DISABLED} if the camera is disabled due to a device
+ * policy, and cannot be opened.</li>
+ * <li>{@link ACAMERA_ERROR_PERMISSION_DENIED} if the application does not have permission
+ * to open camera.</li>
+ * <li>{@link ACAMERA_ERROR_UNKNOWN} if the method fails for some other reasons.</li></ul>
+ */
+camera_status_t ACameraManager_openSharedCamera(
+ ACameraManager* manager, const char* cameraId,
+ ACameraDevice_StateCallbacks* callback,
+ /*out*/ACameraDevice** device,/*out*/bool* isPrimaryClient) __INTRODUCED_IN(36);
+
+/**
* Definition of camera access permission change callback.
*
* <p>Notification that camera access priorities have changed and the camera may
@@ -397,6 +437,27 @@
ACameraManager* manager,
const ACameraManager_ExtendedAvailabilityCallbacks* callback) __INTRODUCED_IN(29);
+
+/**
+ * Checks if a camera can be opened in shared mode by multiple clients.
+ *
+ * @param manager the {@link ACameraManager} of interest.
+ * @param cameraId the ID string of the camera device of interest.
+ * @param isSharingSupported output will be filled here if the method succeeds.
+ * This will be true if camera can be opened in shared mode, false
+ * otherwise.
+ *
+ * @return <ul>
+ * <li>{@link ACAMERA_OK} if the method call succeeds.</li>
+ * <li>{@link ACAMERA_ERROR_INVALID_PARAMETER} if any parameter is not
+ * valid.</li>
+ * </ul>
+ */
+camera_status_t ACameraManager_isCameraDeviceSharingSupported(
+ ACameraManager *manager,
+ const char *cameraId,
+ bool *isSharingSupported) __INTRODUCED_IN(36);
+
#ifdef __ANDROID_VNDK__
/**
* Retrieve the tag value, given the tag name and camera id.
diff --git a/camera/ndk/include/camera/NdkCameraMetadataTags.h b/camera/ndk/include/camera/NdkCameraMetadataTags.h
index 1817490..fc6b932 100644
--- a/camera/ndk/include/camera/NdkCameraMetadataTags.h
+++ b/camera/ndk/include/camera/NdkCameraMetadataTags.h
@@ -91,6 +91,7 @@
ACAMERA_AUTOMOTIVE_LENS,
ACAMERA_EXTENSION,
ACAMERA_JPEGR,
+ ACAMERA_SHARED_SESSION,
ACAMERA_SECTION_COUNT,
ACAMERA_VENDOR = 0x8000
@@ -138,6 +139,7 @@
ACAMERA_AUTOMOTIVE_LENS_START = ACAMERA_AUTOMOTIVE_LENS << 16,
ACAMERA_EXTENSION_START = ACAMERA_EXTENSION << 16,
ACAMERA_JPEGR_START = ACAMERA_JPEGR << 16,
+ ACAMERA_SHARED_SESSION_START = ACAMERA_SHARED_SESSION << 16,
ACAMERA_VENDOR_START = ACAMERA_VENDOR << 16
} acamera_metadata_section_start_t;
@@ -307,6 +309,100 @@
*/
ACAMERA_COLOR_CORRECTION_AVAILABLE_ABERRATION_MODES = // byte[n]
ACAMERA_COLOR_CORRECTION_START + 4,
+ /**
+ * <p>Specifies the color temperature for CCT mode in Kelvin
+ * to adjust the white balance of the image.</p>
+ *
+ * <p>Type: int32</p>
+ *
+ * <p>This tag may appear in:
+ * <ul>
+ * <li>ACameraMetadata from ACameraCaptureSession_captureCallback_result callbacks</li>
+ * <li>ACaptureRequest</li>
+ * </ul></p>
+ *
+ * <p>Sets the color temperature in Kelvin units for when
+ * ACAMERA_COLOR_CORRECTION_MODE is CCT to adjust the
+ * white balance of the image.</p>
+ * <p>If CCT mode is enabled without a requested color temperature,
+ * a default value will be set by the camera device. The default value can be
+ * retrieved by checking the corresponding capture result. Color temperatures
+ * requested outside the advertised ACAMERA_COLOR_CORRECTION_COLOR_TEMPERATURE_RANGE
+ * will be clamped.</p>
+ *
+ * @see ACAMERA_COLOR_CORRECTION_COLOR_TEMPERATURE_RANGE
+ * @see ACAMERA_COLOR_CORRECTION_MODE
+ */
+ ACAMERA_COLOR_CORRECTION_COLOR_TEMPERATURE = // int32
+ ACAMERA_COLOR_CORRECTION_START + 5,
+ /**
+ * <p>Specifies the color tint for CCT mode to adjust the white
+ * balance of the image.</p>
+ *
+ * <p>Type: int32</p>
+ *
+ * <p>This tag may appear in:
+ * <ul>
+ * <li>ACameraMetadata from ACameraCaptureSession_captureCallback_result callbacks</li>
+ * <li>ACaptureRequest</li>
+ * </ul></p>
+ *
+ * <p>Sets the color tint for when ACAMERA_COLOR_CORRECTION_MODE
+ * is CCT to adjust the white balance of the image.</p>
+ * <p>If CCT mode is enabled without a requested color tint,
+ * a default value will be set by the camera device. The default value can be
+ * retrieved by checking the corresponding capture result. Color tints requested
+ * outside the supported range will be clamped to the nearest limit (-50 or +50).</p>
+ *
+ * @see ACAMERA_COLOR_CORRECTION_MODE
+ */
+ ACAMERA_COLOR_CORRECTION_COLOR_TINT = // int32
+ ACAMERA_COLOR_CORRECTION_START + 6,
+ /**
+ * <p>The range of supported color temperature values for
+ * ACAMERA_COLOR_CORRECTION_COLOR_TEMPERATURE.</p>
+ *
+ * @see ACAMERA_COLOR_CORRECTION_COLOR_TEMPERATURE
+ *
+ * <p>Type: int32[2]</p>
+ *
+ * <p>This tag may appear in:
+ * <ul>
+ * <li>ACameraMetadata from ACameraManager_getCameraCharacteristics</li>
+ * </ul></p>
+ *
+ * <p>This key lists the valid range of color temperature values for
+ * ACAMERA_COLOR_CORRECTION_COLOR_TEMPERATURE supported by this camera device.</p>
+ * <p>This key will be null on devices that do not support CCT mode for
+ * ACAMERA_COLOR_CORRECTION_MODE.</p>
+ *
+ * @see ACAMERA_COLOR_CORRECTION_COLOR_TEMPERATURE
+ * @see ACAMERA_COLOR_CORRECTION_MODE
+ */
+ ACAMERA_COLOR_CORRECTION_COLOR_TEMPERATURE_RANGE = // int32[2]
+ ACAMERA_COLOR_CORRECTION_START + 7,
+ /**
+ * <p>List of color correction modes for ACAMERA_COLOR_CORRECTION_MODE that are
+ * supported by this camera device.</p>
+ *
+ * @see ACAMERA_COLOR_CORRECTION_MODE
+ *
+ * <p>Type: byte[n]</p>
+ *
+ * <p>This tag may appear in:
+ * <ul>
+ * <li>ACameraMetadata from ACameraManager_getCameraCharacteristics</li>
+ * </ul></p>
+ *
+ * <p>This key lists the valid modes for ACAMERA_COLOR_CORRECTION_MODE. If no
+ * color correction modes are available for a device, this key will be null.</p>
+ * <p>Camera devices that have a FULL hardware level will always include at least
+ * FAST, HIGH_QUALITY, and TRANSFORM_MATRIX modes.</p>
+ *
+ * @see ACAMERA_COLOR_CORRECTION_MODE
+ */
+ ACAMERA_COLOR_CORRECTION_AVAILABLE_MODES = // byte[n]
+ ACAMERA_COLOR_CORRECTION_START + 8,
ACAMERA_COLOR_CORRECTION_END,
/**
@@ -469,7 +565,9 @@
* application's selected exposure time, sensor sensitivity,
* and frame duration (ACAMERA_SENSOR_EXPOSURE_TIME,
* ACAMERA_SENSOR_SENSITIVITY, and
- * ACAMERA_SENSOR_FRAME_DURATION). If one of the FLASH modes
+ * ACAMERA_SENSOR_FRAME_DURATION). If ACAMERA_CONTROL_AE_PRIORITY_MODE is
+ * enabled, the relevant priority CaptureRequest settings will not be overridden.
+ * See ACAMERA_CONTROL_AE_PRIORITY_MODE for more details. If one of the FLASH modes
* is selected, the camera device's flash unit controls are
* also overridden.</p>
* <p>The FLASH modes are only available if the camera device
@@ -480,10 +578,22 @@
* camera device auto-exposure routine for the overridden
* fields for a given capture will be available in its
* CaptureResult.</p>
+ * <p>When ACAMERA_CONTROL_AE_MODE is AE_MODE_ON and if the device
+ * supports manual flash strength control, i.e.,
+ * if ACAMERA_FLASH_SINGLE_STRENGTH_MAX_LEVEL and
+ * ACAMERA_FLASH_TORCH_STRENGTH_MAX_LEVEL are greater than 1, then
+ * the auto-exposure (AE) precapture metering sequence should be
+ * triggered to avoid the image being incorrectly exposed at
+ * different ACAMERA_FLASH_STRENGTH_LEVEL.</p>
*
+ * @see ACAMERA_CONTROL_AE_MODE
+ * @see ACAMERA_CONTROL_AE_PRIORITY_MODE
* @see ACAMERA_CONTROL_MODE
* @see ACAMERA_FLASH_INFO_AVAILABLE
* @see ACAMERA_FLASH_MODE
+ * @see ACAMERA_FLASH_SINGLE_STRENGTH_MAX_LEVEL
+ * @see ACAMERA_FLASH_STRENGTH_LEVEL
+ * @see ACAMERA_FLASH_TORCH_STRENGTH_MAX_LEVEL
* @see ACAMERA_SENSOR_EXPOSURE_TIME
* @see ACAMERA_SENSOR_FRAME_DURATION
* @see ACAMERA_SENSOR_SENSITIVITY
@@ -562,7 +672,7 @@
* <a href="https://developer.android.com/reference/android/hardware/camera2/CameraMetadata.html#REQUEST_AVAILABLE_CAPABILITIES_ULTRA_HIGH_RESOLUTION_SENSOR">CameraMetadata#REQUEST_AVAILABLE_CAPABILITIES_ULTRA_HIGH_RESOLUTION_SENSOR</a>
* capability or devices where
* <a href="https://developer.android.com/reference/CameraCharacteristics.html#getAvailableCaptureRequestKeys">CameraCharacteristics#getAvailableCaptureRequestKeys</a>
- * lists <a href="https://developer.android.com/reference/CaptureRequest.html#SENSOR_PIXEL_MODE">ACAMERA_SENSOR_PIXEL_MODE</a>
+ * lists ACAMERA_SENSOR_PIXEL_MODE,
* ACAMERA_SENSOR_INFO_ACTIVE_ARRAY_SIZE_MAXIMUM_RESOLUTION /
* ACAMERA_SENSOR_INFO_PRE_CORRECTION_ACTIVE_ARRAY_SIZE_MAXIMUM_RESOLUTION must be used as the
* coordinate system for requests where ACAMERA_SENSOR_PIXEL_MODE is set to
@@ -791,7 +901,7 @@
* <a href="https://developer.android.com/reference/android/hardware/camera2/CameraMetadata.html#REQUEST_AVAILABLE_CAPABILITIES_ULTRA_HIGH_RESOLUTION_SENSOR">CameraMetadata#REQUEST_AVAILABLE_CAPABILITIES_ULTRA_HIGH_RESOLUTION_SENSOR</a>
* capability or devices where
* <a href="https://developer.android.com/reference/CameraCharacteristics.html#getAvailableCaptureRequestKeys">CameraCharacteristics#getAvailableCaptureRequestKeys</a>
- * lists <a href="https://developer.android.com/reference/CaptureRequest.html#SENSOR_PIXEL_MODE">ACAMERA_SENSOR_PIXEL_MODE</a>,
+ * lists ACAMERA_SENSOR_PIXEL_MODE,
* ACAMERA_SENSOR_INFO_ACTIVE_ARRAY_SIZE_MAXIMUM_RESOLUTION /
* ACAMERA_SENSOR_INFO_PRE_CORRECTION_ACTIVE_ARRAY_SIZE_MAXIMUM_RESOLUTION must be used as the
* coordinate system for requests where ACAMERA_SENSOR_PIXEL_MODE is set to
@@ -997,7 +1107,7 @@
* <a href="https://developer.android.com/reference/android/hardware/camera2/CameraMetadata.html#REQUEST_AVAILABLE_CAPABILITIES_ULTRA_HIGH_RESOLUTION_SENSOR">CameraMetadata#REQUEST_AVAILABLE_CAPABILITIES_ULTRA_HIGH_RESOLUTION_SENSOR</a>
* capability or devices where
* <a href="https://developer.android.com/reference/CameraCharacteristics.html#getAvailableCaptureRequestKeys">CameraCharacteristics#getAvailableCaptureRequestKeys</a>
- * lists <a href="https://developer.android.com/reference/CaptureRequest.html#SENSOR_PIXEL_MODE">ACAMERA_SENSOR_PIXEL_MODE</a>,
+ * lists ACAMERA_SENSOR_PIXEL_MODE,
* ACAMERA_SENSOR_INFO_ACTIVE_ARRAY_SIZE_MAXIMUM_RESOLUTION /
* ACAMERA_SENSOR_INFO_PRE_CORRECTION_ACTIVE_ARRAY_SIZE_MAXIMUM_RESOLUTION must be used as the
* coordinate system for requests where ACAMERA_SENSOR_PIXEL_MODE is set to
@@ -2296,6 +2406,95 @@
*/
ACAMERA_CONTROL_LOW_LIGHT_BOOST_STATE = // byte (acamera_metadata_enum_android_control_low_light_boost_state_t)
ACAMERA_CONTROL_START + 59,
+ /**
+ * <p>Whether the application uses ACAMERA_SCALER_CROP_REGION or ACAMERA_CONTROL_ZOOM_RATIO
+ * to control zoom levels.</p>
+ *
+ * @see ACAMERA_CONTROL_ZOOM_RATIO
+ * @see ACAMERA_SCALER_CROP_REGION
+ *
+ * <p>Type: byte (acamera_metadata_enum_android_control_zoom_method_t)</p>
+ *
+ * <p>This tag may appear in:
+ * <ul>
+ * <li>ACameraMetadata from ACameraCaptureSession_captureCallback_result callbacks</li>
+ * <li>ACaptureRequest</li>
+ * </ul></p>
+ *
+ * <p>If set to AUTO, the camera device detects which capture request key the application uses
+ * to do zoom, ACAMERA_SCALER_CROP_REGION or ACAMERA_CONTROL_ZOOM_RATIO. If
+ * the application doesn't set android.control.zoomRatio or sets it to 1.0 in the capture
+ * request, the effective zoom level is reflected in ACAMERA_SCALER_CROP_REGION in capture
+ * results. If ACAMERA_CONTROL_ZOOM_RATIO is set to values other than 1.0, the effective
+ * zoom level is reflected in ACAMERA_CONTROL_ZOOM_RATIO. AUTO is the default value
+ * for this control, and also the behavior of the OS before Android version
+ * <a href="https://developer.android.com/reference/android/os/Build.VERSION_CODES.html#BAKLAVA">BAKLAVA</a>.</p>
+ * <p>If set to ZOOM_RATIO, the application explicitly specifies zoom level be controlled
+ * by ACAMERA_CONTROL_ZOOM_RATIO, and the effective zoom level is reflected in
+ * ACAMERA_CONTROL_ZOOM_RATIO in capture results. This addresses an ambiguity with AUTO,
+ * with which the camera device cannot know if the application is using cropRegion or
+ * zoomRatio at 1.0x.</p>
+ *
+ * @see ACAMERA_CONTROL_ZOOM_RATIO
+ * @see ACAMERA_SCALER_CROP_REGION
+ */
+ ACAMERA_CONTROL_ZOOM_METHOD = // byte (acamera_metadata_enum_android_control_zoom_method_t)
+ ACAMERA_CONTROL_START + 60,
+ /**
+ * <p>Turn on AE priority mode.</p>
+ *
+ * <p>Type: byte (acamera_metadata_enum_android_control_ae_priority_mode_t)</p>
+ *
+ * <p>This tag may appear in:
+ * <ul>
+ * <li>ACameraMetadata from ACameraCaptureSession_captureCallback_result callbacks</li>
+ * <li>ACaptureRequest</li>
+ * </ul></p>
+ *
+ * <p>This control is only effective if ACAMERA_CONTROL_MODE is
+ * AUTO and ACAMERA_CONTROL_AE_MODE is set to one of its
+ * ON modes, with the exception of ON_LOW_LIGHT_BOOST_BRIGHTNESS_PRIORITY.</p>
+ * <p>When a priority mode is enabled, the camera device's
+ * auto-exposure routine will maintain the application's
+ * selected parameters relevant to the priority mode while overriding
+ * the remaining exposure parameters
+ * (ACAMERA_SENSOR_EXPOSURE_TIME, ACAMERA_SENSOR_SENSITIVITY, and
+ * ACAMERA_SENSOR_FRAME_DURATION). For example, if
+ * SENSOR_SENSITIVITY_PRIORITY mode is enabled, the camera device will
+ * maintain the application-selected ACAMERA_SENSOR_SENSITIVITY
+ * while adjusting ACAMERA_SENSOR_EXPOSURE_TIME
+ * and ACAMERA_SENSOR_FRAME_DURATION. The overridden fields for a
+ * given capture will be available in its CaptureResult.</p>
+ *
+ * @see ACAMERA_CONTROL_AE_MODE
+ * @see ACAMERA_CONTROL_MODE
+ * @see ACAMERA_SENSOR_EXPOSURE_TIME
+ * @see ACAMERA_SENSOR_FRAME_DURATION
+ * @see ACAMERA_SENSOR_SENSITIVITY
+ */
+ ACAMERA_CONTROL_AE_PRIORITY_MODE = // byte (acamera_metadata_enum_android_control_ae_priority_mode_t)
+ ACAMERA_CONTROL_START + 61,
+ /**
+ * <p>List of auto-exposure priority modes for ACAMERA_CONTROL_AE_PRIORITY_MODE
+ * that are supported by this camera device.</p>
+ *
+ * @see ACAMERA_CONTROL_AE_PRIORITY_MODE
+ *
+ * <p>Type: byte[n]</p>
+ *
+ * <p>This tag may appear in:
+ * <ul>
+ * <li>ACameraMetadata from ACameraManager_getCameraCharacteristics</li>
+ * </ul></p>
+ *
+ * <p>This entry lists the valid modes for
+ * ACAMERA_CONTROL_AE_PRIORITY_MODE for this camera device.
+ * If no AE priority modes are available for a device, this will only list OFF.</p>
+ *
+ * @see ACAMERA_CONTROL_AE_PRIORITY_MODE
+ */
+ ACAMERA_CONTROL_AE_AVAILABLE_PRIORITY_MODES = // byte[n]
+ ACAMERA_CONTROL_START + 62,
ACAMERA_CONTROL_END,
/**
@@ -4041,8 +4240,8 @@
* <p>For camera devices with the
* <a href="https://developer.android.com/reference/android/hardware/camera2/CameraMetadata.html#REQUEST_AVAILABLE_CAPABILITIES_ULTRA_HIGH_RESOLUTION_SENSOR">CameraMetadata#REQUEST_AVAILABLE_CAPABILITIES_ULTRA_HIGH_RESOLUTION_SENSOR</a>
* capability or devices where <a href="https://developer.android.com/reference/CameraCharacteristics.html#getAvailableCaptureRequestKeys">CameraCharacteristics#getAvailableCaptureRequestKeys</a>
- * lists <a href="https://developer.android.com/reference/CaptureRequest.html#SENSOR_PIXEL_MODE">ACAMERA_SENSOR_PIXEL_MODE</a></p>
- * <p>ACAMERA_SENSOR_INFO_ACTIVE_ARRAY_SIZE_MAXIMUM_RESOLUTION /
+ * lists ACAMERA_SENSOR_PIXEL_MODE,
+ * ACAMERA_SENSOR_INFO_ACTIVE_ARRAY_SIZE_MAXIMUM_RESOLUTION /
* ACAMERA_SENSOR_INFO_PRE_CORRECTION_ACTIVE_ARRAY_SIZE_MAXIMUM_RESOLUTION must be used as the
* coordinate system for requests where ACAMERA_SENSOR_PIXEL_MODE is set to
* <a href="https://developer.android.com/reference/android/hardware/camera2/CameraMetadata.html#SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION">CameraMetadata#SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION</a>.</p>
@@ -4782,9 +4981,12 @@
* duration exposed to the nearest possible value (rather than expose longer).
* The final exposure time used will be available in the output capture result.</p>
* <p>This control is only effective if ACAMERA_CONTROL_AE_MODE or ACAMERA_CONTROL_MODE is set to
- * OFF; otherwise the auto-exposure algorithm will override this value.</p>
+ * OFF; otherwise the auto-exposure algorithm will override this value. However, in the
+ * case that ACAMERA_CONTROL_AE_PRIORITY_MODE is set to SENSOR_EXPOSURE_TIME_PRIORITY, this
+ * control will be effective and not controlled by the auto-exposure algorithm.</p>
*
* @see ACAMERA_CONTROL_AE_MODE
+ * @see ACAMERA_CONTROL_AE_PRIORITY_MODE
* @see ACAMERA_CONTROL_MODE
*/
ACAMERA_SENSOR_EXPOSURE_TIME = // int64
@@ -4893,7 +5095,9 @@
* value. The final sensitivity used will be available in the
* output capture result.</p>
* <p>This control is only effective if ACAMERA_CONTROL_AE_MODE or ACAMERA_CONTROL_MODE is set to
- * OFF; otherwise the auto-exposure algorithm will override this value.</p>
+ * OFF; otherwise the auto-exposure algorithm will override this value. However, in the
+ * case that ACAMERA_CONTROL_AE_PRIORITY_MODE is set to SENSOR_SENSITIVITY_PRIORITY, this
+ * control will be effective and not controlled by the auto-exposure algorithm.</p>
* <p>Note that for devices supporting postRawSensitivityBoost, the total sensitivity applied
* to the final processed image is the combination of ACAMERA_SENSOR_SENSITIVITY and
* ACAMERA_CONTROL_POST_RAW_SENSITIVITY_BOOST. In case the application uses the sensor
@@ -4902,6 +5106,7 @@
* set postRawSensitivityBoost.</p>
*
* @see ACAMERA_CONTROL_AE_MODE
+ * @see ACAMERA_CONTROL_AE_PRIORITY_MODE
* @see ACAMERA_CONTROL_MODE
* @see ACAMERA_CONTROL_POST_RAW_SENSITIVITY_BOOST
* @see ACAMERA_SENSOR_INFO_SENSITIVITY_RANGE
@@ -5988,7 +6193,7 @@
* This key will only be present for devices which advertise the
* <a href="https://developer.android.com/reference/android/hardware/camera2/CameraMetadata.html#REQUEST_AVAILABLE_CAPABILITIES_ULTRA_HIGH_RESOLUTION_SENSOR">CameraMetadata#REQUEST_AVAILABLE_CAPABILITIES_ULTRA_HIGH_RESOLUTION_SENSOR</a>
* capability or devices where <a href="https://developer.android.com/reference/CameraCharacteristics.html#getAvailableCaptureRequestKeys">CameraCharacteristics#getAvailableCaptureRequestKeys</a>
- * lists <a href="https://developer.android.com/reference/CaptureRequest.html#SENSOR_PIXEL_MODE">ACAMERA_SENSOR_PIXEL_MODE</a></p>
+ * lists ACAMERA_SENSOR_PIXEL_MODE.</p>
* <p>The data representation is <code>int[4]</code>, which maps to <code>(left, top, width, height)</code>.</p>
*
* @see ACAMERA_SENSOR_INFO_ACTIVE_ARRAY_SIZE
@@ -6021,7 +6226,7 @@
* This key will only be present for devices which advertise the
* <a href="https://developer.android.com/reference/android/hardware/camera2/CameraMetadata.html#REQUEST_AVAILABLE_CAPABILITIES_ULTRA_HIGH_RESOLUTION_SENSOR">CameraMetadata#REQUEST_AVAILABLE_CAPABILITIES_ULTRA_HIGH_RESOLUTION_SENSOR</a>
* capability or devices where <a href="https://developer.android.com/reference/CameraCharacteristics.html#getAvailableCaptureRequestKeys">CameraCharacteristics#getAvailableCaptureRequestKeys</a>
- * lists <a href="https://developer.android.com/reference/CaptureRequest.html#SENSOR_PIXEL_MODE">ACAMERA_SENSOR_PIXEL_MODE</a></p>
+ * lists ACAMERA_SENSOR_PIXEL_MODE.</p>
*
* @see ACAMERA_SENSOR_INFO_PHYSICAL_SIZE
* @see ACAMERA_SENSOR_PIXEL_MODE
@@ -6050,7 +6255,7 @@
* This key will only be present for devices which advertise the
* <a href="https://developer.android.com/reference/android/hardware/camera2/CameraMetadata.html#REQUEST_AVAILABLE_CAPABILITIES_ULTRA_HIGH_RESOLUTION_SENSOR">CameraMetadata#REQUEST_AVAILABLE_CAPABILITIES_ULTRA_HIGH_RESOLUTION_SENSOR</a>
* capability or devices where <a href="https://developer.android.com/reference/CameraCharacteristics.html#getAvailableCaptureRequestKeys">CameraCharacteristics#getAvailableCaptureRequestKeys</a>
- * lists <a href="https://developer.android.com/reference/CaptureRequest.html#SENSOR_PIXEL_MODE">ACAMERA_SENSOR_PIXEL_MODE</a></p>
+ * lists ACAMERA_SENSOR_PIXEL_MODE.</p>
* <p>The data representation is <code>int[4]</code>, which maps to <code>(left, top, width, height)</code>.</p>
*
* @see ACAMERA_SENSOR_INFO_PRE_CORRECTION_ACTIVE_ARRAY_SIZE
@@ -6089,7 +6294,7 @@
* <ul>
* <li>This key will be present if
* <a href="https://developer.android.com/reference/CameraCharacteristics.html#getAvailableCaptureRequestKeys">CameraCharacteristics#getAvailableCaptureRequestKeys</a>
- * lists <a href="https://developer.android.com/reference/CaptureRequest.html#SENSOR_PIXEL_MODE">ACAMERA_SENSOR_PIXEL_MODE</a>, since RAW
+ * lists ACAMERA_SENSOR_PIXEL_MODE, since RAW
* images may not necessarily have a regular bayer pattern when
* <a href="https://developer.android.com/reference/CaptureRequest.html#SENSOR_PIXEL_MODE">ACAMERA_SENSOR_PIXEL_MODE</a> is set to
* <a href="https://developer.android.com/reference/android/hardware/camera2/CameraMetadata.html#SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION">CameraMetadata#SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION</a>.</li>
@@ -6459,9 +6664,19 @@
* height dimensions are given in ACAMERA_SENSOR_INFO_PIXEL_ARRAY_SIZE.
* This may include hot pixels that lie outside of the active array
* bounds given by ACAMERA_SENSOR_INFO_ACTIVE_ARRAY_SIZE.</p>
+ * <p>For camera devices with the
+ * <a href="https://developer.android.com/reference/android/hardware/camera2/CameraMetadata.html#REQUEST_AVAILABLE_CAPABILITIES_ULTRA_HIGH_RESOLUTION_SENSOR">CameraMetadata#REQUEST_AVAILABLE_CAPABILITIES_ULTRA_HIGH_RESOLUTION_SENSOR</a>
+ * capability or devices where
+ * <a href="https://developer.android.com/reference/CameraCharacteristics.html#getAvailableCaptureRequestKeys">CameraCharacteristics#getAvailableCaptureRequestKeys</a>
+ * lists ACAMERA_SENSOR_PIXEL_MODE,
+ * ACAMERA_SENSOR_INFO_PIXEL_ARRAY_SIZE_MAXIMUM_RESOLUTION will be used as the
+ * pixel array size if the corresponding request sets ACAMERA_SENSOR_PIXEL_MODE to
+ * <a href="https://developer.android.com/reference/android/hardware/camera2/CameraMetadata.html#SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION">CameraMetadata#SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION</a>.</p>
*
* @see ACAMERA_SENSOR_INFO_ACTIVE_ARRAY_SIZE
* @see ACAMERA_SENSOR_INFO_PIXEL_ARRAY_SIZE
+ * @see ACAMERA_SENSOR_INFO_PIXEL_ARRAY_SIZE_MAXIMUM_RESOLUTION
+ * @see ACAMERA_SENSOR_PIXEL_MODE
*/
ACAMERA_STATISTICS_HOT_PIXEL_MAP = // int32[2*n]
ACAMERA_STATISTICS_START + 15,
@@ -7611,8 +7826,8 @@
* <p>For camera devices with the
* <a href="https://developer.android.com/reference/android/hardware/camera2/CameraMetadata.html#REQUEST_AVAILABLE_CAPABILITIES_ULTRA_HIGH_RESOLUTION_SENSOR">CameraMetadata#REQUEST_AVAILABLE_CAPABILITIES_ULTRA_HIGH_RESOLUTION_SENSOR</a>
* capability or devices where <a href="https://developer.android.com/reference/CameraCharacteristics.html#getAvailableCaptureRequestKeys">CameraCharacteristics#getAvailableCaptureRequestKeys</a>
- * lists <a href="https://developer.android.com/reference/CaptureRequest.html#SENSOR_PIXEL_MODE">ACAMERA_SENSOR_PIXEL_MODE</a>
- * , the current active physical device
+ * lists ACAMERA_SENSOR_PIXEL_MODE,
+ * the current active physical device
* ACAMERA_SENSOR_INFO_ACTIVE_ARRAY_SIZE_MAXIMUM_RESOLUTION /
* ACAMERA_SENSOR_INFO_PRE_CORRECTION_ACTIVE_ARRAY_SIZE_MAXIMUM_RESOLUTION must be used as the
* coordinate system for requests where ACAMERA_SENSOR_PIXEL_MODE is set to
@@ -7844,6 +8059,145 @@
ACAMERA_HEIC_AVAILABLE_HEIC_STALL_DURATIONS_MAXIMUM_RESOLUTION =
// int64[4*n]
ACAMERA_HEIC_START + 5,
+ /**
+ * <p>The available HEIC (ISO/IEC 23008-12/24) UltraHDR stream
+ * configurations that this camera device supports
+ * (i.e. format, width, height, output/input stream).</p>
+ *
+ * <p>Type: int32[n*4] (acamera_metadata_enum_android_heic_available_heic_ultra_hdr_stream_configurations_t)</p>
+ *
+ * <p>This tag may appear in:
+ * <ul>
+ * <li>ACameraMetadata from ACameraManager_getCameraCharacteristics</li>
+ * </ul></p>
+ *
+ * <p>The configurations are listed as <code>(format, width, height, input?)</code> tuples.</p>
+ * <p>All the static, control, and dynamic metadata tags related to JPEG apply to HEIC formats.
+ * Configuring JPEG and HEIC streams at the same time is not supported.</p>
+ * <p>All the configuration tuples <code>(format, width, height, input?)</code> will contain
+ * AIMAGE_FORMAT_HEIC format as OUTPUT only.</p>
+ */
+ ACAMERA_HEIC_AVAILABLE_HEIC_ULTRA_HDR_STREAM_CONFIGURATIONS =
+ // int32[n*4] (acamera_metadata_enum_android_heic_available_heic_ultra_hdr_stream_configurations_t)
+ ACAMERA_HEIC_START + 6,
+ /**
+ * <p>This lists the minimum frame duration for each
+ * format/size combination for HEIC UltraHDR output formats.</p>
+ *
+ * <p>Type: int64[4*n]</p>
+ *
+ * <p>This tag may appear in:
+ * <ul>
+ * <li>ACameraMetadata from ACameraManager_getCameraCharacteristics</li>
+ * </ul></p>
+ *
+ * <p>This should correspond to the frame duration when only that
+ * stream is active, with all processing (typically in android.*.mode)
+ * set to either OFF or FAST.</p>
+ * <p>When multiple streams are used in a request, the minimum frame
+ * duration will be max(individual stream min durations).</p>
+ * <p>See ACAMERA_SENSOR_FRAME_DURATION and
+ * ACAMERA_SCALER_AVAILABLE_STALL_DURATIONS for more details about
+ * calculating the max frame rate.</p>
+ *
+ * @see ACAMERA_SCALER_AVAILABLE_STALL_DURATIONS
+ * @see ACAMERA_SENSOR_FRAME_DURATION
+ */
+ ACAMERA_HEIC_AVAILABLE_HEIC_ULTRA_HDR_MIN_FRAME_DURATIONS = // int64[4*n]
+ ACAMERA_HEIC_START + 7,
+ /**
+ * <p>This lists the maximum stall duration for each
+ * output format/size combination for HEIC UltraHDR streams.</p>
+ *
+ * <p>Type: int64[4*n]</p>
+ *
+ * <p>This tag may appear in:
+ * <ul>
+ * <li>ACameraMetadata from ACameraManager_getCameraCharacteristics</li>
+ * </ul></p>
+ *
+ * <p>A stall duration is how much extra time would get added
+ * to the normal minimum frame duration for a repeating request
+ * that has streams with non-zero stall.</p>
+ * <p>This functions similarly to
+ * ACAMERA_SCALER_AVAILABLE_STALL_DURATIONS for HEIC UltraHDR
+ * streams.</p>
+ * <p>All HEIC output stream formats may have a nonzero stall
+ * duration.</p>
+ *
+ * @see ACAMERA_SCALER_AVAILABLE_STALL_DURATIONS
+ */
+ ACAMERA_HEIC_AVAILABLE_HEIC_ULTRA_HDR_STALL_DURATIONS = // int64[4*n]
+ ACAMERA_HEIC_START + 8,
+ /**
+ * <p>The available HEIC (ISO/IEC 23008-12/24) UltraHDR stream
+ * configurations that this camera device supports
+ * (i.e. format, width, height, output/input stream) for CaptureRequests where
+ * ACAMERA_SENSOR_PIXEL_MODE is set to
+ * <a href="https://developer.android.com/reference/android/hardware/camera2/CameraMetadata.html#SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION">CameraMetadata#SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION</a>.</p>
+ *
+ * @see ACAMERA_SENSOR_PIXEL_MODE
+ *
+ * <p>Type: int32[n*4] (acamera_metadata_enum_android_heic_available_heic_ultra_hdr_stream_configurations_maximum_resolution_t)</p>
+ *
+ * <p>This tag may appear in:
+ * <ul>
+ * <li>ACameraMetadata from ACameraManager_getCameraCharacteristics</li>
+ * </ul></p>
+ *
+ * <p>Refer to ACAMERA_HEIC_AVAILABLE_HEIC_STREAM_CONFIGURATIONS for details.</p>
+ * <p>All the configuration tuples <code>(format, width, height, input?)</code> will contain
+ * AIMAGE_FORMAT_HEIC format as OUTPUT only.</p>
+ *
+ * @see ACAMERA_HEIC_AVAILABLE_HEIC_STREAM_CONFIGURATIONS
+ */
+ ACAMERA_HEIC_AVAILABLE_HEIC_ULTRA_HDR_STREAM_CONFIGURATIONS_MAXIMUM_RESOLUTION =
+ // int32[n*4] (acamera_metadata_enum_android_heic_available_heic_ultra_hdr_stream_configurations_maximum_resolution_t)
+ ACAMERA_HEIC_START + 9,
+ /**
+ * <p>This lists the minimum frame duration for each
+ * format/size combination for HEIC UltraHDR output formats for CaptureRequests where
+ * ACAMERA_SENSOR_PIXEL_MODE is set to
+ * <a href="https://developer.android.com/reference/android/hardware/camera2/CameraMetadata.html#SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION">CameraMetadata#SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION</a>.</p>
+ *
+ * @see ACAMERA_SENSOR_PIXEL_MODE
+ *
+ * <p>Type: int64[4*n]</p>
+ *
+ * <p>This tag may appear in:
+ * <ul>
+ * <li>ACameraMetadata from ACameraManager_getCameraCharacteristics</li>
+ * </ul></p>
+ *
+ * <p>Refer to ACAMERA_HEIC_AVAILABLE_HEIC_MIN_FRAME_DURATIONS for details.</p>
+ *
+ * @see ACAMERA_HEIC_AVAILABLE_HEIC_MIN_FRAME_DURATIONS
+ */
+ ACAMERA_HEIC_AVAILABLE_HEIC_ULTRA_HDR_MIN_FRAME_DURATIONS_MAXIMUM_RESOLUTION =
+ // int64[4*n]
+ ACAMERA_HEIC_START + 10,
+ /**
+ * <p>This lists the maximum stall duration for each
+ * output format/size combination for HEIC UltraHDR streams for CaptureRequests where
+ * ACAMERA_SENSOR_PIXEL_MODE is set to
+ * <a href="https://developer.android.com/reference/android/hardware/camera2/CameraMetadata.html#SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION">CameraMetadata#SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION</a>.</p>
+ *
+ * @see ACAMERA_SENSOR_PIXEL_MODE
+ *
+ * <p>Type: int64[4*n]</p>
+ *
+ * <p>This tag may appear in:
+ * <ul>
+ * <li>ACameraMetadata from ACameraManager_getCameraCharacteristics</li>
+ * </ul></p>
+ *
+ * <p>Refer to ACAMERA_HEIC_AVAILABLE_HEIC_STALL_DURATIONS for details.</p>
+ *
+ * @see ACAMERA_HEIC_AVAILABLE_HEIC_STALL_DURATIONS
+ */
+ ACAMERA_HEIC_AVAILABLE_HEIC_ULTRA_HDR_STALL_DURATIONS_MAXIMUM_RESOLUTION =
+ // int64[4*n]
+ ACAMERA_HEIC_START + 11,
ACAMERA_HEIC_END,
/**
@@ -7928,6 +8282,33 @@
ACAMERA_AUTOMOTIVE_LENS_END,
/**
+ * <p>Indicates when to activate Night Mode Camera Extension for high-quality
+ * still captures in low-light conditions.</p>
+ *
+ * <p>Type: int32 (acamera_metadata_enum_android_extension_night_mode_indicator_t)</p>
+ *
+ * <p>This tag may appear in:
+ * <ul>
+ * <li>ACameraMetadata from ACameraCaptureSession_captureCallback_result callbacks</li>
+ * </ul></p>
+ *
+ * <p>Provides awareness to the application when the current scene can benefit from using a
+ * Night Mode Camera Extension to take a high-quality photo.</p>
+ * <p>Support for this capture result can be queried via
+ * <a href="https://developer.android.com/reference/android/hardware/camera2/CameraCharacteristics.html#getAvailableCaptureResultKeys">CameraCharacteristics#getAvailableCaptureResultKeys</a>.</p>
+ * <p>If the device supports this capability then it will also support
+ * <a href="https://developer.android.com/reference/android/hardware/camera2/CameraExtensionCharacteristics.html#EXTENSION_NIGHT">NIGHT</a>
+ * and will be available in both
+ * <a href="https://developer.android.com/reference/android/hardware/camera2/CameraCaptureSession.html">camera capture sessions</a> and
+ * <a href="https://developer.android.com/reference/android/hardware/camera2/CameraExtensionSession.html">camera extension sessions</a>.</p>
+ * <p>The value will be {@code UNKNOWN} in the following auto exposure modes: ON_AUTO_FLASH,
+ * ON_ALWAYS_FLASH, ON_AUTO_FLASH_REDEYE, or ON_EXTERNAL_FLASH.</p>
+ */
+ ACAMERA_EXTENSION_NIGHT_MODE_INDICATOR = // int32 (acamera_metadata_enum_android_extension_night_mode_indicator_t)
+ ACAMERA_EXTENSION_START + 2,
+ ACAMERA_EXTENSION_END,
+
+ /**
* <p>The available Jpeg/R stream
* configurations that this camera device supports
* (i.e. format, width, height, output/input stream).</p>
@@ -8116,6 +8497,20 @@
*/
ACAMERA_COLOR_CORRECTION_MODE_HIGH_QUALITY = 2,
+ /**
+ * <p>Use
+ * ACAMERA_COLOR_CORRECTION_COLOR_TEMPERATURE and
+ * ACAMERA_COLOR_CORRECTION_COLOR_TINT to adjust the white balance based
+ * on correlated color temperature.</p>
+ * <p>If AWB is enabled with <code>ACAMERA_CONTROL_AWB_MODE != OFF</code>, then
+ * CCT is ignored.</p>
+ *
+ * @see ACAMERA_COLOR_CORRECTION_COLOR_TEMPERATURE
+ * @see ACAMERA_COLOR_CORRECTION_COLOR_TINT
+ * @see ACAMERA_CONTROL_AWB_MODE
+ */
+ ACAMERA_COLOR_CORRECTION_MODE_CCT = 3,
+
} acamera_metadata_enum_android_color_correction_mode_t;
// ACAMERA_COLOR_CORRECTION_ABERRATION_MODE
@@ -8233,7 +8628,17 @@
* ACAMERA_SENSOR_FRAME_DURATION are ignored. The
* application has control over the various
* ACAMERA_FLASH_* fields.</p>
+ * <p>If the device supports manual flash strength control, i.e.,
+ * if ACAMERA_FLASH_SINGLE_STRENGTH_MAX_LEVEL and
+ * ACAMERA_FLASH_TORCH_STRENGTH_MAX_LEVEL are greater than 1, then
+ * the auto-exposure (AE) precapture metering sequence should be
+ * triggered for the configured flash mode and strength to avoid
+ * the image being incorrectly exposed at different
+ * ACAMERA_FLASH_STRENGTH_LEVEL.</p>
*
+ * @see ACAMERA_FLASH_SINGLE_STRENGTH_MAX_LEVEL
+ * @see ACAMERA_FLASH_STRENGTH_LEVEL
+ * @see ACAMERA_FLASH_TORCH_STRENGTH_MAX_LEVEL
* @see ACAMERA_SENSOR_EXPOSURE_TIME
* @see ACAMERA_SENSOR_FRAME_DURATION
* @see ACAMERA_SENSOR_SENSITIVITY
@@ -9347,6 +9752,64 @@
} acamera_metadata_enum_android_control_low_light_boost_state_t;
+// ACAMERA_CONTROL_ZOOM_METHOD
+typedef enum acamera_metadata_enum_acamera_control_zoom_method {
+ /**
+ * <p>The camera device automatically detects whether the application does zoom with
+ * ACAMERA_SCALER_CROP_REGION or ACAMERA_CONTROL_ZOOM_RATIO, and in turn decides which
+ * metadata tag reflects the effective zoom level.</p>
+ *
+ * @see ACAMERA_CONTROL_ZOOM_RATIO
+ * @see ACAMERA_SCALER_CROP_REGION
+ */
+ ACAMERA_CONTROL_ZOOM_METHOD_AUTO = 0,
+
+ /**
+ * <p>The application intends to control zoom via ACAMERA_CONTROL_ZOOM_RATIO, and
+ * the effective zoom level is reflected by ACAMERA_CONTROL_ZOOM_RATIO in capture results.</p>
+ *
+ * @see ACAMERA_CONTROL_ZOOM_RATIO
+ */
+ ACAMERA_CONTROL_ZOOM_METHOD_ZOOM_RATIO = 1,
+
+} acamera_metadata_enum_android_control_zoom_method_t;
+
+// ACAMERA_CONTROL_AE_PRIORITY_MODE
+typedef enum acamera_metadata_enum_acamera_control_ae_priority_mode {
+ /**
+ * <p>Disable AE priority mode. This is the default value.</p>
+ */
+ ACAMERA_CONTROL_AE_PRIORITY_MODE_OFF = 0,
+
+ /**
+ * <p>The camera device's auto-exposure routine is active and
+ * prioritizes the application-selected ISO (ACAMERA_SENSOR_SENSITIVITY).</p>
+ * <p>The application has control over ACAMERA_SENSOR_SENSITIVITY while
+ * the application's values for ACAMERA_SENSOR_EXPOSURE_TIME and
+ * ACAMERA_SENSOR_FRAME_DURATION are ignored.</p>
+ *
+ * @see ACAMERA_SENSOR_EXPOSURE_TIME
+ * @see ACAMERA_SENSOR_FRAME_DURATION
+ * @see ACAMERA_SENSOR_SENSITIVITY
+ */
+ ACAMERA_CONTROL_AE_PRIORITY_MODE_SENSOR_SENSITIVITY_PRIORITY = 1,
+
+ /**
+ * <p>The camera device's auto-exposure routine is active and
+ * prioritizes the application-selected exposure time
+ * (ACAMERA_SENSOR_EXPOSURE_TIME).</p>
+ * <p>The application has control over ACAMERA_SENSOR_EXPOSURE_TIME while
+ * the application's values for ACAMERA_SENSOR_SENSITIVITY and
+ * ACAMERA_SENSOR_FRAME_DURATION are ignored.</p>
+ *
+ * @see ACAMERA_SENSOR_EXPOSURE_TIME
+ * @see ACAMERA_SENSOR_FRAME_DURATION
+ * @see ACAMERA_SENSOR_SENSITIVITY
+ */
+ ACAMERA_CONTROL_AE_PRIORITY_MODE_SENSOR_EXPOSURE_TIME_PRIORITY = 2,
+
+} acamera_metadata_enum_android_control_ae_priority_mode_t;
+
// ACAMERA_EDGE_MODE
@@ -11387,6 +11850,26 @@
} acamera_metadata_enum_android_heic_available_heic_stream_configurations_maximum_resolution_t;
+// ACAMERA_HEIC_AVAILABLE_HEIC_ULTRA_HDR_STREAM_CONFIGURATIONS
+typedef enum acamera_metadata_enum_acamera_heic_available_heic_ultra_hdr_stream_configurations {
+ ACAMERA_HEIC_AVAILABLE_HEIC_ULTRA_HDR_STREAM_CONFIGURATIONS_OUTPUT
+ = 0,
+
+ ACAMERA_HEIC_AVAILABLE_HEIC_ULTRA_HDR_STREAM_CONFIGURATIONS_INPUT
+ = 1,
+
+} acamera_metadata_enum_android_heic_available_heic_ultra_hdr_stream_configurations_t;
+
+// ACAMERA_HEIC_AVAILABLE_HEIC_ULTRA_HDR_STREAM_CONFIGURATIONS_MAXIMUM_RESOLUTION
+typedef enum acamera_metadata_enum_acamera_heic_available_heic_ultra_hdr_stream_configurations_maximum_resolution {
+ ACAMERA_HEIC_AVAILABLE_HEIC_ULTRA_HDR_STREAM_CONFIGURATIONS_MAXIMUM_RESOLUTION_OUTPUT
+ = 0,
+
+ ACAMERA_HEIC_AVAILABLE_HEIC_ULTRA_HDR_STREAM_CONFIGURATIONS_MAXIMUM_RESOLUTION_INPUT
+ = 1,
+
+} acamera_metadata_enum_android_heic_available_heic_ultra_hdr_stream_configurations_maximum_resolution_t;
+
// ACAMERA_AUTOMOTIVE_LOCATION
@@ -11550,6 +12033,33 @@
} acamera_metadata_enum_android_automotive_lens_facing_t;
+// ACAMERA_EXTENSION_NIGHT_MODE_INDICATOR
+typedef enum acamera_metadata_enum_acamera_extension_night_mode_indicator {
+ /**
+ * <p>The camera can't accurately assess the scene's lighting to determine if a Night Mode
+ * Camera Extension capture would improve the photo. This can happen when the current
+ * camera configuration doesn't support night mode indicator detection, such as when
+ * the auto exposure mode is ON_AUTO_FLASH, ON_ALWAYS_FLASH, ON_AUTO_FLASH_REDEYE, or
+ * ON_EXTERNAL_FLASH.</p>
+ */
+ ACAMERA_EXTENSION_NIGHT_MODE_INDICATOR_UNKNOWN = 0,
+
+ /**
+ * <p>The camera has detected lighting conditions that are sufficiently bright. Night
+ * Mode Camera Extensions is available but may not be able to optimize the camera
+ * settings to take a higher quality photo.</p>
+ */
+ ACAMERA_EXTENSION_NIGHT_MODE_INDICATOR_OFF = 1,
+
+ /**
+ * <p>The camera has detected low-light conditions. It is recommended to use Night Mode
+ * Camera Extension to optimize the camera settings to take a high-quality photo in
+ * the dark.</p>
+ */
+ ACAMERA_EXTENSION_NIGHT_MODE_INDICATOR_ON = 2,
+
+} acamera_metadata_enum_android_extension_night_mode_indicator_t;
+
// ACAMERA_JPEGR_AVAILABLE_JPEG_R_STREAM_CONFIGURATIONS
typedef enum acamera_metadata_enum_acamera_jpegr_available_jpeg_r_stream_configurations {
@@ -11571,6 +12081,7 @@
+
__END_DECLS
#endif /* _NDK_CAMERA_METADATA_TAGS_H */
diff --git a/camera/ndk/libcamera2ndk.map.txt b/camera/ndk/libcamera2ndk.map.txt
index 7d7868b..60d4775 100644
--- a/camera/ndk/libcamera2ndk.map.txt
+++ b/camera/ndk/libcamera2ndk.map.txt
@@ -28,6 +28,8 @@
ACameraManager_getCameraCharacteristics;
ACameraManager_getCameraIdList;
ACameraManager_openCamera;
+ ACameraManager_openSharedCamera; # systemapi introduced=36
+ ACameraManager_isCameraDeviceSharingSupported; # systemapi introduced=36
ACameraManager_registerAvailabilityCallback;
ACameraManager_unregisterAvailabilityCallback;
ACameraManager_registerExtendedAvailabilityCallback; # introduced=29
@@ -72,6 +74,9 @@
ACaptureSessionSharedOutput_remove; # introduced=28
ACaptureSessionPhysicalOutput_create; # introduced=29
ACaptureSessionOutput_free;
+ ACameraCaptureSessionShared_startStreaming; # systemapi introduced=36
+ ACameraCaptureSessionShared_logicalCamera_startStreaming; # systemapi introduced=36
+ ACameraCaptureSessionShared_stopStreaming; # systemapi introduced=36
local:
*;
};
diff --git a/camera/ndk/ndk_vendor/impl/ACameraDevice.cpp b/camera/ndk/ndk_vendor/impl/ACameraDevice.cpp
index 3325da6..d3a8e0d 100644
--- a/camera/ndk/ndk_vendor/impl/ACameraDevice.cpp
+++ b/camera/ndk/ndk_vendor/impl/ACameraDevice.cpp
@@ -85,11 +85,12 @@
const char* id,
ACameraDevice_StateCallbacks* cb,
sp<ACameraMetadata> chars,
- ACameraDevice* wrapper) :
+ ACameraDevice* wrapper, bool sharedMode) :
mCameraId(id),
mAppCallbacks(*cb),
mChars(std::move(chars)),
mWrapper(wrapper),
+ mSharedMode(sharedMode),
mInError(false),
mError(ACAMERA_OK),
mIdle(true),
@@ -960,6 +961,7 @@
case kWhatCaptureSeqAbort:
case kWhatCaptureBufferLost:
case kWhatPreparedCb:
+ case kWhatClientSharedAccessPriorityChanged:
ALOGV("%s: Received msg %d", __FUNCTION__, msg->what());
break;
case kWhatCleanUpSessions:
@@ -997,6 +999,28 @@
(*onDisconnected)(context, dev);
break;
}
+ case kWhatClientSharedAccessPriorityChanged:
+ {
+ ACameraDevice* dev;
+ found = msg->findPointer(kDeviceKey, (void**) &dev);
+ if (!found || dev == nullptr) {
+ ALOGE("%s: Cannot find device pointer!", __FUNCTION__);
+ return;
+ }
+ ACameraDevice_ClientSharedAccessPriorityChangedCallback
+ onClientSharedAccessPriorityChanged;
+ found = msg->findPointer(kCallbackFpKey, (void**) &onClientSharedAccessPriorityChanged);
+ if (!found) {
+ ALOGE("%s: Cannot find onClientSharedAccessPriorityChanged!", __FUNCTION__);
+ return;
+ }
+ if (onClientSharedAccessPriorityChanged == nullptr) {
+ return;
+ }
+ (*onClientSharedAccessPriorityChanged)(context, dev, dev->isPrimaryClient());
+ break;
+ }
+
case kWhatOnError:
{
ACameraDevice* dev;
@@ -1614,6 +1638,28 @@
return ScopedAStatus::ok();
}
+ScopedAStatus CameraDevice::ServiceCallback::onClientSharedAccessPriorityChanged(
+ bool primaryClient) {
+ ALOGV("onClientSharedAccessPriorityChanged received. primaryClient = %d", primaryClient);
+ ScopedAStatus ret = ScopedAStatus::ok();
+ std::shared_ptr<CameraDevice> dev = mDevice.lock();
+ if (dev == nullptr) {
+ return ret; // device has been closed
+ }
+ Mutex::Autolock _l(dev->mDeviceLock);
+ if (dev->isClosed() || dev->mRemote == nullptr) {
+ return ret;
+ }
+ dev->setPrimaryClient(primaryClient);
+ sp<AMessage> msg = new AMessage(kWhatClientSharedAccessPriorityChanged, dev->mHandler);
+ msg->setPointer(kContextKey, dev->mAppCallbacks.context);
+ msg->setPointer(kDeviceKey, (void*) dev->getWrapper());
+ msg->setPointer(kCallbackFpKey, (void*) dev->mAppCallbacks.onClientSharedAccessPriorityChanged);
+ msg->post();
+
+ return ScopedAStatus::ok();
+}
+
ScopedAStatus CameraDevice::ServiceCallback::onDeviceIdle() {
ALOGV("Camera is now idle");
@@ -1684,8 +1730,9 @@
__FUNCTION__, burstId, cbh.mRequests.size());
dev->setCameraDeviceErrorLocked(ACAMERA_ERROR_CAMERA_SERVICE);
}
+
sp<CaptureRequest> request = cbh.mRequests[burstId];
- ALOGE("%s: request = %p", __FUNCTION__, request.get());
+ ALOGV("%s: request = %p", __FUNCTION__, request.get());
sp<AMessage> msg = nullptr;
if (v2Callback) {
msg = new AMessage(kWhatCaptureStart2, dev->mHandler);
diff --git a/camera/ndk/ndk_vendor/impl/ACameraDevice.h b/camera/ndk/ndk_vendor/impl/ACameraDevice.h
index b771d47..6ba30bb 100644
--- a/camera/ndk/ndk_vendor/impl/ACameraDevice.h
+++ b/camera/ndk/ndk_vendor/impl/ACameraDevice.h
@@ -95,7 +95,7 @@
public:
CameraDevice(const char* id, ACameraDevice_StateCallbacks* cb,
sp<ACameraMetadata> chars,
- ACameraDevice* wrapper);
+ ACameraDevice* wrapper, bool sharedMode);
~CameraDevice();
// Called to initialize fields that require shared_ptr to `this`
@@ -136,6 +136,7 @@
const CaptureResultExtras& in_resultExtras,
const std::vector<PhysicalCaptureResultInfo>&
in_physicalCaptureResultInfos) override;
+ ndk::ScopedAStatus onClientSharedAccessPriorityChanged(bool isPrimaryClient) override;
private:
camera_status_t readOneResultMetadata(const CaptureMetadataInfo& captureMetadataInfo,
@@ -154,6 +155,8 @@
// Stop the looper thread and unregister the handler
void stopLooperAndDisconnect();
+ void setPrimaryClient(bool isPrimary) {mIsPrimaryClient = isPrimary;};
+ bool isPrimaryClient() {return mIsPrimaryClient;};
private:
friend ACameraCaptureSession;
@@ -232,6 +235,8 @@
const sp<ACameraMetadata> mChars; // Camera characteristics
std::shared_ptr<ServiceCallback> mServiceCallback;
ACameraDevice* mWrapper;
+ bool mSharedMode;
+ bool mIsPrimaryClient;
// stream id -> pair of (ACameraWindowType* from application, OutputConfiguration used for
// camera service)
@@ -274,7 +279,8 @@
kWhatCaptureBufferLost, // onCaptureBufferLost
kWhatPreparedCb, // onPrepared
// Internal cleanup
- kWhatCleanUpSessions // Cleanup cached sp<ACameraCaptureSession>
+ kWhatCleanUpSessions, // Cleanup cached sp<ACameraCaptureSession>
+ kWhatClientSharedAccessPriorityChanged
};
static const char* kContextKey;
static const char* kDeviceKey;
@@ -434,9 +440,9 @@
*/
struct ACameraDevice {
ACameraDevice(const char* id, ACameraDevice_StateCallbacks* cb,
- sp<ACameraMetadata> chars) :
+ sp<ACameraMetadata> chars, bool sharedMode) :
mDevice(std::make_shared<android::acam::CameraDevice>(id, cb,
- std::move(chars), this)) {
+ std::move(chars), this, sharedMode)) {
mDevice->init();
}
@@ -481,6 +487,13 @@
inline bool setDeviceMetadataQueues() {
return mDevice->setDeviceMetadataQueues();
}
+ inline void setPrimaryClient(bool isPrimary) {
+ mDevice->setPrimaryClient(isPrimary);
+ }
+ inline bool isPrimaryClient() {
+ return mDevice->isPrimaryClient();
+ }
+
private:
std::shared_ptr<android::acam::CameraDevice> mDevice;
};
diff --git a/camera/ndk/ndk_vendor/impl/ACameraManager.cpp b/camera/ndk/ndk_vendor/impl/ACameraManager.cpp
index cdba8ff..c34c4bd 100644
--- a/camera/ndk/ndk_vendor/impl/ACameraManager.cpp
+++ b/camera/ndk/ndk_vendor/impl/ACameraManager.cpp
@@ -194,11 +194,11 @@
return (strncmp(value, "0", 2) != 0 && strncasecmp(value, "false", 6) != 0);
}
-bool CameraManagerGlobal::setupVendorTags() {
+bool CameraManagerGlobal::setupVendorTags(std::shared_ptr<ICameraService> &cameraService) {
sp<VendorTagDescriptorCache> tagCache = new VendorTagDescriptorCache();
Status status = Status::NO_ERROR;
std::vector<ProviderIdAndVendorTagSections> providerIdsAndVts;
- ScopedAStatus remoteRet = mCameraService->getCameraVendorTagSections(&providerIdsAndVts);
+ ScopedAStatus remoteRet = cameraService->getCameraVendorTagSections(&providerIdsAndVts);
if (!remoteRet.isOk()) {
if (remoteRet.getExceptionCode() == EX_SERVICE_SPECIFIC) {
@@ -261,15 +261,12 @@
ALOGE("%s: Could not get ICameraService instance.", __FUNCTION__);
return nullptr;
}
-
if (mDeathRecipient.get() == nullptr) {
mDeathRecipient = ndk::ScopedAIBinder_DeathRecipient(
AIBinder_DeathRecipient_new(CameraManagerGlobal::binderDeathCallback));
+ AIBinder_linkToDeath(cameraService->asBinder().get(),
+ mDeathRecipient.get(), /*cookie=*/ this);
}
- AIBinder_linkToDeath(cameraService->asBinder().get(),
- mDeathRecipient.get(), /*cookie=*/ this);
-
- mCameraService = cameraService;
// Setup looper thread to perform availability callbacks
if (mCbLooper == nullptr) {
@@ -291,33 +288,25 @@
mCbLooper->registerHandler(mHandler);
}
+ std::vector<CameraStatusAndId> cameraStatuses;
// register ICameraServiceListener
if (mCameraServiceListener == nullptr) {
mCameraServiceListener = ndk::SharedRefBase::make<CameraServiceListener>(weak_from_this());
- }
-
- std::vector<CameraStatusAndId> cameraStatuses;
- Status status = Status::NO_ERROR;
- ScopedAStatus remoteRet = mCameraService->addListener(mCameraServiceListener,
- &cameraStatuses);
-
- if (!remoteRet.isOk()) {
- if (remoteRet.getExceptionCode() == EX_SERVICE_SPECIFIC) {
- Status errStatus = static_cast<Status>(remoteRet.getServiceSpecificError());
- ALOGE("%s: Failed to add listener to camera service: %s", __FUNCTION__,
- toString(errStatus).c_str());
- } else {
- ALOGE("%s: Transaction failed when adding listener to camera service: %d",
- __FUNCTION__, remoteRet.getExceptionCode());
+ ScopedAStatus remoteRet = cameraService->addListener(mCameraServiceListener,
+ &cameraStatuses);
+ if (!remoteRet.isOk()) {
+ if (remoteRet.getExceptionCode() == EX_SERVICE_SPECIFIC) {
+ Status errStatus = static_cast<Status>(remoteRet.getServiceSpecificError());
+ ALOGE("%s: Failed to add listener to camera service: %s", __FUNCTION__,
+ toString(errStatus).c_str());
+ } else {
+ ALOGE("%s: Transaction failed when adding listener to camera service: %d",
+ __FUNCTION__, remoteRet.getExceptionCode());
+ }
+ return nullptr;
}
}
- // Setup vendor tags
- if (!setupVendorTags()) {
- ALOGE("Unable to set up vendor tags");
- return nullptr;
- }
-
for (auto& csi: cameraStatuses){
onStatusChangedLocked(csi.deviceStatus, csi.cameraId);
@@ -326,6 +315,13 @@
csi.cameraId, unavailablePhysicalId);
}
}
+
+ // Setup vendor tags
+ if (!setupVendorTags(cameraService)) {
+ ALOGE("Unable to set up vendor tags");
+ return nullptr;
+ }
+ mCameraService = cameraService;
return mCameraService;
}
@@ -346,6 +342,8 @@
instance->onStatusChangedLocked(deviceStatus, cameraId);
}
instance->mCameraService.reset();
+ instance->mDeathRecipient.release();
+ instance->mCameraServiceListener.reset();
// TODO: consider adding re-connect call here?
}
@@ -791,10 +789,33 @@
}
camera_status_t
-ACameraManager::openCamera(
+ACameraManager::isCameraDeviceSharingSupported(
const char* cameraId,
+ /*out*/bool* isSharingSupported) {
+ sp<ACameraMetadata> spChars;
+ camera_status_t ret = getCameraCharacteristics(cameraId, &spChars);
+ if (ret != ACAMERA_OK) {
+ ALOGE("%s: cannot get camera characteristics for camera %s. err %d",
+ __FUNCTION__, cameraId, ret);
+ return ret;
+ }
+
+ ACameraMetadata* chars = spChars.get();
+ ACameraMetadata_const_entry entry;
+ ret = ACameraMetadata_getConstEntry(chars, ANDROID_SHARED_SESSION_OUTPUT_CONFIGURATIONS,
+ &entry);
+ if (ret != ACAMERA_OK) {
+ return ret;
+ }
+ *isSharingSupported = (entry.count > 0) ? true : false;
+ return ACAMERA_OK;
+}
+
+camera_status_t
+ACameraManager::openCamera(
+ const char* cameraId, bool sharedMode,
ACameraDevice_StateCallbacks* callback,
- /*out*/ACameraDevice** outDevice) {
+ /*out*/ACameraDevice** outDevice, /*out*/bool* isPrimaryClient) {
sp<ACameraMetadata> rawChars;
camera_status_t ret = getCameraCharacteristics(cameraId, &rawChars);
Mutex::Autolock _l(mLock);
@@ -804,7 +825,7 @@
return ACAMERA_ERROR_INVALID_PARAMETER;
}
- ACameraDevice* device = new ACameraDevice(cameraId, callback, std::move(rawChars));
+ ACameraDevice* device = new ACameraDevice(cameraId, callback, std::move(rawChars), sharedMode);
std::shared_ptr<ICameraService> cs = CameraManagerGlobal::getInstance()->getCameraService();
if (cs == nullptr) {
@@ -815,11 +836,18 @@
std::shared_ptr<BnCameraDeviceCallback> deviceCallback = device->getServiceCallback();
std::shared_ptr<ICameraDeviceUser> deviceRemote;
+ ScopedAStatus serviceRet;
// No way to get package name from native.
// Send a zero length package name and let camera service figure it out from UID
- ScopedAStatus serviceRet = cs->connectDevice(deviceCallback,
- std::string(cameraId), &deviceRemote);
+ if (sharedMode) {
+ serviceRet = cs->connectDeviceV2(deviceCallback,
+ std::string(cameraId), sharedMode, &deviceRemote);
+ } else {
+ serviceRet = cs->connectDevice(deviceCallback,
+ std::string(cameraId), &deviceRemote);
+ }
+
if (!serviceRet.isOk()) {
if (serviceRet.getExceptionCode() == EX_SERVICE_SPECIFIC) {
Status errStatus = static_cast<Status>(serviceRet.getServiceSpecificError());
@@ -842,6 +870,13 @@
}
device->setRemoteDevice(deviceRemote);
+ if (sharedMode) {
+ ScopedAStatus remoteRet = deviceRemote->isPrimaryClient(isPrimaryClient);
+ if (!remoteRet.isOk()) {
+ return ACAMERA_ERROR_UNKNOWN;
+ }
+ device->setPrimaryClient(*isPrimaryClient);
+ }
device->setDeviceMetadataQueues();
*outDevice = device;
return ACAMERA_OK;
diff --git a/camera/ndk/ndk_vendor/impl/ACameraManager.h b/camera/ndk/ndk_vendor/impl/ACameraManager.h
index 2d8eefa..e9973e6 100644
--- a/camera/ndk/ndk_vendor/impl/ACameraManager.h
+++ b/camera/ndk/ndk_vendor/impl/ACameraManager.h
@@ -188,7 +188,7 @@
const std::string &physicalCameraId);
void onStatusChangedLocked(const CameraDeviceStatus &status, const std::string &cameraId,
const std::string &physicalCameraId);
- bool setupVendorTags();
+ bool setupVendorTags(std::shared_ptr<ICameraService> &cameraService);
// Utils for status
static bool validStatus(CameraDeviceStatus status);
@@ -261,9 +261,9 @@
camera_status_t getCameraCharacteristics(
const char* cameraId, android::sp<ACameraMetadata>* characteristics);
- camera_status_t openCamera(const char* cameraId,
- ACameraDevice_StateCallbacks* callback,
- /*out*/ACameraDevice** device);
+ camera_status_t openCamera(const char* cameraId, bool sharedMode,
+ ACameraDevice_StateCallbacks* callback, /*out*/ACameraDevice** device,
+ /*out*/bool* primaryClient);
camera_status_t getTagFromName(const char *cameraId, const char *name, uint32_t *tag);
void registerAvailabilityCallback(const ACameraManager_AvailabilityCallbacks* callback);
void unregisterAvailabilityCallback(const ACameraManager_AvailabilityCallbacks* callback);
@@ -271,6 +271,8 @@
const ACameraManager_ExtendedAvailabilityCallbacks* callback);
void unregisterExtendedAvailabilityCallback(
const ACameraManager_ExtendedAvailabilityCallbacks* callback);
+ camera_status_t isCameraDeviceSharingSupported(const char *cameraId,
+ bool *isSharingSupported);
private:
enum {
diff --git a/camera/tests/CameraBinderTests.cpp b/camera/tests/CameraBinderTests.cpp
index 5135b5d..4384df9 100644
--- a/camera/tests/CameraBinderTests.cpp
+++ b/camera/tests/CameraBinderTests.cpp
@@ -130,6 +130,15 @@
return binder::Status::ok();
}
+ virtual binder::Status onCameraOpenedInSharedMode(
+ [[maybe_unused]] const std::string& /*cameraId*/,
+ [[maybe_unused]] const std::string& /*clientPackageName*/,
+ [[maybe_unused]] int32_t /*deviceId*/,
+ [[maybe_unused]] bool /*isPrimaryClient*/) override {
+ // No op
+ return binder::Status::ok();
+ }
+
bool waitForNumCameras(size_t num) const {
Mutex::Autolock l(mLock);
@@ -281,6 +290,12 @@
return binder::Status::ok();
}
+ virtual binder::Status onClientSharedAccessPriorityChanged(
+ [[maybe_unused]] bool /*isPrimaryClient*/) {
+ // No-op
+ return binder::Status::ok();
+ }
+
// Test helper functions:
bool hadError() const {
@@ -402,7 +417,8 @@
res = service->connectDevice(callbacks, cameraId,
/*oomScoreOffset*/ 0,
/*targetSdkVersion*/__ANDROID_API_FUTURE__,
- /*overrideToPortrait*/false, clientAttribution, /*devicePolicy*/0, /*out*/&device);
+ /*overrideToPortrait*/false, clientAttribution, /*devicePolicy*/0,
+ /*sharedMode*/false, /*out*/&device);
EXPECT_TRUE(res.isOk()) << res;
ASSERT_NE(nullptr, device.get());
device->disconnect();
@@ -451,7 +467,7 @@
/*oomScoreOffset*/ 0,
/*targetSdkVersion*/__ANDROID_API_FUTURE__,
/*overrideToPortrait*/false, clientAttribution, /*devicePolicy*/0,
- /*out*/&device);
+ /*sharedMode*/false, /*out*/&device);
EXPECT_TRUE(res.isOk()) << res;
}
auto p = std::make_pair(callbacks, device);
diff --git a/camera/tests/CameraZSLTests.cpp b/camera/tests/CameraZSLTests.cpp
index 2740d09..b06f9b4 100644
--- a/camera/tests/CameraZSLTests.cpp
+++ b/camera/tests/CameraZSLTests.cpp
@@ -20,17 +20,18 @@
#include <gtest/gtest.h>
#include <android/content/AttributionSourceState.h>
+#include <android/hardware/ICameraService.h>
#include <binder/ProcessState.h>
-#include <utils/Errors.h>
-#include <utils/Log.h>
-#include <gui/Surface.h>
-#include <gui/SurfaceComposerClient.h>
-#include <camera/CameraParameters.h>
-#include <camera/CameraMetadata.h>
#include <camera/Camera.h>
+#include <camera/CameraMetadata.h>
+#include <camera/CameraParameters.h>
#include <camera/CameraUtils.h>
#include <camera/StringUtils.h>
-#include <android/hardware/ICameraService.h>
+#include <gui/Flags.h>
+#include <gui/Surface.h>
+#include <gui/SurfaceComposerClient.h>
+#include <utils/Errors.h>
+#include <utils/Log.h>
using namespace android;
using namespace android::hardware;
@@ -276,8 +277,11 @@
previewSurface = surfaceControl->getSurface();
ASSERT_TRUE(previewSurface != NULL);
- ASSERT_EQ(NO_ERROR, cameraDevice->setPreviewTarget(
- previewSurface->getIGraphicBufferProducer()));
+ ASSERT_EQ(NO_ERROR, cameraDevice->setPreviewTarget(previewSurface
+#if !WB_LIBCAMERASERVICE_WITH_DEPENDENCIES
+ ->getIGraphicBufferProducer()
+#endif
+ ));
cameraDevice->setPreviewCallbackFlag(
CAMERA_FRAME_CALLBACK_FLAG_CAMCORDER);
diff --git a/camera/tests/fuzzer/camera_c2OutputConfiguration_fuzzer.cpp b/camera/tests/fuzzer/camera_c2OutputConfiguration_fuzzer.cpp
index 7046075..b6fa817 100644
--- a/camera/tests/fuzzer/camera_c2OutputConfiguration_fuzzer.cpp
+++ b/camera/tests/fuzzer/camera_c2OutputConfiguration_fuzzer.cpp
@@ -120,7 +120,10 @@
[&]() { outputConfiguration->getColorSpace(); },
[&]() { outputConfiguration->getStreamUseCase(); },
[&]() { outputConfiguration->getTimestampBase(); },
- [&]() { outputConfiguration->getMirrorMode(); },
+ [&]() {
+ sp<IGraphicBufferProducer> gbp = createIGraphicBufferProducer();
+ outputConfiguration->getMirrorMode(gbp);
+ },
[&]() { outputConfiguration->useReadoutTimestamp(); },
});
callC2OutputConfAPIs();
diff --git a/camera/tests/fuzzer/camera_fuzzer.cpp b/camera/tests/fuzzer/camera_fuzzer.cpp
index f46d246..f976fe1 100644
--- a/camera/tests/fuzzer/camera_fuzzer.cpp
+++ b/camera/tests/fuzzer/camera_fuzzer.cpp
@@ -20,6 +20,7 @@
#include <android/content/AttributionSourceState.h>
#include <binder/MemoryDealer.h>
#include <fuzzer/FuzzedDataProvider.h>
+#include <gui/Flags.h>
#include <gui/Surface.h>
#include <gui/SurfaceComposerClient.h>
#include "camera2common.h"
@@ -210,7 +211,11 @@
auto callCameraAPIs = mFDP->PickValueInArray<const std::function<void()>>({
[&]() {
if (surfaceControl) {
- mCamera->setPreviewTarget(surface->getIGraphicBufferProducer());
+ mCamera->setPreviewTarget(surface
+#if !WB_LIBCAMERASERVICE_WITH_DEPENDENCIES
+ ->getIGraphicBufferProducer()
+#endif
+ );
}
},
[&]() {
@@ -267,7 +272,11 @@
},
[&]() {
if (surfaceControl) {
- mCamera->setVideoTarget(surface->getIGraphicBufferProducer());
+ mCamera->setVideoTarget(surface
+#if !WB_LIBCAMERASERVICE_WITH_DEPENDENCIES
+ ->getIGraphicBufferProducer()
+#endif
+ );
}
},
[&]() {
@@ -283,7 +292,11 @@
},
[&]() {
if (surfaceControl) {
- mCamera->setPreviewCallbackTarget(surface->getIGraphicBufferProducer());
+ mCamera->setPreviewCallbackTarget(surface
+#if !WB_LIBCAMERASERVICE_WITH_DEPENDENCIES
+ ->getIGraphicBufferProducer()
+#endif
+ );
}
},
[&]() { mCamera->getRecordingProxy(); },
diff --git a/camera/tests/fuzzer/camera_utils_fuzzer.cpp b/camera/tests/fuzzer/camera_utils_fuzzer.cpp
index c816f82..ca0a06f 100644
--- a/camera/tests/fuzzer/camera_utils_fuzzer.cpp
+++ b/camera/tests/fuzzer/camera_utils_fuzzer.cpp
@@ -75,6 +75,7 @@
CameraUtils::getRotationTransform(
staticMetadata, mFDP->ConsumeIntegral<int32_t>() /* mirrorMode */,
+ true /*enableTransformInverseDisplay*/,
&transform /*out*/);
},
[&]() { CameraUtils::isCameraServiceDisabled(); },
diff --git a/include/media/MmapStreamCallback.h b/include/media/MmapStreamCallback.h
index 76ee6d7..a3876d9 100644
--- a/include/media/MmapStreamCallback.h
+++ b/include/media/MmapStreamCallback.h
@@ -17,6 +17,7 @@
#ifndef ANDROID_AUDIO_MMAP_STREAM_CALLBACK_H
#define ANDROID_AUDIO_MMAP_STREAM_CALLBACK_H
+#include <media/AudioContainers.h>
#include <system/audio.h>
#include <utils/Errors.h>
#include <utils/RefBase.h>
@@ -42,10 +43,10 @@
virtual void onVolumeChanged(float volume) = 0;
/**
- * The device the stream is routed to/from has changed
- * \param[in] onRoutingChanged the unique device ID of the new device.
+ * The devices the stream is routed to/from has changed
+ * \param[in] deviceIds a set of the device IDs of the new devices.
*/
- virtual void onRoutingChanged(audio_port_handle_t deviceId) = 0;
+ virtual void onRoutingChanged(const DeviceIdVector& deviceIds) = 0;
protected:
MmapStreamCallback() {}
diff --git a/include/media/MmapStreamInterface.h b/include/media/MmapStreamInterface.h
index 7725175..3d29335 100644
--- a/include/media/MmapStreamInterface.h
+++ b/include/media/MmapStreamInterface.h
@@ -19,6 +19,7 @@
#include <system/audio.h>
#include <media/AudioClient.h>
+#include <media/AudioContainers.h>
#include <utils/Errors.h>
#include <utils/RefBase.h>
@@ -51,9 +52,10 @@
* Requested parameters as input,
* Actual parameters as output
* \param[in] client a AudioClient struct describing the first client using this stream.
- * \param[in,out] deviceId audio device the stream should preferably be routed to/from
- * Requested as input,
- * Actual as output
+ * \param[in,out] deviceIds audio devices the stream should preferably be routed to/from.
+ * Leave empty if there are no preferred devices.
+ * Requested as input,
+ * Actual as output
* \param[in,out] sessionId audio sessionId for the stream
* Requested as input, may be AUDIO_SESSION_ALLOCATE
* Actual as output
@@ -70,7 +72,7 @@
const audio_attributes_t *attr,
audio_config_base_t *config,
const AudioClient& client,
- audio_port_handle_t *deviceId,
+ DeviceIdVector *deviceIds,
audio_session_t *sessionId,
const sp<MmapStreamCallback>& callback,
sp<MmapStreamInterface>& interface,
diff --git a/media/audio/aconfig/Android.bp b/media/audio/aconfig/Android.bp
index 5f4a6a1..cab126f 100644
--- a/media/audio/aconfig/Android.bp
+++ b/media/audio/aconfig/Android.bp
@@ -190,6 +190,12 @@
}
cc_aconfig_library {
+ name: "android.media.audio-aconfig-cc",
+ aconfig_declarations: "android.media.audio-aconfig",
+ defaults: ["audio-aconfig-cc-defaults"],
+}
+
+cc_aconfig_library {
name: "android.media.audiopolicy-aconfig-cc",
aconfig_declarations: "android.media.audiopolicy-aconfig",
defaults: ["audio-aconfig-cc-defaults"],
diff --git a/media/audio/aconfig/README.md b/media/audio/aconfig/README.md
index 8ce1259..83370fe 100644
--- a/media/audio/aconfig/README.md
+++ b/media/audio/aconfig/README.md
@@ -126,11 +126,13 @@
### TestApis
-TestApis do not require flagging, since their existence in the tree implies that they should
-be accessible to callers (xTS not building on trunk enables this).
-
+TestApis do not require flagging, unless they are API additions associated with new features.
+For testing existing features, we have full control over the set of callers.
### Api Changes
-Currently, the flag infra does not support any type of Api modification (arguments, annotation,
-renaming, deletion, etc.) In any of these cases (including for SystemApi), exceptions will need to
-be granted.
+There is partial (work ongoing) support for modifying API surfaces.
+ - SystemApi -> public is supported
+ - UAU -> SystemApi is supported, but the @UAU must remain until the flag is in next
+Other modifications involving moving between surfaces, or annotation changes may not be supported:
+check the [FAQ](https://g3doc.corp.google.com/company/teams/android-api-council/guidelines/faq.md?cl=head#i-cannot-use-flaggedapi-with-data-classes-generated-by-codegen)
+for the up to date list of support.
diff --git a/media/audio/aconfig/aaudio.aconfig b/media/audio/aconfig/aaudio.aconfig
index f9fb4c7..7896a75 100644
--- a/media/audio/aconfig/aaudio.aconfig
+++ b/media/audio/aconfig/aaudio.aconfig
@@ -6,6 +6,13 @@
container: "system"
flag {
+ name: "offload_support"
+ namespace: "media_audio"
+ description: "Enable offload support in AAudio."
+ bug: "372041799"
+}
+
+flag {
name: "sample_rate_conversion"
namespace: "media_audio"
description: "Enable the AAudio sample rate converter."
diff --git a/media/audio/aconfig/audio.aconfig b/media/audio/aconfig/audio.aconfig
index c732708..fe53824 100644
--- a/media/audio/aconfig/audio.aconfig
+++ b/media/audio/aconfig/audio.aconfig
@@ -35,6 +35,13 @@
}
flag {
+ name: "audio_eraser_effect"
+ namespace: "media_audio"
+ description: "Enable audio eraser effect"
+ bug: "367667349"
+}
+
+flag {
name: "bluetooth_mac_address_anonymization"
namespace: "media_audio"
description:
@@ -69,6 +76,22 @@
}
flag {
+ name: "hardening_impl"
+ is_exported: true
+ namespace: "media_audio"
+ description: "Flag for overall implementation of hardening"
+ bug: "376480814"
+}
+
+flag {
+ name: "hardening_strict"
+ is_exported: true
+ namespace: "media_audio"
+ description: "Flag for strict enforcement (deny access) of hardening"
+ bug: "376480814"
+}
+
+flag {
name: "music_fx_edge_to_edge"
namespace: "media_audio"
description: "Enable Edge-to-edge feature for MusicFx and handle insets"
diff --git a/media/audio/aconfig/audio_framework.aconfig b/media/audio/aconfig/audio_framework.aconfig
index 587bdbb..1450417 100644
--- a/media/audio/aconfig/audio_framework.aconfig
+++ b/media/audio/aconfig/audio_framework.aconfig
@@ -1,5 +1,6 @@
# Top level framework (android.media) flags
# Only add flags here which must be included in framework.jar
+# Flags used in both framework.jar and native can be added here
#
# Please add flags in alphabetical order.
@@ -22,11 +23,13 @@
bug: "302323921"
}
-flag{
- name: "enable_ringtone_haptics_customization"
+flag {
+ name: "concurrent_audio_record_bypass_permission"
namespace: "media_audio"
- description: "Enables haptic customization for playing ringtone."
- bug: "351974934"
+ description:
+ "New privileged permission to allow bypassing concurrent audio "
+ "capture rules."
+ bug: "374751406"
}
flag {
@@ -38,6 +41,24 @@
}
flag {
+ name: "enable_multichannel_group_device"
+ namespace: "media_audio"
+ description:
+ "Enable new audio device type for wireless connected speaker group "
+ "supporting multichannel content."
+ is_exported: true
+ is_fixed_read_only: true
+ bug: "344031109"
+}
+
+flag{
+ name: "enable_ringtone_haptics_customization"
+ namespace: "media_audio"
+ description: "Enables haptic customization for playing ringtone."
+ bug: "351974934"
+}
+
+flag {
name: "feature_spatial_audio_headtracking_low_latency"
is_exported: true
namespace: "media_audio"
@@ -91,6 +112,30 @@
}
flag {
+ name: "hardening_permission_api"
+ is_exported: true
+ namespace: "media_audio"
+ description: "API flag for additional appop/perm constructs for hardening."
+ bug: "376480814"
+}
+
+flag {
+ name: "hardening_permission_spa"
+ is_exported: true
+ namespace: "media_audio"
+ description: "Flag for special app access impl for hardening."
+ bug: "376480814"
+}
+
+flag {
+ name: "iamf_definitions_api"
+ is_exported: true
+ namespace: "media_audio"
+ description: "API definitions for the IAMF format"
+ bug: "337522902"
+}
+
+flag {
name: "loudness_configurator_api"
is_exported: true
namespace: "media_audio"
@@ -152,6 +197,42 @@
is_fixed_read_only: true
}
+flag {
+ name: "routed_device_ids"
+ namespace: "media_audio"
+ description:
+ "Enable Java and native functions to get "
+ "multiple routed device ids"
+ bug: "367816690"
+}
+
+flag {
+ name: "spatial_audio_settings_versioning"
+ namespace: "media_audio"
+ description: "introduce versioning of spatial audio settings"
+ bug: "377977731"
+}
+
+flag {
+ name: "spatializer_capabilities"
+ namespace: "media_audio"
+ description: "spatializer reports effective channel masks"
+ bug: "377582613"
+}
+
+flag {
+ name: "speaker_cleanup_usage"
+ namespace: "media_audio"
+ description: "Support new AudioAttributes usage for speaker cleanup"
+ bug: "355050846"
+}
+
+flag {
+ name: "speaker_layout_api"
+ namespace: "media_audio"
+ description: "Surface new API method for returning speaker layout channel mask for devices"
+ bug: "337522902"
+}
# TODO remove
flag {
diff --git a/media/audioaidlconversion/AidlConversionCppNdk.cpp b/media/audioaidlconversion/AidlConversionCppNdk.cpp
index 0682f65..f739f3c 100644
--- a/media/audioaidlconversion/AidlConversionCppNdk.cpp
+++ b/media/audioaidlconversion/AidlConversionCppNdk.cpp
@@ -478,6 +478,11 @@
AudioDeviceType::OUT_BROADCAST,
GET_DEVICE_DESC_CONNECTION(BT_LE))
},
+ {
+ AUDIO_DEVICE_OUT_MULTICHANNEL_GROUP, make_AudioDeviceDescription(
+ AudioDeviceType::OUT_MULTICHANNEL_GROUP,
+ GET_DEVICE_DESC_CONNECTION(VIRTUAL))
+ },
// AUDIO_DEVICE_IN_AMBIENT and IN_COMMUNICATION are removed since they were deprecated.
{
AUDIO_DEVICE_IN_BUILTIN_MIC, make_AudioDeviceDescription(
@@ -1055,6 +1060,14 @@
return OK;
}
+namespace {
+ // Use '01' for LSB bits 0 and 1 as Bluetooth MAC addresses are never multicast
+ // and universally administered
+ constexpr std::array<uint8_t, 4> BTANON_PREFIX {0xFD, 0xFF, 0xFF, 0xFF};
+ // Keep in sync with ServiceUtilities.cpp mustAnonymizeBluetoothAddress
+ constexpr const char * BTANON_PREFIX_STR = "XX:XX:XX:XX:";
+}
+
::android::status_t aidl2legacy_AudioDevice_audio_device(
const AudioDevice& aidl,
audio_devices_t* legacyType, std::string* legacyAddress) {
@@ -1069,8 +1082,16 @@
case Tag::mac: {
const std::vector<uint8_t>& mac = aidl.address.get<AudioDeviceAddress::mac>();
if (mac.size() != 6) return BAD_VALUE;
- snprintf(addressBuffer, AUDIO_DEVICE_MAX_ADDRESS_LEN, "%02X:%02X:%02X:%02X:%02X:%02X",
- mac[0], mac[1], mac[2], mac[3], mac[4], mac[5]);
+ if (std::equal(BTANON_PREFIX.begin(), BTANON_PREFIX.end(), mac.begin())) {
+ // special case for anonymized mac address:
+ // change anonymized bytes back from FD:FF:FF:FF: to XX:XX:XX:XX:
+ snprintf(addressBuffer, AUDIO_DEVICE_MAX_ADDRESS_LEN,
+ "%s%02X:%02X", BTANON_PREFIX_STR, mac[4], mac[5]);
+ } else {
+ snprintf(addressBuffer, AUDIO_DEVICE_MAX_ADDRESS_LEN,
+ "%02X:%02X:%02X:%02X:%02X:%02X",
+ mac[0], mac[1], mac[2], mac[3], mac[4], mac[5]);
+ }
} break;
case Tag::ipv4: {
const std::vector<uint8_t>& ipv4 = aidl.address.get<AudioDeviceAddress::ipv4>();
@@ -1132,8 +1153,20 @@
switch (suggestDeviceAddressTag(aidl.type)) {
case Tag::mac: {
std::vector<uint8_t> mac(6);
- int status = sscanf(legacyAddress.c_str(), "%hhX:%hhX:%hhX:%hhX:%hhX:%hhX",
- &mac[0], &mac[1], &mac[2], &mac[3], &mac[4], &mac[5]);
+ int status;
+ // special case for anonymized mac address:
+ // change anonymized bytes so that they can be scanned as HEX bytes
+ if (legacyAddress.starts_with(BTANON_PREFIX_STR)) {
+ std::copy(BTANON_PREFIX.begin(), BTANON_PREFIX.end(), mac.begin());
+ LOG_ALWAYS_FATAL_IF(legacyAddress.length() <= strlen(BTANON_PREFIX_STR));
+ status = sscanf(legacyAddress.c_str() + strlen(BTANON_PREFIX_STR),
+ "%hhX:%hhX",
+ &mac[4], &mac[5]);
+ status += 4;
+ } else {
+ status = sscanf(legacyAddress.c_str(), "%hhX:%hhX:%hhX:%hhX:%hhX:%hhX",
+ &mac[0], &mac[1], &mac[2], &mac[3], &mac[4], &mac[5]);
+ }
if (status != mac.size()) {
ALOGE("%s: malformed MAC address: \"%s\"", __func__, legacyAddress.c_str());
return unexpected(BAD_VALUE);
@@ -1767,6 +1800,8 @@
return AUDIO_USAGE_VEHICLE_STATUS;
case AudioUsage::ANNOUNCEMENT:
return AUDIO_USAGE_ANNOUNCEMENT;
+ case AudioUsage::SPEAKER_CLEANUP:
+ return AUDIO_USAGE_SPEAKER_CLEANUP;
}
return unexpected(BAD_VALUE);
}
@@ -1818,6 +1853,8 @@
return AudioUsage::VEHICLE_STATUS;
case AUDIO_USAGE_ANNOUNCEMENT:
return AudioUsage::ANNOUNCEMENT;
+ case AUDIO_USAGE_SPEAKER_CLEANUP:
+ return AudioUsage::SPEAKER_CLEANUP;
}
return unexpected(BAD_VALUE);
}
diff --git a/media/audioserver/main_audioserver.cpp b/media/audioserver/main_audioserver.cpp
index 5d7daa4..4f5b95d 100644
--- a/media/audioserver/main_audioserver.cpp
+++ b/media/audioserver/main_audioserver.cpp
@@ -190,7 +190,7 @@
// attempting to call audio flinger on a null pointer could make the process crash
// and attract attentions.
std::vector<AudioMMapPolicyInfo> policyInfos;
- status_t status = sp<IAudioFlinger>::cast(af)->getMmapPolicyInfos(
+ status_t status = AudioSystem::getMmapPolicyInfos(
AudioMMapPolicyType::DEFAULT, &policyInfos);
// Initialize aaudio service when querying mmap policy succeeds and
// any of the policy supports MMAP.
@@ -205,6 +205,7 @@
__func__, status, policyInfos.size());
}
const auto endTime = std::chrono::steady_clock::now();
+ af->startupFinished();
using FloatMillis = std::chrono::duration<float, std::milli>;
const float timeTaken = std::chrono::duration_cast<FloatMillis>(
endTime - startTime).count();
diff --git a/media/codec2/components/dav1d/C2SoftDav1dDec.cpp b/media/codec2/components/dav1d/C2SoftDav1dDec.cpp
index 4ec26d6..44a8dd1 100644
--- a/media/codec2/components/dav1d/C2SoftDav1dDec.cpp
+++ b/media/codec2/components/dav1d/C2SoftDav1dDec.cpp
@@ -139,8 +139,8 @@
addParameter(DefineParam(mMaxSize, C2_PARAMKEY_MAX_PICTURE_SIZE)
.withDefault(new C2StreamMaxPictureSizeTuning::output(0u, 320, 240))
.withFields({
- C2F(mSize, width).inRange(2, 2048, 2),
- C2F(mSize, height).inRange(2, 2048, 2),
+ C2F(mSize, width).inRange(2, 4096, 2),
+ C2F(mSize, height).inRange(2, 4096, 2),
})
.withSetter(MaxPictureSizeSetter, mSize)
.build());
diff --git a/media/codec2/components/vpx/C2SoftVpxDec.cpp b/media/codec2/components/vpx/C2SoftVpxDec.cpp
index 318f093..83cbe47 100644
--- a/media/codec2/components/vpx/C2SoftVpxDec.cpp
+++ b/media/codec2/components/vpx/C2SoftVpxDec.cpp
@@ -69,8 +69,8 @@
DefineParam(mSize, C2_PARAMKEY_PICTURE_SIZE)
.withDefault(new C2StreamPictureSizeInfo::output(0u, 320, 240))
.withFields({
- C2F(mSize, width).inRange(2, 2048),
- C2F(mSize, height).inRange(2, 2048),
+ C2F(mSize, width).inRange(2, 4096),
+ C2F(mSize, height).inRange(2, 4096),
})
.withSetter(SizeSetter)
.build());
@@ -167,8 +167,8 @@
DefineParam(mMaxSize, C2_PARAMKEY_MAX_PICTURE_SIZE)
.withDefault(new C2StreamMaxPictureSizeTuning::output(0u, 320, 240))
.withFields({
- C2F(mSize, width).inRange(2, 2048, 2),
- C2F(mSize, height).inRange(2, 2048, 2),
+ C2F(mSize, width).inRange(2, 4096, 2),
+ C2F(mSize, height).inRange(2, 4096, 2),
})
.withSetter(MaxPictureSizeSetter, mSize)
.build());
diff --git a/media/codec2/tests/aidl/GraphicsTracker_test.cpp b/media/codec2/tests/aidl/GraphicsTracker_test.cpp
index 9008086..da79277 100644
--- a/media/codec2/tests/aidl/GraphicsTracker_test.cpp
+++ b/media/codec2/tests/aidl/GraphicsTracker_test.cpp
@@ -92,8 +92,7 @@
sp<IGraphicBufferConsumer> consumer = mConsumer.promote();
if (consumer != nullptr && consumer->acquireBuffer(&buffer, 0) == android::NO_ERROR) {
::usleep(kRenderDelayUs);
- consumer->releaseBuffer(buffer.mSlot, buffer.mFrameNumber,
- EGL_NO_DISPLAY, EGL_NO_SYNC_KHR, buffer.mFence);
+ consumer->releaseBuffer(buffer.mSlot, buffer.mFrameNumber, buffer.mFence);
}
}
void onBuffersReleased() override {}
@@ -438,8 +437,7 @@
// Consume one buffer and release
BufferItem item;
ASSERT_EQ(OK, mConsumer->acquireBuffer(&item, 0));
- ASSERT_EQ(OK, mConsumer->releaseBuffer(item.mSlot, item.mFrameNumber,
- EGL_NO_DISPLAY, EGL_NO_SYNC_KHR, item.mFence));
+ ASSERT_EQ(OK, mConsumer->releaseBuffer(item.mSlot, item.mFrameNumber, item.mFence));
// Nothing to consume
ASSERT_NE(OK, mConsumer->acquireBuffer(&item, 0));
diff --git a/media/libaaudio/fuzzer/Android.bp b/media/libaaudio/fuzzer/Android.bp
index 7998524..5b48401 100644
--- a/media/libaaudio/fuzzer/Android.bp
+++ b/media/libaaudio/fuzzer/Android.bp
@@ -37,12 +37,16 @@
],
shared_libs: [
"com.android.media.aaudio-aconfig-cc",
+ "libaconfig_storage_read_api_cc",
"libaudio_aidl_conversion_common_cpp",
"libaudioclient_aidl_conversion",
"libaudiomanager",
"libaudiopolicy",
"libbinder",
+ "libbinder_ndk",
+ "libmediautils",
"libutils",
+ "server_configurable_flags",
],
static_libs: [
"aaudio-aidl-cpp",
@@ -56,6 +60,7 @@
"libaaudio",
"libaaudio_internal",
"libaudioclient",
+ "libaudiofoundation",
"libaudioutils",
"libbase_ndk",
"libcutils",
diff --git a/media/libaaudio/fuzzer/libaaudio_fuzzer.cpp b/media/libaaudio/fuzzer/libaaudio_fuzzer.cpp
index 1b06ea7..c3b43ab 100644
--- a/media/libaaudio/fuzzer/libaaudio_fuzzer.cpp
+++ b/media/libaaudio/fuzzer/libaaudio_fuzzer.cpp
@@ -17,6 +17,7 @@
#include "aaudio/AAudio.h"
#include "aaudio/AAudioTesting.h"
+#include "system/aaudio/AAudio.h"
#include <fuzzer/FuzzedDataProvider.h>
#include <functional>
@@ -183,6 +184,12 @@
fdp.PickValueInArray({AAUDIO_UNSPECIFIED, fdp.ConsumeIntegral<int32_t>()});
AAudioStreamBuilder_setFramesPerDataCallback(mAaudioBuilder, framesPerDataCallback);
+ const size_t tagsNumBytes = fdp.ConsumeIntegralInRange<size_t>(
+ 0, AAUDIO_ATTRIBUTES_TAGS_MAX_SIZE + 10);
+ AAudioStreamBuilder_setTags(mAaudioBuilder,
+ (tagsNumBytes == 0 ? nullptr
+ : fdp.ConsumeBytesAsString(tagsNumBytes).c_str()));
+
aaudio_policy_t policy =
fdp.PickValueInArray({fdp.PickValueInArray(kPolicies), fdp.ConsumeIntegral<int32_t>()});
AAudio_setMMapPolicy(policy);
@@ -193,6 +200,7 @@
int32_t maxFrames = 0;
int32_t count = 0;
aaudio_stream_state_t state = AAUDIO_STREAM_STATE_UNKNOWN;
+ char tags[AAUDIO_ATTRIBUTES_TAGS_MAX_SIZE + 1];
invokeAAudioSetAPIs(fdp);
@@ -312,6 +320,9 @@
(void)AAudioStream_getBufferSizeInFrames(mAaudioStream);
},
[&]() {
+ (void)AAudioStream_getTags(mAaudioStream, tags);
+ },
+ [&]() {
(void)AAudioStream_isMMapUsed(mAaudioStream);
},
});
diff --git a/media/libaaudio/include/aaudio/AAudio.h b/media/libaaudio/include/aaudio/AAudio.h
index e19d526..6c41198 100644
--- a/media/libaaudio/include/aaudio/AAudio.h
+++ b/media/libaaudio/include/aaudio/AAudio.h
@@ -119,8 +119,42 @@
*
* Available since API level 34.
*/
- AAUDIO_FORMAT_IEC61937
+ AAUDIO_FORMAT_IEC61937,
+ /**
+ * This format is used for audio compressed in MP3 format.
+ */
+ AAUDIO_FORMAT_MP3,
+
+ /**
+ * This format is used for audio compressed in AAC LC format.
+ */
+ AAUDIO_FORMAT_AAC_LC,
+
+ /**
+ * This format is used for audio compressed in AAC HE V1 format.
+ */
+ AAUDIO_FORMAT_AAC_HE_V1,
+
+ /**
+ * This format is used for audio compressed in AAC HE V2 format.
+ */
+ AAUDIO_FORMAT_AAC_HE_V2,
+
+ /**
+ * This format is used for audio compressed in AAC ELD format.
+ */
+ AAUDIO_FORMAT_AAC_ELD,
+
+ /**
+ * This format is used for audio compressed in AAC XHE format.
+ */
+ AAUDIO_FORMAT_AAC_XHE,
+
+ /**
+ * This format is used for audio compressed in OPUS.
+ */
+ AAUDIO_FORMAT_OPUS
};
typedef int32_t aaudio_format_t;
@@ -335,7 +369,23 @@
/**
* Reducing latency is more important than battery life.
*/
- AAUDIO_PERFORMANCE_MODE_LOW_LATENCY
+ AAUDIO_PERFORMANCE_MODE_LOW_LATENCY,
+
+ /**
+ * Extending battery life is more important than low latency.
+ *
+ * This mode is not supported in input streams.
+ * This mode will play through the offloaded audio path to save battery life.
+ *
+ * Comparing to mode {@link #AAUDIO_PERFORMANCE_MODE_POWER_SAVING}, the stream at
+ * this mode will be able to write a large amount(several seconds) of data within a
+ * short time. The written data will be queued in a hardware buffer. After that, the
+ * app can suspend its thread/process that playing audio, the audio framework's data
+ * pipe will be suspended automatically and the CPU will be allowed to sleep for
+ * power saving. When all queued data are played, the apps will be able to get callback
+ * to feed more data.
+ */
+ AAUDIO_PERFORMANCE_MODE_POWER_SAVING_OFFLOADED
};
typedef int32_t aaudio_performance_mode_t;
@@ -1090,7 +1140,8 @@
* Set the requested performance mode.
*
* Supported modes are {@link #AAUDIO_PERFORMANCE_MODE_NONE},
- * {@link #AAUDIO_PERFORMANCE_MODE_POWER_SAVING} * and {@link #AAUDIO_PERFORMANCE_MODE_LOW_LATENCY}.
+ * {@link #AAUDIO_PERFORMANCE_MODE_POWER_SAVING}, {@link #AAUDIO_PERFORMANCE_MODE_LOW_LATENCY} and
+ * {@link #AAUDIO_PERFORMANCE_MODE_POWER_SAVING_OFFLOADED}.
*
* The default, if you do not call this function, is {@link #AAUDIO_PERFORMANCE_MODE_NONE}.
*
@@ -1475,6 +1526,44 @@
__INTRODUCED_IN(26);
/**
+ * Prototype for the callback function that is passed to
+ * AAudioStreamBuilder_setPresentationEndCallback().
+ *
+ * This will be called when all the buffers of an offloaded stream that were queued in the audio
+ * system (e.g. the combination of the Android audio framework and the device's audio hardware)
+ * have been played after AudioStream_requestStop() has been called.
+ *
+ * @param stream reference provided by AAudioStreamBuilder_openStream(), which must be an
+ * output stream as the offloaded mode is only supported for output stream
+ * @param userData the same address that was passed to
+ * AAudioStreamBuilder_setPresentationEndCallback().
+ */
+typedef void (*AAudioStream_presentationEndCallback)(AAudioStream* _Nonnull stream,
+ void* _Null_unspecified userData);
+
+/**
+ * Request that AAudio call this function when all the buffers of an offloaded stream that were
+ * queued in the audio system (e.g. the combination of the Android audio framework and the device's
+ * audio hardware) have been played.
+ *
+ * The presentation end callback must be used together with the data callback.
+ * The presentation end callback won't be called if the stream is closed before all the data
+ * is played.
+ *
+ * Available since API level 36.
+ *
+ * @param builder reference provided by AAudio_createStreamBuilder()
+ * @param callback pointer to a function that will be called when all the buffers of an offloaded
+ * stream that were queued have been played.
+ * @param userData pointer to an application data structure that will be passed
+ * to the callback functions.
+ */
+AAUDIO_API void AAudioStreamBuilder_setPresentationEndCallback(
+ AAudioStreamBuilder* _Nonnull builder,
+ AAudioStream_presentationEndCallback _Nonnull callback,
+ void* _Nullable userData) __INTRODUCED_IN(36);
+
+/**
* Open a stream based on the options in the StreamBuilder.
*
* AAudioStream_close() must be called when finished with the stream to recover
@@ -1909,11 +1998,33 @@
* Available since API level 26.
*
* @param stream reference provided by AAudioStreamBuilder_openStream()
- * @return actual device ID
+ * @return actual device id. If there are multiple device ids used, the first device picked by
+ * the audio policy engine will be returned.
*/
AAUDIO_API int32_t AAudioStream_getDeviceId(AAudioStream* _Nonnull stream) __INTRODUCED_IN(26);
/**
+ * Available since API level 36.
+ *
+ * Call this function after AAudioStreamBuilder_openStream().
+ * This function will crash if stream is null.
+ * An array of size 16 should generally be large enough to fit all device identifiers.
+ *
+ * @param stream reference provided by AAudioStreamBuilder_openStream().
+ * @param ids reference to an array of ids.
+ * @param numIds size allocated to the array of ids.
+ * The input should be the size of the ids array.
+ * The output will be the actual number of device ids.
+ * @return {@link #AAUDIO_OK} or an error code.
+ * If numIds is null, return {@link #AAUDIO_ERROR_ILLEGAL_ARGUMENT}.
+ * If numIds is smaller than the number of device ids, return
+ * {@link #AAUDIO_ERROR_OUT_OF_RANGE}. The value of numIds will still be updated.
+ * Otherwise, if ids is null, return {@link #AAUDIO_ERROR_ILLEGAL_ARGUMENT}.
+ */
+AAUDIO_API aaudio_result_t AAudioStream_getDeviceIds(AAudioStream* _Nonnull stream,
+ int32_t* _Nonnull ids, int32_t* _Nonnull numIds) __INTRODUCED_IN(36);
+
+/**
* Available since API level 26.
*
* @param stream reference provided by AAudioStreamBuilder_openStream()
@@ -2166,6 +2277,72 @@
AAUDIO_API aaudio_channel_mask_t AAudioStream_getChannelMask(AAudioStream* _Nonnull stream)
__INTRODUCED_IN(32);
+/**
+ * Configures the delay and padding values for the current stream playing in offload mode.
+ * This should only be used on a stream whose performance mode is
+ * {@link #AAUDIO_PERFORMANCE_MODE_POWER_SAVING_OFFLOADED} and the format is compressed format.
+ * The unit is frames, where a frame includes samples for all audio channels, e.g. 100 frames
+ * for a stereo stream corresponds to 200 interleaved PCM samples.
+ *
+ * @param stream reference provided by AAudioStreamBuilder_openStream()
+ * @param delayInFrames number of frames to be ignored at the beginning of the stream. A value
+ * of 0 indicates no delay is to be applied.
+ * @param paddingInFrames number of frames to be ignored at the end of the stream. A value of 0
+ * indicates no padding is to be applied.
+ * @return {@link #AAUDIO_OK} if the delay and padding values are set successfully,
+ * or {@link #AAUDIO_ERROR_ILLEGAL_ARGUMENT} if delayInFrames or paddingInFrames
+ * is less than 0,
+ * or {@link #AAUDIO_ERROR_UNIMPLEMENTED} if the stream is not an output stream whose
+ * performance mode is {@link #AAUDIO_PERFORMANCE_MODE_POWER_SAVING_OFFLOADED},
+ * or {@link #AAUDIO_ERROR_INVALID_STATE} if the stream is not yet initialized.
+ */
+AAUDIO_API aaudio_result_t AAudioStream_setOffloadDelayPadding(
+ AAudioStream* _Nonnull stream, int32_t delayInFrames, int32_t paddingInFrames)
+ __INTRODUCED_IN(36);
+
+/**
+ * Return the decoder delay of an offloaded stream in frames.
+ *
+ * @param stream reference provided by AAudioStreamBuilder_openStream()
+ * @return the offload delay in frames that previously set with
+ * {@link #AAudioStream_setOffloadDelayPadding},
+ * or 0 if it was never modified,
+ * or {@link #AAUDIO_ERROR_UNIMPLEMENTED} if the stream is not an output stream whose
+ * performance mode is {@link #AAUDIO_PERFORMANCE_MODE_POWER_SAVING_OFFLOADED},
+ * or {@link #AAUDIO_ERROR_INVALID_STATE} if the stream is not yet initialized.
+ */
+AAUDIO_API int32_t AAudioStream_getOffloadDelay(AAudioStream* _Nonnull stream) __INTRODUCED_IN(36);
+
+/**
+ * Return the decoder padding of an offloaded stream in frames.
+ *
+ * @param stream reference provided by AAudioStreamBuilder_openStream()
+ * @return the offload padding in frames that previously set with
+ * {@link #AAudioStream_setOffloadDelayPadding},
+ * or 0 if it was never modified,
+ * or {@link #AAUDIO_ERROR_UNIMPLEMENTED} if the stream is not an output stream whose
+ * performance mode is {@link #AAUDIO_PERFORMANCE_MODE_POWER_SAVING_OFFLOADED},
+ * or {@link #AAUDIO_ERROR_INVALID_STATE} if the stream is not yet initialized.
+ */
+AAUDIO_API int32_t AAudioStream_getOffloadPadding(AAudioStream* _Nonnull stream)
+ __INTRODUCED_IN(36);
+
+/**
+ * Declares that the last data writing operation on this stream provided the last buffer of this
+ * stream.
+ * After the end of stream, previously set padding and delay values are ignored. That indicates
+ * all written data will be played.
+ * Use this method in the same thread as any data writing operation.
+ *
+ * @param stream reference provided by AAudioStreamBuilder_openStream()
+ * @return {@link #AAUDIO_OK} on success,
+ * or {@link #AAUDIO_ERROR_UNIMPLEMENTED} if the stream is not an output stream whose
+ * performance mode is {@link #AAUDIO_PERFORMANCE_MODE_POWER_SAVING_OFFLOADED},
+ * or {@link #AAUDIO_ERROR_INVALID_STATE} if the stream is not yet initialized.
+ */
+AAUDIO_API aaudio_result_t AAudioStream_setOffloadEndOfStream(AAudioStream* _Nonnull stream)
+ __INTRODUCED_IN(36);
+
#ifdef __cplusplus
}
#endif
diff --git a/media/libaaudio/include/aaudio/AAudioTesting.h b/media/libaaudio/include/aaudio/AAudioTesting.h
index d67ec70..16d6c33 100644
--- a/media/libaaudio/include/aaudio/AAudioTesting.h
+++ b/media/libaaudio/include/aaudio/AAudioTesting.h
@@ -31,24 +31,225 @@
* They may change or be removed at any time.
************************************************************************************/
+/**
+ * When the audio is played/recorded via AAudio MMAP data path, the apps can write to/read from
+ * a shared memory that will also be accessed directly by hardware. That reduces the audio latency.
+ * The following values are used to describe how AAudio MMAP is supported.
+ */
enum {
/**
- * Related feature is disabled and never used.
+ * AAudio MMAP is disabled and never used.
*/
AAUDIO_POLICY_NEVER = 1,
/**
- * If related feature works then use it. Otherwise fall back to something else.
+ * AAudio MMAP support depends on device's availability. It will be used
+ * when it is possible or fall back to the normal path, where the audio data
+ * will be delivered via audio framework data pipeline.
*/
- AAUDIO_POLICY_AUTO,
+ AAUDIO_POLICY_AUTO,
/**
- * Related feature must be used. If not available then fail.
+ * AAudio MMAP must be used or fail.
*/
AAUDIO_POLICY_ALWAYS
};
typedef int32_t aaudio_policy_t;
+// The values are copied from JAVA SDK device types defined in android/media/AudioDeviceInfo.java
+// When a new value is added, it should be added here and handled by the conversion at
+// AAudioConvert_aaudioToAndroidDeviceType.
+typedef enum AAudio_DeviceType : int32_t {
+ /**
+ * A device type describing the attached earphone speaker.
+ */
+ AAUDIO_DEVICE_BUILTIN_EARPIECE = 1,
+
+ /**
+ * A device type describing the speaker system (i.e. a mono speaker or stereo speakers) built
+ * in a device.
+ */
+ AAUDIO_DEVICE_BUILTIN_SPEAKER = 2,
+
+ /**
+ * A device type describing a headset, which is the combination of a headphones and microphone.
+ */
+ AAUDIO_DEVICE_WIRED_HEADSET = 3,
+
+ /**
+ * A device type describing a pair of wired headphones.
+ */
+ AAUDIO_DEVICE_WIRED_HEADPHONES = 4,
+
+ /**
+ * A device type describing an analog line-level connection.
+ */
+ AAUDIO_DEVICE_LINE_ANALOG = 5,
+
+ /**
+ * A device type describing a digital line connection (e.g. SPDIF).
+ */
+ AAUDIO_DEVICE_LINE_DIGITAL = 6,
+
+ /**
+ * A device type describing a Bluetooth device typically used for telephony.
+ */
+ AAUDIO_DEVICE_BLUETOOTH_SCO = 7,
+
+ /**
+ * A device type describing a Bluetooth device supporting the A2DP profile.
+ */
+ AAUDIO_DEVICE_BLUETOOTH_A2DP = 8,
+
+ /**
+ * A device type describing an HDMI connection.
+ */
+ AAUDIO_DEVICE_HDMI = 9,
+
+ /**
+ * A device type describing the Audio Return Channel of an HDMI connection.
+ */
+ AAUDIO_DEVICE_HDMI_ARC = 10,
+
+ /**
+ * A device type describing a USB audio device.
+ */
+ AAUDIO_DEVICE_USB_DEVICE = 11,
+
+ /**
+ * A device type describing a USB audio device in accessory mode.
+ */
+ AAUDIO_DEVICE_USB_ACCESSORY = 12,
+
+ /**
+ * A device type describing the audio device associated with a dock.
+ * Starting at API 34, this device type only represents digital docks, while docks with an
+ * analog connection are represented with {@link #AAUDIO_DEVICE_DOCK_ANALOG}.
+ */
+ AAUDIO_DEVICE_DOCK = 13,
+
+ /**
+ * A device type associated with the transmission of audio signals over FM.
+ */
+ AAUDIO_DEVICE_FM = 14,
+
+ /**
+ * A device type describing the microphone(s) built in a device.
+ */
+ AAUDIO_DEVICE_BUILTIN_MIC = 15,
+
+ /**
+ * A device type for accessing the audio content transmitted over FM.
+ */
+ AAUDIO_DEVICE_FM_TUNER = 16,
+
+ /**
+ * A device type for accessing the audio content transmitted over the TV tuner system.
+ */
+ AAUDIO_DEVICE_TV_TUNER = 17,
+
+ /**
+ * A device type describing the transmission of audio signals over the telephony network.
+ */
+ AAUDIO_DEVICE_TELEPHONY = 18,
+
+ /**
+ * A device type describing the auxiliary line-level connectors.
+ */
+ AAUDIO_DEVICE_AUX_LINE = 19,
+
+ /**
+ * A device type connected over IP.
+ */
+ AAUDIO_DEVICE_IP = 20,
+
+ /**
+ * A type-agnostic device used for communication with external audio systems.
+ */
+ AAUDIO_DEVICE_BUS = 21,
+
+ /**
+ * A device type describing a USB audio headset.
+ */
+ AAUDIO_DEVICE_USB_HEADSET = 22,
+
+ /**
+ * A device type describing a Hearing Aid.
+ */
+ AAUDIO_DEVICE_HEARING_AID = 23,
+
+ /**
+ * A device type describing the speaker system (i.e. a mono speaker or stereo speakers) built
+ * in a device, that is specifically tuned for outputting sounds like notifications and alarms
+ * (i.e. sounds the user couldn't necessarily anticipate).
+ * <p>Note that this physical audio device may be the same as
+ * {@link #AAUDIO_DEVICE_BUILTIN_SPEAKER}
+ * but is driven differently to safely accommodate the different use case.</p>
+ */
+ AAUDIO_DEVICE_BUILTIN_SPEAKER_SAFE = 24,
+
+ /**
+ * A device type for rerouting audio within the Android framework between mixes and
+ * system applications.
+ */
+ AAUDIO_DEVICE_REMOTE_SUBMIX = 25,
+ /**
+ * A device type describing a Bluetooth Low Energy (BLE) audio headset or headphones.
+ * Headphones are grouped with headsets when the device is a sink:
+ * the features of headsets and headphones with regard to playback are the same.
+ */
+ AAUDIO_DEVICE_BLE_HEADSET = 26,
+
+ /**
+ * A device type describing a Bluetooth Low Energy (BLE) audio speaker.
+ */
+ AAUDIO_DEVICE_BLE_SPEAKER = 27,
+
+ /**
+ * A device type describing an Echo Canceller loopback Reference.
+ * This device is only used when capturing with MediaRecorder.AudioSource.ECHO_REFERENCE,
+ * which requires privileged permission
+ * {@link android.Manifest.permission#CAPTURE_AUDIO_OUTPUT}.
+ *
+ * Note that this is not exposed as it is a system API that requires privileged permission.
+ */
+ // AAUDIO_DEVICE_ECHO_REFERENCE = 28,
+
+ /**
+ * A device type describing the Enhanced Audio Return Channel of an HDMI connection.
+ */
+ AAUDIO_DEVICE_HDMI_EARC = 29,
+
+ /**
+ * A device type describing a Bluetooth Low Energy (BLE) broadcast group.
+ */
+ AAUDIO_DEVICE_BLE_BROADCAST = 30,
+
+ /**
+ * A device type describing the audio device associated with a dock using an analog connection.
+ */
+ AAUDIO_DEVICE_DOCK_ANALOG = 31
+} AAudio_DeviceType;
+
+/**
+ * Query how aaudio mmap is supported for the given device type.
+ *
+ * @param device device type
+ * @param direction {@link AAUDIO_DIRECTION_OUTPUT} or {@link AAUDIO_DIRECTION_INPUT}
+ * @return the mmap policy or negative error
+ */
+AAUDIO_API aaudio_policy_t AAudio_getPlatformMMapPolicy(
+ AAudio_DeviceType device, aaudio_direction_t direction) __INTRODUCED_IN(36);
+
+/**
+ * Query how aaudio exclusive mmap is supported for the given device type.
+ *
+ * @param device device type
+ * @param direction {@link AAUDIO_DIRECTION_OUTPUT} or {@link AAUDIO_DIRECTION_INPUT}
+ * @return the mmap exclusive policy or negative error
+ */
+AAUDIO_API aaudio_policy_t AAudio_getPlatformMMapExclusivePolicy(
+ AAudio_DeviceType device, aaudio_direction_t direction) __INTRODUCED_IN(36);
+
/**
* Control whether AAudioStreamBuilder_openStream() will use the new MMAP data path
* or the older "Legacy" data path.
diff --git a/media/libaaudio/include/system/aaudio/AAudio.h b/media/libaaudio/include/system/aaudio/AAudio.h
new file mode 100644
index 0000000..933ad35
--- /dev/null
+++ b/media/libaaudio/include/system/aaudio/AAudio.h
@@ -0,0 +1,80 @@
+/*
+ * Copyright (C) 2024 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+/**
+ * This is the system APIs for AAudio.
+ */
+#ifndef SYSTEM_AAUDIO_H
+#define SYSTEM_AAUDIO_H
+
+#include <aaudio/AAudio.h>
+
+#ifdef __cplusplus
+extern "C" {
+#endif
+
+/**
+ * The tags string attributes allows OEMs to extend the
+ * <a href="/reference/android/media/AudioAttributes">AudioAttributes</a>.
+ *
+ * Note that the maximum length includes all tags combined with delimiters and null terminator.
+ *
+ * Note that it matches the equivalent value in
+ * <a href="/reference/android/system/media/audio">AUDIO_ATTRIBUTES_TAGS_MAX_SIZE</a>
+ * in the Android native API.
+ */
+#define AAUDIO_ATTRIBUTES_TAGS_MAX_SIZE 256
+
+/**
+ * Set one or more vendor extension tags that the output stream will carry.
+ *
+ * The tags can be used by the audio policy engine for routing purpose.
+ * Routing is based on audio attributes, translated into legacy stream type.
+ * The stream types cannot be extended, so the product strategies have been introduced to allow
+ * vendor extension of routing capabilities.
+ * This could, for example, affect how volume and routing is handled for the stream.
+ *
+ * The tags can also be used by a System App to pass vendor specific information through the
+ * framework to the HAL. That info could affect routing, ducking or other audio behavior in the HAL.
+ *
+ * By default, audio attributes tags are empty if this method is not called.
+ *
+ * @param builder reference provided by AAudio_createStreamBuilder()
+ * @param tags the desired tags to add, which must be UTF-8 format and null-terminated. The size
+ * of the tags must be at most {@link #AAUDIO_ATTRIBUTES_TAGS_MAX_SIZE}. Multiple tags
+ * must be separated by semicolons.
+ * @return {@link #AAUDIO_OK} on success or {@link #AAUDIO_ERROR_ILLEGAL_ARGUMENT} if the given
+ * tags is null or its length is greater than {@link #AAUDIO_ATTRIBUTES_TAGS_MAX_SIZE}.
+ */
+aaudio_result_t AAudioStreamBuilder_setTags(AAudioStreamBuilder* _Nonnull builder,
+ const char* _Nonnull tags);
+
+/**
+ * Read the audio attributes' tags for the stream into a buffer.
+ * The caller is responsible for allocating and freeing the returned data.
+ *
+ * @param stream reference provided by AAudioStreamBuilder_openStream()
+ * @param tags pointer to write the value to in UTF-8 that containing OEM extension tags. It must
+ * be sized with {@link #AAUDIO_ATTRIBUTES_TAGS_MAX_SIZE}.
+ * @return {@link #AAUDIO_OK} or {@link #AAUDIO_ERROR_ILLEGAL_ARGUMENT} if the given tags is null.
+ */
+aaudio_result_t AAudioStream_getTags(AAudioStream* _Nonnull stream, char* _Nonnull tags);
+
+#ifdef __cplusplus
+}
+#endif
+
+#endif //SYSTEM_AAUDIO_H
diff --git a/media/libaaudio/src/Android.bp b/media/libaaudio/src/Android.bp
index ebb7637..cccb096 100644
--- a/media/libaaudio/src/Android.bp
+++ b/media/libaaudio/src/Android.bp
@@ -103,6 +103,7 @@
"framework-permission-aidl-cpp",
"libaaudio_internal",
"libaudioclient",
+ "libaudiofoundation",
"libaudioutils",
"libbinder",
"libcutils",
@@ -166,6 +167,7 @@
"framework-permission-aidl-cpp",
"libaudioclient",
"libaudioclient_aidl_conversion",
+ "libaudiofoundation",
"libaudioutils",
"libbinder",
"libcutils",
diff --git a/media/libaaudio/src/binding/AAudioStreamConfiguration.cpp b/media/libaaudio/src/binding/AAudioStreamConfiguration.cpp
index c4692ce..37c1a98 100644
--- a/media/libaaudio/src/binding/AAudioStreamConfiguration.cpp
+++ b/media/libaaudio/src/binding/AAudioStreamConfiguration.cpp
@@ -34,7 +34,16 @@
AAudioStreamConfiguration::AAudioStreamConfiguration(const StreamParameters& parcelable) {
setChannelMask(parcelable.channelMask);
setSampleRate(parcelable.sampleRate);
- setDeviceId(parcelable.deviceId);
+ auto deviceIds = android::convertContainer<android::DeviceIdVector>(
+ parcelable.deviceIds, android::aidl2legacy_int32_t_audio_port_handle_t);
+ if (deviceIds.ok()) {
+ setDeviceIds(deviceIds.value());
+ } else {
+ ALOGE("deviceIds (%s) aidl2legacy conversion failed",
+ android::toString(parcelable.deviceIds).c_str());
+ android::DeviceIdVector emptyDeviceIds;
+ setDeviceIds(emptyDeviceIds);
+ }
static_assert(sizeof(aaudio_sharing_mode_t) == sizeof(parcelable.sharingMode));
setSharingMode(parcelable.sharingMode);
auto convFormat = android::aidl2legacy_AudioFormatDescription_audio_format_t(
@@ -50,7 +59,7 @@
setUsage(parcelable.usage);
static_assert(sizeof(aaudio_content_type_t) == sizeof(parcelable.contentType));
setContentType(parcelable.contentType);
-
+ setTags(parcelable.tags);
static_assert(sizeof(aaudio_spatialization_behavior_t) ==
sizeof(parcelable.spatializationBehavior));
setSpatializationBehavior(parcelable.spatializationBehavior);
@@ -87,7 +96,15 @@
StreamParameters result;
result.channelMask = getChannelMask();
result.sampleRate = getSampleRate();
- result.deviceId = getDeviceId();
+ auto deviceIds = android::convertContainer<std::vector<int32_t>>(
+ getDeviceIds(), android::legacy2aidl_audio_port_handle_t_int32_t);
+ if (deviceIds.ok()) {
+ result.deviceIds = deviceIds.value();
+ } else {
+ ALOGE("deviceIds (%s) legacy2aidl conversion failed",
+ android::toString(getDeviceIds()).c_str());
+ result.deviceIds = {};
+ }
static_assert(sizeof(aaudio_sharing_mode_t) == sizeof(result.sharingMode));
result.sharingMode = getSharingMode();
auto convAudioFormat = android::legacy2aidl_audio_format_t_AudioFormatDescription(getFormat());
@@ -106,6 +123,8 @@
result.usage = getUsage();
static_assert(sizeof(aaudio_content_type_t) == sizeof(result.contentType));
result.contentType = getContentType();
+ std::optional<std::string> tags = getTags();
+ result.tags = tags.has_value() ? tags.value() : "";
static_assert(
sizeof(aaudio_spatialization_behavior_t) == sizeof(result.spatializationBehavior));
result.spatializationBehavior = getSpatializationBehavior();
diff --git a/media/libaaudio/src/binding/aidl/aaudio/StreamParameters.aidl b/media/libaaudio/src/binding/aidl/aaudio/StreamParameters.aidl
index fa46e0d..7d7abce 100644
--- a/media/libaaudio/src/binding/aidl/aaudio/StreamParameters.aidl
+++ b/media/libaaudio/src/binding/aidl/aaudio/StreamParameters.aidl
@@ -21,12 +21,13 @@
parcelable StreamParameters {
int channelMask; // = AAUDIO_UNSPECIFIED;
int sampleRate; // = AAUDIO_UNSPECIFIED;
- int deviceId; // = AAUDIO_UNSPECIFIED;
+ int[] deviceIds; // = null;
int /* aaudio_sharing_mode_t */ sharingMode; // = AAUDIO_SHARING_MODE_SHARED;
AudioFormatDescription audioFormat; // = AUDIO_FORMAT_DEFAULT;
int /* aaudio_direction_t */ direction; // = AAUDIO_DIRECTION_OUTPUT;
int /* aaudio_usage_t */ usage; // = AAUDIO_UNSPECIFIED;
int /* aaudio_content_type_t */ contentType; // = AAUDIO_UNSPECIFIED;
+ @utf8InCpp String tags; /* UTF8 */
int /* aaudio_spatialization_behavior_t */spatializationBehavior; //= AAUDIO_UNSPECIFIED;
boolean isContentSpatialized; // = false;
int /* aaudio_input_preset_t */ inputPreset; // = AAUDIO_UNSPECIFIED;
diff --git a/media/libaaudio/src/client/AudioStreamInternal.cpp b/media/libaaudio/src/client/AudioStreamInternal.cpp
index fa3f5a0..6bc7dc2 100644
--- a/media/libaaudio/src/client/AudioStreamInternal.cpp
+++ b/media/libaaudio/src/client/AudioStreamInternal.cpp
@@ -121,7 +121,7 @@
request.setSharingModeMatchRequired(isSharingModeMatchRequired());
request.setInService(isInService());
- request.getConfiguration().setDeviceId(getDeviceId());
+ request.getConfiguration().setDeviceIds(getDeviceIds());
request.getConfiguration().setSampleRate(getSampleRate());
request.getConfiguration().setDirection(getDirection());
request.getConfiguration().setSharingMode(getSharingMode());
@@ -129,6 +129,7 @@
request.getConfiguration().setUsage(getUsage());
request.getConfiguration().setContentType(getContentType());
+ request.getConfiguration().setTags(getTags());
request.getConfiguration().setSpatializationBehavior(getSpatializationBehavior());
request.getConfiguration().setIsContentSpatialized(isContentSpatialized());
request.getConfiguration().setInputPreset(getInputPreset());
@@ -179,12 +180,13 @@
setChannelMask(configurationOutput.getChannelMask());
}
- setDeviceId(configurationOutput.getDeviceId());
+ setDeviceIds(configurationOutput.getDeviceIds());
setSessionId(configurationOutput.getSessionId());
setSharingMode(configurationOutput.getSharingMode());
setUsage(configurationOutput.getUsage());
setContentType(configurationOutput.getContentType());
+ setTags(configurationOutput.getTags());
setSpatializationBehavior(configurationOutput.getSpatializationBehavior());
setIsContentSpatialized(configurationOutput.isContentSpatialized());
setInputPreset(configurationOutput.getInputPreset());
diff --git a/media/libaaudio/src/core/AAudioAudio.cpp b/media/libaaudio/src/core/AAudioAudio.cpp
index 1e8ac8d..de82471 100644
--- a/media/libaaudio/src/core/AAudioAudio.cpp
+++ b/media/libaaudio/src/core/AAudioAudio.cpp
@@ -25,6 +25,7 @@
#include <aaudio/AAudio.h>
#include <aaudio/AAudioTesting.h>
+#include <system/aaudio/AAudio.h>
#include "AudioClock.h"
#include "AudioGlobal.h"
#include "AudioStreamBuilder.h"
@@ -53,6 +54,16 @@
return AudioGlobal_convertStreamStateToText(state);
}
+AAUDIO_API aaudio_policy_t AAudio_getPlatformMMapPolicy(
+ AAudio_DeviceType device, aaudio_direction_t direction) {
+ return AudioGlobal_getPlatformMMapPolicy(device, direction);
+}
+
+AAUDIO_API aaudio_policy_t AAudio_getPlatformMMapExclusivePolicy(
+ AAudio_DeviceType device, aaudio_direction_t direction) {
+ return AudioGlobal_getPlatformMMapExclusivePolicy(device, direction);
+}
+
static AudioStream *convertAAudioStreamToAudioStream(AAudioStream* stream)
{
return (AudioStream*) stream;
@@ -84,7 +95,11 @@
int32_t deviceId)
{
AudioStreamBuilder *streamBuilder = convertAAudioBuilderToStreamBuilder(builder);
- streamBuilder->setDeviceId(deviceId);
+ android::DeviceIdVector deviceIds;
+ if (deviceId != AAUDIO_UNSPECIFIED) {
+ deviceIds.push_back(deviceId);
+ }
+ streamBuilder->setDeviceIds(deviceIds);
}
AAUDIO_API void AAudioStreamBuilder_setPackageName(AAudioStreamBuilder* builder,
@@ -167,6 +182,17 @@
streamBuilder->setContentType(contentType);
}
+AAUDIO_API aaudio_result_t AAudioStreamBuilder_setTags(AAudioStreamBuilder* builder,
+ const char* tags) {
+ if (tags == nullptr || strlen(tags) >= AAUDIO_ATTRIBUTES_TAGS_MAX_SIZE) {
+ return AAUDIO_ERROR_ILLEGAL_ARGUMENT;
+ }
+ AudioStreamBuilder *streamBuilder = convertAAudioBuilderToStreamBuilder(builder);
+ std::optional<std::string> optionalTags = std::string(tags);
+ streamBuilder->setTags(optionalTags);
+ return AAUDIO_OK;
+}
+
AAUDIO_API void AAudioStreamBuilder_setSpatializationBehavior(AAudioStreamBuilder* builder,
aaudio_spatialization_behavior_t spatializationBehavior) {
AudioStreamBuilder *streamBuilder = convertAAudioBuilderToStreamBuilder(builder);
@@ -229,6 +255,16 @@
streamBuilder->setErrorCallbackUserData(userData);
}
+AAUDIO_API void AAudioStreamBuilder_setPresentationEndCallback(AAudioStreamBuilder* builder,
+ AAudioStream_presentationEndCallback callback, void* userData) {
+ AudioStreamBuilder *streamBuilder = convertAAudioBuilderToStreamBuilder(builder);
+ if (streamBuilder == nullptr) {
+ return;
+ }
+ streamBuilder->setPresentationEndCallbackProc(callback)
+ ->setPresentationEndCallbackUserData(userData);
+}
+
AAUDIO_API void AAudioStreamBuilder_setFramesPerDataCallback(AAudioStreamBuilder* builder,
int32_t frames)
{
@@ -515,7 +551,33 @@
AAUDIO_API int32_t AAudioStream_getDeviceId(AAudioStream* stream)
{
AudioStream *audioStream = convertAAudioStreamToAudioStream(stream);
- return audioStream->getDeviceId();
+ auto deviceIds = audioStream->getDeviceIds();
+ if (deviceIds.empty()) {
+ return AAUDIO_UNSPECIFIED;
+ }
+ return deviceIds[0];
+}
+
+AAUDIO_API aaudio_result_t AAudioStream_getDeviceIds(AAudioStream* stream, int32_t* ids,
+ int32_t* numIds)
+{
+ if (numIds == nullptr) {
+ return AAUDIO_ERROR_ILLEGAL_ARGUMENT;
+ }
+ AudioStream *audioStream = convertAAudioStreamToAudioStream(stream);
+ auto deviceIds = audioStream->getDeviceIds();
+ if (*numIds < deviceIds.size()) {
+ *numIds = deviceIds.size();
+ return AAUDIO_ERROR_OUT_OF_RANGE;
+ }
+ if (ids == nullptr) {
+ return AAUDIO_ERROR_ILLEGAL_ARGUMENT;
+ }
+ for (int i = 0; i < deviceIds.size(); i++) {
+ ids[i] = deviceIds[i];
+ }
+ *numIds = deviceIds.size();
+ return AAUDIO_OK;
}
AAUDIO_API aaudio_sharing_mode_t AAudioStream_getSharingMode(AAudioStream* stream)
@@ -536,6 +598,22 @@
return audioStream->getContentType();
}
+AAUDIO_API aaudio_result_t AAudioStream_getTags(AAudioStream* stream, char* tags)
+{
+ if (tags == nullptr) {
+ return AAUDIO_ERROR_ILLEGAL_ARGUMENT;
+ }
+ AudioStream *audioStream = convertAAudioStreamToAudioStream(stream);
+ std::optional<std::string> optTags = audioStream->getTags();
+ if (optTags.has_value() && !optTags->empty()) {
+ strncpy(tags, optTags.value().c_str(), AAUDIO_ATTRIBUTES_TAGS_MAX_SIZE);
+ tags[AAUDIO_ATTRIBUTES_TAGS_MAX_SIZE-1] = '\0';
+ } else {
+ tags[0] = '\0';
+ }
+ return AAUDIO_OK;
+}
+
AAUDIO_API aaudio_spatialization_behavior_t AAudioStream_getSpatializationBehavior(
AAudioStream* stream)
{
@@ -625,3 +703,27 @@
// Do not return channel index masks as they are not public.
return AAudio_isChannelIndexMask(channelMask) ? AAUDIO_UNSPECIFIED : channelMask;
}
+
+AAUDIO_API aaudio_result_t AAudioStream_setOffloadDelayPadding(
+ AAudioStream* stream, int32_t delayInFrames, int32_t paddingInFrames) {
+ if (delayInFrames < 0 || paddingInFrames < 0) {
+ return AAUDIO_ERROR_ILLEGAL_ARGUMENT;
+ }
+ AudioStream *audioStream = convertAAudioStreamToAudioStream(stream);
+ return audioStream->setOffloadDelayPadding(delayInFrames, paddingInFrames);
+}
+
+AAUDIO_API int32_t AAudioStream_getOffloadDelay(AAudioStream* stream) {
+ AudioStream *audioStream = convertAAudioStreamToAudioStream(stream);
+ return audioStream->getOffloadDelay();
+}
+
+AAUDIO_API int32_t AAudioStream_getOffloadPadding(AAudioStream* stream) {
+ AudioStream *audioStream = convertAAudioStreamToAudioStream(stream);
+ return audioStream->getOffloadPadding();
+}
+
+AAUDIO_API aaudio_result_t AAudioStream_setOffloadEndOfStream(AAudioStream* stream) {
+ AudioStream *audioStream = convertAAudioStreamToAudioStream(stream);
+ return audioStream->setOffloadEndOfStream();
+}
diff --git a/media/libaaudio/src/core/AAudioStreamParameters.cpp b/media/libaaudio/src/core/AAudioStreamParameters.cpp
index 67fc668..ed20d12 100644
--- a/media/libaaudio/src/core/AAudioStreamParameters.cpp
+++ b/media/libaaudio/src/core/AAudioStreamParameters.cpp
@@ -18,6 +18,7 @@
#define LOG_TAG "AAudioStreamParameters"
#include <utils/Log.h>
#include <system/audio.h>
+#include <system/aaudio/AAudio.h>
#include "AAudioStreamParameters.h"
@@ -26,7 +27,7 @@
void AAudioStreamParameters::copyFrom(const AAudioStreamParameters &other) {
mSamplesPerFrame = other.mSamplesPerFrame;
mSampleRate = other.mSampleRate;
- mDeviceId = other.mDeviceId;
+ mDeviceIds = other.mDeviceIds;
mSessionId = other.mSessionId;
mSharingMode = other.mSharingMode;
mAudioFormat = other.mAudioFormat;
@@ -34,6 +35,7 @@
mBufferCapacity = other.mBufferCapacity;
mUsage = other.mUsage;
mContentType = other.mContentType;
+ mTags = other.mTags;
mSpatializationBehavior = other.mSpatializationBehavior;
mIsContentSpatialized = other.mIsContentSpatialized;
mInputPreset = other.mInputPreset;
@@ -56,6 +58,13 @@
case AUDIO_FORMAT_PCM_24_BIT_PACKED:
case AUDIO_FORMAT_PCM_8_24_BIT:
case AUDIO_FORMAT_IEC61937:
+ case AUDIO_FORMAT_MP3:
+ case AUDIO_FORMAT_AAC_LC:
+ case AUDIO_FORMAT_AAC_HE_V1:
+ case AUDIO_FORMAT_AAC_HE_V2:
+ case AUDIO_FORMAT_AAC_ELD:
+ case AUDIO_FORMAT_AAC_XHE:
+ case AUDIO_FORMAT_OPUS:
break; // valid
default:
ALOGD("audioFormat not valid, audio_format_t = 0x%08x", format);
@@ -72,9 +81,13 @@
return AAUDIO_ERROR_OUT_OF_RANGE;
}
- if (mDeviceId < 0) {
- ALOGD("deviceId out of range = %d", mDeviceId);
- return AAUDIO_ERROR_OUT_OF_RANGE;
+ // TODO(b/379139078): Query AudioSystem::listAudioPorts
+ for (auto deviceId : mDeviceIds) {
+ if (deviceId < 0) {
+ ALOGE("deviceId out of range = %d, deviceIds = %s", deviceId,
+ android::toString(mDeviceIds).c_str());
+ return AAUDIO_ERROR_OUT_OF_RANGE;
+ }
}
// All Session ID values are legal.
@@ -199,6 +212,10 @@
// break;
}
+ if (mTags.has_value() && mTags->size() >= AAUDIO_ATTRIBUTES_TAGS_MAX_SIZE) {
+ return AAUDIO_ERROR_ILLEGAL_ARGUMENT;
+ }
+
return validateChannelMask();
}
@@ -290,7 +307,7 @@
}
void AAudioStreamParameters::dump() const {
- ALOGD("mDeviceId = %6d", mDeviceId);
+ ALOGD("mDeviceIds = %s", android::toString(mDeviceIds).c_str());
ALOGD("mSessionId = %6d", mSessionId);
ALOGD("mSampleRate = %6d", mSampleRate);
ALOGD("mSamplesPerFrame = %6d", mSamplesPerFrame);
@@ -301,6 +318,7 @@
ALOGD("mBufferCapacity = %6d", mBufferCapacity);
ALOGD("mUsage = %6d", mUsage);
ALOGD("mContentType = %6d", mContentType);
+ ALOGD("mTags = %s", mTags.has_value() ? mTags.value().c_str() : "");
ALOGD("mSpatializationBehavior = %6d", mSpatializationBehavior);
ALOGD("mIsContentSpatialized = %s", mIsContentSpatialized ? "true" : "false");
ALOGD("mInputPreset = %6d", mInputPreset);
diff --git a/media/libaaudio/src/core/AAudioStreamParameters.h b/media/libaaudio/src/core/AAudioStreamParameters.h
index 7c78f03..c4c0a4f 100644
--- a/media/libaaudio/src/core/AAudioStreamParameters.h
+++ b/media/libaaudio/src/core/AAudioStreamParameters.h
@@ -20,6 +20,7 @@
#include <stdint.h>
#include <aaudio/AAudio.h>
+#include <media/AudioContainers.h>
#include <utility/AAudioUtilities.h>
namespace aaudio {
@@ -29,12 +30,12 @@
AAudioStreamParameters() = default;
virtual ~AAudioStreamParameters() = default;
- int32_t getDeviceId() const {
- return mDeviceId;
+ android::DeviceIdVector getDeviceIds() const {
+ return mDeviceIds;
}
- void setDeviceId(int32_t deviceId) {
- mDeviceId = deviceId;
+ void setDeviceIds(const android::DeviceIdVector& deviceIds) {
+ mDeviceIds = deviceIds;
}
int32_t getSampleRate() const {
@@ -97,6 +98,14 @@
mContentType = contentType;
}
+ void setTags(const std::optional<std::string>& tags) {
+ mTags = tags;
+ }
+
+ const std::optional<std::string> getTags() const {
+ return mTags;
+ }
+
aaudio_spatialization_behavior_t getSpatializationBehavior() const {
return mSpatializationBehavior;
}
@@ -217,12 +226,13 @@
int32_t mSamplesPerFrame = AAUDIO_UNSPECIFIED;
int32_t mSampleRate = AAUDIO_UNSPECIFIED;
- int32_t mDeviceId = AAUDIO_UNSPECIFIED;
+ android::DeviceIdVector mDeviceIds;
aaudio_sharing_mode_t mSharingMode = AAUDIO_SHARING_MODE_SHARED;
audio_format_t mAudioFormat = AUDIO_FORMAT_DEFAULT;
aaudio_direction_t mDirection = AAUDIO_DIRECTION_OUTPUT;
aaudio_usage_t mUsage = AAUDIO_UNSPECIFIED;
aaudio_content_type_t mContentType = AAUDIO_UNSPECIFIED;
+ std::optional<std::string> mTags = {};
aaudio_spatialization_behavior_t mSpatializationBehavior
= AAUDIO_UNSPECIFIED;
bool mIsContentSpatialized = false;
diff --git a/media/libaaudio/src/core/AudioGlobal.cpp b/media/libaaudio/src/core/AudioGlobal.cpp
index 30f9677..3268488 100644
--- a/media/libaaudio/src/core/AudioGlobal.cpp
+++ b/media/libaaudio/src/core/AudioGlobal.cpp
@@ -15,6 +15,13 @@
*/
#include <aaudio/AAudio.h>
#include <aaudio/AAudioTesting.h>
+#include <android/media/audio/common/AudioDevice.h>
+#include <android/media/audio/common/AudioMMapPolicyInfo.h>
+#include <android/media/audio/common/AudioMMapPolicyType.h>
+#include <media/AidlConversionCppNdk.h>
+#include <media/AudioSystem.h>
+#include <system/audio-hal-enums.h>
+#include <utility/AAudioUtilities.h>
#include "AudioGlobal.h"
@@ -23,6 +30,10 @@
*/
namespace aaudio {
+using android::media::audio::common::AudioDevice;
+using android::media::audio::common::AudioMMapPolicyInfo;
+using android::media::audio::common::AudioMMapPolicyType;
+
static aaudio_policy_t g_MMapPolicy = AAUDIO_UNSPECIFIED;
aaudio_policy_t AudioGlobal_getMMapPolicy() {
@@ -132,6 +143,39 @@
return "Unrecognized";
}
+namespace {
+
+aaudio_policy_t getPlatformMMapPolicy(AudioMMapPolicyType policyType, AAudio_DeviceType device,
+ aaudio_direction_t direction) {
+ if (direction != AAUDIO_DIRECTION_INPUT && direction != AAUDIO_DIRECTION_OUTPUT) {
+ return AAUDIO_ERROR_ILLEGAL_ARGUMENT;
+ }
+ const audio_devices_t deviceType = AAudioConvert_aaudioToAndroidDeviceType(device, direction);
+ if (deviceType == AUDIO_DEVICE_NONE) {
+ return AAUDIO_ERROR_ILLEGAL_ARGUMENT;
+ }
+
+ AudioMMapPolicyInfo policyInfo;
+ if (android::status_t status = android::AudioSystem::getMmapPolicyForDevice(
+ policyType, deviceType, &policyInfo);
+ status != android::NO_ERROR) {
+ return AAudioConvert_androidToAAudioResult(status);
+ }
+ return AAudioConvert_androidToAAudioMMapPolicy(policyInfo.mmapPolicy);
+}
+
+} // namespace
+
+aaudio_policy_t AudioGlobal_getPlatformMMapPolicy(
+ AAudio_DeviceType device, aaudio_direction_t direction) {
+ return getPlatformMMapPolicy(AudioMMapPolicyType::DEFAULT, device, direction);
+}
+
+aaudio_policy_t AudioGlobal_getPlatformMMapExclusivePolicy(
+ AAudio_DeviceType device, aaudio_direction_t direction) {
+ return getPlatformMMapPolicy(AudioMMapPolicyType::EXCLUSIVE, device, direction);
+}
+
#undef AAUDIO_CASE_ENUM
} // namespace aaudio
diff --git a/media/libaaudio/src/core/AudioGlobal.h b/media/libaaudio/src/core/AudioGlobal.h
index 8af49b4..7ff344b 100644
--- a/media/libaaudio/src/core/AudioGlobal.h
+++ b/media/libaaudio/src/core/AudioGlobal.h
@@ -40,6 +40,11 @@
const char* AudioGlobal_convertSharingModeToText(aaudio_sharing_mode_t mode);
const char* AudioGlobal_convertStreamStateToText(aaudio_stream_state_t state);
+aaudio_policy_t AudioGlobal_getPlatformMMapPolicy(
+ AAudio_DeviceType device, aaudio_direction_t direction);
+aaudio_policy_t AudioGlobal_getPlatformMMapExclusivePolicy(
+ AAudio_DeviceType device, aaudio_direction_t direction);
+
} // namespace aaudio
#endif // AAUDIO_AUDIOGLOBAL_H
diff --git a/media/libaaudio/src/core/AudioStream.cpp b/media/libaaudio/src/core/AudioStream.cpp
index e0fd325..468bcfa 100644
--- a/media/libaaudio/src/core/AudioStream.cpp
+++ b/media/libaaudio/src/core/AudioStream.cpp
@@ -79,7 +79,7 @@
mSamplesPerFrame = builder.getSamplesPerFrame();
mChannelMask = builder.getChannelMask();
mSampleRate = builder.getSampleRate();
- mDeviceId = builder.getDeviceId();
+ mDeviceIds = builder.getDeviceIds();
mFormat = builder.getFormat();
mSharingMode = builder.getSharingMode();
mSharingModeMatchRequired = builder.isSharingModeMatchRequired();
@@ -93,6 +93,7 @@
if (mContentType == AAUDIO_UNSPECIFIED) {
mContentType = AAUDIO_CONTENT_TYPE_MUSIC;
}
+ mTags = builder.getTags();
mSpatializationBehavior = builder.getSpatializationBehavior();
// for consistency with other properties, note UNSPECIFIED is the same as AUTO
if (mSpatializationBehavior == AAUDIO_UNSPECIFIED) {
@@ -115,6 +116,8 @@
mErrorCallbackProc = builder.getErrorCallbackProc();
mDataCallbackUserData = builder.getDataCallbackUserData();
mErrorCallbackUserData = builder.getErrorCallbackUserData();
+ setPresentationEndCallbackUserData(builder.getPresentationEndCallbackUserData());
+ setPresentationEndCallbackProc(builder.getPresentationEndCallbackProc());
return AAUDIO_OK;
}
@@ -203,7 +206,7 @@
aaudio_result_t result = requestStart_l();
if (result == AAUDIO_OK) {
// We only call this for logging in "dumpsys audio". So ignore return code.
- (void) mPlayerBase->startWithStatus(getDeviceId());
+ (void) mPlayerBase->startWithStatus(getDeviceIds());
}
return result;
}
@@ -284,6 +287,10 @@
aaudio_result_t AudioStream::systemStopInternal() {
std::lock_guard<std::mutex> lock(mStreamLock);
+ return systemStopInternal_l();
+}
+
+aaudio_result_t AudioStream::systemStopInternal_l() {
aaudio_result_t result = safeStop_l();
if (result == AAUDIO_OK) {
// We only call this for logging in "dumpsys audio". So ignore return code.
diff --git a/media/libaaudio/src/core/AudioStream.h b/media/libaaudio/src/core/AudioStream.h
index 49a63c4..0ddc8ed 100644
--- a/media/libaaudio/src/core/AudioStream.h
+++ b/media/libaaudio/src/core/AudioStream.h
@@ -27,6 +27,7 @@
#include <utils/StrongPointer.h>
#include <aaudio/AAudio.h>
+#include <media/AudioContainers.h>
#include <media/AudioSystem.h>
#include <media/PlayerBase.h>
#include <media/VolumeShaper.h>
@@ -268,8 +269,8 @@
mPerformanceMode = performanceMode;
}
- int32_t getDeviceId() const {
- return mDeviceId;
+ android::DeviceIdVector getDeviceIds() const {
+ return mDeviceIds;
}
aaudio_sharing_mode_t getSharingMode() const {
@@ -290,6 +291,10 @@
return mContentType;
}
+ const std::optional<std::string> getTags() const {
+ return mTags;
+ }
+
aaudio_spatialization_behavior_t getSpatializationBehavior() const {
return mSpatializationBehavior;
}
@@ -327,7 +332,7 @@
* have been called.
*/
int32_t getBytesPerFrame() const {
- return mSamplesPerFrame * getBytesPerSample();
+ return audio_bytes_per_frame(mSamplesPerFrame, mFormat);
}
/**
@@ -341,7 +346,7 @@
* This is only valid after setDeviceSamplesPerFrame() and setDeviceFormat() have been called.
*/
int32_t getBytesPerDeviceFrame() const {
- return getDeviceSamplesPerFrame() * audio_bytes_per_sample(getDeviceFormat());
+ return audio_bytes_per_frame(getDeviceSamplesPerFrame(), getDeviceFormat());
}
virtual int64_t getFramesWritten() = 0;
@@ -385,6 +390,24 @@
mDeviceSamplesPerFrame = deviceSamplesPerFrame;
}
+ virtual aaudio_result_t setOffloadDelayPadding(int32_t delayInFrames, int32_t paddingInFrames) {
+ return AAUDIO_ERROR_UNIMPLEMENTED;
+ }
+
+ virtual int32_t getOffloadDelay() {
+ return AAUDIO_ERROR_UNIMPLEMENTED;
+ }
+
+ virtual int32_t getOffloadPadding() {
+ return AAUDIO_ERROR_UNIMPLEMENTED;
+ }
+
+ virtual aaudio_result_t setOffloadEndOfStream() EXCLUDES(mStreamLock) {
+ return AAUDIO_ERROR_UNIMPLEMENTED;
+ }
+
+ virtual void setPresentationEndCallbackProc(AAudioStream_presentationEndCallback proc) { }
+ virtual void setPresentationEndCallbackUserData(void* userData) { }
/**
* @return true if data callback has been specified
@@ -403,11 +426,11 @@
/**
* @return true if called from the same thread as the callback
*/
- bool collidesWithCallback() const;
+ virtual bool collidesWithCallback() const;
// Implement AudioDeviceCallback
void onAudioDeviceUpdate(audio_io_handle_t audioIo,
- audio_port_handle_t deviceId) override {};
+ const android::DeviceIdVector& deviceIds) override {};
// ============== I/O ===========================
// A Stream will only implement read() or write() depending on its direction.
@@ -628,8 +651,8 @@
}
void setDisconnected();
- void setDeviceId(int32_t deviceId) {
- mDeviceId = deviceId;
+ void setDeviceIds(const android::DeviceIdVector& deviceIds) {
+ mDeviceIds = deviceIds;
}
// This should not be called after the open() call.
@@ -644,6 +667,8 @@
aaudio_result_t joinThread_l(void **returnArg) REQUIRES(mStreamLock);
+ virtual aaudio_result_t systemStopInternal_l() REQUIRES(mStreamLock);
+
std::atomic<bool> mCallbackEnabled{false};
float mDuckAndMuteVolume = 1.0f;
@@ -687,6 +712,13 @@
mContentType = contentType;
}
+ /**
+ * This should not be called after the open() call.
+ */
+ void setTags(const std::optional<std::string> &tags) {
+ mTags = tags;
+ }
+
void setSpatializationBehavior(aaudio_spatialization_behavior_t spatializationBehavior) {
mSpatializationBehavior = spatializationBehavior;
}
@@ -730,6 +762,8 @@
mAudioBalance = audioBalance;
}
+ aaudio_result_t safeStop_l() REQUIRES(mStreamLock);
+
std::string mMetricsId; // set once during open()
std::mutex mStreamLock;
@@ -738,8 +772,6 @@
private:
- aaudio_result_t safeStop_l() REQUIRES(mStreamLock);
-
/**
* Release then close the stream.
*/
@@ -763,7 +795,7 @@
int32_t mSampleRate = AAUDIO_UNSPECIFIED;
int32_t mDeviceSampleRate = AAUDIO_UNSPECIFIED;
int32_t mHardwareSampleRate = AAUDIO_UNSPECIFIED;
- int32_t mDeviceId = AAUDIO_UNSPECIFIED;
+ android::DeviceIdVector mDeviceIds;
aaudio_sharing_mode_t mSharingMode = AAUDIO_SHARING_MODE_SHARED;
bool mSharingModeMatchRequired = false; // must match sharing mode requested
audio_format_t mFormat = AUDIO_FORMAT_DEFAULT;
@@ -776,6 +808,7 @@
aaudio_usage_t mUsage = AAUDIO_UNSPECIFIED;
aaudio_content_type_t mContentType = AAUDIO_UNSPECIFIED;
+ std::optional<std::string> mTags = {};
aaudio_spatialization_behavior_t mSpatializationBehavior = AAUDIO_UNSPECIFIED;
bool mIsContentSpatialized = false;
aaudio_input_preset_t mInputPreset = AAUDIO_UNSPECIFIED;
diff --git a/media/libaaudio/src/core/AudioStreamBuilder.cpp b/media/libaaudio/src/core/AudioStreamBuilder.cpp
index 01f0038..61881cb 100644
--- a/media/libaaudio/src/core/AudioStreamBuilder.cpp
+++ b/media/libaaudio/src/core/AudioStreamBuilder.cpp
@@ -110,7 +110,7 @@
std::vector<AudioMMapPolicyInfo> policyInfos;
aaudio_policy_t mmapPolicy = AudioGlobal_getMMapPolicy();
ALOGD("%s, global mmap policy is %d", __func__, mmapPolicy);
- if (status_t status = android::AudioSystem::getMmapPolicyInfo(
+ if (status_t status = android::AudioSystem::getMmapPolicyInfos(
AudioMMapPolicyType::DEFAULT, &policyInfos); status == NO_ERROR) {
aaudio_policy_t systemMmapPolicy = AAudio_getAAudioPolicy(
policyInfos, AAUDIO_MMAP_POLICY_DEFAULT_AIDL);
@@ -143,7 +143,7 @@
policyInfos.clear();
aaudio_policy_t mmapExclusivePolicy = AAUDIO_UNSPECIFIED;
- if (status_t status = android::AudioSystem::getMmapPolicyInfo(
+ if (status_t status = android::AudioSystem::getMmapPolicyInfos(
AudioMMapPolicyType::EXCLUSIVE, &policyInfos); status == NO_ERROR) {
mmapExclusivePolicy = AAudio_getAAudioPolicy(
policyInfos, AAUDIO_MMAP_EXCLUSIVE_POLICY_DEFAULT_AIDL);
@@ -174,6 +174,11 @@
__func__);
allowMMap = false;
}
+ if (!audio_is_linear_pcm(getFormat())) {
+ ALOGD("%s() MMAP not used because the requested format(%#x) is not pcm",
+ __func__, getFormat());
+ allowMMap = false;
+ }
// SessionID and Effects are only supported in Legacy mode.
if (getSessionId() != AAUDIO_SESSION_ID_NONE) {
@@ -261,6 +266,14 @@
case AAUDIO_PERFORMANCE_MODE_POWER_SAVING:
case AAUDIO_PERFORMANCE_MODE_LOW_LATENCY:
break;
+ case AAUDIO_PERFORMANCE_MODE_POWER_SAVING_OFFLOADED:
+ if (getDirection() != AAUDIO_DIRECTION_OUTPUT ||
+ getFormat() == AUDIO_FORMAT_DEFAULT ||
+ getSampleRate() == 0 ||
+ getChannelMask() == AAUDIO_UNSPECIFIED) {
+ return AAUDIO_ERROR_ILLEGAL_ARGUMENT;
+ }
+ break;
default:
ALOGE("illegal performanceMode = %d", mPerformanceMode);
return AAUDIO_ERROR_ILLEGAL_ARGUMENT;
@@ -307,8 +320,8 @@
getSampleRate(), getSamplesPerFrame(), getChannelMask(), getFormat(),
AAudio_convertSharingModeToShortText(getSharingMode()),
AAudio_convertDirectionToText(getDirection()));
- ALOGI("device = %6d, sessionId = %d, perfMode = %d, callback: %s with frames = %d",
- getDeviceId(),
+ ALOGI("devices = %s, sessionId = %d, perfMode = %d, callback: %s with frames = %d",
+ android::toString(getDeviceIds()).c_str(),
getSessionId(),
getPerformanceMode(),
((getDataCallbackProc() != nullptr) ? "ON" : "OFF"),
diff --git a/media/libaaudio/src/core/AudioStreamBuilder.h b/media/libaaudio/src/core/AudioStreamBuilder.h
index f91c25a..d0678ae 100644
--- a/media/libaaudio/src/core/AudioStreamBuilder.h
+++ b/media/libaaudio/src/core/AudioStreamBuilder.h
@@ -89,6 +89,24 @@
return mErrorCallbackUserData;
}
+ AudioStreamBuilder* setPresentationEndCallbackProc(AAudioStream_presentationEndCallback proc) {
+ mPresentationEndCallbackProc = proc;
+ return this;
+ }
+
+ AAudioStream_presentationEndCallback getPresentationEndCallbackProc() const {
+ return mPresentationEndCallbackProc;
+ }
+
+ AudioStreamBuilder* setPresentationEndCallbackUserData(void *userData) {
+ mPresentationEndCallbackUserData = userData;
+ return this;
+ }
+
+ void *getPresentationEndCallbackUserData() const {
+ return mPresentationEndCallbackUserData;
+ }
+
int32_t getFramesPerDataCallback() const {
return mFramesPerDataCallback;
}
@@ -128,6 +146,9 @@
AAudioStream_errorCallback mErrorCallbackProc = nullptr;
void *mErrorCallbackUserData = nullptr;
+ AAudioStream_presentationEndCallback mPresentationEndCallbackProc = nullptr;
+ void *mPresentationEndCallbackUserData = nullptr;
+
enum {
PRIVACY_SENSITIVE_DEFAULT = -1,
PRIVACY_SENSITIVE_DISABLED = 0,
diff --git a/media/libaaudio/src/legacy/AudioStreamLegacy.cpp b/media/libaaudio/src/legacy/AudioStreamLegacy.cpp
index 255bd0f..dfb9a01 100644
--- a/media/libaaudio/src/legacy/AudioStreamLegacy.cpp
+++ b/media/libaaudio/src/legacy/AudioStreamLegacy.cpp
@@ -260,36 +260,41 @@
}
void AudioStreamLegacy::onAudioDeviceUpdate(audio_io_handle_t /* audioIo */,
- audio_port_handle_t deviceId) {
- // Check for an invalid deviceId. Why change to UNSPECIFIED?
- if (deviceId == AAUDIO_UNSPECIFIED) {
- ALOGE("%s(, deviceId = AAUDIO_UNSPECIFIED)! Why?", __func__);
+ const android::DeviceIdVector& deviceIds) {
+ // Check for empty deviceIds. Callbacks for duplicating threads return empty devices.
+ if (deviceIds.empty()) {
+ ALOGW("%s() empty deviceIds", __func__);
return;
}
+ android::DeviceIdVector oldDeviceIds = getDeviceIds();
// Device routing is a common source of errors and DISCONNECTS.
// Please leave this log in place. If there is a bug then this might
// get called after the stream has been deleted so log before we
// touch the stream object.
- ALOGD("%s(deviceId = %d)", __func__, (int)deviceId);
- if (getDeviceId() != AAUDIO_UNSPECIFIED
- && getDeviceId() != deviceId
+ ALOGD("%s() devices %s => %s",
+ __func__, android::toString(oldDeviceIds).c_str(),
+ android::toString(deviceIds).c_str());
+ if (!oldDeviceIds.empty()
+ && !android::areDeviceIdsEqual(oldDeviceIds, deviceIds)
&& !isDisconnected()
) {
// Note that isDataCallbackActive() is affected by state so call it before DISCONNECTING.
// If we have a data callback and the stream is active, then ask the data callback
// to DISCONNECT and call the error callback.
if (isDataCallbackActive()) {
- ALOGD("%s() request DISCONNECT in data callback, device %d => %d",
- __func__, (int) getDeviceId(), (int) deviceId);
+ ALOGD("%s() request DISCONNECT in data callback, devices %s => %s",
+ __func__, android::toString(oldDeviceIds).c_str(),
+ android::toString(deviceIds).c_str());
// If the stream is stopped before the data callback has a chance to handle the
// request then the requestStop_l() and requestPause() methods will handle it after
// the callback has stopped.
mRequestDisconnect.request();
} else {
- ALOGD("%s() DISCONNECT the stream now, device %d => %d",
- __func__, (int) getDeviceId(), (int) deviceId);
+ ALOGD("%s() DISCONNECT the stream now, devices %s => %s",
+ __func__, android::toString(oldDeviceIds).c_str(),
+ android::toString(deviceIds).c_str());
forceDisconnect();
}
}
- setDeviceId(deviceId);
+ setDeviceIds(deviceIds);
}
diff --git a/media/libaaudio/src/legacy/AudioStreamLegacy.h b/media/libaaudio/src/legacy/AudioStreamLegacy.h
index 53f6e06..a729161 100644
--- a/media/libaaudio/src/legacy/AudioStreamLegacy.h
+++ b/media/libaaudio/src/legacy/AudioStreamLegacy.h
@@ -95,7 +95,7 @@
android::ExtendedTimestamp *extendedTimestamp);
void onAudioDeviceUpdate(audio_io_handle_t audioIo,
- audio_port_handle_t deviceId) override;
+ const android::DeviceIdVector& deviceIds) override;
/*
* Check to see whether a callback thread has requested a disconnected.
diff --git a/media/libaaudio/src/legacy/AudioStreamRecord.cpp b/media/libaaudio/src/legacy/AudioStreamRecord.cpp
index fe4bf2c..1591f7d 100644
--- a/media/libaaudio/src/legacy/AudioStreamRecord.cpp
+++ b/media/libaaudio/src/legacy/AudioStreamRecord.cpp
@@ -112,9 +112,7 @@
mCallbackBufferSize = builder.getFramesPerDataCallback();
// Don't call mAudioRecord->setInputDevice() because it will be overwritten by set()!
- audio_port_handle_t selectedDeviceId = (getDeviceId() == AAUDIO_UNSPECIFIED)
- ? AUDIO_PORT_HANDLE_NONE
- : getDeviceId();
+ audio_port_handle_t selectedDeviceId = getFirstDeviceId(getDeviceIds());
const audio_content_type_t contentType =
AAudioConvert_contentTypeToInternal(builder.getContentType());
@@ -198,7 +196,8 @@
AudioGlobal_convertPerformanceModeToText(builder.getPerformanceMode()))
.set(AMEDIAMETRICS_PROP_SHARINGMODE,
AudioGlobal_convertSharingModeToText(builder.getSharingMode()))
- .set(AMEDIAMETRICS_PROP_ENCODINGCLIENT, toString(requestedFormat).c_str()).record();
+ .set(AMEDIAMETRICS_PROP_ENCODINGCLIENT,
+ android::toString(requestedFormat).c_str()).record();
// Get the actual values from the AudioRecord.
setChannelMask(AAudioConvert_androidToAAudioChannelMask(
@@ -275,7 +274,7 @@
perfMode, actualPerformanceMode);
setState(AAUDIO_STREAM_STATE_OPEN);
- setDeviceId(mAudioRecord->getRoutedDeviceId());
+ setDeviceIds(mAudioRecord->getRoutedDeviceIds());
aaudio_session_id_t actualSessionId =
(requestedSessionId == AAUDIO_SESSION_ID_NONE)
diff --git a/media/libaaudio/src/legacy/AudioStreamTrack.cpp b/media/libaaudio/src/legacy/AudioStreamTrack.cpp
index d729047..da15563 100644
--- a/media/libaaudio/src/legacy/AudioStreamTrack.cpp
+++ b/media/libaaudio/src/legacy/AudioStreamTrack.cpp
@@ -22,6 +22,7 @@
#include <media/AudioTrack.h>
#include <aaudio/AAudio.h>
+#include <com_android_media_aaudio.h>
#include <system/audio.h>
#include "core/AudioGlobal.h"
@@ -56,6 +57,10 @@
aaudio_result_t AudioStreamTrack::open(const AudioStreamBuilder& builder)
{
+ if (!com::android::media::aaudio::offload_support() &&
+ builder.getPerformanceMode() == AAUDIO_PERFORMANCE_MODE_POWER_SAVING_OFFLOADED) {
+ return AAUDIO_ERROR_UNIMPLEMENTED;
+ }
aaudio_result_t result = AAUDIO_OK;
result = AudioStream::open(builder);
@@ -132,9 +137,7 @@
notificationFrames, (uint)frameCount);
// Don't call mAudioTrack->setDeviceId() because it will be overwritten by set()!
- audio_port_handle_t selectedDeviceId = (getDeviceId() == AAUDIO_UNSPECIFIED)
- ? AUDIO_PORT_HANDLE_NONE
- : getDeviceId();
+ audio_port_handle_t selectedDeviceId = getFirstDeviceId(getDeviceIds());
const audio_content_type_t contentType =
AAudioConvert_contentTypeToInternal(builder.getContentType());
@@ -146,13 +149,41 @@
builder.isContentSpatialized(),
flags);
- const audio_attributes_t attributes = {
- .content_type = contentType,
- .usage = usage,
- .source = AUDIO_SOURCE_DEFAULT, // only used for recording
- .flags = attributesFlags,
- .tags = ""
- };
+ const std::optional<std::string> tags = builder.getTags();
+ audio_attributes_t attributes = AUDIO_ATTRIBUTES_INITIALIZER;
+ attributes.content_type = contentType;
+ attributes.usage = usage;
+ attributes.flags = attributesFlags;
+ if (tags.has_value() && !tags.value().empty()) {
+ strcpy(attributes.tags, tags.value().c_str());
+ }
+
+ audio_offload_info_t offloadInfo = AUDIO_INFO_INITIALIZER;
+ if (getPerformanceMode() == AAUDIO_PERFORMANCE_MODE_POWER_SAVING_OFFLOADED) {
+ audio_config_t config = AUDIO_CONFIG_INITIALIZER;
+ config.format = format;
+ config.channel_mask = channelMask;
+ config.sample_rate = getSampleRate();
+ audio_direct_mode_t directMode = AUDIO_DIRECT_NOT_SUPPORTED;
+ if (status_t status = AudioSystem::getDirectPlaybackSupport(
+ &attributes, &config, &directMode);
+ status != NO_ERROR) {
+ ALOGE("%s, failed to query direct support, error=%d", __func__, status);
+ return status;
+ }
+ static const audio_direct_mode_t offloadMode = static_cast<audio_direct_mode_t>(
+ AUDIO_DIRECT_OFFLOAD_SUPPORTED | AUDIO_DIRECT_OFFLOAD_GAPLESS_SUPPORTED);
+ if ((directMode & offloadMode) == AUDIO_DIRECT_NOT_SUPPORTED) {
+ return AAUDIO_ERROR_ILLEGAL_ARGUMENT;
+ }
+ flags = AUDIO_OUTPUT_FLAG_COMPRESS_OFFLOAD;
+ frameCount = 0;
+ offloadInfo.format = format;
+ offloadInfo.sample_rate = getSampleRate();
+ offloadInfo.channel_mask = channelMask;
+ offloadInfo.has_video = false;
+ offloadInfo.stream_type = AUDIO_STREAM_MUSIC;
+ }
mAudioTrack = new AudioTrack();
// TODO b/182392769: use attribution source util
@@ -169,7 +200,8 @@
false, // DEFAULT threadCanCallJava
sessionId,
streamTransferType,
- nullptr, // DEFAULT audio_offload_info_t
+ getPerformanceMode() == AAUDIO_PERFORMANCE_MODE_POWER_SAVING_OFFLOADED
+ ? &offloadInfo : nullptr,
AttributionSourceState(), // DEFAULT uid and pid
&attributes,
// WARNING - If doNotReconnect set true then audio stops after plugging and unplugging
@@ -197,7 +229,8 @@
AudioGlobal_convertPerformanceModeToText(builder.getPerformanceMode()))
.set(AMEDIAMETRICS_PROP_SHARINGMODE,
AudioGlobal_convertSharingModeToText(builder.getSharingMode()))
- .set(AMEDIAMETRICS_PROP_ENCODINGCLIENT, toString(getFormat()).c_str()).record();
+ .set(AMEDIAMETRICS_PROP_ENCODINGCLIENT,
+ android::toString(getFormat()).c_str()).record();
doSetVolume();
@@ -233,7 +266,7 @@
mBlockAdapter = nullptr;
}
- setDeviceId(mAudioTrack->getRoutedDeviceId());
+ setDeviceIds(mAudioTrack->getRoutedDeviceIds());
aaudio_session_id_t actualSessionId =
(requestedSessionId == AAUDIO_SESSION_ID_NONE)
@@ -248,7 +281,9 @@
audio_output_flags_t actualFlags = mAudioTrack->getFlags();
aaudio_performance_mode_t actualPerformanceMode = AAUDIO_PERFORMANCE_MODE_NONE;
// We may not get the RAW flag. But as long as we get the FAST flag we can call it LOW_LATENCY.
- if ((actualFlags & AUDIO_OUTPUT_FLAG_FAST) != 0) {
+ if ((actualFlags & AUDIO_OUTPUT_FLAG_COMPRESS_OFFLOAD) != AUDIO_OUTPUT_FLAG_NONE) {
+ actualPerformanceMode = AAUDIO_PERFORMANCE_MODE_POWER_SAVING_OFFLOADED;
+ } else if ((actualFlags & AUDIO_OUTPUT_FLAG_FAST) != 0) {
actualPerformanceMode = AAUDIO_PERFORMANCE_MODE_LOW_LATENCY;
} else if ((actualFlags & AUDIO_OUTPUT_FLAG_DEEP_BUFFER) != 0) {
actualPerformanceMode = AAUDIO_PERFORMANCE_MODE_POWER_SAVING;
@@ -317,7 +352,7 @@
if (mAudioTrack->channelCount() != getSamplesPerFrame()
|| mAudioTrack->format() != getFormat()
|| mAudioTrack->getSampleRate() != getSampleRate()
- || mAudioTrack->getRoutedDeviceId() != getDeviceId()
+ || !areDeviceIdsEqual(mAudioTrack->getRoutedDeviceIds(), getDeviceIds())
|| getBufferCapacityFromDevice() != getBufferCapacity()
|| getFramesPerBurstFromDevice() != getFramesPerBurst()) {
AudioStreamLegacy::onNewIAudioTrack();
@@ -348,6 +383,7 @@
setState(originalState);
return AAudioConvert_androidToAAudioResult(err);
}
+ mOffloadEosPending = false;
return AAUDIO_OK;
}
@@ -431,6 +467,12 @@
break;
case AAUDIO_STREAM_STATE_STOPPING:
if (mAudioTrack->stopped()) {
+ if (getPerformanceMode() == AAUDIO_PERFORMANCE_MODE_POWER_SAVING_OFFLOADED) {
+ std::lock_guard<std::mutex> lock(mStreamLock);
+ if (!mOffloadEosPending) {
+ break;
+ }
+ }
setState(AAUDIO_STREAM_STATE_STOPPED);
}
break;
@@ -580,6 +622,104 @@
mAudioTrack->setPlayerIId(mPlayerBase->getPlayerIId());
}
+aaudio_result_t AudioStreamTrack::systemStopInternal_l() {
+ if (aaudio_result_t result = AudioStream::systemStopInternal_l(); result != AAUDIO_OK) {
+ return result;
+ }
+ mOffloadEosPending = false;
+ return AAUDIO_OK;
+}
+
+aaudio_result_t AudioStreamTrack::setOffloadDelayPadding(
+ int32_t delayInFrames, int32_t paddingInFrames) {
+ if (getPerformanceMode() != AAUDIO_PERFORMANCE_MODE_POWER_SAVING_OFFLOADED ||
+ audio_is_linear_pcm(getFormat())) {
+ return AAUDIO_ERROR_UNIMPLEMENTED;
+ }
+ if (mAudioTrack == nullptr) {
+ return AAUDIO_ERROR_INVALID_STATE;
+ }
+ AudioParameter param = AudioParameter();
+ param.addInt(String8(AUDIO_OFFLOAD_CODEC_DELAY_SAMPLES), delayInFrames);
+ param.addInt(String8(AUDIO_OFFLOAD_CODEC_PADDING_SAMPLES), paddingInFrames);
+ mAudioTrack->setParameters(param.toString());
+ mOffloadDelayFrames.store(delayInFrames);
+ mOffloadPaddingFrames.store(paddingInFrames);
+ return AAUDIO_OK;
+}
+
+int32_t AudioStreamTrack::getOffloadDelay() {
+ if (getPerformanceMode() != AAUDIO_PERFORMANCE_MODE_POWER_SAVING_OFFLOADED ||
+ audio_is_linear_pcm(getFormat())) {
+ return AAUDIO_ERROR_UNIMPLEMENTED;
+ }
+ if (mAudioTrack == nullptr) {
+ return AAUDIO_ERROR_INVALID_STATE;
+ }
+ return mOffloadDelayFrames.load();
+}
+
+int32_t AudioStreamTrack::getOffloadPadding() {
+ if (getPerformanceMode() != AAUDIO_PERFORMANCE_MODE_POWER_SAVING_OFFLOADED ||
+ audio_is_linear_pcm(getFormat())) {
+ return AAUDIO_ERROR_UNIMPLEMENTED;
+ }
+ if (mAudioTrack == nullptr) {
+ return AAUDIO_ERROR_INVALID_STATE;
+ }
+ return mOffloadPaddingFrames.load();
+}
+
+aaudio_result_t AudioStreamTrack::setOffloadEndOfStream() {
+ if (getPerformanceMode() != AAUDIO_PERFORMANCE_MODE_POWER_SAVING_OFFLOADED) {
+ return AAUDIO_ERROR_UNIMPLEMENTED;
+ }
+ if (mAudioTrack == nullptr) {
+ return AAUDIO_ERROR_INVALID_STATE;
+ }
+ std::lock_guard<std::mutex> lock(mStreamLock);
+ if (aaudio_result_t result = safeStop_l(); result != AAUDIO_OK) {
+ return result;
+ }
+ mOffloadEosPending = true;
+ return AAUDIO_OK;
+}
+
+bool AudioStreamTrack::collidesWithCallback() const {
+ if (AudioStream::collidesWithCallback()) {
+ return true;
+ }
+ pid_t thisThread = gettid();
+ return mPresentationEndCallbackThread.load() == thisThread;
+}
+
+void AudioStreamTrack::onStreamEnd() {
+ if (getPerformanceMode() != AAUDIO_PERFORMANCE_MODE_POWER_SAVING_OFFLOADED) {
+ return;
+ }
+ if (getState() == AAUDIO_STREAM_STATE_STOPPING) {
+ std::lock_guard<std::mutex> lock(mStreamLock);
+ if (mOffloadEosPending) {
+ requestStart_l();
+ }
+ mOffloadEosPending = false;
+ }
+ maybeCallPresentationEndCallback();
+}
+
+void AudioStreamTrack::maybeCallPresentationEndCallback() {
+ if (mPresentationEndCallbackProc != nullptr) {
+ pid_t expected = CALLBACK_THREAD_NONE;
+ if (mPresentationEndCallbackThread.compare_exchange_strong(expected, gettid())) {
+ (*mPresentationEndCallbackProc)(
+ (AAudioStream *) this, mPresentationEndCallbackUserData);
+ mPresentationEndCallbackThread.store(CALLBACK_THREAD_NONE);
+ } else {
+ ALOGW("%s() error callback already running!", __func__);
+ }
+ }
+}
+
#if AAUDIO_USE_VOLUME_SHAPER
using namespace android::media::VolumeShaper;
diff --git a/media/libaaudio/src/legacy/AudioStreamTrack.h b/media/libaaudio/src/legacy/AudioStreamTrack.h
index 05609c4..82ba772 100644
--- a/media/libaaudio/src/legacy/AudioStreamTrack.h
+++ b/media/libaaudio/src/legacy/AudioStreamTrack.h
@@ -49,6 +49,11 @@
aaudio_result_t requestPause_l() REQUIRES(mStreamLock) override;
aaudio_result_t requestFlush_l() REQUIRES(mStreamLock) override;
aaudio_result_t requestStop_l() REQUIRES(mStreamLock) override;
+ aaudio_result_t systemStopInternal_l() REQUIRES(mStreamLock) final;
+
+ bool collidesWithCallback() const final;
+
+ void onStreamEnd() final;
public:
bool isFlushSupported() const override {
@@ -89,6 +94,26 @@
void registerPlayerBase() override;
+ // Offload begin --------------------------------------
+ aaudio_result_t setOffloadDelayPadding(int32_t delayInFrames, int32_t paddingInFrames) final;
+
+ int32_t getOffloadDelay() final;
+
+ int32_t getOffloadPadding() final;
+
+ aaudio_result_t setOffloadEndOfStream() EXCLUDES(mStreamLock) final;
+
+ void setPresentationEndCallbackProc(AAudioStream_presentationEndCallback proc) final {
+ mPresentationEndCallbackProc = proc;
+ }
+
+ virtual void setPresentationEndCallbackUserData(void *userData) final {
+ mPresentationEndCallbackUserData = userData;
+ }
+
+ void maybeCallPresentationEndCallback();
+ // Offload end ----------------------------------------
+
#if AAUDIO_USE_VOLUME_SHAPER
virtual android::binder::Status applyVolumeShaper(
const android::media::VolumeShaper::Configuration& configuration,
@@ -110,6 +135,15 @@
// TODO add 64-bit position reporting to AudioTrack and use it.
aaudio_wrapping_frames_t mPositionWhenPausing = 0;
+
+ // Offload --------------------------------------------
+ std::atomic<int32_t> mOffloadDelayFrames = 0;
+ std::atomic<int32_t> mOffloadPaddingFrames = 0;
+ bool mOffloadEosPending GUARDED_BY(mStreamLock) = false;
+
+ AAudioStream_presentationEndCallback mPresentationEndCallbackProc = nullptr;
+ void *mPresentationEndCallbackUserData = nullptr;
+ std::atomic<pid_t> mPresentationEndCallbackThread{CALLBACK_THREAD_NONE};
};
} /* namespace aaudio */
diff --git a/media/libaaudio/src/libaaudio.map.txt b/media/libaaudio/src/libaaudio.map.txt
index e28dcb4..44bb4c6 100644
--- a/media/libaaudio/src/libaaudio.map.txt
+++ b/media/libaaudio/src/libaaudio.map.txt
@@ -28,6 +28,7 @@
AAudioStreamBuilder_setChannelMask; # introduced=32
AAudioStreamBuilder_setSpatializationBehavior; # introduced=32
AAudioStreamBuilder_setIsContentSpatialized; # introduced=32
+ AAudioStreamBuilder_setPresentationEndCallback; # introduced=36
AAudioStreamBuilder_openStream;
AAudioStreamBuilder_delete;
AAudioStream_close;
@@ -70,6 +71,16 @@
AAudioStream_getHardwareChannelCount; # introduced=UpsideDownCake
AAudioStream_getHardwareFormat; # introduced=UpsideDownCake
AAudioStream_getHardwareSampleRate; # introduced=UpsideDownCake
+ AAudio_getPlatformMMapPolicy; # introduced=36
+ AAudio_getPlatformMMapExclusivePolicy; # introduced=36
+ AAudioStream_getDeviceIds; # introduced=36
+ AAudioStream_setOffloadDelayPadding; # introduced=36
+ AAudioStream_getOffloadDelay; # introduced=36
+ AAudioStream_getOffloadPadding; # introduced=36
+ AAudioStream_setOffloadEndOfStream; # introduced=36
+
+ AAudioStreamBuilder_setTags; # systemapi
+ AAudioStream_getTags; # systemapi
local:
*;
};
diff --git a/media/libaaudio/src/utility/AAudioUtilities.cpp b/media/libaaudio/src/utility/AAudioUtilities.cpp
index 3df23ee..873fcba 100644
--- a/media/libaaudio/src/utility/AAudioUtilities.cpp
+++ b/media/libaaudio/src/utility/AAudioUtilities.cpp
@@ -147,6 +147,27 @@
case AAUDIO_FORMAT_IEC61937:
androidFormat = AUDIO_FORMAT_IEC61937;
break;
+ case AAUDIO_FORMAT_MP3:
+ androidFormat = AUDIO_FORMAT_MP3;
+ break;
+ case AAUDIO_FORMAT_AAC_LC:
+ androidFormat = AUDIO_FORMAT_AAC_LC;
+ break;
+ case AAUDIO_FORMAT_AAC_HE_V1:
+ androidFormat = AUDIO_FORMAT_AAC_HE_V1;
+ break;
+ case AAUDIO_FORMAT_AAC_HE_V2:
+ androidFormat = AUDIO_FORMAT_AAC_HE_V2;
+ break;
+ case AAUDIO_FORMAT_AAC_ELD:
+ androidFormat = AUDIO_FORMAT_AAC_ELD;
+ break;
+ case AAUDIO_FORMAT_AAC_XHE:
+ androidFormat = AUDIO_FORMAT_AAC_XHE;
+ break;
+ case AAUDIO_FORMAT_OPUS:
+ androidFormat = AUDIO_FORMAT_OPUS;
+ break;
default:
androidFormat = AUDIO_FORMAT_INVALID;
ALOGE("%s() 0x%08X unrecognized", __func__, aaudioFormat);
@@ -176,6 +197,27 @@
case AUDIO_FORMAT_IEC61937:
aaudioFormat = AAUDIO_FORMAT_IEC61937;
break;
+ case AUDIO_FORMAT_MP3:
+ aaudioFormat = AAUDIO_FORMAT_MP3;
+ break;
+ case AUDIO_FORMAT_AAC_LC:
+ aaudioFormat = AAUDIO_FORMAT_AAC_LC;
+ break;
+ case AUDIO_FORMAT_AAC_HE_V1:
+ aaudioFormat = AAUDIO_FORMAT_AAC_HE_V1;
+ break;
+ case AUDIO_FORMAT_AAC_HE_V2:
+ aaudioFormat = AAUDIO_FORMAT_AAC_HE_V2;
+ break;
+ case AUDIO_FORMAT_AAC_ELD:
+ aaudioFormat = AAUDIO_FORMAT_AAC_ELD;
+ break;
+ case AUDIO_FORMAT_AAC_XHE:
+ aaudioFormat = AAUDIO_FORMAT_AAC_XHE;
+ break;
+ case AUDIO_FORMAT_OPUS:
+ aaudioFormat = AAUDIO_FORMAT_OPUS;
+ break;
default:
aaudioFormat = AAUDIO_FORMAT_INVALID;
ALOGE("%s() 0x%08X unrecognized", __func__, androidFormat);
@@ -693,3 +735,128 @@
}
return aidl2legacy_aaudio_policy(policy);
}
+
+audio_devices_t AAudioConvert_aaudioToAndroidDeviceType(AAudio_DeviceType device,
+ aaudio_direction_t direction) {
+ if (direction == AAUDIO_DIRECTION_INPUT) {
+ switch (device) {
+ case AAUDIO_DEVICE_BUILTIN_MIC:
+ return AUDIO_DEVICE_IN_BUILTIN_MIC;
+ case AAUDIO_DEVICE_BLUETOOTH_SCO:
+ return AUDIO_DEVICE_IN_BLUETOOTH_SCO_HEADSET;
+ case AAUDIO_DEVICE_WIRED_HEADSET:
+ return AUDIO_DEVICE_IN_WIRED_HEADSET;
+ case AAUDIO_DEVICE_HDMI:
+ return AUDIO_DEVICE_IN_HDMI;
+ case AAUDIO_DEVICE_TELEPHONY:
+ return AUDIO_DEVICE_IN_TELEPHONY_RX;
+ case AAUDIO_DEVICE_DOCK:
+ return AUDIO_DEVICE_IN_DGTL_DOCK_HEADSET;
+ case AAUDIO_DEVICE_DOCK_ANALOG:
+ return AUDIO_DEVICE_IN_ANLG_DOCK_HEADSET;
+ case AAUDIO_DEVICE_USB_ACCESSORY:
+ return AUDIO_DEVICE_IN_USB_ACCESSORY;
+ case AAUDIO_DEVICE_USB_DEVICE:
+ return AUDIO_DEVICE_IN_USB_DEVICE;
+ case AAUDIO_DEVICE_USB_HEADSET:
+ return AUDIO_DEVICE_IN_USB_HEADSET;
+ case AAUDIO_DEVICE_FM_TUNER:
+ return AUDIO_DEVICE_IN_FM_TUNER;
+ case AAUDIO_DEVICE_TV_TUNER:
+ return AUDIO_DEVICE_IN_TV_TUNER;
+ case AAUDIO_DEVICE_LINE_ANALOG:
+ return AUDIO_DEVICE_IN_LINE;
+ case AAUDIO_DEVICE_LINE_DIGITAL:
+ return AUDIO_DEVICE_IN_SPDIF;
+ case AAUDIO_DEVICE_BLUETOOTH_A2DP:
+ return AUDIO_DEVICE_IN_BLUETOOTH_A2DP;
+ case AAUDIO_DEVICE_IP:
+ return AUDIO_DEVICE_IN_IP;
+ case AAUDIO_DEVICE_BUS:
+ return AUDIO_DEVICE_IN_BUS;
+ case AAUDIO_DEVICE_REMOTE_SUBMIX:
+ return AUDIO_DEVICE_IN_REMOTE_SUBMIX;
+ case AAUDIO_DEVICE_BLE_HEADSET:
+ return AUDIO_DEVICE_IN_BLE_HEADSET;
+ case AAUDIO_DEVICE_HDMI_ARC:
+ return AUDIO_DEVICE_IN_HDMI_ARC;
+ case AAUDIO_DEVICE_HDMI_EARC:
+ return AUDIO_DEVICE_IN_HDMI_EARC;
+ default:
+ break;
+ }
+ } else {
+ switch (device) {
+ case AAUDIO_DEVICE_BUILTIN_EARPIECE:
+ return AUDIO_DEVICE_OUT_EARPIECE;
+ case AAUDIO_DEVICE_BUILTIN_SPEAKER:
+ return AUDIO_DEVICE_OUT_SPEAKER;
+ case AAUDIO_DEVICE_WIRED_HEADSET:
+ return AUDIO_DEVICE_OUT_WIRED_HEADSET;
+ case AAUDIO_DEVICE_WIRED_HEADPHONES:
+ return AUDIO_DEVICE_OUT_WIRED_HEADPHONE;
+ case AAUDIO_DEVICE_LINE_ANALOG:
+ return AUDIO_DEVICE_OUT_LINE;
+ case AAUDIO_DEVICE_LINE_DIGITAL:
+ return AUDIO_DEVICE_OUT_SPDIF;
+ case AAUDIO_DEVICE_BLUETOOTH_SCO:
+ return AUDIO_DEVICE_OUT_BLUETOOTH_SCO;
+ case AAUDIO_DEVICE_BLUETOOTH_A2DP:
+ return AUDIO_DEVICE_OUT_BLUETOOTH_A2DP;
+ case AAUDIO_DEVICE_HDMI:
+ return AUDIO_DEVICE_OUT_HDMI;
+ case AAUDIO_DEVICE_HDMI_ARC:
+ return AUDIO_DEVICE_OUT_HDMI_ARC;
+ case AAUDIO_DEVICE_HDMI_EARC:
+ return AUDIO_DEVICE_OUT_HDMI_EARC;
+ case AAUDIO_DEVICE_USB_DEVICE:
+ return AUDIO_DEVICE_OUT_USB_DEVICE;
+ case AAUDIO_DEVICE_USB_HEADSET:
+ return AUDIO_DEVICE_OUT_USB_HEADSET;
+ case AAUDIO_DEVICE_USB_ACCESSORY:
+ return AUDIO_DEVICE_OUT_USB_ACCESSORY;
+ case AAUDIO_DEVICE_DOCK:
+ return AUDIO_DEVICE_OUT_DGTL_DOCK_HEADSET;
+ case AAUDIO_DEVICE_DOCK_ANALOG:
+ return AUDIO_DEVICE_OUT_ANLG_DOCK_HEADSET;
+ case AAUDIO_DEVICE_FM:
+ return AUDIO_DEVICE_OUT_FM;
+ case AAUDIO_DEVICE_TELEPHONY:
+ return AUDIO_DEVICE_OUT_TELEPHONY_TX;
+ case AAUDIO_DEVICE_AUX_LINE:
+ return AUDIO_DEVICE_OUT_AUX_LINE;
+ case AAUDIO_DEVICE_IP:
+ return AUDIO_DEVICE_OUT_IP;
+ case AAUDIO_DEVICE_BUS:
+ return AUDIO_DEVICE_OUT_BUS;
+ case AAUDIO_DEVICE_HEARING_AID:
+ return AUDIO_DEVICE_OUT_HEARING_AID;
+ case AAUDIO_DEVICE_BUILTIN_SPEAKER_SAFE:
+ return AUDIO_DEVICE_OUT_SPEAKER_SAFE;
+ case AAUDIO_DEVICE_REMOTE_SUBMIX:
+ return AUDIO_DEVICE_OUT_REMOTE_SUBMIX;
+ case AAUDIO_DEVICE_BLE_HEADSET:
+ return AUDIO_DEVICE_OUT_BLE_HEADSET;
+ case AAUDIO_DEVICE_BLE_SPEAKER:
+ return AUDIO_DEVICE_OUT_BLE_SPEAKER;
+ case AAUDIO_DEVICE_BLE_BROADCAST:
+ return AUDIO_DEVICE_OUT_BLE_BROADCAST;
+ default:
+ break;
+ }
+ }
+ return AUDIO_DEVICE_NONE;
+}
+
+aaudio_policy_t AAudioConvert_androidToAAudioMMapPolicy(AudioMMapPolicy policy) {
+ switch (policy) {
+ case AudioMMapPolicy::AUTO:
+ return AAUDIO_POLICY_AUTO;
+ case AudioMMapPolicy::ALWAYS:
+ return AAUDIO_POLICY_ALWAYS;
+ case AudioMMapPolicy::NEVER:
+ case AudioMMapPolicy::UNSPECIFIED:
+ default:
+ return AAUDIO_POLICY_NEVER;
+ }
+}
diff --git a/media/libaaudio/src/utility/AAudioUtilities.h b/media/libaaudio/src/utility/AAudioUtilities.h
index 7c351e1..940e4b5 100644
--- a/media/libaaudio/src/utility/AAudioUtilities.h
+++ b/media/libaaudio/src/utility/AAudioUtilities.h
@@ -363,4 +363,16 @@
android::media::audio::common::AudioMMapPolicy defaultPolicy =
android::media::audio::common::AudioMMapPolicy::NEVER);
+/**
+ * Convert the aaudio device type to android device type. Returns AUDIO_DEVICE_NONE if
+ * the given device is not a valid one.
+ */
+audio_devices_t AAudioConvert_aaudioToAndroidDeviceType(
+ AAudio_DeviceType device, aaudio_direction_t direction);
+
+aaudio_policy_t AAudioConvert_androidToAAudioMMapPolicy(
+ android::media::audio::common::AudioMMapPolicy policy);
+
+bool AAudio_isCompressedFormat(audio_format_t format);
+
#endif //UTILITY_AAUDIO_UTILITIES_H
diff --git a/media/libaaudio/tests/test_attributes.cpp b/media/libaaudio/tests/test_attributes.cpp
index e5676a7..045c236 100644
--- a/media/libaaudio/tests/test_attributes.cpp
+++ b/media/libaaudio/tests/test_attributes.cpp
@@ -26,6 +26,8 @@
#include <aaudio/AAudio.h>
#include <gtest/gtest.h>
+#include <system/audio.h>
+#include <system/aaudio/AAudio.h>
constexpr int64_t kNanosPerSecond = 1000000000;
constexpr int kNumFrames = 256;
@@ -36,6 +38,7 @@
static void checkAttributes(aaudio_performance_mode_t perfMode,
aaudio_usage_t usage,
aaudio_content_type_t contentType,
+ const char * tags = nullptr,
aaudio_input_preset_t preset = DONT_SET,
aaudio_allowed_capture_policy_t capturePolicy = DONT_SET,
int privacyMode = DONT_SET,
@@ -45,6 +48,7 @@
AAudioStreamBuilder *aaudioBuilder = nullptr;
AAudioStream *aaudioStream = nullptr;
+ aaudio_result_t expectedSetTagsResult = AAUDIO_OK;
// Use an AAudioStreamBuilder to contain requested parameters.
ASSERT_EQ(AAUDIO_OK, AAudio_createStreamBuilder(&aaudioBuilder));
@@ -60,6 +64,12 @@
if (contentType != DONT_SET) {
AAudioStreamBuilder_setContentType(aaudioBuilder, contentType);
}
+ if (tags != nullptr) {
+ aaudio_result_t result = AAudioStreamBuilder_setTags(aaudioBuilder, tags);
+ expectedSetTagsResult = (strlen(tags) >= AUDIO_ATTRIBUTES_TAGS_MAX_SIZE) ?
+ AAUDIO_ERROR_ILLEGAL_ARGUMENT : AAUDIO_OK;
+ EXPECT_EQ(result, expectedSetTagsResult);
+ }
if (preset != DONT_SET) {
AAudioStreamBuilder_setInputPreset(aaudioBuilder, preset);
}
@@ -87,6 +97,20 @@
: contentType;
EXPECT_EQ(expectedContentType, AAudioStream_getContentType(aaudioStream));
+ char readTags[AAUDIO_ATTRIBUTES_TAGS_MAX_SIZE] = {};
+ EXPECT_EQ(AAUDIO_OK, AAudioStream_getTags(aaudioStream, readTags))
+ << "Expected tags=" << (tags != nullptr ? tags : "null") << ", got tags=" << readTags;;
+ EXPECT_LT(strlen(readTags), AUDIO_ATTRIBUTES_TAGS_MAX_SIZE)
+ << "expected tags len " << strlen(readTags) << " less than "
+ << AUDIO_ATTRIBUTES_TAGS_MAX_SIZE;
+
+ // Null tags or failed to set, empty tags expected (default initializer)
+ const char * expectedTags = tags == nullptr ?
+ "" : (expectedSetTagsResult != AAUDIO_OK ? "" : tags);
+ // Oversized tags will be discarded
+ EXPECT_TRUE(std::strcmp(expectedTags, readTags) == 0)
+ << "Expected tags=" << expectedTags << ", got tags=" << readTags;
+
aaudio_input_preset_t expectedPreset =
(preset == DONT_SET || preset == AAUDIO_UNSPECIFIED)
? AAUDIO_INPUT_PRESET_VOICE_RECOGNITION // default
@@ -139,6 +163,21 @@
// Note that the AAUDIO_SYSTEM_USAGE_* values requires special permission.
};
+static const std::string oversizedTags2 = std::string(AUDIO_ATTRIBUTES_TAGS_MAX_SIZE + 1, 'A');
+static const std::string oversizedTags = std::string(AUDIO_ATTRIBUTES_TAGS_MAX_SIZE, 'B');
+static const std::string maxSizeTags = std::string(AUDIO_ATTRIBUTES_TAGS_MAX_SIZE - 1, 'C');
+
+static const char * const sTags[] = {
+ nullptr,
+ "",
+ "oem=routing_extension",
+ "VX_OEM_ROUTING_EXTENSION",
+ maxSizeTags.c_str(),
+ // intentionally use oversized tags
+ oversizedTags.c_str(),
+ oversizedTags2.c_str()
+};
+
static const aaudio_content_type_t sContentypes[] = {
DONT_SET,
AAUDIO_UNSPECIFIED,
@@ -185,11 +224,18 @@
}
}
+static void checkAttributesTags(aaudio_performance_mode_t perfMode) {
+ for (const char * const tags : sTags) {
+ checkAttributes(perfMode, DONT_SET, DONT_SET, tags);
+ }
+}
+
static void checkAttributesInputPreset(aaudio_performance_mode_t perfMode) {
for (aaudio_input_preset_t inputPreset : sInputPresets) {
checkAttributes(perfMode,
DONT_SET,
DONT_SET,
+ nullptr,
inputPreset,
DONT_SET,
DONT_SET,
@@ -202,6 +248,7 @@
checkAttributes(perfMode,
DONT_SET,
DONT_SET,
+ nullptr,
DONT_SET,
policy,
AAUDIO_DIRECTION_INPUT);
@@ -213,6 +260,7 @@
checkAttributes(perfMode,
DONT_SET,
DONT_SET,
+ nullptr,
DONT_SET,
DONT_SET,
privacyMode,
@@ -228,6 +276,10 @@
checkAttributesContentType(AAUDIO_PERFORMANCE_MODE_NONE);
}
+TEST(test_attributes, aaudio_tags_perfnone) {
+ checkAttributesTags(AAUDIO_PERFORMANCE_MODE_NONE);
+}
+
TEST(test_attributes, aaudio_input_preset_perfnone) {
checkAttributesInputPreset(AAUDIO_PERFORMANCE_MODE_NONE);
}
@@ -244,6 +296,10 @@
checkAttributesContentType(AAUDIO_PERFORMANCE_MODE_LOW_LATENCY);
}
+TEST(test_attributes, aaudio_tags_lowlat) {
+ checkAttributesTags(AAUDIO_PERFORMANCE_MODE_LOW_LATENCY);
+}
+
TEST(test_attributes, aaudio_input_preset_lowlat) {
checkAttributesInputPreset(AAUDIO_PERFORMANCE_MODE_LOW_LATENCY);
}
diff --git a/media/libaaudio/tests/test_mmap_path.cpp b/media/libaaudio/tests/test_mmap_path.cpp
index c8376f6..6ad694f 100644
--- a/media/libaaudio/tests/test_mmap_path.cpp
+++ b/media/libaaudio/tests/test_mmap_path.cpp
@@ -40,7 +40,7 @@
*/
static void openStreamAndVerify(aaudio_direction_t direction) {
std::vector<AudioMMapPolicyInfo> policyInfos;
- ASSERT_EQ(android::NO_ERROR, android::AudioSystem::getMmapPolicyInfo(
+ ASSERT_EQ(android::NO_ERROR, android::AudioSystem::getMmapPolicyInfos(
AudioMMapPolicyType::DEFAULT, &policyInfos));
if (AAudio_getAAudioPolicy(policyInfos) == AAUDIO_POLICY_NEVER) {
// Query the system MMAP policy, if it is NEVER, it indicates there is no MMAP support.
diff --git a/media/libaudioclient/AudioRecord.cpp b/media/libaudioclient/AudioRecord.cpp
index b193950..9a4b45d 100644
--- a/media/libaudioclient/AudioRecord.cpp
+++ b/media/libaudioclient/AudioRecord.cpp
@@ -731,7 +731,7 @@
mSelectedDeviceId = deviceId;
if (mStatus == NO_ERROR) {
if (mActive) {
- if (mSelectedDeviceId != mRoutedDeviceId) {
+ if (getFirstDeviceId(mRoutedDeviceIds) != mSelectedDeviceId) {
// stop capture so that audio policy manager does not reject the new instance
// start request as only one capture can be active at a time.
if (mAudioRecord != 0) {
@@ -758,7 +758,7 @@
}
// must be called with mLock held
-void AudioRecord::updateRoutedDeviceId_l()
+void AudioRecord::updateRoutedDeviceIds_l()
{
// if the record is inactive, do not update actual device as the input stream maybe routed
// from a device not relevant to this client because of other active use cases.
@@ -766,17 +766,21 @@
return;
}
if (mInput != AUDIO_IO_HANDLE_NONE) {
- audio_port_handle_t deviceId = AudioSystem::getDeviceIdForIo(mInput);
- if (deviceId != AUDIO_PORT_HANDLE_NONE) {
- mRoutedDeviceId = deviceId;
+ DeviceIdVector deviceIds;
+ status_t result = AudioSystem::getDeviceIdsForIo(mInput, deviceIds);
+ if (result != OK) {
+ ALOGW("%s: getDeviceIdsForIo returned: %d", __func__, result);
+ }
+ if (!deviceIds.empty()) {
+ mRoutedDeviceIds = deviceIds;
}
}
}
-audio_port_handle_t AudioRecord::getRoutedDeviceId() {
+DeviceIdVector AudioRecord::getRoutedDeviceIds() {
AutoMutex lock(mLock);
- updateRoutedDeviceId_l();
- return mRoutedDeviceId;
+ updateRoutedDeviceIds_l();
+ return mRoutedDeviceIds;
}
status_t AudioRecord::dump(int fd, const Vector<String16>& args __unused) const
@@ -794,10 +798,11 @@
mFrameCount, mReqFrameCount);
result.appendFormat(" notif. frame count(%u), req. notif. frame count(%u)\n",
mNotificationFramesAct, mNotificationFramesReq);
- result.appendFormat(" input(%d), latency(%u), selected device Id(%d), routed device Id(%d)\n",
- mInput, mLatency, mSelectedDeviceId, mRoutedDeviceId);
- result.appendFormat(" mic direction(%d) mic field dimension(%f)",
- mSelectedMicDirection, mSelectedMicFieldDimension);
+ result.appendFormat(" input(%d), latency(%u), selected device Id(%d)\n",
+ mInput, mLatency, mSelectedDeviceId);
+ result.appendFormat(" routed device Ids(%s), mic direction(%d) mic field dimension(%f)",
+ toString(mRoutedDeviceIds).c_str(), mSelectedMicDirection,
+ mSelectedMicFieldDimension);
::write(fd, result.c_str(), result.size());
return NO_ERROR;
}
@@ -940,7 +945,7 @@
mAwaitBoost = true;
}
mFlags = output.flags;
- mRoutedDeviceId = output.selectedDeviceId;
+ mRoutedDeviceIds = { output.selectedDeviceId };
mSessionId = output.sessionId;
mSampleRate = output.sampleRate;
mServerConfig = output.serverConfig;
@@ -1063,7 +1068,8 @@
.set(AMEDIAMETRICS_PROP_SOURCE, toString(mAttributes.source).c_str())
.set(AMEDIAMETRICS_PROP_THREADID, (int32_t)output.inputId)
.set(AMEDIAMETRICS_PROP_SELECTEDDEVICEID, (int32_t)mSelectedDeviceId)
- .set(AMEDIAMETRICS_PROP_ROUTEDDEVICEID, (int32_t)mRoutedDeviceId)
+ .set(AMEDIAMETRICS_PROP_ROUTEDDEVICEID, (int32_t)(getFirstDeviceId(mRoutedDeviceIds)))
+ .set(AMEDIAMETRICS_PROP_ROUTEDDEVICEIDS, toString(mRoutedDeviceIds).c_str())
.set(AMEDIAMETRICS_PROP_ENCODING, toString(mFormat).c_str())
.set(AMEDIAMETRICS_PROP_CHANNELMASK, (int32_t)mChannelMask)
.set(AMEDIAMETRICS_PROP_FRAMECOUNT, (int32_t)mFrameCount)
@@ -1577,11 +1583,6 @@
const int INITIAL_RETRIES = 3;
int retries = INITIAL_RETRIES;
retry:
- if (retries < INITIAL_RETRIES) {
- // refresh the audio configuration cache in this process to make sure we get new
- // input parameters and new IAudioRecord in createRecord_l()
- AudioSystem::clearAudioConfigCache();
- }
mFlags = mOrigFlags;
// if the new IAudioRecord is created, createRecord_l() will modify the
@@ -1661,7 +1662,7 @@
}
void AudioRecord::onAudioDeviceUpdate(audio_io_handle_t audioIo,
- audio_port_handle_t deviceId)
+ const DeviceIdVector& deviceIds)
{
sp<AudioSystem::AudioDeviceCallback> callback;
{
@@ -1673,11 +1674,11 @@
// only update device if the record is active as route changes due to other use cases are
// irrelevant for this client
if (mActive) {
- mRoutedDeviceId = deviceId;
+ mRoutedDeviceIds = deviceIds;
}
}
if (callback.get() != nullptr) {
- callback->onAudioDeviceUpdate(mInput, mRoutedDeviceId);
+ callback->onAudioDeviceUpdate(mInput, mRoutedDeviceIds);
}
}
diff --git a/media/libaudioclient/AudioSystem.cpp b/media/libaudioclient/AudioSystem.cpp
index 55f74e1..dcfef45 100644
--- a/media/libaudioclient/AudioSystem.cpp
+++ b/media/libaudioclient/AudioSystem.cpp
@@ -26,12 +26,14 @@
#include <binder/IServiceManager.h>
#include <binder/ProcessState.h>
#include <binder/IPCThreadState.h>
+#include <cutils/properties.h>
#include <media/AidlConversion.h>
#include <media/AudioResamplerPublic.h>
#include <media/AudioSystem.h>
#include <media/IAudioFlinger.h>
#include <media/PolicyAidlConversion.h>
#include <media/TypeConverter.h>
+#include <mediautils/ServiceSingleton.h>
#include <math.h>
#include <system/audio.h>
@@ -78,172 +80,217 @@
std::mutex AudioSystem::gSoundTriggerMutex;
sp<CaptureStateListenerImpl> AudioSystem::gSoundTriggerCaptureStateListener;
-// Sets the Binder for the AudioFlinger service, passed to this client process
-// from the system server.
-// This allows specific isolated processes to access the audio system. Currently used only for the
-// HotwordDetectionService.
-template <typename ServiceInterface, typename Client, typename AidlInterface,
- typename ServiceTraits>
-class ServiceHandler {
+// ----------------------------
+
+// AudioSystem is the client side interface to AudioFlinger (AF) and AudioPolicy (AP).
+//
+// For clients:
+// We use the ServiceSingleton class in mediautils to fetch the AF/AP service.
+// The ServiceSingleton offers service prefetch, automatic
+// new service notification, automatic binder death notification.
+//
+// AudioFlingerServiceTraits and AudioPolicyServiceTraits are passed into
+// ServiceSingleton to provide interaction with the service notifications and
+// binder death notifications.
+//
+// If the AF/AP service is unavailable for kServiceClientWaitMs from ServiceManager,
+// ServiceSingleton will return a nullptr service handle resulting in the same dead object error
+// as if the service died (which it did, otherwise we'd be returning the cached handle).
+//
+// Potential deadlock sequence:
+// 1) audioserver reboots.
+// 2) App clients call into AudioService (system server) obtaining binder threads,
+// these calls blocking for audioserver reboot completion (or waiting for a mutex
+// held by those blocked threads).
+// 3) AudioFlinger and AudioPolicyManager services need to call into system server
+// during initialization. It can't because app clients hold all the binder threads
+// in the threadpool.
+// 4) We have a resource deadlock between (2) and (3) potentially causing an ANR and
+// further reinitialization.
+// 5) However, after the service wait timeout kServiceWaitNs, the calls for (2) will
+// return an error and resolve itself, breaking the resource deadlock in (4).
+//
+// At this time, it is a matter of experimentation whether the service timeout is
+// applied only for system server, and we let other clients block indefinitely.
+//
+// For audio services:
+// AudioFlinger and AudioPolicy may call back into AudioSystem. When doing
+// so it should not hold any mutexes. There is no service wait as AudioFlinger
+// and AudioPolicy are in-process with each other, and the call proceeds without
+// binder. The setLocalService() method is used to set the service interfaces
+// within audioserver to bypass the ServiceManager lookup.
+//
+
+// Wait timeout for AudioFlinger or AudioPolicy service before returning with null.
+// Such an audioserver failure is considered benign as the ground truth is stored in
+// the Java AudioService and can be restored once audioserver has finished initialization.
+//
+// TODO(b/375691003) We use 5s as a conservative timeout value, and will tune closer to 3s.
+// Too small a value (i.e. less than 1s would churn repeated calls to get the service).
+// The value can be tuned by the property audio.service.client_wait_ms.
+static constexpr int32_t kServiceClientWaitMs = 5'000;
+
+static constexpr const char kServiceWaitProperty[] = "audio.service.client_wait_ms";
+
+// AudioFlingerServiceTraits is a collection of methods that parameterize the
+// ServiceSingleton handler for IAudioFlinger
+
+class AudioFlingerServiceTraits {
public:
- sp<ServiceInterface> getService()
- EXCLUDES(mMutex) NO_THREAD_SAFETY_ANALYSIS { // std::unique_ptr
- sp<ServiceInterface> service;
- sp<Client> client;
+ // ------- required by ServiceSingleton
- bool reportNoError = false;
+ static constexpr const char* getServiceName() { return "media.audio_flinger"; }
+
+ static void onNewService(const sp<media::IAudioFlingerService>& afs) {
+ onNewServiceWithAdapter(createServiceAdapter(afs));
+ }
+
+ static void onServiceDied(const sp<media::IAudioFlingerService>&) {
+ ALOGW("%s: %s service died", __func__, getServiceName());
{
- std::lock_guard _l(mMutex);
- if (mService != nullptr) {
- return mService;
- }
+ std::lock_guard l(mMutex);
+ mValid = false;
+ mClient->clearIoCache();
}
+ AudioSystem::reportError(DEAD_OBJECT);
+ }
- std::unique_lock ul_only1thread(mSingleGetter);
- std::unique_lock ul(mMutex);
- if (mService != nullptr) {
- return mService;
- }
- if (mClient == nullptr) {
- mClient = sp<Client>::make();
- } else {
- reportNoError = true;
- }
- while (true) {
- mService = mLocalService;
- if (mService != nullptr) break;
+ static constexpr mediautils::ServiceOptions options() {
+ return mediautils::ServiceOptions::kNone;
+ }
- sp<IBinder> binder = mBinder;
- if (binder == nullptr) {
- sp <IServiceManager> sm = defaultServiceManager();
- binder = sm->checkService(String16(ServiceTraits::SERVICE_NAME));
- if (binder == nullptr) {
- ALOGD("%s: waiting for %s", __func__, ServiceTraits::SERVICE_NAME);
+ // ------- required by AudioSystem
- // if the condition variable is present, setLocalService() and
- // setBinder() is allowed to use it to notify us.
- if (mCvGetter == nullptr) {
- mCvGetter = std::make_shared<std::condition_variable>();
- }
- mCvGetter->wait_for(ul, std::chrono::seconds(1));
- continue;
- }
+ static sp<IAudioFlinger> getService(
+ std::chrono::milliseconds waitMs = std::chrono::milliseconds{-1}) {
+ static bool init = false;
+ audio_utils::unique_lock ul(mMutex);
+ if (!init) {
+ if (!mDisableThreadPoolStart) {
+ ProcessState::self()->startThreadPool();
}
- binder->linkToDeath(mClient);
- auto aidlInterface = interface_cast<AidlInterface>(binder);
- LOG_ALWAYS_FATAL_IF(aidlInterface == nullptr);
- if constexpr (std::is_same_v<ServiceInterface, AidlInterface>) {
- mService = std::move(aidlInterface);
- } else /* constexpr */ {
- mService = ServiceTraits::createServiceAdapter(aidlInterface);
- }
- break;
+ mediautils::initService<media::IAudioFlingerService, AudioFlingerServiceTraits>();
+ mWaitMs = std::chrono::milliseconds(
+ property_get_int32(kServiceWaitProperty, kServiceClientWaitMs));
+ init = true;
}
- if (mCvGetter) mCvGetter.reset(); // remove condition variable.
- client = mClient;
- service = mService;
- // Make sure callbacks can be received by the client
- if (mCanStartThreadPool) {
- ProcessState::self()->startThreadPool();
- }
+ if (mValid) return mService;
+ if (waitMs.count() < 0) waitMs = mWaitMs;
ul.unlock();
- ul_only1thread.unlock();
- ServiceTraits::onServiceCreate(service, client);
- if (reportNoError) AudioSystem::reportError(NO_ERROR);
- return service;
+
+ // mediautils::getService() installs a persistent new service notification.
+ auto service = mediautils::getService<
+ media::IAudioFlingerService>(waitMs);
+ ALOGD("%s: checking for service %s: %p", __func__, getServiceName(), service.get());
+
+ ul.lock();
+ // return the IAudioFlinger interface which is adapted
+ // from the media::IAudioFlingerService.
+ return mService;
}
- status_t setLocalService(const sp<ServiceInterface>& service) EXCLUDES(mMutex) {
- std::lock_guard _l(mMutex);
- // we allow clearing once set, but not a double non-null set.
- if (mService != nullptr && service != nullptr) return INVALID_OPERATION;
- mLocalService = service;
- if (mCvGetter) mCvGetter->notify_one();
- return OK;
- }
+ static sp<AudioSystem::AudioFlingerClient> getClient() {
+ audio_utils::unique_lock ul(mMutex);
+ if (mValid) return mClient;
+ ul.unlock();
- sp<Client> getClient() EXCLUDES(mMutex) {
- const auto service = getService();
- if (service == nullptr) return nullptr;
- std::lock_guard _l(mMutex);
+ auto service = getService();
+ ALOGD("%s: checking for service: %p", __func__, service.get());
+
+ ul.lock();
return mClient;
}
- void setBinder(const sp<IBinder>& binder) EXCLUDES(mMutex) {
- std::lock_guard _l(mMutex);
- if (mService != nullptr) {
- ALOGW("%s: ignoring; %s connection already established.",
- __func__, ServiceTraits::SERVICE_NAME);
- return;
+ static void setBinder(const sp<IBinder>& binder) {
+ setLocalService(createServiceAdapter(
+ mediautils::interfaceFromBinder<media::IAudioFlingerService>(binder)));
+ }
+
+ static status_t setLocalService(const sp<IAudioFlinger>& af) {
+ mediautils::skipService<media::IAudioFlingerService>();
+ sp<IAudioFlinger> old;
+ {
+ std::lock_guard l(mMutex);
+ old = mService;
+ mService = af;
}
- mBinder = binder;
- if (mCvGetter) mCvGetter->notify_one();
+ if (old) onServiceDied({});
+ if (af) onNewServiceWithAdapter(af);
+ return OK;
}
- void clearService() EXCLUDES(mMutex) {
- std::lock_guard _l(mMutex);
- mService.clear();
- if (mClient) ServiceTraits::onClearService(mClient);
+ static void disableThreadPoolStart() {
+ mDisableThreadPoolStart = true;
}
- void disableThreadPool() {
- mCanStartThreadPool = false;
+ static bool isValid() {
+ audio_utils::unique_lock ul(mMutex);
+ if (mValid) return true;
+ ul.unlock();
+ (void)getService({});
+ ul.lock();
+ return mValid;
+ }
+
+ // called to determine error on nullptr service return.
+ static constexpr status_t getError() {
+ return DEAD_OBJECT;
}
private:
- std::mutex mSingleGetter;
- std::mutex mMutex;
- std::shared_ptr<std::condition_variable> mCvGetter GUARDED_BY(mMutex);
- sp<IBinder> mBinder GUARDED_BY(mMutex);
- sp<ServiceInterface> mLocalService GUARDED_BY(mMutex);
- sp<ServiceInterface> mService GUARDED_BY(mMutex);
- sp<Client> mClient GUARDED_BY(mMutex);
- std::atomic<bool> mCanStartThreadPool = true;
-};
-struct AudioFlingerTraits {
- static void onServiceCreate(
- const sp<IAudioFlinger>& af, const sp<AudioSystem::AudioFlingerClient>& afc) {
+ static void onNewServiceWithAdapter(const sp<IAudioFlinger>& service) {
+ ALOGD("%s: %s service obtained %p", __func__, getServiceName(), service.get());
+ sp<AudioSystem::AudioFlingerClient> client;
+ bool reportNoError = false;
+ {
+ std::lock_guard l(mMutex);
+ if (mClient == nullptr) {
+ mClient = sp<AudioSystem::AudioFlingerClient>::make();
+ } else {
+ mClient->clearIoCache();
+ reportNoError = true;
+ }
+ mService = service;
+ client = mClient;
+ mValid = true;
+ }
+ // TODO(b/375280520) consider registerClient() within mMutex lock.
const int64_t token = IPCThreadState::self()->clearCallingIdentity();
- af->registerClient(afc);
+ service->registerClient(client);
IPCThreadState::self()->restoreCallingIdentity(token);
+
+ if (reportNoError) AudioSystem::reportError(NO_ERROR);
}
static sp<IAudioFlinger> createServiceAdapter(
- const sp<media::IAudioFlingerService>& aidlInterface) {
- return sp<AudioFlingerClientAdapter>::make(aidlInterface);
+ const sp<media::IAudioFlingerService>& af) {
+ return sp<AudioFlingerClientAdapter>::make(af);
}
- static void onClearService(const sp<AudioSystem::AudioFlingerClient>& afc) {
- afc->clearIoCache();
- }
-
- static constexpr const char* SERVICE_NAME = IAudioFlinger::DEFAULT_SERVICE_NAME;
+ static inline constinit std::mutex mMutex;
+ static inline constinit sp<AudioSystem::AudioFlingerClient> mClient GUARDED_BY(mMutex);
+ static inline constinit sp<IAudioFlinger> mService GUARDED_BY(mMutex);
+ static inline constinit std::chrono::milliseconds mWaitMs
+ GUARDED_BY(mMutex) {kServiceClientWaitMs};
+ static inline constinit bool mValid GUARDED_BY(mMutex) = false;
+ static inline constinit std::atomic_bool mDisableThreadPoolStart = false;
};
-[[clang::no_destroy]] static constinit ServiceHandler<IAudioFlinger,
- AudioSystem::AudioFlingerClient, media::IAudioFlingerService,
- AudioFlingerTraits> gAudioFlingerServiceHandler;
-
sp<IAudioFlinger> AudioSystem::get_audio_flinger() {
- return gAudioFlingerServiceHandler.getService();
+ return AudioFlingerServiceTraits::getService();
}
sp<AudioSystem::AudioFlingerClient> AudioSystem::getAudioFlingerClient() {
- return gAudioFlingerServiceHandler.getClient();
+ return AudioFlingerServiceTraits::getClient();
}
void AudioSystem::setAudioFlingerBinder(const sp<IBinder>& audioFlinger) {
- if (audioFlinger->getInterfaceDescriptor() != media::IAudioFlingerService::descriptor) {
- ALOGE("%s: received a binder of type %s",
- __func__, String8(audioFlinger->getInterfaceDescriptor()).c_str());
- return;
- }
- gAudioFlingerServiceHandler.setBinder(audioFlinger);
+ AudioFlingerServiceTraits::setBinder(audioFlinger);
}
status_t AudioSystem::setLocalAudioFlinger(const sp<IAudioFlinger>& af) {
- return gAudioFlingerServiceHandler.setLocalService(af);
+ return AudioFlingerServiceTraits::setLocalService(af);
}
sp<AudioIoDescriptor> AudioSystem::getIoDescriptor(audio_io_handle_t ioHandle) {
@@ -256,9 +303,7 @@
}
/* static */ status_t AudioSystem::checkAudioFlinger() {
- if (defaultServiceManager()->checkService(String16("media.audio_flinger")) != 0) {
- return NO_ERROR;
- }
+ if (AudioFlingerServiceTraits::isValid()) return OK;
return DEAD_OBJECT;
}
@@ -266,97 +311,98 @@
status_t AudioSystem::muteMicrophone(bool state) {
const sp<IAudioFlinger> af = get_audio_flinger();
- if (af == 0) return PERMISSION_DENIED;
+ if (af == nullptr) return AudioFlingerServiceTraits::getError();
return af->setMicMute(state);
}
status_t AudioSystem::isMicrophoneMuted(bool* state) {
const sp<IAudioFlinger> af = get_audio_flinger();
- if (af == 0) return PERMISSION_DENIED;
+ if (af == nullptr) return AudioFlingerServiceTraits::getError();
*state = af->getMicMute();
return NO_ERROR;
}
status_t AudioSystem::setMasterVolume(float value) {
const sp<IAudioFlinger> af = get_audio_flinger();
- if (af == 0) return PERMISSION_DENIED;
+ if (af == nullptr) return AudioFlingerServiceTraits::getError();
af->setMasterVolume(value);
return NO_ERROR;
}
status_t AudioSystem::setMasterMute(bool mute) {
const sp<IAudioFlinger> af = get_audio_flinger();
- if (af == 0) return PERMISSION_DENIED;
+ if (af == nullptr) return AudioFlingerServiceTraits::getError();
af->setMasterMute(mute);
return NO_ERROR;
}
status_t AudioSystem::getMasterVolume(float* volume) {
const sp<IAudioFlinger> af = get_audio_flinger();
- if (af == 0) return PERMISSION_DENIED;
+ if (af == nullptr) return AudioFlingerServiceTraits::getError();
*volume = af->masterVolume();
return NO_ERROR;
}
status_t AudioSystem::getMasterMute(bool* mute) {
const sp<IAudioFlinger> af = get_audio_flinger();
- if (af == 0) return PERMISSION_DENIED;
+ if (af == nullptr) return AudioFlingerServiceTraits::getError();
*mute = af->masterMute();
return NO_ERROR;
}
status_t AudioSystem::setStreamVolume(audio_stream_type_t stream, float value,
- audio_io_handle_t output) {
+ bool muted, audio_io_handle_t output) {
if (uint32_t(stream) >= AUDIO_STREAM_CNT) return BAD_VALUE;
const sp<IAudioFlinger> af = get_audio_flinger();
- if (af == 0) return PERMISSION_DENIED;
- af->setStreamVolume(stream, value, output);
+ if (af == nullptr) return AudioFlingerServiceTraits::getError();
+ af->setStreamVolume(stream, value, muted, output);
return NO_ERROR;
}
status_t AudioSystem::setStreamMute(audio_stream_type_t stream, bool mute) {
if (uint32_t(stream) >= AUDIO_STREAM_CNT) return BAD_VALUE;
const sp<IAudioFlinger> af = get_audio_flinger();
- if (af == 0) return PERMISSION_DENIED;
+ if (af == nullptr) return AudioFlingerServiceTraits::getError();
af->setStreamMute(stream, mute);
return NO_ERROR;
}
status_t AudioSystem::setPortsVolume(
- const std::vector<audio_port_handle_t>& portIds, float volume, audio_io_handle_t output) {
+ const std::vector<audio_port_handle_t>& portIds, float volume, bool muted,
+ audio_io_handle_t output) {
const sp<IAudioFlinger> af = get_audio_flinger();
- if (af == 0) return PERMISSION_DENIED;
+ if (af == nullptr) return AudioFlingerServiceTraits::getError();
std::vector<int32_t> portIdsAidl = VALUE_OR_RETURN_STATUS(
convertContainer<std::vector<int32_t>>(
portIds, legacy2aidl_audio_port_handle_t_int32_t));
int32_t outputAidl = VALUE_OR_RETURN_STATUS(legacy2aidl_audio_io_handle_t_int32_t(output));
- af->setPortsVolume(portIdsAidl, volume, outputAidl);
+ af->setPortsVolume(portIdsAidl, volume, muted, outputAidl);
return NO_ERROR;
}
status_t AudioSystem::setMode(audio_mode_t mode) {
if (uint32_t(mode) >= AUDIO_MODE_CNT) return BAD_VALUE;
const sp<IAudioFlinger> af = get_audio_flinger();
- if (af == 0) return PERMISSION_DENIED;
+ if (af == nullptr) return AudioFlingerServiceTraits::getError();
return af->setMode(mode);
}
status_t AudioSystem::setSimulateDeviceConnections(bool enabled) {
const sp<IAudioFlinger> af = get_audio_flinger();
- if (af == 0) return PERMISSION_DENIED;
+ if (af == nullptr) return AudioFlingerServiceTraits::getError();
return af->setSimulateDeviceConnections(enabled);
}
status_t AudioSystem::setParameters(audio_io_handle_t ioHandle, const String8& keyValuePairs) {
const sp<IAudioFlinger> af = get_audio_flinger();
- if (af == 0) return PERMISSION_DENIED;
+ if (af == nullptr) return AudioFlingerServiceTraits::getError();
return af->setParameters(ioHandle, keyValuePairs);
}
String8 AudioSystem::getParameters(audio_io_handle_t ioHandle, const String8& keys) {
const sp<IAudioFlinger> af = get_audio_flinger();
String8 result = String8("");
- if (af == 0) return result;
+ if (af == nullptr) return result;
result = af->getParameters(ioHandle, keys);
return result;
@@ -435,7 +481,7 @@
status_t AudioSystem::getSamplingRate(audio_io_handle_t ioHandle,
uint32_t* samplingRate) {
const sp<IAudioFlinger> af = get_audio_flinger();
- if (af == 0) return PERMISSION_DENIED;
+ if (af == nullptr) return AudioFlingerServiceTraits::getError();
sp<AudioIoDescriptor> desc = getIoDescriptor(ioHandle);
if (desc == 0) {
*samplingRate = af->sampleRate(ioHandle);
@@ -470,7 +516,7 @@
status_t AudioSystem::getFrameCount(audio_io_handle_t ioHandle,
size_t* frameCount) {
const sp<IAudioFlinger> af = get_audio_flinger();
- if (af == 0) return PERMISSION_DENIED;
+ if (af == nullptr) return AudioFlingerServiceTraits::getError();
sp<AudioIoDescriptor> desc = getIoDescriptor(ioHandle);
if (desc == 0) {
*frameCount = af->frameCount(ioHandle);
@@ -505,7 +551,7 @@
status_t AudioSystem::getLatency(audio_io_handle_t output,
uint32_t* latency) {
const sp<IAudioFlinger> af = get_audio_flinger();
- if (af == 0) return PERMISSION_DENIED;
+ if (af == nullptr) return AudioFlingerServiceTraits::getError();
sp<AudioIoDescriptor> outputDesc = getIoDescriptor(output);
if (outputDesc == 0) {
*latency = af->latency(output);
@@ -529,14 +575,14 @@
status_t AudioSystem::setVoiceVolume(float value) {
const sp<IAudioFlinger> af = get_audio_flinger();
- if (af == 0) return PERMISSION_DENIED;
+ if (af == nullptr) return AudioFlingerServiceTraits::getError();
return af->setVoiceVolume(value);
}
status_t AudioSystem::getRenderPosition(audio_io_handle_t output, uint32_t* halFrames,
uint32_t* dspFrames) {
const sp<IAudioFlinger> af = get_audio_flinger();
- if (af == 0) return PERMISSION_DENIED;
+ if (af == nullptr) return AudioFlingerServiceTraits::getError();
return af->getRenderPosition(halFrames, dspFrames, output);
}
@@ -544,7 +590,7 @@
uint32_t AudioSystem::getInputFramesLost(audio_io_handle_t ioHandle) {
const sp<IAudioFlinger> af = get_audio_flinger();
uint32_t result = 0;
- if (af == 0) return result;
+ if (af == nullptr) return result;
if (ioHandle == AUDIO_IO_HANDLE_NONE) return result;
result = af->getInputFramesLost(ioHandle);
@@ -554,7 +600,7 @@
audio_unique_id_t AudioSystem::newAudioUniqueId(audio_unique_id_use_t use) {
// Must not use AF as IDs will re-roll on audioserver restart, b/130369529.
const sp<IAudioFlinger> af = get_audio_flinger();
- if (af == 0) return AUDIO_UNIQUE_ID_ALLOCATE;
+ if (af == nullptr) return AUDIO_UNIQUE_ID_ALLOCATE;
return af->newAudioUniqueId(use);
}
@@ -574,26 +620,26 @@
audio_hw_sync_t AudioSystem::getAudioHwSyncForSession(audio_session_t sessionId) {
const sp<IAudioFlinger> af = get_audio_flinger();
- if (af == 0) return AUDIO_HW_SYNC_INVALID;
+ if (af == nullptr) return AUDIO_HW_SYNC_INVALID;
return af->getAudioHwSyncForSession(sessionId);
}
status_t AudioSystem::systemReady() {
const sp<IAudioFlinger> af = get_audio_flinger();
- if (af == 0) return NO_INIT;
+ if (af == nullptr) return NO_INIT;
return af->systemReady();
}
status_t AudioSystem::audioPolicyReady() {
const sp<IAudioFlinger> af = get_audio_flinger();
- if (af == 0) return NO_INIT;
+ if (af == nullptr) return NO_INIT;
return af->audioPolicyReady();
}
status_t AudioSystem::getFrameCountHAL(audio_io_handle_t ioHandle,
size_t* frameCount) {
const sp<IAudioFlinger> af = get_audio_flinger();
- if (af == 0) return PERMISSION_DENIED;
+ if (af == nullptr) return AudioFlingerServiceTraits::getError();
sp<AudioIoDescriptor> desc = getIoDescriptor(ioHandle);
if (desc == 0) {
*frameCount = af->frameCountHAL(ioHandle);
@@ -622,13 +668,6 @@
mInChannelMask = AUDIO_CHANNEL_NONE;
}
-void AudioSystem::AudioFlingerClient::binderDied(const wp<IBinder>& who __unused) {
- gAudioFlingerServiceHandler.clearService();
- reportError(DEAD_OBJECT);
-
- ALOGW("AudioFlinger server died!");
-}
-
Status AudioSystem::AudioFlingerClient::ioConfigChanged(
media::AudioIoConfigEvent _event,
const media::AudioIoDescriptor& _ioDesc) {
@@ -642,7 +681,7 @@
if (ioDesc->getIoHandle() == AUDIO_IO_HANDLE_NONE) return Status::ok();
- audio_port_handle_t deviceId = AUDIO_PORT_HANDLE_NONE;
+ DeviceIdVector deviceIds;
std::vector<sp<AudioDeviceCallback>> callbacksToCall;
{
std::lock_guard _l(mMutex);
@@ -654,12 +693,12 @@
case AUDIO_INPUT_OPENED:
case AUDIO_INPUT_REGISTERED: {
if (sp<AudioIoDescriptor> oldDesc = getIoDescriptor_l(ioDesc->getIoHandle())) {
- deviceId = oldDesc->getDeviceId();
+ deviceIds = oldDesc->getDeviceIds();
}
mIoDescriptors[ioDesc->getIoHandle()] = ioDesc;
- if (ioDesc->getDeviceId() != AUDIO_PORT_HANDLE_NONE) {
- deviceId = ioDesc->getDeviceId();
+ if (!ioDesc->getDeviceIds().empty()) {
+ deviceIds = ioDesc->getDeviceIds();
if (event == AUDIO_OUTPUT_OPENED || event == AUDIO_INPUT_OPENED) {
auto it = mAudioDeviceCallbacks.find(ioDesc->getIoHandle());
if (it != mAudioDeviceCallbacks.end()) {
@@ -700,11 +739,12 @@
break;
}
- deviceId = oldDesc->getDeviceId();
+ deviceIds = oldDesc->getDeviceIds();
mIoDescriptors[ioDesc->getIoHandle()] = ioDesc;
- if (deviceId != ioDesc->getDeviceId()) {
- deviceId = ioDesc->getDeviceId();
+ DeviceIdVector ioDescDeviceIds = ioDesc->getDeviceIds();
+ if (!areDeviceIdsEqual(deviceIds, ioDescDeviceIds)) {
+ deviceIds = ioDescDeviceIds;
auto it = mAudioDeviceCallbacks.find(ioDesc->getIoHandle());
if (it != mAudioDeviceCallbacks.end()) {
callbacks = it->second;
@@ -732,7 +772,7 @@
auto it2 = cbks.find(ioDesc->getPortId());
if (it2 != cbks.end()) {
callbacks.emplace(ioDesc->getPortId(), it2->second);
- deviceId = oldDesc->getDeviceId();
+ deviceIds = oldDesc->getDeviceIds();
}
}
}
@@ -751,7 +791,7 @@
// example getRoutedDevice that updates the device and tries to acquire mMutex.
for (auto cb : callbacksToCall) {
// If callbacksToCall is not empty, it implies ioDesc->getIoHandle() and deviceId are valid
- cb->onAudioDeviceUpdate(ioDesc->getIoHandle(), deviceId);
+ cb->onAudioDeviceUpdate(ioDesc->getIoHandle(), deviceIds);
}
return Status::ok();
@@ -785,9 +825,7 @@
uint32_t sampleRate, audio_format_t format,
audio_channel_mask_t channelMask, size_t* buffSize) {
const sp<IAudioFlinger> af = get_audio_flinger();
- if (af == 0) {
- return PERMISSION_DENIED;
- }
+ if (af == nullptr) return AudioFlingerServiceTraits::getError();
std::lock_guard _l(mMutex);
// Do we have a stale mInBuffSize or are we requesting the input buffer size for new values
if ((mInBuffSize == 0) || (sampleRate != mInSamplingRate) || (format != mInFormat)
@@ -922,47 +960,146 @@
gVolRangeInitReqCallback = cb;
}
-struct AudioPolicyTraits {
- static void onServiceCreate(const sp<IAudioPolicyService>& ap,
- const sp<AudioSystem::AudioPolicyServiceClient>& apc) {
+
+// AudioPolicyServiceTraits is a collection of methods that parameterize the
+// ServiceSingleton class implementation of IAudioPolicyService.
+
+class AudioPolicyServiceTraits {
+public:
+ // ------- methods required by ServiceSingleton
+
+ static constexpr const char* getServiceName() { return "media.audio_policy"; }
+
+ static void onNewService(const sp<IAudioPolicyService>& aps) {
+ ALOGD("%s: %s service obtained %p", __func__, getServiceName(), aps.get());
+ sp<AudioSystem::AudioPolicyServiceClient> client;
+ {
+ std::lock_guard l(mMutex);
+ if (mClient == nullptr) {
+ mClient = sp<AudioSystem::AudioPolicyServiceClient>::make();
+ }
+ client = mClient;
+ mService = aps;
+ mValid = true;
+ }
+ // TODO(b/375280520) consider registerClient() within mMutex lock.
const int64_t token = IPCThreadState::self()->clearCallingIdentity();
- ap->registerClient(apc);
- ap->setAudioPortCallbacksEnabled(apc->isAudioPortCbEnabled());
- ap->setAudioVolumeGroupCallbacksEnabled(apc->isAudioVolumeGroupCbEnabled());
+ aps->registerClient(client);
IPCThreadState::self()->restoreCallingIdentity(token);
}
- static void onClearService(const sp<AudioSystem::AudioPolicyServiceClient>&) {}
+ static void onServiceDied(const sp<IAudioPolicyService>&) {
+ ALOGW("%s: %s service died", __func__, getServiceName());
+ sp<AudioSystem::AudioPolicyServiceClient> client;
+ {
+ std::lock_guard l(mMutex);
+ mValid = false;
+ client = mClient;
+ }
+ client->onServiceDied();
+ }
- static constexpr const char *SERVICE_NAME = "media.audio_policy";
+ static constexpr mediautils::ServiceOptions options() {
+ return mediautils::ServiceOptions::kNone;
+ }
+
+ // ------- methods required by AudioSystem
+
+ static sp<IAudioPolicyService> getService(
+ std::chrono::milliseconds waitMs = std::chrono::milliseconds{-1}) {
+ static bool init = false;
+ audio_utils::unique_lock ul(mMutex);
+ if (!init) {
+ if (!mDisableThreadPoolStart) {
+ ProcessState::self()->startThreadPool();
+ }
+ mediautils::initService<IAudioPolicyService, AudioPolicyServiceTraits>();
+ mWaitMs = std::chrono::milliseconds(
+ property_get_int32(kServiceWaitProperty, kServiceClientWaitMs));
+ init = true;
+ }
+ if (mValid) return mService;
+ if (waitMs.count() < 0) waitMs = mWaitMs;
+ ul.unlock();
+
+ auto service = mediautils::getService<
+ media::IAudioPolicyService>(waitMs);
+ ALOGD("%s: checking for service %s: %p", __func__, getServiceName(), service.get());
+
+ // mediautils::getService() will return early if setLocalService() is called
+ // (whereupon mService contained the actual local service pointer to use).
+ // we should always return mService.
+ ul.lock();
+ return mService;
+ }
+
+ static sp<AudioSystem::AudioPolicyServiceClient> getClient() {
+ audio_utils::unique_lock ul(mMutex);
+ if (mValid) return mClient;
+ ul.unlock();
+
+ auto service = getService();
+ ALOGD("%s: checking for service: %p", __func__, service.get());
+
+ ul.lock();
+ return mClient;
+ }
+
+ static status_t setLocalService(const sp<IAudioPolicyService>& aps) {
+ mediautils::skipService<IAudioPolicyService>();
+ sp<IAudioPolicyService> old;
+ {
+ std::lock_guard l(mMutex);
+ old = mService;
+ mService = aps;
+ }
+ if (old) onServiceDied(old);
+ if (aps) onNewService(aps);
+ return OK;
+ }
+
+ static void disableThreadPoolStart() {
+ mDisableThreadPoolStart = true;
+ }
+
+ // called to determine error on nullptr service return.
+ static constexpr status_t getError() {
+ return DEAD_OBJECT;
+ }
+private:
+
+ static inline constinit std::mutex mMutex;
+ static inline constinit sp<AudioSystem::AudioPolicyServiceClient> mClient GUARDED_BY(mMutex);
+ static inline constinit sp<IAudioPolicyService> mService GUARDED_BY(mMutex);
+ static inline constinit bool mValid GUARDED_BY(mMutex) = false;
+ static inline constinit std::chrono::milliseconds mWaitMs
+ GUARDED_BY(mMutex) {kServiceClientWaitMs};
+ static inline constinit std::atomic_bool mDisableThreadPoolStart = false;
};
-[[clang::no_destroy]] static constinit ServiceHandler<IAudioPolicyService,
- AudioSystem::AudioPolicyServiceClient, IAudioPolicyService,
- AudioPolicyTraits> gAudioPolicyServiceHandler;
-
-status_t AudioSystem::setLocalAudioPolicyService(const sp<IAudioPolicyService>& aps) {
- return gAudioPolicyServiceHandler.setLocalService(aps);
-}
sp<IAudioPolicyService> AudioSystem::get_audio_policy_service() {
- return gAudioPolicyServiceHandler.getService();
+ return AudioPolicyServiceTraits::getService();
}
-void AudioSystem::clearAudioPolicyService() {
- gAudioPolicyServiceHandler.clearService();
+status_t AudioSystem::setLocalAudioPolicyService(const sp<IAudioPolicyService>& aps) {
+ return AudioPolicyServiceTraits::setLocalService(aps);
+}
+
+sp<AudioSystem::AudioPolicyServiceClient> AudioSystem::getAudioPolicyClient() {
+ return AudioPolicyServiceTraits::getClient();
}
void AudioSystem::disableThreadPool() {
- gAudioFlingerServiceHandler.disableThreadPool();
- gAudioPolicyServiceHandler.disableThreadPool();
+ AudioFlingerServiceTraits::disableThreadPoolStart();
+ AudioPolicyServiceTraits::disableThreadPoolStart();
}
// ---------------------------------------------------------------------------
void AudioSystem::onNewAudioModulesAvailable() {
const sp<IAudioPolicyService> aps = get_audio_policy_service();
- if (aps == 0) return;
+ if (aps == nullptr) return;
aps->onNewAudioModulesAvailable();
}
@@ -971,7 +1108,7 @@
audio_format_t encodedFormat) {
const sp<IAudioPolicyService> aps = get_audio_policy_service();
- if (aps == 0) return PERMISSION_DENIED;
+ if (aps == nullptr) return AudioPolicyServiceTraits::getError();
return statusTFromBinderStatus(
aps->setDeviceConnectionState(
@@ -985,7 +1122,7 @@
audio_policy_dev_state_t AudioSystem::getDeviceConnectionState(audio_devices_t device,
const char* device_address) {
const sp<IAudioPolicyService> aps = get_audio_policy_service();
- if (aps == 0) return AUDIO_POLICY_DEVICE_STATE_UNAVAILABLE;
+ if (aps == nullptr) return AUDIO_POLICY_DEVICE_STATE_UNAVAILABLE;
auto result = [&]() -> ConversionResult<audio_policy_dev_state_t> {
AudioDevice deviceAidl = VALUE_OR_RETURN(
@@ -1008,7 +1145,7 @@
const char* address = "";
const char* name = "";
- if (aps == 0) return PERMISSION_DENIED;
+ if (aps == nullptr) return AudioPolicyServiceTraits::getError();
if (device_address != NULL) {
address = device_address;
@@ -1028,7 +1165,7 @@
status_t AudioSystem::setPhoneState(audio_mode_t state, uid_t uid) {
if (uint32_t(state) >= AUDIO_MODE_CNT) return BAD_VALUE;
const sp<IAudioPolicyService> aps = get_audio_policy_service();
- if (aps == 0) return PERMISSION_DENIED;
+ if (aps == nullptr) return AudioPolicyServiceTraits::getError();
return statusTFromBinderStatus(aps->setPhoneState(
VALUE_OR_RETURN_STATUS(legacy2aidl_audio_mode_t_AudioMode(state)),
@@ -1038,7 +1175,7 @@
status_t
AudioSystem::setForceUse(audio_policy_force_use_t usage, audio_policy_forced_cfg_t config) {
const sp<IAudioPolicyService> aps = get_audio_policy_service();
- if (aps == 0) return PERMISSION_DENIED;
+ if (aps == nullptr) return AudioPolicyServiceTraits::getError();
return statusTFromBinderStatus(
aps->setForceUse(
@@ -1051,7 +1188,7 @@
audio_policy_forced_cfg_t AudioSystem::getForceUse(audio_policy_force_use_t usage) {
const sp<IAudioPolicyService> aps = get_audio_policy_service();
- if (aps == 0) return AUDIO_POLICY_FORCE_NONE;
+ if (aps == nullptr) return AUDIO_POLICY_FORCE_NONE;
auto result = [&]() -> ConversionResult<audio_policy_forced_cfg_t> {
media::AudioPolicyForceUse usageAidl = VALUE_OR_RETURN(
@@ -1068,7 +1205,7 @@
audio_io_handle_t AudioSystem::getOutput(audio_stream_type_t stream) {
const sp<IAudioPolicyService> aps = get_audio_policy_service();
- if (aps == 0) return AUDIO_IO_HANDLE_NONE;
+ if (aps == nullptr) return AUDIO_IO_HANDLE_NONE;
auto result = [&]() -> ConversionResult<audio_io_handle_t> {
AudioStreamType streamAidl = VALUE_OR_RETURN(
@@ -1089,12 +1226,13 @@
const AttributionSourceState& attributionSource,
audio_config_t* config,
audio_output_flags_t flags,
- audio_port_handle_t* selectedDeviceId,
+ DeviceIdVector* selectedDeviceIds,
audio_port_handle_t* portId,
std::vector<audio_io_handle_t>* secondaryOutputs,
bool *isSpatialized,
bool *isBitPerfect,
- float *volume) {
+ float *volume,
+ bool *muted) {
if (attr == nullptr) {
ALOGE("%s NULL audio attributes", __func__);
return BAD_VALUE;
@@ -1103,8 +1241,8 @@
ALOGE("%s NULL output - shouldn't happen", __func__);
return BAD_VALUE;
}
- if (selectedDeviceId == nullptr) {
- ALOGE("%s NULL selectedDeviceId - shouldn't happen", __func__);
+ if (selectedDeviceIds == nullptr) {
+ ALOGE("%s NULL selectedDeviceIds - shouldn't happen", __func__);
return BAD_VALUE;
}
if (portId == nullptr) {
@@ -1117,7 +1255,7 @@
}
const sp<IAudioPolicyService> aps = get_audio_policy_service();
- if (aps == 0) return NO_INIT;
+ if (aps == nullptr) return NO_INIT;
media::audio::common::AudioAttributes attrAidl = VALUE_OR_RETURN_STATUS(
legacy2aidl_audio_attributes_t_AudioAttributes(*attr));
@@ -1126,20 +1264,20 @@
legacy2aidl_audio_config_t_AudioConfig(*config, false /*isInput*/));
int32_t flagsAidl = VALUE_OR_RETURN_STATUS(
legacy2aidl_audio_output_flags_t_int32_t_mask(flags));
- int32_t selectedDeviceIdAidl = VALUE_OR_RETURN_STATUS(
- legacy2aidl_audio_port_handle_t_int32_t(*selectedDeviceId));
+ auto selectedDeviceIdsAidl = VALUE_OR_RETURN_STATUS(convertContainer<std::vector<int32_t>>(
+ *selectedDeviceIds, legacy2aidl_audio_port_handle_t_int32_t));
media::GetOutputForAttrResponse responseAidl;
status_t status = statusTFromBinderStatus(
aps->getOutputForAttr(attrAidl, sessionAidl, attributionSource, configAidl, flagsAidl,
- selectedDeviceIdAidl, &responseAidl));
+ selectedDeviceIdsAidl, &responseAidl));
if (status != NO_ERROR) {
config->format = VALUE_OR_RETURN_STATUS(
- aidl2legacy_AudioFormatDescription_audio_format_t(responseAidl.configBase.format));
+ aidl2legacy_AudioFormatDescription_audio_format_t(responseAidl.configBase.format));
config->channel_mask = VALUE_OR_RETURN_STATUS(
- aidl2legacy_AudioChannelLayout_audio_channel_mask_t(
- responseAidl.configBase.channelMask, false /*isInput*/));
+ aidl2legacy_AudioChannelLayout_audio_channel_mask_t(
+ responseAidl.configBase.channelMask, false /*isInput*/));
config->sample_rate = responseAidl.configBase.sampleRate;
return status;
}
@@ -1151,8 +1289,8 @@
*stream = VALUE_OR_RETURN_STATUS(
aidl2legacy_AudioStreamType_audio_stream_type_t(responseAidl.stream));
}
- *selectedDeviceId = VALUE_OR_RETURN_STATUS(
- aidl2legacy_int32_t_audio_port_handle_t(responseAidl.selectedDeviceId));
+ *selectedDeviceIds = VALUE_OR_RETURN_STATUS(convertContainer<DeviceIdVector>(
+ responseAidl.selectedDeviceIds, aidl2legacy_int32_t_audio_port_handle_t));
*portId = VALUE_OR_RETURN_STATUS(aidl2legacy_int32_t_audio_port_handle_t(responseAidl.portId));
*secondaryOutputs = VALUE_OR_RETURN_STATUS(convertContainer<std::vector<audio_io_handle_t>>(
responseAidl.secondaryOutputs, aidl2legacy_int32_t_audio_io_handle_t));
@@ -1161,13 +1299,14 @@
*attr = VALUE_OR_RETURN_STATUS(
aidl2legacy_AudioAttributes_audio_attributes_t(responseAidl.attr));
*volume = responseAidl.volume;
+ *muted = responseAidl.muted;
return OK;
}
status_t AudioSystem::startOutput(audio_port_handle_t portId) {
const sp<IAudioPolicyService> aps = get_audio_policy_service();
- if (aps == 0) return PERMISSION_DENIED;
+ if (aps == nullptr) return AudioPolicyServiceTraits::getError();
int32_t portIdAidl = VALUE_OR_RETURN_STATUS(legacy2aidl_audio_port_handle_t_int32_t(portId));
return statusTFromBinderStatus(aps->startOutput(portIdAidl));
@@ -1175,7 +1314,7 @@
status_t AudioSystem::stopOutput(audio_port_handle_t portId) {
const sp<IAudioPolicyService> aps = get_audio_policy_service();
- if (aps == 0) return PERMISSION_DENIED;
+ if (aps == nullptr) return AudioPolicyServiceTraits::getError();
int32_t portIdAidl = VALUE_OR_RETURN_STATUS(legacy2aidl_audio_port_handle_t_int32_t(portId));
return statusTFromBinderStatus(aps->stopOutput(portIdAidl));
@@ -1183,7 +1322,7 @@
void AudioSystem::releaseOutput(audio_port_handle_t portId) {
const sp<IAudioPolicyService> aps = get_audio_policy_service();
- if (aps == 0) return;
+ if (aps == nullptr) return;
auto status = [&]() -> status_t {
int32_t portIdAidl = VALUE_OR_RETURN_STATUS(
@@ -1223,7 +1362,7 @@
}
const sp<IAudioPolicyService> aps = get_audio_policy_service();
- if (aps == 0) return NO_INIT;
+ if (aps == nullptr) return NO_INIT;
media::audio::common::AudioAttributes attrAidl = VALUE_OR_RETURN_STATUS(
legacy2aidl_audio_attributes_t_AudioAttributes(*attr));
@@ -1238,13 +1377,14 @@
media::GetInputForAttrResponse response;
- status_t status = statusTFromBinderStatus(
- aps->getInputForAttr(attrAidl, inputAidl, riidAidl, sessionAidl, attributionSource,
- configAidl, flagsAidl, selectedDeviceIdAidl, &response));
- if (status != NO_ERROR) {
+ const Status res = aps->getInputForAttr(attrAidl, inputAidl, riidAidl, sessionAidl,
+ attributionSource, configAidl, flagsAidl,
+ selectedDeviceIdAidl, &response);
+ if (!res.isOk()) {
+ ALOGE("getInputForAttr error: %s", res.toString8().c_str());
*config = VALUE_OR_RETURN_STATUS(
aidl2legacy_AudioConfigBase_audio_config_base_t(response.config, true /*isInput*/));
- return status;
+ return statusTFromBinderStatus(res);
}
*input = VALUE_OR_RETURN_STATUS(aidl2legacy_int32_t_audio_io_handle_t(response.input));
@@ -1257,7 +1397,7 @@
status_t AudioSystem::startInput(audio_port_handle_t portId) {
const sp<IAudioPolicyService> aps = get_audio_policy_service();
- if (aps == 0) return PERMISSION_DENIED;
+ if (aps == nullptr) return AudioPolicyServiceTraits::getError();
int32_t portIdAidl = VALUE_OR_RETURN_STATUS(legacy2aidl_audio_port_handle_t_int32_t(portId));
return statusTFromBinderStatus(aps->startInput(portIdAidl));
@@ -1265,7 +1405,7 @@
status_t AudioSystem::stopInput(audio_port_handle_t portId) {
const sp<IAudioPolicyService> aps = get_audio_policy_service();
- if (aps == 0) return PERMISSION_DENIED;
+ if (aps == nullptr) return AudioPolicyServiceTraits::getError();
int32_t portIdAidl = VALUE_OR_RETURN_STATUS(legacy2aidl_audio_port_handle_t_int32_t(portId));
return statusTFromBinderStatus(aps->stopInput(portIdAidl));
@@ -1273,7 +1413,7 @@
void AudioSystem::releaseInput(audio_port_handle_t portId) {
const sp<IAudioPolicyService> aps = get_audio_policy_service();
- if (aps == 0) return;
+ if (aps == nullptr) return;
auto status = [&]() -> status_t {
int32_t portIdAidl = VALUE_OR_RETURN_STATUS(
@@ -1291,7 +1431,7 @@
bool enabled,
audio_stream_type_t streamToDriveAbs) {
const sp<IAudioPolicyService> aps = get_audio_policy_service();
- if (aps == nullptr) return PERMISSION_DENIED;
+ if (aps == nullptr) return AudioPolicyServiceTraits::getError();
AudioDevice deviceAidl = VALUE_OR_RETURN_STATUS(
legacy2aidl_audio_device_AudioDevice(deviceType, address));
@@ -1305,7 +1445,7 @@
int indexMin,
int indexMax) {
const sp<IAudioPolicyService> aps = get_audio_policy_service();
- if (aps == 0) return PERMISSION_DENIED;
+ if (aps == nullptr) return AudioPolicyServiceTraits::getError();
AudioStreamType streamAidl = VALUE_OR_RETURN_STATUS(
legacy2aidl_audio_stream_type_t_AudioStreamType(stream));
@@ -1313,20 +1453,15 @@
int32_t indexMaxAidl = VALUE_OR_RETURN_STATUS(convertIntegral<int32_t>(indexMax));
status_t status = statusTFromBinderStatus(
aps->initStreamVolume(streamAidl, indexMinAidl, indexMaxAidl));
- if (status == DEAD_OBJECT) {
- // This is a critical operation since w/o proper stream volumes no audio
- // will be heard. Make sure we recover from a failure in any case.
- ALOGE("Received DEAD_OBJECT from APS, clearing the client");
- clearAudioPolicyService();
- }
return status;
}
status_t AudioSystem::setStreamVolumeIndex(audio_stream_type_t stream,
int index,
+ bool muted,
audio_devices_t device) {
const sp<IAudioPolicyService> aps = get_audio_policy_service();
- if (aps == 0) return PERMISSION_DENIED;
+ if (aps == nullptr) return AudioPolicyServiceTraits::getError();
AudioStreamType streamAidl = VALUE_OR_RETURN_STATUS(
legacy2aidl_audio_stream_type_t_AudioStreamType(stream));
@@ -1334,14 +1469,14 @@
AudioDeviceDescription deviceAidl = VALUE_OR_RETURN_STATUS(
legacy2aidl_audio_devices_t_AudioDeviceDescription(device));
return statusTFromBinderStatus(
- aps->setStreamVolumeIndex(streamAidl, deviceAidl, indexAidl));
+ aps->setStreamVolumeIndex(streamAidl, deviceAidl, indexAidl, muted));
}
status_t AudioSystem::getStreamVolumeIndex(audio_stream_type_t stream,
int* index,
audio_devices_t device) {
const sp<IAudioPolicyService> aps = get_audio_policy_service();
- if (aps == 0) return PERMISSION_DENIED;
+ if (aps == nullptr) return AudioPolicyServiceTraits::getError();
AudioStreamType streamAidl = VALUE_OR_RETURN_STATUS(
legacy2aidl_audio_stream_type_t_AudioStreamType(stream));
@@ -1358,9 +1493,10 @@
status_t AudioSystem::setVolumeIndexForAttributes(const audio_attributes_t& attr,
int index,
+ bool muted,
audio_devices_t device) {
const sp<IAudioPolicyService> aps = get_audio_policy_service();
- if (aps == 0) return PERMISSION_DENIED;
+ if (aps == nullptr) return AudioPolicyServiceTraits::getError();
media::audio::common::AudioAttributes attrAidl = VALUE_OR_RETURN_STATUS(
legacy2aidl_audio_attributes_t_AudioAttributes(attr));
@@ -1368,14 +1504,14 @@
AudioDeviceDescription deviceAidl = VALUE_OR_RETURN_STATUS(
legacy2aidl_audio_devices_t_AudioDeviceDescription(device));
return statusTFromBinderStatus(
- aps->setVolumeIndexForAttributes(attrAidl, deviceAidl, indexAidl));
+ aps->setVolumeIndexForAttributes(attrAidl, deviceAidl, indexAidl, muted));
}
status_t AudioSystem::getVolumeIndexForAttributes(const audio_attributes_t& attr,
int& index,
audio_devices_t device) {
const sp<IAudioPolicyService> aps = get_audio_policy_service();
- if (aps == 0) return PERMISSION_DENIED;
+ if (aps == nullptr) return AudioPolicyServiceTraits::getError();
media::audio::common::AudioAttributes attrAidl = VALUE_OR_RETURN_STATUS(
legacy2aidl_audio_attributes_t_AudioAttributes(attr));
@@ -1390,7 +1526,7 @@
status_t AudioSystem::getMaxVolumeIndexForAttributes(const audio_attributes_t& attr, int& index) {
const sp<IAudioPolicyService> aps = get_audio_policy_service();
- if (aps == 0) return PERMISSION_DENIED;
+ if (aps == nullptr) return AudioPolicyServiceTraits::getError();
media::audio::common::AudioAttributes attrAidl = VALUE_OR_RETURN_STATUS(
legacy2aidl_audio_attributes_t_AudioAttributes(attr));
@@ -1403,7 +1539,7 @@
status_t AudioSystem::getMinVolumeIndexForAttributes(const audio_attributes_t& attr, int& index) {
const sp<IAudioPolicyService> aps = get_audio_policy_service();
- if (aps == 0) return PERMISSION_DENIED;
+ if (aps == nullptr) return AudioPolicyServiceTraits::getError();
media::audio::common::AudioAttributes attrAidl = VALUE_OR_RETURN_STATUS(
legacy2aidl_audio_attributes_t_AudioAttributes(attr));
@@ -1416,7 +1552,7 @@
product_strategy_t AudioSystem::getStrategyForStream(audio_stream_type_t stream) {
const sp<IAudioPolicyService> aps = get_audio_policy_service();
- if (aps == 0) return PRODUCT_STRATEGY_NONE;
+ if (aps == nullptr) return PRODUCT_STRATEGY_NONE;
auto result = [&]() -> ConversionResult<product_strategy_t> {
AudioStreamType streamAidl = VALUE_OR_RETURN(
@@ -1436,7 +1572,7 @@
return BAD_VALUE;
}
const sp<IAudioPolicyService> aps = get_audio_policy_service();
- if (aps == 0) return PERMISSION_DENIED;
+ if (aps == nullptr) return AudioPolicyServiceTraits::getError();
media::audio::common::AudioAttributes aaAidl = VALUE_OR_RETURN_STATUS(
legacy2aidl_audio_attributes_t_AudioAttributes(aa));
@@ -1453,7 +1589,7 @@
audio_io_handle_t AudioSystem::getOutputForEffect(const effect_descriptor_t* desc) {
const sp<IAudioPolicyService> aps = get_audio_policy_service();
// FIXME change return type to status_t, and return PERMISSION_DENIED here
- if (aps == 0) return AUDIO_IO_HANDLE_NONE;
+ if (aps == nullptr) return AUDIO_IO_HANDLE_NONE;
auto result = [&]() -> ConversionResult<audio_io_handle_t> {
media::EffectDescriptor descAidl = VALUE_OR_RETURN(
@@ -1473,7 +1609,7 @@
audio_session_t session,
int id) {
const sp<IAudioPolicyService> aps = get_audio_policy_service();
- if (aps == 0) return PERMISSION_DENIED;
+ if (aps == nullptr) return AudioPolicyServiceTraits::getError();
media::EffectDescriptor descAidl = VALUE_OR_RETURN_STATUS(
legacy2aidl_effect_descriptor_t_EffectDescriptor(*desc));
@@ -1487,7 +1623,7 @@
status_t AudioSystem::unregisterEffect(int id) {
const sp<IAudioPolicyService> aps = get_audio_policy_service();
- if (aps == 0) return PERMISSION_DENIED;
+ if (aps == nullptr) return AudioPolicyServiceTraits::getError();
int32_t idAidl = VALUE_OR_RETURN_STATUS(convertReinterpret<int32_t>(id));
return statusTFromBinderStatus(
@@ -1496,7 +1632,7 @@
status_t AudioSystem::setEffectEnabled(int id, bool enabled) {
const sp<IAudioPolicyService> aps = get_audio_policy_service();
- if (aps == 0) return PERMISSION_DENIED;
+ if (aps == nullptr) return AudioPolicyServiceTraits::getError();
int32_t idAidl = VALUE_OR_RETURN_STATUS(convertReinterpret<int32_t>(id));
return statusTFromBinderStatus(
@@ -1505,7 +1641,7 @@
status_t AudioSystem::moveEffectsToIo(const std::vector<int>& ids, audio_io_handle_t io) {
const sp<IAudioPolicyService> aps = get_audio_policy_service();
- if (aps == 0) return PERMISSION_DENIED;
+ if (aps == nullptr) return AudioPolicyServiceTraits::getError();
std::vector<int32_t> idsAidl = VALUE_OR_RETURN_STATUS(
convertContainer<std::vector<int32_t>>(ids, convertReinterpret<int32_t, int>));
@@ -1515,7 +1651,7 @@
status_t AudioSystem::isStreamActive(audio_stream_type_t stream, bool* state, uint32_t inPastMs) {
const sp<IAudioPolicyService> aps = get_audio_policy_service();
- if (aps == 0) return PERMISSION_DENIED;
+ if (aps == nullptr) return AudioPolicyServiceTraits::getError();
if (state == NULL) return BAD_VALUE;
AudioStreamType streamAidl = VALUE_OR_RETURN_STATUS(
@@ -1529,7 +1665,7 @@
status_t AudioSystem::isStreamActiveRemotely(audio_stream_type_t stream, bool* state,
uint32_t inPastMs) {
const sp<IAudioPolicyService> aps = get_audio_policy_service();
- if (aps == 0) return PERMISSION_DENIED;
+ if (aps == nullptr) return AudioPolicyServiceTraits::getError();
if (state == NULL) return BAD_VALUE;
AudioStreamType streamAidl = VALUE_OR_RETURN_STATUS(
@@ -1542,7 +1678,7 @@
status_t AudioSystem::isSourceActive(audio_source_t stream, bool* state) {
const sp<IAudioPolicyService> aps = get_audio_policy_service();
- if (aps == 0) return PERMISSION_DENIED;
+ if (aps == nullptr) return AudioPolicyServiceTraits::getError();
if (state == NULL) return BAD_VALUE;
AudioSource streamAidl = VALUE_OR_RETURN_STATUS(
@@ -1554,32 +1690,25 @@
uint32_t AudioSystem::getPrimaryOutputSamplingRate() {
const sp<IAudioFlinger> af = get_audio_flinger();
- if (af == 0) return 0;
+ if (af == nullptr) return 0;
return af->getPrimaryOutputSamplingRate();
}
size_t AudioSystem::getPrimaryOutputFrameCount() {
const sp<IAudioFlinger> af = get_audio_flinger();
- if (af == 0) return 0;
+ if (af == nullptr) return 0;
return af->getPrimaryOutputFrameCount();
}
status_t AudioSystem::setLowRamDevice(bool isLowRamDevice, int64_t totalMemory) {
const sp<IAudioFlinger> af = get_audio_flinger();
- if (af == 0) return PERMISSION_DENIED;
+ if (af == nullptr) return AudioFlingerServiceTraits::getError();
return af->setLowRamDevice(isLowRamDevice, totalMemory);
}
-void AudioSystem::clearAudioConfigCache() {
- // called by restoreTrack_l(), which needs new IAudioFlinger and IAudioPolicyService instances
- ALOGV("clearAudioConfigCache()");
- gAudioFlingerServiceHandler.clearService();
- clearAudioPolicyService();
-}
-
status_t AudioSystem::setSupportedSystemUsages(const std::vector<audio_usage_t>& systemUsages) {
const sp<IAudioPolicyService> aps = get_audio_policy_service();
- if (aps == nullptr) return PERMISSION_DENIED;
+ if (aps == nullptr) return AudioPolicyServiceTraits::getError();
std::vector<AudioUsage> systemUsagesAidl = VALUE_OR_RETURN_STATUS(
convertContainer<std::vector<AudioUsage>>(systemUsages,
@@ -1589,7 +1718,7 @@
status_t AudioSystem::setAllowedCapturePolicy(uid_t uid, audio_flags_mask_t capturePolicy) {
const sp<IAudioPolicyService> aps = get_audio_policy_service();
- if (aps == nullptr) return PERMISSION_DENIED;
+ if (aps == nullptr) return AudioPolicyServiceTraits::getError();
int32_t uidAidl = VALUE_OR_RETURN_STATUS(legacy2aidl_uid_t_int32_t(uid));
int32_t capturePolicyAidl = VALUE_OR_RETURN_STATUS(
@@ -1600,7 +1729,7 @@
audio_offload_mode_t AudioSystem::getOffloadSupport(const audio_offload_info_t& info) {
ALOGV("%s", __func__);
const sp<IAudioPolicyService> aps = get_audio_policy_service();
- if (aps == 0) return AUDIO_OFFLOAD_NOT_SUPPORTED;
+ if (aps == nullptr) return AUDIO_OFFLOAD_NOT_SUPPORTED;
auto result = [&]() -> ConversionResult<audio_offload_mode_t> {
AudioOffloadInfo infoAidl = VALUE_OR_RETURN(
@@ -1625,7 +1754,7 @@
}
const sp<IAudioPolicyService> aps = get_audio_policy_service();
- if (aps == 0) return PERMISSION_DENIED;
+ if (aps == nullptr) return AudioPolicyServiceTraits::getError();
media::AudioPortRole roleAidl = VALUE_OR_RETURN_STATUS(
legacy2aidl_audio_port_role_t_AudioPortRole(role));
@@ -1649,7 +1778,7 @@
std::vector<media::AudioPortFw>* result) {
if (result == nullptr) return BAD_VALUE;
const sp<IAudioPolicyService> aps = get_audio_policy_service();
- if (aps == 0) return PERMISSION_DENIED;
+ if (aps == nullptr) return AudioPolicyServiceTraits::getError();
RETURN_STATUS_IF_ERROR(statusTFromBinderStatus(aps->listDeclaredDevicePorts(role, result)));
return OK;
}
@@ -1659,7 +1788,7 @@
return BAD_VALUE;
}
const sp<IAudioPolicyService> aps = get_audio_policy_service();
- if (aps == 0) return PERMISSION_DENIED;
+ if (aps == nullptr) return AudioPolicyServiceTraits::getError();
media::AudioPortFw portAidl;
RETURN_STATUS_IF_ERROR(
@@ -1675,7 +1804,7 @@
}
const sp<IAudioPolicyService> aps = get_audio_policy_service();
- if (aps == 0) return PERMISSION_DENIED;
+ if (aps == nullptr) return AudioPolicyServiceTraits::getError();
media::AudioPatchFw patchAidl = VALUE_OR_RETURN_STATUS(
legacy2aidl_audio_patch_AudioPatchFw(*patch));
@@ -1688,7 +1817,7 @@
status_t AudioSystem::releaseAudioPatch(audio_patch_handle_t handle) {
const sp<IAudioPolicyService> aps = get_audio_policy_service();
- if (aps == 0) return PERMISSION_DENIED;
+ if (aps == nullptr) return AudioPolicyServiceTraits::getError();
int32_t handleAidl = VALUE_OR_RETURN_STATUS(legacy2aidl_audio_patch_handle_t_int32_t(handle));
return statusTFromBinderStatus(aps->releaseAudioPatch(handleAidl));
@@ -1703,7 +1832,7 @@
}
const sp<IAudioPolicyService> aps = get_audio_policy_service();
- if (aps == 0) return PERMISSION_DENIED;
+ if (aps == nullptr) return AudioPolicyServiceTraits::getError();
Int numPatchesAidl;
@@ -1726,7 +1855,7 @@
}
const sp<IAudioPolicyService> aps = get_audio_policy_service();
- if (aps == 0) return PERMISSION_DENIED;
+ if (aps == nullptr) return AudioPolicyServiceTraits::getError();
media::AudioPortConfigFw configAidl = VALUE_OR_RETURN_STATUS(
legacy2aidl_audio_port_config_AudioPortConfigFw(*config));
@@ -1735,8 +1864,8 @@
status_t AudioSystem::addAudioPortCallback(const sp<AudioPortCallback>& callback) {
const sp<IAudioPolicyService> aps = get_audio_policy_service();
- if (aps == 0) return PERMISSION_DENIED;
- const auto apc = gAudioPolicyServiceHandler.getClient();
+ if (aps == nullptr) return AudioPolicyServiceTraits::getError();
+ const auto apc = getAudioPolicyClient();
if (apc == nullptr) return NO_INIT;
std::lock_guard _l(gApsCallbackMutex);
@@ -1750,8 +1879,8 @@
/*static*/
status_t AudioSystem::removeAudioPortCallback(const sp<AudioPortCallback>& callback) {
const sp<IAudioPolicyService> aps = get_audio_policy_service();
- if (aps == 0) return PERMISSION_DENIED;
- const auto apc = gAudioPolicyServiceHandler.getClient();
+ if (aps == nullptr) return AudioPolicyServiceTraits::getError();
+ const auto apc = AudioSystem::getAudioPolicyClient();
if (apc == nullptr) return NO_INIT;
std::lock_guard _l(gApsCallbackMutex);
@@ -1764,8 +1893,8 @@
status_t AudioSystem::addAudioVolumeGroupCallback(const sp<AudioVolumeGroupCallback>& callback) {
const sp<IAudioPolicyService> aps = get_audio_policy_service();
- if (aps == 0) return PERMISSION_DENIED;
- const auto apc = gAudioPolicyServiceHandler.getClient();
+ if (aps == nullptr) return AudioPolicyServiceTraits::getError();
+ const auto apc = AudioSystem::getAudioPolicyClient();
if (apc == nullptr) return NO_INIT;
std::lock_guard _l(gApsCallbackMutex);
@@ -1778,8 +1907,8 @@
status_t AudioSystem::removeAudioVolumeGroupCallback(const sp<AudioVolumeGroupCallback>& callback) {
const sp<IAudioPolicyService> aps = get_audio_policy_service();
- if (aps == 0) return PERMISSION_DENIED;
- const auto apc = gAudioPolicyServiceHandler.getClient();
+ if (aps == nullptr) return AudioPolicyServiceTraits::getError();
+ const auto apc = AudioSystem::getAudioPolicyClient();
if (apc == nullptr) return NO_INIT;
std::lock_guard _l(gApsCallbackMutex);
@@ -1835,14 +1964,16 @@
return afc->removeSupportedLatencyModesCallback(callback);
}
-audio_port_handle_t AudioSystem::getDeviceIdForIo(audio_io_handle_t audioIo) {
+status_t AudioSystem::getDeviceIdsForIo(audio_io_handle_t audioIo, DeviceIdVector& deviceIds) {
const sp<IAudioFlinger> af = get_audio_flinger();
- if (af == 0) return PERMISSION_DENIED;
+ if (af == nullptr) return AudioFlingerServiceTraits::getError();
const sp<AudioIoDescriptor> desc = getIoDescriptor(audioIo);
if (desc == 0) {
- return AUDIO_PORT_HANDLE_NONE;
+ deviceIds.clear();
+ } else {
+ deviceIds = desc->getDeviceIds();
}
- return desc->getDeviceId();
+ return OK;
}
status_t AudioSystem::acquireSoundTriggerSession(audio_session_t* session,
@@ -1852,7 +1983,7 @@
return BAD_VALUE;
}
const sp<IAudioPolicyService> aps = get_audio_policy_service();
- if (aps == 0) return PERMISSION_DENIED;
+ if (aps == nullptr) return AudioPolicyServiceTraits::getError();
media::SoundTriggerSession retAidl;
RETURN_STATUS_IF_ERROR(
@@ -1866,7 +1997,7 @@
status_t AudioSystem::releaseSoundTriggerSession(audio_session_t session) {
const sp<IAudioPolicyService> aps = get_audio_policy_service();
- if (aps == 0) return PERMISSION_DENIED;
+ if (aps == nullptr) return AudioPolicyServiceTraits::getError();
int32_t sessionAidl = VALUE_OR_RETURN_STATUS(legacy2aidl_audio_session_t_int32_t(session));
return statusTFromBinderStatus(aps->releaseSoundTriggerSession(sessionAidl));
@@ -1874,7 +2005,7 @@
audio_mode_t AudioSystem::getPhoneState() {
const sp<IAudioPolicyService> aps = get_audio_policy_service();
- if (aps == 0) return AUDIO_MODE_INVALID;
+ if (aps == nullptr) return AUDIO_MODE_INVALID;
auto result = [&]() -> ConversionResult<audio_mode_t> {
media::audio::common::AudioMode retAidl;
@@ -1887,7 +2018,7 @@
status_t AudioSystem::registerPolicyMixes(const Vector<AudioMix>& mixes, bool registration) {
const sp<IAudioPolicyService> aps = get_audio_policy_service();
- if (aps == 0) return PERMISSION_DENIED;
+ if (aps == nullptr) return AudioPolicyServiceTraits::getError();
size_t mixesSize = std::min(mixes.size(), size_t{MAX_MIXES_PER_POLICY});
std::vector<media::AudioMix> mixesAidl;
@@ -1903,7 +2034,7 @@
}
const sp<IAudioPolicyService> aps = AudioSystem::get_audio_policy_service();
- if (aps == nullptr) return PERMISSION_DENIED;
+ if (aps == nullptr) return AudioPolicyServiceTraits::getError();
std::vector<::android::media::AudioMix> aidlMixes;
Status status = aps->getRegisteredPolicyMixes(&aidlMixes);
@@ -1920,7 +2051,7 @@
const std::vector<std::pair<AudioMix, std::vector<AudioMixMatchCriterion>>>&
mixesWithUpdates) {
const sp<IAudioPolicyService> aps = get_audio_policy_service();
- if (aps == 0) return PERMISSION_DENIED;
+ if (aps == nullptr) return AudioPolicyServiceTraits::getError();
std::vector<media::AudioMixUpdate> updatesAidl;
updatesAidl.reserve(mixesWithUpdates.size());
@@ -1939,7 +2070,7 @@
status_t AudioSystem::setUidDeviceAffinities(uid_t uid, const AudioDeviceTypeAddrVector& devices) {
const sp<IAudioPolicyService> aps = get_audio_policy_service();
- if (aps == 0) return PERMISSION_DENIED;
+ if (aps == nullptr) return AudioPolicyServiceTraits::getError();
int32_t uidAidl = VALUE_OR_RETURN_STATUS(legacy2aidl_uid_t_int32_t(uid));
std::vector<AudioDevice> devicesAidl = VALUE_OR_RETURN_STATUS(
@@ -1950,7 +2081,7 @@
status_t AudioSystem::removeUidDeviceAffinities(uid_t uid) {
const sp<IAudioPolicyService> aps = get_audio_policy_service();
- if (aps == 0) return PERMISSION_DENIED;
+ if (aps == nullptr) return AudioPolicyServiceTraits::getError();
int32_t uidAidl = VALUE_OR_RETURN_STATUS(legacy2aidl_uid_t_int32_t(uid));
return statusTFromBinderStatus(aps->removeUidDeviceAffinities(uidAidl));
@@ -1959,7 +2090,7 @@
status_t AudioSystem::setUserIdDeviceAffinities(int userId,
const AudioDeviceTypeAddrVector& devices) {
const sp<IAudioPolicyService> aps = get_audio_policy_service();
- if (aps == 0) return PERMISSION_DENIED;
+ if (aps == nullptr) return AudioPolicyServiceTraits::getError();
int32_t userIdAidl = VALUE_OR_RETURN_STATUS(convertReinterpret<int32_t>(userId));
std::vector<AudioDevice> devicesAidl = VALUE_OR_RETURN_STATUS(
@@ -1971,7 +2102,7 @@
status_t AudioSystem::removeUserIdDeviceAffinities(int userId) {
const sp<IAudioPolicyService> aps = get_audio_policy_service();
- if (aps == 0) return PERMISSION_DENIED;
+ if (aps == nullptr) return AudioPolicyServiceTraits::getError();
int32_t userIdAidl = VALUE_OR_RETURN_STATUS(convertReinterpret<int32_t>(userId));
return statusTFromBinderStatus(aps->removeUserIdDeviceAffinities(userIdAidl));
}
@@ -1983,7 +2114,7 @@
return BAD_VALUE;
}
const sp<IAudioPolicyService> aps = get_audio_policy_service();
- if (aps == 0) return PERMISSION_DENIED;
+ if (aps == nullptr) return AudioPolicyServiceTraits::getError();
media::AudioPortConfigFw sourceAidl = VALUE_OR_RETURN_STATUS(
legacy2aidl_audio_port_config_AudioPortConfigFw(*source));
@@ -1998,7 +2129,7 @@
status_t AudioSystem::stopAudioSource(audio_port_handle_t portId) {
const sp<IAudioPolicyService> aps = get_audio_policy_service();
- if (aps == 0) return PERMISSION_DENIED;
+ if (aps == nullptr) return AudioPolicyServiceTraits::getError();
int32_t portIdAidl = VALUE_OR_RETURN_STATUS(legacy2aidl_audio_port_handle_t_int32_t(portId));
return statusTFromBinderStatus(aps->stopAudioSource(portIdAidl));
@@ -2006,7 +2137,7 @@
status_t AudioSystem::setMasterMono(bool mono) {
const sp<IAudioPolicyService> aps = get_audio_policy_service();
- if (aps == 0) return PERMISSION_DENIED;
+ if (aps == nullptr) return AudioPolicyServiceTraits::getError();
return statusTFromBinderStatus(aps->setMasterMono(mono));
}
@@ -2015,26 +2146,26 @@
return BAD_VALUE;
}
const sp<IAudioPolicyService> aps = get_audio_policy_service();
- if (aps == 0) return PERMISSION_DENIED;
+ if (aps == nullptr) return AudioPolicyServiceTraits::getError();
return statusTFromBinderStatus(aps->getMasterMono(mono));
}
status_t AudioSystem::setMasterBalance(float balance) {
const sp<IAudioFlinger> af = get_audio_flinger();
- if (af == 0) return PERMISSION_DENIED;
+ if (af == nullptr) return AudioFlingerServiceTraits::getError();
return af->setMasterBalance(balance);
}
status_t AudioSystem::getMasterBalance(float* balance) {
const sp<IAudioFlinger> af = get_audio_flinger();
- if (af == 0) return PERMISSION_DENIED;
+ if (af == nullptr) return AudioFlingerServiceTraits::getError();
return af->getMasterBalance(balance);
}
float
AudioSystem::getStreamVolumeDB(audio_stream_type_t stream, int index, audio_devices_t device) {
const sp<IAudioPolicyService> aps = get_audio_policy_service();
- if (aps == 0) return NAN;
+ if (aps == nullptr) return NAN;
auto result = [&]() -> ConversionResult<float> {
AudioStreamType streamAidl = VALUE_OR_RETURN(
@@ -2052,13 +2183,13 @@
status_t AudioSystem::getMicrophones(std::vector<media::MicrophoneInfoFw>* microphones) {
const sp<IAudioFlinger> af = get_audio_flinger();
- if (af == 0) return PERMISSION_DENIED;
+ if (af == nullptr) return AudioFlingerServiceTraits::getError();
return af->getMicrophones(microphones);
}
status_t AudioSystem::setAudioHalPids(const std::vector<pid_t>& pids) {
const sp<IAudioFlinger> af = get_audio_flinger();
- if (af == nullptr) return PERMISSION_DENIED;
+ if (af == nullptr) return AudioFlingerServiceTraits::getError();
return af->setAudioHalPids(pids);
}
@@ -2072,7 +2203,7 @@
}
const sp<IAudioPolicyService> aps = get_audio_policy_service();
- if (aps == 0) return PERMISSION_DENIED;
+ if (aps == nullptr) return AudioPolicyServiceTraits::getError();
Int numSurroundFormatsAidl;
numSurroundFormatsAidl.value =
VALUE_OR_RETURN_STATUS(convertIntegral<int32_t>(*numSurroundFormats));
@@ -2099,7 +2230,7 @@
}
const sp<IAudioPolicyService> aps = get_audio_policy_service();
- if (aps == 0) return PERMISSION_DENIED;
+ if (aps == nullptr) return AudioPolicyServiceTraits::getError();
Int numSurroundFormatsAidl;
numSurroundFormatsAidl.value =
VALUE_OR_RETURN_STATUS(convertIntegral<int32_t>(*numSurroundFormats));
@@ -2117,7 +2248,7 @@
status_t AudioSystem::setSurroundFormatEnabled(audio_format_t audioFormat, bool enabled) {
const sp<IAudioPolicyService> aps = get_audio_policy_service();
- if (aps == 0) return PERMISSION_DENIED;
+ if (aps == nullptr) return AudioPolicyServiceTraits::getError();
AudioFormatDescription audioFormatAidl = VALUE_OR_RETURN_STATUS(
legacy2aidl_audio_format_t_AudioFormatDescription(audioFormat));
@@ -2127,7 +2258,7 @@
status_t AudioSystem::setAssistantServicesUids(const std::vector<uid_t>& uids) {
const sp<IAudioPolicyService> aps = get_audio_policy_service();
- if (aps == 0) return PERMISSION_DENIED;
+ if (aps == nullptr) return AudioPolicyServiceTraits::getError();
std::vector<int32_t> uidsAidl = VALUE_OR_RETURN_STATUS(
convertContainer<std::vector<int32_t>>(uids, legacy2aidl_uid_t_int32_t));
@@ -2136,7 +2267,7 @@
status_t AudioSystem::setActiveAssistantServicesUids(const std::vector<uid_t>& activeUids) {
const sp<IAudioPolicyService> aps = get_audio_policy_service();
- if (aps == 0) return PERMISSION_DENIED;
+ if (aps == nullptr) return AudioPolicyServiceTraits::getError();
std::vector<int32_t> activeUidsAidl = VALUE_OR_RETURN_STATUS(
convertContainer<std::vector<int32_t>>(activeUids, legacy2aidl_uid_t_int32_t));
@@ -2145,7 +2276,7 @@
status_t AudioSystem::setA11yServicesUids(const std::vector<uid_t>& uids) {
const sp<IAudioPolicyService> aps = get_audio_policy_service();
- if (aps == 0) return PERMISSION_DENIED;
+ if (aps == nullptr) return AudioPolicyServiceTraits::getError();
std::vector<int32_t> uidsAidl = VALUE_OR_RETURN_STATUS(
convertContainer<std::vector<int32_t>>(uids, legacy2aidl_uid_t_int32_t));
@@ -2154,7 +2285,7 @@
status_t AudioSystem::setCurrentImeUid(uid_t uid) {
const sp<IAudioPolicyService> aps = get_audio_policy_service();
- if (aps == 0) return PERMISSION_DENIED;
+ if (aps == nullptr) return AudioPolicyServiceTraits::getError();
int32_t uidAidl = VALUE_OR_RETURN_STATUS(legacy2aidl_uid_t_int32_t(uid));
return statusTFromBinderStatus(aps->setCurrentImeUid(uidAidl));
@@ -2162,7 +2293,7 @@
bool AudioSystem::isHapticPlaybackSupported() {
const sp<IAudioPolicyService> aps = get_audio_policy_service();
- if (aps == 0) return false;
+ if (aps == nullptr) return false;
auto result = [&]() -> ConversionResult<bool> {
bool retVal;
@@ -2175,7 +2306,7 @@
bool AudioSystem::isUltrasoundSupported() {
const sp<IAudioPolicyService> aps = get_audio_policy_service();
- if (aps == 0) return false;
+ if (aps == nullptr) return false;
auto result = [&]() -> ConversionResult<bool> {
bool retVal;
@@ -2193,7 +2324,7 @@
}
const sp<IAudioPolicyService> aps = get_audio_policy_service();
- if (aps == 0) return PERMISSION_DENIED;
+ if (aps == nullptr) return AudioPolicyServiceTraits::getError();
std::vector<AudioFormatDescription> formatsAidl;
AudioDeviceDescription deviceAidl = VALUE_OR_RETURN_STATUS(
@@ -2209,7 +2340,7 @@
status_t AudioSystem::listAudioProductStrategies(AudioProductStrategyVector& strategies) {
const sp<IAudioPolicyService> aps = get_audio_policy_service();
- if (aps == 0) return PERMISSION_DENIED;
+ if (aps == nullptr) return AudioPolicyServiceTraits::getError();
std::vector<media::AudioProductStrategy> strategiesAidl;
RETURN_STATUS_IF_ERROR(statusTFromBinderStatus(
@@ -2271,7 +2402,7 @@
product_strategy_t& productStrategy,
bool fallbackOnDefault) {
const sp<IAudioPolicyService> aps = get_audio_policy_service();
- if (aps == 0) return PERMISSION_DENIED;
+ if (aps == nullptr) return AudioPolicyServiceTraits::getError();
media::audio::common::AudioAttributes aaAidl = VALUE_OR_RETURN_STATUS(
legacy2aidl_audio_attributes_t_AudioAttributes(aa));
@@ -2287,7 +2418,7 @@
status_t AudioSystem::listAudioVolumeGroups(AudioVolumeGroupVector& groups) {
const sp<IAudioPolicyService> aps = get_audio_policy_service();
- if (aps == 0) return PERMISSION_DENIED;
+ if (aps == nullptr) return AudioPolicyServiceTraits::getError();
std::vector<media::AudioVolumeGroup> groupsAidl;
RETURN_STATUS_IF_ERROR(
@@ -2301,7 +2432,7 @@
volume_group_t& volumeGroup,
bool fallbackOnDefault) {
const sp<IAudioPolicyService> aps = get_audio_policy_service();
- if (aps == 0) return PERMISSION_DENIED;
+ if (aps == nullptr) return AudioPolicyServiceTraits::getError();
media::audio::common::AudioAttributes aaAidl = VALUE_OR_RETURN_STATUS(
legacy2aidl_audio_attributes_t_AudioAttributes(aa));
@@ -2314,13 +2445,13 @@
status_t AudioSystem::setRttEnabled(bool enabled) {
const sp<IAudioPolicyService> aps = get_audio_policy_service();
- if (aps == 0) return PERMISSION_DENIED;
+ if (aps == nullptr) return AudioPolicyServiceTraits::getError();
return statusTFromBinderStatus(aps->setRttEnabled(enabled));
}
bool AudioSystem::isCallScreenModeSupported() {
const sp<IAudioPolicyService> aps = get_audio_policy_service();
- if (aps == 0) return false;
+ if (aps == nullptr) return false;
auto result = [&]() -> ConversionResult<bool> {
bool retAidl;
@@ -2335,9 +2466,7 @@
device_role_t role,
const AudioDeviceTypeAddrVector& devices) {
const sp<IAudioPolicyService> aps = get_audio_policy_service();
- if (aps == 0) {
- return PERMISSION_DENIED;
- }
+ if (aps == nullptr) return AudioPolicyServiceTraits::getError();
int32_t strategyAidl = VALUE_OR_RETURN_STATUS(legacy2aidl_product_strategy_t_int32_t(strategy));
media::DeviceRole roleAidl = VALUE_OR_RETURN_STATUS(legacy2aidl_device_role_t_DeviceRole(role));
@@ -2352,9 +2481,7 @@
device_role_t role,
const AudioDeviceTypeAddrVector& devices) {
const sp<IAudioPolicyService> aps = get_audio_policy_service();
- if (aps == 0) {
- return PERMISSION_DENIED;
- }
+ if (aps == nullptr) return AudioPolicyServiceTraits::getError();
int32_t strategyAidl = VALUE_OR_RETURN_STATUS(legacy2aidl_product_strategy_t_int32_t(strategy));
media::DeviceRole roleAidl = VALUE_OR_RETURN_STATUS(legacy2aidl_device_role_t_DeviceRole(role));
@@ -2368,9 +2495,8 @@
status_t
AudioSystem::clearDevicesRoleForStrategy(product_strategy_t strategy, device_role_t role) {
const sp<IAudioPolicyService> aps = get_audio_policy_service();
- if (aps == 0) {
- return PERMISSION_DENIED;
- }
+ if (aps == nullptr) return AudioPolicyServiceTraits::getError();
+
int32_t strategyAidl = VALUE_OR_RETURN_STATUS(legacy2aidl_product_strategy_t_int32_t(strategy));
media::DeviceRole roleAidl = VALUE_OR_RETURN_STATUS(legacy2aidl_device_role_t_DeviceRole(role));
return statusTFromBinderStatus(
@@ -2381,9 +2507,8 @@
device_role_t role,
AudioDeviceTypeAddrVector& devices) {
const sp<IAudioPolicyService> aps = get_audio_policy_service();
- if (aps == 0) {
- return PERMISSION_DENIED;
- }
+ if (aps == nullptr) return AudioPolicyServiceTraits::getError();
+
int32_t strategyAidl = VALUE_OR_RETURN_STATUS(legacy2aidl_product_strategy_t_int32_t(strategy));
media::DeviceRole roleAidl = VALUE_OR_RETURN_STATUS(legacy2aidl_device_role_t_DeviceRole(role));
std::vector<AudioDevice> devicesAidl;
@@ -2399,9 +2524,7 @@
device_role_t role,
const AudioDeviceTypeAddrVector& devices) {
const sp<IAudioPolicyService> aps = get_audio_policy_service();
- if (aps == 0) {
- return PERMISSION_DENIED;
- }
+ if (aps == nullptr) return AudioPolicyServiceTraits::getError();
AudioSource audioSourceAidl = VALUE_OR_RETURN_STATUS(
legacy2aidl_audio_source_t_AudioSource(audioSource));
@@ -2417,9 +2540,8 @@
device_role_t role,
const AudioDeviceTypeAddrVector& devices) {
const sp<IAudioPolicyService> aps = get_audio_policy_service();
- if (aps == 0) {
- return PERMISSION_DENIED;
- }
+ if (aps == nullptr) return AudioPolicyServiceTraits::getError();
+
AudioSource audioSourceAidl = VALUE_OR_RETURN_STATUS(
legacy2aidl_audio_source_t_AudioSource(audioSource));
media::DeviceRole roleAidl = VALUE_OR_RETURN_STATUS(legacy2aidl_device_role_t_DeviceRole(role));
@@ -2433,9 +2555,8 @@
status_t AudioSystem::removeDevicesRoleForCapturePreset(
audio_source_t audioSource, device_role_t role, const AudioDeviceTypeAddrVector& devices) {
const sp<IAudioPolicyService> aps = get_audio_policy_service();
- if (aps == 0) {
- return PERMISSION_DENIED;
- }
+ if (aps == nullptr) return AudioPolicyServiceTraits::getError();
+
AudioSource audioSourceAidl = VALUE_OR_RETURN_STATUS(
legacy2aidl_audio_source_t_AudioSource(audioSource));
media::DeviceRole roleAidl = VALUE_OR_RETURN_STATUS(legacy2aidl_device_role_t_DeviceRole(role));
@@ -2449,9 +2570,8 @@
status_t AudioSystem::clearDevicesRoleForCapturePreset(audio_source_t audioSource,
device_role_t role) {
const sp<IAudioPolicyService> aps = get_audio_policy_service();
- if (aps == 0) {
- return PERMISSION_DENIED;
- }
+ if (aps == nullptr) return AudioPolicyServiceTraits::getError();
+
AudioSource audioSourceAidl = VALUE_OR_RETURN_STATUS(
legacy2aidl_audio_source_t_AudioSource(audioSource));
media::DeviceRole roleAidl = VALUE_OR_RETURN_STATUS(legacy2aidl_device_role_t_DeviceRole(role));
@@ -2463,9 +2583,7 @@
device_role_t role,
AudioDeviceTypeAddrVector& devices) {
const sp<IAudioPolicyService> aps = get_audio_policy_service();
- if (aps == 0) {
- return PERMISSION_DENIED;
- }
+ if (aps == nullptr) return AudioPolicyServiceTraits::getError();
AudioSource audioSourceAidl = VALUE_OR_RETURN_STATUS(
legacy2aidl_audio_source_t_AudioSource(audioSource));
media::DeviceRole roleAidl = VALUE_OR_RETURN_STATUS(legacy2aidl_device_role_t_DeviceRole(role));
@@ -2484,9 +2602,7 @@
if (spatializer == nullptr) {
return BAD_VALUE;
}
- if (aps == 0) {
- return PERMISSION_DENIED;
- }
+ if (aps == nullptr) return AudioPolicyServiceTraits::getError();
media::GetSpatializerResponse response;
RETURN_STATUS_IF_ERROR(statusTFromBinderStatus(
aps->getSpatializer(callback, &response)));
@@ -2503,9 +2619,7 @@
if (canBeSpatialized == nullptr) {
return BAD_VALUE;
}
- if (aps == 0) {
- return PERMISSION_DENIED;
- }
+ if (aps == nullptr) return AudioPolicyServiceTraits::getError();
audio_attributes_t attributes = attr != nullptr ? *attr : AUDIO_ATTRIBUTES_INITIALIZER;
audio_config_t configuration = config != nullptr ? *config : AUDIO_CONFIG_INITIALIZER;
@@ -2524,9 +2638,7 @@
status_t AudioSystem::getSoundDoseInterface(const sp<media::ISoundDoseCallback>& callback,
sp<media::ISoundDose>* soundDose) {
const sp<IAudioFlinger> af = get_audio_flinger();
- if (af == nullptr) {
- return PERMISSION_DENIED;
- }
+ if (af == nullptr) return AudioFlingerServiceTraits::getError();
if (soundDose == nullptr) {
return BAD_VALUE;
}
@@ -2543,9 +2655,7 @@
}
const sp<IAudioPolicyService> aps = get_audio_policy_service();
- if (aps == 0) {
- return PERMISSION_DENIED;
- }
+ if (aps == nullptr) return AudioPolicyServiceTraits::getError();
media::audio::common::AudioAttributes attrAidl = VALUE_OR_RETURN_STATUS(
legacy2aidl_audio_attributes_t_AudioAttributes(*attr));
@@ -2567,9 +2677,7 @@
}
const sp<IAudioPolicyService> aps = get_audio_policy_service();
- if (aps == 0) {
- return PERMISSION_DENIED;
- }
+ if (aps == nullptr) return AudioPolicyServiceTraits::getError();
media::audio::common::AudioAttributes attrAidl = VALUE_OR_RETURN_STATUS(
legacy2aidl_audio_attributes_t_AudioAttributes(*attr));
@@ -2586,52 +2694,40 @@
status_t AudioSystem::setRequestedLatencyMode(
audio_io_handle_t output, audio_latency_mode_t mode) {
const sp<IAudioFlinger> af = get_audio_flinger();
- if (af == nullptr) {
- return PERMISSION_DENIED;
- }
+ if (af == nullptr) return AudioFlingerServiceTraits::getError();
return af->setRequestedLatencyMode(output, mode);
}
status_t AudioSystem::getSupportedLatencyModes(audio_io_handle_t output,
std::vector<audio_latency_mode_t>* modes) {
const sp<IAudioFlinger> af = get_audio_flinger();
- if (af == nullptr) {
- return PERMISSION_DENIED;
- }
+ if (af == nullptr) return AudioFlingerServiceTraits::getError();
return af->getSupportedLatencyModes(output, modes);
}
status_t AudioSystem::setBluetoothVariableLatencyEnabled(bool enabled) {
const sp<IAudioFlinger> af = get_audio_flinger();
- if (af == nullptr) {
- return PERMISSION_DENIED;
- }
+ if (af == nullptr) return AudioFlingerServiceTraits::getError();
return af->setBluetoothVariableLatencyEnabled(enabled);
}
status_t AudioSystem::isBluetoothVariableLatencyEnabled(
bool *enabled) {
const sp<IAudioFlinger> af = get_audio_flinger();
- if (af == nullptr) {
- return PERMISSION_DENIED;
- }
+ if (af == nullptr) return AudioFlingerServiceTraits::getError();
return af->isBluetoothVariableLatencyEnabled(enabled);
}
status_t AudioSystem::supportsBluetoothVariableLatency(
bool *support) {
const sp<IAudioFlinger> af = get_audio_flinger();
- if (af == nullptr) {
- return PERMISSION_DENIED;
- }
+ if (af == nullptr) return AudioFlingerServiceTraits::getError();
return af->supportsBluetoothVariableLatency(support);
}
status_t AudioSystem::getAudioPolicyConfig(media::AudioPolicyConfig *config) {
const sp<IAudioFlinger> af = get_audio_flinger();
- if (af == nullptr) {
- return PERMISSION_DENIED;
- }
+ if (af == nullptr) return AudioFlingerServiceTraits::getError();
return af->getAudioPolicyConfig(config);
}
@@ -2678,9 +2774,7 @@
LOG_ALWAYS_FATAL_IF(listener == nullptr);
const sp<IAudioPolicyService> aps = get_audio_policy_service();
- if (aps == 0) {
- return PERMISSION_DENIED;
- }
+ if (aps == nullptr) return AudioPolicyServiceTraits::getError();
std::lock_guard _l(AudioSystem::gSoundTriggerMutex);
gSoundTriggerCaptureStateListener = new CaptureStateListenerImpl(aps, listener);
@@ -2692,43 +2786,33 @@
status_t AudioSystem::setVibratorInfos(
const std::vector<media::AudioVibratorInfo>& vibratorInfos) {
const sp<IAudioFlinger> af = get_audio_flinger();
- if (af == nullptr) {
- return PERMISSION_DENIED;
- }
+ if (af == nullptr) return AudioFlingerServiceTraits::getError();
return af->setVibratorInfos(vibratorInfos);
}
-status_t AudioSystem::getMmapPolicyInfo(
+status_t AudioSystem::getMmapPolicyInfos(
AudioMMapPolicyType policyType, std::vector<AudioMMapPolicyInfo> *policyInfos) {
- const sp<IAudioFlinger> af = get_audio_flinger();
- if (af == nullptr) {
- return PERMISSION_DENIED;
- }
- return af->getMmapPolicyInfos(policyType, policyInfos);
+ const sp<IAudioPolicyService> aps = get_audio_policy_service();
+ if (aps == nullptr) return AudioPolicyServiceTraits::getError();
+ return statusTFromBinderStatus(aps->getMmapPolicyInfos(policyType, policyInfos));
}
int32_t AudioSystem::getAAudioMixerBurstCount() {
const sp<IAudioFlinger> af = get_audio_flinger();
- if (af == nullptr) {
- return PERMISSION_DENIED;
- }
+ if (af == nullptr) return AudioFlingerServiceTraits::getError();
return af->getAAudioMixerBurstCount();
}
int32_t AudioSystem::getAAudioHardwareBurstMinUsec() {
const sp<IAudioFlinger> af = get_audio_flinger();
- if (af == nullptr) {
- return PERMISSION_DENIED;
- }
+ if (af == nullptr) return AudioFlingerServiceTraits::getError();
return af->getAAudioHardwareBurstMinUsec();
}
status_t AudioSystem::getSupportedMixerAttributes(
audio_port_handle_t portId, std::vector<audio_mixer_attributes_t> *mixerAttrs) {
const sp<IAudioPolicyService> aps = get_audio_policy_service();
- if (aps == nullptr) {
- return PERMISSION_DENIED;
- }
+ if (aps == nullptr) return AudioPolicyServiceTraits::getError();
int32_t portIdAidl = VALUE_OR_RETURN_STATUS(legacy2aidl_audio_port_handle_t_int32_t(portId));
std::vector<media::AudioMixerAttributesInternal> _aidlReturn;
@@ -2746,9 +2830,7 @@
uid_t uid,
const audio_mixer_attributes_t *mixerAttr) {
const sp<IAudioPolicyService> aps = get_audio_policy_service();
- if (aps == nullptr) {
- return PERMISSION_DENIED;
- }
+ if (aps == nullptr) return AudioPolicyServiceTraits::getError();
media::audio::common::AudioAttributes attrAidl = VALUE_OR_RETURN_STATUS(
legacy2aidl_audio_attributes_t_AudioAttributes(*attr));
@@ -2766,9 +2848,7 @@
audio_port_handle_t portId,
std::optional<audio_mixer_attributes_t> *mixerAttr) {
const sp<IAudioPolicyService> aps = get_audio_policy_service();
- if (aps == nullptr) {
- return PERMISSION_DENIED;
- }
+ if (aps == nullptr) return AudioPolicyServiceTraits::getError();
media::audio::common::AudioAttributes attrAidl = VALUE_OR_RETURN_STATUS(
legacy2aidl_audio_attributes_t_AudioAttributes(*attr));
@@ -2789,9 +2869,7 @@
audio_port_handle_t portId,
uid_t uid) {
const sp<IAudioPolicyService> aps = get_audio_policy_service();
- if (aps == nullptr) {
- return PERMISSION_DENIED;
- }
+ if (aps == nullptr) return AudioPolicyServiceTraits::getError();
media::audio::common::AudioAttributes attrAidl = VALUE_OR_RETURN_STATUS(
legacy2aidl_audio_attributes_t_AudioAttributes(*attr));
@@ -2801,6 +2879,18 @@
aps->clearPreferredMixerAttributes(attrAidl, portIdAidl, uidAidl));
}
+status_t AudioSystem::getMmapPolicyForDevice(AudioMMapPolicyType policyType,
+ audio_devices_t device,
+ AudioMMapPolicyInfo *policyInfo) {
+ const sp<IAudioPolicyService> aps = get_audio_policy_service();
+ if (aps == nullptr) {
+ return PERMISSION_DENIED;
+ }
+ policyInfo->device.type = VALUE_OR_RETURN_STATUS(
+ legacy2aidl_audio_devices_t_AudioDeviceDescription(device));
+ return statusTFromBinderStatus(aps->getMmapPolicyForDevice(policyType, policyInfo));
+}
+
// ---------------------------------------------------------------------------
int AudioSystem::AudioPolicyServiceClient::addAudioPortCallback(
@@ -2945,19 +3035,14 @@
return Status::ok();
}
-void AudioSystem::AudioPolicyServiceClient::binderDied(const wp<IBinder>& who __unused) {
- {
- std::lock_guard _l(mMutex);
- for (const auto& callback : mAudioPortCallbacks) {
- callback->onServiceDied();
- }
- for (const auto& callback : mAudioVolumeGroupCallbacks) {
- callback->onServiceDied();
- }
+void AudioSystem::AudioPolicyServiceClient::onServiceDied() {
+ std::lock_guard _l(mMutex);
+ for (const auto& callback : mAudioPortCallbacks) {
+ callback->onServiceDied();
}
- AudioSystem::clearAudioPolicyService();
-
- ALOGW("AudioPolicyService server died!");
+ for (const auto& callback : mAudioVolumeGroupCallbacks) {
+ callback->onServiceDied();
+ }
}
ConversionResult<record_client_info_t>
diff --git a/media/libaudioclient/AudioTrack.cpp b/media/libaudioclient/AudioTrack.cpp
index e0c5e92..3591fbf 100644
--- a/media/libaudioclient/AudioTrack.cpp
+++ b/media/libaudioclient/AudioTrack.cpp
@@ -1736,7 +1736,7 @@
// allow track invalidation when track is not playing to propagate
// the updated mSelectedDeviceId
if (isPlaying_l()) {
- if (mSelectedDeviceId != mRoutedDeviceId) {
+ if (getFirstDeviceId(mRoutedDeviceIds) != mSelectedDeviceId) {
android_atomic_or(CBLK_INVALID, &mCblk->mFlags);
mProxy->interrupt();
}
@@ -1759,7 +1759,7 @@
}
// must be called with mLock held
-void AudioTrack::updateRoutedDeviceId_l()
+void AudioTrack::updateRoutedDeviceIds_l()
{
// if the track is inactive, do not update actual device as the output stream maybe routed
// to a device not relevant to this client because of other active use cases.
@@ -1767,17 +1767,21 @@
return;
}
if (mOutput != AUDIO_IO_HANDLE_NONE) {
- audio_port_handle_t deviceId = AudioSystem::getDeviceIdForIo(mOutput);
- if (deviceId != AUDIO_PORT_HANDLE_NONE) {
- mRoutedDeviceId = deviceId;
+ DeviceIdVector deviceIds;
+ status_t result = AudioSystem::getDeviceIdsForIo(mOutput, deviceIds);
+ if (result != OK) {
+ ALOGW("%s: getDeviceIdsForIo returned: %d", __func__, result);
+ }
+ if (!deviceIds.empty()) {
+ mRoutedDeviceIds = deviceIds;
}
}
}
-audio_port_handle_t AudioTrack::getRoutedDeviceId() {
+DeviceIdVector AudioTrack::getRoutedDeviceIds() {
AutoMutex lock(mLock);
- updateRoutedDeviceId_l();
- return mRoutedDeviceId;
+ updateRoutedDeviceIds_l();
+ return mRoutedDeviceIds;
}
status_t AudioTrack::attachAuxEffect(int effectId)
@@ -1937,7 +1941,7 @@
mFrameCount = output.frameCount;
mNotificationFramesAct = (uint32_t)output.notificationFrameCount;
- mRoutedDeviceId = output.selectedDeviceId;
+ mRoutedDeviceIds = output.selectedDeviceIds;
mSessionId = output.sessionId;
mStreamType = output.streamType;
@@ -2106,7 +2110,8 @@
.set(AMEDIAMETRICS_PROP_USAGE, toString(mAttributes.usage).c_str())
.set(AMEDIAMETRICS_PROP_THREADID, (int32_t)output.outputId)
.set(AMEDIAMETRICS_PROP_SELECTEDDEVICEID, (int32_t)mSelectedDeviceId)
- .set(AMEDIAMETRICS_PROP_ROUTEDDEVICEID, (int32_t)mRoutedDeviceId)
+ .set(AMEDIAMETRICS_PROP_ROUTEDDEVICEID, (int32_t)(getFirstDeviceId(mRoutedDeviceIds)))
+ .set(AMEDIAMETRICS_PROP_ROUTEDDEVICEIDS, toString(mRoutedDeviceIds).c_str())
.set(AMEDIAMETRICS_PROP_ENCODING, toString(mFormat).c_str())
.set(AMEDIAMETRICS_PROP_CHANNELMASK, (int32_t)mChannelMask)
.set(AMEDIAMETRICS_PROP_FRAMECOUNT, (int32_t)mFrameCount)
@@ -2875,10 +2880,6 @@
__func__, mPortId, isOffloadedOrDirect_l() ? "Offloaded or Direct" : "PCM", from);
++mSequence;
- // refresh the audio configuration cache in this process to make sure we get new
- // output parameters and new IAudioFlinger in createTrack_l()
- AudioSystem::clearAudioConfigCache();
-
if (!forceRestore &&
(isOffloadedOrDirect_l() || mDoNotReconnect)) {
// FIXME re-creation of offloaded and direct tracks is not yet implemented;
@@ -2911,10 +2912,6 @@
const int INITIAL_RETRIES = 3;
int retries = INITIAL_RETRIES;
retry:
- if (retries < INITIAL_RETRIES) {
- // See the comment for clearAudioConfigCache at the start of the function.
- AudioSystem::clearAudioConfigCache();
- }
mFlags = mOrigFlags;
// If a new IAudioTrack is successfully created, createTrack_l() will modify the
@@ -3563,8 +3560,8 @@
result.appendFormat(" notif. frame count(%u), req. notif. frame count(%u),"
" req. notif. per buff(%u)\n",
mNotificationFramesAct, mNotificationFramesReq, mNotificationsPerBufferReq);
- result.appendFormat(" latency (%d), selected device Id(%d), routed device Id(%d)\n",
- mLatency, mSelectedDeviceId, mRoutedDeviceId);
+ result.appendFormat(" latency (%d), selected device Id(%d), routed device Ids(%s)\n",
+ mLatency, mSelectedDeviceId, toString(mRoutedDeviceIds).c_str());
result.appendFormat(" output(%d) AF latency (%u) AF frame count(%zu) AF SampleRate(%u)\n",
mOutput, mAfLatency, mAfFrameCount, mAfSampleRate);
::write(fd, result.c_str(), result.size());
@@ -3631,7 +3628,7 @@
// first time when the track is created we do not have a valid piid
if (mPlayerIId != PLAYER_PIID_INVALID) {
- mAudioManager->playerEvent(mPlayerIId, PLAYER_UPDATE_PORT_ID, mPortId);
+ mAudioManager->playerEvent(mPlayerIId, PLAYER_UPDATE_PORT_ID, {mPortId});
}
}
@@ -3680,7 +3677,7 @@
void AudioTrack::onAudioDeviceUpdate(audio_io_handle_t audioIo,
- audio_port_handle_t deviceId)
+ const DeviceIdVector& deviceIds)
{
sp<AudioSystem::AudioDeviceCallback> callback;
{
@@ -3692,12 +3689,12 @@
// only update device if the track is active as route changes due to other use cases are
// irrelevant for this client
if (mState == STATE_ACTIVE) {
- mRoutedDeviceId = deviceId;
+ mRoutedDeviceIds = deviceIds;
}
}
if (callback.get() != nullptr) {
- callback->onAudioDeviceUpdate(mOutput, mRoutedDeviceId);
+ callback->onAudioDeviceUpdate(mOutput, mRoutedDeviceIds);
}
}
diff --git a/media/libaudioclient/IAudioFlinger.cpp b/media/libaudioclient/IAudioFlinger.cpp
index 9241973..1523607 100644
--- a/media/libaudioclient/IAudioFlinger.cpp
+++ b/media/libaudioclient/IAudioFlinger.cpp
@@ -103,8 +103,8 @@
aidl.flags = VALUE_OR_RETURN(legacy2aidl_audio_output_flags_t_int32_t_mask(flags));
aidl.frameCount = VALUE_OR_RETURN(convertIntegral<int64_t>(frameCount));
aidl.notificationFrameCount = VALUE_OR_RETURN(convertIntegral<int64_t>(notificationFrameCount));
- aidl.selectedDeviceId = VALUE_OR_RETURN(
- legacy2aidl_audio_port_handle_t_int32_t(selectedDeviceId));
+ aidl.selectedDeviceIds = VALUE_OR_RETURN(convertContainer<std::vector<int32_t>>(
+ selectedDeviceIds, legacy2aidl_audio_port_handle_t_int32_t));
aidl.sessionId = VALUE_OR_RETURN(legacy2aidl_audio_session_t_int32_t(sessionId));
aidl.sampleRate = VALUE_OR_RETURN(convertIntegral<int32_t>(sampleRate));
aidl.streamType = VALUE_OR_RETURN(
@@ -132,8 +132,8 @@
legacy.frameCount = VALUE_OR_RETURN(convertIntegral<size_t>(aidl.frameCount));
legacy.notificationFrameCount = VALUE_OR_RETURN(
convertIntegral<size_t>(aidl.notificationFrameCount));
- legacy.selectedDeviceId = VALUE_OR_RETURN(
- aidl2legacy_int32_t_audio_port_handle_t(aidl.selectedDeviceId));
+ legacy.selectedDeviceIds = VALUE_OR_RETURN(convertContainer<DeviceIdVector>(
+ aidl.selectedDeviceIds, aidl2legacy_int32_t_audio_port_handle_t));
legacy.sessionId = VALUE_OR_RETURN(aidl2legacy_int32_t_audio_session_t(aidl.sessionId));
legacy.sampleRate = VALUE_OR_RETURN(convertIntegral<uint32_t>(aidl.sampleRate));
legacy.streamType = VALUE_OR_RETURN(
@@ -337,11 +337,12 @@
}
status_t AudioFlingerClientAdapter::setStreamVolume(audio_stream_type_t stream, float value,
- audio_io_handle_t output) {
+ bool muted, audio_io_handle_t output) {
AudioStreamType streamAidl = VALUE_OR_RETURN_STATUS(
legacy2aidl_audio_stream_type_t_AudioStreamType(stream));
int32_t outputAidl = VALUE_OR_RETURN_STATUS(legacy2aidl_audio_io_handle_t_int32_t(output));
- return statusTFromBinderStatus(mDelegate->setStreamVolume(streamAidl, value, outputAidl));
+ return statusTFromBinderStatus(
+ mDelegate->setStreamVolume(streamAidl, value, muted, outputAidl));
}
status_t AudioFlingerClientAdapter::setStreamMute(audio_stream_type_t stream, bool muted) {
@@ -351,12 +352,14 @@
}
status_t AudioFlingerClientAdapter::setPortsVolume(
- const std::vector<audio_port_handle_t>& portIds, float volume, audio_io_handle_t output) {
+ const std::vector<audio_port_handle_t> &portIds, float volume, bool muted,
+ audio_io_handle_t output) {
std::vector<int32_t> portIdsAidl = VALUE_OR_RETURN_STATUS(
convertContainer<std::vector<int32_t>>(
portIds, legacy2aidl_audio_port_handle_t_int32_t));
int32_t outputAidl = VALUE_OR_RETURN_STATUS(legacy2aidl_audio_io_handle_t_int32_t(output));
- return statusTFromBinderStatus(mDelegate->setPortsVolume(portIdsAidl, volume, outputAidl));
+ return statusTFromBinderStatus(
+ mDelegate->setPortsVolume(portIdsAidl, volume, muted, outputAidl));
}
status_t AudioFlingerClientAdapter::setMode(audio_mode_t mode) {
@@ -1007,12 +1010,13 @@
}
Status AudioFlingerServerAdapter::setStreamVolume(AudioStreamType stream, float value,
- int32_t output) {
+ bool muted, int32_t output) {
audio_stream_type_t streamLegacy = VALUE_OR_RETURN_BINDER(
aidl2legacy_AudioStreamType_audio_stream_type_t(stream));
audio_io_handle_t outputLegacy = VALUE_OR_RETURN_BINDER(
aidl2legacy_int32_t_audio_io_handle_t(output));
- return Status::fromStatusT(mDelegate->setStreamVolume(streamLegacy, value, outputLegacy));
+ return Status::fromStatusT(
+ mDelegate->setStreamVolume(streamLegacy, value, muted, outputLegacy));
}
Status AudioFlingerServerAdapter::setStreamMute(AudioStreamType stream, bool muted) {
@@ -1022,13 +1026,14 @@
}
Status AudioFlingerServerAdapter::setPortsVolume(
- const std::vector<int32_t>& portIds, float volume, int32_t output) {
+ const std::vector<int32_t>& portIds, float volume, bool muted, int32_t output) {
std::vector<audio_port_handle_t> portIdsLegacy = VALUE_OR_RETURN_BINDER(
convertContainer<std::vector<audio_port_handle_t>>(
portIds, aidl2legacy_int32_t_audio_port_handle_t));
audio_io_handle_t outputLegacy = VALUE_OR_RETURN_BINDER(
aidl2legacy_int32_t_audio_io_handle_t(output));
- return Status::fromStatusT(mDelegate->setPortsVolume(portIdsLegacy, volume, outputLegacy));
+ return Status::fromStatusT(
+ mDelegate->setPortsVolume(portIdsLegacy, volume, muted, outputLegacy));
}
Status AudioFlingerServerAdapter::setMode(AudioMode mode) {
diff --git a/media/libaudioclient/PlayerBase.cpp b/media/libaudioclient/PlayerBase.cpp
index 651255a..5999040 100644
--- a/media/libaudioclient/PlayerBase.cpp
+++ b/media/libaudioclient/PlayerBase.cpp
@@ -30,8 +30,7 @@
PlayerBase::PlayerBase() : BnPlayer(),
mPanMultiplierL(1.0f), mPanMultiplierR(1.0f),
mVolumeMultiplierL(1.0f), mVolumeMultiplierR(1.0f),
- mPIId(PLAYER_PIID_INVALID), mLastReportedEvent(PLAYER_STATE_UNKNOWN),
- mLastReportedDeviceId(AUDIO_PORT_HANDLE_NONE)
+ mPIId(PLAYER_PIID_INVALID), mLastReportedEvent(PLAYER_STATE_UNKNOWN)
{
ALOGD("PlayerBase::PlayerBase()");
// use checkService() to avoid blocking if audio service is not up yet
@@ -68,7 +67,7 @@
}
if (mPIId != PLAYER_PIID_INVALID && portId != AUDIO_PORT_HANDLE_NONE) {
- mAudioManager->playerEvent(mPIId, android::PLAYER_UPDATE_PORT_ID, portId);
+ mAudioManager->playerEvent(mPIId, android::PLAYER_UPDATE_PORT_ID, { portId });
}
}
@@ -80,13 +79,13 @@
}
//------------------------------------------------------------------------------
-void PlayerBase::servicePlayerEvent(player_state_t event, audio_port_handle_t deviceId) {
+void PlayerBase::servicePlayerEvent(player_state_t event, const DeviceIdVector& deviceIds) {
if (mAudioManager != 0) {
bool changed = false;
{
Mutex::Autolock _l(mDeviceIdLock);
- changed = mLastReportedDeviceId != deviceId;
- mLastReportedDeviceId = deviceId;
+ changed = !areDeviceIdsEqual(deviceIds, mLastReportedDeviceIds);
+ mLastReportedDeviceIds = deviceIds;
}
{
@@ -99,7 +98,7 @@
}
}
if (changed && (mPIId != PLAYER_PIID_INVALID)) {
- mAudioManager->playerEvent(mPIId, event, deviceId);
+ mAudioManager->playerEvent(mPIId, event, deviceIds);
}
}
}
@@ -112,18 +111,18 @@
}
//FIXME temporary method while some player state is outside of this class
-void PlayerBase::reportEvent(player_state_t event, audio_port_handle_t deviceId) {
- servicePlayerEvent(event, deviceId);
+void PlayerBase::reportEvent(player_state_t event, const DeviceIdVector& deviceIds) {
+ servicePlayerEvent(event, deviceIds);
}
-void PlayerBase::baseUpdateDeviceId(audio_port_handle_t deviceId) {
- servicePlayerEvent(PLAYER_UPDATE_DEVICE_ID, deviceId);
+void PlayerBase::baseUpdateDeviceIds(const DeviceIdVector& deviceIds) {
+ servicePlayerEvent(PLAYER_UPDATE_DEVICE_ID, deviceIds);
}
-status_t PlayerBase::startWithStatus(audio_port_handle_t deviceId) {
+status_t PlayerBase::startWithStatus(const DeviceIdVector& deviceIds) {
status_t status = playerStart();
if (status == NO_ERROR) {
- servicePlayerEvent(PLAYER_STATE_STARTED, deviceId);
+ servicePlayerEvent(PLAYER_STATE_STARTED, deviceIds);
} else {
ALOGW("PlayerBase::start() error %d", status);
}
@@ -133,7 +132,7 @@
status_t PlayerBase::pauseWithStatus() {
status_t status = playerPause();
if (status == NO_ERROR) {
- servicePlayerEvent(PLAYER_STATE_PAUSED, AUDIO_PORT_HANDLE_NONE);
+ servicePlayerEvent(PLAYER_STATE_PAUSED, {});
} else {
ALOGW("PlayerBase::pause() error %d", status);
}
@@ -144,7 +143,7 @@
status_t status = playerStop();
if (status == NO_ERROR) {
- servicePlayerEvent(PLAYER_STATE_STOPPED, AUDIO_PORT_HANDLE_NONE);
+ servicePlayerEvent(PLAYER_STATE_STOPPED, {});
} else {
ALOGW("PlayerBase::stop() error %d", status);
}
@@ -155,12 +154,12 @@
// Implementation of IPlayer
binder::Status PlayerBase::start() {
ALOGD("PlayerBase::start() from IPlayer");
- audio_port_handle_t deviceId;
+ DeviceIdVector deviceIds;
{
Mutex::Autolock _l(mDeviceIdLock);
- deviceId = mLastReportedDeviceId;
+ deviceIds = mLastReportedDeviceIds;
}
- (void)startWithStatus(deviceId);
+ (void)startWithStatus(deviceIds);
return binder::Status::ok();
}
diff --git a/media/libaudioclient/TrackPlayerBase.cpp b/media/libaudioclient/TrackPlayerBase.cpp
index bc38251..7928c65 100644
--- a/media/libaudioclient/TrackPlayerBase.cpp
+++ b/media/libaudioclient/TrackPlayerBase.cpp
@@ -60,8 +60,8 @@
}
void TrackPlayerBase::SelfAudioDeviceCallback::onAudioDeviceUpdate(audio_io_handle_t __unused,
- audio_port_handle_t deviceId) {
- mSelf.baseUpdateDeviceId(deviceId);
+ const DeviceIdVector& deviceIds) {
+ mSelf.baseUpdateDeviceIds(deviceIds);
}
void TrackPlayerBase::doDestroy() {
diff --git a/media/libaudioclient/aidl/android/media/CreateTrackResponse.aidl b/media/libaudioclient/aidl/android/media/CreateTrackResponse.aidl
index ab60461..0c9a947 100644
--- a/media/libaudioclient/aidl/android/media/CreateTrackResponse.aidl
+++ b/media/libaudioclient/aidl/android/media/CreateTrackResponse.aidl
@@ -33,8 +33,8 @@
int flags;
long frameCount;
long notificationFrameCount;
- /** Interpreted as audio_port_handle_t. */
- int selectedDeviceId;
+ /** Interpreted as audio_port_handle_t[]. */
+ int[] selectedDeviceIds;
int sessionId;
int sampleRate;
AudioStreamType streamType;
diff --git a/media/libaudioclient/aidl/android/media/GetInputForAttrResponse.aidl b/media/libaudioclient/aidl/android/media/GetInputForAttrResponse.aidl
index 347bf79..9e57820 100644
--- a/media/libaudioclient/aidl/android/media/GetInputForAttrResponse.aidl
+++ b/media/libaudioclient/aidl/android/media/GetInputForAttrResponse.aidl
@@ -28,6 +28,8 @@
int selectedDeviceId;
/** Interpreted as audio_port_handle_t. */
int portId;
+ /** The virtual device id corresponding to the opened input. */
+ int virtualDeviceId;
/** The suggested config if fails to get an input. **/
AudioConfigBase config;
}
diff --git a/media/libaudioclient/aidl/android/media/GetOutputForAttrResponse.aidl b/media/libaudioclient/aidl/android/media/GetOutputForAttrResponse.aidl
index 4b26d5b..5d066bb 100644
--- a/media/libaudioclient/aidl/android/media/GetOutputForAttrResponse.aidl
+++ b/media/libaudioclient/aidl/android/media/GetOutputForAttrResponse.aidl
@@ -26,8 +26,8 @@
/** Interpreted as audio_io_handle_t. */
int output;
AudioStreamType stream;
- /** Interpreted as audio_port_handle_t. */
- int selectedDeviceId;
+ /** Interpreted as audio_port_handle_t[]. */
+ int[] selectedDeviceIds;
/** Interpreted as audio_port_handle_t. */
int portId;
/** Interpreted as audio_io_handle_t[]. */
@@ -41,4 +41,6 @@
AudioAttributes attr;
/** initial port volume for the new audio track */
float volume;
+ /** initial port muted state for the new audio track */
+ boolean muted;
}
diff --git a/media/libaudioclient/aidl/android/media/IAudioFlingerService.aidl b/media/libaudioclient/aidl/android/media/IAudioFlingerService.aidl
index 1c825bc..474ab11 100644
--- a/media/libaudioclient/aidl/android/media/IAudioFlingerService.aidl
+++ b/media/libaudioclient/aidl/android/media/IAudioFlingerService.aidl
@@ -96,15 +96,17 @@
/*
* Set stream type state. This will probably be used by
* the preference panel, mostly.
+ * This method is deprecated. Please use the setPortsVolume method instead.
*/
- void setStreamVolume(AudioStreamType stream, float value, int /* audio_io_handle_t */ output);
+ void setStreamVolume(AudioStreamType stream, float value, boolean muted,
+ int /* audio_io_handle_t */ output);
void setStreamMute(AudioStreamType stream, boolean muted);
/*
* Set AudioTrack port ids volume attribute. This is the new way of controlling volume from
* AudioPolicyManager to AudioFlinger.
*/
- void setPortsVolume(in int[] /* audio_port_handle_t[] */ portIds, float volume,
+ void setPortsVolume(in int[] /* audio_port_handle_t[] */ portIds, float volume, boolean muted,
int /* audio_io_handle_t */ output);
// set audio mode.
diff --git a/media/libaudioclient/aidl/android/media/IAudioPolicyService.aidl b/media/libaudioclient/aidl/android/media/IAudioPolicyService.aidl
index ac42ea9..fab2d95 100644
--- a/media/libaudioclient/aidl/android/media/IAudioPolicyService.aidl
+++ b/media/libaudioclient/aidl/android/media/IAudioPolicyService.aidl
@@ -48,6 +48,8 @@
import android.media.audio.common.AudioDevice;
import android.media.audio.common.AudioDeviceDescription;
import android.media.audio.common.AudioFormatDescription;
+import android.media.audio.common.AudioMMapPolicyInfo;
+import android.media.audio.common.AudioMMapPolicyType;
import android.media.audio.common.AudioMode;
import android.media.audio.common.AudioProfile;
import android.media.audio.common.AudioOffloadInfo;
@@ -92,7 +94,7 @@
in AttributionSourceState attributionSource,
in AudioConfig config,
int /* Bitmask, indexed by AudioOutputFlags */ flags,
- int /* audio_port_handle_t */ selectedDeviceId);
+ in int[] /* audio_port_handle_t */ selectedDeviceIds);
void startOutput(int /* audio_port_handle_t */ portId);
@@ -116,9 +118,9 @@
void releaseInput(int /* audio_port_handle_t */ portId);
- oneway void setDeviceAbsoluteVolumeEnabled(in AudioDevice device,
- boolean enabled,
- AudioStreamType streamToDriveAbs);
+ void setDeviceAbsoluteVolumeEnabled(in AudioDevice device,
+ boolean enabled,
+ AudioStreamType streamToDriveAbs);
void initStreamVolume(AudioStreamType stream,
int indexMin,
@@ -126,14 +128,14 @@
void setStreamVolumeIndex(AudioStreamType stream,
in AudioDeviceDescription device,
- int index);
+ int index, boolean muted);
int getStreamVolumeIndex(AudioStreamType stream,
in AudioDeviceDescription device);
void setVolumeIndexForAttributes(in AudioAttributes attr,
in AudioDeviceDescription device,
- int index);
+ int index, boolean muted);
int getVolumeIndexForAttributes(in AudioAttributes attr,
in AudioDeviceDescription device);
@@ -482,6 +484,17 @@
* required to control audio access.
*/
INativePermissionController getPermissionController();
+
+ /**
+ * Query mmap policy information.
+ */
+ AudioMMapPolicyInfo[] getMmapPolicyInfos(AudioMMapPolicyType policyType);
+
+ /**
+ * Query the mmap policy for the device referenced by the given policy info.
+ */
+ void getMmapPolicyForDevice(AudioMMapPolicyType policyType,
+ inout AudioMMapPolicyInfo policyInfo);
// When adding a new method, please review and update
// AudioPolicyService.cpp AudioPolicyService::onTransact()
// AudioPolicyService.cpp IAUDIOPOLICYSERVICE_BINDER_METHOD_MACRO_LIST
diff --git a/media/libaudioclient/aidl/fuzzer/Android.bp b/media/libaudioclient/aidl/fuzzer/Android.bp
index 14e528f..05db9e5 100644
--- a/media/libaudioclient/aidl/fuzzer/Android.bp
+++ b/media/libaudioclient/aidl/fuzzer/Android.bp
@@ -22,7 +22,7 @@
name: "libaudioclient_aidl_fuzzer_defaults",
static_libs: [
"android.hardware.audio.common@7.0-enums",
- "audiopermissioncontroller",
+ "libaudiopermission",
"libaudiomockhal",
"libfakeservicemanager",
"libjsoncpp",
diff --git a/media/libaudioclient/fuzzer/audioflinger_fuzzer.cpp b/media/libaudioclient/fuzzer/audioflinger_fuzzer.cpp
index 710a656..ba5b3b1 100644
--- a/media/libaudioclient/fuzzer/audioflinger_fuzzer.cpp
+++ b/media/libaudioclient/fuzzer/audioflinger_fuzzer.cpp
@@ -396,7 +396,7 @@
static_cast<audio_port_handle_t>(mFdp.ConsumeIntegral<int32_t>());
record->setInputDevice(deviceId);
record->getInputDevice();
- record->getRoutedDeviceId();
+ record->getRoutedDeviceIds();
record->getPortId();
}
@@ -511,11 +511,11 @@
stream = getValue(&mFdp, kStreamtypes);
AudioSystem::setStreamVolume(stream, mFdp.ConsumeFloatingPoint<float>(),
- mFdp.ConsumeIntegral<int32_t>());
+ mFdp.ConsumeBool(), mFdp.ConsumeIntegral<int32_t>());
} else {
std::vector <audio_port_handle_t> portsForVolumeChange{};
AudioSystem::setPortsVolume(portsForVolumeChange, mFdp.ConsumeFloatingPoint<float>(),
- mFdp.ConsumeIntegral<int32_t>());
+ mFdp.ConsumeBool(), mFdp.ConsumeIntegral<int32_t>());
}
audio_mode_t mode = getValue(&mFdp, kModes);
AudioSystem::setMode(mode);
diff --git a/media/libaudioclient/include/media/AudioIoDescriptor.h b/media/libaudioclient/include/media/AudioIoDescriptor.h
index 405ec7d..961cc1c 100644
--- a/media/libaudioclient/include/media/AudioIoDescriptor.h
+++ b/media/libaudioclient/include/media/AudioIoDescriptor.h
@@ -69,12 +69,21 @@
size_t getFrameCountHAL() const { return mFrameCountHAL; }
uint32_t getLatency() const { return mLatency; }
audio_port_handle_t getPortId() const { return mPortId; }
- audio_port_handle_t getDeviceId() const {
- if (mPatch.num_sources != 0 && mPatch.num_sinks != 0) {
- // FIXME: the API only returns the first device in case of multiple device selection
- return mIsInput ? mPatch.sources[0].id : mPatch.sinks[0].id;
+ std::vector<audio_port_handle_t> getDeviceIds() const {
+ std::vector<audio_port_handle_t> deviceIds;
+ if (mPatch.num_sources == 0 || mPatch.num_sinks == 0) {
+ return deviceIds;
}
- return AUDIO_PORT_HANDLE_NONE;
+ if (mIsInput) {
+ for (unsigned int i = 0; i < mPatch.num_sources; i++) {
+ deviceIds.push_back(mPatch.sources[i].id);
+ }
+ } else {
+ for (unsigned int i = 0; i < mPatch.num_sinks; i++) {
+ deviceIds.push_back(mPatch.sinks[i].id);
+ }
+ }
+ return deviceIds;
}
void setPatch(const audio_patch& patch) { mPatch = patch; }
@@ -88,7 +97,13 @@
(mIsInput ? audio_channel_in_mask_to_string(mChannelMask) :
audio_channel_out_mask_to_string(mChannelMask)))
<< ", frameCount " << mFrameCount << ", frameCountHAL " << mFrameCountHAL
- << ", deviceId " << getDeviceId();
+ << ", deviceIds ";
+
+ std::vector<audio_port_handle_t> deviceIds = getDeviceIds();
+ for (auto deviceId : deviceIds) {
+ ss << deviceId << " ";
+ }
+
return ss.str();
}
diff --git a/media/libaudioclient/include/media/AudioRecord.h b/media/libaudioclient/include/media/AudioRecord.h
index 25d91d3..80a756e 100644
--- a/media/libaudioclient/include/media/AudioRecord.h
+++ b/media/libaudioclient/include/media/AudioRecord.h
@@ -495,19 +495,19 @@
*/
audio_port_handle_t getInputDevice();
- /* Returns the ID of the audio device actually used by the input to which this AudioRecord
+ /* Returns the IDs of the audio devices actually used by the input to which this AudioRecord
* is attached.
- * The device ID is relevant only if the AudioRecord is active.
- * When the AudioRecord is inactive, the device ID returned can be either:
- * - AUDIO_PORT_HANDLE_NONE if the AudioRecord is not attached to any output.
- * - The device ID used before paused or stopped.
+ * The device IDs are relevant only if the AudioRecord is active.
+ * When the AudioRecord is inactive, the device IDs returned can be either:
+ * - An empty vector if the AudioRecord is not attached to any output.
+ * - The device IDs used before paused or stopped.
* - The device ID selected by audio policy manager of setOutputDevice() if the AudioRecord
* has not been started yet.
*
* Parameters:
* none.
*/
- audio_port_handle_t getRoutedDeviceId();
+ DeviceIdVector getRoutedDeviceIds();
/* Add an AudioDeviceCallback. The caller will be notified when the audio device
* to which this AudioRecord is routed is updated.
@@ -534,7 +534,7 @@
// AudioSystem::AudioDeviceCallback> virtuals
virtual void onAudioDeviceUpdate(audio_io_handle_t audioIo,
- audio_port_handle_t deviceId);
+ const DeviceIdVector& deviceIds);
private:
/* If nonContig is non-NULL, it is an output parameter that will be set to the number of
@@ -678,7 +678,7 @@
// FIXME enum is faster than strcmp() for parameter 'from'
status_t restoreRecord_l(const char *from);
- void updateRoutedDeviceId_l();
+ void updateRoutedDeviceIds_l();
sp<AudioRecordThread> mAudioRecordThread;
mutable Mutex mLock;
@@ -810,7 +810,7 @@
audio_port_handle_t mSelectedDeviceId = AUDIO_PORT_HANDLE_NONE;
// Device actually selected by AudioPolicyManager: This may not match the app
// selection depending on other activity and connected devices
- audio_port_handle_t mRoutedDeviceId = AUDIO_PORT_HANDLE_NONE;
+ DeviceIdVector mRoutedDeviceIds;
wp<AudioSystem::AudioDeviceCallback> mDeviceCallback;
diff --git a/media/libaudioclient/include/media/AudioSystem.h b/media/libaudioclient/include/media/AudioSystem.h
index 40e5673..45ede3c 100644
--- a/media/libaudioclient/include/media/AudioSystem.h
+++ b/media/libaudioclient/include/media/AudioSystem.h
@@ -103,6 +103,7 @@
template <typename ServiceInterface, typename Client, typename AidlInterface,
typename ServiceTraits>
friend class ServiceHandler;
+ friend class AudioFlingerServiceTraits;
public:
@@ -126,7 +127,7 @@
// set stream volume on specified output
static status_t setStreamVolume(audio_stream_type_t stream, float value,
- audio_io_handle_t output);
+ bool muted, audio_io_handle_t output);
// mute/unmute stream
static status_t setStreamMute(audio_stream_type_t stream, bool mute);
@@ -135,11 +136,12 @@
* Set volume for given AudioTrack port ids on specified output
* @param portIds to consider
* @param volume to set
+ * @param muted to set
* @param output to consider
* @return NO_ERROR if successful
*/
static status_t setPortsVolume(const std::vector<audio_port_handle_t>& portIds,
- float volume, audio_io_handle_t output);
+ float volume, bool muted, audio_io_handle_t output);
// set audio mode in audio hardware
static status_t setMode(audio_mode_t mode);
@@ -340,12 +342,13 @@
const AttributionSourceState& attributionSource,
audio_config_t *config,
audio_output_flags_t flags,
- audio_port_handle_t *selectedDeviceId,
+ DeviceIdVector *selectedDeviceIds,
audio_port_handle_t *portId,
std::vector<audio_io_handle_t> *secondaryOutputs,
bool *isSpatialized,
bool *isBitPerfect,
- float *volume);
+ float *volume,
+ bool *muted);
static status_t startOutput(audio_port_handle_t portId);
static status_t stopOutput(audio_port_handle_t portId);
static void releaseOutput(audio_port_handle_t portId);
@@ -391,6 +394,7 @@
int indexMax);
static status_t setStreamVolumeIndex(audio_stream_type_t stream,
int index,
+ bool muted,
audio_devices_t device);
static status_t getStreamVolumeIndex(audio_stream_type_t stream,
int *index,
@@ -398,6 +402,7 @@
static status_t setVolumeIndexForAttributes(const audio_attributes_t &attr,
int index,
+ bool muted,
audio_devices_t device);
static status_t getVolumeIndexForAttributes(const audio_attributes_t &attr,
int &index,
@@ -422,17 +427,12 @@
static status_t setEffectEnabled(int id, bool enabled);
static status_t moveEffectsToIo(const std::vector<int>& ids, audio_io_handle_t io);
- // clear stream to output mapping cache (gStreamOutputMap)
- // and output configuration cache (gOutputs)
- static void clearAudioConfigCache();
-
// Sets a local AudioPolicyService interface to be used by AudioSystem.
// This is used by audioserver main() to allow client object initialization
// before exposing any interfaces to ServiceManager.
static status_t setLocalAudioPolicyService(const sp<media::IAudioPolicyService>& aps);
static sp<media::IAudioPolicyService> get_audio_policy_service();
- static void clearAudioPolicyService();
// helpers for android.media.AudioManager.getProperty(), see description there for meaning
static uint32_t getPrimaryOutputSamplingRate();
@@ -767,7 +767,7 @@
virtual ~AudioDeviceCallback() {}
virtual void onAudioDeviceUpdate(audio_io_handle_t audioIo,
- audio_port_handle_t deviceId) = 0;
+ const DeviceIdVector& deviceIds) = 0;
};
static status_t addAudioDeviceCallback(const wp<AudioDeviceCallback>& callback,
@@ -793,11 +793,11 @@
static status_t removeSupportedLatencyModesCallback(
const sp<SupportedLatencyModesCallback>& callback);
- static audio_port_handle_t getDeviceIdForIo(audio_io_handle_t audioIo);
+ static status_t getDeviceIdsForIo(audio_io_handle_t audioIo, DeviceIdVector& deviceIds);
static status_t setVibratorInfos(const std::vector<media::AudioVibratorInfo>& vibratorInfos);
- static status_t getMmapPolicyInfo(
+ static status_t getMmapPolicyInfos(
media::audio::common::AudioMMapPolicyType policyType,
std::vector<media::audio::common::AudioMMapPolicyInfo> *policyInfos);
@@ -805,7 +805,11 @@
static int32_t getAAudioHardwareBurstMinUsec();
- class AudioFlingerClient: public IBinder::DeathRecipient, public media::BnAudioFlingerClient
+ static status_t getMmapPolicyForDevice(
+ media::audio::common::AudioMMapPolicyType policyType, audio_devices_t device,
+ media::audio::common::AudioMMapPolicyInfo *policyInfo);
+
+ class AudioFlingerClient: public media::BnAudioFlingerClient
{
public:
AudioFlingerClient() = default;
@@ -815,9 +819,6 @@
audio_channel_mask_t channelMask, size_t* buffSize) EXCLUDES(mMutex);
sp<AudioIoDescriptor> getIoDescriptor(audio_io_handle_t ioHandle) EXCLUDES(mMutex);
- // DeathRecipient
- void binderDied(const wp<IBinder>& who) final;
-
// IAudioFlingerClient
// indicate a change in the configuration of an output or input: keeps the cached
@@ -841,7 +842,8 @@
status_t removeSupportedLatencyModesCallback(
const sp<SupportedLatencyModesCallback>& callback) EXCLUDES(mMutex);
- audio_port_handle_t getDeviceIdForIo(audio_io_handle_t audioIo) EXCLUDES(mMutex);
+ status_t getDeviceIdsForIo(audio_io_handle_t audioIo, DeviceIdVector& deviceIds)
+ EXCLUDES(mMutex);
private:
mutable std::mutex mMutex;
@@ -862,8 +864,7 @@
sp<AudioIoDescriptor> getIoDescriptor_l(audio_io_handle_t ioHandle) REQUIRES(mMutex);
};
- class AudioPolicyServiceClient: public IBinder::DeathRecipient,
- public media::BnAudioPolicyServiceClient {
+ class AudioPolicyServiceClient: public media::BnAudioPolicyServiceClient {
public:
AudioPolicyServiceClient() = default;
@@ -887,8 +888,7 @@
return !mAudioVolumeGroupCallbacks.empty();
}
- // DeathRecipient
- void binderDied(const wp<IBinder>& who) final;
+ void onServiceDied();
// IAudioPolicyServiceClient
binder::Status onAudioVolumeGroupChanged(int32_t group, int32_t flags) override;
@@ -918,6 +918,7 @@
static audio_io_handle_t getOutput(audio_stream_type_t stream);
static sp<AudioFlingerClient> getAudioFlingerClient();
+ static sp<AudioPolicyServiceClient> getAudioPolicyClient();
static sp<AudioIoDescriptor> getIoDescriptor(audio_io_handle_t ioHandle);
// Invokes all registered error callbacks with the given error code.
diff --git a/media/libaudioclient/include/media/AudioTimestamp.h b/media/libaudioclient/include/media/AudioTimestamp.h
index e5925dd..45d5595 100644
--- a/media/libaudioclient/include/media/AudioTimestamp.h
+++ b/media/libaudioclient/include/media/AudioTimestamp.h
@@ -154,10 +154,13 @@
std::string toString() const {
std::stringstream ss;
- ss << "BOOTTIME offset " << mTimebaseOffset[TIMEBASE_BOOTTIME] << "\n";
+ ss << "BOOTTIME offset " << mTimebaseOffset[TIMEBASE_BOOTTIME] << ": ExtendedTimestamp: ";
for (int i = 0; i < LOCATION_MAX; ++i) {
- ss << "ExtendedTimestamp[" << i << "] position: "
- << mPosition[i] << " time: " << mTimeNs[i] << "\n";
+ ss << "([" << i << "] position: "
+ << mPosition[i] << " time: " << mTimeNs[i] << ")";
+ if (i != LOCATION_MAX - 1) {
+ ss << ", ";
+ }
}
return ss.str();
}
diff --git a/media/libaudioclient/include/media/AudioTrack.h b/media/libaudioclient/include/media/AudioTrack.h
index de97863..330b5ee 100644
--- a/media/libaudioclient/include/media/AudioTrack.h
+++ b/media/libaudioclient/include/media/AudioTrack.h
@@ -835,18 +835,18 @@
*/
audio_port_handle_t getOutputDevice();
- /* Returns the ID of the audio device actually used by the output to which this AudioTrack is
+ /* Returns the IDs of the audio devices actually used by the output to which this AudioTrack is
* attached.
* When the AudioTrack is inactive, the device ID returned can be either:
- * - AUDIO_PORT_HANDLE_NONE if the AudioTrack is not attached to any output.
- * - The device ID used before paused or stopped.
+ * - An empty vector if the AudioTrack is not attached to any output.
+ * - The device IDs used before paused or stopped.
* - The device ID selected by audio policy manager of setOutputDevice() if the AudioTrack
* has not been started yet.
*
* Parameters:
* none.
*/
- audio_port_handle_t getRoutedDeviceId();
+ DeviceIdVector getRoutedDeviceIds();
/* Returns the unique session ID associated with this track.
*
@@ -1089,7 +1089,7 @@
// AudioSystem::AudioDeviceCallback> virtuals
virtual void onAudioDeviceUpdate(audio_io_handle_t audioIo,
- audio_port_handle_t deviceId);
+ const DeviceIdVector& deviceIds);
/* Obtain the pending duration in milliseconds for playback of pure PCM
* (mixable without embedded timing) data remaining in AudioTrack.
@@ -1258,7 +1258,7 @@
void restartIfDisabled();
- void updateRoutedDeviceId_l();
+ void updateRoutedDeviceIds_l();
/* Sets the Dual Mono mode presentation on the output device. */
status_t setDualMonoMode_l(audio_dual_mono_mode_t mode);
@@ -1482,9 +1482,9 @@
// Device requested by the application.
audio_port_handle_t mSelectedDeviceId = AUDIO_PORT_HANDLE_NONE;
- // Device actually selected by AudioPolicyManager: This may not match the app
+ // Devices actually selected by AudioPolicyManager: This may not match the app
// selection depending on other activity and connected devices.
- audio_port_handle_t mRoutedDeviceId = AUDIO_PORT_HANDLE_NONE;
+ DeviceIdVector mRoutedDeviceIds;
sp<media::VolumeHandler> mVolumeHandler;
diff --git a/media/libaudioclient/include/media/IAudioFlinger.h b/media/libaudioclient/include/media/IAudioFlinger.h
index a5f3217..8292eef 100644
--- a/media/libaudioclient/include/media/IAudioFlinger.h
+++ b/media/libaudioclient/include/media/IAudioFlinger.h
@@ -109,7 +109,7 @@
audio_output_flags_t flags;
size_t frameCount;
size_t notificationFrameCount;
- audio_port_handle_t selectedDeviceId;
+ DeviceIdVector selectedDeviceIds;
audio_session_t sessionId;
/* output */
@@ -226,18 +226,19 @@
* the preference panel, mostly.
*/
virtual status_t setStreamVolume(audio_stream_type_t stream, float value,
- audio_io_handle_t output) = 0;
+ bool muted, audio_io_handle_t output) = 0;
virtual status_t setStreamMute(audio_stream_type_t stream, bool muted) = 0;
/**
* Set volume for given AudioTrack port ids on specified output
* @param portIds to consider
* @param volume to set
+ * @param muted to set
* @param output to consider
* @return NO_ERROR if successful
*/
virtual status_t setPortsVolume(const std::vector<audio_port_handle_t>& portIds, float volume,
- audio_io_handle_t output) = 0;
+ bool muted, audio_io_handle_t output) = 0;
// set audio mode
virtual status_t setMode(audio_mode_t mode) = 0;
@@ -428,10 +429,10 @@
status_t setMasterBalance(float balance) override;
status_t getMasterBalance(float* balance) const override;
status_t setStreamVolume(audio_stream_type_t stream, float value,
- audio_io_handle_t output) override;
+ bool muted, audio_io_handle_t output) override;
status_t setStreamMute(audio_stream_type_t stream, bool muted) override;
status_t setPortsVolume(const std::vector<audio_port_handle_t>& portIds, float volume,
- audio_io_handle_t output) override;
+ bool muted, audio_io_handle_t output) override;
status_t setMode(audio_mode_t mode) override;
status_t setMicMute(bool state) override;
bool getMicMute() const override;
@@ -675,10 +676,10 @@
Status setMasterBalance(float balance) override;
Status getMasterBalance(float* _aidl_return) override;
Status setStreamVolume(media::audio::common::AudioStreamType stream,
- float value, int32_t output) override;
+ float value, bool muted, int32_t output) override;
Status setStreamMute(media::audio::common::AudioStreamType stream, bool muted) override;
- Status setPortsVolume(const std::vector<int32_t>& portIds, float volume, int32_t output)
- override;
+ Status setPortsVolume(const std::vector<int32_t>& portIds, float volume, bool muted,
+ int32_t output) override;
Status setMode(media::audio::common::AudioMode mode) override;
Status setMicMute(bool state) override;
Status getMicMute(bool* _aidl_return) override;
diff --git a/media/libaudioclient/include/media/PlayerBase.h b/media/libaudioclient/include/media/PlayerBase.h
index 5475f76..5df1a6e 100644
--- a/media/libaudioclient/include/media/PlayerBase.h
+++ b/media/libaudioclient/include/media/PlayerBase.h
@@ -22,6 +22,7 @@
#include <utils/Mutex.h>
#include "android/media/BnPlayer.h"
+#include "media/AudioContainers.h"
namespace android {
@@ -44,14 +45,14 @@
const media::VolumeShaperConfiguration& configuration,
const media::VolumeShaperOperation& operation) override;
- status_t startWithStatus(audio_port_handle_t deviceId);
+ status_t startWithStatus(const DeviceIdVector& deviceIds);
status_t pauseWithStatus();
status_t stopWithStatus();
//FIXME temporary method while some player state is outside of this class
- void reportEvent(player_state_t event, audio_port_handle_t deviceId);
+ void reportEvent(player_state_t event, const DeviceIdVector& deviceIds);
- void baseUpdateDeviceId(audio_port_handle_t deviceId);
+ void baseUpdateDeviceIds(const DeviceIdVector& deviceIds);
/**
* Updates the mapping in the AudioService between portId and piid
@@ -80,7 +81,7 @@
audio_unique_id_t mPIId;
private:
// report events to AudioService
- void servicePlayerEvent(player_state_t event, audio_port_handle_t deviceId);
+ void servicePlayerEvent(player_state_t event, const DeviceIdVector& deviceIds);
void serviceReleasePlayer();
// native interface to AudioService
@@ -91,7 +92,7 @@
player_state_t mLastReportedEvent;
Mutex mDeviceIdLock;
- audio_port_handle_t mLastReportedDeviceId;
+ DeviceIdVector mLastReportedDeviceIds GUARDED_BY(mDeviceIdLock);
};
} // namespace android
diff --git a/media/libaudioclient/include/media/TrackPlayerBase.h b/media/libaudioclient/include/media/TrackPlayerBase.h
index 8df9ff8..575b14c 100644
--- a/media/libaudioclient/include/media/TrackPlayerBase.h
+++ b/media/libaudioclient/include/media/TrackPlayerBase.h
@@ -60,7 +60,7 @@
public:
SelfAudioDeviceCallback(PlayerBase& self);
virtual void onAudioDeviceUpdate(audio_io_handle_t audioIo,
- audio_port_handle_t deviceId);
+ const DeviceIdVector& deviceIds);
private:
virtual ~SelfAudioDeviceCallback();
PlayerBase& mSelf;
diff --git a/media/libaudioclient/tests/audio_aidl_legacy_conversion_tests.cpp b/media/libaudioclient/tests/audio_aidl_legacy_conversion_tests.cpp
index aa6cb0d..2cb5f09 100644
--- a/media/libaudioclient/tests/audio_aidl_legacy_conversion_tests.cpp
+++ b/media/libaudioclient/tests/audio_aidl_legacy_conversion_tests.cpp
@@ -483,8 +483,27 @@
AudioDeviceAddress::make<AudioDeviceAddress::Tag::alsa>(
std::vector<int32_t>{1, 2}))));
+TEST(AnonymizedBluetoothAddressRoundTripTest, Legacy2Aidl2Legacy) {
+ const std::vector<uint8_t> sAnonymizedAidlAddress {0xFD, 0xFF, 0xFF, 0xFF, 0xAB, 0xCD};
+ const std::string sAnonymizedLegacyAddress = std::string("XX:XX:XX:XX:AB:CD");
+ auto device = legacy2aidl_audio_device_AudioDevice(AUDIO_DEVICE_OUT_BLUETOOTH_A2DP,
+ sAnonymizedLegacyAddress);
+ ASSERT_TRUE(device.ok());
+ ASSERT_EQ(AudioDeviceAddress::Tag::mac, device.value().address.getTag());
+ ASSERT_EQ(sAnonymizedAidlAddress, device.value().address.get<AudioDeviceAddress::mac>());
+
+ audio_devices_t legacyType;
+ std::string legacyAddress;
+ status_t status =
+ aidl2legacy_AudioDevice_audio_device(device.value(), &legacyType, &legacyAddress);
+ ASSERT_EQ(OK, status);
+ EXPECT_EQ(legacyType, AUDIO_DEVICE_OUT_BLUETOOTH_A2DP);
+ EXPECT_EQ(sAnonymizedLegacyAddress, legacyAddress);
+}
+
class AudioFormatDescriptionRoundTripTest : public testing::TestWithParam<AudioFormatDescription> {
};
+
TEST_P(AudioFormatDescriptionRoundTripTest, Aidl2Legacy2Aidl) {
const auto initial = GetParam();
auto conv = aidl2legacy_AudioFormatDescription_audio_format_t(initial);
diff --git a/media/libaudioclient/tests/audio_test_utils.cpp b/media/libaudioclient/tests/audio_test_utils.cpp
index 1599839..7d13939 100644
--- a/media/libaudioclient/tests/audio_test_utils.cpp
+++ b/media/libaudioclient/tests/audio_test_utils.cpp
@@ -27,12 +27,12 @@
#define MAX_WAIT_TIME_MS 5000
void OnAudioDeviceUpdateNotifier::onAudioDeviceUpdate(audio_io_handle_t audioIo,
- audio_port_handle_t deviceId) {
- ALOGI("%s: audioIo=%d deviceId=%d", __func__, audioIo, deviceId);
+ const DeviceIdVector& deviceIds) {
+ ALOGI("%s: audioIo=%d deviceIds=%s", __func__, audioIo, toString(deviceIds).c_str());
{
std::lock_guard lock(mMutex);
mAudioIo = audioIo;
- mDeviceId = deviceId;
+ mDeviceIds = deviceIds;
}
mCondition.notify_all();
}
@@ -41,20 +41,23 @@
std::unique_lock lock(mMutex);
android::base::ScopedLockAssertion lock_assertion(mMutex);
if (mAudioIo == AUDIO_IO_HANDLE_NONE ||
- (expDeviceId != AUDIO_PORT_HANDLE_NONE && expDeviceId != mDeviceId)) {
+ (expDeviceId != AUDIO_PORT_HANDLE_NONE &&
+ std::find(mDeviceIds.begin(), mDeviceIds.end(), expDeviceId) == mDeviceIds.end())) {
mCondition.wait_for(lock, std::chrono::milliseconds(500));
if (mAudioIo == AUDIO_IO_HANDLE_NONE ||
- (expDeviceId != AUDIO_PORT_HANDLE_NONE && expDeviceId != mDeviceId)) {
+ (expDeviceId != AUDIO_PORT_HANDLE_NONE &&
+ std::find(mDeviceIds.begin(), mDeviceIds.end(), expDeviceId) == mDeviceIds.end())) {
return TIMED_OUT;
}
}
return OK;
}
-std::pair<audio_io_handle_t, audio_port_handle_t>
-OnAudioDeviceUpdateNotifier::getLastPortAndDevice() const {
+std::pair<audio_io_handle_t, DeviceIdVector> OnAudioDeviceUpdateNotifier::getLastPortAndDevices()
+ const {
std::lock_guard lock(mMutex);
- return {mAudioIo, mDeviceId};
+ ALOGI("%s: audioIo=%d deviceIds=%s", __func__, mAudioIo, toString(mDeviceIds).c_str());
+ return {mAudioIo, mDeviceIds};
}
AudioPlayback::AudioPlayback(uint32_t sampleRate, audio_format_t format,
@@ -761,13 +764,15 @@
return BAD_VALUE;
}
-bool patchContainsOutputDevice(audio_port_handle_t deviceId, audio_patch patch) {
+// Check if the patch matches all the output devices in the deviceIds vector.
+bool patchMatchesOutputDevices(const DeviceIdVector& deviceIds, audio_patch patch) {
+ DeviceIdVector patchDeviceIds;
for (auto j = 0; j < patch.num_sinks; j++) {
- if (patch.sinks[j].type == AUDIO_PORT_TYPE_DEVICE && patch.sinks[j].id == deviceId) {
- return true;
+ if (patch.sinks[j].type == AUDIO_PORT_TYPE_DEVICE) {
+ patchDeviceIds.push_back(patch.sinks[j].id);
}
}
- return false;
+ return areDeviceIdsEqual(deviceIds, patchDeviceIds);
}
bool patchContainsInputDevice(audio_port_handle_t deviceId, audio_patch patch) {
@@ -779,10 +784,10 @@
return false;
}
-bool checkPatchPlayback(audio_io_handle_t audioIo, audio_port_handle_t deviceId) {
+bool checkPatchPlayback(audio_io_handle_t audioIo, const DeviceIdVector& deviceIds) {
struct audio_patch patch;
if (getPatchForOutputMix(audioIo, patch) == OK) {
- return patchContainsOutputDevice(deviceId, patch);
+ return patchMatchesOutputDevices(deviceIds, patch);
}
return false;
}
diff --git a/media/libaudioclient/tests/audio_test_utils.h b/media/libaudioclient/tests/audio_test_utils.h
index 022ecf3..9ccc7da 100644
--- a/media/libaudioclient/tests/audio_test_utils.h
+++ b/media/libaudioclient/tests/audio_test_utils.h
@@ -52,9 +52,9 @@
audio_port_v7& port);
status_t getPatchForOutputMix(audio_io_handle_t audioIo, audio_patch& patch);
status_t getPatchForInputMix(audio_io_handle_t audioIo, audio_patch& patch);
-bool patchContainsOutputDevice(audio_port_handle_t deviceId, audio_patch patch);
+bool patchMatchesOutputDevices(const DeviceIdVector& deviceIds, audio_patch patch);
bool patchContainsInputDevice(audio_port_handle_t deviceId, audio_patch patch);
-bool checkPatchPlayback(audio_io_handle_t audioIo, audio_port_handle_t deviceId);
+bool checkPatchPlayback(audio_io_handle_t audioIo, const DeviceIdVector& deviceIds);
bool checkPatchCapture(audio_io_handle_t audioIo, audio_port_handle_t deviceId);
std::string dumpPort(const audio_port_v7& port);
std::string dumpPortConfig(const audio_port_config& port);
@@ -62,13 +62,13 @@
class OnAudioDeviceUpdateNotifier : public AudioSystem::AudioDeviceCallback {
public:
- void onAudioDeviceUpdate(audio_io_handle_t audioIo, audio_port_handle_t deviceId) override;
+ void onAudioDeviceUpdate(audio_io_handle_t audioIo, const DeviceIdVector& deviceIds) override;
status_t waitForAudioDeviceCb(audio_port_handle_t expDeviceId = AUDIO_PORT_HANDLE_NONE);
- std::pair<audio_io_handle_t, audio_port_handle_t> getLastPortAndDevice() const;
+ std::pair<audio_io_handle_t, DeviceIdVector> getLastPortAndDevices() const;
private:
audio_io_handle_t mAudioIo GUARDED_BY(mMutex) = AUDIO_IO_HANDLE_NONE;
- audio_port_handle_t mDeviceId GUARDED_BY(mMutex) = AUDIO_PORT_HANDLE_NONE;
+ DeviceIdVector mDeviceIds GUARDED_BY(mMutex);
mutable std::mutex mMutex;
std::condition_variable mCondition;
};
diff --git a/media/libaudioclient/tests/audioeffect_analyser.cpp b/media/libaudioclient/tests/audioeffect_analyser.cpp
index 199fb8b..3df5fd8 100644
--- a/media/libaudioclient/tests/audioeffect_analyser.cpp
+++ b/media/libaudioclient/tests/audioeffect_analyser.cpp
@@ -119,7 +119,8 @@
CHECK_OK(capture->start(), "start recording failed")
CHECK_OK(capture->audioProcess(), "recording process failed")
CHECK_OK(cbCapture->waitForAudioDeviceCb(), "audio device callback notification timed out");
- if (port.id != capture->getAudioRecordHandle()->getRoutedDeviceId()) {
+ DeviceIdVector routedDeviceIds = capture->getAudioRecordHandle()->getRoutedDeviceIds();
+ if (routedDeviceIds.empty() || port.id != routedDeviceIds[0]) {
CHECK_OK(BAD_VALUE, "Capture NOT routed on expected port")
}
CHECK_OK(getPortByAttributes(AUDIO_PORT_ROLE_SINK, AUDIO_PORT_TYPE_DEVICE,
diff --git a/media/libaudioclient/tests/audiorecord_tests.cpp b/media/libaudioclient/tests/audiorecord_tests.cpp
index f2fee8b..550ce6c 100644
--- a/media/libaudioclient/tests/audiorecord_tests.cpp
+++ b/media/libaudioclient/tests/audiorecord_tests.cpp
@@ -123,12 +123,12 @@
EXPECT_EQ(OK, mAC->getAudioRecordHandle()->addAudioDeviceCallback(cb));
EXPECT_EQ(OK, mAC->start()) << "record creation failed";
EXPECT_EQ(OK, cb->waitForAudioDeviceCb());
- const auto [oldAudioIo, oldDeviceId] = cbOld->getLastPortAndDevice();
+ const auto [oldAudioIo, oldDeviceIds] = cbOld->getLastPortAndDevices();
EXPECT_EQ(AUDIO_IO_HANDLE_NONE, oldAudioIo);
- EXPECT_EQ(AUDIO_PORT_HANDLE_NONE, oldDeviceId);
- const auto [audioIo, deviceId] = cb->getLastPortAndDevice();
+ EXPECT_TRUE(oldDeviceIds.empty());
+ const auto [audioIo, deviceIds] = cb->getLastPortAndDevices();
EXPECT_NE(AUDIO_IO_HANDLE_NONE, audioIo);
- EXPECT_NE(AUDIO_PORT_HANDLE_NONE, deviceId);
+ EXPECT_FALSE(deviceIds.empty());
EXPECT_EQ(BAD_VALUE, mAC->getAudioRecordHandle()->removeAudioDeviceCallback(nullptr));
EXPECT_EQ(INVALID_OPERATION, mAC->getAudioRecordHandle()->removeAudioDeviceCallback(cbOld));
EXPECT_EQ(OK, mAC->getAudioRecordHandle()->removeAudioDeviceCallback(cb));
diff --git a/media/libaudioclient/tests/audiorouting_tests.cpp b/media/libaudioclient/tests/audiorouting_tests.cpp
index a3ab9d2..7957c10 100644
--- a/media/libaudioclient/tests/audiorouting_tests.cpp
+++ b/media/libaudioclient/tests/audiorouting_tests.cpp
@@ -64,8 +64,8 @@
EXPECT_EQ(OK, ap->start()) << "audio track start failed";
EXPECT_EQ(OK, ap->onProcess());
EXPECT_EQ(OK, cb->waitForAudioDeviceCb());
- const auto [audioIo, deviceId] = cb->getLastPortAndDevice();
- EXPECT_TRUE(checkPatchPlayback(audioIo, deviceId));
+ const auto [audioIo, deviceIds] = cb->getLastPortAndDevices();
+ EXPECT_TRUE(checkPatchPlayback(audioIo, deviceIds));
EXPECT_NE(0, ap->getAudioTrackHandle()->getFlags() & output_flags[i]);
audio_patch patch;
EXPECT_EQ(OK, getPatchForOutputMix(audioIo, patch));
@@ -127,8 +127,8 @@
// capture should be routed to submix in port
EXPECT_EQ(OK, capture->start()) << "start recording failed";
EXPECT_EQ(OK, cbCapture->waitForAudioDeviceCb());
- EXPECT_EQ(port.id, capture->getAudioRecordHandle()->getRoutedDeviceId())
- << "Capture NOT routed on expected port";
+ DeviceIdVector routedDeviceIds = capture->getAudioRecordHandle()->getRoutedDeviceIds();
+ EXPECT_EQ(port.id, routedDeviceIds[0]) << "Capture NOT routed on expected port";
// capture start should create submix out port
status_t status = getPortByAttributes(AUDIO_PORT_ROLE_SINK, AUDIO_PORT_TYPE_DEVICE,
@@ -138,8 +138,8 @@
// playback should be routed to submix out as long as capture is active
EXPECT_EQ(OK, playback->start()) << "audio track start failed";
EXPECT_EQ(OK, cbPlayback->waitForAudioDeviceCb());
- EXPECT_EQ(port.id, playback->getAudioTrackHandle()->getRoutedDeviceId())
- << "Playback NOT routed on expected port";
+ routedDeviceIds = playback->getAudioTrackHandle()->getRoutedDeviceIds();
+ EXPECT_EQ(port.id, routedDeviceIds[0]) << "Playback NOT routed on expected port";
capture->stop();
playback->stop();
@@ -235,13 +235,13 @@
// launch
EXPECT_EQ(OK, captureA->start()) << "start recording failed";
EXPECT_EQ(OK, cbCaptureA->waitForAudioDeviceCb());
- EXPECT_EQ(port.id, captureA->getAudioRecordHandle()->getRoutedDeviceId())
- << "Capture NOT routed on expected port";
+ DeviceIdVector routedDeviceIds = captureA->getAudioRecordHandle()->getRoutedDeviceIds();
+ EXPECT_EQ(port.id, routedDeviceIds[0]) << "Capture NOT routed on expected port";
EXPECT_EQ(OK, captureB->start()) << "start recording failed";
EXPECT_EQ(OK, cbCaptureB->waitForAudioDeviceCb());
- EXPECT_EQ(port_mix.id, captureB->getAudioRecordHandle()->getRoutedDeviceId())
- << "Capture NOT routed on expected port";
+ routedDeviceIds = captureB->getAudioRecordHandle()->getRoutedDeviceIds();
+ EXPECT_EQ(port_mix.id, routedDeviceIds[0]) << "Capture NOT routed on expected port";
// as record started, expect submix out ports to be connected
status = getPortByAttributes(AUDIO_PORT_ROLE_SINK, AUDIO_PORT_TYPE_DEVICE,
@@ -255,8 +255,8 @@
// check if playback routed to desired port
EXPECT_EQ(OK, playback->start());
EXPECT_EQ(OK, cbPlayback->waitForAudioDeviceCb());
- EXPECT_EQ(port_mix.id, playback->getAudioTrackHandle()->getRoutedDeviceId())
- << "Playback NOT routed on expected port";
+ routedDeviceIds = playback->getAudioTrackHandle()->getRoutedDeviceIds();
+ EXPECT_EQ(port_mix.id, routedDeviceIds[0]) << "Playback NOT routed on expected port";
captureB->stop();
@@ -282,8 +282,8 @@
playback->onProcess();
// as captureA is active, it should re route to legacy submix
EXPECT_EQ(OK, cbPlayback->waitForAudioDeviceCb(port.id));
- EXPECT_EQ(port.id, playback->getAudioTrackHandle()->getRoutedDeviceId())
- << "Playback NOT routed on expected port";
+ routedDeviceIds = playback->getAudioTrackHandle()->getRoutedDeviceIds();
+ EXPECT_EQ(port.id, routedDeviceIds[0]) << "Playback NOT routed on expected port";
captureA->stop();
playback->stop();
diff --git a/media/libaudioclient/tests/audiosystem_tests.cpp b/media/libaudioclient/tests/audiosystem_tests.cpp
index 742ca48..31cab78 100644
--- a/media/libaudioclient/tests/audiosystem_tests.cpp
+++ b/media/libaudioclient/tests/audiosystem_tests.cpp
@@ -108,7 +108,7 @@
// UNIT TESTS
TEST_F(AudioSystemTest, CheckServerSideValues) {
ASSERT_NO_FATAL_FAILURE(createPlaybackSession());
- const auto [pbAudioIo, _] = mCbPlayback->getLastPortAndDevice();
+ const auto [pbAudioIo, _] = mCbPlayback->getLastPortAndDevices();
EXPECT_GT(mAF->sampleRate(pbAudioIo), 0);
EXPECT_NE(mAF->format(pbAudioIo), AUDIO_FORMAT_INVALID);
EXPECT_GT(mAF->frameCount(pbAudioIo), 0);
@@ -122,7 +122,7 @@
EXPECT_LE(mAF->latency(pbAudioIo), mPlayback->getAudioTrackHandle()->latency());
ASSERT_NO_FATAL_FAILURE(createRecordSession());
- const auto [recAudioIo, __] = mCbRecord->getLastPortAndDevice();
+ const auto [recAudioIo, __] = mCbRecord->getLastPortAndDevices();
EXPECT_GT(mAF->sampleRate(recAudioIo), 0);
// EXPECT_NE(mAF->format(recAudioIo), AUDIO_FORMAT_INVALID);
EXPECT_GT(mAF->frameCount(recAudioIo), 0);
diff --git a/media/libaudioclient/tests/audiotrack_tests.cpp b/media/libaudioclient/tests/audiotrack_tests.cpp
index cf7d926..d283c6c 100644
--- a/media/libaudioclient/tests/audiotrack_tests.cpp
+++ b/media/libaudioclient/tests/audiotrack_tests.cpp
@@ -157,20 +157,21 @@
EXPECT_EQ(OK, ap->start()) << "audio track start failed";
EXPECT_EQ(OK, ap->onProcess());
EXPECT_EQ(OK, cb->waitForAudioDeviceCb());
- const auto [oldAudioIo, oldDeviceId] = cbOld->getLastPortAndDevice();
+ const auto [oldAudioIo, oldDeviceIds] = cbOld->getLastPortAndDevices();
EXPECT_EQ(AUDIO_IO_HANDLE_NONE, oldAudioIo);
- EXPECT_EQ(AUDIO_PORT_HANDLE_NONE, oldDeviceId);
- const auto [audioIo, deviceId] = cb->getLastPortAndDevice();
+ EXPECT_TRUE(oldDeviceIds.empty());
+ const auto [audioIo, deviceIds] = cb->getLastPortAndDevices();
EXPECT_NE(AUDIO_IO_HANDLE_NONE, audioIo);
- EXPECT_NE(AUDIO_PORT_HANDLE_NONE, deviceId);
+ EXPECT_FALSE(deviceIds.empty());
EXPECT_EQ(audioIo, ap->getAudioTrackHandle()->getOutput());
- EXPECT_EQ(deviceId, ap->getAudioTrackHandle()->getRoutedDeviceId());
+ DeviceIdVector routedDeviceIds = ap->getAudioTrackHandle()->getRoutedDeviceIds();
+ EXPECT_TRUE(areDeviceIdsEqual(routedDeviceIds, deviceIds));
String8 keys;
keys = ap->getAudioTrackHandle()->getParameters(keys);
if (!keys.empty()) {
std::cerr << "track parameters :: " << keys << std::endl;
}
- EXPECT_TRUE(checkPatchPlayback(audioIo, deviceId));
+ EXPECT_TRUE(checkPatchPlayback(audioIo, deviceIds));
EXPECT_EQ(BAD_VALUE, ap->getAudioTrackHandle()->removeAudioDeviceCallback(nullptr));
EXPECT_EQ(INVALID_OPERATION, ap->getAudioTrackHandle()->removeAudioDeviceCallback(cbOld));
EXPECT_EQ(OK, ap->getAudioTrackHandle()->removeAudioDeviceCallback(cb));
diff --git a/media/libaudiofoundation/AudioContainers.cpp b/media/libaudiofoundation/AudioContainers.cpp
index e1265cf..f3d295b 100644
--- a/media/libaudiofoundation/AudioContainers.cpp
+++ b/media/libaudiofoundation/AudioContainers.cpp
@@ -130,6 +130,33 @@
return ss.str();
}
+std::string toString(const DeviceIdVector& deviceIds) {
+ if (deviceIds.empty()) {
+ return "AUDIO_PORT_HANDLE_NONE";
+ }
+ std::stringstream ss;
+ for (auto it = deviceIds.begin(); it != deviceIds.end(); ++it) {
+ if (it != deviceIds.begin()) {
+ ss << ", ";
+ }
+ ss << *it;
+ }
+ return ss.str();
+}
+
+audio_port_handle_t getFirstDeviceId(const DeviceIdVector& deviceIds) {
+ if (deviceIds.empty()) {
+ return AUDIO_PORT_HANDLE_NONE;
+ }
+ return deviceIds[0];
+}
+
+bool areDeviceIdsEqual(const DeviceIdVector& first, const DeviceIdVector& second) {
+ const std::set<audio_port_handle_t> firstSet(first.begin(), first.end());
+ const std::set<audio_port_handle_t> secondSet(second.begin(), second.end());
+ return firstSet == secondSet;
+}
+
AudioProfileAttributesMultimap createAudioProfilesAttrMap(audio_profile profiles[],
uint32_t first,
uint32_t last) {
diff --git a/media/libaudiofoundation/include/media/AudioContainers.h b/media/libaudiofoundation/include/media/AudioContainers.h
index 46fd620..b6c0444 100644
--- a/media/libaudiofoundation/include/media/AudioContainers.h
+++ b/media/libaudiofoundation/include/media/AudioContainers.h
@@ -34,6 +34,7 @@
using SampleRateSet = std::set<uint32_t>;
using MixerBehaviorSet = std::set<audio_mixer_behavior_t>;
+using DeviceIdVector = std::vector<audio_port_handle_t>;
using FormatVector = std::vector<audio_format_t>;
using AudioProfileAttributesMultimap =
std::multimap<audio_format_t, std::pair<SampleRateSet, ChannelMaskSet>>;
@@ -139,6 +140,21 @@
}
/**
+ * Returns human readable string for a vector of device ids.
+ */
+std::string toString(const DeviceIdVector& deviceIds);
+
+/**
+ * Returns the first device id of a vector of device ids or AUDIO_PORT_HANDLE_NONE when it is empty.
+ */
+audio_port_handle_t getFirstDeviceId(const DeviceIdVector& deviceIds);
+
+/**
+ * Returns whether two vectors of device ids have the same elements.
+ */
+bool areDeviceIdsEqual(const DeviceIdVector& first, const DeviceIdVector& second);
+
+/**
* Create audio profile attributes map by given audio profile array from the range of [first, last).
*
* @param profiles the array of audio profiles.
diff --git a/media/libaudiohal/impl/Android.bp b/media/libaudiohal/impl/Android.bp
index 0dd0f74..00f3929 100644
--- a/media/libaudiohal/impl/Android.bp
+++ b/media/libaudiohal/impl/Android.bp
@@ -206,6 +206,7 @@
"android.hardware.common-V2-ndk",
"android.hardware.common.fmq-V1-ndk",
"av-audio-types-aidl-ndk",
+ "com.android.media.audio-aconfig-cc",
"libaudio_aidl_conversion_common_cpp",
"libaudio_aidl_conversion_common_ndk",
"libaudio_aidl_conversion_common_ndk_cpp",
diff --git a/media/libaudiohal/impl/EffectProxy.cpp b/media/libaudiohal/impl/EffectProxy.cpp
index c7c6536..ac3975e 100644
--- a/media/libaudiohal/impl/EffectProxy.cpp
+++ b/media/libaudiohal/impl/EffectProxy.cpp
@@ -23,6 +23,7 @@
#include <fmq/AidlMessageQueue.h>
#include <system/audio_aidl_utils.h>
+#include <system/audio_effects/aidl_effects_utils.h>
#include <utils/Log.h>
#include "EffectProxy.h"
@@ -41,7 +42,8 @@
EffectProxy::EffectProxy(const AudioUuid& uuid, const std::vector<Descriptor>& descriptors,
const std::shared_ptr<IFactory>& factory)
- : mDescriptorCommon(buildDescriptorCommon(uuid, descriptors)),
+ : mSharedCapability(buildDescriptorCapability(descriptors)),
+ mDescriptorCommon(buildDescriptorCommon(uuid, descriptors)),
mSubEffects(
[](const std::vector<Descriptor>& descs, const std::shared_ptr<IFactory>& factory) {
std::vector<SubEffect> subEffects;
@@ -163,6 +165,7 @@
ndk::ScopedAStatus EffectProxy::getDescriptor(Descriptor* desc) {
*desc = mSubEffects[mActiveSubIdx].descriptor;
+ desc->capability = mSharedCapability;
desc->common = mDescriptorCommon;
return ndk::ScopedAStatus::ok();
}
@@ -182,6 +185,7 @@
}
desc->common = buildDescriptorCommon(uuid, subEffectDescs);
+ desc->capability = buildDescriptorCapability(subEffectDescs);
return ndk::ScopedAStatus::ok();
}
@@ -216,6 +220,20 @@
return swCommon;
}
+// Build a shared Descriptor capability with all sub-effects.
+Capability EffectProxy::buildDescriptorCapability(const std::vector<Descriptor>& subEffectDescs) {
+ std::optional<Capability> cap = subEffectDescs[0].capability;
+ for (size_t i = 1; i < subEffectDescs.size(); i++) {
+ cap = findSharedCapability(cap.value(), subEffectDescs[i].capability);
+ if (!cap) {
+ ALOGE("%s failed to find the shared capability at %zu", __func__, i);
+ return subEffectDescs[0].capability;
+ }
+ }
+
+ return cap.value();
+}
+
// Handle with active sub-effect first, only send to other sub-effects when success
ndk::ScopedAStatus EffectProxy::command(CommandId id) {
return runWithActiveSubEffectThenOthers(
@@ -323,6 +341,8 @@
prefixSpace += " ";
base::StringAppendF(&ss, "%sDescriptorCommon: %s\n", prefixSpace.c_str(),
mDescriptorCommon.toString().c_str());
+ base::StringAppendF(&ss, "%sDescriptorCapability: %s\n", prefixSpace.c_str(),
+ mSharedCapability.toString().c_str());
base::StringAppendF(&ss, "%sActiveSubIdx: %zu\n", prefixSpace.c_str(), mActiveSubIdx);
base::StringAppendF(&ss, "%sAllSubEffects:\n", prefixSpace.c_str());
for (size_t i = 0; i < mSubEffects.size(); i++) {
diff --git a/media/libaudiohal/impl/EffectProxy.h b/media/libaudiohal/impl/EffectProxy.h
index 9b9e8f1..6736104 100644
--- a/media/libaudiohal/impl/EffectProxy.h
+++ b/media/libaudiohal/impl/EffectProxy.h
@@ -108,6 +108,8 @@
std::string toString(size_t indent = 0) const;
private:
+ // The shared capability of all sub-effects
+ const ::aidl::android::hardware::audio::effect::Capability mSharedCapability;
// Proxy descriptor common part, copy from one sub-effect, and update the implementation UUID to
// proxy UUID, proxy descriptor capability part comes from the active sub-effect capability
const ::aidl::android::hardware::audio::effect::Descriptor::Common mDescriptorCommon;
@@ -146,6 +148,11 @@
const std::vector<::aidl::android::hardware::audio::effect::Descriptor>&
subEffectDescs);
+ // build a shared capability with all sub-effect descriptors
+ static ::aidl::android::hardware::audio::effect::Capability buildDescriptorCapability(
+ const std::vector<::aidl::android::hardware::audio::effect::Descriptor>&
+ subEffectDescs);
+
// close and release all sub-effects
~EffectProxy();
};
diff --git a/media/libaudiohal/impl/EffectsFactoryHalAidl.cpp b/media/libaudiohal/impl/EffectsFactoryHalAidl.cpp
index 2753906..ac69b26 100644
--- a/media/libaudiohal/impl/EffectsFactoryHalAidl.cpp
+++ b/media/libaudiohal/impl/EffectsFactoryHalAidl.cpp
@@ -25,10 +25,12 @@
#include <error/expected_utils.h>
#include <aidl/android/media/audio/common/AudioStreamType.h>
#include <android/binder_manager.h>
+#include <com_android_media_audio.h>
#include <media/AidlConversionCppNdk.h>
#include <media/AidlConversionEffect.h>
#include <system/audio.h>
#include <system/audio_aidl_utils.h>
+#include <system/audio_effects/effect_uuid.h>
#include <utils/Log.h>
#include "AidlUtils.h"
@@ -68,6 +70,7 @@
std::vector<Descriptor> list;
if (mFactory) {
mFactory->queryEffects(std::nullopt, std::nullopt, std::nullopt, &list).isOk();
+ filterHalDescriptors(list);
}
return list;
}()),
@@ -180,6 +183,11 @@
AudioUuid aidlUuid =
VALUE_OR_RETURN_STATUS(::aidl::android::legacy2aidl_audio_uuid_t_AudioUuid(*uuid));
+ if (!com_android_media_audio_audio_eraser_effect() && isAudioEraser(aidlUuid)) {
+ ALOGE("%s Audio eraser effect not supported yet", __func__);
+ return BAD_VALUE;
+ }
+
std::shared_ptr<IEffect> aidlEffect;
// Use EffectProxy interface instead of IFactory to create
const bool isProxy = isProxyEffect(aidlUuid);
@@ -367,6 +375,23 @@
return 0;
}
+
+bool EffectsFactoryHalAidl::isAudioEraser(const AudioUuid& uuid) {
+ return uuid == getEffectTypeUuidEraser();
+}
+
+void EffectsFactoryHalAidl::filterHalDescriptors(std::vector<Descriptor>& descs) {
+ if (!com_android_media_audio_audio_eraser_effect()) {
+ descs.erase(std::remove_if(descs.begin(), descs.end(),
+ [](const Descriptor& desc) {
+ return isAudioEraser(desc.common.id.type);
+ }),
+ descs.end());
+ }
+
+ return;
+}
+
} // namespace effect
// When a shared library is built from a static library, even explicit
diff --git a/media/libaudiohal/impl/EffectsFactoryHalAidl.h b/media/libaudiohal/impl/EffectsFactoryHalAidl.h
index 3b8628c..a3cd165 100644
--- a/media/libaudiohal/impl/EffectsFactoryHalAidl.h
+++ b/media/libaudiohal/impl/EffectsFactoryHalAidl.h
@@ -94,6 +94,11 @@
std::vector<effect_descriptor_t>* descriptors);
bool isProxyEffect(const aidl::android::media::audio::common::AudioUuid& uuid) const;
+
+ static bool isAudioEraser(const aidl::android::media::audio::common::AudioUuid& uuid);
+
+ // filter out descriptors which cannot be supported by the framework
+ static void filterHalDescriptors(std::vector<Descriptor>& descs);
};
} // namespace effect
diff --git a/media/libaudiohal/impl/effectsAidlConversion/AidlConversionDynamicsProcessing.cpp b/media/libaudiohal/impl/effectsAidlConversion/AidlConversionDynamicsProcessing.cpp
index f77c093..711050d 100644
--- a/media/libaudiohal/impl/effectsAidlConversion/AidlConversionDynamicsProcessing.cpp
+++ b/media/libaudiohal/impl/effectsAidlConversion/AidlConversionDynamicsProcessing.cpp
@@ -25,6 +25,7 @@
#include <media/AidlConversionNdk.h>
#include <media/AidlConversionEffect.h>
#include <system/audio_effect.h>
+#include <system/audio_effects/aidl_effects_utils.h>
#include <system/audio_effects/effect_dynamicsprocessing.h>
#include <Utils.h>
#include <utils/Log.h>
@@ -38,8 +39,10 @@
using ::aidl::android::getParameterSpecificField;
using ::aidl::android::aidl_utils::statusTFromBinderStatus;
using ::aidl::android::hardware::audio::effect::Capability;
+using ::aidl::android::hardware::audio::effect::clampParameter;
using ::aidl::android::hardware::audio::effect::DynamicsProcessing;
using ::aidl::android::hardware::audio::effect::Parameter;
+using ::aidl::android::hardware::audio::effect::Range;
using ::aidl::android::hardware::audio::effect::toString;
using ::aidl::android::hardware::audio::effect::VendorExtension;
using ::android::status_t;
@@ -126,7 +129,14 @@
}
}
- return statusTFromBinderStatus(mEffect->setParameter(aidlParam));
+ std::optional<Parameter> clamped =
+ clampParameter<Range::dynamicsProcessing, Parameter::Specific::dynamicsProcessing>(
+ aidlParam, getDescriptor().capability);
+ if (!clamped) {
+ ALOGE("%s failed to clamp parameters: %s", __func__, aidlParam.toString().c_str());
+ return BAD_VALUE;
+ }
+ return statusTFromBinderStatus(mEffect->setParameter(clamped.value()));
}
status_t AidlConversionDp::getParameter(EffectParamWriter& param) {
diff --git a/media/libaudiohal/tests/EffectHalVersionCompatibility_test.cpp b/media/libaudiohal/tests/EffectHalVersionCompatibility_test.cpp
index e8731ea..c11f908 100644
--- a/media/libaudiohal/tests/EffectHalVersionCompatibility_test.cpp
+++ b/media/libaudiohal/tests/EffectHalVersionCompatibility_test.cpp
@@ -83,6 +83,7 @@
{Parameter::Id::visualizerTag, 1},
{Parameter::Id::volumeTag, 1},
{Parameter::Id::spatializerTag, 2},
+ {Parameter::Id::eraserTag, 3},
};
// Tags defined Parameter::Specific union.
static const std::unordered_map<Parameter::Specific::Tag, int /* version */>
@@ -104,6 +105,7 @@
{Parameter::Specific::visualizer, 1},
{Parameter::Specific::volume, 1},
{Parameter::Specific::spatializer, 2},
+ {Parameter::Specific::eraser, 3},
};
class MockFactory : public IFactory {
@@ -223,6 +225,7 @@
case Parameter::Id::virtualizerTag:
case Parameter::Id::visualizerTag:
case Parameter::Id::volumeTag:
+ case Parameter::Id::eraserTag:
FALLTHROUGH_INTENDED;
case Parameter::Id::spatializerTag: {
if (kParamIdEffectVersionMap.find(idTag) != kParamIdEffectVersionMap.end() &&
diff --git a/services/audiopolicy/permission/Android.bp b/media/libaudiopermission/Android.bp
similarity index 92%
rename from services/audiopolicy/permission/Android.bp
rename to media/libaudiopermission/Android.bp
index cfbeaae..7275fd7 100644
--- a/services/audiopolicy/permission/Android.bp
+++ b/media/libaudiopermission/Android.bp
@@ -4,13 +4,13 @@
}
cc_library_headers {
- name: "audiopermissioncontroller_headers",
+ name: "libaudiopermission_headers",
host_supported: true,
export_include_dirs: ["include"],
}
cc_library {
- name: "audiopermissioncontroller",
+ name: "libaudiopermission",
srcs: [
"NativePermissionController.cpp",
@@ -83,14 +83,14 @@
}
cc_test {
- name: "audiopermissioncontroller_test",
+ name: "libaudiopermission_test",
host_supported: true,
defaults: [
"libmediautils_tests_config",
],
static_libs: [
"audio-permission-aidl-cpp",
- "audiopermissioncontroller",
+ "libaudiopermission",
"framework-permission-aidl-cpp",
"libgmock",
],
diff --git a/services/audiopolicy/permission/NativePermissionController.cpp b/media/libaudiopermission/NativePermissionController.cpp
similarity index 100%
rename from services/audiopolicy/permission/NativePermissionController.cpp
rename to media/libaudiopermission/NativePermissionController.cpp
diff --git a/services/audiopolicy/permission/ValidatedAttributionSourceState.cpp b/media/libaudiopermission/ValidatedAttributionSourceState.cpp
similarity index 100%
rename from services/audiopolicy/permission/ValidatedAttributionSourceState.cpp
rename to media/libaudiopermission/ValidatedAttributionSourceState.cpp
diff --git a/services/audiopolicy/permission/include/media/IPermissionProvider.h b/media/libaudiopermission/include/media/IPermissionProvider.h
similarity index 100%
rename from services/audiopolicy/permission/include/media/IPermissionProvider.h
rename to media/libaudiopermission/include/media/IPermissionProvider.h
diff --git a/services/audiopolicy/permission/include/media/NativePermissionController.h b/media/libaudiopermission/include/media/NativePermissionController.h
similarity index 100%
rename from services/audiopolicy/permission/include/media/NativePermissionController.h
rename to media/libaudiopermission/include/media/NativePermissionController.h
diff --git a/services/audiopolicy/permission/include/media/ValidatedAttributionSourceState.h b/media/libaudiopermission/include/media/ValidatedAttributionSourceState.h
similarity index 100%
rename from services/audiopolicy/permission/include/media/ValidatedAttributionSourceState.h
rename to media/libaudiopermission/include/media/ValidatedAttributionSourceState.h
diff --git a/services/audiopolicy/permission/tests/NativePermissionControllerTest.cpp b/media/libaudiopermission/tests/NativePermissionControllerTest.cpp
similarity index 100%
rename from services/audiopolicy/permission/tests/NativePermissionControllerTest.cpp
rename to media/libaudiopermission/tests/NativePermissionControllerTest.cpp
diff --git a/services/audiopolicy/permission/tests/ValidatedAttributionSourceStateTest.cpp b/media/libaudiopermission/tests/ValidatedAttributionSourceStateTest.cpp
similarity index 100%
rename from services/audiopolicy/permission/tests/ValidatedAttributionSourceStateTest.cpp
rename to media/libaudiopermission/tests/ValidatedAttributionSourceStateTest.cpp
diff --git a/media/libeffects/lvm/wrapper/Aidl/BundleContext.cpp b/media/libeffects/lvm/wrapper/Aidl/BundleContext.cpp
index d5e3cf7..53d4311 100644
--- a/media/libeffects/lvm/wrapper/Aidl/BundleContext.cpp
+++ b/media/libeffects/lvm/wrapper/Aidl/BundleContext.cpp
@@ -90,6 +90,12 @@
}
}
+RetCode BundleContext::setCommon(const Parameter::Common& common) {
+ RetCode ret = EffectContext::setCommon(common);
+ RETURN_VALUE_IF(ret != RetCode::SUCCESS, ret, " setCommonFailed");
+ return init();
+}
+
RetCode BundleContext::enable() {
if (mEnabled) return RetCode::ERROR_ILLEGAL_PARAMETER;
// Bass boost or Virtualizer can be temporarily disabled if playing over device speaker due to
diff --git a/media/libeffects/lvm/wrapper/Aidl/BundleContext.h b/media/libeffects/lvm/wrapper/Aidl/BundleContext.h
index e5ab40d..d5de5bd 100644
--- a/media/libeffects/lvm/wrapper/Aidl/BundleContext.h
+++ b/media/libeffects/lvm/wrapper/Aidl/BundleContext.h
@@ -35,6 +35,8 @@
void deInit();
lvm::BundleEffectType getBundleType() const { return mType; }
+ RetCode setCommon(const Parameter::Common& common) override;
+
RetCode enable() override;
RetCode enableOperatingMode();
RetCode disable() override;
diff --git a/media/libmedia/Android.bp b/media/libmedia/Android.bp
index ee6ad00..894c459 100644
--- a/media/libmedia/Android.bp
+++ b/media/libmedia/Android.bp
@@ -104,8 +104,8 @@
],
shared_libs: [
- "android.hidl.token@1.0-utils",
"android.hardware.media.omx@1.0",
+ "android.hidl.token@1.0-utils",
"libbinder",
"libcutils",
"libhidlbase",
@@ -116,8 +116,8 @@
],
export_shared_lib_headers: [
- "android.hidl.token@1.0-utils",
"android.hardware.media.omx@1.0",
+ "android.hidl.token@1.0-utils",
"libstagefright_foundation",
"libui",
],
@@ -138,15 +138,15 @@
],
cflags: [
+ "-Wall",
"-Werror",
"-Wno-error=deprecated-declarations",
- "-Wall",
],
sanitize: {
misc_undefined: [
- "unsigned-integer-overflow",
"signed-integer-overflow",
+ "unsigned-integer-overflow",
],
cfi: true,
},
@@ -197,15 +197,15 @@
],
cflags: [
+ "-Wall",
"-Werror",
"-Wno-error=deprecated-declarations",
- "-Wall",
],
sanitize: {
misc_undefined: [
- "unsigned-integer-overflow",
"signed-integer-overflow",
+ "unsigned-integer-overflow",
],
cfi: true,
},
@@ -232,15 +232,15 @@
],
cflags: [
+ "-Wall",
"-Werror",
"-Wno-error=deprecated-declarations",
- "-Wall",
],
sanitize: {
misc_undefined: [
- "unsigned-integer-overflow",
"signed-integer-overflow",
+ "unsigned-integer-overflow",
],
cfi: true,
},
@@ -281,15 +281,15 @@
],
cflags: [
+ "-Wall",
"-Werror",
"-Wno-error=deprecated-declarations",
- "-Wall",
],
sanitize: {
misc_undefined: [
- "unsigned-integer-overflow",
"signed-integer-overflow",
+ "unsigned-integer-overflow",
],
cfi: true,
},
@@ -325,15 +325,15 @@
],
cflags: [
+ "-Wall",
"-Werror",
"-Wno-error=deprecated-declarations",
- "-Wall",
],
sanitize: {
misc_undefined: [
- "unsigned-integer-overflow",
"signed-integer-overflow",
+ "unsigned-integer-overflow",
],
cfi: true,
},
@@ -348,35 +348,35 @@
srcs: [
":mediaextractorservice_aidl",
- "IDataSource.cpp",
"BufferingSettings.cpp",
- "mediaplayer.cpp",
+ "CharacterEncodingDetector.cpp",
+ "IDataSource.cpp",
+ "IMediaDeathNotifier.cpp",
+ "IMediaExtractor.cpp",
"IMediaHTTPConnection.cpp",
"IMediaHTTPService.cpp",
- "IMediaExtractor.cpp",
- "IMediaPlayerService.cpp",
- "IMediaPlayerClient.cpp",
- "IMediaRecorderClient.cpp",
+ "IMediaMetadataRetriever.cpp",
"IMediaPlayer.cpp",
+ "IMediaPlayerClient.cpp",
+ "IMediaPlayerService.cpp",
"IMediaRecorder.cpp",
+ "IMediaRecorderClient.cpp",
"IMediaSource.cpp",
"IRemoteDisplay.cpp",
"IRemoteDisplayClient.cpp",
"IStreamSource.cpp",
- "Metadata.cpp",
- "mediarecorder.cpp",
- "IMediaMetadataRetriever.cpp",
- "mediametadataretriever.cpp",
- "MediaScanner.cpp",
- "MediaScannerClient.cpp",
- "CharacterEncodingDetector.cpp",
- "IMediaDeathNotifier.cpp",
"MediaProfiles.cpp",
"MediaResource.cpp",
"MediaResourcePolicy.cpp",
- "StringArray.cpp",
- "NdkMediaFormatPriv.cpp",
+ "MediaScanner.cpp",
+ "MediaScannerClient.cpp",
+ "Metadata.cpp",
"NdkMediaErrorPriv.cpp",
+ "NdkMediaFormatPriv.cpp",
+ "StringArray.cpp",
+ "mediametadataretriever.cpp",
+ "mediaplayer.cpp",
+ "mediarecorder.cpp",
],
aidl: {
@@ -385,55 +385,57 @@
},
header_libs: [
+ "jni_headers",
"libstagefright_headers",
"media_ndk_headers",
- "jni_headers",
],
export_header_lib_headers: [
+ "jni_headers",
"libstagefright_headers",
"media_ndk_headers",
- "jni_headers",
],
shared_libs: [
"android.hidl.token@1.0-utils",
"audioclient-types-aidl-cpp",
"av-types-aidl-cpp",
- "liblog",
- "libcutils",
- "libutils",
"libbinder",
"libbinder_ndk",
//"libsonivox",
+ "libcutils",
+ "liblog",
+ "libutils",
+ "framework-permission-aidl-cpp",
"libandroidicu",
- "libexpat",
- "libcamera_client",
- "libstagefright_foundation",
- "libgui",
- "libdl",
"libaudioclient",
+ "libaudiofoundation",
+ "libcamera_client",
+ "libdl",
+ "libexpat",
+ "libgui",
"libmedia_codeclist",
"libmedia_omx",
- "framework-permission-aidl-cpp",
+ "libstagefright_foundation",
],
export_shared_lib_headers: [
"libaudioclient",
+ "libaudiofoundation",
"libbinder",
//"libsonivox",
- "libmedia_omx",
"framework-permission-aidl-cpp",
+ "libmedia_omx",
],
static_libs: [
- "resourcemanager_aidl_interface-ndk",
"framework-permission-aidl-cpp",
+ "resourcemanager_aidl_interface-ndk",
],
export_static_lib_headers: [
- "resourcemanager_aidl_interface-ndk",
"framework-permission-aidl-cpp",
+ "resourcemanager_aidl_interface-ndk",
],
export_include_dirs: [
@@ -441,17 +443,17 @@
],
cflags: [
+ "-Wall",
"-Werror",
"-Wno-error=deprecated-declarations",
- "-Wall",
],
version_script: "exports.lds",
sanitize: {
misc_undefined: [
- "unsigned-integer-overflow",
"signed-integer-overflow",
+ "unsigned-integer-overflow",
],
cfi: true,
},
@@ -463,8 +465,8 @@
host_supported: true,
srcs: [
- "NdkMediaFormatPriv.cpp",
"NdkMediaErrorPriv.cpp",
+ "NdkMediaFormatPriv.cpp",
],
header_libs: [
@@ -475,8 +477,8 @@
cflags: [
"-DEXPORT=__attribute__((visibility(\"default\")))",
- "-Werror",
"-Wall",
+ "-Werror",
],
export_include_dirs: ["include"],
diff --git a/media/libmedia/IMediaPlayer.cpp b/media/libmedia/IMediaPlayer.cpp
index c9f361e..4967dda 100644
--- a/media/libmedia/IMediaPlayer.cpp
+++ b/media/libmedia/IMediaPlayer.cpp
@@ -567,23 +567,24 @@
return reply.readInt32();
}
- status_t getRoutedDeviceId(audio_port_handle_t* deviceId)
+ status_t getRoutedDeviceIds(DeviceIdVector& deviceIds)
{
Parcel data, reply;
data.writeInterfaceToken(IMediaPlayer::getInterfaceDescriptor());
+ deviceIds.clear();
- status_t status = remote()->transact(GET_ROUTED_DEVICE_ID, data, &reply);
+ status_t status = remote()->transact(GET_ROUTED_DEVICE_IDS, data, &reply);
if (status != OK) {
- ALOGE("getRoutedDeviceid: binder call failed: %d", status);
- *deviceId = AUDIO_PORT_HANDLE_NONE;
+ ALOGE("getRoutedDeviceIds: binder call failed: %d", status);
return status;
}
status = reply.readInt32();
- if (status != NO_ERROR) {
- *deviceId = AUDIO_PORT_HANDLE_NONE;
- } else {
- *deviceId = reply.readInt32();
+ if (status == NO_ERROR) {
+ int size = reply.readInt32();
+ for (int i = 0; i < size; i++) {
+ deviceIds.push_back(reply.readInt32());
+ }
}
return status;
}
@@ -983,13 +984,16 @@
}
return NO_ERROR;
}
- case GET_ROUTED_DEVICE_ID: {
+ case GET_ROUTED_DEVICE_IDS: {
CHECK_INTERFACE(IMediaPlayer, data, reply);
- audio_port_handle_t deviceId;
- status_t ret = getRoutedDeviceId(&deviceId);
+ DeviceIdVector deviceIds;
+ status_t ret = getRoutedDeviceIds(deviceIds);
reply->writeInt32(ret);
if (ret == NO_ERROR) {
- reply->writeInt32(deviceId);
+ reply->writeInt32(deviceIds.size());
+ for (auto deviceId : deviceIds) {
+ reply->writeInt32(deviceId);
+ }
}
return NO_ERROR;
} break;
diff --git a/media/libmedia/IMediaRecorder.cpp b/media/libmedia/IMediaRecorder.cpp
index 755a147..1f04217 100644
--- a/media/libmedia/IMediaRecorder.cpp
+++ b/media/libmedia/IMediaRecorder.cpp
@@ -62,7 +62,7 @@
RESUME,
GET_METRICS,
SET_INPUT_DEVICE,
- GET_ROUTED_DEVICE_ID,
+ GET_ROUTED_DEVICE_IDS,
ENABLE_AUDIO_DEVICE_CALLBACK,
GET_ACTIVE_MICROPHONES,
GET_PORT_ID,
@@ -392,24 +392,24 @@
return reply.readInt32();;
}
- audio_port_handle_t getRoutedDeviceId(audio_port_handle_t *deviceId)
+ status_t getRoutedDeviceIds(DeviceIdVector& deviceIds)
{
- ALOGV("getRoutedDeviceId");
Parcel data, reply;
data.writeInterfaceToken(IMediaRecorder::getInterfaceDescriptor());
+ deviceIds.clear();
- status_t status = remote()->transact(GET_ROUTED_DEVICE_ID, data, &reply);
+ status_t status = remote()->transact(GET_ROUTED_DEVICE_IDS, data, &reply);
if (status != OK) {
- ALOGE("getRoutedDeviceid binder call failed: %d", status);
- *deviceId = AUDIO_PORT_HANDLE_NONE;
+ ALOGE("getRoutedDeviceIds: binder call failed: %d", status);
return status;
}
status = reply.readInt32();
- if (status != NO_ERROR) {
- *deviceId = AUDIO_PORT_HANDLE_NONE;
- } else {
- *deviceId = reply.readInt32();
+ if (status == NO_ERROR) {
+ int size = reply.readInt32();
+ for (int i = 0; i < size; i++) {
+ deviceIds.push_back(reply.readInt32());
+ }
}
return status;
}
@@ -730,14 +730,17 @@
}
return NO_ERROR;
} break;
- case GET_ROUTED_DEVICE_ID: {
- ALOGV("GET_ROUTED_DEVICE_ID");
+ case GET_ROUTED_DEVICE_IDS: {
+ ALOGV("GET_ROUTED_DEVICE_IDS");
CHECK_INTERFACE(IMediaRecorder, data, reply);
- audio_port_handle_t deviceId;
- status_t status = getRoutedDeviceId(&deviceId);
- reply->writeInt32(status);
- if (status == NO_ERROR) {
- reply->writeInt32(deviceId);
+ DeviceIdVector deviceIds;
+ status_t ret = getRoutedDeviceIds(deviceIds);
+ reply->writeInt32(ret);
+ if (ret == NO_ERROR) {
+ reply->writeInt32(deviceIds.size());
+ for (auto deviceId : deviceIds) {
+ reply->writeInt32(deviceId);
+ }
}
return NO_ERROR;
} break;
diff --git a/media/libmedia/include/media/IMediaPlayer.h b/media/libmedia/include/media/IMediaPlayer.h
index 28684d1..4c6f32c 100644
--- a/media/libmedia/include/media/IMediaPlayer.h
+++ b/media/libmedia/include/media/IMediaPlayer.h
@@ -23,6 +23,7 @@
#include <utils/KeyedVector.h>
#include <system/audio.h>
+#include <media/AudioContainers.h>
#include <media/AudioResamplerPublic.h>
#include <media/stagefright/MediaSource.h>
#include <media/VolumeShaper.h>
@@ -135,7 +136,7 @@
// AudioRouting
virtual status_t setOutputDevice(audio_port_handle_t deviceId) = 0;
- virtual status_t getRoutedDeviceId(audio_port_handle_t *deviceId) = 0;
+ virtual status_t getRoutedDeviceIds(DeviceIdVector& deviceIds) = 0;
virtual status_t enableAudioDeviceCallback(bool enabled) = 0;
protected:
@@ -184,7 +185,7 @@
RELEASE_DRM,
// AudioRouting
SET_OUTPUT_DEVICE,
- GET_ROUTED_DEVICE_ID,
+ GET_ROUTED_DEVICE_IDS,
ENABLE_AUDIO_DEVICE_CALLBACK,
};
};
diff --git a/media/libmedia/include/media/IMediaRecorder.h b/media/libmedia/include/media/IMediaRecorder.h
index 05da5c2..8411ca7 100644
--- a/media/libmedia/include/media/IMediaRecorder.h
+++ b/media/libmedia/include/media/IMediaRecorder.h
@@ -20,6 +20,7 @@
#include <android/media/MicrophoneInfoFw.h>
#include <binder/IInterface.h>
+#include <media/AudioContainers.h>
#include <system/audio.h>
#include <vector>
@@ -71,7 +72,7 @@
virtual sp<IGraphicBufferProducer> querySurfaceMediaSource() = 0;
virtual status_t setInputDevice(audio_port_handle_t deviceId) = 0;
- virtual status_t getRoutedDeviceId(audio_port_handle_t *deviceId) = 0;
+ virtual status_t getRoutedDeviceIds(DeviceIdVector& deviceIds) = 0;
virtual status_t enableAudioDeviceCallback(bool enabled) = 0;
virtual status_t getActiveMicrophones(
std::vector<media::MicrophoneInfoFw>* activeMicrophones) = 0;
diff --git a/media/libmedia/include/media/MediaRecorderBase.h b/media/libmedia/include/media/MediaRecorderBase.h
index 82ec9c5..e3698e3 100644
--- a/media/libmedia/include/media/MediaRecorderBase.h
+++ b/media/libmedia/include/media/MediaRecorderBase.h
@@ -69,7 +69,7 @@
virtual status_t setInputSurface(const sp<PersistentSurface>& surface) = 0;
virtual sp<IGraphicBufferProducer> querySurfaceMediaSource() const = 0;
virtual status_t setInputDevice(audio_port_handle_t deviceId) = 0;
- virtual status_t getRoutedDeviceId(audio_port_handle_t* deviceId) = 0;
+ virtual status_t getRoutedDeviceIds(DeviceIdVector& deviceIds) = 0;
virtual void setAudioDeviceCallback(const sp<AudioSystem::AudioDeviceCallback>& callback) = 0;
virtual status_t enableAudioDeviceCallback(bool enabled) = 0;
virtual status_t getActiveMicrophones(
diff --git a/media/libmedia/include/media/mediaplayer.h b/media/libmedia/include/media/mediaplayer.h
index 2f9b85e..7c612c3 100644
--- a/media/libmedia/include/media/mediaplayer.h
+++ b/media/libmedia/include/media/mediaplayer.h
@@ -281,7 +281,7 @@
status_t releaseDrm();
// AudioRouting
status_t setOutputDevice(audio_port_handle_t deviceId);
- audio_port_handle_t getRoutedDeviceId();
+ status_t getRoutedDeviceIds(DeviceIdVector& deviceIds);
status_t enableAudioDeviceCallback(bool enabled);
private:
diff --git a/media/libmedia/include/media/mediarecorder.h b/media/libmedia/include/media/mediarecorder.h
index 602f72e..1377d61 100644
--- a/media/libmedia/include/media/mediarecorder.h
+++ b/media/libmedia/include/media/mediarecorder.h
@@ -22,6 +22,7 @@
#include <utils/threads.h>
#include <utils/List.h>
#include <utils/Errors.h>
+#include <media/AudioContainers.h>
#include <media/IMediaRecorderClient.h>
#include <media/IMediaDeathNotifier.h>
#include <android/media/MicrophoneInfoFw.h>
@@ -266,7 +267,7 @@
sp<IGraphicBufferProducer> querySurfaceMediaSourceFromMediaServer();
status_t getMetrics(Parcel *reply);
status_t setInputDevice(audio_port_handle_t deviceId);
- status_t getRoutedDeviceId(audio_port_handle_t *deviceId);
+ status_t getRoutedDeviceIds(DeviceIdVector& deviceIds);
status_t enableAudioDeviceCallback(bool enabled);
status_t getActiveMicrophones(std::vector<media::MicrophoneInfoFw>* activeMicrophones);
status_t setPreferredMicrophoneDirection(audio_microphone_direction_t direction);
diff --git a/media/libmedia/mediaplayer.cpp b/media/libmedia/mediaplayer.cpp
index b5c75b3..9d3fce7 100644
--- a/media/libmedia/mediaplayer.cpp
+++ b/media/libmedia/mediaplayer.cpp
@@ -1105,19 +1105,14 @@
return mPlayer->setOutputDevice(deviceId);
}
-audio_port_handle_t MediaPlayer::getRoutedDeviceId()
+status_t MediaPlayer::getRoutedDeviceIds(DeviceIdVector& deviceIds)
{
Mutex::Autolock _l(mLock);
if (mPlayer == NULL) {
- ALOGV("getRoutedDeviceId: player not init");
- return AUDIO_PORT_HANDLE_NONE;
+ ALOGV("getRoutedDeviceIds: player not init");
+ return NO_INIT;
}
- audio_port_handle_t deviceId;
- status_t status = mPlayer->getRoutedDeviceId(&deviceId);
- if (status != NO_ERROR) {
- return AUDIO_PORT_HANDLE_NONE;
- }
- return deviceId;
+ return mPlayer->getRoutedDeviceIds(deviceIds);
}
status_t MediaPlayer::enableAudioDeviceCallback(bool enabled)
diff --git a/media/libmedia/mediarecorder.cpp b/media/libmedia/mediarecorder.cpp
index 48f5e4b..e676d5a 100644
--- a/media/libmedia/mediarecorder.cpp
+++ b/media/libmedia/mediarecorder.cpp
@@ -858,17 +858,17 @@
return mMediaRecorder->setInputDevice(deviceId);
}
-status_t MediaRecorder::getRoutedDeviceId(audio_port_handle_t* deviceId)
+status_t MediaRecorder::getRoutedDeviceIds(DeviceIdVector& deviceIds)
{
- ALOGV("getRoutedDeviceId");
+ ALOGV("getRoutedDeviceIds");
if (mMediaRecorder == NULL) {
ALOGE("media recorder is not initialized yet");
return INVALID_OPERATION;
}
- status_t status = mMediaRecorder->getRoutedDeviceId(deviceId);
+ status_t status = mMediaRecorder->getRoutedDeviceIds(deviceIds);
if (status != NO_ERROR) {
- *deviceId = AUDIO_PORT_HANDLE_NONE;
+ deviceIds.clear();
}
return status;
}
diff --git a/media/libmedia/tests/mediaplayer/IMediaPlayerTest.cpp b/media/libmedia/tests/mediaplayer/IMediaPlayerTest.cpp
index cc60933..f0db018 100644
--- a/media/libmedia/tests/mediaplayer/IMediaPlayerTest.cpp
+++ b/media/libmedia/tests/mediaplayer/IMediaPlayerTest.cpp
@@ -50,7 +50,7 @@
// We write a length greater than the following session id array. Should be discarded.
data.writeUint32(2);
- data.writeUnpadded(kMockByteArray, 1);
+ data.write(kMockByteArray, 1);
status_t result = IMediaPlayer::asBinder(iMediaPlayer_)
->transact(PREPARE_DRM, data, &reply);
diff --git a/media/libmediametrics/MediaMetricsItem.cpp b/media/libmediametrics/MediaMetricsItem.cpp
index ecb248d..2c58461 100644
--- a/media/libmediametrics/MediaMetricsItem.cpp
+++ b/media/libmediametrics/MediaMetricsItem.cpp
@@ -334,21 +334,21 @@
// This is checked only once in the lifetime of the process.
const uid_t uid = getuid();
- switch (uid) {
- case AID_RADIO: // telephony subsystem, RIL
+ const uid_t appid = multiuser_get_app_id(uid);
+
+ if (appid == AID_RADIO) {
+ // telephony subsystem, RIL
return false;
- default:
+ }
+
+ if (appid >= AID_ISOLATED_START && appid <= AID_ISOLATED_END) {
// Some isolated processes can access the audio system; see
// AudioSystem::setAudioFlingerBinder (currently only the HotwordDetectionService). Instead
// of also allowing access to the MediaMetrics service, it's simpler to just disable it for
// now.
// TODO(b/190151205): Either allow the HotwordDetectionService to access MediaMetrics or
// make this disabling specific to that process.
- uid_t appid = multiuser_get_app_id(uid);
- if (appid >= AID_ISOLATED_START && appid <= AID_ISOLATED_END) {
- return false;
- }
- break;
+ return false;
}
int enabled = property_get_int32(Item::EnabledProperty, -1);
diff --git a/media/libmediametrics/include/MediaMetricsConstants.h b/media/libmediametrics/include/MediaMetricsConstants.h
index 98c3382..a7b2077 100644
--- a/media/libmediametrics/include/MediaMetricsConstants.h
+++ b/media/libmediametrics/include/MediaMetricsConstants.h
@@ -183,6 +183,7 @@
#define AMEDIAMETRICS_PROP_PLAYBACK_SPEED "playback.speed" // double value (AudioTrack)
#define AMEDIAMETRICS_PROP_PLAYERIID "playerIId" // int32 (-1 invalid/unset IID)
#define AMEDIAMETRICS_PROP_ROUTEDDEVICEID "routedDeviceId" // int32
+#define AMEDIAMETRICS_PROP_ROUTEDDEVICEIDS "routedDeviceIds" // string value
#define AMEDIAMETRICS_PROP_SAMPLERATE "sampleRate" // int32
#define AMEDIAMETRICS_PROP_SAMPLERATECLIENT "sampleRateClient" // int32
#define AMEDIAMETRICS_PROP_SAMPLERATEHARDWARE "sampleRateHardware" // int32
diff --git a/media/libmediaplayerservice/Android.bp b/media/libmediaplayerservice/Android.bp
index a10c509..1d493e2 100644
--- a/media/libmediaplayerservice/Android.bp
+++ b/media/libmediaplayerservice/Android.bp
@@ -46,13 +46,14 @@
"av-types-aidl-cpp",
"framework-permission-aidl-cpp",
"libaconfig_storage_read_api_cc",
- "libaudioclient_aidl_conversion",
- "libbase",
- "libbinder_ndk",
"libactivitymanager_aidl",
"libandroid_net",
"libaudioclient",
+ "libaudioclient_aidl_conversion",
+ "libaudiofoundation",
+ "libbase",
"libbinder",
+ "libbinder_ndk",
"libcamera_client",
"libcodec2_client",
"libcrypto",
@@ -81,25 +82,25 @@
],
header_libs: [
- "media_plugin_headers",
"libmediautils_headers",
"libstagefright_rtsp_headers",
"libstagefright_webm_headers",
+ "media_plugin_headers",
],
static_libs: [
"com.android.media.flags.editing-aconfig-cc",
+ "framework-permission-aidl-cpp",
"libplayerservice_datasource",
"libstagefright_nuplayer",
"libstagefright_rtsp",
"libstagefright_timedtext",
- "framework-permission-aidl-cpp",
],
cflags: [
+ "-Wall",
"-Werror",
"-Wno-error=deprecated-declarations",
- "-Wall",
],
sanitize: {
@@ -115,8 +116,8 @@
],
export_shared_lib_headers: [
- "libmedia",
"framework-permission-aidl-cpp",
+ "libmedia",
],
export_header_lib_headers: [
diff --git a/media/libmediaplayerservice/MediaPlayerService.cpp b/media/libmediaplayerservice/MediaPlayerService.cpp
index b267c08..0067344 100644
--- a/media/libmediaplayerservice/MediaPlayerService.cpp
+++ b/media/libmediaplayerservice/MediaPlayerService.cpp
@@ -856,10 +856,13 @@
void MediaPlayerService::Client::AudioDeviceUpdatedNotifier::onAudioDeviceUpdate(
audio_io_handle_t audioIo,
- audio_port_handle_t deviceId) {
+ const DeviceIdVector& deviceIds) {
+ ALOGD("onAudioDeviceUpdate deviceIds: %s", toString(deviceIds).c_str());
sp<MediaPlayerBase> listener = mListener.promote();
if (listener != NULL) {
- listener->sendEvent(MEDIA_AUDIO_ROUTING_CHANGED, audioIo, deviceId);
+ // Java should query the new device ids once it gets the event.
+ // TODO(b/378505346): Pass the deviceIds to Java to avoid race conditions.
+ listener->sendEvent(MEDIA_AUDIO_ROUTING_CHANGED, audioIo);
} else {
ALOGW("listener for process %d death is gone", MEDIA_AUDIO_ROUTING_CHANGED);
}
@@ -1750,13 +1753,13 @@
return NO_INIT;
}
-status_t MediaPlayerService::Client::getRoutedDeviceId(audio_port_handle_t* deviceId)
+status_t MediaPlayerService::Client::getRoutedDeviceIds(DeviceIdVector& deviceIds)
{
- ALOGV("[%d] getRoutedDeviceId", mConnId);
+ ALOGV("[%d] getRoutedDeviceIds", mConnId);
{
Mutex::Autolock l(mLock);
if (mAudioOutput.get() != nullptr) {
- return mAudioOutput->getRoutedDeviceId(deviceId);
+ return mAudioOutput->getRoutedDeviceIds(deviceIds);
}
}
return NO_INIT;
@@ -1830,7 +1833,6 @@
mFlags(AUDIO_OUTPUT_FLAG_NONE),
mVolumeHandler(new media::VolumeHandler()),
mSelectedDeviceId(AUDIO_PORT_HANDLE_NONE),
- mRoutedDeviceId(AUDIO_PORT_HANDLE_NONE),
mDeviceCallbackEnabled(false),
mDeviceCallback(deviceCallback)
{
@@ -2604,14 +2606,14 @@
return NO_ERROR;
}
-status_t MediaPlayerService::AudioOutput::getRoutedDeviceId(audio_port_handle_t* deviceId)
+status_t MediaPlayerService::AudioOutput::getRoutedDeviceIds(DeviceIdVector& deviceIds)
{
- ALOGV("getRoutedDeviceId");
+ ALOGV("getRoutedDeviceIds");
Mutex::Autolock lock(mLock);
if (mTrack != 0) {
- mRoutedDeviceId = mTrack->getRoutedDeviceId();
+ mRoutedDeviceIds = mTrack->getRoutedDeviceIds();
}
- *deviceId = mRoutedDeviceId;
+ deviceIds = mRoutedDeviceIds;
return NO_ERROR;
}
diff --git a/media/libmediaplayerservice/MediaPlayerService.h b/media/libmediaplayerservice/MediaPlayerService.h
index 76b7bcf..497ef79 100644
--- a/media/libmediaplayerservice/MediaPlayerService.h
+++ b/media/libmediaplayerservice/MediaPlayerService.h
@@ -28,6 +28,7 @@
#include <utils/Vector.h>
#include <media/AidlConversion.h>
+#include <media/AudioContainers.h>
#include <media/AudioResamplerPublic.h>
#include <media/AudioSystem.h>
#include <media/AudioTrack.h>
@@ -148,7 +149,7 @@
// AudioRouting
virtual status_t setOutputDevice(audio_port_handle_t deviceId);
- virtual status_t getRoutedDeviceId(audio_port_handle_t* deviceId);
+ virtual status_t getRoutedDeviceIds(DeviceIdVector& deviceIds);
virtual status_t enableAudioDeviceCallback(bool enabled);
private:
@@ -181,7 +182,7 @@
audio_output_flags_t mFlags;
sp<media::VolumeHandler> mVolumeHandler;
audio_port_handle_t mSelectedDeviceId;
- audio_port_handle_t mRoutedDeviceId;
+ DeviceIdVector mRoutedDeviceIds;
bool mDeviceCallbackEnabled;
wp<AudioSystem::AudioDeviceCallback> mDeviceCallback;
mutable Mutex mLock;
@@ -401,7 +402,7 @@
virtual status_t releaseDrm();
// AudioRouting
virtual status_t setOutputDevice(audio_port_handle_t deviceId);
- virtual status_t getRoutedDeviceId(audio_port_handle_t* deviceId);
+ virtual status_t getRoutedDeviceIds(DeviceIdVector& deviceIds);
virtual status_t enableAudioDeviceCallback(bool enabled);
private:
@@ -414,7 +415,7 @@
~AudioDeviceUpdatedNotifier() {}
virtual void onAudioDeviceUpdate(audio_io_handle_t audioIo,
- audio_port_handle_t deviceId);
+ const DeviceIdVector& deviceIds);
private:
wp<MediaPlayerBase> mListener;
diff --git a/media/libmediaplayerservice/MediaRecorderClient.cpp b/media/libmediaplayerservice/MediaRecorderClient.cpp
index ed3ec89..53f4e61 100644
--- a/media/libmediaplayerservice/MediaRecorderClient.cpp
+++ b/media/libmediaplayerservice/MediaRecorderClient.cpp
@@ -409,10 +409,13 @@
void MediaRecorderClient::AudioDeviceUpdatedNotifier::onAudioDeviceUpdate(
audio_io_handle_t audioIo,
- audio_port_handle_t deviceId) {
+ const DeviceIdVector& deviceIds) {
+ ALOGD("onAudioDeviceUpdate deviceIds: %s", toString(deviceIds).c_str());
sp<IMediaRecorderClient> listener = mListener.promote();
if (listener != NULL) {
- listener->notify(MEDIA_RECORDER_AUDIO_ROUTING_CHANGED, audioIo, deviceId);
+ // Java should query the new device ids once it gets the event.
+ // TODO(b/378505346): Pass the deviceIds to Java to avoid race conditions.
+ listener->notify(MEDIA_RECORDER_AUDIO_ROUTING_CHANGED, audioIo, 0 /*ext2*/);
} else {
ALOGW("listener for process %d death is gone", MEDIA_RECORDER_AUDIO_ROUTING_CHANGED);
}
@@ -550,11 +553,11 @@
return NO_INIT;
}
-status_t MediaRecorderClient::getRoutedDeviceId(audio_port_handle_t* deviceId) {
- ALOGV("getRoutedDeviceId");
+status_t MediaRecorderClient::getRoutedDeviceIds(DeviceIdVector& deviceIds) {
+ ALOGV("getRoutedDeviceIds");
Mutex::Autolock lock(mLock);
if (mRecorder != NULL) {
- return mRecorder->getRoutedDeviceId(deviceId);
+ return mRecorder->getRoutedDeviceIds(deviceIds);
}
return NO_INIT;
}
diff --git a/media/libmediaplayerservice/MediaRecorderClient.h b/media/libmediaplayerservice/MediaRecorderClient.h
index dec0c99..3b9ab07 100644
--- a/media/libmediaplayerservice/MediaRecorderClient.h
+++ b/media/libmediaplayerservice/MediaRecorderClient.h
@@ -41,7 +41,7 @@
virtual ~AudioDeviceUpdatedNotifier();
virtual void onAudioDeviceUpdate(
audio_io_handle_t audioIo,
- audio_port_handle_t deviceId);
+ const DeviceIdVector& deviceIds);
private:
wp<IMediaRecorderClient> mListener;
};
@@ -80,7 +80,7 @@
virtual status_t setInputSurface(const sp<PersistentSurface>& surface);
virtual sp<IGraphicBufferProducer> querySurfaceMediaSource();
virtual status_t setInputDevice(audio_port_handle_t deviceId);
- virtual status_t getRoutedDeviceId(audio_port_handle_t* deviceId);
+ virtual status_t getRoutedDeviceIds(DeviceIdVector& deviceIds);
virtual status_t enableAudioDeviceCallback(bool enabled);
virtual status_t getActiveMicrophones(
std::vector<media::MicrophoneInfoFw>* activeMicrophones);
diff --git a/media/libmediaplayerservice/StagefrightRecorder.cpp b/media/libmediaplayerservice/StagefrightRecorder.cpp
index 086baa3..fa42da2 100644
--- a/media/libmediaplayerservice/StagefrightRecorder.cpp
+++ b/media/libmediaplayerservice/StagefrightRecorder.cpp
@@ -59,6 +59,7 @@
#include <media/stagefright/PersistentSurface.h>
#include <media/MediaProfiles.h>
#include <camera/CameraParameters.h>
+#include <gui/Flags.h>
#include <utils/Errors.h>
#include <sys/types.h>
@@ -1932,16 +1933,32 @@
return BAD_VALUE;
}
+#if WB_LIBCAMERASERVICE_WITH_DEPENDENCIES
+ sp<Surface> surface = new Surface(mPreviewSurface);
+ mCameraSourceTimeLapse = CameraSourceTimeLapse::CreateFromCamera(
+ mCamera, mCameraProxy, mCameraId, clientName, uid, pid,
+ videoSize, mFrameRate, surface,
+ std::llround(1e6 / mCaptureFps));
+#else
mCameraSourceTimeLapse = CameraSourceTimeLapse::CreateFromCamera(
mCamera, mCameraProxy, mCameraId, clientName, uid, pid,
videoSize, mFrameRate, mPreviewSurface,
std::llround(1e6 / mCaptureFps));
+#endif
*cameraSource = mCameraSourceTimeLapse;
} else {
+#if WB_LIBCAMERASERVICE_WITH_DEPENDENCIES
+ sp<Surface> surface = new Surface(mPreviewSurface);
+ *cameraSource = CameraSource::CreateFromCamera(
+ mCamera, mCameraProxy, mCameraId, clientName, uid, pid,
+ videoSize, mFrameRate,
+ surface);
+#else
*cameraSource = CameraSource::CreateFromCamera(
mCamera, mCameraProxy, mCameraId, clientName, uid, pid,
videoSize, mFrameRate,
mPreviewSurface);
+#endif
}
mCamera.clear();
mCameraProxy.clear();
@@ -2578,11 +2595,11 @@
return NO_ERROR;
}
-status_t StagefrightRecorder::getRoutedDeviceId(audio_port_handle_t* deviceId) {
- ALOGV("getRoutedDeviceId");
+status_t StagefrightRecorder::getRoutedDeviceIds(DeviceIdVector& deviceIds) {
+ ALOGV("getRoutedDeviceIds");
if (mAudioSourceNode != 0) {
- status_t status = mAudioSourceNode->getRoutedDeviceId(deviceId);
+ status_t status = mAudioSourceNode->getRoutedDeviceIds(deviceIds);
return status;
}
return NO_INIT;
diff --git a/media/libmediaplayerservice/StagefrightRecorder.h b/media/libmediaplayerservice/StagefrightRecorder.h
index 0b6a5bb..4c5e62f 100644
--- a/media/libmediaplayerservice/StagefrightRecorder.h
+++ b/media/libmediaplayerservice/StagefrightRecorder.h
@@ -80,7 +80,7 @@
// Querying a SurfaceMediaSourcer
virtual sp<IGraphicBufferProducer> querySurfaceMediaSource() const;
virtual status_t setInputDevice(audio_port_handle_t deviceId);
- virtual status_t getRoutedDeviceId(audio_port_handle_t* deviceId);
+ virtual status_t getRoutedDeviceIds(DeviceIdVector& deviceIds);
virtual void setAudioDeviceCallback(const sp<AudioSystem::AudioDeviceCallback>& callback);
virtual status_t enableAudioDeviceCallback(bool enabled);
virtual status_t getActiveMicrophones(std::vector<media::MicrophoneInfoFw>* activeMicrophones);
diff --git a/media/libmediaplayerservice/fuzzer/Android.bp b/media/libmediaplayerservice/fuzzer/Android.bp
index fcdaff9..a3285ee 100644
--- a/media/libmediaplayerservice/fuzzer/Android.bp
+++ b/media/libmediaplayerservice/fuzzer/Android.bp
@@ -33,6 +33,7 @@
"liblog",
],
shared_libs: [
+ "camera_platform_flags_c_lib",
"framework-permission-aidl-cpp",
"libbinder",
"libbinder_ndk",
@@ -81,6 +82,7 @@
"libactivitymanager_aidl",
"libandroid_net",
"libaudioflinger",
+ "libaudiofoundation",
"libcamera_client",
"libcodec2_client",
"libcrypto",
@@ -159,6 +161,7 @@
"libactivitymanager_aidl",
"libandroid_net",
"libaudioclient",
+ "libaudiofoundation",
"libcamera_client",
"libcodec2_client",
"libcrypto",
diff --git a/media/libmediaplayerservice/fuzzer/mediaplayer_fuzzer.cpp b/media/libmediaplayerservice/fuzzer/mediaplayer_fuzzer.cpp
index 15265bf..a52d751 100644
--- a/media/libmediaplayerservice/fuzzer/mediaplayer_fuzzer.cpp
+++ b/media/libmediaplayerservice/fuzzer/mediaplayer_fuzzer.cpp
@@ -482,8 +482,8 @@
mMediaPlayer->setOutputDevice(deviceId);
},
[&]() {
- audio_port_handle_t deviceId;
- mMediaPlayer->getRoutedDeviceId(&deviceId);
+ DeviceIdVector deviceIds;
+ mMediaPlayer->getRoutedDeviceIds(deviceIds);
},
[&]() { mMediaPlayer->enableAudioDeviceCallback(mFdp.ConsumeBool()); },
[&]() {
diff --git a/media/libmediaplayerservice/fuzzer/mediarecorder_fuzzer.cpp b/media/libmediaplayerservice/fuzzer/mediarecorder_fuzzer.cpp
index 2518c21..b95cae7 100644
--- a/media/libmediaplayerservice/fuzzer/mediarecorder_fuzzer.cpp
+++ b/media/libmediaplayerservice/fuzzer/mediarecorder_fuzzer.cpp
@@ -24,6 +24,7 @@
#include <fakeservicemanager/FakeServiceManager.h>
#include <gui/IGraphicBufferProducer.h>
#include <gui/Surface.h>
+#include <gui/Flags.h>
#include <gui/SurfaceComposerClient.h>
#include <media/stagefright/PersistentSurface.h>
#include <media/stagefright/foundation/AString.h>
@@ -115,7 +116,7 @@
virtual ~TestAudioDeviceCallback() = default;
void onAudioDeviceUpdate(audio_io_handle_t /*audioIo*/,
- audio_port_handle_t /*deviceId*/) override{};
+ const DeviceIdVector& /*deviceIds*/) override{};
};
class TestCamera : public ICamera {
@@ -126,14 +127,9 @@
status_t connect(const sp<ICameraClient> & /*client*/) override { return 0; };
status_t lock() override { return 0; };
status_t unlock() override { return 0; };
- status_t setPreviewTarget(const sp<IGraphicBufferProducer> & /*bufferProducer*/) override {
- return 0;
- };
+ status_t setPreviewTarget(const sp<SurfaceType> & /*target*/) override { return 0; };
+ status_t setPreviewCallbackTarget(const sp<SurfaceType> & /*target*/) override { return 0; };
void setPreviewCallbackFlag(int /*flag*/) override{};
- status_t setPreviewCallbackTarget(
- const sp<IGraphicBufferProducer> & /*callbackProducer*/) override {
- return 0;
- };
status_t startPreview() override { return 0; };
void stopPreview() override{};
bool previewEnabled() override { return true; };
@@ -152,9 +148,7 @@
return 0;
};
status_t setVideoBufferMode(int32_t /*videoBufferMode*/) override { return 0; };
- status_t setVideoTarget(const sp<IGraphicBufferProducer> & /*bufferProducer*/) override {
- return 0;
- };
+ status_t setVideoTarget(const sp<SurfaceType> & /*target*/) override { return 0; };
status_t setAudioRestriction(int32_t /*mode*/) override { return 0; };
int32_t getGlobalAudioRestriction() override { return 0; };
IBinder *onAsBinder() override { return reinterpret_cast<IBinder *>(this); };
@@ -191,8 +185,8 @@
int32_t max;
mStfRecorder->getMaxAmplitude(&max);
- int32_t deviceId;
- mStfRecorder->getRoutedDeviceId(&deviceId);
+ DeviceIdVector deviceIds;
+ mStfRecorder->getRoutedDeviceIds(deviceIds);
vector<android::media::MicrophoneInfoFw> activeMicrophones{};
mStfRecorder->getActiveMicrophones(&activeMicrophones);
diff --git a/media/libmediaplayerservice/include/MediaPlayerInterface.h b/media/libmediaplayerservice/include/MediaPlayerInterface.h
index 495cf00..9fe0e95 100644
--- a/media/libmediaplayerservice/include/MediaPlayerInterface.h
+++ b/media/libmediaplayerservice/include/MediaPlayerInterface.h
@@ -26,6 +26,7 @@
#include <utils/RefBase.h>
#include <media/mediaplayer.h>
+#include <media/AudioContainers.h>
#include <media/AudioResamplerPublic.h>
#include <media/AudioTimestamp.h>
#include <media/AVSyncSettings.h>
@@ -185,7 +186,7 @@
// AudioRouting
virtual status_t setOutputDevice(audio_port_handle_t deviceId) = 0;
- virtual status_t getRoutedDeviceId(audio_port_handle_t* deviceId) = 0;
+ virtual status_t getRoutedDeviceIds(DeviceIdVector& deviceIds) = 0;
virtual status_t enableAudioDeviceCallback(bool enabled) = 0;
};
diff --git a/media/libstagefright/Android.bp b/media/libstagefright/Android.bp
index 9ed5343..92ac451 100644
--- a/media/libstagefright/Android.bp
+++ b/media/libstagefright/Android.bp
@@ -47,8 +47,8 @@
],
cflags: [
- "-Werror",
"-Wall",
+ "-Werror",
],
header_libs: [
@@ -57,6 +57,8 @@
],
shared_libs: [
+ "android.hardware.cas.native@1.0",
+ "android.hardware.drm@1.0",
"libaudioutils",
"libgui",
"libhidlallocatorutils",
@@ -66,15 +68,13 @@
"libstagefright_foundation",
"libui",
"libutils",
- "android.hardware.cas.native@1.0",
- "android.hardware.drm@1.0",
],
sanitize: {
cfi: true,
misc_undefined: [
- "unsigned-integer-overflow",
"signed-integer-overflow",
+ "unsigned-integer-overflow",
],
},
}
@@ -88,9 +88,9 @@
min_sdk_version: "29",
srcs: [
- "Utils.cpp",
- "MediaSource.cpp",
"HevcUtils.cpp",
+ "MediaSource.cpp",
+ "Utils.cpp",
],
shared_libs: [
@@ -115,17 +115,17 @@
],
cflags: [
- "-Wno-multichar",
+ "-Wall",
"-Werror",
"-Wno-error=deprecated-declarations",
- "-Wall",
+ "-Wno-multichar",
],
sanitize: {
cfi: true,
misc_undefined: [
- "unsigned-integer-overflow",
"signed-integer-overflow",
+ "unsigned-integer-overflow",
],
},
@@ -151,12 +151,11 @@
],
shared_libs: [
- "libbase",
- "libcutils",
"libEGL",
"libGLESv1_CM",
"libGLESv2",
- "libvulkan",
+ "libbase",
+ "libcutils",
"libgui",
"liblog",
"libprocessgroup",
@@ -164,6 +163,7 @@
"libsync",
"libui",
"libutils",
+ "libvulkan",
],
static_libs: [
@@ -175,18 +175,18 @@
],
cflags: [
- "-Wno-multichar",
+ "-Wall",
"-Werror",
"-Wno-error=deprecated-declarations",
- "-Wall",
+ "-Wno-multichar",
],
sanitize: {
// TODO: re-enabled cfi for this lib after b/139945549 fixed
cfi: false,
misc_undefined: [
- "unsigned-integer-overflow",
"signed-integer-overflow",
+ "unsigned-integer-overflow",
],
},
}
@@ -210,16 +210,16 @@
],
cflags: [
- "-Wno-multichar",
- "-Werror",
"-Wall",
+ "-Werror",
+ "-Wno-multichar",
],
sanitize: {
cfi: true,
misc_undefined: [
- "unsigned-integer-overflow",
"signed-integer-overflow",
+ "unsigned-integer-overflow",
],
},
}
@@ -256,13 +256,13 @@
"MediaCodecSource.cpp",
"MediaExtractor.cpp",
"MediaExtractorFactory.cpp",
+ "MediaMuxer.cpp",
"MediaSource.cpp",
"MediaSync.cpp",
"MediaTrack.cpp",
- "MediaMuxer.cpp",
"NuMediaExtractor.cpp",
- "OggWriter.cpp",
"OMXClient.cpp",
+ "OggWriter.cpp",
"OmxInfoBuilder.cpp",
"RemoteMediaExtractor.cpp",
"RemoteMediaSource.cpp",
@@ -271,13 +271,22 @@
"SurfaceUtils.cpp",
"ThrottledSource.cpp",
"Utils.cpp",
- "VideoFrameSchedulerBase.cpp",
"VideoFrameScheduler.cpp",
+ "VideoFrameSchedulerBase.cpp",
"VideoRenderQualityTracker.cpp",
],
shared_libs: [
- "libstagefright_framecapture_utils",
+ "aconfig_mediacodec_flags_c_lib",
+ "android.hardware.cas.native@1.0",
+ "android.hardware.drm@1.0",
+ "android.hardware.media.omx@1.0",
+ "android.hidl.allocator@1.0",
+ "framework-permission-aidl-cpp",
+ "libaconfig_storage_read_api_cc",
+ "libaudioclient",
+ "libaudioclient_aidl_conversion",
+ "libaudiofoundation",
"libaudioutils",
"libbase",
"libbinder",
@@ -290,66 +299,61 @@
"libdl",
"libdl_android",
"libgui",
+ "libhidlallocatorutils",
+ "libhidlbase",
+ "libhidlmemory",
"liblog",
"libmedia",
"libmedia_codeclist",
+ "libmedia_helper",
"libmedia_omx",
"libmedia_omx_client",
- "libaudioclient",
"libmediametrics",
- "libui",
- "libutils",
- "libmedia_helper",
"libsfplugin_ccodec",
"libsfplugin_ccodec_utils",
"libstagefright_codecbase",
"libstagefright_foundation",
+ "libstagefright_framecapture_utils",
"libstagefright_omx_utils",
- "libhidlallocatorutils",
- "libhidlbase",
- "libhidlmemory",
- "android.hidl.allocator@1.0",
- "android.hardware.cas.native@1.0",
- "android.hardware.drm@1.0",
- "android.hardware.media.omx@1.0",
- "framework-permission-aidl-cpp",
- "libaudioclient_aidl_conversion",
+ "libui",
+ "libutils",
"packagemanager_aidl-cpp",
"server_configurable_flags",
"libaconfig_storage_read_api_cc",
"aconfig_mediacodec_flags_c_lib",
+ "camera_platform_flags_c_lib",
],
static_libs: [
"android.media.codec-aconfig-cc",
"android.media.extractor.flags-aconfig-cc",
"com.android.media.flags.editing-aconfig-cc",
- "libstagefright_esds",
- "libstagefright_color_conversion",
- "libyuv",
- "libstagefright_webm",
- "libstagefright_timedtext",
- "libogg",
- "libstagefright_id3",
"framework-permission-aidl-cpp",
- "libmediandk_format",
"libmedia_ndkformatpriv",
+ "libmediandk_format",
+ "libogg",
+ "libstagefright_color_conversion",
+ "libstagefright_esds",
+ "libstagefright_id3",
+ "libstagefright_timedtext",
+ "libstagefright_webm",
+ "libyuv",
],
header_libs: [
"libmediadrm_headers",
+ "libmediaformatshaper_headers",
"libnativeloader-headers",
"libstagefright_xmlparser_headers",
"media_ndk_headers",
- "libmediaformatshaper_headers",
],
export_shared_lib_headers: [
+ "android.hidl.allocator@1.0",
+ "framework-permission-aidl-cpp",
"libgui",
"libhidlmemory",
"libmedia",
- "android.hidl.allocator@1.0",
- "framework-permission-aidl-cpp",
],
export_include_dirs: [
@@ -357,10 +361,10 @@
],
cflags: [
- "-Wno-multichar",
+ "-Wall",
"-Werror",
"-Wno-error=deprecated-declarations",
- "-Wall",
+ "-Wno-multichar",
],
version_script: "exports.lds",
@@ -375,8 +379,8 @@
sanitize: {
cfi: true,
misc_undefined: [
- "unsigned-integer-overflow",
"signed-integer-overflow",
+ "unsigned-integer-overflow",
],
},
}
diff --git a/media/libstagefright/AudioSource.cpp b/media/libstagefright/AudioSource.cpp
index 584dad6..f658d84 100644
--- a/media/libstagefright/AudioSource.cpp
+++ b/media/libstagefright/AudioSource.cpp
@@ -497,9 +497,9 @@
return NO_INIT;
}
-status_t AudioSource::getRoutedDeviceId(audio_port_handle_t* deviceId) {
+status_t AudioSource::getRoutedDeviceIds(DeviceIdVector& deviceIds) {
if (mRecord != 0) {
- *deviceId = mRecord->getRoutedDeviceId();
+ deviceIds = mRecord->getRoutedDeviceIds();
return NO_ERROR;
}
return NO_INIT;
diff --git a/media/libstagefright/CameraSource.cpp b/media/libstagefright/CameraSource.cpp
index e26f189..81a5508 100644
--- a/media/libstagefright/CameraSource.cpp
+++ b/media/libstagefright/CameraSource.cpp
@@ -35,6 +35,7 @@
#include <camera/StringUtils.h>
#include <com_android_graphics_libgui_flags.h>
#include <gui/Surface.h>
+#include <gui/Flags.h>
#include <utils/String8.h>
#include <cutils/properties.h>
@@ -99,7 +100,7 @@
pid_t clientPid,
Size videoSize,
int32_t frameRate,
- const sp<IGraphicBufferProducer>& surface) {
+ const sp<SurfaceType>& surface) {
CameraSource *source = new CameraSource(camera, proxy, cameraId,
clientName, clientUid, clientPid, videoSize, frameRate, surface);
@@ -115,7 +116,7 @@
pid_t clientPid,
Size videoSize,
int32_t frameRate,
- const sp<IGraphicBufferProducer>& surface)
+ const sp<SurfaceType>& surface)
: mCameraFlags(0),
mNumInputBuffers(0),
mVideoFrameRate(-1),
@@ -490,11 +491,23 @@
#if COM_ANDROID_GRAPHICS_LIBGUI_FLAGS(WB_CONSUMER_BASE_OWNS_BQ)
mVideoBufferConsumer = new BufferItemConsumer(usage, bufferCount);
mVideoBufferConsumer->setName(String8::format("StageFright-CameraSource"));
+
+#if WB_LIBCAMERASERVICE_WITH_DEPENDENCIES
+ mVideoBufferProducer = mVideoBufferConsumer->getSurface();
+#else
mVideoBufferProducer = mVideoBufferConsumer->getSurface()->getIGraphicBufferProducer();
+#endif // WB_LIBCAMERASERVICE_WITH_DEPENDENCIES
+
#else
mVideoBufferConsumer = new BufferItemConsumer(consumer, usage, bufferCount);
mVideoBufferConsumer->setName(String8::format("StageFright-CameraSource"));
+
+#if WB_LIBCAMERASERVICE_WITH_DEPENDENCIES
+ mVideoBufferProducer = new Surface(producer);
+#else
mVideoBufferProducer = producer;
+#endif // WB_LIBCAMERASERVICE_WITH_DEPENDENCIES
+
#endif // COM_ANDROID_GRAPHICS_LIBGUI_FLAGS(WB_CONSUMER_BASE_OWNS_BQ)
status_t res = mVideoBufferConsumer->setDefaultBufferSize(width, height);
diff --git a/media/libstagefright/CameraSourceTimeLapse.cpp b/media/libstagefright/CameraSourceTimeLapse.cpp
index 50a512f..b1a005b 100644
--- a/media/libstagefright/CameraSourceTimeLapse.cpp
+++ b/media/libstagefright/CameraSourceTimeLapse.cpp
@@ -29,6 +29,7 @@
#include <media/stagefright/MetaData.h>
#include <camera/Camera.h>
#include <camera/CameraParameters.h>
+#include <gui/Flags.h>
#include <utils/String8.h>
#include <utils/Vector.h>
@@ -44,7 +45,11 @@
pid_t clientPid,
Size videoSize,
int32_t videoFrameRate,
+#if WB_LIBCAMERASERVICE_WITH_DEPENDENCIES
+ const sp<Surface>& surface,
+#else
const sp<IGraphicBufferProducer>& surface,
+#endif
int64_t timeBetweenFrameCaptureUs) {
CameraSourceTimeLapse *source = new
@@ -71,7 +76,11 @@
pid_t clientPid,
Size videoSize,
int32_t videoFrameRate,
+#if WB_LIBCAMERASERVICE_WITH_DEPENDENCIES
+ const sp<Surface>& surface,
+#else
const sp<IGraphicBufferProducer>& surface,
+#endif
int64_t timeBetweenFrameCaptureUs)
: CameraSource(camera, proxy, cameraId, clientName, clientUid, clientPid,
videoSize, videoFrameRate, surface),
diff --git a/media/libstagefright/FrameCaptureLayer.cpp b/media/libstagefright/FrameCaptureLayer.cpp
index 4e71943..53e4d7d 100644
--- a/media/libstagefright/FrameCaptureLayer.cpp
+++ b/media/libstagefright/FrameCaptureLayer.cpp
@@ -242,8 +242,7 @@
ALOGV("releaseBuffer");
Mutex::Autolock _lock(mLock);
- return mConsumer->releaseBuffer(bi.mSlot, bi.mFrameNumber,
- EGL_NO_DISPLAY, EGL_NO_SYNC_KHR, bi.mFence);
+ return mConsumer->releaseBuffer(bi.mSlot, bi.mFrameNumber, bi.mFence);
}
} // namespace android
diff --git a/media/libstagefright/MPEG4Writer.cpp b/media/libstagefright/MPEG4Writer.cpp
index cb3c185..1e233cf 100644
--- a/media/libstagefright/MPEG4Writer.cpp
+++ b/media/libstagefright/MPEG4Writer.cpp
@@ -53,6 +53,7 @@
#include <media/esds/ESDS.h>
#include "include/HevcUtils.h"
+#include <com_android_internal_camera_flags.h>
#include <com_android_media_editing_flags.h>
namespace editing_flags = com::android::media::editing::flags;
@@ -66,6 +67,8 @@
true; \
}))
+namespace flags_camera = com::android::internal::camera::flags;
+
namespace android {
static const int64_t kMinStreamableFileSizeInBytes = 5 * 1024 * 1024;
@@ -92,6 +95,8 @@
static const int kTimestampDebugCount = 10;
static const int kItemIdBase = 10000;
static const char kExifHeader[] = {'E', 'x', 'i', 'f', '\0', '\0'};
+static const char kGainmapMetaHeader[] = {'t', 'm', 'a', 'p', '\0', '\0'};
+static const char kGainmapHeader[] = {'g', 'm', 'a', 'p', '\0', '\0'};
static const uint8_t kExifApp1Marker[] = {'E', 'x', 'i', 'f', 0xff, 0xe1};
static const uint8_t kMandatoryHevcNalUnitTypes[3] = {
@@ -169,8 +174,11 @@
bool isMPEG4() const { return mIsMPEG4; }
bool usePrefix() const { return mIsAvc || mIsHevc || mIsHeic || mIsDovi; }
bool isExifData(MediaBufferBase *buffer, uint32_t *tiffHdrOffset) const;
+ bool isGainmapMetaData(MediaBufferBase* buffer, uint32_t* offset) const;
+ bool isGainmapData(MediaBufferBase* buffer, uint32_t* offset) const;
void addChunkOffset(off64_t offset);
- void addItemOffsetAndSize(off64_t offset, size_t size, bool isExif);
+ void addItemOffsetAndSize(off64_t offset, size_t size, bool isExif,
+ bool isGainmapMeta = false, bool isGainmap = false);
void flushItemRefs();
TrackId& getTrackId() { return mTrackId; }
status_t dump(int fd, const Vector<String16>& args) const;
@@ -180,8 +188,11 @@
void resetInternal();
int64_t trackMetaDataSize();
bool isTimestampValid(int64_t timeUs);
+ uint16_t getImageItemId() { return mImageItemId; };
+ uint16_t getGainmapItemId() { return mGainmapItemId; };
+ uint16_t getGainmapMetaItemId() { return mGainmapMetadataItemId; };
-private:
+ private:
// A helper class to handle faster write box with table entries
template<class TYPE, unsigned ENTRY_SIZE>
// ENTRY_SIZE: # of values in each entry
@@ -408,6 +419,7 @@
Vector<uint16_t> mProperties;
ItemRefs mDimgRefs;
+ ItemRefs mGainmapDimgRefs;
Vector<uint16_t> mExifList;
uint16_t mImageItemId;
uint16_t mItemIdBase;
@@ -416,6 +428,10 @@
int32_t mTileWidth, mTileHeight;
int32_t mGridRows, mGridCols;
size_t mNumTiles, mTileIndex;
+ uint16_t mGainmapItemId, mGainmapMetadataItemId;
+ ColorAspects mColorAspects;
+ bool mColorAspectsValid;
+ Vector<uint8_t> mBitsPerChannel;
// Update the audio track's drift information.
void updateDriftTime(const sp<MetaData>& meta);
@@ -821,6 +837,10 @@
+ 12 // iref box (when empty)
;
+ if (flags_camera::camera_heif_gainmap()) {
+ metaSize += 36; // grpl box (when empty)
+ }
+
for (List<Track *>::iterator it = mTracks.begin();
it != mTracks.end(); ++it) {
if ((*it)->isHeif()) {
@@ -2220,8 +2240,7 @@
////////////////////////////////////////////////////////////////////////////////
-MPEG4Writer::Track::Track(
- MPEG4Writer *owner, const sp<MediaSource> &source, uint32_t aTrackId)
+MPEG4Writer::Track::Track(MPEG4Writer* owner, const sp<MediaSource>& source, uint32_t aTrackId)
: mOwner(owner),
mMeta(source->getFormat()),
mSource(source),
@@ -2241,7 +2260,7 @@
mStssTableEntries(new ListTableEntries<uint32_t, 1>(1000)),
mSttsTableEntries(new ListTableEntries<uint32_t, 2>(1000)),
mCttsTableEntries(new ListTableEntries<uint32_t, 2>(1000)),
- mElstTableEntries(new ListTableEntries<uint32_t, 3>(3)), // Reserve 3 rows, a row has 3 items
+ mElstTableEntries(new ListTableEntries<uint32_t, 3>(3)), // Reserve 3 rows, a row has 3 items
mMinCttsOffsetTimeUs(0),
mMinCttsOffsetTicks(0),
mMaxCttsOffsetTicks(0),
@@ -2255,6 +2274,7 @@
mFirstSampleStartOffsetUs(0),
mRotation(0),
mDimgRefs("dimg"),
+ mGainmapDimgRefs("dimg"),
mImageItemId(0),
mItemIdBase(0),
mIsPrimary(0),
@@ -2265,7 +2285,10 @@
mGridRows(0),
mGridCols(0),
mNumTiles(1),
- mTileIndex(0) {
+ mTileIndex(0),
+ mGainmapItemId(0),
+ mGainmapMetadataItemId(0),
+ mColorAspectsValid(false) {
getCodecSpecificDataFromInputFormatIfPossible();
const char *mime;
@@ -2454,25 +2477,57 @@
return OK;
}
-bool MPEG4Writer::Track::isExifData(
- MediaBufferBase *buffer, uint32_t *tiffHdrOffset) const {
+bool MPEG4Writer::Track::isGainmapMetaData(MediaBufferBase* buffer, uint32_t* offset) const {
+ if (!mIsHeif) {
+ return false;
+ }
+
+ // Gainmap metadata block starting with 'tmap\0\0'
+ size_t length = buffer->range_length();
+ uint8_t *data = (uint8_t *)buffer->data() + buffer->range_offset();
+ if ((length > sizeof(kGainmapMetaHeader)) &&
+ !memcmp(data, kGainmapMetaHeader, sizeof(kGainmapMetaHeader))) {
+ *offset = sizeof(kGainmapMetaHeader);
+ return true;
+ }
+
+ return false;
+}
+
+bool MPEG4Writer::Track::isGainmapData(MediaBufferBase* buffer, uint32_t* offset) const {
+ if (!mIsHeif) {
+ return false;
+ }
+
+ // Gainmap block starting with 'gmap\0\0'
+ size_t length = buffer->range_length();
+ uint8_t* data = (uint8_t*)buffer->data() + buffer->range_offset();
+ if ((length > sizeof(kGainmapHeader)) &&
+ !memcmp(data, kGainmapHeader, sizeof(kGainmapHeader))) {
+ *offset = sizeof(kGainmapHeader);
+ return true;
+ }
+
+ return false;
+}
+
+bool MPEG4Writer::Track::isExifData(MediaBufferBase* buffer, uint32_t* tiffHdrOffset) const {
if (!mIsHeif) {
return false;
}
// Exif block starting with 'Exif\0\0'
size_t length = buffer->range_length();
- uint8_t *data = (uint8_t *)buffer->data() + buffer->range_offset();
- if ((length > sizeof(kExifHeader))
- && !memcmp(data, kExifHeader, sizeof(kExifHeader))) {
+ uint8_t* data = (uint8_t*)buffer->data() + buffer->range_offset();
+ if ((length > sizeof(kExifHeader)) && !memcmp(data, kExifHeader, sizeof(kExifHeader))) {
*tiffHdrOffset = sizeof(kExifHeader);
return true;
}
// Exif block starting with fourcc 'Exif' followed by APP1 marker
- if ((length > sizeof(kExifApp1Marker) + 2 + sizeof(kExifHeader))
- && !memcmp(data, kExifApp1Marker, sizeof(kExifApp1Marker))
- && !memcmp(data + sizeof(kExifApp1Marker) + 2, kExifHeader, sizeof(kExifHeader))) {
+ if ((length > sizeof(kExifApp1Marker) + 2 + sizeof(kExifHeader)) &&
+ !memcmp(data, kExifApp1Marker, sizeof(kExifApp1Marker)) &&
+ !memcmp(data + sizeof(kExifApp1Marker) + 2, kExifHeader, sizeof(kExifHeader))) {
// skip 'Exif' fourcc
buffer->set_range(4, buffer->range_length() - 4);
@@ -2489,7 +2544,8 @@
mCo64TableEntries->add(hton64(offset));
}
-void MPEG4Writer::Track::addItemOffsetAndSize(off64_t offset, size_t size, bool isExif) {
+void MPEG4Writer::Track::addItemOffsetAndSize(off64_t offset, size_t size, bool isExif,
+ bool isGainmapMeta, bool isGainmap) {
CHECK(mIsHeif);
if (offset > UINT32_MAX || size > UINT32_MAX) {
@@ -2518,6 +2574,46 @@
return;
}
+ bool hasGrid = (mTileWidth > 0);
+
+ if (isGainmapMeta && flags_camera::camera_heif_gainmap()) {
+ uint16_t metaItemId;
+ if (mOwner->reserveItemId_l(1, &metaItemId) != OK) {
+ return;
+ }
+
+ Vector<uint16_t> props;
+ if (mColorAspectsValid) {
+ ItemProperty property;
+ property.type = FOURCC('c', 'o', 'l', 'r');
+ ColorUtils::convertCodecColorAspectsToIsoAspects(
+ mColorAspects, &property.colorPrimaries, &property.colorTransfer,
+ &property.colorMatrix, &property.colorRange);
+ props.push_back(mOwner->addProperty_l(property));
+ }
+ if (!mBitsPerChannel.empty()) {
+ ItemProperty property;
+ property.type = FOURCC('p', 'i', 'x', 'i');
+ property.bitsPerChannel.appendVector(mBitsPerChannel);
+ props.push_back(mOwner->addProperty_l(property));
+ }
+ props.push_back(mOwner->addProperty_l({
+ .type = FOURCC('i', 's', 'p', 'e'),
+ .width = hasGrid ? mTileWidth : mWidth,
+ .height = hasGrid ? mTileHeight : mHeight,
+ }));
+ mGainmapMetadataItemId = mOwner->addItem_l({
+ .itemType = "tmap",
+ .itemId = metaItemId,
+ .isPrimary = false,
+ .isHidden = false,
+ .offset = (uint32_t)offset,
+ .size = (uint32_t)size,
+ .properties = props,
+ });
+ return;
+ }
+
if (mTileIndex >= mNumTiles) {
ALOGW("Ignoring excess tiles!");
return;
@@ -2532,8 +2628,6 @@
default: break; // don't set if invalid
}
- bool hasGrid = (mTileWidth > 0);
-
if (mProperties.empty()) {
mProperties.push_back(mOwner->addProperty_l({
.type = static_cast<uint32_t>(mIsAvif ?
@@ -2558,7 +2652,7 @@
mTileIndex++;
if (hasGrid) {
- mDimgRefs.value.push_back(mOwner->addItem_l({
+ uint16_t id = mOwner->addItem_l({
.itemType = mIsAvif ? "av01" : "hvc1",
.itemId = mItemIdBase++,
.isPrimary = false,
@@ -2566,7 +2660,12 @@
.offset = (uint32_t)offset,
.size = (uint32_t)size,
.properties = mProperties,
- }));
+ });
+ if (isGainmap && flags_camera::camera_heif_gainmap()) {
+ mGainmapDimgRefs.value.push_back(id);
+ } else {
+ mDimgRefs.value.push_back(id);
+ }
if (mTileIndex == mNumTiles) {
mProperties.clear();
@@ -2581,28 +2680,71 @@
.rotation = heifRotation,
}));
}
- mImageItemId = mOwner->addItem_l({
- .itemType = "grid",
- .itemId = mItemIdBase++,
- .isPrimary = (mIsPrimary != 0),
- .isHidden = false,
- .rows = (uint32_t)mGridRows,
- .cols = (uint32_t)mGridCols,
- .width = (uint32_t)mWidth,
- .height = (uint32_t)mHeight,
- .properties = mProperties,
+ if (mColorAspectsValid && flags_camera::camera_heif_gainmap()) {
+ ItemProperty property;
+ property.type = FOURCC('c', 'o', 'l', 'r');
+ ColorUtils::convertCodecColorAspectsToIsoAspects(
+ mColorAspects, &property.colorPrimaries, &property.colorTransfer,
+ &property.colorMatrix, &property.colorRange);
+ mProperties.push_back(mOwner->addProperty_l(property));
+ }
+ if (!mBitsPerChannel.empty() && flags_camera::camera_heif_gainmap()) {
+ ItemProperty property;
+ property.type = FOURCC('p', 'i', 'x', 'i');
+ property.bitsPerChannel.appendVector(mBitsPerChannel);
+ mProperties.push_back(mOwner->addProperty_l(property));
+ }
+ uint16_t itemId = mOwner->addItem_l({
+ .itemType = "grid",
+ .itemId = mItemIdBase++,
+ .isPrimary = isGainmap && flags_camera::camera_heif_gainmap()
+ ? false
+ : (mIsPrimary != 0),
+ .isHidden = false,
+ .rows = (uint32_t)mGridRows,
+ .cols = (uint32_t)mGridCols,
+ .width = (uint32_t)mWidth,
+ .height = (uint32_t)mHeight,
+ .properties = mProperties,
});
+
+ if (isGainmap && flags_camera::camera_heif_gainmap()) {
+ mGainmapItemId = itemId;
+ } else {
+ mImageItemId = itemId;
+ }
}
} else {
- mImageItemId = mOwner->addItem_l({
- .itemType = mIsAvif ? "av01" : "hvc1",
- .itemId = mItemIdBase++,
- .isPrimary = (mIsPrimary != 0),
- .isHidden = false,
- .offset = (uint32_t)offset,
- .size = (uint32_t)size,
- .properties = mProperties,
+ if (mColorAspectsValid && flags_camera::camera_heif_gainmap()) {
+ ItemProperty property;
+ property.type = FOURCC('c', 'o', 'l', 'r');
+ ColorUtils::convertCodecColorAspectsToIsoAspects(
+ mColorAspects, &property.colorPrimaries, &property.colorTransfer,
+ &property.colorMatrix, &property.colorRange);
+ mProperties.push_back(mOwner->addProperty_l(property));
+ }
+ if (!mBitsPerChannel.empty() && flags_camera::camera_heif_gainmap()) {
+ ItemProperty property;
+ property.type = FOURCC('p', 'i', 'x', 'i');
+ property.bitsPerChannel.appendVector(mBitsPerChannel);
+ mProperties.push_back(mOwner->addProperty_l(property));
+ }
+ uint16_t itemId = mOwner->addItem_l({
+ .itemType = mIsAvif ? "av01" : "hvc1",
+ .itemId = mItemIdBase++,
+ .isPrimary = (isGainmap && flags_camera::camera_heif_gainmap()) ? false
+ : (mIsPrimary != 0),
+ .isHidden = false,
+ .offset = (uint32_t)offset,
+ .size = (uint32_t)size,
+ .properties = mProperties,
});
+
+ if (isGainmap && flags_camera::camera_heif_gainmap()) {
+ mGainmapItemId = itemId;
+ } else {
+ mImageItemId = itemId;
+ }
}
}
@@ -2627,6 +2769,10 @@
}
}
}
+
+ if ((mGainmapItemId > 0) && flags_camera::camera_heif_gainmap()) {
+ mOwner->addRefs_l(mGainmapItemId, mGainmapDimgRefs);
+ }
}
void MPEG4Writer::Track::setTimeScale() {
@@ -3671,19 +3817,68 @@
break;
}
+ bool isGainmapMeta = false;
+ bool isGainmap = false;
bool isExif = false;
uint32_t tiffHdrOffset = 0;
+ uint32_t gainmapOffset = 0;
int32_t isMuxerData;
if (buffer->meta_data().findInt32(kKeyIsMuxerData, &isMuxerData) && isMuxerData) {
- // We only support one type of muxer data, which is Exif data block.
+ if (flags_camera::camera_heif_gainmap()) {
+ isGainmapMeta = isGainmapMetaData(buffer, &gainmapOffset);
+ isGainmap = isGainmapData(buffer, &gainmapOffset);
+ if ((isGainmap || isGainmapMeta) && (gainmapOffset > 0) &&
+ (gainmapOffset < buffer->range_length())) {
+ // Don't include the tmap/gmap header
+ buffer->set_range(gainmapOffset, buffer->range_length() - gainmapOffset);
+ }
+ }
isExif = isExifData(buffer, &tiffHdrOffset);
- if (!isExif) {
- ALOGW("Ignoring bad Exif data block");
+ if (!isExif && !isGainmap && !isGainmapMeta) {
+ ALOGW("Ignoring bad muxer data block");
buffer->release();
buffer = NULL;
continue;
}
}
+ if (flags_camera::camera_heif_gainmap()) {
+ int32_t val32;
+ if (buffer->meta_data().findInt32(kKeyColorPrimaries, &val32)) {
+ mColorAspects.mPrimaries = static_cast<ColorAspects::Primaries>(val32);
+ mColorAspectsValid = true;
+ } else {
+ mColorAspectsValid = false;
+ }
+ if (buffer->meta_data().findInt32(kKeyTransferFunction, &val32)) {
+ mColorAspects.mTransfer = static_cast<ColorAspects::Transfer>(val32);
+ } else {
+ mColorAspectsValid = false;
+ }
+ if (buffer->meta_data().findInt32(kKeyColorMatrix, &val32)) {
+ mColorAspects.mMatrixCoeffs = static_cast<ColorAspects::MatrixCoeffs>(val32);
+ } else {
+ mColorAspectsValid = false;
+ }
+ if (buffer->meta_data().findInt32(kKeyColorRange, &val32)) {
+ mColorAspects.mRange = static_cast<ColorAspects::Range>(val32);
+ } else {
+ mColorAspectsValid = false;
+ }
+ if (mBitsPerChannel.empty() && buffer->meta_data().findInt32(kKeyColorFormat, &val32)) {
+ switch (val32) {
+ case COLOR_FormatYUV420Flexible:
+ case COLOR_FormatYUV420Planar:
+ case COLOR_FormatYUV420SemiPlanar: {
+ uint8_t bitsPerChannel[] = {8, 8, 8};
+ mBitsPerChannel.appendArray(bitsPerChannel, sizeof(bitsPerChannel));
+ }
+ break;
+ default:
+ break;
+ }
+ }
+ }
+
if (!buffer->meta_data().findInt64(kKeySampleFileOffset, &sampleFileOffset)) {
sampleFileOffset = -1;
}
@@ -3709,7 +3904,7 @@
// Make a deep copy of the MediaBuffer and Metadata and release
// the original as soon as we can
- MediaBuffer *copy = new MediaBuffer(buffer->range_length());
+ MediaBuffer* copy = new MediaBuffer(buffer->range_length());
if (sampleFileOffset != -1) {
copy->meta_data().setInt64(kKeySampleFileOffset, sampleFileOffset);
} else {
@@ -4006,13 +4201,13 @@
trackProgressStatus(timestampUs);
}
}
- if (!hasMultipleTracks) {
+ if (!hasMultipleTracks || isGainmapMeta || isGainmap) {
size_t bytesWritten;
off64_t offset = mOwner->addSample_l(
copy, usePrefix, tiffHdrOffset, &bytesWritten);
if (mIsHeif) {
- addItemOffsetAndSize(offset, bytesWritten, isExif);
+ addItemOffsetAndSize(offset, bytesWritten, isExif, isGainmapMeta, isGainmap);
} else {
if (mCo64TableEntries->count() == 0) {
addChunkOffset(offset);
@@ -4315,6 +4510,15 @@
increase += 9; // 'irot' property (worst case)
}
+ if (flags_camera::camera_heif_gainmap()) {
+ // assume we have HDR gainmap and associated metadata
+ increase += (8 + mCodecSpecificDataSize) // 'hvcC' property (HDR gainmap)
+ + (2 * 20) // 'ispe' property
+ + (2 * 16) // 'pixi' property
+ + (2 * 19) // 'colr' property
+ ;
+ }
+
// increase to iref and idat
if (grid) {
increase += (12 + mNumTiles * 2) // 'dimg' in iref
@@ -4328,6 +4532,12 @@
+ 21) // increase to 'iinf'
* (mNumTiles + grid + 1); // "+1" is for 'Exif'
+ if (flags_camera::camera_heif_gainmap()) {
+ increase += (16 // increase to 'iloc'
+ + 21) // increase to 'iinf'
+ * 2; // "2" is for 'tmap', 'gmap'
+ }
+
// When total # of properties is > 127, the properties id becomes 2-byte.
// We write 4 properties at most for each image (2x'ispe', 1x'hvcC', 1x'irot').
// Set the threshold to be 30.
@@ -5499,6 +5709,21 @@
endBox();
}
+void MPEG4Writer::writeGrplBox(const Vector<uint16_t> &items) {
+ if (flags_camera::camera_heif_gainmap()) {
+ beginBox("grpl");
+ beginBox("altr");
+ writeInt32(0); // Version = 0, Flags = 0
+ writeInt32(1); // Group Id
+ writeInt32(items.size());// Number of entities
+ for (size_t i = 0; i < items.size(); i++) {
+ writeInt32(items[i]);// Item Id
+ }
+ endBox();
+ endBox();
+ }
+}
+
void MPEG4Writer::writeIpcoBox() {
beginBox("ipco");
size_t numProperties = mProperties.size();
@@ -5544,6 +5769,32 @@
endBox();
break;
}
+ case FOURCC('c', 'o', 'l', 'r'):
+ {
+ if (flags_camera::camera_heif_gainmap()) {
+ beginBox("colr");
+ writeFourcc("nclx");
+ writeInt16(mProperties[propIndex].colorPrimaries);
+ writeInt16(mProperties[propIndex].colorTransfer);
+ writeInt16(mProperties[propIndex].colorMatrix);
+ writeInt8(int8_t(mProperties[propIndex].colorRange ? 0x80 : 0x0));
+ endBox();
+ }
+ break;
+ }
+ case FOURCC('p', 'i', 'x', 'i'):
+ {
+ if (flags_camera::camera_heif_gainmap()) {
+ beginBox("pixi");
+ writeInt32(0); // Version = 0, Flags = 0
+ writeInt8(mProperties[propIndex].bitsPerChannel.size()); // Number of channels
+ for (size_t i = 0; i < mProperties[propIndex].bitsPerChannel.size(); i++) {
+ writeInt8(mProperties[propIndex].bitsPerChannel[i]); // Channel bit depth
+ }
+ endBox();
+ }
+ break;
+ }
default:
ALOGW("Skipping unrecognized property: type 0x%08x",
mProperties[propIndex].type);
@@ -5598,6 +5849,12 @@
for (auto it = mItems.begin(); it != mItems.end(); it++) {
ItemInfo &item = it->second;
+ if (item.isGainmapMeta() && !item.properties.empty() &&
+ flags_camera::camera_heif_gainmap()) {
+ mAssociationEntryCount++;
+ continue;
+ }
+
if (!item.isImage()) continue;
if (item.isPrimary) {
@@ -5629,11 +5886,27 @@
}
}
+ uint16_t gainmapItemId = 0;
+ uint16_t gainmapMetaItemId = 0;
for (List<Track *>::iterator it = mTracks.begin();
it != mTracks.end(); ++it) {
if ((*it)->isHeif()) {
(*it)->flushItemRefs();
}
+ if (flags_camera::camera_heif_gainmap()) {
+ if ((*it)->getGainmapItemId() > 0) {
+ gainmapItemId = (*it)->getGainmapItemId();
+ }
+ if ((*it)->getGainmapMetaItemId() > 0) {
+ gainmapMetaItemId = (*it)->getGainmapMetaItemId();
+ }
+ }
+ }
+ if ((gainmapItemId > 0) && (gainmapMetaItemId > 0) && flags_camera::camera_heif_gainmap()) {
+ ItemRefs gainmapRefs("dimg");
+ gainmapRefs.value.push_back(mPrimaryItemId);
+ gainmapRefs.value.push_back(gainmapItemId);
+ addRefs_l(gainmapMetaItemId, gainmapRefs);
}
beginBox("meta");
@@ -5649,6 +5922,12 @@
if (mHasRefs) {
writeIrefBox();
}
+ if ((gainmapItemId > 0) && (gainmapMetaItemId > 0) && flags_camera::camera_heif_gainmap()) {
+ Vector<uint16_t> itemIds;
+ itemIds.push_back(gainmapMetaItemId);
+ itemIds.push_back(mPrimaryItemId);
+ writeGrplBox(itemIds);
+ }
endBox();
}
diff --git a/media/libstagefright/MediaCodec.cpp b/media/libstagefright/MediaCodec.cpp
index 35370e7..f917aa2 100644
--- a/media/libstagefright/MediaCodec.cpp
+++ b/media/libstagefright/MediaCodec.cpp
@@ -755,8 +755,7 @@
// consume buffer
sp<IGraphicBufferConsumer> consumer = mConsumer.promote();
if (consumer != nullptr && consumer->acquireBuffer(&buffer, 0) == NO_ERROR) {
- consumer->releaseBuffer(buffer.mSlot, buffer.mFrameNumber,
- EGL_NO_DISPLAY, EGL_NO_SYNC_KHR, buffer.mFence);
+ consumer->releaseBuffer(buffer.mSlot, buffer.mFrameNumber, buffer.mFence);
}
}
diff --git a/media/libstagefright/MediaMuxer.cpp b/media/libstagefright/MediaMuxer.cpp
index 1008445..96e399b 100644
--- a/media/libstagefright/MediaMuxer.cpp
+++ b/media/libstagefright/MediaMuxer.cpp
@@ -19,6 +19,8 @@
#include "webm/WebmWriter.h"
+#include <com_android_internal_camera_flags.h>
+
#include <utils/Log.h>
#include <media/stagefright/MediaMuxer.h>
@@ -38,6 +40,8 @@
#include <media/stagefright/OggWriter.h>
#include <media/stagefright/Utils.h>
+namespace flags_camera = com::android::internal::camera::flags;
+
namespace android {
static bool isMp4Format(MediaMuxer::OutputFormat format) {
@@ -270,6 +274,25 @@
sampleMetaData.setInt64(kKeyLastSampleIndexInChunk, val64);
}
+ if (flags_camera::camera_heif_gainmap()) {
+ int32_t val32;
+ if (bufMeta->findInt32("color-primaries", &val32)) {
+ sampleMetaData.setInt32(kKeyColorPrimaries, val32);
+ }
+ if (bufMeta->findInt32("color-transfer", &val32)) {
+ sampleMetaData.setInt32(kKeyTransferFunction, val32);
+ }
+ if (bufMeta->findInt32("color-matrix", &val32)) {
+ sampleMetaData.setInt32(kKeyColorMatrix, val32);
+ }
+ if (bufMeta->findInt32("color-range", &val32)) {
+ sampleMetaData.setInt32(kKeyColorRange, val32);
+ }
+ if (bufMeta->findInt32(KEY_COLOR_FORMAT, &val32)) {
+ sampleMetaData.setInt32(kKeyColorFormat, val32);
+ }
+ }
+
sp<MediaAdapter> currentTrack = mTrackList[trackIndex];
// This pushBuffer will wait until the mediaBuffer is consumed.
return currentTrack->pushBuffer(mediaBuffer);
diff --git a/media/libstagefright/MediaSync.cpp b/media/libstagefright/MediaSync.cpp
index a3f55da..b640040 100644
--- a/media/libstagefright/MediaSync.cpp
+++ b/media/libstagefright/MediaSync.cpp
@@ -752,8 +752,7 @@
status_t status = mInput->attachBuffer(&consumerSlot, oldBuffer);
ALOGE_IF(status != NO_ERROR, "attaching buffer to input failed (%d)", status);
if (status == NO_ERROR) {
- status = mInput->releaseBuffer(consumerSlot, 0 /* frameNumber */,
- EGL_NO_DISPLAY, EGL_NO_SYNC_KHR, fence);
+ status = mInput->releaseBuffer(consumerSlot, 0 /* frameNumber */, fence);
ALOGE_IF(status != NO_ERROR, "releasing buffer to input failed (%d)", status);
}
diff --git a/media/libstagefright/include/media/stagefright/AudioSource.h b/media/libstagefright/include/media/stagefright/AudioSource.h
index 65d5246..51f6ac4 100644
--- a/media/libstagefright/include/media/stagefright/AudioSource.h
+++ b/media/libstagefright/include/media/stagefright/AudioSource.h
@@ -78,7 +78,7 @@
virtual void signalBufferReturned(MediaBufferBase *buffer);
status_t setInputDevice(audio_port_handle_t deviceId);
- status_t getRoutedDeviceId(audio_port_handle_t* deviceId);
+ status_t getRoutedDeviceIds(DeviceIdVector& deviceIds);
status_t addAudioDeviceCallback(const sp<AudioSystem::AudioDeviceCallback>& callback);
status_t removeAudioDeviceCallback(const sp<AudioSystem::AudioDeviceCallback>& callback);
diff --git a/media/libstagefright/include/media/stagefright/CameraSource.h b/media/libstagefright/include/media/stagefright/CameraSource.h
index f42e315..54e4f18 100644
--- a/media/libstagefright/include/media/stagefright/CameraSource.h
+++ b/media/libstagefright/include/media/stagefright/CameraSource.h
@@ -25,6 +25,8 @@
#include <camera/ICameraRecordingProxy.h>
#include <camera/CameraParameters.h>
#include <gui/BufferItemConsumer.h>
+#include <gui/Surface.h>
+#include <gui/Flags.h>
#include <utils/List.h>
#include <utils/RefBase.h>
#include <utils/String16.h>
@@ -77,7 +79,7 @@
pid_t clientPid,
Size videoSize,
int32_t frameRate,
- const sp<IGraphicBufferProducer>& surface);
+ const sp<SurfaceType>& surface);
virtual ~CameraSource();
@@ -165,7 +167,7 @@
sp<Camera> mCamera;
sp<ICameraRecordingProxy> mCameraRecordingProxy;
sp<DeathNotifier> mDeathNotifier;
- sp<IGraphicBufferProducer> mSurface;
+ sp<SurfaceType> mSurface;
sp<MetaData> mMeta;
int64_t mStartTimeUs;
@@ -180,8 +182,7 @@
CameraSource(const sp<hardware::ICamera>& camera, const sp<ICameraRecordingProxy>& proxy,
int32_t cameraId, const String16& clientName, uid_t clientUid, pid_t clientPid,
- Size videoSize, int32_t frameRate,
- const sp<IGraphicBufferProducer>& surface);
+ Size videoSize, int32_t frameRate, const sp<SurfaceType> & surface);
virtual status_t startCameraRecording();
virtual void releaseRecordingFrame(const sp<IMemory>& frame);
@@ -221,7 +222,7 @@
static const nsecs_t kMemoryBaseAvailableTimeoutNs = 200000000; // 200ms
// Consumer and producer of the buffer queue between this class and camera.
sp<BufferItemConsumer> mVideoBufferConsumer;
- sp<IGraphicBufferProducer> mVideoBufferProducer;
+ sp<SurfaceType> mVideoBufferProducer;
// Memory used to send the buffers to encoder, where sp<IMemory> stores VideoNativeMetadata.
sp<IMemoryHeap> mMemoryHeapBase;
List<sp<IMemory>> mMemoryBases;
diff --git a/media/libstagefright/include/media/stagefright/CameraSourceTimeLapse.h b/media/libstagefright/include/media/stagefright/CameraSourceTimeLapse.h
index 3c311cf..a789b12 100644
--- a/media/libstagefright/include/media/stagefright/CameraSourceTimeLapse.h
+++ b/media/libstagefright/include/media/stagefright/CameraSourceTimeLapse.h
@@ -23,6 +23,7 @@
#include <utils/RefBase.h>
#include <utils/threads.h>
#include <utils/String16.h>
+#include <gui/Flags.h>
namespace android {
@@ -44,7 +45,7 @@
pid_t clientPid,
Size videoSize,
int32_t videoFrameRate,
- const sp<IGraphicBufferProducer>& surface,
+ const sp<SurfaceType>& surface,
int64_t timeBetweenTimeLapseFrameCaptureUs);
virtual ~CameraSourceTimeLapse();
@@ -120,7 +121,7 @@
pid_t clientPid,
Size videoSize,
int32_t videoFrameRate,
- const sp<IGraphicBufferProducer>& surface,
+ const sp<SurfaceType>& surface,
int64_t timeBetweenTimeLapseFrameCaptureUs);
// Wrapper over CameraSource::signalBufferReturned() to implement quick stop.
diff --git a/media/libstagefright/include/media/stagefright/MPEG4Writer.h b/media/libstagefright/include/media/stagefright/MPEG4Writer.h
index ee75129..a409e46 100644
--- a/media/libstagefright/include/media/stagefright/MPEG4Writer.h
+++ b/media/libstagefright/include/media/stagefright/MPEG4Writer.h
@@ -200,6 +200,9 @@
bool isImage() const {
return !strcmp("hvc1", itemType) || !strcmp("av01", itemType) || isGrid();
}
+ bool isGainmapMeta() const {
+ return !strcmp("tmap", itemType);
+ }
const char *itemType;
uint16_t itemId;
bool isPrimary;
@@ -227,6 +230,11 @@
int32_t width;
int32_t height;
int32_t rotation;
+ int32_t colorPrimaries;
+ int32_t colorTransfer;
+ int32_t colorMatrix;
+ bool colorRange;
+ Vector<uint8_t> bitsPerChannel;
sp<ABuffer> data;
} ItemProperty;
@@ -347,6 +355,7 @@
void writeIdatBox();
void writeIrefBox();
void writePitmBox();
+ void writeGrplBox(const Vector<uint16_t> &items);
void writeFileLevelMetaBox();
void sendSessionSummary();
diff --git a/media/libstagefright/webm/Android.bp b/media/libstagefright/webm/Android.bp
index 723131d..c3bd36e 100644
--- a/media/libstagefright/webm/Android.bp
+++ b/media/libstagefright/webm/Android.bp
@@ -11,8 +11,8 @@
name: "libstagefright_webm",
cflags: [
- "-Werror",
"-Wall",
+ "-Werror",
],
sanitize: {
@@ -38,11 +38,12 @@
export_include_dirs: ["include"],
shared_libs: [
+ "framework-permission-aidl-cpp",
+ "libaudiofoundation",
"libdatasource",
+ "liblog",
"libstagefright_foundation",
"libutils",
- "liblog",
- "framework-permission-aidl-cpp",
],
header_libs: [
@@ -51,7 +52,6 @@
],
}
-
cc_library_headers {
name: "libstagefright_webm_headers",
export_include_dirs: ["include"],
diff --git a/media/libstagefright/writer_fuzzers/Android.bp b/media/libstagefright/writer_fuzzers/Android.bp
index 840c6b3c..483175c 100644
--- a/media/libstagefright/writer_fuzzers/Android.bp
+++ b/media/libstagefright/writer_fuzzers/Android.bp
@@ -47,6 +47,7 @@
"libcutils",
"libutils",
"server_configurable_flags",
+ "camera_platform_flags_c_lib",
],
}
diff --git a/media/module/bqhelper/GraphicBufferSource.cpp b/media/module/bqhelper/GraphicBufferSource.cpp
index 82ddbc0..c9082f2 100644
--- a/media/module/bqhelper/GraphicBufferSource.cpp
+++ b/media/module/bqhelper/GraphicBufferSource.cpp
@@ -996,9 +996,8 @@
// somehow need to propagate frame number to that queue
if (buffer->isCached()) {
--mNumOutstandingAcquires;
- mConsumer->releaseBuffer(
- buffer->getSlot(), frameNum, EGL_NO_DISPLAY, EGL_NO_SYNC_KHR,
- buffer->getReleaseFence());
+ mConsumer->releaseBuffer(buffer->getSlot(), frameNum,
+ buffer->getReleaseFence());
}
},
bi.mFence);
diff --git a/media/mtp/MtpFfsHandle.cpp b/media/mtp/MtpFfsHandle.cpp
index 979edab..26e5ddf 100644
--- a/media/mtp/MtpFfsHandle.cpp
+++ b/media/mtp/MtpFfsHandle.cpp
@@ -20,7 +20,6 @@
#include <dirent.h>
#include <errno.h>
#include <fcntl.h>
-#include <memory>
#include <stdio.h>
#include <stdlib.h>
#include <string.h>
@@ -298,9 +297,11 @@
void MtpFfsHandle::close() {
// Join all child threads before destruction
- for (auto& thread : mChildThreads) {
- thread.join();
+ int count = mChildThreads.size();
+ for (int i = 0; i < count; i++) {
+ mChildThreads[i].join();
}
+ mChildThreads.clear();
io_destroy(mCtx);
closeEndpoints();
diff --git a/media/psh_utils/Android.bp b/media/psh_utils/Android.bp
index dafa63b..803de94 100644
--- a/media/psh_utils/Android.bp
+++ b/media/psh_utils/Android.bp
@@ -10,7 +10,7 @@
// libraries that are included whole_static for test apps
ndk_libs = [
"android.hardware.health-V3-ndk",
- "android.hardware.power.stats-V1-ndk",
+ "android.hardware.power.stats-V1-cpp",
]
// Power, System, Health utils
@@ -32,6 +32,7 @@
"com.android.media.audio-aconfig-cc",
"libaudioutils",
"libbase",
+ "libbinder",
"libbinder_ndk",
"libcutils",
"liblog",
diff --git a/media/psh_utils/HealthStatsProvider.cpp b/media/psh_utils/HealthStatsProvider.cpp
index de72463..611c424 100644
--- a/media/psh_utils/HealthStatsProvider.cpp
+++ b/media/psh_utils/HealthStatsProvider.cpp
@@ -18,7 +18,7 @@
#include <aidl/android/hardware/health/IHealth.h>
#include <android-base/logging.h>
#include <android/binder_manager.h>
-#include <psh_utils/ServiceSingleton.h>
+#include <mediautils/ServiceSingleton.h>
using ::aidl::android::hardware::health::HealthInfo;
using ::aidl::android::hardware::health::IHealth;
@@ -26,7 +26,7 @@
namespace android::media::psh_utils {
static auto getHealthService() {
- return getServiceSingleton<IHealth>();
+ return mediautils::getService<IHealth>();
}
status_t HealthStatsDataProvider::fill(PowerStats* stat) const {
diff --git a/media/psh_utils/PowerStats.cpp b/media/psh_utils/PowerStats.cpp
index f8f87c5..89e36e2 100644
--- a/media/psh_utils/PowerStats.cpp
+++ b/media/psh_utils/PowerStats.cpp
@@ -233,14 +233,14 @@
health_stats += other.health_stats;
if (power_entity_state_residency.empty()) {
power_entity_state_residency = other.power_entity_state_residency;
- } else {
+ } else if (power_entity_state_residency.size() == other.power_entity_state_residency.size()) {
for (size_t i = 0; i < power_entity_state_residency.size(); ++i) {
power_entity_state_residency[i] += other.power_entity_state_residency[i];
}
}
if (rail_energy.empty()) {
rail_energy = other.rail_energy;
- } else {
+ } else if (rail_energy.size() == other.rail_energy.size()) {
for (size_t i = 0; i < rail_energy.size(); ++i) {
rail_energy[i] += other.rail_energy[i];
}
@@ -253,14 +253,14 @@
health_stats -= other.health_stats;
if (power_entity_state_residency.empty()) {
power_entity_state_residency = other.power_entity_state_residency;
- } else {
+ } else if (power_entity_state_residency.size() == other.power_entity_state_residency.size()) {
for (size_t i = 0; i < power_entity_state_residency.size(); ++i) {
power_entity_state_residency[i] -= other.power_entity_state_residency[i];
}
}
if (rail_energy.empty()) {
rail_energy = other.rail_energy;
- } else {
+ } else if (rail_energy.size() == other.rail_energy.size()) {
for (size_t i = 0; i < rail_energy.size(); ++i) {
rail_energy[i] -= other.rail_energy[i];
}
diff --git a/media/psh_utils/PowerStatsProvider.cpp b/media/psh_utils/PowerStatsProvider.cpp
index 112c323..033ad95 100644
--- a/media/psh_utils/PowerStatsProvider.cpp
+++ b/media/psh_utils/PowerStatsProvider.cpp
@@ -15,17 +15,17 @@
*/
#include "PowerStatsProvider.h"
-#include <aidl/android/hardware/power/stats/IPowerStats.h>
+#include <android/hardware/power/stats/IPowerStats.h>
#include <android-base/logging.h>
-#include <psh_utils/ServiceSingleton.h>
+#include <mediautils/ServiceSingleton.h>
#include <unordered_map>
-using ::aidl::android::hardware::power::stats::IPowerStats;
+using ::android::hardware::power::stats::IPowerStats;
namespace android::media::psh_utils {
static auto getPowerStatsService() {
- return getServiceSingleton<IPowerStats>();
+ return mediautils::getService<IPowerStats>();
}
status_t RailEnergyDataProvider::fill(PowerStats *stat) const {
@@ -35,9 +35,9 @@
return NO_INIT;
}
- std::unordered_map<int32_t, ::aidl::android::hardware::power::stats::Channel> channelMap;
+ std::unordered_map<int32_t, ::android::hardware::power::stats::Channel> channelMap;
{
- std::vector<::aidl::android::hardware::power::stats::Channel> channels;
+ std::vector<::android::hardware::power::stats::Channel> channels;
if (!powerStatsService->getEnergyMeterInfo(&channels).isOk()) {
LOG(ERROR) << "unable to get energy meter info";
return INVALID_OPERATION;
@@ -47,7 +47,7 @@
}
}
- std::vector<::aidl::android::hardware::power::stats::EnergyMeasurement> measurements;
+ std::vector<::android::hardware::power::stats::EnergyMeasurement> measurements;
if (!powerStatsService->readEnergyMeter({}, &measurements).isOk()) {
LOG(ERROR) << "unable to get energy measurements";
return INVALID_OPERATION;
@@ -86,7 +86,7 @@
std::vector<int32_t> powerEntityIds; // ids to use
{
- std::vector<::aidl::android::hardware::power::stats::PowerEntity> entities;
+ std::vector<::android::hardware::power::stats::PowerEntity> entities;
if (!powerStatsService->getPowerEntityInfo(&entities).isOk()) {
LOG(ERROR) << __func__ << ": unable to get entity info";
return INVALID_OPERATION;
@@ -108,7 +108,7 @@
}
}
- std::vector<::aidl::android::hardware::power::stats::StateResidencyResult> results;
+ std::vector<::android::hardware::power::stats::StateResidencyResult> results;
if (!powerStatsService->getStateResidency(powerEntityIds, &results).isOk()) {
LOG(ERROR) << __func__ << ": Unable to get state residency";
return INVALID_OPERATION;
diff --git a/media/psh_utils/benchmarks/Android.bp b/media/psh_utils/benchmarks/Android.bp
index 2382c69..066771b 100644
--- a/media/psh_utils/benchmarks/Android.bp
+++ b/media/psh_utils/benchmarks/Android.bp
@@ -8,10 +8,9 @@
default_applicable_licenses: ["frameworks_av_license"],
}
-cc_benchmark {
- name: "audio_powerstats_benchmark",
+cc_defaults {
+ name: "audio_psh_utils_benchmark_defaults",
- srcs: ["audio_powerstats_benchmark.cpp"],
cflags: [
"-Wall",
"-Werror",
@@ -22,6 +21,7 @@
shared_libs: [
"libaudioutils",
"libbase",
+ "libbinder",
"libbinder_ndk",
"libcutils",
"liblog",
@@ -31,45 +31,25 @@
}
cc_benchmark {
+ name: "audio_powerstats_benchmark",
+
+ defaults: ["audio_psh_utils_benchmark_defaults"],
+
+ srcs: ["audio_powerstats_benchmark.cpp"],
+}
+
+cc_benchmark {
name: "audio_powerstatscollector_benchmark",
+ defaults: ["audio_psh_utils_benchmark_defaults"],
+
srcs: ["audio_powerstatscollector_benchmark.cpp"],
- cflags: [
- "-Wall",
- "-Werror",
- ],
- static_libs: [
- "libpshutils",
- ],
- shared_libs: [
- "libaudioutils",
- "libbase",
- "libbinder_ndk",
- "libcutils",
- "liblog",
- "libmediautils",
- "libutils",
- ],
}
cc_benchmark {
name: "audio_token_benchmark",
+ defaults: ["audio_psh_utils_benchmark_defaults"],
+
srcs: ["audio_token_benchmark.cpp"],
- cflags: [
- "-Wall",
- "-Werror",
- ],
- static_libs: [
- "libpshutils",
- ],
- shared_libs: [
- "libaudioutils",
- "libbase",
- "libbinder_ndk",
- "libcutils",
- "liblog",
- "libmediautils",
- "libutils",
- ],
}
diff --git a/media/psh_utils/include/psh_utils/ServiceSingleton.h b/media/psh_utils/include/psh_utils/ServiceSingleton.h
deleted file mode 100644
index d0cd6d2..0000000
--- a/media/psh_utils/include/psh_utils/ServiceSingleton.h
+++ /dev/null
@@ -1,67 +0,0 @@
-/*
- * Copyright (C) 2024 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#pragma once
-
-#include <android/binder_auto_utils.h>
-#include <android/binder_manager.h>
-#include <android-base/thread_annotations.h>
-#include <mutex>
-#include <utils/Log.h>
-#include <utils/Timers.h>
-
-namespace android::media::psh_utils {
-
-struct DefaultServiceTraits {
- static constexpr int64_t kThresholdRetryNs = 1'000'000'000;
- static constexpr int64_t kMaxRetries = 5;
- static constexpr const char* kServiceVersion = "/default";
- static constexpr bool kShowLog = true;
-};
-
-template<typename Service, typename ServiceTraits = DefaultServiceTraits>
-std::shared_ptr<Service> getServiceSingleton() {
- [[clang::no_destroy]] static constinit std::mutex m;
- [[clang::no_destroy]] static constinit std::shared_ptr<Service> service GUARDED_BY(m);
- static int64_t nextTryNs GUARDED_BY(m) = 0;
- static int64_t tries GUARDED_BY(m) = 0;
-
- std::lock_guard l(m);
- if (service
- || tries > ServiceTraits::kMaxRetries // try too many times
- || systemTime(SYSTEM_TIME_BOOTTIME) < nextTryNs) { // try too frequently.
- return service;
- }
-
- const auto serviceName = std::string(Service::descriptor)
- .append(ServiceTraits::kServiceVersion);
- service = Service::fromBinder(
- ::ndk::SpAIBinder(AServiceManager_checkService(serviceName.c_str())));
-
- if (!service) {
- // If failed, set a time limit before retry.
- // No need to log an error, it is already done.
- nextTryNs = systemTime(SYSTEM_TIME_BOOTTIME) + ServiceTraits::kThresholdRetryNs;
- ALOGV_IF(ServiceTraits::kShowLog, "service:%s retries:%lld of %lld nextTryNs:%lld",
- Service::descriptor, (long long)tries,
- (long long)kMaxRetries, (long long)nextTryNs);
- ++tries;
- }
-
- return service;
-}
-
-} // namespace android::media::psh_utils
diff --git a/media/psh_utils/tests/Android.bp b/media/psh_utils/tests/Android.bp
index 74589f8..64fc971 100644
--- a/media/psh_utils/tests/Android.bp
+++ b/media/psh_utils/tests/Android.bp
@@ -15,9 +15,11 @@
],
shared_libs: [
"libbase",
+ "libbinder",
"libbinder_ndk",
"libcutils",
"liblog",
+ "libmediautils",
"libutils",
],
static_libs: [
diff --git a/media/tests/benchmark/MediaBenchmarkTest/src/main/java/com/android/media/benchmark/library/BlockModelDecoder.java b/media/tests/benchmark/MediaBenchmarkTest/src/main/java/com/android/media/benchmark/library/BlockModelDecoder.java
new file mode 100644
index 0000000..3b3640e
--- /dev/null
+++ b/media/tests/benchmark/MediaBenchmarkTest/src/main/java/com/android/media/benchmark/library/BlockModelDecoder.java
@@ -0,0 +1,241 @@
+/*
+ * Copyright (C) 2024 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package com.android.media.benchmark.library;
+
+import android.media.MediaCodec;
+import android.media.MediaExtractor;
+import android.media.MediaFormat;
+import android.os.Build;
+import android.util.Log;
+
+import androidx.annotation.NonNull;
+
+import java.io.FileOutputStream;
+import java.io.IOException;
+import java.nio.ByteBuffer;
+import java.util.List;
+
+import com.android.media.benchmark.library.Decoder;
+
+public class BlockModelDecoder extends Decoder {
+ private static final String TAG = BlockModelDecoder.class.getSimpleName();
+ private final boolean DEBUG = false;
+ protected final LinearBlockWrapper mLinearInputBlock = new LinearBlockWrapper();
+
+ /**
+ * Wrapper class for {@link MediaCodec.LinearBlock}
+ */
+ public static class LinearBlockWrapper {
+ private MediaCodec.LinearBlock mBlock;
+ private ByteBuffer mBuffer;
+ private int mOffset;
+
+ public MediaCodec.LinearBlock getBlock() {
+ return mBlock;
+ }
+
+ public ByteBuffer getBuffer() {
+ return mBuffer;
+ }
+
+ public int getBufferCapacity() {
+ return mBuffer == null ? 0 : mBuffer.capacity();
+ }
+
+ public int getOffset() {
+ return mOffset;
+ }
+
+ public void setOffset(int size) {
+ mOffset = size;
+ }
+
+ public boolean allocateBlock(String codec, int size) throws RuntimeException{
+ recycle();
+ mBlock = MediaCodec.LinearBlock.obtain(size, new String[]{codec});
+ if (mBlock == null || !mBlock.isMappable()) {
+ throw new RuntimeException("Linear Block not allocated/mapped");
+ }
+ mBuffer = mBlock.map();
+ mOffset = 0;
+ return true;
+ }
+
+ public void recycle() {
+ if (mBlock != null) {
+ mBlock.recycle();
+ mBlock = null;
+ }
+ mBuffer = null;
+ mOffset = 0;
+ }
+ }
+
+ public BlockModelDecoder() {
+ // empty
+ }
+
+ public void tearDown() {
+ mLinearInputBlock.recycle();
+
+ }
+
+ /**
+ * Decodes the given input buffer,
+ * provided valid list of buffer info and format are passed as inputs.
+ *
+ * @param inputBuffer Decode the provided list of ByteBuffers
+ * @param inputBufferInfo List of buffer info corresponding to provided input buffers
+ * @param asyncMode Will run on async implementation if true
+ * @param format For creating the decoder if codec name is empty and configuring it
+ * @param codecName Will create the decoder with codecName
+ * @return DECODE_SUCCESS if decode was successful, DECODE_DECODER_ERROR for fail,
+ * DECODE_CREATE_ERROR for decoder not created
+ * @throws IOException if the codec cannot be created.
+ */
+ @Override
+ public int decode(@NonNull List<ByteBuffer> inputBuffer,
+ @NonNull List<MediaCodec.BufferInfo> inputBufferInfo, final boolean asyncMode,
+ @NonNull MediaFormat format, String codecName)
+ throws IOException, InterruptedException {
+ setExtraConfigureFlags(MediaCodec.CONFIGURE_FLAG_USE_BLOCK_MODEL);
+ return super.decode(inputBuffer, inputBufferInfo, asyncMode, format, codecName);
+ }
+
+ @Override
+ protected void onInputAvailable(int inputBufferId, MediaCodec mediaCodec) {
+ if (mNumInFramesProvided >= mNumInFramesRequired) {
+ mIndex = mInputBufferInfo.size() - 1;
+ }
+ MediaCodec.BufferInfo bufInfo = mInputBufferInfo.get(mIndex);
+ mSawInputEOS = (bufInfo.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0;
+ if (mLinearInputBlock.getOffset() + bufInfo.size > mLinearInputBlock.getBufferCapacity()) {
+ int requestSize = 8192;
+ requestSize = Math.max(bufInfo.size, requestSize);
+ mLinearInputBlock.allocateBlock(mediaCodec.getCanonicalName(), requestSize);
+ }
+ int codecFlags = 0;
+ if ((bufInfo.flags & MediaExtractor.SAMPLE_FLAG_SYNC) != 0) {
+ codecFlags |= MediaCodec.BUFFER_FLAG_KEY_FRAME;
+ }
+ if ((bufInfo.flags & MediaExtractor.SAMPLE_FLAG_PARTIAL_FRAME) != 0) {
+ codecFlags |= MediaCodec.BUFFER_FLAG_PARTIAL_FRAME;
+ }
+ codecFlags |= mSawInputEOS ? MediaCodec.BUFFER_FLAG_END_OF_STREAM : 0;
+ if (DEBUG) {
+ Log.v(TAG, "input: id: " + inputBufferId
+ + " size: " + bufInfo.size
+ + " pts: " + bufInfo.presentationTimeUs
+ + " flags: " + codecFlags);
+ }
+ mLinearInputBlock.getBuffer().put(mInputBuffer.get(mIndex).array());
+ mNumInFramesProvided++;
+ mIndex = mNumInFramesProvided % (mInputBufferInfo.size() - 1);
+ if (mSawInputEOS) {
+ Log.i(TAG, "Saw Input EOS");
+ }
+ mStats.addFrameSize(bufInfo.size);
+ MediaCodec.QueueRequest request = mCodec.getQueueRequest(inputBufferId);
+ request.setLinearBlock(mLinearInputBlock.getBlock(), mLinearInputBlock.getOffset(),
+ bufInfo.size);
+ request.setPresentationTimeUs(bufInfo.presentationTimeUs);
+ request.setFlags(codecFlags);
+ request.queue();
+ if (bufInfo.size > 0 && (codecFlags & (MediaCodec.BUFFER_FLAG_CODEC_CONFIG
+ | MediaCodec.BUFFER_FLAG_PARTIAL_FRAME)) == 0) {
+ mLinearInputBlock.setOffset(mLinearInputBlock.getOffset() + bufInfo.size);
+ }
+ }
+
+ @Override
+ protected void onOutputAvailable(
+ MediaCodec mediaCodec, int outputBufferId, MediaCodec.BufferInfo outputBufferInfo) {
+ if (mSawOutputEOS || outputBufferId < 0) {
+ return;
+ }
+ mNumOutputFrame++;
+ if (DEBUG) {
+ Log.d(TAG,
+ "In OutputBufferAvailable ,"
+ + " output frame number = " + mNumOutputFrame
+ + " timestamp = " + outputBufferInfo.presentationTimeUs
+ + " size = " + outputBufferInfo.size);
+ }
+ MediaCodec.OutputFrame outFrame = mediaCodec.getOutputFrame(outputBufferId);
+ ByteBuffer outputBuffer = null;
+ try {
+ if (outFrame.getLinearBlock() != null) {
+ outputBuffer = outFrame.getLinearBlock().map();
+ }
+ } catch(IllegalStateException e) {
+ // buffer may not be linear, this is ok
+ // as we are handling non-linear buffers below.
+ }
+ if (mOutputStream != null) {
+ try {
+ if (outputBuffer != null) {
+ byte[] bytesOutput = new byte[outputBuffer.remaining()];
+ outputBuffer.get(bytesOutput);
+ mOutputStream.write(bytesOutput);
+ }
+ } catch (IOException e) {
+ e.printStackTrace();
+ Log.d(TAG, "Error Dumping File: Exception " + e.toString());
+ }
+ }
+ ByteBuffer copiedBuffer = null;
+ int bytesRemaining = 0;
+ if (outputBuffer != null) {
+ bytesRemaining = outputBuffer.remaining();
+ if (mIBufferSend != null) {
+ copiedBuffer = ByteBuffer.allocate(outputBuffer.remaining());
+ copiedBuffer.put(outputBuffer);
+ }
+ outFrame.getLinearBlock().recycle();
+ outputBuffer = null;
+ }
+ if (mFrameReleaseQueue != null) {
+ if (mMime.startsWith("audio/")) {
+ try {
+ mFrameReleaseQueue.pushFrame(outputBufferId, bytesRemaining);
+ } catch (Exception e) {
+ Log.d(TAG, "Error in getting MediaCodec buffer" + e.toString());
+ }
+ } else {
+ mFrameReleaseQueue.pushFrame(mNumOutputFrame, outputBufferId,
+ outputBufferInfo.presentationTimeUs);
+ }
+
+ } else if (mIBufferSend != null) {
+ IBufferXfer.BufferXferInfo info = new IBufferXfer.BufferXferInfo();
+ // TODO: may be inefficient;
+ info.buf = copiedBuffer;
+ info.idx = outputBufferId;
+ info.obj = mediaCodec;
+ info.bytesRead = outputBufferInfo.size;
+ info.presentationTimeUs = outputBufferInfo.presentationTimeUs;
+ info.flag = outputBufferInfo.flags;
+ mIBufferSend.sendBuffer(this, info);
+ } else {
+ mediaCodec.releaseOutputBuffer(outputBufferId, mRender);
+ }
+ mSawOutputEOS = (outputBufferInfo.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0;
+ if (DEBUG && mSawOutputEOS) {
+ Log.i(TAG, "Saw output EOS");
+ }
+ }
+
+}
\ No newline at end of file
diff --git a/media/tests/benchmark/MediaBenchmarkTest/src/main/java/com/android/media/benchmark/library/CodecUtils.java b/media/tests/benchmark/MediaBenchmarkTest/src/main/java/com/android/media/benchmark/library/CodecUtils.java
index f223242..031817b 100644
--- a/media/tests/benchmark/MediaBenchmarkTest/src/main/java/com/android/media/benchmark/library/CodecUtils.java
+++ b/media/tests/benchmark/MediaBenchmarkTest/src/main/java/com/android/media/benchmark/library/CodecUtils.java
@@ -78,4 +78,21 @@
}
return null;
}
+ /**
+ * Returns compression ratio for a given mediaType.
+ * @param mediaType mime type for which compression ratio is to be returned.
+ */
+ public static float getCompressionRatio(String mediaType) {
+ switch (mediaType) {
+ case MediaFormat.MIMETYPE_AUDIO_FLAC:
+ return 0.7f;
+ case MediaFormat.MIMETYPE_AUDIO_G711_MLAW:
+ case MediaFormat.MIMETYPE_AUDIO_G711_ALAW:
+ case MediaFormat.MIMETYPE_AUDIO_MSGSM:
+ return 0.5f;
+ case MediaFormat.MIMETYPE_AUDIO_RAW:
+ return 1.0f;
+ }
+ return 0.1f;
+ }
}
diff --git a/media/tests/benchmark/MediaBenchmarkTest/src/main/java/com/android/media/benchmark/library/Decoder.java b/media/tests/benchmark/MediaBenchmarkTest/src/main/java/com/android/media/benchmark/library/Decoder.java
index e9b337d..2ea0ed2 100644
--- a/media/tests/benchmark/MediaBenchmarkTest/src/main/java/com/android/media/benchmark/library/Decoder.java
+++ b/media/tests/benchmark/MediaBenchmarkTest/src/main/java/com/android/media/benchmark/library/Decoder.java
@@ -18,6 +18,7 @@
import android.view.Surface;
+import android.media.AudioFormat;
import android.media.MediaCodec;
import android.media.MediaCodec.BufferInfo;
import android.media.MediaFormat;
@@ -42,6 +43,7 @@
protected final Object mLock = new Object();
protected MediaCodec mCodec;
+ protected int mExtraFlags = 0;
protected Surface mSurface = null;
protected boolean mRender = false;
protected ArrayList<BufferInfo> mInputBufferInfo;
@@ -58,6 +60,8 @@
protected int mNumOutputFrame;
protected int mIndex;
+ protected boolean mUseFrameReleaseQueue = false;
+
protected ArrayList<ByteBuffer> mInputBuffer;
protected FileOutputStream mOutputStream;
protected FrameReleaseQueue mFrameReleaseQueue = null;
@@ -85,6 +89,11 @@
mIBufferSend = receiver;
return true;
}
+
+ public void setExtraConfigureFlags(int flags) {
+ this.mExtraFlags = flags;
+ }
+
/**
* Setup of decoder
*
@@ -94,17 +103,32 @@
mSignalledError = false;
mOutputStream = outputStream;
}
+
+ /*
+ * This can be used to setup audio decoding, simulating audio playback.
+ */
+ public void setupDecoder(
+ boolean render, boolean useFrameReleaseQueue, int numInFramesRequired) {
+ mRender = render;
+ mUseFrameReleaseQueue = useFrameReleaseQueue;
+ mNumInFramesRequired = numInFramesRequired;
+ mSignalledError = false;
+ setupDecoder(null);
+ }
+
public void setupDecoder(Surface surface, boolean render,
boolean useFrameReleaseQueue, int frameRate) {
setupDecoder(surface, render, useFrameReleaseQueue, frameRate, -1);
}
+
public void setupDecoder(Surface surface, boolean render,
boolean useFrameReleaseQueue, int frameRate, int numInFramesRequired) {
mSignalledError = false;
mOutputStream = null;
mSurface = surface;
mRender = render;
- if (useFrameReleaseQueue) {
+ mUseFrameReleaseQueue = useFrameReleaseQueue;
+ if (mUseFrameReleaseQueue) {
Log.i(TAG, "Using FrameReleaseQueue with frameRate " + frameRate);
mFrameReleaseQueue = new FrameReleaseQueue(mRender, frameRate);
}
@@ -166,6 +190,18 @@
public void onOutputFormatChanged(
@NonNull MediaCodec mediaCodec, @NonNull MediaFormat format) {
Log.i(TAG, "Output format changed. Format: " + format.toString());
+ if (mUseFrameReleaseQueue
+ && mFrameReleaseQueue == null && mMime.startsWith("audio/")) {
+ // start a frame release thread for this configuration.
+ int bytesPerSample = AudioFormat.getBytesPerSample(
+ format.getInteger(MediaFormat.KEY_PCM_ENCODING,
+ AudioFormat.ENCODING_PCM_16BIT));
+ int sampleRate = format.getInteger(MediaFormat.KEY_SAMPLE_RATE);
+ int channelCount = format.getInteger(MediaFormat.KEY_CHANNEL_COUNT);
+ mFrameReleaseQueue = new FrameReleaseQueue(
+ mRender, sampleRate, channelCount, bytesPerSample);
+ mFrameReleaseQueue.setMediaCodec(mCodec);
+ }
}
@Override
@@ -223,11 +259,10 @@
if (asyncMode) {
setCallback(mCodec);
}
- int isEncoder = 0;
if (DEBUG) {
Log.d(TAG, "Media Format : " + format.toString());
}
- mCodec.configure(format, mSurface, null, isEncoder);
+ mCodec.configure(format, mSurface, null, mExtraFlags);
mCodec.start();
Log.i(TAG, "Codec started async mode ? " + asyncMode);
@@ -395,8 +430,17 @@
}
}
if (mFrameReleaseQueue != null) {
- mFrameReleaseQueue.pushFrame(mNumOutputFrame, outputBufferId,
- outputBufferInfo.presentationTimeUs);
+ if (mMime.startsWith("audio/")) {
+ try {
+ ByteBuffer outputBuffer = mediaCodec.getOutputBuffer(outputBufferId);
+ mFrameReleaseQueue.pushFrame(outputBufferId, outputBuffer.remaining());
+ } catch (Exception e) {
+ Log.d(TAG, "Error in getting MediaCodec buffer" + e.toString());
+ }
+ } else {
+ mFrameReleaseQueue.pushFrame(mNumOutputFrame, outputBufferId,
+ outputBufferInfo.presentationTimeUs);
+ }
} else if (mIBufferSend != null) {
IBufferXfer.BufferXferInfo info = new IBufferXfer.BufferXferInfo();
info.buf = mediaCodec.getOutputBuffer(outputBufferId);
diff --git a/media/tests/benchmark/MediaBenchmarkTest/src/main/java/com/android/media/benchmark/library/FrameReleaseQueue.java b/media/tests/benchmark/MediaBenchmarkTest/src/main/java/com/android/media/benchmark/library/FrameReleaseQueue.java
index 20a2573..0861c2c 100644
--- a/media/tests/benchmark/MediaBenchmarkTest/src/main/java/com/android/media/benchmark/library/FrameReleaseQueue.java
+++ b/media/tests/benchmark/MediaBenchmarkTest/src/main/java/com/android/media/benchmark/library/FrameReleaseQueue.java
@@ -29,45 +29,89 @@
public class FrameReleaseQueue {
private static final String TAG = "FrameReleaseQueue";
+ private static final boolean DEBUG = false;
private final String MIME_AV1 = "video/av01";
private final int AV1_SUPERFRAME_DELAY = 6;
private final int THRESHOLD_TIME = 5;
+ private final long HOUR_IN_MS = (60 * 60 * 1000L);
+ private final long MINUTE_IN_MS = (60 * 1000L);
+
private MediaCodec mCodec;
private LinkedBlockingQueue<FrameInfo> mFrameInfoQueue;
private ReleaseThread mReleaseThread;
private AtomicBoolean doFrameRelease = new AtomicBoolean(false);
- private boolean mReleaseJobStarted = false;
+ private AtomicBoolean mReleaseJobStarted = new AtomicBoolean(false);
private boolean mRender = false;
- private int mWaitTime = 40; // milliseconds per frame
+ private long mWaitTime = 40; // milliseconds per frame
private int mWaitTimeCorrection = 0;
private int mCorrectionLoopCount;
- private int firstReleaseTime = -1;
- private int mAllowedDelayTime = THRESHOLD_TIME;
+ protected long firstReleaseTime = -1;
+ private long mAllowedDelayTime = THRESHOLD_TIME;
private int mFrameDelay = 0;
private final ScheduledExecutorService mScheduler = Executors.newScheduledThreadPool(1);
+ public FrameReleaseQueue(boolean render, int frameRate) {
+ this.mFrameInfoQueue = new LinkedBlockingQueue();
+ this.mReleaseThread = new ReleaseThread();
+ this.doFrameRelease.set(true);
+ this.mRender = render;
+ this.mWaitTime = 1000 / frameRate; // wait time in milliseconds per frame
+ int waitTimeRemainder = 1000 % frameRate;
+ int gcd = gcd(frameRate, waitTimeRemainder);
+ this.mCorrectionLoopCount = frameRate / gcd;
+ this.mWaitTimeCorrection = waitTimeRemainder / gcd;
+ Log.i(TAG, "Constructed FrameReleaseQueue with wait time " + this.mWaitTime + " ms");
+ }
+
+ public FrameReleaseQueue(boolean render, int sampleRate, int nChannels, int bytesPerChannel) {
+ this.mFrameInfoQueue = new LinkedBlockingQueue();
+ this.doFrameRelease.set(true);
+ this.mRender = render;
+ this.mReleaseThread = new AudioRendererThread(sampleRate, nChannels, bytesPerChannel);
+ }
private static class FrameInfo {
private int number;
private int bufferId;
private int displayTime;
+ private int bytes;
public FrameInfo(int frameNumber, int frameBufferId, int frameDisplayTime) {
this.number = frameNumber;
this.bufferId = frameBufferId;
this.displayTime = frameDisplayTime;
}
+ public FrameInfo(int frameBufferId, int bytes) {
+ this.bufferId = frameBufferId;
+ this.bytes = bytes;
+ }
}
private class ReleaseThread extends Thread {
private int mLoopCount = 0;
- private int mNextReleaseTime = 0;
+ private long mNextReleaseTime = 0;
+
+ protected void printPlaybackTime() {
+ if (firstReleaseTime == -1) {
+ Log.d(TAG, "Playback Time not initialized");
+ return;
+ }
+ long curTime = getCurSysTime() - firstReleaseTime;
+ long hours = curTime / (HOUR_IN_MS);
+ curTime -= (hours * HOUR_IN_MS);
+ long min = curTime / MINUTE_IN_MS;
+ curTime -= (min * MINUTE_IN_MS);
+ Log.d(TAG, "Playback time: "
+ + hours + "h "
+ + min + "m "
+ + (double)(curTime / (double)1000) +"s");
+ }
@SuppressWarnings("FutureReturnValueIgnored")
public void run() {
/* Check if the release thread wakes up too late */
if (mLoopCount != 0) {
- int delta = getCurSysTime() - mNextReleaseTime;
+ long delta = getCurSysTime() - mNextReleaseTime;
if (delta >= THRESHOLD_TIME) {
Log.d(TAG, "Release thread wake up late by " + delta);
/* For accidental late wake up, we should relax the timestamp
@@ -93,8 +137,8 @@
popAndRelease(false);
} else {
mNextReleaseTime += mWaitTime;
- int curSysTime = getCurSysTime();
- int curMediaTime = curSysTime - firstReleaseTime;
+ long curSysTime = getCurSysTime();
+ long curMediaTime = curSysTime - firstReleaseTime;
while (curFrameInfo != null && curFrameInfo.displayTime > 0 &&
curFrameInfo.displayTime <= curMediaTime) {
if (!((curMediaTime - curFrameInfo.displayTime) <= mAllowedDelayTime)) {
@@ -123,21 +167,86 @@
mNextReleaseTime += mWaitTimeCorrection;
}
mLoopCount += 1;
+ } else {
+ mReleaseJobStarted.set(false);
}
}
}
- public FrameReleaseQueue(boolean render, int frameRate) {
- this.mFrameInfoQueue = new LinkedBlockingQueue();
- this.mReleaseThread = new ReleaseThread();
- this.doFrameRelease.set(true);
- this.mRender = render;
- this.mWaitTime = 1000 / frameRate; // wait time in milliseconds per frame
- int waitTimeRemainder = 1000 % frameRate;
- int gcd = gcd(frameRate, waitTimeRemainder);
- this.mCorrectionLoopCount = frameRate / gcd;
- this.mWaitTimeCorrection = waitTimeRemainder / gcd;
- Log.i(TAG, "Constructed FrameReleaseQueue with wait time " + this.mWaitTime + " ms");
+ private class AudioRendererThread extends ReleaseThread {
+ private final int WAIT_FOR_BUFFER_IN_SEC = 2;
+ private double mTimeAdjustMs = 0;
+ private double mMsForByte = 0;
+ private double mExpectedWakeUpTime = 0;
+ private FrameInfo mCurrentFrameInfo;
+
+ AudioRendererThread(int sampleRate, int nChannels, int bytesPerChannel) {
+ if (DEBUG) {
+ Log.d(TAG, "sampleRate " + sampleRate
+ + " nChannels " + nChannels
+ + " bytesPerChannel " + bytesPerChannel);
+ }
+ this.mMsForByte = 1000 / (double)(sampleRate * nChannels * bytesPerChannel);
+ }
+
+ @Override
+ @SuppressWarnings("FutureReturnValueIgnored")
+ public void run() {
+ long curTime = getCurSysTime();
+ if (DEBUG) {
+ if (firstReleaseTime == -1) {
+ firstReleaseTime = curTime;
+ }
+ printPlaybackTime();
+ }
+ if (mMsForByte == 0) {
+ Log.e(TAG, "Audio rendering not possible, no valid params");
+ return;
+ }
+ if (mCurrentFrameInfo != null) {
+ try {
+ mCodec.releaseOutputBuffer(mCurrentFrameInfo.bufferId, mRender);
+ } catch (IllegalStateException e) {
+ doFrameRelease.set(false);
+ Log.e(TAG, "Threw InterruptedException on releaseOutputBuffer");
+ } finally {
+ mCurrentFrameInfo = null;
+ }
+ }
+ boolean requestedSchedule = false;
+ try {
+ while (doFrameRelease.get() || mFrameInfoQueue.size() > 0) {
+ mCurrentFrameInfo = mFrameInfoQueue.poll(
+ WAIT_FOR_BUFFER_IN_SEC, TimeUnit.SECONDS);
+ if (mCurrentFrameInfo != null) {
+ mTimeAdjustMs = 0;
+ if (mExpectedWakeUpTime != 0) {
+ mTimeAdjustMs = mExpectedWakeUpTime - getCurSysTime();
+ }
+ double sleepTimeUs =
+ (mMsForByte * mCurrentFrameInfo.bytes + mTimeAdjustMs) * 1000;
+ mExpectedWakeUpTime = getCurSysTime() + (sleepTimeUs / 1000);
+ if (DEBUG) {
+ Log.d(TAG, " mExpectedWakeUpTime " + mExpectedWakeUpTime
+ + " Waiting for " + (long)(sleepTimeUs) + "us"
+ + " Now " + getCurSysTime()
+ + " bytes " + mCurrentFrameInfo.bytes
+ + " bufferID " + mCurrentFrameInfo.bufferId);
+ }
+ mScheduler.schedule(
+ mReleaseThread,(long)(sleepTimeUs),TimeUnit.MICROSECONDS);
+ requestedSchedule = true;
+ break;
+ }
+ }
+ } catch(InterruptedException e) {
+ Log.d(TAG, "Interrupted during poll wait");
+ doFrameRelease.set(false);
+ }
+ if (!requestedSchedule) {
+ mReleaseJobStarted.set(false);
+ }
+ }
}
private static int gcd(int a, int b) {
@@ -154,6 +263,19 @@
}
}
+ public boolean pushFrame(int frameBufferId, int bytes) {
+ FrameInfo info = new FrameInfo(frameBufferId, bytes);
+ boolean pushSuccess = mFrameInfoQueue.offer(info);
+ if (!pushSuccess) {
+ Log.e(TAG, "Failed to push frame with buffer id " + info.bufferId);
+ return false;
+ }
+ if (!mReleaseJobStarted.get()) {
+ mScheduler.execute(mReleaseThread);
+ mReleaseJobStarted.set(true);
+ }
+ return true;
+ }
public boolean pushFrame(int frameNumber, int frameBufferId, long frameDisplayTime) {
int frameDisplayTimeMs = (int)(frameDisplayTime/1000);
FrameInfo curFrameInfo = new FrameInfo(frameNumber, frameBufferId, frameDisplayTimeMs);
@@ -163,16 +285,16 @@
return false;
}
- if (!mReleaseJobStarted && frameNumber >= mFrameDelay) {
+ if (!mReleaseJobStarted.get() && frameNumber >= mFrameDelay) {
mScheduler.execute(mReleaseThread);
- mReleaseJobStarted = true;
+ mReleaseJobStarted.set(true);
Log.i(TAG, "Started frame release thread");
}
return true;
}
- private int getCurSysTime() {
- return (int)(System.nanoTime()/1000000);
+ private long getCurSysTime() {
+ return (long)(System.nanoTime() / 1000000L);
}
@SuppressWarnings("FutureReturnValueIgnored")
@@ -196,7 +318,7 @@
public void stopFrameRelease() {
doFrameRelease.set(false);
- while (mFrameInfoQueue.size() > 0) {
+ while (mReleaseJobStarted.get()) {
try {
TimeUnit.SECONDS.sleep(1);
} catch (InterruptedException e) {
diff --git a/media/tests/benchmark/MediaBenchmarkTest/src/main/java/com/android/media/benchmark/library/IBufferXfer.java b/media/tests/benchmark/MediaBenchmarkTest/src/main/java/com/android/media/benchmark/library/IBufferXfer.java
index c97a35c..bbc3d48 100644
--- a/media/tests/benchmark/MediaBenchmarkTest/src/main/java/com/android/media/benchmark/library/IBufferXfer.java
+++ b/media/tests/benchmark/MediaBenchmarkTest/src/main/java/com/android/media/benchmark/library/IBufferXfer.java
@@ -16,6 +16,8 @@
package com.android.media.benchmark.library;
import android.media.MediaCodec;
+
+import java.util.ArrayDeque;
import java.nio.ByteBuffer;
/**
* interfaces that can be used to implement
@@ -26,10 +28,11 @@
public ByteBuffer buf;
public int idx;
public Object obj;
- int flag;
- int bytesRead;
- boolean isComplete = true;
- long presentationTimeUs;
+ public ArrayDeque<MediaCodec.BufferInfo> infos;
+ public int flag;
+ public int bytesRead;
+ public boolean isComplete = true;
+ public long presentationTimeUs;
}
public interface IReceiveBuffer {
diff --git a/media/tests/benchmark/MediaBenchmarkTest/src/main/java/com/android/media/benchmark/library/IBufferXferImpl.java b/media/tests/benchmark/MediaBenchmarkTest/src/main/java/com/android/media/benchmark/library/IBufferXferImpl.java
index 3e6cee1..c68ac8a 100644
--- a/media/tests/benchmark/MediaBenchmarkTest/src/main/java/com/android/media/benchmark/library/IBufferXferImpl.java
+++ b/media/tests/benchmark/MediaBenchmarkTest/src/main/java/com/android/media/benchmark/library/IBufferXferImpl.java
@@ -96,10 +96,10 @@
pBuf.info.buf.remaining() +" C:" + cBuf.info.buf.remaining());
}
}
+ cBuf.info.infos = pBuf.info.infos;
cBuf.info.bytesRead = bytesRead;
cBuf.info.presentationTimeUs = pBuf.info.presentationTimeUs;
cBuf.info.flag = pBuf.info.flag;
-
if (pBuf.rIface != null) {
pBuf.rIface.receiveBuffer(pBuf.info);
}
diff --git a/media/tests/benchmark/MediaBenchmarkTest/src/main/java/com/android/media/benchmark/library/MultiAccessUnitBlockModelDecoder.java b/media/tests/benchmark/MediaBenchmarkTest/src/main/java/com/android/media/benchmark/library/MultiAccessUnitBlockModelDecoder.java
new file mode 100644
index 0000000..d1a5d79
--- /dev/null
+++ b/media/tests/benchmark/MediaBenchmarkTest/src/main/java/com/android/media/benchmark/library/MultiAccessUnitBlockModelDecoder.java
@@ -0,0 +1,267 @@
+/*
+ * Copyright (C) 2024 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package com.android.media.benchmark.library;
+
+import android.media.AudioFormat;
+import android.media.MediaCodec;
+import android.media.MediaExtractor;
+import android.media.MediaFormat;
+import android.os.Build;
+import android.util.Log;
+
+import androidx.annotation.NonNull;
+
+import java.io.FileOutputStream;
+import java.io.IOException;
+import java.nio.ByteBuffer;
+
+import java.util.ArrayDeque;
+import java.util.Iterator;
+import java.util.List;
+
+import com.android.media.benchmark.library.CodecUtils;
+import com.android.media.benchmark.library.BlockModelDecoder;
+
+public class MultiAccessUnitBlockModelDecoder extends BlockModelDecoder {
+ private static final String TAG = MultiAccessUnitBlockModelDecoder.class.getSimpleName();
+ private final ArrayDeque<MediaCodec.BufferInfo> mInputInfos = new ArrayDeque<>();
+ private final boolean DEBUG = false;
+ protected int mMaxInputSize = 0;
+
+ public MultiAccessUnitBlockModelDecoder() {
+ // empty
+ }
+
+ /**
+ * Decodes the given input buffer,
+ * provided valid list of buffer info and format are passed as inputs.
+ *
+ * @param inputBuffer Decode the provided list of ByteBuffers
+ * @param inputBufferInfo List of buffer info corresponding to provided input buffers
+ * @param asyncMode Will run on async implementation if true
+ * @param format For creating the decoder if codec name is empty and configuring it
+ * @param codecName Will create the decoder with codecName
+ * @return DECODE_SUCCESS if decode was successful, DECODE_DECODER_ERROR for fail,
+ * DECODE_CREATE_ERROR for decoder not created
+ * @throws IOException if the codec cannot be created.
+ */
+ @Override
+ public int decode(@NonNull List<ByteBuffer> inputBuffer,
+ @NonNull List<MediaCodec.BufferInfo> inputBufferInfo, final boolean asyncMode,
+ @NonNull MediaFormat format, String codecName)
+ throws IOException, InterruptedException {
+ setExtraConfigureFlags(MediaCodec.CONFIGURE_FLAG_USE_BLOCK_MODEL);
+ configureMaxInputSize(format);
+ return super.decode(inputBuffer, inputBufferInfo, asyncMode, format, codecName);
+ }
+
+ protected void configureMaxInputSize(MediaFormat format) {
+ final String mime = format.getString(MediaFormat.KEY_MIME);
+ final int maxOutputSize = format.getNumber(
+ MediaFormat.KEY_BUFFER_BATCH_MAX_OUTPUT_SIZE, 0).intValue();
+ int maxInputSizeInBytes = 0;
+ if (format.containsKey(MediaFormat.KEY_MAX_INPUT_SIZE)) {
+ maxInputSizeInBytes = format.getNumber(
+ MediaFormat.KEY_MAX_INPUT_SIZE, 0).intValue();
+ }
+ mMaxInputSize = Math.max(maxInputSizeInBytes,
+ (int) (maxOutputSize * CodecUtils.getCompressionRatio(mime)));
+ }
+
+ @Override
+ public void setCallback(MediaCodec codec) {
+ mCodec.setCallback(new MediaCodec.Callback() {
+ boolean isUsingLargeFrameMode = false;
+
+ @Override
+ public void onInputBufferAvailable(
+ @NonNull MediaCodec mediaCodec, int inputBufferId) {
+ try {
+ mStats.addInputTime();
+ if (isUsingLargeFrameMode) {
+ onInputsAvailable(inputBufferId, mediaCodec);
+ } else {
+ onInputAvailable(inputBufferId, mediaCodec);
+ }
+ } catch (Exception e) {
+ e.printStackTrace();
+ Log.e(TAG, e.toString());
+ }
+ }
+
+ @Override
+ public void onOutputBufferAvailable(@NonNull MediaCodec mediaCodec,
+ int outputBufferId, @NonNull MediaCodec.BufferInfo bufferInfo) {
+ mStats.addOutputTime();
+ onOutputAvailable(mediaCodec, outputBufferId, bufferInfo);
+ if (mSawOutputEOS) {
+ synchronized (mLock) { mLock.notify(); }
+ }
+ }
+
+ @Override
+ public void onOutputBuffersAvailable(
+ @NonNull MediaCodec mediaCodec,
+ int outputBufferId, @NonNull ArrayDeque<MediaCodec.BufferInfo> infos) {
+ int i = 0;
+ while(i++ < infos.size()) {
+ mStats.addOutputTime();
+ }
+ onOutputsAvailable(mediaCodec, outputBufferId, infos);
+ if (mSawOutputEOS) {
+ synchronized (mLock) { mLock.notify(); }
+ }
+ }
+
+ @Override
+ public void onOutputFormatChanged(
+ @NonNull MediaCodec mediaCodec, @NonNull MediaFormat format) {
+ Log.i(TAG, "Output format changed. Format: " + format.toString());
+ final int maxOutputSize = format.getNumber(
+ MediaFormat.KEY_BUFFER_BATCH_MAX_OUTPUT_SIZE, 0).intValue();
+ isUsingLargeFrameMode = (maxOutputSize > 0);
+ configureMaxInputSize(format);
+ if (mUseFrameReleaseQueue && mFrameReleaseQueue == null) {
+ int bytesPerSample = AudioFormat.getBytesPerSample(
+ format.getInteger(MediaFormat.KEY_PCM_ENCODING,
+ AudioFormat.ENCODING_PCM_16BIT));
+ int sampleRate = format.getInteger(MediaFormat.KEY_SAMPLE_RATE);
+ int channelCount = format.getInteger(MediaFormat.KEY_CHANNEL_COUNT);
+ mFrameReleaseQueue = new FrameReleaseQueue(
+ mRender, sampleRate, channelCount, bytesPerSample);
+ mFrameReleaseQueue.setMediaCodec(mCodec);
+ }
+ }
+
+ @Override
+ public void onError(
+ @NonNull MediaCodec mediaCodec, @NonNull MediaCodec.CodecException e) {
+ mSignalledError = true;
+ Log.e(TAG, "Codec Error: " + e.toString());
+ e.printStackTrace();
+ synchronized (mLock) { mLock.notify(); }
+ }
+ });
+
+ }
+
+ protected void onInputsAvailable(int inputBufferId, MediaCodec mediaCodec) {
+ if (inputBufferId >= 0) {
+ mLinearInputBlock.allocateBlock(mediaCodec.getCanonicalName(), mMaxInputSize);
+ MediaCodec.BufferInfo bufInfo;
+ mInputInfos.clear();
+ int offset = 0;
+ while (mNumInFramesProvided < mNumInFramesRequired) {
+ bufInfo = mInputBufferInfo.get(mIndex);
+ mSawInputEOS = (bufInfo.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0;
+ int bufferSizeNeeded = mLinearInputBlock.getOffset() + bufInfo.size;
+ if (bufferSizeNeeded > mLinearInputBlock.getBufferCapacity()) {
+ break;
+ }
+ mLinearInputBlock.getBuffer().put(mInputBuffer.get(mIndex).array());
+ mLinearInputBlock.setOffset(mLinearInputBlock.getOffset() + bufInfo.size);
+ bufInfo.offset = offset; offset += bufInfo.size;
+ mInputInfos.add(bufInfo);
+ mNumInFramesProvided++;
+ mIndex = mNumInFramesProvided % (mInputBufferInfo.size() - 1);
+
+ }
+ if (DEBUG) {
+ Log.d(TAG, "inputsAvailable ID : " + inputBufferId
+ + " queued info size: " + mInputInfos.size()
+ + " Total queued size: " + offset);
+ }
+ if (mNumInFramesProvided >= mNumInFramesRequired) {
+ mIndex = mInputBufferInfo.size() - 1;
+ bufInfo = mInputBufferInfo.get(mIndex);
+ int bufferSizeNeeded = mLinearInputBlock.getOffset() + bufInfo.size;
+ if (bufferSizeNeeded <= mLinearInputBlock.getBufferCapacity()) {
+ if ((bufInfo.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) == 0) {
+ Log.e(TAG, "Error in EOS flag for Decoder");
+ }
+ mSawInputEOS = (bufInfo.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0;
+ mLinearInputBlock.getBuffer().put(mInputBuffer.get(mIndex).array());
+ mLinearInputBlock.setOffset(mLinearInputBlock.getOffset() + bufInfo.size);
+ bufInfo.offset = offset; offset += bufInfo.size;
+ //bufInfo.flags = codecFlags;
+ mInputInfos.add(bufInfo);
+ mNumInFramesProvided++;
+ }
+ }
+ if (mInputInfos.size() == 0) {
+ Log.d(TAG, " No inputs to queue");
+ } else {
+ mStats.addFrameSize(offset);
+ MediaCodec.QueueRequest request = mediaCodec.getQueueRequest(inputBufferId);
+ request.setMultiFrameLinearBlock(mLinearInputBlock.getBlock(), mInputInfos);
+ request.queue();
+ }
+ }
+ }
+
+ protected void onOutputsAvailable(MediaCodec mediaCodec, int outputBufferId,
+ ArrayDeque<MediaCodec.BufferInfo> infos) {
+ if (mSawOutputEOS || outputBufferId < 0) {
+ return;
+ }
+ MediaCodec.OutputFrame outFrame = mediaCodec.getOutputFrame(outputBufferId);
+ ByteBuffer outputBuffer = null;
+ try {
+ if (outFrame.getLinearBlock() != null) {
+ outputBuffer = outFrame.getLinearBlock().map();
+ }
+ } catch(IllegalStateException e) {
+ // buffer may not be linear, this is ok
+ // as we are handling non-linear buffers below.
+ }
+ if (mOutputStream != null) {
+ try {
+ if (outputBuffer != null) {
+ byte[] bytesOutput = new byte[outputBuffer.remaining()];
+ outputBuffer.get(bytesOutput);
+ mOutputStream.write(bytesOutput);
+ if (DEBUG) {
+ Log.d(TAG, "Received outputs buffer size : " + outputBuffer.remaining()
+ + " infos size " + infos.size());
+ }
+ }
+ } catch (IOException e) {
+ e.printStackTrace();
+ Log.d(TAG, "Error Dumping File: Exception " + e.toString());
+ }
+ }
+ mNumOutputFrame += infos.size();
+ MediaCodec.BufferInfo last = infos.peekLast();
+ if (last != null) {
+ mSawOutputEOS |= ((last.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0);
+ }
+ int bytesRemaining = 0;
+ if (outputBuffer != null) {
+ bytesRemaining = outputBuffer.remaining();
+ outFrame.getLinearBlock().recycle();
+ outputBuffer = null;
+ }
+ if (mFrameReleaseQueue != null) {
+ mFrameReleaseQueue.pushFrame(outputBufferId, bytesRemaining);
+ } else if (mIBufferSend == null) {
+ mediaCodec.releaseOutputBuffer(outputBufferId, mRender);
+ }
+ if (mSawOutputEOS) {
+ Log.i(TAG, "Large frame - saw output EOS");
+ }
+ }
+
+}
\ No newline at end of file
diff --git a/media/tests/benchmark/MediaBenchmarkTest/src/main/java/com/android/media/benchmark/library/MultiAccessUnitDecoder.java b/media/tests/benchmark/MediaBenchmarkTest/src/main/java/com/android/media/benchmark/library/MultiAccessUnitDecoder.java
index cb92f06..fd8859b 100644
--- a/media/tests/benchmark/MediaBenchmarkTest/src/main/java/com/android/media/benchmark/library/MultiAccessUnitDecoder.java
+++ b/media/tests/benchmark/MediaBenchmarkTest/src/main/java/com/android/media/benchmark/library/MultiAccessUnitDecoder.java
@@ -18,6 +18,7 @@
import android.view.Surface;
+import android.media.AudioFormat;
import android.media.MediaCodec;
import android.media.MediaCodec.BufferInfo;
import android.media.MediaFormat;
@@ -91,8 +92,18 @@
@NonNull MediaCodec mediaCodec, @NonNull MediaFormat format) {
Log.i(TAG, "Output format changed. Format: " + format.toString());
final int maxOutputSize = format.getNumber(
- MediaFormat.KEY_BUFFER_BATCH_MAX_OUTPUT_SIZE, 0).intValue();
+ MediaFormat.KEY_BUFFER_BATCH_MAX_OUTPUT_SIZE, 0).intValue();
isUsingLargeFrameMode = (maxOutputSize > 0);
+ if (mUseFrameReleaseQueue && mFrameReleaseQueue == null) {
+ int bytesPerSample = AudioFormat.getBytesPerSample(
+ format.getInteger(MediaFormat.KEY_PCM_ENCODING,
+ AudioFormat.ENCODING_PCM_16BIT));
+ int sampleRate = format.getInteger(MediaFormat.KEY_SAMPLE_RATE);
+ int channelCount = format.getInteger(MediaFormat.KEY_CHANNEL_COUNT);
+ mFrameReleaseQueue = new FrameReleaseQueue(
+ mRender, sampleRate, channelCount, bytesPerSample);
+ mFrameReleaseQueue.setMediaCodec(mCodec);
+ }
}
@Override
@@ -177,30 +188,6 @@
if (mSawOutputEOS || outputBufferId < 0) {
return;
}
- Iterator<BufferInfo> iter = infos.iterator();
- while (iter.hasNext()) {
- BufferInfo bufferInfo = iter.next();
- mNumOutputFrame++;
- if (DEBUG) {
- Log.d(TAG,
- "In OutputBufferAvailable ,"
- + " output frame number = " + mNumOutputFrame
- + " timestamp = " + bufferInfo.presentationTimeUs
- + " size = " + bufferInfo.size);
- }
- if (mIBufferSend != null) {
- IBufferXfer.BufferXferInfo info = new IBufferXfer.BufferXferInfo();
- info.buf = mc.getOutputBuffer(outputBufferId);
- info.idx = outputBufferId;
- info.obj = mc;
- info.bytesRead = bufferInfo.size;
- info.presentationTimeUs = bufferInfo.presentationTimeUs;
- info.flag = bufferInfo.flags;
- info.isComplete = iter.hasNext() ? false : true;
- mIBufferSend.sendBuffer(this, info);
- }
- mSawOutputEOS |= (bufferInfo.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0;
- }
if (mOutputStream != null) {
try {
ByteBuffer outputBuffer = mc.getOutputBuffer(outputBufferId);
@@ -212,12 +199,27 @@
Log.d(TAG, "Error Dumping File: Exception " + e.toString());
}
}
- if (mIBufferSend == null) {
+ mNumOutputFrame += infos.size();
+ MediaCodec.BufferInfo last = infos.peekLast();
+ if (last != null) {
+ mSawOutputEOS |= ((last.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0);
+ }
+ if (mIBufferSend != null) {
+ IBufferXfer.BufferXferInfo info = new IBufferXfer.BufferXferInfo();
+ info.buf = mc.getOutputBuffer(outputBufferId);
+ info.idx = outputBufferId;
+ info.obj = mc;
+ info.infos = infos;
+ mIBufferSend.sendBuffer(this, info);
+ } else if (mFrameReleaseQueue != null) {
+ ByteBuffer outputBuffer = mc.getOutputBuffer(outputBufferId);
+ mFrameReleaseQueue.pushFrame(
+ outputBufferId, outputBuffer.remaining());
+ } else {
mc.releaseOutputBuffer(outputBufferId, mRender);
}
if (mSawOutputEOS) {
Log.i(TAG, "Large frame - saw output EOS");
}
- // we don't support frame release queue for large audio frame
}
}
diff --git a/media/utils/Android.bp b/media/utils/Android.bp
index e340b40..762984e 100644
--- a/media/utils/Android.bp
+++ b/media/utils/Android.bp
@@ -53,6 +53,7 @@
"Process.cpp",
"ProcessInfo.cpp",
"SchedulingPolicyService.cpp",
+ "ServiceSingleton.cpp",
"ServiceUtilities.cpp",
"ThreadSnapshot.cpp",
"TimeCheck.cpp",
@@ -89,6 +90,7 @@
"libaudioutils", // for clock.h, Statistics.h
"libbase",
"libbinder",
+ "libbinder_ndk",
"libcutils",
"libhidlbase",
"liblog",
@@ -112,6 +114,8 @@
],
export_shared_lib_headers: [
+ "libaudioutils",
+ "libbinder_ndk",
"libpermission",
"packagemanager_aidl-cpp",
],
diff --git a/media/utils/ServiceSingleton.cpp b/media/utils/ServiceSingleton.cpp
new file mode 100644
index 0000000..ade7a3e
--- /dev/null
+++ b/media/utils/ServiceSingleton.cpp
@@ -0,0 +1,66 @@
+/*
+ * Copyright (C) 2024 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "ServiceSingleton"
+
+#include <mediautils/ServiceSingleton.h>
+
+namespace android::mediautils {
+
+namespace details {
+
+// To prevent multiple instances in different linkages,
+// we anchor the singleton in a .cpp instead of inlining in the header.
+
+template<typename T>
+requires (std::is_same_v<T, const char*> || std::is_same_v<T, String16>)
+std::shared_ptr<ServiceHandler> ServiceHandler::getInstance(const T& name) {
+ using Key = std::conditional_t<std::is_same_v<T, String16>, String16, std::string>;
+ [[clang::no_destroy]] static constinit std::mutex mutex;
+ [[clang::no_destroy]] static constinit std::shared_ptr<
+ std::map<Key, std::shared_ptr<ServiceHandler>>> map GUARDED_BY(mutex);
+ static constinit bool init GUARDED_BY(mutex) = false;
+
+ std::lock_guard l(mutex);
+ if (!init) {
+ map = std::make_shared<std::map<Key, std::shared_ptr<ServiceHandler>>>();
+ init = true;
+ }
+
+ auto& handler = (*map)[name];
+ if (!handler) {
+ handler = std::make_shared<ServiceHandler>();
+ if constexpr (std::is_same_v<T, String16>) {
+ handler->init_cpp();
+ } else /* constexpr */ {
+ handler->init_ndk();
+ }
+ }
+ return handler;
+}
+
+// Explicit template function instantiation.
+template
+std::shared_ptr<ServiceHandler> ServiceHandler::getInstance<const char*>(const char* const& name);
+
+template
+std::shared_ptr<ServiceHandler> ServiceHandler::getInstance<String16>(const String16& name);
+
+} // details
+
+} // namespace android::mediautils
+
diff --git a/media/utils/ServiceUtilities.cpp b/media/utils/ServiceUtilities.cpp
index e13f8f7..39a172f 100644
--- a/media/utils/ServiceUtilities.cpp
+++ b/media/utils/ServiceUtilities.cpp
@@ -18,6 +18,7 @@
#define LOG_TAG "ServiceUtilities"
#include <audio_utils/clock.h>
+#include <android-base/properties.h>
#include <binder/AppOpsManager.h>
#include <binder/IPCThreadState.h>
#include <binder/IServiceManager.h>
@@ -28,9 +29,11 @@
#include <media/AidlConversionUtil.h>
#include <android/content/AttributionSourceState.h>
-#include <iterator>
#include <algorithm>
+#include <iterator>
+#include <mutex>
#include <pwd.h>
+#include <thread>
/* When performing permission checks we do not use permission cache for
* runtime permissions (protection level dangerous) as they may change at
@@ -42,6 +45,7 @@
namespace android {
namespace {
+constexpr auto PERMISSION_GRANTED = permission::PermissionChecker::PERMISSION_GRANTED;
constexpr auto PERMISSION_HARD_DENIED = permission::PermissionChecker::PERMISSION_HARD_DENIED;
}
@@ -73,21 +77,44 @@
return packages[0];
}
+// NOTE/TODO(b/379754682):
+// AUDIO_SOURCE_VOICE_DOWNLINK and AUDIO_SOURCE_VOICE_CALL are handled specially:
+// DOWNLINK is an output source, but we still require RecordOp in addition to
+// OP_RECORD_INCOMING_PHONE_AUDIO
+// CALL includes both uplink and downlink, but we attribute RECORD_OP (only), since
+// there is not support for noting multiple ops.
int32_t getOpForSource(audio_source_t source) {
switch (source) {
- case AUDIO_SOURCE_HOTWORD:
- return AppOpsManager::OP_RECORD_AUDIO_HOTWORD;
+ // BEGIN output sources
+ case AUDIO_SOURCE_FM_TUNER:
+ return AppOpsManager::OP_NONE;
case AUDIO_SOURCE_ECHO_REFERENCE: // fallthrough
case AUDIO_SOURCE_REMOTE_SUBMIX:
+ // TODO -- valid in all cases?
return AppOpsManager::OP_RECORD_AUDIO_OUTPUT;
case AUDIO_SOURCE_VOICE_DOWNLINK:
return AppOpsManager::OP_RECORD_INCOMING_PHONE_AUDIO;
+ // END output sources
+ case AUDIO_SOURCE_HOTWORD:
+ return AppOpsManager::OP_RECORD_AUDIO_HOTWORD;
case AUDIO_SOURCE_DEFAULT:
default:
return AppOpsManager::OP_RECORD_AUDIO;
}
}
+bool isRecordOpRequired(audio_source_t source) {
+ switch (source) {
+ case AUDIO_SOURCE_FM_TUNER:
+ case AUDIO_SOURCE_ECHO_REFERENCE: // fallthrough
+ case AUDIO_SOURCE_REMOTE_SUBMIX:
+ // case AUDIO_SOURCE_VOICE_DOWNLINK:
+ return false;
+ default:
+ return true;
+ }
+}
+
std::optional<AttributionSourceState> resolveAttributionSource(
const AttributionSourceState& callerAttributionSource, const uint32_t virtualDeviceId) {
AttributionSourceState nextAttributionSource = callerAttributionSource;
@@ -119,7 +146,8 @@
return std::optional<AttributionSourceState>{myAttributionSource};
}
- static int checkRecordingInternal(const AttributionSourceState &attributionSource,
+
+static int checkRecordingInternal(const AttributionSourceState &attributionSource,
const uint32_t virtualDeviceId,
const String16 &msg, bool start, audio_source_t source) {
// Okay to not track in app ops as audio server or media server is us and if
@@ -128,32 +156,47 @@
// user is active, but it is a core system service so let it through.
// TODO(b/141210120): UserManager.DISALLOW_RECORD_AUDIO should not affect system user 0
uid_t uid = VALUE_OR_FATAL(aidl2legacy_int32_t_uid_t(attributionSource.uid));
- if (isAudioServerOrMediaServerOrSystemServerOrRootUid(uid)) return true;
-
- // We specify a pid and uid here as mediaserver (aka MediaRecorder or StageFrightRecorder)
- // may open a record track on behalf of a client. Note that pid may be a tid.
- // IMPORTANT: DON'T USE PermissionCache - RUNTIME PERMISSIONS CHANGE.
- std::optional<AttributionSourceState> resolvedAttributionSource =
- resolveAttributionSource(attributionSource, virtualDeviceId);
- if (!resolvedAttributionSource.has_value()) {
- return false;
- }
+ if (isAudioServerOrMediaServerOrSystemServerOrRootUid(uid)) return PERMISSION_GRANTED;
const int32_t attributedOpCode = getOpForSource(source);
+ if (isRecordOpRequired(source)) {
+ // We specify a pid and uid here as mediaserver (aka MediaRecorder or StageFrightRecorder)
+ // may open a record track on behalf of a client. Note that pid may be a tid.
+ // IMPORTANT: DON'T USE PermissionCache - RUNTIME PERMISSIONS CHANGE.
+ std::optional<AttributionSourceState> resolvedAttributionSource =
+ resolveAttributionSource(attributionSource, virtualDeviceId);
+ if (!resolvedAttributionSource.has_value()) {
+ return PERMISSION_HARD_DENIED;
+ }
- permission::PermissionChecker permissionChecker;
- int permitted;
- if (start) {
- permitted = permissionChecker.checkPermissionForStartDataDeliveryFromDatasource(
- sAndroidPermissionRecordAudio, resolvedAttributionSource.value(), msg,
- attributedOpCode);
+ permission::PermissionChecker permissionChecker;
+ int permitted;
+ if (start) {
+ permitted = permissionChecker.checkPermissionForStartDataDeliveryFromDatasource(
+ sAndroidPermissionRecordAudio, resolvedAttributionSource.value(), msg,
+ attributedOpCode);
+ } else {
+ permitted = permissionChecker.checkPermissionForPreflightFromDatasource(
+ sAndroidPermissionRecordAudio, resolvedAttributionSource.value(), msg,
+ attributedOpCode);
+ }
+
+ return permitted;
} else {
- permitted = permissionChecker.checkPermissionForPreflightFromDatasource(
- sAndroidPermissionRecordAudio, resolvedAttributionSource.value(), msg,
- attributedOpCode);
+ if (attributedOpCode == AppOpsManager::OP_NONE) return PERMISSION_GRANTED; // nothing to do
+ AppOpsManager ap{};
+ PermissionController pc{};
+ return ap.startOpNoThrow(
+ attributedOpCode, attributionSource.uid,
+ resolveCallingPackage(pc,
+ String16{attributionSource.packageName.value_or("").c_str()},
+ attributionSource.uid),
+ false,
+ attributionSource.attributionTag.has_value()
+ ? String16{attributionSource.attributionTag.value().c_str()}
+ : String16{},
+ msg);
}
-
- return permitted;
}
static constexpr int DEVICE_ID_DEFAULT = 0;
@@ -185,19 +228,32 @@
uid_t uid = VALUE_OR_FATAL(aidl2legacy_int32_t_uid_t(attributionSource.uid));
if (isAudioServerOrMediaServerOrSystemServerOrRootUid(uid)) return;
- // We specify a pid and uid here as mediaserver (aka MediaRecorder or StageFrightRecorder)
- // may open a record track on behalf of a client. Note that pid may be a tid.
- // IMPORTANT: DON'T USE PermissionCache - RUNTIME PERMISSIONS CHANGE.
- const std::optional<AttributionSourceState> resolvedAttributionSource =
- resolveAttributionSource(attributionSource, virtualDeviceId);
- if (!resolvedAttributionSource.has_value()) {
- return;
- }
-
const int32_t attributedOpCode = getOpForSource(source);
- permission::PermissionChecker permissionChecker;
- permissionChecker.finishDataDeliveryFromDatasource(attributedOpCode,
- resolvedAttributionSource.value());
+ if (isRecordOpRequired(source)) {
+ // We specify a pid and uid here as mediaserver (aka MediaRecorder or StageFrightRecorder)
+ // may open a record track on behalf of a client. Note that pid may be a tid.
+ // IMPORTANT: DON'T USE PermissionCache - RUNTIME PERMISSIONS CHANGE.
+ const std::optional<AttributionSourceState> resolvedAttributionSource =
+ resolveAttributionSource(attributionSource, virtualDeviceId);
+ if (!resolvedAttributionSource.has_value()) {
+ return;
+ }
+
+ permission::PermissionChecker permissionChecker;
+ permissionChecker.finishDataDeliveryFromDatasource(attributedOpCode,
+ resolvedAttributionSource.value());
+ } else {
+ if (attributedOpCode == AppOpsManager::OP_NONE) return; // nothing to do
+ AppOpsManager ap{};
+ PermissionController pc{};
+ ap.finishOp(attributedOpCode, attributionSource.uid,
+ resolveCallingPackage(
+ pc, String16{attributionSource.packageName.value_or("").c_str()},
+ attributionSource.uid),
+ attributionSource.attributionTag.has_value()
+ ? String16{attributionSource.attributionTag.value().c_str()}
+ : String16{});
+ }
}
bool captureAudioOutputAllowed(const AttributionSourceState& attributionSource) {
@@ -245,6 +301,21 @@
return ok;
}
+bool bypassConcurrentPolicyAllowed(const AttributionSourceState& attributionSource) {
+ uid_t uid = VALUE_OR_FATAL(aidl2legacy_int32_t_uid_t(attributionSource.uid));
+ uid_t pid = VALUE_OR_FATAL(aidl2legacy_int32_t_pid_t(attributionSource.pid));
+ if (isAudioServerOrRootUid(uid)) return true;
+ static const String16 sBypassConcurrentPolicy(
+ "android.permission.BYPASS_CONCURRENT_RECORD_AUDIO_RESTRICTION");
+ // PermissionCache is acceptable here: this is not a runtime (dangerous) permission,
+ // so the grant cannot change for the lifetime of the process.
+ bool ok = PermissionCache::checkPermission(sBypassConcurrentPolicy, pid, uid);
+ if (!ok) {
+ ALOGV("Request requires android.permission.BYPASS_CONCURRENT_RECORD_AUDIO_RESTRICTION");
+ }
+ return ok;
+}
+
bool accessUltrasoundAllowed(const AttributionSourceState& attributionSource) {
uid_t uid = VALUE_OR_FATAL(aidl2legacy_int32_t_uid_t(attributionSource.uid));
uid_t pid = VALUE_OR_FATAL(aidl2legacy_int32_t_pid_t(attributionSource.pid));
@@ -396,6 +467,106 @@
return NO_ERROR;
}
+// TODO(b/285588444), clean this up on main, but soak it for backporting purposes for now
+namespace {
+class BluetoothPermissionCache {
+ static constexpr auto SYSPROP_NAME = "cache_key.system_server.package_info";
+ const String16 BLUETOOTH_PERM {"android.permission.BLUETOOTH_CONNECT"};
+ mutable std::mutex mLock;
+ // Cached property conditionally defined, since only avail on bionic. On host, don't inval cache
+#if defined(__BIONIC__)
+ // Unlocked, but only accessed from mListenerThread
+ base::CachedProperty mCachedProperty;
+#endif
+ // This thread is designed to never join/terminate, so no signal is fine
+ const std::thread mListenerThread;
+ GUARDED_BY(mLock)
+ std::string mPropValue;
+ GUARDED_BY(mLock)
+ std::unordered_map<uid_t, bool> mCache;
+ PermissionController mPc{};
+public:
+ BluetoothPermissionCache()
+#if defined(__BIONIC__)
+ : mCachedProperty{SYSPROP_NAME},
+ mListenerThread([this]() mutable {
+ while (true) {
+ std::string newVal = mCachedProperty.WaitForChange() ?: "";
+ std::lock_guard l{mLock};
+ if (newVal != mPropValue) {
+ ALOGV("Bluetooth permission update");
+ mPropValue = newVal;
+ mCache.clear();
+ }
+ }
+ })
+#endif
+ {}
+
+ bool checkPermission(uid_t uid, pid_t pid) {
+ std::lock_guard l{mLock};
+ auto it = mCache.find(uid);
+ if (it == mCache.end()) {
+ it = mCache.insert({uid, mPc.checkPermission(BLUETOOTH_PERM, pid, uid)}).first;
+ }
+ return it->second;
+ }
+};
+
+// Don't call this from locks, since it potentially calls up to system server!
+// Check for non-app UIDs above this method!
+bool checkBluetoothPermission(const AttributionSourceState& attr) {
+ [[clang::no_destroy]] static BluetoothPermissionCache impl{};
+ return impl.checkPermission(attr.uid, attr.pid);
+}
+} // anonymous
+
+/**
+ * Determines if the MAC address in Bluetooth device descriptors returned by APIs of
+ * a native audio service (audio flinger, audio policy) must be anonymized.
+ * MAC addresses returned to system server or apps with BLUETOOTH_CONNECT permission
+ * are not anonymized.
+ *
+ * @param attributionSource The attribution source of the calling app.
+ * @param caller string identifying the caller for logging.
+ * @return true if the MAC addresses must be anonymized, false otherwise.
+ */
+bool mustAnonymizeBluetoothAddress(
+ const AttributionSourceState& attributionSource, const String16&) {
+ uid_t uid = VALUE_OR_FATAL(aidl2legacy_int32_t_uid_t(attributionSource.uid));
+ bool res;
+ switch(multiuser_get_app_id(uid)) {
+ case AID_ROOT:
+ case AID_SYSTEM:
+ case AID_RADIO:
+ case AID_BLUETOOTH:
+ case AID_MEDIA:
+ case AID_AUDIOSERVER:
+ // Don't anonymize for privileged clients
+ res = false;
+ break;
+ default:
+ res = !checkBluetoothPermission(attributionSource);
+ break;
+ }
+ ALOGV("%s uid: %d, result: %d", __func__, uid, res);
+ return res;
+}
+
+/**
+ * Modifies the passed MAC address string in place for consumption by unprivileged clients.
+ * the string is assumed to have a valid MAC address format.
+ * the anonymization must be kept in sync with toAnonymizedAddress() in BluetoothUtils.java
+ *
+ * @param address input/output the char string containing the MAC address to anonymize.
+ */
+void anonymizeBluetoothAddress(char *address) {
+ if (address == nullptr || strlen(address) != strlen("AA:BB:CC:DD:EE:FF")) {
+ return;
+ }
+ memcpy(address, "XX:XX:XX:XX", strlen("XX:XX:XX:XX"));
+}
+
sp<content::pm::IPackageManagerNative> MediaPackageManager::retrievePackageManager() {
const sp<IServiceManager> sm = defaultServiceManager();
if (sm == nullptr) {
diff --git a/media/utils/include/mediautils/BinderGenericUtils.h b/media/utils/include/mediautils/BinderGenericUtils.h
new file mode 100644
index 0000000..c2bbde1
--- /dev/null
+++ b/media/utils/include/mediautils/BinderGenericUtils.h
@@ -0,0 +1,388 @@
+/*
+ * Copyright (C) 2024 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#pragma once
+
+#include <android/binder_auto_utils.h>
+#include <android/binder_interface_utils.h>
+#include <android/binder_manager.h>
+#include <binder/IServiceManager.h>
+
+namespace android::mediautils {
+// General Template Binder Utilities.
+//
+// In order to write generic Template methods, we need to have utility methods
+// that provide seamless template overload resolution between NDK and CPP variants.
+//
+
+// Returns true or false based on whether the Interface is a NDK Interface.
+template<typename Interface>
+inline constexpr bool is_ndk = std::derived_from<Interface, ::ndk::ICInterface>;
+
+// Returns the Interface ptr type (shared_ptr or sp) based on the Interface.
+template<typename Interface>
+using InterfaceType =
+ std::conditional_t <is_ndk<Interface>, std::shared_ptr<Interface>, sp<Interface>>;
+
+template<typename Interface>
+using BaseInterfaceType = std::conditional_t <is_ndk<Interface>,
+std::shared_ptr<::ndk::ICInterface>, sp<::android::IInterface>>;
+
+/**
+ * Returns either a sp<IBinder> or an SpAIBinder object
+ * for the AIDL interface given.
+ *
+ * A -cpp interface will return sp<IBinder>.
+ * A -ndk interface will return SpAIBinder
+ */
+template<typename Interface>
+sp<IBinder> binderFromInterface(const sp<Interface> &interface) {
+ return IInterface::asBinder(interface);
+}
+
+template<typename Interface>
+::ndk::SpAIBinder binderFromInterface(const std::shared_ptr<Interface> &interface) {
+ return interface->asBinder();
+}
+
+/**
+ * Returns either a sp<Interface> or a std::shared_ptr<Interface> from a Binder object.
+ *
+ * A -cpp interface will return sp<Interface>.
+ * A -ndk interface will return std::shared_ptr<Interface>
+ */
+template<typename Interface>
+sp<Interface> interfaceFromBinder(const sp<IBinder> &binder) {
+ return interface_cast<Interface>(binder);
+}
+
+template<typename Interface>
+std::shared_ptr<Interface> interfaceFromBinder(const ::ndk::SpAIBinder &binder) {
+ return Interface::fromBinder(binder);
+}
+
+/**
+ * Returns either a sp<Interface> or a std::shared_ptr<Interface> from
+ * the NDK/CPP base interface class.
+ */
+template<typename Interface>
+sp<Interface> interfaceFromBase(const sp<::android::IInterface> &interface) {
+ // this is unvalidated, though could verify getInterfaceDescriptor() == Interface::descriptor
+ return sp<Interface>::cast(interface);
+}
+
+template<typename Interface>
+std::shared_ptr<Interface> interfaceFromBase(
+ const std::shared_ptr<::ndk::ICInterface> &interface) {
+ // this is unvalidated, though could verify
+ // !strcmp(AIBinder_Class_getDescriptor(AIBinder_getClass(...), Interface::descriptor)
+ return std::static_pointer_cast<Interface>(interface);
+}
+
+/**
+ * Returns a fully qualified service name.
+ *
+ * @param name
+ * If name is empty, it returns the name from the Service descriptor.
+ * If name starts with '/', it appends the name as a version to the Service descriptor,
+ * e.g. "/default".
+ * Otherwise the name is assumed to be the full Service name, overriding the
+ * Service descriptor.
+ */
+template<typename Service>
+auto fullyQualifiedServiceName(const char* const name) {
+ using StringType = std::conditional_t<is_ndk<Service>, std::string, String16>;
+ return name == nullptr ? StringType(Service::descriptor)
+ : name[0] != 0 && name[0] != '/' ? StringType(name)
+ : StringType(Service::descriptor) + StringType(name);
+}
+
+/**
+ * Returns either a std::shared_ptr<Interface> or sp<Interface>
+ * for the AIDL interface given.
+ *
+ * A -cpp interface will return sp<Service>.
+ * A -ndk interface will return std::shared_ptr<Service>
+ *
+ * @param name if non-empty should contain either a suffix if it starts
+ * with a '/' such as "/default", or the full service name.
+ */
+template<typename Service>
+auto checkServicePassThrough(const char *const name = "") {
+ if constexpr(is_ndk<Service>)
+ {
+ const auto serviceName = fullyQualifiedServiceName<Service>(name);
+ return Service::fromBinder(
+ ::ndk::SpAIBinder(AServiceManager_checkService(serviceName.c_str())));
+ } else /* constexpr */ {
+ const auto serviceName = fullyQualifiedServiceName<Service>(name);
+ auto binder = defaultServiceManager()->checkService(serviceName);
+ return interface_cast<Service>(binder);
+ }
+}
+
+template<typename Service>
+void addService(const std::shared_ptr<Service> &service) {
+ AServiceManager_addService(binderFromInterface(service), Service::descriptor);
+}
+
+template<typename Service>
+void addService(const sp<Service> &service) {
+ defaultServiceManager()->addService(Service::descriptor, binderFromInterface(service));
+}
+
+namespace details {
+
+// Use the APIs below, not the details here.
+
+/**
+ * RequestServiceManagerCallback(Cpp|Ndk) is a RAII class that
+ * requests a ServiceManager callback.
+ *
+ * Note the ServiceManager is a single threaded "apartment" and only one
+ * transaction is active, hence:
+ *
+ * 1) After the RequestServiceManagerCallback object is destroyed, no
+ *    calls to the onService function are pending or will occur.
+ * 2) To prevent deadlock, do not construct or destroy the class with
+ * a lock held that the onService function also requires.
+ */
+template<typename Service>
+class RequestServiceManagerCallbackCpp {
+public:
+ explicit RequestServiceManagerCallbackCpp(
+ std::function<void(const sp<Service> &)> &&onService,
+ const char *const serviceName = ""
+ )
+ : mServiceName{fullyQualifiedServiceName<Service>(serviceName)},
+ mWaiter{sp<Waiter>::make(std::move(onService))},
+ mStatus{defaultServiceManager()->registerForNotifications(mServiceName,
+ mWaiter)} {
+ }
+
+ ~RequestServiceManagerCallbackCpp() {
+ if (mStatus == OK) {
+ defaultServiceManager()->unregisterForNotifications(mServiceName, mWaiter);
+ }
+ }
+
+ status_t getStatus() const {
+ return mStatus;
+ }
+
+private:
+ const String16 mServiceName;
+ const sp<IServiceManager::LocalRegistrationCallback> mWaiter;
+ const status_t mStatus;
+
+ // With some work here, we could make this a singleton to improve
+ // performance and reduce binder clutter.
+ class Waiter : public IServiceManager::LocalRegistrationCallback {
+ public:
+ explicit Waiter(std::function<void(const sp<Service> &)> &&onService)
+ : mOnService{std::move(onService)} {}
+
+ private:
+ void onServiceRegistration(
+ const String16 & /*name*/, const sp<IBinder> &binder) final {
+ mOnService(interface_cast<Service>(binder));
+ }
+
+ const std::function<void(const sp<Service> &)> mOnService;
+ };
+};
+
+template<typename Service>
+class RequestServiceManagerCallbackNdk {
+public:
+ explicit RequestServiceManagerCallbackNdk(
+ std::function<void(const std::shared_ptr<Service> &)> &&onService,
+ const char *const serviceName = ""
+ )
+ : mServiceName{fullyQualifiedServiceName<Service>(serviceName)},
+ mOnService{std::move(onService)},
+ mWaiter{AServiceManager_registerForServiceNotifications(
+ mServiceName.c_str(),
+ onRegister, this)} // must be registered after mOnService.
+ {}
+
+ ~RequestServiceManagerCallbackNdk() {
+ if (mWaiter) {
+ AServiceManager_NotificationRegistration_delete(mWaiter);
+ }
+ }
+
+ status_t getStatus() const {
+ return mWaiter != nullptr ? OK : INVALID_OPERATION;
+ }
+
+private:
+ const std::string mServiceName; // must keep a local copy.
+ const std::function<void(const std::shared_ptr<Service> &)> mOnService;
+ AServiceManager_NotificationRegistration *const mWaiter; // last.
+
+ static void onRegister(const char *instance, AIBinder *registered, void *cookie) {
+ (void) instance;
+ auto *callbackHandler = static_cast<RequestServiceManagerCallbackNdk<Service> *>(cookie);
+ callbackHandler->mOnService(Service::fromBinder(::ndk::SpAIBinder(registered)));
+ }
+};
+
+/**
+ * RequestDeathNotification(Cpp|Ndk) is a RAII class that
+ * requests a death notification.
+ *
+ * Note the ServiceManager is a single threaded "apartment" and only one
+ * transaction is active, hence:
+ *
+ * 1) After the RequestDeathNotification object is destroyed, no
+ *    calls to the onBinderDied function are pending or will occur.
+ * 2) To prevent deadlock, do not construct or destroy the class with
+ * a lock held that the onBinderDied function also requires.
+ */
+
+class RequestDeathNotificationCpp {
+ class DeathRecipientHelper : public IBinder::DeathRecipient {
+ public:
+ explicit DeathRecipientHelper(std::function<void()> &&onBinderDied)
+ : mOnBinderDied{std::move(onBinderDied)} {
+ }
+
+ void binderDied(const wp<IBinder> &weakBinder) final {
+ (void) weakBinder;
+ mOnBinderDied();
+ }
+
+ private:
+ const std::function<void()> mOnBinderDied;
+ };
+
+public:
+ RequestDeathNotificationCpp(const sp<IBinder> &binder,
+ std::function<void()> &&onBinderDied)
+ : mHelper{sp<DeathRecipientHelper>::make(std::move(onBinderDied))},
+ mWeakBinder{binder}, mStatus{binder->linkToDeath(mHelper)} {
+ ALOGW_IF(mStatus != OK, "%s: linkToDeath status:%d", __func__, mStatus);
+ }
+
+ ~RequestDeathNotificationCpp() {
+ if (mStatus == OK) {
+ const auto binder = mWeakBinder.promote();
+ if (binder) binder->unlinkToDeath(mHelper);
+ }
+ }
+
+ status_t getStatus() const {
+ return mStatus;
+ }
+
+private:
+ const sp<DeathRecipientHelper> mHelper;
+ const wp<IBinder> mWeakBinder;
+ const status_t mStatus;
+};
+
+class RequestDeathNotificationNdk {
+public:
+ RequestDeathNotificationNdk(
+ const ::ndk::SpAIBinder &binder, std::function<void()> &&onBinderDied)
+ : mOnBinderDied(std::move(onBinderDied)),
+ mRecipient(::AIBinder_DeathRecipient_new(OnBinderDiedStatic),
+ &AIBinder_DeathRecipient_delete), mStatus{AIBinder_linkToDeath(
+ binder.get(), mRecipient.get(), /* cookie */ this)} {
+ ALOGW_IF(mStatus != OK, "%s: AIBinder_linkToDeath status:%d", __func__, mStatus);
+ // We do not use AIBinder_DeathRecipient_setOnUnlinked() to do resource deallocation
+ // as the functor mOnBinderDied is kept alive by this class.
+ }
+
+ ~RequestDeathNotificationNdk() {
+ // The AIBinder_DeathRecipient dtor automatically unlinks all registered notifications,
+ // so AIBinder_unlinkToDeath() is not needed here (elsewise we need to maintain a
+ // AIBinder_Weak here).
+ }
+
+ status_t getStatus() const {
+ return mStatus;
+ }
+
+private:
+ void onBinderDied() {
+ mOnBinderDied();
+ }
+
+ static void OnBinderDiedStatic(void *cookie) {
+ reinterpret_cast<RequestDeathNotificationNdk *>(cookie)->onBinderDied();
+ }
+
+ const std::function<void()> mOnBinderDied;
+ const std::unique_ptr<AIBinder_DeathRecipient, decltype(
+ &AIBinder_DeathRecipient_delete)>
+ mRecipient;
+ const status_t mStatus; // binder_status_t is a limited subset of status_t
+};
+
+} // details
+
+/**
+ * Requests a notification that service is available.
+ *
+ * An opaque handle is returned - after clearing it is guaranteed that
+ * no callback will occur.
+ *
+ * The callback will be of form:
+ * onService(const sp<Service>& service);
+ * onService(const std::shared_ptr<Service>& service);
+ */
+template<typename Service, typename F>
+std::shared_ptr<void> requestServiceNotification(
+ F onService, const char *const serviceName = "") {
+ // the following are used for callbacks but placed here for invalidate.
+ using RequestServiceManagerCallback = std::conditional_t<is_ndk<Service>,
+ details::RequestServiceManagerCallbackNdk<Service>,
+ details::RequestServiceManagerCallbackCpp<Service>>;
+ const auto ptr = std::make_shared<RequestServiceManagerCallback>(
+ onService, serviceName);
+ const auto status = ptr->getStatus();
+ return status == OK ? ptr : nullptr;
+}
+
+/**
+ * Requests a death notification.
+ *
+ * An opaque handle is returned - after clearing it is guaranteed that
+ * no notification will occur.
+ *
+ * The callback will be of form void onBinderDied();
+ */
+template<typename Service>
+std::shared_ptr<void> requestDeathNotification(
+ const sp<Service> &service, std::function<void()> &&onBinderDied) {
+ const auto ptr = std::make_shared<details::RequestDeathNotificationCpp>(
+ binderFromInterface(service), std::move(onBinderDied));
+ const auto status = ptr->getStatus();
+ return status == OK ? ptr : nullptr;
+}
+
+template<typename Service>
+std::shared_ptr<void> requestDeathNotification(
+ const std::shared_ptr<Service> &service, std::function<void()> &&onBinderDied) {
+ const auto ptr = std::make_shared<details::RequestDeathNotificationNdk>(
+ binderFromInterface(service), std::move(onBinderDied));
+ const auto status = ptr->getStatus();
+ return status == OK ? ptr : nullptr;
+}
+
+} // namespace android::mediautils
diff --git a/media/utils/include/mediautils/ServiceSingleton.h b/media/utils/include/mediautils/ServiceSingleton.h
new file mode 100644
index 0000000..644d9cd
--- /dev/null
+++ b/media/utils/include/mediautils/ServiceSingleton.h
@@ -0,0 +1,464 @@
+/*
+ * Copyright (C) 2024 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#pragma once
+
+#include "BinderGenericUtils.h"
+
+#include <android-base/thread_annotations.h>
+#include <audio_utils/mutex.h>
+#include <chrono>
+#include <map>
+#include <mutex>
+#include <utils/Log.h>
+#include <utils/Timers.h>
+
+/**
+ * ServiceSingleton provides a non-blocking NDK/CPP compatible service cache.
+ *
+ * This is a specialized cache that allows per-service configuration.
+ *
+ * Features:
+ *
+ * 1) Seamless compatibility with NDK and CPP based interfaces.
+ * 2) Time-out based service acquisition.
+ * Set the maximum time to wait for any service.
+ * 3) Service prefetch:
+ * Reduce start-up by prefetching service in advance (not on demand).
+ * Prefetch is automatically installed by getService().
+ * 4) Manual interface setting for test and non-service manager acquisition support.
+ *
+ * If both NDK and CPP interfaces are available, we prefer the CPP version
+ * for the following reasons:
+ * 1) Established sp<> reference counting avoids mistakes. NDK tends to be error-prone.
+ * 2) Possible reduced binder object clutter by a singleton notification binder object.
+ * Fewer binder objects are more efficient for the binder driver and ServiceManager.
+ * For example, fewer binder deaths means less ServiceManager (linear time) cleanup.
+ * A single binder object also offers binder access serialization.
+ * 3) CPP offers slightly better efficiency as it is closer to the
+ * actual implementation, a minor detail and effect.
+ *
+ * We use a per-service ServiceHandler object to collect methods and implementation details.
+ * Currently this is separate for NDK and CPP interfaces to the same service;
+ * unification is possible by using ibinder_internals.h.
+ */
+namespace android::mediautils {
+
+enum ServiceOptions {
+ kNone = 0,
+ kNonNull = (1 << 0), // don't return a null interface unless disabled.
+ // partially implemented and experimental.
+};
+
+// Traits may come through a constexpr static function collection.
+// This participates in small buffer optimization SBO in std::function impl.
+template <typename Service>
+struct DefaultServiceTraits {
+ // getServiceName() returns the name associated with Service.
+ //
+ // If name is empty, it returns the name from the Service descriptor.
+ // If name starts with '/', it appends the name as a version to the Service descriptor,
+ // e.g. "/default".
+ // Otherwise the name is assumed to be the Service name.
+ static constexpr const char* getServiceName() { return "/default"; }
+
+ // This callback is called when a new service is received.
+ // The callback requires at least one thread in the Binder threadpool.
+ static constexpr void onNewService(const InterfaceType<Service>&) {}
+
+ // This callback is called if the service has died.
+ // The callback requires at least one thread in the Binder threadpool.
+ static constexpr void onServiceDied(const InterfaceType<Service>&) {}
+
+ // ServiceOptions configured for the Service.
+ static constexpr ServiceOptions options() { return ServiceOptions::kNone; }
+};
+
+// We store the traits as functors.
+template <typename Service>
+struct FunctionalServiceTraits {
+ template <typename ServiceTraits>
+ explicit FunctionalServiceTraits(const ServiceTraits& serviceTraits)
+ : getServiceName{serviceTraits.getServiceName}
+ , onNewService{serviceTraits.onNewService}
+ , onServiceDied{serviceTraits.onServiceDied}
+ , options{serviceTraits.options} {
+ }
+ std::function<const char*()> getServiceName;
+ std::function<void(const InterfaceType<Service>& service)> onNewService;
+ std::function<void(const InterfaceType<Service>& service)> onServiceDied;
+ std::function<ServiceOptions()> options;
+};
+
+namespace details {
+
+class ServiceHandler
+{
+public:
+ /**
+ * Returns a ServiceHandler, templated type T is String16 for the native type
+ * of the CPP service descriptors and const char* for the native type of the NDK
+ * service descriptors.
+ */
+ template<typename T>
+ requires (std::is_same_v<T, const char*> || std::is_same_v<T, String16>)
+ static std::shared_ptr<ServiceHandler> getInstance(const T& name);
+
+ /**
+ * Initializes the service handler with new service traits
+ * (methods that are triggered on service events).
+ *
+ * This is optional. Default construction of traits is allowed for
+ * services that do not require special handling.
+ *
+ * @param serviceTraits
+ * @return true if the service handler had been previously initialized.
+ */
+ template<typename Service, typename ServiceTraits>
+ bool init(const ServiceTraits& serviceTraits) {
+ auto traits = std::make_shared<FunctionalServiceTraits<Service>>(serviceTraits);
+ std::shared_ptr<void> oldTraits;
+ std::lock_guard l(mMutex);
+ std::swap(oldTraits, mTraits);
+ const bool existing = oldTraits != nullptr;
+ mTraits = std::move(traits);
+ mSkip = false;
+ return existing;
+ }
+
+ /**
+ * Returns the service based on a timeout.
+ *
+ * @param waitNs the time to wait, internally clamped to (0, INT64_MAX / 2) to
+ * avoid numeric overflow.
+ * @param useCallback installs a callback instead of polling.
+ *        The callback persists if the call times out. A callback requires
+ *        at least one thread in the binder threadpool.
+ * @return Service interface.
+ */
+ template <typename Service>
+ auto get(std::chrono::nanoseconds waitNs, bool useCallback) {
+ audio_utils::unique_lock ul(mMutex);
+ auto& service = std::get<BaseInterfaceType<Service>>(mService);
+
+ if (mSkip || (service && mValid)) return service; // early check.
+
+ // clamp to avoid numeric overflow. INT64_MAX / 2 is effectively forever for a device.
+ std::chrono::nanoseconds kWaitLimitNs(
+ std::numeric_limits<decltype(waitNs.count())>::max() / 2);
+ waitNs = std::clamp(waitNs, decltype(waitNs)(0), kWaitLimitNs);
+ const auto end = std::chrono::steady_clock::now() + waitNs;
+
+ for (bool first = true; true; first = false) {
+ // we may have released mMutex, so see if service has been obtained.
+ if (mSkip || (service && mValid)) return service;
+
+ const auto traits = getTraits_l<Service>();
+
+ // first time or not using callback, check the service.
+ if (first || !useCallback) {
+ auto service_new = checkServicePassThrough<Service>(
+ traits->getServiceName());
+ if (service_new) {
+ mValid = true;
+ service = std::move(service_new);
+ setDeathNotifier_l<Service>();
+ auto service_fixed = service; // we're releasing the mutex.
+ ul.unlock();
+ traits->onNewService(interfaceFromBase<Service>(service_fixed));
+ mCv.notify_all();
+ return service_fixed;
+ }
+ }
+
+ // install service callback if needed.
+ if (useCallback && !mServiceNotificationHandle) {
+ setServiceNotifier_l<Service>();
+ }
+
+ // check time expiration.
+ const auto now = std::chrono::steady_clock::now();
+ if (now >= end
+ && (service || !(traits->options() & ServiceOptions::kNonNull))) {
+ return service;
+ }
+
+ // compute time to wait, then wait.
+ if (mServiceNotificationHandle) {
+ mCv.wait_until(ul, end);
+ } else {
+ const auto target = now + kPollTime;
+ mCv.wait_until(ul, std::min(target, end));
+ }
+ // loop back to see if we have any state change.
+ }
+ }
+
+ /**
+ * Sets an externally provided service override.
+ *
+ * @param Service
+ * @param service_new
+ */
+ template<typename Service>
+ void set(const InterfaceType<Service>& service_new) {
+ audio_utils::unique_lock ul(mMutex);
+ auto& service = std::get<BaseInterfaceType<Service>>(mService);
+ const auto traits = getTraits_l<Service>();
+ if (service) {
+ auto orig_service = service;
+ invalidateService_l<Service>();
+ ul.unlock();
+ traits->onServiceDied(interfaceFromBase<Service>(orig_service));
+ }
+ service = service_new;
+ ul.unlock();
+ // should we set the death notifier? It could be a local service.
+ if (service_new) traits->onNewService(service_new);
+ mCv.notify_all();
+ }
+
+ /**
+ * Disables cache management in the ServiceHandler. init() needs to be
+ * called to restart.
+ *
+ * All notifiers removed.
+ * Service pointer is released.
+ */
+ template<typename Service>
+ void skip() {
+ audio_utils::unique_lock ul(mMutex);
+ mSkip = true;
+ // remove notifiers. OK to hold lock as presuming notifications one-way
+ // or manually triggered outside of lock.
+ mDeathNotificationHandle.reset();
+ mServiceNotificationHandle.reset();
+ auto& service = std::get<BaseInterfaceType<Service>>(mService);
+ const auto traits = getTraits_l<Service>();
+ std::shared_ptr<void> oldTraits;
+ std::swap(oldTraits, mTraits); // destroyed outside of lock.
+ if (service) {
+ auto orig_service = service; // keep reference to service to manually notify death.
+ invalidateService_l<Service>(); // sets service to nullptr
+ ul.unlock();
+ traits->onServiceDied(interfaceFromBase<Service>(orig_service));
+ } else {
+ ul.unlock();
+ }
+ mCv.notify_all();
+ }
+
+private:
+
+ // invalidateService_l is called to remove the old death notifier,
+ // invalidate the service, and optionally clear the service pointer.
+ template <typename Service>
+ void invalidateService_l() REQUIRES(mMutex) {
+ mDeathNotificationHandle.reset();
+ const auto traits = getTraits_l<Service>();
+ mValid = false;
+ if (!(traits->options() & ServiceOptions::kNonNull) || mSkip) {
+ auto &service = std::get<BaseInterfaceType<Service>>(mService);
+ service = nullptr;
+ }
+ }
+
+ // gets the traits set by init(), initializes with default if init() not called.
+ template <typename Service>
+ std::shared_ptr<FunctionalServiceTraits<Service>> getTraits_l() REQUIRES(mMutex) {
+ if (!mTraits) {
+ mTraits = std::make_shared<FunctionalServiceTraits<Service>>(
+ DefaultServiceTraits<Service>{});
+ }
+ return std::static_pointer_cast<FunctionalServiceTraits<Service>>(mTraits);
+ }
+
+ // sets the service notification
+ template <typename Service>
+ void setServiceNotifier_l() REQUIRES(mMutex) {
+ const auto traits = getTraits_l<Service>();
+ mServiceNotificationHandle = requestServiceNotification<Service>(
+ [traits, this](const InterfaceType<Service>& service) {
+ audio_utils::unique_lock ul(mMutex);
+ auto originalService = std::get<BaseInterfaceType<Service>>(mService);
+ if (originalService != service) {
+ mService = service;
+ mValid = true;
+ setDeathNotifier_l<Service>();
+ traits->onNewService(service);
+ }
+ ul.unlock();
+ mCv.notify_all();
+ }, traits->getServiceName());
+ ALOGW_IF(!mServiceNotificationHandle, "%s: cannot register service notification %s"
+ " (do we have permission?)",
+ __func__, toString(Service::descriptor).c_str());
+ }
+
+ // sets the death notifier for mService (mService must be non-null).
+ template <typename Service>
+ void setDeathNotifier_l() REQUIRES(mMutex) {
+ auto base = std::get<BaseInterfaceType<Service>>(mService);
+ auto service = interfaceFromBase<Service>(base);
+ const auto binder = binderFromInterface(service);
+ if (binder.get()) {
+ auto traits = getTraits_l<Service>();
+ mDeathNotificationHandle = requestDeathNotification(
+ base, [traits, service, this]() {
+ // as only one death notification is dispatched,
+                        // we do not need a generation count.
+ {
+ std::lock_guard l(mMutex);
+ invalidateService_l<Service>();
+ }
+ traits->onServiceDied(service);
+ });
+ ALOGW_IF(!mDeathNotificationHandle, "%s: cannot register death notification %s"
+ " (already died?)",
+ __func__, toString(Service::descriptor).c_str());
+ }
+ }
+
+ // initializes the variant for NDK use (called on first creation in the cache map).
+ void init_ndk() EXCLUDES(mMutex) {
+ std::lock_guard l(mMutex);
+ mService = std::shared_ptr<::ndk::ICInterface>{};
+ }
+
+ // initializes the variant for CPP use (called on first creation in the cache map).
+ void init_cpp() EXCLUDES(mMutex) {
+ std::lock_guard l(mMutex);
+ mService = sp<::android::IInterface>{};
+ }
+
+ static std::string toString(const std::string& s) { return s; }
+ static std::string toString(const String16& s) { return String8(s).c_str(); }
+
+ mutable std::mutex mMutex;
+ std::condition_variable mCv;
+ static constexpr auto kPollTime = std::chrono::seconds(1);
+
+ std::variant<std::shared_ptr<::ndk::ICInterface>,
+ sp<::android::IInterface>> mService GUARDED_BY(mMutex);
+ // aesthetically we place these last, but a ServiceHandler is never deleted in
+ // current operation, so there is no deadlock on destruction.
+ std::shared_ptr<void> mDeathNotificationHandle GUARDED_BY(mMutex);
+ std::shared_ptr<void> mServiceNotificationHandle GUARDED_BY(mMutex);
+ std::shared_ptr<void> mTraits GUARDED_BY(mMutex);
+
+ // mValid is true iff the service is non-null and alive.
+ bool mValid GUARDED_BY(mMutex) = false;
+
+ // mSkip indicates that the service is not cached.
+ bool mSkip GUARDED_BY(mMutex) = false;
+};
+
+} // details
+
+//----------------------------------
+// ServiceSingleton API
+//
+
+/*
+ * Implementation detail:
+ *
+ * Each CPP or NDK service interface has a unique ServiceHandler that
+ * is stored in a singleton cache. The cache key is based on the service descriptor string
+ * so only one version can be chosen. (The particular version may be changed using
+ * ServiceTraits::getServiceName()).
+ */
+
+/**
+ * Sets the service trait parameters for acquiring the Service interface.
+ *
+ * If this is not set before the first service fetch, then default service traits are used.
+ *
+ * @return true if there is a preexisting (including prior default set) traits.
+ */
+template<typename Service, typename ServiceTraits>
+bool initService(const ServiceTraits& serviceTraits = {}) {
+ const auto serviceHandler = details::ServiceHandler::getInstance(Service::descriptor);
+ return serviceHandler->template init<Service>(serviceTraits);
+}
+
+/**
+ * Returns either a std::shared_ptr<Interface> or sp<Interface>
+ * for the AIDL service. If the service is not available within waitNs,
+ * the method will return nullptr
+ * (or the previous invalidated service if Service.options() & kNonNull).
+ *
+ * This method installs a callback to obtain the service, so with waitNs == 0, it may be used to
+ * prefetch the service before it is actually needed.
+ *
+ * @param waitNs wait time for the service to become available.
+ * @return
+ * a sp<> for a CPP interface
+ * a std::shared_ptr<> for a NDK interface
+ *
+ */
+template<typename Service>
+auto getService(std::chrono::nanoseconds waitNs = {}) {
+ const auto serviceHandler = details::ServiceHandler::getInstance(Service::descriptor);
+ return interfaceFromBase<Service>(serviceHandler->template get<Service>(
+ waitNs, true /* useCallback */));
+}
+
+/**
+ * Returns either a std::shared_ptr<Interface> or sp<Interface>
+ * for the AIDL service. If the service is not available within waitNs,
+ * the method will return nullptr
+ * (or the previous invalidated service if Service.options() & kNonNull).
+ *
+ * This method polls to obtain the service, which
+ * is useful if the service is restricted due to permissions or
+ * one is concerned about ThreadPool starvation.
+ *
+ * @param waitNs wait time for the service to become available.
+ * @return
+ * a sp<> for a CPP interface
+ * a std::shared_ptr<> for a NDK interface
+ */
+template<typename Service>
+auto checkService(std::chrono::nanoseconds waitNs = {}) {
+ const auto serviceHandler = details::ServiceHandler::getInstance(Service::descriptor);
+ return interfaceFromBase<Service>(serviceHandler->template get<Service>(
+ waitNs, false /* useCallback */));
+}
+
+/**
+ * Sets a service implementation override, replacing any fetched service from ServiceManager.
+ *
+ * An empty service clears the cache.
+ */
+template<typename Service>
+void setService(const InterfaceType<Service>& service) {
+ const auto serviceHandler = details::ServiceHandler::getInstance(Service::descriptor);
+ serviceHandler->template set<Service>(service);
+}
+
+/**
+ * Disables the service cache.
+ *
+ * This releases any service and notification callbacks. After this,
+ * another initService() can be called seamlessly.
+ */
+template<typename Service>
+void skipService() {
+ const auto serviceHandler = details::ServiceHandler::getInstance(Service::descriptor);
+ serviceHandler->template skip<Service>();
+}
+
+} // namespace android::mediautils
diff --git a/media/utils/include/mediautils/ServiceUtilities.h b/media/utils/include/mediautils/ServiceUtilities.h
index 2631469..573cc14 100644
--- a/media/utils/include/mediautils/ServiceUtilities.h
+++ b/media/utils/include/mediautils/ServiceUtilities.h
@@ -102,6 +102,7 @@
bool captureMediaOutputAllowed(const AttributionSourceState& attributionSource);
bool captureTunerAudioInputAllowed(const AttributionSourceState& attributionSource);
bool captureVoiceCommunicationOutputAllowed(const AttributionSourceState& attributionSource);
+bool bypassConcurrentPolicyAllowed(const AttributionSourceState& attributionSource) ;
bool accessUltrasoundAllowed(const AttributionSourceState& attributionSource);
bool captureHotwordAllowed(const AttributionSourceState& attributionSource);
bool settingsAllowed();
@@ -114,6 +115,11 @@
bool bypassInterruptionPolicyAllowed(const AttributionSourceState& attributionSource);
bool callAudioInterceptionAllowed(const AttributionSourceState& attributionSource);
void purgePermissionCache();
+bool mustAnonymizeBluetoothAddress(
+ const AttributionSourceState& attributionSource, const String16& caller);
+void anonymizeBluetoothAddress(char *address);
+
+bool isRecordOpRequired(audio_source_t source);
int32_t getOpForSource(audio_source_t source);
AttributionSourceState getCallingAttributionSource();
diff --git a/media/utils/include/mediautils/SharedMemoryAllocator.h b/media/utils/include/mediautils/SharedMemoryAllocator.h
index 4243b9c..7d4f62e 100644
--- a/media/utils/include/mediautils/SharedMemoryAllocator.h
+++ b/media/utils/include/mediautils/SharedMemoryAllocator.h
@@ -28,6 +28,7 @@
#include <type_traits>
#include <unordered_map>
+#include <android-base/thread_annotations.h>
#include <binder/MemoryBase.h>
#include <binder/MemoryHeapBase.h>
#include <log/log_main.h>
@@ -425,8 +426,57 @@
[[no_unique_address]] SecondaryAllocator mSecondary;
};
+// Wrap an allocator with a lock if it backs multiple allocators through indirection
+template <typename Allocator>
+class LockedAllocator {
+ public:
+ static size_t alignment() { return Allocator::alignment(); }
+
+ explicit LockedAllocator(Allocator allocator) : mAllocator(allocator) {}
+
+ LockedAllocator() = default;
+
+ template <typename T>
+ AllocationType allocate(T&& request) {
+ static_assert(std::is_base_of_v<android::mediautils::BasicAllocRequest, std::decay_t<T>>);
+ std::lock_guard l_{mMutex};
+ return mAllocator.allocate(std::forward<T>(request));
+ }
+
+ void deallocate(const AllocationType& allocation) {
+ std::lock_guard l_{mMutex};
+ mAllocator.deallocate(allocation);
+ }
+
+ template <typename Enable = void>
+ auto deallocate_all()
+ -> std::enable_if_t<shared_allocator_impl::has_deallocate_all<Allocator>, Enable> {
+ std::lock_guard l_{mMutex};
+ mAllocator.deallocate_all();
+ }
+
+ template <typename Enable = bool>
+ auto owns(const AllocationType& allocation) const
+ -> std::enable_if_t<shared_allocator_impl::has_owns<Allocator>, Enable> {
+ std::lock_guard l_{mMutex};
+ return mAllocator.owns(allocation);
+ }
+
+ template <typename Enable = std::string>
+ auto dump() const -> std::enable_if_t<shared_allocator_impl::has_dump<Allocator>, Enable> {
+ std::lock_guard l_{mMutex};
+ return mAllocator.dump();
+ }
+
+ private:
+ std::mutex mMutex;
+ [[no_unique_address]] Allocator mAllocator GUARDED_BY(mMutex);
+};
+
// An allocator which is backed by a shared_ptr to an allocator, so multiple
// allocators can share the same backing allocator (and thus the same state).
+// When the same backing allocator is used by multiple higher level allocators,
+// locking at the sharing level is necessary.
template <typename Allocator>
class IndirectAllocator {
public:
diff --git a/media/utils/tests/Android.bp b/media/utils/tests/Android.bp
index ff11b42..4456df2 100644
--- a/media/utils/tests/Android.bp
+++ b/media/utils/tests/Android.bp
@@ -12,8 +12,6 @@
cc_defaults {
name: "libmediautils_tests_config",
- host_supported: true,
-
cflags: [
"-Wall",
"-Werror",
@@ -67,6 +65,22 @@
],
}
+aidl_interface {
+ name: "ServiceSingletonTestInterface",
+ unstable: true,
+ srcs: [
+ "IServiceSingletonTest.aidl",
+ ],
+ backend: {
+ cpp: {
+ enabled: true,
+ },
+ ndk: {
+ enabled: true,
+ },
+ },
+}
+
cc_test_library {
name: "libsharedtest",
@@ -178,6 +192,34 @@
}
cc_test {
+ name: "service_singleton_tests",
+
+ defaults: ["libmediautils_tests_config"],
+
+ // to add and get services, we need to be root.
+ require_root: true,
+ host_supported: false,
+
+ srcs: [
+ "service_singleton_tests.cpp",
+ ],
+
+ shared_libs: [
+ "libaudioutils",
+ "libbinder",
+ "libbinder_ndk",
+ "liblog",
+ "libmediautils",
+ "libutils",
+ ],
+
+ static_libs: [
+ "ServiceSingletonTestInterface-cpp",
+ "ServiceSingletonTestInterface-ndk",
+ ],
+}
+
+cc_test {
name: "static_string_tests",
defaults: ["libmediautils_tests_defaults"],
diff --git a/media/utils/tests/IServiceSingletonTest.aidl b/media/utils/tests/IServiceSingletonTest.aidl
new file mode 100644
index 0000000..9f889a6
--- /dev/null
+++ b/media/utils/tests/IServiceSingletonTest.aidl
@@ -0,0 +1,19 @@
+/*
+ * Copyright (C) 2024 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+interface IServiceSingletonTest {
+ int inc();
+}
diff --git a/media/utils/tests/service_singleton_tests.cpp b/media/utils/tests/service_singleton_tests.cpp
new file mode 100644
index 0000000..8656a20
--- /dev/null
+++ b/media/utils/tests/service_singleton_tests.cpp
@@ -0,0 +1,365 @@
+/*
+ * Copyright (C) 2024 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#define LOG_TAG "service_singleton_tests"
+
+#include <mediautils/ServiceSingleton.h>
+
+#include "BnServiceSingletonTest.h"
+#include "aidl/BnServiceSingletonTest.h"
+#include <audio_utils/RunRemote.h>
+#include <binder/IPCThreadState.h>
+#include <binder/ProcessState.h>
+#include <gtest/gtest.h>
+#include <utils/Log.h>
+
+using namespace android;
+
+/**
+ * Service Singleton Test uses a worker process to spawn new binder services.
+ *
+ * A worker process is required since we cannot fork after registering
+ * with the binder driver.
+ *
+ * Test Process -> Worker_Process -> Service Process(1)
+ * -> Service Process(2)
+ * -> ....
+ */
+
+// Service implementation.
+class ServiceSingletonTestCpp : public BnServiceSingletonTest {
+public:
+ binder::Status inc(int32_t* _aidl_return) final {
+ *_aidl_return = ++mValue;
+ return binder::Status::ok();
+ }
+ std::atomic_int32_t mValue = 0;
+};
+
+// The service traits increment static atomic counters, which
+// validates that the trait callbacks are invoked.
+static std::atomic_int32_t sNewService = 0;
+static std::atomic_int32_t sServiceDied = 0;
+
+template <typename Service>
+struct TestServiceTraits : public mediautils::DefaultServiceTraits<Service> {
+ static constexpr const char* getServiceName() { return ""; }
+ static constexpr void onNewService(const mediautils::InterfaceType<Service>&) {
+ ++sNewService;
+ }
+ static constexpr void onServiceDied(const mediautils::InterfaceType<Service>&) {
+ ++sServiceDied;
+ }
+};
+
+// Here we have an alternative set of service traits,
+// used to validate that we can switch traits for the service singleton.
+static std::atomic_int32_t sNewService2 = 0;
+static std::atomic_int32_t sServiceDied2 = 0;
+
+template <typename Service>
+struct TestServiceTraits2 : public mediautils::DefaultServiceTraits<Service> {
+ static constexpr const char* getServiceName() { return ""; }
+ static constexpr void onNewService(const mediautils::InterfaceType<Service>&) {
+ ++sNewService2;
+ }
+ static constexpr void onServiceDied(const mediautils::InterfaceType<Service>&) {
+ ++sServiceDied2;
+ }
+};
+
+/*
+ * ServiceThreads run in a remote process.
+ *
+ * The WorkerThread is used to launch and kill the ServiceThread in a remote process.
+ */
+static void ServiceThread(audio_utils::RunRemote& runRemote) {
+ int c = runRemote.getc(); // requires any character to launch
+ auto service = sp<IServiceSingletonTest>::cast(sp<ServiceSingletonTestCpp>::make());
+ mediautils::addService(service);
+ ProcessState::self()->startThreadPool();
+ runRemote.putc(c); // echo character.
+ IPCThreadState::self()->joinThreadPool();
+}
+
+/*
+ * The WorkerThread is run in a remote process from the test. It communicates with
+ * the test process through pipes.
+ */
+static void WorkerThread(audio_utils::RunRemote& runRemote) {
+ std::shared_ptr<audio_utils::RunRemote> remoteService;
+ while (true) {
+ const int c = runRemote.getc();
+ switch (c) {
+ case 'a': // launch a new service.
+ // if the old service isn't destroyed, it will be destroyed here
+ // when the RunRemote is replaced.
+ remoteService = std::make_shared<audio_utils::RunRemote>(ServiceThread);
+ remoteService->run();
+ remoteService->putc('a'); // create service.
+ (void)remoteService->getc(); // ensure it is created.
+ runRemote.putc(c); // echo
+ break;
+ case 'b': // destroys the old service.
+ remoteService.reset(); // this kills the service.
+ runRemote.putc(c); // echo
+ break;
+ default: // respond that we don't know what happened!
+ runRemote.putc('?');
+ break;
+ }
+ }
+}
+
+// This is a monolithic test.
+TEST(service_singleton_tests, one_and_only) {
+ std::atomic_int32_t listenerServiceCreated = 0;
+ std::atomic_int32_t listenerServiceDied = 0;
+
+ // initialize the service cache with a custom handler.
+ mediautils::initService<
+ IServiceSingletonTest, TestServiceTraits<IServiceSingletonTest>>({});
+ mediautils::initService<
+ aidl::IServiceSingletonTest, TestServiceTraits<aidl::IServiceSingletonTest>>({});
+
+ // start the worker thread that spawns the services.
+ auto remoteWorker = std::make_shared<audio_utils::RunRemote>(WorkerThread);
+ remoteWorker->run();
+
+ // now we are ready for binder.
+ ProcessState::self()->startThreadPool();
+
+ // check that our service isn't preexisting.
+ {
+ auto service = mediautils::checkServicePassThrough<IServiceSingletonTest>();
+ EXPECT_FALSE(service);
+
+ auto service2 = mediautils::checkServicePassThrough<aidl::IServiceSingletonTest>();
+ EXPECT_FALSE(service2);
+ }
+ EXPECT_EQ(0, sNewService);
+ EXPECT_EQ(0, sServiceDied);
+
+ {
+ auto service = mediautils::checkService<IServiceSingletonTest>();
+ EXPECT_FALSE(service);
+
+ auto service2 = mediautils::checkService<aidl::IServiceSingletonTest>();
+ EXPECT_FALSE(service2);
+ }
+ EXPECT_EQ(0, sNewService);
+ EXPECT_EQ(0, sServiceDied);
+
+ // getService will register a notification handler that fetches the
+ // service in the background.
+ {
+ auto service = mediautils::getService<IServiceSingletonTest>();
+ EXPECT_FALSE(service);
+
+ auto service2 = mediautils::getService<aidl::IServiceSingletonTest>();
+ EXPECT_FALSE(service2);
+ }
+ EXPECT_EQ(0, sNewService);
+ EXPECT_EQ(0, sServiceDied);
+
+ // now spawn the service.
+ remoteWorker->putc('a');
+ EXPECT_EQ('a', remoteWorker->getc());
+
+ sleep(1); // In the background, 2 services were fetched.
+
+ EXPECT_EQ(2, sNewService);
+ EXPECT_EQ(0, sServiceDied);
+
+ // we repeat the prior checks, but the service is cached now.
+ {
+ auto service = mediautils::checkServicePassThrough<IServiceSingletonTest>();
+ EXPECT_TRUE(service);
+
+ auto service2 = mediautils::checkServicePassThrough<aidl::IServiceSingletonTest>();
+ EXPECT_TRUE(service2);
+ }
+ EXPECT_EQ(2, sNewService);
+ EXPECT_EQ(0, sServiceDied);
+
+ {
+ auto service = mediautils::checkService<IServiceSingletonTest>();
+ EXPECT_TRUE(service);
+
+ auto service2 = mediautils::checkService<aidl::IServiceSingletonTest>();
+ EXPECT_TRUE(service2);
+ }
+ EXPECT_EQ(2, sNewService);
+ EXPECT_EQ(0, sServiceDied);
+
+ {
+ auto service = mediautils::getService<IServiceSingletonTest>();
+ EXPECT_TRUE(service);
+
+ auto service2 = mediautils::getService<aidl::IServiceSingletonTest>();
+ EXPECT_TRUE(service2);
+ }
+ EXPECT_EQ(2, sNewService);
+ EXPECT_EQ(0, sServiceDied);
+
+ // destroy the service.
+ remoteWorker->putc('b');
+ EXPECT_EQ('b', remoteWorker->getc());
+
+ sleep(1);
+
+ // We expect the died callbacks.
+ EXPECT_EQ(2, sNewService);
+ EXPECT_EQ(2, sServiceDied);
+
+ // we can also manually check whether there is a new service by
+ // requesting service notifications. This is outside of the service singleton
+ // traits.
+ auto handle1 = mediautils::requestServiceNotification<IServiceSingletonTest>(
+ [&](const sp<IServiceSingletonTest>&) { ++listenerServiceCreated; });
+ auto handle2 = mediautils::requestServiceNotification<aidl::IServiceSingletonTest>(
+ [&](const std::shared_ptr<aidl::IServiceSingletonTest>&) {
+ ++listenerServiceCreated; });
+
+ // Spawn the service again.
+ remoteWorker->putc('a');
+ EXPECT_EQ('a', remoteWorker->getc());
+
+ sleep(1); // In the background, 2 services were fetched.
+
+ EXPECT_EQ(4, sNewService);
+ EXPECT_EQ(2, sServiceDied);
+
+ EXPECT_EQ(2, listenerServiceCreated); // our listener picked up the service creation.
+
+ std::shared_ptr<void> handle3, handle4;
+ std::shared_ptr<aidl::IServiceSingletonTest> keepAlive; // NDK Workaround!
+ {
+ auto service = mediautils::getService<IServiceSingletonTest>();
+ EXPECT_TRUE(service);
+
+ auto service2 = mediautils::getService<aidl::IServiceSingletonTest>();
+ EXPECT_TRUE(service2);
+
+ keepAlive = service2;
+
+ // we can also request our own death notifications (outside of the service traits).
+ handle3 = mediautils::requestDeathNotification(service, [&] { ++listenerServiceDied; });
+ handle4 = mediautils::requestDeathNotification(service2, [&] { ++listenerServiceDied; });
+ }
+
+ EXPECT_EQ(4, sNewService);
+ EXPECT_EQ(2, sServiceDied);
+
+ // destroy the service.
+
+ remoteWorker->putc('b');
+ EXPECT_EQ('b', remoteWorker->getc());
+
+ sleep(1);
+
+ // We expect the died callbacks.
+ EXPECT_EQ(4, sNewService);
+ EXPECT_EQ(4, sServiceDied);
+
+ EXPECT_EQ(2, listenerServiceCreated);
+ EXPECT_EQ(2, listenerServiceDied); // NDK Workaround - without keepAlive, this is 1.
+ // the death notification is invalidated without a
+ // pointer to the binder object.
+
+ keepAlive.reset();
+
+ // Cancel the singleton cache.
+ mediautils::skipService<IServiceSingletonTest>();
+ mediautils::skipService<aidl::IServiceSingletonTest>();
+
+ // Spawn the service again.
+ remoteWorker->putc('a');
+ EXPECT_EQ('a', remoteWorker->getc());
+
+ sleep(1);
+
+ // We expect no change from the service traits (service not cached).
+ EXPECT_EQ(4, sNewService);
+ EXPECT_EQ(4, sServiceDied);
+ EXPECT_EQ(4, listenerServiceCreated); // our listener picks it up.
+
+ // remove service
+ remoteWorker->putc('b');
+ EXPECT_EQ('b', remoteWorker->getc());
+
+ sleep(1);
+
+ // We expect no change from the service traits (service not cached).
+ EXPECT_EQ(4, sNewService);
+ EXPECT_EQ(4, sServiceDied);
+ EXPECT_EQ(4, listenerServiceCreated);
+ EXPECT_EQ(2, listenerServiceDied); // binder died is associated with the actual handle.
+
+ // replace the service traits.
+ {
+ auto previous = mediautils::initService<
+ IServiceSingletonTest, TestServiceTraits2<IServiceSingletonTest>>({});
+ auto previous2 = mediautils::initService<
+ aidl::IServiceSingletonTest, TestServiceTraits2<aidl::IServiceSingletonTest>>({});
+
+ EXPECT_FALSE(previous);
+ EXPECT_FALSE(previous2);
+ }
+
+ // We expect no change with old counters.
+ EXPECT_EQ(4, sNewService);
+ EXPECT_EQ(4, sServiceDied);
+ EXPECT_EQ(0, sNewService2);
+ EXPECT_EQ(0, sServiceDied2);
+
+ {
+ auto service = mediautils::getService<IServiceSingletonTest>();
+ EXPECT_FALSE(service);
+
+ auto service2 = mediautils::getService<aidl::IServiceSingletonTest>();
+ EXPECT_FALSE(service2);
+ }
+
+ EXPECT_EQ(4, sNewService);
+ EXPECT_EQ(4, sServiceDied);
+ EXPECT_EQ(0, sNewService2);
+ EXPECT_EQ(0, sServiceDied2);
+
+ // Spawn the service again.
+ remoteWorker->putc('a');
+ EXPECT_EQ('a', remoteWorker->getc());
+
+ sleep(1);
+
+ EXPECT_EQ(4, sNewService); // old counters do not change.
+ EXPECT_EQ(4, sServiceDied);
+ EXPECT_EQ(2, sNewService2); // new counters change
+ EXPECT_EQ(0, sServiceDied2);
+
+ EXPECT_EQ(6, listenerServiceCreated); // listener associated with service name picks up info.
+
+ // Release the service.
+ remoteWorker->putc('b');
+ EXPECT_EQ('b', remoteWorker->getc());
+
+ sleep(1);
+
+ EXPECT_EQ(4, sNewService); // old counters do not change.
+ EXPECT_EQ(4, sServiceDied);
+ EXPECT_EQ(2, sNewService2); // new counters change
+ EXPECT_EQ(2, sServiceDied2);
+}
diff --git a/services/audioflinger/Android.bp b/services/audioflinger/Android.bp
index 01bde42..add8a43 100644
--- a/services/audioflinger/Android.bp
+++ b/services/audioflinger/Android.bp
@@ -149,7 +149,7 @@
"audio-permission-aidl-cpp",
"audioclient-types-aidl-cpp",
"audioflinger-aidl-cpp",
- "audiopermissioncontroller",
+ "libaudiopermission",
"av-types-aidl-cpp",
"com.android.media.audio-aconfig-cc",
"com.android.media.audioserver-aconfig-cc",
diff --git a/services/audioflinger/AudioFlinger.cpp b/services/audioflinger/AudioFlinger.cpp
index 4c7087e..b2edaf7 100644
--- a/services/audioflinger/AudioFlinger.cpp
+++ b/services/audioflinger/AudioFlinger.cpp
@@ -26,9 +26,7 @@
#include "Configuration.h"
#include "AudioFlinger.h"
-//#define BUFLOG_NDEBUG 0
-#include <afutils/BufLog.h>
-#include <afutils/DumpTryLock.h>
+#include <afutils/FallibleLockGuard.h>
#include <afutils/NBAIO_Tee.h>
#include <afutils/Permission.h>
#include <afutils/PropertyUtils.h>
@@ -68,6 +66,7 @@
// not needed with the includes above, added to prevent transitive include dependency.
#include <chrono>
#include <thread>
+#include <string_view>
// ----------------------------------------------------------------------------
@@ -86,6 +85,8 @@
namespace android {
+using namespace std::string_view_literals;
+
using ::android::base::StringPrintf;
using aidl_utils::statusTFromBinderStatus;
using media::IEffectClient;
@@ -102,10 +103,10 @@
static const AudioHalVersionInfo kMaxAAudioPropertyDeviceHalVersion =
AudioHalVersionInfo(AudioHalVersionInfo::Type::HIDL, 7, 1);
-static constexpr char kDeadlockedString[] = "AudioFlinger may be deadlocked\n";
-static constexpr char kHardwareLockedString[] = "Hardware lock is taken\n";
-static constexpr char kClientLockedString[] = "Client lock is taken\n";
-static constexpr char kNoEffectsFactory[] = "Effects Factory is absent\n";
+constexpr auto kDeadlockedString = "AudioFlinger may be deadlocked\n"sv;
+constexpr auto kHardwareLockedString = "Hardware lock is taken\n"sv;
+constexpr auto kClientLockedString = "Client lock is taken\n"sv;
+constexpr auto kNoEffectsFactory = "Effects Factory is absent\n"sv;
static constexpr char kAudioServiceName[] = "audio";
@@ -116,19 +117,26 @@
// Keep a strong reference to media.log service around forever.
// The service is within our parent process so it can never die in a way that we could observe.
// These two variables are const after initialization.
-static sp<IBinder> sMediaLogServiceAsBinder;
static sp<IMediaLogService> sMediaLogService;
static pthread_once_t sMediaLogOnce = PTHREAD_ONCE_INIT;
static void sMediaLogInit()
{
- sMediaLogServiceAsBinder = defaultServiceManager()->getService(String16("media.log"));
+ auto sMediaLogServiceAsBinder = defaultServiceManager()->getService(String16("media.log"));
if (sMediaLogServiceAsBinder != 0) {
sMediaLogService = interface_cast<IMediaLogService>(sMediaLogServiceAsBinder);
}
}
+static int writeStr(int fd, std::string_view s) {
+ return write(fd, s.data(), s.size());
+}
+
+static int writeStr(int fd, const String8& s) {
+ return write(fd, s.c_str(), s.size());
+}
+
static error::BinderResult<ValidatedAttributionSourceState>
validateAttributionFromContextOrTrustedCaller(AttributionSourceState attr,
const IPermissionProvider& provider) {
@@ -533,7 +541,7 @@
const audio_attributes_t *attr,
audio_config_base_t *config,
const AudioClient& client,
- audio_port_handle_t *deviceId,
+ DeviceIdVector *deviceIds,
audio_session_t *sessionId,
const sp<MmapStreamCallback>& callback,
sp<MmapStreamInterface>& interface,
@@ -545,7 +553,7 @@
status_t ret = NO_INIT;
if (af != 0) {
ret = af->openMmapStream(
- direction, attr, config, client, deviceId,
+ direction, attr, config, client, deviceIds,
sessionId, callback, interface, handle);
}
return ret;
@@ -555,7 +563,7 @@
const audio_attributes_t *attr,
audio_config_base_t *config,
const AudioClient& client,
- audio_port_handle_t *deviceId,
+ DeviceIdVector *deviceIds,
audio_session_t *sessionId,
const sp<MmapStreamCallback>& callback,
sp<MmapStreamInterface>& interface,
@@ -621,15 +629,18 @@
bool isSpatialized;
bool isBitPerfect;
float volume;
+ bool muted;
ret = AudioSystem::getOutputForAttr(&localAttr, &io,
actualSessionId,
&streamType, adjAttributionSource,
&fullConfig,
(audio_output_flags_t)(AUDIO_OUTPUT_FLAG_MMAP_NOIRQ |
AUDIO_OUTPUT_FLAG_DIRECT),
- deviceId, &portId, &secondaryOutputs, &isSpatialized,
+ deviceIds, &portId, &secondaryOutputs,
+ &isSpatialized,
&isBitPerfect,
- &volume);
+ &volume,
+ &muted);
if (ret != NO_ERROR) {
config->sample_rate = fullConfig.sample_rate;
config->channel_mask = fullConfig.channel_mask;
@@ -638,12 +649,17 @@
ALOGW_IF(!secondaryOutputs.empty(),
"%s does not support secondary outputs, ignoring them", __func__);
} else {
+ audio_port_handle_t deviceId = getFirstDeviceId(*deviceIds);
ret = AudioSystem::getInputForAttr(&localAttr, &io,
RECORD_RIID_INVALID,
actualSessionId,
adjAttributionSource,
config,
- AUDIO_INPUT_FLAG_MMAP_NOIRQ, deviceId, &portId);
+ AUDIO_INPUT_FLAG_MMAP_NOIRQ, &deviceId, &portId);
+ deviceIds->clear();
+ if (deviceId != AUDIO_PORT_HANDLE_NONE) {
+ deviceIds->push_back(deviceId);
+ }
}
if (ret != NO_ERROR) {
return ret;
@@ -657,7 +673,7 @@
const sp<IAfMmapThread> thread = mMmapThreads.valueFor(io);
if (thread != 0) {
interface = IAfMmapThread::createMmapStreamInterfaceAdapter(thread);
- thread->configure(&localAttr, streamType, actualSessionId, callback, *deviceId, portId);
+ thread->configure(&localAttr, streamType, actualSessionId, callback, *deviceIds, portId);
*handle = portId;
*sessionId = actualSessionId;
config->sample_rate = thread->sampleRate();
@@ -738,18 +754,19 @@
return NULL;
}
-void AudioFlinger::dumpClients_ll(int fd, const Vector<String16>& args __unused)
-{
+void AudioFlinger::dumpClients_ll(int fd, bool dumpAllocators) {
String8 result;
- result.append("Client Allocators:\n");
- for (size_t i = 0; i < mClients.size(); ++i) {
- sp<Client> client = mClients.valueAt(i).promote();
- if (client != 0) {
- result.appendFormat("Client: %d\n", client->pid());
- result.append(client->allocator().dump().c_str());
+ if (dumpAllocators) {
+ result.append("Client Allocators:\n");
+ for (size_t i = 0; i < mClients.size(); ++i) {
+ sp<Client> client = mClients.valueAt(i).promote();
+ if (client != 0) {
+ result.appendFormat("Client: %d\n", client->pid());
+ result.append(client->allocator().dump().c_str());
+ }
}
- }
+ }
result.append("Notification Clients:\n");
result.append(" pid uid name\n");
@@ -770,12 +787,11 @@
result.appendFormat(" %7d %4d %7d %6u %s\n", r->mSessionid, r->mCnt, r->mPid,
r->mUid, info->package.c_str());
}
- write(fd, result.c_str(), result.size());
+ writeStr(fd, result);
}
-void AudioFlinger::dumpInternals_l(int fd, const Vector<String16>& args __unused)
-{
+void AudioFlinger::dumpInternals_l(int fd) {
const size_t SIZE = 256;
char buffer[SIZE];
String8 result;
@@ -783,7 +799,7 @@
snprintf(buffer, SIZE, "Hardware status: %d\n", hardwareStatus);
result.append(buffer);
- write(fd, result.c_str(), result.size());
+ writeStr(fd, result);
dprintf(fd, "Vibrator infos(size=%zu):\n", mAudioVibratorInfos.size());
for (const auto& vibratorInfo : mAudioVibratorInfos) {
@@ -793,8 +809,43 @@
mBluetoothLatencyModesEnabled ? "" : "not ");
}
-void AudioFlinger::dumpPermissionDenial(int fd, const Vector<String16>& args __unused)
-{
+void AudioFlinger::dumpStats(int fd) {
+ // Dump binder stats
+ dprintf(fd, "\nIAudioFlinger binder call profile:\n");
+ writeStr(fd, getIAudioFlingerStatistics().dump());
+
+ extern mediautils::MethodStatistics<int>& getIEffectStatistics();
+ dprintf(fd, "\nIEffect binder call profile:\n");
+ writeStr(fd, getIEffectStatistics().dump());
+
+ // Automatically fetch HIDL or AIDL statistics.
+ const std::string_view halType = (mDevicesFactoryHal->getHalVersion().getType() ==
+ AudioHalVersionInfo::Type::HIDL)
+ ? METHOD_STATISTICS_MODULE_NAME_AUDIO_HIDL
+ : METHOD_STATISTICS_MODULE_NAME_AUDIO_AIDL;
+ const std::shared_ptr<std::vector<std::string>> halClassNames =
+ mediautils::getStatisticsClassesForModule(halType);
+ if (halClassNames) {
+ for (const auto& className : *halClassNames) {
+ auto stats = mediautils::getStatisticsForClass(className);
+ if (stats) {
+ dprintf(fd, "\n%s binder call profile:\n", className.c_str());
+ writeStr(fd, stats->dump());
+ }
+ }
+ }
+
+ dprintf(fd, "\nTimeCheck:\n");
+ writeStr(fd, mediautils::TimeCheck::toString());
+ dprintf(fd, "\n");
+ // dump mutex stats
+ writeStr(fd, audio_utils::mutex::all_stats_to_string());
+ // dump held mutexes
+ writeStr(fd, audio_utils::mutex::all_threads_to_string());
+
+}
+
+void AudioFlinger::dumpPermissionDenial(int fd) {
const size_t SIZE = 256;
char buffer[SIZE];
String8 result;
@@ -803,52 +854,102 @@
IPCThreadState::self()->getCallingPid(),
IPCThreadState::self()->getCallingUid());
result.append(buffer);
- write(fd, result.c_str(), result.size());
+ writeStr(fd, result);
+}
+
+static void dump_printHelp(int fd) {
+ constexpr static auto helpStr =
+ "AudioFlinger dumpsys help options\n"
+ " -h/--help: Print this help text\n"
+ " --hal: Include dump of audio hal\n"
+ " --stats: Include call/lock/watchdog stats\n"
+ " --effects: Include effect definitions\n"
+ " --memory: Include memory dump\n"
+ " -a/--all: Print all except --memory\n"sv;
+
+ write(fd, helpStr.data(), helpStr.length());
}
status_t AudioFlinger::dump(int fd, const Vector<String16>& args)
-NO_THREAD_SAFETY_ANALYSIS // conditional try lock
{
+ using afutils::FallibleLockGuard;
if (!dumpAllowed()) {
- dumpPermissionDenial(fd, args);
- } else {
- // get state of hardware lock
- const bool hardwareLocked = afutils::dumpTryLock(hardwareMutex());
- if (!hardwareLocked) {
- String8 result(kHardwareLockedString);
- write(fd, result.c_str(), result.size());
+ dumpPermissionDenial(fd);
+ return NO_ERROR;
+ }
+ // Arg parsing
+ struct {
+ bool shouldDumpMem, shouldDumpStats, shouldDumpHal, shouldDumpEffects;
+ } parsedArgs {}; // zero-init
+
+ for (const auto& arg : args) {
+ const String8 utf8arg{arg};
+ if (utf8arg == "-h" || utf8arg == "--help") {
+ dump_printHelp(fd);
+ return NO_ERROR;
+ }
+ if (utf8arg == "-a" || utf8arg == "--all") {
+ parsedArgs.shouldDumpStats = true;
+ parsedArgs.shouldDumpHal = true;
+ parsedArgs.shouldDumpEffects = true;
+ continue;
+ }
+ if (utf8arg == "--hal") {
+ parsedArgs.shouldDumpHal = true;
+ continue;
+ }
+ if (utf8arg == "--stats") {
+ parsedArgs.shouldDumpStats = true;
+ continue;
+ }
+ if (utf8arg == "--effects") {
+ parsedArgs.shouldDumpEffects = true;
+ continue;
+ }
+ if (utf8arg == "--memory") {
+ parsedArgs.shouldDumpMem = true;
+ continue;
+ }
+ // Unknown arg silently ignored
+ }
+
+ {
+ std::string res;
+ res.reserve(100);
+ res += "Start begin: ";
+ const auto startTimeStr = audio_utils_time_string_from_ns(mStartTime);
+ res += startTimeStr.time;
+ const auto startFinishedTime = getStartupFinishedTime();
+ if (startFinishedTime != 0) {
+ res += "\nStart end: ";
+ const auto startEndStr = audio_utils_time_string_from_ns(startFinishedTime);
+ res += startEndStr.time;
} else {
- hardwareMutex().unlock();
+ res += "\nStartup not yet finished!";
+ }
+ const auto nowTimeStr = audio_utils_time_string_from_ns(audio_utils_get_real_time_ns());
+ res += "\nNow: ";
+ res += nowTimeStr.time;
+ res += "\n";
+ writeStr(fd, res);
+ }
+ // get state of hardware lock
+ {
+ FallibleLockGuard l{hardwareMutex()};
+ if (!l) writeStr(fd, kHardwareLockedString);
+ }
+ {
+ FallibleLockGuard l{mutex()};
+ if (!l) writeStr(fd, kDeadlockedString);
+ {
+ FallibleLockGuard ll{clientMutex()};
+ if (!ll) writeStr(fd, kClientLockedString);
+ dumpClients_ll(fd, parsedArgs.shouldDumpMem);
}
- const bool locked = afutils::dumpTryLock(mutex());
+ dumpInternals_l(fd);
- // failed to lock - AudioFlinger is probably deadlocked
- if (!locked) {
- String8 result(kDeadlockedString);
- write(fd, result.c_str(), result.size());
- }
-
- const bool clientLocked = afutils::dumpTryLock(clientMutex());
- if (!clientLocked) {
- String8 result(kClientLockedString);
- write(fd, result.c_str(), result.size());
- }
-
- if (mEffectsFactoryHal != 0) {
- mEffectsFactoryHal->dumpEffects(fd);
- } else {
- String8 result(kNoEffectsFactory);
- write(fd, result.c_str(), result.size());
- }
-
- dumpClients_ll(fd, args);
- if (clientLocked) {
- clientMutex().unlock();
- }
-
- dumpInternals_l(fd, args);
-
+ dprintf(fd, "\n ## BEGIN thread dump \n");
// dump playback threads
for (size_t i = 0; i < mPlaybackThreads.size(); i++) {
mPlaybackThreads.valueAt(i)->dump(fd, args);
@@ -866,141 +967,84 @@
// dump orphan effect chains
if (mOrphanEffectChains.size() != 0) {
- write(fd, " Orphan Effect Chains\n", strlen(" Orphan Effect Chains\n"));
+ writeStr(fd, " Orphan Effect Chains\n");
for (size_t i = 0; i < mOrphanEffectChains.size(); i++) {
mOrphanEffectChains.valueAt(i)->dump(fd, args);
}
}
- // dump all hardware devs
- for (size_t i = 0; i < mAudioHwDevs.size(); i++) {
- sp<DeviceHalInterface> dev = mAudioHwDevs.valueAt(i)->hwDevice();
- dev->dump(fd, args);
- }
-
- mPatchPanel->dump(fd);
-
- mDeviceEffectManager->dump(fd);
-
- std::string melOutput = mMelReporter->dump();
- write(fd, melOutput.c_str(), melOutput.size());
+ // dump historical threads in the last 10 minutes
+ writeStr(fd, mThreadLog.dumpToString(
+ "Historical Thread Log ", 0 /* lines */,
+ audio_utils_get_real_time_ns() - 10 * 60 * NANOS_PER_SECOND));
// dump external setParameters
+ dprintf(fd, "\n ## BEGIN setParameters dump \n");
auto dumpLogger = [fd](SimpleLog& logger, const char* name) {
- dprintf(fd, "\n%s setParameters:\n", name);
+ dprintf(fd, "\n %s setParameters:\n", name);
logger.dump(fd, " " /* prefix */);
};
dumpLogger(mRejectedSetParameterLog, "Rejected");
dumpLogger(mAppSetParameterLog, "App");
dumpLogger(mSystemSetParameterLog, "System");
- // dump historical threads in the last 10 seconds
- const std::string threadLog = mThreadLog.dumpToString(
- "Historical Thread Log ", 0 /* lines */,
- audio_utils_get_real_time_ns() - 10 * 60 * NANOS_PER_SECOND);
- write(fd, threadLog.c_str(), threadLog.size());
- BUFLOG_RESET;
+ dprintf(fd, "\n ## BEGIN misc af dump \n");
+ mPatchPanel->dump(fd);
+ mDeviceEffectManager->dump(fd);
+ writeStr(fd, mMelReporter->dump());
if (media::psh_utils::AudioPowerManager::enabled()) {
char value[PROPERTY_VALUE_MAX];
property_get("ro.build.display.id", value, "Unknown build");
std::string build(value);
- build.append("\n");
- write(fd, build.c_str(), build.size());
- const std::string powerLog =
- media::psh_utils::AudioPowerManager::getAudioPowerManager().toString();
- write(fd, powerLog.c_str(), powerLog.size());
+ writeStr(fd, build + "\n");
+ writeStr(fd, media::psh_utils::AudioPowerManager::getAudioPowerManager().toString());
}
- if (locked) {
- mutex().unlock();
- }
-
-#ifdef TEE_SINK
- // NBAIO_Tee dump is safe to call outside of AF lock.
- NBAIO_Tee::dumpAll(fd, "_DUMP");
-#endif
- // append a copy of media.log here by forwarding fd to it, but don't attempt
- // to lookup the service if it's not running, as it will block for a second
- if (sMediaLogServiceAsBinder != 0) {
- dprintf(fd, "\nmedia.log:\n");
- sMediaLogServiceAsBinder->dump(fd, args);
- }
-
- // check for optional arguments
- bool dumpMem = false;
- bool unreachableMemory = false;
- for (const auto &arg : args) {
- if (arg == String16("-m")) {
- dumpMem = true;
- } else if (arg == String16("--unreachable")) {
- unreachableMemory = true;
+ if (parsedArgs.shouldDumpEffects) {
+ dprintf(fd, "\n ## BEGIN effects dump \n");
+ if (mEffectsFactoryHal != 0) {
+ mEffectsFactoryHal->dumpEffects(fd);
+ } else {
+ writeStr(fd, kNoEffectsFactory);
}
}
- if (dumpMem) {
- dprintf(fd, "\nDumping memory:\n");
- std::string s = dumpMemoryAddresses(100 /* limit */);
- write(fd, s.c_str(), s.size());
- }
- if (unreachableMemory) {
- dprintf(fd, "\nDumping unreachable memory:\n");
- // TODO - should limit be an argument parameter?
- std::string s = GetUnreachableMemoryString(true /* contents */, 100 /* limit */);
- write(fd, s.c_str(), s.size());
- }
- {
- std::string timeCheckStats = getIAudioFlingerStatistics().dump();
- dprintf(fd, "\nIAudioFlinger binder call profile:\n");
- write(fd, timeCheckStats.c_str(), timeCheckStats.size());
-
- extern mediautils::MethodStatistics<int>& getIEffectStatistics();
- timeCheckStats = getIEffectStatistics().dump();
- dprintf(fd, "\nIEffect binder call profile:\n");
- write(fd, timeCheckStats.c_str(), timeCheckStats.size());
-
- // Automatically fetch HIDL or AIDL statistics.
- const std::string_view halType = (mDevicesFactoryHal->getHalVersion().getType() ==
- AudioHalVersionInfo::Type::HIDL)
- ? METHOD_STATISTICS_MODULE_NAME_AUDIO_HIDL
- : METHOD_STATISTICS_MODULE_NAME_AUDIO_AIDL;
- const std::shared_ptr<std::vector<std::string>> halClassNames =
- mediautils::getStatisticsClassesForModule(halType);
- if (halClassNames) {
- for (const auto& className : *halClassNames) {
- auto stats = mediautils::getStatisticsForClass(className);
- if (stats) {
- timeCheckStats = stats->dump();
- dprintf(fd, "\n%s binder call profile:\n", className.c_str());
- write(fd, timeCheckStats.c_str(), timeCheckStats.size());
- }
- }
+ if (parsedArgs.shouldDumpHal) {
+ dprintf(fd, "\n ## BEGIN HAL dump \n");
+ FallibleLockGuard ll{hardwareMutex()};
+ // dump all hardware devs
+ for (size_t i = 0; i < mAudioHwDevs.size(); i++) {
+ sp<DeviceHalInterface> dev = mAudioHwDevs.valueAt(i)->hwDevice();
+ dev->dump(fd, args);
}
-
- timeCheckStats = mediautils::TimeCheck::toString();
- dprintf(fd, "\nTimeCheck:\n");
- write(fd, timeCheckStats.c_str(), timeCheckStats.size());
- dprintf(fd, "\n");
}
- // dump mutex stats
- const auto mutexStats = audio_utils::mutex::all_stats_to_string();
- write(fd, mutexStats.c_str(), mutexStats.size());
+ } // end af lock
- // dump held mutexes
- const auto mutexThreadInfo = audio_utils::mutex::all_threads_to_string();
- write(fd, mutexThreadInfo.c_str(), mutexThreadInfo.size());
+ if (parsedArgs.shouldDumpStats) {
+ dprintf(fd, "\n ## BEGIN stats dump \n");
+ dumpStats(fd);
}
+
+ if (parsedArgs.shouldDumpMem) {
+ dprintf(fd, "\n ## BEGIN memory dump \n");
+ writeStr(fd, dumpMemoryAddresses(100 /* limit */));
+ dprintf(fd, "\nDumping unreachable memory:\n");
+ // TODO - should limit be an argument parameter?
+ writeStr(fd, GetUnreachableMemoryString(true /* contents */, 100 /* limit */));
+ }
+
return NO_ERROR;
}
-sp<Client> AudioFlinger::registerPid(pid_t pid)
+sp<Client> AudioFlinger::registerClient(pid_t pid, uid_t uid)
{
audio_utils::lock_guard _cl(clientMutex());
// If pid is already in the mClients wp<> map, then use that entry
// (for which promote() is always != 0), otherwise create a new entry and Client.
sp<Client> client = mClients.valueFor(pid).promote();
if (client == 0) {
- client = sp<Client>::make(sp<IAfClientCallback>::fromExisting(this), pid);
+ client = sp<Client>::make(sp<IAfClientCallback>::fromExisting(this), pid, uid);
mClients.add(pid, client);
}
@@ -1081,6 +1125,7 @@
bool isSpatialized = false;
bool isBitPerfect = false;
float volume;
+ bool muted;
audio_io_handle_t effectThreadId = AUDIO_IO_HANDLE_NONE;
std::vector<int> effectIds;
@@ -1127,6 +1172,7 @@
adjAttributionSource = std::move(validatedAttrSource).unwrapInto();
}
+ DeviceIdVector selectedDeviceIds;
audio_session_t sessionId = input.sessionId;
if (sessionId == AUDIO_SESSION_ALLOCATE) {
sessionId = (audio_session_t) newAudioUniqueId(AUDIO_UNIQUE_ID_USE_SESSION);
@@ -1137,11 +1183,14 @@
output.sessionId = sessionId;
output.outputId = AUDIO_IO_HANDLE_NONE;
- output.selectedDeviceId = input.selectedDeviceId;
+ if (input.selectedDeviceId != AUDIO_PORT_HANDLE_NONE) {
+ selectedDeviceIds.push_back(input.selectedDeviceId);
+ }
lStatus = AudioSystem::getOutputForAttr(&localAttr, &output.outputId, sessionId, &streamType,
adjAttributionSource, &input.config, input.flags,
- &output.selectedDeviceId, &portId, &secondaryOutputs,
- &isSpatialized, &isBitPerfect, &volume);
+ &selectedDeviceIds, &portId, &secondaryOutputs,
+ &isSpatialized, &isBitPerfect, &volume, &muted);
+ output.selectedDeviceIds = selectedDeviceIds;
if (lStatus != NO_ERROR || output.outputId == AUDIO_IO_HANDLE_NONE) {
ALOGE("createTrack() getOutputForAttr() return error %d or invalid output handle", lStatus);
@@ -1178,7 +1227,7 @@
goto Exit;
}
- client = registerPid(adjAttributionSource.pid);
+ client = registerClient(adjAttributionSource.pid, adjAttributionSource.uid);
IAfPlaybackThread* effectThread = nullptr;
sp<IAfEffectChain> effectChain = nullptr;
@@ -1198,7 +1247,7 @@
if (effectThread == nullptr) {
effectChain = getOrphanEffectChain_l(sessionId);
}
- ALOGV("createTrack() sessionId: %d volume: %f", sessionId, volume);
+ ALOGV("createTrack() sessionId: %d volume: %f muted %d", sessionId, volume, muted);
output.sampleRate = input.config.sample_rate;
output.frameCount = input.frameCount;
@@ -1213,7 +1262,7 @@
input.sharedBuffer, sessionId, &output.flags,
callingPid, adjAttributionSource, input.clientInfo.clientTid,
&lStatus, portId, input.audioTrackCallback, isSpatialized,
- isBitPerfect, &output.afTrackFlags, volume);
+ isBitPerfect, &output.afTrackFlags, volume, muted);
LOG_ALWAYS_FATAL_IF((lStatus == NO_ERROR) && (track == 0));
// we don't abort yet if lStatus != NO_ERROR; there is still work to be done regardless
@@ -1637,7 +1686,7 @@
}
status_t AudioFlinger::setStreamVolume(audio_stream_type_t stream, float value,
- audio_io_handle_t output)
+ bool muted, audio_io_handle_t output)
{
// check calling permissions
if (!settingsAllowed()) {
@@ -1659,14 +1708,14 @@
if (volumeInterface == NULL) {
return BAD_VALUE;
}
- volumeInterface->setStreamVolume(stream, value);
+ volumeInterface->setStreamVolume(stream, value, muted);
return NO_ERROR;
}
status_t AudioFlinger::setPortsVolume(
- const std::vector<audio_port_handle_t>& ports, float volume, audio_io_handle_t output)
-{
+ const std::vector<audio_port_handle_t> &ports, float volume, bool muted,
+ audio_io_handle_t output) {
for (const auto& port : ports) {
if (port == AUDIO_PORT_HANDLE_NONE) {
return BAD_VALUE;
@@ -1681,12 +1730,12 @@
audio_utils::lock_guard lock(mutex());
IAfPlaybackThread *thread = checkPlaybackThread_l(output);
if (thread != nullptr) {
- return thread->setPortsVolume(ports, volume);
+ return thread->setPortsVolume(ports, volume, muted);
}
const sp<IAfMmapThread> mmapThread = checkMmapThread_l(output);
if (mmapThread != nullptr && mmapThread->isOutput()) {
IAfMmapPlaybackThread *mmapPlaybackThread = mmapThread->asIAfMmapPlaybackThread().get();
- return mmapPlaybackThread->setPortsVolume(ports, volume);
+ return mmapPlaybackThread->setPortsVolume(ports, volume, muted);
}
return BAD_VALUE;
}
@@ -2473,7 +2522,7 @@
output.selectedDeviceId = input.selectedDeviceId;
output.flags = input.flags;
- client = registerPid(VALUE_OR_FATAL(aidl2legacy_int32_t_pid_t(adjAttributionSource.pid)));
+ client = registerClient(adjAttributionSource.pid, adjAttributionSource.uid);
// Not a conventional loop, but a retry loop for at most two iterations total.
// Try first maybe with FAST flag then try again without FAST flag if that fails.
@@ -4081,7 +4130,8 @@
0ns /* timeout */,
frameCountToBeReady,
track->getSpeed(),
- 1.f /* volume */);
+ 1.f /* volume */,
+ false /* muted */);
status = patchTrack->initCheck();
if (status != NO_ERROR) {
ALOGE("Secondary output patchTrack init failed: %d", status);
@@ -4396,7 +4446,7 @@
audio_utils::lock_guard _l(mutex());
if (sessionId == AUDIO_SESSION_DEVICE) {
- sp<Client> client = registerPid(currentPid);
+ sp<Client> client = registerClient(currentPid, adjAttributionSource.uid);
ALOGV("%s device type %#x address %s", __func__, device.mType, device.getAddress());
handle = mDeviceEffectManager->createEffect_l(
&descOut, device, client, effectClient, mPatchPanel->patches_l(),
@@ -4458,7 +4508,7 @@
goto Exit;
}
ALOGV("%s() got io %d for effect %s", __func__, io, descOut.name);
- sp<Client> client = registerPid(currentPid);
+ sp<Client> client = registerClient(currentPid, adjAttributionSource.uid);
bool pinned = !audio_is_global_session(sessionId) && isSessionAcquired_l(sessionId);
handle = createOrphanEffect_l(client, effectClient, priority, sessionId,
&descOut, &enabledOut, &lStatus, pinned,
@@ -4520,7 +4570,7 @@
}
}
- sp<Client> client = registerPid(currentPid);
+ sp<Client> client = registerClient(currentPid, adjAttributionSource.uid);
// create effect on selected output thread
bool pinned = !audio_is_global_session(sessionId) && isSessionAcquired_l(sessionId);
diff --git a/services/audioflinger/AudioFlinger.h b/services/audioflinger/AudioFlinger.h
index 50fd48c..133410e 100644
--- a/services/audioflinger/AudioFlinger.h
+++ b/services/audioflinger/AudioFlinger.h
@@ -27,6 +27,7 @@
#include "IAfTrack.h"
#include "MelReporter.h"
#include "PatchCommandThread.h"
+#include "audio_utils/clock.h"
// External classes
#include <audio_utils/mutex.h>
@@ -65,6 +66,11 @@
status_t resetReferencesForTest();
+ // Called by main when startup finished -- for logging purposes only
+ void startupFinished() {
+ mStartupFinishedTime.store(audio_utils_get_real_time_ns(), std::memory_order_release);
+ }
+
private:
// ---- begin IAudioFlinger interface
@@ -93,12 +99,12 @@
status_t getMasterBalance(float* balance) const final EXCLUDES_AudioFlinger_Mutex;
status_t setStreamVolume(audio_stream_type_t stream, float value,
- audio_io_handle_t output) final EXCLUDES_AudioFlinger_Mutex;
+ bool muted, audio_io_handle_t output) final EXCLUDES_AudioFlinger_Mutex;
status_t setStreamMute(audio_stream_type_t stream, bool muted) final
EXCLUDES_AudioFlinger_Mutex;
status_t setPortsVolume(const std::vector<audio_port_handle_t>& portIds, float volume,
- audio_io_handle_t output) final EXCLUDES_AudioFlinger_Mutex;
+ bool muted, audio_io_handle_t output) final EXCLUDES_AudioFlinger_Mutex;
status_t setMode(audio_mode_t mode) final EXCLUDES_AudioFlinger_Mutex;
@@ -420,6 +426,10 @@
sp<EffectsFactoryHalInterface> getEffectsFactory();
+ int64_t getStartupFinishedTime() {
+ return mStartupFinishedTime.load(std::memory_order_acquire);
+ }
+
public:
// TODO(b/292281786): Remove this when Oboeservice can get access to
// openMmapStream through an IAudioFlinger handle directly.
@@ -429,7 +439,7 @@
const audio_attributes_t *attr,
audio_config_base_t *config,
const AudioClient& client,
- audio_port_handle_t *deviceId,
+ DeviceIdVector *deviceIds,
audio_session_t *sessionId,
const sp<MmapStreamCallback>& callback,
sp<MmapStreamInterface>& interface,
@@ -469,9 +479,10 @@
// AudioFlinger::setParameters() updates with mutex().
std::atomic_uint32_t mScreenState{};
- void dumpPermissionDenial(int fd, const Vector<String16>& args);
- void dumpClients_ll(int fd, const Vector<String16>& args) REQUIRES(mutex(), clientMutex());
- void dumpInternals_l(int fd, const Vector<String16>& args) REQUIRES(mutex());
+ void dumpPermissionDenial(int fd);
+ void dumpClients_ll(int fd, bool dumpAllocators) REQUIRES(mutex(), clientMutex());
+ void dumpInternals_l(int fd) REQUIRES(mutex());
+ void dumpStats(int fd);
SimpleLog mThreadLog{16}; // 16 Thread history limit
@@ -727,7 +738,8 @@
// Audio data transfer is directly handled by the client creating the MMAP stream
DefaultKeyedVector<audio_io_handle_t, sp<IAfMmapThread>> mMmapThreads GUARDED_BY(mutex());
- sp<Client> registerPid(pid_t pid) EXCLUDES_AudioFlinger_ClientMutex; // always returns non-0
+ // always returns non-null
+ sp<Client> registerClient(pid_t pid, uid_t uid) EXCLUDES_AudioFlinger_ClientMutex;
sp<IAfEffectHandle> createOrphanEffect_l(const sp<Client>& client,
const sp<media::IEffectClient>& effectClient,
@@ -801,6 +813,10 @@
// Local interface to AudioPolicyService, late inited, but logically const
mediautils::atomic_sp<media::IAudioPolicyServiceLocal> mAudioPolicyServiceLocal;
+
+ const int64_t mStartTime = audio_utils_get_real_time_ns();
+ // Late-inited from main()
+ std::atomic<int64_t> mStartupFinishedTime {};
};
// ----------------------------------------------------------------------------
diff --git a/services/audioflinger/Client.cpp b/services/audioflinger/Client.cpp
index 93599ac..4858469 100644
--- a/services/audioflinger/Client.cpp
+++ b/services/audioflinger/Client.cpp
@@ -18,9 +18,10 @@
namespace android {
-Client::Client(const sp<IAfClientCallback>& afClientCallback, pid_t pid)
+Client::Client(const sp<IAfClientCallback>& afClientCallback, pid_t pid, uid_t uid)
: mAfClientCallback(afClientCallback)
, mPid(pid)
+ , mUid(uid)
, mClientAllocator(AllocatorFactory::getClientAllocator()) {}
// Client destructor must be called with AudioFlinger::mClientLock held
@@ -34,4 +35,4 @@
return mClientAllocator;
}
-} // namespace android
\ No newline at end of file
+} // namespace android
diff --git a/services/audioflinger/Client.h b/services/audioflinger/Client.h
index ff0d751..c2fef39 100644
--- a/services/audioflinger/Client.h
+++ b/services/audioflinger/Client.h
@@ -42,13 +42,14 @@
class Client : public RefBase {
public:
- Client(const sp<IAfClientCallback>& audioFlinger, pid_t pid);
+ Client(const sp<IAfClientCallback>& audioFlinger, pid_t pid, uid_t uid);
// TODO(b/289139675) make Client container.
// Client destructor must be called with AudioFlinger::mClientLock held
~Client() override;
AllocatorFactory::ClientAllocator& allocator();
pid_t pid() const { return mPid; }
+ uid_t uid() const { return mUid; }
const auto& afClientCallback() const { return mAfClientCallback; }
private:
@@ -56,6 +57,7 @@
const sp<IAfClientCallback> mAfClientCallback;
const pid_t mPid;
+ const uid_t mUid;
AllocatorFactory::ClientAllocator mClientAllocator;
};
diff --git a/services/audioflinger/DeviceEffectManager.cpp b/services/audioflinger/DeviceEffectManager.cpp
index 7cb9329..42c6401 100644
--- a/services/audioflinger/DeviceEffectManager.cpp
+++ b/services/audioflinger/DeviceEffectManager.cpp
@@ -22,7 +22,7 @@
#include "EffectConfiguration.h"
-#include <afutils/DumpTryLock.h>
+#include <afutils/FallibleLockGuard.h>
#include <audio_utils/primitives.h>
#include <media/audiohal/EffectsFactoryHalInterface.h>
#include <utils/Log.h>
@@ -208,10 +208,9 @@
}
void DeviceEffectManager::dump(int fd)
-NO_THREAD_SAFETY_ANALYSIS // conditional try lock
{
- const bool locked = afutils::dumpTryLock(mutex());
- if (!locked) {
+ afutils::FallibleLockGuard l{mutex()};
+ if (!l) {
String8 result("DeviceEffectManager may be deadlocked\n");
write(fd, result.c_str(), result.size());
}
@@ -227,10 +226,6 @@
effect->dump2(fd, 4);
}
}
-
- if (locked) {
- mutex().unlock();
- }
}
size_t DeviceEffectManager::removeEffect(const sp<IAfDeviceEffectProxy>& effect)
diff --git a/services/audioflinger/Effects.cpp b/services/audioflinger/Effects.cpp
index 84505d3..b9d3ebe 100644
--- a/services/audioflinger/Effects.cpp
+++ b/services/audioflinger/Effects.cpp
@@ -24,7 +24,7 @@
#include "Client.h"
#include "EffectConfiguration.h"
-#include <afutils/DumpTryLock.h>
+#include <afutils/FallibleLockGuard.h>
#include <audio_utils/channels.h>
#include <audio_utils/primitives.h>
#include <media/AudioCommonTypes.h>
@@ -506,52 +506,50 @@
}
void EffectBase::dump(int fd, const Vector<String16>& args __unused) const
-NO_THREAD_SAFETY_ANALYSIS // conditional try lock
{
String8 result;
result.appendFormat("\tEffect ID %d:\n", mId);
- const bool locked = afutils::dumpTryLock(mutex());
- // failed to lock - AudioFlinger is probably deadlocked
- if (!locked) {
- result.append("\t\tCould not lock Fx mutex:\n");
- }
- bool isInternal = isInternal_l();
- result.append("\t\tSession State Registered Internal Enabled Suspended:\n");
- result.appendFormat("\t\t%05d %03d %s %s %s %s\n",
- mSessionId, mState, mPolicyRegistered ? "y" : "n", isInternal ? "y" : "n",
- ((isInternal && isEnabled()) || (!isInternal && mPolicyEnabled)) ? "y" : "n",
- mSuspended ? "y" : "n");
-
- result.append("\t\tDescriptor:\n");
- char uuidStr[64];
- AudioEffect::guidToString(&mDescriptor.uuid, uuidStr, sizeof(uuidStr));
- result.appendFormat("\t\t- UUID: %s\n", uuidStr);
- AudioEffect::guidToString(&mDescriptor.type, uuidStr, sizeof(uuidStr));
- result.appendFormat("\t\t- TYPE: %s\n", uuidStr);
- result.appendFormat("\t\t- apiVersion: %08X\n\t\t- flags: %08X (%s)\n",
- mDescriptor.apiVersion,
- mDescriptor.flags,
- effectFlagsToString(mDescriptor.flags).c_str());
- result.appendFormat("\t\t- name: %s\n",
- mDescriptor.name);
-
- result.appendFormat("\t\t- implementor: %s\n",
- mDescriptor.implementor);
-
- result.appendFormat("\t\t%zu Clients:\n", mHandles.size());
- result.append("\t\t\t Pid Priority Ctrl Locked client server\n");
- char buffer[256];
- for (size_t i = 0; i < mHandles.size(); ++i) {
- IAfEffectHandle *handle = mHandles[i];
- if (handle != NULL && !handle->disconnected()) {
- handle->dumpToBuffer(buffer, sizeof(buffer));
- result.append(buffer);
+ {
+ afutils::FallibleLockGuard l{mutex()};
+ // failed to lock - AudioFlinger is probably deadlocked
+ if (!l) {
+ result.append("\t\tCould not lock Fx mutex:\n");
}
- }
- if (locked) {
- mutex().unlock();
+ bool isInternal = isInternal_l();
+ result.append("\t\tSession State Registered Internal Enabled Suspended:\n");
+ result.appendFormat("\t\t%05d %03d %s %s %s %s\n",
+ mSessionId, mState, mPolicyRegistered ? "y" : "n", isInternal ? "y" : "n",
+ ((isInternal && isEnabled()) || (!isInternal && mPolicyEnabled)) ? "y" : "n",
+ mSuspended ? "y" : "n");
+
+ result.append("\t\tDescriptor:\n");
+ char uuidStr[64];
+ AudioEffect::guidToString(&mDescriptor.uuid, uuidStr, sizeof(uuidStr));
+ result.appendFormat("\t\t- UUID: %s\n", uuidStr);
+ AudioEffect::guidToString(&mDescriptor.type, uuidStr, sizeof(uuidStr));
+ result.appendFormat("\t\t- TYPE: %s\n", uuidStr);
+ result.appendFormat("\t\t- apiVersion: %08X\n\t\t- flags: %08X (%s)\n",
+ mDescriptor.apiVersion,
+ mDescriptor.flags,
+ effectFlagsToString(mDescriptor.flags).c_str());
+ result.appendFormat("\t\t- name: %s\n",
+ mDescriptor.name);
+
+ result.appendFormat("\t\t- implementor: %s\n",
+ mDescriptor.implementor);
+
+ result.appendFormat("\t\t%zu Clients:\n", mHandles.size());
+ result.append("\t\t\t Pid Priority Ctrl Locked client server\n");
+ char buffer[256];
+ for (size_t i = 0; i < mHandles.size(); ++i) {
+ IAfEffectHandle *handle = mHandles[i];
+ if (handle != NULL && !handle->disconnected()) {
+ handle->dumpToBuffer(buffer, sizeof(buffer));
+ result.append(buffer);
+ }
+ }
}
write(fd, result.c_str(), result.length());
@@ -1710,13 +1708,11 @@
return ss.str();
}
-void EffectModule::dump(int fd, const Vector<String16>& args) const
-NO_THREAD_SAFETY_ANALYSIS // conditional try lock
-{
+void EffectModule::dump(int fd, const Vector<String16>& args) const {
EffectBase::dump(fd, args);
String8 result;
- const bool locked = afutils::dumpTryLock(mutex());
+ afutils::FallibleLockGuard l{mutex()};
result.append("\t\tStatus Engine:\n");
result.appendFormat("\t\t%03d %p\n",
@@ -1758,9 +1754,6 @@
(void)mEffectInterface->dump(fd);
}
- if (locked) {
- mutex().unlock();
- }
}
// ----------------------------------------------------------------------------
@@ -2203,22 +2196,20 @@
}
void EffectHandle::dumpToBuffer(char* buffer, size_t size) const
-NO_THREAD_SAFETY_ANALYSIS // conditional try lock
{
- const bool locked = mCblk != nullptr && afutils::dumpTryLock(mCblk->lock);
+ std::optional<afutils::FallibleLockGuard> guard;
+ if (mCblk != nullptr) {
+ guard.emplace(mCblk->lock);
+ }
snprintf(buffer, size, "\t\t\t%5d %5d %3s %3s %5u %5u\n",
(mClient == 0) ? getpid() : mClient->pid(),
mPriority,
mHasControl ? "yes" : "no",
- locked ? "yes" : "no",
+ guard.has_value() && *guard ? "yes" : "no",
mCblk ? mCblk->clientIndex : 0,
mCblk ? mCblk->serverIndex : 0
);
-
- if (locked) {
- mCblk->lock.unlock();
- }
}
#undef LOG_TAG
@@ -2803,41 +2794,36 @@
}
void EffectChain::dump(int fd, const Vector<String16>& args) const
-NO_THREAD_SAFETY_ANALYSIS // conditional try lock
{
String8 result;
-
+ afutils::FallibleLockGuard l{mutex()};
const size_t numEffects = mEffects.size();
result.appendFormat(" %zu effects for session %d\n", numEffects, mSessionId);
-
- if (numEffects) {
- const bool locked = afutils::dumpTryLock(mutex());
- // failed to lock - AudioFlinger is probably deadlocked
- if (!locked) {
- result.append("\tCould not lock mutex:\n");
- }
-
- const std::string inBufferStr = dumpInOutBuffer(true /* isInput */, mInBuffer);
- const std::string outBufferStr = dumpInOutBuffer(false /* isInput */, mOutBuffer);
- result.appendFormat("\t%-*s%-*s Active tracks:\n",
- (int)inBufferStr.size(), "In buffer ",
- (int)outBufferStr.size(), "Out buffer ");
- result.appendFormat("\t%s %s %d\n",
- inBufferStr.c_str(), outBufferStr.c_str(), mActiveTrackCnt);
+ if (numEffects == 0) {
write(fd, result.c_str(), result.size());
+ return;
+ }
- for (size_t i = 0; i < numEffects; ++i) {
- sp<IAfEffectModule> effect = mEffects[i];
- if (effect != 0) {
- effect->dump(fd, args);
- }
- }
- if (locked) {
- mutex().unlock();
+ // failed to lock - AudioFlinger is probably deadlocked
+ if (!l) {
+ result.append("\tCould not lock mutex:\n");
+ }
+
+ const std::string inBufferStr = dumpInOutBuffer(true /* isInput */, mInBuffer);
+ const std::string outBufferStr = dumpInOutBuffer(false /* isInput */, mOutBuffer);
+ result.appendFormat("\t%-*s%-*s Active tracks:\n",
+ (int)inBufferStr.size(), "In buffer ",
+ (int)outBufferStr.size(), "Out buffer ");
+ result.appendFormat("\t%s %s %d\n",
+ inBufferStr.c_str(), outBufferStr.c_str(), mActiveTrackCnt);
+ write(fd, result.c_str(), result.size());
+
+ for (size_t i = 0; i < numEffects; ++i) {
+ sp<IAfEffectModule> effect = mEffects[i];
+ if (effect != 0) {
+ effect->dump(fd, args);
}
- } else {
- write(fd, result.c_str(), result.size());
}
}
@@ -3712,13 +3698,13 @@
}
void DeviceEffectProxy::dump2(int fd, int spaces) const
-NO_THREAD_SAFETY_ANALYSIS // conditional try lock
{
const Vector<String16> args;
EffectBase::dump(fd, args);
- const bool locked = afutils::dumpTryLock(proxyMutex());
- if (!locked) {
+ afutils::FallibleLockGuard l{proxyMutex()};
+
+ if (!l) {
String8 result("DeviceEffectProxy may be deadlocked\n");
write(fd, result.c_str(), result.size());
}
@@ -3745,10 +3731,6 @@
effect->dump(fd, args);
}
}
-
- if (locked) {
- proxyMutex().unlock();
- }
}
#undef LOG_TAG
diff --git a/services/audioflinger/IAfEffect.h b/services/audioflinger/IAfEffect.h
index 3452e94..3a059b6 100644
--- a/services/audioflinger/IAfEffect.h
+++ b/services/audioflinger/IAfEffect.h
@@ -345,7 +345,8 @@
// sendMetadata_l() must be called with thread->mLock held
virtual void sendMetadata_l(const std::vector<playback_track_metadata_v7_t>& allMetadata,
- const std::optional<const std::vector<playback_track_metadata_v7_t>> spatializedMetadata);
+ const std::optional<const std::vector<playback_track_metadata_v7_t>>
+ spatializedMetadata) = 0;
virtual void dump(int fd, const Vector<String16>& args) const = 0;
};
diff --git a/services/audioflinger/IAfThread.h b/services/audioflinger/IAfThread.h
index 8596acb..3163d4c 100644
--- a/services/audioflinger/IAfThread.h
+++ b/services/audioflinger/IAfThread.h
@@ -402,7 +402,7 @@
// the Thread is not busy releasing the Tracks, during which the Thread mutex
// may be temporarily unlocked. Some Track methods will use this method to
// avoid races.
- virtual void waitWhileThreadBusy_l(audio_utils::unique_lock& ul)
+ virtual void waitWhileThreadBusy_l(audio_utils::unique_lock<audio_utils::mutex>& ul)
REQUIRES(mutex()) = 0;
// The ThreadloopExecutor is used to defer functors or dtors
@@ -481,7 +481,8 @@
bool isSpatialized,
bool isBitPerfect,
audio_output_flags_t* afTrackFlags,
- float volume)
+ float volume,
+ bool muted)
REQUIRES(audio_utils::AudioFlinger_Mutex) = 0;
virtual status_t addTrack_l(const sp<IAfTrack>& track) REQUIRES(mutex()) = 0;
@@ -558,8 +559,8 @@
virtual void setTracksInternalMute(std::map<audio_port_handle_t, bool>* tracksInternalMute)
EXCLUDES_ThreadBase_Mutex = 0;
- virtual status_t setPortsVolume(const std::vector<audio_port_handle_t>& portIds, float volume)
- EXCLUDES_ThreadBase_Mutex = 0;
+ virtual status_t setPortsVolume(const std::vector<audio_port_handle_t> &portIds, float volume,
+ bool muted) EXCLUDES_ThreadBase_Mutex = 0;
};
class IAfDirectOutputThread : public virtual IAfPlaybackThread {
@@ -660,7 +661,7 @@
audio_stream_type_t streamType,
audio_session_t sessionId,
const sp<MmapStreamCallback>& callback,
- audio_port_handle_t deviceId,
+ const DeviceIdVector& deviceIds,
audio_port_handle_t portId) EXCLUDES_ThreadBase_Mutex = 0;
virtual void disconnect() EXCLUDES_ThreadBase_Mutex = 0;
@@ -700,8 +701,8 @@
virtual AudioStreamOut* clearOutput() EXCLUDES_ThreadBase_Mutex = 0;
- virtual status_t setPortsVolume(const std::vector<audio_port_handle_t>& portIds, float volume)
- EXCLUDES_ThreadBase_Mutex = 0;
+ virtual status_t setPortsVolume(const std::vector<audio_port_handle_t>& portIds, float volume,
+ bool muted) EXCLUDES_ThreadBase_Mutex = 0;
};
class IAfMmapCaptureThread : public virtual IAfMmapThread {
diff --git a/services/audioflinger/IAfTrack.h b/services/audioflinger/IAfTrack.h
index ee834d6..d27d52a 100644
--- a/services/audioflinger/IAfTrack.h
+++ b/services/audioflinger/IAfTrack.h
@@ -291,7 +291,18 @@
float speed = 1.0f,
bool isSpatialized = false,
bool isBitPerfect = false,
- float volume = 0.0f);
+ float volume = 0.0f,
+ bool muted = false);
+
+ static constexpr std::string_view getLogHeader() {
+ using namespace std::literals;
+ return "Type Id Active Client(pid/uid) Session Port Id S Flags "
+ " Format Chn mask SRate "
+ "ST Usg CT "
+ " G db L dB R dB VS dB PortVol dB PortMuted "
+ " Server FrmCnt FrmRdy F Underruns Flushed BitPerfect InternalMute"
+ " Latency\n"sv;
+ }
virtual void pause() = 0;
virtual void flush() = 0;
@@ -466,7 +477,14 @@
const android::content::AttributionSourceState& attributionSource,
pid_t creatorPid,
audio_port_handle_t portId = AUDIO_PORT_HANDLE_NONE,
- float volume = 0.0f);
+ float volume = 0.0f,
+ bool muted = false);
+
+ static constexpr std::string_view getLogHeader() {
+ using namespace std::literals;
+ return "Client(pid/uid) Session Port Id"
+ " Format Chn mask SRate Flags Usg/Src PortVol dB PortMuted\n"sv;
+ };
// protected by MMapThread::mLock
virtual void setSilenced_l(bool silenced) = 0;
@@ -511,6 +529,13 @@
audio_port_handle_t portId = AUDIO_PORT_HANDLE_NONE,
int32_t startFrames = -1);
+ static constexpr std::string_view getLogHeader() {
+ using namespace std::literals;
+ return "Active Id Client(pid/uid) Session Port Id S Flags "
+ " Format Chn mask SRate Source "
+ " Server FrmCnt FrmRdy Sil Latency\n"sv;
+ }
+
// clear the buffer overflow flag
virtual void clearOverflow() = 0;
// set the buffer overflow flag and return previous value
@@ -587,7 +612,8 @@
* the lowest possible latency
* even if it might glitch. */
float speed = 1.0f,
- float volume = 1.0f);
+ float volume = 1.0f,
+ bool muted = false);
};
class IAfPatchRecord : public virtual IAfRecordTrack, public virtual IAfPatchTrackBase {
diff --git a/services/audioflinger/MmapTracks.h b/services/audioflinger/MmapTracks.h
index 8758bd0..0210bc2 100644
--- a/services/audioflinger/MmapTracks.h
+++ b/services/audioflinger/MmapTracks.h
@@ -36,7 +36,8 @@
const android::content::AttributionSourceState& attributionSource,
pid_t creatorPid,
audio_port_handle_t portId = AUDIO_PORT_HANDLE_NONE,
- float volume = 0.0f);
+ float volume = 0.0f,
+ bool muted = false);
~MmapTrack() override;
status_t initCheck() const final;
@@ -71,7 +72,11 @@
void setPortVolume(float volume) override {
mVolume = volume;
}
+ void setPortMute(bool muted) override {
+ mMutedFromPort = muted;
+ }
float getPortVolume() const override { return mVolume; }
+ bool getPortMute() const override { return mMutedFromPort; }
private:
DISALLOW_COPY_AND_ASSIGN(MmapTrack);
@@ -86,6 +91,7 @@
void onTimestamp(const ExtendedTimestamp ×tamp) final;
const pid_t mPid;
+ const uid_t mUid;
bool mSilenced; // protected by MMapThread::mLock
bool mSilencedNotified; // protected by MMapThread::mLock
@@ -95,8 +101,9 @@
/* GUARDED_BY(MmapPlaybackThread::mLock) */;
mute_state_t mMuteState
/* GUARDED_BY(MmapPlaybackThread::mLock) */;
+ bool mMutedFromPort;
float mVolume = 0.0f;
}; // end of Track
-} // namespace android
\ No newline at end of file
+} // namespace android
diff --git a/services/audioflinger/PatchPanel.cpp b/services/audioflinger/PatchPanel.cpp
index d0b96de..be59299 100644
--- a/services/audioflinger/PatchPanel.cpp
+++ b/services/audioflinger/PatchPanel.cpp
@@ -650,7 +650,8 @@
{} /*timeout*/,
frameCountToBeReady,
1.0f /*speed*/,
- 1.0f /*volume*/);
+ 1.0f /*volume*/,
+ false /*muted*/);
status = mPlayback.checkTrack(tempPatchTrack.get());
if (status != NO_ERROR) {
return status;
diff --git a/services/audioflinger/PlaybackTracks.h b/services/audioflinger/PlaybackTracks.h
index 84758a4..2c3212c 100644
--- a/services/audioflinger/PlaybackTracks.h
+++ b/services/audioflinger/PlaybackTracks.h
@@ -97,7 +97,8 @@
float speed = 1.0f,
bool isSpatialized = false,
bool isBitPerfect = false,
- float volume = 0.0f);
+ float volume = 0.0f,
+ bool muted = false);
~Track() override;
status_t initCheck() const final;
void appendDumpHeader(String8& result) const final;
@@ -226,7 +227,9 @@
// VolumePortInterface implementation
void setPortVolume(float volume) override;
+ void setPortMute(bool muted) override;
float getPortVolume() const override { return mVolume; }
+ bool getPortMute() const override { return mMutedFromPort; }
protected:
@@ -410,9 +413,10 @@
// TODO: replace PersistableBundle with own struct
// access these two variables only when holding player thread lock.
std::unique_ptr<os::PersistableBundle> mMuteEventExtras;
- mute_state_t mMuteState;
- bool mInternalMute = false;
- std::atomic<float> mVolume = 0.0f;
+ std::atomic<mute_state_t> mMuteState;
+ std::atomic<bool> mMutedFromPort;
+ bool mInternalMute = false;
+ std::atomic<float> mVolume = 0.0f;
}; // end of Track
@@ -510,7 +514,8 @@
* the lowest possible latency
* even if it might glitch. */
float speed = 1.0f,
- float volume = 1.0f);
+ float volume = 1.0f,
+ bool muted = false);
~PatchTrack() override;
size_t framesReady() const final;
diff --git a/services/audioflinger/Threads.cpp b/services/audioflinger/Threads.cpp
index c45405b..200175b 100644
--- a/services/audioflinger/Threads.cpp
+++ b/services/audioflinger/Threads.cpp
@@ -27,7 +27,7 @@
#include "MelReporter.h"
#include "ResamplerBufferProvider.h"
-#include <afutils/DumpTryLock.h>
+#include <afutils/FallibleLockGuard.h>
#include <afutils/Permission.h>
#include <afutils/TypedLogger.h>
#include <afutils/Vibrator.h>
@@ -338,28 +338,32 @@
// under #ifdef __cplusplus #endif
static std::string patchSinksToString(const struct audio_patch *patch)
{
- std::stringstream ss;
+ std::string s;
for (size_t i = 0; i < patch->num_sinks; ++i) {
- if (i > 0) {
- ss << "|";
+ if (i > 0) s.append("|");
+ if (patch->sinks[i].ext.device.address[0]) {
+ s.append("(").append(toString(patch->sinks[i].ext.device.type))
+ .append(", ").append(patch->sinks[i].ext.device.address).append(")");
+ } else {
+ s.append(toString(patch->sinks[i].ext.device.type));
}
- ss << "(" << toString(patch->sinks[i].ext.device.type)
- << ", " << patch->sinks[i].ext.device.address << ")";
}
- return ss.str();
+ return s;
}
static std::string patchSourcesToString(const struct audio_patch *patch)
{
- std::stringstream ss;
+ std::string s;
for (size_t i = 0; i < patch->num_sources; ++i) {
- if (i > 0) {
- ss << "|";
+ if (i > 0) s.append("|");
+ if (patch->sources[i].ext.device.address[0]) {
+ s.append("(").append(toString(patch->sources[i].ext.device.type))
+ .append(", ").append(patch->sources[i].ext.device.address).append(")");
+ } else {
+ s.append(toString(patch->sources[i].ext.device.type));
}
- ss << "(" << toString(patch->sources[i].ext.device.type)
- << ", " << patch->sources[i].ext.device.address << ")";
}
- return ss.str();
+ return s;
}
static std::string toString(audio_latency_mode_t mode) {
@@ -1042,27 +1046,25 @@
}
void ThreadBase::dump(int fd, const Vector<String16>& args)
-NO_THREAD_SAFETY_ANALYSIS // conditional try lock
{
dprintf(fd, "\n%s thread %p, name %s, tid %d, type %d (%s):\n", isOutput() ? "Output" : "Input",
this, mThreadName, getTid(), type(), threadTypeToString(type()));
- const bool locked = afutils::dumpTryLock(mutex());
- if (!locked) {
- dprintf(fd, " Thread may be deadlocked\n");
- }
-
- dumpBase_l(fd, args);
- dumpInternals_l(fd, args);
- dumpTracks_l(fd, args);
- dumpEffectChains_l(fd, args);
-
- if (locked) {
- mutex().unlock();
+ {
+ afutils::FallibleLockGuard l{mutex()};
+ if (!l) {
+ dprintf(fd, " Thread may be deadlocked\n");
+ }
+ dumpBase_l(fd, args);
+ dumpInternals_l(fd, args);
+ dumpTracks_l(fd, args);
+ dumpEffectChains_l(fd, args);
}
dprintf(fd, " Local log:\n");
- mLocalLog.dump(fd, " " /* prefix */, 40 /* lines */);
+ const auto logHeader = this->getLocalLogHeader();
+ write(fd, logHeader.data(), logHeader.length());
+ mLocalLog.dump(fd, " " /* prefix */);
// --all does the statistics
bool dumpAll = false;
@@ -2407,7 +2409,8 @@
bool isSpatialized,
bool isBitPerfect,
audio_output_flags_t *afTrackFlags,
- float volume)
+ float volume,
+ bool muted)
{
size_t frameCount = *pFrameCount;
size_t notificationFrameCount = *pNotificationFrameCount;
@@ -2736,7 +2739,7 @@
nullptr /* buffer */, (size_t)0 /* bufferSize */, sharedBuffer,
sessionId, creatorPid, attributionSource, trackFlags,
IAfTrackBase::TYPE_DEFAULT, portId, SIZE_MAX /*frameCountToBeReady*/,
- speed, isSpatialized, isBitPerfect, volume);
+ speed, isSpatialized, isBitPerfect, volume, muted);
lStatus = track != 0 ? track->initCheck() : (status_t) NO_MEMORY;
if (lStatus != NO_ERROR) {
@@ -2844,10 +2847,14 @@
}
}
-void PlaybackThread::setStreamVolume(audio_stream_type_t stream, float value)
+void PlaybackThread::setStreamVolume(audio_stream_type_t stream, float value, bool muted)
{
+ ALOGV("%s: stream %d value %f muted %d", __func__, stream, value, muted);
audio_utils::lock_guard _l(mutex());
mStreamTypes[stream].volume = value;
+ if (com_android_media_audio_ring_my_car()) {
+ mStreamTypes[stream].mute = muted;
+ }
broadcast_l();
}
@@ -2865,13 +2872,14 @@
}
status_t PlaybackThread::setPortsVolume(
- const std::vector<audio_port_handle_t>& portIds, float volume) {
+ const std::vector<audio_port_handle_t>& portIds, float volume, bool muted) {
audio_utils::lock_guard _l(mutex());
for (const auto& portId : portIds) {
for (size_t i = 0; i < mTracks.size(); i++) {
sp<IAfTrack> track = mTracks[i].get();
if (portId == track->portId()) {
track->setPortVolume(volume);
+ track->setPortMute(muted);
break;
}
}
@@ -5106,6 +5114,12 @@
}
}
+std::string PlaybackThread::getLocalLogHeader() const {
+ using namespace std::literals;
+ static constexpr auto indent = " "
+ " "sv;
+ return std::string{indent}.append(IAfTrack::getLogHeader());
+}
// ----------------------------------------------------------------------------
/* static */
@@ -5267,7 +5281,10 @@
mFastMixerNBLogWriter = afThreadCallback->newWriter_l(kFastMixerLogSize, "FastMixer");
state->mNBLogWriter = mFastMixerNBLogWriter.get();
sq->end();
- sq->push(FastMixerStateQueue::BLOCK_UNTIL_PUSHED);
+ {
+ audio_utils::mutex::scoped_queue_wait_check queueWaitCheck(mFastMixer->getTid());
+ sq->push(FastMixerStateQueue::BLOCK_UNTIL_PUSHED);
+ }
NBLog::thread_info_t info;
info.id = mId;
@@ -5326,8 +5343,11 @@
}
state->mCommand = FastMixerState::EXIT;
sq->end();
- sq->push(FastMixerStateQueue::BLOCK_UNTIL_PUSHED);
- mFastMixer->join();
+ {
+ audio_utils::mutex::scoped_join_wait_check queueWaitCheck(mFastMixer->getTid());
+ sq->push(FastMixerStateQueue::BLOCK_UNTIL_PUSHED);
+ mFastMixer->join();
+ }
// Though the fast mixer thread has exited, it's state queue is still valid.
// We'll use that extract the final state which contains one remaining fast track
// corresponding to our sub-mix.
@@ -5407,7 +5427,10 @@
FastThreadDumpState::kSamplingNforLowRamDevice : FastThreadDumpState::kSamplingN);
#endif
sq->end();
- sq->push(FastMixerStateQueue::BLOCK_UNTIL_PUSHED);
+ {
+ audio_utils::mutex::scoped_queue_wait_check queueWaitCheck(mFastMixer->getTid());
+ sq->push(FastMixerStateQueue::BLOCK_UNTIL_PUSHED);
+ }
if (kUseFastMixer == FastMixer_Dynamic) {
mNormalSink = mPipeSink;
}
@@ -5440,7 +5463,10 @@
mFastMixerFutex = 0;
sq->end();
// BLOCK_UNTIL_PUSHED would be insufficient, as we need it to stop doing I/O now
- sq->push(FastMixerStateQueue::BLOCK_UNTIL_ACKED);
+ {
+ audio_utils::mutex::scoped_queue_wait_check queueWaitCheck(mFastMixer->getTid());
+ sq->push(FastMixerStateQueue::BLOCK_UNTIL_ACKED);
+ }
if (kUseFastMixer == FastMixer_Dynamic) {
mNormalSink = mOutputSink;
}
@@ -5825,7 +5851,7 @@
volume = masterVolume * mStreamTypes[track->streamType()].volume;
}
} else {
- if (track->isPlaybackRestricted()) {
+ if (track->isPlaybackRestricted() || track->getPortMute()) {
volume = 0.f;
} else {
volume = masterVolume * track->getPortVolume();
@@ -5849,7 +5875,8 @@
mStreamTypes[track->streamType()].mute,
track->isPlaybackRestricted(),
vlf == 0.f && vrf == 0.f,
- vh == 0.f});
+ vh == 0.f,
+ /*muteFromPortVolume=*/false});
} else {
track->processMuteEvent_l(mAfThreadCallback->getOrCreateAudioManager(),
/*muteState=*/{masterVolume == 0.f,
@@ -5857,7 +5884,8 @@
/* muteFromStreamMuted= */ false,
track->isPlaybackRestricted(),
vlf == 0.f && vrf == 0.f,
- vh == 0.f});
+ vh == 0.f,
+ track->getPortMute()});
}
vlf *= volume;
vrf *= volume;
@@ -6021,7 +6049,7 @@
}
} else {
v = masterVolume * track->getPortVolume();
- if (track->isPlaybackRestricted()) {
+ if (track->isPlaybackRestricted() || track->getPortMute()) {
v = 0;
}
}
@@ -6051,7 +6079,8 @@
mStreamTypes[track->streamType()].mute,
track->isPlaybackRestricted(),
vlf == 0.f && vrf == 0.f,
- vh == 0.f});
+ vh == 0.f,
+ /*muteFromPortVolume=*/false});
} else {
track->processMuteEvent_l(mAfThreadCallback->getOrCreateAudioManager(),
/*muteState=*/{masterVolume == 0.f,
@@ -6059,7 +6088,8 @@
/* muteFromStreamMuted= */ false,
track->isPlaybackRestricted(),
vlf == 0.f && vrf == 0.f,
- vh == 0.f});
+ vh == 0.f,
+ track->getPortMute()});
}
// now apply the master volume and stream type volume and shaper volume
vlf *= v * vh;
@@ -6336,7 +6366,10 @@
//
// This occurs with BT suspend when we idle the FastMixer with
// active tracks, which may be added or removed.
- sq->push(coldIdle ? FastMixerStateQueue::BLOCK_NEVER : block);
+ {
+ audio_utils::mutex::scoped_queue_wait_check queueWaitCheck(mFastMixer->getTid());
+ sq->push(coldIdle ? FastMixerStateQueue::BLOCK_NEVER : block);
+ }
}
#ifdef AUDIO_WATCHDOG
if (pauseAudioWatchdog && mAudioWatchdog != 0) {
@@ -6814,7 +6847,8 @@
mStreamTypes[track->streamType()].mute,
track->isPlaybackRestricted(),
clientVolumeMute,
- shaperVolume == 0.f});
+ shaperVolume == 0.f,
+ /*muteFromPortVolume=*/false});
} else {
if (mMasterMute || track->isPlaybackRestricted()) {
left = right = 0;
@@ -6842,7 +6876,8 @@
/* muteFromStreamMuted= */ false,
track->isPlaybackRestricted(),
clientVolumeMute,
- shaperVolume == 0.f});
+ shaperVolume == 0.f,
+ track->getPortMute()});
}
if (lastTrack) {
@@ -7295,11 +7330,14 @@
{
PlaybackThread::flushHw_l();
mOutput->flush();
- mHwPaused = false;
mFlushPending = false;
mTimestampVerifier.discontinuity(discontinuityForStandbyOrFlush());
mTimestamp.clear();
mMonotonicFrameCounter.onFlush();
+ // We do not reset mHwPaused which is hidden from the Track client.
+ // Note: the client track in Tracks.cpp and AudioTrack.cpp
+ // has a FLUSHED state but the DirectOutputThread does not;
+ // those tracks will continue to show isStopped().
}
int64_t DirectOutputThread::computeWaitTimeNs_l() const {
@@ -7943,8 +7981,9 @@
return;
}
if (!audioserver_flags::portid_volume_management()) {
- thread->setStreamVolume(AUDIO_STREAM_PATCH, 1.0f);
+ thread->setStreamVolume(AUDIO_STREAM_PATCH, /*volume=*/1.0f, /*muted=*/false);
}
+
mOutputTracks.add(outputTrack);
ALOGV("addOutputTrack() track %p, on thread %p", outputTrack.get(), thread);
updateWaitTime_l();
@@ -8311,8 +8350,10 @@
afThreadCallback->newWriter_l(kFastCaptureLogSize, "FastCapture");
state->mNBLogWriter = mFastCaptureNBLogWriter.get();
sq->end();
- sq->push(FastCaptureStateQueue::BLOCK_UNTIL_PUSHED);
-
+ {
+ audio_utils::mutex::scoped_queue_wait_check queueWaitCheck(mFastCapture->getTid());
+ sq->push(FastCaptureStateQueue::BLOCK_UNTIL_PUSHED);
+ }
// start the fast capture
mFastCapture->run("FastCapture", ANDROID_PRIORITY_URGENT_AUDIO);
pid_t tid = mFastCapture->getTid();
@@ -8346,8 +8387,11 @@
}
state->mCommand = FastCaptureState::EXIT;
sq->end();
- sq->push(FastCaptureStateQueue::BLOCK_UNTIL_PUSHED);
- mFastCapture->join();
+ {
+ audio_utils::mutex::scoped_join_wait_check queueWaitCheck(mFastCapture->getTid());
+ sq->push(FastCaptureStateQueue::BLOCK_UNTIL_PUSHED);
+ mFastCapture->join();
+ }
mFastCapture.clear();
}
mAfThreadCallback->unregisterWriter(mFastCaptureNBLogWriter);
@@ -8980,7 +9024,11 @@
mFastCaptureFutex = 0;
sq->end();
// BLOCK_UNTIL_PUSHED would be insufficient, as we need it to stop doing I/O now
- sq->push(FastCaptureStateQueue::BLOCK_UNTIL_ACKED);
+ {
+ audio_utils::mutex::scoped_queue_wait_check queueWaitCheck(mFastCapture->getTid());
+ sq->push(FastCaptureStateQueue::BLOCK_UNTIL_ACKED);
+ }
+
#if 0
if (kUseFastCapture == FastCapture_Dynamic) {
// FIXME
@@ -10213,6 +10261,13 @@
}
}
+std::string RecordThread::getLocalLogHeader() const {
+ using namespace std::literals;
+ static constexpr auto indent = " "
+ " "sv;
+ return std::string{indent}.append(IAfRecordTrack::getLogHeader());
+}
+
// ----------------------------------------------------------------------------
// Mmap
// ----------------------------------------------------------------------------
@@ -10342,13 +10397,13 @@
audio_stream_type_t streamType __unused,
audio_session_t sessionId,
const sp<MmapStreamCallback>& callback,
- audio_port_handle_t deviceId,
+ const DeviceIdVector& deviceIds,
audio_port_handle_t portId)
{
mAttr = *attr;
mSessionId = sessionId;
mCallback = callback;
- mDeviceId = deviceId;
+ mDeviceIds = deviceIds;
mPortId = portId;
}
@@ -10432,6 +10487,7 @@
const auto localSessionId = mSessionId;
auto localAttr = mAttr;
float volume = 0.0f;
+ bool muted = false;
if (isOutput()) {
audio_config_t config = AUDIO_CONFIG_INITIALIZER;
config.sample_rate = mSampleRate;
@@ -10440,7 +10496,7 @@
audio_stream_type_t stream = streamType_l();
audio_output_flags_t flags =
(audio_output_flags_t)(AUDIO_OUTPUT_FLAG_MMAP_NOIRQ | AUDIO_OUTPUT_FLAG_DIRECT);
- audio_port_handle_t deviceId = mDeviceId;
+ DeviceIdVector deviceIds = mDeviceIds;
std::vector<audio_io_handle_t> secondaryOutputs;
bool isSpatialized;
bool isBitPerfect;
@@ -10451,12 +10507,13 @@
adjAttributionSource,
&config,
flags,
- &deviceId,
+ &deviceIds,
&portId,
&secondaryOutputs,
&isSpatialized,
&isBitPerfect,
- &volume);
+ &volume,
+ &muted);
mutex().lock();
mAttr = localAttr;
ALOGD_IF(!secondaryOutputs.empty(),
@@ -10466,7 +10523,7 @@
config.sample_rate = mSampleRate;
config.channel_mask = mChannelMask;
config.format = mFormat;
- audio_port_handle_t deviceId = mDeviceId;
+ audio_port_handle_t deviceId = getFirstDeviceId(mDeviceIds);
mutex().unlock();
ret = AudioSystem::getInputForAttr(&localAttr, &io,
RECORD_RIID_INVALID,
@@ -10526,7 +10583,7 @@
mChannelMask, mSessionId, isOutput(),
client.attributionSource,
IPCThreadState::self()->getCallingPid(), portId,
- volume);
+ volume, muted);
if (!isOutput()) {
track->setSilenced_l(isClientSilenced_l(portId));
}
@@ -10819,7 +10876,7 @@
// store new device and send to effects
audio_devices_t type = AUDIO_DEVICE_NONE;
- audio_port_handle_t deviceId;
+ DeviceIdVector deviceIds;
AudioDeviceTypeAddrVector sinkDeviceTypeAddrs;
AudioDeviceTypeAddr sourceDeviceTypeAddr;
uint32_t numDevices = 0;
@@ -10833,12 +10890,12 @@
type = static_cast<audio_devices_t>(type | patch->sinks[i].ext.device.type);
sinkDeviceTypeAddrs.emplace_back(patch->sinks[i].ext.device.type,
patch->sinks[i].ext.device.address);
+ deviceIds.push_back(patch->sinks[i].id);
}
- deviceId = patch->sinks[0].id;
numDevices = mPatch.num_sinks;
} else {
type = patch->sources[0].ext.device.type;
- deviceId = patch->sources[0].id;
+ deviceIds.push_back(patch->sources[0].id);
numDevices = mPatch.num_sources;
sourceDeviceTypeAddr.mType = patch->sources[0].ext.device.type;
sourceDeviceTypeAddr.setAddress(patch->sources[0].ext.device.address);
@@ -10864,11 +10921,11 @@
// For mmap streams, once the routing has changed, they will be disconnected. It should be
// okay to notify the client earlier before the new patch creation.
- if (mDeviceId != deviceId) {
+ if (!areDeviceIdsEqual(deviceIds, mDeviceIds)) {
if (const sp<MmapStreamCallback> callback = mCallback.promote()) {
// The aaudioservice handle the routing changed event asynchronously. In that case,
// it is safe to hold the lock here.
- callback->onRoutingChanged(deviceId);
+ callback->onRoutingChanged(deviceIds);
}
}
@@ -10888,7 +10945,7 @@
*handle = AUDIO_PATCH_HANDLE_NONE;
}
- if (numDevices == 0 || mDeviceId != deviceId) {
+ if (numDevices == 0 || (!areDeviceIdsEqual(deviceIds, mDeviceIds))) {
if (isOutput()) {
sendIoConfigEvent_l(AUDIO_OUTPUT_CONFIG_CHANGED);
mOutDeviceTypeAddrs = sinkDeviceTypeAddrs;
@@ -10898,7 +10955,7 @@
mInDeviceTypeAddr = sourceDeviceTypeAddr;
}
mPatch = *patch;
- mDeviceId = deviceId;
+ mDeviceIds = deviceIds;
}
// Force meteadata update after a route change
mActiveTracks.setHasChanged();
@@ -11053,7 +11110,7 @@
if (const sp<MmapStreamCallback> callback = mCallback.promote()) {
// The aaudioservice handle the routing changed event asynchronously. In that case,
// it is safe to hold the lock here.
- callback->onRoutingChanged(AUDIO_PORT_HANDLE_NONE);
+ callback->onRoutingChanged({});
} else if (mNoCallbackWarningCount < kMaxNoCallbackWarnings) {
ALOGW("Could not notify MMAP stream tear down: no onRoutingChanged callback!");
mNoCallbackWarningCount++;
@@ -11093,6 +11150,13 @@
write(fd, result.c_str(), result.size());
}
+std::string MmapThread::getLocalLogHeader() const {
+ using namespace std::literals;
+ static constexpr auto indent = " "
+ " "sv;
+ return std::string{indent}.append(IAfMmapTrack::getLogHeader());
+}
+
/* static */
sp<IAfMmapPlaybackThread> IAfMmapPlaybackThread::create(
const sp<IAfThreadCallback>& afThreadCallback, audio_io_handle_t id,
@@ -11138,11 +11202,11 @@
audio_stream_type_t streamType,
audio_session_t sessionId,
const sp<MmapStreamCallback>& callback,
- audio_port_handle_t deviceId,
+ const DeviceIdVector& deviceIds,
audio_port_handle_t portId)
{
audio_utils::lock_guard l(mutex());
- MmapThread::configure_l(attr, streamType, sessionId, callback, deviceId, portId);
+ MmapThread::configure_l(attr, streamType, sessionId, callback, deviceIds, portId);
mStreamType = streamType;
}
@@ -11177,10 +11241,14 @@
}
}
-void MmapPlaybackThread::setStreamVolume(audio_stream_type_t stream, float value)
+void MmapPlaybackThread::setStreamVolume(audio_stream_type_t stream, float value, bool muted)
{
+ ALOGV("%s: stream %d value %f muted %d", __func__, stream, value, muted);
audio_utils::lock_guard _l(mutex());
mStreamTypes[stream].volume = value;
+ if (com_android_media_audio_ring_my_car()) {
+ mStreamTypes[stream].mute = muted;
+ }
if (stream == mStreamType) {
broadcast_l();
}
@@ -11202,12 +11270,13 @@
}
status_t MmapPlaybackThread::setPortsVolume(
- const std::vector<audio_port_handle_t>& portIds, float volume) {
+ const std::vector<audio_port_handle_t>& portIds, float volume, bool muted) {
audio_utils::lock_guard _l(mutex());
for (const auto& portId : portIds) {
for (const sp<IAfMmapTrack>& track : mActiveTracks) {
if (portId == track->portId()) {
track->setPortVolume(volume);
+ track->setPortMute(muted);
break;
}
}
@@ -11265,7 +11334,11 @@
// will be broadcasted to all tracks. Thus, take arbitrarily first track volume.
size_t numtracks = mActiveTracks.size();
if (numtracks) {
- volume = mMasterVolume * mActiveTracks[0]->getPortVolume();
+ if (mActiveTracks[0]->getPortMute()) {
+ volume = 0;
+ } else {
+ volume = mMasterVolume * mActiveTracks[0]->getPortVolume();
+ }
}
}
}
@@ -11309,7 +11382,8 @@
// TODO(b/241533526): adjust logic to include mute from AppOps
false /*muteFromPlaybackRestricted*/,
false /*muteFromClientVolume*/,
- false /*muteFromVolumeShaper*/});
+ false /*muteFromVolumeShaper*/,
+ false /*muteFromPortVolume*/});
} else {
track->processMuteEvent_l(mAfThreadCallback->getOrCreateAudioManager(),
/*muteState=*/{mMasterMute,
@@ -11318,7 +11392,8 @@
// TODO(b/241533526): adjust logic to include mute from AppOps
false /*muteFromPlaybackRestricted*/,
false /*muteFromClientVolume*/,
- false /*muteFromVolumeShaper*/});
+ false /*muteFromVolumeShaper*/,
+ track->getPortMute()});
}
}
}
diff --git a/services/audioflinger/Threads.h b/services/audioflinger/Threads.h
index 4c4939b..1d6e244 100644
--- a/services/audioflinger/Threads.h
+++ b/services/audioflinger/Threads.h
@@ -576,6 +576,9 @@
return mThreadloopExecutor;
}
+ // Used to print the header for the local log on a particular thread type
+ virtual std::string getLocalLogHeader() const { return {}; };
+
protected:
// entry describing an effect being suspended in mSuspendedSessions keyed vector
@@ -623,7 +626,8 @@
* ThreadBase_Mutex during this time. No other mutex is held.
*/
- void waitWhileThreadBusy_l(audio_utils::unique_lock& ul) final REQUIRES(mutex()) {
+ void waitWhileThreadBusy_l(audio_utils::unique_lock<audio_utils::mutex>& ul)
+ final REQUIRES(mutex()) {
// the wait returns immediately if the predicate is satisfied.
mThreadBusyCv.wait(ul, [&]{ return mThreadBusy == false;});
}
@@ -887,7 +891,7 @@
bool mHasChanged = false;
};
- SimpleLog mLocalLog; // locked internally
+ SimpleLog mLocalLog {/* maxLogLines= */ 120}; // locked internally
// mThreadloopExecutor contains deferred functors and object (dtors) to
// be executed at the end of the processing period, without any
@@ -1016,11 +1020,12 @@
void setMasterVolume(float value) final;
void setMasterBalance(float balance) override EXCLUDES_ThreadBase_Mutex;
void setMasterMute(bool muted) final;
- void setStreamVolume(audio_stream_type_t stream, float value) final EXCLUDES_ThreadBase_Mutex;
+ void setStreamVolume(audio_stream_type_t stream, float value, bool muted) final
+ EXCLUDES_ThreadBase_Mutex;
void setStreamMute(audio_stream_type_t stream, bool muted) final EXCLUDES_ThreadBase_Mutex;
float streamVolume(audio_stream_type_t stream) const final EXCLUDES_ThreadBase_Mutex;
- status_t setPortsVolume(const std::vector<audio_port_handle_t>& portIds, float volume)
- final EXCLUDES_ThreadBase_Mutex;
+ status_t setPortsVolume(const std::vector<audio_port_handle_t>& portIds, float volume,
+ bool muted) final EXCLUDES_ThreadBase_Mutex;
void setVolumeForOutput_l(float left, float right) const final;
@@ -1047,7 +1052,8 @@
bool isSpatialized,
bool isBitPerfect,
audio_output_flags_t* afTrackFlags,
- float volume) final
+ float volume,
+ bool muted) final
REQUIRES(audio_utils::AudioFlinger_Mutex);
bool isTrackActive(const sp<IAfTrack>& track) const final {
@@ -1229,6 +1235,9 @@
override EXCLUDES_ThreadBase_Mutex {
// Do nothing. It is only used for bit perfect thread
}
+
+ std::string getLocalLogHeader() const override;
+
protected:
// updated by readOutputParameters_l()
size_t mNormalFrameCount; // normal mixer and effects
@@ -2133,6 +2142,8 @@
return !(mInput == nullptr || mInput->stream == nullptr);
}
+ std::string getLocalLogHeader() const override;
+
protected:
void dumpInternals_l(int fd, const Vector<String16>& args) override REQUIRES(mutex());
void dumpTracks_l(int fd, const Vector<String16>& args) override REQUIRES(mutex());
@@ -2232,17 +2243,17 @@
audio_stream_type_t streamType,
audio_session_t sessionId,
const sp<MmapStreamCallback>& callback,
- audio_port_handle_t deviceId,
+ const DeviceIdVector& deviceIds,
audio_port_handle_t portId) override EXCLUDES_ThreadBase_Mutex {
audio_utils::lock_guard l(mutex());
- configure_l(attr, streamType, sessionId, callback, deviceId, portId);
+ configure_l(attr, streamType, sessionId, callback, deviceIds, portId);
}
void configure_l(const audio_attributes_t* attr,
audio_stream_type_t streamType,
audio_session_t sessionId,
const sp<MmapStreamCallback>& callback,
- audio_port_handle_t deviceId,
+ const DeviceIdVector& deviceIds,
audio_port_handle_t portId) REQUIRES(mutex());
void disconnect() final EXCLUDES_ThreadBase_Mutex;
@@ -2324,6 +2335,8 @@
bool isStreamInitialized() const override { return false; }
+ std::string getLocalLogHeader() const override;
+
void setClientSilencedState_l(audio_port_handle_t portId, bool silenced) REQUIRES(mutex()) {
mClientSilencedStates[portId] = silenced;
}
@@ -2350,9 +2363,9 @@
void dumpTracks_l(int fd, const Vector<String16>& args) final REQUIRES(mutex());
/**
- * @brief mDeviceId current device port unique identifier
+ * @brief mDeviceIds current device port unique identifiers
*/
- audio_port_handle_t mDeviceId GUARDED_BY(mutex()) = AUDIO_PORT_HANDLE_NONE;
+ DeviceIdVector mDeviceIds GUARDED_BY(mutex());
audio_attributes_t mAttr GUARDED_BY(mutex());
audio_session_t mSessionId GUARDED_BY(mutex());
@@ -2384,7 +2397,7 @@
audio_stream_type_t streamType,
audio_session_t sessionId,
const sp<MmapStreamCallback>& callback,
- audio_port_handle_t deviceId,
+ const DeviceIdVector& deviceIds,
audio_port_handle_t portId) final EXCLUDES_ThreadBase_Mutex;
AudioStreamOut* clearOutput() final EXCLUDES_ThreadBase_Mutex;
@@ -2394,11 +2407,13 @@
// Needs implementation?
void setMasterBalance(float /* value */) final EXCLUDES_ThreadBase_Mutex {}
void setMasterMute(bool muted) final EXCLUDES_ThreadBase_Mutex;
- void setStreamVolume(audio_stream_type_t stream, float value) final EXCLUDES_ThreadBase_Mutex;
+
+ void setStreamVolume(audio_stream_type_t stream, float value, bool muted) final
+ EXCLUDES_ThreadBase_Mutex;
void setStreamMute(audio_stream_type_t stream, bool muted) final EXCLUDES_ThreadBase_Mutex;
float streamVolume(audio_stream_type_t stream) const final EXCLUDES_ThreadBase_Mutex;
- status_t setPortsVolume(const std::vector<audio_port_handle_t>& portIds, float volume)
- final EXCLUDES_ThreadBase_Mutex;
+ status_t setPortsVolume(const std::vector<audio_port_handle_t>& portIds, float volume,
+ bool muted) final EXCLUDES_ThreadBase_Mutex;
void setMasterMute_l(bool muted) REQUIRES(mutex()) { mMasterMute = muted; }
diff --git a/services/audioflinger/Tracks.cpp b/services/audioflinger/Tracks.cpp
index 28d19ad..5fbe48c 100644
--- a/services/audioflinger/Tracks.cpp
+++ b/services/audioflinger/Tracks.cpp
@@ -728,7 +728,8 @@
float speed,
bool isSpatialized,
bool isBitPerfect,
- float volume) {
+ float volume,
+ bool muted) {
return sp<Track>::make(thread,
client,
streamType,
@@ -750,7 +751,8 @@
speed,
isSpatialized,
isBitPerfect,
- volume);
+ volume,
+ muted);
}
// Track constructor must be called with AudioFlinger::mLock and ThreadBase::mLock held
@@ -776,7 +778,8 @@
float speed,
bool isSpatialized,
bool isBitPerfect,
- float volume)
+ float volume,
+ bool muted)
: TrackBase(thread, client, attr, sampleRate, format, channelMask, frameCount,
// TODO: Using unsecurePointer() has some associated security pitfalls
// (see declaration for details).
@@ -861,10 +864,13 @@
populateUsageAndContentTypeFromStreamType();
+ mMutedFromPort = muted;
+
// Audio patch and call assistant volume are always max
if (mAttr.usage == AUDIO_USAGE_CALL_ASSISTANT
|| mAttr.usage == AUDIO_USAGE_VIRTUAL_SOURCE) {
mVolume = 1.0f;
+ mMutedFromPort = false;
}
mServerLatencySupported = checkServerLatencySupported(format, flags);
@@ -1000,13 +1006,8 @@
void Track::appendDumpHeader(String8& result) const
{
- result.appendFormat("Type Id Active Client Session Port Id S Flags "
- " Format Chn mask SRate "
- "ST Usg CT "
- " G db L dB R dB VS dB PortVol dB "
- " Server FrmCnt FrmRdy F Underruns Flushed BitPerfect InternalMute"
- "%s\n",
- isServerLatencySupported() ? " Latency" : "");
+ const auto res = IAfTrack::getLogHeader();
+ result.append(res.data(), res.size());
}
void Track::appendDump(String8& result, bool active) const
@@ -1086,13 +1087,14 @@
? 'r' /* buffer reduced */: bufferSizeInFrames > mFrameCount
? 'e' /* error */ : ' ' /* identical */;
- result.appendFormat("%7s %6u %7u %7u %2s 0x%03X "
+ result.appendFormat("%7s %7u/%7u %7u %7u %2s 0x%03X "
"%08X %08X %6u "
"%2u %3x %2x "
- "%5.2g %5.2g %5.2g %5.2g%c %11.2g "
+ "%5.2g %5.2g %5.2g %5.2g%c %11.2g %10s "
"%08X %6zu%c %6zu %c %9u%c %7u %10s %12s",
active ? "yes" : "no",
- (mClient == 0) ? getpid() : mClient->pid(),
+ mClient ? mClient->pid() : getpid() ,
+ mClient ? mClient->uid() : getuid(),
mSessionId,
mPortId,
getTrackStateAsCodedString(),
@@ -1112,6 +1114,7 @@
20.0 * log10(vsVolume.first), // VolumeShaper(s) total volume
vsVolume.second ? 'A' : ' ', // if any VolumeShapers active
20.0 * log10(mVolume),
+ getPortMute() ? "true" : "false",
mCblk->mServer,
bufferSizeInFrames,
@@ -1324,7 +1327,8 @@
// states to reset position info for pcm tracks
if (audio_is_linear_pcm(mFormat)
- && (state == IDLE || state == STOPPED || state == FLUSHED)) {
+ && (state == IDLE || state == STOPPED || state == FLUSHED
+ || state == PAUSED)) {
mFrameMap.reset();
if (!isFastTrack()) {
@@ -1618,8 +1622,23 @@
if (mType != TYPE_PATCH) {
// Do not recursively propagate a PatchTrack setPortVolume to
// downstream PatchTracks.
- forEachTeePatchTrack_l([volume](const auto& patchTrack) {
- patchTrack->setPortVolume(volume); });
+ forEachTeePatchTrack_l([volume](const auto &patchTrack) {
+ patchTrack->setPortVolume(volume);
+ });
+ }
+}
+
+void Track::setPortMute(bool muted) {
+ if (mMutedFromPort == muted) {
+ return;
+ }
+ mMutedFromPort = muted;
+ if (mType != TYPE_PATCH) {
+ // Do not recursively propagate a PatchTrack setPortVolume to
+ // downstream PatchTracks.
+ forEachTeePatchTrack_l([muted](const auto &patchTrack) {
+ patchTrack->setPortMute(muted);
+ });
}
}
@@ -1724,8 +1743,8 @@
}
if (result == OK) {
- ALOGI("%s(%d): processed mute state for port ID %d from %d to %d", __func__, id(), mPortId,
- static_cast<int>(mMuteState), static_cast<int>(muteState));
+ ALOGI("%s(%d): processed mute state for port ID %d from %#x to %#x", __func__, id(),
+ mPortId, static_cast<int>(mMuteState.load()), static_cast<int>(muteState));
mMuteState = muteState;
} else {
ALOGW("%s(%d): cannot process mute state for port ID %d, status error %d", __func__, id(),
@@ -2502,7 +2521,8 @@
* the lowest possible latency
* even if it might glitch. */
float speed,
- float volume)
+ float volume,
+ bool muted)
{
return sp<PatchTrack>::make(
playbackThread,
@@ -2517,7 +2537,8 @@
timeout,
frameCountToBeReady,
speed,
- volume);
+ volume,
+ muted);
}
PatchTrack::PatchTrack(IAfPlaybackThread* playbackThread,
@@ -2532,14 +2553,15 @@
const Timeout& timeout,
size_t frameCountToBeReady,
float speed,
- float volume)
+ float volume,
+ bool muted)
: Track(playbackThread, NULL, streamType,
AUDIO_ATTRIBUTES_INITIALIZER,
sampleRate, format, channelMask, frameCount,
buffer, bufferSize, nullptr /* sharedBuffer */,
AUDIO_SESSION_NONE, getpid(), audioServerAttributionSource(getpid()), flags,
TYPE_PATCH, AUDIO_PORT_HANDLE_NONE, frameCountToBeReady, speed,
- false /*isSpatialized*/, false /*isBitPerfect*/, volume),
+ false /*isSpatialized*/, false /*isBitPerfect*/, volume, muted),
PatchTrackBase(mCblk ? new AudioTrackClientProxy(mCblk, mBuffer, frameCount, mFrameSize,
true /*clientInServer*/) : nullptr,
playbackThread, timeout)
@@ -2989,21 +3011,20 @@
void RecordTrack::appendDumpHeader(String8& result) const
{
- result.appendFormat("Active Id Client Session Port Id S Flags "
- " Format Chn mask SRate Source "
- " Server FrmCnt FrmRdy Sil%s\n",
- isServerLatencySupported() ? " Latency" : "");
+ const auto res = IAfRecordTrack::getLogHeader();
+ result.append(res.data(), res.size());
}
void RecordTrack::appendDump(String8& result, bool active) const
{
- result.appendFormat("%c%5s %6d %6u %7u %7u %2s 0x%03X "
+ result.appendFormat("%c%5s %6d %7u/%7u %7u %7u %2s 0x%03X "
"%08X %08X %6u %6X "
"%08X %6zu %6zu %3c",
isFastTrack() ? 'F' : ' ',
active ? "yes" : "no",
mId,
- (mClient == 0) ? getpid() : mClient->pid(),
+ mClient ? mClient->pid() : getpid(),
+ mClient ? mClient->uid() : getuid(),
mSessionId,
mPortId,
getTrackStateAsCodedString(),
@@ -3532,7 +3553,8 @@
const android::content::AttributionSourceState& attributionSource,
pid_t creatorPid,
audio_port_handle_t portId,
- float volume)
+ float volume,
+ bool muted)
{
return sp<MmapTrack>::make(
thread,
@@ -3545,7 +3567,8 @@
attributionSource,
creatorPid,
portId,
- volume);
+ volume,
+ muted);
}
MmapTrack::MmapTrack(IAfThreadBase* thread,
@@ -3558,7 +3581,8 @@
const AttributionSourceState& attributionSource,
pid_t creatorPid,
audio_port_handle_t portId,
- float volume)
+ float volume,
+ bool muted)
: TrackBase(thread, NULL, attr, sampleRate, format,
channelMask, (size_t)0 /* frameCount */,
nullptr /* buffer */, (size_t)0 /* bufferSize */,
@@ -3569,14 +3593,17 @@
TYPE_DEFAULT, portId,
std::string(AMEDIAMETRICS_KEY_PREFIX_AUDIO_MMAP) + std::to_string(portId)),
mPid(VALUE_OR_FATAL(aidl2legacy_int32_t_uid_t(attributionSource.pid))),
+ mUid(VALUE_OR_FATAL(aidl2legacy_int32_t_uid_t(attributionSource.uid))),
mSilenced(false), mSilencedNotified(false), mVolume(volume)
{
+ mMutedFromPort = muted;
// Once this item is logged by the server, the client can add properties.
mTrackMetrics.logConstructor(creatorPid, uid(), id());
if (isOut && (attr.usage == AUDIO_USAGE_CALL_ASSISTANT
|| attr.usage == AUDIO_USAGE_VIRTUAL_SOURCE)) {
// Audio patch and call assistant volume are always max
mVolume = 1.0f;
+ mMutedFromPort = false;
}
}
@@ -3656,14 +3683,15 @@
void MmapTrack::appendDumpHeader(String8& result) const
{
- result.appendFormat("Client Session Port Id Format Chn mask SRate Flags %s %s\n",
- isOut() ? "Usg CT": "Source", isOut() ? "PortVol dB" : "");
+ const auto res = IAfMmapTrack::getLogHeader();
+ result.append(res.data(), res.size());
}
void MmapTrack::appendDump(String8& result, bool active __unused) const
{
- result.appendFormat("%6u %7u %7u %08X %08X %6u 0x%03X ",
+ result.appendFormat("%7u/%7u %7u %7u %08X %08X %6u 0x%03X ",
mPid,
+ mUid,
mSessionId,
mPortId,
mFormat,
@@ -3671,10 +3699,11 @@
mSampleRate,
mAttr.flags);
if (isOut()) {
- result.appendFormat("%3x %2x", mAttr.usage, mAttr.content_type);
+ result.appendFormat("%4x %2x", mAttr.usage, mAttr.content_type);
result.appendFormat("%11.2g", 20.0 * log10(mVolume));
+ result.appendFormat("%12s", mMutedFromPort ? "true" : "false");
} else {
- result.appendFormat("%6x", mAttr.source);
+ result.appendFormat("%7x", mAttr.source);
}
result.append("\n");
}
diff --git a/services/audioflinger/afutils/AllocatorFactory.h b/services/audioflinger/afutils/AllocatorFactory.h
index 7534607..4c290a0 100644
--- a/services/audioflinger/afutils/AllocatorFactory.h
+++ b/services/audioflinger/afutils/AllocatorFactory.h
@@ -33,25 +33,36 @@
constexpr inline size_t SHARED_SIZE_SMALL = SHARED_SIZE - SHARED_SIZE_LARGE; // 20 MiB
constexpr inline size_t SMALL_THRESHOLD = 1024 * 40; // 40 KiB
+template <typename Policy>
+inline auto getSharedPool() {
+ using namespace mediautils;
+ return std::make_shared<LockedAllocator<PolicyAllocator<MemoryHeapBaseAllocator, Policy>>>();
+}
+
+// The following pools are global but lazy initialized. Stored in shared_ptr since they are
+// referred by clients, but they could also be leaked.
+
+// Pool from which every client gets their dedicated, exclusive quota.
inline auto getDedicated() {
using namespace mediautils;
- static const auto allocator =
- std::make_shared<PolicyAllocator<MemoryHeapBaseAllocator, SizePolicy<DED_SIZE>>>();
+ static const auto allocator = getSharedPool<SizePolicy<DED_SIZE>>();
return allocator;
}
+// Pool from which clients with large allocation sizes can fall back to when their dedicated
+// allocation is surpassed. More likely to fill.
inline auto getSharedLarge() {
using namespace mediautils;
- static const auto allocator = std::make_shared<
- PolicyAllocator<MemoryHeapBaseAllocator, SizePolicy<SHARED_SIZE_LARGE>>>();
+ static const auto allocator = getSharedPool<SizePolicy<SHARED_SIZE_LARGE>>();
return allocator;
}
+// Pool from which clients with reasonable allocation sizes can fall back to when
+// their dedicated allocation is surpassed, so that small buffer clients are always served.
inline auto getSharedSmall() {
using namespace mediautils;
static const auto allocator =
- std::make_shared<PolicyAllocator<MemoryHeapBaseAllocator,
- SizePolicy<SHARED_SIZE_SMALL, 0, SMALL_THRESHOLD>>>();
+ getSharedPool<SizePolicy<SHARED_SIZE_SMALL, 0, SMALL_THRESHOLD>>();
return allocator;
}
@@ -78,8 +89,7 @@
getSharedLarge(), "Large Shared");
};
const auto makeSmallShared = []() {
- return wrapWithPolicySnooping<
- SizePolicy<SHARED_SIZE_SMALL / ADV_THRESHOLD_INV>>(
+ return wrapWithPolicySnooping<SizePolicy<SHARED_SIZE_SMALL / ADV_THRESHOLD_INV>>(
getSharedSmall(), "Small Shared");
};
diff --git a/services/audioflinger/afutils/DumpTryLock.h b/services/audioflinger/afutils/DumpTryLock.h
deleted file mode 100644
index 05e050e..0000000
--- a/services/audioflinger/afutils/DumpTryLock.h
+++ /dev/null
@@ -1,41 +0,0 @@
-/*
- *
- * Copyright 2023, The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#pragma once
-
-#include <audio_utils/mutex.h>
-#include <utils/Mutex.h>
-
-namespace android::afutils {
-
-inline bool dumpTryLock(Mutex& mutex)
-{
- static constexpr int kDumpLockTimeoutNs = 1'000'000'000;
- const status_t err = mutex.timedLock(kDumpLockTimeoutNs);
- return err == NO_ERROR;
-}
-
-// Note: the std::timed_mutex try_lock_for and try_lock_until methods are inefficient.
-// It is better to use std::mutex and call this method.
-//
-inline bool dumpTryLock(audio_utils::mutex& mutex) TRY_ACQUIRE(true, mutex)
-{
- static constexpr int64_t kDumpLockTimeoutNs = 1'000'000'000;
- return mutex.try_lock(kDumpLockTimeoutNs);
-}
-
-} // android::afutils
diff --git a/services/audioflinger/afutils/FallibleLockGuard.h b/services/audioflinger/afutils/FallibleLockGuard.h
new file mode 100644
index 0000000..a2e66f6
--- /dev/null
+++ b/services/audioflinger/afutils/FallibleLockGuard.h
@@ -0,0 +1,69 @@
+/*
+ *
+ * Copyright 2024, The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#pragma once
+
+#include <android-base/thread_annotations.h>
+#include <audio_utils/mutex.h>
+#include <utils/Mutex.h>
+
+#include <functional>
+
+namespace android::afutils {
+
+// Lock guard with a acquire timeout, which for the purpose of thread safety annotations acts as if
+// it has the capability (i.e. the thread annotations *lie*). Used for dump utilities, where if we
+// are deadlocked, we access without the lock since we are about to abort due to watchdog anyway.
+// If the lock was truly successfully acquired, unlock on dtor. Like all guards (if successful),
+// this guard is solely responsible for locking on ctor/unlocking on dtor, and the mutex reference
+// must be valid for the lifetime of the object
+class [[nodiscard]] SCOPED_CAPABILITY FallibleLockGuard {
+ public:
+ static constexpr int kDefaultTimeout = 1'000'000'000;
+
+ explicit FallibleLockGuard(Mutex& mutex, int64_t timeoutNs = kDefaultTimeout) ACQUIRE(mutex) {
+ if (mutex.timedLock(timeoutNs) == NO_ERROR) {
+ mUnlockFunc = [&mutex]() NO_THREAD_SAFETY_ANALYSIS { mutex.unlock(); };
+ }
+ }
+
+ explicit FallibleLockGuard(audio_utils::mutex& mutex, int64_t timeoutNs = kDefaultTimeout)
+ ACQUIRE(mutex) {
+ if (mutex.try_lock(timeoutNs)) {
+ mUnlockFunc = [&mutex]() NO_THREAD_SAFETY_ANALYSIS { mutex.unlock(); };
+ }
+ }
+
+ FallibleLockGuard(const FallibleLockGuard& other) = delete;
+
+ FallibleLockGuard(FallibleLockGuard&& other) {
+ mUnlockFunc.swap(other.mUnlockFunc);
+ }
+
+ FallibleLockGuard& operator=(const FallibleLockGuard& other) = delete;
+
+ // Return if the underlying lock was successfully locked
+ explicit operator bool() const { return static_cast<bool>(mUnlockFunc); }
+
+ ~FallibleLockGuard() RELEASE() {
+ if (mUnlockFunc) mUnlockFunc();
+ }
+
+ private:
+ std::function<void()> mUnlockFunc;
+};
+} // android::afutils
diff --git a/services/audioflinger/datapath/VolumeInterface.h b/services/audioflinger/datapath/VolumeInterface.h
index 1564fe1..02b6ade 100644
--- a/services/audioflinger/datapath/VolumeInterface.h
+++ b/services/audioflinger/datapath/VolumeInterface.h
@@ -25,7 +25,7 @@
virtual void setMasterVolume(float value) = 0;
virtual void setMasterBalance(float balance) = 0;
virtual void setMasterMute(bool muted) = 0;
- virtual void setStreamVolume(audio_stream_type_t stream, float value) = 0;
+ virtual void setStreamVolume(audio_stream_type_t stream, float value, bool muted) = 0;
virtual void setStreamMute(audio_stream_type_t stream, bool muted) = 0;
// TODO(b/290699744) add "get" prefix for getter below.
virtual float streamVolume(audio_stream_type_t stream) const = 0;
diff --git a/services/audioflinger/datapath/VolumePortInterface.h b/services/audioflinger/datapath/VolumePortInterface.h
index fb1c463..fe3b782 100644
--- a/services/audioflinger/datapath/VolumePortInterface.h
+++ b/services/audioflinger/datapath/VolumePortInterface.h
@@ -23,7 +23,10 @@
class VolumePortInterface : public virtual RefBase {
public:
virtual void setPortVolume(float volume) = 0;
+ virtual void setPortMute(bool mute) = 0;
virtual float getPortVolume() const = 0;
+ /** Returns the muted state defined by the volume group which is playing on this port. */
+ virtual bool getPortMute() const = 0;
};
} // namespace android
diff --git a/services/audioflinger/timing/MonotonicFrameCounter.cpp b/services/audioflinger/timing/MonotonicFrameCounter.cpp
index 286f549..175e2f5 100644
--- a/services/audioflinger/timing/MonotonicFrameCounter.cpp
+++ b/services/audioflinger/timing/MonotonicFrameCounter.cpp
@@ -26,9 +26,9 @@
int64_t newFrameCount, int64_t newTime) {
if (newFrameCount < 0 || newTime < 0) {
const auto result = getLastReportedFrameCount();
- ALOGW("%s: invalid (frame, time) pair newFrameCount:%lld newFrameCount:%lld,"
+ ALOGW("%s: invalid (frame, time) pair newFrameCount:%lld newTime:%lld,"
" using %lld as frameCount",
- __func__, (long long) newFrameCount, (long long)newFrameCount,
+ __func__, (long long)newFrameCount, (long long)newTime,
(long long)result);
return result;
}
diff --git a/services/audiopolicy/AudioPolicyInterface.h b/services/audiopolicy/AudioPolicyInterface.h
index edcb805..e5bd121 100644
--- a/services/audiopolicy/AudioPolicyInterface.h
+++ b/services/audiopolicy/AudioPolicyInterface.h
@@ -19,13 +19,18 @@
#include <android/media/DeviceConnectedState.h>
#include <android/media/TrackInternalMuteInfo.h>
+#include <android/media/audio/common/AudioConfigBase.h>
+#include <android/media/audio/common/AudioMMapPolicyInfo.h>
+#include <android/media/audio/common/AudioMMapPolicyType.h>
+#include <android/media/GetInputForAttrResponse.h>
+#include <android/content/AttributionSourceState.h>
+#include <error/BinderResult.h>
#include <media/AudioCommonTypes.h>
#include <media/AudioContainers.h>
#include <media/AudioDeviceTypeAddr.h>
-#include <media/AudioSystem.h>
#include <media/AudioPolicy.h>
+#include <media/AudioSystem.h>
#include <media/DeviceDescriptorBase.h>
-#include <android/content/AttributionSourceState.h>
#include <utils/String8.h>
namespace android {
@@ -142,13 +147,14 @@
const AttributionSourceState& attributionSource,
audio_config_t *config,
audio_output_flags_t *flags,
- audio_port_handle_t *selectedDeviceId,
+ DeviceIdVector *selectedDeviceIds,
audio_port_handle_t *portId,
std::vector<audio_io_handle_t> *secondaryOutputs,
output_type_t *outputType,
bool *isSpatialized,
bool *isBitPerfect,
- float *volume) = 0;
+ float *volume,
+ bool *muted) = 0;
// indicates to the audio policy manager that the output starts being used by corresponding
// stream.
virtual status_t startOutput(audio_port_handle_t portId) = 0;
@@ -158,18 +164,26 @@
// releases the output, return true if the output descriptor is reopened.
virtual bool releaseOutput(audio_port_handle_t portId) = 0;
- // request an input appropriate for record from the supplied device with supplied parameters.
- virtual status_t getInputForAttr(const audio_attributes_t *attr,
- audio_io_handle_t *input,
+ // Request an input appropriate for record from the supplied device with supplied parameters.
+ // attr -- attributes for the requested record
+ // requestedInput -- input only for MMAP mode where an input is re-used, otherwise output param
+ // requestedDeviceId, config, flags -- additional params for matching
+ // riid, session, attributionSource -- params which encapsulate client info to associate with
+ // this input
+ //
+ // On most errors, return a Status describing the error in the error object.
+ // However, in cases where an appropriate device cannot be found for a config, the error side of
+ // the unexpected will contain a suggested config.
+ virtual base::expected<media::GetInputForAttrResponse,
+ std::variant<binder::Status, media::audio::common::AudioConfigBase>>
+ getInputForAttr(audio_attributes_t attributes,
+ audio_io_handle_t requestedInput,
+ audio_port_handle_t requestedDeviceId,
+ audio_config_base_t config,
+ audio_input_flags_t flags,
audio_unique_id_t riid,
audio_session_t session,
- const AttributionSourceState& attributionSource,
- audio_config_base_t *config,
- audio_input_flags_t flags,
- audio_port_handle_t *selectedDeviceId,
- input_type_t *inputType,
- audio_port_handle_t *portId,
- uint32_t *virtualDeviceId) = 0;
+ const AttributionSourceState& attributionSource) = 0;
// indicates to the audio policy manager that the input starts being used.
virtual status_t startInput(audio_port_handle_t portId) = 0;
// indicates to the audio policy manager that the input stops being used.
@@ -197,6 +211,7 @@
// setting volume for all devices
virtual status_t setStreamVolumeIndex(audio_stream_type_t stream,
int index,
+ bool muted,
audio_devices_t device) = 0;
// retrieve current volume index for the specified stream and the
@@ -208,6 +223,7 @@
virtual status_t setVolumeIndexForAttributes(const audio_attributes_t &attr,
int index,
+ bool muted,
audio_devices_t device) = 0;
virtual status_t getVolumeIndexForAttributes(const audio_attributes_t &attr,
int &index,
@@ -444,6 +460,13 @@
virtual status_t clearPreferredMixerAttributes(const audio_attributes_t* attr,
audio_port_handle_t portId,
uid_t uid) = 0;
+
+ virtual status_t getMmapPolicyInfos(
+ media::audio::common::AudioMMapPolicyType policyType,
+ std::vector<media::audio::common::AudioMMapPolicyInfo> *policyInfos) = 0;
+ virtual status_t getMmapPolicyForDevice(
+ media::audio::common::AudioMMapPolicyType policyType,
+ media::audio::common::AudioMMapPolicyInfo *policyInfo) = 0;
};
// Audio Policy client Interface
@@ -514,7 +537,7 @@
// set a stream volume for a particular output. For the same user setting, a given stream type
// can have different volumes
// for each output (destination device) it is attached to.
- virtual status_t setStreamVolume(audio_stream_type_t stream, float volume,
+ virtual status_t setStreamVolume(audio_stream_type_t stream, float volume, bool muted,
audio_io_handle_t output, int delayMs = 0) = 0;
/**
* Set volume for given AudioTrack port ids for a particular output.
@@ -522,12 +545,13 @@
* can have different volumes for each output (destination device) it is attached to.
* @param ports to consider
* @param volume to apply
+ * @param muted to apply
* @param output to consider
* @param delayMs to use
* @return NO_ERROR if successful
*/
virtual status_t setPortsVolume(const std::vector<audio_port_handle_t>& ports, float volume,
- audio_io_handle_t output, int delayMs = 0) = 0;
+ bool muted, audio_io_handle_t output, int delayMs = 0) = 0;
// function enabling to send proprietary informations directly from audio policy manager to
// audio hardware interface.
@@ -608,6 +632,36 @@
virtual status_t setTracksInternalMute(
const std::vector<media::TrackInternalMuteInfo>& tracksInternalMute) = 0;
+
+ virtual status_t getMmapPolicyInfos(
+ media::audio::common::AudioMMapPolicyType policyType,
+ std::vector<media::audio::common::AudioMMapPolicyInfo> *policyInfos) = 0;
+
+ enum class MixType {
+ // e.g. audio recording from a microphone
+ NONE = 0,
+ // used for "remote submix" legacy mode (no DAP), capture of the media to play it remotely
+ CAPTURE,
+ // used for platform audio rerouting, where mixes are handled by external and dynamically
+ // installed policies which reroute audio mixes
+ EXT_POLICY_REROUTE,
+ // used for playback capture with a MediaProjection
+ PUBLIC_CAPTURE_PLAYBACK,
+ // used for capture from telephony RX path
+ TELEPHONY_RX_CAPTURE,
+ };
+
+ struct PermissionReqs {
+ media::audio::common::AudioSource source;
+ MixType mixType;
+ uint32_t virtualDeviceId;
+ // Flag based validation
+ bool isHotword;
+ bool isCallRedir;
+ };
+
+ virtual error::BinderResult<bool> checkPermissionForInput(const AttributionSourceState& attr,
+ const PermissionReqs& req) = 0;
};
// These are the signatures of createAudioPolicyManager/destroyAudioPolicyManager
diff --git a/services/audiopolicy/OWNERS b/services/audiopolicy/OWNERS
index 50ceadf..4a65069 100644
--- a/services/audiopolicy/OWNERS
+++ b/services/audiopolicy/OWNERS
@@ -1,4 +1,5 @@
# Bug component: 48436
+atneya@google.com
elaurent@google.com
jiabin@google.com
jmtrivi@google.com
diff --git a/services/audiopolicy/common/managerdefinitions/Android.bp b/services/audiopolicy/common/managerdefinitions/Android.bp
index 4dedcd6..0e1d090 100644
--- a/services/audiopolicy/common/managerdefinitions/Android.bp
+++ b/services/audiopolicy/common/managerdefinitions/Android.bp
@@ -39,6 +39,7 @@
"android.media.audiopolicy-aconfig-cc",
"audioclient-types-aidl-cpp",
"audiopolicy-types-aidl-cpp",
+ "com.android.media.audio-aconfig-cc",
"com.android.media.audioserver-aconfig-cc",
"libaconfig_storage_read_api_cc",
"libaudioclient_aidl_conversion",
diff --git a/services/audiopolicy/common/managerdefinitions/include/AudioIODescriptorInterface.h b/services/audiopolicy/common/managerdefinitions/include/AudioIODescriptorInterface.h
index e519766..918e247 100644
--- a/services/audiopolicy/common/managerdefinitions/include/AudioIODescriptorInterface.h
+++ b/services/audiopolicy/common/managerdefinitions/include/AudioIODescriptorInterface.h
@@ -64,6 +64,21 @@
if (activeClients.size() == activeClientsWithRoute.size()) {
return devices.getDeviceFromId(activeClientsWithRoute[0]->preferredDeviceId());
}
+ if (activeClientsWithRoute.size() == 0) {
+ return nullptr;
+ }
+ uid_t uniqueUid = activeClients[0]->uid();
+ for (const auto &client : activeClients) {
+ if (uniqueUid != client->uid()) {
+ return nullptr;
+ }
+ }
+ for (const auto &client : activeClientsWithRoute) {
+ if (uniqueUid != client->uid()) {
+ return nullptr;
+ }
+ }
+ return devices.getDeviceFromId(activeClientsWithRoute[0]->preferredDeviceId());
}
}
return nullptr;
diff --git a/services/audiopolicy/common/managerdefinitions/include/AudioOutputDescriptor.h b/services/audiopolicy/common/managerdefinitions/include/AudioOutputDescriptor.h
index 835fad2..9bceee7 100644
--- a/services/audiopolicy/common/managerdefinitions/include/AudioOutputDescriptor.h
+++ b/services/audiopolicy/common/managerdefinitions/include/AudioOutputDescriptor.h
@@ -89,7 +89,9 @@
class VolumeActivity : public ActivityTracking
{
public:
- bool isMuted() const { return mMuteCount > 0; }
+ bool isMutedInternally() const { return mMuteCount > 0; }
+ bool isMutedByGroup() const { return mMutedByGroup > 0; }
+ void setMutedByGroup(bool mutedByGroup) { mMutedByGroup = mutedByGroup; }
int getMuteCount() const { return mMuteCount; }
int incMuteCount() { return ++mMuteCount; }
int decMuteCount() { return mMuteCount > 0 ? --mMuteCount : -1; }
@@ -107,6 +109,7 @@
private:
int mMuteCount = 0; /**< mute request counter */
+ bool mMutedByGroup = false; /**< mute from AudioService, does not add to counter */
float mCurVolumeDb = NAN; /**< current volume in dB. */
bool mIsVoice = false; /** true if this volume source is used for voice call volume */
};
@@ -209,16 +212,25 @@
return mVolumeActivities.find(vs) != std::end(mVolumeActivities)?
mVolumeActivities.at(vs).getActivityCount() : 0;
}
- bool isMuted(VolumeSource vs) const
+ bool isMutedInternally(VolumeSource vs) const
{
return mVolumeActivities.find(vs) != std::end(mVolumeActivities)?
- mVolumeActivities.at(vs).isMuted() : false;
+ mVolumeActivities.at(vs).isMutedInternally() : false;
}
int getMuteCount(VolumeSource vs) const
{
return mVolumeActivities.find(vs) != std::end(mVolumeActivities)?
mVolumeActivities.at(vs).getMuteCount() : 0;
}
+ bool isMutedByGroup(VolumeSource vs)
+ {
+ return mVolumeActivities.find(vs) != std::end(mVolumeActivities)?
+ mVolumeActivities.at(vs).isMutedByGroup() : false;
+ }
+ bool hasVolumeSource(VolumeSource vs)
+ {
+ return mVolumeActivities.find(vs) != std::end(mVolumeActivities);
+ }
int incMuteCount(VolumeSource vs)
{
return mVolumeActivities[vs].incMuteCount();
@@ -227,10 +239,11 @@
{
return mVolumeActivities[vs].decMuteCount();
}
- void setCurVolume(VolumeSource vs, float volumeDb, bool isVoiceVolSrc)
+ void setCurVolume(VolumeSource vs, float volumeDb, bool mutedByGroup, bool isVoiceVolSrc)
{
// Even if not activity for this source registered, need to create anyway
mVolumeActivities[vs].setVolume(volumeDb);
+ mVolumeActivities[vs].setMutedByGroup(mutedByGroup);
mVolumeActivities[vs].setIsVoice(isVoiceVolSrc);
}
float getCurVolume(VolumeSource vs) const
diff --git a/services/audiopolicy/common/managerdefinitions/include/IOProfile.h b/services/audiopolicy/common/managerdefinitions/include/IOProfile.h
index 688772c..c2ee5f6 100644
--- a/services/audiopolicy/common/managerdefinitions/include/IOProfile.h
+++ b/services/audiopolicy/common/managerdefinitions/include/IOProfile.h
@@ -73,7 +73,8 @@
enum CompatibilityScore{
NO_MATCH = 0,
PARTIAL_MATCH = 1,
- EXACT_MATCH = 2
+ PARTIAL_MATCH_WITH_FLAG = 2,
+ EXACT_MATCH = 3
};
/**
@@ -92,7 +93,6 @@
* @param channelMask to be checked for compatibility. Must be specified
* @param updatedChannelMask if non-NULL, it is assigned the actual channel mask
* @param flags to be checked for compatibility
- * @param exactMatchRequiredForInputFlags true if exact match is required on flags
* @return how the IO profile is compatible with the given parameters.
*/
CompatibilityScore getCompatibilityScore(const DeviceVector &devices,
@@ -103,8 +103,7 @@
audio_channel_mask_t channelMask,
audio_channel_mask_t *updatedChannelMask,
// FIXME parameter type
- uint32_t flags,
- bool exactMatchRequiredForInputFlags = false) const;
+ uint32_t flags) const;
/**
* @brief areAllDevicesSupported: Checks if the given devices are supported by the IO profile.
@@ -119,11 +118,9 @@
* specified flags.
*
* @param flags to be checked for compatibility
- * @param exactMatchRequiredForInputFlags true if exact match is required on flags
* @return true if the profile is compatible, false otherwise.
*/
- bool isCompatibleProfileForFlags(uint32_t flags,
- bool exactMatchRequiredForInputFlags = false) const;
+ bool isCompatibleProfileForFlags(uint32_t flags) const;
void dump(String8 *dst, int spaces) const;
void log();
@@ -235,6 +232,7 @@
private:
void refreshMixerBehaviors();
+ CompatibilityScore getFlagsCompatibleScore(uint32_t flags) const;
DeviceVector mSupportedDevices; // supported devices: this input/output can be routed from/to
diff --git a/services/audiopolicy/common/managerdefinitions/include/IVolumeCurves.h b/services/audiopolicy/common/managerdefinitions/include/IVolumeCurves.h
index fd8b81a..ebfc597 100644
--- a/services/audiopolicy/common/managerdefinitions/include/IVolumeCurves.h
+++ b/services/audiopolicy/common/managerdefinitions/include/IVolumeCurves.h
@@ -41,6 +41,8 @@
virtual status_t initVolume(int indexMin, int indexMax) = 0;
virtual std::vector<audio_attributes_t> getAttributes() const = 0;
virtual std::vector<audio_stream_type_t> getStreamTypes() const = 0;
+ virtual void setIsMuted(bool isMuted) = 0;
+ virtual bool isMuted() const = 0;
virtual void dump(String8 *dst, int spaces = 0, bool curvePoints = false) const = 0;
};
diff --git a/services/audiopolicy/common/managerdefinitions/src/AudioOutputDescriptor.cpp b/services/audiopolicy/common/managerdefinitions/src/AudioOutputDescriptor.cpp
index 2c41de4..c417462 100644
--- a/services/audiopolicy/common/managerdefinitions/src/AudioOutputDescriptor.cpp
+++ b/services/audiopolicy/common/managerdefinitions/src/AudioOutputDescriptor.cpp
@@ -22,6 +22,7 @@
#include <AudioPolicyInterface.h>
#include "AudioOutputDescriptor.h"
#include "AudioPolicyMix.h"
+#include <com_android_media_audio.h>
#include "IOProfile.h"
#include "Volume.h"
#include "HwModule.h"
@@ -161,7 +162,7 @@
return false;
}
-bool AudioOutputDescriptor::setVolume(float volumeDb, bool /*muted*/,
+bool AudioOutputDescriptor::setVolume(float volumeDb, bool mutedByGroup,
VolumeSource volumeSource,
const StreamTypeVector &/*streams*/,
const DeviceTypeSet& deviceTypes,
@@ -169,7 +170,6 @@
bool force,
bool isVoiceVolSrc)
{
-
if (!supportedDevices().containsDeviceAmongTypes(deviceTypes)) {
ALOGV("%s output ID %d unsupported device %s",
__func__, getId(), toString(deviceTypes).c_str());
@@ -177,10 +177,14 @@
}
// We actually change the volume if:
// - the float value returned by computeVolume() changed
+ // - the muted state changed
// - the force flag is set
- if (volumeDb != getCurVolume(volumeSource) || force) {
- ALOGV("%s for volumeSrc %d, volume %f, delay %d", __func__, volumeSource, volumeDb, delayMs);
- setCurVolume(volumeSource, volumeDb, isVoiceVolSrc);
+ const bool mutedChanged =
+ com_android_media_audio_ring_my_car() && (isMutedByGroup(volumeSource) != mutedByGroup);
+ if (volumeDb != getCurVolume(volumeSource) || mutedChanged || force) {
+ ALOGV("%s for volumeSrc %d, volume %f, mutedByGroup %d, delay %d", __func__, volumeSource,
+ volumeDb, mutedByGroup, delayMs);
+ setCurVolume(volumeSource, volumeDb, mutedByGroup, isVoiceVolSrc);
return true;
}
return false;
@@ -497,7 +501,7 @@
}
void SwAudioOutputDescriptor::setSwMute(
- bool muted, VolumeSource vs, const StreamTypeVector &streamTypes,
+ bool mutedByGroup, VolumeSource vs, const StreamTypeVector &streamTypes,
const DeviceTypeSet& deviceTypes, uint32_t delayMs) {
// volume source active and more than one volume source is active, otherwise, no-op or let
// setVolume controlling SW and/or HW Gains
@@ -506,11 +510,11 @@
for (const auto& devicePort : devices()) {
if (isSingleDeviceType(deviceTypes, devicePort->type()) &&
devicePort->hasGainController(true /*canUseForVolume*/)) {
- float volumeAmpl = muted ? 0.0f : Volume::DbToAmpl(0);
ALOGV("%s: output: %d, vs: %d, muted: %d, active vs count: %zu", __func__,
- mIoHandle, vs, muted, getActiveVolumeSources().size());
+ mIoHandle, vs, mutedByGroup, getActiveVolumeSources().size());
for (const auto &stream : streamTypes) {
- mClientInterface->setStreamVolume(stream, volumeAmpl, mIoHandle, delayMs);
+ mClientInterface->setStreamVolume(stream, Volume::DbToAmpl(0), mutedByGroup,
+ mIoHandle, delayMs);
}
return;
}
@@ -521,11 +525,12 @@
for (const auto &devicePort: devices()) {
if (isSingleDeviceType(deviceTypes, devicePort->type()) &&
devicePort->hasGainController(true /*canUseForVolume*/)) {
- float volumeAmpl = muted ? 0.0f : Volume::DbToAmpl(0);
+ float volumeAmpl = Volume::DbToAmpl(0);
ALOGV("%s: output: %d, vs: %d, muted: %d, active vs count: %zu", __func__,
- mIoHandle, vs, muted, getActiveVolumeSources().size());
+ mIoHandle, vs, mutedByGroup, getActiveVolumeSources().size());
mClientInterface->setPortsVolume(
- getPortsForVolumeSource(vs), volumeAmpl, mIoHandle, delayMs);
+ getPortsForVolumeSource(vs), Volume::DbToAmpl(0), mutedByGroup,
+ mIoHandle, delayMs);
return;
}
}
@@ -533,7 +538,7 @@
}
}
-bool SwAudioOutputDescriptor::setVolume(float volumeDb, bool muted,
+bool SwAudioOutputDescriptor::setVolume(float volumeDb, bool mutedByGroup,
VolumeSource vs, const StreamTypeVector &streamTypes,
const DeviceTypeSet& deviceTypes,
uint32_t delayMs,
@@ -542,18 +547,22 @@
{
StreamTypeVector streams = streamTypes;
if (!AudioOutputDescriptor::setVolume(
- volumeDb, muted, vs, streamTypes, deviceTypes, delayMs, force, isVoiceVolSrc)) {
+ volumeDb, mutedByGroup, vs, streamTypes, deviceTypes, delayMs, force, isVoiceVolSrc)) {
if (hasStream(streamTypes, AUDIO_STREAM_BLUETOOTH_SCO)) {
VolumeSource callVolSrc = getVoiceSource();
- if (callVolSrc != VOLUME_SOURCE_NONE && volumeDb != getCurVolume(callVolSrc)) {
- setCurVolume(callVolSrc, volumeDb, true);
+ const bool mutedChanged =
+ com_android_media_audio_ring_my_car() && hasVolumeSource(callVolSrc) &&
+ (isMutedByGroup(callVolSrc) != mutedByGroup);
+ if (callVolSrc != VOLUME_SOURCE_NONE &&
+ (volumeDb != getCurVolume(callVolSrc) || mutedChanged)) {
+ setCurVolume(callVolSrc, volumeDb, mutedByGroup, true);
float volumeAmpl = Volume::DbToAmpl(volumeDb);
if (audioserver_flags::portid_volume_management()) {
mClientInterface->setPortsVolume(getPortsForVolumeSource(callVolSrc),
- volumeAmpl, mIoHandle, delayMs);
+ volumeAmpl, mutedByGroup, mIoHandle, delayMs);
} else {
mClientInterface->setStreamVolume(AUDIO_STREAM_VOICE_CALL,
- volumeAmpl, mIoHandle, delayMs);
+ volumeAmpl, mutedByGroup, mIoHandle, delayMs);
}
}
}
@@ -580,18 +589,19 @@
// Allows to mute SW Gain on AudioFlinger only for volume group with explicit
// stream(s)
if (!streamTypes.empty() || (getActiveVolumeSources().size() == 1)) {
- const bool canMute = muted && (volumeDb != 0.0f) && !streamTypes.empty();
- float volumeAmpl = canMute ? 0.0f : Volume::DbToAmpl(0);
+ const bool canMute = mutedByGroup && !streamTypes.empty();
+ const float volumeAmpl = Volume::DbToAmpl(0);
for (const auto &stream: streams) {
- mClientInterface->setStreamVolume(stream, volumeAmpl, mIoHandle, delayMs);
+ mClientInterface->setStreamVolume(stream, volumeAmpl, canMute, mIoHandle,
+ delayMs);
}
}
} else {
- float volumeAmpl = (muted && volumeDb != 0.0f) ? 0.0f : Volume::DbToAmpl(0);
+ float volumeAmpl = Volume::DbToAmpl(0);
ALOGV("%s: output: %d, vs: %d, active vs count: %zu", __func__,
mIoHandle, vs, getActiveVolumeSources().size());
mClientInterface->setPortsVolume(
- getPortsForVolumeSource(vs), volumeAmpl, mIoHandle, delayMs);
+ getPortsForVolumeSource(vs), volumeAmpl, mutedByGroup, mIoHandle, delayMs);
}
AudioGains gains = devicePort->getGains();
int gainMinValueInMb = gains[0]->getMinValueInMb();
@@ -615,26 +625,27 @@
if (audioserver_flags::portid_volume_management()) {
if (callVolSrc != VOLUME_SOURCE_NONE) {
mClientInterface->setPortsVolume(getPortsForVolumeSource(callVolSrc), volumeAmpl,
- mIoHandle, delayMs);
+ mutedByGroup, mIoHandle, delayMs);
}
} else {
- mClientInterface->setStreamVolume(AUDIO_STREAM_VOICE_CALL, volumeAmpl, mIoHandle,
- delayMs);
+ mClientInterface->setStreamVolume(AUDIO_STREAM_VOICE_CALL, volumeAmpl, mutedByGroup,
+ mIoHandle, delayMs);
}
if (callVolSrc != VOLUME_SOURCE_NONE) {
- setCurVolume(callVolSrc, getCurVolume(vs), true);
+ setCurVolume(callVolSrc, getCurVolume(vs), mutedByGroup, true);
}
}
if (audioserver_flags::portid_volume_management()) {
- ALOGV("%s output %d for volumeSource %d, volume %f, delay %d active=%d", __func__,
- mIoHandle, vs, volumeDb, delayMs, isActive(vs));
- mClientInterface->setPortsVolume(getPortsForVolumeSource(vs), volumeAmpl, mIoHandle,
- delayMs);
+ ALOGV("%s output %d for volumeSource %d, volume %f, mutedByGroup %d, delay %d active=%d",
+ __func__, mIoHandle, vs, volumeDb, mutedByGroup, delayMs, isActive(vs));
+ mClientInterface->setPortsVolume(getPortsForVolumeSource(vs), volumeAmpl, mutedByGroup,
+ mIoHandle, delayMs);
} else {
for (const auto &stream : streams) {
- ALOGV("%s output %d for volumeSource %d, volume %f, delay %d stream=%s", __func__,
- mIoHandle, vs, volumeDb, delayMs, toString(stream).c_str());
- mClientInterface->setStreamVolume(stream, volumeAmpl, mIoHandle, delayMs);
+ ALOGV("%s output %d for volumeSource %d, volume %f, mutedByGroup %d delay %d stream=%s",
+ __func__, mIoHandle, vs, volumeDb, mutedByGroup, delayMs,
+ toString(stream).c_str());
+ mClientInterface->setStreamVolume(stream, volumeAmpl, mutedByGroup, mIoHandle, delayMs);
}
}
return true;
diff --git a/services/audiopolicy/common/managerdefinitions/src/IOProfile.cpp b/services/audiopolicy/common/managerdefinitions/src/IOProfile.cpp
index 991b103..bc9eb20 100644
--- a/services/audiopolicy/common/managerdefinitions/src/IOProfile.cpp
+++ b/services/audiopolicy/common/managerdefinitions/src/IOProfile.cpp
@@ -42,15 +42,14 @@
audio_channel_mask_t channelMask,
audio_channel_mask_t *updatedChannelMask,
// FIXME type punning here
- uint32_t flags,
- bool exactMatchRequiredForInputFlags) const {
+ uint32_t flags) const {
const bool isPlaybackThread =
getType() == AUDIO_PORT_TYPE_MIX && getRole() == AUDIO_PORT_ROLE_SOURCE;
const bool isRecordThread =
getType() == AUDIO_PORT_TYPE_MIX && getRole() == AUDIO_PORT_ROLE_SINK;
ALOG_ASSERT(isPlaybackThread != isRecordThread);
- if (!areAllDevicesSupported(devices) ||
- !isCompatibleProfileForFlags(flags, exactMatchRequiredForInputFlags)) {
+ const auto flagsCompatibleScore = getFlagsCompatibleScore(flags);
+ if (!areAllDevicesSupported(devices) || flagsCompatibleScore == NO_MATCH) {
return NO_MATCH;
}
@@ -81,7 +80,11 @@
result = EXACT_MATCH;
} else if (checkCompatibleAudioProfile(
myUpdatedSamplingRate, myUpdatedChannelMask, myUpdatedFormat) == NO_ERROR) {
- result = PARTIAL_MATCH;
+ if (flagsCompatibleScore == EXACT_MATCH) {
+ result = PARTIAL_MATCH_WITH_FLAG;
+ } else {
+ result = PARTIAL_MATCH;
+ }
} else {
return result;
}
@@ -118,32 +121,8 @@
return mSupportedDevices.containsAllDevices(devices);
}
-bool IOProfile::isCompatibleProfileForFlags(uint32_t flags,
- bool exactMatchRequiredForInputFlags) const {
- const bool isPlaybackThread =
- getType() == AUDIO_PORT_TYPE_MIX && getRole() == AUDIO_PORT_ROLE_SOURCE;
- const bool isRecordThread =
- getType() == AUDIO_PORT_TYPE_MIX && getRole() == AUDIO_PORT_ROLE_SINK;
- ALOG_ASSERT(isPlaybackThread != isRecordThread);
-
- const uint32_t mustMatchOutputFlags =
- AUDIO_OUTPUT_FLAG_DIRECT|AUDIO_OUTPUT_FLAG_HW_AV_SYNC|AUDIO_OUTPUT_FLAG_MMAP_NOIRQ;
- if (isPlaybackThread &&
- !audio_output_flags_is_subset((audio_output_flags_t)getFlags(),
- (audio_output_flags_t)flags,
- mustMatchOutputFlags)) {
- return false;
- }
- // The only input flag that is allowed to be different is the fast flag.
- // An existing fast stream is compatible with a normal track request.
- // An existing normal stream is compatible with a fast track request,
- // but the fast request will be denied by AudioFlinger and converted to normal track.
- if (isRecordThread && ((getFlags() ^ flags) &
- ~(exactMatchRequiredForInputFlags ? AUDIO_INPUT_FLAG_NONE : AUDIO_INPUT_FLAG_FAST))) {
- return false;
- }
-
- return true;
+bool IOProfile::isCompatibleProfileForFlags(uint32_t flags) const {
+ return getFlagsCompatibleScore(flags) != NO_MATCH;
}
bool IOProfile::containsSingleDeviceSupportingEncodedFormats(
@@ -228,6 +207,39 @@
}
}
+IOProfile::CompatibilityScore IOProfile::getFlagsCompatibleScore(uint32_t flags) const {
+ const bool isPlaybackThread =
+ getType() == AUDIO_PORT_TYPE_MIX && getRole() == AUDIO_PORT_ROLE_SOURCE;
+ const bool isRecordThread =
+ getType() == AUDIO_PORT_TYPE_MIX && getRole() == AUDIO_PORT_ROLE_SINK;
+ ALOG_ASSERT(isPlaybackThread != isRecordThread);
+
+ const uint32_t mustMatchOutputFlags =
+ AUDIO_OUTPUT_FLAG_DIRECT|AUDIO_OUTPUT_FLAG_HW_AV_SYNC|AUDIO_OUTPUT_FLAG_MMAP_NOIRQ;
+ if (isPlaybackThread &&
+ !audio_output_flags_is_subset((audio_output_flags_t)getFlags(),
+ (audio_output_flags_t)flags,
+ mustMatchOutputFlags)) {
+ return NO_MATCH;
+ }
+ // The only input flag that is allowed to be different is the fast flag.
+ // An existing fast stream is compatible with a normal track request.
+ // An existing normal stream is compatible with a fast track request,
+ // but the fast request will be denied by AudioFlinger and converted to normal track.
+ if (isRecordThread) {
+ const auto unmatchedFlag = getFlags() ^ flags;
+ if (unmatchedFlag == AUDIO_INPUT_FLAG_NONE) {
+ return EXACT_MATCH;
+ } else if (unmatchedFlag == AUDIO_INPUT_FLAG_FAST) {
+ return PARTIAL_MATCH;
+ } else {
+ return NO_MATCH;
+ }
+ }
+
+ return EXACT_MATCH;
+}
+
void IOProfile::dump(String8 *dst, int spaces) const
{
String8 extraInfo;
diff --git a/services/audiopolicy/engine/common/include/VolumeCurve.h b/services/audiopolicy/engine/common/include/VolumeCurve.h
index 2e75ff1..e5f7a41 100644
--- a/services/audiopolicy/engine/common/include/VolumeCurve.h
+++ b/services/audiopolicy/engine/common/include/VolumeCurve.h
@@ -179,6 +179,12 @@
}
StreamTypeVector getStreamTypes() const override { return mStreams; }
+ void setIsMuted(bool isMuted)
+ {
+ mIsMuted = isMuted;
+ }
+ bool isMuted() const { return mIsMuted; }
+
void dump(String8 *dst, int spaces = 0, bool curvePoints = false) const override;
private:
@@ -187,6 +193,7 @@
int mIndexMin; /**< min volume index. */
int mIndexMax; /**< max volume index. */
const bool mCanBeMuted = true; /**< true is the stream can be muted. */
+ bool mIsMuted = false; /**< true if the stream is currently muted. */
AttributesVector mAttributes;
StreamTypeVector mStreams; /**< Keep it for legacy. */
diff --git a/services/audiopolicy/engine/common/src/EngineDefaultConfig.h b/services/audiopolicy/engine/common/src/EngineDefaultConfig.h
index c4bf64a..229c5e2 100644
--- a/services/audiopolicy/engine/common/src/EngineDefaultConfig.h
+++ b/services/audiopolicy/engine/common/src/EngineDefaultConfig.h
@@ -131,6 +131,8 @@
{AUDIO_CONTENT_TYPE_UNKNOWN, AUDIO_USAGE_UNKNOWN, AUDIO_SOURCE_DEFAULT,
AUDIO_FLAG_BEACON, ""},
{AUDIO_CONTENT_TYPE_ULTRASOUND, AUDIO_USAGE_UNKNOWN, AUDIO_SOURCE_DEFAULT,
+ AUDIO_FLAG_NONE, ""},
+ {AUDIO_CONTENT_TYPE_UNKNOWN, AUDIO_USAGE_SPEAKER_CLEANUP, AUDIO_SOURCE_DEFAULT,
AUDIO_FLAG_NONE, ""}
}
}
diff --git a/services/audiopolicy/engineconfigurable/parameter-framework/plugin/Android.bp b/services/audiopolicy/engineconfigurable/parameter-framework/plugin/Android.bp
index c9a77a4..27a290f 100644
--- a/services/audiopolicy/engineconfigurable/parameter-framework/plugin/Android.bp
+++ b/services/audiopolicy/engineconfigurable/parameter-framework/plugin/Android.bp
@@ -45,4 +45,7 @@
"libparameter",
"libutils",
],
+ defaults: [
+ "aconfig_lib_cc_shared_link.defaults",
+ ],
}
diff --git a/services/audiopolicy/enginedefault/src/Engine.cpp b/services/audiopolicy/enginedefault/src/Engine.cpp
index 7de6939..1082d31 100644
--- a/services/audiopolicy/enginedefault/src/Engine.cpp
+++ b/services/audiopolicy/enginedefault/src/Engine.cpp
@@ -156,40 +156,21 @@
return EngineBase::setForceUse(usage, config);
}
-bool Engine::isBtScoActive(DeviceVector& availableOutputDevices,
- const SwAudioOutputCollection &outputs) const {
+bool Engine::isBtScoActive(DeviceVector& availableOutputDevices) const {
+ // SCO is considered active if:
+ // 1) a SCO device is connected
+ // 2) the preferred device for PHONE strategy is BT SCO: this is controlled only by java
+ // AudioService and is only true if the SCO audio link has been confirmed active by BT.
if (availableOutputDevices.getDevicesFromTypes(getAudioDeviceOutAllScoSet()).isEmpty()) {
return false;
}
- // SCO is active if:
- // 1) we are in a call and SCO is the preferred device for PHONE strategy
- if (isInCall() && audio_is_bluetooth_out_sco_device(
+
+ if (!audio_is_bluetooth_out_sco_device(
getPreferredDeviceTypeForLegacyStrategy(availableOutputDevices, STRATEGY_PHONE))) {
- return true;
+ return false;
}
- // 2) A strategy for which the preferred device is SCO is active
- for (const auto &ps : getOrderedProductStrategies()) {
- if (outputs.isStrategyActive(ps) &&
- !getPreferredAvailableDevicesForProductStrategy(availableOutputDevices, ps)
- .getDevicesFromTypes(getAudioDeviceOutAllScoSet()).isEmpty()) {
- return true;
- }
- }
- // 3) a ringtone is active and SCO is used for ringing
- if (outputs.isActiveLocally(toVolumeSource(AUDIO_STREAM_RING))
- && (getForceUse(AUDIO_POLICY_FORCE_FOR_VIBRATE_RINGING)
- == AUDIO_POLICY_FORCE_BT_SCO)) {
- return true;
- }
- // 4) an active input is routed from SCO
- DeviceVector availableInputDevices = getApmObserver()->getAvailableInputDevices();
- const auto &inputs = getApmObserver()->getInputs();
- if (inputs.activeInputsCountOnDevices(availableInputDevices.getDevicesFromType(
- AUDIO_DEVICE_IN_BLUETOOTH_SCO_HEADSET)) > 0) {
- return true;
- }
- return false;
+ return true;
}
void Engine::filterOutputDevicesForStrategy(legacy_strategy strategy,
@@ -200,7 +181,7 @@
if (com::android::media::audioserver::use_bt_sco_for_media()) {
// remove A2DP and LE Audio devices whenever BT SCO is in use
- if (isBtScoActive(availableOutputDevices, outputs)) {
+ if (isBtScoActive(availableOutputDevices)) {
availableOutputDevices.remove(
availableOutputDevices.getDevicesFromTypes(getAudioDeviceOutAllA2dpSet()));
availableOutputDevices.remove(
@@ -372,69 +353,58 @@
// if SCO headset is connected and we are told to use it, play ringtone over
// speaker and BT SCO
- if (!availableOutputDevices.getDevicesFromTypes(getAudioDeviceOutAllScoSet()).isEmpty()) {
- DeviceVector devices2;
- devices2 = availableOutputDevices.getFirstDevicesFromTypes({
+ if (!availableOutputDevices.getDevicesFromTypes(getAudioDeviceOutAllScoSet()).isEmpty()
+ && audio_is_bluetooth_out_sco_device(getPreferredDeviceTypeForLegacyStrategy(
+ availableOutputDevices, STRATEGY_PHONE))) {
+ DeviceVector devices2 = availableOutputDevices.getFirstDevicesFromTypes({
AUDIO_DEVICE_OUT_BLUETOOTH_SCO_CARKIT, AUDIO_DEVICE_OUT_BLUETOOTH_SCO_HEADSET,
AUDIO_DEVICE_OUT_BLUETOOTH_SCO});
+ // devices2 cannot be empty at this point
// Use ONLY Bluetooth SCO output when ringing in vibration mode
if (!((getForceUse(AUDIO_POLICY_FORCE_FOR_SYSTEM) == AUDIO_POLICY_FORCE_SYSTEM_ENFORCED)
- && (strategy == STRATEGY_ENFORCED_AUDIBLE))) {
- if (getForceUse(AUDIO_POLICY_FORCE_FOR_VIBRATE_RINGING)
- == AUDIO_POLICY_FORCE_BT_SCO) {
- if (!devices2.isEmpty()) {
- devices = devices2;
- break;
- }
- }
+ && (strategy == STRATEGY_ENFORCED_AUDIBLE))
+ && (getForceUse(AUDIO_POLICY_FORCE_FOR_VIBRATE_RINGING)
+ == AUDIO_POLICY_FORCE_BT_SCO)) {
+ devices = devices2;
+ break;
}
// Use both Bluetooth SCO and phone default output when ringing in normal mode
- if (audio_is_bluetooth_out_sco_device(getPreferredDeviceTypeForLegacyStrategy(
- availableOutputDevices, STRATEGY_PHONE))) {
- if (strategy == STRATEGY_SONIFICATION) {
- devices.replaceDevicesByType(
- AUDIO_DEVICE_OUT_SPEAKER,
- availableOutputDevices.getDevicesFromType(
- AUDIO_DEVICE_OUT_SPEAKER_SAFE));
- }
- if (!devices2.isEmpty()) {
- devices.add(devices2);
- break;
- }
+ if (strategy == STRATEGY_SONIFICATION) {
+ devices.replaceDevicesByType(
+ AUDIO_DEVICE_OUT_SPEAKER,
+ availableOutputDevices.getDevicesFromType(
+ AUDIO_DEVICE_OUT_SPEAKER_SAFE));
}
+ devices.add(devices2);
+ break;
}
// if LEA headset is connected and we are told to use it, play ringtone over
// speaker and BT LEA
- if (!availableOutputDevices.getDevicesFromTypes(getAudioDeviceOutAllBleSet()).isEmpty()) {
+ if (!availableOutputDevices.getDevicesFromTypes(getAudioDeviceOutAllBleSet()).isEmpty()
+ && audio_is_ble_out_device(getPreferredDeviceTypeForLegacyStrategy(
+ availableOutputDevices, STRATEGY_PHONE))) {
DeviceVector devices2;
devices2 = availableOutputDevices.getFirstDevicesFromTypes({
AUDIO_DEVICE_OUT_BLE_HEADSET, AUDIO_DEVICE_OUT_BLE_SPEAKER});
+ // devices2 cannot be empty at this point
// Use ONLY Bluetooth LEA output when ringing in vibration mode
if (!((getForceUse(AUDIO_POLICY_FORCE_FOR_SYSTEM) == AUDIO_POLICY_FORCE_SYSTEM_ENFORCED)
- && (strategy == STRATEGY_ENFORCED_AUDIBLE))) {
- if (getForceUse(AUDIO_POLICY_FORCE_FOR_VIBRATE_RINGING)
- == AUDIO_POLICY_FORCE_BT_BLE) {
- if (!devices2.isEmpty()) {
- devices = devices2;
- break;
- }
- }
+ && (strategy == STRATEGY_ENFORCED_AUDIBLE))
+ && (getForceUse(AUDIO_POLICY_FORCE_FOR_VIBRATE_RINGING)
+ == AUDIO_POLICY_FORCE_BT_BLE)) {
+ devices = devices2;
+ break;
}
// Use both Bluetooth LEA and phone default output when ringing in normal mode
- if (audio_is_ble_out_device(getPreferredDeviceTypeForLegacyStrategy(
- availableOutputDevices, STRATEGY_PHONE))) {
- if (strategy == STRATEGY_SONIFICATION) {
- devices.replaceDevicesByType(
- AUDIO_DEVICE_OUT_SPEAKER,
- availableOutputDevices.getDevicesFromType(
- AUDIO_DEVICE_OUT_SPEAKER_SAFE));
- }
- if (!devices2.isEmpty()) {
- devices.add(devices2);
- break;
- }
+ if (strategy == STRATEGY_SONIFICATION) {
+ devices.replaceDevicesByType(
+ AUDIO_DEVICE_OUT_SPEAKER,
+ availableOutputDevices.getDevicesFromType(
+ AUDIO_DEVICE_OUT_SPEAKER_SAFE));
}
+ devices.add(devices2);
+ break;
}
// The second device used for sonification is the same as the device used by media strategy
@@ -497,6 +467,18 @@
// Get the last connected device of wired and bluetooth a2dp
devices2 = availableOutputDevices.getFirstDevicesFromTypes(
getLastRemovableMediaDevices(GROUP_NONE, excludedDevices));
+ if (com::android::media::audioserver::use_bt_sco_for_media()) {
+ if (isBtScoActive(availableOutputDevices)
+ && !(devices2.getDevicesFromTypes(
+ getAudioDeviceOutAllA2dpSet()).isEmpty()
+ && devices2.getDevicesFromTypes(
+ getAudioDeviceOutAllBleSet()).isEmpty())) {
+ devices2 = availableOutputDevices.getFirstDevicesFromTypes(
+ { AUDIO_DEVICE_OUT_BLUETOOTH_SCO_CARKIT,
+ AUDIO_DEVICE_OUT_BLUETOOTH_SCO_HEADSET,
+ AUDIO_DEVICE_OUT_BLUETOOTH_SCO});
+ }
+ }
} else {
// Get the last connected device of wired except bluetooth a2dp
devices2 = availableOutputDevices.getFirstDevicesFromTypes(
@@ -504,15 +486,6 @@
}
}
- if (com::android::media::audioserver::use_bt_sco_for_media()) {
- if (devices2.isEmpty() && isBtScoActive(availableOutputDevices, outputs)) {
- devices2 = availableOutputDevices.getFirstDevicesFromTypes(
- { AUDIO_DEVICE_OUT_BLUETOOTH_SCO_CARKIT,
- AUDIO_DEVICE_OUT_BLUETOOTH_SCO_HEADSET,
- AUDIO_DEVICE_OUT_BLUETOOTH_SCO});
- }
- }
-
if ((devices2.isEmpty()) &&
(getForceUse(AUDIO_POLICY_FORCE_FOR_DOCK) == AUDIO_POLICY_FORCE_ANALOG_DOCK)) {
devices2 = availableOutputDevices.getDevicesFromType(
diff --git a/services/audiopolicy/enginedefault/src/Engine.h b/services/audiopolicy/enginedefault/src/Engine.h
index 862b5fd..e9c71dd 100644
--- a/services/audiopolicy/enginedefault/src/Engine.h
+++ b/services/audiopolicy/enginedefault/src/Engine.h
@@ -95,8 +95,7 @@
DeviceVector getDisabledDevicesForInputSource(
const DeviceVector& availableInputDevices, audio_source_t inputSource) const;
- bool isBtScoActive(DeviceVector& availableOutputDevices,
- const SwAudioOutputCollection &outputs) const;
+ bool isBtScoActive(DeviceVector& availableOutputDevices) const;
std::map<product_strategy_t, legacy_strategy> mLegacyStrategyMap;
};
diff --git a/services/audiopolicy/fuzzer/audiopolicy_fuzzer.cpp b/services/audiopolicy/fuzzer/audiopolicy_fuzzer.cpp
index fd40c04..b17a248 100644
--- a/services/audiopolicy/fuzzer/audiopolicy_fuzzer.cpp
+++ b/services/audiopolicy/fuzzer/audiopolicy_fuzzer.cpp
@@ -198,7 +198,7 @@
virtual ~AudioPolicyManagerFuzzer() = default;
virtual bool initialize();
virtual void SetUpManagerConfig();
- bool getOutputForAttr(audio_port_handle_t *selectedDeviceId, audio_format_t format,
+ bool getOutputForAttr(DeviceIdVector *selectedDeviceIds, audio_format_t format,
audio_channel_mask_t channelMask, int sampleRate,
audio_output_flags_t flags = AUDIO_OUTPUT_FLAG_NONE,
audio_io_handle_t *output = nullptr,
@@ -248,7 +248,7 @@
void AudioPolicyManagerFuzzer::SetUpManagerConfig() { mConfig->setDefault(); }
bool AudioPolicyManagerFuzzer::getOutputForAttr(
- audio_port_handle_t *selectedDeviceId, audio_format_t format, audio_channel_mask_t channelMask,
+ DeviceIdVector *selectedDeviceIds, audio_format_t format, audio_channel_mask_t channelMask,
int sampleRate, audio_output_flags_t flags, audio_io_handle_t *output,
audio_port_handle_t *portId, audio_attributes_t attr) {
audio_io_handle_t localOutput;
@@ -266,14 +266,15 @@
bool isSpatialized;
bool isBitPerfect;
float volume;
+ bool muted;
// TODO b/182392769: use attribution source util
AttributionSourceState attributionSource;
attributionSource.uid = 0;
attributionSource.token = sp<BBinder>::make();
if (mManager->getOutputForAttr(&attr, output, AUDIO_SESSION_NONE, &stream, attributionSource,
- &config, &flags, selectedDeviceId, portId, {}, &outputType, &isSpatialized,
- &isBitPerfect, &volume) != OK) {
+ &config, &flags, selectedDeviceIds, portId, {}, &outputType, &isSpatialized,
+ &isBitPerfect, &volume, &muted) != OK) {
return false;
}
if (*output == AUDIO_IO_HANDLE_NONE || *portId == AUDIO_PORT_HANDLE_NONE) {
@@ -285,7 +286,7 @@
bool AudioPolicyManagerFuzzer::getInputForAttr(
const audio_attributes_t &attr, audio_unique_id_t riid, audio_port_handle_t *selectedDeviceId,
audio_format_t format, audio_channel_mask_t channelMask, int sampleRate,
- audio_input_flags_t flags, audio_port_handle_t *portId, uint32_t *virtualDeviceId) {
+ audio_input_flags_t flags, audio_port_handle_t *portId, uint32_t*) {
audio_io_handle_t input = AUDIO_IO_HANDLE_NONE;
audio_config_base_t config = AUDIO_CONFIG_BASE_INITIALIZER;
config.sample_rate = sampleRate;
@@ -294,16 +295,15 @@
audio_port_handle_t localPortId;
if (!portId) portId = &localPortId;
*portId = AUDIO_PORT_HANDLE_NONE;
- AudioPolicyInterface::input_type_t inputType;
AttributionSourceState attributionSource;
attributionSource.uid = 0;
attributionSource.token = sp<BBinder>::make();
- if (mManager->getInputForAttr(&attr, &input, riid, AUDIO_SESSION_NONE, attributionSource,
- &config, flags, selectedDeviceId, &inputType, portId, virtualDeviceId) != OK) {
- return false;
- }
- if (*portId == AUDIO_PORT_HANDLE_NONE || input == AUDIO_IO_HANDLE_NONE) {
+ const auto inputRes = mManager->getInputForAttr(attr, input, *selectedDeviceId, config, flags,
+ riid, AUDIO_SESSION_NONE, attributionSource);
+ if (!inputRes.has_value()) return false;
+
+ if (inputRes->portId == AUDIO_PORT_HANDLE_NONE || inputRes->input == AUDIO_IO_HANDLE_NONE) {
return false;
}
return true;
@@ -725,8 +725,8 @@
std::string tags(mFdp->ConsumeBool() ? "" : "addr=remote_submix_media");
strncpy(attr.tags, tags.c_str(), AUDIO_ATTRIBUTES_TAGS_MAX_SIZE - 1);
- audio_port_handle_t playbackRoutedPortId = AUDIO_PORT_HANDLE_NONE;
- getOutputForAttr(&playbackRoutedPortId, mAudioConfig.format, mAudioConfig.channel_mask,
+ DeviceIdVector playbackRoutedPortIds;
+ getOutputForAttr(&playbackRoutedPortIds, mAudioConfig.format, mAudioConfig.channel_mask,
mAudioConfig.sample_rate, AUDIO_OUTPUT_FLAG_NONE, nullptr /*output*/,
nullptr /*portId*/, attr);
}
@@ -806,13 +806,13 @@
findDevicePort(AUDIO_PORT_ROLE_SINK, getValueFromVector<audio_devices_t>(mFdp, kAudioDevices),
mMixAddress, &injectionPort);
- audio_port_handle_t selectedDeviceId = AUDIO_PORT_HANDLE_NONE;
+ DeviceIdVector selectedDeviceIds;
audio_usage_t usage = getValueFromVector<audio_usage_t>(mFdp, kAudioUsages);
audio_attributes_t attr = {AUDIO_CONTENT_TYPE_UNKNOWN, usage, AUDIO_SOURCE_DEFAULT,
AUDIO_FLAG_NONE, ""};
std::string tags = std::string("addr=") + mMixAddress;
strncpy(attr.tags, tags.c_str(), AUDIO_ATTRIBUTES_TAGS_MAX_SIZE - 1);
- getOutputForAttr(&selectedDeviceId, mAudioConfig.format, mAudioConfig.channel_mask,
+ getOutputForAttr(&selectedDeviceIds, mAudioConfig.format, mAudioConfig.channel_mask,
mAudioConfig.sample_rate /*sampleRate*/, AUDIO_OUTPUT_FLAG_NONE,
nullptr /*output*/, &mPortId, attr);
ret = mManager->startOutput(mPortId);
@@ -902,15 +902,17 @@
audio_is_output_device(type) ? AUDIO_PORT_ROLE_SINK : AUDIO_PORT_ROLE_SOURCE;
findDevicePort(role, type, address, &devicePort);
- audio_port_handle_t routedPortId = devicePort.id;
// Try start input or output according to the device type
if (audio_is_output_devices(type)) {
- getOutputForAttr(&routedPortId, getValueFromVector<audio_format_t>(mFdp, kAudioFormats),
+ DeviceIdVector routedPortIds = { devicePort.id };
+ getOutputForAttr(&routedPortIds,
+ getValueFromVector<audio_format_t>(mFdp, kAudioFormats),
getValueFromVector<audio_channel_mask_t>(mFdp, kAudioChannelOutMasks),
getValueFromVector<uint32_t>(mFdp, kSamplingRates),
AUDIO_OUTPUT_FLAG_NONE);
} else if (audio_is_input_device(type)) {
RecordingActivityTracker tracker;
+ audio_port_handle_t routedPortId = devicePort.id;
getInputForAttr({}, tracker.getRiid(), &routedPortId,
getValueFromVector<audio_format_t>(mFdp, kAudioFormats),
getValueFromVector<audio_channel_mask_t>(mFdp, kAudioChannelInMasks),
@@ -983,10 +985,10 @@
if (ret != NO_ERROR) {
return;
}
- audio_port_handle_t selectedDeviceId = AUDIO_PORT_HANDLE_NONE;
+ DeviceIdVector selectedDeviceIds;
audio_io_handle_t output;
audio_port_handle_t portId;
- getOutputForAttr(&selectedDeviceId, getValueFromVector<audio_format_t>(mFdp, kAudioFormats),
+ getOutputForAttr(&selectedDeviceIds, getValueFromVector<audio_format_t>(mFdp, kAudioFormats),
getValueFromVector<audio_channel_mask_t>(mFdp, kAudioChannelOutMasks),
getValueFromVector<uint32_t>(mFdp, kSamplingRates), flags, &output, &portId);
sp<SwAudioOutputDescriptor> outDesc = mManager->getOutputs().valueFor(output);
diff --git a/services/audiopolicy/managerdefault/Android.bp b/services/audiopolicy/managerdefault/Android.bp
index e6f6374..94be786 100644
--- a/services/audiopolicy/managerdefault/Android.bp
+++ b/services/audiopolicy/managerdefault/Android.bp
@@ -41,6 +41,7 @@
// a dependency on it in the device makefile. There will be no build time
// conflict with libaudiopolicyenginedefault.
"audioclient-types-aidl-cpp",
+ "audiopolicy-aidl-cpp",
// Flag support
"android.media.audiopolicy-aconfig-cc",
"com.android.media.audioserver-aconfig-cc",
diff --git a/services/audiopolicy/managerdefault/AudioPolicyManager.cpp b/services/audiopolicy/managerdefault/AudioPolicyManager.cpp
index 89b1fc7..18b5ea9 100644
--- a/services/audiopolicy/managerdefault/AudioPolicyManager.cpp
+++ b/services/audiopolicy/managerdefault/AudioPolicyManager.cpp
@@ -14,7 +14,6 @@
* limitations under the License.
*/
-#include "utils/Errors.h"
#define LOG_TAG "APM_AudioPolicyManager"
// Need to keep the log statements even in production builds
@@ -40,11 +39,13 @@
#include <vector>
#include <Serializer.h>
+#include <android/media/audio/common/AudioMMapPolicy.h>
#include <android/media/audio/common/AudioPort.h>
#include <com_android_media_audio.h>
#include <android_media_audiopolicy.h>
#include <com_android_media_audioserver.h>
#include <cutils/bitops.h>
+#include <error/expected_utils.h>
#include <media/AudioParameter.h>
#include <policy.h>
#include <private/android_filesystem_config.h>
@@ -64,8 +65,14 @@
using android::media::audio::common::AudioDevice;
using android::media::audio::common::AudioDeviceAddress;
+using android::media::audio::common::AudioDeviceDescription;
+using android::media::audio::common::AudioMMapPolicy;
+using android::media::audio::common::AudioMMapPolicyInfo;
+using android::media::audio::common::AudioMMapPolicyType;
using android::media::audio::common::AudioPortDeviceExt;
using android::media::audio::common::AudioPortExt;
+using android::media::audio::common::AudioConfigBase;
+using binder::Status;
using com::android::media::audioserver::fix_call_audio_patch;
using content::AttributionSourceState;
@@ -1257,7 +1264,7 @@
uid_t uid,
audio_config_t *config,
audio_output_flags_t *flags,
- audio_port_handle_t *selectedDeviceId,
+ DeviceIdVector *selectedDeviceIds,
bool *isRequestedDeviceForExclusiveUse,
std::vector<sp<AudioPolicyMix>> *secondaryMixes,
output_type_t *outputType,
@@ -1265,7 +1272,8 @@
bool *isBitPerfect)
{
DeviceVector outputDevices;
- const audio_port_handle_t requestedPortId = *selectedDeviceId;
+ audio_port_handle_t requestedPortId = getFirstDeviceId(*selectedDeviceIds);
+ selectedDeviceIds->clear();
DeviceVector msdDevices = getMsdAudioOutDevices();
const sp<DeviceDescriptor> requestedDevice =
mAvailableOutputDevices.getDeviceFromId(requestedPortId);
@@ -1342,8 +1350,9 @@
if (policyDesc != nullptr) {
policyDesc->mPolicyMix = primaryMix;
*output = policyDesc->mIoHandle;
- *selectedDeviceId = policyMixDevice != nullptr ? policyMixDevice->getId()
- : AUDIO_PORT_HANDLE_NONE;
+ if (policyMixDevice != nullptr) {
+ selectedDeviceIds->push_back(policyMixDevice->getId());
+ }
if ((policyDesc->mFlags & AUDIO_OUTPUT_FLAG_DIRECT) != AUDIO_OUTPUT_FLAG_DIRECT) {
// Remove direct flag as it is not on a direct output.
*flags = (audio_output_flags_t) (*flags & ~AUDIO_OUTPUT_FLAG_DIRECT);
@@ -1420,6 +1429,16 @@
(!info->isBitPerfect() || info->getActiveClientCount() == 0)) {
info = nullptr;
}
+
+ if (info != nullptr && info->isBitPerfect() &&
+ (*flags & (AUDIO_OUTPUT_FLAG_DIRECT | AUDIO_OUTPUT_FLAG_COMPRESS_OFFLOAD |
+ AUDIO_OUTPUT_FLAG_HW_AV_SYNC | AUDIO_OUTPUT_FLAG_MMAP_NOIRQ)) != 0) {
+ // Reject direct request if a preferred mixer config in use is bit-perfect.
+ ALOGD("%s reject direct request as bit-perfect mixer attributes is active",
+ __func__);
+ return BAD_VALUE;
+ }
+
if (com::android::media::audioserver::
fix_concurrent_playback_behavior_with_bit_perfect_client()) {
if (info != nullptr && info->getUid() == uid &&
@@ -1470,11 +1489,13 @@
return INVALID_OPERATION;
}
- *selectedDeviceId = getFirstDeviceId(outputDevices);
for (auto &outputDevice : outputDevices) {
- if (outputDevice->getId() == mConfig->getDefaultOutputDevice()->getId()) {
- *selectedDeviceId = outputDevice->getId();
- break;
+ if (std::find(selectedDeviceIds->begin(), selectedDeviceIds->end(),
+ outputDevice->getId()) == selectedDeviceIds->end()) {
+ selectedDeviceIds->push_back(outputDevice->getId());
+ if (outputDevice->getId() == mConfig->getDefaultOutputDevice()->getId()) {
+ std::swap(selectedDeviceIds->front(), selectedDeviceIds->back());
+ }
}
}
@@ -1484,7 +1505,8 @@
*outputType = API_OUTPUT_LEGACY;
}
- ALOGV("%s returns output %d selectedDeviceId %d", __func__, *output, *selectedDeviceId);
+ ALOGV("%s returns output %d selectedDeviceIds %s", __func__, *output,
+ toString(*selectedDeviceIds).c_str());
return NO_ERROR;
}
@@ -1496,13 +1518,14 @@
const AttributionSourceState& attributionSource,
audio_config_t *config,
audio_output_flags_t *flags,
- audio_port_handle_t *selectedDeviceId,
+ DeviceIdVector *selectedDeviceIds,
audio_port_handle_t *portId,
std::vector<audio_io_handle_t> *secondaryOutputs,
output_type_t *outputType,
bool *isSpatialized,
bool *isBitPerfect,
- float *volume)
+ float *volume,
+ bool *muted)
{
// The supplied portId must be AUDIO_PORT_HANDLE_NONE
if (*portId != AUDIO_PORT_HANDLE_NONE) {
@@ -1510,20 +1533,22 @@
}
const uid_t uid = VALUE_OR_RETURN_STATUS(
aidl2legacy_int32_t_uid_t(attributionSource.uid));
- const audio_port_handle_t requestedPortId = *selectedDeviceId;
audio_attributes_t resultAttr;
bool isRequestedDeviceForExclusiveUse = false;
std::vector<sp<AudioPolicyMix>> secondaryMixes;
- const sp<DeviceDescriptor> requestedDevice =
- mAvailableOutputDevices.getDeviceFromId(requestedPortId);
+ DeviceIdVector requestedDeviceIds = *selectedDeviceIds;
// Prevent from storing invalid requested device id in clients
- const audio_port_handle_t sanitizedRequestedPortId =
- requestedDevice != nullptr ? requestedPortId : AUDIO_PORT_HANDLE_NONE;
- *selectedDeviceId = sanitizedRequestedPortId;
+ DeviceIdVector sanitizedRequestedPortIds;
+ for (auto deviceId : *selectedDeviceIds) {
+ if (mAvailableOutputDevices.getDeviceFromId(deviceId) != nullptr) {
+ sanitizedRequestedPortIds.push_back(deviceId);
+ }
+ }
+ *selectedDeviceIds = sanitizedRequestedPortIds;
status_t status = getOutputForAttrInt(&resultAttr, output, session, attr, stream, uid,
- config, flags, selectedDeviceId, &isRequestedDeviceForExclusiveUse,
+ config, flags, selectedDeviceIds, &isRequestedDeviceForExclusiveUse,
secondaryOutputs != nullptr ? &secondaryMixes : nullptr, outputType, isSpatialized,
isBitPerfect);
if (status != NO_ERROR) {
@@ -1548,9 +1573,10 @@
*portId = PolicyAudioPort::getNextUniqueId();
sp<SwAudioOutputDescriptor> outputDesc = mOutputs.valueFor(*output);
+ // TODO(b/367816690): Add device id sets to TrackClientDescriptor
sp<TrackClientDescriptor> clientDesc =
new TrackClientDescriptor(*portId, uid, session, resultAttr, clientConfig,
- sanitizedRequestedPortId, *stream,
+ getFirstDeviceId(sanitizedRequestedPortIds), *stream,
mEngine->getProductStrategyForAttributes(resultAttr),
toVolumeSource(resultAttr),
*flags, isRequestedDeviceForExclusiveUse,
@@ -1559,9 +1585,11 @@
outputDesc->addClient(clientDesc);
*volume = Volume::DbToAmpl(outputDesc->getCurVolume(toVolumeSource(resultAttr)));
+ *muted = outputDesc->isMutedByGroup(toVolumeSource(resultAttr));
- ALOGV("%s() returns output %d requestedPortId %d selectedDeviceId %d for port ID %d", __func__,
- *output, requestedPortId, *selectedDeviceId, *portId);
+ ALOGV("%s() returns output %d requestedPortIds %s selectedDeviceIds %s for port ID %d",
+ __func__, *output, toString(requestedDeviceIds).c_str(),
+ toString(*selectedDeviceIds).c_str(), *portId);
return NO_ERROR;
}
@@ -2324,7 +2352,7 @@
sp<SwAudioOutputDescriptor> outputDesc = mOutputs.getOutputForClient(portId);
if (outputDesc == 0) {
ALOGW("startOutput() no output for client %d", portId);
- return BAD_VALUE;
+ return DEAD_OBJECT;
}
sp<TrackClientDescriptor> client = outputDesc->getClient(portId);
@@ -2607,8 +2635,7 @@
auto &curves = getVolumeCurves(client->attributes());
if (NO_ERROR != checkAndSetVolume(curves, client->volumeSource(),
curves.getVolumeIndex(outputDesc->devices().types()),
- outputDesc,
- outputDesc->devices().types(), 0 /*delay*/,
+ outputDesc, outputDesc->devices().types(), 0 /*delay*/,
outputDesc->useHwGain() /*force*/)) {
// request AudioService to reinitialize the volume curves asynchronously
ALOGE("checkAndSetVolume failed, requesting volume range init");
@@ -2702,7 +2729,7 @@
sp<SwAudioOutputDescriptor> outputDesc = mOutputs.getOutputForClient(portId);
if (outputDesc == 0) {
ALOGW("stopOutput() no output for client %d", portId);
- return BAD_VALUE;
+ return DEAD_OBJECT;
}
sp<TrackClientDescriptor> client = outputDesc->getClient(portId);
@@ -2900,63 +2927,67 @@
return false;
}
-status_t AudioPolicyManager::getInputForAttr(const audio_attributes_t *attr,
- audio_io_handle_t *input,
- audio_unique_id_t riid,
- audio_session_t session,
- const AttributionSourceState& attributionSource,
- audio_config_base_t *config,
- audio_input_flags_t flags,
- audio_port_handle_t *selectedDeviceId,
- input_type_t *inputType,
- audio_port_handle_t *portId,
- uint32_t *virtualDeviceId)
+base::expected<media::GetInputForAttrResponse, std::variant<binder::Status, AudioConfigBase>>
+AudioPolicyManager::getInputForAttr(audio_attributes_t attributes,
+ audio_io_handle_t requestedInput,
+ audio_port_handle_t requestedDeviceId,
+ audio_config_base_t config,
+ audio_input_flags_t flags,
+ audio_unique_id_t riid,
+ audio_session_t session,
+ const AttributionSourceState& attributionSource)
{
ALOGV("%s() source %d, sampling rate %d, format %#x, channel mask %#x, session %d, "
"flags %#x attributes=%s requested device ID %d",
- __func__, attr->source, config->sample_rate, config->format, config->channel_mask,
- session, flags, toString(*attr).c_str(), *selectedDeviceId);
+ __func__, attributes.source, config.sample_rate, config.format, config.channel_mask,
+ session, flags, toString(attributes).c_str(), requestedDeviceId);
- status_t status = NO_ERROR;
- audio_attributes_t attributes = *attr;
sp<AudioPolicyMix> policyMix;
sp<DeviceDescriptor> device;
sp<AudioInputDescriptor> inputDesc;
sp<AudioInputDescriptor> previousInputDesc;
sp<RecordClientDescriptor> clientDesc;
- audio_port_handle_t requestedDeviceId = *selectedDeviceId;
- uid_t uid = VALUE_OR_RETURN_STATUS(aidl2legacy_int32_t_uid_t(attributionSource.uid));
+ uid_t uid = static_cast<uid_t>(attributionSource.uid);
bool isSoundTrigger;
+ int vdi = 0 /* default device id */;
+ audio_io_handle_t input = AUDIO_IO_HANDLE_NONE;
- // The supplied portId must be AUDIO_PORT_HANDLE_NONE
- if (*portId != AUDIO_PORT_HANDLE_NONE) {
- return INVALID_OPERATION;
- }
-
- if (attr->source == AUDIO_SOURCE_DEFAULT) {
+ if (attributes.source == AUDIO_SOURCE_DEFAULT) {
attributes.source = AUDIO_SOURCE_MIC;
}
+ using PermissionReqs = AudioPolicyClientInterface::PermissionReqs;
+ using MixType = AudioPolicyClientInterface::MixType;
+ PermissionReqs permReq {
+ .source = legacy2aidl_audio_source_t_AudioSource(attributes.source).value(),
+ .mixType = MixType::NONE, // can be modified
+ .virtualDeviceId = 0, // can be modified
+ .isHotword = (flags & (AUDIO_INPUT_FLAG_HW_HOTWORD | AUDIO_INPUT_FLAG_HOTWORD_TAP |
+ AUDIO_INPUT_FLAG_HW_LOOKBACK)) != 0,
+ .isCallRedir = (attributes.flags & AUDIO_FLAG_CALL_REDIRECTION) != 0,
+ };
+
// Explicit routing?
sp<DeviceDescriptor> explicitRoutingDevice =
- mAvailableInputDevices.getDeviceFromId(*selectedDeviceId);
+ mAvailableInputDevices.getDeviceFromId(requestedDeviceId);
// special case for mmap capture: if an input IO handle is specified, we reuse this input if
// possible
if ((flags & AUDIO_INPUT_FLAG_MMAP_NOIRQ) == AUDIO_INPUT_FLAG_MMAP_NOIRQ &&
- *input != AUDIO_IO_HANDLE_NONE) {
- ssize_t index = mInputs.indexOfKey(*input);
+ requestedInput != AUDIO_IO_HANDLE_NONE) {
+ input = requestedInput;
+ ssize_t index = mInputs.indexOfKey(requestedInput);
if (index < 0) {
- ALOGW("getInputForAttr() unknown MMAP input %d", *input);
- status = BAD_VALUE;
- goto error;
+ return base::unexpected{Status::fromExceptionCode(
+ EX_ILLEGAL_ARGUMENT,
+ String8::format("%s unknown MMAP input %d", __func__, requestedInput))};
}
sp<AudioInputDescriptor> inputDesc = mInputs.valueAt(index);
RecordClientVector clients = inputDesc->getClientsForSession(session);
if (clients.size() == 0) {
- ALOGW("getInputForAttr() unknown session %d on input %d", session, *input);
- status = BAD_VALUE;
- goto error;
+ return base::unexpected{Status::fromExceptionCode(
+ EX_ILLEGAL_ARGUMENT, String8::format("%s unknown session %d on input %d",
+ __func__, session, requestedInput))};
}
// For MMAP mode, the first call to getInputForAttr() is made on behalf of audioflinger.
// The second call is for the first active client and sets the UID. Any further call
@@ -2972,146 +3003,146 @@
continue;
}
if (uid != client->uid() && !client->isSilenced()) {
- ALOGW("getInputForAttr() bad uid %d for client %d uid %d",
- uid, client->portId(), client->uid());
- status = INVALID_OPERATION;
- goto error;
+ return base::unexpected{Status::fromExceptionCode(
+ EX_ILLEGAL_STATE,
+ String8::format("%s bad uid %d for client %d uid %d", __func__, uid,
+ client->portId(), client->uid()))};
}
}
}
- *inputType = API_INPUT_LEGACY;
device = inputDesc->getDevice();
-
- ALOGV("%s reusing MMAP input %d for session %d", __FUNCTION__, *input, session);
- goto exit;
- }
-
- *input = AUDIO_IO_HANDLE_NONE;
- *inputType = API_INPUT_INVALID;
-
- if (attributes.source == AUDIO_SOURCE_REMOTE_SUBMIX &&
- extractAddressFromAudioAttributes(attributes).has_value()) {
- status = mPolicyMixes.getInputMixForAttr(attributes, &policyMix);
- if (status != NO_ERROR) {
- ALOGW("%s could not find input mix for attr %s",
- __func__, toString(attributes).c_str());
- goto error;
- }
- device = mAvailableInputDevices.getDevice(AUDIO_DEVICE_IN_REMOTE_SUBMIX,
- String8(attr->tags + strlen("addr=")),
- AUDIO_FORMAT_DEFAULT);
- if (device == nullptr) {
- ALOGW("%s could not find in Remote Submix device for source %d, tags %s",
- __func__, attributes.source, attributes.tags);
- status = BAD_VALUE;
- goto error;
- }
-
- if (is_mix_loopback_render(policyMix->mRouteFlags)) {
- *inputType = API_INPUT_MIX_PUBLIC_CAPTURE_PLAYBACK;
- } else {
- *inputType = API_INPUT_MIX_EXT_POLICY_REROUTE;
- }
- if (virtualDeviceId) {
- *virtualDeviceId = policyMix->mVirtualDeviceId;
- }
+ ALOGV("%s reusing MMAP input %d for session %d", __FUNCTION__, requestedInput, session);
} else {
- if (explicitRoutingDevice != nullptr) {
- device = explicitRoutingDevice;
+ if (attributes.source == AUDIO_SOURCE_REMOTE_SUBMIX &&
+ extractAddressFromAudioAttributes(attributes).has_value()) {
+ status_t status = mPolicyMixes.getInputMixForAttr(attributes, &policyMix);
+ if (status != NO_ERROR) {
+ ALOGW("%s could not find input mix for attr %s",
+ __func__, toString(attributes).c_str());
+ return base::unexpected {aidl_utils::binderStatusFromStatusT(status)};
+ }
+ device = mAvailableInputDevices.getDevice(AUDIO_DEVICE_IN_REMOTE_SUBMIX,
+ String8(attributes.tags + strlen("addr=")),
+ AUDIO_FORMAT_DEFAULT);
+ if (device == nullptr) {
+ return base::unexpected{Status::fromExceptionCode(
+ EX_ILLEGAL_ARGUMENT,
+ String8::format(
+ "%s could not find in Remote Submix device for source %d, tags %s",
+ __func__, attributes.source, attributes.tags))};
+ }
+
+ if (is_mix_loopback_render(policyMix->mRouteFlags)) {
+ permReq.mixType = MixType::PUBLIC_CAPTURE_PLAYBACK;
+ } else {
+ permReq.mixType = MixType::EXT_POLICY_REROUTE;
+ }
+ // TODO: confirm the policy mix's virtual device id is the right one to report
+ // for the remote-submix reroute permission check
+ permReq.virtualDeviceId = policyMix->mVirtualDeviceId;
} else {
- // Prevent from storing invalid requested device id in clients
- requestedDeviceId = AUDIO_PORT_HANDLE_NONE;
- device = mEngine->getInputDeviceForAttributes(attributes, uid, session, &policyMix);
- ALOGV_IF(device != nullptr, "%s found device type is 0x%X",
- __FUNCTION__, device->type());
- }
- if (device == nullptr) {
- ALOGW("getInputForAttr() could not find device for source %d", attributes.source);
- status = BAD_VALUE;
- goto error;
- }
- if (device->type() == AUDIO_DEVICE_IN_ECHO_REFERENCE) {
- *inputType = API_INPUT_MIX_CAPTURE;
- } else if (policyMix) {
- ALOG_ASSERT(policyMix->mMixType == MIX_TYPE_RECORDERS, "Invalid Mix Type");
- // there is an external policy, but this input is attached to a mix of recorders,
- // meaning it receives audio injected into the framework, so the recorder doesn't
- // know about it and is therefore considered "legacy"
- *inputType = API_INPUT_LEGACY;
-
- if (virtualDeviceId) {
- *virtualDeviceId = policyMix->mVirtualDeviceId;
+ if (explicitRoutingDevice != nullptr) {
+ device = explicitRoutingDevice;
+ } else {
+ // Prevent from storing invalid requested device id in clients
+ requestedDeviceId = AUDIO_PORT_HANDLE_NONE;
+ device = mEngine->getInputDeviceForAttributes(attributes, uid, session, &policyMix);
+ ALOGV_IF(device != nullptr, "%s found device type is 0x%X",
+ __FUNCTION__, device->type());
}
- } else if (audio_is_remote_submix_device(device->type())) {
- *inputType = API_INPUT_MIX_CAPTURE;
- } else if (device->type() == AUDIO_DEVICE_IN_TELEPHONY_RX) {
- *inputType = API_INPUT_TELEPHONY_RX;
- } else {
- *inputType = API_INPUT_LEGACY;
+ if (device == nullptr) {
+ return base::unexpected{Status::fromExceptionCode(
+ EX_ILLEGAL_ARGUMENT,
+ String8::format("%s could not find device for source %d", __func__,
+ attributes.source))};
+ }
+ if (device->type() == AUDIO_DEVICE_IN_ECHO_REFERENCE) {
+ permReq.mixType = MixType::CAPTURE;
+ } else if (policyMix) {
+ ALOG_ASSERT(policyMix->mMixType == MIX_TYPE_RECORDERS, "Invalid Mix Type");
+ // there is an external policy, but this input is attached to a mix of recorders,
+ // meaning it receives audio injected into the framework, so the recorder doesn't
+ // know about it and is therefore considered "legacy"
+ permReq.mixType = MixType::NONE;
+ permReq.virtualDeviceId = policyMix->mVirtualDeviceId;
+ } else if (audio_is_remote_submix_device(device->type())) {
+ permReq.mixType = MixType::CAPTURE;
+ } else if (device->type() == AUDIO_DEVICE_IN_TELEPHONY_RX) {
+ permReq.mixType = MixType::TELEPHONY_RX_CAPTURE;
+ } else {
+ permReq.mixType = MixType::NONE;
+ }
}
+ auto permRes = mpClientInterface->checkPermissionForInput(attributionSource, permReq);
+ if (!permRes.has_value()) return base::unexpected {permRes.error()};
+ if (!permRes.value()) {
+ return base::unexpected{Status::fromExceptionCode(
+ EX_SECURITY, String8::format("%s: %s missing perms for source %d mix %d vdi %d "
+ "hotword? %d callredir? %d", __func__, attributionSource.toString().c_str(),
+ static_cast<int>(permReq.source),
+ static_cast<int>(permReq.mixType),
+ permReq.virtualDeviceId,
+ permReq.isHotword,
+ permReq.isCallRedir))};
+ }
+
+ input = getInputForDevice(device, session, attributes, config, flags, policyMix);
+ if (input == AUDIO_IO_HANDLE_NONE) {
+ AudioProfileVector profiles;
+ status_t ret = getProfilesForDevices(
+ DeviceVector(device), profiles, flags, true /*isInput*/);
+ if (ret == NO_ERROR && !profiles.empty()) {
+ const auto channels = profiles[0]->getChannels();
+ if (!channels.empty() && (channels.find(config.channel_mask) == channels.end())) {
+ config.channel_mask = *channels.begin();
+ }
+ const auto sampleRates = profiles[0]->getSampleRates();
+ if (!sampleRates.empty() &&
+ (sampleRates.find(config.sample_rate) == sampleRates.end())) {
+ config.sample_rate = *sampleRates.begin();
+ }
+ config.format = profiles[0]->getFormat();
+ }
+ const auto suggestedConfig = VALUE_OR_FATAL(
+ legacy2aidl_audio_config_base_t_AudioConfigBase(config, true /*isInput*/));
+ return base::unexpected {suggestedConfig};
+ }
}
- *input = getInputForDevice(device, session, attributes, config, flags, policyMix);
- if (*input == AUDIO_IO_HANDLE_NONE) {
- status = INVALID_OPERATION;
- AudioProfileVector profiles;
- status_t ret = getProfilesForDevices(
- DeviceVector(device), profiles, flags, true /*isInput*/);
- if (ret == NO_ERROR && !profiles.empty()) {
- const auto channels = profiles[0]->getChannels();
- if (!channels.empty() && (channels.find(config->channel_mask) == channels.end())) {
- config->channel_mask = *channels.begin();
- }
- const auto sampleRates = profiles[0]->getSampleRates();
- if (!sampleRates.empty() &&
- (sampleRates.find(config->sample_rate) == sampleRates.end())) {
- config->sample_rate = *sampleRates.begin();
- }
- config->format = profiles[0]->getFormat();
- }
- goto error;
- }
-
-
- if (policyMix != nullptr && virtualDeviceId != nullptr) {
- *virtualDeviceId = policyMix->mVirtualDeviceId;
- }
-
-exit:
-
- *selectedDeviceId = mAvailableInputDevices.contains(device) ?
+ auto selectedDeviceId = mAvailableInputDevices.contains(device) ?
device->getId() : AUDIO_PORT_HANDLE_NONE;
isSoundTrigger = attributes.source == AUDIO_SOURCE_HOTWORD &&
mSoundTriggerSessions.indexOfKey(session) >= 0;
- *portId = PolicyAudioPort::getNextUniqueId();
- clientDesc = new RecordClientDescriptor(*portId, riid, uid, session, attributes, *config,
+ const auto allocatedPortId = PolicyAudioPort::getNextUniqueId();
+
+ clientDesc = new RecordClientDescriptor(allocatedPortId, riid, uid, session, attributes, config,
requestedDeviceId, attributes.source, flags,
isSoundTrigger);
- inputDesc = mInputs.valueFor(*input);
+ inputDesc = mInputs.valueFor(input);
// Move (if found) effect for the client session to its input
- mEffects.moveEffectsForIo(session, *input, &mInputs, mpClientInterface);
+ mEffects.moveEffectsForIo(session, input, &mInputs, mpClientInterface);
inputDesc->addClient(clientDesc);
- ALOGV("getInputForAttr() returns input %d type %d selectedDeviceId %d for port ID %d",
- *input, *inputType, *selectedDeviceId, *portId);
+ ALOGV("getInputForAttr() returns input %d selectedDeviceId %d vdi %d for port ID %d",
+ input, selectedDeviceId, permReq.virtualDeviceId, allocatedPortId);
- return NO_ERROR;
-
-error:
- return status;
+ auto ret = media::GetInputForAttrResponse {};
+ ret.input = input;
+ ret.selectedDeviceId = selectedDeviceId;
+ ret.portId = allocatedPortId;
+ ret.virtualDeviceId = permReq.virtualDeviceId;
+ ret.config = legacy2aidl_audio_config_base_t_AudioConfigBase(config, true /*isInput*/).value();
+ return ret;
}
-
-audio_io_handle_t AudioPolicyManager::getInputForDevice(const sp<DeviceDescriptor> &device,
+audio_io_handle_t AudioPolicyManager::getInputForDevice(const sp<DeviceDescriptor>& device,
audio_session_t session,
- const audio_attributes_t &attributes,
- audio_config_base_t *config,
+ const audio_attributes_t& attributes,
+ const audio_config_base_t& config,
audio_input_flags_t flags,
- const sp<AudioPolicyMix> &policyMix)
-{
+ const sp<AudioPolicyMix>& policyMix) {
audio_io_handle_t input = AUDIO_IO_HANDLE_NONE;
audio_source_t halInputSource = attributes.source;
bool isSoundTrigger = false;
@@ -3127,7 +3158,7 @@
halInputSource = AUDIO_SOURCE_VOICE_RECOGNITION;
}
} else if (attributes.source == AUDIO_SOURCE_VOICE_COMMUNICATION &&
- audio_is_linear_pcm(config->format)) {
+ audio_is_linear_pcm(config.format)) {
flags = (audio_input_flags_t)(flags | AUDIO_INPUT_FLAG_VOIP_TX);
}
@@ -3136,10 +3167,10 @@
}
// sampling rate and flags may be updated by getInputProfile
- uint32_t profileSamplingRate = (config->sample_rate == 0) ?
- SAMPLE_RATE_HZ_DEFAULT : config->sample_rate;
- audio_format_t profileFormat = config->format;
- audio_channel_mask_t profileChannelMask = config->channel_mask;
+ uint32_t profileSamplingRate = (config.sample_rate == 0) ?
+ SAMPLE_RATE_HZ_DEFAULT : config.sample_rate;
+ audio_format_t profileFormat = config.format;
+ audio_channel_mask_t profileChannelMask = config.channel_mask;
audio_input_flags_t profileFlags = flags;
// find a compatible input profile (not necessarily identical in parameters)
sp<IOProfile> profile = getInputProfile(
@@ -3149,7 +3180,7 @@
}
// Pick input sampling rate if not specified by client
- uint32_t samplingRate = config->sample_rate;
+ uint32_t samplingRate = config.sample_rate;
if (samplingRate == 0) {
samplingRate = profileSamplingRate;
}
@@ -3405,7 +3436,7 @@
sp<AudioInputDescriptor> inputDesc = mInputs.getInputForClient(portId);
if (inputDesc == 0) {
ALOGW("%s no input for client %d", __FUNCTION__, portId);
- return BAD_VALUE;
+ return DEAD_OBJECT;
}
audio_io_handle_t input = inputDesc->mIoHandle;
sp<RecordClientDescriptor> client = inputDesc->getClient(portId);
@@ -3545,19 +3576,46 @@
bool enabled,
audio_stream_type_t streamToDriveAbs)
{
- if (!enabled) {
- mAbsoluteVolumeDrivingStreams.erase(deviceType);
- return NO_ERROR;
- }
+ ALOGI("%s: deviceType 0x%X, enabled %d, streamToDriveAbs %d", __func__, deviceType, enabled,
+ streamToDriveAbs);
+ bool changed = false;
audio_attributes_t attributesToDriveAbs = mEngine->getAttributesForStreamType(streamToDriveAbs);
- if (attributesToDriveAbs == AUDIO_ATTRIBUTES_INITIALIZER) {
- ALOGW("%s: no attributes for stream %s, bailing out", __func__,
- toString(streamToDriveAbs).c_str());
- return BAD_VALUE;
+ if (enabled) {
+ if (attributesToDriveAbs == AUDIO_ATTRIBUTES_INITIALIZER) {
+ ALOGW("%s: no attributes for stream %s, bailing out", __func__,
+ toString(streamToDriveAbs).c_str());
+ return BAD_VALUE;
+ }
+
+ const auto attrIt = mAbsoluteVolumeDrivingStreams.find(deviceType);
+ if (attrIt == mAbsoluteVolumeDrivingStreams.end() ||
+ (attrIt->second.usage != attributesToDriveAbs.usage ||
+ attrIt->second.content_type != attributesToDriveAbs.content_type ||
+ attrIt->second.flags != attributesToDriveAbs.flags)) {
+ mAbsoluteVolumeDrivingStreams[deviceType] = attributesToDriveAbs;
+ changed = true;
+ }
+ } else {
+ if (mAbsoluteVolumeDrivingStreams.erase(deviceType) != 0) {
+ changed = true;
+ }
}
- mAbsoluteVolumeDrivingStreams[deviceType] = attributesToDriveAbs;
+ const DeviceVector devices = mEngine->getOutputDevicesForAttributes(
+ attributesToDriveAbs, nullptr /* preferredDevice */, true /* fromCache */);
+ changed &= devices.types().contains(deviceType);
+ // if something changed on the output device for the changed attributes, apply the stream
+ // volumes regarding the new absolute mode to all the outputs without any delay
+ if (changed) {
+ for (size_t i = 0; i < mOutputs.size(); i++) {
+ sp<SwAudioOutputDescriptor> desc = mOutputs.valueAt(i);
+ ALOGI("%s: apply stream volumes for portId %d and device type %d", __func__,
+ desc->getId(), deviceType);
+ applyStreamVolumes(desc, {deviceType});
+ }
+ }
+
return NO_ERROR;
}
@@ -3581,6 +3639,7 @@
status_t AudioPolicyManager::setStreamVolumeIndex(audio_stream_type_t stream,
int index,
+ bool muted,
audio_devices_t device)
{
auto attributes = mEngine->getAttributesForStreamType(stream);
@@ -3590,7 +3649,7 @@
}
ALOGV("%s: stream %s attributes=%s, index %d , device 0x%X", __func__,
toString(stream).c_str(), toString(attributes).c_str(), index, device);
- return setVolumeIndexForAttributes(attributes, index, device);
+ return setVolumeIndexForAttributes(attributes, index, muted, device);
}
status_t AudioPolicyManager::getStreamVolumeIndex(audio_stream_type_t stream,
@@ -3609,6 +3668,7 @@
status_t AudioPolicyManager::setVolumeIndexForAttributes(const audio_attributes_t &attributes,
int index,
+ bool muted,
audio_devices_t device)
{
// Get Volume group matching the Audio Attributes
@@ -3633,7 +3693,8 @@
toVolumeSource(AUDIO_STREAM_VOICE_CALL, false) : vs;
product_strategy_t strategy = mEngine->getProductStrategyForAttributes(attributes);
- status = setVolumeCurveIndex(index, device, curves);
+
+ status = setVolumeCurveIndex(index, muted, device, curves);
if (status != NO_ERROR) {
ALOGE("%s failed to set curve index for group %d device 0x%X", __func__, group, device);
return status;
@@ -3695,8 +3756,9 @@
// HW Gain management, do not change the volume
if (desc->useHwGain()) {
applyVolume = false;
+ bool swMute = com_android_media_audio_ring_my_car() ? curves.isMuted() : (index == 0);
// If the volume source is active with higher priority source, ensure at least Sw Muted
- desc->setSwMute((index == 0), vs, curves.getStreamTypes(), curDevices, 0 /*delayMs*/);
+ desc->setSwMute(swMute, vs, curves.getStreamTypes(), curDevices, 0 /*delayMs*/);
for (const auto &productStrategy : mEngine->getOrderedProductStrategies()) {
auto activeClients = desc->clientsList(true /*activeOnly*/, productStrategy,
false /*preferredDevice*/);
@@ -3734,8 +3796,7 @@
//FIXME: workaround for truncated touch sounds
// delayed volume change for system stream to be removed when the problem is
// handled by system UI
- status_t volStatus = checkAndSetVolume(
- curves, vs, index, desc, curDevices,
+ status_t volStatus = checkAndSetVolume(curves, vs, index, desc, curDevices,
((vs == toVolumeSource(AUDIO_STREAM_SYSTEM, false))?
TOUCH_SOUND_FIXED_DELAY_MS : 0));
if (volStatus != NO_ERROR) {
@@ -3753,7 +3814,7 @@
if (isVolumeConsistentForCalls(vs, {mCallRxSourceClient->sinkDevice()->type()},
isVoiceVolSrc, isBtScoVolSrc, __func__)
&& (isVoiceVolSrc || isBtScoVolSrc)) {
- bool voiceVolumeManagedByHost = isVoiceVolSrc &&
+ bool voiceVolumeManagedByHost = !isBtScoVolSrc &&
!audio_is_ble_out_device(mCallRxSourceClient->sinkDevice()->type());
setVoiceVolume(index, curves, voiceVolumeManagedByHost, 0);
}
@@ -3764,6 +3825,7 @@
}
status_t AudioPolicyManager::setVolumeCurveIndex(int index,
+ bool muted,
audio_devices_t device,
IVolumeCurves &volumeCurves)
{
@@ -3783,8 +3845,9 @@
// Force max volume if stream cannot be muted
if (!volumeCurves.canBeMuted()) index = volumeCurves.getVolumeIndexMax();
- ALOGV("%s device %08x, index %d", __FUNCTION__ , device, index);
+ ALOGV("%s device %08x, index %d, muted %d", __FUNCTION__ , device, index, muted);
volumeCurves.addCurrentVolumeIndex(device, index);
+ volumeCurves.setIsMuted(muted);
return NO_ERROR;
}
@@ -4533,6 +4596,9 @@
"Engine could not set preferred devices %s for audio source %d role %d",
dumpAudioDeviceTypeAddrVector(devices).c_str(), audioSource, role);
+ if (status == NO_ERROR) {
+ updateInputRouting();
+ }
return status;
}
@@ -4699,6 +4765,18 @@
dumpDeviceTypes({it.first}).c_str(),
mEngine->getVolumeGroupForAttributes(it.second));
}
+
+ // dump mmap policy by device
+ dst->appendFormat("\nMmap policy:\n");
+ for (const auto& [policyType, policyByDevice] : mMmapPolicyByDeviceType) {
+ std::stringstream ss;
+ ss << '{';
+ for (const auto& [deviceType, policy] : policyByDevice) {
+ ss << deviceType.toString() << ":" << toString(policy) << " ";
+ }
+ ss << '}';
+ dst->appendFormat(" - %s: %s\n", toString(policyType).c_str(), ss.str().c_str());
+ }
}
status_t AudioPolicyManager::dump(int fd)
@@ -4864,6 +4942,17 @@
flags = (audio_output_flags_t)((flags & relevantFlags) | AUDIO_OUTPUT_FLAG_DIRECT);
DeviceVector engineOutputDevices = mEngine->getOutputDevicesForAttributes(*attr);
+ if (std::any_of(engineOutputDevices.begin(), engineOutputDevices.end(),
+ [this, attr](sp<DeviceDescriptor> device) {
+ return getPreferredMixerAttributesInfo(
+ device->getId(),
+ mEngine->getProductStrategyForAttributes(*attr),
+ true /*activeBitPerfectPreferred*/) != nullptr;
+ })) {
+ // Bit-perfect playback is active on one of the selected devices, direct output will
+ // be rejected at this instant.
+ return AUDIO_DIRECT_NOT_SUPPORTED;
+ }
for (const auto& hwModule : mHwModules) {
DeviceVector outputDevices = engineOutputDevices;
// the MSD module checks for different conditions and output devices
@@ -4992,8 +5081,7 @@
nullptr /*updatedFormat*/,
mixerAttributes->config.channel_mask,
nullptr /*updatedChannelMask*/,
- flags,
- false /*exactMatchRequiredForInputFlags*/)
+ flags)
!= IOProfile::NO_MATCH) {
profile = curProfile;
break;
@@ -5547,14 +5635,14 @@
: audio_channel_mask_in_to_out(sourceMask);
config.format = sourceDesc->config().format;
audio_output_flags_t flags = AUDIO_OUTPUT_FLAG_NONE;
- audio_port_handle_t selectedDeviceId = AUDIO_PORT_HANDLE_NONE;
+ DeviceIdVector selectedDeviceIds;
bool isRequestedDeviceForExclusiveUse = false;
output_type_t outputType;
bool isSpatialized;
bool isBitPerfect;
getOutputForAttrInt(&resultAttr, &output, AUDIO_SESSION_NONE, &attributes,
&stream, sourceDesc->uid(), &config, &flags,
- &selectedDeviceId, &isRequestedDeviceForExclusiveUse,
+ &selectedDeviceIds, &isRequestedDeviceForExclusiveUse,
nullptr, &outputType, &isSpatialized, &isBitPerfect);
if (output == AUDIO_IO_HANDLE_NONE) {
ALOGV("%s no output for device %s",
@@ -7764,7 +7852,8 @@
}
// Honor explicit routing requests only if no client using default routing is active on this
- // input: a specific app can not force routing for other apps by setting a preferred device.
+ // input or if all active clients are from the same app: a specific app can not force routing
+ // for other apps by setting a preferred device.
bool active;
device = findPreferredDevice(inputDesc, AUDIO_SOURCE_DEFAULT, active, mAvailableInputDevices);
if (device != nullptr) {
@@ -7879,7 +7968,8 @@
}
for (size_t i = 0; i < mOutputs.size(); i++) {
sp<SwAudioOutputDescriptor> desc = mOutputs.valueAt(i);
- setVolumeSourceMute(ttsVolumeSource, mute/*on*/, desc, 0 /*delay*/, DeviceTypeSet());
+ setVolumeSourceMutedInternally(ttsVolumeSource, mute/*on*/, desc, 0 /*delay*/,
+ DeviceTypeSet());
const uint32_t latency = desc->latency() * 2;
if (desc->isActive(latency * 2) && latency > maxLatency) {
maxLatency = latency;
@@ -7973,9 +8063,10 @@
for (const auto &activeVs : outputDesc->getActiveVolumeSources()) {
// make sure that we do not start the temporary mute period too early in case of
// delayed device change
- setVolumeSourceMute(activeVs, true, outputDesc, delayMs);
- setVolumeSourceMute(activeVs, false, outputDesc, delayMs + tempMuteDurationMs,
- devices.types());
+ setVolumeSourceMutedInternally(activeVs, true, outputDesc, delayMs);
+ setVolumeSourceMutedInternally(activeVs, false, outputDesc,
+ delayMs + tempMuteDurationMs,
+ devices.types());
}
}
@@ -8201,7 +8292,10 @@
const underlying_input_flag_t oriFlags = flags;
for (;;) {
- sp<IOProfile> firstInexact = nullptr;
+ sp<IOProfile> inexact = nullptr;
+ uint32_t inexactSamplingRate = 0;
+ audio_format_t inexactFormat = AUDIO_FORMAT_INVALID;
+ audio_channel_mask_t inexactChannelMask = AUDIO_CHANNEL_INVALID;
uint32_t updatedSamplingRate = 0;
audio_format_t updatedFormat = AUDIO_FORMAT_INVALID;
audio_channel_mask_t updatedChannelMask = AUDIO_CHANNEL_INVALID;
@@ -8209,7 +8303,7 @@
for (const auto& profile : hwModule->getInputProfiles()) {
// profile->log();
//updatedFormat = format;
- if (profile->getCompatibilityScore(
+ auto compatibleScore = profile->getCompatibilityScore(
DeviceVector(device),
samplingRate,
&updatedSamplingRate,
@@ -8218,36 +8312,28 @@
channelMask,
&updatedChannelMask,
// FIXME ugly cast
- (audio_output_flags_t) flags,
- true /*exactMatchRequiredForInputFlags*/) == IOProfile::EXACT_MATCH) {
+ (audio_output_flags_t) flags);
+ if (compatibleScore == IOProfile::EXACT_MATCH) {
samplingRate = updatedSamplingRate;
format = updatedFormat;
channelMask = updatedChannelMask;
return profile;
- }
- if (firstInexact == nullptr
- && profile->getCompatibilityScore(
- DeviceVector(device),
- samplingRate,
- &updatedSamplingRate,
- format,
- &updatedFormat,
- channelMask,
- &updatedChannelMask,
- // FIXME ugly cast
- (audio_output_flags_t) flags,
- false /*exactMatchRequiredForInputFlags*/)
- != IOProfile::NO_MATCH) {
- firstInexact = profile;
+ } else if ((flags != AUDIO_INPUT_FLAG_NONE
+ && compatibleScore == IOProfile::PARTIAL_MATCH_WITH_FLAG)
+ || (inexact == nullptr && compatibleScore != IOProfile::NO_MATCH)) {
+ inexact = profile;
+ inexactSamplingRate = updatedSamplingRate;
+ inexactFormat = updatedFormat;
+ inexactChannelMask = updatedChannelMask;
}
}
}
- if (firstInexact != nullptr) {
- samplingRate = updatedSamplingRate;
- format = updatedFormat;
- channelMask = updatedChannelMask;
- return firstInexact;
+ if (inexact != nullptr) {
+ samplingRate = inexactSamplingRate;
+ format = inexactFormat;
+ channelMask = inexactChannelMask;
+ return inexact;
} else if (flags & AUDIO_INPUT_FLAG_RAW) {
flags = (audio_input_flags_t) (flags & ~AUDIO_INPUT_FLAG_RAW); // retry
} else if ((flags & mustMatchFlag) == AUDIO_INPUT_FLAG_NONE &&
@@ -8274,9 +8360,9 @@
float volumeDb = curves.volIndexToDb(deviceCategory, index);
if (com_android_media_audio_abs_volume_index_fix()) {
- if (mAbsoluteVolumeDrivingStreams.find(volumeDevice) !=
- mAbsoluteVolumeDrivingStreams.end()) {
- audio_attributes_t attributesToDriveAbs = mAbsoluteVolumeDrivingStreams[volumeDevice];
+ const auto it = mAbsoluteVolumeDrivingStreams.find(volumeDevice);
+ if (it != mAbsoluteVolumeDrivingStreams.end()) {
+ audio_attributes_t attributesToDriveAbs = it->second;
auto groupToDriveAbs = mEngine->getVolumeGroupForAttributes(attributesToDriveAbs);
if (groupToDriveAbs == VOLUME_GROUP_NONE) {
ALOGD("%s: no group matching with %s", __FUNCTION__,
@@ -8288,7 +8374,9 @@
VolumeSource vsToDriveAbs = toVolumeSource(groupToDriveAbs);
if (vsToDriveAbs == volumeSource) {
// attenuation is applied by the abs volume controller
- return (index != 0) ? volumeDbMax : volumeDb;
+ // do not mute LE broadcast to allow the secondary device to continue playing
+ return (index != 0 || volumeDevice == AUDIO_DEVICE_OUT_BLE_BROADCAST) ? volumeDbMax
+ : volumeDb;
} else {
IVolumeCurves &curvesAbs = getVolumeCurves(vsToDriveAbs);
int indexAbs = curvesAbs.getVolumeIndex({volumeDevice});
@@ -8482,7 +8570,7 @@
static std::set<IVolumeCurves*> invalidCurvesReported;
// do not change actual attributes volume if the attributes is muted
- if (outputDesc->isMuted(volumeSource)) {
+ if (!com_android_media_audio_ring_my_car() && outputDesc->isMutedInternally(volumeSource)) {
ALOGVV("%s: volume source %d muted count %d active=%d", __func__, volumeSource,
outputDesc->getMuteCount(volumeSource), outputDesc->isActive(volumeSource));
return NO_ERROR;
@@ -8515,19 +8603,27 @@
}
float volumeDb = computeVolume(curves, volumeSource, index, deviceTypes);
+ const VolumeSource dtmfVolSrc = toVolumeSource(AUDIO_STREAM_DTMF, false);
if (outputDesc->isFixedVolume(deviceTypes) ||
// Force VoIP volume to max for bluetooth SCO/BLE device except if muted
- (index != 0 && (isVoiceVolSrc || isBtScoVolSrc) &&
+ (index != 0 && (isVoiceVolSrc || isBtScoVolSrc
+ || (isInCall() && (dtmfVolSrc == volumeSource))) &&
(isSingleDeviceType(deviceTypes, audio_is_bluetooth_out_sco_device)
|| isSingleDeviceType(deviceTypes, audio_is_ble_out_device)))) {
volumeDb = 0.0f;
}
- const bool muted = (index == 0) && (volumeDb != 0.0f);
+
+ bool muted;
+ if (!com_android_media_audio_ring_my_car()) {
+ muted = (index == 0) && (volumeDb != 0.0f);
+ } else {
+ muted = curves.isMuted();
+ }
outputDesc->setVolume(volumeDb, muted, volumeSource, curves.getStreamTypes(),
deviceTypes, delayMs, force, isVoiceVolSrc);
if (outputDesc == mPrimaryOutput && (isVoiceVolSrc || isBtScoVolSrc)) {
- bool voiceVolumeManagedByHost = isVoiceVolSrc &&
+ bool voiceVolumeManagedByHost = !isBtScoVolSrc &&
!isSingleDeviceType(deviceTypes, audio_is_ble_out_device);
setVoiceVolume(index, curves, voiceVolumeManagedByHost, delayMs);
}
@@ -8537,6 +8633,11 @@
void AudioPolicyManager::setVoiceVolume(
int index, IVolumeCurves &curves, bool voiceVolumeManagedByHost, int delayMs) {
float voiceVolume;
+
+ if (com_android_media_audio_ring_my_car() && curves.isMuted()) {
+ index = 0;
+ }
+
// Force voice volume to max or mute for Bluetooth SCO/BLE as other attenuations are managed
// by the headset
if (voiceVolumeManagedByHost) {
@@ -8562,7 +8663,6 @@
const bool isHAUsed = isHearingAidUsedForComm();
if (com_android_media_audio_replace_stream_bt_sco()) {
- ALOGV("%s stream bt sco is replaced, no volume consistency check for calls", __func__);
isBtScoVolSrc = (volumeSource != VOLUME_SOURCE_NONE) && (callVolSrc == volumeSource) &&
(isScoRequested || isHAUsed);
return true;
@@ -8590,8 +8690,7 @@
ALOGVV("applyStreamVolumes() for device %s", dumpDeviceTypes(deviceTypes).c_str());
for (const auto &volumeGroup : mEngine->getVolumeGroups()) {
auto &curves = getVolumeCurves(toVolumeSource(volumeGroup));
- checkAndSetVolume(curves, toVolumeSource(volumeGroup),
- curves.getVolumeIndex(deviceTypes),
+ checkAndSetVolume(curves, toVolumeSource(volumeGroup), curves.getVolumeIndex(deviceTypes),
outputDesc, deviceTypes, delayMs, force);
}
}
@@ -8614,23 +8713,23 @@
}
}
for (auto source : sourcesToMute) {
- setVolumeSourceMute(source, on, outputDesc, delayMs, deviceTypes);
+ setVolumeSourceMutedInternally(source, on, outputDesc, delayMs, deviceTypes);
}
}
-void AudioPolicyManager::setVolumeSourceMute(VolumeSource volumeSource,
- bool on,
- const sp<AudioOutputDescriptor>& outputDesc,
- int delayMs,
- DeviceTypeSet deviceTypes)
+void AudioPolicyManager::setVolumeSourceMutedInternally(VolumeSource volumeSource,
+ bool on,
+ const sp<AudioOutputDescriptor>& outputDesc,
+ int delayMs,
+ DeviceTypeSet deviceTypes)
{
if (deviceTypes.empty()) {
deviceTypes = outputDesc->devices().types();
}
auto &curves = getVolumeCurves(volumeSource);
if (on) {
- if (!outputDesc->isMuted(volumeSource)) {
+ if (!outputDesc->isMutedInternally(volumeSource)) {
if (curves.canBeMuted() &&
(volumeSource != toVolumeSource(AUDIO_STREAM_ENFORCED_AUDIBLE, false) ||
(mEngine->getForceUse(AUDIO_POLICY_FORCE_FOR_SYSTEM) ==
@@ -8642,7 +8741,7 @@
// ignored
outputDesc->incMuteCount(volumeSource);
} else {
- if (!outputDesc->isMuted(volumeSource)) {
+ if (!outputDesc->isMutedInternally(volumeSource)) {
ALOGV("%s unmuting non muted attributes!", __func__);
return;
}
@@ -8658,8 +8757,12 @@
bool AudioPolicyManager::isValidAttributes(const audio_attributes_t *paa)
{
+ if ((paa->flags & AUDIO_FLAG_SCO) != 0) {
+ ALOGW("%s: deprecated use of AUDIO_FLAG_SCO in attributes flags %d", __func__, paa->flags);
+ }
+
// has flags that map to a stream type?
- if ((paa->flags & (AUDIO_FLAG_AUDIBILITY_ENFORCED | AUDIO_FLAG_SCO | AUDIO_FLAG_BEACON)) != 0) {
+ if ((paa->flags & (AUDIO_FLAG_AUDIBILITY_ENFORCED | AUDIO_FLAG_BEACON)) != 0) {
return true;
}
@@ -8687,6 +8790,7 @@
case AUDIO_USAGE_SAFETY:
case AUDIO_USAGE_VEHICLE_STATUS:
case AUDIO_USAGE_ANNOUNCEMENT:
+ case AUDIO_USAGE_SPEAKER_CLEANUP:
break;
default:
return false;
@@ -9208,8 +9312,7 @@
: hwModule->getOutputProfiles();
for (const auto& profile : ioProfiles) {
if (!profile->areAllDevicesSupported(devices) ||
- !profile->isCompatibleProfileForFlags(
- flags, false /*exactMatchRequiredForInputFlags*/)) {
+ !profile->isCompatibleProfileForFlags(flags)) {
continue;
}
audioProfiles.addAllValidProfiles(profile->asAudioPort()->getAudioProfiles());
@@ -9335,4 +9438,88 @@
}
}
+status_t AudioPolicyManager::getMmapPolicyInfos(AudioMMapPolicyType policyType,
+ std::vector<AudioMMapPolicyInfo> *policyInfos) {
+ if (policyType != AudioMMapPolicyType::DEFAULT &&
+ policyType != AudioMMapPolicyType::EXCLUSIVE) {
+ return BAD_VALUE;
+ }
+ if (mMmapPolicyByDeviceType.count(policyType) == 0) {
+ if (status_t status = updateMmapPolicyInfos(policyType); status != NO_ERROR) {
+ return status;
+ }
+ }
+ *policyInfos = mMmapPolicyInfos[policyType];
+ return NO_ERROR;
+}
+
+status_t AudioPolicyManager::getMmapPolicyForDevice(
+ AudioMMapPolicyType policyType, AudioMMapPolicyInfo *policyInfo) {
+ if (policyType != AudioMMapPolicyType::DEFAULT &&
+ policyType != AudioMMapPolicyType::EXCLUSIVE) {
+ return BAD_VALUE;
+ }
+ if (mMmapPolicyByDeviceType.count(policyType) == 0) {
+ if (status_t status = updateMmapPolicyInfos(policyType); status != NO_ERROR) {
+ return status;
+ }
+ }
+ auto it = mMmapPolicyByDeviceType[policyType].find(policyInfo->device.type);
+ policyInfo->mmapPolicy = it == mMmapPolicyByDeviceType[policyType].end()
+ ? AudioMMapPolicy::NEVER : it->second;
+ return NO_ERROR;
+}
+
+status_t AudioPolicyManager::updateMmapPolicyInfos(AudioMMapPolicyType policyType) {
+ std::vector<AudioMMapPolicyInfo> policyInfos;
+ if (status_t status = mpClientInterface->getMmapPolicyInfos(policyType, &policyInfos);
+ status != NO_ERROR) {
+ ALOGE("%s, failed, error = %d", __func__, status);
+ return status;
+ }
+ std::map<AudioDeviceDescription, AudioMMapPolicy> mmapPolicyByDeviceType;
+ if (policyInfos.size() == 1 && policyInfos[0].device == AudioDevice()) {
+ // When there is only one AudioMMapPolicyInfo instance and the device is a default value,
+ // it indicates the mmap policy is reported via system property. In that case, use the
+ // routing information to fill details for how mmap is supported for a particular device.
+ for (const auto &hwModule: mHwModules) {
+ for (const auto &profile: hwModule->getInputProfiles()) {
+ if ((profile->getFlags() & AUDIO_INPUT_FLAG_MMAP_NOIRQ)
+ != AUDIO_INPUT_FLAG_MMAP_NOIRQ) {
+ continue;
+ }
+ for (const auto &device: profile->getSupportedDevices()) {
+ auto deviceDesc =
+ legacy2aidl_audio_devices_t_AudioDeviceDescription(device->type());
+ if (deviceDesc.ok()) {
+ mmapPolicyByDeviceType.emplace(
+ deviceDesc.value(), policyInfos[0].mmapPolicy);
+ }
+ }
+ }
+ for (const auto &profile: hwModule->getOutputProfiles()) {
+ if ((profile->getFlags() & AUDIO_OUTPUT_FLAG_MMAP_NOIRQ)
+ != AUDIO_OUTPUT_FLAG_MMAP_NOIRQ) {
+ continue;
+ }
+ for (const auto &device: profile->getSupportedDevices()) {
+ auto deviceDesc =
+ legacy2aidl_audio_devices_t_AudioDeviceDescription(device->type());
+ if (deviceDesc.ok()) {
+ mmapPolicyByDeviceType.emplace(
+ deviceDesc.value(), policyInfos[0].mmapPolicy);
+ }
+ }
+ }
+ }
+ } else {
+ for (const auto &info: policyInfos) {
+ mmapPolicyByDeviceType[info.device.type] = info.mmapPolicy;
+ }
+ }
+ mMmapPolicyByDeviceType.emplace(policyType, mmapPolicyByDeviceType);
+ mMmapPolicyInfos.emplace(policyType, policyInfos);
+ return NO_ERROR;
+}
+
} // namespace android
diff --git a/services/audiopolicy/managerdefault/AudioPolicyManager.h b/services/audiopolicy/managerdefault/AudioPolicyManager.h
index 9ad2ea5..44863ee 100644
--- a/services/audiopolicy/managerdefault/AudioPolicyManager.h
+++ b/services/audiopolicy/managerdefault/AudioPolicyManager.h
@@ -123,27 +123,28 @@
const AttributionSourceState& attributionSource,
audio_config_t *config,
audio_output_flags_t *flags,
- audio_port_handle_t *selectedDeviceId,
+ DeviceIdVector *selectedDeviceIds,
audio_port_handle_t *portId,
std::vector<audio_io_handle_t> *secondaryOutputs,
output_type_t *outputType,
bool *isSpatialized,
bool *isBitPerfect,
- float *volume) override;
+ float *volume,
+ bool *muted) override;
virtual status_t startOutput(audio_port_handle_t portId);
virtual status_t stopOutput(audio_port_handle_t portId);
virtual bool releaseOutput(audio_port_handle_t portId);
- virtual status_t getInputForAttr(const audio_attributes_t *attr,
- audio_io_handle_t *input,
+
+ base::expected<media::GetInputForAttrResponse, std::variant<binder::Status,
+ media::audio::common::AudioConfigBase>>
+ getInputForAttr(audio_attributes_t attributes,
+ audio_io_handle_t requestedInput,
+ audio_port_handle_t requestedDeviceId,
+ audio_config_base_t config,
+ audio_input_flags_t flags,
audio_unique_id_t riid,
audio_session_t session,
- const AttributionSourceState& attributionSource,
- audio_config_base_t *config,
- audio_input_flags_t flags,
- audio_port_handle_t *selectedDeviceId,
- input_type_t *inputType,
- audio_port_handle_t *portId,
- uint32_t *virtualDeviceId);
+ const AttributionSourceState& attributionSource) override;
// indicates to the audio policy manager that the input starts being used.
virtual status_t startInput(audio_port_handle_t portId);
@@ -169,6 +170,7 @@
virtual void initStreamVolume(audio_stream_type_t stream, int indexMin, int indexMax);
virtual status_t setStreamVolumeIndex(audio_stream_type_t stream,
int index,
+ bool muted,
audio_devices_t device);
virtual status_t getStreamVolumeIndex(audio_stream_type_t stream,
int *index,
@@ -176,6 +178,7 @@
virtual status_t setVolumeIndexForAttributes(const audio_attributes_t &attr,
int index,
+ bool muted,
audio_devices_t device);
virtual status_t getVolumeIndexForAttributes(const audio_attributes_t &attr,
int &index,
@@ -185,6 +188,7 @@
virtual status_t getMinVolumeIndexForAttributes(const audio_attributes_t &attr, int &index);
status_t setVolumeCurveIndex(int index,
+ bool muted,
audio_devices_t device,
IVolumeCurves &volumeCurves);
@@ -436,6 +440,13 @@
void onNewAudioModulesAvailable() override;
+ status_t getMmapPolicyInfos(
+ media::audio::common::AudioMMapPolicyType policyType,
+ std::vector<media::audio::common::AudioMMapPolicyInfo> *policyInfos) override;
+ status_t getMmapPolicyForDevice(
+ media::audio::common::AudioMMapPolicyType policyType,
+ media::audio::common::AudioMMapPolicyInfo *policyInfo) override;
+
status_t initialize();
protected:
@@ -650,7 +661,8 @@
DeviceTypeSet deviceTypes = DeviceTypeSet());
/**
- * @brief setVolumeSourceMute Mute or unmute the volume source on the specified output
+ * @brief setVolumeSourceMutedInternally Mute or unmute the volume source on the specified
+ * output
* @param volumeSource to be muted/unmute (may host legacy streams or by extension set of
* audio attributes)
* @param on true to mute, false to umute
@@ -658,11 +670,11 @@
* @param delayMs
* @param device
*/
- void setVolumeSourceMute(VolumeSource volumeSource,
- bool on,
- const sp<AudioOutputDescriptor>& outputDesc,
- int delayMs = 0,
- DeviceTypeSet deviceTypes = DeviceTypeSet());
+ void setVolumeSourceMutedInternally(VolumeSource volumeSource,
+ bool on,
+ const sp<AudioOutputDescriptor>& outputDesc,
+ int delayMs = 0,
+ DeviceTypeSet deviceTypes = DeviceTypeSet());
audio_mode_t getPhoneState();
@@ -881,15 +893,7 @@
return mAvailableInputDevices.getDevicesFromHwModule(
mPrimaryOutput->getModuleHandle());
}
- /**
- * @brief getFirstDeviceId of the Device Vector
- * @return if the collection is not empty, it returns the first device Id,
- * otherwise AUDIO_PORT_HANDLE_NONE
- */
- audio_port_handle_t getFirstDeviceId(const DeviceVector &devices) const
- {
- return (devices.size() > 0) ? devices.itemAt(0)->getId() : AUDIO_PORT_HANDLE_NONE;
- }
+
String8 getFirstDeviceAddress(const DeviceVector &devices) const
{
return (devices.size() > 0) ?
@@ -1130,7 +1134,7 @@
uid_t uid,
audio_config_t *config,
audio_output_flags_t *flags,
- audio_port_handle_t *selectedDeviceId,
+ DeviceIdVector *selectedDeviceIds,
bool *isRequestedDeviceForExclusiveUse,
std::vector<sp<AudioPolicyMix>> *secondaryMixes,
output_type_t *outputType,
@@ -1222,7 +1226,7 @@
audio_io_handle_t getInputForDevice(const sp<DeviceDescriptor> &device,
audio_session_t session,
const audio_attributes_t &attributes,
- audio_config_base_t *config,
+ const audio_config_base_t &config,
audio_input_flags_t flags,
const sp<AudioPolicyMix> &policyMix);
@@ -1408,9 +1412,17 @@
int index,
const DeviceTypeSet &deviceTypes);
+ status_t updateMmapPolicyInfos(media::audio::common::AudioMMapPolicyType policyType);
+
// Contains for devices that support absolute volume the audio attributes
// corresponding to the streams that are driving the volume changes
std::unordered_map<audio_devices_t, audio_attributes_t> mAbsoluteVolumeDrivingStreams;
+
+ std::map<media::audio::common::AudioMMapPolicyType,
+ const std::vector<media::audio::common::AudioMMapPolicyInfo>> mMmapPolicyInfos;
+ std::map<media::audio::common::AudioMMapPolicyType,
+ const std::map<media::audio::common::AudioDeviceDescription,
+ media::audio::common::AudioMMapPolicy>> mMmapPolicyByDeviceType;
};
};
diff --git a/services/audiopolicy/service/Android.bp b/services/audiopolicy/service/Android.bp
index e157808..f415a41 100644
--- a/services/audiopolicy/service/Android.bp
+++ b/services/audiopolicy/service/Android.bp
@@ -11,16 +11,20 @@
cc_defaults {
name: "libaudiopolicyservice_dependencies",
+ defaults: [
+ "latest_android_media_audio_common_types_cpp_shared",
+ ],
+
include_dirs: [
"frameworks/av/services/audiopolicy", // include path outside of libaudiopolicyservice
],
shared_libs: [
+ "android.media.audio-aconfig-cc",
"android.media.audiopolicy-aconfig-cc",
"audio-permission-aidl-cpp",
"audioclient-types-aidl-cpp",
"audioflinger-aidl-cpp",
- "audiopermissioncontroller",
"audiopolicy-aidl-cpp",
"audiopolicy-types-aidl-cpp",
"capture_state_listener-aidl-cpp",
@@ -32,6 +36,7 @@
"libaudioclient_aidl_conversion",
"libaudiofoundation",
"libaudiohal",
+ "libaudiopermission",
"libaudiopolicy",
"libaudiopolicycomponents",
"libaudiopolicymanagerdefault",
@@ -63,7 +68,6 @@
name: "libaudiopolicyservice",
defaults: [
- "latest_android_media_audio_common_types_cpp_shared",
"libaudiopolicyservice_dependencies",
],
@@ -114,6 +118,6 @@
name: "audiopolicyservicelocal_headers",
host_supported: true,
export_include_dirs: ["include"],
- header_libs: ["audiopermissioncontroller_headers"],
- export_header_lib_headers: ["audiopermissioncontroller_headers"],
+ header_libs: ["libaudiopermission_headers"],
+ export_header_lib_headers: ["libaudiopermission_headers"],
}
diff --git a/services/audiopolicy/service/AudioPolicyClientImpl.cpp b/services/audiopolicy/service/AudioPolicyClientImpl.cpp
index 363dfa7..765928e 100644
--- a/services/audiopolicy/service/AudioPolicyClientImpl.cpp
+++ b/services/audiopolicy/service/AudioPolicyClientImpl.cpp
@@ -186,21 +186,19 @@
}
status_t AudioPolicyService::AudioPolicyClient::setStreamVolume(audio_stream_type_t stream,
- float volume, audio_io_handle_t output,
- int delay_ms)
+ float volume, bool muted, audio_io_handle_t output, int delay_ms)
{
- return mAudioPolicyService->setStreamVolume(stream, volume, output,
- delay_ms);
+ return mAudioPolicyService->setStreamVolume(stream, volume, muted, output, delay_ms);
}
status_t AudioPolicyService::AudioPolicyClient::setPortsVolume(
- const std::vector<audio_port_handle_t> &ports, float volume, audio_io_handle_t output,
- int delayMs)
+ const std::vector<audio_port_handle_t> &ports, float volume, bool muted,
+ audio_io_handle_t output, int delayMs)
{
if (ports.empty()) {
return NO_ERROR;
}
- return mAudioPolicyService->setPortsVolume(ports, volume, output, delayMs);
+ return mAudioPolicyService->setPortsVolume(ports, volume, muted, output, delayMs);
}
void AudioPolicyService::AudioPolicyClient::setParameters(audio_io_handle_t io_handle,
@@ -376,4 +374,14 @@
return af->setTracksInternalMute(tracksInternalMute);
}
+status_t AudioPolicyService::AudioPolicyClient::getMmapPolicyInfos(
+ media::audio::common::AudioMMapPolicyType policyType,
+ std::vector<media::audio::common::AudioMMapPolicyInfo> *policyInfos) {
+ sp<IAudioFlinger> af = AudioSystem::get_audio_flinger();
+ if (af == nullptr) {
+ return PERMISSION_DENIED;
+ }
+ return af->getMmapPolicyInfos(policyType, policyInfos);
+}
+
} // namespace android
diff --git a/services/audiopolicy/service/AudioPolicyInterfaceImpl.cpp b/services/audiopolicy/service/AudioPolicyInterfaceImpl.cpp
index 3932a39..12320b7 100644
--- a/services/audiopolicy/service/AudioPolicyInterfaceImpl.cpp
+++ b/services/audiopolicy/service/AudioPolicyInterfaceImpl.cpp
@@ -23,7 +23,10 @@
#include <android/content/AttributionSourceState.h>
#include <android_media_audiopolicy.h>
+#include <android_media_audio.h>
+#include <binder/Enums.h>
#include <com_android_media_audio.h>
+#include <cutils/properties.h>
#include <error/expected_utils.h>
#include <media/AidlConversion.h>
#include <media/AudioPolicy.h>
@@ -49,14 +52,18 @@
#define CHECK_PERM(expr1, expr2) \
VALUE_OR_RETURN_STATUS(getPermissionProvider().checkPermission((expr1), (expr2)))
+#define PROPAGATE_FALSEY(val) do { if (!val.has_value() || !val.value()) return val; } while (0)
+
#define MAX_ITEMS_PER_LIST 1024
namespace android {
namespace audiopolicy_flags = android::media::audiopolicy;
using binder::Status;
using aidl_utils::binderStatusFromStatusT;
+using android::media::audio::concurrent_audio_record_bypass_permission;
using com::android::media::audio::audioserver_permissions;
using com::android::media::permission::NativePermissionController;
+using com::android::media::permission::PermissionEnum;
using com::android::media::permission::PermissionEnum::ACCESS_ULTRASOUND;
using com::android::media::permission::PermissionEnum::CALL_AUDIO_INTERCEPTION;
using com::android::media::permission::PermissionEnum::CAPTURE_AUDIO_HOTWORD;
@@ -70,6 +77,7 @@
using com::android::media::permission::PermissionEnum::MODIFY_PHONE_STATE;
using com::android::media::permission::PermissionEnum::RECORD_AUDIO;
using com::android::media::permission::PermissionEnum::WRITE_SECURE_SETTINGS;
+using com::android::media::permission::PermissionEnum::BYPASS_CONCURRENT_RECORD_AUDIO_RESTRICTION;
using content::AttributionSourceState;
using media::audio::common::AudioConfig;
using media::audio::common::AudioConfigBase;
@@ -367,7 +375,7 @@
const AttributionSourceState& attributionSource,
const AudioConfig& configAidl,
int32_t flagsAidl,
- int32_t selectedDeviceIdAidl,
+ const std::vector<int32_t>& selectedDeviceIdsAidl,
media::GetOutputForAttrResponse* _aidl_return)
{
audio_attributes_t attr = VALUE_OR_RETURN_BINDER_STATUS(
@@ -379,8 +387,9 @@
aidl2legacy_AudioConfig_audio_config_t(configAidl, false /*isInput*/));
audio_output_flags_t flags = VALUE_OR_RETURN_BINDER_STATUS(
aidl2legacy_int32_t_audio_output_flags_t_mask(flagsAidl));
- audio_port_handle_t selectedDeviceId = VALUE_OR_RETURN_BINDER_STATUS(
- aidl2legacy_int32_t_audio_port_handle_t(selectedDeviceIdAidl));
+ DeviceIdVector selectedDeviceIds = VALUE_OR_RETURN_BINDER_STATUS(
+ convertContainer<DeviceIdVector>(selectedDeviceIdsAidl,
+ aidl2legacy_int32_t_audio_port_handle_t));
audio_io_handle_t output;
audio_port_handle_t portId;
@@ -423,21 +432,34 @@
}
}
+ //TODO this permission check should extend to all system usages
+ if (attr.usage == AUDIO_USAGE_SPEAKER_CLEANUP) {
+ if (!(audioserver_permissions() ?
+ CHECK_PERM(MODIFY_AUDIO_ROUTING, attributionSource.uid)
+ : modifyAudioRoutingAllowed())) {
+ ALOGE("%s: permission denied: SPEAKER_CLEANUP not allowed for uid %d pid %d",
+ __func__, attributionSource.uid, attributionSource.pid);
+ return binderStatusFromStatusT(PERMISSION_DENIED);
+ }
+ }
+
AutoCallerClear acc;
AudioPolicyInterface::output_type_t outputType;
bool isSpatialized = false;
bool isBitPerfect = false;
float volume;
+ bool muted;
status_t result = mAudioPolicyManager->getOutputForAttr(&attr, &output, session,
&stream,
attributionSource,
&config,
- &flags, &selectedDeviceId, &portId,
+ &flags, &selectedDeviceIds, &portId,
&secondaryOutputs,
&outputType,
&isSpatialized,
&isBitPerfect,
- &volume);
+ &volume,
+ &muted);
// FIXME: Introduce a way to check for the the telephony device before opening the output
if (result == NO_ERROR) {
@@ -478,20 +500,24 @@
}
if (result == NO_ERROR) {
- attr = VALUE_OR_RETURN_BINDER_STATUS(
- mUsecaseValidator->verifyAudioAttributes(output, attributionSource, attr));
+ // usecase validator is disabled by default
+ if (property_get_bool("ro.audio.usecase_validator_enabled", false /* default */)) {
+ attr = VALUE_OR_RETURN_BINDER_STATUS(
+ mUsecaseValidator->verifyAudioAttributes(output, attributionSource, attr));
+ }
sp<AudioPlaybackClient> client =
new AudioPlaybackClient(attr, output, attributionSource, session,
- portId, selectedDeviceId, stream, isSpatialized, config.channel_mask);
+ portId, selectedDeviceIds, stream, isSpatialized, config.channel_mask);
mAudioPlaybackClients.add(portId, client);
_aidl_return->output = VALUE_OR_RETURN_BINDER_STATUS(
legacy2aidl_audio_io_handle_t_int32_t(output));
_aidl_return->stream = VALUE_OR_RETURN_BINDER_STATUS(
legacy2aidl_audio_stream_type_t_AudioStreamType(stream));
- _aidl_return->selectedDeviceId = VALUE_OR_RETURN_BINDER_STATUS(
- legacy2aidl_audio_port_handle_t_int32_t(selectedDeviceId));
+ _aidl_return->selectedDeviceIds = VALUE_OR_RETURN_BINDER_STATUS(
+ convertContainer<std::vector<int32_t>>(selectedDeviceIds,
+ legacy2aidl_audio_port_handle_t_int32_t));
_aidl_return->portId = VALUE_OR_RETURN_BINDER_STATUS(
legacy2aidl_audio_port_handle_t_int32_t(portId));
_aidl_return->secondaryOutputs = VALUE_OR_RETURN_BINDER_STATUS(
@@ -502,6 +528,7 @@
_aidl_return->attr = VALUE_OR_RETURN_BINDER_STATUS(
legacy2aidl_audio_attributes_t_AudioAttributes(attr));
_aidl_return->volume = volume;
+ _aidl_return->muted = muted;
} else {
_aidl_return->configBase.format = VALUE_OR_RETURN_BINDER_STATUS(
legacy2aidl_audio_format_t_AudioFormatDescription(config.format));
@@ -636,6 +663,137 @@
mAudioPolicyManager->releaseOutput(portId);
}
+// These are sources for which CAPTURE_AUDIO_OUTPUT granted access
+// for legacy reasons, before more specific permissions were deployed.
+// TODO: remove this access
+static bool isLegacyOutputSource(AudioSource source) {
+ switch (source) {
+ case AudioSource::VOICE_CALL:
+ case AudioSource::VOICE_DOWNLINK:
+ case AudioSource::VOICE_UPLINK:
+ case AudioSource::FM_TUNER:
+ return true;
+ default:
+ return false;
+ }
+}
+
+error::BinderResult<bool> AudioPolicyService::AudioPolicyClient::checkPermissionForInput(
+ const AttributionSourceState& attrSource, const PermissionReqs& req) {
+
+ error::BinderResult<bool> permRes = true;
+ const auto check_perm = [&](PermissionEnum perm, uid_t uid) {
+ return mAudioPolicyService->getPermissionProvider().checkPermission(perm, uid);
+ };
+ switch (req.source) {
+ case AudioSource::VOICE_UPLINK:
+ case AudioSource::VOICE_DOWNLINK:
+ case AudioSource::VOICE_CALL:
+ permRes = audioserver_permissions()
+ ? check_perm(CALL_AUDIO_INTERCEPTION, attrSource.uid)
+ : callAudioInterceptionAllowed(attrSource);
+ break;
+ case AudioSource::ECHO_REFERENCE:
+ permRes = audioserver_permissions() ? check_perm(CAPTURE_AUDIO_OUTPUT, attrSource.uid)
+ : captureAudioOutputAllowed(attrSource);
+ break;
+ case AudioSource::FM_TUNER:
+ permRes = audioserver_permissions()
+ ? check_perm(CAPTURE_TUNER_AUDIO_INPUT, attrSource.uid)
+ : captureTunerAudioInputAllowed(attrSource);
+ break;
+ case AudioSource::HOTWORD:
+ permRes = audioserver_permissions() ? check_perm(CAPTURE_AUDIO_HOTWORD, attrSource.uid)
+ : captureHotwordAllowed(attrSource);
+ break;
+ case AudioSource::ULTRASOUND:
+ permRes = audioserver_permissions() ? check_perm(ACCESS_ULTRASOUND, attrSource.uid)
+ : accessUltrasoundAllowed(attrSource);
+ break;
+ case AudioSource::SYS_RESERVED_INVALID:
+ case AudioSource::DEFAULT:
+ case AudioSource::MIC:
+ case AudioSource::CAMCORDER:
+ case AudioSource::VOICE_RECOGNITION:
+ case AudioSource::VOICE_COMMUNICATION:
+ case AudioSource::UNPROCESSED:
+ case AudioSource::VOICE_PERFORMANCE:
+ // No additional check intended
+ case AudioSource::REMOTE_SUBMIX:
+ // special-case checked based on mix type below
+ break;
+ }
+
+ if (!permRes.has_value()) return permRes;
+ if (!permRes.value()) {
+ if (isLegacyOutputSource(req.source)) {
+ permRes = audioserver_permissions() ? check_perm(CAPTURE_AUDIO_OUTPUT, attrSource.uid)
+ : captureAudioOutputAllowed(attrSource);
+ PROPAGATE_FALSEY(permRes);
+ } else {
+ return false;
+ }
+ }
+
+ if (req.isHotword) {
+ permRes = audioserver_permissions() ? check_perm(CAPTURE_AUDIO_HOTWORD, attrSource.uid)
+ : captureHotwordAllowed(attrSource);
+ PROPAGATE_FALSEY(permRes);
+ }
+
+ // TODO evaluate whether we should be checking call redirection like this
+ bool isAllowedDueToCallPerm = false;
+ if (req.isCallRedir) {
+ const auto checkCall = audioserver_permissions()
+ ? check_perm(CALL_AUDIO_INTERCEPTION, attrSource.uid)
+ : callAudioInterceptionAllowed(attrSource);
+ isAllowedDueToCallPerm = VALUE_OR_RETURN(checkCall);
+ }
+
+ switch (req.mixType) {
+ case MixType::NONE:
+ break;
+ case MixType::PUBLIC_CAPTURE_PLAYBACK:
+ // this use case has been validated in audio service with a MediaProjection token,
+ // and doesn't rely on regular permissions
+ // TODO (b/378778313)
+ break;
+ case MixType::TELEPHONY_RX_CAPTURE:
+ if (isAllowedDueToCallPerm) break;
+ // FIXME: use the same permission as for remote submix for now.
+ FALLTHROUGH_INTENDED;
+ case MixType::CAPTURE:
+ permRes = audioserver_permissions() ? check_perm(CAPTURE_AUDIO_OUTPUT, attrSource.uid)
+ : captureAudioOutputAllowed(attrSource);
+ break;
+ case MixType::EXT_POLICY_REROUTE:
+ // TODO intended?
+ if (isAllowedDueToCallPerm) break;
+ permRes = audioserver_permissions() ? check_perm(MODIFY_AUDIO_ROUTING, attrSource.uid)
+ : modifyAudioRoutingAllowed(attrSource);
+ break;
+ }
+
+ PROPAGATE_FALSEY(permRes);
+
+ // All sources which aren't output capture
+ // AND capture from vdi policy mix (the injected audio is mic data from another device)
+ // REQUIRE RECORD perms
+ const auto legacySource = aidl2legacy_AudioSource_audio_source_t(req.source).value();
+ if (req.virtualDeviceId != kDefaultVirtualDeviceId) {
+ // TODO assert that this is always a recordOpSource
+ // TODO upcall solution
+ return recordingAllowed(attrSource, req.virtualDeviceId, legacySource);
+ }
+
+ if (isRecordOpRequired(legacySource)) {
+ permRes = audioserver_permissions() ? check_perm(RECORD_AUDIO, attrSource.uid)
+ : recordingAllowed(attrSource, legacySource);
+ PROPAGATE_FALSEY(permRes);
+ }
+ return true;
+}
+
Status AudioPolicyService::getInputForAttr(const media::audio::common::AudioAttributes& attrAidl,
int32_t inputAidl,
int32_t riidAidl,
@@ -645,23 +803,22 @@
int32_t flagsAidl,
int32_t selectedDeviceIdAidl,
media::GetInputForAttrResponse* _aidl_return) {
- audio_attributes_t attr = VALUE_OR_RETURN_BINDER_STATUS(
+ auto inputSource = attrAidl.source;
+ const audio_attributes_t attr = VALUE_OR_RETURN_BINDER_STATUS(
aidl2legacy_AudioAttributes_audio_attributes_t(attrAidl));
- audio_io_handle_t input = VALUE_OR_RETURN_BINDER_STATUS(
+ const audio_io_handle_t requestedInput = VALUE_OR_RETURN_BINDER_STATUS(
aidl2legacy_int32_t_audio_io_handle_t(inputAidl));
- audio_unique_id_t riid = VALUE_OR_RETURN_BINDER_STATUS(
+ const audio_unique_id_t riid = VALUE_OR_RETURN_BINDER_STATUS(
aidl2legacy_int32_t_audio_unique_id_t(riidAidl));
- audio_session_t session = VALUE_OR_RETURN_BINDER_STATUS(
+ const audio_session_t session = VALUE_OR_RETURN_BINDER_STATUS(
aidl2legacy_int32_t_audio_session_t(sessionAidl));
- audio_config_base_t config = VALUE_OR_RETURN_BINDER_STATUS(
+ const audio_config_base_t config = VALUE_OR_RETURN_BINDER_STATUS(
aidl2legacy_AudioConfigBase_audio_config_base_t(configAidl, true /*isInput*/));
- audio_input_flags_t flags = VALUE_OR_RETURN_BINDER_STATUS(
+ const audio_input_flags_t flags = VALUE_OR_RETURN_BINDER_STATUS(
aidl2legacy_int32_t_audio_input_flags_t_mask(flagsAidl));
- audio_port_handle_t selectedDeviceId = VALUE_OR_RETURN_BINDER_STATUS(
+ const audio_port_handle_t requestedDeviceId = VALUE_OR_RETURN_BINDER_STATUS(
aidl2legacy_int32_t_audio_port_handle_t(selectedDeviceIdAidl));
- audio_port_handle_t portId;
-
if (mAudioPolicyManager == NULL) {
return binderStatusFromStatusT(NO_INIT);
}
@@ -669,207 +826,69 @@
RETURN_IF_BINDER_ERROR(
binderStatusFromStatusT(AudioValidator::validateAudioAttributes(attr, "68953950")));
- audio_source_t inputSource = attr.source;
- if (inputSource == AUDIO_SOURCE_DEFAULT) {
- inputSource = AUDIO_SOURCE_MIC;
- }
-
- // already checked by client, but double-check in case the client wrapper is bypassed
- if ((inputSource < AUDIO_SOURCE_DEFAULT)
- || (inputSource >= AUDIO_SOURCE_CNT
- && inputSource != AUDIO_SOURCE_HOTWORD
- && inputSource != AUDIO_SOURCE_FM_TUNER
- && inputSource != AUDIO_SOURCE_ECHO_REFERENCE
- && inputSource != AUDIO_SOURCE_ULTRASOUND)) {
+ if (inputSource == AudioSource::SYS_RESERVED_INVALID ||
+ std::find(enum_range<AudioSource>().begin(), enum_range<AudioSource>().end(),
+ inputSource) == enum_range<AudioSource>().end()) {
return binderStatusFromStatusT(BAD_VALUE);
}
- RETURN_IF_BINDER_ERROR(validateUsage(attr, attributionSource));
-
- uint32_t virtualDeviceId = kDefaultVirtualDeviceId;
-
- // check calling permissions.
- // Capturing from the following sources does not require permission RECORD_AUDIO
- // as the captured audio does not come from a microphone:
- // - FM_TUNER source is controlled by captureTunerAudioInputAllowed() or
- // captureAudioOutputAllowed() (deprecated).
- // - REMOTE_SUBMIX source is controlled by captureAudioOutputAllowed() if the input
- // type is API_INPUT_MIX_EXT_POLICY_REROUTE and by AudioService if a media projection
- // is used and input type is API_INPUT_MIX_PUBLIC_CAPTURE_PLAYBACK
- // - ECHO_REFERENCE source is controlled by captureAudioOutputAllowed()
- const auto isRecordingAllowed = audioserver_permissions() ?
- CHECK_PERM(RECORD_AUDIO, attributionSource.uid) :
- recordingAllowed(attributionSource, inputSource);
- if (!(isRecordingAllowed
- || inputSource == AUDIO_SOURCE_FM_TUNER
- || inputSource == AUDIO_SOURCE_REMOTE_SUBMIX
- || inputSource == AUDIO_SOURCE_ECHO_REFERENCE)) {
- ALOGE("%s permission denied: recording not allowed for %s",
- __func__, attributionSource.toString().c_str());
- return binderStatusFromStatusT(PERMISSION_DENIED);
+ if (inputSource == AudioSource::DEFAULT) {
+ inputSource = AudioSource::MIC;
}
- bool canCaptureOutput = audioserver_permissions() ?
- CHECK_PERM(CAPTURE_AUDIO_OUTPUT, attributionSource.uid)
- : captureAudioOutputAllowed(attributionSource);
- bool canInterceptCallAudio = audioserver_permissions() ?
- CHECK_PERM(CALL_AUDIO_INTERCEPTION, attributionSource.uid)
- : callAudioInterceptionAllowed(attributionSource);
- bool isCallAudioSource = inputSource == AUDIO_SOURCE_VOICE_UPLINK
- || inputSource == AUDIO_SOURCE_VOICE_DOWNLINK
- || inputSource == AUDIO_SOURCE_VOICE_CALL;
+ const bool isCallRedir = (attr.flags & AUDIO_FLAG_CALL_REDIRECTION) != 0;
- if (isCallAudioSource && !canInterceptCallAudio && !canCaptureOutput) {
- return binderStatusFromStatusT(PERMISSION_DENIED);
- }
- if (inputSource == AUDIO_SOURCE_ECHO_REFERENCE
- && !canCaptureOutput) {
- return binderStatusFromStatusT(PERMISSION_DENIED);
- }
- if (inputSource == AUDIO_SOURCE_FM_TUNER
- && !canCaptureOutput
- && !(audioserver_permissions() ?
- CHECK_PERM(CAPTURE_TUNER_AUDIO_INPUT, attributionSource.uid)
- : captureTunerAudioInputAllowed(attributionSource))) {
- return binderStatusFromStatusT(PERMISSION_DENIED);
+ //TODO(b/374751406): remove forcing canBypassConcurrentPolicy to canCaptureOutput
+ // once all system apps using CAPTURE_AUDIO_OUTPUT to capture during calls
+ // are updated to use the new CONCURRENT_AUDIO_RECORD_BYPASS permission.
+ bool canBypassConcurrentPolicy = audioserver_permissions()
+ ? CHECK_PERM(CAPTURE_AUDIO_OUTPUT, attributionSource.uid)
+ : captureAudioOutputAllowed(attributionSource);
+ if (concurrent_audio_record_bypass_permission()) {
+ canBypassConcurrentPolicy = audioserver_permissions() ?
+ CHECK_PERM(BYPASS_CONCURRENT_RECORD_AUDIO_RESTRICTION,
+ attributionSource.uid)
+ : bypassConcurrentPolicyAllowed(attributionSource);
}
- bool canCaptureHotword = audioserver_permissions() ?
- CHECK_PERM(CAPTURE_AUDIO_HOTWORD, attributionSource.uid)
- : captureHotwordAllowed(attributionSource);
- if ((inputSource == AUDIO_SOURCE_HOTWORD) && !canCaptureHotword) {
- return binderStatusFromStatusT(PERMISSION_DENIED);
- }
-
- if (((flags & (AUDIO_INPUT_FLAG_HW_HOTWORD |
- AUDIO_INPUT_FLAG_HOTWORD_TAP |
- AUDIO_INPUT_FLAG_HW_LOOKBACK)) != 0)
- && !canCaptureHotword) {
- ALOGE("%s: permission denied: hotword mode not allowed"
- " for uid %d pid %d", __func__, attributionSource.uid, attributionSource.pid);
- return binderStatusFromStatusT(PERMISSION_DENIED);
- }
-
- if (attr.source == AUDIO_SOURCE_ULTRASOUND) {
- if (!(audioserver_permissions() ?
- CHECK_PERM(ACCESS_ULTRASOUND, attributionSource.uid)
- : accessUltrasoundAllowed(attributionSource))) {
- ALOGE("%s: permission denied: ultrasound not allowed for uid %d pid %d",
- __func__, attributionSource.uid, attributionSource.pid);
- return binderStatusFromStatusT(PERMISSION_DENIED);
- }
- }
-
- sp<AudioPolicyEffects>audioPolicyEffects;
+ sp<AudioPolicyEffects> audioPolicyEffects;
+ base::expected<media::GetInputForAttrResponse, std::variant<binder::Status, AudioConfigBase>>
+ res;
{
- status_t status;
- AudioPolicyInterface::input_type_t inputType;
-
audio_utils::lock_guard _l(mMutex);
- {
- AutoCallerClear acc;
- // the audio_in_acoustics_t parameter is ignored by get_input()
- status = mAudioPolicyManager->getInputForAttr(&attr, &input, riid, session,
- attributionSource, &config,
- flags, &selectedDeviceId,
- &inputType, &portId,
- &virtualDeviceId);
-
+ AutoCallerClear acc;
+ // the audio_in_acoustics_t parameter is ignored by get_input()
+ res = mAudioPolicyManager->getInputForAttr(attr, requestedInput, requestedDeviceId,
+ config, flags, riid, session,
+ attributionSource);
+ if (!res.has_value()) {
+ if (res.error().index() == 1) {
+ _aidl_return->config = std::get<1>(res.error());
+ return Status::fromExceptionCode(EX_ILLEGAL_STATE);
+ } else {
+ return std::get<0>(res.error());
+ }
}
+
audioPolicyEffects = mAudioPolicyEffects;
- if (status == NO_ERROR) {
- // enforce permission (if any) required for each type of input
- switch (inputType) {
- case AudioPolicyInterface::API_INPUT_MIX_PUBLIC_CAPTURE_PLAYBACK:
- // this use case has been validated in audio service with a MediaProjection token,
- // and doesn't rely on regular permissions
- case AudioPolicyInterface::API_INPUT_LEGACY:
- break;
- case AudioPolicyInterface::API_INPUT_TELEPHONY_RX:
- if ((attr.flags & AUDIO_FLAG_CALL_REDIRECTION) != 0
- && canInterceptCallAudio) {
- break;
- }
- // FIXME: use the same permission as for remote submix for now.
- FALLTHROUGH_INTENDED;
- case AudioPolicyInterface::API_INPUT_MIX_CAPTURE:
- if (!canCaptureOutput) {
- ALOGE("%s permission denied: capture not allowed", __func__);
- status = PERMISSION_DENIED;
- }
- break;
- case AudioPolicyInterface::API_INPUT_MIX_EXT_POLICY_REROUTE: {
- bool modAudioRoutingAllowed;
- if (audioserver_permissions()) {
- auto result = getPermissionProvider().checkPermission(
- MODIFY_AUDIO_ROUTING, attributionSource.uid);
- if (!result.ok()) {
- ALOGE("%s permission provider error: %s", __func__,
- result.error().toString8().c_str());
- status = aidl_utils::statusTFromBinderStatus(result.error());
- break;
- }
- modAudioRoutingAllowed = result.value();
- } else {
- modAudioRoutingAllowed = modifyAudioRoutingAllowed(attributionSource);
- }
- if (!(modAudioRoutingAllowed
- || ((attr.flags & AUDIO_FLAG_CALL_REDIRECTION) != 0
- && canInterceptCallAudio))) {
- ALOGE("%s permission denied for remote submix capture", __func__);
- status = PERMISSION_DENIED;
- }
- break;
- }
- case AudioPolicyInterface::API_INPUT_INVALID:
- default:
- LOG_ALWAYS_FATAL("%s encountered an invalid input type %d",
- __func__, (int)inputType);
- }
-
- if (audiopolicy_flags::record_audio_device_aware_permission()) {
- // enforce device-aware RECORD_AUDIO permission
- if (virtualDeviceId != kDefaultVirtualDeviceId &&
- !recordingAllowed(attributionSource, virtualDeviceId, inputSource)) {
- status = PERMISSION_DENIED;
- }
- }
- }
-
- if (status != NO_ERROR) {
- if (status == PERMISSION_DENIED) {
- AutoCallerClear acc;
- mAudioPolicyManager->releaseInput(portId);
- } else {
- _aidl_return->config = VALUE_OR_RETURN_BINDER_STATUS(
- legacy2aidl_audio_config_base_t_AudioConfigBase(config, true /*isInput*/));
- }
- return binderStatusFromStatusT(status);
- }
-
- sp<AudioRecordClient> client = new AudioRecordClient(attr, input, session, portId,
- selectedDeviceId, attributionSource,
- virtualDeviceId,
- canCaptureOutput, canCaptureHotword,
- mOutputCommandThread);
- mAudioRecordClients.add(portId, client);
+ sp<AudioRecordClient> client = new AudioRecordClient(
+ attr, res->input, session, res->portId, {res->selectedDeviceId}, attributionSource,
+ res->virtualDeviceId, canBypassConcurrentPolicy, mOutputCommandThread);
+ mAudioRecordClients.add(res->portId, client);
}
- if (audioPolicyEffects != 0) {
+ if (audioPolicyEffects != nullptr) {
// create audio pre processors according to input source
- status_t status = audioPolicyEffects->addInputEffects(input, inputSource, session);
+ status_t status = audioPolicyEffects->addInputEffects(res->input,
+ aidl2legacy_AudioSource_audio_source_t(inputSource).value(), session);
if (status != NO_ERROR && status != ALREADY_EXISTS) {
- ALOGW("Failed to add effects on input %d", input);
+ ALOGW("Failed to add effects on input %d", res->input);
}
}
- _aidl_return->input = VALUE_OR_RETURN_BINDER_STATUS(
- legacy2aidl_audio_io_handle_t_int32_t(input));
- _aidl_return->selectedDeviceId = VALUE_OR_RETURN_BINDER_STATUS(
- legacy2aidl_audio_port_handle_t_int32_t(selectedDeviceId));
- _aidl_return->portId = VALUE_OR_RETURN_BINDER_STATUS(
- legacy2aidl_audio_port_handle_t_int32_t(portId));
+ *_aidl_return = res.value();
+
return Status::ok();
}
@@ -883,6 +902,17 @@
return {};
}
+std::string AudioPolicyService::getDeviceTypeStrForPortIds(DeviceIdVector portIds) {
+ std::string output = {};
+ for (auto it = portIds.begin(); it != portIds.end(); ++it) {
+ if (it != portIds.begin()) {
+ output += ", ";
+ }
+ output += getDeviceTypeStrForPortId(*it);
+ }
+ return output;
+}
+
Status AudioPolicyService::startInput(int32_t portIdAidl)
{
audio_port_handle_t portId = VALUE_OR_RETURN_BINDER_STATUS(
@@ -904,13 +934,12 @@
std::stringstream msg;
msg << "Audio recording on session " << client->session;
+
const auto permitted = startRecording(client->attributionSource, client->virtualDeviceId,
String16(msg.str().c_str()), client->attributes.source);
// check calling permissions
- if (permitted == PERMISSION_HARD_DENIED && client->attributes.source != AUDIO_SOURCE_FM_TUNER
- && client->attributes.source != AUDIO_SOURCE_REMOTE_SUBMIX
- && client->attributes.source != AUDIO_SOURCE_ECHO_REFERENCE) {
+ if (permitted == PERMISSION_HARD_DENIED) {
ALOGE("%s permission denied: recording not allowed for attribution source %s",
__func__, client->attributionSource.toString().c_str());
return binderStatusFromStatusT(PERMISSION_DENIED);
@@ -973,6 +1002,8 @@
"android.media.audiopolicy.active.session";
static constexpr char kAudioPolicyActiveDevice[] =
"android.media.audiopolicy.active.device";
+ static constexpr char kAudioPolicyActiveDevices[] =
+ "android.media.audiopolicy.active.devices";
mediametrics::Item *item = mediametrics::Item::create(kAudioPolicy);
if (item != NULL) {
@@ -990,8 +1021,8 @@
item->setCString(kAudioPolicyRqstPkg,
std::to_string(client->attributionSource.uid).c_str());
}
- item->setCString(
- kAudioPolicyRqstDevice, getDeviceTypeStrForPortId(client->deviceId).c_str());
+ item->setCString(kAudioPolicyRqstDevice,
+ getDeviceTypeStrForPortId(getFirstDeviceId(client->deviceIds)).c_str());
int count = mAudioRecordClients.size();
for (int i = 0; i < count ; i++) {
@@ -1013,7 +1044,9 @@
other->attributionSource.uid).c_str());
}
item->setCString(kAudioPolicyActiveDevice,
- getDeviceTypeStrForPortId(other->deviceId).c_str());
+ getDeviceTypeStrForPortId(getFirstDeviceId(other->deviceIds)).c_str());
+ item->setCString(kAudioPolicyActiveDevices,
+ getDeviceTypeStrForPortIds(other->deviceIds).c_str());
}
}
item->selfrecord();
@@ -1116,8 +1149,15 @@
Status AudioPolicyService::setDeviceAbsoluteVolumeEnabled(const AudioDevice& deviceAidl,
bool enabled,
AudioStreamType streamToDriveAbsAidl) {
- audio_stream_type_t streamToDriveAbs = VALUE_OR_RETURN_BINDER_STATUS(
- aidl2legacy_AudioStreamType_audio_stream_type_t(streamToDriveAbsAidl));
+ ALOGI("%s: deviceAidl %s, enabled %d, streamToDriveAbsAidl %d", __func__,
+ deviceAidl.toString().c_str(), enabled, streamToDriveAbsAidl);
+
+ audio_stream_type_t streamToDriveAbs = AUDIO_STREAM_DEFAULT;
+ if (enabled) {
+ streamToDriveAbs = VALUE_OR_RETURN_BINDER_STATUS(
+ aidl2legacy_AudioStreamType_audio_stream_type_t(streamToDriveAbsAidl));
+ }
+
audio_devices_t deviceType;
std::string address;
RETURN_BINDER_STATUS_IF_ERROR(
@@ -1131,9 +1171,7 @@
: settingsAllowed())) {
return binderStatusFromStatusT(PERMISSION_DENIED);
}
- if (uint32_t(streamToDriveAbs) >= AUDIO_STREAM_PUBLIC_CNT) {
- return binderStatusFromStatusT(BAD_VALUE);
- }
+
audio_utils::lock_guard _l(mMutex);
AutoCallerClear acc;
return binderStatusFromStatusT(
@@ -1168,7 +1206,7 @@
Status AudioPolicyService::setStreamVolumeIndex(AudioStreamType streamAidl,
const AudioDeviceDescription& deviceAidl,
- int32_t indexAidl) {
+ int32_t indexAidl, bool muted) {
audio_stream_type_t stream = VALUE_OR_RETURN_BINDER_STATUS(
aidl2legacy_AudioStreamType_audio_stream_type_t(streamAidl));
int index = VALUE_OR_RETURN_BINDER_STATUS(convertIntegral<int>(indexAidl));
@@ -1190,6 +1228,7 @@
AutoCallerClear acc;
return binderStatusFromStatusT(mAudioPolicyManager->setStreamVolumeIndex(stream,
index,
+ muted,
device));
}
@@ -1218,7 +1257,7 @@
Status AudioPolicyService::setVolumeIndexForAttributes(
const media::audio::common::AudioAttributes& attrAidl,
- const AudioDeviceDescription& deviceAidl, int32_t indexAidl) {
+ const AudioDeviceDescription& deviceAidl, int32_t indexAidl, bool muted) {
audio_attributes_t attributes = VALUE_OR_RETURN_BINDER_STATUS(
aidl2legacy_AudioAttributes_audio_attributes_t(attrAidl));
int index = VALUE_OR_RETURN_BINDER_STATUS(convertIntegral<int>(indexAidl));
@@ -1238,7 +1277,7 @@
audio_utils::lock_guard _l(mMutex);
AutoCallerClear acc;
return binderStatusFromStatusT(
- mAudioPolicyManager->setVolumeIndexForAttributes(attributes, index, device));
+ mAudioPolicyManager->setVolumeIndexForAttributes(attributes, index, muted, device));
}
Status AudioPolicyService::getVolumeIndexForAttributes(
@@ -1685,6 +1724,19 @@
return Status::ok();
}
+template <typename Port>
+void anonymizePortBluetoothAddress(Port& port) {
+ if (port.type != AUDIO_PORT_TYPE_DEVICE) {
+ return;
+ }
+ if (!(audio_is_a2dp_device(port.ext.device.type)
+ || audio_is_ble_device(port.ext.device.type)
+ || audio_is_bluetooth_sco_device(port.ext.device.type)
+ || audio_is_hearing_aid_out_device(port.ext.device.type))) {
+ return;
+ }
+ anonymizeBluetoothAddress(port.ext.device.address);
+}
Status AudioPolicyService::listAudioPorts(media::AudioPortRole roleAidl,
media::AudioPortType typeAidl, Int* count,
@@ -1703,14 +1755,27 @@
std::unique_ptr<audio_port_v7[]> ports(new audio_port_v7[num_ports]);
unsigned int generation;
- audio_utils::lock_guard _l(mMutex);
- if (mAudioPolicyManager == NULL) {
- return binderStatusFromStatusT(NO_INIT);
- }
+ const AttributionSourceState attributionSource = getCallingAttributionSource();
AutoCallerClear acc;
- RETURN_IF_BINDER_ERROR(binderStatusFromStatusT(
- mAudioPolicyManager->listAudioPorts(role, type, &num_ports, ports.get(), &generation)));
- numPortsReq = std::min(numPortsReq, num_ports);
+ {
+ audio_utils::lock_guard _l(mMutex);
+ if (mAudioPolicyManager == NULL) {
+ return binderStatusFromStatusT(NO_INIT);
+ }
+ // AudioPolicyManager->listAudioPorts makes a deep copy of port structs into ports
+ // so it is safe to access after releasing the mutex
+ RETURN_IF_BINDER_ERROR(binderStatusFromStatusT(
+ mAudioPolicyManager->listAudioPorts(
+ role, type, &num_ports, ports.get(), &generation)));
+ numPortsReq = std::min(numPortsReq, num_ports);
+ }
+
+ if (mustAnonymizeBluetoothAddress(attributionSource, String16(__func__))) {
+ for (size_t i = 0; i < numPortsReq; ++i) {
+ anonymizePortBluetoothAddress(ports[i]);
+ }
+ }
+
RETURN_IF_BINDER_ERROR(binderStatusFromStatusT(
convertRange(ports.get(), ports.get() + numPortsReq, std::back_inserter(*portsAidl),
legacy2aidl_audio_port_v7_AudioPortFw)));
@@ -1733,12 +1798,24 @@
Status AudioPolicyService::getAudioPort(int portId,
media::AudioPortFw* _aidl_return) {
audio_port_v7 port{ .id = portId };
- audio_utils::lock_guard _l(mMutex);
- if (mAudioPolicyManager == NULL) {
- return binderStatusFromStatusT(NO_INIT);
- }
+
+ const AttributionSourceState attributionSource = getCallingAttributionSource();
AutoCallerClear acc;
- RETURN_IF_BINDER_ERROR(binderStatusFromStatusT(mAudioPolicyManager->getAudioPort(&port)));
+
+ {
+ audio_utils::lock_guard _l(mMutex);
+ if (mAudioPolicyManager == NULL) {
+ return binderStatusFromStatusT(NO_INIT);
+ }
+ // AudioPolicyManager->getAudioPort makes a deep copy of the port struct into port
+ // so it is safe to access after releasing the mutex
+ RETURN_IF_BINDER_ERROR(binderStatusFromStatusT(mAudioPolicyManager->getAudioPort(&port)));
+ }
+
+ if (mustAnonymizeBluetoothAddress(attributionSource, String16(__func__))) {
+ anonymizePortBluetoothAddress(port);
+ }
+
*_aidl_return = VALUE_OR_RETURN_BINDER_STATUS(legacy2aidl_audio_port_v7_AudioPortFw(port));
return Status::ok();
}
@@ -1800,14 +1877,32 @@
std::unique_ptr<audio_patch[]> patches(new audio_patch[num_patches]);
unsigned int generation;
- audio_utils::lock_guard _l(mMutex);
- if (mAudioPolicyManager == NULL) {
- return binderStatusFromStatusT(NO_INIT);
- }
+ const AttributionSourceState attributionSource = getCallingAttributionSource();
AutoCallerClear acc;
- RETURN_IF_BINDER_ERROR(binderStatusFromStatusT(
- mAudioPolicyManager->listAudioPatches(&num_patches, patches.get(), &generation)));
- numPatchesReq = std::min(numPatchesReq, num_patches);
+
+ {
+ audio_utils::lock_guard _l(mMutex);
+ if (mAudioPolicyManager == NULL) {
+ return binderStatusFromStatusT(NO_INIT);
+ }
+ // AudioPolicyManager->listAudioPatches makes a deep copy of patches structs into patches
+ // so it is safe to access after releasing the mutex
+ RETURN_IF_BINDER_ERROR(binderStatusFromStatusT(
+ mAudioPolicyManager->listAudioPatches(&num_patches, patches.get(), &generation)));
+ numPatchesReq = std::min(numPatchesReq, num_patches);
+ }
+
+ if (mustAnonymizeBluetoothAddress(attributionSource, String16(__func__))) {
+ for (size_t i = 0; i < numPatchesReq; ++i) {
+ for (size_t j = 0; j < patches[i].num_sources; ++j) {
+ anonymizePortBluetoothAddress(patches[i].sources[j]);
+ }
+ for (size_t j = 0; j < patches[i].num_sinks; ++j) {
+ anonymizePortBluetoothAddress(patches[i].sinks[j]);
+ }
+ }
+ }
+
RETURN_IF_BINDER_ERROR(binderStatusFromStatusT(
convertRange(patches.get(), patches.get() + numPatchesReq,
std::back_inserter(*patchesAidl), legacy2aidl_audio_patch_AudioPatchFw)));
@@ -2751,4 +2846,24 @@
return Status::ok();
}
+Status AudioPolicyService::getMmapPolicyInfos(
+ AudioMMapPolicyType policyType, std::vector<AudioMMapPolicyInfo> *_aidl_return) {
+ if (mAudioPolicyManager == nullptr) {
+ return binderStatusFromStatusT(NO_INIT);
+ }
+ audio_utils::lock_guard _l(mMutex);
+ return binderStatusFromStatusT(
+ mAudioPolicyManager->getMmapPolicyInfos(policyType, _aidl_return));
+}
+
+Status AudioPolicyService::getMmapPolicyForDevice(
+ AudioMMapPolicyType policyType, AudioMMapPolicyInfo *policyInfo) {
+ if (mAudioPolicyManager == nullptr) {
+ return binderStatusFromStatusT(NO_INIT);
+ }
+ audio_utils::lock_guard _l(mMutex);
+ return binderStatusFromStatusT(
+ mAudioPolicyManager->getMmapPolicyForDevice(policyType, policyInfo));
+}
+
} // namespace android
diff --git a/services/audiopolicy/service/AudioPolicyService.cpp b/services/audiopolicy/service/AudioPolicyService.cpp
index 7b7275e..4c506e8 100644
--- a/services/audiopolicy/service/AudioPolicyService.cpp
+++ b/services/audiopolicy/service/AudioPolicyService.cpp
@@ -170,6 +170,8 @@
BINDER_METHOD_ENTRY(clearPreferredMixerAttributes) \
BINDER_METHOD_ENTRY(getRegisteredPolicyMixes) \
BINDER_METHOD_ENTRY(getPermissionController) \
+BINDER_METHOD_ENTRY(getMmapPolicyInfos) \
+BINDER_METHOD_ENTRY(getMmapPolicyForDevice) \
\
// singleton for Binder Method Statistics for IAudioPolicyService
static auto& getIAudioPolicyServiceStatistics() {
@@ -851,13 +853,13 @@
// AND an accessibility service is TOP
// AND source is either VOICE_RECOGNITION OR HOTWORD
// OR there is no active privacy sensitive capture or call
-// OR client has CAPTURE_AUDIO_OUTPUT privileged permission
+// OR client can capture calls
// AND source is VOICE_RECOGNITION OR HOTWORD
// The client is an assistant AND active assistant is not being used
// AND an accessibility service is on TOP or a RTT call is active
// AND the source is VOICE_RECOGNITION or HOTWORD
// OR there is no active privacy sensitive capture or call
-// OR client has CAPTURE_AUDIO_OUTPUT privileged permission
+// OR client can capture calls
// AND is TOP most recent assistant and uses VOICE_RECOGNITION or HOTWORD
// OR there is no top recent assistant and source is HOTWORD
// OR The client is an accessibility service
@@ -865,7 +867,7 @@
// AND the source is VOICE_RECOGNITION or HOTWORD
// OR The assistant is not on TOP
// AND there is no active privacy sensitive capture or call
-// OR client has CAPTURE_AUDIO_OUTPUT privileged permission
+// OR client can capture calls
// AND is on TOP
// AND the source is VOICE_RECOGNITION or HOTWORD
// OR the client source is virtual (remote submix, call audio TX or RX...)
@@ -873,7 +875,7 @@
// AND is on TOP
// OR all active clients are using HOTWORD source
// AND no call is active
-// OR client has CAPTURE_AUDIO_OUTPUT privileged permission
+// OR client can capture calls
// OR the client is the current InputMethodService
// AND a RTT call is active AND the source is VOICE_RECOGNITION
// OR The client is an active communication owner
@@ -882,7 +884,11 @@
// AND The assistant is not on TOP
// AND is on TOP or latest started
// AND there is no active privacy sensitive capture or call
-// OR client has CAPTURE_AUDIO_OUTPUT privileged permission
+// OR client can capture calls
+// NOTE: a client can capture calls if it either:
+// has CAPTURE_AUDIO_OUTPUT privileged permission (temporarily until
+// all system apps are updated)
+// or has CONCURRENT_AUDIO_RECORD_BYPASS privileged permission
sp<AudioRecordClient> topActive;
@@ -1022,7 +1028,7 @@
// else
// favor the privacy sensitive case
if (topActive != nullptr && topSensitiveActive != nullptr
- && !topActive->canCaptureOutput) {
+ && !topActive->canBypassConcurrentPolicy) {
topActive = nullptr;
}
@@ -1041,6 +1047,9 @@
current->attributionSource.uid == topSensitiveActive->attributionSource.uid;
bool isTopOrLatestAssistant = latestActiveAssistant == nullptr ? false :
current->attributionSource.uid == latestActiveAssistant->attributionSource.uid;
+ bool isActiveAssistant =
+ (useActiveAssistantList && mUidPolicy->isActiveAssistantUid(currentUid))
+ || mUidPolicy->isAssistantUid(currentUid);
// TODO: b/339112720
// Refine this logic when we have the correct phone state owner UID. The current issue is
@@ -1050,8 +1059,8 @@
mMutex) {
uid_t recordUid = VALUE_OR_FATAL(aidl2legacy_int32_t_uid_t(
recordClient->attributionSource.uid));
- bool canCaptureCall = recordClient->canCaptureOutput;
- bool canCaptureCommunication = recordClient->canCaptureOutput
+ bool canCaptureCall = recordClient->canBypassConcurrentPolicy;
+ bool canCaptureCommunication = recordClient->canBypassConcurrentPolicy
|| !isPhoneStateOwnerActive
|| recordUid == mPhoneStateOwnerUid;
return !(isInCall && !canCaptureCall)
@@ -1067,9 +1076,9 @@
// AND is ongoing communication owner
// AND is on TOP or latest started
const bool allowSensitiveCapture =
- !isSensitiveActive || isTopOrLatestSensitive || current->canCaptureOutput;
+ !isSensitiveActive || isTopOrLatestSensitive || current->canBypassConcurrentPolicy;
bool allowCapture = false;
- if (!isAssistantOnTop) {
+ if (!isAssistantOnTop || isActiveAssistant) {
allowCapture = (isTopOrLatestActive || isTopOrLatestSensitive) &&
allowSensitiveCapture && canCaptureIfInCallOrCommunication;
} else {
@@ -1817,6 +1826,7 @@
ul.unlock();
command->mStatus = AudioSystem::setStreamVolume(data->mStream,
data->mVolume,
+ data->mIsMuted,
data->mIO);
ul.lock();
}break;
@@ -1827,6 +1837,7 @@
ul.unlock();
command->mStatus = AudioSystem::setPortsVolume(data->mPorts,
data->mVolume,
+ data->mMuted,
data->mIO);
ul.lock();
} break;
@@ -2147,6 +2158,7 @@
status_t AudioPolicyService::AudioCommandThread::volumeCommand(audio_stream_type_t stream,
float volume,
+ bool muted,
audio_io_handle_t output,
int delayMs)
{
@@ -2155,6 +2167,7 @@
sp<VolumeData> data = new VolumeData();
data->mStream = stream;
data->mVolume = volume;
+ data->mIsMuted = muted;
data->mIO = output;
command->mParam = data;
command->mWaitStatus = true;
@@ -2164,14 +2177,15 @@
}
status_t AudioPolicyService::AudioCommandThread::volumePortsCommand(
- const std::vector<audio_port_handle_t> &ports, float volume, audio_io_handle_t output,
- int delayMs)
+ const std::vector<audio_port_handle_t> &ports, float volume, bool muted,
+ audio_io_handle_t output, int delayMs)
{
sp<AudioCommand> command = new AudioCommand();
command->mCommand = SET_PORTS_VOLUME;
sp<VolumePortsData> data = new VolumePortsData();
data->mPorts = ports;
data->mVolume = volume;
+ data->mMuted = muted;
data->mIO = output;
command->mParam = data;
command->mWaitStatus = true;
@@ -2675,17 +2689,18 @@
int AudioPolicyService::setStreamVolume(audio_stream_type_t stream,
float volume,
+ bool muted,
audio_io_handle_t output,
int delayMs)
{
- return (int)mAudioCommandThread->volumeCommand(stream, volume,
+ return (int)mAudioCommandThread->volumeCommand(stream, volume, muted,
output, delayMs);
}
int AudioPolicyService::setPortsVolume(const std::vector<audio_port_handle_t> &ports, float volume,
- audio_io_handle_t output, int delayMs)
+ bool muted, audio_io_handle_t output, int delayMs)
{
- return (int)mAudioCommandThread->volumePortsCommand(ports, volume, output, delayMs);
+ return (int)mAudioCommandThread->volumePortsCommand(ports, volume, muted, output, delayMs);
}
int AudioPolicyService::setVoiceVolume(float volume, int delayMs)
diff --git a/services/audiopolicy/service/AudioPolicyService.h b/services/audiopolicy/service/AudioPolicyService.h
index e22637f..acd9fe9 100644
--- a/services/audiopolicy/service/AudioPolicyService.h
+++ b/services/audiopolicy/service/AudioPolicyService.h
@@ -58,6 +58,8 @@
using media::audio::common::AudioDevice;
using media::audio::common::AudioDeviceDescription;
using media::audio::common::AudioFormatDescription;
+using media::audio::common::AudioMMapPolicyInfo;
+using media::audio::common::AudioMMapPolicyType;
using media::audio::common::AudioMode;
using media::audio::common::AudioSource;
using media::audio::common::AudioStreamType;
@@ -115,7 +117,7 @@
int32_t session,
const AttributionSourceState &attributionSource,
const AudioConfig& config,
- int32_t flags, int32_t selectedDeviceId,
+ int32_t flags, const std::vector<int32_t>& selectedDeviceIds,
media::GetOutputForAttrResponse* _aidl_return) override;
binder::Status startOutput(int32_t portId) override;
binder::Status stopOutput(int32_t portId) override;
@@ -136,13 +138,13 @@
int32_t indexMax) override;
binder::Status setStreamVolumeIndex(AudioStreamType stream,
const AudioDeviceDescription& device,
- int32_t index) override;
+ int32_t index, bool muted) override;
binder::Status getStreamVolumeIndex(AudioStreamType stream,
const AudioDeviceDescription& device,
int32_t* _aidl_return) override;
binder::Status setVolumeIndexForAttributes(const media::audio::common::AudioAttributes& attr,
const AudioDeviceDescription& device,
- int32_t index) override;
+ int32_t index, bool muted) override;
binder::Status getVolumeIndexForAttributes(const media::audio::common::AudioAttributes& attr,
const AudioDeviceDescription& device,
int32_t* _aidl_return) override;
@@ -328,6 +330,13 @@
// Should only be called by AudioService to push permission data down to audioserver
binder::Status getPermissionController(sp<INativePermissionController>* out) override;
+ binder::Status getMmapPolicyInfos(
+ AudioMMapPolicyType policyType,
+ std::vector<AudioMMapPolicyInfo>* _aidl_return) override;
+ binder::Status getMmapPolicyForDevice(
+ AudioMMapPolicyType policyType,
+ AudioMMapPolicyInfo* policyInfo) override;
+
status_t onTransact(uint32_t code, const Parcel& data, Parcel* reply, uint32_t flags) override;
// -- IAudioPolicyLocal methods
@@ -353,6 +362,7 @@
virtual status_t setStreamVolume(audio_stream_type_t stream,
float volume,
+ bool muted,
audio_io_handle_t output,
int delayMs = 0);
@@ -364,12 +374,13 @@
*
* @param ports to consider
* @param volume to set
+ * @param muted to set
* @param output to consider
* @param delayMs to use
* @return NO_ERROR if successful
*/
virtual status_t setPortsVolume(const std::vector<audio_port_handle_t> &ports, float volume,
- audio_io_handle_t output, int delayMs = 0);
+ bool muted, audio_io_handle_t output, int delayMs = 0);
virtual status_t setVoiceVolume(float volume, int delayMs = 0);
void doOnNewAudioModulesAvailable();
@@ -463,6 +474,8 @@
std::string getDeviceTypeStrForPortId(audio_port_handle_t portId);
+ std::string getDeviceTypeStrForPortIds(DeviceIdVector portIds);
+
status_t getAudioPolicyEffects(sp<AudioPolicyEffects>& audioPolicyEffects);
app_state_t apmStatFromAmState(int amState);
@@ -625,10 +638,10 @@
virtual bool threadLoop();
void exit();
- status_t volumeCommand(audio_stream_type_t stream, float volume,
+ status_t volumeCommand(audio_stream_type_t stream, float volume, bool muted,
audio_io_handle_t output, int delayMs = 0);
status_t volumePortsCommand(const std::vector<audio_port_handle_t> &ports,
- float volume, audio_io_handle_t output, int delayMs = 0);
+ float volume, bool muted, audio_io_handle_t output, int delayMs = 0);
status_t parametersCommand(audio_io_handle_t ioHandle,
const char *keyValuePairs, int delayMs = 0);
status_t voiceVolumeCommand(float volume, int delayMs = 0);
@@ -700,6 +713,7 @@
public:
audio_stream_type_t mStream;
float mVolume;
+ bool mIsMuted;
audio_io_handle_t mIO;
};
@@ -707,13 +721,15 @@
public:
std::vector<audio_port_handle_t> mPorts;
float mVolume;
+ bool mMuted;
audio_io_handle_t mIO;
std::string dumpPorts() {
- return std::string("volume ") + std::to_string(mVolume) + " on IO " +
- std::to_string(mIO) + " and ports " +
- std::accumulate(std::begin(mPorts), std::end(mPorts), std::string{},
- [] (const std::string& ls, int rs) {
- return ls + std::to_string(rs) + " "; });
+ return std::string("volume ") + std::to_string(mVolume) + std::string("muted ") +
+ std::to_string(mMuted) + " on IO " + std::to_string(mIO) + " and ports " +
+ std::accumulate(std::begin(mPorts), std::end(mPorts), std::string{},
+ [](const std::string &ls, int rs) {
+ return ls + std::to_string(rs) + " ";
+ });
}
};
@@ -854,9 +870,10 @@
// misc control functions
//
- // set a stream volume for a particular output. For the same user setting, a given stream type can have different volumes
- // for each output (destination device) it is attached to.
- virtual status_t setStreamVolume(audio_stream_type_t stream, float volume, audio_io_handle_t output, int delayMs = 0);
+ // set a stream volume for a particular output. For the same user setting, a given stream
+ // type can have different volumes for each output (destination device) it is attached to.
+ virtual status_t setStreamVolume(audio_stream_type_t stream, float volume, bool muted,
+ audio_io_handle_t output, int delayMs = 0);
/**
* Set a volume on port(s) for a particular output. For the same user setting, a volume
* group (and associated given port of the client's track) can have different volumes for
@@ -864,12 +881,13 @@
*
* @param ports to consider
* @param volume to set
+ * @param muted to set
* @param output to consider
* @param delayMs to use
* @return NO_ERROR if successful
*/
status_t setPortsVolume(const std::vector<audio_port_handle_t> &ports, float volume,
- audio_io_handle_t output, int delayMs = 0) override;
+ bool muted, audio_io_handle_t output, int delayMs = 0) override;
// function enabling to send proprietary informations directly from audio policy manager to audio hardware interface.
virtual void setParameters(audio_io_handle_t ioHandle, const String8& keyValuePairs, int delayMs = 0);
@@ -938,6 +956,13 @@
status_t setTracksInternalMute(
const std::vector<media::TrackInternalMuteInfo>& tracksInternalMute) override;
+ status_t getMmapPolicyInfos(
+ media::audio::common::AudioMMapPolicyType policyType,
+ std::vector<media::audio::common::AudioMMapPolicyInfo> *policyInfos) override;
+
+ error::BinderResult<bool> checkPermissionForInput(const AttributionSourceState& attr,
+ const PermissionReqs& req) override;
+
private:
AudioPolicyService *mAudioPolicyService;
};
@@ -995,10 +1020,10 @@
const audio_io_handle_t io,
const AttributionSourceState& attributionSource,
const audio_session_t session, audio_port_handle_t portId,
- const audio_port_handle_t deviceId) :
+ const DeviceIdVector deviceIds) :
attributes(attributes), io(io), attributionSource(
attributionSource), session(session), portId(portId),
- deviceId(deviceId), active(false) {}
+ deviceIds(deviceIds), active(false) {}
~AudioClient() override = default;
@@ -1007,7 +1032,7 @@
const AttributionSourceState attributionSource; //client attributionsource
const audio_session_t session; // audio session ID
const audio_port_handle_t portId;
- const audio_port_handle_t deviceId; // selected input device port ID
+ const DeviceIdVector deviceIds; // selected input device port IDs
bool active; // Playback/Capture is active or inactive
};
private:
@@ -1022,10 +1047,10 @@
AudioPlaybackClient(const audio_attributes_t attributes,
const audio_io_handle_t io, AttributionSourceState attributionSource,
const audio_session_t session, audio_port_handle_t portId,
- audio_port_handle_t deviceId, audio_stream_type_t stream,
+ DeviceIdVector deviceIds, audio_stream_type_t stream,
bool isSpatialized, audio_channel_mask_t channelMask) :
AudioClient(attributes, io, attributionSource, session, portId,
- deviceId), stream(stream), isSpatialized(isSpatialized),
+ deviceIds), stream(stream), isSpatialized(isSpatialized),
channelMask(channelMask) {}
~AudioPlaybackClient() override = default;
diff --git a/services/audiopolicy/service/AudioRecordClient.cpp b/services/audiopolicy/service/AudioRecordClient.cpp
index 733f0d6..01e557c 100644
--- a/services/audiopolicy/service/AudioRecordClient.cpp
+++ b/services/audiopolicy/service/AudioRecordClient.cpp
@@ -19,6 +19,7 @@
#include "AudioRecordClient.h"
#include "AudioPolicyService.h"
#include "binder/AppOpsManager.h"
+#include "mediautils/ServiceUtilities.h"
#include <android_media_audiopolicy.h>
#include <algorithm>
@@ -63,12 +64,12 @@
using iterator_category = std::forward_iterator_tag;
using difference_type = std::ptrdiff_t;
using value_type = AttributionSourceState;
- using pointer = const value_type*;
- using reference = const value_type&;
+ using pointer = value_type*;
+ using reference = value_type&;
AttrSourceItr() : mAttr(nullptr) {}
- AttrSourceItr(const AttributionSourceState& attr) : mAttr(&attr) {}
+ AttrSourceItr(AttributionSourceState& attr) : mAttr(&attr) {}
reference operator*() const { return *mAttr; }
pointer operator->() const { return mAttr; }
@@ -88,7 +89,7 @@
static AttrSourceItr end() { return AttrSourceItr{}; }
private:
- const AttributionSourceState * mAttr;
+ AttributionSourceState * mAttr;
};
} // anonymous
@@ -118,17 +119,31 @@
}
return new OpRecordAudioMonitor(attributionSource, virtualDeviceId, attr,
- getOpForSource(attr.source), commandThread);
+ getOpForSource(attr.source),
+ isRecordOpRequired(attr.source),
+ commandThread);
}
OpRecordAudioMonitor::OpRecordAudioMonitor(
const AttributionSourceState &attributionSource,
const uint32_t virtualDeviceId, const audio_attributes_t &attr,
int32_t appOp,
+ bool shouldMonitorRecord,
wp<AudioPolicyService::AudioCommandThread> commandThread) :
mHasOp(true), mAttributionSource(attributionSource),
mVirtualDeviceId(virtualDeviceId), mAttr(attr), mAppOp(appOp),
+ mShouldMonitorRecord(shouldMonitorRecord),
mCommandThread(commandThread) {
+ // The vdi is carried in the attribution source for appops perm checks.
+ // Overwrite the entire chain with the vdi associated with the mix this client is attached to
+ // This ensures the checkOps triggered by the listener are correct.
+ // Note: we still only register for events by package name, so we assume that we get events
+ // independent of vdi.
+ if (mVirtualDeviceId != 0 /* default vdi */) {
+ // TODO (atneya@) lift for const
+ std::for_each(AttrSourceItr{mAttributionSource}, AttrSourceItr::end(),
+ [&](auto& attr) { attr.deviceId = mVirtualDeviceId; });
+ }
}
OpRecordAudioMonitor::~OpRecordAudioMonitor()
@@ -160,7 +175,7 @@
});
};
reg(mAppOp);
- if (mAppOp != AppOpsManager::OP_RECORD_AUDIO) {
+ if (mAppOp != AppOpsManager::OP_RECORD_AUDIO && mShouldMonitorRecord) {
reg(AppOpsManager::OP_RECORD_AUDIO);
}
}
@@ -186,7 +201,7 @@
});
};
bool hasIt = check(mAppOp);
- if (mAppOp != AppOpsManager::OP_RECORD_AUDIO) {
+ if (mAppOp != AppOpsManager::OP_RECORD_AUDIO && mShouldMonitorRecord) {
hasIt = hasIt && check(AppOpsManager::OP_RECORD_AUDIO);
}
diff --git a/services/audiopolicy/service/AudioRecordClient.h b/services/audiopolicy/service/AudioRecordClient.h
index 76aff41..3553f1d 100644
--- a/services/audiopolicy/service/AudioRecordClient.h
+++ b/services/audiopolicy/service/AudioRecordClient.h
@@ -47,6 +47,7 @@
uint32_t virtualDeviceId,
const audio_attributes_t &attr,
int32_t appOp,
+ bool shouldMonitorRecord,
wp<AudioPolicyService::AudioCommandThread> commandThread);
void onFirstRef() override;
@@ -70,10 +71,11 @@
void checkOp(bool updateUidStates = false);
std::atomic_bool mHasOp;
- const AttributionSourceState mAttributionSource;
+ AttributionSourceState mAttributionSource;
const uint32_t mVirtualDeviceId;
const audio_attributes_t mAttr;
const int32_t mAppOp;
+ const bool mShouldMonitorRecord;
wp<AudioPolicyService::AudioCommandThread> mCommandThread;
};
@@ -85,17 +87,16 @@
AudioRecordClient(const audio_attributes_t attributes,
const audio_io_handle_t io,
const audio_session_t session, audio_port_handle_t portId,
- const audio_port_handle_t deviceId,
+ const DeviceIdVector deviceIds,
const AttributionSourceState& attributionSource,
const uint32_t virtualDeviceId,
- bool canCaptureOutput, bool canCaptureHotword,
+ bool canBypassConcurrentPolicy,
wp<AudioPolicyService::AudioCommandThread> commandThread) :
AudioClient(attributes, io, attributionSource,
- session, portId, deviceId), attributionSource(attributionSource),
+ session, portId, deviceIds), attributionSource(attributionSource),
virtualDeviceId(virtualDeviceId),
- startTimeNs(0), canCaptureOutput(canCaptureOutput),
- canCaptureHotword(canCaptureHotword), silenced(false),
- mOpRecordAudioMonitor(
+ startTimeNs(0), canBypassConcurrentPolicy(canBypassConcurrentPolicy),
+ silenced(false), mOpRecordAudioMonitor(
OpRecordAudioMonitor::createIfNeeded(attributionSource,
virtualDeviceId,
attributes, commandThread)) {
@@ -110,8 +111,7 @@
const AttributionSourceState attributionSource; // attribution source of client
const uint32_t virtualDeviceId; // id of the virtual device associated with the audio device
nsecs_t startTimeNs;
- const bool canCaptureOutput;
- const bool canCaptureHotword;
+ const bool canBypassConcurrentPolicy;
bool silenced;
private:
diff --git a/services/audiopolicy/service/SpatializerPoseController.cpp b/services/audiopolicy/service/SpatializerPoseController.cpp
index 874bde4..368dde0 100644
--- a/services/audiopolicy/service/SpatializerPoseController.cpp
+++ b/services/audiopolicy/service/SpatializerPoseController.cpp
@@ -22,6 +22,7 @@
#define LOG_TAG "SpatializerPoseController"
//#define LOG_NDEBUG 0
+#include <audio_utils/mutex.h>
#include <cutils/properties.h>
#include <sensor/Sensor.h>
#include <media/MediaMetricsItem.h>
@@ -131,20 +132,22 @@
Pose3f headToStage;
std::optional<HeadTrackingMode> modeIfChanged;
{
- std::unique_lock lock(mMutex);
- if (maxUpdatePeriod.has_value()) {
- mCondVar.wait_for(lock, maxUpdatePeriod.value(),
- [this] { return mShouldExit || mShouldCalculate; });
- } else {
- mCondVar.wait(lock, [this] { return mShouldExit || mShouldCalculate; });
+ audio_utils::unique_lock ul(mMutex);
+ while (true) {
+ if (mShouldExit) {
+ ALOGV("Exiting thread");
+ return;
+ }
+ if (mShouldCalculate) {
+ std::tie(headToStage, modeIfChanged) = calculate_l();
+ break;
+ }
+ if (maxUpdatePeriod.has_value()) {
+ mCondVar.wait_for(ul, maxUpdatePeriod.value());
+ } else {
+ mCondVar.wait(ul);
+ }
}
- if (mShouldExit) {
- ALOGV("Exiting thread");
- return;
- }
-
- // Calculate.
- std::tie(headToStage, modeIfChanged) = calculate_l();
}
// Invoke the callbacks outside the lock.
@@ -173,7 +176,7 @@
SpatializerPoseController::~SpatializerPoseController() {
{
- std::unique_lock lock(mMutex);
+ std::lock_guard lock(mMutex);
mShouldExit = true;
mCondVar.notify_all();
}
@@ -278,8 +281,10 @@
}
void SpatializerPoseController::waitUntilCalculated() {
- std::unique_lock lock(mMutex);
- mCondVar.wait(lock, [this] { return mCalculated; });
+ audio_utils::unique_lock ul(mMutex);
+ while (!mCalculated) {
+ mCondVar.wait(ul);
+ }
}
std::tuple<media::Pose3f, std::optional<media::HeadTrackingMode>>
@@ -358,14 +363,15 @@
}
}
-std::string SpatializerPoseController::toString(unsigned level) const {
+std::string SpatializerPoseController::toString(unsigned level) const NO_THREAD_SAFETY_ANALYSIS {
std::string prefixSpace(level, ' ');
std::string ss = prefixSpace + "SpatializerPoseController:\n";
bool needUnlock = false;
prefixSpace += ' ';
auto now = std::chrono::steady_clock::now();
- if (!mMutex.try_lock_until(now + media::kSpatializerDumpSysTimeOutInSecond)) {
+ if (!audio_utils::std_mutex_timed_lock(mMutex, std::chrono::nanoseconds(
+ media::kSpatializerDumpSysTimeOutInSecond).count())) {
ss.append(prefixSpace).append("try_lock failed, dumpsys maybe INACCURATE!\n");
} else {
needUnlock = true;
diff --git a/services/audiopolicy/service/SpatializerPoseController.h b/services/audiopolicy/service/SpatializerPoseController.h
index 7fa4f86..9955cd8 100644
--- a/services/audiopolicy/service/SpatializerPoseController.h
+++ b/services/audiopolicy/service/SpatializerPoseController.h
@@ -118,34 +118,34 @@
std::string toString(unsigned level) const;
private:
- mutable std::timed_mutex mMutex;
+ mutable std::mutex mMutex;
Listener* const mListener;
const std::chrono::microseconds mSensorPeriod;
- std::unique_ptr<media::HeadTrackingProcessor> mProcessor;
- int32_t mHeadSensor = media::SensorPoseProvider::INVALID_HANDLE;
- int32_t mScreenSensor = media::SensorPoseProvider::INVALID_HANDLE;
- std::optional<media::HeadTrackingMode> mActualMode;
- std::condition_variable_any mCondVar;
- bool mShouldCalculate = true;
- bool mShouldExit = false;
- bool mCalculated = false;
+ std::unique_ptr<media::HeadTrackingProcessor> mProcessor GUARDED_BY(mMutex);
+ int32_t mHeadSensor GUARDED_BY(mMutex) = media::SensorPoseProvider::INVALID_HANDLE;
+ int32_t mScreenSensor GUARDED_BY(mMutex) = media::SensorPoseProvider::INVALID_HANDLE;
+ std::optional<media::HeadTrackingMode> mActualMode GUARDED_BY(mMutex);
+ std::condition_variable mCondVar GUARDED_BY(mMutex);
+ bool mShouldCalculate GUARDED_BY(mMutex) = true;
+ bool mShouldExit GUARDED_BY(mMutex) = false;
+ bool mCalculated GUARDED_BY(mMutex) = false;
- media::VectorRecorder mHeadSensorRecorder{
+ media::VectorRecorder mHeadSensorRecorder GUARDED_BY(mMutex) {
8 /* vectorSize */, std::chrono::seconds(1), 10 /* maxLogLine */,
{ 3, 6, 7 } /* delimiterIdx */};
- media::VectorRecorder mHeadSensorDurableRecorder{
+ media::VectorRecorder mHeadSensorDurableRecorder GUARDED_BY(mMutex) {
8 /* vectorSize */, std::chrono::minutes(1), 10 /* maxLogLine */,
{ 3, 6, 7 } /* delimiterIdx */};
- media::VectorRecorder mScreenSensorRecorder{
+ media::VectorRecorder mScreenSensorRecorder GUARDED_BY(mMutex) {
4 /* vectorSize */, std::chrono::seconds(1), 10 /* maxLogLine */,
{ 3 } /* delimiterIdx */};
- media::VectorRecorder mScreenSensorDurableRecorder{
+ media::VectorRecorder mScreenSensorDurableRecorder GUARDED_BY(mMutex) {
4 /* vectorSize */, std::chrono::minutes(1), 10 /* maxLogLine */,
{ 3 } /* delimiterIdx */};
// Next to last variable as releasing this stops the callbacks
- std::unique_ptr<media::SensorPoseProvider> mPoseProvider;
+ std::unique_ptr<media::SensorPoseProvider> mPoseProvider GUARDED_BY(mMutex);
// It's important that mThread is the last variable in this class
// since we starts mThread in initializer list
@@ -158,7 +158,8 @@
* Calculates the new outputs and updates internal state. Must be called with the lock held.
* Returns values that should be passed to the respective callbacks.
*/
- std::tuple<media::Pose3f, std::optional<media::HeadTrackingMode>> calculate_l();
+ std::tuple<media::Pose3f, std::optional<media::HeadTrackingMode>> calculate_l()
+ REQUIRES(mMutex);
};
} // namespace android
diff --git a/services/audiopolicy/tests/Android.bp b/services/audiopolicy/tests/Android.bp
index 154b063..a6e5c75 100644
--- a/services/audiopolicy/tests/Android.bp
+++ b/services/audiopolicy/tests/Android.bp
@@ -21,6 +21,7 @@
],
shared_libs: [
+ "audiopolicy-aidl-cpp",
"framework-permission-aidl-cpp",
"libaudioclient",
"libaudiofoundation",
@@ -29,10 +30,10 @@
"libbase",
"libbinder",
"libcutils",
- "libcutils",
"libhidlbase",
"liblog",
"libmedia_helper",
+ "libstagefright_foundation",
"libutils",
"libxml2",
"server_configurable_flags",
@@ -41,7 +42,9 @@
static_libs: [
"android.media.audiopolicy-aconfig-cc",
"audioclient-types-aidl-cpp",
+ "com.android.media.audio-aconfig-cc",
"com.android.media.audioserver-aconfig-cc",
+ "libaudio_aidl_conversion_common_cpp",
"libaudiopolicycomponents",
"libflagtest",
"libgmock",
diff --git a/services/audiopolicy/tests/AudioPolicyTestClient.h b/services/audiopolicy/tests/AudioPolicyTestClient.h
index 9ddfd6c..8e5fb96 100644
--- a/services/audiopolicy/tests/AudioPolicyTestClient.h
+++ b/services/audiopolicy/tests/AudioPolicyTestClient.h
@@ -56,11 +56,13 @@
status_t closeInput(audio_io_handle_t /*input*/) override { return NO_INIT; }
status_t setStreamVolume(audio_stream_type_t /*stream*/,
float /*volume*/,
+ bool /*muted*/,
audio_io_handle_t /*output*/,
int /*delayMs*/) override { return NO_INIT; }
- status_t setPortsVolume(const std::vector<audio_port_handle_t>& /*ports*/, float /*volume*/,
- audio_io_handle_t /*output*/, int /*delayMs*/) override { return NO_INIT; }
+ status_t setPortsVolume(const std::vector<audio_port_handle_t> & /*ports*/, float /*volume*/,
+ bool /*muted*/, audio_io_handle_t /*output*/,
+ int /*delayMs*/) override { return NO_INIT; }
void setParameters(audio_io_handle_t /*ioHandle*/,
const String8& /*keyValuePairs*/,
@@ -120,6 +122,16 @@
const std::vector<media::TrackInternalMuteInfo>& /*tracksInternalMute*/) override {
return INVALID_OPERATION;
}
+
+ status_t getMmapPolicyInfos(
+ media::audio::common::AudioMMapPolicyType /*policyType*/,
+ std::vector<media::audio::common::AudioMMapPolicyInfo>* /*policyInfos*/) override {
+ return INVALID_OPERATION;
+ }
+ error::BinderResult<bool> checkPermissionForInput(const AttributionSourceState& /* attr */,
+ const PermissionReqs& /* req */) {
+ return true;
+ }
};
} // namespace android
diff --git a/services/audiopolicy/tests/AudioPolicyTestManager.h b/services/audiopolicy/tests/AudioPolicyTestManager.h
index bf45bb2..e30882c 100644
--- a/services/audiopolicy/tests/AudioPolicyTestManager.h
+++ b/services/audiopolicy/tests/AudioPolicyTestManager.h
@@ -46,6 +46,7 @@
using AudioPolicyManager::setDeviceConnectionState;
using AudioPolicyManager::deviceToAudioPort;
using AudioPolicyManager::handleDeviceConfigChange;
+ using AudioPolicyManager::getInputProfile;
uint32_t getAudioPortGeneration() const { return mAudioPortGeneration; }
HwModuleCollection getHwModules() const { return mHwModules; }
};
diff --git a/services/audiopolicy/tests/audiopolicymanager_tests.cpp b/services/audiopolicy/tests/audiopolicymanager_tests.cpp
index 1649099..a8f79c3 100644
--- a/services/audiopolicy/tests/audiopolicymanager_tests.cpp
+++ b/services/audiopolicy/tests/audiopolicymanager_tests.cpp
@@ -97,13 +97,6 @@
attributionSourceState.token = sp<BBinder>::make();
return attributionSourceState;
}
-
-bool equals(const audio_config_base_t& config1, const audio_config_base_t& config2) {
- return config1.format == config2.format
- && config1.sample_rate == config2.sample_rate
- && config1.channel_mask == config2.channel_mask;
-}
-
} // namespace
TEST(AudioPolicyConfigTest, DefaultConfigForTestsIsEmpty) {
@@ -191,7 +184,7 @@
// When explicit routing is needed, selectedDeviceId needs to be set as the wanted port
// id. Otherwise, selectedDeviceId needs to be initialized as AUDIO_PORT_HANDLE_NONE.
void getOutputForAttr(
- audio_port_handle_t *selectedDeviceId,
+ DeviceIdVector *selectedDeviceIds,
audio_format_t format,
audio_channel_mask_t channelMask,
int sampleRate,
@@ -284,7 +277,7 @@
}
void AudioPolicyManagerTest::getOutputForAttr(
- audio_port_handle_t *selectedDeviceId,
+ DeviceIdVector *selectedDeviceIds,
audio_format_t format,
audio_channel_mask_t channelMask,
int sampleRate,
@@ -310,11 +303,13 @@
bool isSpatialized;
bool isBitPerfectInternal;
float volume;
+ bool muted;
AttributionSourceState attributionSource = createAttributionSourceState(uid);
ASSERT_EQ(OK, mManager->getOutputForAttr(
&attr, output, session, &stream, attributionSource, &config, &flags,
- selectedDeviceId, portId, {}, &outputType, &isSpatialized,
- isBitPerfect == nullptr ? &isBitPerfectInternal : isBitPerfect, &volume));
+ selectedDeviceIds, portId, {}, &outputType, &isSpatialized,
+ isBitPerfect == nullptr ? &isBitPerfectInternal : isBitPerfect, &volume,
+ &muted));
ASSERT_NE(AUDIO_PORT_HANDLE_NONE, *portId);
ASSERT_NE(AUDIO_IO_HANDLE_NONE, *output);
}
@@ -339,12 +334,15 @@
if (!portId) portId = &localPortId;
*portId = AUDIO_PORT_HANDLE_NONE;
if (!virtualDeviceId) virtualDeviceId = 0;
- AudioPolicyInterface::input_type_t inputType;
AttributionSourceState attributionSource = createAttributionSourceState(/*uid=*/ 0);
- ASSERT_EQ(OK, mManager->getInputForAttr(
- &attr, input, riid, session, attributionSource, &config, flags,
- selectedDeviceId, &inputType, portId, virtualDeviceId));
- ASSERT_NE(AUDIO_PORT_HANDLE_NONE, *portId);
+ auto inputRes = mManager->getInputForAttr(attr, *input, *selectedDeviceId,
+ config, flags, riid, session, attributionSource);
+ ASSERT_TRUE(inputRes.has_value());
+ ASSERT_NE(inputRes->portId, AUDIO_PORT_HANDLE_NONE);
+ *input = inputRes->input;
+ if (selectedDeviceId != nullptr) *selectedDeviceId = inputRes->selectedDeviceId;
+ *portId = inputRes->portId;
+ if (virtualDeviceId != nullptr) *virtualDeviceId = inputRes->virtualDeviceId;
}
void AudioPolicyManagerTest::getAudioPorts(audio_port_type_t type, audio_port_role_t role,
@@ -646,42 +644,42 @@
TEST_P(AudioPolicyManagerTestMsd, GetOutputForAttrEncodedRoutesToMsd) {
const PatchCountCheck patchCount = snapshotPatchCount();
- audio_port_handle_t selectedDeviceId = AUDIO_PORT_HANDLE_NONE;
- getOutputForAttr(&selectedDeviceId, AUDIO_FORMAT_AC3, AUDIO_CHANNEL_OUT_5POINT1,
+ DeviceIdVector selectedDeviceIds;
+ getOutputForAttr(&selectedDeviceIds, AUDIO_FORMAT_AC3, AUDIO_CHANNEL_OUT_5POINT1,
k48000SamplingRate, AUDIO_OUTPUT_FLAG_DIRECT);
- ASSERT_EQ(selectedDeviceId, mDefaultOutputDevice->getId());
+ ASSERT_EQ(mDefaultOutputDevice->getId(), selectedDeviceIds[0]);
ASSERT_EQ(mExpectedAudioPatchCount, patchCount.deltaFromSnapshot());
}
TEST_P(AudioPolicyManagerTestMsd, GetOutputForAttrPcmRoutesToMsd) {
const PatchCountCheck patchCount = snapshotPatchCount();
- audio_port_handle_t selectedDeviceId = AUDIO_PORT_HANDLE_NONE;
- getOutputForAttr(&selectedDeviceId,
+ DeviceIdVector selectedDeviceIds;
+ getOutputForAttr(&selectedDeviceIds,
AUDIO_FORMAT_PCM_16_BIT, AUDIO_CHANNEL_OUT_STEREO, k48000SamplingRate);
- ASSERT_EQ(selectedDeviceId, mDefaultOutputDevice->getId());
+ ASSERT_EQ(mDefaultOutputDevice->getId(), selectedDeviceIds[0]);
ASSERT_EQ(mExpectedAudioPatchCount - 1, patchCount.deltaFromSnapshot());
}
TEST_P(AudioPolicyManagerTestMsd, GetOutputForAttrEncodedPlusPcmRoutesToMsd) {
const PatchCountCheck patchCount = snapshotPatchCount();
- audio_port_handle_t selectedDeviceId = AUDIO_PORT_HANDLE_NONE;
- getOutputForAttr(&selectedDeviceId, AUDIO_FORMAT_AC3, AUDIO_CHANNEL_OUT_5POINT1,
+ DeviceIdVector selectedDeviceIds;
+ getOutputForAttr(&selectedDeviceIds, AUDIO_FORMAT_AC3, AUDIO_CHANNEL_OUT_5POINT1,
k48000SamplingRate, AUDIO_OUTPUT_FLAG_DIRECT);
- ASSERT_EQ(selectedDeviceId, mDefaultOutputDevice->getId());
+ ASSERT_EQ(mDefaultOutputDevice->getId(), selectedDeviceIds[0]);
ASSERT_EQ(mExpectedAudioPatchCount, patchCount.deltaFromSnapshot());
- selectedDeviceId = AUDIO_PORT_HANDLE_NONE;
- getOutputForAttr(&selectedDeviceId,
+ selectedDeviceIds.clear();
+ getOutputForAttr(&selectedDeviceIds,
AUDIO_FORMAT_PCM_16_BIT, AUDIO_CHANNEL_OUT_STEREO, k48000SamplingRate);
- ASSERT_EQ(selectedDeviceId, mDefaultOutputDevice->getId());
+ ASSERT_EQ(mDefaultOutputDevice->getId(), selectedDeviceIds[0]);
ASSERT_EQ(mExpectedAudioPatchCount, patchCount.deltaFromSnapshot());
}
TEST_P(AudioPolicyManagerTestMsd, GetOutputForAttrUnsupportedFormatBypassesMsd) {
const PatchCountCheck patchCount = snapshotPatchCount();
- audio_port_handle_t selectedDeviceId = AUDIO_PORT_HANDLE_NONE;
- getOutputForAttr(&selectedDeviceId, AUDIO_FORMAT_DTS, AUDIO_CHANNEL_OUT_5POINT1,
+ DeviceIdVector selectedDeviceIds;
+ getOutputForAttr(&selectedDeviceIds, AUDIO_FORMAT_DTS, AUDIO_CHANNEL_OUT_5POINT1,
k48000SamplingRate, AUDIO_OUTPUT_FLAG_DIRECT);
- ASSERT_NE(selectedDeviceId, mMsdOutputDevice->getId());
+ ASSERT_NE(mMsdOutputDevice->getId(), selectedDeviceIds[0]);
ASSERT_EQ(1, patchCount.deltaFromSnapshot());
}
@@ -689,32 +687,33 @@
// Switch between formats that are supported and not supported by MSD.
{
const PatchCountCheck patchCount = snapshotPatchCount();
- audio_port_handle_t selectedDeviceId = AUDIO_PORT_HANDLE_NONE;
+ DeviceIdVector selectedDeviceIds;
audio_port_handle_t portId;
- getOutputForAttr(&selectedDeviceId, AUDIO_FORMAT_AC3, AUDIO_CHANNEL_OUT_5POINT1,
+ getOutputForAttr(&selectedDeviceIds, AUDIO_FORMAT_AC3, AUDIO_CHANNEL_OUT_5POINT1,
k48000SamplingRate, AUDIO_OUTPUT_FLAG_DIRECT, nullptr /*output*/, &portId);
- ASSERT_EQ(selectedDeviceId, mDefaultOutputDevice->getId());
+ ASSERT_EQ(mDefaultOutputDevice->getId(), selectedDeviceIds[0]);
ASSERT_EQ(mExpectedAudioPatchCount, patchCount.deltaFromSnapshot());
mManager->releaseOutput(portId);
ASSERT_EQ(mExpectedAudioPatchCount - 1, patchCount.deltaFromSnapshot());
}
{
const PatchCountCheck patchCount = snapshotPatchCount();
- audio_port_handle_t selectedDeviceId = AUDIO_PORT_HANDLE_NONE;
+ DeviceIdVector selectedDeviceIds;
audio_port_handle_t portId;
- getOutputForAttr(&selectedDeviceId, AUDIO_FORMAT_DTS, AUDIO_CHANNEL_OUT_5POINT1,
+ getOutputForAttr(&selectedDeviceIds, AUDIO_FORMAT_DTS, AUDIO_CHANNEL_OUT_5POINT1,
k48000SamplingRate, AUDIO_OUTPUT_FLAG_DIRECT, nullptr /*output*/, &portId);
- ASSERT_NE(selectedDeviceId, mMsdOutputDevice->getId());
+ ASSERT_GT(selectedDeviceIds.size(), 0);
+ ASSERT_NE(mMsdOutputDevice->getId(), selectedDeviceIds[0]);
ASSERT_EQ(-static_cast<int>(mExpectedAudioPatchCount) + 2, patchCount.deltaFromSnapshot());
mManager->releaseOutput(portId);
ASSERT_EQ(0, patchCount.deltaFromSnapshot());
}
{
const PatchCountCheck patchCount = snapshotPatchCount();
- audio_port_handle_t selectedDeviceId = AUDIO_PORT_HANDLE_NONE;
- getOutputForAttr(&selectedDeviceId, AUDIO_FORMAT_AC3, AUDIO_CHANNEL_OUT_5POINT1,
+ DeviceIdVector selectedDeviceIds;
+ getOutputForAttr(&selectedDeviceIds, AUDIO_FORMAT_AC3, AUDIO_CHANNEL_OUT_5POINT1,
k48000SamplingRate, AUDIO_OUTPUT_FLAG_DIRECT);
- ASSERT_EQ(selectedDeviceId, mDefaultOutputDevice->getId());
+ ASSERT_EQ(mDefaultOutputDevice->getId(), selectedDeviceIds[0]);
ASSERT_EQ(1, patchCount.deltaFromSnapshot());
}
}
@@ -750,7 +749,7 @@
TEST_P(AudioPolicyManagerTestMsd, GetDirectProfilesForAttributesWithMsd) {
const audio_attributes_t attr = {
AUDIO_CONTENT_TYPE_UNKNOWN, AUDIO_USAGE_UNKNOWN,
- AUDIO_SOURCE_DEFAULT, AUDIO_FLAG_NONE, ""};
+ AUDIO_SOURCE_INVALID, AUDIO_FLAG_NONE, ""};
// count expected direct profiles for the default device
int countDirectProfilesPrimary = 0;
@@ -1136,14 +1135,14 @@
&mediaAttr, usbPortId, uid, &mixerAttributes[0]));
audio_io_handle_t output = AUDIO_IO_HANDLE_NONE;
- audio_port_handle_t selectedDeviceId = AUDIO_PORT_HANDLE_NONE;
+ DeviceIdVector selectedDeviceIds;
audio_port_handle_t portId = AUDIO_PORT_HANDLE_NONE;
- getOutputForAttr(&selectedDeviceId, AUDIO_FORMAT_PCM_16_BIT, AUDIO_CHANNEL_OUT_STEREO,
+ getOutputForAttr(&selectedDeviceIds, AUDIO_FORMAT_PCM_16_BIT, AUDIO_CHANNEL_OUT_STEREO,
k48000SamplingRate, AUDIO_OUTPUT_FLAG_NONE, &output, &portId, mediaAttr,
AUDIO_SESSION_NONE, uid);
status_t status = mManager->startOutput(portId);
if (status == DEAD_OBJECT) {
- getOutputForAttr(&selectedDeviceId, AUDIO_FORMAT_PCM_16_BIT, AUDIO_CHANNEL_OUT_STEREO,
+ getOutputForAttr(&selectedDeviceIds, AUDIO_FORMAT_PCM_16_BIT, AUDIO_CHANNEL_OUT_STEREO,
k48000SamplingRate, AUDIO_OUTPUT_FLAG_NONE, &output, &portId, mediaAttr,
AUDIO_SESSION_NONE, uid);
status = mManager->startOutput(portId);
@@ -1170,6 +1169,56 @@
"", "", AUDIO_FORMAT_LDAC));
}
+template <typename T>
+bool hasDuplicates(const T& container) {
+ return std::unordered_set<typename T::value_type>(container.begin(),
+ container.end()).size() != container.size();
+}
+
+TEST_F(AudioPolicyManagerTestWithConfigurationFile, UniqueSelectedDeviceIds) {
+ mClient->addSupportedFormat(AUDIO_FORMAT_PCM_16_BIT);
+ mClient->addSupportedChannelMask(AUDIO_CHANNEL_OUT_STEREO);
+ ASSERT_EQ(NO_ERROR, mManager->setDeviceConnectionState(AUDIO_DEVICE_OUT_USB_DEVICE,
+ AUDIO_POLICY_DEVICE_STATE_AVAILABLE,
+ "", "", AUDIO_FORMAT_DEFAULT));
+ auto devices = mManager->getAvailableOutputDevices();
+ audio_port_handle_t usbPortId = AUDIO_PORT_HANDLE_NONE;
+ audio_port_handle_t speakerPortId = AUDIO_PORT_HANDLE_NONE;
+ for (auto device : devices) {
+ if (device->type() == AUDIO_DEVICE_OUT_USB_DEVICE) {
+ usbPortId = device->getId();
+ }
+ if (device->type() == AUDIO_DEVICE_OUT_SPEAKER) {
+ speakerPortId = device->getId();
+ }
+ }
+ EXPECT_NE(AUDIO_PORT_HANDLE_NONE, usbPortId);
+ EXPECT_NE(AUDIO_PORT_HANDLE_NONE, speakerPortId);
+
+ const uid_t uid = 1234;
+ const audio_attributes_t mediaAttr = {
+ .content_type = AUDIO_CONTENT_TYPE_SONIFICATION,
+ .usage = AUDIO_USAGE_ALARM,
+ };
+
+ audio_io_handle_t output = AUDIO_IO_HANDLE_NONE;
+ DeviceIdVector selectedDeviceIds;
+ audio_port_handle_t portId = AUDIO_PORT_HANDLE_NONE;
+ ASSERT_NO_FATAL_FAILURE(getOutputForAttr(&selectedDeviceIds, AUDIO_FORMAT_PCM_16_BIT,
+ AUDIO_CHANNEL_OUT_STEREO, k48000SamplingRate, AUDIO_OUTPUT_FLAG_NONE, &output,
+ &portId, mediaAttr, AUDIO_SESSION_NONE, uid));
+ EXPECT_FALSE(selectedDeviceIds.empty());
+ EXPECT_NE(std::find(selectedDeviceIds.begin(), selectedDeviceIds.end(), usbPortId),
+ selectedDeviceIds.end());
+ EXPECT_NE(std::find(selectedDeviceIds.begin(), selectedDeviceIds.end(), speakerPortId),
+ selectedDeviceIds.end());
+ EXPECT_FALSE(hasDuplicates(selectedDeviceIds));
+
+ ASSERT_EQ(NO_ERROR, mManager->setDeviceConnectionState(AUDIO_DEVICE_OUT_USB_DEVICE,
+ AUDIO_POLICY_DEVICE_STATE_UNAVAILABLE,
+ "", "", AUDIO_FORMAT_DEFAULT));
+}
+
TEST_F(AudioPolicyManagerTestWithConfigurationFile, PreferExactConfigForInput) {
const audio_channel_mask_t deviceChannelMask = AUDIO_CHANNEL_IN_3POINT1;
mClient->addSupportedFormat(AUDIO_FORMAT_PCM_16_BIT);
@@ -1178,40 +1227,39 @@
AUDIO_POLICY_DEVICE_STATE_AVAILABLE,
"", "", AUDIO_FORMAT_DEFAULT));
- audio_port_handle_t selectedDeviceId = AUDIO_PORT_HANDLE_NONE;
+ const audio_port_handle_t requestedDeviceId = AUDIO_PORT_HANDLE_NONE;
+ const audio_io_handle_t requestedInput = AUDIO_PORT_HANDLE_NONE;
+ const AttributionSourceState attributionSource = createAttributionSourceState(/*uid=*/ 0);
+ AudioPolicyInterface::input_type_t inputType;
+
audio_attributes_t attr = {AUDIO_CONTENT_TYPE_UNKNOWN, AUDIO_USAGE_UNKNOWN,
AUDIO_SOURCE_VOICE_COMMUNICATION,AUDIO_FLAG_NONE, ""};
- AudioPolicyInterface::input_type_t inputType;
- audio_io_handle_t input = AUDIO_PORT_HANDLE_NONE;
- AttributionSourceState attributionSource = createAttributionSourceState(/*uid=*/ 0);
audio_config_base_t requestedConfig = {
.sample_rate = k48000SamplingRate,
.channel_mask = AUDIO_CHANNEL_IN_STEREO,
.format = AUDIO_FORMAT_PCM_16_BIT,
};
- audio_config_base_t config = requestedConfig;
- audio_port_handle_t portId = AUDIO_PORT_HANDLE_NONE;
- uint32_t *virtualDeviceId = 0;
- ASSERT_EQ(OK, mManager->getInputForAttr(
- &attr, &input, 1 /*riid*/, AUDIO_SESSION_NONE, attributionSource, &config,
- AUDIO_INPUT_FLAG_NONE,
- &selectedDeviceId, &inputType, &portId, virtualDeviceId));
- ASSERT_NE(AUDIO_PORT_HANDLE_NONE, portId);
- ASSERT_TRUE(equals(requestedConfig, config));
+ auto inputRes = mManager->getInputForAttr(attr, requestedInput, requestedDeviceId,
+ requestedConfig, AUDIO_INPUT_FLAG_NONE, 1 /*riid*/,
+ AUDIO_SESSION_NONE, attributionSource);
+ ASSERT_TRUE(inputRes.has_value());
+ ASSERT_NE(inputRes->portId, AUDIO_PORT_HANDLE_NONE);
+ ASSERT_EQ(VALUE_OR_FATAL(legacy2aidl_audio_config_base_t_AudioConfigBase(
+ requestedConfig, true /* isInput */)),
+ inputRes->config);
attr = {AUDIO_CONTENT_TYPE_UNKNOWN, AUDIO_USAGE_UNKNOWN,
AUDIO_SOURCE_VOICE_COMMUNICATION, AUDIO_FLAG_NONE, ""};
requestedConfig.channel_mask = deviceChannelMask;
- config = requestedConfig;
- selectedDeviceId = AUDIO_PORT_HANDLE_NONE;
- input = AUDIO_PORT_HANDLE_NONE;
- portId = AUDIO_PORT_HANDLE_NONE;
- ASSERT_EQ(OK, mManager->getInputForAttr(
- &attr, &input, 1 /*riid*/, AUDIO_SESSION_NONE, attributionSource, &config,
- AUDIO_INPUT_FLAG_NONE,
- &selectedDeviceId, &inputType, &portId, virtualDeviceId));
- ASSERT_NE(AUDIO_PORT_HANDLE_NONE, portId);
- ASSERT_TRUE(equals(requestedConfig, config));
+
+ inputRes = mManager->getInputForAttr(attr, requestedInput, requestedDeviceId, requestedConfig,
+ AUDIO_INPUT_FLAG_NONE, 1 /*riid*/, AUDIO_SESSION_NONE,
+ attributionSource);
+ ASSERT_TRUE(inputRes.has_value());
+ ASSERT_NE(inputRes->portId, AUDIO_PORT_HANDLE_NONE);
+ ASSERT_EQ(VALUE_OR_FATAL(legacy2aidl_audio_config_base_t_AudioConfigBase(requestedConfig,
+ true /* isInput */)),
+ inputRes->config);
ASSERT_EQ(NO_ERROR, mManager->setDeviceConnectionState(AUDIO_DEVICE_IN_USB_DEVICE,
AUDIO_POLICY_DEVICE_STATE_UNAVAILABLE,
@@ -1247,6 +1295,60 @@
EXPECT_EQ(streamCountBefore, mClient->getOpenedInputsCount());
}
+TEST_F(AudioPolicyManagerTestWithConfigurationFile, UpdateConfigFromInexactProfile) {
+ const audio_format_t expectedFormat = AUDIO_FORMAT_PCM_16_BIT;
+ const uint32_t expectedSampleRate = 48000;
+ const audio_channel_mask_t expectedChannelMask = AUDIO_CHANNEL_IN_STEREO;
+ const std::string expectedIOProfile = "primary input";
+
+ auto devices = mManager->getAvailableInputDevices();
+ sp<DeviceDescriptor> mic = nullptr;
+ for (auto device : devices) {
+ if (device->type() == AUDIO_DEVICE_IN_BUILTIN_MIC) {
+ mic = device;
+ break;
+ }
+ }
+ EXPECT_NE(nullptr, mic);
+
+ audio_format_t requestedFormat = AUDIO_FORMAT_PCM_16_BIT;
+ uint32_t requestedSampleRate = 44100;
+ audio_channel_mask_t requestedChannelMask = AUDIO_CHANNEL_IN_STEREO;
+ auto profile = mManager->getInputProfile(
+ mic, requestedSampleRate, requestedFormat, requestedChannelMask, AUDIO_INPUT_FLAG_NONE);
+ EXPECT_EQ(expectedIOProfile, profile->getName());
+ EXPECT_EQ(expectedFormat, requestedFormat);
+ EXPECT_EQ(expectedSampleRate, requestedSampleRate);
+ EXPECT_EQ(expectedChannelMask, requestedChannelMask);
+}
+
+TEST_F(AudioPolicyManagerTestWithConfigurationFile, MatchesMoreInputFlagsWhenPossible) {
+ const audio_format_t expectedFormat = AUDIO_FORMAT_PCM_16_BIT;
+ const uint32_t expectedSampleRate = 48000;
+ const audio_channel_mask_t expectedChannelMask = AUDIO_CHANNEL_IN_STEREO;
+ const std::string expectedIOProfile = "mixport_fast_input";
+
+ auto devices = mManager->getAvailableInputDevices();
+ sp<DeviceDescriptor> mic = nullptr;
+ for (auto device : devices) {
+ if (device->type() == AUDIO_DEVICE_IN_BUILTIN_MIC) {
+ mic = device;
+ break;
+ }
+ }
+ EXPECT_NE(nullptr, mic);
+
+ audio_format_t requestedFormat = AUDIO_FORMAT_PCM_24_BIT_PACKED;
+ uint32_t requestedSampleRate = 48000;
+ audio_channel_mask_t requestedChannelMask = AUDIO_CHANNEL_IN_STEREO;
+ auto profile = mManager->getInputProfile(
+ mic, requestedSampleRate, requestedFormat, requestedChannelMask, AUDIO_INPUT_FLAG_FAST);
+ EXPECT_EQ(expectedIOProfile, profile->getName());
+ EXPECT_EQ(expectedFormat, requestedFormat);
+ EXPECT_EQ(expectedSampleRate, requestedSampleRate);
+ EXPECT_EQ(expectedChannelMask, requestedChannelMask);
+}
+
class AudioPolicyManagerTestDynamicPolicy : public AudioPolicyManagerTestWithConfigurationFile {
protected:
void TearDown() override;
@@ -1897,14 +1999,15 @@
const DPTestParam param = GetParam();
const audio_attributes_t& attr = param.attributes;
- audio_port_handle_t playbackRoutedPortId = AUDIO_PORT_HANDLE_NONE;
- getOutputForAttr(&playbackRoutedPortId, AUDIO_FORMAT_PCM_16_BIT, AUDIO_CHANNEL_OUT_STEREO,
+ DeviceIdVector playbackRoutedPortIds;
+ getOutputForAttr(&playbackRoutedPortIds, AUDIO_FORMAT_PCM_16_BIT, AUDIO_CHANNEL_OUT_STEREO,
k48000SamplingRate, AUDIO_OUTPUT_FLAG_NONE, nullptr /*output*/, nullptr /*portId*/,
attr, param.session);
if (param.expected_match) {
- EXPECT_EQ(mInjectionPort.id, playbackRoutedPortId);
+ ASSERT_EQ(mInjectionPort.id, playbackRoutedPortIds[0]);
} else {
- EXPECT_NE(mInjectionPort.id, playbackRoutedPortId);
+ ASSERT_GT(playbackRoutedPortIds.size(), 0);
+ ASSERT_NE(mInjectionPort.id, playbackRoutedPortIds[0]);
}
}
@@ -2073,13 +2176,14 @@
audio_config_t audioConfig;
audio_io_handle_t mOutput;
audio_stream_type_t mStream = AUDIO_STREAM_DEFAULT;
- audio_port_handle_t mSelectedDeviceId = AUDIO_PORT_HANDLE_NONE;
+ DeviceIdVector mSelectedDeviceIds;
audio_port_handle_t mPortId = AUDIO_PORT_HANDLE_NONE;
AudioPolicyInterface::output_type_t mOutputType;
audio_attributes_t attr = AUDIO_ATTRIBUTES_INITIALIZER;
bool mIsSpatialized;
bool mIsBitPerfect;
float mVolume;
+ bool mMuted;
};
TEST_P(AudioPolicyManagerTestMMapPlaybackRerouting, MmapPlaybackStreamMatchingLoopbackDapMixFails) {
@@ -2097,8 +2201,9 @@
ASSERT_EQ(INVALID_OPERATION,
mManager->getOutputForAttr(&attr, &mOutput, AUDIO_SESSION_NONE, &mStream,
createAttributionSourceState(testUid), &audioConfig,
- &outputFlags, &mSelectedDeviceId, &mPortId, {},
- &mOutputType, &mIsSpatialized, &mIsBitPerfect, &mVolume));
+ &outputFlags, &mSelectedDeviceIds, &mPortId, {},
+ &mOutputType, &mIsSpatialized, &mIsBitPerfect, &mVolume,
+ &mMuted));
}
TEST_P(AudioPolicyManagerTestMMapPlaybackRerouting,
@@ -2116,8 +2221,9 @@
ASSERT_EQ(NO_ERROR,
mManager->getOutputForAttr(&attr, &mOutput, AUDIO_SESSION_NONE, &mStream,
createAttributionSourceState(testUid), &audioConfig,
- &outputFlags, &mSelectedDeviceId, &mPortId, {},
- &mOutputType, &mIsSpatialized, &mIsBitPerfect, &mVolume));
+ &outputFlags, &mSelectedDeviceIds, &mPortId, {},
+ &mOutputType, &mIsSpatialized, &mIsBitPerfect, &mVolume,
+ &mMuted));
}
TEST_F(AudioPolicyManagerTestMMapPlaybackRerouting,
@@ -2147,9 +2253,9 @@
ASSERT_EQ(NO_ERROR,
mManager->getOutputForAttr(&attr, &mOutput, AUDIO_SESSION_NONE, &mStream,
createAttributionSourceState(testUid), &audioConfig,
- &outputFlags, &mSelectedDeviceId, &mPortId, {},
- &mOutputType, &mIsSpatialized, &mIsBitPerfect, &mVolume));
- ASSERT_EQ(usbDevicePort.id, mSelectedDeviceId);
+ &outputFlags, &mSelectedDeviceIds, &mPortId, {},
+ &mOutputType, &mIsSpatialized, &mIsBitPerfect, &mVolume,
+ &mMuted));
auto outputDesc = mManager->getOutputs().valueFor(mOutput);
ASSERT_NE(nullptr, outputDesc);
ASSERT_EQ(mmapDirectFlags, outputDesc->getFlags().output);
@@ -2163,9 +2269,10 @@
ASSERT_EQ(NO_ERROR,
mManager->getOutputForAttr(&attr, &mOutput, AUDIO_SESSION_NONE, &mStream,
createAttributionSourceState(testUid), &audioConfig,
- &outputFlags, &mSelectedDeviceId, &mPortId, {},
- &mOutputType, &mIsSpatialized, &mIsBitPerfect, &mVolume));
- ASSERT_EQ(usbDevicePort.id, mSelectedDeviceId);
+ &outputFlags, &mSelectedDeviceIds, &mPortId, {},
+ &mOutputType, &mIsSpatialized, &mIsBitPerfect, &mVolume,
+ &mMuted));
+ ASSERT_EQ(usbDevicePort.id, mSelectedDeviceIds[0]);
outputDesc = mManager->getOutputs().valueFor(mOutput);
ASSERT_NE(nullptr, outputDesc);
ASSERT_NE(mmapDirectFlags, outputDesc->getFlags().output);
@@ -2192,8 +2299,9 @@
ASSERT_EQ(INVALID_OPERATION,
mManager->getOutputForAttr(&attr, &mOutput, AUDIO_SESSION_NONE, &mStream,
createAttributionSourceState(testUid), &audioConfig,
- &outputFlags, &mSelectedDeviceId, &mPortId, {},
- &mOutputType, &mIsSpatialized, &mIsBitPerfect, &mVolume));
+ &outputFlags, &mSelectedDeviceIds, &mPortId, {},
+ &mOutputType, &mIsSpatialized, &mIsBitPerfect, &mVolume,
+ &mMuted));
}
INSTANTIATE_TEST_SUITE_P(
@@ -2236,13 +2344,13 @@
ASSERT_TRUE(findDevicePort(AUDIO_PORT_ROLE_SINK, AUDIO_DEVICE_OUT_REMOTE_SUBMIX,
mMixAddress, &injectionPort));
- audio_port_handle_t selectedDeviceId = AUDIO_PORT_HANDLE_NONE;
+ DeviceIdVector selectedDeviceIds;
audio_usage_t usage = AUDIO_USAGE_VIRTUAL_SOURCE;
audio_attributes_t attr =
{AUDIO_CONTENT_TYPE_UNKNOWN, usage, AUDIO_SOURCE_DEFAULT, AUDIO_FLAG_NONE, ""};
std::string tags = std::string("addr=") + mMixAddress;
strncpy(attr.tags, tags.c_str(), AUDIO_ATTRIBUTES_TAGS_MAX_SIZE - 1);
- getOutputForAttr(&selectedDeviceId, AUDIO_FORMAT_PCM_16_BIT, AUDIO_CHANNEL_OUT_STEREO,
+ getOutputForAttr(&selectedDeviceIds, AUDIO_FORMAT_PCM_16_BIT, AUDIO_CHANNEL_OUT_STEREO,
k48000SamplingRate, AUDIO_OUTPUT_FLAG_NONE, nullptr /*output*/, &mPortId, attr);
ASSERT_EQ(NO_ERROR, mManager->startOutput(mPortId));
ASSERT_EQ(injectionPort.id, getDeviceIdFromPatch(mClient->getLastAddedPatch()));
@@ -2440,19 +2548,21 @@
? AUDIO_PORT_ROLE_SINK : AUDIO_PORT_ROLE_SOURCE;
ASSERT_TRUE(findDevicePort(role, type, address, &devicePort));
- audio_port_handle_t routedPortId = devicePort.id;
// Try start input or output according to the device type
if (audio_is_output_devices(type)) {
- getOutputForAttr(&routedPortId, AUDIO_FORMAT_PCM_16_BIT, AUDIO_CHANNEL_OUT_STEREO,
+ DeviceIdVector routedPortIds = { devicePort.id };
+ getOutputForAttr(&routedPortIds, AUDIO_FORMAT_PCM_16_BIT, AUDIO_CHANNEL_OUT_STEREO,
k48000SamplingRate, AUDIO_OUTPUT_FLAG_NONE);
+ ASSERT_EQ(devicePort.id, routedPortIds[0]);
} else if (audio_is_input_device(type)) {
+ audio_port_handle_t routedPortId = devicePort.id;
RecordingActivityTracker tracker;
audio_io_handle_t input = AUDIO_PORT_HANDLE_NONE;
getInputForAttr({}, &input, AUDIO_SESSION_NONE, tracker.getRiid(), &routedPortId,
AUDIO_FORMAT_PCM_16_BIT, AUDIO_CHANNEL_IN_STEREO, k48000SamplingRate,
AUDIO_INPUT_FLAG_NONE);
+ ASSERT_EQ(devicePort.id, routedPortId);
}
- ASSERT_EQ(devicePort.id, routedPortId);
ASSERT_EQ(NO_ERROR, mManager->setDeviceConnectionState(
type, AUDIO_POLICY_DEVICE_STATE_UNAVAILABLE,
@@ -2713,24 +2823,24 @@
AUDIO_DEVICE_OUT_BUS, sCarBusMediaOutput, audioConfig);
ASSERT_EQ(NO_ERROR, ret);
- audio_port_handle_t selectedDeviceId = AUDIO_PORT_HANDLE_NONE;
+ DeviceIdVector selectedDeviceIds;
audio_io_handle_t output;
audio_port_handle_t portId;
- getOutputForAttr(&selectedDeviceId, AUDIO_FORMAT_E_AC3_JOC, AUDIO_CHANNEL_OUT_5POINT1,
+ getOutputForAttr(&selectedDeviceIds, AUDIO_FORMAT_E_AC3_JOC, AUDIO_CHANNEL_OUT_5POINT1,
k48000SamplingRate, AUDIO_OUTPUT_FLAG_DIRECT, &output, &portId);
- ASSERT_NE(AUDIO_PORT_HANDLE_NONE, selectedDeviceId);
+ ASSERT_GT(selectedDeviceIds.size(), 0);
sp<SwAudioOutputDescriptor> outDesc = mManager->getOutputs().valueFor(output);
ASSERT_NE(nullptr, outDesc.get());
ASSERT_EQ(AUDIO_FORMAT_E_AC3_JOC, outDesc->getFormat());
ASSERT_EQ(AUDIO_CHANNEL_OUT_5POINT1, outDesc->getChannelMask());
ASSERT_EQ(k48000SamplingRate, outDesc->getSamplingRate());
- selectedDeviceId = AUDIO_PORT_HANDLE_NONE;
+ selectedDeviceIds.clear();
output = AUDIO_IO_HANDLE_NONE;
portId = AUDIO_PORT_HANDLE_NONE;
- getOutputForAttr(&selectedDeviceId, AUDIO_FORMAT_PCM_16_BIT, AUDIO_CHANNEL_OUT_7POINT1POINT4,
+ getOutputForAttr(&selectedDeviceIds, AUDIO_FORMAT_PCM_16_BIT, AUDIO_CHANNEL_OUT_7POINT1POINT4,
k48000SamplingRate, AUDIO_OUTPUT_FLAG_DIRECT, &output, &portId);
- ASSERT_NE(AUDIO_PORT_HANDLE_NONE, selectedDeviceId);
+ ASSERT_GT(selectedDeviceIds.size(), 0);
outDesc = mManager->getOutputs().valueFor(output);
ASSERT_NE(nullptr, outDesc.get());
ASSERT_EQ(AUDIO_FORMAT_PCM_16_BIT, outDesc->getFormat());
@@ -2750,25 +2860,25 @@
AUDIO_DEVICE_OUT_BUS, sCarBusMediaOutput, audioConfig, mediaMatchCriteria);
ASSERT_EQ(NO_ERROR, ret);
std::vector<AudioMixMatchCriterion> navMatchCriteria = {
- createUsageCriterion(AUDIO_USAGE_ASSISTANCE_NAVIGATION_GUIDANCE,
+ createUsageCriterion(AUDIO_USAGE_ASSISTANCE_NAVIGATION_GUIDANCE,
/*exclude=*/ false)};
ret = addPolicyMix(MIX_TYPE_PLAYERS, MIX_ROUTE_FLAG_RENDER,
- AUDIO_DEVICE_OUT_BUS, sCarBusNavigationOutput, audioConfig, navMatchCriteria);
+ AUDIO_DEVICE_OUT_BUS, sCarBusNavigationOutput, audioConfig, navMatchCriteria);
ASSERT_EQ(NO_ERROR, ret);
audio_port_v7 mediaDevicePort;
ASSERT_TRUE(findDevicePort(AUDIO_PORT_ROLE_SINK, AUDIO_DEVICE_OUT_BUS,
sCarBusMediaOutput, &mediaDevicePort));
- audio_port_handle_t selectedDeviceId = AUDIO_PORT_HANDLE_NONE;
+ DeviceIdVector selectedDeviceIds;
audio_io_handle_t output;
audio_port_handle_t portId;
const audio_attributes_t mediaAttribute = {
AUDIO_CONTENT_TYPE_UNKNOWN, AUDIO_USAGE_MEDIA,
AUDIO_SOURCE_DEFAULT, AUDIO_FLAG_NONE, ""};
- getOutputForAttr(&selectedDeviceId, AUDIO_FORMAT_PCM_16_BIT, AUDIO_CHANNEL_OUT_STEREO,
+ getOutputForAttr(&selectedDeviceIds, AUDIO_FORMAT_PCM_16_BIT, AUDIO_CHANNEL_OUT_STEREO,
k48000SamplingRate, AUDIO_OUTPUT_FLAG_DIRECT, &output, &portId, mediaAttribute);
- ASSERT_EQ(mediaDevicePort.id, selectedDeviceId);
+ ASSERT_EQ(mediaDevicePort.id, selectedDeviceIds[0]);
}
TEST_F(AudioPolicyManagerCarTest, GetOutputForAttrWithSelectedOutputAfterRegisteringPolicyMix) {
@@ -2783,25 +2893,25 @@
AUDIO_DEVICE_OUT_BUS, sCarBusMediaOutput, audioConfig, mediaMatchCriteria);
ASSERT_EQ(NO_ERROR, ret);
std::vector<AudioMixMatchCriterion> navMatchCriteria = {
- createUsageCriterion(AUDIO_USAGE_ASSISTANCE_NAVIGATION_GUIDANCE,
+ createUsageCriterion(AUDIO_USAGE_ASSISTANCE_NAVIGATION_GUIDANCE,
/*exclude=*/ false)};
ret = addPolicyMix(MIX_TYPE_PLAYERS, MIX_ROUTE_FLAG_RENDER,
- AUDIO_DEVICE_OUT_BUS, sCarBusNavigationOutput, audioConfig, navMatchCriteria);
+ AUDIO_DEVICE_OUT_BUS, sCarBusNavigationOutput, audioConfig, navMatchCriteria);
ASSERT_EQ(NO_ERROR, ret);
audio_port_v7 navDevicePort;
ASSERT_TRUE(findDevicePort(AUDIO_PORT_ROLE_SINK, AUDIO_DEVICE_OUT_BUS,
sCarBusNavigationOutput, &navDevicePort));
- audio_port_handle_t selectedDeviceId = navDevicePort.id;
+ DeviceIdVector selectedDeviceIds = { navDevicePort.id };
audio_io_handle_t output;
audio_port_handle_t portId;
const audio_attributes_t mediaAttribute = {
AUDIO_CONTENT_TYPE_UNKNOWN, AUDIO_USAGE_MEDIA,
AUDIO_SOURCE_DEFAULT, AUDIO_FLAG_NONE, ""};
- getOutputForAttr(&selectedDeviceId, AUDIO_FORMAT_PCM_16_BIT, AUDIO_CHANNEL_OUT_STEREO,
+ getOutputForAttr(&selectedDeviceIds, AUDIO_FORMAT_PCM_16_BIT, AUDIO_CHANNEL_OUT_STEREO,
k48000SamplingRate, AUDIO_OUTPUT_FLAG_DIRECT, &output, &portId, mediaAttribute);
- ASSERT_EQ(navDevicePort.id, selectedDeviceId);
+ ASSERT_EQ(navDevicePort.id, selectedDeviceIds[0]);
}
TEST_F(AudioPolicyManagerCarTest, GetOutputForAttrWithSelectedOutputAfterUserAffinities) {
@@ -2816,10 +2926,10 @@
AUDIO_DEVICE_OUT_BUS, sCarBusMediaOutput, audioConfig, mediaMatchCriteria);
ASSERT_EQ(NO_ERROR, ret);
std::vector<AudioMixMatchCriterion> navMatchCriteria = {
- createUsageCriterion(AUDIO_USAGE_ASSISTANCE_NAVIGATION_GUIDANCE,
+ createUsageCriterion(AUDIO_USAGE_ASSISTANCE_NAVIGATION_GUIDANCE,
/*exclude=*/ false)};
ret = addPolicyMix(MIX_TYPE_PLAYERS, MIX_ROUTE_FLAG_RENDER,
- AUDIO_DEVICE_OUT_BUS, sCarBusNavigationOutput, audioConfig, navMatchCriteria);
+ AUDIO_DEVICE_OUT_BUS, sCarBusNavigationOutput, audioConfig, navMatchCriteria);
ASSERT_EQ(NO_ERROR, ret);
const AudioDeviceTypeAddr mediaOutputDevice(AUDIO_DEVICE_OUT_BUS, sCarBusMediaOutput);
const AudioDeviceTypeAddrVector outputDevices = {mediaOutputDevice};
@@ -2827,17 +2937,18 @@
audio_port_v7 navDevicePort;
ASSERT_TRUE(findDevicePort(AUDIO_PORT_ROLE_SINK, AUDIO_DEVICE_OUT_BUS,
sCarBusNavigationOutput, &navDevicePort));
- audio_port_handle_t selectedDeviceId = navDevicePort.id;
+ DeviceIdVector selectedDeviceIds = { navDevicePort.id };
audio_io_handle_t output;
audio_port_handle_t portId;
const audio_attributes_t mediaAttribute = {
AUDIO_CONTENT_TYPE_UNKNOWN, AUDIO_USAGE_MEDIA,
AUDIO_SOURCE_DEFAULT, AUDIO_FLAG_NONE, ""};
- getOutputForAttr(&selectedDeviceId, AUDIO_FORMAT_PCM_16_BIT, AUDIO_CHANNEL_OUT_STEREO,
+ getOutputForAttr(&selectedDeviceIds, AUDIO_FORMAT_PCM_16_BIT, AUDIO_CHANNEL_OUT_STEREO,
k48000SamplingRate, AUDIO_OUTPUT_FLAG_DIRECT, &output, &portId, mediaAttribute);
- ASSERT_NE(navDevicePort.id, selectedDeviceId);
+ ASSERT_GT(selectedDeviceIds.size(), 0);
+ ASSERT_NE(navDevicePort.id, selectedDeviceIds[0]);
}
TEST_F(AudioPolicyManagerCarTest, GetOutputForAttrWithExcludeUserIdCriteria) {
@@ -2852,11 +2963,11 @@
AUDIO_DEVICE_OUT_BUS, sCarBusMediaOutput, audioConfig, mediaMatchCriteria);
ASSERT_EQ(NO_ERROR, ret);
std::vector<AudioMixMatchCriterion> navMatchCriteria = {
- createUsageCriterion(AUDIO_USAGE_ASSISTANCE_NAVIGATION_GUIDANCE,
+ createUsageCriterion(AUDIO_USAGE_ASSISTANCE_NAVIGATION_GUIDANCE,
/*exclude=*/ false),
- createUserIdCriterion(/* userId */ 0, /* exclude */ true)};
+ createUserIdCriterion(/* userId */ 0, /* exclude */ true)};
ret = addPolicyMix(MIX_TYPE_PLAYERS, MIX_ROUTE_FLAG_RENDER,
- AUDIO_DEVICE_OUT_BUS, sCarBusNavigationOutput, audioConfig, navMatchCriteria);
+ AUDIO_DEVICE_OUT_BUS, sCarBusNavigationOutput, audioConfig, navMatchCriteria);
ASSERT_EQ(NO_ERROR, ret);
audio_port_v7 navDevicePort;
ASSERT_TRUE(findDevicePort(AUDIO_PORT_ROLE_SINK, AUDIO_DEVICE_OUT_BUS,
@@ -2864,14 +2975,15 @@
audio_io_handle_t output;
audio_port_handle_t portId;
const audio_attributes_t navigationAttribute = {
- AUDIO_CONTENT_TYPE_UNKNOWN, AUDIO_USAGE_ASSISTANCE_NAVIGATION_GUIDANCE,
- AUDIO_SOURCE_DEFAULT, AUDIO_FLAG_NONE, ""};
- audio_port_handle_t selectedDeviceId = AUDIO_PORT_HANDLE_NONE;
+ AUDIO_CONTENT_TYPE_UNKNOWN, AUDIO_USAGE_ASSISTANCE_NAVIGATION_GUIDANCE,
+ AUDIO_SOURCE_DEFAULT, AUDIO_FLAG_NONE, ""};
+ DeviceIdVector selectedDeviceIds;
- getOutputForAttr(&selectedDeviceId, AUDIO_FORMAT_PCM_16_BIT, AUDIO_CHANNEL_OUT_STEREO,
+ getOutputForAttr(&selectedDeviceIds, AUDIO_FORMAT_PCM_16_BIT, AUDIO_CHANNEL_OUT_STEREO,
k48000SamplingRate, AUDIO_OUTPUT_FLAG_DIRECT, &output, &portId, navigationAttribute);
- ASSERT_NE(navDevicePort.id, selectedDeviceId);
+ ASSERT_GT(selectedDeviceIds.size(), 0);
+ ASSERT_NE(navDevicePort.id, selectedDeviceIds[0]);
}
TEST_F(AudioPolicyManagerCarTest, GetOutputForAttrWithSelectedOutputExcludeUserIdCriteria) {
@@ -2886,30 +2998,30 @@
AUDIO_DEVICE_OUT_BUS, sCarBusMediaOutput, audioConfig, mediaMatchCriteria);
ASSERT_EQ(NO_ERROR, ret);
std::vector<AudioMixMatchCriterion> navMatchCriteria = {
- createUsageCriterion(AUDIO_USAGE_ASSISTANCE_NAVIGATION_GUIDANCE,
+ createUsageCriterion(AUDIO_USAGE_ASSISTANCE_NAVIGATION_GUIDANCE,
/*exclude=*/ false),
- createUserIdCriterion(0 /* userId */, /* exclude */ true)};
+ createUserIdCriterion(0 /* userId */, /* exclude */ true)};
ret = addPolicyMix(MIX_TYPE_PLAYERS, MIX_ROUTE_FLAG_RENDER,
- AUDIO_DEVICE_OUT_BUS, sCarBusNavigationOutput, audioConfig, navMatchCriteria);
+ AUDIO_DEVICE_OUT_BUS, sCarBusNavigationOutput, audioConfig, navMatchCriteria);
ASSERT_EQ(NO_ERROR, ret);
audio_port_v7 navDevicePort;
ASSERT_TRUE(findDevicePort(AUDIO_PORT_ROLE_SINK, AUDIO_DEVICE_OUT_BUS,
- sCarBusNavigationOutput, &navDevicePort));
- audio_port_handle_t selectedDeviceId = navDevicePort.id;
+ sCarBusNavigationOutput, &navDevicePort));
+ DeviceIdVector selectedDeviceIds = { navDevicePort.id };
audio_io_handle_t output;
audio_port_handle_t portId;
const audio_attributes_t mediaAttribute = {
- AUDIO_CONTENT_TYPE_UNKNOWN, AUDIO_USAGE_MEDIA,
- AUDIO_SOURCE_DEFAULT, AUDIO_FLAG_NONE, ""};
+ AUDIO_CONTENT_TYPE_UNKNOWN, AUDIO_USAGE_MEDIA,
+ AUDIO_SOURCE_DEFAULT, AUDIO_FLAG_NONE, ""};
- getOutputForAttr(&selectedDeviceId, AUDIO_FORMAT_PCM_16_BIT, AUDIO_CHANNEL_OUT_STEREO,
+ getOutputForAttr(&selectedDeviceIds, AUDIO_FORMAT_PCM_16_BIT, AUDIO_CHANNEL_OUT_STEREO,
k48000SamplingRate, AUDIO_OUTPUT_FLAG_DIRECT, &output, &portId, mediaAttribute);
- ASSERT_EQ(navDevicePort.id, selectedDeviceId);
+ ASSERT_EQ(navDevicePort.id, selectedDeviceIds[0]);
}
TEST_F(AudioPolicyManagerCarTest,
- GetOutputForAttrWithMatchingMixAndSelectedOutputAfterUserAffinities) {
+ GetOutputForAttrWithMatchingMixAndSelectedOutputAfterUserAffinities) {
status_t ret;
audio_config_t audioConfig = AUDIO_CONFIG_INITIALIZER;
audioConfig.channel_mask = AUDIO_CHANNEL_OUT_STEREO;
@@ -2921,10 +3033,10 @@
AUDIO_DEVICE_OUT_BUS, sCarBusMediaOutput, audioConfig, mediaMatchCriteria);
ASSERT_EQ(NO_ERROR, ret);
std::vector<AudioMixMatchCriterion> navMatchCriteria = {
- createUsageCriterion(AUDIO_USAGE_ASSISTANCE_NAVIGATION_GUIDANCE,
+ createUsageCriterion(AUDIO_USAGE_ASSISTANCE_NAVIGATION_GUIDANCE,
/*exclude=*/ false)};
ret = addPolicyMix(MIX_TYPE_PLAYERS, MIX_ROUTE_FLAG_RENDER,
- AUDIO_DEVICE_OUT_BUS, sCarBusNavigationOutput, audioConfig, navMatchCriteria);
+ AUDIO_DEVICE_OUT_BUS, sCarBusNavigationOutput, audioConfig, navMatchCriteria);
ASSERT_EQ(NO_ERROR, ret);
const AudioDeviceTypeAddr mediaOutputDevice(AUDIO_DEVICE_OUT_BUS, sCarBusMediaOutput);
const AudioDeviceTypeAddr navOutputDevice(AUDIO_DEVICE_OUT_BUS, sCarBusNavigationOutput);
@@ -2933,21 +3045,21 @@
audio_port_v7 navDevicePort;
ASSERT_TRUE(findDevicePort(AUDIO_PORT_ROLE_SINK, AUDIO_DEVICE_OUT_BUS,
sCarBusNavigationOutput, &navDevicePort));
- audio_port_handle_t selectedDeviceId = navDevicePort.id;
+ DeviceIdVector selectedDeviceIds = { navDevicePort.id };
audio_io_handle_t output;
audio_port_handle_t portId;
const audio_attributes_t mediaAttribute = {
- AUDIO_CONTENT_TYPE_UNKNOWN, AUDIO_USAGE_MEDIA,
- AUDIO_SOURCE_DEFAULT, AUDIO_FLAG_NONE, ""};
+ AUDIO_CONTENT_TYPE_UNKNOWN, AUDIO_USAGE_MEDIA,
+ AUDIO_SOURCE_DEFAULT, AUDIO_FLAG_NONE, ""};
- getOutputForAttr(&selectedDeviceId, AUDIO_FORMAT_PCM_16_BIT, AUDIO_CHANNEL_OUT_STEREO,
+ getOutputForAttr(&selectedDeviceIds, AUDIO_FORMAT_PCM_16_BIT, AUDIO_CHANNEL_OUT_STEREO,
k48000SamplingRate, AUDIO_OUTPUT_FLAG_DIRECT, &output, &portId, mediaAttribute);
- ASSERT_EQ(navDevicePort.id, selectedDeviceId);
+ ASSERT_EQ(navDevicePort.id, selectedDeviceIds[0]);
}
TEST_F(AudioPolicyManagerCarTest,
- GetOutputForAttrWithNoMatchingMaxAndSelectedOutputAfterUserAffinities) {
+ GetOutputForAttrWithNoMatchingMaxAndSelectedOutputAfterUserAffinities) {
status_t ret;
audio_config_t audioConfig = AUDIO_CONFIG_INITIALIZER;
audioConfig.channel_mask = AUDIO_CHANNEL_OUT_STEREO;
@@ -2959,10 +3071,10 @@
AUDIO_DEVICE_OUT_BUS, sCarBusMediaOutput, audioConfig, mediaMatchCriteria);
ASSERT_EQ(NO_ERROR, ret);
std::vector<AudioMixMatchCriterion> navMatchCriteria = {
- createUsageCriterion(AUDIO_USAGE_ASSISTANCE_NAVIGATION_GUIDANCE,
+ createUsageCriterion(AUDIO_USAGE_ASSISTANCE_NAVIGATION_GUIDANCE,
/*exclude=*/ false)};
ret = addPolicyMix(MIX_TYPE_PLAYERS, MIX_ROUTE_FLAG_RENDER,
- AUDIO_DEVICE_OUT_BUS, sCarBusNavigationOutput, audioConfig, navMatchCriteria);
+ AUDIO_DEVICE_OUT_BUS, sCarBusNavigationOutput, audioConfig, navMatchCriteria);
ASSERT_EQ(NO_ERROR, ret);
const AudioDeviceTypeAddr mediaOutputDevice(AUDIO_DEVICE_OUT_BUS, sCarBusMediaOutput);
const AudioDeviceTypeAddr navOutputDevice(AUDIO_DEVICE_OUT_BUS, sCarBusNavigationOutput);
@@ -2971,21 +3083,21 @@
audio_port_v7 navDevicePort;
ASSERT_TRUE(findDevicePort(AUDIO_PORT_ROLE_SINK, AUDIO_DEVICE_OUT_BUS,
sCarBusNavigationOutput, &navDevicePort));
- audio_port_handle_t selectedDeviceId = navDevicePort.id;
+ DeviceIdVector selectedDeviceIds = { navDevicePort.id };
audio_io_handle_t output;
audio_port_handle_t portId;
const audio_attributes_t alarmAttribute = {
- AUDIO_CONTENT_TYPE_UNKNOWN, AUDIO_USAGE_ALARM,
- AUDIO_SOURCE_DEFAULT, AUDIO_FLAG_NONE, ""};
+ AUDIO_CONTENT_TYPE_UNKNOWN, AUDIO_USAGE_ALARM,
+ AUDIO_SOURCE_DEFAULT, AUDIO_FLAG_NONE, ""};
- getOutputForAttr(&selectedDeviceId, AUDIO_FORMAT_PCM_16_BIT, AUDIO_CHANNEL_OUT_STEREO,
+ getOutputForAttr(&selectedDeviceIds, AUDIO_FORMAT_PCM_16_BIT, AUDIO_CHANNEL_OUT_STEREO,
k48000SamplingRate, AUDIO_OUTPUT_FLAG_DIRECT, &output, &portId, alarmAttribute);
- ASSERT_EQ(navDevicePort.id, selectedDeviceId);
+ ASSERT_EQ(navDevicePort.id, selectedDeviceIds[0]);
}
TEST_F(AudioPolicyManagerCarTest,
- GetOutputForAttrWithMatMixAfterUserAffinitiesForOneUser) {
+ GetOutputForAttrWithMatMixAfterUserAffinitiesForOneUser) {
status_t ret;
audio_config_t audioConfig = AUDIO_CONFIG_INITIALIZER;
audioConfig.channel_mask = AUDIO_CHANNEL_OUT_STEREO;
@@ -3008,23 +3120,23 @@
audio_port_v7 primaryZoneDevicePort;
ASSERT_TRUE(findDevicePort(AUDIO_PORT_ROLE_SINK, AUDIO_DEVICE_OUT_BUS,
sCarBusMediaOutput, &primaryZoneDevicePort));
- audio_port_handle_t selectedDeviceId = AUDIO_PORT_HANDLE_NONE;
+ DeviceIdVector selectedDeviceIds;
audio_io_handle_t output;
audio_port_handle_t portId;
const audio_attributes_t mediaAttribute = {
- AUDIO_CONTENT_TYPE_UNKNOWN, AUDIO_USAGE_MEDIA,
- AUDIO_SOURCE_DEFAULT, AUDIO_FLAG_NONE, ""};
+ AUDIO_CONTENT_TYPE_UNKNOWN, AUDIO_USAGE_MEDIA,
+ AUDIO_SOURCE_DEFAULT, AUDIO_FLAG_NONE, ""};
uid_t user11AppUid = multiuser_get_uid(/* user_id */ 11, /* app_id */ 12345);
- getOutputForAttr(&selectedDeviceId, AUDIO_FORMAT_PCM_16_BIT, AUDIO_CHANNEL_OUT_STEREO,
+ getOutputForAttr(&selectedDeviceIds, AUDIO_FORMAT_PCM_16_BIT, AUDIO_CHANNEL_OUT_STEREO,
k48000SamplingRate, AUDIO_OUTPUT_FLAG_DIRECT, &output, &portId, mediaAttribute,
AUDIO_SESSION_NONE, user11AppUid);
- ASSERT_EQ(primaryZoneDevicePort.id, selectedDeviceId);
+ ASSERT_EQ(primaryZoneDevicePort.id, selectedDeviceIds[0]);
}
TEST_F(AudioPolicyManagerCarTest,
- GetOutputForAttrWithMatMixAfterUserAffinitiesForTwoUsers) {
+ GetOutputForAttrWithMatMixAfterUserAffinitiesForTwoUsers) {
status_t ret;
audio_config_t audioConfig = AUDIO_CONFIG_INITIALIZER;
audioConfig.channel_mask = AUDIO_CHANNEL_OUT_STEREO;
@@ -3050,23 +3162,23 @@
audio_port_v7 secondaryZoneDevicePort;
ASSERT_TRUE(findDevicePort(AUDIO_PORT_ROLE_SINK, AUDIO_DEVICE_OUT_BUS,
sCarRearZoneOneOutput, &secondaryZoneDevicePort));
- audio_port_handle_t selectedDeviceId = AUDIO_PORT_HANDLE_NONE;
+ DeviceIdVector selectedDeviceIds;
audio_io_handle_t output;
audio_port_handle_t portId;
const audio_attributes_t mediaAttribute = {
- AUDIO_CONTENT_TYPE_UNKNOWN, AUDIO_USAGE_MEDIA,
- AUDIO_SOURCE_DEFAULT, AUDIO_FLAG_NONE, ""};
+ AUDIO_CONTENT_TYPE_UNKNOWN, AUDIO_USAGE_MEDIA,
+ AUDIO_SOURCE_DEFAULT, AUDIO_FLAG_NONE, ""};
uid_t user11AppUid = multiuser_get_uid(/* user_id */ 11, /* app_id */ 12345);
- getOutputForAttr(&selectedDeviceId, AUDIO_FORMAT_PCM_16_BIT, AUDIO_CHANNEL_OUT_STEREO,
+ getOutputForAttr(&selectedDeviceIds, AUDIO_FORMAT_PCM_16_BIT, AUDIO_CHANNEL_OUT_STEREO,
k48000SamplingRate, AUDIO_OUTPUT_FLAG_DIRECT, &output, &portId, mediaAttribute,
AUDIO_SESSION_NONE, user11AppUid);
- ASSERT_EQ(secondaryZoneDevicePort.id, selectedDeviceId);
+ ASSERT_EQ(secondaryZoneDevicePort.id, selectedDeviceIds[0]);
}
TEST_F(AudioPolicyManagerCarTest,
- GetOutputForAttrWithMatMixAfterUserAffinitiesForThreeUsers) {
+ GetOutputForAttrWithMatMixAfterUserAffinitiesForThreeUsers) {
status_t ret;
audio_config_t audioConfig = AUDIO_CONFIG_INITIALIZER;
audioConfig.channel_mask = AUDIO_CHANNEL_OUT_STEREO;
@@ -3095,19 +3207,19 @@
audio_port_v7 tertiaryZoneDevicePort;
ASSERT_TRUE(findDevicePort(AUDIO_PORT_ROLE_SINK, AUDIO_DEVICE_OUT_BUS,
sCarRearZoneTwoOutput, &tertiaryZoneDevicePort));
- audio_port_handle_t selectedDeviceId = AUDIO_PORT_HANDLE_NONE;
+ DeviceIdVector selectedDeviceIds;
audio_io_handle_t output;
audio_port_handle_t portId;
const audio_attributes_t mediaAttribute = {
- AUDIO_CONTENT_TYPE_UNKNOWN, AUDIO_USAGE_MEDIA,
- AUDIO_SOURCE_DEFAULT, AUDIO_FLAG_NONE, ""};
+ AUDIO_CONTENT_TYPE_UNKNOWN, AUDIO_USAGE_MEDIA,
+ AUDIO_SOURCE_DEFAULT, AUDIO_FLAG_NONE, ""};
uid_t user15AppUid = multiuser_get_uid(/* user_id */ 15, /* app_id */ 12345);
- getOutputForAttr(&selectedDeviceId, AUDIO_FORMAT_PCM_16_BIT, AUDIO_CHANNEL_OUT_STEREO,
+ getOutputForAttr(&selectedDeviceIds, AUDIO_FORMAT_PCM_16_BIT, AUDIO_CHANNEL_OUT_STEREO,
k48000SamplingRate, AUDIO_OUTPUT_FLAG_DIRECT, &output, &portId, mediaAttribute,
AUDIO_SESSION_NONE, user15AppUid);
- ASSERT_EQ(tertiaryZoneDevicePort.id, selectedDeviceId);
+ ASSERT_EQ(tertiaryZoneDevicePort.id, selectedDeviceIds[0]);
}
TEST_F(AudioPolicyManagerCarTest, GetOutputForAttrWithNoMatchingMix) {
@@ -3122,10 +3234,10 @@
AUDIO_DEVICE_OUT_BUS, sCarBusMediaOutput, audioConfig, mediaMatchCriteria);
ASSERT_EQ(NO_ERROR, ret);
std::vector<AudioMixMatchCriterion> navMatchCriteria = {
- createUsageCriterion(AUDIO_USAGE_ASSISTANCE_NAVIGATION_GUIDANCE,
+ createUsageCriterion(AUDIO_USAGE_ASSISTANCE_NAVIGATION_GUIDANCE,
/*exclude=*/ false)};
ret = addPolicyMix(MIX_TYPE_PLAYERS, MIX_ROUTE_FLAG_RENDER,
- AUDIO_DEVICE_OUT_BUS, sCarBusNavigationOutput, audioConfig, navMatchCriteria);
+ AUDIO_DEVICE_OUT_BUS, sCarBusNavigationOutput, audioConfig, navMatchCriteria);
ASSERT_EQ(NO_ERROR, ret);
const AudioDeviceTypeAddr mediaOutputDevice(AUDIO_DEVICE_OUT_BUS, sCarBusMediaOutput);
const AudioDeviceTypeAddr navOutputDevice(AUDIO_DEVICE_OUT_BUS, sCarBusNavigationOutput);
@@ -3134,17 +3246,17 @@
audio_port_v7 navDevicePort;
ASSERT_TRUE(findDevicePort(AUDIO_PORT_ROLE_SINK, AUDIO_DEVICE_OUT_BUS,
sCarBusNavigationOutput, &navDevicePort));
- audio_port_handle_t selectedDeviceId = navDevicePort.id;
+ DeviceIdVector selectedDeviceIds = { navDevicePort.id };
audio_io_handle_t output;
audio_port_handle_t portId;
const audio_attributes_t alarmAttribute = {
- AUDIO_CONTENT_TYPE_UNKNOWN, AUDIO_USAGE_ALARM,
- AUDIO_SOURCE_DEFAULT, AUDIO_FLAG_NONE, ""};
+ AUDIO_CONTENT_TYPE_UNKNOWN, AUDIO_USAGE_ALARM,
+ AUDIO_SOURCE_DEFAULT, AUDIO_FLAG_NONE, ""};
- getOutputForAttr(&selectedDeviceId, AUDIO_FORMAT_PCM_16_BIT, AUDIO_CHANNEL_OUT_STEREO,
+ getOutputForAttr(&selectedDeviceIds, AUDIO_FORMAT_PCM_16_BIT, AUDIO_CHANNEL_OUT_STEREO,
k48000SamplingRate, AUDIO_OUTPUT_FLAG_DIRECT, &output, &portId, alarmAttribute);
- ASSERT_EQ(navDevicePort.id, selectedDeviceId);
+ ASSERT_EQ(navDevicePort.id, selectedDeviceIds[0]);
}
TEST_F(AudioPolicyManagerCarTest, GetOutputForAttrForMMapWithPolicyMatched) {
@@ -3156,13 +3268,13 @@
std::vector<AudioMixMatchCriterion> mediaMatchCriteria = {
createUsageCriterion(AUDIO_USAGE_MEDIA, /*exclude=*/ false)};
ret = addPolicyMix(MIX_TYPE_PLAYERS, MIX_ROUTE_FLAG_RENDER,
- AUDIO_DEVICE_OUT_BUS, sCarBusMmapOutput, audioConfig, mediaMatchCriteria);
+ AUDIO_DEVICE_OUT_BUS, sCarBusMmapOutput, audioConfig, mediaMatchCriteria);
ASSERT_EQ(NO_ERROR, ret);
ASSERT_EQ(NO_ERROR, ret);
audio_port_v7 mmapDevicePort;
ASSERT_TRUE(findDevicePort(AUDIO_PORT_ROLE_SINK, AUDIO_DEVICE_OUT_BUS,
- sCarBusMmapOutput, &mmapDevicePort));
- audio_port_handle_t selectedDeviceId = AUDIO_PORT_HANDLE_NONE;
+ sCarBusMmapOutput, &mmapDevicePort));
+ DeviceIdVector selectedDeviceIds;
audio_io_handle_t output;
audio_port_handle_t portId;
const audio_attributes_t mediaAttribute = {
@@ -3170,12 +3282,13 @@
AUDIO_SOURCE_DEFAULT, AUDIO_FLAG_NONE, ""};
getOutputForAttr(
- &selectedDeviceId, AUDIO_FORMAT_PCM_16_BIT, AUDIO_CHANNEL_OUT_STEREO,
+ &selectedDeviceIds, AUDIO_FORMAT_PCM_16_BIT, AUDIO_CHANNEL_OUT_STEREO,
k48000SamplingRate,
(audio_output_flags_t)(AUDIO_OUTPUT_FLAG_MMAP_NOIRQ | AUDIO_OUTPUT_FLAG_DIRECT),
&output, &portId, mediaAttribute);
- ASSERT_EQ(mmapDevicePort.id, selectedDeviceId);
+ ASSERT_EQ(mmapDevicePort.id, selectedDeviceIds[0]);
+
}
class AudioPolicyManagerTVTest : public AudioPolicyManagerTestWithConfigurationFile {
@@ -3195,10 +3308,10 @@
ASSERT_EQ(NO_ERROR, mManager->setDeviceConnectionState(
AUDIO_DEVICE_OUT_AUX_DIGITAL, AUDIO_POLICY_DEVICE_STATE_AVAILABLE,
"" /*address*/, "" /*name*/, AUDIO_FORMAT_DEFAULT));
- audio_port_handle_t selectedDeviceId = AUDIO_PORT_HANDLE_NONE;
+ DeviceIdVector selectedDeviceIds;
audio_io_handle_t output;
audio_port_handle_t portId;
- getOutputForAttr(&selectedDeviceId, AUDIO_FORMAT_PCM_16_BIT, AUDIO_CHANNEL_OUT_STEREO,
+ getOutputForAttr(&selectedDeviceIds, AUDIO_FORMAT_PCM_16_BIT, AUDIO_CHANNEL_OUT_STEREO,
k48000SamplingRate, flags, &output, &portId);
sp<SwAudioOutputDescriptor> outDesc = mManager->getOutputs().valueFor(output);
ASSERT_NE(nullptr, outDesc.get());
@@ -3277,7 +3390,7 @@
void AudioPolicyManagerPhoneTest::testOutputMixPortSelectionForAttr(
audio_output_flags_t flags, audio_format_t format, int samplingRate, bool isMusic,
const char* expectedMixPortName) {
- audio_port_handle_t selectedDeviceId = AUDIO_PORT_HANDLE_NONE;
+ DeviceIdVector selectedDeviceIds;
audio_io_handle_t output;
audio_port_handle_t portId;
audio_attributes_t attr = AUDIO_ATTRIBUTES_INITIALIZER;
@@ -3285,7 +3398,7 @@
attr.content_type = AUDIO_CONTENT_TYPE_MUSIC;
attr.usage = AUDIO_USAGE_MEDIA;
}
- getOutputForAttr(&selectedDeviceId, format, AUDIO_CHANNEL_OUT_STEREO, samplingRate, flags,
+ getOutputForAttr(&selectedDeviceIds, format, AUDIO_CHANNEL_OUT_STEREO, samplingRate, flags,
&output, &portId, attr);
EXPECT_NO_FATAL_FAILURE(verifyMixPortNameAndFlags(output, expectedMixPortName));
mManager->releaseOutput(portId);
@@ -3854,7 +3967,7 @@
audio_port_handle_t mUsbPortId = AUDIO_PORT_HANDLE_NONE;
audio_io_handle_t mBitPerfectOutput = AUDIO_IO_HANDLE_NONE;
- audio_port_handle_t mSelectedDeviceId = AUDIO_PORT_HANDLE_NONE;
+ DeviceIdVector mSelectedDeviceIds;
audio_port_handle_t mBitPerfectPortId = AUDIO_PORT_HANDLE_NONE;
static constexpr audio_attributes_t sMediaAttr = {
@@ -3913,12 +4026,12 @@
reset();
bool isBitPerfect;
- getOutputForAttr(&mSelectedDeviceId, mBitPerfectFormat, mBitPerfectChannelMask,
+ getOutputForAttr(&mSelectedDeviceIds, mBitPerfectFormat, mBitPerfectChannelMask,
mBitPerfectSampleRate, AUDIO_OUTPUT_FLAG_NONE, &mBitPerfectOutput,
&mBitPerfectPortId, sMediaAttr, AUDIO_SESSION_NONE, mUid, &isBitPerfect);
status_t status = mManager->startOutput(mBitPerfectPortId);
if (status == DEAD_OBJECT) {
- getOutputForAttr(&mSelectedDeviceId, mBitPerfectFormat, mBitPerfectChannelMask,
+ getOutputForAttr(&mSelectedDeviceIds, mBitPerfectFormat, mBitPerfectChannelMask,
mBitPerfectSampleRate, AUDIO_OUTPUT_FLAG_NONE, &mBitPerfectOutput,
&mBitPerfectPortId, sMediaAttr, AUDIO_SESSION_NONE, mUid, &isBitPerfect);
status = mManager->startOutput(mBitPerfectPortId);
@@ -3934,8 +4047,8 @@
void AudioPolicyManagerTestBitPerfectBase::reset() {
mBitPerfectOutput = AUDIO_IO_HANDLE_NONE;
- mSelectedDeviceId = AUDIO_PORT_HANDLE_NONE;
mBitPerfectPortId = AUDIO_PORT_HANDLE_NONE;
+ mSelectedDeviceIds.clear();
}
void AudioPolicyManagerTestBitPerfectBase::getBitPerfectOutput(status_t expected) {
@@ -3951,11 +4064,12 @@
bool isSpatialized;
bool isBitPerfect;
float volume;
+ bool muted;
EXPECT_EQ(expected,
mManager->getOutputForAttr(&sMediaAttr, &mBitPerfectOutput, AUDIO_SESSION_NONE,
&stream, attributionSource, &config, &flags,
- &mSelectedDeviceId, &mBitPerfectPortId, {}, &outputType,
- &isSpatialized, &isBitPerfect, &volume));
+ &mSelectedDeviceIds, &mBitPerfectPortId, {}, &outputType,
+ &isSpatialized, &isBitPerfect, &volume, &muted));
}
class AudioPolicyManagerTestBitPerfect : public AudioPolicyManagerTestBitPerfectBase {
@@ -3964,13 +4078,13 @@
TEST_F(AudioPolicyManagerTestBitPerfect, UseBitPerfectOutput) {
const uid_t anotherUid = 5678;
audio_io_handle_t output = AUDIO_IO_HANDLE_NONE;
- audio_port_handle_t selectedDeviceId = AUDIO_PORT_HANDLE_NONE;
+ DeviceIdVector selectedDeviceIds;
audio_port_handle_t portId = AUDIO_PORT_HANDLE_NONE;
bool isBitPerfect;
// When there is no active bit-perfect playback, the output selection will follow default
// routing strategy.
- getOutputForAttr(&selectedDeviceId, AUDIO_FORMAT_PCM_16_BIT, AUDIO_CHANNEL_OUT_QUAD,
+ getOutputForAttr(&selectedDeviceIds, AUDIO_FORMAT_PCM_16_BIT, AUDIO_CHANNEL_OUT_QUAD,
48000, AUDIO_OUTPUT_FLAG_NONE, &output, &portId, sMediaAttr,
AUDIO_SESSION_NONE, mUid, &isBitPerfect);
EXPECT_FALSE(isBitPerfect);
@@ -3984,14 +4098,14 @@
// If the playback is from preferred mixer attributes owner but the request doesn't match
// preferred mixer attributes, it will not be bit-perfect.
- getOutputForAttr(&selectedDeviceId, AUDIO_FORMAT_PCM_16_BIT, AUDIO_CHANNEL_OUT_QUAD,
+ getOutputForAttr(&selectedDeviceIds, AUDIO_FORMAT_PCM_16_BIT, AUDIO_CHANNEL_OUT_QUAD,
48000, AUDIO_OUTPUT_FLAG_NONE, &output, &portId, sMediaAttr,
AUDIO_SESSION_NONE, mUid, &isBitPerfect);
EXPECT_FALSE(isBitPerfect);
EXPECT_EQ(mBitPerfectOutput, output);
// When bit-perfect playback is active, all other playback will be routed to bit-perfect output.
- getOutputForAttr(&selectedDeviceId, AUDIO_FORMAT_PCM_16_BIT, AUDIO_CHANNEL_OUT_STEREO,
+ getOutputForAttr(&selectedDeviceIds, AUDIO_FORMAT_PCM_16_BIT, AUDIO_CHANNEL_OUT_STEREO,
48000, AUDIO_OUTPUT_FLAG_NONE, &output, &portId, sMediaAttr,
AUDIO_SESSION_NONE, anotherUid, &isBitPerfect);
EXPECT_FALSE(isBitPerfect);
@@ -4003,9 +4117,9 @@
.usage = AUDIO_USAGE_VOICE_COMMUNICATION_SIGNALLING,
};
audio_io_handle_t dtmfOutput = AUDIO_IO_HANDLE_NONE;
- selectedDeviceId = AUDIO_PORT_HANDLE_NONE;
+ selectedDeviceIds.clear();
portId = AUDIO_PORT_HANDLE_NONE;
- getOutputForAttr(&selectedDeviceId, AUDIO_FORMAT_PCM_16_BIT, AUDIO_CHANNEL_OUT_STEREO,
+ getOutputForAttr(&selectedDeviceIds, AUDIO_FORMAT_PCM_16_BIT, AUDIO_CHANNEL_OUT_STEREO,
48000, AUDIO_OUTPUT_FLAG_NONE, &dtmfOutput, &portId, dtmfAttr,
AUDIO_SESSION_NONE, anotherUid, &isBitPerfect);
EXPECT_FALSE(isBitPerfect);
@@ -4013,7 +4127,7 @@
// When configuration matches preferred mixer attributes, which is bit-perfect, but the client
// is not the owner of preferred mixer attributes, the playback will not be bit-perfect.
- getOutputForAttr(&selectedDeviceId, mBitPerfectFormat, mBitPerfectChannelMask,
+ getOutputForAttr(&selectedDeviceIds, mBitPerfectFormat, mBitPerfectChannelMask,
mBitPerfectSampleRate, AUDIO_OUTPUT_FLAG_NONE, &output, &portId, sMediaAttr,
AUDIO_SESSION_NONE, anotherUid, &isBitPerfect);
EXPECT_FALSE(isBitPerfect);
@@ -4039,9 +4153,9 @@
.content_type = AUDIO_CONTENT_TYPE_SONIFICATION,
.usage = AUDIO_USAGE_ASSISTANCE_SONIFICATION,
};
- audio_port_handle_t selectedDeviceId = AUDIO_PORT_HANDLE_NONE;
+ DeviceIdVector selectedDeviceIds;
bool isBitPerfect;
- getOutputForAttr(&selectedDeviceId, mBitPerfectFormat, mBitPerfectChannelMask,
+ getOutputForAttr(&selectedDeviceIds, mBitPerfectFormat, mBitPerfectChannelMask,
anotherSampleRate, AUDIO_OUTPUT_FLAG_NONE, &systemSoundOutput,
&systemSoundPortId, systemSoundAttr, AUDIO_SESSION_NONE, mUid, &isBitPerfect);
EXPECT_FALSE(isBitPerfect);
@@ -4061,7 +4175,7 @@
.content_type = AUDIO_CONTENT_TYPE_SONIFICATION,
.usage = AUDIO_USAGE_NOTIFICATION,
};
- getOutputForAttr(&selectedDeviceId, mBitPerfectFormat, mBitPerfectChannelMask,
+ getOutputForAttr(&selectedDeviceIds, mBitPerfectFormat, mBitPerfectChannelMask,
anotherSampleRate, AUDIO_OUTPUT_FLAG_NONE, ¬ificationOutput,
¬ificationPortId, notificationAttr, AUDIO_SESSION_NONE, mUid,
&isBitPerfect);
@@ -4128,11 +4242,11 @@
.content_type = AUDIO_CONTENT_TYPE_UNKNOWN,
.usage = GetParam(),
};
- audio_port_handle_t selectedDeviceId = AUDIO_PORT_HANDLE_NONE;
+ DeviceIdVector selectedDeviceIds;
audio_port_handle_t portId = AUDIO_PORT_HANDLE_NONE;
audio_io_handle_t output = AUDIO_IO_HANDLE_NONE;
ASSERT_NO_FATAL_FAILURE(
- getOutputForAttr(&selectedDeviceId, AUDIO_FORMAT_PCM_16_BIT, AUDIO_CHANNEL_OUT_STEREO,
+ getOutputForAttr(&selectedDeviceIds, AUDIO_FORMAT_PCM_16_BIT, AUDIO_CHANNEL_OUT_STEREO,
48000, AUDIO_OUTPUT_FLAG_NONE, &output, &portId, attr));
EXPECT_NE(mBitPerfectOutput, output);
EXPECT_EQ(NO_ERROR, mManager->startOutput(portId));
diff --git a/services/audiopolicy/tests/resources/test_audio_policy_configuration.xml b/services/audiopolicy/tests/resources/test_audio_policy_configuration.xml
index 67e99f2..9cb3608 100644
--- a/services/audiopolicy/tests/resources/test_audio_policy_configuration.xml
+++ b/services/audiopolicy/tests/resources/test_audio_policy_configuration.xml
@@ -66,6 +66,16 @@
channelMasks="AUDIO_CHANNEL_IN_STEREO"/>
</mixPort>
<mixPort name="hifi_input" role="sink" />
+ <mixPort name="multiple_channels_input" role="sink">
+ <profile name="" format="AUDIO_FORMAT_PCM_16_BIT"
+ samplingRates="48000"
+ channelMasks="AUDIO_CHANNEL_IN_5POINT1"/>
+ </mixPort>
+ <mixPort name="mixport_fast_input" role="sink" flags="AUDIO_INPUT_FLAG_FAST">
+ <profile name="" format="AUDIO_FORMAT_PCM_16_BIT"
+ samplingRates="48000"
+ channelMasks="AUDIO_CHANNEL_IN_STEREO"/>
+ </mixPort>
</mixPorts>
<devicePorts>
<devicePort tagName="Speaker" type="AUDIO_DEVICE_OUT_SPEAKER" role="sink">
@@ -114,6 +124,10 @@
sources="BUS Device In"/>
<route type="mix" sink="hifi_input"
sources="USB Device In" />
+ <route type="mix" sink="multiple_channels_input"
+ sources="Built-In Mic" />
+ <route type="mix" sink="mixport_fast_input"
+ sources="Built-In Mic"/>
</routes>
</module>
diff --git a/services/camera/libcameraservice/Android.bp b/services/camera/libcameraservice/Android.bp
index 3f2a617..b9c8206 100644
--- a/services/camera/libcameraservice/Android.bp
+++ b/services/camera/libcameraservice/Android.bp
@@ -101,8 +101,8 @@
"android.frameworks.cameraservice.device@2.0",
"android.frameworks.cameraservice.device@2.1",
"android.frameworks.cameraservice.common-V1-ndk",
- "android.frameworks.cameraservice.service-V2-ndk",
- "android.frameworks.cameraservice.device-V2-ndk",
+ "android.frameworks.cameraservice.service-V3-ndk",
+ "android.frameworks.cameraservice.device-V3-ndk",
"android.hardware.camera.common-V1-ndk",
"android.hardware.camera.device-V3-ndk",
"android.hardware.camera.metadata-V3-ndk",
@@ -179,6 +179,7 @@
"device3/aidl/AidlCamera3Device.cpp",
"device3/aidl/AidlCamera3OutputUtils.cpp",
"device3/aidl/AidlCamera3OfflineSession.cpp",
+ "device3/aidl/AidlCamera3SharedDevice.cpp",
"gui/RingBufferConsumer.cpp",
"hidl/AidlCameraDeviceCallbacks.cpp",
"hidl/AidlCameraServiceListener.cpp",
diff --git a/services/camera/libcameraservice/CameraService.cpp b/services/camera/libcameraservice/CameraService.cpp
index eb8708e..31a45c3 100644
--- a/services/camera/libcameraservice/CameraService.cpp
+++ b/services/camera/libcameraservice/CameraService.cpp
@@ -89,13 +89,13 @@
#include "utils/Utils.h"
namespace {
- const char* kPermissionServiceName = "permission";
const char* kActivityServiceName = "activity";
const char* kSensorPrivacyServiceName = "sensor_privacy";
const char* kAppopsServiceName = "appops";
const char* kProcessInfoServiceName = "processinfo";
const char* kVirtualDeviceBackCameraId = "0";
const char* kVirtualDeviceFrontCameraId = "1";
+ const char* kUnknownPackageName = "<unknown>";
int32_t getDeviceId(const android::CameraMetadata& cameraInfo) {
if (!cameraInfo.exists(ANDROID_INFO_DEVICE_ID)) {
@@ -105,6 +105,20 @@
const auto &deviceIdEntry = cameraInfo.find(ANDROID_INFO_DEVICE_ID);
return deviceIdEntry.data.i32[0];
}
+
+ static android::PermissionChecker::PermissionResult appOpModeToPermissionResult(int32_t res) {
+ switch (res) {
+ case android::AppOpsManager::MODE_ERRORED:
+ return android::PermissionChecker::PERMISSION_HARD_DENIED;
+ case android::AppOpsManager::MODE_IGNORED:
+ return android::PermissionChecker::PERMISSION_SOFT_DENIED;
+ case android::AppOpsManager::MODE_ALLOWED:
+ return android::PermissionChecker::PERMISSION_GRANTED;
+ }
+
+ ALOGE("%s: Unexpected appOpMode %d", __FUNCTION__, res);
+ return android::PermissionChecker::PERMISSION_HARD_DENIED;
+ }
} // namespace anonymous
namespace android {
@@ -550,7 +564,7 @@
updateStatus(StatusInternal::NOT_PRESENT, cameraId);
mVirtualDeviceCameraIdMapper.removeCamera(cameraId);
- sp<BasicClient> clientToDisconnectOnline, clientToDisconnectOffline;
+ std::vector<sp<BasicClient>> clientsToDisconnectOnline, clientsToDisconnectOffline;
{
// Don't do this in updateStatus to avoid deadlock over mServiceLock
Mutex::Autolock lock(mServiceLock);
@@ -560,12 +574,12 @@
// Remove online as well as offline client from the list of active clients,
// if they are present
- clientToDisconnectOnline = removeClientLocked(cameraId);
- clientToDisconnectOffline = removeClientLocked(kOfflineDevice + cameraId);
+ clientsToDisconnectOnline = removeClientsLocked(cameraId);
+ clientsToDisconnectOffline = removeClientsLocked(kOfflineDevice + cameraId);
}
- disconnectClient(cameraId, clientToDisconnectOnline);
- disconnectClient(kOfflineDevice + cameraId, clientToDisconnectOffline);
+ disconnectClients(cameraId, clientsToDisconnectOnline);
+ disconnectClients(kOfflineDevice + cameraId, clientsToDisconnectOffline);
removeStates(cameraId);
} else {
@@ -639,6 +653,13 @@
}
}
+void CameraService::disconnectClients(const std::string& id,
+ std::vector<sp<BasicClient>> clientsToDisconnect) {
+ for (auto& client : clientsToDisconnect) {
+ disconnectClient(id, client);
+ }
+}
+
void CameraService::disconnectClient(const std::string& id, sp<BasicClient> clientToDisconnect) {
if (clientToDisconnect.get() != nullptr) {
ALOGI("%s: Client for camera ID %s evicted due to device status change from HAL",
@@ -916,8 +937,7 @@
cameraId.c_str());
}
- bool overrideForPerfClass = flags::calculate_perf_override_during_session_support() &&
- SessionConfigurationUtils::targetPerfClassPrimaryCamera(
+ bool overrideForPerfClass = SessionConfigurationUtils::targetPerfClassPrimaryCamera(
mPerfClassPrimaryCameraIds, cameraId, targetSdkVersion);
auto ret = isSessionConfigurationWithParametersSupportedUnsafe(cameraId,
@@ -1013,23 +1033,23 @@
bool overrideForPerfClass = SessionConfigurationUtils::targetPerfClassPrimaryCamera(
mPerfClassPrimaryCameraIds, cameraId, targetSdkVersion);
- if (flags::check_session_support_before_session_char()) {
- bool sessionConfigSupported;
- Status res = isSessionConfigurationWithParametersSupportedUnsafe(
- cameraId, sessionConfiguration, overrideForPerfClass, &sessionConfigSupported);
- if (!res.isOk()) {
- // isSessionConfigurationWithParametersSupportedUnsafe should log what went wrong and
- // report the correct Status to send to the client. Simply forward the error to
- // the client.
- outMetadata->clear();
- return res;
- }
- if (!sessionConfigSupported) {
- std::string msg = fmt::sprintf(
- "Session configuration not supported for camera device %s.", cameraId.c_str());
- outMetadata->clear();
- return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT, msg.c_str());
- }
+
+ bool sessionConfigSupported;
+ Status res = isSessionConfigurationWithParametersSupportedUnsafe(
+ cameraId, sessionConfiguration, overrideForPerfClass, &sessionConfigSupported);
+ if (!res.isOk()) {
+ // isSessionConfigurationWithParametersSupportedUnsafe should log what went wrong and
+ // report the correct Status to send to the client. Simply forward the error to
+ // the client.
+ outMetadata->clear();
+ return res;
+ }
+
+ if (!sessionConfigSupported) {
+ std::string msg = fmt::sprintf("Session configuration not supported for camera device %s.",
+ cameraId.c_str());
+ outMetadata->clear();
+ return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT, msg.c_str());
}
status_t ret = mCameraProviderManager->getSessionCharacteristics(
@@ -1071,7 +1091,7 @@
}
}
- Status res = filterSensitiveMetadataIfNeeded(cameraId, outMetadata);
+ res = filterSensitiveMetadataIfNeeded(cameraId, outMetadata);
if (flags::analytics_24q3()) {
mCameraServiceProxyWrapper->logSessionCharacteristicsQuery(cameraId,
getCallingUid(), sessionConfiguration, res);
@@ -1465,13 +1485,13 @@
}
}
-Status CameraService::makeClient(const sp<CameraService>& cameraService,
- const sp<IInterface>& cameraCb, const std::string& packageName, bool systemNativeClient,
- const std::optional<std::string>& featureId, const std::string& cameraId,
- int api1CameraId, int facing, int sensorOrientation, int clientPid, uid_t clientUid,
+Status CameraService::makeClient(
+ const sp<CameraService>& cameraService, const sp<IInterface>& cameraCb,
+ const AttributionSourceState& clientAttribution, int callingPid, bool systemNativeClient,
+ const std::string& cameraId, int api1CameraId, int facing, int sensorOrientation,
int servicePid, std::pair<int, IPCTransport> deviceVersionAndTransport,
apiLevel effectiveApiLevel, bool overrideForPerfClass, int rotationOverride,
- bool forceSlowJpegMode, const std::string& originalCameraId,
+ bool forceSlowJpegMode, const std::string& originalCameraId, bool sharedMode,
/*out*/sp<BasicClient>* client) {
// For HIDL devices
if (deviceVersionAndTransport.second == IPCTransport::HIDL) {
@@ -1504,20 +1524,20 @@
if (effectiveApiLevel == API_1) { // Camera1 API route
sp<ICameraClient> tmp = static_cast<ICameraClient*>(cameraCb.get());
*client = new Camera2Client(cameraService, tmp, cameraService->mCameraServiceProxyWrapper,
- cameraService->mAttributionAndPermissionUtils, packageName, featureId, cameraId,
- api1CameraId, facing, sensorOrientation,
- clientPid, clientUid, servicePid, overrideForPerfClass, rotationOverride,
- forceSlowJpegMode);
+ cameraService->mAttributionAndPermissionUtils,
+ clientAttribution, callingPid, cameraId, api1CameraId, facing,
+ sensorOrientation, servicePid, overrideForPerfClass,
+ rotationOverride, forceSlowJpegMode, /*sharedMode*/false);
ALOGI("%s: Camera1 API (legacy), rotationOverride %d, forceSlowJpegMode %d",
__FUNCTION__, rotationOverride, forceSlowJpegMode);
} else { // Camera2 API route
sp<hardware::camera2::ICameraDeviceCallbacks> tmp =
static_cast<hardware::camera2::ICameraDeviceCallbacks*>(cameraCb.get());
- *client = new CameraDeviceClient(cameraService, tmp,
- cameraService->mCameraServiceProxyWrapper,
- cameraService->mAttributionAndPermissionUtils, packageName, systemNativeClient,
- featureId, cameraId, facing, sensorOrientation, clientPid, clientUid, servicePid,
- overrideForPerfClass, rotationOverride, originalCameraId);
+ *client = new CameraDeviceClient(
+ cameraService, tmp, cameraService->mCameraServiceProxyWrapper,
+ cameraService->mAttributionAndPermissionUtils, clientAttribution, callingPid,
+ systemNativeClient, cameraId, facing, sensorOrientation, servicePid,
+ overrideForPerfClass, rotationOverride, originalCameraId, sharedMode);
ALOGI("%s: Camera2 API, rotationOverride %d", __FUNCTION__, rotationOverride);
}
return Status::ok();
@@ -1607,14 +1627,19 @@
int callingPid = getCallingPid();
logConnectionAttempt(callingPid, kServiceName, cameraIdStr, API_1);
- if (!(ret = connectHelper<ICameraClient,Client>(
- sp<ICameraClient>{nullptr}, cameraIdStr, cameraId,
- kServiceName, /*systemNativeClient*/ false, {}, uid, callingPid,
- API_1, /*shimUpdateOnly*/ true, /*oomScoreOffset*/ 0,
- /*targetSdkVersion*/ __ANDROID_API_FUTURE__,
- /*rotationOverride*/hardware::ICameraService::ROTATION_OVERRIDE_OVERRIDE_TO_PORTRAIT,
- /*forceSlowJpegMode*/false, cameraIdStr, /*isNonSystemNdk*/ false, /*out*/ tmp)
- ).isOk()) {
+ AttributionSourceState clientAttribution =
+ buildAttributionSource(callingPid, uid, kServiceName, kDefaultDeviceId);
+
+ if (!(ret = connectHelper<ICameraClient, Client>(
+ sp<ICameraClient>{nullptr}, cameraIdStr, cameraId, clientAttribution,
+ /*systemNativeClient*/ false, API_1, /*shimUpdateOnly*/ true,
+ /*oomScoreOffset*/ 0,
+ /*targetSdkVersion*/ __ANDROID_API_FUTURE__,
+ /*rotationOverride*/
+ hardware::ICameraService::ROTATION_OVERRIDE_OVERRIDE_TO_PORTRAIT,
+ /*forceSlowJpegMode*/ false, cameraIdStr, /*isNonSystemNdk*/ false,
+ /*sharedMode*/false, /*out*/ tmp))
+ .isOk()) {
ALOGE("%s: Error initializing shim metadata: %s", __FUNCTION__, ret.toString8().c_str());
}
return ret;
@@ -1683,14 +1708,12 @@
}
Status CameraService::validateConnectLocked(const std::string& cameraId,
- const std::string& clientName8, int clientUid, int clientPid) const {
-
+ const AttributionSourceState& clientAttribution,
+ bool sharedMode) const {
#ifdef __BRILLO__
- UNUSED(clientName8);
- UNUSED(clientUid);
- UNUSED(clientPid);
+ UNUSED(clientAttribution);
#else
- Status allowed = validateClientPermissionsLocked(cameraId, clientName8, clientUid, clientPid);
+ Status allowed = validateClientPermissionsLocked(cameraId, clientAttribution, sharedMode);
if (!allowed.isOk()) {
return allowed;
}
@@ -1727,24 +1750,15 @@
return Status::ok();
}
-Status CameraService::errorNotTrusted(int clientPid, int clientUid, const std::string& cameraId,
- const std::string& clientName, bool isPid) const {
+Status CameraService::validateClientPermissionsLocked(
+ const std::string& cameraId, const AttributionSourceState& clientAttribution,
+ bool sharedMode) const {
int callingPid = getCallingPid();
int callingUid = getCallingUid();
- ALOGE("CameraService::connect X (calling PID %d, calling UID %d) rejected "
- "(don't trust %s %d)", callingPid, callingUid, isPid ? "clientPid" : "clientUid",
- isPid ? clientPid : clientUid);
- return STATUS_ERROR_FMT(ERROR_PERMISSION_DENIED,
- "Untrusted caller (calling PID %d, UID %d) trying to "
- "forward camera access to camera %s for client %s (PID %d, UID %d)",
- getCallingPid(), getCallingUid(), cameraId.c_str(),
- clientName.c_str(), clientPid, clientUid);
-}
-Status CameraService::validateClientPermissionsLocked(const std::string& cameraId,
- const std::string& clientName, int clientUid, int clientPid) const {
- int callingPid = getCallingPid();
- int callingUid = getCallingUid();
+ int clientPid = clientAttribution.pid;
+ int clientUid = clientAttribution.uid;
+ const std::string clientName = clientAttribution.packageName.value_or(kUnknownPackageName);
if (shouldRejectSystemCameraConnection(cameraId)) {
ALOGW("Attempting to connect to system-only camera id %s, connection rejected",
@@ -1759,14 +1773,24 @@
"found while trying to query device kind", cameraId.c_str());
}
+ if (flags::camera_multi_client() && sharedMode
+ && (deviceKind != SystemCameraKind::SYSTEM_ONLY_CAMERA)) {
+ ALOGE("%s: camera id %s is not system camera. Device sharing only supported for"
+ " system cameras.", __FUNCTION__, cameraId.c_str());
+ return STATUS_ERROR_FMT(ERROR_ILLEGAL_ARGUMENT, "camera device sharing not supported for "
+ "camera ID \"%s\"", cameraId.c_str());
+ }
+
// Get the device id that owns this camera.
auto [deviceId, _] = mVirtualDeviceCameraIdMapper.getDeviceIdAndMappedCameraIdPair(cameraId);
+ AttributionSourceState clientAttributionWithDeviceId = clientAttribution;
+ clientAttributionWithDeviceId.deviceId = deviceId;
// If it's not calling from cameraserver, check the permission if the
// device isn't a system only camera (shouldRejectSystemCameraConnection already checks for
// android.permission.SYSTEM_CAMERA for system only camera devices).
bool checkPermissionForCamera =
- hasPermissionsForCamera(cameraId, clientPid, clientUid, clientName, deviceId);
+ hasPermissionsForCamera(cameraId, clientAttributionWithDeviceId);
if (callingPid != getpid() &&
(deviceKind != SystemCameraKind::SYSTEM_ONLY_CAMERA) && !checkPermissionForCamera) {
ALOGE("Permission Denial: can't use the camera pid=%d, uid=%d", clientPid, clientUid);
@@ -1880,6 +1904,28 @@
__FUNCTION__);
}
+ if (flags::camera_multi_client()) {
+ sp<BasicClient> clientSp = clientDescriptor->getValue();
+ auto primaryClient = mActiveClientManager.getPrimaryClient(desc->getKey());
+ if (primaryClient == nullptr) {
+ // There is no primary client yet. Assign this first client as
+ // primary
+ clientSp->setPrimaryClient(true);
+ } else {
+ // There is already primary client. If the incoming client has a
+ // higher priority than the existing primary, then assign incoming
+ // client as primary and change the existing client to secondary.
+ // Otherwise incoming client is secondary client.
+ if (clientDescriptor->getPriority() <= primaryClient->getPriority()) {
+ clientSp->setPrimaryClient(true);
+ primaryClient->getValue()->setPrimaryClient(false);
+ primaryClient->getValue()->notifyClientSharedAccessPriorityChanged(false);
+ } else {
+ clientSp->setPrimaryClient(false);
+ }
+ }
+ }
+
// And register a death notification for the client callback. Do
// this last to avoid Binder policy where a nested Binder
// transaction might be pre-empted to service the client death
@@ -1894,6 +1940,7 @@
status_t CameraService::handleEvictionsLocked(const std::string& cameraId, int clientPid,
apiLevel effectiveApiLevel, const sp<IBinder>& remoteCallback,
const std::string& packageName, int oomScoreOffset, bool systemNativeClient,
+ bool sharedMode,
/*out*/
sp<BasicClient>* client,
std::shared_ptr<resource_policy::ClientDescriptor<std::string, sp<BasicClient>>>* partial) {
@@ -1945,7 +1992,8 @@
clientDescriptor = CameraClientManager::makeClientDescriptor(cameraId,
sp<BasicClient>{nullptr}, static_cast<int32_t>(state->getCost()),
state->getConflicting(), resource_policy::NATIVE_ADJ, clientPid,
- ActivityManager::PROCESS_STATE_BOUND_TOP, oomScoreOffset, systemNativeClient);
+ ActivityManager::PROCESS_STATE_BOUND_TOP, oomScoreOffset, systemNativeClient,
+ sharedMode);
} else {
// Get current active client PIDs
std::vector<int> ownerPids(mActiveClientManager.getAllOwners());
@@ -1981,7 +2029,7 @@
clientDescriptor = CameraClientManager::makeClientDescriptor(cameraId,
sp<BasicClient>{nullptr}, static_cast<int32_t>(state->getCost()),
state->getConflicting(), actualScore, clientPid, actualState,
- oomScoreOffset, systemNativeClient);
+ oomScoreOffset, systemNativeClient, sharedMode);
}
resource_policy::ClientPriority clientPriority = clientDescriptor->getPriority();
@@ -2133,38 +2181,33 @@
std::string clientPackageNameMaybe = clientAttribution.packageName.value_or("");
bool isNonSystemNdk = clientPackageNameMaybe.size() == 0;
- std::string clientPackageName = resolvePackageName(clientAttribution.uid,
- clientPackageNameMaybe);
- logConnectionAttempt(clientAttribution.pid, clientPackageName, cameraIdStr, API_1);
- int clientUid = clientAttribution.uid;
- int clientPid = clientAttribution.pid;
-
- // Resolve the client identity. In the near future, we will no longer rely on USE_CALLING_*, and
- // need a way to guarantee the caller identity early.
-
- // Check if we can trust clientUid
- if (!resolveClientUid(clientUid)) {
- return errorNotTrusted(clientPid, clientUid, cameraIdStr, clientPackageName,
- /* isPid=*/ false);
+ AttributionSourceState resolvedClientAttribution(clientAttribution);
+ ret = resolveAttributionSource(resolvedClientAttribution, __FUNCTION__, cameraIdStr);
+ if (!ret.isOk()) {
+ logRejected(cameraIdStr, getCallingPid(),
+ clientAttribution.packageName.value_or(kUnknownPackageName),
+ toStdString(ret.toString8()));
+ return ret;
}
- // Check if we can trust clientUid
- if (!resolveClientPid(clientPid)) {
- return errorNotTrusted(clientPid, clientUid, cameraIdStr, clientPackageName,
- /* isPid= */ true);
- }
+ const int clientPid = resolvedClientAttribution.pid;
+ const int clientUid = resolvedClientAttribution.uid;
+ const std::string& clientPackageName = *resolvedClientAttribution.packageName;
+
+ logConnectionAttempt(clientPid, clientPackageName, cameraIdStr, API_1);
sp<Client> client = nullptr;
- ret = connectHelper<ICameraClient,Client>(cameraClient, cameraIdStr, api1CameraId,
- clientPackageName, /*systemNativeClient*/ false, {},
- clientUid, clientPid, API_1,
- /*shimUpdateOnly*/ false, /*oomScoreOffset*/ 0, targetSdkVersion,
- rotationOverride, forceSlowJpegMode, cameraIdStr, isNonSystemNdk, /*out*/client);
+ ret = connectHelper<ICameraClient, Client>(
+ cameraClient, cameraIdStr, api1CameraId, resolvedClientAttribution,
+ /*systemNativeClient*/ false, API_1,
+ /*shimUpdateOnly*/ false, /*oomScoreOffset*/ 0, targetSdkVersion, rotationOverride,
+ forceSlowJpegMode, cameraIdStr, isNonSystemNdk, /*sharedMode*/false, /*out*/ client);
if (!ret.isOk()) {
- logRejected(cameraIdStr, getCallingPid(), clientAttribution.packageName.value_or(""),
- toStdString(ret.toString8()));
+ logRejected(cameraIdStr, getCallingPid(),
+ clientAttribution.packageName.value_or(kUnknownPackageName),
+ toStdString(ret.toString8()));
return ret;
}
@@ -2243,8 +2286,7 @@
const std::string& unresolvedCameraId,
int oomScoreOffset, int targetSdkVersion,
int rotationOverride, const AttributionSourceState& clientAttribution, int32_t devicePolicy,
- /*out*/
- sp<hardware::camera2::ICameraDeviceUser>* device) {
+ bool sharedMode, /*out*/sp<hardware::camera2::ICameraDeviceUser>* device) {
ATRACE_CALL();
RunThreadWithRealtimePriority priorityBump;
Status ret = Status::ok();
@@ -2253,9 +2295,12 @@
int callingPid = getCallingPid();
int callingUid = getCallingUid();
bool systemNativeClient = false;
+ AttributionSourceState resolvedClientAttribution(clientAttribution);
if (callerHasSystemUid() && (clientPackageNameMaybe.size() == 0)) {
std::string systemClient = fmt::sprintf("client.pid<%d>", callingPid);
clientPackageNameMaybe = systemClient;
+ // Pass in packageName since AttributionAndPermissionUtils can't resolve vndk clients.
+ resolvedClientAttribution.packageName = clientPackageNameMaybe;
systemNativeClient = true;
}
@@ -2270,31 +2315,30 @@
std::string cameraId = cameraIdOptional.value();
bool isNonSystemNdk = clientPackageNameMaybe.size() == 0;
- std::string clientPackageName = resolvePackageName(clientAttribution.uid,
- clientPackageNameMaybe);
- logConnectionAttempt(clientAttribution.pid, clientPackageName, cameraId, API_2);
- userid_t clientUserId = multiuser_get_user_id(clientAttribution.uid);
- if (clientAttribution.uid == USE_CALLING_UID) {
- clientUserId = multiuser_get_user_id(callingUid);
+ if (!flags::data_delivery_permission_checks()) {
+ resolvedClientAttribution.pid = USE_CALLING_PID;
}
- // Resolve the client identity. In the near future, we will no longer rely on USE_CALLING_*, and
- // need a way to guarantee the caller identity early.
-
- int clientUid = clientAttribution.uid;
- int clientPid = callingPid;
- // Check if we can trust clientUid
- if (!resolveClientUid(clientUid)) {
- return errorNotTrusted(clientPid, clientUid, cameraId, clientPackageName,
- /* isPid= */ false);
+ ret = resolveAttributionSource(resolvedClientAttribution, __FUNCTION__, cameraId);
+ if (!ret.isOk()) {
+ logRejected(cameraId, getCallingPid(), clientAttribution.packageName.value_or(""),
+ toStdString(ret.toString8()));
+ return ret;
}
+ const int clientPid = resolvedClientAttribution.pid;
+ const int clientUid = resolvedClientAttribution.uid;
+ const std::string& clientPackageName = *resolvedClientAttribution.packageName;
+ userid_t clientUserId = multiuser_get_user_id(resolvedClientAttribution.uid);
+
+ logConnectionAttempt(clientPid, clientPackageName, cameraId, API_2);
+
if (oomScoreOffset < 0) {
- std::string msg =
- fmt::sprintf("Cannot increase the priority of a client %s pid %d for "
- "camera id %s", clientPackageName.c_str(), callingPid,
- cameraId.c_str());
+ std::string msg = fmt::sprintf(
+ "Cannot increase the priority of a client %s pid %d for "
+ "camera id %s",
+ clientPackageName.c_str(), clientPid, cameraId.c_str());
ALOGE("%s: %s", __FUNCTION__, msg.c_str());
return STATUS_ERROR(ERROR_ILLEGAL_ARGUMENT, msg.c_str());
}
@@ -2309,25 +2353,24 @@
}
// enforce system camera permissions
- if (oomScoreOffset > 0
- && !hasPermissionsForSystemCamera(cameraId, callingPid,
- callingUid)
- && !isTrustedCallingUid(callingUid)) {
- std::string msg = fmt::sprintf("Cannot change the priority of a client %s pid %d for "
- "camera id %s without SYSTEM_CAMERA permissions",
- clientPackageName.c_str(), callingPid, cameraId.c_str());
+ if (oomScoreOffset > 0 && !hasPermissionsForSystemCamera(cameraId, clientPid, callingUid) &&
+ !isTrustedCallingUid(callingUid)) {
+ std::string msg = fmt::sprintf(
+ "Cannot change the priority of a client %s pid %d for "
+ "camera id %s without SYSTEM_CAMERA permissions",
+ clientPackageName.c_str(), clientPid, cameraId.c_str());
ALOGE("%s: %s", __FUNCTION__, msg.c_str());
return STATUS_ERROR(ERROR_PERMISSION_DENIED, msg.c_str());
}
- ret = connectHelper<hardware::camera2::ICameraDeviceCallbacks,CameraDeviceClient>(cameraCb,
- cameraId, /*api1CameraId*/-1, clientPackageName, systemNativeClient,
- clientAttribution.attributionTag, clientUid, clientPid, API_2,
- /*shimUpdateOnly*/ false, oomScoreOffset, targetSdkVersion, rotationOverride,
- /*forceSlowJpegMode*/false, unresolvedCameraId, isNonSystemNdk, /*out*/client);
+ ret = connectHelper<hardware::camera2::ICameraDeviceCallbacks, CameraDeviceClient>(
+ cameraCb, cameraId, /*api1CameraId*/ -1, resolvedClientAttribution, systemNativeClient,
+ API_2, /*shimUpdateOnly*/ false, oomScoreOffset, targetSdkVersion, rotationOverride,
+ /*forceSlowJpegMode*/ false, unresolvedCameraId, isNonSystemNdk, sharedMode,
+ /*out*/ client);
if (!ret.isOk()) {
- logRejected(cameraId, callingPid, clientPackageName, toStdString(ret.toString8()));
+ logRejected(cameraId, clientPid, clientPackageName, toStdString(ret.toString8()));
return ret;
}
@@ -2387,112 +2430,51 @@
return false;
}
-std::string CameraService::getPackageNameFromUid(int clientUid) const {
- std::string packageName("");
-
- sp<IPermissionController> permCtrl;
- if (flags::cache_permission_services()) {
- permCtrl = getPermissionController();
- } else {
- sp<IServiceManager> sm = defaultServiceManager();
-#pragma clang diagnostic push
-#pragma clang diagnostic ignored "-Wdeprecated-declarations"
- // Using deprecated function to preserve functionality until the
- // cache_permission_services flag is removed.
- sp<IBinder> binder = sm->getService(toString16(kPermissionServiceName));
-#pragma clang diagnostic pop
- if (binder == 0) {
- ALOGE("Cannot get permission service");
- permCtrl = nullptr;
- } else {
- permCtrl = interface_cast<IPermissionController>(binder);
- }
- }
-
- if (permCtrl == nullptr) {
- // Return empty package name and the further interaction
- // with camera will likely fail
- return packageName;
- }
-
- Vector<String16> packages;
-
- permCtrl->getPackagesForUid(clientUid, packages);
-
- if (packages.isEmpty()) {
- ALOGE("No packages for calling UID %d", clientUid);
- // Return empty package name and the further interaction
- // with camera will likely fail
- return packageName;
- }
-
- // Arbitrarily pick the first name in the list
- packageName = toStdString(packages[0]);
-
- return packageName;
-}
-
void CameraService::logConnectionAttempt(int clientPid, const std::string& clientPackageName,
- const std::string& cameraId, apiLevel effectiveApiLevel) const {
- int packagePid = (clientPid == USE_CALLING_PID) ?
- getCallingPid() : clientPid;
+ const std::string& cameraId,
+ apiLevel effectiveApiLevel) const {
ALOGI("CameraService::connect call (PID %d \"%s\", camera ID %s) and "
- "Camera API version %d", packagePid, clientPackageName.c_str(), cameraId.c_str(),
- static_cast<int>(effectiveApiLevel));
+ "Camera API version %d",
+ clientPid, clientPackageName.c_str(), cameraId.c_str(),
+ static_cast<int>(effectiveApiLevel));
}
-std::string CameraService::resolvePackageName(int clientUid,
- const std::string& clientPackageNameMaybe) const {
- if (clientPackageNameMaybe.size() <= 0) {
- int packageUid = (clientUid == USE_CALLING_UID) ?
- getCallingUid() : clientUid;
- // NDK calls don't come with package names, but we need one for various cases.
- // Generally, there's a 1:1 mapping between UID and package name, but shared UIDs
- // do exist. For all authentication cases, all packages under the same UID get the
- // same permissions, so picking any associated package name is sufficient. For some
- // other cases, this may give inaccurate names for clients in logs.
- return getPackageNameFromUid(packageUid);
- } else {
- return clientPackageNameMaybe;
- }
-}
-
-template<class CALLBACK, class CLIENT>
+template <class CALLBACK, class CLIENT>
Status CameraService::connectHelper(const sp<CALLBACK>& cameraCb, const std::string& cameraId,
- int api1CameraId, const std::string& clientPackageName, bool systemNativeClient,
- const std::optional<std::string>& clientFeatureId, int clientUid, int clientPid,
- apiLevel effectiveApiLevel, bool shimUpdateOnly, int oomScoreOffset, int targetSdkVersion,
- int rotationOverride, bool forceSlowJpegMode,
- const std::string& originalCameraId, bool isNonSystemNdk, /*out*/sp<CLIENT>& device) {
+ int api1CameraId,
+ const AttributionSourceState& clientAttribution,
+ bool systemNativeClient, apiLevel effectiveApiLevel,
+ bool shimUpdateOnly, int oomScoreOffset, int targetSdkVersion,
+ int rotationOverride, bool forceSlowJpegMode,
+ const std::string& originalCameraId, bool isNonSystemNdk,
+ bool sharedMode, /*out*/ sp<CLIENT>& device) {
binder::Status ret = binder::Status::ok();
- int packageUid = (clientUid == USE_CALLING_UID) ?
- getCallingUid() : clientUid;
- int packagePid = (clientPid == USE_CALLING_PID) ?
- getCallingPid() : clientPid;
-
nsecs_t openTimeNs = systemTime();
sp<CLIENT> client = nullptr;
int facing = -1;
int orientation = 0;
+ const std::string clientPackageName =
+ clientAttribution.packageName.value_or(kUnknownPackageName);
+
{
// Acquire mServiceLock and prevent other clients from connecting
std::unique_ptr<AutoConditionLock> lock =
AutoConditionLock::waitAndAcquire(mServiceLockWrapper, DEFAULT_CONNECT_TIMEOUT_NS);
if (lock == nullptr) {
- ALOGE("CameraService::connect (PID %d) rejected (too many other clients connecting)."
- , clientPid);
- return STATUS_ERROR_FMT(ERROR_MAX_CAMERAS_IN_USE,
+ ALOGE("CameraService::connect (PID %d) rejected (too many other clients connecting).",
+ clientAttribution.pid);
+ return STATUS_ERROR_FMT(
+ ERROR_MAX_CAMERAS_IN_USE,
"Cannot open camera %s for \"%s\" (PID %d): Too many other clients connecting",
- cameraId.c_str(), clientPackageName.c_str(), clientPid);
+ cameraId.c_str(), clientPackageName.c_str(), clientAttribution.pid);
}
// Enforce client permissions and do basic validity checks
- if (!(ret = validateConnectLocked(cameraId, clientPackageName,
- /*inout*/clientUid, /*inout*/clientPid)).isOk()) {
+ if (!(ret = validateConnectLocked(cameraId, clientAttribution, sharedMode)).isOk()) {
return ret;
}
@@ -2509,9 +2491,12 @@
sp<BasicClient> clientTmp = nullptr;
std::shared_ptr<resource_policy::ClientDescriptor<std::string, sp<BasicClient>>> partial;
- if ((err = handleEvictionsLocked(cameraId, clientPid, effectiveApiLevel,
- IInterface::asBinder(cameraCb), clientPackageName, oomScoreOffset,
- systemNativeClient, /*out*/&clientTmp, /*out*/&partial)) != NO_ERROR) {
+ if ((err = handleEvictionsLocked(
+ cameraId, clientAttribution.pid, effectiveApiLevel,
+ IInterface::asBinder(cameraCb),
+ clientAttribution.packageName.value_or(kUnknownPackageName), oomScoreOffset,
+ systemNativeClient, sharedMode, /*out*/ &clientTmp,
+ /*out*/ &partial)) != NO_ERROR) {
switch (err) {
case -ENODEV:
return STATUS_ERROR_FMT(ERROR_DISCONNECTED,
@@ -2557,12 +2542,13 @@
// Only use passed in clientPid to check permission. Use calling PID as the client PID
// that's connected to camera service directly.
- if(!(ret = makeClient(this, cameraCb, clientPackageName, systemNativeClient,
- clientFeatureId, cameraId, api1CameraId, facing,
- orientation, getCallingPid(), clientUid, getpid(),
- deviceVersionAndTransport, effectiveApiLevel, overrideForPerfClass,
- rotationOverride, forceSlowJpegMode, originalCameraId,
- /*out*/&tmp)).isOk()) {
+ if (!(ret = makeClient(this, cameraCb, clientAttribution, getCallingPid(),
+ systemNativeClient, cameraId, api1CameraId, facing, orientation,
+ getpid(), deviceVersionAndTransport, effectiveApiLevel,
+ overrideForPerfClass, rotationOverride, forceSlowJpegMode,
+ originalCameraId, sharedMode,
+ /*out*/ &tmp))
+ .isOk()) {
return ret;
}
client = static_cast<CLIENT*>(tmp.get());
@@ -2663,8 +2649,9 @@
client->setRotateAndCropOverride(rotateAndCropMode);
} else {
client->setRotateAndCropOverride(
- mCameraServiceProxyWrapper->getRotateAndCropOverride(
- clientPackageName, facing, multiuser_get_user_id(clientUid)));
+ mCameraServiceProxyWrapper->getRotateAndCropOverride(
+ clientPackageName, facing,
+ multiuser_get_user_id(clientAttribution.uid)));
}
}
@@ -2689,8 +2676,9 @@
bool isCameraPrivacyEnabled;
if (flags::camera_privacy_allowlist()) {
// Set camera muting behavior.
- isCameraPrivacyEnabled = this->isCameraPrivacyEnabled(
- toString16(client->getPackageName()), cameraId, packagePid, packageUid);
+ isCameraPrivacyEnabled =
+ this->isCameraPrivacyEnabled(toString16(client->getPackageName()), cameraId,
+ clientAttribution.pid, clientAttribution.uid);
} else {
isCameraPrivacyEnabled =
mSensorPrivacyPolicy->isCameraPrivacyEnabled();
@@ -2787,7 +2775,7 @@
if (lock == nullptr) {
ALOGE("%s: (PID %d) rejected (too many other clients connecting)."
- , __FUNCTION__, offlineClient->getClientPid());
+ , __FUNCTION__, offlineClient->getClientCallingPid());
return TIMED_OUT;
}
@@ -2807,7 +2795,7 @@
/*conflictingKeys*/ std::set<std::string>(), onlinePriority.getScore(),
onlineClientDesc->getOwnerId(), onlinePriority.getState(),
// native clients don't have offline processing support.
+ /*oomScoreOffset*/ 0, /*systemNativeClient*/false, /*sharedMode*/false);
+ /*ommScoreOffset*/ 0, /*systemNativeClient*/false, /*sharedMode*/false);
if (offlineClientDesc == nullptr) {
ALOGE("%s: Offline client descriptor was NULL", __FUNCTION__);
return BAD_VALUE;
@@ -3740,6 +3728,25 @@
updateAudioRestrictionLocked();
}
+bool CameraService::isOnlyClient(const BasicClient* client) {
+ Mutex::Autolock lock(mServiceLock);
+ bool ret = true;
+ if (!flags::camera_multi_client()) {
+ return ret;
+ }
+ if (client != nullptr) {
+ std::string camId = client->mCameraIdStr;
+ for (const auto& i : mActiveClientManager.getAll()) {
+ auto clientSp = i->getValue();
+ auto curCamId = i->getKey();
+ if (!curCamId.compare(camId) && clientSp.get() != client) {
+ return false;
+ }
+ }
+ }
+ return ret;
+}
+
bool CameraService::evictClientIdByRemote(const wp<IBinder>& remote) {
bool ret = false;
{
@@ -3800,20 +3807,37 @@
return state;
}
-sp<CameraService::BasicClient> CameraService::removeClientLocked(const std::string& cameraId) {
+std::vector<sp<CameraService::BasicClient>> CameraService::removeClientsLocked(
+ const std::string& cameraId) {
// Remove from active clients list
- auto clientDescriptorPtr = mActiveClientManager.remove(cameraId);
- if (clientDescriptorPtr == nullptr) {
- ALOGW("%s: Could not evict client, no client for camera ID %s", __FUNCTION__,
- cameraId.c_str());
- return sp<BasicClient>{nullptr};
- }
+ std::vector<sp<CameraService::BasicClient>> clients;
+ if (flags::camera_multi_client()) {
+ std::vector<CameraService::DescriptorPtr> clientDescriptors;
+ clientDescriptors = mActiveClientManager.removeAll(cameraId);
+ for (const auto& clientDescriptorPtr : clientDescriptors) {
+ if (clientDescriptorPtr != nullptr) {
+ sp<BasicClient> client = clientDescriptorPtr->getValue();
+ if (client.get() != nullptr) {
+ cacheClientTagDumpIfNeeded(clientDescriptorPtr->getKey(), client.get());
+ clients.push_back(client);
+ }
+ }
+ }
+ } else {
+ auto clientDescriptorPtr = mActiveClientManager.remove(cameraId);
+ if (clientDescriptorPtr == nullptr) {
+ ALOGW("%s: Could not evict client, no client for camera ID %s", __FUNCTION__,
+ cameraId.c_str());
+ return clients;
+ }
- sp<BasicClient> client = clientDescriptorPtr->getValue();
- if (client.get() != nullptr) {
- cacheClientTagDumpIfNeeded(clientDescriptorPtr->getKey(), client.get());
+ sp<BasicClient> client = clientDescriptorPtr->getValue();
+ if (client.get() != nullptr) {
+ cacheClientTagDumpIfNeeded(clientDescriptorPtr->getKey(), client.get());
+ clients.push_back(client);
+ }
}
- return client;
+ return clients;
}
void CameraService::doUserSwitch(const std::vector<int32_t>& newUserIds) {
@@ -4092,25 +4116,17 @@
// ----------------------------------------------------------------------------
-CameraService::Client::Client(const sp<CameraService>& cameraService,
- const sp<ICameraClient>& cameraClient,
+CameraService::Client::Client(
+ const sp<CameraService>& cameraService, const sp<ICameraClient>& cameraClient,
std::shared_ptr<AttributionAndPermissionUtils> attributionAndPermissionUtils,
- const std::string& clientPackageName, bool systemNativeClient,
- const std::optional<std::string>& clientFeatureId,
- const std::string& cameraIdStr,
- int api1CameraId, int cameraFacing, int sensorOrientation,
- int clientPid, uid_t clientUid,
- int servicePid, int rotationOverride) :
- CameraService::BasicClient(cameraService,
- IInterface::asBinder(cameraClient),
- attributionAndPermissionUtils,
- clientPackageName, systemNativeClient, clientFeatureId,
- cameraIdStr, cameraFacing, sensorOrientation,
- clientPid, clientUid,
- servicePid, rotationOverride),
- mCameraId(api1CameraId)
-{
- int callingPid = getCallingPid();
+ const AttributionSourceState& clientAttribution, int callingPid, bool systemNativeClient,
+ const std::string& cameraIdStr, int api1CameraId, int cameraFacing, int sensorOrientation,
+ int servicePid, int rotationOverride, bool sharedMode)
+ : CameraService::BasicClient(cameraService, IInterface::asBinder(cameraClient),
+ attributionAndPermissionUtils, clientAttribution, callingPid,
+ systemNativeClient, cameraIdStr, cameraFacing, sensorOrientation,
+ servicePid, rotationOverride, sharedMode),
+ mCameraId(api1CameraId) {
LOG1("Client::Client E (pid %d, id %d)", callingPid, mCameraId);
mRemoteCallback = cameraClient;
@@ -4132,27 +4148,28 @@
sp<CameraService> CameraService::BasicClient::BasicClient::sCameraService;
-CameraService::BasicClient::BasicClient(const sp<CameraService>& cameraService,
- const sp<IBinder>& remoteCallback,
+CameraService::BasicClient::BasicClient(
+ const sp<CameraService>& cameraService, const sp<IBinder>& remoteCallback,
std::shared_ptr<AttributionAndPermissionUtils> attributionAndPermissionUtils,
- const std::string& clientPackageName, bool nativeClient,
- const std::optional<std::string>& clientFeatureId, const std::string& cameraIdStr,
- int cameraFacing, int sensorOrientation, int clientPid, uid_t clientUid,
- int servicePid, int rotationOverride):
- AttributionAndPermissionUtilsEncapsulator(attributionAndPermissionUtils),
- mDestructionStarted(false),
- mCameraIdStr(cameraIdStr), mCameraFacing(cameraFacing), mOrientation(sensorOrientation),
- mClientPackageName(clientPackageName), mSystemNativeClient(nativeClient),
- mClientFeatureId(clientFeatureId),
- mClientPid(clientPid), mClientUid(clientUid),
- mServicePid(servicePid),
- mDisconnected(false), mUidIsTrusted(false),
- mRotationOverride(rotationOverride),
- mAudioRestriction(hardware::camera2::ICameraDeviceUser::AUDIO_RESTRICTION_NONE),
- mRemoteBinder(remoteCallback),
- mOpsActive(false),
- mOpsStreaming(false)
-{
+ const AttributionSourceState& clientAttribution, int callingPid, bool nativeClient,
+ const std::string& cameraIdStr, int cameraFacing, int sensorOrientation, int servicePid,
+ int rotationOverride, bool sharedMode)
+ : AttributionAndPermissionUtilsEncapsulator(attributionAndPermissionUtils),
+ mDestructionStarted(false),
+ mCameraIdStr(cameraIdStr),
+ mCameraFacing(cameraFacing),
+ mOrientation(sensorOrientation),
+ mClientAttribution(clientAttribution),
+ mCallingPid(callingPid),
+ mSystemNativeClient(nativeClient),
+ mServicePid(servicePid),
+ mDisconnected(false),
+ mUidIsTrusted(false),
+ mRotationOverride(rotationOverride), mSharedMode(sharedMode),
+ mAudioRestriction(hardware::camera2::ICameraDeviceUser::AUDIO_RESTRICTION_NONE),
+ mRemoteBinder(remoteCallback),
+ mCameraOpen(false),
+ mCameraStreaming(false) {
if (sCameraService == nullptr) {
sCameraService = cameraService;
}
@@ -4172,7 +4189,7 @@
mAppOpsManager = std::make_unique<AppOpsManager>();
}
- mUidIsTrusted = isTrustedCallingUid(mClientUid);
+ mUidIsTrusted = isTrustedCallingUid(mClientAttribution.uid);
}
CameraService::BasicClient::~BasicClient() {
@@ -4188,23 +4205,33 @@
mDisconnected = true;
sCameraService->removeByClient(this);
- sCameraService->logDisconnected(mCameraIdStr, mClientPid, mClientPackageName);
- sCameraService->mCameraProviderManager->removeRef(CameraProviderManager::DeviceMode::CAMERA,
- mCameraIdStr);
+ sCameraService->logDisconnected(mCameraIdStr, mCallingPid, getPackageName());
+ if (!flags::camera_multi_client() || !mSharedMode || (mSharedMode &&
+ sCameraService->isOnlyClient(this))) {
+ // Remove the HAL reference for the camera in either of the following scenarios :
+ // 1) Camera was opened in non-shared mode.
+ // 2) Camera was opened in shared mode and this is the last client using
+ // the camera which is being disconnected
+ sCameraService->mCameraProviderManager->removeRef(CameraProviderManager::DeviceMode::CAMERA,
+ mCameraIdStr);
+ }
sp<IBinder> remote = getRemote();
if (remote != nullptr) {
remote->unlinkToDeath(sCameraService);
}
- finishCameraOps();
- // Notify flashlight that a camera device is closed.
- sCameraService->mFlashlight->deviceClosed(mCameraIdStr);
+ notifyCameraClosing();
+ if (!flags::camera_multi_client() || !mSharedMode || (mSharedMode &&
+ sCameraService->isOnlyClient(this))) {
+ // Notify flashlight that a camera device is closed.
+ sCameraService->mFlashlight->deviceClosed(mCameraIdStr);
+ }
ALOGI("%s: Disconnected client for camera %s for PID %d", __FUNCTION__, mCameraIdStr.c_str(),
- mClientPid);
+ mCallingPid);
// client shouldn't be able to call into us anymore
- mClientPid = 0;
+ mCallingPid = 0;
const auto& mActivityManager = getActivityManager();
if (mActivityManager) {
@@ -4240,7 +4267,7 @@
}
std::string CameraService::BasicClient::getPackageName() const {
- return mClientPackageName;
+ return mClientAttribution.packageName.value_or(kUnknownPackageName);
}
int CameraService::BasicClient::getCameraFacing() const {
@@ -4251,12 +4278,16 @@
return mOrientation;
}
-int CameraService::BasicClient::getClientPid() const {
- return mClientPid;
+int CameraService::BasicClient::getClientCallingPid() const {
+ return mCallingPid;
}
uid_t CameraService::BasicClient::getClientUid() const {
- return mClientUid;
+ return mClientAttribution.uid;
+}
+
+const std::optional<std::string>& CameraService::BasicClient::getClientAttributionTag() const {
+ return mClientAttribution.attributionTag;
}
bool CameraService::BasicClient::canCastToApiClient(apiLevel level) const {
@@ -4293,21 +4324,22 @@
}
}
-status_t CameraService::BasicClient::handleAppOpMode(int32_t mode) {
- if (mode == AppOpsManager::MODE_ERRORED) {
- ALOGI("Camera %s: Access for \"%s\" has been revoked",
- mCameraIdStr.c_str(), mClientPackageName.c_str());
+status_t CameraService::BasicClient::handlePermissionResult(
+ PermissionChecker::PermissionResult result) {
+ if (result == PermissionChecker::PERMISSION_HARD_DENIED) {
+ ALOGI("Camera %s: Access for \"%s\" has been revoked", mCameraIdStr.c_str(),
+ getPackageName().c_str());
return PERMISSION_DENIED;
- } else if (!mUidIsTrusted && mode == AppOpsManager::MODE_IGNORED) {
- // If the calling Uid is trusted (a native service), the AppOpsManager could
- // return MODE_IGNORED. Do not treat such case as error.
- bool isUidActive = sCameraService->mUidPolicy->isUidActive(mClientUid,
- mClientPackageName);
+ } else if (!mUidIsTrusted && result == PermissionChecker::PERMISSION_SOFT_DENIED) {
+ // If the calling Uid is trusted (a native service), the AppOpsManager/PermissionChecker
+ // could return MODE_IGNORED/PERMISSION_SOFT_DENIED. Do not treat such case as error.
+ bool isUidActive =
+ sCameraService->mUidPolicy->isUidActive(getClientUid(), getPackageName());
bool isCameraPrivacyEnabled;
if (flags::camera_privacy_allowlist()) {
isCameraPrivacyEnabled = sCameraService->isCameraPrivacyEnabled(
- toString16(mClientPackageName), std::string(), mClientPid, mClientUid);
+ toString16(getPackageName()), std::string(), mCallingPid, getClientUid());
} else {
isCameraPrivacyEnabled =
sCameraService->mSensorPrivacyPolicy->isCameraPrivacyEnabled();
@@ -4319,9 +4351,9 @@
// capabilities are unknown.
if (!isUidActive || !isCameraPrivacyEnabled) {
ALOGI("Camera %s: Access for \"%s\" has been restricted."
- "uid active: %s, privacy enabled: %s", mCameraIdStr.c_str(),
- mClientPackageName.c_str(), isUidActive ? "true" : "false",
- isCameraPrivacyEnabled ? "true" : "false");
+ "uid active: %s, privacy enabled: %s",
+ mCameraIdStr.c_str(), getPackageName().c_str(), isUidActive ? "true" : "false",
+ isCameraPrivacyEnabled ? "true" : "false");
// Return the same error as for device policy manager rejection
return -EACCES;
}
@@ -4329,40 +4361,50 @@
return OK;
}
-status_t CameraService::BasicClient::startCameraOps() {
+status_t CameraService::BasicClient::handleAppOpMode(int32_t mode) {
+ return handlePermissionResult(appOpModeToPermissionResult(mode));
+}
+
+status_t CameraService::BasicClient::notifyCameraOpening() {
ATRACE_CALL();
- {
- ALOGV("%s: Start camera ops, package name = %s, client UID = %d",
- __FUNCTION__, mClientPackageName.c_str(), mClientUid);
- }
- if (mAppOpsManager != nullptr) {
- // Notify app ops that the camera is not available
- mOpsCallback = new OpsCallback(this);
+ // Don't start watching until we're streaming when using permissionChecker for data delivery
+ if (!flags::data_delivery_permission_checks()) {
+ ALOGD("%s: Start camera ops, package name = %s, client UID = %d", __FUNCTION__,
+ getPackageName().c_str(), getClientUid());
- mAppOpsManager->startWatchingMode(AppOpsManager::OP_CAMERA,
- toString16(mClientPackageName),
- AppOpsManager::WATCH_FOREGROUND_CHANGES, mOpsCallback);
+ if (mAppOpsManager != nullptr) {
+ // Notify app ops that the camera is not available
+ mOpsCallback = new OpsCallback(this);
- // Just check for camera acccess here on open - delay startOp until
- // camera frames start streaming in startCameraStreamingOps
- int32_t mode = mAppOpsManager->checkOp(AppOpsManager::OP_CAMERA, mClientUid,
- toString16(mClientPackageName));
- status_t res = handleAppOpMode(mode);
- if (res != OK) {
- return res;
+ mAppOpsManager->startWatchingMode(
+ AppOpsManager::OP_CAMERA, toString16(getPackageName()),
+ AppOpsManager::WATCH_FOREGROUND_CHANGES, mOpsCallback);
+
+ // Just check for camera access here on open - delay startOp until
+ // camera frames start streaming in startCameraStreamingOps
+ int32_t mode = mAppOpsManager->checkOp(AppOpsManager::OP_CAMERA, getClientUid(),
+ toString16(getPackageName()));
+ status_t res = handleAppOpMode(mode);
+ if (res != OK) {
+ return res;
+ }
}
+ } else {
+ // TODO: Remove when removing the data_delivery_permission_checks flag
+ ALOGD("%s: Bypassing checkOp for uid %d", __FUNCTION__, getClientUid());
}
- mOpsActive = true;
+ mCameraOpen = true;
// Transition device availability listeners from PRESENT -> NOT_AVAILABLE
sCameraService->updateStatus(StatusInternal::NOT_AVAILABLE, mCameraIdStr);
- sCameraService->mUidPolicy->registerMonitorUid(mClientUid, /*openCamera*/true);
+ sCameraService->mUidPolicy->registerMonitorUid(getClientUid(), /*openCamera*/ true);
// Notify listeners of camera open/close status
- sCameraService->updateOpenCloseStatus(mCameraIdStr, true/*open*/, mClientPackageName);
+ sCameraService->updateOpenCloseStatus(mCameraIdStr, true /*open*/, getPackageName(),
+ mSharedMode);
return OK;
}
@@ -4370,30 +4412,52 @@
status_t CameraService::BasicClient::startCameraStreamingOps() {
ATRACE_CALL();
- if (!mOpsActive) {
+ if (!mCameraOpen) {
ALOGE("%s: Calling streaming start when not yet active", __FUNCTION__);
return INVALID_OPERATION;
}
- if (mOpsStreaming) {
+
+ if (mCameraStreaming) {
ALOGV("%s: Streaming already active!", __FUNCTION__);
return OK;
}
- ALOGV("%s: Start camera streaming ops, package name = %s, client UID = %d",
- __FUNCTION__, mClientPackageName.c_str(), mClientUid);
+ ALOGV("%s: Start camera streaming ops, package name = %s, client UID = %d", __FUNCTION__,
+ getPackageName().c_str(), getClientUid());
if (mAppOpsManager != nullptr) {
- int32_t mode = mAppOpsManager->startOpNoThrow(AppOpsManager::OP_CAMERA, mClientUid,
- toString16(mClientPackageName), /*startIfModeDefault*/ false,
- toString16(mClientFeatureId),
- toString16("start camera ") + toString16(mCameraIdStr));
- status_t res = handleAppOpMode(mode);
- if (res != OK) {
- return res;
+ if (flags::data_delivery_permission_checks()) {
+ ALOGD("%s: Start data delivery for uid %d", __FUNCTION__, getClientUid());
+
+ const PermissionChecker::PermissionResult result =
+ checkPermissionsForCameraForStartDataDelivery(mCameraIdStr, mClientAttribution);
+ status_t res = handlePermissionResult(result);
+ if (res != OK) {
+ return res;
+ }
+
+ mOpsCallback = new OpsCallback(this);
+ std::for_each(AttrSourceItr{mClientAttribution}, AttrSourceItr::end(),
+ [&](const auto& attr) {
+ mAppOpsManager->startWatchingMode(
+ AppOpsManager::OP_CAMERA,
+ toString16(attr.packageName.value_or("")),
+ AppOpsManager::WATCH_FOREGROUND_CHANGES, mOpsCallback);
+ });
+ } else {
+ ALOGD("%s: startOp for uid %d", __FUNCTION__, getClientUid());
+ int32_t mode = mAppOpsManager->startOpNoThrow(
+ AppOpsManager::OP_CAMERA, getClientUid(), toString16(getPackageName()),
+ /*startIfModeDefault*/ false, toString16(getClientAttributionTag()),
+ toString16("start camera ") + toString16(mCameraIdStr));
+ status_t res = handleAppOpMode(mode);
+ if (res != OK) {
+ return res;
+ }
}
}
- mOpsStreaming = true;
+ mCameraStreaming = true;
return OK;
}
@@ -4401,14 +4465,20 @@
status_t CameraService::BasicClient::noteAppOp() {
ATRACE_CALL();
- ALOGV("%s: Start camera noteAppOp, package name = %s, client UID = %d",
- __FUNCTION__, mClientPackageName.c_str(), mClientUid);
+ ALOGV("%s: Start camera noteAppOp, package name = %s, client UID = %d", __FUNCTION__,
+ getPackageName().c_str(), getClientUid());
// noteAppOp is only used for when camera mute is not supported, in order
// to trigger the sensor privacy "Unblock" dialog
- if (mAppOpsManager != nullptr) {
- int32_t mode = mAppOpsManager->noteOp(AppOpsManager::OP_CAMERA, mClientUid,
- toString16(mClientPackageName), toString16(mClientFeatureId),
+ if (flags::data_delivery_permission_checks()) {
+ // Ignore the result, since we're only triggering the dialog
+ ALOGD("%s: Check data delivery permissions for uid %d", __FUNCTION__, getClientUid());
+ hasPermissionsForCameraForDataDelivery(std::string(), mClientAttribution);
+ } else if (mAppOpsManager != nullptr) {
+ ALOGD("%s: noteOp for uid %d", __FUNCTION__, getClientUid());
+ int32_t mode = mAppOpsManager->noteOp(
+ AppOpsManager::OP_CAMERA, getClientUid(), toString16(getPackageName()),
+ toString16(getClientAttributionTag()),
toString16("start camera ") + toString16(mCameraIdStr));
status_t res = handleAppOpMode(mode);
if (res != OK) {
@@ -4422,35 +4492,48 @@
status_t CameraService::BasicClient::finishCameraStreamingOps() {
ATRACE_CALL();
- if (!mOpsActive) {
+ if (!mCameraOpen) {
ALOGE("%s: Calling streaming start when not yet active", __FUNCTION__);
return INVALID_OPERATION;
}
- if (!mOpsStreaming) {
+ if (!mCameraStreaming) {
ALOGV("%s: Streaming not active!", __FUNCTION__);
return OK;
}
if (mAppOpsManager != nullptr) {
- mAppOpsManager->finishOp(AppOpsManager::OP_CAMERA, mClientUid,
- toString16(mClientPackageName), toString16(mClientFeatureId));
- mOpsStreaming = false;
+ if (flags::data_delivery_permission_checks()) {
+ ALOGD("%s: finishDataDelivery for uid %d", __FUNCTION__, getClientUid());
+ finishDataDelivery(mClientAttribution);
+
+ // Stop watching app op changes after stop streaming
+ if (mOpsCallback != nullptr) {
+ mAppOpsManager->stopWatchingMode(mOpsCallback);
+ mOpsCallback.clear();
+ }
+ } else {
+ ALOGD("%s: finishOp for uid %d", __FUNCTION__, getClientUid());
+ mAppOpsManager->finishOp(AppOpsManager::OP_CAMERA, getClientUid(),
+ toString16(getPackageName()),
+ toString16(getClientAttributionTag()));
+ }
+ mCameraStreaming = false;
}
return OK;
}
-status_t CameraService::BasicClient::finishCameraOps() {
+status_t CameraService::BasicClient::notifyCameraClosing() {
ATRACE_CALL();
- if (mOpsStreaming) {
+ if (mCameraStreaming) {
// Make sure we've notified everyone about camera stopping
finishCameraStreamingOps();
}
- // Check if startCameraOps succeeded, and if so, finish the camera op
- if (mOpsActive) {
- mOpsActive = false;
+ // Check if notifyCameraOpening succeeded, and if so, finish the camera op if necessary
+ if (mCameraOpen) {
+ mCameraOpen = false;
// This function is called when a client disconnects. This should
// release the camera, but actually only if it was in a proper
@@ -4459,23 +4542,42 @@
StatusInternal::ENUMERATING, StatusInternal::NOT_PRESENT};
// Transition to PRESENT if the camera is not in either of the rejected states
- sCameraService->updateStatus(StatusInternal::PRESENT,
- mCameraIdStr, rejected);
+ if (!flags::camera_multi_client() || !mSharedMode || (mSharedMode
+ && sCameraService->isOnlyClient(this))) {
+ sCameraService->updateStatus(StatusInternal::PRESENT,
+ mCameraIdStr, rejected);
+ }
}
- // Always stop watching, even if no camera op is active
- if (mOpsCallback != nullptr && mAppOpsManager != nullptr) {
- mAppOpsManager->stopWatchingMode(mOpsCallback);
- }
- mOpsCallback.clear();
- sCameraService->mUidPolicy->unregisterMonitorUid(mClientUid, /*closeCamera*/true);
+ // When using the data delivery permission checks, the open state does not involve AppOps
+ if (!flags::data_delivery_permission_checks()) {
+ // Always stop watching, even if no camera op is active
+ if (mOpsCallback != nullptr && mAppOpsManager != nullptr) {
+ mAppOpsManager->stopWatchingMode(mOpsCallback);
+ }
+ mOpsCallback.clear();
+ }
+
+ sCameraService->mUidPolicy->unregisterMonitorUid(getClientUid(), /*closeCamera*/ true);
// Notify listeners of camera open/close status
- sCameraService->updateOpenCloseStatus(mCameraIdStr, false/*open*/, mClientPackageName);
+ sCameraService->updateOpenCloseStatus(mCameraIdStr, false /*open*/, getPackageName(),
+ mSharedMode);
return OK;
}
+int32_t CameraService::getUidProcessState(int32_t uid) {
+ const auto& activityManager = getActivityManager();
+ int32_t procState = ActivityManager::PROCESS_STATE_NONEXISTENT;
+ if (activityManager != nullptr) {
+ procState = activityManager->getUidProcessState(uid, toString16(kServiceName));
+ } else {
+ ALOGE("%s: getActivityManager returned nullptr.", __FUNCTION__);
+ }
+ return procState;
+}
+
void CameraService::BasicClient::opChanged(int32_t op, const String16&) {
ATRACE_CALL();
if (mAppOpsManager == nullptr) {
@@ -4487,40 +4589,72 @@
return;
}
- int32_t res;
- res = mAppOpsManager->checkOp(AppOpsManager::OP_CAMERA,
- mClientUid, toString16(mClientPackageName));
- ALOGV("checkOp returns: %d, %s ", res,
- res == AppOpsManager::MODE_ALLOWED ? "ALLOWED" :
- res == AppOpsManager::MODE_IGNORED ? "IGNORED" :
- res == AppOpsManager::MODE_ERRORED ? "ERRORED" :
- "UNKNOWN");
+ PermissionChecker::PermissionResult res;
+ if (flags::data_delivery_permission_checks()) {
+ int32_t appOpMode = AppOpsManager::MODE_ALLOWED;
+ std::for_each(AttrSourceItr{mClientAttribution}, AttrSourceItr::end(),
+ [&](const auto& attr) {
+ appOpMode = std::max(appOpMode, mAppOpsManager->checkOp(
+ AppOpsManager::OP_CAMERA, attr.uid,
+ toString16(attr.packageName.value_or(""))));
+ });
+ ALOGV("checkOp returns: %d, %s ", res,
+ appOpMode == AppOpsManager::MODE_ALLOWED ? "ALLOWED"
+ : appOpMode == AppOpsManager::MODE_IGNORED ? "IGNORED"
+ : appOpMode == AppOpsManager::MODE_ERRORED ? "ERRORED"
+ : "UNKNOWN");
+ res = appOpModeToPermissionResult(appOpMode);
+ } else {
+ int32_t appOpMode = mAppOpsManager->checkOp(AppOpsManager::OP_CAMERA, getClientUid(),
+ toString16(getPackageName()));
+ ALOGV("checkOp returns: %d, %s ", res,
+ appOpMode == AppOpsManager::MODE_ALLOWED ? "ALLOWED"
+ : appOpMode == AppOpsManager::MODE_IGNORED ? "IGNORED"
+ : appOpMode == AppOpsManager::MODE_ERRORED ? "ERRORED"
+ : "UNKNOWN");
+ res = appOpModeToPermissionResult(appOpMode);
+ }
- if (res == AppOpsManager::MODE_ERRORED) {
+ if (res == PermissionChecker::PERMISSION_HARD_DENIED) {
ALOGI("Camera %s: Access for \"%s\" revoked", mCameraIdStr.c_str(),
- mClientPackageName.c_str());
+ getPackageName().c_str());
block();
- } else if (res == AppOpsManager::MODE_IGNORED) {
- bool isUidActive = sCameraService->mUidPolicy->isUidActive(mClientUid, mClientPackageName);
+ } else if (res == PermissionChecker::PERMISSION_SOFT_DENIED) {
+ bool isUidActive =
+ sCameraService->mUidPolicy->isUidActive(getClientUid(), getPackageName());
// Uid may be active, but not visible to the user (e.g. PROCESS_STATE_FOREGROUND_SERVICE).
// If not visible, but still active, then we want to block instead of muting the camera.
- int32_t procState = sCameraService->mUidPolicy->getProcState(mClientUid);
+ int32_t procState = ActivityManager::PROCESS_STATE_NONEXISTENT;
+ if (flags::data_delivery_permission_checks()) {
+ // Use the proc state of the last uid in the chain (ultimately receiving the data)
+ // when determining whether to mute or block
+ int32_t uid = -1;
+ std::for_each(AttrSourceItr{mClientAttribution}, AttrSourceItr::end(),
+ [&](const auto& attr) {
+ uid = static_cast<uid_t>(attr.uid);
+ });
+ procState = getUidProcessState(uid);
+ } else if (flags::query_process_state()) {
+ procState = getUidProcessState(getClientUid());
+ } else {
+ procState = sCameraService->mUidPolicy->getProcState(getClientUid());
+ }
bool isUidVisible = (procState <= ActivityManager::PROCESS_STATE_BOUND_TOP);
bool isCameraPrivacyEnabled;
if (flags::camera_privacy_allowlist()) {
isCameraPrivacyEnabled = sCameraService->isCameraPrivacyEnabled(
- toString16(mClientPackageName),std::string(),mClientPid,mClientUid);
+ toString16(getPackageName()), std::string(), mCallingPid, getClientUid());
} else {
isCameraPrivacyEnabled =
sCameraService->mSensorPrivacyPolicy->isCameraPrivacyEnabled();
}
ALOGI("Camera %s: Access for \"%s\" has been restricted, isUidTrusted %d, isUidActive %d"
- " isUidVisible %d, isCameraPrivacyEnabled %d", mCameraIdStr.c_str(),
- mClientPackageName.c_str(), mUidIsTrusted, isUidActive, isUidVisible,
- isCameraPrivacyEnabled);
+ " isUidVisible %d, isCameraPrivacyEnabled %d procState %d",
+ mCameraIdStr.c_str(), getPackageName().c_str(), mUidIsTrusted, isUidActive,
+ isUidVisible, isCameraPrivacyEnabled, procState);
// If the calling Uid is trusted (a native service), or the client Uid is active / visible
// (WAR for b/175320666)the AppOpsManager could return MODE_IGNORED. Do not treat such
// cases as error.
@@ -4531,7 +4665,7 @@
block();
}
}
- } else if (res == AppOpsManager::MODE_ALLOWED) {
+ } else if (res == PermissionChecker::PERMISSION_GRANTED) {
setCameraMute(sCameraService->mOverrideCameraMuteMode);
}
}
@@ -4541,12 +4675,41 @@
// Reset the client PID to allow server-initiated disconnect,
// and to prevent further calls by client.
- mClientPid = getCallingPid();
+ mCallingPid = getCallingPid();
CaptureResultExtras resultExtras; // a dummy result (invalid)
notifyError(hardware::camera2::ICameraDeviceCallbacks::ERROR_CAMERA_DISABLED, resultExtras);
disconnect();
}
+status_t CameraService::BasicClient::isPrimaryClient(bool* isPrimary) {
+ ATRACE_CALL();
+ if (!flags::camera_multi_client()) {
+ return INVALID_OPERATION;
+ }
+
+ if (!mSharedMode) {
+ ALOGW("%s: Invalid operation when camera is not opened in shared mode", __FUNCTION__);
+ return INVALID_OPERATION;
+ }
+ *isPrimary = mIsPrimaryClient;
+ return OK;
+}
+
+status_t CameraService::BasicClient::setPrimaryClient(bool isPrimary) {
+ ATRACE_CALL();
+
+ if (!flags::camera_multi_client()) {
+ return INVALID_OPERATION;
+ }
+
+ if (!mSharedMode) {
+ ALOGW("%s:Invalid operation when camera is not opened in shared mode", __FUNCTION__);
+ return INVALID_OPERATION;
+ }
+ mIsPrimaryClient = isPrimary;
+ return OK;
+}
+
// ----------------------------------------------------------------------------
void CameraService::Client::notifyError(int32_t errorCode,
@@ -5097,12 +5260,27 @@
void CameraService::CameraState::setClientPackage(const std::string& clientPackage) {
Mutex::Autolock lock(mStatusLock);
- mClientPackage = clientPackage;
+ mClientPackages.clear();
+ mClientPackages.insert(clientPackage);
}
std::string CameraService::CameraState::getClientPackage() const {
Mutex::Autolock lock(mStatusLock);
- return mClientPackage;
+ if (!mClientPackages.empty()) {
+ std::set<std::string>::iterator it = mClientPackages.begin();
+ return *it;
+ }
+ return std::string();
+}
+
+void CameraService::CameraState::addClientPackage(const std::string& clientPackage) {
+ Mutex::Autolock lock(mStatusLock);
+ mClientPackages.insert(clientPackage);
+}
+
+void CameraService::CameraState::removeClientPackage(const std::string& clientPackage) {
+ Mutex::Autolock lock(mStatusLock);
+ mClientPackages.erase(clientPackage);
}
// ----------------------------------------------------------------------------
@@ -5150,6 +5328,39 @@
return descriptor->getValue();
}
+void CameraService::CameraClientManager::remove(const CameraService::DescriptorPtr& value) {
+ ClientManager::remove(value);
+ if (!flags::camera_multi_client()) {
+ return;
+ }
+ auto clientToRemove = value->getValue();
+ if ((clientToRemove.get() != nullptr) && clientToRemove->mSharedMode) {
+ bool primaryClient = false;
+ status_t ret = clientToRemove->isPrimaryClient(&primaryClient);
+ if ((ret == OK) && primaryClient) {
+ // Primary client is being removed. Find the next higher priority
+ // client to become primary client.
+ auto clientDescriptor = get(value->getKey());
+ if (clientDescriptor == nullptr) {
+ ALOGV("CameraService::CameraClientManager::no other clients are using same camera");
+ return;
+ }
+ resource_policy::ClientPriority highestPriority = clientDescriptor->getPriority();
+ sp<BasicClient> highestPriorityClient = clientDescriptor->getValue();
+ if (highestPriorityClient.get() != nullptr) {
+ for (auto& i : getAll()) {
+ if ((i->getKey() == value->getKey()) && (i->getPriority() < highestPriority)) {
+ highestPriority = i->getPriority();
+ highestPriorityClient = i->getValue();
+ }
+ }
+ highestPriorityClient->setPrimaryClient(true);
+ highestPriorityClient->notifyClientSharedAccessPriorityChanged(true);
+ }
+ }
+ }
+}
+
std::string CameraService::CameraClientManager::toString() const {
auto all = getAll();
std::ostringstream ret;
@@ -5195,14 +5406,14 @@
CameraService::DescriptorPtr CameraService::CameraClientManager::makeClientDescriptor(
const std::string& key, const sp<BasicClient>& value, int32_t cost,
const std::set<std::string>& conflictingKeys, int32_t score, int32_t ownerId,
- int32_t state, int32_t oomScoreOffset, bool systemNativeClient) {
+ int32_t state, int32_t oomScoreOffset, bool systemNativeClient, bool sharedMode) {
int32_t score_adj = systemNativeClient ? kSystemNativeClientScore : score;
int32_t state_adj = systemNativeClient ? kSystemNativeClientState : state;
return std::make_shared<resource_policy::ClientDescriptor<std::string, sp<BasicClient>>>(
key, value, cost, conflictingKeys, score_adj, ownerId, state_adj,
- systemNativeClient, oomScoreOffset);
+ systemNativeClient, oomScoreOffset, sharedMode);
}
CameraService::DescriptorPtr CameraService::CameraClientManager::makeClientDescriptor(
@@ -5211,7 +5422,7 @@
return makeClientDescriptor(partial->getKey(), value, partial->getCost(),
partial->getConflicting(), partial->getPriority().getScore(),
partial->getOwnerId(), partial->getPriority().getState(), oomScoreOffset,
- systemNativeClient);
+ systemNativeClient, partial->getSharedMode());
}
// ----------------------------------------------------------------------------
@@ -5743,7 +5954,7 @@
}
void CameraService::updateOpenCloseStatus(const std::string& cameraId, bool open,
- const std::string& clientPackageName) {
+ const std::string& clientPackageName, bool sharedMode) {
auto state = getCameraState(cameraId);
if (state == nullptr) {
ALOGW("%s: Could not update the status for %s, no such device exists", __FUNCTION__,
@@ -5751,9 +5962,17 @@
return;
}
if (open) {
- state->setClientPackage(clientPackageName);
+ if (flags::camera_multi_client() && sharedMode) {
+ state->addClientPackage(clientPackageName);
+ } else {
+ state->setClientPackage(clientPackageName);
+ }
} else {
- state->setClientPackage(std::string());
+ if (flags::camera_multi_client() && sharedMode) {
+ state->removeClientPackage(clientPackageName);
+ } else {
+ state->setClientPackage(std::string());
+ }
}
// Get the device id and app-visible camera id for the given HAL-visible camera id.
@@ -5772,7 +5991,10 @@
ret = it->getListener()->onCameraOpened(mappedCameraId, clientPackageName,
deviceId);
} else {
- ret = it->getListener()->onCameraClosed(mappedCameraId, deviceId);
+ if (!flags::camera_multi_client() || !sharedMode || (sharedMode &&
+ mActiveClientManager.getCameraClient(cameraId) == nullptr)) {
+ ret = it->getListener()->onCameraClosed(mappedCameraId, deviceId);
+ }
}
it->handleBinderStatus(ret,
diff --git a/services/camera/libcameraservice/CameraService.h b/services/camera/libcameraservice/CameraService.h
index 0ac391d..6f29ff4 100644
--- a/services/camera/libcameraservice/CameraService.h
+++ b/services/camera/libcameraservice/CameraService.h
@@ -24,19 +24,19 @@
#include <android/hardware/camera2/BnCameraInjectionSession.h>
#include <android/hardware/camera2/ICameraInjectionCallback.h>
-#include <cutils/multiuser.h>
-#include <utils/Vector.h>
-#include <utils/KeyedVector.h>
#include <binder/ActivityManager.h>
#include <binder/AppOpsManager.h>
#include <binder/BinderService.h>
-#include <binder/IServiceManager.h>
#include <binder/IActivityManager.h>
#include <binder/IAppOpsCallback.h>
-#include <binder/IPermissionController.h>
+#include <binder/IServiceManager.h>
#include <binder/IUidObserver.h>
+#include <cutils/multiuser.h>
+#include <gui/Flags.h>
#include <hardware/camera.h>
#include <sensorprivacy/SensorPrivacyManager.h>
+#include <utils/KeyedVector.h>
+#include <utils/Vector.h>
#include <android/hardware/camera/common/1.0/types.h>
@@ -181,7 +181,7 @@
const sp<hardware::camera2::ICameraDeviceCallbacks>& cameraCb,
const std::string& cameraId, int scoreOffset, int targetSdkVersion,
int rotationOverride, const AttributionSourceState& clientAttribution,
- int32_t devicePolicy,
+ int32_t devicePolicy, bool sharedMode,
/*out*/
sp<hardware::camera2::ICameraDeviceUser>* device);
@@ -384,11 +384,16 @@
virtual void notifyError(int32_t errorCode,
const CaptureResultExtras& resultExtras) = 0;
+ virtual void notifyClientSharedAccessPriorityChanged(bool primaryClient) = 0;
+
// Get the UID of the application client using this
virtual uid_t getClientUid() const;
- // Get the PID of the application client using this
- virtual int getClientPid() const;
+ // Get the calling PID of the application client using this
+ virtual int getClientCallingPid() const;
+
+ // Get the attribution tag (previously featureId) of the application client using this
+ virtual const std::optional<std::string>& getClientAttributionTag() const;
// Check what API level is used for this client. This is used to determine which
// superclass this can be cast to.
@@ -450,62 +455,60 @@
virtual status_t injectSessionParams(
const hardware::camera2::impl::CameraMetadataNative& sessionParams) = 0;
+ status_t isPrimaryClient(/*out*/bool* isPrimary);
+
+ status_t setPrimaryClient(bool isPrimary);
+
protected:
- BasicClient(const sp<CameraService>& cameraService,
- const sp<IBinder>& remoteCallback,
- std::shared_ptr<AttributionAndPermissionUtils> attributionAndPermissionUtils,
- const std::string& clientPackageName,
- bool nativeClient,
- const std::optional<std::string>& clientFeatureId,
- const std::string& cameraIdStr,
- int cameraFacing,
- int sensorOrientation,
- int clientPid,
- uid_t clientUid,
- int servicePid,
- int rotationOverride);
+ BasicClient(const sp<CameraService>& cameraService, const sp<IBinder>& remoteCallback,
+ std::shared_ptr<AttributionAndPermissionUtils> attributionAndPermissionUtils,
+ const AttributionSourceState& clientAttribution, int callingPid,
+ bool nativeClient, const std::string& cameraIdStr, int cameraFacing,
+ int sensorOrientation, int servicePid, int rotationOverride, bool sharedMode);
virtual ~BasicClient();
- // the instance is in the middle of destruction. When this is set,
+ // The instance is in the middle of destruction. When this is set,
// the instance should not be accessed from callback.
// CameraService's mClientLock should be acquired to access this.
// - subclasses should set this to true in their destructors.
- bool mDestructionStarted;
+ bool mDestructionStarted;
- // these are initialized in the constructor.
+ // These are initialized in the constructor.
static sp<CameraService> sCameraService;
const std::string mCameraIdStr;
const int mCameraFacing;
const int mOrientation;
- std::string mClientPackageName;
+ AttributionSourceState mClientAttribution;
+ int mCallingPid;
bool mSystemNativeClient;
- std::optional<std::string> mClientFeatureId;
- pid_t mClientPid;
- const uid_t mClientUid;
const pid_t mServicePid;
bool mDisconnected;
bool mUidIsTrusted;
int mRotationOverride;
+ bool mSharedMode;
+ bool mIsPrimaryClient;
mutable Mutex mAudioRestrictionLock;
int32_t mAudioRestriction;
// - The app-side Binder interface to receive callbacks from us
- sp<IBinder> mRemoteBinder; // immutable after constructor
+ sp<IBinder> mRemoteBinder; // immutable after constructor
// Permissions management methods for camera lifecycle
- // Notify rest of system/apps about camera opening, and check appops
- virtual status_t startCameraOps();
+ // Notify rest of system/apps about camera opening, and (legacy) check appops
+ virtual status_t notifyCameraOpening();
// Notify rest of system/apps about camera starting to stream data, and confirm appops
virtual status_t startCameraStreamingOps();
// Notify rest of system/apps about camera stopping streaming data
virtual status_t finishCameraStreamingOps();
// Notify rest of system/apps about camera closing
- virtual status_t finishCameraOps();
- // Handle errors for start/checkOps
+ virtual status_t notifyCameraClosing();
+ // Handle errors for start/checkOps, startDataDelivery
virtual status_t handleAppOpMode(int32_t mode);
+ virtual status_t handlePermissionResult(
+ PermissionChecker::PermissionResult result);
// Just notify camera appops to trigger unblocking dialog if sensor
// privacy is enabled and camera mute is not supported
virtual status_t noteAppOp();
@@ -523,12 +526,10 @@
}; // class OpsCallback
sp<OpsCallback> mOpsCallback;
- // Track whether checkOps was called successfully, to avoid
- // finishing what we didn't start, on camera open.
- bool mOpsActive;
- // Track whether startOps was called successfully on start of
- // camera streaming.
- bool mOpsStreaming;
+ // Track if the camera is currently active.
+ bool mCameraOpen;
+ // Track if the camera is currently streaming.
+ bool mCameraStreaming;
// IAppOpsCallback interface, indirected through opListener
virtual void opChanged(int32_t op, const String16& packageName);
@@ -544,10 +545,9 @@
virtual status_t connect(const sp<hardware::ICameraClient>& client) = 0;
virtual status_t lock() = 0;
virtual status_t unlock() = 0;
- virtual status_t setPreviewTarget(const sp<IGraphicBufferProducer>& bufferProducer)=0;
+ virtual status_t setPreviewTarget(const sp<SurfaceType>& target) = 0;
virtual void setPreviewCallbackFlag(int flag) = 0;
- virtual status_t setPreviewCallbackTarget(
- const sp<IGraphicBufferProducer>& callbackProducer) = 0;
+ virtual status_t setPreviewCallbackTarget(const sp<SurfaceType>& target) = 0;
virtual status_t startPreview() = 0;
virtual void stopPreview() = 0;
virtual bool previewEnabled() = 0;
@@ -562,23 +562,16 @@
virtual status_t setParameters(const String8& params) = 0;
virtual String8 getParameters() const = 0;
virtual status_t sendCommand(int32_t cmd, int32_t arg1, int32_t arg2) = 0;
- virtual status_t setVideoTarget(const sp<IGraphicBufferProducer>& bufferProducer) = 0;
+ virtual status_t setVideoTarget(const sp<SurfaceType>& target) = 0;
// Interface used by CameraService
Client(const sp<CameraService>& cameraService,
- const sp<hardware::ICameraClient>& cameraClient,
- std::shared_ptr<AttributionAndPermissionUtils> attributionAndPermissionUtils,
- const std::string& clientPackageName,
- bool systemNativeClient,
- const std::optional<std::string>& clientFeatureId,
- const std::string& cameraIdStr,
- int api1CameraId,
- int cameraFacing,
- int sensorOrientation,
- int clientPid,
- uid_t clientUid,
- int servicePid,
- int rotationOverride);
+ const sp<hardware::ICameraClient>& cameraClient,
+ std::shared_ptr<AttributionAndPermissionUtils> attributionAndPermissionUtils,
+ const AttributionSourceState& clientAttribution, int callingPid,
+ bool systemNativeClient, const std::string& cameraIdStr, int api1CameraId,
+ int cameraFacing, int sensorOrientation, int servicePid, int rotationOverride,
+ bool sharedMode);
~Client();
// return our camera client
@@ -638,6 +631,11 @@
CameraClientManager();
virtual ~CameraClientManager();
+ // Bring all remove() functions into scope
+ using ClientManager::remove;
+
+ virtual void remove(const DescriptorPtr& value) override;
+
/**
* Return a strong pointer to the active BasicClient for this camera ID, or an empty
* if none exists.
@@ -655,7 +653,8 @@
static DescriptorPtr makeClientDescriptor(const std::string& key,
const sp<BasicClient>& value, int32_t cost,
const std::set<std::string>& conflictingKeys, int32_t score,
- int32_t ownerId, int32_t state, int oomScoreOffset, bool systemNativeClient);
+ int32_t ownerId, int32_t state, int oomScoreOffset, bool systemNativeClient,
+ bool sharedMode);
/**
* Make a ClientDescriptor object wrapping the given BasicClient strong pointer with
@@ -670,6 +669,15 @@
int32_t updateAudioRestriction();
int32_t updateAudioRestrictionLocked();
+ /**
+ * Returns true if the given client is the only client in the active clients list for a given
+ * camera.
+ *
+ * This method acquires mServiceLock.
+ */
+ bool isOnlyClient(const BasicClient* client);
+
+
private:
// TODO: b/263304156 update this to make use of a death callback for more
@@ -686,24 +694,7 @@
return activityManager;
}
- static const sp<IPermissionController>& getPermissionController() {
- static const char* kPermissionControllerService = "permission";
- static thread_local sp<IPermissionController> sPermissionController = nullptr;
-
- if (sPermissionController == nullptr ||
- !IInterface::asBinder(sPermissionController)->isBinderAlive()) {
- sp<IServiceManager> sm = defaultServiceManager();
- sp<IBinder> binder = sm->checkService(toString16(kPermissionControllerService));
- if (binder == nullptr) {
- ALOGE("%s: Could not get permission service", __FUNCTION__);
- sPermissionController = nullptr;
- } else {
- sPermissionController = interface_cast<IPermissionController>(binder);
- }
- }
-
- return sPermissionController;
- }
+ static int32_t getUidProcessState(int32_t uid);
/**
* Typesafe version of device status, containing both the HAL-layer and the service interface-
@@ -806,6 +797,10 @@
void setClientPackage(const std::string& clientPackage);
std::string getClientPackage() const;
+ void addClientPackage(const std::string& clientPackage);
+ void removeClientPackage(const std::string& clientPackage);
+ std::set<std::string> getClientPackages() const;
+
/**
* Return the unavailable physical ids for this device.
*
@@ -818,7 +813,7 @@
const int mCost;
std::set<std::string> mConflicting;
std::set<std::string> mUnavailablePhysicalIds;
- std::string mClientPackage;
+ std::set<std::string> mClientPackages;
mutable Mutex mStatusLock;
CameraParameters mShimParams;
const SystemCameraKind mSystemCameraKind;
@@ -947,19 +942,15 @@
void removeStates(const std::string& id);
// Check if we can connect, before we acquire the service lock.
- // If clientPid/clientUid are USE_CALLING_PID/USE_CALLING_UID, they will be overwritten with
- // the calling pid/uid.
- binder::Status validateConnectLocked(const std::string& cameraId, const std::string& clientName,
- int clientUid, int clientPid) const;
- binder::Status validateClientPermissionsLocked(const std::string& cameraId,
- const std::string& clientName, int clientUid, int clientPid) const;
+ binder::Status validateConnectLocked(const std::string& cameraId,
+ const AttributionSourceState& clientAttribution,
+ bool sharedMode) const;
+ binder::Status validateClientPermissionsLocked(
+ const std::string& cameraId, const AttributionSourceState& clientAttribution,
+ bool sharedMode) const;
- // If clientPackageNameMaybe is empty, attempts to resolve the package name.
- std::string resolvePackageName(int clientUid, const std::string& clientPackageNameMaybe) const;
void logConnectionAttempt(int clientPid, const std::string& clientPackageName,
const std::string& cameraId, apiLevel effectiveApiLevel) const;
- binder::Status errorNotTrusted(int clientPid, int clientUid, const std::string& cameraId,
- const std::string& clientName, bool isPid) const;
bool isCameraPrivacyEnabled(const String16& packageName,const std::string& cameraId,
int clientPid, int ClientUid);
@@ -968,7 +959,7 @@
// Only call with with mServiceLock held.
status_t handleEvictionsLocked(const std::string& cameraId, int clientPid,
apiLevel effectiveApiLevel, const sp<IBinder>& remoteCallback,
- const std::string& packageName, int scoreOffset, bool systemNativeClient,
+ const std::string& packageName, int scoreOffset, bool systemNativeClient, bool sharedMode,
/*out*/
sp<BasicClient>* client,
std::shared_ptr<resource_policy::ClientDescriptor<std::string, sp<BasicClient>>>* partial);
@@ -997,24 +988,15 @@
// sorted in alpha-numeric order.
void filterAPI1SystemCameraLocked(const std::vector<std::string> &normalDeviceIds);
- // In some cases the calling code has no access to the package it runs under.
- // For example, NDK camera API.
- // In this case we will get the packages for the calling UID and pick the first one
- // for attributing the app op. This will work correctly for runtime permissions
- // as for legacy apps we will toggle the app op for all packages in the UID.
- // The caveat is that the operation may be attributed to the wrong package and
- // stats based on app ops may be slightly off.
- std::string getPackageNameFromUid(int clientUid) const;
-
// Single implementation shared between the various connect calls
- template<class CALLBACK, class CLIENT>
+ template <class CALLBACK, class CLIENT>
binder::Status connectHelper(const sp<CALLBACK>& cameraCb, const std::string& cameraId,
- int api1CameraId, const std::string& clientPackageName, bool systemNativeClient,
- const std::optional<std::string>& clientFeatureId, int clientUid, int clientPid,
- apiLevel effectiveApiLevel, bool shimUpdateOnly, int scoreOffset, int targetSdkVersion,
- int rotationOverride, bool forceSlowJpegMode,
- const std::string& originalCameraId, bool isNonSystemNdk,
- /*out*/sp<CLIENT>& device);
+ int api1CameraId, const AttributionSourceState& clientAttribution,
+ bool systemNativeClient, apiLevel effectiveApiLevel,
+ bool shimUpdateOnly, int scoreOffset, int targetSdkVersion,
+ int rotationOverride, bool forceSlowJpegMode,
+ const std::string& originalCameraId, bool isNonSystemNdk,
+ bool sharedMode, /*out*/ sp<CLIENT>& device);
// Lock guarding camera service state
Mutex mServiceLock;
@@ -1122,12 +1104,12 @@
std::string cameraIdIntToStrLocked(int cameraIdInt, int32_t deviceId, int32_t devicePolicy);
/**
- * Remove a single client corresponding to the given camera id from the list of active clients.
+ * Remove all the clients corresponding to the given camera id from the list of active clients.
* If none exists, return an empty strongpointer.
*
* This method must be called with mServiceLock held.
*/
- sp<CameraService::BasicClient> removeClientLocked(const std::string& cameraId);
+ std::vector<sp<CameraService::BasicClient>> removeClientsLocked(const std::string& cameraId);
/**
* Handle a notification that the current device user has changed.
@@ -1335,7 +1317,7 @@
* This method acqiures mStatusListenerLock.
*/
void updateOpenCloseStatus(const std::string& cameraId, bool open,
- const std::string& packageName);
+ const std::string& packageName, bool sharedMode);
// flashlight control
sp<CameraFlashlight> mFlashlight;
@@ -1502,14 +1484,16 @@
static std::string getFormattedCurrentTime();
static binder::Status makeClient(const sp<CameraService>& cameraService,
- const sp<IInterface>& cameraCb, const std::string& packageName,
- bool systemNativeClient, const std::optional<std::string>& featureId,
- const std::string& cameraId, int api1CameraId, int facing, int sensorOrientation,
- int clientPid, uid_t clientUid, int servicePid,
- std::pair<int, IPCTransport> deviceVersionAndIPCTransport, apiLevel effectiveApiLevel,
- bool overrideForPerfClass, int rotationOverride, bool forceSlowJpegMode,
- const std::string& originalCameraId,
- /*out*/ sp<BasicClient>* client);
+ const sp<IInterface>& cameraCb,
+ const AttributionSourceState& clientAttribution,
+ int callingPid, bool systemNativeClient,
+ const std::string& cameraId, int api1CameraId, int facing,
+ int sensorOrientation, int servicePid,
+ std::pair<int, IPCTransport> deviceVersionAndIPCTransport,
+ apiLevel effectiveApiLevel, bool overrideForPerfClass,
+ int rotationOverride, bool forceSlowJpegMode,
+ const std::string& originalCameraId, bool sharedMode,
+ /*out*/ sp<BasicClient>* client);
static std::string toString(std::set<userid_t> intSet);
static int32_t mapToInterface(TorchModeStatus status);
@@ -1524,6 +1508,9 @@
void disconnectClient(const std::string& id, sp<BasicClient> clientToDisconnect);
+ void disconnectClients(const std::string& id,
+ std::vector<sp<BasicClient>> clientsToDisconnect);
+
// Regular online and offline devices must not be in conflict at camera service layer.
// Use separate keys for offline devices.
static const std::string kOfflineDevice;
diff --git a/services/camera/libcameraservice/aidl/AidlCameraDeviceCallbacks.cpp b/services/camera/libcameraservice/aidl/AidlCameraDeviceCallbacks.cpp
index e648a36..70647b4 100644
--- a/services/camera/libcameraservice/aidl/AidlCameraDeviceCallbacks.cpp
+++ b/services/camera/libcameraservice/aidl/AidlCameraDeviceCallbacks.cpp
@@ -79,6 +79,15 @@
return binder::Status::ok();
}
+binder::Status AidlCameraDeviceCallbacks::onClientSharedAccessPriorityChanged(bool primaryClient) {
+ if (!flags::camera_multi_client()) {
+ return binder::Status::ok();
+ }
+ auto ret = mBase->onClientSharedAccessPriorityChanged(primaryClient);
+ LOG_STATUS_ERROR_IF_NOT_OK(ret, "onClientSharedAccessPriorityChanged")
+ return binder::Status::ok();
+}
+
binder::Status AidlCameraDeviceCallbacks::onDeviceIdle() {
auto ret = mBase->onDeviceIdle();
LOG_STATUS_ERROR_IF_NOT_OK(ret, "onDeviceIdle")
diff --git a/services/camera/libcameraservice/aidl/AidlCameraDeviceCallbacks.h b/services/camera/libcameraservice/aidl/AidlCameraDeviceCallbacks.h
index 5cff5b3..07bf7d8 100644
--- a/services/camera/libcameraservice/aidl/AidlCameraDeviceCallbacks.h
+++ b/services/camera/libcameraservice/aidl/AidlCameraDeviceCallbacks.h
@@ -75,6 +75,8 @@
binder::Status onRequestQueueEmpty() override;
+ binder::Status onClientSharedAccessPriorityChanged(bool primaryClient) override;
+
status_t linkToDeath(const sp<DeathRecipient>& recipient, void* cookie,
uint32_t flags) override;
status_t unlinkToDeath(const wp<DeathRecipient>& recipient, void* cookie, uint32_t flags,
diff --git a/services/camera/libcameraservice/aidl/AidlCameraDeviceUser.cpp b/services/camera/libcameraservice/aidl/AidlCameraDeviceUser.cpp
index 9e6a925..fc987b2 100644
--- a/services/camera/libcameraservice/aidl/AidlCameraDeviceUser.cpp
+++ b/services/camera/libcameraservice/aidl/AidlCameraDeviceUser.cpp
@@ -192,6 +192,16 @@
return fromUStatus(ret);
}
+ndk::ScopedAStatus AidlCameraDeviceUser::isPrimaryClient(bool* _aidl_return) {
+ bool isPrimary = false;
+ UStatus ret = mDeviceRemote->isPrimaryClient(&isPrimary);
+ if (!ret.isOk()) {
+ ALOGE("%s: Failed to get isPrimaryClient: %s", __FUNCTION__, ret.toString8().c_str());
+ }
+ *_aidl_return = isPrimary;
+ return fromUStatus(ret);
+}
+
ndk::ScopedAStatus AidlCameraDeviceUser::flush(int64_t* _aidl_return) {
UStatus ret = mDeviceRemote->flush(_aidl_return);
return fromUStatus(ret);
@@ -278,4 +288,4 @@
return true;
}
-} // namespace android::frameworks::cameraservice::device::implementation
\ No newline at end of file
+} // namespace android::frameworks::cameraservice::device::implementation
diff --git a/services/camera/libcameraservice/aidl/AidlCameraDeviceUser.h b/services/camera/libcameraservice/aidl/AidlCameraDeviceUser.h
index 8014951..8fa33f7 100644
--- a/services/camera/libcameraservice/aidl/AidlCameraDeviceUser.h
+++ b/services/camera/libcameraservice/aidl/AidlCameraDeviceUser.h
@@ -97,6 +97,8 @@
return mCaptureResultMetadataQueue;
}
+ ndk::ScopedAStatus isPrimaryClient(bool* _aidl_return) override;
+
private:
bool initDevice();
diff --git a/services/camera/libcameraservice/aidl/AidlCameraService.cpp b/services/camera/libcameraservice/aidl/AidlCameraService.cpp
index 7f674bd..a2c431e 100644
--- a/services/camera/libcameraservice/aidl/AidlCameraService.cpp
+++ b/services/camera/libcameraservice/aidl/AidlCameraService.cpp
@@ -30,6 +30,9 @@
#include <hidl/HidlTransportSupport.h>
#include <utils/AttributionAndPermissionUtils.h>
#include <utils/Utils.h>
+#include <com_android_internal_camera_flags.h>
+
+namespace flags = com::android::internal::camera::flags;
namespace android::frameworks::cameraservice::service::implementation {
@@ -131,10 +134,28 @@
return ScopedAStatus::ok();
}
+
ndk::ScopedAStatus AidlCameraService::connectDevice(
const std::shared_ptr<SICameraDeviceCallback>& in_callback,
const std::string& in_cameraId,
std::shared_ptr<SICameraDeviceUser>* _aidl_return) {
+ return connectDeviceImpl(in_callback, in_cameraId, /*sharedMode*/false, _aidl_return);
+}
+
+ndk::ScopedAStatus AidlCameraService::connectDeviceV2(
+ const std::shared_ptr<SICameraDeviceCallback>& in_callback,
+ const std::string& in_cameraId, bool sharedMode,
+ std::shared_ptr<SICameraDeviceUser>* _aidl_return) {
+ if (!flags::camera_multi_client()) {
+ return fromSStatus(SStatus::INVALID_OPERATION);
+ }
+ return connectDeviceImpl(in_callback, in_cameraId, sharedMode, _aidl_return);
+}
+
+ndk::ScopedAStatus AidlCameraService::connectDeviceImpl(
+ const std::shared_ptr<SICameraDeviceCallback>& in_callback,
+ const std::string& in_cameraId, bool sharedMode,
+ std::shared_ptr<SICameraDeviceUser>* _aidl_return) {
// Here, we first get NDK ICameraDeviceUser from mCameraService, then save
// that interface in the newly created AidlCameraDeviceUser impl class.
if (mCameraService == nullptr) {
@@ -164,6 +185,7 @@
ROTATION_OVERRIDE_NONE,
clientAttribution,
/* devicePolicy= */ 0,
+ sharedMode,
&unstableDevice);
if (!serviceRet.isOk()) {
ALOGE("%s: Unable to connect to camera device: %s", __FUNCTION__,
diff --git a/services/camera/libcameraservice/aidl/AidlCameraService.h b/services/camera/libcameraservice/aidl/AidlCameraService.h
index 4c67ac7..80e965d 100644
--- a/services/camera/libcameraservice/aidl/AidlCameraService.h
+++ b/services/camera/libcameraservice/aidl/AidlCameraService.h
@@ -60,6 +60,9 @@
ndk::ScopedAStatus removeListener(
const std::shared_ptr<SICameraServiceListener>& in_listener) override;
+ ndk::ScopedAStatus connectDeviceV2(const std::shared_ptr<SICameraDeviceCallback>& in_callback,
+ const std::string& in_cameraId, bool sharedMode,
+ std::shared_ptr<SICameraDeviceUser>* _aidl_return);
private:
void addToListenerCacheLocked(std::shared_ptr<SICameraServiceListener> stableCsListener,
sp<hardware::ICameraServiceListener> csListener);
@@ -70,6 +73,9 @@
SStatus addListenerInternal(const std::shared_ptr<SICameraServiceListener>& listener,
std::vector<hardware::CameraStatus>* cameraStatusAndIds);
+ ndk::ScopedAStatus connectDeviceImpl(const std::shared_ptr<SICameraDeviceCallback>& in_callback,
+ const std::string& in_cameraId, bool sharedMode,
+ std::shared_ptr<SICameraDeviceUser>* _aidl_return);
::android::CameraService* mCameraService;
diff --git a/services/camera/libcameraservice/aidl/AidlCameraServiceListener.h b/services/camera/libcameraservice/aidl/AidlCameraServiceListener.h
index a7c32e3..c0dc688 100644
--- a/services/camera/libcameraservice/aidl/AidlCameraServiceListener.h
+++ b/services/camera/libcameraservice/aidl/AidlCameraServiceListener.h
@@ -75,6 +75,11 @@
uint32_t flags) override;
status_t unlinkToDeath(const wp<DeathRecipient>& recipient, void* cookie, uint32_t flags,
wp<DeathRecipient>* outRecipient) override;
+ binder::Status onCameraOpenedInSharedMode(const std::string& /*cameraId*/,
+ const std::string& /*clientPackageId*/, int32_t /*deviceId*/, bool /*primaryClient*/) {
+ // empty implementation
+ return binder::Status::ok();
+ }
private:
std::shared_ptr<SICameraServiceListener> mBase;
@@ -86,4 +91,4 @@
} // android
-#endif // FRAMEWORKS_AV_SERVICES_CAMERA_LIBCAMERASERVICE_AIDL_AIDLCAMERASERVICELISTENER_H_
\ No newline at end of file
+#endif // FRAMEWORKS_AV_SERVICES_CAMERA_LIBCAMERASERVICE_AIDL_AIDLCAMERASERVICELISTENER_H_
diff --git a/services/camera/libcameraservice/aidl/ExtensionMetadataTags.h b/services/camera/libcameraservice/aidl/ExtensionMetadataTags.h
index 61b150d..550b3ab 100644
--- a/services/camera/libcameraservice/aidl/ExtensionMetadataTags.h
+++ b/services/camera/libcameraservice/aidl/ExtensionMetadataTags.h
@@ -30,4 +30,5 @@
std::vector<camera_metadata_tag> extension_metadata_keys{
ANDROID_EXTENSION_STRENGTH,
ANDROID_EXTENSION_CURRENT_TYPE,
+ ANDROID_EXTENSION_NIGHT_MODE_INDICATOR,
};
diff --git a/services/camera/libcameraservice/aidl/VndkVersionMetadataTags.h b/services/camera/libcameraservice/aidl/VndkVersionMetadataTags.h
index b07d8d5..43ddac6 100644
--- a/services/camera/libcameraservice/aidl/VndkVersionMetadataTags.h
+++ b/services/camera/libcameraservice/aidl/VndkVersionMetadataTags.h
@@ -93,6 +93,19 @@
ANDROID_FLASH_TORCH_STRENGTH_MAX_LEVEL,
ANDROID_INFO_SESSION_CONFIGURATION_QUERY_VERSION,
} },
+ {36, {
+ ANDROID_COLOR_CORRECTION_AVAILABLE_MODES,
+ ANDROID_COLOR_CORRECTION_COLOR_TEMPERATURE_RANGE,
+ ANDROID_CONTROL_AE_AVAILABLE_PRIORITY_MODES,
+ ANDROID_HEIC_AVAILABLE_HEIC_ULTRA_HDR_MIN_FRAME_DURATIONS,
+ ANDROID_HEIC_AVAILABLE_HEIC_ULTRA_HDR_MIN_FRAME_DURATIONS_MAXIMUM_RESOLUTION,
+ ANDROID_HEIC_AVAILABLE_HEIC_ULTRA_HDR_STALL_DURATIONS,
+ ANDROID_HEIC_AVAILABLE_HEIC_ULTRA_HDR_STALL_DURATIONS_MAXIMUM_RESOLUTION,
+ ANDROID_HEIC_AVAILABLE_HEIC_ULTRA_HDR_STREAM_CONFIGURATIONS,
+ ANDROID_HEIC_AVAILABLE_HEIC_ULTRA_HDR_STREAM_CONFIGURATIONS_MAXIMUM_RESOLUTION,
+ ANDROID_SHARED_SESSION_COLOR_SPACE,
+ ANDROID_SHARED_SESSION_OUTPUT_CONFIGURATIONS,
+ } },
};
/**
@@ -125,4 +138,11 @@
ANDROID_STATISTICS_LENS_INTRINSIC_SAMPLES,
ANDROID_STATISTICS_LENS_INTRINSIC_TIMESTAMPS,
} },
+ {36, {
+ ANDROID_COLOR_CORRECTION_COLOR_TEMPERATURE,
+ ANDROID_COLOR_CORRECTION_COLOR_TINT,
+ ANDROID_CONTROL_AE_PRIORITY_MODE,
+ ANDROID_CONTROL_ZOOM_METHOD,
+ ANDROID_EXTENSION_NIGHT_MODE_INDICATOR,
+ } },
};
diff --git a/services/camera/libcameraservice/api1/Camera2Client.cpp b/services/camera/libcameraservice/api1/Camera2Client.cpp
index 861414f..9b916bf 100644
--- a/services/camera/libcameraservice/api1/Camera2Client.cpp
+++ b/services/camera/libcameraservice/api1/Camera2Client.cpp
@@ -28,8 +28,10 @@
#include <camera/CameraUtils.h>
#include <camera/StringUtils.h>
#include <com_android_internal_camera_flags.h>
+#include <com_android_window_flags.h>
#include <cutils/properties.h>
#include <gui/Surface.h>
+#include <gui/view/Surface.h>
#include "api1/Camera2Client.h"
@@ -52,35 +54,28 @@
using namespace camera2;
namespace flags = com::android::internal::camera::flags;
+namespace wm_flags = com::android::window::flags;
// Interface used by CameraService
-Camera2Client::Camera2Client(const sp<CameraService>& cameraService,
- const sp<hardware::ICameraClient>& cameraClient,
+Camera2Client::Camera2Client(
+ const sp<CameraService>& cameraService, const sp<hardware::ICameraClient>& cameraClient,
std::shared_ptr<CameraServiceProxyWrapper> cameraServiceProxyWrapper,
std::shared_ptr<AttributionAndPermissionUtils> attributionAndPermissionUtils,
- const std::string& clientPackageName,
- const std::optional<std::string>& clientFeatureId,
- const std::string& cameraDeviceId,
- int api1CameraId,
- int cameraFacing,
- int sensorOrientation,
- int clientPid,
- uid_t clientUid,
- int servicePid,
- bool overrideForPerfClass,
- int rotationOverride,
- bool forceSlowJpegMode):
- Camera2ClientBase(cameraService, cameraClient, cameraServiceProxyWrapper,
- attributionAndPermissionUtils, clientPackageName,
- false/*systemNativeClient - since no ndk for api1*/, clientFeatureId,
- cameraDeviceId, api1CameraId, cameraFacing, sensorOrientation, clientPid,
- clientUid, servicePid, overrideForPerfClass, rotationOverride,
- /*legacyClient*/ true),
- mParameters(api1CameraId, cameraFacing),
- mLatestRequestIds(kMaxRequestIds),
- mLatestFailedRequestIds(kMaxRequestIds)
-{
+ const AttributionSourceState& clientAttribution, int callingPid,
+ const std::string& cameraDeviceId, int api1CameraId, int cameraFacing,
+ int sensorOrientation, int servicePid, bool overrideForPerfClass, int rotationOverride,
+ bool forceSlowJpegMode, bool sharedMode)
+ : Camera2ClientBase(cameraService, cameraClient, cameraServiceProxyWrapper,
+ attributionAndPermissionUtils, clientAttribution, callingPid,
+ false /*systemNativeClient - since no ndk for api1*/, cameraDeviceId,
+ api1CameraId, cameraFacing, sensorOrientation, servicePid,
+ overrideForPerfClass, rotationOverride, sharedMode,
+ /*legacyClient*/ true),
+ mParameters(api1CameraId, cameraFacing),
+ mInitialized(false),
+ mLatestRequestIds(kMaxRequestIds),
+ mLatestFailedRequestIds(kMaxRequestIds) {
ATRACE_CALL();
mRotateAndCropMode = ANDROID_SCALER_ROTATE_AND_CROP_NONE;
@@ -142,8 +137,13 @@
// The 'mRotateAndCropMode' value only accounts for the necessary adjustment
// when the display rotates. The sensor orientation still needs to be calculated
// and applied similar to the Camera2 path.
+ using hardware::BnCameraService::ROTATION_OVERRIDE_ROTATION_ONLY;
+ bool enableTransformInverseDisplay = true;
+ if (wm_flags::enable_camera_compat_for_desktop_windowing()) {
+ enableTransformInverseDisplay = (mRotationOverride != ROTATION_OVERRIDE_ROTATION_ONLY);
+ }
CameraUtils::getRotationTransform(staticInfo, OutputConfiguration::MIRROR_MODE_AUTO,
- &mRotateAndCropPreviewTransform);
+ enableTransformInverseDisplay, &mRotateAndCropPreviewTransform);
mStreamingProcessor = new StreamingProcessor(this);
@@ -195,6 +195,7 @@
ALOGD("%s", l.mParameters.paramsFlattened.c_str());
}
+ mInitialized = true;
return OK;
}
@@ -218,7 +219,7 @@
result << fmt::sprintf("Client2[%d] (%p) PID: %d, dump:\n", mCameraId,
(getRemoteCallback() != NULL ?
(void *) (IInterface::asBinder(getRemoteCallback()).get()) : NULL),
- mClientPid);
+ mCallingPid);
result << " State: ";
#define CASE_APPEND_ENUM(x) case x: result << #x "\n"; break;
@@ -449,7 +450,7 @@
binder::Status res = binder::Status::ok();
// Allow both client and the cameraserver to disconnect at all times
int callingPid = getCallingPid();
- if (callingPid != mClientPid && callingPid != mServicePid) return res;
+ if (callingPid != mCallingPid && callingPid != mServicePid) return res;
if (mDevice == 0) return res;
@@ -526,14 +527,14 @@
ALOGV("%s: E", __FUNCTION__);
Mutex::Autolock icl(mBinderSerializationLock);
- if (mClientPid != 0 && getCallingPid() != mClientPid) {
+ if (mCallingPid != 0 && getCallingPid() != mCallingPid) {
ALOGE("%s: Camera %d: Connection attempt from pid %d; "
"current locked to pid %d", __FUNCTION__,
- mCameraId, getCallingPid(), mClientPid);
+ mCameraId, getCallingPid(), mCallingPid);
return BAD_VALUE;
}
- mClientPid = getCallingPid();
+ mCallingPid = getCallingPid();
mRemoteCallback = client;
mSharedCameraCallbacks = client;
@@ -546,16 +547,16 @@
ALOGV("%s: E", __FUNCTION__);
Mutex::Autolock icl(mBinderSerializationLock);
ALOGV("%s: Camera %d: Lock call from pid %d; current client pid %d",
- __FUNCTION__, mCameraId, getCallingPid(), mClientPid);
+ __FUNCTION__, mCameraId, getCallingPid(), mCallingPid);
- if (mClientPid == 0) {
- mClientPid = getCallingPid();
+ if (mCallingPid == 0) {
+ mCallingPid = getCallingPid();
return OK;
}
- if (mClientPid != getCallingPid()) {
+ if (mCallingPid != getCallingPid()) {
ALOGE("%s: Camera %d: Lock call from pid %d; currently locked to pid %d",
- __FUNCTION__, mCameraId, getCallingPid(), mClientPid);
+ __FUNCTION__, mCameraId, getCallingPid(), mCallingPid);
return EBUSY;
}
@@ -567,46 +568,76 @@
ALOGV("%s: E", __FUNCTION__);
Mutex::Autolock icl(mBinderSerializationLock);
ALOGV("%s: Camera %d: Unlock call from pid %d; current client pid %d",
- __FUNCTION__, mCameraId, getCallingPid(), mClientPid);
+ __FUNCTION__, mCameraId, getCallingPid(), mCallingPid);
- if (mClientPid == getCallingPid()) {
+ if (mCallingPid == getCallingPid()) {
SharedParameters::Lock l(mParameters);
if (l.mParameters.state == Parameters::RECORD ||
l.mParameters.state == Parameters::VIDEO_SNAPSHOT) {
ALOGD("Not allowed to unlock camera during recording.");
return INVALID_OPERATION;
}
- mClientPid = 0;
+ mCallingPid = 0;
mRemoteCallback.clear();
mSharedCameraCallbacks.clear();
return OK;
}
ALOGE("%s: Camera %d: Unlock call from pid %d; currently locked to pid %d",
- __FUNCTION__, mCameraId, getCallingPid(), mClientPid);
+ __FUNCTION__, mCameraId, getCallingPid(), mCallingPid);
return EBUSY;
}
-status_t Camera2Client::setPreviewTarget(
- const sp<IGraphicBufferProducer>& bufferProducer) {
+status_t Camera2Client::setPreviewTarget(const sp<SurfaceType>& target) {
ATRACE_CALL();
ALOGV("%s: E", __FUNCTION__);
Mutex::Autolock icl(mBinderSerializationLock);
status_t res;
- if ( (res = checkPid(__FUNCTION__) ) != OK) return res;
+ if ((res = checkPid(__FUNCTION__)) != OK) return res;
- sp<IBinder> binder;
- sp<Surface> window;
- if (bufferProducer != 0) {
- binder = IInterface::asBinder(bufferProducer);
+#if WB_LIBCAMERASERVICE_WITH_DEPENDENCIES
+ sp<Surface> surface;
+ view::Surface viewSurface;
+ if (target != nullptr) {
// Using controlledByApp flag to ensure that the buffer queue remains in
// async mode for the old camera API, where many applications depend
// on that behavior.
- window = new Surface(bufferProducer, /*controlledByApp*/ true);
+ surface = new Surface(target->getIGraphicBufferProducer(), true);
+ viewSurface = view::Surface::fromSurface(surface);
+ }
+ return setPreviewWindowL(viewSurface, surface);
+#else
+ sp<IBinder> binder;
+ sp<Surface> window;
+ if (target != 0) {
+ binder = IInterface::asBinder(target);
+ // Using controlledByApp flag to ensure that the buffer queue remains in
+ // async mode for the old camera API, where many applications depend
+ // on that behavior.
+ window = new Surface(target, /*controlledByApp*/ true);
}
return setPreviewWindowL(binder, window);
+#endif
}
+#if WB_LIBCAMERASERVICE_WITH_DEPENDENCIES
+status_t Camera2Client::setPreviewWindowL(const view::Surface& viewSurface,
+ const sp<Surface>& window) {
+ ATRACE_CALL();
+ status_t res;
+
+ uint64_t viewSurfaceID;
+ res = viewSurface.getUniqueId(&viewSurfaceID);
+ if (res != OK) {
+ ALOGE("%s: Camera %d: Could not getUniqueId.", __FUNCTION__, mCameraId);
+ return res;
+ }
+
+ if (viewSurfaceID == mPreviewViewSurfaceID) {
+ ALOGV("%s: Camera %d: New window is same as old window", __FUNCTION__, mCameraId);
+ return NO_ERROR;
+ }
+#else
status_t Camera2Client::setPreviewWindowL(const sp<IBinder>& binder,
const sp<Surface>& window) {
ATRACE_CALL();
@@ -617,6 +648,7 @@
__FUNCTION__, mCameraId);
return NO_ERROR;
}
+#endif
Parameters::State state;
{
@@ -628,9 +660,8 @@
case Parameters::RECORD:
case Parameters::STILL_CAPTURE:
case Parameters::VIDEO_SNAPSHOT:
- ALOGE("%s: Camera %d: Cannot set preview display while in state %s",
- __FUNCTION__, mCameraId,
- Parameters::getStateName(state));
+ ALOGE("%s: Camera %d: Cannot set preview display while in state %s", __FUNCTION__,
+ mCameraId, Parameters::getStateName(state));
return INVALID_OPERATION;
case Parameters::STOPPED:
case Parameters::WAITING_FOR_PREVIEW_WINDOW:
@@ -640,19 +671,23 @@
// Already running preview - need to stop and create a new stream
res = stopStream();
if (res != OK) {
- ALOGE("%s: Unable to stop preview to swap windows: %s (%d)",
- __FUNCTION__, strerror(-res), res);
+ ALOGE("%s: Unable to stop preview to swap windows: %s (%d)", __FUNCTION__,
+ strerror(-res), res);
return res;
}
state = Parameters::WAITING_FOR_PREVIEW_WINDOW;
break;
}
+#if WB_LIBCAMERASERVICE_WITH_DEPENDENCIES
+ mPreviewViewSurfaceID = viewSurfaceID;
+#else
mPreviewSurface = binder;
+#endif
+
res = mStreamingProcessor->setPreviewWindow(window);
if (res != OK) {
- ALOGE("%s: Unable to set new preview window: %s (%d)",
- __FUNCTION__, strerror(-res), res);
+ ALOGE("%s: Unable to set new preview window: %s (%d)", __FUNCTION__, strerror(-res), res);
return res;
}
@@ -725,23 +760,26 @@
}
}
-status_t Camera2Client::setPreviewCallbackTarget(
- const sp<IGraphicBufferProducer>& callbackProducer) {
+status_t Camera2Client::setPreviewCallbackTarget(const sp<SurfaceType>& target) {
ATRACE_CALL();
ALOGV("%s: E", __FUNCTION__);
Mutex::Autolock icl(mBinderSerializationLock);
status_t res;
- if ( (res = checkPid(__FUNCTION__) ) != OK) return res;
+ if ((res = checkPid(__FUNCTION__)) != OK) return res;
+#if WB_LIBCAMERASERVICE_WITH_DEPENDENCIES
+ sp<Surface> window = target;
+#else
sp<Surface> window;
- if (callbackProducer != 0) {
- window = new Surface(callbackProducer);
+ if (target != 0) {
+ window = new Surface(target);
}
+#endif
res = mCallbackProcessor->setCallbackWindow(window);
if (res != OK) {
- ALOGE("%s: Camera %d: Unable to set preview callback surface: %s (%d)",
- __FUNCTION__, mCameraId, strerror(-res), res);
+ ALOGE("%s: Camera %d: Unable to set preview callback surface: %s (%d)", __FUNCTION__,
+ mCameraId, strerror(-res), res);
return res;
}
@@ -757,7 +795,7 @@
l.mParameters.previewCallbackSurface = false;
}
- switch(l.mParameters.state) {
+ switch (l.mParameters.state) {
case Parameters::PREVIEW:
res = startPreviewL(l.mParameters, true);
break;
@@ -769,15 +807,13 @@
break;
}
if (res != OK) {
- ALOGE("%s: Camera %d: Unable to refresh request in state %s",
- __FUNCTION__, mCameraId,
- Parameters::getStateName(l.mParameters.state));
+ ALOGE("%s: Camera %d: Unable to refresh request in state %s", __FUNCTION__, mCameraId,
+ Parameters::getStateName(l.mParameters.state));
}
return OK;
}
-
status_t Camera2Client::startPreview() {
ATRACE_CALL();
ALOGV("%s: E", __FUNCTION__);
@@ -1006,6 +1042,12 @@
void Camera2Client::stopPreviewL() {
ATRACE_CALL();
+
+ if (!mInitialized) {
+ // If we haven't initialized yet, there's no stream to stop (b/379558387)
+ return;
+ }
+
status_t res;
const nsecs_t kStopCaptureTimeout = 3000000000LL; // 3 seconds
Parameters::State state;
@@ -2266,29 +2308,47 @@
return res;
}
-status_t Camera2Client::setVideoTarget(const sp<IGraphicBufferProducer>& bufferProducer) {
+status_t Camera2Client::setVideoTarget(const sp<SurfaceType>& target) {
ATRACE_CALL();
ALOGV("%s: E", __FUNCTION__);
Mutex::Autolock icl(mBinderSerializationLock);
status_t res;
if ( (res = checkPid(__FUNCTION__) ) != OK) return res;
- sp<IBinder> binder = IInterface::asBinder(bufferProducer);
+#if WB_LIBCAMERASERVICE_WITH_DEPENDENCIES
+ uint64_t videoSurfaceID;
+ res = target->getUniqueId(&videoSurfaceID);
+ if (res != OK) {
+ ALOGE("%s: Camera %d: Could not getUniqueId in setVideoTarget.", __FUNCTION__, mCameraId);
+ return res;
+ }
+ if (videoSurfaceID == mVideoSurfaceID) {
+ ALOGE("%s: Camera %d: New video window is same as old video window", __FUNCTION__,
+ mCameraId);
+ return NO_ERROR;
+ }
+#else
+ sp<IBinder> binder = IInterface::asBinder(target);
if (binder == mVideoSurface) {
ALOGV("%s: Camera %d: New video window is same as old video window",
__FUNCTION__, mCameraId);
return NO_ERROR;
}
+#endif
sp<Surface> window;
int format;
android_dataspace dataSpace;
- if (bufferProducer != nullptr) {
+ if (target != nullptr) {
// Using controlledByApp flag to ensure that the buffer queue remains in
// async mode for the old camera API, where many applications depend
// on that behavior.
- window = new Surface(bufferProducer, /*controlledByApp*/ true);
+#if WB_LIBCAMERASERVICE_WITH_DEPENDENCIES
+ window = new Surface(target->getIGraphicBufferProducer(), /*controlledByApp*/ true);
+#else
+ window = new Surface(target, /*controlledByApp*/ true);
+#endif
ANativeWindow *anw = window.get();
@@ -2327,7 +2387,11 @@
return INVALID_OPERATION;
}
+#if WB_LIBCAMERASERVICE_WITH_DEPENDENCIES
+ mVideoSurfaceID = videoSurfaceID;
+#else
mVideoSurface = binder;
+#endif
res = mStreamingProcessor->setRecordingWindow(window);
if (res != OK) {
ALOGE("%s: Unable to set new recording window: %s (%d)",
diff --git a/services/camera/libcameraservice/api1/Camera2Client.h b/services/camera/libcameraservice/api1/Camera2Client.h
index a0c9f2d..a90e8cc 100644
--- a/services/camera/libcameraservice/api1/Camera2Client.h
+++ b/services/camera/libcameraservice/api1/Camera2Client.h
@@ -17,12 +17,16 @@
#ifndef ANDROID_SERVERS_CAMERA_CAMERA2CLIENT_H
#define ANDROID_SERVERS_CAMERA_CAMERA2CLIENT_H
-#include "CameraService.h"
-#include "common/CameraDeviceBase.h"
-#include "common/Camera2ClientBase.h"
-#include "api1/client2/Parameters.h"
-#include "api1/client2/FrameProcessor.h"
+#include <atomic>
+
+#include <gui/Flags.h>
+#include <gui/view/Surface.h>
#include <media/RingBuffer.h>
+#include "CameraService.h"
+#include "api1/client2/FrameProcessor.h"
+#include "api1/client2/Parameters.h"
+#include "common/Camera2ClientBase.h"
+#include "common/CameraDeviceBase.h"
namespace android {
@@ -53,11 +57,9 @@
virtual status_t connect(const sp<hardware::ICameraClient>& client);
virtual status_t lock();
virtual status_t unlock();
- virtual status_t setPreviewTarget(
- const sp<IGraphicBufferProducer>& bufferProducer);
+ virtual status_t setPreviewTarget(const sp<SurfaceType>& target);
virtual void setPreviewCallbackFlag(int flag);
- virtual status_t setPreviewCallbackTarget(
- const sp<IGraphicBufferProducer>& callbackProducer);
+ virtual status_t setPreviewCallbackTarget(const sp<SurfaceType>& target);
virtual status_t startPreview();
virtual void stopPreview();
@@ -78,7 +80,7 @@
virtual status_t sendCommand(int32_t cmd, int32_t arg1, int32_t arg2);
virtual void notifyError(int32_t errorCode,
const CaptureResultExtras& resultExtras);
- virtual status_t setVideoTarget(const sp<IGraphicBufferProducer>& bufferProducer);
+ virtual status_t setVideoTarget(const sp<SurfaceType>& target);
virtual status_t setAudioRestriction(int mode);
virtual int32_t getGlobalAudioRestriction();
virtual status_t setRotateAndCropOverride(uint8_t rotateAndCrop, bool fromHal = false);
@@ -101,21 +103,13 @@
*/
Camera2Client(const sp<CameraService>& cameraService,
- const sp<hardware::ICameraClient>& cameraClient,
- std::shared_ptr<CameraServiceProxyWrapper> cameraServiceProxyWrapper,
- std::shared_ptr<AttributionAndPermissionUtils> attributionAndPermissionUtils,
- const std::string& clientPackageName,
- const std::optional<std::string>& clientFeatureId,
- const std::string& cameraDeviceId,
- int api1CameraId,
- int cameraFacing,
- int sensorOrientation,
- int clientPid,
- uid_t clientUid,
- int servicePid,
- bool overrideForPerfClass,
- int rotationOverride,
- bool forceSlowJpegMode);
+ const sp<hardware::ICameraClient>& cameraClient,
+ std::shared_ptr<CameraServiceProxyWrapper> cameraServiceProxyWrapper,
+ std::shared_ptr<AttributionAndPermissionUtils> attributionAndPermissionUtils,
+ const AttributionSourceState& clientAttribution, int callingPid,
+ const std::string& cameraDeviceId, int api1CameraId, int cameraFacing,
+ int sensorOrientation, int servicePid, bool overrideForPerfClass,
+ int rotationOverride, bool forceSlowJpegMode, bool sharedMode);
virtual ~Camera2Client();
@@ -183,8 +177,12 @@
/** ICamera interface-related private members */
typedef camera2::Parameters Parameters;
+#if WB_LIBCAMERASERVICE_WITH_DEPENDENCIES
+ status_t setPreviewWindowL(const view::Surface& viewSurface, const sp<Surface>& window);
+#else
status_t setPreviewWindowL(const sp<IBinder>& binder,
const sp<Surface>& window);
+#endif
status_t startPreviewL(Parameters ¶ms, bool restart);
void stopPreviewL();
status_t startRecordingL(Parameters ¶ms, bool restart);
@@ -221,8 +219,13 @@
/* Preview/Recording related members */
+#if WB_LIBCAMERASERVICE_WITH_DEPENDENCIES
+ uint64_t mPreviewViewSurfaceID;
+ uint64_t mVideoSurfaceID;
+#else
sp<IBinder> mPreviewSurface;
sp<IBinder> mVideoSurface;
+#endif
sp<camera2::StreamingProcessor> mStreamingProcessor;
/** Preview callback related members */
@@ -235,6 +238,8 @@
sp<camera2::JpegProcessor> mJpegProcessor;
sp<camera2::ZslProcessor> mZslProcessor;
+ std::atomic<bool> mInitialized;
+
/** Utility members */
bool mLegacyMode;
diff --git a/services/camera/libcameraservice/api2/CameraDeviceClient.cpp b/services/camera/libcameraservice/api2/CameraDeviceClient.cpp
index f469aad..8c30d54 100644
--- a/services/camera/libcameraservice/api2/CameraDeviceClient.cpp
+++ b/services/camera/libcameraservice/api2/CameraDeviceClient.cpp
@@ -61,61 +61,36 @@
const sp<CameraService>& cameraService,
const sp<hardware::camera2::ICameraDeviceCallbacks>& remoteCallback,
std::shared_ptr<AttributionAndPermissionUtils> attributionAndPermissionUtils,
- const std::string& clientPackageName,
- bool systemNativeClient,
- const std::optional<std::string>& clientFeatureId,
- const std::string& cameraId,
- [[maybe_unused]] int api1CameraId,
- int cameraFacing,
- int sensorOrientation,
- int clientPid,
- uid_t clientUid,
- int servicePid,
- int rotationOverride) :
- BasicClient(cameraService,
- IInterface::asBinder(remoteCallback),
- attributionAndPermissionUtils,
- clientPackageName,
- systemNativeClient,
- clientFeatureId,
- cameraId,
- cameraFacing,
- sensorOrientation,
- clientPid,
- clientUid,
- servicePid,
- rotationOverride),
- mRemoteCallback(remoteCallback) {
-}
+ const AttributionSourceState& clientAttribution, int callingPid, bool systemNativeClient,
+ const std::string& cameraId, [[maybe_unused]] int api1CameraId, int cameraFacing,
+ int sensorOrientation, int servicePid, int rotationOverride, bool sharedMode)
+ : BasicClient(cameraService, IInterface::asBinder(remoteCallback),
+ attributionAndPermissionUtils, clientAttribution, callingPid, systemNativeClient,
+ cameraId, cameraFacing, sensorOrientation, servicePid, rotationOverride,
+ sharedMode),
+ mRemoteCallback(remoteCallback) {}
// Interface used by CameraService
-CameraDeviceClient::CameraDeviceClient(const sp<CameraService>& cameraService,
+CameraDeviceClient::CameraDeviceClient(
+ const sp<CameraService>& cameraService,
const sp<hardware::camera2::ICameraDeviceCallbacks>& remoteCallback,
std::shared_ptr<CameraServiceProxyWrapper> cameraServiceProxyWrapper,
std::shared_ptr<AttributionAndPermissionUtils> attributionAndPermissionUtils,
- const std::string& clientPackageName,
- bool systemNativeClient,
- const std::optional<std::string>& clientFeatureId,
- const std::string& cameraId,
- int cameraFacing,
- int sensorOrientation,
- int clientPid,
- uid_t clientUid,
- int servicePid,
- bool overrideForPerfClass,
- int rotationOverride,
- const std::string& originalCameraId) :
- Camera2ClientBase(cameraService, remoteCallback, cameraServiceProxyWrapper,
- attributionAndPermissionUtils, clientPackageName,
- systemNativeClient, clientFeatureId, cameraId, /*API1 camera ID*/ -1, cameraFacing,
- sensorOrientation, clientPid, clientUid, servicePid, overrideForPerfClass,
- rotationOverride),
- mInputStream(),
- mStreamingRequestId(REQUEST_ID_NONE),
- mRequestIdCounter(0),
- mOverrideForPerfClass(overrideForPerfClass),
- mOriginalCameraId(originalCameraId) {
+ const AttributionSourceState& clientAttribution, int callingPid, bool systemNativeClient,
+ const std::string& cameraId, int cameraFacing, int sensorOrientation, int servicePid,
+ bool overrideForPerfClass, int rotationOverride, const std::string& originalCameraId,
+ bool sharedMode)
+ : Camera2ClientBase(cameraService, remoteCallback, cameraServiceProxyWrapper,
+ attributionAndPermissionUtils, clientAttribution, callingPid,
+ systemNativeClient, cameraId, /*API1 camera ID*/ -1, cameraFacing,
+ sensorOrientation, servicePid, overrideForPerfClass, rotationOverride,
+ sharedMode),
+ mInputStream(),
+ mStreamingRequestId(REQUEST_ID_NONE),
+ mRequestIdCounter(0),
+ mOverrideForPerfClass(overrideForPerfClass),
+ mOriginalCameraId(originalCameraId) {
ATRACE_CALL();
ALOGI("CameraDeviceClient %s: Opened", cameraId.c_str());
}
@@ -638,9 +613,20 @@
}
binder::Status CameraDeviceClient::beginConfigure() {
- // TODO: Implement this.
ATRACE_CALL();
- ALOGV("%s: Not implemented yet.", __FUNCTION__);
+ if (!flags::camera_multi_client()) {
+ return binder::Status::ok();
+ }
+ if (!mDevice.get()) {
+ return STATUS_ERROR(CameraService::ERROR_DISCONNECTED, "Camera device no longer alive");
+ }
+ status_t res = mDevice->beginConfigure();
+ if (res != OK) {
+ std::string msg = fmt::sprintf("Camera %s: Error beginning stream configuration: %s (%d)",
+ mCameraIdStr.c_str(), strerror(-res), res);
+ ALOGE("%s: %s", __FUNCTION__, msg.c_str());
+ return STATUS_ERROR(CameraService::ERROR_INVALID_OPERATION, msg.c_str());
+ }
return binder::Status::ok();
}
@@ -673,6 +659,12 @@
return res;
}
+ if (flags::camera_multi_client() && mSharedMode) {
+ // For shared camera session, streams are already configured
+ // earlier, hence no need to do it here.
+ return res;
+ }
+
status_t err = mDevice->configureStreams(sessionParams, operatingMode);
if (err == BAD_VALUE) {
std::string msg = fmt::sprintf("Camera %s: Unsupported set of inputs/outputs provided",
@@ -796,6 +788,7 @@
bool isInput = false;
std::vector<sp<IBinder>> surfaces;
+ std::vector<size_t> removedSurfaceIds;
ssize_t dIndex = NAME_NOT_FOUND;
ssize_t compositeIndex = NAME_NOT_FOUND;
@@ -806,6 +799,9 @@
for (size_t i = 0; i < mStreamMap.size(); ++i) {
if (streamId == mStreamMap.valueAt(i).streamId()) {
surfaces.push_back(mStreamMap.keyAt(i));
+ if (flags::camera_multi_client() && mSharedMode) {
+ removedSurfaceIds.push_back(mStreamMap.valueAt(i).surfaceId());
+ }
}
}
@@ -833,8 +829,14 @@
}
}
- // Also returns BAD_VALUE if stream ID was not valid
- status_t err = mDevice->deleteStream(streamId);
+
+ status_t err;
+ if (flags::camera_multi_client() && mSharedMode) {
+ err = mDevice->removeSharedSurfaces(streamId, removedSurfaceIds);
+ } else {
+ // Also returns BAD_VALUE if stream ID was not valid
+ err = mDevice->deleteStream(streamId);
+ }
if (err != OK) {
std::string msg = fmt::sprintf("Camera %s: Unexpected error %s (%d) when deleting stream "
@@ -908,7 +910,6 @@
int64_t dynamicRangeProfile = outputConfiguration.getDynamicRangeProfile();
int64_t streamUseCase = outputConfiguration.getStreamUseCase();
int timestampBase = outputConfiguration.getTimestampBase();
- int mirrorMode = outputConfiguration.getMirrorMode();
int32_t colorSpace = outputConfiguration.getColorSpace();
bool useReadoutTimestamp = outputConfiguration.useReadoutTimestamp();
@@ -927,8 +928,9 @@
return res;
}
- std::vector<sp<Surface>> surfaces;
+ std::vector<SurfaceHolder> surfaces;
std::vector<sp<IBinder>> binders;
+ std::vector<OutputStreamInfo> streamInfos;
status_t err;
// Create stream for deferred surface case.
@@ -952,6 +954,7 @@
return STATUS_ERROR(CameraService::ERROR_ALREADY_EXISTS, msg.c_str());
}
+ int mirrorMode = outputConfiguration.getMirrorMode(bufferProducer);
sp<Surface> surface;
res = SessionConfigurationUtils::createSurfaceFromGbp(streamInfo,
isStreamInfoValid, surface, bufferProducer, mCameraIdStr,
@@ -966,52 +969,58 @@
}
binders.push_back(IInterface::asBinder(bufferProducer));
- surfaces.push_back(surface);
+ surfaces.push_back({surface, mirrorMode});
+ if (flags::camera_multi_client() && mSharedMode) {
+ streamInfos.push_back(streamInfo);
+ }
}
- // If mOverrideForPerfClass is true, do not fail createStream() for small
- // JPEG sizes because existing createSurfaceFromGbp() logic will find the
- // closest possible supported size.
-
int streamId = camera3::CAMERA3_STREAM_ID_INVALID;
std::vector<int> surfaceIds;
- bool isDepthCompositeStream =
- camera3::DepthCompositeStream::isDepthCompositeStream(surfaces[0]);
- bool isHeicCompositeStream = camera3::HeicCompositeStream::isHeicCompositeStream(surfaces[0]);
- bool isJpegRCompositeStream =
- camera3::JpegRCompositeStream::isJpegRCompositeStream(surfaces[0]) &&
- !mDevice->isCompositeJpegRDisabled();
- if (isDepthCompositeStream || isHeicCompositeStream || isJpegRCompositeStream) {
- sp<CompositeStream> compositeStream;
- if (isDepthCompositeStream) {
- compositeStream = new camera3::DepthCompositeStream(mDevice, getRemoteCallback());
- } else if (isHeicCompositeStream) {
- compositeStream = new camera3::HeicCompositeStream(mDevice, getRemoteCallback());
- } else {
- compositeStream = new camera3::JpegRCompositeStream(mDevice, getRemoteCallback());
+ if (flags::camera_multi_client() && mSharedMode) {
+ err = mDevice->getSharedStreamId(outputConfiguration, &streamId);
+ if (err == OK) {
+ err = mDevice->addSharedSurfaces(streamId, streamInfos, surfaces, &surfaceIds);
}
-
- err = compositeStream->createStream(surfaces, deferredConsumer, streamInfo.width,
+ } else {
+ bool isDepthCompositeStream =
+ camera3::DepthCompositeStream::isDepthCompositeStream(surfaces[0].mSurface);
+ bool isHeicCompositeStream = camera3::HeicCompositeStream::isHeicCompositeStream(
+ surfaces[0].mSurface);
+ bool isJpegRCompositeStream =
+ camera3::JpegRCompositeStream::isJpegRCompositeStream(surfaces[0].mSurface) &&
+ !mDevice->isCompositeJpegRDisabled();
+ if (isDepthCompositeStream || isHeicCompositeStream || isJpegRCompositeStream) {
+ sp<CompositeStream> compositeStream;
+ if (isDepthCompositeStream) {
+ compositeStream = new camera3::DepthCompositeStream(mDevice, getRemoteCallback());
+ } else if (isHeicCompositeStream) {
+ compositeStream = new camera3::HeicCompositeStream(mDevice, getRemoteCallback());
+ } else {
+ compositeStream = new camera3::JpegRCompositeStream(mDevice, getRemoteCallback());
+ }
+ err = compositeStream->createStream(surfaces, deferredConsumer, streamInfo.width,
streamInfo.height, streamInfo.format,
static_cast<camera_stream_rotation_t>(outputConfiguration.getRotation()),
&streamId, physicalCameraId, streamInfo.sensorPixelModesUsed, &surfaceIds,
outputConfiguration.getSurfaceSetID(), isShared, isMultiResolution,
streamInfo.colorSpace, streamInfo.dynamicRangeProfile, streamInfo.streamUseCase,
useReadoutTimestamp);
- if (err == OK) {
- Mutex::Autolock l(mCompositeLock);
- mCompositeStreamMap.add(IInterface::asBinder(surfaces[0]->getIGraphicBufferProducer()),
- compositeStream);
+ if (err == OK) {
+ Mutex::Autolock l(mCompositeLock);
+ mCompositeStreamMap.add(
+ IInterface::asBinder(surfaces[0].mSurface->getIGraphicBufferProducer()),
+ compositeStream);
+ }
+ } else {
+ err = mDevice->createStream(surfaces, deferredConsumer, streamInfo.width,
+ streamInfo.height, streamInfo.format, streamInfo.dataSpace,
+ static_cast<camera_stream_rotation_t>(outputConfiguration.getRotation()),
+ &streamId, physicalCameraId, streamInfo.sensorPixelModesUsed, &surfaceIds,
+ outputConfiguration.getSurfaceSetID(), isShared, isMultiResolution,
+ /*consumerUsage*/0, streamInfo.dynamicRangeProfile, streamInfo.streamUseCase,
+ streamInfo.timestampBase, streamInfo.colorSpace, useReadoutTimestamp);
}
- } else {
- err = mDevice->createStream(surfaces, deferredConsumer, streamInfo.width,
- streamInfo.height, streamInfo.format, streamInfo.dataSpace,
- static_cast<camera_stream_rotation_t>(outputConfiguration.getRotation()),
- &streamId, physicalCameraId, streamInfo.sensorPixelModesUsed, &surfaceIds,
- outputConfiguration.getSurfaceSetID(), isShared, isMultiResolution,
- /*consumerUsage*/0, streamInfo.dynamicRangeProfile, streamInfo.streamUseCase,
- streamInfo.timestampBase, streamInfo.mirrorMode, streamInfo.colorSpace,
- useReadoutTimestamp);
}
if (err != OK) {
@@ -1036,9 +1045,6 @@
__FUNCTION__, mCameraIdStr.c_str(), streamId, streamInfo.width,
streamInfo.height, streamInfo.format);
- // Set transform flags to ensure preview to be rotated correctly.
- res = setStreamTransformLocked(streamId, streamInfo.mirrorMode);
-
// Fill in mHighResolutionCameraIdToStreamIdSet map
const std::string &cameraIdUsed =
physicalCameraId.size() != 0 ? physicalCameraId : mCameraIdStr;
@@ -1087,7 +1093,7 @@
consumerUsage |= GraphicBuffer::USAGE_HW_COMPOSER;
}
int streamId = camera3::CAMERA3_STREAM_ID_INVALID;
- std::vector<sp<Surface>> noSurface;
+ std::vector<SurfaceHolder> noSurface;
std::vector<int> surfaceIds;
const std::string &physicalCameraId = outputConfiguration.getPhysicalCameraId();
const std::string &cameraIdUsed =
@@ -1113,7 +1119,6 @@
outputConfiguration.isMultiResolution(), consumerUsage,
outputConfiguration.getDynamicRangeProfile(),
outputConfiguration.getStreamUseCase(),
- outputConfiguration.getMirrorMode(),
outputConfiguration.useReadoutTimestamp());
if (err != OK) {
@@ -1132,16 +1137,12 @@
outputConfiguration.getDynamicRangeProfile(),
outputConfiguration.getStreamUseCase(),
outputConfiguration.getTimestampBase(),
- outputConfiguration.getMirrorMode(),
colorSpace));
ALOGV("%s: Camera %s: Successfully created a new stream ID %d for a deferred surface"
" (%d x %d) stream with format 0x%x.",
__FUNCTION__, mCameraIdStr.c_str(), streamId, width, height, format);
- // Set transform flags to ensure preview to be rotated correctly.
- res = setStreamTransformLocked(streamId, outputConfiguration.getMirrorMode());
-
*newStreamId = streamId;
// Fill in mHighResolutionCameraIdToStreamIdSet
// Only needed for high resolution sensors
@@ -1153,33 +1154,6 @@
return res;
}
-binder::Status CameraDeviceClient::setStreamTransformLocked(int streamId, int mirrorMode) {
- int32_t transform = 0;
- status_t err;
- binder::Status res;
-
- if (!mDevice.get()) {
- return STATUS_ERROR(CameraService::ERROR_DISCONNECTED, "Camera device no longer alive");
- }
-
- err = getRotationTransformLocked(mirrorMode, &transform);
- if (err != OK) {
- // Error logged by getRotationTransformLocked.
- return STATUS_ERROR(CameraService::ERROR_INVALID_OPERATION,
- "Unable to calculate rotation transform for new stream");
- }
-
- err = mDevice->setStreamTransform(streamId, transform);
- if (err != OK) {
- std::string msg = fmt::sprintf("Failed to set stream transform (stream id %d)",
- streamId);
- ALOGE("%s: %s", __FUNCTION__, msg.c_str());
- return STATUS_ERROR(CameraService::ERROR_INVALID_OPERATION, msg.c_str());
- }
-
- return res;
-}
-
binder::Status CameraDeviceClient::createInputStream(
int width, int height, int format, bool isMultiResolution,
/*out*/
@@ -1312,7 +1286,7 @@
std::vector<size_t> removedSurfaceIds;
std::vector<sp<IBinder>> removedOutputs;
- std::vector<sp<Surface>> newOutputs;
+ std::vector<SurfaceHolder> newOutputs;
std::vector<OutputStreamInfo> streamInfos;
KeyedVector<sp<IBinder>, sp<IGraphicBufferProducer>> newOutputsMap;
for (auto &it : bufferProducers) {
@@ -1341,11 +1315,11 @@
int timestampBase = outputConfiguration.getTimestampBase();
int64_t dynamicRangeProfile = outputConfiguration.getDynamicRangeProfile();
int32_t colorSpace = outputConfiguration.getColorSpace();
- int mirrorMode = outputConfiguration.getMirrorMode();
for (size_t i = 0; i < newOutputsMap.size(); i++) {
OutputStreamInfo outInfo;
sp<Surface> surface;
+ int mirrorMode = outputConfiguration.getMirrorMode(newOutputsMap.valueAt(i));
res = SessionConfigurationUtils::createSurfaceFromGbp(outInfo,
/*isStreamInfoValid*/ false, surface, newOutputsMap.valueAt(i), mCameraIdStr,
mDevice->infoPhysical(physicalCameraId), sensorPixelModesUsed, dynamicRangeProfile,
@@ -1354,7 +1328,7 @@
return res;
streamInfos.push_back(outInfo);
- newOutputs.push_back(surface);
+ newOutputs.push_back({surface, mirrorMode});
}
//Trivial case no changes required
@@ -1711,14 +1685,13 @@
return STATUS_ERROR(CameraService::ERROR_DISCONNECTED, "Camera device no longer alive");
}
- std::vector<sp<Surface>> consumerSurfaces;
+ std::vector<SurfaceHolder> consumerSurfaceHolders;
const std::vector<int32_t> &sensorPixelModesUsed =
outputConfiguration.getSensorPixelModesUsed();
int64_t dynamicRangeProfile = outputConfiguration.getDynamicRangeProfile();
int32_t colorSpace = outputConfiguration.getColorSpace();
int64_t streamUseCase = outputConfiguration.getStreamUseCase();
int timestampBase = outputConfiguration.getTimestampBase();
- int mirrorMode = outputConfiguration.getMirrorMode();
for (auto& bufferProducer : bufferProducers) {
// Don't create multiple streams for the same target surface
ssize_t index = mStreamMap.indexOfKey(IInterface::asBinder(bufferProducer));
@@ -1729,6 +1702,7 @@
}
sp<Surface> surface;
+ int mirrorMode = outputConfiguration.getMirrorMode(bufferProducer);
res = SessionConfigurationUtils::createSurfaceFromGbp(mStreamInfoMap[streamId],
true /*isStreamInfoValid*/, surface, bufferProducer, mCameraIdStr,
mDevice->infoPhysical(physicalId), sensorPixelModesUsed, dynamicRangeProfile,
@@ -1737,12 +1711,12 @@
if (!res.isOk())
return res;
- consumerSurfaces.push_back(surface);
+ consumerSurfaceHolders.push_back({surface, mirrorMode});
}
// Gracefully handle case where finalizeOutputConfigurations is called
// without any new surface.
- if (consumerSurfaces.size() == 0) {
+ if (consumerSurfaceHolders.size() == 0) {
mStreamInfoMap[streamId].finalized = true;
return res;
}
@@ -1750,11 +1724,11 @@
// Finish the deferred stream configuration with the surface.
status_t err;
std::vector<int> consumerSurfaceIds;
- err = mDevice->setConsumerSurfaces(streamId, consumerSurfaces, &consumerSurfaceIds);
+ err = mDevice->setConsumerSurfaces(streamId, consumerSurfaceHolders, &consumerSurfaceIds);
if (err == OK) {
- for (size_t i = 0; i < consumerSurfaces.size(); i++) {
+ for (size_t i = 0; i < consumerSurfaceHolders.size(); i++) {
sp<IBinder> binder = IInterface::asBinder(
- consumerSurfaces[i]->getIGraphicBufferProducer());
+ consumerSurfaceHolders[i].mSurface->getIGraphicBufferProducer());
ALOGV("%s: mStreamMap add binder %p streamId %d, surfaceId %d", __FUNCTION__,
binder.get(), streamId, consumerSurfaceIds[i]);
mStreamMap.add(binder, StreamSurfaceId(streamId, consumerSurfaceIds[i]));
@@ -1805,6 +1779,20 @@
return binder::Status::ok();
}
+binder::Status CameraDeviceClient::isPrimaryClient(/*out*/bool* isPrimary) {
+ ATRACE_CALL();
+ binder::Status res = binder::Status::ok();
+ if (!flags::camera_multi_client()) {
+ return res;
+ }
+ if (!(res = checkPidStatus(__FUNCTION__)).isOk()) return res;
+ if (isPrimary != nullptr) {
+ status_t ret = BasicClient::isPrimaryClient(isPrimary);
+ return binder::Status::fromStatusT(ret);
+ }
+ return res;
+}
+
status_t CameraDeviceClient::setCameraServiceWatchdog(bool enabled) {
return mDevice->setCameraServiceWatchdog(enabled);
}
@@ -1934,10 +1922,10 @@
sp<CameraOfflineSessionClient> offlineClient;
if (offlineSession.get() != nullptr) {
- offlineClient = new CameraOfflineSessionClient(sCameraService,
- offlineSession, offlineCompositeStreamMap, cameraCb, mAttributionAndPermissionUtils,
- mClientPackageName, mClientFeatureId, mCameraIdStr, mCameraFacing, mOrientation,
- mClientPid, mClientUid, mServicePid);
+ offlineClient = new CameraOfflineSessionClient(
+ sCameraService, offlineSession, offlineCompositeStreamMap, cameraCb,
+ mAttributionAndPermissionUtils, mClientAttribution, mCallingPid, mCameraIdStr,
+ mCameraFacing, mOrientation, mServicePid, /*sharedMode*/false);
ret = sCameraService->addOfflineClient(mCameraIdStr, offlineClient);
}
@@ -1984,7 +1972,7 @@
mCameraIdStr.c_str(),
(getRemoteCallback() != NULL ?
IInterface::asBinder(getRemoteCallback()).get() : NULL) );
- dprintf(fd, " Current client UID %u\n", mClientUid);
+ dprintf(fd, " Current client UID %u\n", getClientUid());
dprintf(fd, " State:\n");
dprintf(fd, " Request ID counter: %d\n", mRequestIdCounter);
@@ -2140,46 +2128,59 @@
}
}
+void CameraDeviceClient::notifyClientSharedAccessPriorityChanged(bool primaryClient) {
+ // Thread safe. Don't bother locking.
+ if (!flags::camera_multi_client()) {
+ return;
+ }
+ sp<hardware::camera2::ICameraDeviceCallbacks> remoteCb = getRemoteCallback();
+ if (remoteCb != 0) {
+ remoteCb->onClientSharedAccessPriorityChanged(primaryClient);
+ }
+}
+
void CameraDeviceClient::detachDevice() {
if (mDevice == 0) return;
nsecs_t startTime = systemTime();
- ALOGV("Camera %s: Stopping processors", mCameraIdStr.c_str());
+ if (!flags::camera_multi_client() || sCameraService->isOnlyClient(this)){
+ ALOGV("Camera %s: Stopping processors", mCameraIdStr.c_str());
- if (mFrameProcessor.get() != nullptr) {
- mFrameProcessor->removeListener(
- camera2::FrameProcessorBase::FRAME_PROCESSOR_LISTENER_MIN_ID,
- camera2::FrameProcessorBase::FRAME_PROCESSOR_LISTENER_MAX_ID, /*listener*/this);
- mFrameProcessor->requestExit();
- ALOGV("Camera %s: Waiting for threads", mCameraIdStr.c_str());
- mFrameProcessor->join();
- ALOGV("Camera %s: Disconnecting device", mCameraIdStr.c_str());
- }
-
- // WORKAROUND: HAL refuses to disconnect while there's streams in flight
- {
- int64_t lastFrameNumber;
- status_t code;
- if ((code = mDevice->flush(&lastFrameNumber)) != OK) {
- ALOGE("%s: flush failed with code 0x%x", __FUNCTION__, code);
+ if (mFrameProcessor.get() != nullptr) {
+ mFrameProcessor->removeListener(
+ camera2::FrameProcessorBase::FRAME_PROCESSOR_LISTENER_MIN_ID,
+ camera2::FrameProcessorBase::FRAME_PROCESSOR_LISTENER_MAX_ID, /*listener*/this);
+ mFrameProcessor->requestExit();
+ ALOGV("Camera %s: Waiting for threads", mCameraIdStr.c_str());
+ mFrameProcessor->join();
+ ALOGV("Camera %s: Disconnecting device", mCameraIdStr.c_str());
}
- if ((code = mDevice->waitUntilDrained()) != OK) {
- ALOGE("%s: waitUntilDrained failed with code 0x%x", __FUNCTION__,
- code);
- }
- }
+ // WORKAROUND: HAL refuses to disconnect while there's streams in flight
+ {
+ int64_t lastFrameNumber;
+ status_t code;
+ if ((code = mDevice->flush(&lastFrameNumber)) != OK) {
+ ALOGE("%s: flush failed with code 0x%x", __FUNCTION__, code);
+ }
- {
- Mutex::Autolock l(mCompositeLock);
- for (size_t i = 0; i < mCompositeStreamMap.size(); i++) {
- auto ret = mCompositeStreamMap.valueAt(i)->deleteInternalStreams();
- if (ret != OK) {
- ALOGE("%s: Failed removing composite stream %s (%d)", __FUNCTION__,
- strerror(-ret), ret);
+ if ((code = mDevice->waitUntilDrained()) != OK) {
+ ALOGE("%s: waitUntilDrained failed with code 0x%x", __FUNCTION__,
+ code);
}
}
- mCompositeStreamMap.clear();
+
+ {
+ Mutex::Autolock l(mCompositeLock);
+ for (size_t i = 0; i < mCompositeStreamMap.size(); i++) {
+ auto ret = mCompositeStreamMap.valueAt(i)->deleteInternalStreams();
+ if (ret != OK) {
+ ALOGE("%s: Failed removing composite stream %s (%d)", __FUNCTION__,
+ strerror(-ret), ret);
+ }
+ }
+ mCompositeStreamMap.clear();
+ }
}
bool hasDeviceError = mDevice->hasDeviceError();
@@ -2271,14 +2272,6 @@
return true;
}
-status_t CameraDeviceClient::getRotationTransformLocked(int mirrorMode,
- int32_t* transform) {
- ALOGV("%s: begin", __FUNCTION__);
-
- const CameraMetadata& staticInfo = mDevice->info();
- return CameraUtils::getRotationTransform(staticInfo, mirrorMode, transform);
-}
-
const CameraMetadata &CameraDeviceClient::getStaticInfo(const std::string &cameraId) {
if (mDevice->getId() == cameraId) {
return mDevice->info();
diff --git a/services/camera/libcameraservice/api2/CameraDeviceClient.h b/services/camera/libcameraservice/api2/CameraDeviceClient.h
index 42f2752..a8cf451 100644
--- a/services/camera/libcameraservice/api2/CameraDeviceClient.h
+++ b/services/camera/libcameraservice/api2/CameraDeviceClient.h
@@ -48,20 +48,14 @@
}
protected:
- CameraDeviceClientBase(const sp<CameraService>& cameraService,
+ CameraDeviceClientBase(
+ const sp<CameraService>& cameraService,
const sp<hardware::camera2::ICameraDeviceCallbacks>& remoteCallback,
std::shared_ptr<AttributionAndPermissionUtils> attributionAndPermissionUtils,
- const std::string& clientPackageName,
- bool systemNativeClient,
- const std::optional<std::string>& clientFeatureId,
- const std::string& cameraId,
- int api1CameraId,
- int cameraFacing,
- int sensorOrientation,
- int clientPid,
- uid_t clientUid,
- int servicePid,
- int rotationOverride);
+ const AttributionSourceState& clientAttribution, int callingPid,
+ bool systemNativeClient, const std::string& cameraId, int api1CameraId,
+ int cameraFacing, int sensorOrientation, int servicePid, int rotationOverride,
+ bool sharedMode);
sp<hardware::camera2::ICameraDeviceCallbacks> mRemoteCallback;
};
@@ -175,26 +169,20 @@
/*out*/
sp<hardware::camera2::ICameraOfflineSession>* session) override;
+ virtual binder::Status isPrimaryClient(/*out*/bool* isPrimary) override;
+
/**
* Interface used by CameraService
*/
CameraDeviceClient(const sp<CameraService>& cameraService,
- const sp<hardware::camera2::ICameraDeviceCallbacks>& remoteCallback,
- std::shared_ptr<CameraServiceProxyWrapper> cameraServiceProxyWrapper,
- std::shared_ptr<AttributionAndPermissionUtils> attributionAndPermissionUtils,
- const std::string& clientPackageName,
- bool clientPackageOverride,
- const std::optional<std::string>& clientFeatureId,
- const std::string& cameraId,
- int cameraFacing,
- int sensorOrientation,
- int clientPid,
- uid_t clientUid,
- int servicePid,
- bool overrideForPerfClass,
- int rotationOverride,
- const std::string& originalCameraId);
+ const sp<hardware::camera2::ICameraDeviceCallbacks>& remoteCallback,
+ std::shared_ptr<CameraServiceProxyWrapper> cameraServiceProxyWrapper,
+ std::shared_ptr<AttributionAndPermissionUtils> attributionAndPermissionUtils,
+ const AttributionSourceState& clientAttribution, int callingPid,
+ bool clientPackageOverride, const std::string& cameraId, int cameraFacing,
+ int sensorOrientation, int servicePid, bool overrideForPerfClass,
+ int rotationOverride, const std::string& originalCameraId, bool sharedMode);
virtual ~CameraDeviceClient();
virtual status_t initialize(sp<CameraProviderManager> manager,
@@ -237,6 +225,7 @@
virtual void notifyPrepared(int streamId);
virtual void notifyRequestQueueEmpty();
virtual void notifyRepeatingRequestError(long lastFrameNumber);
+ virtual void notifyClientSharedAccessPriorityChanged(bool primaryClient);
void setImageDumpMask(int mask) { if (mDevice != nullptr) mDevice->setImageDumpMask(mask); }
/**
@@ -247,9 +236,6 @@
virtual void onResultAvailable(const CaptureResult& result);
virtual void detachDevice();
- // Calculate the ANativeWindow transform from android.sensor.orientation
- status_t getRotationTransformLocked(int mirrorMode, /*out*/int32_t* transform);
-
bool supportsUltraHighResolutionCapture(const std::string &cameraId);
bool isSensorPixelModeConsistent(const std::list<int> &streamIdList,
@@ -306,10 +292,6 @@
bool isShared,
int* newStreamId = NULL);
- // Set the stream transform flags to automatically rotate the camera stream for preview use
- // cases.
- binder::Status setStreamTransformLocked(int streamId, int mirrorMode);
-
// Utility method to insert the surface into SurfaceMap
binder::Status insertGbpLocked(const sp<IGraphicBufferProducer>& gbp,
/*out*/SurfaceMap* surfaceMap, /*out*/Vector<int32_t>* streamIds,
diff --git a/services/camera/libcameraservice/api2/CameraOfflineSessionClient.cpp b/services/camera/libcameraservice/api2/CameraOfflineSessionClient.cpp
index 9a1fdd6..71fd3ba 100644
--- a/services/camera/libcameraservice/api2/CameraOfflineSessionClient.cpp
+++ b/services/camera/libcameraservice/api2/CameraOfflineSessionClient.cpp
@@ -34,8 +34,8 @@
return OK;
}
- // Verify ops permissions
- auto res = startCameraOps();
+ // Verify ops permissions and/or open camera
+ auto res = notifyCameraOpening();
if (res != OK) {
return res;
}
@@ -163,7 +163,7 @@
}
// Allow both client and the media server to disconnect at all times
int callingPid = getCallingPid();
- if (callingPid != mClientPid &&
+ if (callingPid != mCallingPid &&
callingPid != mServicePid) {
return res;
}
@@ -171,7 +171,7 @@
mDisconnected = true;
sCameraService->removeByClient(this);
- sCameraService->logDisconnectedOffline(mCameraIdStr, mClientPid, mClientPackageName);
+ sCameraService->logDisconnectedOffline(mCameraIdStr, mCallingPid, getPackageName());
sp<IBinder> remote = getRemote();
if (remote != nullptr) {
@@ -184,12 +184,12 @@
mFrameProcessor->requestExit();
mFrameProcessor->join();
- finishCameraOps();
+ notifyCameraClosing();
ALOGI("%s: Disconnected client for offline camera %s for PID %d", __FUNCTION__,
- mCameraIdStr.c_str(), mClientPid);
+ mCameraIdStr.c_str(), mCallingPid);
// client shouldn't be able to call into us anymore
- mClientPid = 0;
+ mCallingPid = 0;
if (mOfflineSession.get() != nullptr) {
auto ret = mOfflineSession->disconnect();
@@ -227,11 +227,11 @@
}
}
-status_t CameraOfflineSessionClient::startCameraOps() {
+status_t CameraOfflineSessionClient::notifyCameraOpening() {
ATRACE_CALL();
{
- ALOGV("%s: Start camera ops, package name = %s, client UID = %d",
- __FUNCTION__, mClientPackageName.c_str(), mClientUid);
+ ALOGV("%s: Notify camera opening, package name = %s, client UID = %d", __FUNCTION__,
+ getPackageName().c_str(), getClientUid());
}
if (mAppOpsManager != nullptr) {
@@ -239,47 +239,48 @@
mOpsCallback = new OpsCallback(this);
int32_t res;
// TODO : possibly change this to OP_OFFLINE_CAMERA_SESSION
- mAppOpsManager->startWatchingMode(AppOpsManager::OP_CAMERA,
- toString16(mClientPackageName), mOpsCallback);
+ mAppOpsManager->startWatchingMode(AppOpsManager::OP_CAMERA, toString16(getPackageName()),
+ mOpsCallback);
// TODO : possibly change this to OP_OFFLINE_CAMERA_SESSION
- res = mAppOpsManager->startOpNoThrow(AppOpsManager::OP_CAMERA,
- mClientUid, toString16(mClientPackageName), /*startIfModeDefault*/ false);
+ res = mAppOpsManager->startOpNoThrow(AppOpsManager::OP_CAMERA, getClientUid(),
+ toString16(getPackageName()),
+ /*startIfModeDefault*/ false);
if (res == AppOpsManager::MODE_ERRORED) {
- ALOGI("Offline Camera %s: Access for \"%s\" has been revoked",
- mCameraIdStr.c_str(), mClientPackageName.c_str());
+ ALOGI("Offline Camera %s: Access for \"%s\" has been revoked", mCameraIdStr.c_str(),
+ getPackageName().c_str());
return PERMISSION_DENIED;
}
// If the calling Uid is trusted (a native service), the AppOpsManager could
// return MODE_IGNORED. Do not treat such case as error.
if (!mUidIsTrusted && res == AppOpsManager::MODE_IGNORED) {
- ALOGI("Offline Camera %s: Access for \"%s\" has been restricted",
- mCameraIdStr.c_str(), mClientPackageName.c_str());
+ ALOGI("Offline Camera %s: Access for \"%s\" has been restricted", mCameraIdStr.c_str(),
+ getPackageName().c_str());
// Return the same error as for device policy manager rejection
return -EACCES;
}
}
- mOpsActive = true;
+ mCameraOpen = true;
// Transition device state to OPEN
- sCameraService->mUidPolicy->registerMonitorUid(mClientUid, /*openCamera*/true);
+ sCameraService->mUidPolicy->registerMonitorUid(getClientUid(), /*openCamera*/ true);
return OK;
}
-status_t CameraOfflineSessionClient::finishCameraOps() {
+status_t CameraOfflineSessionClient::notifyCameraClosing() {
ATRACE_CALL();
- // Check if startCameraOps succeeded, and if so, finish the camera op
- if (mOpsActive) {
+ // Check if notifyCameraOpening succeeded, and if so, finish the camera op if necessary
+ if (mCameraOpen) {
// Notify app ops that the camera is available again
if (mAppOpsManager != nullptr) {
- // TODO : possibly change this to OP_OFFLINE_CAMERA_SESSION
- mAppOpsManager->finishOp(AppOpsManager::OP_CAMERA, mClientUid,
- toString16(mClientPackageName));
- mOpsActive = false;
+ // TODO : possibly change this to OP_OFFLINE_CAMERA_SESSION
+ mAppOpsManager->finishOp(AppOpsManager::OP_CAMERA, getClientUid(),
+ toString16(getPackageName()));
+ mCameraOpen = false;
}
}
// Always stop watching, even if no camera op is active
@@ -288,7 +289,7 @@
}
mOpsCallback.clear();
- sCameraService->mUidPolicy->unregisterMonitorUid(mClientUid, /*closeCamera*/true);
+ sCameraService->mUidPolicy->unregisterMonitorUid(getClientUid(), /*closeCamera*/ true);
return OK;
}
@@ -307,6 +308,9 @@
}
}
+void CameraOfflineSessionClient::notifyClientSharedAccessPriorityChanged(bool /*primaryClient*/) {
+}
+
void CameraOfflineSessionClient::notifyShutter(const CaptureResultExtras& resultExtras,
nsecs_t timestamp) {
diff --git a/services/camera/libcameraservice/api2/CameraOfflineSessionClient.h b/services/camera/libcameraservice/api2/CameraOfflineSessionClient.h
index 77de874..78a3055 100644
--- a/services/camera/libcameraservice/api2/CameraOfflineSessionClient.h
+++ b/services/camera/libcameraservice/api2/CameraOfflineSessionClient.h
@@ -44,25 +44,22 @@
{
public:
CameraOfflineSessionClient(
- const sp<CameraService>& cameraService,
- sp<CameraOfflineSessionBase> session,
+ const sp<CameraService>& cameraService, sp<CameraOfflineSessionBase> session,
const KeyedVector<sp<IBinder>, sp<CompositeStream>>& offlineCompositeStreamMap,
const sp<ICameraDeviceCallbacks>& remoteCallback,
std::shared_ptr<AttributionAndPermissionUtils> attributionAndPermissionUtils,
- const std::string& clientPackageName,
- const std::optional<std::string>& clientFeatureId,
- const std::string& cameraIdStr, int cameraFacing, int sensorOrientation,
- int clientPid, uid_t clientUid, int servicePid) :
- CameraService::BasicClient(
- cameraService,
- IInterface::asBinder(remoteCallback),
- attributionAndPermissionUtils,
- // (v)ndk doesn't have offline session support
- clientPackageName, /*overridePackageName*/false, clientFeatureId,
- cameraIdStr, cameraFacing, sensorOrientation, clientPid, clientUid, servicePid,
- hardware::ICameraService::ROTATION_OVERRIDE_NONE),
- mRemoteCallback(remoteCallback), mOfflineSession(session),
- mCompositeStreamMap(offlineCompositeStreamMap) {}
+ const AttributionSourceState& clientAttribution, int callingPid,
+ const std::string& cameraIdStr, int cameraFacing, int sensorOrientation, int servicePid,
+ bool sharedMode)
+ : CameraService::BasicClient(cameraService, IInterface::asBinder(remoteCallback),
+ attributionAndPermissionUtils,
+ // (v)ndk doesn't have offline session support
+ clientAttribution, callingPid, /*overridePackageName*/ false,
+ cameraIdStr, cameraFacing, sensorOrientation, servicePid,
+ hardware::ICameraService::ROTATION_OVERRIDE_NONE, sharedMode),
+ mRemoteCallback(remoteCallback),
+ mOfflineSession(session),
+ mCompositeStreamMap(offlineCompositeStreamMap) {}
virtual ~CameraOfflineSessionClient() {}
@@ -102,8 +99,8 @@
status_t setZoomOverride(int32_t zoomOverride) override;
// permissions management
- status_t startCameraOps() override;
- status_t finishCameraOps() override;
+ status_t notifyCameraOpening() override;
+ status_t notifyCameraClosing() override;
// FilteredResultListener API
void onResultAvailable(const CaptureResult& result) override;
@@ -123,6 +120,7 @@
void notifyRepeatingRequestError(long lastFrameNumber) override;
status_t injectCamera(const std::string& injectedCamId,
sp<CameraProviderManager> manager) override;
+ void notifyClientSharedAccessPriorityChanged(bool primaryClient) override;
status_t stopInjection() override;
status_t injectSessionParams(
const hardware::camera2::impl::CameraMetadataNative& sessionParams) override;
diff --git a/services/camera/libcameraservice/api2/CompositeStream.cpp b/services/camera/libcameraservice/api2/CompositeStream.cpp
index 8f53458..6d7fabd 100644
--- a/services/camera/libcameraservice/api2/CompositeStream.cpp
+++ b/services/camera/libcameraservice/api2/CompositeStream.cpp
@@ -44,7 +44,7 @@
}
}
-status_t CompositeStream::createStream(const std::vector<sp<Surface>>& consumers,
+status_t CompositeStream::createStream(const std::vector<SurfaceHolder>& consumers,
bool hasDeferredConsumer, uint32_t width, uint32_t height, int format,
camera_stream_rotation_t rotation, int * id, const std::string& physicalCameraId,
const std::unordered_set<int32_t> &sensorPixelModesUsed,
diff --git a/services/camera/libcameraservice/api2/CompositeStream.h b/services/camera/libcameraservice/api2/CompositeStream.h
index fa569ce..2b158c9 100644
--- a/services/camera/libcameraservice/api2/CompositeStream.h
+++ b/services/camera/libcameraservice/api2/CompositeStream.h
@@ -41,7 +41,7 @@
CompositeStream(sp<CameraDeviceBase> device, wp<hardware::camera2::ICameraDeviceCallbacks> cb);
virtual ~CompositeStream() {}
- status_t createStream(const std::vector<sp<Surface>>& consumers,
+ status_t createStream(const std::vector<SurfaceHolder>& consumers,
bool hasDeferredConsumer, uint32_t width, uint32_t height, int format,
camera_stream_rotation_t rotation, int *id, const std::string& physicalCameraId,
const std::unordered_set<int32_t> &sensorPixelModesUsed,
@@ -55,7 +55,7 @@
void switchToOffline();
// Create and register all internal camera streams.
- virtual status_t createInternalStreams(const std::vector<sp<Surface>>& consumers,
+ virtual status_t createInternalStreams(const std::vector<SurfaceHolder>& consumers,
bool hasDeferredConsumer, uint32_t width, uint32_t height, int format,
camera_stream_rotation_t rotation, int *id, const std::string& physicalCameraId,
const std::unordered_set<int32_t> &sensorPixelModesUsed,
diff --git a/services/camera/libcameraservice/api2/DepthCompositeStream.cpp b/services/camera/libcameraservice/api2/DepthCompositeStream.cpp
index 244a1e5..14618c4 100644
--- a/services/camera/libcameraservice/api2/DepthCompositeStream.cpp
+++ b/services/camera/libcameraservice/api2/DepthCompositeStream.cpp
@@ -588,7 +588,7 @@
}
-status_t DepthCompositeStream::createInternalStreams(const std::vector<sp<Surface>>& consumers,
+status_t DepthCompositeStream::createInternalStreams(const std::vector<SurfaceHolder>& consumers,
bool /*hasDeferredConsumer*/, uint32_t width, uint32_t height, int format,
camera_stream_rotation_t rotation, int *id, const std::string& physicalCameraId,
const std::unordered_set<int32_t> &sensorPixelModesUsed,
@@ -643,7 +643,7 @@
if (ret == OK) {
mBlobStreamId = *id;
mBlobSurfaceId = (*surfaceIds)[0];
- mOutputSurface = consumers[0];
+ mOutputSurface = consumers[0].mSurface;
} else {
return ret;
}
diff --git a/services/camera/libcameraservice/api2/DepthCompositeStream.h b/services/camera/libcameraservice/api2/DepthCompositeStream.h
index 75deef7..9c0311e 100644
--- a/services/camera/libcameraservice/api2/DepthCompositeStream.h
+++ b/services/camera/libcameraservice/api2/DepthCompositeStream.h
@@ -49,7 +49,7 @@
static bool isDepthCompositeStreamInfo(const OutputStreamInfo& streamInfo);
// CompositeStream overrides
- status_t createInternalStreams(const std::vector<sp<Surface>>& consumers,
+ status_t createInternalStreams(const std::vector<SurfaceHolder>& consumers,
bool hasDeferredConsumer, uint32_t width, uint32_t height, int format,
camera_stream_rotation_t rotation, int *id, const std::string& physicalCameraId,
const std::unordered_set<int32_t> &sensorPixelModesUsed,
diff --git a/services/camera/libcameraservice/api2/HeicCompositeStream.cpp b/services/camera/libcameraservice/api2/HeicCompositeStream.cpp
index 3af673b..768eaf8 100644
--- a/services/camera/libcameraservice/api2/HeicCompositeStream.cpp
+++ b/services/camera/libcameraservice/api2/HeicCompositeStream.cpp
@@ -27,54 +27,79 @@
#include <aidl/android/hardware/camera/device/CameraBlobId.h>
#include <camera/StringUtils.h>
#include <com_android_graphics_libgui_flags.h>
+#include <com_android_internal_camera_flags.h>
#include <gui/Surface.h>
#include <libyuv.h>
#include <utils/Log.h>
#include <utils/Trace.h>
+#include <ultrahdr/jpegr.h>
+#include <ultrahdr/ultrahdrcommon.h>
-#include <mediadrm/ICrypto.h>
#include <media/MediaCodecBuffer.h>
+#include <media/stagefright/MediaCodecConstants.h>
+#include <media/stagefright/MetaData.h>
#include <media/stagefright/foundation/ABuffer.h>
#include <media/stagefright/foundation/MediaDefs.h>
-#include <media/stagefright/MediaCodecConstants.h>
+#include <mediadrm/ICrypto.h>
+#include <memory>
+#include "HeicCompositeStream.h"
+#include "HeicEncoderInfoManager.h"
#include "common/CameraDeviceBase.h"
+#include "system/camera_metadata.h"
#include "utils/ExifUtils.h"
#include "utils/SessionConfigurationUtils.h"
#include "utils/Utils.h"
-#include "HeicEncoderInfoManager.h"
-#include "HeicCompositeStream.h"
using aidl::android::hardware::camera::device::CameraBlob;
using aidl::android::hardware::camera::device::CameraBlobId;
+namespace flags = com::android::internal::camera::flags;
+
namespace android {
namespace camera3 {
HeicCompositeStream::HeicCompositeStream(sp<CameraDeviceBase> device,
- wp<hardware::camera2::ICameraDeviceCallbacks> cb) :
- CompositeStream(device, cb),
- mUseHeic(false),
- mNumOutputTiles(1),
- mOutputWidth(0),
- mOutputHeight(0),
- mMaxHeicBufferSize(0),
- mGridWidth(HeicEncoderInfoManager::kGridWidth),
- mGridHeight(HeicEncoderInfoManager::kGridHeight),
- mGridRows(1),
- mGridCols(1),
- mUseGrid(false),
- mAppSegmentStreamId(-1),
- mAppSegmentSurfaceId(-1),
- mMainImageStreamId(-1),
- mMainImageSurfaceId(-1),
- mYuvBufferAcquired(false),
- mStreamSurfaceListener(new StreamSurfaceListener()),
- mDequeuedOutputBufferCnt(0),
- mCodecOutputCounter(0),
- mQuality(-1),
- mGridTimestampUs(0),
- mStatusId(StatusTracker::NO_STATUS_ID) {
+ wp<hardware::camera2::ICameraDeviceCallbacks> cb)
+ : CompositeStream(device, cb),
+ mUseHeic(false),
+ mNumOutputTiles(1),
+ mNumGainmapOutputTiles(1),
+ mOutputWidth(0),
+ mOutputHeight(0),
+ mGainmapOutputWidth(0),
+ mGainmapOutputHeight(0),
+ mMaxHeicBufferSize(0),
+ mGridWidth(HeicEncoderInfoManager::kGridWidth),
+ mGridHeight(HeicEncoderInfoManager::kGridHeight),
+ mGainmapGridWidth(HeicEncoderInfoManager::kGridWidth),
+ mGainmapGridHeight(HeicEncoderInfoManager::kGridHeight),
+ mGridRows(1),
+ mGridCols(1),
+ mGainmapGridRows(1),
+ mGainmapGridCols(1),
+ mUseGrid(false),
+ mGainmapUseGrid(false),
+ mAppSegmentStreamId(-1),
+ mAppSegmentSurfaceId(-1),
+ mMainImageStreamId(-1),
+ mMainImageSurfaceId(-1),
+ mYuvBufferAcquired(false),
+ mStreamSurfaceListener(new StreamSurfaceListener()),
+ mDequeuedOutputBufferCnt(0),
+ mCodecOutputCounter(0),
+ mCodecGainmapOutputCounter(0),
+ mQuality(-1),
+ mGridTimestampUs(0),
+ mStatusId(StatusTracker::NO_STATUS_ID) {
+ mStaticInfo = device->info();
+ camera_metadata_entry halHeicSupport = mStaticInfo.find(ANDROID_HEIC_INFO_SUPPORTED);
+ if (halHeicSupport.count == 1 &&
+ halHeicSupport.data.u8[0] == ANDROID_HEIC_INFO_SUPPORTED_TRUE) {
+ // The camera device supports the HEIC stream combination,
+ // use the standard stream combintion.
+ mAppSegmentSupported = true;
+ }
}
HeicCompositeStream::~HeicCompositeStream() {
@@ -84,6 +109,7 @@
mInputAppSegmentBuffers.clear();
mCodecOutputBuffers.clear();
+ mGainmapCodecOutputBuffers.clear();
mAppSegmentStreamId = -1;
mAppSegmentSurfaceId = -1;
@@ -97,7 +123,8 @@
}
bool HeicCompositeStream::isHeicCompositeStreamInfo(const OutputStreamInfo& streamInfo) {
- return ((streamInfo.dataSpace == static_cast<android_dataspace_t>(HAL_DATASPACE_HEIF)) &&
+ return ((streamInfo.dataSpace == static_cast<android_dataspace_t>(HAL_DATASPACE_HEIF) ||
+ (streamInfo.dataSpace == static_cast<android_dataspace_t>(kUltraHDRDataSpace))) &&
(streamInfo.format == HAL_PIXEL_FORMAT_BLOB));
}
@@ -120,23 +147,38 @@
return false;
}
- return ((format == HAL_PIXEL_FORMAT_BLOB) && (dataspace == HAL_DATASPACE_HEIF));
+ return ((format == HAL_PIXEL_FORMAT_BLOB) && ((dataspace == HAL_DATASPACE_HEIF) ||
+ (dataspace == static_cast<int>(kUltraHDRDataSpace))));
}
-status_t HeicCompositeStream::createInternalStreams(const std::vector<sp<Surface>>& consumers,
+status_t HeicCompositeStream::createInternalStreams(const std::vector<SurfaceHolder>& consumers,
bool /*hasDeferredConsumer*/, uint32_t width, uint32_t height, int format,
camera_stream_rotation_t rotation, int *id, const std::string& physicalCameraId,
const std::unordered_set<int32_t> &sensorPixelModesUsed,
std::vector<int> *surfaceIds,
int /*streamSetId*/, bool /*isShared*/, int32_t colorSpace,
int64_t /*dynamicProfile*/, int64_t /*streamUseCase*/, bool useReadoutTimestamp) {
+
sp<CameraDeviceBase> device = mDevice.promote();
if (!device.get()) {
ALOGE("%s: Invalid camera device!", __FUNCTION__);
return NO_INIT;
}
- status_t res = initializeCodec(width, height, device);
+ ANativeWindow* anw = consumers[0].mSurface.get();
+ int dataspace;
+ status_t res;
+ if ((res = anw->query(anw, NATIVE_WINDOW_DEFAULT_DATASPACE, &dataspace)) != OK) {
+ ALOGE("%s: Failed to query Surface dataspace: %s (%d)", __FUNCTION__, strerror(-res),
+ res);
+ return res;
+ }
+ if ((dataspace == static_cast<int>(kUltraHDRDataSpace)) && flags::camera_heif_gainmap()) {
+ mHDRGainmapEnabled = true;
+ mInternalDataSpace = static_cast<android_dataspace_t>(HAL_DATASPACE_BT2020_HLG);
+ }
+
+ res = initializeCodec(width, height, device);
if (res != OK) {
ALOGE("%s: Failed to initialize HEIC/HEVC codec: %s (%d)",
__FUNCTION__, strerror(-res), res);
@@ -144,42 +186,48 @@
}
#if COM_ANDROID_GRAPHICS_LIBGUI_FLAGS(WB_CONSUMER_BASE_OWNS_BQ)
- mAppSegmentConsumer = new CpuConsumer(kMaxAcquiredAppSegment);
- mAppSegmentConsumer->setFrameAvailableListener(this);
- mAppSegmentConsumer->setName(String8("Camera3-HeicComposite-AppSegmentStream"));
- mAppSegmentSurface = mAppSegmentConsumer->getSurface();
- sp<IGraphicBufferProducer> producer = mAppSegmentSurface->getIGraphicBufferProducer();
+ if (mAppSegmentSupported) {
+ mAppSegmentConsumer = new CpuConsumer(kMaxAcquiredAppSegment);
+ mAppSegmentConsumer->setFrameAvailableListener(this);
+ mAppSegmentConsumer->setName(String8("Camera3-HeicComposite-AppSegmentStream"));
+ mAppSegmentSurface = mAppSegmentConsumer->getSurface();
+ }
+ sp<IGraphicBufferProducer> producer = mAppSegmentSurface.get() != nullptr ?
+ mAppSegmentSurface->getIGraphicBufferProducer() : nullptr;
#else
sp<IGraphicBufferProducer> producer;
sp<IGraphicBufferConsumer> consumer;
- BufferQueue::createBufferQueue(&producer, &consumer);
- mAppSegmentConsumer = new CpuConsumer(consumer, kMaxAcquiredAppSegment);
- mAppSegmentConsumer->setFrameAvailableListener(this);
- mAppSegmentConsumer->setName(String8("Camera3-HeicComposite-AppSegmentStream"));
- mAppSegmentSurface = new Surface(producer);
+ if (mAppSegmentSupported) {
+ BufferQueue::createBufferQueue(&producer, &consumer);
+ mAppSegmentConsumer = new CpuConsumer(consumer, kMaxAcquiredAppSegment);
+ mAppSegmentConsumer->setFrameAvailableListener(this);
+ mAppSegmentConsumer->setName(String8("Camera3-HeicComposite-AppSegmentStream"));
+ mAppSegmentSurface = new Surface(producer);
+ }
#endif // COM_ANDROID_GRAPHICS_LIBGUI_FLAGS(WB_CONSUMER_BASE_OWNS_BQ)
- mStaticInfo = device->info();
-
- res = device->createStream(mAppSegmentSurface, mAppSegmentMaxSize, 1, format,
- kAppSegmentDataSpace, rotation, &mAppSegmentStreamId, physicalCameraId,
- sensorPixelModesUsed, surfaceIds, camera3::CAMERA3_STREAM_SET_ID_INVALID,
- /*isShared*/false, /*isMultiResolution*/false,
- /*consumerUsage*/0, ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD,
- ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_DEFAULT,
- OutputConfiguration::TIMESTAMP_BASE_DEFAULT,
- OutputConfiguration::MIRROR_MODE_AUTO,
- colorSpace,
- useReadoutTimestamp);
- if (res == OK) {
- mAppSegmentSurfaceId = (*surfaceIds)[0];
- } else {
- ALOGE("%s: Failed to create JPEG App segment stream: %s (%d)", __FUNCTION__,
- strerror(-res), res);
- return res;
+ if (mAppSegmentSupported) {
+ std::vector<int> sourceSurfaceId;
+ res = device->createStream(mAppSegmentSurface, mAppSegmentMaxSize, 1, format,
+ kAppSegmentDataSpace, rotation, &mAppSegmentStreamId, physicalCameraId,
+ sensorPixelModesUsed, &sourceSurfaceId, camera3::CAMERA3_STREAM_SET_ID_INVALID,
+ /*isShared*/false, /*isMultiResolution*/false,
+ /*consumerUsage*/0, ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD,
+ ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_DEFAULT,
+ OutputConfiguration::TIMESTAMP_BASE_DEFAULT,
+ OutputConfiguration::MIRROR_MODE_AUTO,
+ colorSpace,
+ useReadoutTimestamp);
+ if (res == OK) {
+ mAppSegmentSurfaceId = sourceSurfaceId[0];
+ } else {
+ ALOGE("%s: Failed to create JPEG App segment stream: %s (%d)", __FUNCTION__,
+ strerror(-res), res);
+ return res;
+ }
}
- if (!mUseGrid) {
+ if (!mUseGrid && !mHDRGainmapEnabled) {
res = mCodec->createInputSurface(&producer);
if (res != OK) {
ALOGE("%s: Failed to create input surface for Heic codec: %s (%d)",
@@ -206,21 +254,32 @@
return res;
}
- std::vector<int> sourceSurfaceId;
- //Use YUV_888 format if framework tiling is needed.
- int srcStreamFmt = mUseGrid ? HAL_PIXEL_FORMAT_YCbCr_420_888 :
- HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED;
- res = device->createStream(mMainImageSurface, width, height, srcStreamFmt, kHeifDataSpace,
- rotation, id, physicalCameraId, sensorPixelModesUsed, &sourceSurfaceId,
+ if (mHDRGainmapEnabled) {
+ res = mGainmapCodec->start();
+ if (res != OK) {
+ ALOGE("%s: Failed to start gainmap codec: %s (%d)", __FUNCTION__,
+ strerror(-res), res);
+ return res;
+ }
+ }
+
+ //Use YUV_420 format if framework tiling is needed.
+ int srcStreamFmt = mHDRGainmapEnabled ?
+ static_cast<android_pixel_format_t>(HAL_PIXEL_FORMAT_YCBCR_P010) : mUseGrid ?
+ HAL_PIXEL_FORMAT_YCbCr_420_888 : HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED;
+ res = device->createStream(mMainImageSurface, width, height, srcStreamFmt, mInternalDataSpace,
+ rotation, id, physicalCameraId, sensorPixelModesUsed, surfaceIds,
camera3::CAMERA3_STREAM_SET_ID_INVALID, /*isShared*/false, /*isMultiResolution*/false,
- /*consumerUsage*/0, ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD,
+ /*consumerUsage*/0, mHDRGainmapEnabled ?
+ ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_HLG10 :
+ ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD,
ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_DEFAULT,
OutputConfiguration::TIMESTAMP_BASE_DEFAULT,
OutputConfiguration::MIRROR_MODE_AUTO,
colorSpace,
useReadoutTimestamp);
if (res == OK) {
- mMainImageSurfaceId = sourceSurfaceId[0];
+ mMainImageSurfaceId = (*surfaceIds)[0];
mMainImageStreamId = *id;
} else {
ALOGE("%s: Failed to create main image stream: %s (%d)", __FUNCTION__,
@@ -228,7 +287,7 @@
return res;
}
- mOutputSurface = consumers[0];
+ mOutputSurface = consumers[0].mSurface;
res = registerCompositeStreamListener(mMainImageStreamId);
if (res != OK) {
ALOGE("%s: Failed to register HAL main image stream: %s (%d)", __FUNCTION__,
@@ -236,11 +295,13 @@
return res;
}
- res = registerCompositeStreamListener(mAppSegmentStreamId);
- if (res != OK) {
- ALOGE("%s: Failed to register HAL app segment stream: %s (%d)", __FUNCTION__,
- strerror(-res), res);
- return res;
+ if (mAppSegmentSupported) {
+ res = registerCompositeStreamListener(mAppSegmentStreamId);
+ if (res != OK) {
+ ALOGE("%s: Failed to register HAL app segment stream: %s (%d)", __FUNCTION__,
+ strerror(-res), res);
+ return res;
+ }
}
initCopyRowFunction(width);
@@ -299,6 +360,9 @@
mCodecOutputBufferFrameNumbers.push(bufferInfo.mFrameNumber);
ALOGV("%s: [%" PRId64 "]: Adding main image frame number (%zu frame numbers in total)",
__FUNCTION__, bufferInfo.mFrameNumber, mMainImageFrameNumbers.size());
+ if (mHDRGainmapEnabled) {
+ mCodecGainmapOutputBufferFrameNumbers.push(bufferInfo.mFrameNumber);
+ }
} else if (bufferInfo.mStreamId == mAppSegmentStreamId) {
mAppSegmentFrameNumbers.push(bufferInfo.mFrameNumber);
ALOGV("%s: [%" PRId64 "]: Adding app segment frame number (%zu frame numbers in total)",
@@ -346,13 +410,13 @@
mInputAppSegmentBuffers.push_back(item.mTimestamp);
mInputReadyCondition.signal();
}
- } else if (item.mDataSpace == kHeifDataSpace) {
- ALOGV("%s: YUV_888 buffer with ts: %" PRIu64 " ms. arrived!",
+ } else if (item.mDataSpace == mInternalDataSpace) {
+ ALOGV("%s: YUV_420 buffer with ts: %" PRIu64 " ms. arrived!",
__func__, ns2ms(item.mTimestamp));
Mutex::Autolock l(mMutex);
- if (!mUseGrid) {
- ALOGE("%s: YUV_888 internal stream is only supported for HEVC tiling",
+ if (!mUseGrid && !mHDRGainmapEnabled) {
+ ALOGE("%s: YUV_420 internal stream is only supported for HEVC tiling",
__FUNCTION__);
return;
}
@@ -367,6 +431,7 @@
status_t HeicCompositeStream::getCompositeStreamInfo(const OutputStreamInfo &streamInfo,
const CameraMetadata& ch, std::vector<OutputStreamInfo>* compositeOutput /*out*/) {
+ bool gainmapEnabled = false;
if (compositeOutput == nullptr) {
return BAD_VALUE;
}
@@ -381,30 +446,44 @@
return OK;
}
- compositeOutput->insert(compositeOutput->end(), 2, streamInfo);
+ if (streamInfo.dataSpace == static_cast<android_dataspace_t>(kUltraHDRDataSpace)) {
+ gainmapEnabled = true;
+ }
- // JPEG APPS segments Blob stream info
- (*compositeOutput)[0].width = calcAppSegmentMaxSize(ch);
- (*compositeOutput)[0].height = 1;
- (*compositeOutput)[0].format = HAL_PIXEL_FORMAT_BLOB;
- (*compositeOutput)[0].dataSpace = kAppSegmentDataSpace;
- (*compositeOutput)[0].consumerUsage = GRALLOC_USAGE_SW_READ_OFTEN;
+ compositeOutput->clear();
+ compositeOutput->push_back({});
// YUV/IMPLEMENTATION_DEFINED stream info
- (*compositeOutput)[1].width = streamInfo.width;
- (*compositeOutput)[1].height = streamInfo.height;
- (*compositeOutput)[1].format = useGrid ? HAL_PIXEL_FORMAT_YCbCr_420_888 :
- HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED;
- (*compositeOutput)[1].dataSpace = kHeifDataSpace;
- (*compositeOutput)[1].consumerUsage = useHeic ? GRALLOC_USAGE_HW_IMAGE_ENCODER :
+ (*compositeOutput)[0].width = streamInfo.width;
+ (*compositeOutput)[0].height = streamInfo.height;
+ (*compositeOutput)[0].format = gainmapEnabled ?
+ static_cast<android_pixel_format_t>(HAL_PIXEL_FORMAT_YCBCR_P010) : useGrid ?
+ HAL_PIXEL_FORMAT_YCbCr_420_888 : HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED;
+ (*compositeOutput)[0].dataSpace = gainmapEnabled ?
+ static_cast<android_dataspace_t>(HAL_DATASPACE_BT2020_HLG) : kHeifDataSpace;
+ (*compositeOutput)[0].consumerUsage = useHeic ? GRALLOC_USAGE_HW_IMAGE_ENCODER :
useGrid ? GRALLOC_USAGE_SW_READ_OFTEN : GRALLOC_USAGE_HW_VIDEO_ENCODER;
+
+ camera_metadata_ro_entry halHeicSupport = ch.find(ANDROID_HEIC_INFO_SUPPORTED);
+ if (halHeicSupport.count == 1 &&
+ halHeicSupport.data.u8[0] == ANDROID_HEIC_INFO_SUPPORTED_TRUE) {
+
+ compositeOutput->push_back({});
+ // JPEG APPS segments Blob stream info
+ (*compositeOutput)[1].width = calcAppSegmentMaxSize(ch);
+ (*compositeOutput)[1].height = 1;
+ (*compositeOutput)[1].format = HAL_PIXEL_FORMAT_BLOB;
+ (*compositeOutput)[1].dataSpace = kAppSegmentDataSpace;
+ (*compositeOutput)[1].consumerUsage = GRALLOC_USAGE_SW_READ_OFTEN;
+ }
+
return NO_ERROR;
}
bool HeicCompositeStream::isSizeSupportedByHeifEncoder(int32_t width, int32_t height,
- bool* useHeic, bool* useGrid, int64_t* stall, AString* hevcName) {
- static HeicEncoderInfoManager& heicManager = HeicEncoderInfoManager::getInstance();
+ bool* useHeic, bool* useGrid, int64_t* stall, AString* hevcName, bool allowSWCodec) {
+ static HeicEncoderInfoManager& heicManager = HeicEncoderInfoManager::getInstance(allowSWCodec);
return heicManager.isSizeSupported(width, height, useHeic, useGrid, stall, hevcName);
}
@@ -421,7 +500,7 @@
}
void HeicCompositeStream::onHeicOutputFrameAvailable(
- const CodecOutputBufferInfo& outputBufferInfo) {
+ const CodecOutputBufferInfo& outputBufferInfo, bool isGainmap) {
Mutex::Autolock l(mMutex);
ALOGV("%s: index %d, offset %d, size %d, time %" PRId64 ", flags 0x%x",
@@ -431,31 +510,34 @@
if (!mErrorState) {
if ((outputBufferInfo.size > 0) &&
((outputBufferInfo.flags & MediaCodec::BUFFER_FLAG_CODECCONFIG) == 0)) {
- mCodecOutputBuffers.push_back(outputBufferInfo);
+ isGainmap ? mGainmapCodecOutputBuffers.push_back(outputBufferInfo) :
+ mCodecOutputBuffers.push_back(outputBufferInfo);
mInputReadyCondition.signal();
} else {
ALOGV("%s: Releasing output buffer: size %d flags: 0x%x ", __FUNCTION__,
outputBufferInfo.size, outputBufferInfo.flags);
- mCodec->releaseOutputBuffer(outputBufferInfo.index);
+ isGainmap ? mGainmapCodec->releaseOutputBuffer(outputBufferInfo.index) :
+ mCodec->releaseOutputBuffer(outputBufferInfo.index);
}
} else {
- mCodec->releaseOutputBuffer(outputBufferInfo.index);
+ isGainmap ? mGainmapCodec->releaseOutputBuffer(outputBufferInfo.index) :
+ mCodec->releaseOutputBuffer(outputBufferInfo.index);
}
}
-void HeicCompositeStream::onHeicInputFrameAvailable(int32_t index) {
+void HeicCompositeStream::onHeicInputFrameAvailable(int32_t index, bool isGainmap) {
Mutex::Autolock l(mMutex);
- if (!mUseGrid) {
+ if (!mUseGrid && !mHDRGainmapEnabled) {
ALOGE("%s: Codec YUV input mode must only be used for Hevc tiling mode", __FUNCTION__);
return;
}
- mCodecInputBuffers.push_back(index);
+ isGainmap ? mGainmapCodecInputBuffers.push_back(index) : mCodecInputBuffers.push_back(index);
mInputReadyCondition.signal();
}
-void HeicCompositeStream::onHeicFormatChanged(sp<AMessage>& newFormat) {
+void HeicCompositeStream::onHeicGainmapFormatChanged(sp<AMessage>& newFormat) {
if (newFormat == nullptr) {
ALOGE("%s: newFormat must not be null!", __FUNCTION__);
return;
@@ -470,6 +552,66 @@
// For HEVC codec, below keys need to be filled out or overwritten so that the
// muxer can handle them as HEIC output image.
newFormat->setString(KEY_MIME, mimeHeic);
+ newFormat->setInt32(KEY_WIDTH, mGainmapOutputWidth);
+ newFormat->setInt32(KEY_HEIGHT, mGainmapOutputHeight);
+ }
+
+ if (mGainmapUseGrid) {
+ int32_t gridRows, gridCols, tileWidth, tileHeight;
+ if (newFormat->findInt32(KEY_GRID_ROWS, &gridRows) &&
+ newFormat->findInt32(KEY_GRID_COLUMNS, &gridCols) &&
+ newFormat->findInt32(KEY_TILE_WIDTH, &tileWidth) &&
+ newFormat->findInt32(KEY_TILE_HEIGHT, &tileHeight)) {
+ mGainmapGridWidth = tileWidth;
+ mGainmapGridHeight = tileHeight;
+ mGainmapGridRows = gridRows;
+ mGainmapGridCols = gridCols;
+ } else {
+ newFormat->setInt32(KEY_TILE_WIDTH, mGainmapGridWidth);
+ newFormat->setInt32(KEY_TILE_HEIGHT, mGainmapGridHeight);
+ newFormat->setInt32(KEY_GRID_ROWS, mGainmapGridRows);
+ newFormat->setInt32(KEY_GRID_COLUMNS, mGainmapGridCols);
+ }
+ int32_t left, top, right, bottom;
+ if (newFormat->findRect("crop", &left, &top, &right, &bottom)) {
+ newFormat->setRect("crop", 0, 0, mGainmapOutputWidth - 1, mGainmapOutputHeight - 1);
+ }
+ }
+ newFormat->setInt32(KEY_IS_DEFAULT, 1 /*isPrimary*/);
+
+ int32_t gridRows, gridCols;
+ if (newFormat->findInt32(KEY_GRID_ROWS, &gridRows) &&
+ newFormat->findInt32(KEY_GRID_COLUMNS, &gridCols)) {
+ mNumGainmapOutputTiles = gridRows * gridCols;
+ } else {
+ mNumGainmapOutputTiles = 1;
+ }
+
+ mGainmapFormat = newFormat;
+
+ ALOGV("%s: mNumOutputTiles is %zu", __FUNCTION__, mNumOutputTiles);
+ mInputReadyCondition.signal();
+}
+
+
+void HeicCompositeStream::onHeicFormatChanged(sp<AMessage>& newFormat, bool isGainmap) {
+ if (newFormat == nullptr) {
+ ALOGE("%s: newFormat must not be null!", __FUNCTION__);
+ return;
+ }
+
+ if (isGainmap) {
+ return onHeicGainmapFormatChanged(newFormat);
+ }
+ Mutex::Autolock l(mMutex);
+
+ AString mime;
+ AString mimeHeic(MIMETYPE_IMAGE_ANDROID_HEIC);
+ newFormat->findString(KEY_MIME, &mime);
+ if (mime != mimeHeic) {
+ // For HEVC codec, below keys need to be filled out or overwritten so that the
+ // muxer can handle them as HEIC output image.
+ newFormat->setString(KEY_MIME, mimeHeic);
newFormat->setInt32(KEY_WIDTH, mOutputWidth);
newFormat->setInt32(KEY_HEIGHT, mOutputHeight);
}
@@ -577,10 +719,12 @@
status_t HeicCompositeStream::insertGbp(SurfaceMap* /*out*/outSurfaceMap,
Vector<int32_t>* /*out*/outputStreamIds, int32_t* /*out*/currentStreamId) {
- if (outSurfaceMap->find(mAppSegmentStreamId) == outSurfaceMap->end()) {
- outputStreamIds->push_back(mAppSegmentStreamId);
+ if (mAppSegmentSupported) {
+ if (outSurfaceMap->find(mAppSegmentStreamId) == outSurfaceMap->end()) {
+ outputStreamIds->push_back(mAppSegmentStreamId);
+ }
+ (*outSurfaceMap)[mAppSegmentStreamId].push_back(mAppSegmentSurfaceId);
}
- (*outSurfaceMap)[mAppSegmentStreamId].push_back(mAppSegmentSurfaceId);
if (outSurfaceMap->find(mMainImageStreamId) == outSurfaceMap->end()) {
outputStreamIds->push_back(mMainImageStreamId);
@@ -600,7 +744,9 @@
return BAD_VALUE;
}
- compositeStreamIds->push_back(mAppSegmentStreamId);
+ if (mAppSegmentSupported) {
+ compositeStreamIds->push_back(mAppSegmentStreamId);
+ }
compositeStreamIds->push_back(mMainImageStreamId);
return OK;
@@ -762,6 +908,31 @@
mCodecOutputBuffers.erase(it);
}
+ while (!mGainmapCodecOutputBuffers.empty()) {
+ auto it = mGainmapCodecOutputBuffers.begin();
+ // Assume encoder input to output is FIFO, use a queue to look up
+ // frameNumber when handling codec outputs.
+ int64_t bufferFrameNumber = -1;
+ if (mCodecGainmapOutputBufferFrameNumbers.empty()) {
+ ALOGV("%s: Failed to find buffer frameNumber for gainmap codec output buffer!",
+ __FUNCTION__);
+ break;
+ } else {
+ // Direct mapping between camera frame number and codec timestamp (in us).
+ bufferFrameNumber = mCodecGainmapOutputBufferFrameNumbers.front();
+ mCodecGainmapOutputCounter++;
+ if (mCodecGainmapOutputCounter == mNumGainmapOutputTiles) {
+ mCodecGainmapOutputBufferFrameNumbers.pop();
+ mCodecGainmapOutputCounter = 0;
+ }
+
+ mPendingInputFrames[bufferFrameNumber].gainmapCodecOutputBuffers.push_back(*it);
+ ALOGV("%s: [%" PRId64 "]: Pushing gainmap codecOutputBuffers (frameNumber %" PRId64 ")",
+ __FUNCTION__, bufferFrameNumber, it->timeUs);
+ }
+ mGainmapCodecOutputBuffers.erase(it);
+ }
+
while (!mCaptureResults.empty()) {
auto it = mCaptureResults.begin();
// Negative frame number indicates that something went wrong during the capture result
@@ -772,6 +943,9 @@
if (mPendingInputFrames[frameNumber].timestamp == it->first) {
mPendingInputFrames[frameNumber].result =
std::make_unique<CameraMetadata>(std::get<1>(it->second));
+ if (!mAppSegmentSupported) {
+ mPendingInputFrames[frameNumber].exifError = true;
+ }
} else {
ALOGE("%s: Capture result frameNumber/timestamp mapping changed between "
"shutter and capture result! before: %" PRId64 ", after: %" PRId64,
@@ -825,6 +999,27 @@
break;
}
}
+
+ // Distribute gainmap codec input buffers to be filled out from the generated gainmap image
+ for (auto it = mPendingInputFrames.begin();
+ it != mPendingInputFrames.end() && mGainmapCodecInputBuffers.size() > 0; it++) {
+ InputFrame& inputFrame(it->second);
+ if (inputFrame.gainmapCodecInputCounter < mGainmapGridRows * mGainmapGridCols) {
+ // Available input tiles that are required for the current input
+ // image.
+ size_t newInputTiles = std::min(mGainmapCodecInputBuffers.size(),
+ mGainmapGridRows * mGainmapGridCols - inputFrame.gainmapCodecInputCounter);
+ for (size_t i = 0; i < newInputTiles; i++) {
+ CodecInputBufferInfo inputInfo = { mGainmapCodecInputBuffers[0],
+ mGridTimestampUs++, inputFrame.gainmapCodecInputCounter };
+ inputFrame.gainmapCodecInputBuffers.push_back(inputInfo);
+
+ mGainmapCodecInputBuffers.erase(mGainmapCodecInputBuffers.begin());
+ inputFrame.gainmapCodecInputCounter++;
+ }
+ break;
+ }
+ }
}
bool HeicCompositeStream::getNextReadyInputLocked(int64_t *frameNumber /*out*/) {
@@ -845,7 +1040,8 @@
(it.second.appSegmentBuffer.data != nullptr || it.second.exifError) &&
!it.second.appSegmentWritten && it.second.result != nullptr &&
it.second.muxer != nullptr;
- bool codecOutputReady = !it.second.codecOutputBuffers.empty();
+ bool codecOutputReady = !it.second.codecOutputBuffers.empty() ||
+ !it.second.gainmapCodecOutputBuffers.empty();
bool codecInputReady = (it.second.yuvBuffer.data != nullptr) &&
(!it.second.codecInputBuffers.empty());
bool hasOutputBuffer = it.second.muxer != nullptr ||
@@ -856,6 +1052,9 @@
if (it.second.format == nullptr && mFormat != nullptr) {
it.second.format = mFormat->dup();
}
+ if (it.second.gainmapFormat == nullptr && mGainmapFormat != nullptr){
+ it.second.gainmapFormat = mGainmapFormat->dup();
+ }
newInputAvailable = true;
break;
}
@@ -886,11 +1085,15 @@
(inputFrame.appSegmentBuffer.data != nullptr || inputFrame.exifError) &&
!inputFrame.appSegmentWritten && inputFrame.result != nullptr &&
inputFrame.muxer != nullptr;
- bool codecOutputReady = inputFrame.codecOutputBuffers.size() > 0;
+ bool codecOutputReady = inputFrame.codecOutputBuffers.size() > 0 ||
+ inputFrame.gainmapCodecOutputBuffers.size() > 0;
bool codecInputReady = inputFrame.yuvBuffer.data != nullptr &&
!inputFrame.codecInputBuffers.empty();
+ bool gainmapCodecInputReady = inputFrame.gainmapImage.get() != nullptr &&
+ !inputFrame.gainmapCodecInputBuffers.empty();
bool hasOutputBuffer = inputFrame.muxer != nullptr ||
(mDequeuedOutputBufferCnt < kMaxOutputSurfaceProducerCount);
+ bool hasGainmapMetadata = !inputFrame.isoGainmapMetadata.empty();
ALOGV("%s: [%" PRId64 "]: appSegmentReady %d, codecOutputReady %d, codecInputReady %d,"
" dequeuedOutputBuffer %d, timestamp %" PRId64, __FUNCTION__, frameNumber,
@@ -899,6 +1102,15 @@
// Handle inputs for Hevc tiling
if (codecInputReady) {
+ if (mHDRGainmapEnabled && (inputFrame.baseBuffer.get() == nullptr)) {
+ auto res = generateBaseImageAndGainmap(inputFrame);
+ if (res != OK) {
+ ALOGE("%s: Error generating SDR base image and HDR gainmap: %s (%d)", __FUNCTION__,
+ strerror(-res), res);
+ return res;
+ }
+ }
+
res = processCodecInputFrame(inputFrame);
if (res != OK) {
ALOGE("%s: Failed to process codec input frame: %s (%d)", __FUNCTION__,
@@ -907,6 +1119,15 @@
}
}
+ if (gainmapCodecInputReady) {
+ res = processCodecGainmapInputFrame(inputFrame);
+ if (res != OK) {
+ ALOGE("%s: Failed to process gainmap codec input frame: %s (%d)", __FUNCTION__,
+ strerror(-res), res);
+ return res;
+ }
+ }
+
if (!(codecOutputReady && hasOutputBuffer) && !appSegmentReady) {
return OK;
}
@@ -923,6 +1144,31 @@
}
}
+ // Write the HDR gainmap metadata
+ if (hasGainmapMetadata) {
+ uint8_t kGainmapMetaMarker[] = {'t', 'm', 'a', 'p', '\0', '\0'};
+ sp<ABuffer> aBuffer =
+ new ABuffer(inputFrame.isoGainmapMetadata.size() + sizeof(kGainmapMetaMarker));
+ memcpy(aBuffer->data(), kGainmapMetaMarker, sizeof(kGainmapMetaMarker));
+ memcpy(aBuffer->data() + sizeof(kGainmapMetaMarker), inputFrame.isoGainmapMetadata.data(),
+ inputFrame.isoGainmapMetadata.size());
+
+ aBuffer->meta()->setInt32(KEY_COLOR_FORMAT, kCodecColorFormat);
+ aBuffer->meta()->setInt32("color-primaries", kCodecColorPrimaries);
+ aBuffer->meta()->setInt32("color-transfer", kCodecColorTransfer);
+ aBuffer->meta()->setInt32("color-matrix", kCodecColorMatrix);
+ aBuffer->meta()->setInt32("color-range", kCodecColorRange);
+ auto res = inputFrame.muxer->writeSampleData(aBuffer, inputFrame.trackIndex,
+ inputFrame.timestamp,
+ MediaCodec::BUFFER_FLAG_MUXER_DATA);
+ if (res != OK) {
+ ALOGE("%s: Failed to write HDR gainmap metadata to muxer: %s (%d)",
+ __FUNCTION__, strerror(-res), res);
+ return res;
+ }
+ inputFrame.isoGainmapMetadata.clear();
+ }
+
// Write JPEG APP segments data to the muxer.
if (appSegmentReady) {
res = processAppSegment(frameNumber, inputFrame);
@@ -943,7 +1189,17 @@
}
}
- if (inputFrame.pendingOutputTiles == 0) {
+ // Write media codec gainmap bitstream buffers to muxer.
+ while (!inputFrame.gainmapCodecOutputBuffers.empty()) {
+ res = processOneCodecGainmapOutputFrame(frameNumber, inputFrame);
+ if (res != OK) {
+ ALOGE("%s: Failed to process codec gainmap output frame: %s (%d)", __FUNCTION__,
+ strerror(-res), res);
+ return res;
+ }
+ }
+
+ if ((inputFrame.pendingOutputTiles == 0) && (inputFrame.gainmapPendingOutputTiles == 0)) {
if (inputFrame.appSegmentWritten) {
res = processCompletedInputFrame(frameNumber, inputFrame);
if (res != OK) {
@@ -1001,6 +1257,16 @@
inputFrame.trackIndex = trackId;
inputFrame.pendingOutputTiles = mNumOutputTiles;
+ if (inputFrame.gainmapFormat.get() != nullptr) {
+ trackId = inputFrame.muxer->addTrack(inputFrame.gainmapFormat);
+ if (trackId < 0) {
+ ALOGE("%s: Failed to addTrack to the muxer: %zd", __FUNCTION__, trackId);
+ return NO_INIT;
+ }
+ inputFrame.gainmapTrackIndex = trackId;
+ inputFrame.gainmapPendingOutputTiles = mNumGainmapOutputTiles;
+ }
+
res = inputFrame.muxer->start();
if (res != OK) {
ALOGE("%s: Failed to start MediaMuxer: %s (%d)",
@@ -1085,9 +1351,101 @@
inputFrame.appSegmentWritten = true;
// Release the buffer now so any pending input app segments can be processed
- mAppSegmentConsumer->unlockBuffer(inputFrame.appSegmentBuffer);
- inputFrame.appSegmentBuffer.data = nullptr;
- inputFrame.exifError = false;
+ if (!inputFrame.exifError) {
+ mAppSegmentConsumer->unlockBuffer(inputFrame.appSegmentBuffer);
+ inputFrame.appSegmentBuffer.data = nullptr;
+ inputFrame.exifError = false;
+ }
+
+ return OK;
+}
+
+status_t HeicCompositeStream::generateBaseImageAndGainmap(InputFrame &inputFrame) {
+ ultrahdr::JpegR jpegR(nullptr /*gles ctx*/, kGainmapScale);
+ inputFrame.baseBuffer = std::make_unique<ultrahdr::uhdr_raw_image_ext_t>(
+ kUltraHdrOutputFmt, kUltraHdrOutputGamut, kUltraHdrInputTransfer, kUltraHdrOutputRange,
+ inputFrame.yuvBuffer.width, inputFrame.yuvBuffer.height, 8/*stride*/);
+
+ uhdr_raw_image_t hdr_intent;
+ hdr_intent.fmt = kUltraHdrInputFmt;
+ hdr_intent.cg = kUltraHdrInputGamut;
+ hdr_intent.ct = kUltraHdrInputTransfer;
+ hdr_intent.range = kUltraHdrInputRange;
+ hdr_intent.w = inputFrame.yuvBuffer.width;
+ hdr_intent.h = inputFrame.yuvBuffer.height;
+ hdr_intent.planes[UHDR_PLANE_Y] = inputFrame.yuvBuffer.data;
+ hdr_intent.planes[UHDR_PLANE_UV] = inputFrame.yuvBuffer.dataCb;
+ hdr_intent.planes[UHDR_PLANE_V] = nullptr;
+ // libUltraHDR expects the stride in pixels
+ hdr_intent.stride[UHDR_PLANE_Y] = inputFrame.yuvBuffer.stride / 2;
+ hdr_intent.stride[UHDR_PLANE_UV] = inputFrame.yuvBuffer.chromaStride / 2;
+ hdr_intent.stride[UHDR_PLANE_V] = 0;
+ auto res = jpegR.toneMap(&hdr_intent, inputFrame.baseBuffer.get());
+ if (res.error_code == UHDR_CODEC_OK) {
+ ALOGV("%s: Base image tonemapped successfully", __FUNCTION__);
+ } else {
+ ALOGE("%s: Failed during HDR to SDR tonemap: %d", __FUNCTION__, res.error_code);
+ return BAD_VALUE;
+ }
+
+ inputFrame.baseImage = std::make_unique<CpuConsumer::LockedBuffer>();
+ *inputFrame.baseImage = inputFrame.yuvBuffer;
+ inputFrame.baseImage->data = reinterpret_cast<uint8_t*>(
+ inputFrame.baseBuffer->planes[UHDR_PLANE_Y]);
+ inputFrame.baseImage->dataCb = reinterpret_cast<uint8_t*>(
+ inputFrame.baseBuffer->planes[UHDR_PLANE_U]);
+ inputFrame.baseImage->dataCr = reinterpret_cast<uint8_t*>(
+ inputFrame.baseBuffer->planes[UHDR_PLANE_V]);
+ inputFrame.baseImage->chromaStep = 1;
+ inputFrame.baseImage->stride = inputFrame.baseBuffer->stride[UHDR_PLANE_Y];
+ inputFrame.baseImage->chromaStride = inputFrame.baseBuffer->stride[UHDR_PLANE_UV];
+ inputFrame.baseImage->dataSpace = HAL_DATASPACE_V0_JFIF;
+
+ ultrahdr::uhdr_gainmap_metadata_ext_t metadata;
+ res = jpegR.generateGainMap(inputFrame.baseBuffer.get(), &hdr_intent, &metadata,
+ inputFrame.gainmap, false /*sdr_is_601*/, true /*use_luminance*/);
+ if (res.error_code == UHDR_CODEC_OK) {
+ ALOGV("%s: HDR gainmap generated successfully!", __FUNCTION__);
+ } else {
+ ALOGE("%s: Failed HDR gainmap: %d", __FUNCTION__, res.error_code);
+ return BAD_VALUE;
+ }
+ // Ensure the gainmap U/V planes are all 0
+ inputFrame.gainmapChroma = std::make_unique<uint8_t[]>(
+ inputFrame.gainmap->w * inputFrame.gainmap->h / 2);
+ memset(inputFrame.gainmapChroma.get(), 0, inputFrame.gainmap->w * inputFrame.gainmap->h / 2);
+
+ ultrahdr::uhdr_gainmap_metadata_frac iso_secondary_metadata;
+ res = ultrahdr::uhdr_gainmap_metadata_frac::gainmapMetadataFloatToFraction(
+ &metadata, &iso_secondary_metadata);
+ if (res.error_code == UHDR_CODEC_OK) {
+ ALOGV("%s: HDR gainmap converted to fractions successfully!", __FUNCTION__);
+ } else {
+ ALOGE("%s: Failed to convert HDR gainmap to fractions: %d", __FUNCTION__,
+ res.error_code);
+ return BAD_VALUE;
+ }
+
+ res = ultrahdr::uhdr_gainmap_metadata_frac::encodeGainmapMetadata(&iso_secondary_metadata,
+ inputFrame.isoGainmapMetadata);
+ if (res.error_code == UHDR_CODEC_OK) {
+ ALOGV("%s: HDR gainmap encoded to ISO format successfully!", __FUNCTION__);
+ } else {
+ ALOGE("%s: Failed to encode HDR gainmap to ISO format: %d", __FUNCTION__,
+ res.error_code);
+ return BAD_VALUE;
+ }
+
+ inputFrame.gainmapImage = std::make_unique<CpuConsumer::LockedBuffer>();
+ *inputFrame.gainmapImage = inputFrame.yuvBuffer;
+ inputFrame.gainmapImage->data = reinterpret_cast<uint8_t*>(
+ inputFrame.gainmap->planes[UHDR_PLANE_Y]);
+ inputFrame.gainmapImage->dataCb = inputFrame.gainmapChroma.get();
+ inputFrame.gainmapImage->dataCr = inputFrame.gainmapChroma.get() + 1;
+ inputFrame.gainmapImage->chromaStep = 2;
+ inputFrame.gainmapImage->stride = inputFrame.gainmap->stride[UHDR_PLANE_Y];
+ inputFrame.gainmapImage->chromaStride = inputFrame.gainmap->w;
+ inputFrame.gainmapImage->dataSpace = HAL_DATASPACE_V0_JFIF;
return OK;
}
@@ -1115,7 +1473,9 @@
" timeUs %" PRId64, __FUNCTION__, tileX, tileY, top, left, width, height,
inputBuffer.timeUs);
- res = copyOneYuvTile(buffer, inputFrame.yuvBuffer, top, left, width, height);
+ auto yuvInput = (inputFrame.baseImage.get() != nullptr) ?
+ *inputFrame.baseImage.get() : inputFrame.yuvBuffer;
+ res = copyOneYuvTile(buffer, yuvInput, top, left, width, height);
if (res != OK) {
ALOGE("%s: Failed to copy YUV tile %s (%d)", __FUNCTION__,
strerror(-res), res);
@@ -1135,6 +1495,50 @@
return OK;
}
+status_t HeicCompositeStream::processCodecGainmapInputFrame(InputFrame &inputFrame) {
+ for (auto& inputBuffer : inputFrame.gainmapCodecInputBuffers) {
+ sp<MediaCodecBuffer> buffer;
+ auto res = mGainmapCodec->getInputBuffer(inputBuffer.index, &buffer);
+ if (res != OK) {
+ ALOGE("%s: Error getting codec input buffer: %s (%d)", __FUNCTION__,
+ strerror(-res), res);
+ return res;
+ }
+
+ // Copy one tile from source to destination.
+ size_t tileX = inputBuffer.tileIndex % mGainmapGridCols;
+ size_t tileY = inputBuffer.tileIndex / mGainmapGridCols;
+ size_t top = mGainmapGridHeight * tileY;
+ size_t left = mGainmapGridWidth * tileX;
+ size_t width = (tileX == static_cast<size_t>(mGainmapGridCols) - 1) ?
+ mGainmapOutputWidth - tileX * mGainmapGridWidth : mGainmapGridWidth;
+ size_t height = (tileY == static_cast<size_t>(mGainmapGridRows) - 1) ?
+ mGainmapOutputHeight - tileY * mGainmapGridHeight : mGainmapGridHeight;
+ ALOGV("%s: gainmap inputBuffer tileIndex [%zu, %zu], top %zu, left %zu, width %zu, "
+ "height %zu, timeUs %" PRId64, __FUNCTION__, tileX, tileY, top, left, width, height,
+ inputBuffer.timeUs);
+
+ auto yuvInput = *inputFrame.gainmapImage;
+ res = copyOneYuvTile(buffer, yuvInput, top, left, width, height);
+ if (res != OK) {
+ ALOGE("%s: Failed to copy YUV tile %s (%d)", __FUNCTION__,
+ strerror(-res), res);
+ return res;
+ }
+
+ res = mGainmapCodec->queueInputBuffer(inputBuffer.index, 0, buffer->capacity(),
+ inputBuffer.timeUs, 0, nullptr /*errorDetailMsg*/);
+ if (res != OK) {
+ ALOGE("%s: Failed to queueInputBuffer to Codec: %s (%d)",
+ __FUNCTION__, strerror(-res), res);
+ return res;
+ }
+ }
+
+ inputFrame.gainmapCodecInputBuffers.clear();
+ return OK;
+}
+
status_t HeicCompositeStream::processOneCodecOutputFrame(int64_t frameNumber,
InputFrame &inputFrame) {
auto it = inputFrame.codecOutputBuffers.begin();
@@ -1152,6 +1556,13 @@
}
sp<ABuffer> aBuffer = new ABuffer(buffer->data(), buffer->size());
+ if (mHDRGainmapEnabled) {
+ aBuffer->meta()->setInt32(KEY_COLOR_FORMAT, kCodecColorFormat);
+ aBuffer->meta()->setInt32("color-primaries", kCodecColorPrimaries);
+ aBuffer->meta()->setInt32("color-transfer", kCodecColorTransfer);
+ aBuffer->meta()->setInt32("color-matrix", kCodecColorMatrix);
+ aBuffer->meta()->setInt32("color-range", kCodecColorRange);
+ }
res = inputFrame.muxer->writeSampleData(
aBuffer, inputFrame.trackIndex, inputFrame.timestamp, 0 /*flags*/);
if (res != OK) {
@@ -1174,6 +1585,54 @@
return OK;
}
+status_t HeicCompositeStream::processOneCodecGainmapOutputFrame(int64_t frameNumber,
+ InputFrame &inputFrame) {
+ auto it = inputFrame.gainmapCodecOutputBuffers.begin();
+ sp<MediaCodecBuffer> buffer;
+ status_t res = mGainmapCodec->getOutputBuffer(it->index, &buffer);
+ if (res != OK) {
+ ALOGE("%s: Error getting Heic gainmap codec output buffer at index %d: %s (%d)",
+ __FUNCTION__, it->index, strerror(-res), res);
+ return res;
+ }
+ if (buffer == nullptr) {
+ ALOGE("%s: Invalid Heic gainmap codec output buffer at index %d",
+ __FUNCTION__, it->index);
+ return BAD_VALUE;
+ }
+
+ uint8_t kGainmapMarker[] = {'g', 'm', 'a', 'p', '\0', '\0'};
+ sp<ABuffer> aBuffer = new ABuffer(buffer->size() + sizeof(kGainmapMarker));
+ memcpy(aBuffer->data(), kGainmapMarker, sizeof(kGainmapMarker));
+ memcpy(aBuffer->data() + sizeof(kGainmapMarker), buffer->data(), buffer->size());
+ aBuffer->meta()->setInt32(KEY_COLOR_FORMAT, kCodecGainmapColorFormat);
+ aBuffer->meta()->setInt32("color-primaries", kCodecGainmapColorPrimaries);
+ aBuffer->meta()->setInt32("color-transfer", kCodecGainmapColorTransfer);
+ aBuffer->meta()->setInt32("color-matrix", kCodecGainmapColorMatrix);
+ aBuffer->meta()->setInt32("color-range", kCodecGainmapColorRange);
+ res = inputFrame.muxer->writeSampleData(aBuffer, inputFrame.gainmapTrackIndex,
+ inputFrame.timestamp,
+ MediaCodec::BUFFER_FLAG_MUXER_DATA);
+ if (res != OK) {
+ ALOGE("%s: Failed to write buffer index %d to muxer: %s (%d)",
+ __FUNCTION__, it->index, strerror(-res), res);
+ return res;
+ }
+
+ mGainmapCodec->releaseOutputBuffer(it->index);
+ if (inputFrame.gainmapPendingOutputTiles == 0) {
+ ALOGW("%s: Codec generated more gainmap tiles than expected!", __FUNCTION__);
+ } else {
+ inputFrame.gainmapPendingOutputTiles--;
+ }
+
+ inputFrame.gainmapCodecOutputBuffers.erase(inputFrame.gainmapCodecOutputBuffers.begin());
+
+ ALOGV("%s: [%" PRId64 "]: Gainmap output buffer index %d",
+ __FUNCTION__, frameNumber, it->index);
+ return OK;
+}
+
status_t HeicCompositeStream::processCompletedInputFrame(int64_t frameNumber,
InputFrame &inputFrame) {
sp<ANativeWindow> outputANW = mOutputSurface;
@@ -1256,6 +1715,13 @@
inputFrame->codecOutputBuffers.erase(it);
}
+ while (!inputFrame->gainmapCodecOutputBuffers.empty()) {
+ auto it = inputFrame->gainmapCodecOutputBuffers.begin();
+ ALOGV("%s: release gainmap output buffer index %d", __FUNCTION__, it->index);
+ mGainmapCodec->releaseOutputBuffer(it->index);
+ inputFrame->gainmapCodecOutputBuffers.erase(it);
+ }
+
if (inputFrame->yuvBuffer.data != nullptr) {
mMainImageConsumer->unlockBuffer(inputFrame->yuvBuffer);
inputFrame->yuvBuffer.data = nullptr;
@@ -1267,6 +1733,11 @@
inputFrame->codecInputBuffers.erase(it);
}
+ while (!inputFrame->gainmapCodecInputBuffers.empty()) {
+ auto it = inputFrame->gainmapCodecInputBuffers.begin();
+ inputFrame->gainmapCodecInputBuffers.erase(it);
+ }
+
if (inputFrame->error || mErrorState) {
ALOGV("%s: notifyError called for frameNumber %" PRId64, __FUNCTION__, frameNumber);
notifyError(frameNumber, inputFrame->requestId);
@@ -1292,7 +1763,8 @@
while (it != mPendingInputFrames.end()) {
auto& inputFrame = it->second;
if (inputFrame.error ||
- (inputFrame.appSegmentWritten && inputFrame.pendingOutputTiles == 0)) {
+ (inputFrame.appSegmentWritten && inputFrame.pendingOutputTiles == 0 &&
+ inputFrame.gainmapPendingOutputTiles == 0)) {
releaseInputFrameLocked(it->first, &inputFrame);
it = mPendingInputFrames.erase(it);
inputFrameDone = true;
@@ -1318,6 +1790,110 @@
}
}
+status_t HeicCompositeStream::initializeGainmapCodec() {
+ ALOGV("%s", __FUNCTION__);
+
+ if (!mHDRGainmapEnabled) {
+ return OK;
+ }
+ uint32_t width = mOutputWidth / kGainmapScale;
+ uint32_t height = mOutputHeight / kGainmapScale;
+ bool useGrid = false;
+ bool useHeic = false;
+ AString hevcName;
+ bool isSizeSupported = isSizeSupportedByHeifEncoder(width, height,
+ &useHeic, &useGrid, nullptr, &hevcName);
+ if (!isSizeSupported) {
+ ALOGE("%s: Encoder doesn't support size %u x %u!",
+ __FUNCTION__, width, height);
+ return BAD_VALUE;
+ }
+
+ // Create HEVC codec.
+ mGainmapCodec = MediaCodec::CreateByComponentName(mCodecLooper, hevcName);
+ if (mGainmapCodec == nullptr) {
+ ALOGE("%s: Failed to create gainmap codec", __FUNCTION__);
+ return NO_INIT;
+ }
+
+ // Create Looper and handler for Codec callback.
+ mGainmapCodecCallbackHandler = new CodecCallbackHandler(this, true /*isGainmap*/);
+ if (mGainmapCodecCallbackHandler == nullptr) {
+ ALOGE("%s: Failed to create gainmap codec callback handler", __FUNCTION__);
+ return NO_MEMORY;
+ }
+ mGainmapCallbackLooper = new ALooper;
+ mGainmapCallbackLooper->setName("Camera3-HeicComposite-MediaCodecGainmapCallbackLooper");
+ auto res = mGainmapCallbackLooper->start(
+ false, // runOnCallingThread
+ false, // canCallJava
+ PRIORITY_AUDIO);
+ if (res != OK) {
+ ALOGE("%s: Failed to start gainmap media callback looper: %s (%d)",
+ __FUNCTION__, strerror(-res), res);
+ return NO_INIT;
+ }
+ mGainmapCallbackLooper->registerHandler(mGainmapCodecCallbackHandler);
+
+ mGainmapAsyncNotify = new AMessage(kWhatCallbackNotify, mGainmapCodecCallbackHandler);
+ res = mGainmapCodec->setCallback(mGainmapAsyncNotify);
+ if (res != OK) {
+ ALOGE("%s: Failed to set MediaCodec callback: %s (%d)", __FUNCTION__,
+ strerror(-res), res);
+ return res;
+ }
+
+ // Create output format and configure the Codec.
+ sp<AMessage> outputFormat = new AMessage();
+ outputFormat->setString(KEY_MIME, MIMETYPE_VIDEO_HEVC);
+ outputFormat->setInt32(KEY_BITRATE_MODE, BITRATE_MODE_CQ);
+ outputFormat->setInt32(KEY_QUALITY, kDefaultJpegQuality);
+ // Ask codec to skip timestamp check and encode all frames.
+ outputFormat->setInt64(KEY_MAX_PTS_GAP_TO_ENCODER, kNoFrameDropMaxPtsGap);
+
+ int32_t gridWidth, gridHeight, gridRows, gridCols;
+ if (useGrid){
+ gridWidth = HeicEncoderInfoManager::kGridWidth;
+ gridHeight = HeicEncoderInfoManager::kGridHeight;
+ gridRows = (height + gridHeight - 1)/gridHeight;
+ gridCols = (width + gridWidth - 1)/gridWidth;
+ } else {
+ gridWidth = width;
+ gridHeight = height;
+ gridRows = 1;
+ gridCols = 1;
+ }
+
+ outputFormat->setInt32(KEY_WIDTH, !useGrid ? width : gridWidth);
+ outputFormat->setInt32(KEY_HEIGHT, !useGrid ? height : gridHeight);
+ outputFormat->setInt32(KEY_I_FRAME_INTERVAL, 0);
+ outputFormat->setInt32(KEY_COLOR_FORMAT, COLOR_FormatYUV420Flexible);
+ outputFormat->setInt32(KEY_FRAME_RATE, useGrid ? gridRows * gridCols : kNoGridOpRate);
+ // This only serves as a hint to encoder when encoding is not real-time.
+ outputFormat->setInt32(KEY_OPERATING_RATE, useGrid ? kGridOpRate : kNoGridOpRate);
+
+ res = mGainmapCodec->configure(outputFormat, nullptr /*nativeWindow*/,
+ nullptr /*crypto*/, CONFIGURE_FLAG_ENCODE);
+ if (res != OK) {
+ ALOGE("%s: Failed to configure codec: %s (%d)", __FUNCTION__,
+ strerror(-res), res);
+ return res;
+ }
+
+ mGainmapGridWidth = gridWidth;
+ mGainmapGridHeight = gridHeight;
+ mGainmapGridRows = gridRows;
+ mGainmapGridCols = gridCols;
+ mGainmapUseGrid = useGrid;
+ mGainmapOutputWidth = width;
+ mGainmapOutputHeight = height;
+ mMaxHeicBufferSize +=
+ ALIGN(mGainmapOutputWidth, HeicEncoderInfoManager::kGridWidth) *
+ ALIGN(mGainmapOutputHeight, HeicEncoderInfoManager::kGridHeight) * 3 / 2;
+
+ return OK;
+}
+
status_t HeicCompositeStream::initializeCodec(uint32_t width, uint32_t height,
const sp<CameraDeviceBase>& cameraDevice) {
ALOGV("%s", __FUNCTION__);
@@ -1331,6 +1907,12 @@
__FUNCTION__, width, height);
return BAD_VALUE;
}
+ if (mHDRGainmapEnabled) {
+ // HDR Gainmap tonemapping and generation can only be done in SW
+ // using P010 as input. HEIC codecs expect private/impl.defined
+ // which is opaque.
+ mUseHeic = false;
+ }
// Create Looper for MediaCodec.
auto desiredMime = mUseHeic ? MIMETYPE_IMAGE_ANDROID_HEIC : MIMETYPE_VIDEO_HEVC;
@@ -1417,7 +1999,7 @@
outputFormat->setInt32(KEY_HEIGHT, !useGrid ? height : gridHeight);
outputFormat->setInt32(KEY_I_FRAME_INTERVAL, 0);
outputFormat->setInt32(KEY_COLOR_FORMAT,
- useGrid ? COLOR_FormatYUV420Flexible : COLOR_FormatSurface);
+ useGrid || mHDRGainmapEnabled ? COLOR_FormatYUV420Flexible : COLOR_FormatSurface);
outputFormat->setInt32(KEY_FRAME_RATE, useGrid ? gridRows * gridCols : kNoGridOpRate);
// This only serves as a hint to encoder when encoding is not real-time.
outputFormat->setInt32(KEY_OPERATING_RATE, useGrid ? kGridOpRate : kNoGridOpRate);
@@ -1442,7 +2024,24 @@
ALIGN(mOutputWidth, HeicEncoderInfoManager::kGridWidth) *
ALIGN(mOutputHeight, HeicEncoderInfoManager::kGridHeight) * 3 / 2 + mAppSegmentMaxSize;
- return OK;
+ return initializeGainmapCodec();
+}
+
+void HeicCompositeStream::deinitGainmapCodec() {
+ ALOGV("%s", __FUNCTION__);
+ if (mGainmapCodec != nullptr) {
+ mGainmapCodec->stop();
+ mGainmapCodec->release();
+ mGainmapCodec.clear();
+ }
+
+ if (mGainmapCallbackLooper != nullptr) {
+ mGainmapCallbackLooper->stop();
+ mGainmapCallbackLooper.clear();
+ }
+
+ mGainmapAsyncNotify.clear();
+ mGainmapFormat.clear();
}
void HeicCompositeStream::deinitCodec() {
@@ -1453,6 +2052,8 @@
mCodec.clear();
}
+ deinitGainmapCodec();
+
if (mCodecLooper != nullptr) {
mCodecLooper->stop();
mCodecLooper.clear();
@@ -1873,7 +2474,7 @@
ALOGE("CB_INPUT_AVAILABLE: index is expected.");
break;
}
- parent->onHeicInputFrameAvailable(index);
+ parent->onHeicInputFrameAvailable(index, mIsGainmap);
break;
}
@@ -1912,7 +2513,7 @@
timeUs,
(uint32_t)flags};
- parent->onHeicOutputFrameAvailable(bufferInfo);
+ parent->onHeicOutputFrameAvailable(bufferInfo, mIsGainmap);
break;
}
@@ -1928,7 +2529,7 @@
if (format != nullptr) {
formatCopy = format->dup();
}
- parent->onHeicFormatChanged(formatCopy);
+ parent->onHeicFormatChanged(formatCopy, mIsGainmap);
break;
}
diff --git a/services/camera/libcameraservice/api2/HeicCompositeStream.h b/services/camera/libcameraservice/api2/HeicCompositeStream.h
index ba10e05..beb08b0 100644
--- a/services/camera/libcameraservice/api2/HeicCompositeStream.h
+++ b/services/camera/libcameraservice/api2/HeicCompositeStream.h
@@ -17,6 +17,9 @@
#ifndef ANDROID_SERVERS_CAMERA_CAMERA3_HEIC_COMPOSITE_STREAM_H
#define ANDROID_SERVERS_CAMERA_CAMERA3_HEIC_COMPOSITE_STREAM_H
+#include <algorithm>
+#include <android/data_space.h>
+#include <memory>
#include <queue>
#include <gui/CpuConsumer.h>
@@ -27,6 +30,8 @@
#include <media/stagefright/foundation/AMessage.h>
#include <media/stagefright/MediaCodec.h>
#include <media/stagefright/MediaMuxer.h>
+#include <ultrahdr/ultrahdrcommon.h>
+#include <ultrahdr/gainmapmetadata.h>
#include "CompositeStream.h"
@@ -43,7 +48,7 @@
static bool isHeicCompositeStream(const sp<Surface> &surface);
static bool isHeicCompositeStreamInfo(const OutputStreamInfo& streamInfo);
- status_t createInternalStreams(const std::vector<sp<Surface>>& consumers,
+ status_t createInternalStreams(const std::vector<SurfaceHolder>& consumers,
bool hasDeferredConsumer, uint32_t width, uint32_t height, int format,
camera_stream_rotation_t rotation, int *id, const std::string& physicalCameraId,
const std::unordered_set<int32_t> &sensorPixelModesUsed,
@@ -79,8 +84,13 @@
void getStreamStats(hardware::CameraStreamStats*) override {};
static bool isSizeSupportedByHeifEncoder(int32_t width, int32_t height,
- bool* useHeic, bool* useGrid, int64_t* stall, AString* hevcName = nullptr);
+ bool* useHeic, bool* useGrid, int64_t* stall, AString* hevcName = nullptr,
+ bool allowSWCodec = false);
static bool isInMemoryTempFileSupported();
+
+ // HDR Gainmap subsampling
+ static const size_t kGainmapScale = 4;
+
protected:
bool threadLoop() override;
@@ -108,12 +118,12 @@
class CodecCallbackHandler : public AHandler {
public:
- explicit CodecCallbackHandler(wp<HeicCompositeStream> parent) {
- mParent = parent;
- }
+ explicit CodecCallbackHandler(wp<HeicCompositeStream> parent, bool isGainmap = false) :
+ mParent(parent), mIsGainmap(isGainmap) {}
virtual void onMessageReceived(const sp<AMessage> &msg);
private:
wp<HeicCompositeStream> mParent;
+ bool mIsGainmap;
};
enum {
@@ -122,30 +132,34 @@
bool mUseHeic;
sp<MediaCodec> mCodec;
- sp<ALooper> mCodecLooper, mCallbackLooper;
- sp<CodecCallbackHandler> mCodecCallbackHandler;
- sp<AMessage> mAsyncNotify;
- sp<AMessage> mFormat;
- size_t mNumOutputTiles;
+ sp<MediaCodec> mGainmapCodec;
+ sp<ALooper> mCodecLooper, mCallbackLooper, mGainmapCallbackLooper;
+ sp<CodecCallbackHandler> mCodecCallbackHandler, mGainmapCodecCallbackHandler;
+ sp<AMessage> mAsyncNotify, mGainmapAsyncNotify;
+ sp<AMessage> mFormat, mGainmapFormat;
+ size_t mNumOutputTiles, mNumGainmapOutputTiles;
- int32_t mOutputWidth, mOutputHeight;
+ int32_t mOutputWidth, mOutputHeight, mGainmapOutputWidth, mGainmapOutputHeight;
size_t mMaxHeicBufferSize;
- int32_t mGridWidth, mGridHeight;
- size_t mGridRows, mGridCols;
- bool mUseGrid; // Whether to use framework YUV frame tiling.
+ int32_t mGridWidth, mGridHeight, mGainmapGridWidth, mGainmapGridHeight;
+ size_t mGridRows, mGridCols, mGainmapGridRows, mGainmapGridCols;
+ bool mUseGrid, mGainmapUseGrid; // Whether to use framework YUV frame tiling.
static const int64_t kNoFrameDropMaxPtsGap = -1000000;
static const int32_t kNoGridOpRate = 30;
static const int32_t kGridOpRate = 120;
- void onHeicOutputFrameAvailable(const CodecOutputBufferInfo& bufferInfo);
- void onHeicInputFrameAvailable(int32_t index); // Only called for YUV input mode.
- void onHeicFormatChanged(sp<AMessage>& newFormat);
+ void onHeicOutputFrameAvailable(const CodecOutputBufferInfo& bufferInfo, bool isGainmap);
+ void onHeicInputFrameAvailable(int32_t index, bool isGainmap);// Only called for YUV input mode.
+ void onHeicFormatChanged(sp<AMessage>& newFormat, bool isGainmap);
+ void onHeicGainmapFormatChanged(sp<AMessage>& newFormat);
void onHeicCodecError();
status_t initializeCodec(uint32_t width, uint32_t height,
const sp<CameraDeviceBase>& cameraDevice);
void deinitCodec();
+ status_t initializeGainmapCodec();
+ void deinitGainmapCodec();
//
// Composite stream related structures, utility functions and callbacks.
@@ -155,33 +169,51 @@
int32_t quality;
CpuConsumer::LockedBuffer appSegmentBuffer;
- std::vector<CodecOutputBufferInfo> codecOutputBuffers;
+ std::vector<CodecOutputBufferInfo> codecOutputBuffers, gainmapCodecOutputBuffers;
std::unique_ptr<CameraMetadata> result;
// Fields that are only applicable to HEVC tiling.
CpuConsumer::LockedBuffer yuvBuffer;
- std::vector<CodecInputBufferInfo> codecInputBuffers;
+ std::vector<CodecInputBufferInfo> codecInputBuffers, gainmapCodecInputBuffers;
bool error; // Main input image buffer error
bool exifError; // Exif/APP_SEGMENT buffer error
int64_t timestamp;
int32_t requestId;
- sp<AMessage> format;
+ sp<AMessage> format, gainmapFormat;
sp<MediaMuxer> muxer;
int fenceFd;
int fileFd;
- ssize_t trackIndex;
+ ssize_t trackIndex, gainmapTrackIndex;
ANativeWindowBuffer *anb;
bool appSegmentWritten;
- size_t pendingOutputTiles;
- size_t codecInputCounter;
+ size_t pendingOutputTiles, gainmapPendingOutputTiles;
+ size_t codecInputCounter, gainmapCodecInputCounter;
- InputFrame() : orientation(0), quality(kDefaultJpegQuality), error(false),
- exifError(false), timestamp(-1), requestId(-1), fenceFd(-1),
- fileFd(-1), trackIndex(-1), anb(nullptr), appSegmentWritten(false),
- pendingOutputTiles(0), codecInputCounter(0) { }
+ std::unique_ptr<CpuConsumer::LockedBuffer> baseImage, gainmapImage;
+ std::unique_ptr<ultrahdr::uhdr_raw_image_ext> baseBuffer, gainmap;
+ std::unique_ptr<uint8_t[]> gainmapChroma;
+ std::vector<uint8_t> isoGainmapMetadata;
+
+ InputFrame()
+ : orientation(0),
+ quality(kDefaultJpegQuality),
+ error(false),
+ exifError(false),
+ timestamp(-1),
+ requestId(-1),
+ fenceFd(-1),
+ fileFd(-1),
+ trackIndex(-1),
+ gainmapTrackIndex(-1),
+ anb(nullptr),
+ appSegmentWritten(false),
+ pendingOutputTiles(0),
+ gainmapPendingOutputTiles(0),
+ codecInputCounter(0),
+ gainmapCodecInputCounter(0) {}
};
void compilePendingInputLocked();
@@ -192,9 +224,11 @@
status_t processInputFrame(int64_t frameNumber, InputFrame &inputFrame);
status_t processCodecInputFrame(InputFrame &inputFrame);
+ status_t processCodecGainmapInputFrame(InputFrame &inputFrame);
status_t startMuxerForInputFrame(int64_t frameNumber, InputFrame &inputFrame);
status_t processAppSegment(int64_t frameNumber, InputFrame &inputFrame);
status_t processOneCodecOutputFrame(int64_t frameNumber, InputFrame &inputFrame);
+ status_t processOneCodecGainmapOutputFrame(int64_t frameNumber, InputFrame &inputFrame);
status_t processCompletedInputFrame(int64_t frameNumber, InputFrame &inputFrame);
void releaseInputFrameLocked(int64_t frameNumber, InputFrame *inputFrame /*out*/);
@@ -216,6 +250,7 @@
static_cast<android_dataspace>(HAL_DATASPACE_JPEG_APP_SEGMENTS);
static const android_dataspace kHeifDataSpace =
static_cast<android_dataspace>(HAL_DATASPACE_HEIF);
+ android_dataspace mInternalDataSpace = kHeifDataSpace;
// Use the limit of pipeline depth in the API sepc as maximum number of acquired
// app segment buffers.
static const uint32_t kMaxAcquiredAppSegment = 8;
@@ -260,15 +295,15 @@
std::vector<int64_t> mInputAppSegmentBuffers;
// Keep all incoming HEIC blob buffer pending further processing.
- std::vector<CodecOutputBufferInfo> mCodecOutputBuffers;
- std::queue<int64_t> mCodecOutputBufferFrameNumbers;
- size_t mCodecOutputCounter;
+ std::vector<CodecOutputBufferInfo> mCodecOutputBuffers, mGainmapCodecOutputBuffers;
+ std::queue<int64_t> mCodecOutputBufferFrameNumbers, mCodecGainmapOutputBufferFrameNumbers;
+ size_t mCodecOutputCounter, mCodecGainmapOutputCounter;
int32_t mQuality;
// Keep all incoming Yuv buffer pending tiling and encoding (for HEVC YUV tiling only)
std::vector<int64_t> mInputYuvBuffers;
// Keep all codec input buffers ready to be filled out (for HEVC YUV tiling only)
- std::vector<int32_t> mCodecInputBuffers;
+ std::vector<int32_t> mCodecInputBuffers, mGainmapCodecInputBuffers;
// Artificial strictly incremental YUV grid timestamp to make encoder happy.
int64_t mGridTimestampUs;
@@ -286,6 +321,49 @@
// The status id for tracking the active/idle status of this composite stream
int mStatusId;
void markTrackerIdle();
+
+ //APP_SEGMENT stream supported
+ bool mAppSegmentSupported = false;
+
+ bool mHDRGainmapEnabled = false;
+
+ // UltraHDR tonemap color and format aspects
+ static const uhdr_img_fmt_t kUltraHdrInputFmt = UHDR_IMG_FMT_24bppYCbCrP010;
+ static const uhdr_color_gamut kUltraHdrInputGamut = UHDR_CG_BT_2100;
+ static const uhdr_color_transfer kUltraHdrInputTransfer = UHDR_CT_HLG;
+ static const uhdr_color_range kUltraHdrInputRange = UHDR_CR_FULL_RANGE;
+
+ static const uhdr_img_fmt_t kUltraHdrOutputFmt = UHDR_IMG_FMT_12bppYCbCr420;
+ static const uhdr_color_gamut kUltraHdrOutputGamut = UHDR_CG_DISPLAY_P3;
+ static const uhdr_color_transfer kUltraHdrOutputTransfer = UHDR_CT_SRGB;
+ static const uhdr_color_range kUltraHdrOutputRange = UHDR_CR_FULL_RANGE;
+
+ static const auto kUltraHDRDataSpace = ADATASPACE_HEIF_ULTRAHDR;
+
+ // MediaMuxer/Codec color and format aspects for base image and gainmap metadata
+ static const int32_t kCodecColorFormat = COLOR_FormatYUV420Flexible;
+ static const ColorAspects::Primaries kCodecColorPrimaries =
+ ColorAspects::Primaries::PrimariesEG432;
+ static const ColorAspects::MatrixCoeffs kCodecColorMatrix =
+ ColorAspects::MatrixCoeffs::MatrixUnspecified;
+ static const ColorAspects::Transfer kCodecColorTransfer =
+ ColorAspects::Transfer::TransferSRGB;
+ static const ColorAspects::Range kCodecColorRange =
+ ColorAspects::Range::RangeFull;
+
+ // MediaMuxer/Codec color and format aspects for gainmap as per ISO 23008-12:2024
+ static const int32_t kCodecGainmapColorFormat = COLOR_FormatYUV420Flexible;
+ static const ColorAspects::Primaries kCodecGainmapColorPrimaries =
+ ColorAspects::Primaries::PrimariesUnspecified;
+ static const ColorAspects::MatrixCoeffs kCodecGainmapColorMatrix =
+ ColorAspects::MatrixCoeffs::MatrixUnspecified;
+ static const ColorAspects::Transfer kCodecGainmapColorTransfer =
+ ColorAspects::Transfer::TransferUnspecified;
+ static const ColorAspects::Range kCodecGainmapColorRange =
+ ColorAspects::Range::RangeFull;
+
+
+ status_t generateBaseImageAndGainmap(InputFrame &inputFrame);
};
}; // namespace camera3
diff --git a/services/camera/libcameraservice/api2/HeicEncoderInfoManager.cpp b/services/camera/libcameraservice/api2/HeicEncoderInfoManager.cpp
index d36ca3b..92072b0 100644
--- a/services/camera/libcameraservice/api2/HeicEncoderInfoManager.cpp
+++ b/services/camera/libcameraservice/api2/HeicEncoderInfoManager.cpp
@@ -20,6 +20,7 @@
#include <cstdint>
#include <regex>
+#include <com_android_internal_camera_flags.h>
#include <cutils/properties.h>
#include <log/log_main.h>
#include <system/graphics.h>
@@ -33,14 +34,16 @@
namespace android {
namespace camera3 {
-HeicEncoderInfoManager::HeicEncoderInfoManager() :
+namespace flags = com::android::internal::camera::flags;
+
+HeicEncoderInfoManager::HeicEncoderInfoManager(bool useSWCodec) :
mIsInited(false),
mMinSizeHeic(0, 0),
mMaxSizeHeic(INT32_MAX, INT32_MAX),
mHasHEVC(false),
mHasHEIC(false),
mDisableGrid(false) {
- if (initialize() == OK) {
+ if (initialize(useSWCodec) == OK) {
mIsInited = true;
}
}
@@ -72,14 +75,15 @@
(width <= 1920 && height <= 1080))) {
enableGrid = false;
}
- if (hevcName != nullptr) {
- *hevcName = mHevcName;
- }
} else {
// No encoder available for the requested size.
return false;
}
+ if (hevcName != nullptr) {
+ *hevcName = mHevcName;
+ }
+
if (stall != nullptr) {
// Find preferred encoder which advertise
// "measured-frame-rate-WIDTHxHEIGHT-range" key.
@@ -109,7 +113,7 @@
return true;
}
-status_t HeicEncoderInfoManager::initialize() {
+status_t HeicEncoderInfoManager::initialize(bool allowSWCodec) {
mDisableGrid = property_get_bool("camera.heic.disable_grid", false);
sp<IMediaCodecList> codecsList = MediaCodecList::getInstance();
if (codecsList == nullptr) {
@@ -119,7 +123,7 @@
sp<AMessage> heicDetails = getCodecDetails(codecsList, MEDIA_MIMETYPE_IMAGE_ANDROID_HEIC);
- if (!getHevcCodecDetails(codecsList, MEDIA_MIMETYPE_VIDEO_HEVC)) {
+ if (!getHevcCodecDetails(codecsList, MEDIA_MIMETYPE_VIDEO_HEVC, allowSWCodec)) {
if (heicDetails != nullptr) {
ALOGE("%s: Device must support HEVC codec if HEIC codec is available!",
__FUNCTION__);
@@ -268,7 +272,7 @@
}
bool HeicEncoderInfoManager::getHevcCodecDetails(
- sp<IMediaCodecList> codecsList, const char* mime) {
+ sp<IMediaCodecList> codecsList, const char* mime, bool allowSWCodec) {
bool found = false;
ssize_t idx = 0;
while ((idx = codecsList->findCodecByType(mime, true /*encoder*/, idx)) >= 0) {
@@ -280,11 +284,13 @@
ALOGV("%s: [%s] codec found", __FUNCTION__,
info->getCodecName());
- // Filter out software ones as they may be too slow
- if (!(info->getAttributes() & MediaCodecInfo::kFlagIsHardwareAccelerated)) {
- ALOGV("%s: [%s] Filter out software ones as they may be too slow", __FUNCTION__,
- info->getCodecName());
- continue;
+ if (!allowSWCodec) {
+ // Filter out software ones as they may be too slow
+ if (!(info->getAttributes() & MediaCodecInfo::kFlagIsHardwareAccelerated)) {
+ ALOGV("%s: [%s] Filter out software ones as they may be too slow", __FUNCTION__,
+ info->getCodecName());
+ continue;
+ }
}
const sp<MediaCodecInfo::Capabilities> caps =
diff --git a/services/camera/libcameraservice/api2/HeicEncoderInfoManager.h b/services/camera/libcameraservice/api2/HeicEncoderInfoManager.h
index a65be9c..1e28eca 100644
--- a/services/camera/libcameraservice/api2/HeicEncoderInfoManager.h
+++ b/services/camera/libcameraservice/api2/HeicEncoderInfoManager.h
@@ -30,8 +30,8 @@
class HeicEncoderInfoManager {
public:
- static HeicEncoderInfoManager& getInstance() {
- static HeicEncoderInfoManager instance;
+ static HeicEncoderInfoManager& getInstance(bool useSWCodec) {
+ static HeicEncoderInfoManager instance(useSWCodec);
return instance;
}
@@ -51,10 +51,10 @@
typedef std::unordered_map<std::pair<int32_t, int32_t>,
std::pair<int32_t, int32_t>, SizePairHash> FrameRateMaps;
- HeicEncoderInfoManager();
+ HeicEncoderInfoManager(bool useSWCodec);
virtual ~HeicEncoderInfoManager();
- status_t initialize();
+ status_t initialize(bool allowSWCodec);
status_t getFrameRateMaps(sp<AMessage> details, FrameRateMaps* maps);
status_t getCodecSizeRange(const char* codecName, sp<AMessage> details,
std::pair<int32_t, int32_t>* minSize, std::pair<int32_t, int32_t>* maxSize,
@@ -62,7 +62,8 @@
FrameRateMaps::const_iterator findClosestSize(const FrameRateMaps& maps,
int32_t width, int32_t height) const;
sp<AMessage> getCodecDetails(sp<IMediaCodecList> codecsList, const char* name);
- bool getHevcCodecDetails(sp<IMediaCodecList> codecsList, const char* mime);
+ bool getHevcCodecDetails(sp<IMediaCodecList> codecsList, const char* mime,
+ bool allowSWCodec = false);
bool mIsInited;
std::pair<int32_t, int32_t> mMinSizeHeic, mMaxSizeHeic;
diff --git a/services/camera/libcameraservice/api2/JpegRCompositeStream.cpp b/services/camera/libcameraservice/api2/JpegRCompositeStream.cpp
index c5bd7a9..e0d7604 100644
--- a/services/camera/libcameraservice/api2/JpegRCompositeStream.cpp
+++ b/services/camera/libcameraservice/api2/JpegRCompositeStream.cpp
@@ -557,7 +557,7 @@
}
-status_t JpegRCompositeStream::createInternalStreams(const std::vector<sp<Surface>>& consumers,
+status_t JpegRCompositeStream::createInternalStreams(const std::vector<SurfaceHolder>& consumers,
bool /*hasDeferredConsumer*/, uint32_t width, uint32_t height, int format,
camera_stream_rotation_t rotation, int *id, const std::string& physicalCameraId,
const std::unordered_set<int32_t> &sensorPixelModesUsed,
@@ -600,7 +600,7 @@
if (ret == OK) {
mP010StreamId = *id;
mP010SurfaceId = (*surfaceIds)[0];
- mOutputSurface = consumers[0];
+ mOutputSurface = consumers[0].mSurface;
} else {
return ret;
}
diff --git a/services/camera/libcameraservice/api2/JpegRCompositeStream.h b/services/camera/libcameraservice/api2/JpegRCompositeStream.h
index d3ab19c..efd31da 100644
--- a/services/camera/libcameraservice/api2/JpegRCompositeStream.h
+++ b/services/camera/libcameraservice/api2/JpegRCompositeStream.h
@@ -46,7 +46,7 @@
static bool isJpegRCompositeStreamInfo(const OutputStreamInfo& streamInfo);
// CompositeStream overrides
- status_t createInternalStreams(const std::vector<sp<Surface>>& consumers,
+ status_t createInternalStreams(const std::vector<SurfaceHolder>& consumers,
bool hasDeferredConsumer, uint32_t width, uint32_t height, int format,
camera_stream_rotation_t rotation, int *id, const std::string& physicalCameraId,
const std::unordered_set<int32_t> &sensorPixelModesUsed,
diff --git a/services/camera/libcameraservice/common/Camera2ClientBase.cpp b/services/camera/libcameraservice/common/Camera2ClientBase.cpp
index 352c6f8..03abf71 100644
--- a/services/camera/libcameraservice/common/Camera2ClientBase.cpp
+++ b/services/camera/libcameraservice/common/Camera2ClientBase.cpp
@@ -31,6 +31,7 @@
#include <camera/CameraSessionStats.h>
#include <camera/StringUtils.h>
#include <com_android_window_flags.h>
+#include <com_android_internal_camera_flags.h>
#include "common/Camera2ClientBase.h"
@@ -39,45 +40,38 @@
#include "device3/Camera3Device.h"
#include "device3/aidl/AidlCamera3Device.h"
#include "device3/hidl/HidlCamera3Device.h"
+#include "device3/aidl/AidlCamera3SharedDevice.h"
namespace android {
using namespace camera2;
namespace wm_flags = com::android::window::flags;
+namespace flags = com::android::internal::camera::flags;
// Interface used by CameraService
template <typename TClientBase>
Camera2ClientBase<TClientBase>::Camera2ClientBase(
- const sp<CameraService>& cameraService,
- const sp<TCamCallbacks>& remoteCallback,
+ const sp<CameraService>& cameraService, const sp<TCamCallbacks>& remoteCallback,
std::shared_ptr<CameraServiceProxyWrapper> cameraServiceProxyWrapper,
std::shared_ptr<AttributionAndPermissionUtils> attributionAndPermissionUtils,
- const std::string& clientPackageName,
- bool systemNativeClient,
- const std::optional<std::string>& clientFeatureId,
- const std::string& cameraId,
- int api1CameraId,
- int cameraFacing,
- int sensorOrientation,
- int clientPid,
- uid_t clientUid,
- int servicePid,
- bool overrideForPerfClass,
- int rotationOverride,
- bool legacyClient):
- TClientBase(cameraService, remoteCallback, attributionAndPermissionUtils, clientPackageName,
- systemNativeClient, clientFeatureId, cameraId, api1CameraId, cameraFacing,
- sensorOrientation, clientPid, clientUid, servicePid, rotationOverride),
- mSharedCameraCallbacks(remoteCallback),
- mCameraServiceProxyWrapper(cameraServiceProxyWrapper),
- mDeviceActive(false), mApi1CameraId(api1CameraId)
-{
+ const AttributionSourceState& clientAttribution, int callingPid, bool systemNativeClient,
+ const std::string& cameraId, int api1CameraId, int cameraFacing, int sensorOrientation,
+ int servicePid, bool overrideForPerfClass, int rotationOverride, bool sharedMode,
+ bool legacyClient)
+ : TClientBase(cameraService, remoteCallback, attributionAndPermissionUtils, clientAttribution,
+ callingPid, systemNativeClient, cameraId, api1CameraId, cameraFacing,
+ sensorOrientation, servicePid, rotationOverride, sharedMode),
+ mSharedCameraCallbacks(remoteCallback),
+ mCameraServiceProxyWrapper(cameraServiceProxyWrapper),
+ mDeviceActive(false),
+ mApi1CameraId(api1CameraId) {
ALOGI("Camera %s: Opened. Client: %s (PID %d, UID %d)", cameraId.c_str(),
- clientPackageName.c_str(), clientPid, clientUid);
+ TClientBase::getPackageName().c_str(), TClientBase::mCallingPid,
+ TClientBase::getClientUid());
- mInitialClientPid = clientPid;
+ mInitialClientPid = TClientBase::mCallingPid;
mOverrideForPerfClass = overrideForPerfClass;
mLegacyClient = legacyClient;
}
@@ -87,10 +81,10 @@
const {
int callingPid = TClientBase::getCallingPid();
- if (callingPid == TClientBase::mClientPid) return NO_ERROR;
+ if (callingPid == TClientBase::mCallingPid) return NO_ERROR;
ALOGE("%s: attempt to use a locked camera from a different process"
- " (old pid %d, new pid %d)", checkLocation, TClientBase::mClientPid, callingPid);
+ " (old pid %d, new pid %d)", checkLocation, TClientBase::mCallingPid, callingPid);
return PERMISSION_DENIED;
}
@@ -124,12 +118,19 @@
TClientBase::mRotationOverride, mLegacyClient);
break;
case IPCTransport::AIDL:
- mDevice =
+ if (flags::camera_multi_client() && TClientBase::mSharedMode) {
+ mDevice = AidlCamera3SharedDevice::getInstance(mCameraServiceProxyWrapper,
+ TClientBase::mAttributionAndPermissionUtils,
+ TClientBase::mCameraIdStr, mOverrideForPerfClass,
+ TClientBase::mRotationOverride, mLegacyClient);
+ } else {
+ mDevice =
new AidlCamera3Device(mCameraServiceProxyWrapper,
TClientBase::mAttributionAndPermissionUtils,
TClientBase::mCameraIdStr, mOverrideForPerfClass,
TClientBase::mRotationOverride, mLegacyClient);
- break;
+ }
+ break;
default:
ALOGE("%s Invalid transport for camera id %s", __FUNCTION__,
TClientBase::mCameraIdStr.c_str());
@@ -141,10 +142,10 @@
return NO_INIT;
}
- // Verify ops permissions
- res = TClientBase::startCameraOps();
+ // Notify camera opening (check op if check_full_attribution_source_chain flag is off).
+ res = TClientBase::notifyCameraOpening();
if (res != OK) {
- TClientBase::finishCameraOps();
+ TClientBase::notifyCameraClosing();
return res;
}
@@ -152,7 +153,7 @@
if (res != OK) {
ALOGE("%s: Camera %s: unable to initialize device: %s (%d)",
__FUNCTION__, TClientBase::mCameraIdStr.c_str(), strerror(-res), res);
- TClientBase::finishCameraOps();
+ TClientBase::notifyCameraClosing();
return res;
}
@@ -171,14 +172,14 @@
Camera2ClientBase<TClientBase>::~Camera2ClientBase() {
ATRACE_CALL();
- TClientBase::mDestructionStarted = true;
-
- disconnect();
+ if (!flags::camera_multi_client() || !TClientBase::mDisconnected) {
+ TClientBase::mDestructionStarted = true;
+ disconnect();
+ }
ALOGI("%s: Client object's dtor for Camera Id %s completed. Client was: %s (PID %d, UID %u)",
- __FUNCTION__, TClientBase::mCameraIdStr.c_str(),
- TClientBase::mClientPackageName.c_str(),
- mInitialClientPid, TClientBase::mClientUid);
+ __FUNCTION__, TClientBase::mCameraIdStr.c_str(), TClientBase::getPackageName().c_str(),
+ mInitialClientPid, TClientBase::getClientUid());
}
template <typename TClientBase>
@@ -189,7 +190,7 @@
TClientBase::mCameraIdStr.c_str(),
(TClientBase::getRemoteCallback() != NULL ?
(void *)IInterface::asBinder(TClientBase::getRemoteCallback()).get() : NULL),
- TClientBase::mClientPid);
+ TClientBase::mCallingPid);
result += " State: ";
write(fd, result.c_str(), result.size());
@@ -261,7 +262,10 @@
template <typename TClientBase>
binder::Status Camera2ClientBase<TClientBase>::disconnect() {
- return disconnectImpl();
+ if (!flags::camera_multi_client() || !TClientBase::mDisconnected) {
+ return disconnectImpl();
+ }
+ return binder::Status::ok();
}
template <typename TClientBase>
@@ -274,7 +278,7 @@
binder::Status res = binder::Status::ok();
// Allow both client and the media server to disconnect at all times
int callingPid = TClientBase::getCallingPid();
- if (callingPid != TClientBase::mClientPid &&
+ if (callingPid != TClientBase::mCallingPid &&
callingPid != TClientBase::mServicePid) return res;
ALOGD("Camera %s: Shutting down", TClientBase::mCameraIdStr.c_str());
@@ -299,7 +303,11 @@
template <typename TClientBase>
void Camera2ClientBase<TClientBase>::detachDevice() {
if (mDevice == 0) return;
- mDevice->disconnect();
+ if (flags::camera_multi_client() && TClientBase::mSharedMode) {
+ mDevice->disconnectClient(TClientBase::getClientUid());
+ } else {
+ mDevice->disconnect();
+ }
ALOGV("Camera %s: Detach complete", TClientBase::mCameraIdStr.c_str());
}
@@ -311,19 +319,19 @@
ALOGV("%s: E", __FUNCTION__);
Mutex::Autolock icl(mBinderSerializationLock);
- if (TClientBase::mClientPid != 0 &&
- TClientBase::getCallingPid() != TClientBase::mClientPid) {
+ if (TClientBase::mCallingPid != 0 &&
+ TClientBase::getCallingPid() != TClientBase::mCallingPid) {
ALOGE("%s: Camera %s: Connection attempt from pid %d; "
"current locked to pid %d",
__FUNCTION__,
TClientBase::mCameraIdStr.c_str(),
TClientBase::getCallingPid(),
- TClientBase::mClientPid);
+ TClientBase::mCallingPid);
return BAD_VALUE;
}
- TClientBase::mClientPid = TClientBase::getCallingPid();
+ TClientBase::mCallingPid = TClientBase::getCallingPid();
TClientBase::mRemoteCallback = client;
mSharedCameraCallbacks = client;
@@ -342,6 +350,12 @@
}
template <typename TClientBase>
+void Camera2ClientBase<TClientBase>::notifyClientSharedAccessPriorityChanged(bool primaryClient) {
+ ALOGV("%s Camera %s access priorities changed for client %d primaryClient=%d", __FUNCTION__,
+ TClientBase::mCameraIdStr.c_str(), TClientBase::getClientUid(), primaryClient);
+}
+
+template <typename TClientBase>
void Camera2ClientBase<TClientBase>::notifyPhysicalCameraChange(const std::string &physicalId) {
using android::hardware::ICameraService;
// We're only interested in this notification if rotationOverride is turned on.
@@ -358,7 +372,7 @@
bool landscapeSensor = (orientation == 0 || orientation == 180);
if (((TClientBase::mRotationOverride ==
ICameraService::ROTATION_OVERRIDE_OVERRIDE_TO_PORTRAIT) && landscapeSensor) ||
- ((wm_flags::camera_compat_for_freeform() &&
+ ((wm_flags::enable_camera_compat_for_desktop_windowing() &&
TClientBase::mRotationOverride ==
ICameraService::ROTATION_OVERRIDE_ROTATION_ONLY)
&& !landscapeSensor)) {
diff --git a/services/camera/libcameraservice/common/Camera2ClientBase.h b/services/camera/libcameraservice/common/Camera2ClientBase.h
index c9d5735..cb30199 100644
--- a/services/camera/libcameraservice/common/Camera2ClientBase.h
+++ b/services/camera/libcameraservice/common/Camera2ClientBase.h
@@ -53,18 +53,10 @@
const sp<TCamCallbacks>& remoteCallback,
std::shared_ptr<CameraServiceProxyWrapper> cameraServiceProxyWrapper,
std::shared_ptr<AttributionAndPermissionUtils> attributionAndPermissionUtils,
- const std::string& clientPackageName,
- bool systemNativeClient,
- const std::optional<std::string>& clientFeatureId,
- const std::string& cameraId,
- int api1CameraId,
- int cameraFacing,
- int sensorOrientation,
- int clientPid,
- uid_t clientUid,
- int servicePid,
- bool overrideForPerfClass,
- int rotationOverride,
+ const AttributionSourceState& clientAttribution, int callingPid,
+ bool systemNativeClient, const std::string& cameraId, int api1CameraId,
+ int cameraFacing, int sensorOrientation, int servicePid,
+ bool overrideForPerfClass, int rotationOverride, bool sharedMode,
bool legacyClient = false);
virtual ~Camera2ClientBase();
@@ -97,6 +89,7 @@
virtual void notifyPrepared(int streamId);
virtual void notifyRequestQueueEmpty();
virtual void notifyRepeatingRequestError(long lastFrameNumber);
+ virtual void notifyClientSharedAccessPriorityChanged(bool primaryClient) override;
void notifyIdleWithUserTag(int64_t requestCount, int64_t resultErrorCount,
bool deviceError,
diff --git a/services/camera/libcameraservice/common/CameraDeviceBase.h b/services/camera/libcameraservice/common/CameraDeviceBase.h
index 9c8f5ad..cfedf0c 100644
--- a/services/camera/libcameraservice/common/CameraDeviceBase.h
+++ b/services/camera/libcameraservice/common/CameraDeviceBase.h
@@ -44,9 +44,14 @@
namespace camera3 {
+// TODO: Remove this once the GFX native dataspace
+// dependencies are available
+enum { HEIC_ULTRAHDR, ADATASPACE_HEIF_ULTRAHDR = 0x1006 };
+
typedef enum camera_stream_configuration_mode {
CAMERA_STREAM_CONFIGURATION_NORMAL_MODE = 0,
CAMERA_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE = 1,
+ CAMERA_STREAM_CONFIGURATION_SHARED_MODE = 2,
CAMERA_VENDOR_STREAM_CONFIGURATION_MODE_START = 0x8000
} camera_stream_configuration_mode_t;
@@ -68,6 +73,7 @@
using camera3::camera_request_template_t;;
using camera3::camera_stream_configuration_mode_t;
using camera3::camera_stream_rotation_t;
+using camera3::SurfaceHolder;
class CameraProviderManager;
@@ -92,6 +98,7 @@
virtual status_t initialize(sp<CameraProviderManager> manager,
const std::string& monitorTags) = 0;
virtual status_t disconnect() = 0;
+ virtual status_t disconnectClient(int) {return OK;};
virtual status_t dump(int fd, const Vector<String16> &args) = 0;
virtual status_t startWatchingTags(const std::string &tags) = 0;
@@ -200,7 +207,7 @@
* For HAL_PIXEL_FORMAT_BLOB formats, the width and height should be the
* logical dimensions of the buffer, not the number of bytes.
*/
- virtual status_t createStream(const std::vector<sp<Surface>>& consumers,
+ virtual status_t createStream(const std::vector<SurfaceHolder>& consumers,
bool hasDeferredConsumer, uint32_t width, uint32_t height, int format,
android_dataspace dataSpace, camera_stream_rotation_t rotation, int *id,
const std::string& physicalCameraId,
@@ -212,7 +219,6 @@
int64_t dynamicProfile = ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD,
int64_t streamUseCase = ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_DEFAULT,
int timestampBase = OutputConfiguration::TIMESTAMP_BASE_DEFAULT,
- int mirrorMode = OutputConfiguration::MIRROR_MODE_AUTO,
int32_t colorSpace = ANDROID_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP_UNSPECIFIED,
bool useReadoutTimestamp = false)
= 0;
@@ -286,6 +292,33 @@
*/
virtual status_t deleteStream(int id) = 0;
+
+ /**
+ * This function is responsible for configuring camera streams at the start of a session.
+ * In shared session mode, where multiple clients may access the camera, camera service
+ * applies a predetermined shared session configuration. If the camera is opened in non-shared
+ * mode, this function is a no-op.
+ */
+ virtual status_t beginConfigure() = 0;
+
+ /**
+ * In shared session mode, this function retrieves the stream ID associated with a specific
+ * output configuration.
+ */
+ virtual status_t getSharedStreamId(const OutputConfiguration &config, int *streamId) = 0;
+
+ /**
+ * In shared session mode, this function add surfaces to an existing shared stream ID.
+ */
+ virtual status_t addSharedSurfaces(int streamId,
+ const std::vector<android::camera3::OutputStreamInfo> &outputInfo,
+ const std::vector<SurfaceHolder>& surfaces, std::vector<int> *surfaceIds = nullptr) = 0;
+
+ /**
+ * In shared session mode, this function remove surfaces from an existing shared stream ID.
+ */
+ virtual status_t removeSharedSurfaces(int streamId, const std::vector<size_t> &surfaceIds) = 0;
+
/**
* Take the currently-defined set of streams and configure the HAL to use
* them. This is a long-running operation (may be several hundered ms).
@@ -404,12 +437,12 @@
* Set the deferred consumer surface and finish the rest of the stream configuration.
*/
virtual status_t setConsumerSurfaces(int streamId,
- const std::vector<sp<Surface>>& consumers, std::vector<int> *surfaceIds /*out*/) = 0;
+ const std::vector<SurfaceHolder>& consumers, std::vector<int> *surfaceIds /*out*/) = 0;
/**
* Update a given stream.
*/
- virtual status_t updateStream(int streamId, const std::vector<sp<Surface>> &newSurfaces,
+ virtual status_t updateStream(int streamId, const std::vector<SurfaceHolder> &newSurfaces,
const std::vector<android::camera3::OutputStreamInfo> &outputInfo,
const std::vector<size_t> &removedSurfaceIds,
KeyedVector<sp<Surface>, size_t> *outputMap/*out*/) = 0;
diff --git a/services/camera/libcameraservice/common/CameraProviderManager.cpp b/services/camera/libcameraservice/common/CameraProviderManager.cpp
index 51f06cb..a8d7480 100644
--- a/services/camera/libcameraservice/common/CameraProviderManager.cpp
+++ b/services/camera/libcameraservice/common/CameraProviderManager.cpp
@@ -14,6 +14,7 @@
* limitations under the License.
*/
+#include "system/camera_metadata.h"
#include "system/graphics-base-v1.0.h"
#include "system/graphics-base-v1.1.h"
#define LOG_TAG "CameraProviderManager"
@@ -62,6 +63,7 @@
using namespace camera3::SessionConfigurationUtils;
using std::literals::chrono_literals::operator""s;
using hardware::camera2::utils::CameraIdAndSessionConfiguration;
+using hardware::camera2::params::OutputConfiguration;
namespace flags = com::android::internal::camera::flags;
namespace vd_flags = android::companion::virtualdevice::flags;
@@ -76,6 +78,10 @@
const float CameraProviderManager::kDepthARTolerance = .1f;
const bool CameraProviderManager::kFrameworkJpegRDisabled =
property_get_bool("ro.camera.disableJpegR", false);
+const bool CameraProviderManager::kFrameworkHeicUltraHDRDisabled =
+ property_get_bool("ro.camera.disableHeicUltraHDR", false);
+const bool CameraProviderManager::kFrameworkHeicAllowSWCodecs =
+ property_get_bool("ro.camera.enableSWHEVC", false);
CameraProviderManager::HidlServiceInteractionProxyImpl
CameraProviderManager::sHidlServiceInteractionProxy{};
@@ -1246,6 +1252,169 @@
return false;
}
+status_t CameraProviderManager::ProviderInfo::DeviceInfo3::deriveHeicUltraHDRTags(
+ bool maxResolution) {
+ if (!flags::camera_heif_gainmap() || kFrameworkHeicUltraHDRDisabled ||
+ mCompositeHeicUltraHDRDisabled ||
+ !camera3::HeicCompositeStream::isInMemoryTempFileSupported()) {
+ return OK;
+ }
+
+ const int32_t scalerSizesTag =
+ SessionConfigurationUtils::getAppropriateModeTag(
+ ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS, maxResolution);
+ const int32_t scalerMinFrameDurationsTag = SessionConfigurationUtils::getAppropriateModeTag(
+ ANDROID_SCALER_AVAILABLE_MIN_FRAME_DURATIONS, maxResolution);
+ const int32_t scalerStallDurationsTag =
+ SessionConfigurationUtils::getAppropriateModeTag(
+ ANDROID_SCALER_AVAILABLE_STALL_DURATIONS, maxResolution);
+
+ const int32_t heicUltraHDRSizesTag =
+ SessionConfigurationUtils::getAppropriateModeTag(
+ ANDROID_HEIC_AVAILABLE_HEIC_ULTRA_HDR_STREAM_CONFIGURATIONS, maxResolution);
+ const int32_t heicUltraHDRStallDurationsTag =
+ SessionConfigurationUtils::getAppropriateModeTag(
+ ANDROID_HEIC_AVAILABLE_HEIC_ULTRA_HDR_STALL_DURATIONS, maxResolution);
+ const int32_t heicUltraHDRFrameDurationsTag =
+ SessionConfigurationUtils::getAppropriateModeTag(
+ ANDROID_HEIC_AVAILABLE_HEIC_ULTRA_HDR_MIN_FRAME_DURATIONS, maxResolution);
+
+ auto& c = mCameraCharacteristics;
+ std::vector<std::tuple<size_t, size_t>> supportedP010Sizes, filteredSizes;
+ auto capabilities = c.find(ANDROID_REQUEST_AVAILABLE_CAPABILITIES);
+ if (capabilities.count == 0) {
+ ALOGE("%s: Supported camera capabilities is empty!", __FUNCTION__);
+ return BAD_VALUE;
+ }
+
+ auto end = capabilities.data.u8 + capabilities.count;
+ bool isTenBitOutputSupported = std::find(capabilities.data.u8, end,
+ ANDROID_REQUEST_AVAILABLE_CAPABILITIES_DYNAMIC_RANGE_TEN_BIT) != end;
+ if (!isTenBitOutputSupported) {
+ // No 10-bit support, nothing more to do.
+ return OK;
+ }
+
+ getSupportedSizes(c, scalerSizesTag,
+ static_cast<android_pixel_format_t>(HAL_PIXEL_FORMAT_YCBCR_P010), &supportedP010Sizes);
+ auto it = supportedP010Sizes.begin();
+ if (supportedP010Sizes.empty()) {
+ // Nothing to do in this case.
+ return OK;
+ }
+
+ std::vector<int32_t> heicUltraHDREntries;
+ int64_t stall = 0;
+ bool useHeic = false;
+ bool useGrid = false;
+ for (const auto& it : supportedP010Sizes) {
+ int32_t width = std::get<0>(it);
+ int32_t height = std::get<1>(it);
+ int32_t gainmapWidth = std::get<0>(it) / HeicCompositeStream::kGainmapScale;
+ int32_t gainmapHeight = std::get<1>(it) / HeicCompositeStream::kGainmapScale;
+ // Support gainmap sizes that are sufficiently aligned so CPU specific copy
+ // optimizations can be utilized without side effects.
+ if (((gainmapWidth % 64) == 0) && ((gainmapHeight % 2) == 0) &&
+ camera3::HeicCompositeStream::isSizeSupportedByHeifEncoder(width, height,
+ &useHeic, &useGrid, &stall, nullptr /*hevcName*/,
+ kFrameworkHeicAllowSWCodecs) &&
+ camera3::HeicCompositeStream::isSizeSupportedByHeifEncoder(gainmapWidth,
+ gainmapHeight, &useHeic, &useGrid, &stall, nullptr /*hevcName*/,
+ kFrameworkHeicAllowSWCodecs)) {
+ int32_t entry[4] = {HAL_PIXEL_FORMAT_BLOB, static_cast<int32_t> (std::get<0>(it)),
+ static_cast<int32_t> (std::get<1>(it)),
+ ANDROID_HEIC_AVAILABLE_HEIC_ULTRA_HDR_STREAM_CONFIGURATIONS_OUTPUT };
+ heicUltraHDREntries.insert(heicUltraHDREntries.end(), entry, entry + 4);
+ filteredSizes.push_back(it);
+ }
+ }
+
+ std::vector<int64_t> heicUltraHDRMinDurations, heicUltraHDRStallDurations;
+ auto ret = deriveBlobDurationEntries(c, maxResolution, filteredSizes,
+ &heicUltraHDRStallDurations, &heicUltraHDRMinDurations);
+ if (ret != OK) {
+ return ret;
+ }
+
+ return insertStreamConfigTags(heicUltraHDRSizesTag, heicUltraHDRFrameDurationsTag,
+ heicUltraHDRStallDurationsTag, heicUltraHDREntries,
+ heicUltraHDRMinDurations, heicUltraHDRStallDurations, &c);
+}
+
+status_t CameraProviderManager::ProviderInfo::DeviceInfo3::insertStreamConfigTags(
+ int32_t sizeTag, int32_t minFrameDurationTag, int32_t stallDurationTag,
+ const std::vector<int32_t>& sizeEntries,
+ const std::vector<int64_t>& minFrameDurationEntries,
+ const std::vector<int64_t>& stallDurationEntries, CameraMetadata* c /*out*/) {
+ std::vector<int32_t> supportedChTags;
+ auto chTags = c->find(ANDROID_REQUEST_AVAILABLE_CHARACTERISTICS_KEYS);
+ if (chTags.count == 0) {
+ ALOGE("%s: No supported camera characteristics keys!", __FUNCTION__);
+ return BAD_VALUE;
+ }
+ supportedChTags.reserve(chTags.count + 3);
+ supportedChTags.insert(supportedChTags.end(), chTags.data.i32, chTags.data.i32 + chTags.count);
+ supportedChTags.push_back(sizeTag);
+ supportedChTags.push_back(minFrameDurationTag);
+ supportedChTags.push_back(stallDurationTag);
+ c->update(sizeTag, sizeEntries.data(), sizeEntries.size());
+ c->update(minFrameDurationTag, minFrameDurationEntries.data(), minFrameDurationEntries.size());
+ c->update(stallDurationTag, stallDurationEntries.data(), stallDurationEntries.size());
+ c->update(ANDROID_REQUEST_AVAILABLE_CHARACTERISTICS_KEYS, supportedChTags.data(),
+ supportedChTags.size());
+
+ return OK;
+}
+
+status_t CameraProviderManager::ProviderInfo::DeviceInfo3::deriveBlobDurationEntries(
+ const CameraMetadata& c, bool maxResolution,
+ const std::vector<std::tuple<size_t, size_t>>& filteredSizes,
+ std::vector<int64_t>* filteredStallDurations /*out*/,
+ std::vector<int64_t>* filteredMinDurations /*out*/) {
+ std::vector<int64_t> blobMinDurations, blobStallDurations;
+ const int32_t scalerMinFrameDurationsTag = SessionConfigurationUtils::getAppropriateModeTag(
+ ANDROID_SCALER_AVAILABLE_MIN_FRAME_DURATIONS, maxResolution);
+ const int32_t scalerStallDurationsTag = SessionConfigurationUtils::getAppropriateModeTag(
+ ANDROID_SCALER_AVAILABLE_STALL_DURATIONS, maxResolution);
+ // We use the jpeg stall and min frame durations to approximate the respective Heic UltraHDR
+ // durations.
+ getSupportedDurations(c, scalerMinFrameDurationsTag, HAL_PIXEL_FORMAT_BLOB, filteredSizes,
+ &blobMinDurations);
+ getSupportedDurations(c, scalerStallDurationsTag, HAL_PIXEL_FORMAT_BLOB, filteredSizes,
+ &blobStallDurations);
+ if (blobStallDurations.empty() || blobMinDurations.empty() ||
+ filteredSizes.size() != blobMinDurations.size() ||
+ blobMinDurations.size() != blobStallDurations.size()) {
+ ALOGE("%s: Unexpected number of available blob durations! %zu vs. %zu with "
+ "filteredSizes size: %zu",
+ __FUNCTION__, blobMinDurations.size(), blobStallDurations.size(),
+ filteredSizes.size());
+ return BAD_VALUE;
+ }
+
+ auto itDuration = blobMinDurations.begin();
+ auto itSize = filteredSizes.begin();
+ while (itDuration != blobMinDurations.end()) {
+ int64_t entry[4] = {HAL_PIXEL_FORMAT_BLOB, static_cast<int32_t>(std::get<0>(*itSize)),
+ static_cast<int32_t>(std::get<1>(*itSize)), *itDuration};
+ filteredMinDurations->insert(filteredMinDurations->end(), entry, entry + 4);
+ itDuration++;
+ itSize++;
+ }
+
+ itDuration = blobStallDurations.begin();
+ itSize = filteredSizes.begin();
+ while (itDuration != blobStallDurations.end()) {
+ int64_t entry[4] = {HAL_PIXEL_FORMAT_BLOB, static_cast<int32_t>(std::get<0>(*itSize)),
+ static_cast<int32_t>(std::get<1>(*itSize)), *itDuration};
+ filteredStallDurations->insert(filteredStallDurations->end(), entry, entry + 4);
+ itDuration++;
+ itSize++;
+ }
+
+ return OK;
+}
+
status_t CameraProviderManager::ProviderInfo::DeviceInfo3::deriveJpegRTags(bool maxResolution) {
if (kFrameworkJpegRDisabled || mCompositeJpegRDisabled) {
return OK;
@@ -1271,13 +1440,6 @@
ANDROID_JPEGR_AVAILABLE_JPEG_R_MIN_FRAME_DURATIONS, maxResolution);
auto& c = mCameraCharacteristics;
- std::vector<int32_t> supportedChTags;
- auto chTags = c.find(ANDROID_REQUEST_AVAILABLE_CHARACTERISTICS_KEYS);
- if (chTags.count == 0) {
- ALOGE("%s: No supported camera characteristics keys!", __FUNCTION__);
- return BAD_VALUE;
- }
-
std::vector<std::tuple<size_t, size_t>> supportedP010Sizes, supportedBlobSizes;
auto capabilities = c.find(ANDROID_REQUEST_AVAILABLE_CAPABILITIES);
if (capabilities.count == 0) {
@@ -1331,54 +1493,19 @@
jpegREntries.insert(jpegREntries.end(), entry, entry + 4);
}
- std::vector<int64_t> blobMinDurations, blobStallDurations;
std::vector<int64_t> jpegRMinDurations, jpegRStallDurations;
-
- // We use the jpeg stall and min frame durations to approximate the respective jpeg/r
- // durations.
- getSupportedDurations(c, scalerMinFrameDurationsTag, HAL_PIXEL_FORMAT_BLOB,
- supportedP010Sizes, &blobMinDurations);
- getSupportedDurations(c, scalerStallDurationsTag, HAL_PIXEL_FORMAT_BLOB,
- supportedP010Sizes, &blobStallDurations);
- if (blobStallDurations.empty() || blobMinDurations.empty() ||
- supportedP010Sizes.size() != blobMinDurations.size() ||
- blobMinDurations.size() != blobStallDurations.size()) {
- ALOGE("%s: Unexpected number of available blob durations! %zu vs. %zu with "
- "supportedP010Sizes size: %zu", __FUNCTION__, blobMinDurations.size(),
- blobStallDurations.size(), supportedP010Sizes.size());
- return BAD_VALUE;
+ auto ret = deriveBlobDurationEntries(c, maxResolution, supportedP010Sizes, &jpegRStallDurations,
+ &jpegRMinDurations);
+ if (ret != OK) {
+ return ret;
}
- auto itDuration = blobMinDurations.begin();
- auto itSize = supportedP010Sizes.begin();
- while (itDuration != blobMinDurations.end()) {
- int64_t entry[4] = {HAL_PIXEL_FORMAT_BLOB, static_cast<int32_t> (std::get<0>(*itSize)),
- static_cast<int32_t> (std::get<1>(*itSize)), *itDuration};
- jpegRMinDurations.insert(jpegRMinDurations.end(), entry, entry + 4);
- itDuration++; itSize++;
+ ret = insertStreamConfigTags(jpegRSizesTag, jpegRMinFrameDurationsTag, jpegRStallDurationsTag,
+ jpegREntries, jpegRMinDurations, jpegRStallDurations, &c);
+ if (ret != OK) {
+ return ret;
}
- itDuration = blobStallDurations.begin();
- itSize = supportedP010Sizes.begin();
- while (itDuration != blobStallDurations.end()) {
- int64_t entry[4] = {HAL_PIXEL_FORMAT_BLOB, static_cast<int32_t> (std::get<0>(*itSize)),
- static_cast<int32_t> (std::get<1>(*itSize)), *itDuration};
- jpegRStallDurations.insert(jpegRStallDurations.end(), entry, entry + 4);
- itDuration++; itSize++;
- }
-
- supportedChTags.reserve(chTags.count + 3);
- supportedChTags.insert(supportedChTags.end(), chTags.data.i32,
- chTags.data.i32 + chTags.count);
- supportedChTags.push_back(jpegRSizesTag);
- supportedChTags.push_back(jpegRMinFrameDurationsTag);
- supportedChTags.push_back(jpegRStallDurationsTag);
- c.update(jpegRSizesTag, jpegREntries.data(), jpegREntries.size());
- c.update(jpegRMinFrameDurationsTag, jpegRMinDurations.data(), jpegRMinDurations.size());
- c.update(jpegRStallDurationsTag, jpegRStallDurations.data(), jpegRStallDurations.size());
- c.update(ANDROID_REQUEST_AVAILABLE_CHARACTERISTICS_KEYS, supportedChTags.data(),
- supportedChTags.size());
-
auto colorSpaces = c.find(ANDROID_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP);
if (colorSpaces.count > 0 && !maxResolution) {
bool displayP3Support = false;
@@ -1772,6 +1899,36 @@
return res;
}
+status_t CameraProviderManager::ProviderInfo::DeviceInfo3::addAePriorityModeTags() {
+ status_t res = OK;
+ auto& c = mCameraCharacteristics;
+
+ auto entry = c.find(ANDROID_CONTROL_AE_AVAILABLE_PRIORITY_MODES);
+ if (entry.count != 0) {
+ return res;
+ }
+
+ std::vector<int32_t> supportedChTags;
+ auto chTags = c.find(ANDROID_REQUEST_AVAILABLE_CHARACTERISTICS_KEYS);
+ if (chTags.count == 0) {
+ ALOGE("%s: No supported camera characteristics keys!", __FUNCTION__);
+ return BAD_VALUE;
+ }
+
+ std::vector<uint8_t> aePriorityAvailableModes = {
+ ANDROID_CONTROL_AE_PRIORITY_MODE_OFF };
+ supportedChTags.reserve(chTags.count + 1);
+ supportedChTags.insert(supportedChTags.end(), chTags.data.i32,
+ chTags.data.i32 + chTags.count);
+ supportedChTags.push_back(ANDROID_CONTROL_AE_AVAILABLE_PRIORITY_MODES);
+ c.update(ANDROID_CONTROL_AE_AVAILABLE_PRIORITY_MODES,
+ aePriorityAvailableModes.data(), aePriorityAvailableModes.size());
+ c.update(ANDROID_REQUEST_AVAILABLE_CHARACTERISTICS_KEYS, supportedChTags.data(),
+ supportedChTags.size());
+
+ return res;
+}
+
status_t CameraProviderManager::ProviderInfo::DeviceInfo3::addPreCorrectionActiveArraySize() {
status_t res = OK;
auto& c = mCameraCharacteristics;
@@ -1833,6 +1990,67 @@
return res;
}
+status_t CameraProviderManager::ProviderInfo::DeviceInfo3::addColorCorrectionAvailableModesTag(
+ CameraMetadata& c) {
+ status_t res = OK;
+
+ // The COLOR_CORRECTION_AVAILABLE_MODES key advertises the
+ // supported color correction modes. Previously, if color correction was
+ // supported (COLOR_CORRECTION_MODE was not null), it was assumed
+ // that all existing options, TRANSFORM_MATRIX, FAST, and HIGH_QUALITY, were supported.
+ // However, a new optional mode, CCT, has been introduced. To indicate
+ // whether CCT is supported, the camera device must now explicitly list all
+ // available modes using the COLOR_CORRECTION_AVAILABLE_MODES key.
+ // If the camera device doesn't set COLOR_CORRECTION_AVAILABLE_MODES,
+ // this code falls back to checking for the COLOR_CORRECTION_MODE key.
+ // If present, this adds the required supported modes TRANSFORM_MATRIX,
+ // FAST, HIGH_QUALITY.
+ auto entry = c.find(ANDROID_COLOR_CORRECTION_AVAILABLE_MODES);
+ if (entry.count != 0) {
+ return res;
+ }
+
+ auto reqKeys = c.find(ANDROID_REQUEST_AVAILABLE_REQUEST_KEYS);
+ if (reqKeys.count == 0) {
+ ALOGE("%s: No supported camera request keys!", __FUNCTION__);
+ return BAD_VALUE;
+ }
+
+ bool colorCorrectionModeAvailable = false;
+ for (size_t i = 0; i < reqKeys.count; i++) {
+ if (reqKeys.data.i32[i] == ANDROID_COLOR_CORRECTION_MODE) {
+ colorCorrectionModeAvailable = true;
+ break;
+ }
+ }
+
+ if (!colorCorrectionModeAvailable) {
+ return res;
+ }
+
+ std::vector<int32_t> supportedChTags;
+ auto chTags = c.find(ANDROID_REQUEST_AVAILABLE_CHARACTERISTICS_KEYS);
+ if (chTags.count == 0) {
+ ALOGE("%s: No supported camera characteristics keys!", __FUNCTION__);
+ return BAD_VALUE;
+ }
+
+ std::vector<uint8_t> colorCorrectionAvailableModes = {
+ ANDROID_COLOR_CORRECTION_MODE_TRANSFORM_MATRIX,
+ ANDROID_COLOR_CORRECTION_MODE_FAST,
+ ANDROID_COLOR_CORRECTION_MODE_HIGH_QUALITY };
+ supportedChTags.reserve(chTags.count + 1);
+ supportedChTags.insert(supportedChTags.end(), chTags.data.i32,
+ chTags.data.i32 + chTags.count);
+ supportedChTags.push_back(ANDROID_COLOR_CORRECTION_AVAILABLE_MODES);
+ c.update(ANDROID_COLOR_CORRECTION_AVAILABLE_MODES,
+ colorCorrectionAvailableModes.data(), colorCorrectionAvailableModes.size());
+ c.update(ANDROID_REQUEST_AVAILABLE_CHARACTERISTICS_KEYS, supportedChTags.data(),
+ supportedChTags.size());
+
+ return res;
+}
+
status_t CameraProviderManager::ProviderInfo::DeviceInfo3::addSessionConfigQueryVersionTag() {
sp<ProviderInfo> parentProvider = mParentProvider.promote();
if (parentProvider == nullptr) {
@@ -1841,18 +2059,98 @@
int versionCode = ANDROID_INFO_SESSION_CONFIGURATION_QUERY_VERSION_UPSIDE_DOWN_CAKE;
IPCTransport ipcTransport = parentProvider->getIPCTransport();
- int deviceVersion = HARDWARE_DEVICE_API_VERSION(mVersion.get_major(), mVersion.get_minor());
- if (ipcTransport == IPCTransport::AIDL
- && deviceVersion >= CAMERA_DEVICE_API_VERSION_1_3) {
- versionCode = ANDROID_INFO_SESSION_CONFIGURATION_QUERY_VERSION_VANILLA_ICE_CREAM;
+ auto& c = mCameraCharacteristics;
+ status_t res = OK;
+ if (ipcTransport != IPCTransport::AIDL) {
+ res = c.update(ANDROID_INFO_SESSION_CONFIGURATION_QUERY_VERSION, &versionCode, 1);
+ mSessionConfigQueryVersion = versionCode;
+ return res;
}
- auto& c = mCameraCharacteristics;
- status_t res = c.update(ANDROID_INFO_SESSION_CONFIGURATION_QUERY_VERSION, &versionCode, 1);
+ int deviceVersion = HARDWARE_DEVICE_API_VERSION(mVersion.get_major(), mVersion.get_minor());
+ if (deviceVersion == CAMERA_DEVICE_API_VERSION_1_3) {
+ versionCode = ANDROID_INFO_SESSION_CONFIGURATION_QUERY_VERSION_VANILLA_ICE_CREAM;
+ } else if (deviceVersion >= CAMERA_DEVICE_API_VERSION_1_4) {
+ if (flags::feature_combination_baklava()) {
+ versionCode = ANDROID_INFO_SESSION_CONFIGURATION_QUERY_VERSION_BAKLAVA;
+ } else {
+ versionCode = ANDROID_INFO_SESSION_CONFIGURATION_QUERY_VERSION_VANILLA_ICE_CREAM;
+ }
+ }
+ res = c.update(ANDROID_INFO_SESSION_CONFIGURATION_QUERY_VERSION, &versionCode, 1);
mSessionConfigQueryVersion = versionCode;
return res;
}
+bool CameraProviderManager::ProviderInfo::DeviceInfo3::isAutomotiveDevice() {
+ // Checks the property ro.hardware.type and returns true if it is
+ // automotive.
+ char value[PROPERTY_VALUE_MAX] = {0};
+ property_get("ro.hardware.type", value, "");
+ return strncmp(value, "automotive", PROPERTY_VALUE_MAX) == 0;
+}
+
+status_t CameraProviderManager::ProviderInfo::DeviceInfo3::addSharedSessionConfigurationTags() {
+ status_t res = OK;
+ if (flags::camera_multi_client()) {
+ const int32_t sharedColorSpaceTag = ANDROID_SHARED_SESSION_COLOR_SPACE;
+ const int32_t sharedOutputConfigurationsTag = ANDROID_SHARED_SESSION_OUTPUT_CONFIGURATIONS;
+ auto& c = mCameraCharacteristics;
+ uint8_t colorSpace = 0;
+
+ res = c.update(sharedColorSpaceTag, &colorSpace, 1);
+
+        // TODO: b/372321187 Hardcoding the shared session configuration. Update the code to
+        // take these values from XML instead.
+ std::vector<int64_t> sharedOutputConfigEntries;
+ int64_t surfaceType1 = OutputConfiguration::SURFACE_TYPE_IMAGE_READER;
+ int64_t width = 1280;
+ int64_t height = 800;
+ int64_t format1 = HAL_PIXEL_FORMAT_RGBA_8888;
+ int64_t mirrorMode = OutputConfiguration::MIRROR_MODE_AUTO;
+ int64_t timestampBase = OutputConfiguration::TIMESTAMP_BASE_DEFAULT;
+ int64_t usage1 = 3;
+ int64_t dataspace = 0;
+ int64_t useReadoutTimestamp = 0;
+ int64_t streamUseCase = ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_DEFAULT;
+ int64_t physicalCamIdLen = 0;
+
+ // Stream 1 configuration hardcoded
+ sharedOutputConfigEntries.push_back(surfaceType1);
+ sharedOutputConfigEntries.push_back(width);
+ sharedOutputConfigEntries.push_back(height);
+ sharedOutputConfigEntries.push_back(format1);
+ sharedOutputConfigEntries.push_back(mirrorMode);
+ sharedOutputConfigEntries.push_back(useReadoutTimestamp);
+ sharedOutputConfigEntries.push_back(timestampBase);
+ sharedOutputConfigEntries.push_back(dataspace);
+ sharedOutputConfigEntries.push_back(usage1);
+ sharedOutputConfigEntries.push_back(streamUseCase);
+ sharedOutputConfigEntries.push_back(physicalCamIdLen);
+
+ // Stream 2 configuration hardcoded
+ int64_t surfaceType2 = OutputConfiguration::SURFACE_TYPE_SURFACE_VIEW;
+ int64_t format2 = HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED;
+ int64_t usage2 = 0;
+
+ sharedOutputConfigEntries.push_back(surfaceType2);
+ sharedOutputConfigEntries.push_back(width);
+ sharedOutputConfigEntries.push_back(height);
+ sharedOutputConfigEntries.push_back(format2);
+ sharedOutputConfigEntries.push_back(mirrorMode);
+ sharedOutputConfigEntries.push_back(useReadoutTimestamp);
+ sharedOutputConfigEntries.push_back(timestampBase);
+ sharedOutputConfigEntries.push_back(dataspace);
+ sharedOutputConfigEntries.push_back(usage2);
+ sharedOutputConfigEntries.push_back(streamUseCase);
+ sharedOutputConfigEntries.push_back(physicalCamIdLen);
+
+ res = c.update(sharedOutputConfigurationsTag, sharedOutputConfigEntries.data(),
+ sharedOutputConfigEntries.size());
+ }
+ return res;
+}
+
status_t CameraProviderManager::ProviderInfo::DeviceInfo3::removeAvailableKeys(
CameraMetadata& c, const std::vector<uint32_t>& keys, uint32_t keyTag) {
status_t res = OK;
@@ -1915,7 +2213,7 @@
bool useGrid = false;
if (camera3::HeicCompositeStream::isSizeSupportedByHeifEncoder(
halStreamConfigs.data.i32[i+1], halStreamConfigs.data.i32[i+2],
- &useHeic, &useGrid, &stall)) {
+ &useHeic, &useGrid, &stall, nullptr /*hevcName*/, kFrameworkHeicAllowSWCodecs)) {
if (useGrid != (format == HAL_PIXEL_FORMAT_YCBCR_420_888)) {
continue;
}
@@ -2112,14 +2410,8 @@
const std::string& providerName, const sp<ProviderInfo>& providerInfo) {
using aidl::android::hardware::camera::provider::ICameraProvider;
- std::shared_ptr<ICameraProvider> interface;
- if (flags::delay_lazy_hal_instantiation()) {
- // Only get remote instance if already running. Lazy Providers will be
- // woken up later.
- interface = mAidlServiceProxy->tryGetService(providerName);
- } else {
- interface = mAidlServiceProxy->getService(providerName);
- }
+ // Only get remote instance if already running. Lazy Providers will be woken up later.
+ std::shared_ptr<ICameraProvider> interface = mAidlServiceProxy->tryGetService(providerName);
if (interface == nullptr) {
ALOGW("%s: AIDL Camera provider HAL '%s' is not actually available", __FUNCTION__,
@@ -2806,7 +3098,7 @@
hardware::CameraInfo *info) const {
if (info == nullptr) return BAD_VALUE;
- bool freeform_compat_enabled = wm_flags::camera_compat_for_freeform();
+ bool freeform_compat_enabled = wm_flags::enable_camera_compat_for_desktop_windowing();
if (!freeform_compat_enabled &&
rotationOverride > hardware::ICameraService::ROTATION_OVERRIDE_OVERRIDE_TO_PORTRAIT) {
ALOGW("Camera compat freeform flag disabled but rotation override is %d", rotationOverride);
diff --git a/services/camera/libcameraservice/common/CameraProviderManager.h b/services/camera/libcameraservice/common/CameraProviderManager.h
index b686a58..11985f5 100644
--- a/services/camera/libcameraservice/common/CameraProviderManager.h
+++ b/services/camera/libcameraservice/common/CameraProviderManager.h
@@ -88,6 +88,7 @@
#define CAMERA_DEVICE_API_VERSION_1_0 HARDWARE_DEVICE_API_VERSION(1, 0)
#define CAMERA_DEVICE_API_VERSION_1_2 HARDWARE_DEVICE_API_VERSION(1, 2)
#define CAMERA_DEVICE_API_VERSION_1_3 HARDWARE_DEVICE_API_VERSION(1, 3)
+#define CAMERA_DEVICE_API_VERSION_1_4 HARDWARE_DEVICE_API_VERSION(1, 4)
#define CAMERA_DEVICE_API_VERSION_3_0 HARDWARE_DEVICE_API_VERSION(3, 0)
#define CAMERA_DEVICE_API_VERSION_3_1 HARDWARE_DEVICE_API_VERSION(3, 1)
#define CAMERA_DEVICE_API_VERSION_3_2 HARDWARE_DEVICE_API_VERSION(3, 2)
@@ -470,6 +471,9 @@
static const float kDepthARTolerance;
static const bool kFrameworkJpegRDisabled;
+ static const bool kFrameworkHeicUltraHDRDisabled;
+ static const bool kFrameworkHeicAllowSWCodecs;
+
private:
// All private members, unless otherwise noted, expect mInterfaceMutex to be locked before use
mutable std::mutex mInterfaceMutex;
@@ -629,6 +633,7 @@
bool hasFlashUnit() const { return mHasFlashUnit; }
bool supportNativeZoomRatio() const { return mSupportNativeZoomRatio; }
bool isCompositeJpegRDisabled() const { return mCompositeJpegRDisabled; }
+ bool isCompositeHeicUltraHDRDisabled() const { return mCompositeHeicUltraHDRDisabled; }
virtual status_t setTorchMode(bool enabled) = 0;
virtual status_t turnOnTorchWithStrengthLevel(int32_t torchStrength) = 0;
virtual status_t getTorchStrengthLevel(int32_t *torchStrength) = 0;
@@ -685,14 +690,15 @@
mParentProvider(parentProvider), mTorchStrengthLevel(0),
mTorchMaximumStrengthLevel(0), mTorchDefaultStrengthLevel(0),
mHasFlashUnit(false), mSupportNativeZoomRatio(false),
- mPublicCameraIds(publicCameraIds), mCompositeJpegRDisabled(false) {}
+ mPublicCameraIds(publicCameraIds), mCompositeJpegRDisabled(false),
+ mCompositeHeicUltraHDRDisabled(false) {}
virtual ~DeviceInfo() {}
protected:
bool mHasFlashUnit; // const after constructor
bool mSupportNativeZoomRatio; // const after constructor
const std::vector<std::string>& mPublicCameraIds;
- bool mCompositeJpegRDisabled;
+ bool mCompositeJpegRDisabled, mCompositeHeicUltraHDRDisabled;
};
std::vector<std::unique_ptr<DeviceInfo>> mDevices;
std::unordered_set<std::string> mUniqueCameraIds;
@@ -757,11 +763,27 @@
status_t addDynamicDepthTags(bool maxResolution = false);
status_t deriveHeicTags(bool maxResolution = false);
status_t deriveJpegRTags(bool maxResolution = false);
+ status_t deriveHeicUltraHDRTags(bool maxResolution = false);
+ status_t deriveBlobDurationEntries(
+ const CameraMetadata& c, bool maxResolution,
+ const std::vector<std::tuple<size_t, size_t>>& filteredSizes,
+ std::vector<int64_t>* filteredStallDurations /*out*/,
+ std::vector<int64_t>* filteredMinDurations /*out*/);
+ status_t insertStreamConfigTags(int32_t sizeTag, int32_t minFrameDurationTag,
+ int32_t stallDurationTag,
+ const std::vector<int32_t>& sizeEntries,
+ const std::vector<int64_t>& minFrameDurationEntries,
+ const std::vector<int64_t>& stallDurationEntries,
+ CameraMetadata* c /*out*/);
status_t addRotateCropTags();
status_t addAutoframingTags();
status_t addPreCorrectionActiveArraySize();
status_t addReadoutTimestampTag(bool readoutTimestampSupported = true);
+ status_t addColorCorrectionAvailableModesTag(CameraMetadata& ch);
+ status_t addAePriorityModeTags();
status_t addSessionConfigQueryVersionTag();
+ status_t addSharedSessionConfigurationTags();
+ bool isAutomotiveDevice();
static void getSupportedSizes(const CameraMetadata& ch, uint32_t tag,
android_pixel_format_t format,
diff --git a/services/camera/libcameraservice/common/aidl/AidlProviderInfo.cpp b/services/camera/libcameraservice/common/aidl/AidlProviderInfo.cpp
index 4bfe11d..88998c6 100644
--- a/services/camera/libcameraservice/common/aidl/AidlProviderInfo.cpp
+++ b/services/camera/libcameraservice/common/aidl/AidlProviderInfo.cpp
@@ -30,6 +30,7 @@
#include "device3/DistortionMapper.h"
#include "device3/ZoomRatioMapper.h"
+#include <utils/AttributionAndPermissionUtils.h>
#include <utils/SessionConfigurationUtils.h>
#include <utils/Trace.h>
@@ -202,7 +203,7 @@
void AidlProviderInfo::binderDied(void *cookie) {
AidlProviderInfo *provider = reinterpret_cast<AidlProviderInfo *>(cookie);
ALOGI("Camera provider '%s' has died; removing it", provider->mProviderInstance.c_str());
- provider->mManager->removeProvider(provider->mProviderInstance);
+ provider->mManager->removeProvider(std::string(provider->mProviderInstance));
}
status_t AidlProviderInfo::setUpVendorTags() {
@@ -320,7 +321,7 @@
if (link != STATUS_OK) {
ALOGW("%s: Unable to link to provider '%s' death notifications",
__FUNCTION__, mProviderName.c_str());
- mManager->removeProvider(mProviderInstance);
+ mManager->removeProvider(std::string(mProviderInstance));
return nullptr;
}
@@ -517,6 +518,8 @@
mCompositeJpegRDisabled = mCameraCharacteristics.exists(
ANDROID_JPEGR_AVAILABLE_JPEG_R_STREAM_CONFIGURATIONS);
+ mCompositeHeicUltraHDRDisabled = mCameraCharacteristics.exists(
+ ANDROID_HEIC_AVAILABLE_HEIC_ULTRA_HDR_STREAM_CONFIGURATIONS);
mSystemCameraKind = getSystemCameraKind();
@@ -548,6 +551,12 @@
ALOGE("%s: Unable to derive Jpeg/R tags based on camera and media capabilities: %s (%d)",
__FUNCTION__, strerror(-res), res);
}
+ res = deriveHeicUltraHDRTags();
+ if (OK != res) {
+ ALOGE("%s: Unable to derive Heic UltraHDR tags based on camera and "
+ "media capabilities: %s (%d)",
+ __FUNCTION__, strerror(-res), res);
+ }
using camera3::SessionConfigurationUtils::supportsUltraHighResolutionCapture;
if (supportsUltraHighResolutionCapture(mCameraCharacteristics)) {
status_t status = addDynamicDepthTags(/*maxResolution*/true);
@@ -567,6 +576,12 @@
ALOGE("%s: Unable to derive Jpeg/R tags based on camera and media capabilities for"
"maximum resolution mode: %s (%d)", __FUNCTION__, strerror(-status), status);
}
+ status = deriveHeicUltraHDRTags(/*maxResolution*/true);
+ if (OK != status) {
+ ALOGE("%s: Unable to derive Heic UltraHDR tags based on camera and "
+ "media capabilities: %s (%d)",
+ __FUNCTION__, strerror(-status), status);
+ }
}
res = addRotateCropTags();
@@ -596,6 +611,22 @@
__FUNCTION__, strerror(-res), res);
}
+ if (flags::color_temperature()) {
+ res = addColorCorrectionAvailableModesTag(mCameraCharacteristics);
+ if (OK != res) {
+ ALOGE("%s: Unable to add COLOR_CORRECTION_AVAILABLE_MODES tag: %s (%d)",
+ __FUNCTION__, strerror(-res), res);
+ }
+ }
+
+ if (flags::ae_priority()) {
+ res = addAePriorityModeTags();
+ if (OK != res) {
+ ALOGE("%s: Unable to add CONTROL_AE_AVAILABLE_PRIORITY_MODES tag: %s (%d)",
+ __FUNCTION__, strerror(-res), res);
+ }
+ }
+
camera_metadata_entry flashAvailable =
mCameraCharacteristics.find(ANDROID_FLASH_INFO_AVAILABLE);
if (flashAvailable.count == 1 &&
@@ -683,6 +714,14 @@
__FUNCTION__, strerror(-res), res);
return;
}
+
+ if (flags::color_temperature()) {
+ res = addColorCorrectionAvailableModesTag(mPhysicalCameraCharacteristics[id]);
+ if (OK != res) {
+ ALOGE("%s: Unable to add COLOR_CORRECTION_AVAILABLE_MODES tag: %s (%d)",
+ __FUNCTION__, strerror(-res), res);
+ }
+ }
}
}
@@ -694,6 +733,10 @@
{ANDROID_CONTROL_VIDEO_STABILIZATION_MODE, ANDROID_CONTROL_AE_TARGET_FPS_RANGE});
}
+ if (flags::camera_multi_client() && isAutomotiveDevice()) {
+ addSharedSessionConfigurationTags();
+ }
+
if (!kEnableLazyHal) {
// Save HAL reference indefinitely
mSavedInterface = interface;
diff --git a/services/camera/libcameraservice/common/hidl/HidlProviderInfo.cpp b/services/camera/libcameraservice/common/hidl/HidlProviderInfo.cpp
index 6cedb04..27ae766 100644
--- a/services/camera/libcameraservice/common/hidl/HidlProviderInfo.cpp
+++ b/services/camera/libcameraservice/common/hidl/HidlProviderInfo.cpp
@@ -391,7 +391,7 @@
__FUNCTION__,
mProviderName.c_str(),
linked.description().c_str());
- mManager->removeProvider(mProviderInstance);
+ mManager->removeProvider(std::string(mProviderInstance));
return nullptr;
} else if (!linked) {
ALOGW("%s: Unable to link to provider '%s' death notifications",
@@ -451,7 +451,7 @@
ALOGW("%s: Unexpected serviceDied cookie %" PRIu64 ", expected %" PRIu32,
__FUNCTION__, cookie, mId);
}
- mManager->removeProvider(mProviderInstance);
+ mManager->removeProvider(std::string(mProviderInstance));
}
std::unique_ptr<CameraProviderManager::ProviderInfo::DeviceInfo>
@@ -675,6 +675,21 @@
ALOGE("%s: Unable to add sensorReadoutTimestamp tag: %s (%d)",
__FUNCTION__, strerror(-res), res);
}
+ if (flags::color_temperature()) {
+ res = addColorCorrectionAvailableModesTag(mCameraCharacteristics);
+ if (OK != res) {
+ ALOGE("%s: Unable to add COLOR_CORRECTION_AVAILABLE_MODES tag: %s (%d)",
+ __FUNCTION__, strerror(-res), res);
+ }
+ }
+
+ if (flags::ae_priority()) {
+ res = addAePriorityModeTags();
+ if (OK != res) {
+ ALOGE("%s: Unable to add CONTROL_AE_AVAILABLE_PRIORITY_MODES tag: %s (%d)",
+ __FUNCTION__, strerror(-res), res);
+ }
+ }
camera_metadata_entry flashAvailable =
mCameraCharacteristics.find(ANDROID_FLASH_INFO_AVAILABLE);
@@ -785,6 +800,14 @@
__FUNCTION__, strerror(-res), res);
return;
}
+
+ if (flags::color_temperature()) {
+ res = addColorCorrectionAvailableModesTag(mPhysicalCameraCharacteristics[id]);
+ if (OK != res) {
+ ALOGE("%s: Unable to add COLOR_CORRECTION_AVAILABLE_MODES tag: %s (%d)",
+ __FUNCTION__, strerror(-res), res);
+ }
+ }
}
}
}
diff --git a/services/camera/libcameraservice/device3/Camera3Device.cpp b/services/camera/libcameraservice/device3/Camera3Device.cpp
index bb54f25..9e89a19 100644
--- a/services/camera/libcameraservice/device3/Camera3Device.cpp
+++ b/services/camera/libcameraservice/device3/Camera3Device.cpp
@@ -44,18 +44,21 @@
#include <utility>
+#include <android/data_space.h>
#include <android-base/stringprintf.h>
#include <sched.h>
#include <utils/Log.h>
#include <utils/Trace.h>
#include <utils/Timers.h>
#include <cutils/properties.h>
+#include <camera/CameraUtils.h>
#include <camera/StringUtils.h>
#include <android-base/properties.h>
#include <android/hardware/camera/device/3.7/ICameraInjectionSession.h>
#include <android/hardware/camera2/ICameraDeviceUser.h>
#include <com_android_internal_camera_flags.h>
+#include <com_android_window_flags.h>
#include "CameraService.h"
#include "aidl/android/hardware/graphics/common/Dataspace.h"
@@ -81,6 +84,8 @@
using namespace android::hardware::cameraservice::utils::conversion::aidl;
namespace flags = com::android::internal::camera::flags;
+namespace wm_flags = com::android::window::flags;
+
namespace android {
Camera3Device::Camera3Device(std::shared_ptr<CameraServiceProxyWrapper>& cameraServiceProxyWrapper,
@@ -1046,13 +1051,13 @@
return BAD_VALUE;
}
- std::vector<sp<Surface>> consumers;
- consumers.push_back(consumer);
+ std::vector<SurfaceHolder> consumers;
+ consumers.push_back(SurfaceHolder{consumer, mirrorMode});
return createStream(consumers, /*hasDeferredConsumer*/ false, width, height,
format, dataSpace, rotation, id, physicalCameraId, sensorPixelModesUsed, surfaceIds,
streamSetId, isShared, isMultiResolution, consumerUsage, dynamicRangeProfile,
- streamUseCase, timestampBase, mirrorMode, colorSpace, useReadoutTimestamp);
+ streamUseCase, timestampBase, colorSpace, useReadoutTimestamp);
}
static bool isRawFormat(int format) {
@@ -1067,14 +1072,14 @@
}
}
-status_t Camera3Device::createStream(const std::vector<sp<Surface>>& consumers,
+status_t Camera3Device::createStream(const std::vector<SurfaceHolder>& consumers,
bool hasDeferredConsumer, uint32_t width, uint32_t height, int format,
android_dataspace dataSpace, camera_stream_rotation_t rotation, int *id,
const std::string& physicalCameraId,
const std::unordered_set<int32_t> &sensorPixelModesUsed,
std::vector<int> *surfaceIds, int streamSetId, bool isShared, bool isMultiResolution,
uint64_t consumerUsage, int64_t dynamicRangeProfile, int64_t streamUseCase,
- int timestampBase, int mirrorMode, int32_t colorSpace, bool useReadoutTimestamp) {
+ int timestampBase, int32_t colorSpace, bool useReadoutTimestamp) {
ATRACE_CALL();
Mutex::Autolock il(mInterfaceLock);
@@ -1083,10 +1088,10 @@
ALOGV("Camera %s: Creating new stream %d: %d x %d, format %d, dataspace %d rotation %d"
" consumer usage %" PRIu64 ", isShared %d, physicalCameraId %s, isMultiResolution %d"
" dynamicRangeProfile 0x%" PRIx64 ", streamUseCase %" PRId64 ", timestampBase %d,"
- " mirrorMode %d, colorSpace %d, useReadoutTimestamp %d",
+ " colorSpace %d, useReadoutTimestamp %d",
mId.c_str(), mNextStreamId, width, height, format, dataSpace, rotation,
consumerUsage, isShared, physicalCameraId.c_str(), isMultiResolution,
- dynamicRangeProfile, streamUseCase, timestampBase, mirrorMode, colorSpace,
+ dynamicRangeProfile, streamUseCase, timestampBase, colorSpace,
useReadoutTimestamp);
status_t res;
@@ -1155,11 +1160,11 @@
return BAD_VALUE;
}
}
- newStream = new Camera3OutputStream(mNextStreamId, consumers[0],
+ newStream = new Camera3OutputStream(mNextStreamId, consumers[0].mSurface,
width, height, blobBufferSize, format, dataSpace, rotation,
mTimestampOffset, physicalCameraId, sensorPixelModesUsed, transport, streamSetId,
isMultiResolution, dynamicRangeProfile, streamUseCase, mDeviceTimeBaseIsRealtime,
- timestampBase, mirrorMode, colorSpace, useReadoutTimestamp);
+ timestampBase, consumers[0].mMirrorMode, colorSpace, useReadoutTimestamp);
} else if (format == HAL_PIXEL_FORMAT_RAW_OPAQUE) {
bool maxResolution =
sensorPixelModesUsed.find(ANDROID_SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION) !=
@@ -1170,34 +1175,34 @@
SET_ERR_L("Invalid RAW opaque buffer size %zd", rawOpaqueBufferSize);
return BAD_VALUE;
}
- newStream = new Camera3OutputStream(mNextStreamId, consumers[0],
+ newStream = new Camera3OutputStream(mNextStreamId, consumers[0].mSurface,
width, height, rawOpaqueBufferSize, format, dataSpace, rotation,
mTimestampOffset, physicalCameraId, sensorPixelModesUsed, transport, streamSetId,
isMultiResolution, dynamicRangeProfile, streamUseCase, mDeviceTimeBaseIsRealtime,
- timestampBase, mirrorMode, colorSpace, useReadoutTimestamp);
+ timestampBase, consumers[0].mMirrorMode, colorSpace, useReadoutTimestamp);
} else if (isShared) {
newStream = new Camera3SharedOutputStream(mNextStreamId, consumers,
width, height, format, consumerUsage, dataSpace, rotation,
mTimestampOffset, physicalCameraId, sensorPixelModesUsed, transport, streamSetId,
mUseHalBufManager, dynamicRangeProfile, streamUseCase, mDeviceTimeBaseIsRealtime,
- timestampBase, mirrorMode, colorSpace, useReadoutTimestamp);
+ timestampBase, colorSpace, useReadoutTimestamp);
} else if (consumers.size() == 0 && hasDeferredConsumer) {
newStream = new Camera3OutputStream(mNextStreamId,
width, height, format, consumerUsage, dataSpace, rotation,
mTimestampOffset, physicalCameraId, sensorPixelModesUsed, transport, streamSetId,
isMultiResolution, dynamicRangeProfile, streamUseCase, mDeviceTimeBaseIsRealtime,
- timestampBase, mirrorMode, colorSpace, useReadoutTimestamp);
+ timestampBase, colorSpace, useReadoutTimestamp);
} else {
- newStream = new Camera3OutputStream(mNextStreamId, consumers[0],
+ newStream = new Camera3OutputStream(mNextStreamId, consumers[0].mSurface,
width, height, format, dataSpace, rotation,
mTimestampOffset, physicalCameraId, sensorPixelModesUsed, transport, streamSetId,
isMultiResolution, dynamicRangeProfile, streamUseCase, mDeviceTimeBaseIsRealtime,
- timestampBase, mirrorMode, colorSpace, useReadoutTimestamp);
+ timestampBase, consumers[0].mMirrorMode, colorSpace, useReadoutTimestamp);
}
size_t consumerCount = consumers.size();
for (size_t i = 0; i < consumerCount; i++) {
- int id = newStream->getSurfaceId(consumers[i]);
+ int id = newStream->getSurfaceId(consumers[i].mSurface);
if (id < 0) {
SET_ERR_L("Invalid surface id");
return BAD_VALUE;
@@ -1205,6 +1210,11 @@
if (surfaceIds != nullptr) {
surfaceIds->push_back(id);
}
+
+ res = deriveAndSetTransformLocked(*newStream, consumers[i].mMirrorMode, id);
+ if (res < 0) {
+ return res;
+ }
}
newStream->setStatusTracker(mStatusTracker);
@@ -1648,7 +1658,7 @@
bool signalPipelineDrain = false;
if (!active &&
(mUseHalBufManager ||
- (flags::session_hal_buf_manager() && mHalBufManagedStreamIds.size() != 0))) {
+ (mHalBufManagedStreamIds.size() != 0))) {
auto streamIds = mOutputStreams.getStreamIds();
if (mStatus == STATUS_ACTIVE) {
mRequestThread->signalPipelineDrain(streamIds);
@@ -2038,7 +2048,7 @@
}
status_t Camera3Device::setConsumerSurfaces(int streamId,
- const std::vector<sp<Surface>>& consumers, std::vector<int> *surfaceIds) {
+ const std::vector<SurfaceHolder>& consumers, std::vector<int> *surfaceIds) {
ATRACE_CALL();
ALOGV("%s: Camera %s: set consumer surface for stream %d",
__FUNCTION__, mId.c_str(), streamId);
@@ -2070,12 +2080,17 @@
}
for (auto &consumer : consumers) {
- int id = stream->getSurfaceId(consumer);
+ int id = stream->getSurfaceId(consumer.mSurface);
if (id < 0) {
CLOGE("Invalid surface id!");
return BAD_VALUE;
}
surfaceIds->push_back(id);
+
+ res = deriveAndSetTransformLocked(*stream, consumer.mMirrorMode, id);
+ if (res != OK) {
+ return res;
+ }
}
if (isDeferred) {
@@ -2101,7 +2116,7 @@
return OK;
}
-status_t Camera3Device::updateStream(int streamId, const std::vector<sp<Surface>> &newSurfaces,
+status_t Camera3Device::updateStream(int streamId, const std::vector<SurfaceHolder> &newSurfaces,
const std::vector<OutputStreamInfo> &outputInfo,
const std::vector<size_t> &removedSurfaceIds, KeyedVector<sp<Surface>, size_t> *outputMap) {
Mutex::Autolock il(mInterfaceLock);
@@ -2131,6 +2146,14 @@
return res;
}
+ for (size_t i = 0; i < outputMap->size(); i++) {
+ res = deriveAndSetTransformLocked(
+ *stream, newSurfaces[i].mMirrorMode, outputMap->valueAt(i));
+ if (res != OK) {
+ return res;
+ }
+ }
+
return res;
}
@@ -2542,6 +2565,8 @@
// always occupy the initial entry.
if ((outputStream->data_space == HAL_DATASPACE_V0_JFIF) ||
(outputStream->data_space ==
+ static_cast<android_dataspace_t>(ADATASPACE_HEIF_ULTRAHDR)) ||
+ (outputStream->data_space ==
static_cast<android_dataspace_t>(
aidl::android::hardware::graphics::common::Dataspace::JPEG_R))) {
bufferSizes[k] = static_cast<uint32_t>(
@@ -2598,25 +2623,23 @@
// It is possible that use hal buffer manager behavior was changed by the
// configureStreams call.
mUseHalBufManager = config.use_hal_buf_manager;
- if (flags::session_hal_buf_manager()) {
- bool prevSessionHalBufManager = (mHalBufManagedStreamIds.size() != 0);
- // It is possible that configureStreams() changed config.hal_buffer_managed_streams
- mHalBufManagedStreamIds = config.hal_buffer_managed_streams;
+ bool prevSessionHalBufManager = (mHalBufManagedStreamIds.size() != 0);
+ // It is possible that configureStreams() changed config.hal_buffer_managed_streams
+ mHalBufManagedStreamIds = config.hal_buffer_managed_streams;
- bool thisSessionHalBufManager = mHalBufManagedStreamIds.size() != 0;
+ bool thisSessionHalBufManager = mHalBufManagedStreamIds.size() != 0;
- if (prevSessionHalBufManager && !thisSessionHalBufManager) {
- mRequestBufferSM.deInit();
- } else if (!prevSessionHalBufManager && thisSessionHalBufManager) {
- res = mRequestBufferSM.initialize(mStatusTracker);
- if (res != OK) {
- SET_ERR_L("%s: Camera %s: RequestBuffer State machine couldn't be initialized!",
- __FUNCTION__, mId.c_str());
- return res;
- }
+ if (prevSessionHalBufManager && !thisSessionHalBufManager) {
+ mRequestBufferSM.deInit();
+ } else if (!prevSessionHalBufManager && thisSessionHalBufManager) {
+ res = mRequestBufferSM.initialize(mStatusTracker);
+ if (res != OK) {
+ SET_ERR_L("%s: Camera %s: RequestBuffer State machine couldn't be initialized!",
+ __FUNCTION__, mId.c_str());
+ return res;
}
- mRequestThread->setHalBufferManagedStreams(mHalBufManagedStreamIds);
}
+ mRequestThread->setHalBufferManagedStreams(mHalBufManagedStreamIds);
// Finish all stream configuration immediately.
// TODO: Try to relax this later back to lazy completion, which should be
@@ -2865,7 +2888,7 @@
bool hasAppCallback, nsecs_t minExpectedDuration, nsecs_t maxExpectedDuration,
bool isFixedFps, const std::set<std::set<std::string>>& physicalCameraIds,
bool isStillCapture, bool isZslCapture, bool rotateAndCropAuto, bool autoframingAuto,
- const std::set<std::string>& cameraIdsWithZoom,
+ const std::set<std::string>& cameraIdsWithZoom, bool useZoomRatio,
const SurfaceMap& outputSurfaces, nsecs_t requestTimeNs) {
ATRACE_CALL();
std::lock_guard<std::mutex> l(mInFlightLock);
@@ -2874,7 +2897,7 @@
res = mInFlightMap.add(frameNumber, InFlightRequest(numBuffers, resultExtras, hasInput,
hasAppCallback, minExpectedDuration, maxExpectedDuration, isFixedFps, physicalCameraIds,
isStillCapture, isZslCapture, rotateAndCropAuto, autoframingAuto, cameraIdsWithZoom,
- requestTimeNs, outputSurfaces));
+ requestTimeNs, useZoomRatio, outputSurfaces));
if (res < 0) return res;
if (mInFlightMap.size() == 1) {
@@ -3021,8 +3044,7 @@
}
bool Camera3Device::HalInterface::isHalBufferManagedStream(int32_t streamId) const {
- return (mUseHalBufManager || (flags::session_hal_buf_manager() &&
- contains(mHalBufManagedStreamIds, streamId)));
+ return (mUseHalBufManager || contains(mHalBufManagedStreamIds, streamId));
}
status_t Camera3Device::HalInterface::popInflightBuffer(
@@ -3774,16 +3796,19 @@
return submitRequestSuccess;
}
-status_t Camera3Device::removeFwkOnlyRegionKeys(CameraMetadata *request) {
- static const std::array<uint32_t, 4> kFwkOnlyRegionKeys = {ANDROID_CONTROL_AF_REGIONS_SET,
- ANDROID_CONTROL_AE_REGIONS_SET, ANDROID_CONTROL_AWB_REGIONS_SET,
- ANDROID_SCALER_CROP_REGION_SET};
+status_t Camera3Device::removeFwkOnlyKeys(CameraMetadata *request) {
+ static const std::array<uint32_t, 5> kFwkOnlyKeys = {
+ ANDROID_CONTROL_AF_REGIONS_SET,
+ ANDROID_CONTROL_AE_REGIONS_SET,
+ ANDROID_CONTROL_AWB_REGIONS_SET,
+ ANDROID_SCALER_CROP_REGION_SET,
+ ANDROID_CONTROL_ZOOM_METHOD};
if (request == nullptr) {
ALOGE("%s request metadata nullptr", __FUNCTION__);
return BAD_VALUE;
}
status_t res = OK;
- for (const auto &key : kFwkOnlyRegionKeys) {
+ for (const auto &key : kFwkOnlyKeys) {
if (request->exists(key)) {
res = request->erase(key);
if (res != OK) {
@@ -3862,7 +3887,7 @@
it != captureRequest->mSettingsList.end(); it++) {
if (parent->mUHRCropAndMeteringRegionMappers.find(it->cameraId) ==
parent->mUHRCropAndMeteringRegionMappers.end()) {
- if (removeFwkOnlyRegionKeys(&(it->metadata)) != OK) {
+ if (removeFwkOnlyKeys(&(it->metadata)) != OK) {
SET_ERR("RequestThread: Unable to remove fwk-only keys from request"
"%d: %s (%d)", halRequest->frame_number, strerror(-res),
res);
@@ -3882,7 +3907,7 @@
return INVALID_OPERATION;
}
captureRequest->mUHRCropAndMeteringRegionsUpdated = true;
- if (removeFwkOnlyRegionKeys(&(it->metadata)) != OK) {
+ if (removeFwkOnlyKeys(&(it->metadata)) != OK) {
SET_ERR("RequestThread: Unable to remove fwk-only keys from request"
"%d: %s (%d)", halRequest->frame_number, strerror(-res),
res);
@@ -4163,6 +4188,7 @@
}
bool isStillCapture = false;
bool isZslCapture = false;
+ bool useZoomRatio = false;
const camera_metadata_t* settings = halRequest->settings;
bool shouldUnlockSettings = false;
if (settings == nullptr) {
@@ -4182,10 +4208,17 @@
if ((e.count > 0) && (e.data.u8[0] == ANDROID_CONTROL_ENABLE_ZSL_TRUE)) {
isZslCapture = true;
}
+
+ if (flags::zoom_method()) {
+ e = camera_metadata_ro_entry_t();
+ find_camera_metadata_ro_entry(settings, ANDROID_CONTROL_ZOOM_METHOD, &e);
+ if ((e.count > 0) && (e.data.u8[0] == ANDROID_CONTROL_ZOOM_METHOD_ZOOM_RATIO)) {
+ useZoomRatio = true;
+ }
+ }
}
bool passSurfaceMap =
- mUseHalBufManager ||
- (flags::session_hal_buf_manager() && containsHalBufferManagedStream);
+ mUseHalBufManager || containsHalBufferManagedStream;
auto expectedDurationInfo = calculateExpectedDurationRange(settings);
res = parent->registerInFlight(halRequest->frame_number,
totalNumBuffers, captureRequest->mResultExtras,
@@ -4196,7 +4229,7 @@
expectedDurationInfo.isFixedFps,
requestedPhysicalCameras, isStillCapture, isZslCapture,
captureRequest->mRotateAndCropAuto, captureRequest->mAutoframingAuto,
- mPrevCameraIdsWithZoom,
+ mPrevCameraIdsWithZoom, useZoomRatio,
passSurfaceMap ? uniqueSurfaceIdMap :
SurfaceMap{}, captureRequest->mRequestTimeNs);
ALOGVV("%s: registered in flight requestId = %" PRId32 ", frameNumber = %" PRId64
@@ -4301,7 +4334,7 @@
void Camera3Device::RequestThread::signalPipelineDrain(const std::vector<int>& streamIds) {
if (!mUseHalBufManager &&
- (flags::session_hal_buf_manager() && mHalBufManagedStreamIds.size() == 0)) {
+ (mHalBufManagedStreamIds.size() == 0)) {
ALOGE("%s called for camera device not supporting HAL buffer management", __FUNCTION__);
return;
}
@@ -4459,8 +4492,7 @@
Camera3Stream *stream = Camera3Stream::cast((*outputBuffers)[i].stream);
int32_t streamId = stream->getId();
bool skipBufferForStream =
- mUseHalBufManager || (flags::session_hal_buf_manager() &&
- contains(mHalBufManagedStreamIds, streamId));
+ mUseHalBufManager || (contains(mHalBufManagedStreamIds, streamId));
if (skipBufferForStream) {
// No output buffer can be returned when using HAL buffer manager for its stream
continue;
@@ -5791,4 +5823,21 @@
}
}
+status_t Camera3Device::deriveAndSetTransformLocked(
+ Camera3OutputStreamInterface& stream, int mirrorMode, int surfaceId) {
+ int transform = -1;
+ bool enableTransformInverseDisplay = true;
+ using hardware::ICameraService::ROTATION_OVERRIDE_ROTATION_ONLY;
+ if (wm_flags::enable_camera_compat_for_desktop_windowing()) {
+ enableTransformInverseDisplay = (mRotationOverride != ROTATION_OVERRIDE_ROTATION_ONLY);
+ }
+ int res = CameraUtils::getRotationTransform(mDeviceInfo, mirrorMode,
+ enableTransformInverseDisplay, &transform);
+ if (res != OK) {
+ return res;
+ }
+ stream.setTransform(transform, false /*mayChangeMirror*/, surfaceId);
+ return OK;
+}
+
}; // namespace android
diff --git a/services/camera/libcameraservice/device3/Camera3Device.h b/services/camera/libcameraservice/device3/Camera3Device.h
index 3c45c1a..5d3c010 100644
--- a/services/camera/libcameraservice/device3/Camera3Device.h
+++ b/services/camera/libcameraservice/device3/Camera3Device.h
@@ -63,6 +63,7 @@
using android::camera3::camera_stream_configuration_mode_t;
using android::camera3::CAMERA_TEMPLATE_COUNT;
using android::camera3::OutputStreamInfo;
+using android::camera3::SurfaceHolder;
namespace android {
@@ -168,7 +169,7 @@
bool useReadoutTimestamp = false)
override;
- status_t createStream(const std::vector<sp<Surface>>& consumers,
+ status_t createStream(const std::vector<SurfaceHolder>& consumers,
bool hasDeferredConsumer, uint32_t width, uint32_t height, int format,
android_dataspace dataSpace, camera_stream_rotation_t rotation, int *id,
const std::string& physicalCameraId,
@@ -181,7 +182,6 @@
ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD,
int64_t streamUseCase = ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_DEFAULT,
int timestampBase = OutputConfiguration::TIMESTAMP_BASE_DEFAULT,
- int mirrorMode = OutputConfiguration::MIRROR_MODE_AUTO,
int32_t colorSpace = ANDROID_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP_UNSPECIFIED,
bool useReadoutTimestamp = false)
override;
@@ -195,6 +195,19 @@
status_t deleteStream(int id) override;
+ virtual status_t beginConfigure() override {return OK;};
+
+ virtual status_t getSharedStreamId(const OutputConfiguration& /*config*/,
+ int* /*streamId*/) override {return INVALID_OPERATION;};
+
+ virtual status_t addSharedSurfaces(int /*streamId*/,
+ const std::vector<android::camera3::OutputStreamInfo>& /*outputInfo*/,
+ const std::vector<SurfaceHolder>& /*surfaces*/,
+ std::vector<int>* /*surfaceIds*/) override {return INVALID_OPERATION;};
+
+ virtual status_t removeSharedSurfaces(int /*streamId*/,
+ const std::vector<size_t>& /*surfaceIds*/) override {return INVALID_OPERATION;};
+
status_t configureStreams(const CameraMetadata& sessionParams,
int operatingMode =
camera_stream_configuration_mode_t::CAMERA_STREAM_CONFIGURATION_NORMAL_MODE) override;
@@ -247,13 +260,13 @@
* consumer configuration.
*/
status_t setConsumerSurfaces(
- int streamId, const std::vector<sp<Surface>>& consumers,
+ int streamId, const std::vector<SurfaceHolder>& consumers,
std::vector<int> *surfaceIds /*out*/) override;
/**
* Update a given stream.
*/
- status_t updateStream(int streamId, const std::vector<sp<Surface>> &newSurfaces,
+ status_t updateStream(int streamId, const std::vector<SurfaceHolder> &newSurfaces,
const std::vector<OutputStreamInfo> &outputInfo,
const std::vector<size_t> &removedSurfaceIds,
KeyedVector<sp<Surface>, size_t> *outputMap/*out*/);
@@ -367,7 +380,7 @@
protected:
status_t disconnectImpl();
- static status_t removeFwkOnlyRegionKeys(CameraMetadata *request);
+ static status_t removeFwkOnlyKeys(CameraMetadata *request);
float getMaxPreviewFps(sp<camera3::Camera3OutputStreamInterface> stream);
@@ -1275,8 +1288,8 @@
bool callback, nsecs_t minExpectedDuration, nsecs_t maxExpectedDuration,
bool isFixedFps, const std::set<std::set<std::string>>& physicalCameraIds,
bool isStillCapture, bool isZslCapture, bool rotateAndCropAuto, bool autoframingAuto,
- const std::set<std::string>& cameraIdsWithZoom, const SurfaceMap& outputSurfaces,
- nsecs_t requestTimeNs);
+ const std::set<std::string>& cameraIdsWithZoom, bool useZoomRatio,
+ const SurfaceMap& outputSurfaces, nsecs_t requestTimeNs);
/**
* Tracking for idle detection
@@ -1644,6 +1657,8 @@
sp<Camera3DeviceInjectionMethods> mInjectionMethods;
void overrideStreamUseCaseLocked();
+ status_t deriveAndSetTransformLocked(camera3::Camera3OutputStreamInterface& stream,
+ int mirrorMode, int surfaceId);
}; // class Camera3Device
diff --git a/services/camera/libcameraservice/device3/Camera3FakeStream.cpp b/services/camera/libcameraservice/device3/Camera3FakeStream.cpp
index 55467c3..79b88f8 100644
--- a/services/camera/libcameraservice/device3/Camera3FakeStream.cpp
+++ b/services/camera/libcameraservice/device3/Camera3FakeStream.cpp
@@ -76,7 +76,7 @@
Camera3IOStreamBase::dump(fd, args);
}
-status_t Camera3FakeStream::setTransform(int, bool) {
+status_t Camera3FakeStream::setTransform(int, bool, int) {
ATRACE_CALL();
// Do nothing
return OK;
@@ -120,13 +120,13 @@
return FAKE_ID;
}
-status_t Camera3FakeStream::setConsumers(const std::vector<sp<Surface>>& /*consumers*/) {
+status_t Camera3FakeStream::setConsumers(const std::vector<SurfaceHolder>& /*consumers*/) {
ALOGE("%s: Stream %d: Fake stream doesn't support set consumer surface!",
__FUNCTION__, mId);
return INVALID_OPERATION;
}
-status_t Camera3FakeStream::updateStream(const std::vector<sp<Surface>> &/*outputSurfaces*/,
+status_t Camera3FakeStream::updateStream(const std::vector<SurfaceHolder> &/*outputSurfaces*/,
const std::vector<OutputStreamInfo> &/*outputInfo*/,
const std::vector<size_t> &/*removedSurfaceIds*/,
KeyedVector<sp<Surface>, size_t> * /*outputMap*/) {
diff --git a/services/camera/libcameraservice/device3/Camera3FakeStream.h b/services/camera/libcameraservice/device3/Camera3FakeStream.h
index 7addb90..9291bd0 100644
--- a/services/camera/libcameraservice/device3/Camera3FakeStream.h
+++ b/services/camera/libcameraservice/device3/Camera3FakeStream.h
@@ -52,7 +52,7 @@
virtual void dump(int fd, const Vector<String16> &args);
- status_t setTransform(int transform, bool mayChangeMirror);
+ status_t setTransform(int transform, bool mayChangeMirror, int surfaceId);
virtual status_t detachBuffer(sp<GraphicBuffer>* buffer, int* fenceFd);
@@ -80,7 +80,7 @@
/**
* Set the consumer surfaces to the output stream.
*/
- virtual status_t setConsumers(const std::vector<sp<Surface>>& consumers);
+ virtual status_t setConsumers(const std::vector<SurfaceHolder>& consumers);
/**
* Query the output surface id.
@@ -93,7 +93,7 @@
/**
* Update the stream output surfaces.
*/
- virtual status_t updateStream(const std::vector<sp<Surface>> &outputSurfaces,
+ virtual status_t updateStream(const std::vector<SurfaceHolder> &outputSurfaces,
const std::vector<OutputStreamInfo> &outputInfo,
const std::vector<size_t> &removedSurfaceIds,
KeyedVector<sp<Surface>, size_t> *outputMap/*out*/);
diff --git a/services/camera/libcameraservice/device3/Camera3OutputStream.cpp b/services/camera/libcameraservice/device3/Camera3OutputStream.cpp
index 83c8a38..2eba5a7 100644
--- a/services/camera/libcameraservice/device3/Camera3OutputStream.cpp
+++ b/services/camera/libcameraservice/device3/Camera3OutputStream.cpp
@@ -26,6 +26,7 @@
#include <aidl/android/hardware/camera/device/CameraBlobId.h>
#include "aidl/android/hardware/graphics/common/Dataspace.h"
+#include <android/data_space.h>
#include <android-base/unique_fd.h>
#include <com_android_internal_camera_flags.h>
#include <cutils/properties.h>
@@ -136,7 +137,7 @@
const std::unordered_set<int32_t> &sensorPixelModesUsed, IPCTransport transport,
int setId, bool isMultiResolution, int64_t dynamicRangeProfile,
int64_t streamUseCase, bool deviceTimeBaseIsRealtime, int timestampBase,
- int mirrorMode, int32_t colorSpace, bool useReadoutTimestamp) :
+ int32_t colorSpace, bool useReadoutTimestamp) :
Camera3IOStreamBase(id, CAMERA_STREAM_OUTPUT, width, height,
/*maxSize*/0, format, dataSpace, rotation,
physicalCameraId, sensorPixelModesUsed, setId, isMultiResolution,
@@ -150,7 +151,7 @@
mUseReadoutTime(useReadoutTimestamp),
mConsumerUsage(consumerUsage),
mDropBuffers(false),
- mMirrorMode(mirrorMode),
+ mMirrorMode(OutputConfiguration::MIRROR_MODE_AUTO),
mDequeueBufferLatency(kDequeueLatencyBinSize),
mIPCTransport(transport) {
// Deferred consumer only support preview surface format now.
@@ -184,8 +185,7 @@
int setId, bool isMultiResolution,
int64_t dynamicRangeProfile, int64_t streamUseCase,
bool deviceTimeBaseIsRealtime, int timestampBase,
- int mirrorMode, int32_t colorSpace,
- bool useReadoutTimestamp) :
+ int32_t colorSpace, bool useReadoutTimestamp) :
Camera3IOStreamBase(id, type, width, height,
/*maxSize*/0,
format, dataSpace, rotation,
@@ -199,7 +199,7 @@
mUseReadoutTime(useReadoutTimestamp),
mConsumerUsage(consumerUsage),
mDropBuffers(false),
- mMirrorMode(mirrorMode),
+ mMirrorMode(OutputConfiguration::MIRROR_MODE_AUTO),
mDequeueBufferLatency(kDequeueLatencyBinSize),
mIPCTransport(transport) {
@@ -403,6 +403,8 @@
// Fix CameraBlob id type discrepancy between HIDL and AIDL, details : http://b/229688810
if (getFormat() == HAL_PIXEL_FORMAT_BLOB && (getDataSpace() == HAL_DATASPACE_V0_JFIF ||
(getDataSpace() ==
+ static_cast<android_dataspace_t>(ADATASPACE_HEIF_ULTRAHDR)) ||
+ (getDataSpace() ==
static_cast<android_dataspace_t>(
aidl::android::hardware::graphics::common::Dataspace::JPEG_R)))) {
if (mIPCTransport == IPCTransport::HIDL) {
@@ -479,21 +481,23 @@
" DequeueBuffer latency histogram:");
}
-status_t Camera3OutputStream::setTransform(int transform, bool mayChangeMirror) {
+status_t Camera3OutputStream::setTransform(int transform, bool mayChangeMirror, int surfaceId) {
ATRACE_CALL();
Mutex::Autolock l(mLock);
+
if (mMirrorMode != OutputConfiguration::MIRROR_MODE_AUTO && mayChangeMirror) {
// If the mirroring mode is not AUTO, do not allow transform update
// which may change mirror.
return OK;
}
- return setTransformLocked(transform);
-}
-
-status_t Camera3OutputStream::setTransformLocked(int transform) {
status_t res = OK;
+ if (surfaceId != 0) {
+ ALOGE("%s: Invalid surfaceId %d", __FUNCTION__, surfaceId);
+ return BAD_VALUE;
+ }
+
if (transform == -1) return res;
if (mState == STATE_ERROR) {
@@ -525,6 +529,12 @@
return res;
}
+ if ((res = native_window_set_buffers_transform(mConsumer.get(), mTransform)) != OK) {
+ ALOGE("%s: Unable to configure stream transform to %x: %s (%d)",
+ __FUNCTION__, mTransform, strerror(-res), res);
+ return res;
+ }
+
// Set dequeueBuffer/attachBuffer timeout if the consumer is not hw composer or hw texture.
// We need skip these cases as timeout will disable the non-blocking (async) mode.
if (!(isConsumedByHWComposer() || isConsumedByHWTexture())) {
@@ -694,14 +704,6 @@
return res;
}
- res = native_window_set_buffers_transform(mConsumer.get(),
- mTransform);
- if (res != OK) {
- ALOGE("%s: Unable to configure stream transform to %x: %s (%d)",
- __FUNCTION__, mTransform, strerror(-res), res);
- return res;
- }
-
/**
* Camera3 Buffer manager is only supported by HAL3.3 onwards, as the older HALs requires
* buffers to be statically allocated for internal static buffer registration, while the
@@ -1069,7 +1071,7 @@
return OK;
}
-status_t Camera3OutputStream::updateStream(const std::vector<sp<Surface>> &/*outputSurfaces*/,
+status_t Camera3OutputStream::updateStream(const std::vector<SurfaceHolder> &/*outputSurfaces*/,
const std::vector<OutputStreamInfo> &/*outputInfo*/,
const std::vector<size_t> &/*removedSurfaceIds*/,
KeyedVector<sp<Surface>, size_t> * /*outputMapo*/) {
@@ -1206,14 +1208,14 @@
return mConsumer == nullptr;
}
-status_t Camera3OutputStream::setConsumers(const std::vector<sp<Surface>>& consumers) {
+status_t Camera3OutputStream::setConsumers(const std::vector<SurfaceHolder>& consumers) {
Mutex::Autolock l(mLock);
if (consumers.size() != 1) {
ALOGE("%s: it's illegal to set %zu consumer surfaces!",
__FUNCTION__, consumers.size());
return INVALID_OPERATION;
}
- if (consumers[0] == nullptr) {
+ if (consumers[0].mSurface == nullptr) {
ALOGE("%s: it's illegal to set null consumer surface!", __FUNCTION__);
return INVALID_OPERATION;
}
@@ -1223,7 +1225,8 @@
return INVALID_OPERATION;
}
- mConsumer = consumers[0];
+ mConsumer = consumers[0].mSurface;
+ mMirrorMode = consumers[0].mMirrorMode;
return OK;
}
diff --git a/services/camera/libcameraservice/device3/Camera3OutputStream.h b/services/camera/libcameraservice/device3/Camera3OutputStream.h
index f8b78c1..a547f82 100644
--- a/services/camera/libcameraservice/device3/Camera3OutputStream.h
+++ b/services/camera/libcameraservice/device3/Camera3OutputStream.h
@@ -134,7 +134,6 @@
int64_t streamUseCase = ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_DEFAULT,
bool deviceTimeBaseIsRealtime = false,
int timestampBase = OutputConfiguration::TIMESTAMP_BASE_DEFAULT,
- int mirrorMode = OutputConfiguration::MIRROR_MODE_AUTO,
int32_t colorSpace = ANDROID_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP_UNSPECIFIED,
bool useReadoutTimestamp = false);
@@ -150,7 +149,7 @@
* Set the transform on the output stream; one of the
* HAL_TRANSFORM_* / NATIVE_WINDOW_TRANSFORM_* constants.
*/
- status_t setTransform(int transform, bool mayChangeMirror);
+ virtual status_t setTransform(int transform, bool mayChangeMirror, int surfaceId = 0);
/**
* Return if this output stream is for video encoding.
@@ -179,7 +178,7 @@
/**
* Set the consumer surfaces to the output stream.
*/
- virtual status_t setConsumers(const std::vector<sp<Surface>>& consumers);
+ virtual status_t setConsumers(const std::vector<SurfaceHolder>& consumers);
class BufferProducerListener : public SurfaceListener {
public:
@@ -236,7 +235,7 @@
/**
* Update the stream output surfaces.
*/
- virtual status_t updateStream(const std::vector<sp<Surface>> &outputSurfaces,
+ virtual status_t updateStream(const std::vector<SurfaceHolder> &outputSurfaces,
const std::vector<OutputStreamInfo> &outputInfo,
const std::vector<size_t> &removedSurfaceIds,
KeyedVector<sp<Surface>, size_t> *outputMap/*out*/);
@@ -286,7 +285,6 @@
int64_t streamUseCase = ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_DEFAULT,
bool deviceTimeBaseIsRealtime = false,
int timestampBase = OutputConfiguration::TIMESTAMP_BASE_DEFAULT,
- int mirrorMode = OutputConfiguration::MIRROR_MODE_AUTO,
int32_t colorSpace = ANDROID_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP_UNSPECIFIED,
bool useReadoutTimestamp = false);
@@ -323,8 +321,6 @@
int mTransform;
- virtual status_t setTransformLocked(int transform);
-
bool mTraceFirstBuffer;
/**
@@ -383,7 +379,7 @@
std::vector<Surface::BatchBuffer> mBatchedBuffers;
// ---- End of mBatchLock protected scope ----
- const int mMirrorMode;
+ int mMirrorMode;
/**
* Internal Camera3Stream interface
diff --git a/services/camera/libcameraservice/device3/Camera3OutputStreamInterface.h b/services/camera/libcameraservice/device3/Camera3OutputStreamInterface.h
index 77edfbe..ff7ad56 100644
--- a/services/camera/libcameraservice/device3/Camera3OutputStreamInterface.h
+++ b/services/camera/libcameraservice/device3/Camera3OutputStreamInterface.h
@@ -34,7 +34,7 @@
* Set the transform on the output stream; one of the
* HAL_TRANSFORM_* / NATIVE_WINDOW_TRANSFORM_* constants.
*/
- virtual status_t setTransform(int transform, bool mayChangeMirror) = 0;
+ virtual status_t setTransform(int transform, bool mayChangeMirror, int surfaceId = 0) = 0;
/**
* Return if this output stream is for video encoding.
@@ -49,7 +49,7 @@
/**
* Set the consumer surfaces to the output stream.
*/
- virtual status_t setConsumers(const std::vector<sp<Surface>>& consumers) = 0;
+ virtual status_t setConsumers(const std::vector<SurfaceHolder>& consumers) = 0;
/**
* Detach an unused buffer from the stream.
@@ -81,7 +81,7 @@
/**
* Update the stream output surfaces.
*/
- virtual status_t updateStream(const std::vector<sp<Surface>> &outputSurfaces,
+ virtual status_t updateStream(const std::vector<SurfaceHolder> &outputSurfaces,
const std::vector<OutputStreamInfo> &outputInfo,
const std::vector<size_t> &removedSurfaceIds,
KeyedVector<sp<Surface>, size_t> *outputMap/*out*/) = 0;
diff --git a/services/camera/libcameraservice/device3/Camera3OutputUtils.cpp b/services/camera/libcameraservice/device3/Camera3OutputUtils.cpp
index 31707ec..ed11a96 100644
--- a/services/camera/libcameraservice/device3/Camera3OutputUtils.cpp
+++ b/services/camera/libcameraservice/device3/Camera3OutputUtils.cpp
@@ -304,7 +304,7 @@
CameraMetadata &collectedPartialResult,
uint32_t frameNumber,
bool reprocess, bool zslStillCapture, bool rotateAndCropAuto,
- const std::set<std::string>& cameraIdsWithZoom,
+ const std::set<std::string>& cameraIdsWithZoom, bool useZoomRatio,
const std::vector<PhysicalCaptureResultInfo>& physicalMetadatas) {
ATRACE_CALL();
if (pendingMetadata.isEmpty())
@@ -386,7 +386,7 @@
// HAL and app.
bool zoomRatioIs1 = cameraIdsWithZoom.find(states.cameraId) == cameraIdsWithZoom.end();
res = states.zoomRatioMappers[states.cameraId].updateCaptureResult(
- &captureResult.mMetadata, zoomRatioIs1);
+ &captureResult.mMetadata, useZoomRatio, zoomRatioIs1);
if (res != OK) {
SET_ERR("Failed to update capture result zoom ratio metadata for frame %d: %s (%d)",
frameNumber, strerror(-res), res);
@@ -452,9 +452,11 @@
}
}
- zoomRatioIs1 = cameraIdsWithZoom.find(cameraId) == cameraIdsWithZoom.end();
+ // Note: Physical camera continues to use SCALER_CROP_REGION to reflect
+ // zoom levels.
res = states.zoomRatioMappers[cameraId].updateCaptureResult(
- &physicalMetadata.mPhysicalCameraMetadata, zoomRatioIs1);
+ &physicalMetadata.mPhysicalCameraMetadata, /*zoomMethodIsRatio*/false,
+ /*zoomRatioIs1*/false);
if (res != OK) {
SET_ERR("Failed to update camera %s's physical zoom ratio metadata for "
"frame %d: %s(%d)", cameraId.c_str(), frameNumber, strerror(-res), res);
@@ -685,7 +687,8 @@
if (orientation.count > 0) {
int32_t transform;
ret = CameraUtils::getRotationTransform(deviceInfo->second,
- OutputConfiguration::MIRROR_MODE_AUTO, &transform);
+ OutputConfiguration::MIRROR_MODE_AUTO,
+ /*transformInverseDisplay*/true, &transform);
if (ret == OK) {
// It is possible for camera providers to return the capture
// results after the processed frames. In such scenario, we will
@@ -828,7 +831,7 @@
sendCaptureResult(states, metadata, request.resultExtras,
collectedPartialResult, frameNumber,
hasInputBufferInRequest, request.zslCapture && request.stillCapture,
- request.rotateAndCropAuto, cameraIdsWithZoom,
+ request.rotateAndCropAuto, cameraIdsWithZoom, request.useZoomRatio,
request.physicalMetadatas);
}
}
@@ -894,8 +897,7 @@
if (outputBuffers[i].buffer == nullptr) {
if (!useHalBufManager &&
- !(flags::session_hal_buf_manager() &&
- contains(halBufferManagedStreams, streamId))) {
+ !contains(halBufferManagedStreams, streamId)) {
// With HAL buffer management API, HAL sometimes will have to return buffers that
// has not got a output buffer handle filled yet. This is though illegal if HAL
// buffer management API is not being used.
@@ -1098,7 +1100,8 @@
r.pendingMetadata, r.resultExtras,
r.collectedPartialResult, msg.frame_number,
r.hasInputBuffer, r.zslCapture && r.stillCapture,
- r.rotateAndCropAuto, cameraIdsWithZoom, r.physicalMetadatas);
+ r.rotateAndCropAuto, cameraIdsWithZoom, r.useZoomRatio,
+ r.physicalMetadatas);
}
collectAndRemovePendingOutputBuffers(
states.useHalBufManager, states.halBufManagedStreamIds,
diff --git a/services/camera/libcameraservice/device3/Camera3OutputUtilsTemplated.h b/services/camera/libcameraservice/device3/Camera3OutputUtilsTemplated.h
index aca7a67..2d75d03 100644
--- a/services/camera/libcameraservice/device3/Camera3OutputUtilsTemplated.h
+++ b/services/camera/libcameraservice/device3/Camera3OutputUtilsTemplated.h
@@ -212,8 +212,7 @@
bool noBufferReturned = false;
buffer_handle_t *buffer = nullptr;
if (states.useHalBufManager ||
- (flags::session_hal_buf_manager() &&
- contains(states.halBufManagedStreamIds, bSrc.streamId))) {
+ contains(states.halBufManagedStreamIds, bSrc.streamId)) {
// This is suspicious most of the time but can be correct during flush where HAL
// has to return capture result before a buffer is requested
if (bSrc.bufferId == BUFFER_ID_NO_BUFFER) {
@@ -303,8 +302,7 @@
for (const auto& buf : buffers) {
if (!states.useHalBufManager &&
- !(flags::session_hal_buf_manager() &&
- contains(states.halBufManagedStreamIds, buf.streamId))) {
+ !contains(states.halBufManagedStreamIds, buf.streamId)) {
ALOGE("%s: Camera %s does not support HAL buffer management for stream id %d",
__FUNCTION__, states.cameraId.c_str(), buf.streamId);
return;
diff --git a/services/camera/libcameraservice/device3/Camera3SharedOutputStream.cpp b/services/camera/libcameraservice/device3/Camera3SharedOutputStream.cpp
index 187bd93..b436d2e 100644
--- a/services/camera/libcameraservice/device3/Camera3SharedOutputStream.cpp
+++ b/services/camera/libcameraservice/device3/Camera3SharedOutputStream.cpp
@@ -18,6 +18,8 @@
#define ATRACE_TAG ATRACE_TAG_CAMERA
//#define LOG_NDEBUG 0
+#include <utils/Trace.h>
+
#include "Flags.h"
#include "Camera3SharedOutputStream.h"
@@ -29,7 +31,7 @@
const size_t Camera3SharedOutputStream::kMaxOutputs;
Camera3SharedOutputStream::Camera3SharedOutputStream(int id,
- const std::vector<sp<Surface>>& surfaces,
+ const std::vector<SurfaceHolder>& surfaces,
uint32_t width, uint32_t height, int format,
uint64_t consumerUsage, android_dataspace dataSpace,
camera_stream_rotation_t rotation,
@@ -37,12 +39,12 @@
const std::unordered_set<int32_t> &sensorPixelModesUsed, IPCTransport transport,
int setId, bool useHalBufManager, int64_t dynamicProfile,
int64_t streamUseCase, bool deviceTimeBaseIsRealtime, int timestampBase,
- int mirrorMode, int32_t colorSpace, bool useReadoutTimestamp) :
+ int32_t colorSpace, bool useReadoutTimestamp) :
Camera3OutputStream(id, CAMERA_STREAM_OUTPUT, width, height,
format, dataSpace, rotation, physicalCameraId, sensorPixelModesUsed,
transport, consumerUsage, timestampOffset, setId,
/*isMultiResolution*/false, dynamicProfile, streamUseCase,
- deviceTimeBaseIsRealtime, timestampBase, mirrorMode, colorSpace,
+ deviceTimeBaseIsRealtime, timestampBase, colorSpace,
useReadoutTimestamp),
mUseHalBufManager(useHalBufManager) {
size_t consumerCount = std::min(surfaces.size(), kMaxOutputs);
@@ -50,7 +52,7 @@
ALOGE("%s: Trying to add more consumers than the maximum ", __func__);
}
for (size_t i = 0; i < consumerCount; i++) {
- mSurfaceUniqueIds[i] = std::make_pair(surfaces[i], mNextUniqueSurfaceId++);
+ mSurfaceUniqueIds[i] = SurfaceHolderUniqueId{surfaces[i], mNextUniqueSurfaceId++};
}
}
@@ -72,8 +74,8 @@
std::unordered_map<size_t, sp<Surface>> initialSurfaces;
for (size_t i = 0; i < kMaxOutputs; i++) {
- if (mSurfaceUniqueIds[i].first != nullptr) {
- initialSurfaces.emplace(i, mSurfaceUniqueIds[i].first);
+ if (mSurfaceUniqueIds[i].mSurfaceHolder.mSurface != nullptr) {
+ initialSurfaces.emplace(i, mSurfaceUniqueIds[i].mSurfaceHolder.mSurface);
}
}
@@ -142,19 +144,19 @@
return true;
}
- return (mSurfaceUniqueIds[surface_id].first == nullptr);
+ return (mSurfaceUniqueIds[surface_id].mSurfaceHolder.mSurface == nullptr);
}
-status_t Camera3SharedOutputStream::setConsumers(const std::vector<sp<Surface>>& surfaces) {
+status_t Camera3SharedOutputStream::setConsumers(const std::vector<SurfaceHolder>& surfaceHolders) {
Mutex::Autolock l(mLock);
- if (surfaces.size() == 0) {
+ if (surfaceHolders.size() == 0) {
ALOGE("%s: it's illegal to set zero consumer surfaces!", __FUNCTION__);
return INVALID_OPERATION;
}
status_t ret = OK;
- for (auto& surface : surfaces) {
- if (surface == nullptr) {
+ for (auto& surfaceHolder : surfaceHolders) {
+ if (surfaceHolder.mSurface == nullptr) {
ALOGE("%s: it's illegal to set a null consumer surface!", __FUNCTION__);
return INVALID_OPERATION;
}
@@ -165,11 +167,11 @@
return NO_MEMORY;
}
- mSurfaceUniqueIds[id] = std::make_pair(surface, mNextUniqueSurfaceId++);
+ mSurfaceUniqueIds[id] = SurfaceHolderUniqueId{surfaceHolder, mNextUniqueSurfaceId++};
// Only call addOutput if the splitter has been connected.
if (mStreamSplitter != nullptr) {
- ret = mStreamSplitter->addOutput(id, surface);
+ ret = mStreamSplitter->addOutput(id, surfaceHolder.mSurface);
if (ret != OK) {
ALOGE("%s: addOutput failed with error code %d", __FUNCTION__, ret);
return ret;
@@ -222,7 +224,7 @@
for (const auto& uniqueId : uniqueSurfaceIds) {
bool uniqueIdFound = false;
for (size_t i = 0; i < kMaxOutputs; i++) {
- if (mSurfaceUniqueIds[i].second == uniqueId) {
+ if (mSurfaceUniqueIds[i].mId == uniqueId) {
surfaceIds.push_back(i);
uniqueIdFound = true;
break;
@@ -275,6 +277,23 @@
return res;
}
+ // Set buffer transform for all configured surfaces
+ for (const auto& surfaceUniqueId : mSurfaceUniqueIds) {
+ const sp<Surface>& surface = surfaceUniqueId.mSurfaceHolder.mSurface;
+ int surfaceId = surfaceUniqueId.mId;
+ int32_t transform = surfaceUniqueId.mTransform;
+ if (transform == -1 || surface == nullptr) {
+ continue;
+ }
+
+ res = mStreamSplitter->setTransform(surfaceId, transform);
+ if (res != OK) {
+ ALOGE("%s: StreamSplitter failed to setTransform: %s(%d)",
+ __FUNCTION__, strerror(-res), res);
+ return res;
+ }
+ }
+
return OK;
}
@@ -299,8 +318,9 @@
*usage = getPresetConsumerUsage();
for (size_t id = 0; id < kMaxOutputs; id++) {
- if (mSurfaceUniqueIds[id].first != nullptr) {
- res = getEndpointUsageForSurface(&u, mSurfaceUniqueIds[id].first);
+ const auto& surface = mSurfaceUniqueIds[id].mSurfaceHolder.mSurface;
+ if (surface != nullptr) {
+ res = getEndpointUsageForSurface(&u, surface);
*usage |= u;
}
}
@@ -316,7 +336,7 @@
ssize_t Camera3SharedOutputStream::getNextSurfaceIdLocked() {
ssize_t id = -1;
for (size_t i = 0; i < kMaxOutputs; i++) {
- if (mSurfaceUniqueIds[i].first == nullptr) {
+ if (mSurfaceUniqueIds[i].mSurfaceHolder.mSurface == nullptr) {
id = i;
break;
}
@@ -329,7 +349,7 @@
Mutex::Autolock l(mLock);
ssize_t id = -1;
for (size_t i = 0; i < kMaxOutputs; i++) {
- if (mSurfaceUniqueIds[i].first == surface) {
+ if (mSurfaceUniqueIds[i].mSurfaceHolder.mSurface == surface) {
id = i;
break;
}
@@ -353,13 +373,13 @@
if (surfaceId >= kMaxOutputs) {
return BAD_VALUE;
}
- outUniqueIds->push_back(mSurfaceUniqueIds[surfaceId].second);
+ outUniqueIds->push_back(mSurfaceUniqueIds[surfaceId].mId);
}
return OK;
}
status_t Camera3SharedOutputStream::revertPartialUpdateLocked(
- const KeyedVector<sp<Surface>, size_t> &removedSurfaces,
+ const KeyedVector<size_t, SurfaceHolder> &removedSurfaces,
const KeyedVector<sp<Surface>, size_t> &attachedSurfaces) {
status_t ret = OK;
@@ -371,25 +391,25 @@
return UNKNOWN_ERROR;
}
}
- mSurfaceUniqueIds[index] = std::make_pair(nullptr, mNextUniqueSurfaceId++);
+ mSurfaceUniqueIds[index] = SurfaceHolderUniqueId{mNextUniqueSurfaceId++};
}
for (size_t i = 0; i < removedSurfaces.size(); i++) {
- size_t index = removedSurfaces.valueAt(i);
+ size_t index = removedSurfaces.keyAt(i);
if (mStreamSplitter != nullptr) {
- ret = mStreamSplitter->addOutput(index, removedSurfaces.keyAt(i));
+ ret = mStreamSplitter->addOutput(index, removedSurfaces.valueAt(i).mSurface);
if (ret != OK) {
return UNKNOWN_ERROR;
}
}
- mSurfaceUniqueIds[index] = std::make_pair(
- removedSurfaces.keyAt(i), mNextUniqueSurfaceId++);
+ mSurfaceUniqueIds[index] = SurfaceHolderUniqueId{removedSurfaces.valueAt(i),
+ mNextUniqueSurfaceId++};
}
return ret;
}
-status_t Camera3SharedOutputStream::updateStream(const std::vector<sp<Surface>> &outputSurfaces,
+status_t Camera3SharedOutputStream::updateStream(const std::vector<SurfaceHolder> &outputSurfaces,
const std::vector<OutputStreamInfo> &outputInfo,
const std::vector<size_t> &removedSurfaceIds,
KeyedVector<sp<Surface>, size_t> *outputMap) {
@@ -403,7 +423,7 @@
uint64_t usage;
getEndpointUsage(&usage);
- KeyedVector<sp<Surface>, size_t> removedSurfaces;
+ KeyedVector<size_t, SurfaceHolder> removedSurfaces;
//Check whether the new surfaces are compatible.
for (const auto &infoIt : outputInfo) {
bool imgReaderUsage = (infoIt.consumerUsage & GRALLOC_USAGE_SW_READ_OFTEN) ? true : false;
@@ -437,8 +457,8 @@
}
}
- removedSurfaces.add(mSurfaceUniqueIds[it].first, it);
- mSurfaceUniqueIds[it] = std::make_pair(nullptr, mNextUniqueSurfaceId++);
+ removedSurfaces.add(it, mSurfaceUniqueIds[it].mSurfaceHolder);
+ mSurfaceUniqueIds[it] = SurfaceHolderUniqueId{mNextUniqueSurfaceId++};
}
//Next add the new outputs
@@ -453,7 +473,7 @@
return NO_MEMORY;
}
if (mStreamSplitter != nullptr) {
- ret = mStreamSplitter->addOutput(surfaceId, it);
+ ret = mStreamSplitter->addOutput(surfaceId, it.mSurface);
if (ret != OK) {
ALOGE("%s: failed with error code %d", __FUNCTION__, ret);
status_t res = revertPartialUpdateLocked(removedSurfaces, *outputMap);
@@ -463,13 +483,54 @@
return ret;
}
}
- mSurfaceUniqueIds[surfaceId] = std::make_pair(it, mNextUniqueSurfaceId++);
- outputMap->add(it, surfaceId);
+ mSurfaceUniqueIds[surfaceId] = SurfaceHolderUniqueId{it, mNextUniqueSurfaceId++};
+ outputMap->add(it.mSurface, surfaceId);
}
return ret;
}
+status_t Camera3SharedOutputStream::setTransform(
+ int transform, bool mayChangeMirror, int surfaceId) {
+ ATRACE_CALL();
+ Mutex::Autolock l(mLock);
+
+ status_t res = OK;
+
+ if (surfaceId < 0 || (size_t)surfaceId >= mSurfaceUniqueIds.size()) {
+ ALOGE("%s: Invalid surfaceId %d", __FUNCTION__, surfaceId);
+ return BAD_VALUE;
+ }
+ if (transform == -1) return res;
+
+ if (mState == STATE_ERROR) {
+ ALOGE("%s: Stream in error state", __FUNCTION__);
+ return INVALID_OPERATION;
+ }
+
+ auto& surfaceHolderForId = mSurfaceUniqueIds[surfaceId];
+ if (surfaceHolderForId.mSurfaceHolder.mMirrorMode != OutputConfiguration::MIRROR_MODE_AUTO &&
+ mayChangeMirror) {
+ // If the mirroring mode is not AUTO, do not allow transform update
+ // which may change mirror.
+ return OK;
+ }
+
+ surfaceHolderForId.mTransform = transform;
+ if (mState == STATE_CONFIGURED) {
+ sp<Surface> surface = surfaceHolderForId.mSurfaceHolder.mSurface;
+ if (surface != nullptr) {
+ res = mStreamSplitter->setTransform(surfaceId, transform);
+ if (res != OK) {
+ ALOGE("%s: StreamSplitter fails to setTransform: %s(%d)",
+ __FUNCTION__, strerror(-res), res);
+ return res;
+ }
+ }
+ }
+ return res;
+}
+
} // namespace camera3
} // namespace android
diff --git a/services/camera/libcameraservice/device3/Camera3SharedOutputStream.h b/services/camera/libcameraservice/device3/Camera3SharedOutputStream.h
index ae11507..1fd676c 100644
--- a/services/camera/libcameraservice/device3/Camera3SharedOutputStream.h
+++ b/services/camera/libcameraservice/device3/Camera3SharedOutputStream.h
@@ -41,21 +41,15 @@
* surfaces. A valid stream set id needs to be set to support buffer
* sharing between multiple streams.
*/
- Camera3SharedOutputStream(int id, const std::vector<sp<Surface>>& surfaces,
+ Camera3SharedOutputStream(int id, const std::vector<SurfaceHolder>& surfaces,
uint32_t width, uint32_t height, int format,
uint64_t consumerUsage, android_dataspace dataSpace,
camera_stream_rotation_t rotation, nsecs_t timestampOffset,
const std::string& physicalCameraId,
const std::unordered_set<int32_t> &sensorPixelModesUsed, IPCTransport transport,
- int setId = CAMERA3_STREAM_SET_ID_INVALID,
- bool useHalBufManager = false,
- int64_t dynamicProfile = ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD,
- int64_t streamUseCase = ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_DEFAULT,
- bool deviceTimeBaseIsRealtime = false,
- int timestampBase = OutputConfiguration::TIMESTAMP_BASE_DEFAULT,
- int mirrorMode = OutputConfiguration::MIRROR_MODE_AUTO,
- int32_t colorSpace = ANDROID_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP_UNSPECIFIED,
- bool useReadoutTimestamp = false);
+ int setId, bool useHalBufManager, int64_t dynamicProfile, int64_t streamUseCase,
+ bool deviceTimeBaseIsRealtime, int timestampBase,
+ int32_t colorSpace, bool useReadoutTimestamp);
virtual ~Camera3SharedOutputStream();
@@ -65,7 +59,7 @@
virtual bool isConsumerConfigurationDeferred(size_t surface_id) const;
- virtual status_t setConsumers(const std::vector<sp<Surface>>& consumers);
+ virtual status_t setConsumers(const std::vector<SurfaceHolder>& consumers);
virtual ssize_t getSurfaceId(const sp<Surface> &surface);
@@ -78,7 +72,7 @@
virtual status_t getUniqueSurfaceIds(const std::vector<size_t>& surfaceIds,
/*out*/std::vector<size_t>* outUniqueIds) override;
- virtual status_t updateStream(const std::vector<sp<Surface>> &outputSurfaces,
+ virtual status_t updateStream(const std::vector<SurfaceHolder> &outputSurfaces,
const std::vector<OutputStreamInfo> &outputInfo,
const std::vector<size_t> &removedSurfaceIds,
KeyedVector<sp<Surface>, size_t> *outputMap/*out*/);
@@ -89,6 +83,8 @@
return false;
}
+ virtual status_t setTransform(int transform, bool mayChangeMirror, int surfaceId);
+
private:
static const size_t kMaxOutputs = 4;
@@ -97,17 +93,26 @@
// depends on this flag.
bool mUseHalBufManager;
- // Pair of an output Surface and its unique ID
- typedef std::pair<sp<Surface>, size_t> SurfaceUniqueId;
+ // Struct of an output SurfaceHolder, transform, and its unique ID
+ struct SurfaceHolderUniqueId {
+ SurfaceHolder mSurfaceHolder;
+ int mTransform = -1;
+ size_t mId = -1;
- // Map surfaceId -> (output surface, unique surface ID)
- std::array<SurfaceUniqueId, kMaxOutputs> mSurfaceUniqueIds;
+ SurfaceHolderUniqueId() = default;
+ SurfaceHolderUniqueId(size_t id) : mId(id) {}
+ SurfaceHolderUniqueId(const SurfaceHolder& holder, size_t id) :
+ mSurfaceHolder(holder), mId(id) {}
+ };
+
+ // Map surfaceId -> SurfaceHolderUniqueId
+ std::array<SurfaceHolderUniqueId, kMaxOutputs> mSurfaceUniqueIds;
size_t mNextUniqueSurfaceId = 0;
ssize_t getNextSurfaceIdLocked();
- status_t revertPartialUpdateLocked(const KeyedVector<sp<Surface>, size_t> &removedSurfaces,
+ status_t revertPartialUpdateLocked(const KeyedVector<size_t, SurfaceHolder> &removedSurfaces,
const KeyedVector<sp<Surface>, size_t> &attachedSurfaces);
/**
diff --git a/services/camera/libcameraservice/device3/Camera3StreamInterface.h b/services/camera/libcameraservice/device3/Camera3StreamInterface.h
index 0786622..8f3249d 100644
--- a/services/camera/libcameraservice/device3/Camera3StreamInterface.h
+++ b/services/camera/libcameraservice/device3/Camera3StreamInterface.h
@@ -115,7 +115,6 @@
int64_t dynamicRangeProfile;
int64_t streamUseCase;
int timestampBase;
- int mirrorMode;
int32_t colorSpace;
OutputStreamInfo() :
width(-1), height(-1), format(-1), dataSpace(HAL_DATASPACE_UNKNOWN),
@@ -123,17 +122,21 @@
dynamicRangeProfile(ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD),
streamUseCase(ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_DEFAULT),
timestampBase(OutputConfiguration::TIMESTAMP_BASE_DEFAULT),
- mirrorMode(OutputConfiguration::MIRROR_MODE_AUTO),
colorSpace(ANDROID_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP_UNSPECIFIED) {}
OutputStreamInfo(int _width, int _height, int _format, android_dataspace _dataSpace,
uint64_t _consumerUsage, const std::unordered_set<int32_t>& _sensorPixelModesUsed,
- int64_t _dynamicRangeProfile, int _streamUseCase, int _timestampBase, int _mirrorMode,
+ int64_t _dynamicRangeProfile, int _streamUseCase, int _timestampBase,
int32_t _colorSpace) :
width(_width), height(_height), format(_format),
dataSpace(_dataSpace), consumerUsage(_consumerUsage),
sensorPixelModesUsed(_sensorPixelModesUsed), dynamicRangeProfile(_dynamicRangeProfile),
- streamUseCase(_streamUseCase), timestampBase(_timestampBase), mirrorMode(_mirrorMode),
- colorSpace(_colorSpace) {}
+ streamUseCase(_streamUseCase), timestampBase(_timestampBase), colorSpace(_colorSpace) {}
+};
+
+// A holder containing a surface and its corresponding mirroring mode
+struct SurfaceHolder {
+ sp<Surface> mSurface;
+ int mMirrorMode = OutputConfiguration::MIRROR_MODE_AUTO;
};
// Utility class to lock and unlock a GraphicBuffer
diff --git a/services/camera/libcameraservice/device3/Camera3StreamSplitter.cpp b/services/camera/libcameraservice/device3/Camera3StreamSplitter.cpp
index 77c037a..a360abf 100644
--- a/services/camera/libcameraservice/device3/Camera3StreamSplitter.cpp
+++ b/services/camera/libcameraservice/device3/Camera3StreamSplitter.cpp
@@ -14,32 +14,39 @@
* limitations under the License.
*/
-#include <inttypes.h>
-
#define LOG_TAG "Camera3StreamSplitter"
#define ATRACE_TAG ATRACE_TAG_CAMERA
//#define LOG_NDEBUG 0
+#include <binder/ProcessState.h>
#include <camera/StringUtils.h>
#include <com_android_graphics_libgui_flags.h>
#include <gui/BufferItem.h>
+#include <gui/BufferItemConsumer.h>
#include <gui/BufferQueue.h>
#include <gui/IGraphicBufferConsumer.h>
#include <gui/IGraphicBufferProducer.h>
#include <gui/Surface.h>
-
+#include <system/window.h>
#include <ui/GraphicBuffer.h>
-
-#include <binder/ProcessState.h>
-
#include <utils/Trace.h>
#include <cutils/atomic.h>
+#include <inttypes.h>
+#include <algorithm>
+#include <cstdint>
+#include <memory>
#include "Camera3Stream.h"
+#include "Flags.h"
#include "Camera3StreamSplitter.h"
+// We're relying on a large number of yet-to-be-fully-launched flag dependencies
+// here. So instead of flagging each one, we flag the entire implementation to
+// improve legibility.
+#if USE_NEW_STREAM_SPLITTER
+
namespace android {
status_t Camera3StreamSplitter::connect(const std::unordered_map<size_t, sp<Surface>> &surfaces,
@@ -55,7 +62,7 @@
Mutex::Autolock lock(mMutex);
status_t res = OK;
- if (mOutputs.size() > 0 || mConsumer != nullptr) {
+ if (mOutputSurfaces.size() > 0 || mBufferItemConsumer != nullptr) {
SP_LOGE("%s: already connected", __FUNCTION__);
return BAD_VALUE;
}
@@ -82,43 +89,43 @@
}
}
-#if !COM_ANDROID_GRAPHICS_LIBGUI_FLAGS(WB_CONSUMER_BASE_OWNS_BQ)
- // Create BufferQueue for input
- BufferQueue::createBufferQueue(&mProducer, &mConsumer);
-#endif // !COM_ANDROID_GRAPHICS_LIBGUI_FLAGS(WB_CONSUMER_BASE_OWNS_BQ)
-
// Allocate 1 extra buffer to handle the case where all buffers are detached
// from input, and attached to the outputs. In this case, the input queue's
// dequeueBuffer can still allocate 1 extra buffer before being blocked by
// the output's attachBuffer().
mMaxConsumerBuffers++;
+
#if COM_ANDROID_GRAPHICS_LIBGUI_FLAGS(WB_CONSUMER_BASE_OWNS_BQ)
- mBufferItemConsumer = new BufferItemConsumer(consumerUsage, mMaxConsumerBuffers);
+ mBufferItemConsumer = sp<BufferItemConsumer>::make(consumerUsage, mMaxConsumerBuffers);
+ mSurface = mBufferItemConsumer->getSurface();
#else
- mBufferItemConsumer = new BufferItemConsumer(mConsumer, consumerUsage, mMaxConsumerBuffers);
+ // Create BufferQueue for input
+ sp<IGraphicBufferProducer> bqProducer;
+ sp<IGraphicBufferConsumer> bqConsumer;
+ BufferQueue::createBufferQueue(&bqProducer, &bqConsumer);
+
+ mBufferItemConsumer = new BufferItemConsumer(bqConsumer, consumerUsage, mMaxConsumerBuffers);
+ mSurface = new Surface(bqProducer);
#endif // COM_ANDROID_GRAPHICS_LIBGUI_FLAGS(WB_CONSUMER_BASE_OWNS_BQ)
+
if (mBufferItemConsumer == nullptr) {
return NO_MEMORY;
}
-#if COM_ANDROID_GRAPHICS_LIBGUI_FLAGS(WB_CONSUMER_BASE_OWNS_BQ)
- mProducer = mBufferItemConsumer->getSurface()->getIGraphicBufferProducer();
- mConsumer = mBufferItemConsumer->getIGraphicBufferConsumer();
-#endif // COM_ANDROID_GRAPHICS_LIBGUI_FLAGS(WB_CONSUMER_BASE_OWNS_BQ)
- mConsumer->setConsumerName(toString8(mConsumerName));
+ mBufferItemConsumer->setName(toString8(mConsumerName));
- *consumer = new Surface(mProducer);
+ *consumer = mSurface;
if (*consumer == nullptr) {
return NO_MEMORY;
}
- res = mProducer->setAsyncMode(true);
+ res = mSurface->setAsyncMode(true);
if (res != OK) {
SP_LOGE("%s: Failed to enable input queue async mode: %s(%d)", __FUNCTION__,
strerror(-res), res);
return res;
}
- res = mConsumer->consumerConnect(this, /* controlledByApp */ false);
+ mBufferItemConsumer->setFrameAvailableListener(this);
mWidth = width;
mHeight = height;
@@ -139,25 +146,19 @@
ATRACE_CALL();
Mutex::Autolock lock(mMutex);
- for (auto& notifier : mNotifiers) {
- sp<IGraphicBufferProducer> producer = notifier.first;
- sp<OutputListener> listener = notifier.second;
- IInterface::asBinder(producer)->unlinkToDeath(listener);
- }
mNotifiers.clear();
- for (auto& output : mOutputs) {
+ for (auto& output : mOutputSurfaces) {
if (output.second != nullptr) {
output.second->disconnect(NATIVE_WINDOW_API_CAMERA);
}
}
- mOutputs.clear();
mOutputSurfaces.clear();
- mOutputSlots.clear();
+ mHeldBuffers.clear();
mConsumerBufferCount.clear();
- if (mConsumer.get() != nullptr) {
- mConsumer->consumerDisconnect();
+ if (mBufferItemConsumer != nullptr) {
+ mBufferItemConsumer->abandon();
}
if (mBuffers.size() > 0) {
@@ -189,7 +190,7 @@
}
if (mMaxConsumerBuffers > mAcquiredInputBuffers) {
- res = mConsumer->setMaxAcquiredBufferCount(mMaxConsumerBuffers);
+ res = mBufferItemConsumer->setMaxAcquiredBufferCount(mMaxConsumerBuffers);
}
return res;
@@ -200,6 +201,17 @@
mUseHalBufManager = enabled;
}
+status_t Camera3StreamSplitter::setTransform(size_t surfaceId, int transform) {
+ Mutex::Autolock lock(mMutex);
+ if (!mOutputSurfaces.contains(surfaceId) || mOutputSurfaces[surfaceId] == nullptr) {
+ SP_LOGE("%s: No surface at id %zu", __FUNCTION__, surfaceId);
+ return BAD_VALUE;
+ }
+
+ mOutputTransforms[surfaceId] = transform;
+ return OK;
+}
+
status_t Camera3StreamSplitter::addOutputLocked(size_t surfaceId, const sp<Surface>& outputQueue) {
ATRACE_CALL();
if (outputQueue == nullptr) {
@@ -207,7 +219,7 @@
return BAD_VALUE;
}
- if (mOutputs[surfaceId] != nullptr) {
+ if (mOutputSurfaces[surfaceId] != nullptr) {
SP_LOGE("%s: surfaceId: %u already taken!", __FUNCTION__, (unsigned) surfaceId);
return BAD_VALUE;
}
@@ -226,11 +238,9 @@
return res;
}
- sp<IGraphicBufferProducer> gbp = outputQueue->getIGraphicBufferProducer();
// Connect to the buffer producer
- sp<OutputListener> listener(new OutputListener(this, gbp));
- IInterface::asBinder(gbp)->linkToDeath(listener);
- res = outputQueue->connect(NATIVE_WINDOW_API_CAMERA, listener);
+ sp<OutputListener> listener = sp<OutputListener>::make(this, outputQueue);
+ res = outputQueue->connect(NATIVE_WINDOW_API_CAMERA, listener, /* reportBufferRemoval */ false);
if (res != NO_ERROR) {
SP_LOGE("addOutput: failed to connect (%d)", res);
return res;
@@ -272,22 +282,21 @@
outputQueue->setDequeueTimeout(timeout);
}
- res = gbp->allowAllocation(false);
+ res = outputQueue->allowAllocation(false);
if (res != OK) {
SP_LOGE("%s: Failed to turn off allocation for outputQueue", __FUNCTION__);
return res;
}
// Add new entry into mOutputs
- mOutputs[surfaceId] = gbp;
mOutputSurfaces[surfaceId] = outputQueue;
mConsumerBufferCount[surfaceId] = maxConsumerBuffers;
if (mConsumerBufferCount[surfaceId] > mMaxHalBuffers) {
SP_LOGW("%s: Consumer buffer count %zu larger than max. Hal buffers: %zu", __FUNCTION__,
mConsumerBufferCount[surfaceId], mMaxHalBuffers);
}
- mNotifiers[gbp] = listener;
- mOutputSlots[gbp] = std::make_unique<OutputSlots>(totalBufferCount);
+ mNotifiers[outputQueue] = listener;
+ mHeldBuffers[outputQueue] = std::make_unique<HeldBuffers>(totalBufferCount);
mMaxConsumerBuffers += maxConsumerBuffers;
return NO_ERROR;
@@ -304,7 +313,7 @@
}
if (mAcquiredInputBuffers < mMaxConsumerBuffers) {
- res = mConsumer->setMaxAcquiredBufferCount(mMaxConsumerBuffers);
+ res = mBufferItemConsumer->setMaxAcquiredBufferCount(mMaxConsumerBuffers);
if (res != OK) {
SP_LOGE("%s: setMaxAcquiredBufferCount failed %d", __FUNCTION__, res);
return res;
@@ -315,70 +324,54 @@
}
status_t Camera3StreamSplitter::removeOutputLocked(size_t surfaceId) {
- if (mOutputs[surfaceId] == nullptr) {
+ if (mOutputSurfaces[surfaceId] == nullptr) {
SP_LOGE("%s: output surface is not present!", __FUNCTION__);
return BAD_VALUE;
}
- sp<IGraphicBufferProducer> gbp = mOutputs[surfaceId];
+ sp<Surface> surface = mOutputSurfaces[surfaceId];
//Search and decrement the ref. count of any buffers that are
//still attached to the removed surface.
std::vector<uint64_t> pendingBufferIds;
- auto& outputSlots = *mOutputSlots[gbp];
- for (size_t i = 0; i < outputSlots.size(); i++) {
- if (outputSlots[i] != nullptr) {
- pendingBufferIds.push_back(outputSlots[i]->getId());
- auto rc = gbp->detachBuffer(i);
- if (rc != NO_ERROR) {
- //Buffers that fail to detach here will be scheduled for detach in the
- //input buffer queue and the rest of the registered outputs instead.
- //This will help ensure that camera stops accessing buffers that still
- //can get referenced by the disconnected output.
- mDetachedBuffers.emplace(outputSlots[i]->getId());
- }
+
+ // TODO: can we simplify this to just use the tracker?
+ for (const auto& buffer : (*mHeldBuffers[surface])) {
+ pendingBufferIds.push_back(buffer->getId());
+ auto rc = surface->detachBuffer(buffer);
+ if (rc != NO_ERROR) {
+ // Buffers that fail to detach here will be scheduled for detach in the
+ // input buffer queue and the rest of the registered outputs instead.
+ // This will help ensure that camera stops accessing buffers that still
+ // can get referenced by the disconnected output.
+ mDetachedBuffers.emplace(buffer->getId());
}
}
- mOutputs[surfaceId] = nullptr;
mOutputSurfaces[surfaceId] = nullptr;
- mOutputSlots[gbp] = nullptr;
+ mHeldBuffers[surface] = nullptr;
for (const auto &id : pendingBufferIds) {
decrementBufRefCountLocked(id, surfaceId);
}
- auto res = IInterface::asBinder(gbp)->unlinkToDeath(mNotifiers[gbp]);
- if (res != OK) {
- SP_LOGE("%s: Failed to unlink producer death listener: %d ", __FUNCTION__, res);
- return res;
- }
-
- res = gbp->disconnect(NATIVE_WINDOW_API_CAMERA);
+ status_t res = surface->disconnect(NATIVE_WINDOW_API_CAMERA);
if (res != OK) {
SP_LOGE("%s: Unable disconnect from producer interface: %d ", __FUNCTION__, res);
return res;
}
- mNotifiers[gbp] = nullptr;
+ mNotifiers[surface] = nullptr;
mMaxConsumerBuffers -= mConsumerBufferCount[surfaceId];
mConsumerBufferCount[surfaceId] = 0;
return res;
}
-status_t Camera3StreamSplitter::outputBufferLocked(const sp<IGraphicBufferProducer>& output,
+status_t Camera3StreamSplitter::outputBufferLocked(const sp<Surface>& output,
const BufferItem& bufferItem, size_t surfaceId) {
ATRACE_CALL();
status_t res;
- IGraphicBufferProducer::QueueBufferInput queueInput(
- bufferItem.mTimestamp, bufferItem.mIsAutoTimestamp,
- bufferItem.mDataSpace, bufferItem.mCrop,
- static_cast<int32_t>(bufferItem.mScalingMode),
- bufferItem.mTransform, bufferItem.mFence);
-
- IGraphicBufferProducer::QueueBufferOutput queueOutput;
uint64_t bufferId = bufferItem.mGraphicBuffer->getId();
const BufferTracker& tracker = *(mBuffers[bufferId]);
- int slot = getSlotForOutputLocked(output, tracker.getBuffer());
if (mOutputSurfaces[surfaceId] != nullptr) {
sp<ANativeWindow> anw = mOutputSurfaces[surfaceId];
@@ -388,19 +381,31 @@
SP_LOGE("%s: Invalid surface id: %zu!", __FUNCTION__, surfaceId);
}
+ output->setBuffersTimestamp(bufferItem.mTimestamp);
+ output->setBuffersDataSpace(static_cast<ui::Dataspace>(bufferItem.mDataSpace));
+ output->setCrop(&bufferItem.mCrop);
+ output->setScalingMode(bufferItem.mScalingMode);
+
+ int transform = bufferItem.mTransform;
+ if (mOutputTransforms.contains(surfaceId)) {
+ transform = mOutputTransforms[surfaceId];
+ }
+ output->setBuffersTransform(transform);
+
// In case the output BufferQueue has its own lock, if we hold splitter lock while calling
// queueBuffer (which will try to acquire the output lock), the output could be holding its
// own lock calling releaseBuffer (which will try to acquire the splitter lock), running into
// circular lock situation.
mMutex.unlock();
- res = output->queueBuffer(slot, queueInput, &queueOutput);
+ SurfaceQueueBufferOutput queueBufferOutput;
+ res = output->queueBuffer(bufferItem.mGraphicBuffer, bufferItem.mFence, &queueBufferOutput);
mMutex.lock();
- SP_LOGV("%s: Queuing buffer to buffer queue %p slot %d returns %d",
- __FUNCTION__, output.get(), slot, res);
- //During buffer queue 'mMutex' is not held which makes the removal of
- //"output" possible. Check whether this is the case and return.
- if (mOutputSlots[output] == nullptr) {
+ SP_LOGV("%s: Queuing buffer to buffer queue %p bufferId %" PRIu64 " returns %d", __FUNCTION__,
+ output.get(), bufferId, res);
+ // During buffer queue 'mMutex' is not held which makes the removal of
+ // "output" possible. Check whether this is the case and return.
+ if (mOutputSurfaces[surfaceId] == nullptr) {
return res;
}
if (res != OK) {
@@ -418,7 +423,7 @@
// If the queued buffer replaces a pending buffer in the async
// queue, no onBufferReleased is called by the buffer queue.
// Proactively trigger the callback to avoid buffer loss.
- if (queueOutput.bufferReplaced) {
+ if (queueBufferOutput.bufferReplaced) {
onBufferReplacedLocked(output, surfaceId);
}
@@ -456,52 +461,32 @@
auto tracker = std::make_unique<BufferTracker>(gb, surface_ids);
for (auto& surface_id : surface_ids) {
- sp<IGraphicBufferProducer>& gbp = mOutputs[surface_id];
- if (gbp.get() == nullptr) {
+ sp<Surface>& surface = mOutputSurfaces[surface_id];
+ if (surface.get() == nullptr) {
//Output surface got likely removed by client.
continue;
}
- int slot = getSlotForOutputLocked(gbp, gb);
- if (slot != BufferItem::INVALID_BUFFER_SLOT) {
- //Buffer is already attached to this output surface.
- continue;
- }
+
//Temporarly Unlock the mutex when trying to attachBuffer to the output
//queue, because attachBuffer could block in case of a slow consumer. If
//we block while holding the lock, onFrameAvailable and onBufferReleased
//will block as well because they need to acquire the same lock.
mMutex.unlock();
- res = gbp->attachBuffer(&slot, gb);
+ res = surface->attachBuffer(anb);
mMutex.lock();
if (res != OK) {
- SP_LOGE("%s: Cannot attachBuffer from GraphicBufferProducer %p: %s (%d)",
- __FUNCTION__, gbp.get(), strerror(-res), res);
+ SP_LOGE("%s: Cannot attachBuffer from GraphicBufferProducer %p: %s (%d)", __FUNCTION__,
+ surface.get(), strerror(-res), res);
// TODO: might need to detach/cleanup the already attached buffers before return?
return res;
}
- if ((slot < 0) || (slot > BufferQueue::NUM_BUFFER_SLOTS)) {
- SP_LOGE("%s: Slot received %d either bigger than expected maximum %d or negative!",
- __FUNCTION__, slot, BufferQueue::NUM_BUFFER_SLOTS);
- return BAD_VALUE;
- }
//During buffer attach 'mMutex' is not held which makes the removal of
//"gbp" possible. Check whether this is the case and continue.
- if (mOutputSlots[gbp] == nullptr) {
+ if (mHeldBuffers[surface] == nullptr) {
continue;
}
- auto& outputSlots = *mOutputSlots[gbp];
- if (static_cast<size_t> (slot + 1) > outputSlots.size()) {
- outputSlots.resize(slot + 1);
- }
- if (outputSlots[slot] != nullptr) {
- // If the buffer is attached to a slot which already contains a buffer,
- // the previous buffer will be removed from the output queue. Decrement
- // the reference count accordingly.
- decrementBufRefCountLocked(outputSlots[slot]->getId(), surface_id);
- }
- SP_LOGV("%s: Attached buffer %p to slot %d on output %p.",__FUNCTION__, gb.get(),
- slot, gbp.get());
- outputSlots[slot] = gb;
+ mHeldBuffers[surface]->insert(gb);
+ SP_LOGV("%s: Attached buffer %p on output %p.", __FUNCTION__, gb.get(), surface.get());
}
mBuffers[bufferId] = std::move(tracker);
@@ -515,25 +500,14 @@
// Acquire and detach the buffer from the input
BufferItem bufferItem;
- status_t res = mConsumer->acquireBuffer(&bufferItem, /* presentWhen */ 0);
+ status_t res = mBufferItemConsumer->acquireBuffer(&bufferItem, /* presentWhen */ 0);
if (res != NO_ERROR) {
SP_LOGE("%s: Acquiring buffer from input failed (%d)", __FUNCTION__, res);
mOnFrameAvailableRes.store(res);
return;
}
- uint64_t bufferId;
- if (bufferItem.mGraphicBuffer != nullptr) {
- mInputSlots[bufferItem.mSlot] = bufferItem;
- } else if (bufferItem.mAcquireCalled) {
- bufferItem.mGraphicBuffer = mInputSlots[bufferItem.mSlot].mGraphicBuffer;
- mInputSlots[bufferItem.mSlot].mFrameNumber = bufferItem.mFrameNumber;
- } else {
- SP_LOGE("%s: Invalid input graphic buffer!", __FUNCTION__);
- mOnFrameAvailableRes.store(BAD_VALUE);
- return;
- }
- bufferId = bufferItem.mGraphicBuffer->getId();
+ uint64_t bufferId = bufferItem.mGraphicBuffer->getId();
if (mBuffers.find(bufferId) == mBuffers.end()) {
SP_LOGE("%s: Acquired buffer doesn't exist in attached buffer map",
@@ -556,13 +530,12 @@
SP_LOGV("%s: BufferTracker for buffer %" PRId64 ", number of requests %zu",
__FUNCTION__, bufferItem.mGraphicBuffer->getId(), tracker.requestedSurfaces().size());
for (const auto id : tracker.requestedSurfaces()) {
-
- if (mOutputs[id] == nullptr) {
+ if (mOutputSurfaces[id] == nullptr) {
//Output surface got likely removed by client.
continue;
}
- res = outputBufferLocked(mOutputs[id], bufferItem, id);
+ res = outputBufferLocked(mOutputSurfaces[id], bufferItem, id);
if (res != OK) {
SP_LOGE("%s: outputBufferLocked failed %d", __FUNCTION__, res);
mOnFrameAvailableRes.store(res);
@@ -601,26 +574,11 @@
mBuffers.erase(id);
uint64_t bufferId = tracker_ptr->getBuffer()->getId();
- int consumerSlot = -1;
- uint64_t frameNumber;
- auto inputSlot = mInputSlots.begin();
- for (; inputSlot != mInputSlots.end(); inputSlot++) {
- if (inputSlot->second.mGraphicBuffer->getId() == bufferId) {
- consumerSlot = inputSlot->second.mSlot;
- frameNumber = inputSlot->second.mFrameNumber;
- break;
- }
- }
- if (consumerSlot == -1) {
- SP_LOGE("%s: Buffer missing inside input slots!", __FUNCTION__);
- return;
- }
auto detachBuffer = mDetachedBuffers.find(bufferId);
bool detach = (detachBuffer != mDetachedBuffers.end());
if (detach) {
mDetachedBuffers.erase(detachBuffer);
- mInputSlots.erase(inputSlot);
}
// Temporarily unlock mutex to avoid circular lock:
// 1. This function holds splitter lock, calls releaseBuffer which triggers
@@ -629,15 +587,14 @@
// 2. Camera3SharedOutputStream::getBufferLocked calls
// attachBufferToOutputs, which holds the stream lock, and waits for the
// splitter lock.
- sp<IGraphicBufferConsumer> consumer(mConsumer);
mMutex.unlock();
int res = NO_ERROR;
- if (consumer != nullptr) {
+ if (mBufferItemConsumer != nullptr) {
if (detach) {
- res = consumer->detachBuffer(consumerSlot);
+ res = mBufferItemConsumer->detachBuffer(tracker_ptr->getBuffer());
} else {
- res = consumer->releaseBuffer(consumerSlot, frameNumber,
- EGL_NO_DISPLAY, EGL_NO_SYNC_KHR, tracker_ptr->getMergedFence());
+ res = mBufferItemConsumer->releaseBuffer(tracker_ptr->getBuffer(),
+ tracker_ptr->getMergedFence());
}
} else {
SP_LOGE("%s: consumer has become null!", __FUNCTION__);
@@ -659,23 +616,25 @@
}
}
-void Camera3StreamSplitter::onBufferReleasedByOutput(
- const sp<IGraphicBufferProducer>& from) {
+void Camera3StreamSplitter::onBufferReleasedByOutput(const sp<Surface>& from) {
ATRACE_CALL();
- sp<Fence> fence;
- int slot = BufferItem::INVALID_BUFFER_SLOT;
- auto res = from->dequeueBuffer(&slot, &fence, mWidth, mHeight, mFormat, mProducerUsage,
- nullptr, nullptr);
+ from->setBuffersDimensions(mWidth, mHeight);
+ from->setBuffersFormat(mFormat);
+ from->setUsage(mProducerUsage);
+
+ sp<GraphicBuffer> buffer;
+ sp<Fence> fence;
+ auto res = from->dequeueBuffer(&buffer, &fence);
Mutex::Autolock lock(mMutex);
- handleOutputDequeueStatusLocked(res, slot);
+ handleOutputDequeueStatusLocked(res, buffer);
if (res != OK) {
return;
}
size_t surfaceId = 0;
bool found = false;
- for (const auto& it : mOutputs) {
+ for (const auto& it : mOutputSurfaces) {
if (it.second == from) {
found = true;
surfaceId = it.first;
@@ -687,36 +646,29 @@
return;
}
- returnOutputBufferLocked(fence, from, surfaceId, slot);
+ returnOutputBufferLocked(fence, from, surfaceId, buffer);
}
-void Camera3StreamSplitter::onBufferReplacedLocked(
- const sp<IGraphicBufferProducer>& from, size_t surfaceId) {
+void Camera3StreamSplitter::onBufferReplacedLocked(const sp<Surface>& from, size_t surfaceId) {
ATRACE_CALL();
- sp<Fence> fence;
- int slot = BufferItem::INVALID_BUFFER_SLOT;
- auto res = from->dequeueBuffer(&slot, &fence, mWidth, mHeight, mFormat, mProducerUsage,
- nullptr, nullptr);
- handleOutputDequeueStatusLocked(res, slot);
+ from->setBuffersDimensions(mWidth, mHeight);
+ from->setBuffersFormat(mFormat);
+ from->setUsage(mProducerUsage);
+
+ sp<GraphicBuffer> buffer;
+ sp<Fence> fence;
+ auto res = from->dequeueBuffer(&buffer, &fence);
+ handleOutputDequeueStatusLocked(res, buffer);
if (res != OK) {
return;
}
- returnOutputBufferLocked(fence, from, surfaceId, slot);
+ returnOutputBufferLocked(fence, from, surfaceId, buffer);
}
void Camera3StreamSplitter::returnOutputBufferLocked(const sp<Fence>& fence,
- const sp<IGraphicBufferProducer>& from, size_t surfaceId, int slot) {
- sp<GraphicBuffer> buffer;
-
- if (mOutputSlots[from] == nullptr) {
- //Output surface got likely removed by client.
- return;
- }
-
- auto outputSlots = *mOutputSlots[from];
- buffer = outputSlots[slot];
+ const sp<Surface>& from, size_t surfaceId, const sp<GraphicBuffer>& buffer) {
BufferTracker& tracker = *(mBuffers[buffer->getId()]);
// Merge the release fence of the incoming buffer so that the fence we send
// back to the input includes all of the outputs' fences
@@ -727,9 +679,16 @@
auto detachBuffer = mDetachedBuffers.find(buffer->getId());
bool detach = (detachBuffer != mDetachedBuffers.end());
if (detach) {
- auto res = from->detachBuffer(slot);
+ auto res = from->detachBuffer(buffer);
if (res == NO_ERROR) {
- outputSlots[slot] = nullptr;
+ if (mHeldBuffers.contains(from)) {
+ mHeldBuffers[from]->erase(buffer);
+ } else {
+ uint64_t surfaceId = 0;
+ from->getUniqueId(&surfaceId);
+ SP_LOGW("%s: buffer %" PRIu64 " not found in held buffers of surface %" PRIu64,
+ __FUNCTION__, buffer->getId(), surfaceId);
+ }
} else {
SP_LOGE("%s: detach buffer from output failed (%d)", __FUNCTION__, res);
}
@@ -739,22 +698,17 @@
decrementBufRefCountLocked(buffer->getId(), surfaceId);
}
-void Camera3StreamSplitter::handleOutputDequeueStatusLocked(status_t res, int slot) {
+void Camera3StreamSplitter::handleOutputDequeueStatusLocked(status_t res,
+ const sp<GraphicBuffer>& buffer) {
if (res == NO_INIT) {
// If we just discovered that this output has been abandoned, note that,
// but we can't do anything else, since buffer is invalid
onAbandonedLocked();
- } else if (res == IGraphicBufferProducer::BUFFER_NEEDS_REALLOCATION) {
- SP_LOGE("%s: Producer needs to re-allocate buffer!", __FUNCTION__);
- SP_LOGE("%s: This should not happen with buffer allocation disabled!", __FUNCTION__);
- } else if (res == IGraphicBufferProducer::RELEASE_ALL_BUFFERS) {
- SP_LOGE("%s: All slot->buffer mapping should be released!", __FUNCTION__);
- SP_LOGE("%s: This should not happen with buffer allocation disabled!", __FUNCTION__);
} else if (res == NO_MEMORY) {
SP_LOGE("%s: No free buffers", __FUNCTION__);
} else if (res == WOULD_BLOCK) {
SP_LOGE("%s: Dequeue call will block", __FUNCTION__);
- } else if (res != OK || (slot == BufferItem::INVALID_BUFFER_SLOT)) {
+ } else if (res != OK || buffer == nullptr) {
SP_LOGE("%s: dequeue buffer from output failed (%d)", __FUNCTION__, res);
}
}
@@ -773,36 +727,20 @@
SP_LOGV("One of my outputs has abandoned me");
}
-int Camera3StreamSplitter::getSlotForOutputLocked(const sp<IGraphicBufferProducer>& gbp,
- const sp<GraphicBuffer>& gb) {
- auto& outputSlots = *mOutputSlots[gbp];
-
- for (size_t i = 0; i < outputSlots.size(); i++) {
- if (outputSlots[i] == gb) {
- return (int)i;
- }
- }
-
- SP_LOGV("%s: Cannot find slot for gb %p on output %p", __FUNCTION__, gb.get(),
- gbp.get());
- return BufferItem::INVALID_BUFFER_SLOT;
-}
-
-Camera3StreamSplitter::OutputListener::OutputListener(
- wp<Camera3StreamSplitter> splitter,
- wp<IGraphicBufferProducer> output)
- : mSplitter(splitter), mOutput(output) {}
+Camera3StreamSplitter::OutputListener::OutputListener(wp<Camera3StreamSplitter> splitter,
+ wp<Surface> output)
+ : mSplitter(splitter), mOutput(output) {}
void Camera3StreamSplitter::OutputListener::onBufferReleased() {
ATRACE_CALL();
sp<Camera3StreamSplitter> splitter = mSplitter.promote();
- sp<IGraphicBufferProducer> output = mOutput.promote();
+ sp<Surface> output = mOutput.promote();
if (splitter != nullptr && output != nullptr) {
splitter->onBufferReleasedByOutput(output);
}
}
-void Camera3StreamSplitter::OutputListener::binderDied(const wp<IBinder>& /* who */) {
+void Camera3StreamSplitter::OutputListener::onRemoteDied() {
sp<Camera3StreamSplitter> splitter = mSplitter.promote();
if (splitter != nullptr) {
Mutex::Autolock lock(splitter->mMutex);
@@ -833,3 +771,5 @@
}
} // namespace android
+
+#endif // USE_NEW_STREAM_SPLITTER
\ No newline at end of file
diff --git a/services/camera/libcameraservice/device3/Camera3StreamSplitter.h b/services/camera/libcameraservice/device3/Camera3StreamSplitter.h
index 43f12fb..6e5d8f7 100644
--- a/services/camera/libcameraservice/device3/Camera3StreamSplitter.h
+++ b/services/camera/libcameraservice/device3/Camera3StreamSplitter.h
@@ -14,22 +14,25 @@
* limitations under the License.
*/
-#ifndef ANDROID_SERVERS_STREAMSPLITTER_H
-#define ANDROID_SERVERS_STREAMSPLITTER_H
+#pragma once
+#include <memory>
#include <unordered_set>
#include <camera/CameraMetadata.h>
-#include <gui/IConsumerListener.h>
-#include <gui/Surface.h>
#include <gui/BufferItemConsumer.h>
+#include <gui/Surface.h>
#include <utils/Condition.h>
#include <utils/Mutex.h>
#include <utils/StrongPointer.h>
#include <utils/Timers.h>
+#include "Flags.h"
+
+#if USE_NEW_STREAM_SPLITTER // trying to do this for each change would be a huge hassle.
+
#define SP_LOGV(x, ...) ALOGV("[%s] " x, mConsumerName.c_str(), ##__VA_ARGS__)
#define SP_LOGI(x, ...) ALOGI("[%s] " x, mConsumerName.c_str(), ##__VA_ARGS__)
#define SP_LOGW(x, ...) ALOGW("[%s] " x, mConsumerName.c_str(), ##__VA_ARGS__)
@@ -38,8 +41,6 @@
namespace android {
class GraphicBuffer;
-class IGraphicBufferConsumer;
-class IGraphicBufferProducer;
// Camera3StreamSplitter is an autonomous class that manages one input BufferQueue
// and multiple output BufferQueues. By using the buffer attach and detach logic
@@ -47,9 +48,8 @@
// BufferQueue, where each buffer queued to the input is available to be
// acquired by each of the outputs, and is able to be dequeued by the input
// again only once all of the outputs have released it.
-class Camera3StreamSplitter : public BnConsumerListener {
-public:
-
+class Camera3StreamSplitter : public BufferItemConsumer::FrameAvailableListener {
+ public:
// Constructor
Camera3StreamSplitter(bool useHalBufManager = false);
@@ -67,7 +67,7 @@
//
// A return value other than NO_ERROR means that an error has occurred and
// outputQueue has not been added to the splitter. BAD_VALUE is returned if
- // outputQueue is NULL. See IGraphicBufferProducer::connect for explanations
+ // outputQueue is NULL. See Surface::connect for explanations
// of other error codes.
status_t addOutput(size_t surfaceId, const sp<Surface>& outputQueue);
@@ -96,8 +96,9 @@
void setHalBufferManager(bool enabled);
+ status_t setTransform(size_t surfaceId, int transform);
private:
- // From IConsumerListener
+ // From BufferItemConsumer::FrameAvailableListener
//
// During this callback, we store some tracking information, detach the
// buffer from the input, and attach it to each of the outputs. This call
@@ -106,23 +107,13 @@
// input.
void onFrameAvailable(const BufferItem& item) override;
- // From IConsumerListener
+ // From BufferItemConsumer::FrameAvailableListener
//
// Similar to onFrameAvailable, but buffer item is indeed replacing a buffer
// in the buffer queue. This can happen when buffer queue is in droppable
// mode.
void onFrameReplaced(const BufferItem& item) override;
- // From IConsumerListener
- // We don't care about released buffers because we detach each buffer as
- // soon as we acquire it. See the comment for onBufferReleased below for
- // some clarifying notes about the name.
- void onBuffersReleased() override {}
-
- // From IConsumerListener
- // We don't care about sideband streams, since we won't be splitting them
- void onSidebandStreamChanged() override {}
-
// This is the implementation of the onBufferReleased callback from
// IProducerListener. It gets called from an OutputListener (see below), and
// 'from' is which producer interface from which the callback was received.
@@ -132,10 +123,10 @@
// last output releasing the buffer, and if so, release it to the input.
// If we release the buffer to the input, we allow a blocked
// onFrameAvailable call to proceed.
- void onBufferReleasedByOutput(const sp<IGraphicBufferProducer>& from);
+ void onBufferReleasedByOutput(const sp<Surface>& from);
// Called by outputBufferLocked when a buffer in the async buffer queue got replaced.
- void onBufferReplacedLocked(const sp<IGraphicBufferProducer>& from, size_t surfaceId);
+ void onBufferReplacedLocked(const sp<Surface>& from, size_t surfaceId);
// When this is called, the splitter disconnects from (i.e., abandons) its
// input queue and signals any waiting onFrameAvailable calls to wake up.
@@ -149,35 +140,32 @@
void decrementBufRefCountLocked(uint64_t id, size_t surfaceId);
// Check for and handle any output surface dequeue errors.
- void handleOutputDequeueStatusLocked(status_t res, int slot);
+ void handleOutputDequeueStatusLocked(status_t res, const sp<GraphicBuffer>& buffer);
// Handles released output surface buffers.
- void returnOutputBufferLocked(const sp<Fence>& fence, const sp<IGraphicBufferProducer>& from,
- size_t surfaceId, int slot);
+ void returnOutputBufferLocked(const sp<Fence>& fence, const sp<Surface>& from, size_t surfaceId,
+ const sp<GraphicBuffer>& buffer);
// This is a thin wrapper class that lets us determine which BufferQueue
// the IProducerListener::onBufferReleased callback is associated with. We
// create one of these per output BufferQueue, and then pass the producer
// into onBufferReleasedByOutput above.
- class OutputListener : public SurfaceListener,
- public IBinder::DeathRecipient {
- public:
- OutputListener(wp<Camera3StreamSplitter> splitter,
- wp<IGraphicBufferProducer> output);
+ class OutputListener : public SurfaceListener {
+ public:
+ OutputListener(wp<Camera3StreamSplitter> splitter, wp<Surface> output);
virtual ~OutputListener() = default;
- // From IProducerListener
+ // From SurfaceListener
void onBufferReleased() override;
bool needsReleaseNotify() override { return true; };
- void onBuffersDiscarded(const std::vector<sp<GraphicBuffer>>& /*buffers*/) override {};
+ void onBuffersDiscarded(const std::vector<sp<GraphicBuffer>>&) override {}
void onBufferDetached(int /*slot*/) override {}
- // From IBinder::DeathRecipient
- void binderDied(const wp<IBinder>& who) override;
+ void onRemoteDied() override;
private:
wp<Camera3StreamSplitter> mSplitter;
- wp<IGraphicBufferProducer> mOutput;
+ wp<Surface> mOutput;
};
class BufferTracker {
@@ -198,7 +186,6 @@
const std::vector<size_t> requestedSurfaces() const { return mRequestedSurfaces; }
private:
-
// Disallow copying
BufferTracker(const BufferTracker& other);
BufferTracker& operator=(const BufferTracker& other);
@@ -223,16 +210,12 @@
// Send a buffer to particular output, and increment the reference count
// of the buffer. If this output is abandoned, the buffer's reference count
// won't be incremented.
- status_t outputBufferLocked(const sp<IGraphicBufferProducer>& output,
- const BufferItem& bufferItem, size_t surfaceId);
+ status_t outputBufferLocked(const sp<Surface>& output, const BufferItem& bufferItem,
+ size_t surfaceId);
// Get unique name for the buffer queue consumer
std::string getUniqueConsumerName();
- // Helper function to get the BufferQueue slot where a particular buffer is attached to.
- int getSlotForOutputLocked(const sp<IGraphicBufferProducer>& gbp,
- const sp<GraphicBuffer>& gb);
-
// Sum of max consumer buffers for all outputs
size_t mMaxConsumerBuffers = 0;
size_t mMaxHalBuffers = 0;
@@ -249,20 +232,15 @@
Mutex mMutex;
- sp<IGraphicBufferProducer> mProducer;
- sp<IGraphicBufferConsumer> mConsumer;
sp<BufferItemConsumer> mBufferItemConsumer;
sp<Surface> mSurface;
- //Map graphic buffer ids -> buffer items
- std::unordered_map<uint64_t, BufferItem> mInputSlots;
-
- //Map surface ids -> gbp outputs
- std::unordered_map<int, sp<IGraphicBufferProducer> > mOutputs;
-
//Map surface ids -> gbp outputs
std::unordered_map<int, sp<Surface>> mOutputSurfaces;
+ // Map surface ids -> transform
+ std::unordered_map<int, int> mOutputTransforms;
+
//Map surface ids -> consumer buffer count
std::unordered_map<int, size_t > mConsumerBufferCount;
@@ -271,18 +249,22 @@
// buffer, but also contain merged release fences).
std::unordered_map<uint64_t, std::unique_ptr<BufferTracker> > mBuffers;
- struct GBPHash {
- std::size_t operator()(const sp<IGraphicBufferProducer>& producer) const {
- return std::hash<IGraphicBufferProducer *>{}(producer.get());
+ struct SurfaceHash {
+ std::size_t operator()(const sp<Surface>& producer) const {
+ return std::hash<Surface*>{}(producer.get());
}
};
- std::unordered_map<sp<IGraphicBufferProducer>, sp<OutputListener>,
- GBPHash> mNotifiers;
+ struct BufferHash {
+ std::size_t operator()(const sp<GraphicBuffer>& buffer) const {
+ return std::hash<GraphicBuffer*>{}(buffer.get());
+ }
+ };
- typedef std::vector<sp<GraphicBuffer>> OutputSlots;
- std::unordered_map<sp<IGraphicBufferProducer>, std::unique_ptr<OutputSlots>,
- GBPHash> mOutputSlots;
+ std::unordered_map<sp<Surface>, sp<OutputListener>, SurfaceHash> mNotifiers;
+
+ typedef std::unordered_set<sp<GraphicBuffer>, BufferHash> HeldBuffers;
+ std::unordered_map<sp<Surface>, std::unique_ptr<HeldBuffers>, SurfaceHash> mHeldBuffers;
//A set of buffers that could potentially stay in some of the outputs after removal
//and therefore should be detached from the input queue.
@@ -301,4 +283,4 @@
} // namespace android
-#endif
+#endif // USE_NEW_STREAM_SPLITTER
diff --git a/services/camera/libcameraservice/device3/InFlightRequest.h b/services/camera/libcameraservice/device3/InFlightRequest.h
index 3626f20..62980c5 100644
--- a/services/camera/libcameraservice/device3/InFlightRequest.h
+++ b/services/camera/libcameraservice/device3/InFlightRequest.h
@@ -198,6 +198,9 @@
// Current output transformation
int32_t transform;
+ // Whether the app explicitly uses ZOOM_RATIO
+ bool useZoomRatio;
+
static const nsecs_t kDefaultMinExpectedDuration = 33333333; // 33 ms
static const nsecs_t kDefaultMaxExpectedDuration = 100000000; // 100 ms
@@ -220,14 +223,15 @@
rotateAndCropAuto(false),
autoframingAuto(false),
requestTimeNs(0),
- transform(-1) {
+ transform(-1),
+ useZoomRatio(false) {
}
InFlightRequest(int numBuffers, CaptureResultExtras extras, bool hasInput,
bool hasAppCallback, nsecs_t minDuration, nsecs_t maxDuration, bool fixedFps,
const std::set<std::set<std::string>>& physicalCameraIdSet, bool isStillCapture,
bool isZslCapture, bool rotateAndCropAuto, bool autoframingAuto,
- const std::set<std::string>& idsWithZoom, nsecs_t requestNs,
+ const std::set<std::string>& idsWithZoom, nsecs_t requestNs, bool useZoomRatio,
const SurfaceMap& outSurfaces = SurfaceMap{}) :
shutterTimestamp(0),
sensorTimestamp(0),
@@ -250,7 +254,8 @@
cameraIdsWithZoom(idsWithZoom),
requestTimeNs(requestNs),
outputSurfaces(outSurfaces),
- transform(-1) {
+ transform(-1),
+ useZoomRatio(useZoomRatio) {
}
};
diff --git a/services/camera/libcameraservice/device3/ZoomRatioMapper.cpp b/services/camera/libcameraservice/device3/ZoomRatioMapper.cpp
index 2016284..ef2109a 100644
--- a/services/camera/libcameraservice/device3/ZoomRatioMapper.cpp
+++ b/services/camera/libcameraservice/device3/ZoomRatioMapper.cpp
@@ -19,6 +19,8 @@
#include <algorithm>
+#include <com_android_internal_camera_flags.h>
+
#include "device3/ZoomRatioMapper.h"
#include "utils/SessionConfigurationUtilsHost.h"
@@ -42,13 +44,25 @@
}
status_t ZoomRatioMapper::initZoomRatioInTemplate(CameraMetadata *request) {
+ status_t res = OK;
+
+ if (flags::zoom_method()) {
+ uint8_t zoomMethod = ANDROID_CONTROL_ZOOM_METHOD_AUTO;
+ res = request->update(ANDROID_CONTROL_ZOOM_METHOD, &zoomMethod, 1);
+ if (res != OK) {
+ ALOGE("%s: Failed to update CONTROL_ZOOM_METHOD key: %s (%d)",
+ __FUNCTION__, strerror(-res), res);
+ return res;
+ }
+ }
+
camera_metadata_entry_t entry;
entry = request->find(ANDROID_CONTROL_ZOOM_RATIO);
float defaultZoomRatio = 1.0f;
if (entry.count == 0) {
- return request->update(ANDROID_CONTROL_ZOOM_RATIO, &defaultZoomRatio, 1);
+ res = request->update(ANDROID_CONTROL_ZOOM_RATIO, &defaultZoomRatio, 1);
}
- return OK;
+ return res;
}
status_t ZoomRatioMapper::overrideZoomRatioTags(
@@ -57,40 +71,69 @@
return BAD_VALUE;
}
+ bool halSupportZoomRatio = false;
camera_metadata_entry_t entry;
entry = deviceInfo->find(ANDROID_CONTROL_ZOOM_RATIO_RANGE);
if (entry.count != 2 && entry.count != 0) return BAD_VALUE;
-
// Hal has zoom ratio support
if (entry.count == 2) {
- *supportNativeZoomRatio = true;
- return OK;
+ halSupportZoomRatio = true;
}
- // Hal has no zoom ratio support
- *supportNativeZoomRatio = false;
-
- entry = deviceInfo->find(ANDROID_SCALER_AVAILABLE_MAX_DIGITAL_ZOOM);
- if (entry.count != 1) {
- ALOGI("%s: Camera device doesn't support SCALER_AVAILABLE_MAX_DIGITAL_ZOOM key!",
- __FUNCTION__);
- return OK;
- }
-
- float zoomRange[] = {1.0f, entry.data.f[0]};
- status_t res = deviceInfo->update(ANDROID_CONTROL_ZOOM_RATIO_RANGE, zoomRange, 2);
- if (res != OK) {
- ALOGE("%s: Failed to update CONTROL_ZOOM_RATIO_RANGE key: %s (%d)",
- __FUNCTION__, strerror(-res), res);
- return res;
- }
-
+ // Add ZOOM_METHOD request and result keys
std::vector<int32_t> requestKeys;
entry = deviceInfo->find(ANDROID_REQUEST_AVAILABLE_REQUEST_KEYS);
if (entry.count > 0) {
requestKeys.insert(requestKeys.end(), entry.data.i32, entry.data.i32 + entry.count);
}
- requestKeys.push_back(ANDROID_CONTROL_ZOOM_RATIO);
+ if (flags::zoom_method()) {
+ requestKeys.push_back(ANDROID_CONTROL_ZOOM_METHOD);
+ }
+ std::vector<int32_t> resultKeys;
+ entry = deviceInfo->find(ANDROID_REQUEST_AVAILABLE_RESULT_KEYS);
+ if (entry.count > 0) {
+ resultKeys.insert(resultKeys.end(), entry.data.i32, entry.data.i32 + entry.count);
+ }
+ if (flags::zoom_method()) {
+ resultKeys.push_back(ANDROID_CONTROL_ZOOM_METHOD);
+ }
+
+ // Add additional keys if the HAL doesn't support ZOOM_RATIO
+ status_t res = OK;
+ if (!halSupportZoomRatio) {
+ entry = deviceInfo->find(ANDROID_SCALER_AVAILABLE_MAX_DIGITAL_ZOOM);
+ if (entry.count != 1) {
+ ALOGI("%s: Camera device doesn't support SCALER_AVAILABLE_MAX_DIGITAL_ZOOM key!",
+ __FUNCTION__);
+ return OK;
+ }
+ float zoomRange[] = {1.0f, entry.data.f[0]};
+ res = deviceInfo->update(ANDROID_CONTROL_ZOOM_RATIO_RANGE, zoomRange, 2);
+ if (res != OK) {
+ ALOGE("%s: Failed to update CONTROL_ZOOM_RATIO_RANGE key: %s (%d)",
+ __FUNCTION__, strerror(-res), res);
+ return res;
+ }
+
+ requestKeys.push_back(ANDROID_CONTROL_ZOOM_RATIO);
+ resultKeys.push_back(ANDROID_CONTROL_ZOOM_RATIO);
+
+ std::vector<int32_t> charKeys;
+ entry = deviceInfo->find(ANDROID_REQUEST_AVAILABLE_CHARACTERISTICS_KEYS);
+ if (entry.count > 0) {
+ charKeys.insert(charKeys.end(), entry.data.i32, entry.data.i32 + entry.count);
+ }
+ charKeys.push_back(ANDROID_CONTROL_ZOOM_RATIO_RANGE);
+ res = deviceInfo->update(ANDROID_REQUEST_AVAILABLE_CHARACTERISTICS_KEYS,
+ charKeys.data(), charKeys.size());
+ if (res != OK) {
+ ALOGE("%s: Failed to update REQUEST_AVAILABLE_CHARACTERISTICS_KEYS: %s (%d)",
+ __FUNCTION__, strerror(-res), res);
+ return res;
+ }
+ }
+
+ // Update available request and result keys
res = deviceInfo->update(ANDROID_REQUEST_AVAILABLE_REQUEST_KEYS,
requestKeys.data(), requestKeys.size());
if (res != OK) {
@@ -98,13 +141,6 @@
__FUNCTION__, strerror(-res), res);
return res;
}
-
- std::vector<int32_t> resultKeys;
- entry = deviceInfo->find(ANDROID_REQUEST_AVAILABLE_RESULT_KEYS);
- if (entry.count > 0) {
- resultKeys.insert(resultKeys.end(), entry.data.i32, entry.data.i32 + entry.count);
- }
- resultKeys.push_back(ANDROID_CONTROL_ZOOM_RATIO);
res = deviceInfo->update(ANDROID_REQUEST_AVAILABLE_RESULT_KEYS,
resultKeys.data(), resultKeys.size());
if (res != OK) {
@@ -113,20 +149,7 @@
return res;
}
- std::vector<int32_t> charKeys;
- entry = deviceInfo->find(ANDROID_REQUEST_AVAILABLE_CHARACTERISTICS_KEYS);
- if (entry.count > 0) {
- charKeys.insert(charKeys.end(), entry.data.i32, entry.data.i32 + entry.count);
- }
- charKeys.push_back(ANDROID_CONTROL_ZOOM_RATIO_RANGE);
- res = deviceInfo->update(ANDROID_REQUEST_AVAILABLE_CHARACTERISTICS_KEYS,
- charKeys.data(), charKeys.size());
- if (res != OK) {
- ALOGE("%s: Failed to update REQUEST_AVAILABLE_CHARACTERISTICS_KEYS: %s (%d)",
- __FUNCTION__, strerror(-res), res);
- return res;
- }
-
+ *supportNativeZoomRatio = halSupportZoomRatio;
return OK;
}
@@ -223,7 +246,6 @@
if (!mIsValid) return INVALID_OPERATION;
status_t res = OK;
- bool zoomRatioIs1 = true;
camera_metadata_entry_t entry;
int arrayHeight, arrayWidth = 0;
res = getArrayDimensionsToBeUsed(request, &arrayWidth, &arrayHeight);
@@ -231,9 +253,14 @@
return res;
}
entry = request->find(ANDROID_CONTROL_ZOOM_RATIO);
- if (entry.count == 1 && entry.data.f[0] != 1.0f) {
- zoomRatioIs1 = false;
-
+ bool zoomRatioIs1 = (entry.count == 0 || entry.data.f[0] == 1.0f);
+ bool useZoomRatio = !zoomRatioIs1;
+ if (flags::zoom_method()) {
+ entry = request->find(ANDROID_CONTROL_ZOOM_METHOD);
+ useZoomRatio |= (entry.count == 1
+ && entry.data.u8[0] == ANDROID_CONTROL_ZOOM_METHOD_ZOOM_RATIO);
+ }
+ if (useZoomRatio) {
// If cropRegion is windowboxing, override it with activeArray
camera_metadata_entry_t cropRegionEntry = request->find(ANDROID_SCALER_CROP_REGION);
if (cropRegionEntry.count == 4) {
@@ -248,9 +275,9 @@
}
}
- if (mHalSupportsZoomRatio && zoomRatioIs1) {
+ if (mHalSupportsZoomRatio && !useZoomRatio) {
res = separateZoomFromCropLocked(request, false/*isResult*/, arrayWidth, arrayHeight);
- } else if (!mHalSupportsZoomRatio && !zoomRatioIs1) {
+ } else if (!mHalSupportsZoomRatio && useZoomRatio) {
res = combineZoomAndCropLocked(request, false/*isResult*/, arrayWidth, arrayHeight);
}
@@ -263,7 +290,8 @@
return res;
}
-status_t ZoomRatioMapper::updateCaptureResult(CameraMetadata* result, bool requestedZoomRatioIs1) {
+status_t ZoomRatioMapper::updateCaptureResult(
+ CameraMetadata* result, bool zoomMethodIsRatio, bool zoomRatioIs1) {
if (!mIsValid) return INVALID_OPERATION;
status_t res = OK;
@@ -273,9 +301,11 @@
if (res != OK) {
return res;
}
- if (mHalSupportsZoomRatio && requestedZoomRatioIs1) {
+
+ bool useZoomRatio = !zoomRatioIs1 || zoomMethodIsRatio;
+ if (mHalSupportsZoomRatio && !useZoomRatio) {
res = combineZoomAndCropLocked(result, true/*isResult*/, arrayWidth, arrayHeight);
- } else if (!mHalSupportsZoomRatio && !requestedZoomRatioIs1) {
+ } else if (!mHalSupportsZoomRatio && useZoomRatio) {
res = separateZoomFromCropLocked(result, true/*isResult*/, arrayWidth, arrayHeight);
} else {
camera_metadata_entry_t entry = result->find(ANDROID_CONTROL_ZOOM_RATIO);
@@ -285,6 +315,12 @@
}
}
+ if (flags::zoom_method()) {
+ uint8_t zoomMethod = zoomMethodIsRatio ? ANDROID_CONTROL_ZOOM_METHOD_ZOOM_RATIO :
+ ANDROID_CONTROL_ZOOM_METHOD_AUTO;
+ result->update(ANDROID_CONTROL_ZOOM_METHOD, &zoomMethod, 1);
+ }
+
return res;
}
diff --git a/services/camera/libcameraservice/device3/ZoomRatioMapper.h b/services/camera/libcameraservice/device3/ZoomRatioMapper.h
index 1aa8e78..2ae2010 100644
--- a/services/camera/libcameraservice/device3/ZoomRatioMapper.h
+++ b/services/camera/libcameraservice/device3/ZoomRatioMapper.h
@@ -64,7 +64,9 @@
/**
* Update capture result to handle both cropRegion and zoomRatio.
*/
- status_t updateCaptureResult(CameraMetadata *request, bool requestedZoomRatioIs1);
+ status_t updateCaptureResult(CameraMetadata *request,
+ bool zoomMethodIsRatio,
+ bool zoomRatioIs1);
public: // Visible for testing. Do not use concurently.
void scaleCoordinates(int32_t* coordPairs, int coordCount,
diff --git a/services/camera/libcameraservice/device3/aidl/AidlCamera3Device.cpp b/services/camera/libcameraservice/device3/aidl/AidlCamera3Device.cpp
index 868b7ef..e52e9a2 100644
--- a/services/camera/libcameraservice/device3/aidl/AidlCamera3Device.cpp
+++ b/services/camera/libcameraservice/device3/aidl/AidlCamera3Device.cpp
@@ -918,12 +918,6 @@
camera3::camera_stream_t *src = config->streams[i];
Camera3Stream* cam3stream = Camera3Stream::cast(src);
- // For stream configurations with multi res streams, hal buffer manager has to be used.
- if (!flags::session_hal_buf_manager() && cam3stream->getHalStreamGroupId() != -1 &&
- src->stream_type != CAMERA_STREAM_INPUT) {
- mUseHalBufManager = true;
- config->use_hal_buf_manager = true;
- }
cam3stream->setBufferFreedListener(this);
int streamId = cam3stream->getId();
StreamType streamType;
@@ -1002,8 +996,7 @@
err.getMessage());
return AidlProviderInfo::mapToStatusT(err);
}
- if (flags::session_hal_buf_manager() && interfaceVersion >= AIDL_DEVICE_SESSION_V3
- && mSupportSessionHalBufManager) {
+ if (interfaceVersion >= AIDL_DEVICE_SESSION_V3 && mSupportSessionHalBufManager) {
err = mAidlSession->configureStreamsV2(requestedConfiguration, &configureStreamsRet);
finalConfiguration = std::move(configureStreamsRet.halStreams);
} else {
@@ -1015,18 +1008,16 @@
return AidlProviderInfo::mapToStatusT(err);
}
- if (flags::session_hal_buf_manager()) {
- std::set<int32_t> halBufferManagedStreamIds;
- for (const auto &halStream: finalConfiguration) {
- if ((interfaceVersion >= AIDL_DEVICE_SESSION_V3 &&
- mSupportSessionHalBufManager && halStream.enableHalBufferManager)
- || mUseHalBufManager) {
- halBufferManagedStreamIds.insert(halStream.id);
- }
+ std::set<int32_t> halBufferManagedStreamIds;
+ for (const auto &halStream: finalConfiguration) {
+ if ((interfaceVersion >= AIDL_DEVICE_SESSION_V3 &&
+ mSupportSessionHalBufManager && halStream.enableHalBufferManager)
+ || mUseHalBufManager) {
+ halBufferManagedStreamIds.insert(halStream.id);
}
- mHalBufManagedStreamIds = std::move(halBufferManagedStreamIds);
- config->hal_buffer_managed_streams = mHalBufManagedStreamIds;
}
+ mHalBufManagedStreamIds = std::move(halBufferManagedStreamIds);
+ config->hal_buffer_managed_streams = mHalBufManagedStreamIds;
// And convert output stream configuration from AIDL
for (size_t i = 0; i < config->num_streams; i++) {
camera3::camera_stream_t *dst = config->streams[i];
@@ -1096,10 +1087,8 @@
}
dstStream->setUsage(
mapProducerToFrameworkUsage(src.producerUsage));
- if (flags::session_hal_buf_manager()) {
- dstStream->setHalBufferManager(
- contains(config->hal_buffer_managed_streams, streamId));
- }
+ dstStream->setHalBufferManager(
+ contains(config->hal_buffer_managed_streams, streamId));
}
dst->max_buffers = src.maxBuffers;
}
diff --git a/services/camera/libcameraservice/device3/aidl/AidlCamera3Device.h b/services/camera/libcameraservice/device3/aidl/AidlCamera3Device.h
index abc3f9c..474dfc7 100644
--- a/services/camera/libcameraservice/device3/aidl/AidlCamera3Device.h
+++ b/services/camera/libcameraservice/device3/aidl/AidlCamera3Device.h
@@ -72,7 +72,8 @@
virtual status_t switchToOffline(const std::vector<int32_t>& /*streamsToKeep*/,
/*out*/ sp<CameraOfflineSessionBase>* /*session*/) override;
- status_t initialize(sp<CameraProviderManager> manager, const std::string& monitorTags) override;
+ virtual status_t initialize(sp<CameraProviderManager> manager, const std::string& monitorTags)
+ override;
class AidlHalInterface : public Camera3Device::HalInterface {
public:
diff --git a/services/camera/libcameraservice/device3/aidl/AidlCamera3SharedDevice.cpp b/services/camera/libcameraservice/device3/aidl/AidlCamera3SharedDevice.cpp
new file mode 100644
index 0000000..5bd8d8c
--- /dev/null
+++ b/services/camera/libcameraservice/device3/aidl/AidlCamera3SharedDevice.cpp
@@ -0,0 +1,306 @@
+/*
+ * Copyright (C) 2024 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+#define LOG_TAG "AidlCamera3-SharedDevice"
+#define ATRACE_TAG ATRACE_TAG_CAMERA
+//#define LOG_NDEBUG 0
+//#define LOG_NNDEBUG 0 // Per-frame verbose logging
+
+#ifdef LOG_NNDEBUG
+#define ALOGVV(...) ALOGV(__VA_ARGS__)
+#else
+#define ALOGVV(...) ((void)0)
+#endif
+
+// Convenience macro for transient errors
+#define CLOGE(fmt, ...) ALOGE("Camera %s: %s: " fmt, mId.c_str(), __FUNCTION__, \
+ ##__VA_ARGS__)
+
+#define CLOGW(fmt, ...) ALOGW("Camera %s: %s: " fmt, mId.c_str(), __FUNCTION__, \
+ ##__VA_ARGS__)
+
+// Convenience macros for transitioning to the error state
+#define SET_ERR(fmt, ...) setErrorState( \
+ "%s: " fmt, __FUNCTION__, \
+ ##__VA_ARGS__)
+#define SET_ERR_L(fmt, ...) setErrorStateLocked( \
+ "%s: " fmt, __FUNCTION__, \
+ ##__VA_ARGS__)
+#define DECODE_VALUE(decoder, type, var) \
+ do { \
+ if (decoder.get##type(var) != OK) { \
+ return NOT_ENOUGH_DATA; \
+ } \
+ } while (0)
+
+#include <utils/Log.h>
+#include <utils/Trace.h>
+#include <cstring>
+#include "../../common/aidl/AidlProviderInfo.h"
+#include "utils/SessionConfigurationUtils.h"
+#include "AidlCamera3SharedDevice.h"
+
+using namespace android::camera3;
+using namespace android::camera3::SessionConfigurationUtils;
+
+namespace android {
+
+// Metadata android.info.availableSharedOutputConfigurations has list of shared output
+// configurations. Each output configuration has minimum of 11 entries of size long
+// followed by the physical camera id if present.
+// See android.info.availableSharedOutputConfigurations for details.
+static const int SHARED_OUTPUT_CONFIG_NUM_OF_ENTRIES = 11;
+std::map<std::string, sp<AidlCamera3SharedDevice>> AidlCamera3SharedDevice::sSharedDevices;
+std::map<std::string, std::unordered_set<int>> AidlCamera3SharedDevice::sClientsUid;
+sp<AidlCamera3SharedDevice> AidlCamera3SharedDevice::getInstance(
+ std::shared_ptr<CameraServiceProxyWrapper>& cameraServiceProxyWrapper,
+ std::shared_ptr<AttributionAndPermissionUtils> attributionAndPermissionUtils,
+ const std::string& id, bool overrideForPerfClass, int rotationOverride,
+ bool legacyClient) {
+ if (sClientsUid[id].empty()) {
+ AidlCamera3SharedDevice* sharedDevice = new AidlCamera3SharedDevice(
+ cameraServiceProxyWrapper, attributionAndPermissionUtils, id, overrideForPerfClass,
+ rotationOverride, legacyClient);
+ sSharedDevices[id] = sharedDevice;
+ }
+ if (attributionAndPermissionUtils != nullptr) {
+ sClientsUid[id].insert(attributionAndPermissionUtils->getCallingUid());
+ }
+ return sSharedDevices[id];
+}
+
+status_t AidlCamera3SharedDevice::initialize(sp<CameraProviderManager> manager,
+ const std::string& monitorTags) {
+ ATRACE_CALL();
+ status_t res = OK;
+
+ if (mStatus == STATUS_UNINITIALIZED) {
+ res = AidlCamera3Device::initialize(manager, monitorTags);
+ if (res == OK) {
+ mSharedOutputConfigurations = getSharedOutputConfiguration();
+ }
+ }
+ return res;
+}
+
+status_t AidlCamera3SharedDevice::disconnectClient(int clientUid) {
+ if (sClientsUid[mId].erase(clientUid) == 0) {
+ ALOGW("%s: Camera %s: Client %d is not connected to shared device", __FUNCTION__,
+ mId.c_str(), clientUid);
+ }
+ if (sClientsUid[mId].empty()) {
+ return Camera3Device::disconnect();
+ }
+ return OK;
+}
+
+std::vector<OutputConfiguration> AidlCamera3SharedDevice::getSharedOutputConfiguration() {
+ std::vector<OutputConfiguration> sharedConfigs;
+ uint8_t colorspace = ANDROID_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP_UNSPECIFIED;
+ camera_metadata_entry sharedSessionColorSpace = mDeviceInfo.find(
+ ANDROID_SHARED_SESSION_COLOR_SPACE);
+ if (sharedSessionColorSpace.count > 0) {
+ colorspace = *sharedSessionColorSpace.data.u8;
+ }
+ camera_metadata_entry sharedSessionConfigs = mDeviceInfo.find(
+ ANDROID_SHARED_SESSION_OUTPUT_CONFIGURATIONS);
+ if (sharedSessionConfigs.count > 0) {
+ int numOfEntries = sharedSessionConfigs.count;
+ int i = 0;
+ uint8_t physicalCameraIdLen;
+ int surfaceType, width, height, format, mirrorMode, timestampBase, dataspace;
+ long usage, streamUseCase;
+ bool isReadOutTimestampEnabled;
+ while (numOfEntries >= SHARED_OUTPUT_CONFIG_NUM_OF_ENTRIES) {
+ surfaceType = (int)sharedSessionConfigs.data.i64[i];
+ width = (int)sharedSessionConfigs.data.i64[i+1];
+ height = (int)sharedSessionConfigs.data.i64[i+2];
+ format = (int)sharedSessionConfigs.data.i64[i+3];
+ mirrorMode = (int)sharedSessionConfigs.data.i64[i+4];
+ isReadOutTimestampEnabled = (sharedSessionConfigs.data.i64[i+5] != 0);
+ timestampBase = (int)sharedSessionConfigs.data.i64[i+6];
+ dataspace = (int)sharedSessionConfigs.data.i64[i+7];
+ usage = sharedSessionConfigs.data.i64[i+8];
+ streamUseCase = sharedSessionConfigs.data.i64[i+9];
+ physicalCameraIdLen = sharedSessionConfigs.data.i64[i+10];
+ numOfEntries -= SHARED_OUTPUT_CONFIG_NUM_OF_ENTRIES;
+ i += SHARED_OUTPUT_CONFIG_NUM_OF_ENTRIES;
+ if (numOfEntries < physicalCameraIdLen) {
+ ALOGE("%s: Camera %s: Number of remaining data (%d entries) in shared configuration"
+ " is less than physical camera id length %d. Malformed metadata"
+ " android.info.availableSharedOutputConfigurations.", __FUNCTION__,
+ mId.c_str(), numOfEntries, physicalCameraIdLen);
+ break;
+ }
+ std::string physicalCameraId;
+ long asciiValue;
+ for (int j = 0; j < physicalCameraIdLen; j++) {
+ asciiValue = sharedSessionConfigs.data.i64[i+j];
+ if (asciiValue == 0) { // Check for null terminator
+ break;
+ }
+ physicalCameraId += static_cast<char>(asciiValue);
+ }
+ OutputConfiguration* outConfig = new OutputConfiguration(surfaceType, width, height,
+ format, colorspace, mirrorMode, isReadOutTimestampEnabled, timestampBase,
+ dataspace, usage, streamUseCase, physicalCameraId);
+ sharedConfigs.push_back(*outConfig);
+ i += physicalCameraIdLen;
+ numOfEntries -= physicalCameraIdLen;
+ }
+ if (numOfEntries != 0) {
+ ALOGE("%s: Camera %s: there are still %d entries left in shared output configuration."
+ " Malformed metadata android.info.availableSharedOutputConfigurations.",
+ __FUNCTION__, mId.c_str(), numOfEntries);
+ }
+ }
+ return sharedConfigs;
+}
+
+status_t AidlCamera3SharedDevice::beginConfigure() {
+ status_t res;
+ int i = 0;
+
+ if (mStatus != STATUS_UNCONFIGURED) {
+ return OK;
+ }
+
+ for (auto config : mSharedOutputConfigurations) {
+ std::vector<SurfaceHolder> consumers;
+ android_dataspace dataSpace;
+ if (config.getColorSpace()
+ != ANDROID_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP_UNSPECIFIED
+ && config.getFormat() != HAL_PIXEL_FORMAT_BLOB) {
+ if (!dataSpaceFromColorSpace(&dataSpace, config.getColorSpace())) {
+ std::string msg = fmt::sprintf("Camera %s: color space %d not supported, "
+ " failed to convert to data space", mId.c_str(), config.getColorSpace());
+ ALOGE("%s: %s", __FUNCTION__, msg.c_str());
+ return INVALID_OPERATION;
+ }
+ }
+ std::unordered_set<int32_t> overriddenSensorPixelModes;
+ if (checkAndOverrideSensorPixelModesUsed(config.getSensorPixelModesUsed(),
+ config.getFormat(), config.getWidth(), config.getHeight(),
+ mDeviceInfo, &overriddenSensorPixelModes) != OK) {
+ std::string msg = fmt::sprintf("Camera %s: sensor pixel modes for stream with "
+ "format %#x are not valid",mId.c_str(), config.getFormat());
+ ALOGE("%s: %s", __FUNCTION__, msg.c_str());
+ return INVALID_OPERATION;
+ }
+ sp<IGraphicBufferProducer> producer;
+ sp<IGraphicBufferConsumer> consumer;
+ BufferQueue::createBufferQueue(&producer, &consumer);
+ mSharedSurfaces[i] = new Surface(producer);
+ consumers.push_back({mSharedSurfaces[i], config.getMirrorMode()});
+ mSharedStreams[i] = new Camera3SharedOutputStream(mNextStreamId, consumers,
+ config.getWidth(),config.getHeight(), config.getFormat(), config.getUsage(),
+ dataSpace, static_cast<camera_stream_rotation_t>(config.getRotation()),
+ mTimestampOffset, config.getPhysicalCameraId(), overriddenSensorPixelModes,
+ getTransportType(), config.getSurfaceSetID(), mUseHalBufManager,
+ config.getDynamicRangeProfile(), config.getStreamUseCase(),
+ mDeviceTimeBaseIsRealtime, config.getTimestampBase(),
+ config.getColorSpace(), config.useReadoutTimestamp());
+ int id = mSharedStreams[i]->getSurfaceId(consumers[0].mSurface);
+ if (id < 0) {
+ SET_ERR_L("Invalid surface id");
+ return BAD_VALUE;
+ }
+ mSharedSurfaceIds[i] = id;
+ mSharedStreams[i]->setStatusTracker(mStatusTracker);
+ mSharedStreams[i]->setBufferManager(mBufferManager);
+ mSharedStreams[i]->setImageDumpMask(mImageDumpMask);
+ res = mOutputStreams.add(mNextStreamId, mSharedStreams[i]);
+ if (res < 0) {
+ SET_ERR_L("Can't add new stream to set: %s (%d)", strerror(-res), res);
+ return res;
+ }
+ mSessionStatsBuilder.addStream(mNextStreamId);
+ mConfiguredOutputs.add(mNextStreamId++, config);
+ i++;
+ }
+ CameraMetadata sessionParams;
+ res = configureStreams(sessionParams, CAMERA_STREAM_CONFIGURATION_SHARED_MODE);
+ if (res != OK) {
+ std::string msg = fmt::sprintf("Camera %s: Error configuring streams: %s (%d)",
+ mId.c_str(), strerror(-res), res);
+ ALOGE("%s: %s", __FUNCTION__, msg.c_str());
+ return res;
+ }
+ return OK;
+}
+
+status_t AidlCamera3SharedDevice::getSharedStreamId(const OutputConfiguration &config,
+ int *streamId) {
+ if (streamId == nullptr) {
+ return BAD_VALUE;
+ }
+ for (size_t i = 0 ; i < mConfiguredOutputs.size(); i++){
+ OutputConfiguration sharedConfig = mConfiguredOutputs.valueAt(i);
+ if (config.sharedConfigEqual(sharedConfig)) {
+ *streamId = mConfiguredOutputs.keyAt(i);
+ return OK;
+ }
+ }
+ return INVALID_OPERATION;
+}
+
+status_t AidlCamera3SharedDevice::addSharedSurfaces(int streamId,
+ const std::vector<android::camera3::OutputStreamInfo> &outputInfo,
+ const std::vector<SurfaceHolder> &surfaces, std::vector<int> *surfaceIds) {
+ KeyedVector<sp<Surface>, size_t> outputMap;
+ std::vector<size_t> removedSurfaceIds;
+ status_t res;
+ sp<Camera3OutputStreamInterface> stream = mOutputStreams.get(streamId);
+ if (stream == nullptr) {
+ CLOGE("Stream %d is unknown", streamId);
+ return BAD_VALUE;
+ }
+
+ res = updateStream(streamId, surfaces, outputInfo, removedSurfaceIds, &outputMap);
+ if (res != OK) {
+ CLOGE("Stream %d failed to update stream (error %d %s) ",
+ streamId, res, strerror(-res));
+ return res;
+ }
+
+ for (size_t i = 0 ; i < outputMap.size(); i++){
+ if (surfaceIds != nullptr) {
+ surfaceIds->push_back(outputMap.valueAt(i));
+ }
+ }
+ return OK;
+}
+
+status_t AidlCamera3SharedDevice::removeSharedSurfaces(int streamId,
+ const std::vector<size_t> &removedSurfaceIds) {
+ KeyedVector<sp<Surface>, size_t> outputMap;
+ std::vector<SurfaceHolder> surfaces;
+ std::vector<OutputStreamInfo> outputInfo;
+ status_t res;
+ sp<Camera3OutputStreamInterface> stream = mOutputStreams.get(streamId);
+ if (stream == nullptr) {
+ CLOGE("Stream %d is unknown", streamId);
+ return BAD_VALUE;
+ }
+
+ res = updateStream(streamId, surfaces, outputInfo, removedSurfaceIds, &outputMap);
+ if (res != OK) {
+ CLOGE("Stream %d failed to update stream (error %d %s) ",
+ streamId, res, strerror(-res));
+ return res;
+ }
+ return OK;
+}
+}
diff --git a/services/camera/libcameraservice/device3/aidl/AidlCamera3SharedDevice.h b/services/camera/libcameraservice/device3/aidl/AidlCamera3SharedDevice.h
new file mode 100644
index 0000000..b2ee2d6
--- /dev/null
+++ b/services/camera/libcameraservice/device3/aidl/AidlCamera3SharedDevice.h
@@ -0,0 +1,66 @@
+/*
+ * Copyright (C) 2024 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef ANDROID_SERVERS_AIDLCAMERA3SHAREDDEVICE_H
+#define ANDROID_SERVERS_AIDLCAMERA3SHAREDDEVICE_H
+
+#include <camera/camera2/OutputConfiguration.h>
+#include "../Camera3SharedOutputStream.h"
+#include "AidlCamera3Device.h"
+namespace android {
+
+/**
+ * Shared CameraDevice for AIDL HAL devices.
+ */
+using ::android::camera3::Camera3SharedOutputStream;
+class AidlCamera3SharedDevice :
+ public AidlCamera3Device {
+ public:
+ static sp<AidlCamera3SharedDevice> getInstance(
+ std::shared_ptr<CameraServiceProxyWrapper>& cameraServiceProxyWrapper,
+ std::shared_ptr<AttributionAndPermissionUtils> attributionAndPermissionUtils,
+ const std::string& id, bool overrideForPerfClass, int rotationOverride,
+ bool legacyClient = false);
+ status_t initialize(sp<CameraProviderManager> manager,
+ const std::string& monitorTags) override;
+ status_t disconnectClient(int clientUid) override;
+ status_t beginConfigure() override;
+ status_t getSharedStreamId(const OutputConfiguration &config, int *streamId) override;
+ status_t addSharedSurfaces(int streamId,
+ const std::vector<android::camera3::OutputStreamInfo> &outputInfo,
+ const std::vector<SurfaceHolder>& surfaces,
+ std::vector<int> *surfaceIds = nullptr) override;
+ status_t removeSharedSurfaces(int streamId,
+ const std::vector<size_t> &surfaceIds) override;
+ private:
+ static std::map<std::string, sp<AidlCamera3SharedDevice>> sSharedDevices;
+ static std::map<std::string, std::unordered_set<int>> sClientsUid;
+ AidlCamera3SharedDevice(
+ std::shared_ptr<CameraServiceProxyWrapper>& cameraServiceProxyWrapper,
+ std::shared_ptr<AttributionAndPermissionUtils> attributionAndPermissionUtils,
+ const std::string& id, bool overrideForPerfClass, int rotationOverride,
+ bool legacyClient)
+ : AidlCamera3Device(cameraServiceProxyWrapper, attributionAndPermissionUtils, id,
+ overrideForPerfClass, rotationOverride, legacyClient) {}
+ std::vector<OutputConfiguration> getSharedOutputConfiguration();
+ std::vector<OutputConfiguration> mSharedOutputConfigurations;
+ std::vector<int> mSharedSurfaceIds;
+ std::vector<sp<Surface>> mSharedSurfaces;
+ std::vector<sp<Camera3SharedOutputStream>> mSharedStreams;
+ KeyedVector<int32_t, OutputConfiguration> mConfiguredOutputs;
+}; // class AidlCamera3SharedDevice
+}; // namespace android
+#endif
diff --git a/services/camera/libcameraservice/device3/deprecated/DeprecatedCamera3StreamSplitter.cpp b/services/camera/libcameraservice/device3/deprecated/DeprecatedCamera3StreamSplitter.cpp
index c1113e5..41be9a4 100644
--- a/services/camera/libcameraservice/device3/deprecated/DeprecatedCamera3StreamSplitter.cpp
+++ b/services/camera/libcameraservice/device3/deprecated/DeprecatedCamera3StreamSplitter.cpp
@@ -189,6 +189,17 @@
mUseHalBufManager = enabled;
}
+status_t DeprecatedCamera3StreamSplitter::setTransform(size_t surfaceId, int transform) {
+ Mutex::Autolock lock(mMutex);
+ if (!mOutputs.contains(surfaceId) || mOutputs[surfaceId] == nullptr) {
+ SP_LOGE("%s: No surface at id %zu", __FUNCTION__, surfaceId);
+ return BAD_VALUE;
+ }
+
+ mOutputTransforms[surfaceId] = transform;
+ return OK;
+}
+
status_t DeprecatedCamera3StreamSplitter::addOutputLocked(size_t surfaceId,
const sp<Surface>& outputQueue) {
ATRACE_CALL();
@@ -355,9 +366,13 @@
const sp<IGraphicBufferProducer>& output, const BufferItem& bufferItem, size_t surfaceId) {
ATRACE_CALL();
status_t res;
+ int transform = bufferItem.mTransform;
+ if (mOutputTransforms.contains(surfaceId)) {
+ transform = mOutputTransforms[surfaceId];
+ }
IGraphicBufferProducer::QueueBufferInput queueInput(
bufferItem.mTimestamp, bufferItem.mIsAutoTimestamp, bufferItem.mDataSpace,
- bufferItem.mCrop, static_cast<int32_t>(bufferItem.mScalingMode), bufferItem.mTransform,
+ bufferItem.mCrop, static_cast<int32_t>(bufferItem.mScalingMode), transform,
bufferItem.mFence);
IGraphicBufferProducer::QueueBufferOutput queueOutput;
@@ -620,8 +635,7 @@
if (detach) {
res = consumer->detachBuffer(consumerSlot);
} else {
- res = consumer->releaseBuffer(consumerSlot, frameNumber, EGL_NO_DISPLAY,
- EGL_NO_SYNC_KHR, tracker_ptr->getMergedFence());
+ res = consumer->releaseBuffer(consumerSlot, frameNumber, tracker_ptr->getMergedFence());
}
} else {
SP_LOGE("%s: consumer has become null!", __FUNCTION__);
diff --git a/services/camera/libcameraservice/device3/deprecated/DeprecatedCamera3StreamSplitter.h b/services/camera/libcameraservice/device3/deprecated/DeprecatedCamera3StreamSplitter.h
index 4610985..61b43a8 100644
--- a/services/camera/libcameraservice/device3/deprecated/DeprecatedCamera3StreamSplitter.h
+++ b/services/camera/libcameraservice/device3/deprecated/DeprecatedCamera3StreamSplitter.h
@@ -95,6 +95,7 @@
void setHalBufferManager(bool enabled);
+ status_t setTransform(size_t surfaceId, int transform);
private:
// From IConsumerListener
//
@@ -259,6 +260,9 @@
// Map surface ids -> gbp outputs
std::unordered_map<int, sp<Surface>> mOutputSurfaces;
+ // Map surface ids -> transform
+ std::unordered_map<int, int> mOutputTransforms;
+
// Map surface ids -> consumer buffer count
std::unordered_map<int, size_t> mConsumerBufferCount;
diff --git a/services/camera/libcameraservice/device3/hidl/HidlCamera3Device.cpp b/services/camera/libcameraservice/device3/hidl/HidlCamera3Device.cpp
index f507df9..6986d3c 100644
--- a/services/camera/libcameraservice/device3/hidl/HidlCamera3Device.cpp
+++ b/services/camera/libcameraservice/device3/hidl/HidlCamera3Device.cpp
@@ -929,7 +929,7 @@
switch (src->stream_type) {
case CAMERA_STREAM_OUTPUT:
streamType = StreamType::OUTPUT;
- if (flags::session_hal_buf_manager() && mUseHalBufManager) {
+ if (mUseHalBufManager) {
mHalBufManagedStreamIds.insert(streamId);
}
break;
diff --git a/services/camera/libcameraservice/gui/RingBufferConsumer.cpp b/services/camera/libcameraservice/gui/RingBufferConsumer.cpp
index c968e44..ec8da1a 100644
--- a/services/camera/libcameraservice/gui/RingBufferConsumer.cpp
+++ b/services/camera/libcameraservice/gui/RingBufferConsumer.cpp
@@ -248,9 +248,7 @@
// item.mGraphicBuffer was populated with the proper graphic-buffer
// at acquire even if it was previously acquired
- err = releaseBufferLocked(item.mSlot, item.mGraphicBuffer,
- EGL_NO_DISPLAY,
- EGL_NO_SYNC_KHR);
+ err = releaseBufferLocked(item.mSlot, item.mGraphicBuffer);
if (err != OK) {
BI_LOGE("Failed to release buffer: %s (%d)",
strerror(-err), err);
diff --git a/services/camera/libcameraservice/hidl/AidlCameraDeviceCallbacks.h b/services/camera/libcameraservice/hidl/AidlCameraDeviceCallbacks.h
index 152002b..98a0dbb 100644
--- a/services/camera/libcameraservice/hidl/AidlCameraDeviceCallbacks.h
+++ b/services/camera/libcameraservice/hidl/AidlCameraDeviceCallbacks.h
@@ -86,6 +86,10 @@
mCaptureResultMetadataQueue = metadataQueue;
}
+ virtual binder::Status onClientSharedAccessPriorityChanged(bool /*primaryClient*/) {
+ return binder::Status::ok();
+ }
+
private:
// Wrapper struct so that parameters to onResultReceived callback may be
// sent through an AMessage.
diff --git a/services/camera/libcameraservice/hidl/AidlCameraServiceListener.h b/services/camera/libcameraservice/hidl/AidlCameraServiceListener.h
index 78fca4e..b31ccc6 100644
--- a/services/camera/libcameraservice/hidl/AidlCameraServiceListener.h
+++ b/services/camera/libcameraservice/hidl/AidlCameraServiceListener.h
@@ -72,6 +72,10 @@
// empty implementation
return binder::Status::ok();
}
+ virtual binder::Status onCameraOpenedInSharedMode(const std::string& /*cameraId*/,
+ const std::string& /*clientPackageId*/, int32_t /*deviceId*/, bool /*primaryClient*/) {
+ return binder::Status::ok();
+ }
};
} // implementation
diff --git a/services/camera/libcameraservice/hidl/HidlCameraService.cpp b/services/camera/libcameraservice/hidl/HidlCameraService.cpp
index 59e892f..9d140f2 100644
--- a/services/camera/libcameraservice/hidl/HidlCameraService.cpp
+++ b/services/camera/libcameraservice/hidl/HidlCameraService.cpp
@@ -135,7 +135,7 @@
binder::Status serviceRet = mAidlICameraService->connectDevice(
callbacks, cameraId, 0/*oomScoreOffset*/,
/*targetSdkVersion*/__ANDROID_API_FUTURE__, ROTATION_OVERRIDE_NONE,
- clientAttribution, /*devicePolicy*/0, /*out*/&deviceRemote);
+ clientAttribution, /*devicePolicy*/0, /*sharedMode*/false, /*out*/&deviceRemote);
HStatus status = HStatus::NO_ERROR;
if (!serviceRet.isOk()) {
ALOGE("%s: Unable to connect to camera device", __FUNCTION__);
diff --git a/services/camera/libcameraservice/libcameraservice_fuzzer/camera_service_fuzzer.cpp b/services/camera/libcameraservice/libcameraservice_fuzzer/camera_service_fuzzer.cpp
index 12ac33f..6c98837 100644
--- a/services/camera/libcameraservice/libcameraservice_fuzzer/camera_service_fuzzer.cpp
+++ b/services/camera/libcameraservice/libcameraservice_fuzzer/camera_service_fuzzer.cpp
@@ -40,6 +40,7 @@
#include <fakeservicemanager/FakeServiceManager.h>
#include <fuzzbinder/random_binder.h>
#include <gui/BufferItemConsumer.h>
+#include <gui/Flags.h>
#include <gui/IGraphicBufferProducer.h>
#include <gui/Surface.h>
#include <gui/SurfaceComposerClient.h>
@@ -620,7 +621,11 @@
previewSurface = surfaceControl->getSurface();
if (previewSurface.get()) {
- cameraDevice->setPreviewTarget(previewSurface->getIGraphicBufferProducer());
+ cameraDevice->setPreviewTarget(previewSurface
+#if !WB_LIBCAMERASERVICE_WITH_DEPENDENCIES
+ ->getIGraphicBufferProducer()
+#endif
+ );
}
}
cameraDevice->setPreviewCallbackFlag(CAMERA_FRAME_CALLBACK_FLAG_CAMCORDER);
@@ -675,7 +680,11 @@
.apply();
sp<Surface> previewSurfaceVideo = surfaceControlVideo->getSurface();
if (previewSurfaceVideo.get()) {
- cameraDevice->setVideoTarget(previewSurfaceVideo->getIGraphicBufferProducer());
+ cameraDevice->setVideoTarget(previewSurfaceVideo
+#if !WB_LIBCAMERASERVICE_WITH_DEPENDENCIES
+ ->getIGraphicBufferProducer()
+#endif
+ );
}
}
cameraDevice->stopPreview();
@@ -740,6 +749,13 @@
// No op
return binder::Status::ok();
}
+
+ virtual binder::Status onCameraOpenedInSharedMode(const std::string& /*cameraId*/,
+ const std::string& /*clientPackageName*/, int32_t /*deviceId*/,
+ bool /*isPrimaryClient*/) {
+ // No op
+ return binder::Status::ok();
+ }
};
class TestCameraDeviceCallbacks : public hardware::camera2::BnCameraDeviceCallbacks {
@@ -780,6 +796,11 @@
virtual binder::Status onRequestQueueEmpty() {
return binder::Status::ok();
}
+
+ virtual binder::Status onClientSharedAccessPriorityChanged(bool /*isPrimaryClient*/) {
+ return binder::Status::ok();
+ }
+
};
class Camera2Fuzzer {
@@ -808,7 +829,7 @@
mCameraService->connectDevice(callbacks, s.cameraId,
0/*oomScoreDiff*/, /*targetSdkVersion*/__ANDROID_API_FUTURE__,
ROTATION_OVERRIDE_OVERRIDE_TO_PORTRAIT,
- clientAttribution, /*devicePolicy*/0, &device);
+ clientAttribution, /*devicePolicy*/0, /*sharedMode*/false, &device);
if (device == nullptr) {
continue;
}
diff --git a/services/camera/libcameraservice/tests/CameraPermissionsTest.cpp b/services/camera/libcameraservice/tests/CameraPermissionsTest.cpp
index 50aeaca..ff58c4a 100644
--- a/services/camera/libcameraservice/tests/CameraPermissionsTest.cpp
+++ b/services/camera/libcameraservice/tests/CameraPermissionsTest.cpp
@@ -77,6 +77,13 @@
// No op
return binder::Status::ok();
}
+
+ virtual binder::Status onCameraOpenedInSharedMode(const std::string& /*cameraId*/,
+ const std::string& /*clientPackageName*/, int32_t /*deviceId*/,
+ bool /*isPrimaryClient*/) {
+ // No op
+ return binder::Status::ok();
+ }
};
// Empty device callback.
@@ -118,6 +125,10 @@
virtual binder::Status onRequestQueueEmpty() {
return binder::Status::ok();
}
+
+ virtual binder::Status onClientSharedAccessPriorityChanged(bool /*isPrimaryClient*/) {
+ return binder::Status::ok();
+ }
};
// Override isCameraDisabled from the CameraServiceProxy with a flag.
@@ -242,7 +253,7 @@
sCameraService->connectDevice(callbacks, s.cameraId,
0/*oomScoreDiff*/, /*targetSdkVersion*/__ANDROID_API_FUTURE__,
hardware::ICameraService::ROTATION_OVERRIDE_NONE,
- clientAttribution, /*devicePolicy*/0, &device);
+ clientAttribution, /*devicePolicy*/0, /*sharedMode*/false, &device);
AutoDisconnectDevice autoDisconnect(device);
ASSERT_TRUE(!status.isOk()) << "connectDevice returned OK status";
ASSERT_EQ(status.serviceSpecificErrorCode(), hardware::ICameraService::ERROR_DISABLED)
@@ -257,7 +268,7 @@
sCameraService->connectDevice(callbacks, s.cameraId,
0/*oomScoreDiff*/, /*targetSdkVersion*/__ANDROID_API_FUTURE__,
hardware::ICameraService::ROTATION_OVERRIDE_NONE,
- clientAttribution, /*devicePolicy*/0, &device);
+ clientAttribution, /*devicePolicy*/0, /*sharedMode*/false, &device);
AutoDisconnectDevice autoDisconnect(device);
ASSERT_TRUE(status.isOk());
}
@@ -281,7 +292,7 @@
sCameraService->connectDevice(callbacks, s.cameraId,
0/*oomScoreDiff*/, /*targetSdkVersion*/__ANDROID_API_FUTURE__,
hardware::ICameraService::ROTATION_OVERRIDE_NONE,
- clientAttribution, /*devicePolicy*/0, &deviceA);
+ clientAttribution, /*devicePolicy*/0, /*sharedMode*/false, &deviceA);
AutoDisconnectDevice autoDisconnectA(deviceA);
ASSERT_TRUE(status.isOk()) << "Exception code " << status.exceptionCode() <<
" service specific error code " << status.serviceSpecificErrorCode();
@@ -289,7 +300,7 @@
sCameraService->connectDevice(callbacks, s.cameraId,
0/*oomScoreDiff*/, /*targetSdkVersion*/__ANDROID_API_FUTURE__,
hardware::ICameraService::ROTATION_OVERRIDE_NONE,
- clientAttribution, /*devicePolicy*/0, &deviceB);
+ clientAttribution, /*devicePolicy*/0, /*sharedMode*/false, &deviceB);
AutoDisconnectDevice autoDisconnectB(deviceB);
ASSERT_TRUE(status.isOk()) << "Exception code " << status.exceptionCode() <<
" service specific error code " << status.serviceSpecificErrorCode();
@@ -315,7 +326,7 @@
sCameraService->connectDevice(callbacks, s.cameraId,
0/*oomScoreDiff*/, /*targetSdkVersion*/__ANDROID_API_FUTURE__,
hardware::ICameraService::ROTATION_OVERRIDE_NONE,
- clientAttribution, /*devicePolicy*/0, &deviceA);
+ clientAttribution, /*devicePolicy*/0, /*sharedMode*/false, &deviceA);
AutoDisconnectDevice autoDisconnectA(deviceA);
ASSERT_TRUE(status.isOk()) << "Exception code " << status.exceptionCode() <<
" service specific error code " << status.serviceSpecificErrorCode();
@@ -323,7 +334,7 @@
sCameraService->connectDevice(callbacks, s.cameraId,
1/*oomScoreDiff*/, /*targetSdkVersion*/__ANDROID_API_FUTURE__,
hardware::ICameraService::ROTATION_OVERRIDE_NONE,
- clientAttribution, /*devicePolicy*/0, &deviceB);
+ clientAttribution, /*devicePolicy*/0, /*sharedMode*/false, &deviceB);
AutoDisconnectDevice autoDisconnectB(deviceB);
ASSERT_TRUE(status.isOk()) << "Exception code " << status.exceptionCode() <<
" service specific error code " << status.serviceSpecificErrorCode();
diff --git a/services/camera/libcameraservice/tests/CameraProviderManagerTest.cpp b/services/camera/libcameraservice/tests/CameraProviderManagerTest.cpp
index 939126c..56cacef 100644
--- a/services/camera/libcameraservice/tests/CameraProviderManagerTest.cpp
+++ b/services/camera/libcameraservice/tests/CameraProviderManagerTest.cpp
@@ -445,10 +445,6 @@
virtual std::shared_ptr<aidl::android::hardware::camera::provider::ICameraProvider>
getService(const std::string& serviceName) override {
- if (!flags::delay_lazy_hal_instantiation()) {
- return mTestAidlCameraProvider;
- }
-
// If no provider has been given, fail; in reality, getService would
// block for HALs that don't start correctly, so we should never use
// getService when we don't have a valid HAL running
diff --git a/services/camera/libcameraservice/tests/ZoomRatioTest.cpp b/services/camera/libcameraservice/tests/ZoomRatioTest.cpp
index badd47a..f00d1e7 100644
--- a/services/camera/libcameraservice/tests/ZoomRatioTest.cpp
+++ b/services/camera/libcameraservice/tests/ZoomRatioTest.cpp
@@ -296,7 +296,8 @@
}
metadata.update(ANDROID_SCALER_CROP_REGION, test2xCropRegion[index], 4);
- res = mapper.updateCaptureResult(&metadata, true/*requestedZoomRatioIs1*/);
+ res = mapper.updateCaptureResult(&metadata, false /*zoomMethodIsRatio*/,
+ true/*requestedZoomRatioIs1*/);
ASSERT_EQ(res, OK);
entry = metadata.find(ANDROID_SCALER_CROP_REGION);
ASSERT_EQ(entry.count, 4U);
@@ -340,7 +341,8 @@
entry = metadata.find(ANDROID_CONTROL_ZOOM_RATIO);
EXPECT_NEAR(entry.data.f[0], 2.0f, kMaxAllowedRatioError);
- res = mapper.updateCaptureResult(&metadata, true/*requestedZoomRatioIs1*/);
+ res = mapper.updateCaptureResult(&metadata, false/*useZoomMethod*/,
+ true/*requestedZoomRatioIs1*/);
ASSERT_EQ(res, OK);
entry = metadata.find(ANDROID_CONTROL_ZOOM_RATIO);
EXPECT_NEAR(entry.data.f[0], 1.0f, kMaxAllowedRatioError);
@@ -364,7 +366,8 @@
entry = metadata.find(ANDROID_CONTROL_ZOOM_RATIO);
EXPECT_NEAR(entry.data.f[0], 1.0f, kMaxAllowedRatioError);
- res = mapper.updateCaptureResult(&metadata, true/*requestedZoomRatioIs1*/);
+ res = mapper.updateCaptureResult(&metadata, false/*zoomMethodIsRatio*/,
+ true/*requestedZoomRatioIs1*/);
ASSERT_EQ(res, OK);
entry = metadata.find(ANDROID_SCALER_CROP_REGION);
ASSERT_EQ(entry.count, 4U);
@@ -452,7 +455,8 @@
entry = metadata.find(ANDROID_CONTROL_ZOOM_RATIO);
ASSERT_EQ(entry.data.f[0], zoomRatio);
- res = mapper.updateCaptureResult(&metadata, false/*requestedZoomRatioIs1*/);
+ res = mapper.updateCaptureResult(&metadata, false/*zoomMethodIsRatio*/,
+ false/*requestedZoomRatioIs1*/);
ASSERT_EQ(res, OK);
entry = metadata.find(ANDROID_SCALER_CROP_REGION);
ASSERT_EQ(entry.count, 4U);
diff --git a/services/camera/libcameraservice/utils/AttributionAndPermissionUtils.cpp b/services/camera/libcameraservice/utils/AttributionAndPermissionUtils.cpp
index 72f8c4b..80af140 100644
--- a/services/camera/libcameraservice/utils/AttributionAndPermissionUtils.cpp
+++ b/services/camera/libcameraservice/utils/AttributionAndPermissionUtils.cpp
@@ -14,6 +14,9 @@
* limitations under the License.
*/
+#define LOG_TAG "AttributionAndPermissionUtils"
+#define ATRACE_TAG ATRACE_TAG_CAMERA
+
#include "AttributionAndPermissionUtils.h"
#include <binder/AppOpsManager.h>
@@ -25,8 +28,37 @@
#include "CameraService.h"
#include <binder/IPCThreadState.h>
-#include <hwbinder/IPCThreadState.h>
#include <binderthreadstate/CallerUtils.h>
+#include <hwbinder/IPCThreadState.h>
+
+namespace {
+
+using android::content::AttributionSourceState;
+
+static const std::string kPermissionServiceName = "permission";
+
+static std::string getAttributionString(const AttributionSourceState& attributionSource) {
+ std::ostringstream ret;
+ const AttributionSourceState* current = &attributionSource;
+ while (current != nullptr) {
+ if (current != &attributionSource) {
+ ret << ", ";
+ }
+
+ ret << "[uid " << current->uid << ", pid " << current->pid;
+ ret << ", packageName \"" << current->packageName.value_or("<unknown>");
+ ret << "\"]";
+
+ if (!current->next.empty()) {
+ current = &current->next[0];
+ } else {
+ current = nullptr;
+ }
+ }
+ return ret.str();
+}
+
+} // namespace
namespace android {
@@ -35,8 +67,7 @@
const std::string AttributionAndPermissionUtils::sDumpPermission("android.permission.DUMP");
const std::string AttributionAndPermissionUtils::sManageCameraPermission(
"android.permission.MANAGE_CAMERA");
-const std::string AttributionAndPermissionUtils::sCameraPermission(
- "android.permission.CAMERA");
+const std::string AttributionAndPermissionUtils::sCameraPermission("android.permission.CAMERA");
const std::string AttributionAndPermissionUtils::sSystemCameraPermission(
"android.permission.SYSTEM_CAMERA");
const std::string AttributionAndPermissionUtils::sCameraHeadlessSystemUserPermission(
@@ -50,14 +81,14 @@
const std::string AttributionAndPermissionUtils::sCameraInjectExternalCameraPermission(
"android.permission.CAMERA_INJECT_EXTERNAL_CAMERA");
-int AttributionAndPermissionUtils::getCallingUid() {
+int AttributionAndPermissionUtils::getCallingUid() const {
if (getCurrentServingCall() == BinderCallType::HWBINDER) {
return hardware::IPCThreadState::self()->getCallingUid();
}
return IPCThreadState::self()->getCallingUid();
}
-int AttributionAndPermissionUtils::getCallingPid() {
+int AttributionAndPermissionUtils::getCallingPid() const {
if (getCurrentServingCall() == BinderCallType::HWBINDER) {
return hardware::IPCThreadState::self()->getCallingPid();
}
@@ -80,74 +111,99 @@
return;
}
-// TODO(362551824): Make USE_CALLING_UID more explicit with a scoped enum.
-bool AttributionAndPermissionUtils::resolveClientUid(/*inout*/ int& clientUid) {
- int callingUid = getCallingUid();
-
- if (clientUid == hardware::ICameraService::USE_CALLING_UID) {
- clientUid = callingUid;
- } else if (!isTrustedCallingUid(callingUid)) {
- return false;
+binder::Status AttributionAndPermissionUtils::resolveAttributionSource(
+ /*inout*/ AttributionSourceState& resolvedAttributionSource, const std::string& methodName,
+ const std::optional<std::string>& cameraIdMaybe) {
+ // Check if we can trust clientUid
+ if (!resolveClientUid(resolvedAttributionSource.uid)) {
+ return errorNotTrusted(resolvedAttributionSource.pid, resolvedAttributionSource.uid,
+ methodName, cameraIdMaybe, *resolvedAttributionSource.packageName,
+ /* isPid= */ false);
}
- return true;
-}
+ resolveAttributionPackage(resolvedAttributionSource);
-// TODO(362551824): Make USE_CALLING_UID more explicit with a scoped enum.
-bool AttributionAndPermissionUtils::resolveClientPid(/*inout*/ int& clientPid) {
- int callingUid = getCallingUid();
- int callingPid = getCallingPid();
-
- if (clientPid == hardware::ICameraService::USE_CALLING_PID) {
- clientPid = callingPid;
- } else if (!isTrustedCallingUid(callingUid)) {
- return false;
+ if (!resolveClientPid(resolvedAttributionSource.pid)) {
+ return errorNotTrusted(resolvedAttributionSource.pid, resolvedAttributionSource.uid,
+ methodName, cameraIdMaybe, *resolvedAttributionSource.packageName,
+ /* isPid= */ true);
}
- return true;
+ return binder::Status::ok();
}
-bool AttributionAndPermissionUtils::checkAutomotivePrivilegedClient(const std::string &cameraId,
- const AttributionSourceState &attributionSource) {
- if (isAutomotivePrivilegedClient(attributionSource.uid)) {
- // If cameraId is empty, then it means that this check is not used for the
- // purpose of accessing a specific camera, hence grant permission just
- // based on uid to the automotive privileged client.
- if (cameraId.empty())
- return true;
+PermissionChecker::PermissionResult AttributionAndPermissionUtils::checkPermission(
+ const std::string& cameraId, const std::string& permission,
+ const AttributionSourceState& attributionSource, const std::string& message,
+ int32_t attributedOpCode, bool forDataDelivery, bool startDataDelivery,
+ bool checkAutomotive) {
+ AttributionSourceState clientAttribution = attributionSource;
+ if (!flags::data_delivery_permission_checks() && !clientAttribution.next.empty()) {
+ clientAttribution.next.clear();
+ }
- auto cameraService = mCameraService.promote();
- if (cameraService == nullptr) {
- ALOGE("%s: CameraService unavailable.", __FUNCTION__);
- return false;
+ if (checkAutomotive && checkAutomotivePrivilegedClient(cameraId, clientAttribution)) {
+ return PermissionChecker::PERMISSION_GRANTED;
+ }
+
+ PermissionChecker::PermissionResult result;
+ if (forDataDelivery) {
+ if (startDataDelivery) {
+ result = mPermissionChecker->checkPermissionForStartDataDeliveryFromDatasource(
+ toString16(permission), clientAttribution, toString16(message),
+ attributedOpCode);
+ } else {
+ result = mPermissionChecker->checkPermissionForDataDeliveryFromDatasource(
+ toString16(permission), clientAttribution, toString16(message),
+ attributedOpCode);
}
-
- // If this call is used for accessing a specific camera then cam_id must be provided.
- // In that case, only pre-grants the permission for accessing the exterior system only
- // camera.
- return cameraService->isAutomotiveExteriorSystemCamera(cameraId);
+ } else {
+ result = mPermissionChecker->checkPermissionForPreflight(
+ toString16(permission), clientAttribution, toString16(message), attributedOpCode);
}
- return false;
+ if (result == PermissionChecker::PERMISSION_HARD_DENIED) {
+ ALOGI("%s (forDataDelivery %d startDataDelivery %d): Permission hard denied "
+ "for client attribution %s",
+ __FUNCTION__, forDataDelivery, startDataDelivery,
+ getAttributionString(clientAttribution).c_str());
+ } else if (result == PermissionChecker::PERMISSION_SOFT_DENIED) {
+ ALOGI("%s checkPermission (forDataDelivery %d startDataDelivery %d): Permission soft "
+ "denied "
+ "for client attribution %s",
+ __FUNCTION__, forDataDelivery, startDataDelivery,
+ getAttributionString(clientAttribution).c_str());
+ }
+ return result;
}
-bool AttributionAndPermissionUtils::checkPermissionForPreflight(const std::string &cameraId,
- const std::string &permission, const AttributionSourceState &attributionSource,
- const std::string& message, int32_t attributedOpCode) {
- if (checkAutomotivePrivilegedClient(cameraId, attributionSource)) {
- return true;
- }
+bool AttributionAndPermissionUtils::checkPermissionForPreflight(
+ const std::string& cameraId, const std::string& permission,
+ const AttributionSourceState& attributionSource, const std::string& message,
+ int32_t attributedOpCode) {
+ return checkPermission(cameraId, permission, attributionSource, message, attributedOpCode,
+ /* forDataDelivery */ false, /* startDataDelivery */ false,
+ /* checkAutomotive */ true) != PermissionChecker::PERMISSION_HARD_DENIED;
+}
- if (!flags::cache_permission_services()) {
- PermissionChecker permissionChecker;
- return permissionChecker.checkPermissionForPreflight(
- toString16(permission), attributionSource, toString16(message),
- attributedOpCode) != PermissionChecker::PERMISSION_HARD_DENIED;
- } else {
- return mPermissionChecker->checkPermissionForPreflight(
- toString16(permission), attributionSource, toString16(message),
- attributedOpCode) != PermissionChecker::PERMISSION_HARD_DENIED;
- }
+bool AttributionAndPermissionUtils::checkPermissionForDataDelivery(
+ const std::string& cameraId, const std::string& permission,
+ const AttributionSourceState& attributionSource, const std::string& message,
+ int32_t attributedOpCode) {
+ return checkPermission(cameraId, permission, attributionSource, message, attributedOpCode,
+ /* forDataDelivery */ true, /* startDataDelivery */ false,
+ /* checkAutomotive */ false) !=
+ PermissionChecker::PERMISSION_HARD_DENIED;
+}
+
+PermissionChecker::PermissionResult
+AttributionAndPermissionUtils::checkPermissionForStartDataDelivery(
+ const std::string& cameraId, const std::string& permission,
+ const AttributionSourceState& attributionSource, const std::string& message,
+ int32_t attributedOpCode) {
+ return checkPermission(cameraId, permission, attributionSource, message, attributedOpCode,
+ /* forDataDelivery */ true, /* startDataDelivery */ true,
+ /* checkAutomotive */ false);
}
// Can camera service trust the caller based on the calling UID?
@@ -180,8 +236,7 @@
bool AttributionAndPermissionUtils::isAutomotivePrivilegedClient(int32_t uid) {
// Returns false if this is not an automotive device type.
- if (!isAutomotiveDevice())
- return false;
+ if (!isAutomotiveDevice()) return false;
// Returns true if the uid is AID_AUTOMOTIVE_EVS which is a
// privileged client uid used for safety critical use cases such as
@@ -189,8 +244,35 @@
return uid == AID_AUTOMOTIVE_EVS;
}
-status_t AttributionAndPermissionUtils::getUidForPackage(const std::string &packageName,
- int userId, /*inout*/uid_t& uid, int err) {
+std::string AttributionAndPermissionUtils::getPackageNameFromUid(int clientUid) const {
+ std::string packageName("");
+
+ sp<IPermissionController> permCtrl = getPermissionController();
+ if (permCtrl == nullptr) {
+ // Return empty package name and the further interaction
+ // with camera will likely fail
+ return packageName;
+ }
+
+ Vector<String16> packages;
+
+ permCtrl->getPackagesForUid(clientUid, packages);
+
+ if (packages.isEmpty()) {
+ ALOGE("No packages for calling UID %d", clientUid);
+ // Return empty package name and the further interaction
+ // with camera will likely fail
+ return packageName;
+ }
+
+ // Arbitrarily pick the first name in the list
+ packageName = toStdString(packages[0]);
+
+ return packageName;
+}
+
+status_t AttributionAndPermissionUtils::getUidForPackage(const std::string& packageName, int userId,
+ /*inout*/ uid_t& uid, int err) {
PermissionController pc;
uid = pc.getPackageUid(toString16(packageName), 0);
if (uid <= 0) {
@@ -213,36 +295,179 @@
return (getCallingPid() == getpid());
}
-bool AttributionAndPermissionUtils::hasPermissionsForCamera(const std::string& cameraId,
- const AttributionSourceState& attributionSource) {
- return checkPermissionForPreflight(cameraId, sCameraPermission,
- attributionSource, std::string(), AppOpsManager::OP_NONE);
+bool AttributionAndPermissionUtils::hasPermissionsForCamera(
+ const std::string& cameraId, const AttributionSourceState& attributionSource,
+ bool forDataDelivery, bool checkAutomotive) {
+ return checkPermission(cameraId, sCameraPermission, attributionSource, std::string(),
+ AppOpsManager::OP_NONE, forDataDelivery, /* startDataDelivery */ false,
+ checkAutomotive) != PermissionChecker::PERMISSION_HARD_DENIED;
}
-bool AttributionAndPermissionUtils::hasPermissionsForSystemCamera(const std::string& cameraId,
- const AttributionSourceState& attributionSource, bool checkCameraPermissions) {
- bool systemCameraPermission = checkPermissionForPreflight(cameraId,
- sSystemCameraPermission, attributionSource, std::string(), AppOpsManager::OP_NONE);
- return systemCameraPermission && (!checkCameraPermissions
- || hasPermissionsForCamera(cameraId, attributionSource));
+PermissionChecker::PermissionResult
+AttributionAndPermissionUtils::checkPermissionsForCameraForPreflight(
+ const std::string& cameraId, const AttributionSourceState& attributionSource) {
+ return checkPermission(cameraId, sCameraPermission, attributionSource, std::string(),
+ AppOpsManager::OP_NONE, /* forDataDelivery */ false,
+ /* startDataDelivery */ false, /* checkAutomotive */ false);
+}
+
+PermissionChecker::PermissionResult
+AttributionAndPermissionUtils::checkPermissionsForCameraForDataDelivery(
+ const std::string& cameraId, const AttributionSourceState& attributionSource) {
+ return checkPermission(cameraId, sCameraPermission, attributionSource, std::string(),
+ AppOpsManager::OP_NONE, /* forDataDelivery */ true,
+ /* startDataDelivery */ false, /* checkAutomotive */ false);
+}
+
+PermissionChecker::PermissionResult
+AttributionAndPermissionUtils::checkPermissionsForCameraForStartDataDelivery(
+ const std::string& cameraId, const AttributionSourceState& attributionSource) {
+ return checkPermission(cameraId, sCameraPermission, attributionSource, std::string(),
+ AppOpsManager::OP_NONE, /* forDataDelivery */ true,
+ /* startDataDelivery */ true, /* checkAutomotive */ false);
+}
+
+bool AttributionAndPermissionUtils::hasPermissionsForSystemCamera(
+ const std::string& cameraId, const AttributionSourceState& attributionSource,
+ bool checkCameraPermissions) {
+ bool systemCameraPermission =
+ checkPermissionForPreflight(cameraId, sSystemCameraPermission, attributionSource,
+ std::string(), AppOpsManager::OP_NONE);
+ return systemCameraPermission &&
+ (!checkCameraPermissions || hasPermissionsForCamera(cameraId, attributionSource));
}
bool AttributionAndPermissionUtils::hasPermissionsForCameraHeadlessSystemUser(
const std::string& cameraId, const AttributionSourceState& attributionSource) {
return checkPermissionForPreflight(cameraId, sCameraHeadlessSystemUserPermission,
- attributionSource, std::string(), AppOpsManager::OP_NONE);
+ attributionSource, std::string(), AppOpsManager::OP_NONE);
}
bool AttributionAndPermissionUtils::hasPermissionsForCameraPrivacyAllowlist(
const AttributionSourceState& attributionSource) {
return checkPermissionForPreflight(std::string(), sCameraPrivacyAllowlistPermission,
- attributionSource, std::string(), AppOpsManager::OP_NONE);
+ attributionSource, std::string(), AppOpsManager::OP_NONE);
}
bool AttributionAndPermissionUtils::hasPermissionsForOpenCloseListener(
const AttributionSourceState& attributionSource) {
return checkPermissionForPreflight(std::string(), sCameraOpenCloseListenerPermission,
- attributionSource, std::string(), AppOpsManager::OP_NONE);
+ attributionSource, std::string(), AppOpsManager::OP_NONE);
+}
+
+void AttributionAndPermissionUtils::finishDataDelivery(
+ const AttributionSourceState& attributionSource) {
+ mPermissionChecker->finishDataDeliveryFromDatasource(AppOpsManager::OP_CAMERA,
+ attributionSource);
+}
+
+bool AttributionAndPermissionUtils::checkAutomotivePrivilegedClient(
+ const std::string& cameraId, const AttributionSourceState& attributionSource) {
+ if (isAutomotivePrivilegedClient(attributionSource.uid)) {
+ // If cameraId is empty, then it means that this check is not used for the
+ // purpose of accessing a specific camera, hence grant permission just
+ // based on uid to the automotive privileged client.
+ if (cameraId.empty()) return true;
+
+ auto cameraService = mCameraService.promote();
+ if (cameraService == nullptr) {
+ ALOGE("%s: CameraService unavailable.", __FUNCTION__);
+ return false;
+ }
+
+ // If this call is used for accessing a specific camera then cam_id must be provided.
+ // In that case, only pre-grants the permission for accessing the exterior system only
+ // camera.
+ return cameraService->isAutomotiveExteriorSystemCamera(cameraId);
+ }
+
+ return false;
+}
+
+void AttributionAndPermissionUtils::resolveAttributionPackage(
+ AttributionSourceState& resolvedAttributionSource) {
+ if (resolvedAttributionSource.packageName.has_value() &&
+ resolvedAttributionSource.packageName->size() > 0) {
+ return;
+ }
+
+ // NDK calls don't come with package names, but we need one for various cases.
+ // Generally, there's a 1:1 mapping between UID and package name, but shared UIDs
+ // do exist. For all authentication cases, all packages under the same UID get the
+ // same permissions, so picking any associated package name is sufficient. For some
+ // other cases, this may give inaccurate names for clients in logs.
+ resolvedAttributionSource.packageName = getPackageNameFromUid(resolvedAttributionSource.uid);
+}
+
+// TODO(362551824): Make USE_CALLING_UID more explicit with a scoped enum.
+bool AttributionAndPermissionUtils::resolveClientUid(/*inout*/ int& clientUid) {
+ int callingUid = getCallingUid();
+
+ bool validUid = true;
+ if (clientUid == hardware::ICameraService::USE_CALLING_UID) {
+ clientUid = callingUid;
+ } else {
+ validUid = isTrustedCallingUid(callingUid);
+ if (flags::data_delivery_permission_checks()) {
+ validUid = validUid || (clientUid == callingUid);
+ }
+ }
+
+ return validUid;
+}
+
+// TODO(362551824): Make USE_CALLING_UID more explicit with a scoped enum.
+bool AttributionAndPermissionUtils::resolveClientPid(/*inout*/ int& clientPid) {
+ int callingUid = getCallingUid();
+ int callingPid = getCallingPid();
+
+ bool validPid = true;
+ if (clientPid == hardware::ICameraService::USE_CALLING_PID) {
+ clientPid = callingPid;
+ } else {
+ validPid = isTrustedCallingUid(callingUid);
+ if (flags::data_delivery_permission_checks()) {
+ validPid = validPid || (clientPid == callingPid);
+ }
+ }
+
+ return validPid;
+}
+
+binder::Status AttributionAndPermissionUtils::errorNotTrusted(
+ int clientPid, int clientUid, const std::string& methodName,
+ const std::optional<std::string>& cameraIdMaybe, const std::string& clientName,
+ bool isPid) const {
+ int callingPid = getCallingPid();
+ int callingUid = getCallingUid();
+ ALOGE("CameraService::%s X (calling PID %d, calling UID %d) rejected "
+ "(don't trust %s %d)",
+ methodName.c_str(), callingPid, callingUid, isPid ? "clientPid" : "clientUid",
+ isPid ? clientPid : clientUid);
+ return STATUS_ERROR_FMT(hardware::ICameraService::ERROR_PERMISSION_DENIED,
+ "Untrusted caller (calling PID %d, UID %d) trying to "
+ "forward camera access to camera %s for client %s (PID %d, UID %d)",
+ getCallingPid(), getCallingUid(), cameraIdMaybe.value_or("N/A").c_str(),
+ clientName.c_str(), clientPid, clientUid);
+}
+
+const sp<IPermissionController>& AttributionAndPermissionUtils::getPermissionController() const {
+ static const char* kPermissionControllerService = "permission";
+ static thread_local sp<IPermissionController> sPermissionController = nullptr;
+
+ if (sPermissionController == nullptr ||
+ !IInterface::asBinder(sPermissionController)->isBinderAlive()) {
+ sp<IServiceManager> sm = defaultServiceManager();
+ sp<IBinder> binder = sm->checkService(toString16(kPermissionControllerService));
+ if (binder == nullptr) {
+ ALOGE("%s: Could not get permission service", __FUNCTION__);
+ sPermissionController = nullptr;
+ } else {
+ sPermissionController = interface_cast<IPermissionController>(binder);
+ }
+ }
+
+ return sPermissionController;
}
} // namespace android
diff --git a/services/camera/libcameraservice/utils/AttributionAndPermissionUtils.h b/services/camera/libcameraservice/utils/AttributionAndPermissionUtils.h
index a23fba7..1c5d6da 100644
--- a/services/camera/libcameraservice/utils/AttributionAndPermissionUtils.h
+++ b/services/camera/libcameraservice/utils/AttributionAndPermissionUtils.h
@@ -19,6 +19,7 @@
#include <android/content/AttributionSourceState.h>
#include <android/permission/PermissionChecker.h>
#include <binder/BinderService.h>
+#include <binder/IPermissionController.h>
#include <private/android_filesystem_config.h>
namespace android {
@@ -28,18 +29,47 @@
using content::AttributionSourceState;
using permission::PermissionChecker;
+class AttrSourceItr {
+ public:
+ using value_type = AttributionSourceState;
+ using pointer = const value_type*;
+ using reference = const value_type&;
+
+ AttrSourceItr() : mAttr(nullptr) {}
+
+ AttrSourceItr(const AttributionSourceState& attr) : mAttr(&attr) {}
+
+ reference operator*() const { return *mAttr; }
+ pointer operator->() const { return mAttr; }
+
+ AttrSourceItr& operator++() {
+ mAttr = !mAttr->next.empty() ? mAttr->next.data() : nullptr;
+ return *this;
+ }
+
+ AttrSourceItr operator++(int) {
+ AttrSourceItr tmp = *this;
+ ++(*this);
+ return tmp;
+ }
+
+ friend bool operator==(const AttrSourceItr& a, const AttrSourceItr& b) = default;
+
+ static AttrSourceItr end() { return AttrSourceItr{}; }
+private:
+ const AttributionSourceState * mAttr;
+};
+
/**
* Utility class consolidating methods/data for verifying permissions and the identity of the
* caller.
*/
class AttributionAndPermissionUtils {
public:
- AttributionAndPermissionUtils() { }
+ AttributionAndPermissionUtils() {}
virtual ~AttributionAndPermissionUtils() {}
- void setCameraService(wp<CameraService> cameraService) {
- mCameraService = cameraService;
- }
+ void setCameraService(wp<CameraService> cameraService) { mCameraService = cameraService; }
static AttributionSourceState buildAttributionSource(int callingPid, int callingUid) {
AttributionSourceState attributionSource{};
@@ -49,20 +79,31 @@
}
static AttributionSourceState buildAttributionSource(int callingPid, int callingUid,
- int32_t deviceId) {
+ int32_t deviceId) {
AttributionSourceState attributionSource = buildAttributionSource(callingPid, callingUid);
attributionSource.deviceId = deviceId;
return attributionSource;
}
// Utilities handling Binder calling identities (previously in CameraThreadState)
- virtual int getCallingUid();
- virtual int getCallingPid();
+ virtual int getCallingUid() const;
+ virtual int getCallingPid() const;
virtual int64_t clearCallingIdentity();
virtual void restoreCallingIdentity(int64_t token);
- virtual bool resolveClientUid(/*inout*/ int& clientUid);
- virtual bool resolveClientPid(/*inout*/ int& clientPid);
+ /**
+ * If flags::data_delivery_permission_checks() is enabled, check the calling attribution
+ * source and resolve its package name, or fill in the pid/uid/package name if necessary.
+ *
+ * @param resolvedAttributionSource The resolved attribution source.
+ * @param methodName The name of the method calling this function (for logging only).
+ * @param cameraIdMaybe The camera ID, if applicable.
+ * @return The status of the operation.
+ */
+ virtual binder::Status resolveAttributionSource(
+ /*inout*/ AttributionSourceState& resolvedAttributionSource,
+ const std::string& methodName,
+ const std::optional<std::string>& cameraIdMaybe = std::nullopt);
/**
* Pre-grants the permission if the attribution source uid is for an automotive
@@ -73,9 +114,19 @@
* which is located outside of the vehicle body frame because camera located inside the vehicle
* cabin would need user permission.
*/
- virtual bool checkPermissionForPreflight(const std::string &cameraId,
- const std::string &permission, const AttributionSourceState& attributionSource,
- const std::string& message, int32_t attributedOpCode);
+ virtual bool checkPermissionForPreflight(const std::string& cameraId,
+ const std::string& permission,
+ const AttributionSourceState& attributionSource,
+ const std::string& message, int32_t attributedOpCode);
+ virtual bool checkPermissionForDataDelivery(const std::string& cameraId,
+ const std::string& permission,
+ const AttributionSourceState& attributionSource,
+ const std::string& message,
+ int32_t attributedOpCode);
+ virtual PermissionChecker::PermissionResult checkPermissionForStartDataDelivery(
+ const std::string& cameraId, const std::string& permission,
+ const AttributionSourceState& attributionSource, const std::string& message,
+ int32_t attributedOpCode);
// Can camera service trust the caller based on the calling UID?
virtual bool isTrustedCallingUid(uid_t uid);
@@ -88,22 +139,41 @@
*/
virtual bool isAutomotivePrivilegedClient(int32_t uid);
- virtual status_t getUidForPackage(const std::string &packageName, int userId,
- /*inout*/uid_t& uid, int err);
+ // In some cases the calling code has no access to the package it runs under.
+ // For example, NDK camera API.
+ // In this case we will get the packages for the calling UID and pick the first one
+ // for attributing the app op. This will work correctly for runtime permissions
+ // as for legacy apps we will toggle the app op for all packages in the UID.
+ // The caveat is that the operation may be attributed to the wrong package and
+ // stats based on app ops may be slightly off.
+ virtual std::string getPackageNameFromUid(int clientUid) const;
+
+ virtual status_t getUidForPackage(const std::string& packageName, int userId,
+ /*inout*/ uid_t& uid, int err);
virtual bool isCallerCameraServerNotDelegating();
// Utils for checking specific permissions
virtual bool hasPermissionsForCamera(const std::string& cameraId,
- const AttributionSourceState& attributionSource);
+ const AttributionSourceState& attributionSource,
+ bool forDataDelivery = false, bool checkAutomotive = true);
+ virtual PermissionChecker::PermissionResult checkPermissionsForCameraForPreflight(
+ const std::string& cameraId, const AttributionSourceState& attributionSource);
+ virtual PermissionChecker::PermissionResult checkPermissionsForCameraForDataDelivery(
+ const std::string& cameraId, const AttributionSourceState& attributionSource);
+ virtual PermissionChecker::PermissionResult checkPermissionsForCameraForStartDataDelivery(
+ const std::string& cameraId, const AttributionSourceState& attributionSource);
virtual bool hasPermissionsForSystemCamera(const std::string& cameraId,
- const AttributionSourceState& attributionSource, bool checkCameraPermissions = true);
- virtual bool hasPermissionsForCameraHeadlessSystemUser(const std::string& cameraId,
- const AttributionSourceState& attributionSource);
+ const AttributionSourceState& attributionSource,
+ bool checkCameraPermissions = true);
+ virtual bool hasPermissionsForCameraHeadlessSystemUser(
+ const std::string& cameraId, const AttributionSourceState& attributionSource);
virtual bool hasPermissionsForCameraPrivacyAllowlist(
const AttributionSourceState& attributionSource);
virtual bool hasPermissionsForOpenCloseListener(
const AttributionSourceState& attributionSource);
+ virtual void finishDataDelivery(const AttributionSourceState& attributionSource);
+
static const std::string sDumpPermission;
static const std::string sManageCameraPermission;
static const std::string sCameraPermission;
@@ -117,10 +187,30 @@
protected:
wp<CameraService> mCameraService;
- bool checkAutomotivePrivilegedClient(const std::string &cameraId,
- const AttributionSourceState &attributionSource);
+ bool checkAutomotivePrivilegedClient(const std::string& cameraId,
+ const AttributionSourceState& attributionSource);
+
+ // If the package name is missing from the AttributionSource and a package name exists for the
+ // AttributionSource's uid, fills in the missing package name.
+ void resolveAttributionPackage(AttributionSourceState& resolvedAttributionSource);
+
+ virtual bool resolveClientUid(/*inout*/ int& clientUid);
+ virtual bool resolveClientPid(/*inout*/ int& clientPid);
+
+ virtual binder::Status errorNotTrusted(int clientPid, int clientUid,
+ const std::string& methodName,
+ const std::optional<std::string>& cameraIdMaybe,
+ const std::string& clientName, bool isPid) const;
private:
+ virtual const sp<IPermissionController>& getPermissionController() const;
+
+ virtual PermissionChecker::PermissionResult checkPermission(
+ const std::string& cameraId, const std::string& permission,
+ const AttributionSourceState& attributionSource, const std::string& message,
+ int32_t attributedOpCode, bool forDataDelivery, bool startDataDelivery,
+ bool checkAutomotive);
+
std::unique_ptr<permission::PermissionChecker> mPermissionChecker =
std::make_unique<permission::PermissionChecker>();
};
@@ -131,39 +221,36 @@
* in the encapsulating class's methods.
*/
class AttributionAndPermissionUtilsEncapsulator {
-protected:
+ protected:
std::shared_ptr<AttributionAndPermissionUtils> mAttributionAndPermissionUtils;
-public:
+ public:
AttributionAndPermissionUtilsEncapsulator(
- std::shared_ptr<AttributionAndPermissionUtils> attributionAndPermissionUtils)
- : mAttributionAndPermissionUtils(attributionAndPermissionUtils) { }
+ std::shared_ptr<AttributionAndPermissionUtils> attributionAndPermissionUtils)
+ : mAttributionAndPermissionUtils(attributionAndPermissionUtils) {}
static AttributionSourceState buildAttributionSource(int callingPid, int callingUid) {
return AttributionAndPermissionUtils::buildAttributionSource(callingPid, callingUid);
}
static AttributionSourceState buildAttributionSource(int callingPid, int callingUid,
- int32_t deviceId) {
- return AttributionAndPermissionUtils::buildAttributionSource(
- callingPid, callingUid, deviceId);
+ int32_t deviceId) {
+ return AttributionAndPermissionUtils::buildAttributionSource(callingPid, callingUid,
+ deviceId);
}
static AttributionSourceState buildAttributionSource(int callingPid, int callingUid,
- const std::string& packageName, int32_t deviceId) {
- AttributionSourceState attributionSource = buildAttributionSource(callingPid, callingUid,
- deviceId);
+ const std::string& packageName,
+ int32_t deviceId) {
+ AttributionSourceState attributionSource =
+ buildAttributionSource(callingPid, callingUid, deviceId);
attributionSource.packageName = packageName;
return attributionSource;
}
- int getCallingUid() const {
- return mAttributionAndPermissionUtils->getCallingUid();
- }
+ int getCallingUid() const { return mAttributionAndPermissionUtils->getCallingUid(); }
- int getCallingPid() const {
- return mAttributionAndPermissionUtils->getCallingPid();
- }
+ int getCallingPid() const { return mAttributionAndPermissionUtils->getCallingPid(); }
int64_t clearCallingIdentity() const {
return mAttributionAndPermissionUtils->clearCallingIdentity();
@@ -173,56 +260,90 @@
mAttributionAndPermissionUtils->restoreCallingIdentity(token);
}
- bool resolveClientUid(/*inout*/ int& clientUid) const {
- return mAttributionAndPermissionUtils->resolveClientUid(clientUid);
- }
-
- bool resolveClientPid(/*inout*/ int& clientPid) const {
- return mAttributionAndPermissionUtils->resolveClientPid(clientPid);
+ binder::Status resolveAttributionSource(AttributionSourceState& resolvedAttributionSource,
+ const std::string& methodName,
+ const std::optional<std::string>& cameraIdMaybe) {
+ std::string passedPackageName;
+ if (resolvedAttributionSource.packageName.has_value()) {
+ passedPackageName = resolvedAttributionSource.packageName.value();
+ }
+ auto ret = mAttributionAndPermissionUtils->resolveAttributionSource(
+ resolvedAttributionSource, methodName, cameraIdMaybe);
+ if (!ret.isOk()) {
+ return ret;
+ }
+ // Fix up package name
+ if (passedPackageName.size() != 0) {
+ resolvedAttributionSource.packageName = std::move(passedPackageName);
+ }
+ return ret;
}
// The word 'System' here does not refer to callers only on the system
// partition. They just need to have an android system uid.
- bool callerHasSystemUid() const {
- return (getCallingUid() < AID_APP_START);
- }
+ bool callerHasSystemUid() const { return (getCallingUid() < AID_APP_START); }
bool hasPermissionsForCamera(int callingPid, int callingUid, int32_t deviceId) const {
return hasPermissionsForCamera(std::string(), callingPid, callingUid, deviceId);
}
- bool hasPermissionsForCamera(int callingPid, int callingUid,
- const std::string& packageName, int32_t deviceId) const {
- return hasPermissionsForCamera(std::string(), callingPid, callingUid, packageName,
- deviceId);
- }
-
- bool hasPermissionsForCamera(const std::string& cameraId, int callingPid,
- int callingUid, int32_t deviceId) const {
- auto attributionSource = buildAttributionSource(callingPid, callingUid,
- deviceId);
- return mAttributionAndPermissionUtils->hasPermissionsForCamera(cameraId, attributionSource);
+ bool hasPermissionsForCamera(int callingPid, int callingUid, const std::string& packageName,
+ int32_t deviceId) const {
+ auto attributionSource =
+ buildAttributionSource(callingPid, callingUid, packageName, deviceId);
+ return hasPermissionsForCamera(std::string(), attributionSource);
}
bool hasPermissionsForCamera(const std::string& cameraId, int callingPid, int callingUid,
- const std::string& packageName, int32_t deviceId) const {
- auto attributionSource = buildAttributionSource(callingPid, callingUid, packageName,
- deviceId);
- return mAttributionAndPermissionUtils->hasPermissionsForCamera(cameraId, attributionSource);
+ int32_t deviceId) const {
+ auto attributionSource = buildAttributionSource(callingPid, callingUid, deviceId);
+ return hasPermissionsForCamera(cameraId, attributionSource);
+ }
+
+ bool hasPermissionsForCamera(const std::string& cameraId,
+ const AttributionSourceState& clientAttribution) const {
+ return mAttributionAndPermissionUtils->hasPermissionsForCamera(cameraId, clientAttribution,
+ /* forDataDelivery */ false,
+ /* checkAutomotive */ true);
+ }
+
+ bool hasPermissionsForCameraForDataDelivery(
+ const std::string& cameraId, const AttributionSourceState& clientAttribution) const {
+ return mAttributionAndPermissionUtils->hasPermissionsForCamera(cameraId, clientAttribution,
+ /* forDataDelivery */ true,
+ /* checkAutomotive */ false);
+ }
+
+ PermissionChecker::PermissionResult checkPermissionsForCameraForPreflight(
+ const std::string& cameraId, const AttributionSourceState& clientAttribution) const {
+ return mAttributionAndPermissionUtils->checkPermissionsForCameraForPreflight(
+ cameraId, clientAttribution);
+ }
+
+ PermissionChecker::PermissionResult checkPermissionsForCameraForDataDelivery(
+ const std::string& cameraId, const AttributionSourceState& clientAttribution) const {
+ return mAttributionAndPermissionUtils->checkPermissionsForCameraForDataDelivery(
+ cameraId, clientAttribution);
+ }
+
+ PermissionChecker::PermissionResult checkPermissionsForCameraForStartDataDelivery(
+ const std::string& cameraId, const AttributionSourceState& clientAttribution) const {
+ return mAttributionAndPermissionUtils->checkPermissionsForCameraForStartDataDelivery(
+ cameraId, clientAttribution);
}
bool hasPermissionsForSystemCamera(const std::string& cameraId, int callingPid, int callingUid,
- bool checkCameraPermissions = true) const {
+ bool checkCameraPermissions = true) const {
auto attributionSource = buildAttributionSource(callingPid, callingUid);
return mAttributionAndPermissionUtils->hasPermissionsForSystemCamera(
- cameraId, attributionSource, checkCameraPermissions);
+ cameraId, attributionSource, checkCameraPermissions);
}
bool hasPermissionsForCameraHeadlessSystemUser(const std::string& cameraId, int callingPid,
- int callingUid) const {
+ int callingUid) const {
auto attributionSource = buildAttributionSource(callingPid, callingUid);
return mAttributionAndPermissionUtils->hasPermissionsForCameraHeadlessSystemUser(
- cameraId, attributionSource);
+ cameraId, attributionSource);
}
bool hasPermissionsForCameraPrivacyAllowlist(int callingPid, int callingUid) const {
@@ -237,10 +358,12 @@
attributionSource);
}
- bool isAutomotiveDevice() const {
- return mAttributionAndPermissionUtils->isAutomotiveDevice();
+ void finishDataDelivery(const AttributionSourceState& attributionSource) {
+ mAttributionAndPermissionUtils->finishDataDelivery(attributionSource);
}
+ bool isAutomotiveDevice() const { return mAttributionAndPermissionUtils->isAutomotiveDevice(); }
+
bool isAutomotivePrivilegedClient(int32_t uid) const {
return mAttributionAndPermissionUtils->isAutomotivePrivilegedClient(uid);
}
@@ -253,11 +376,15 @@
return mAttributionAndPermissionUtils->isHeadlessSystemUserMode();
}
- status_t getUidForPackage(const std::string &packageName, int userId,
- /*inout*/uid_t& uid, int err) const {
+ status_t getUidForPackage(const std::string& packageName, int userId,
+ /*inout*/ uid_t& uid, int err) const {
return mAttributionAndPermissionUtils->getUidForPackage(packageName, userId, uid, err);
}
+ std::string getPackageNameFromUid(int clientUid) const {
+ return mAttributionAndPermissionUtils->getPackageNameFromUid(clientUid);
+ }
+
bool isCallerCameraServerNotDelegating() const {
return mAttributionAndPermissionUtils->isCallerCameraServerNotDelegating();
}
@@ -265,4 +392,4 @@
} // namespace android
-#endif // ANDROID_SERVERS_CAMERA_ATTRIBUTION_AND_PERMISSION_UTILS_H
+#endif // ANDROID_SERVERS_CAMERA_ATTRIBUTION_AND_PERMISSION_UTILS_H
diff --git a/services/camera/libcameraservice/utils/ClientManager.h b/services/camera/libcameraservice/utils/ClientManager.h
index 074c84d..0f0dc4c 100644
--- a/services/camera/libcameraservice/utils/ClientManager.h
+++ b/services/camera/libcameraservice/utils/ClientManager.h
@@ -20,6 +20,7 @@
#include <utils/Condition.h>
#include <utils/Mutex.h>
#include <utils/Timers.h>
+#include <utils/Log.h>
#include <algorithm>
#include <utility>
@@ -27,6 +28,9 @@
#include <set>
#include <map>
#include <memory>
+#include <com_android_internal_camera_flags.h>
+
+namespace flags = com::android::internal::camera::flags;
namespace android {
namespace resource_policy {
@@ -142,10 +146,10 @@
public:
ClientDescriptor(const KEY& key, const VALUE& value, int32_t cost,
const std::set<KEY>& conflictingKeys, int32_t score, int32_t ownerId, int32_t state,
- bool isVendorClient, int32_t oomScoreOffset);
+ bool isVendorClient, int32_t oomScoreOffset, bool sharedMode = false);
ClientDescriptor(KEY&& key, VALUE&& value, int32_t cost, std::set<KEY>&& conflictingKeys,
int32_t score, int32_t ownerId, int32_t state, bool isVendorClient,
- int32_t oomScoreOffset);
+ int32_t oomScoreOffset, bool sharedMode = false);
~ClientDescriptor();
@@ -189,6 +193,11 @@
*/
void setPriority(const ClientPriority& priority);
+ /**
+ * Returns true when camera is opened in shared mode.
+ */
+ bool getSharedMode() const;
+
// This class is ordered by key
template<class K, class V>
friend bool operator < (const ClientDescriptor<K, V>& a, const ClientDescriptor<K, V>& b);
@@ -200,6 +209,7 @@
std::set<KEY> mConflicting;
ClientPriority mPriority;
int32_t mOwnerId;
+ bool mSharedMode;
}; // class ClientDescriptor
template<class K, class V>
@@ -210,18 +220,19 @@
template<class KEY, class VALUE>
ClientDescriptor<KEY, VALUE>::ClientDescriptor(const KEY& key, const VALUE& value, int32_t cost,
const std::set<KEY>& conflictingKeys, int32_t score, int32_t ownerId, int32_t state,
- bool isVendorClient, int32_t scoreOffset) :
+ bool isVendorClient, int32_t scoreOffset, bool sharedMode) :
mKey{key}, mValue{value}, mCost{cost}, mConflicting{conflictingKeys},
mPriority(score, state, isVendorClient, scoreOffset),
- mOwnerId{ownerId} {}
+ mOwnerId{ownerId}, mSharedMode{sharedMode} {}
template<class KEY, class VALUE>
ClientDescriptor<KEY, VALUE>::ClientDescriptor(KEY&& key, VALUE&& value, int32_t cost,
std::set<KEY>&& conflictingKeys, int32_t score, int32_t ownerId, int32_t state,
- bool isVendorClient, int32_t scoreOffset) :
+ bool isVendorClient, int32_t scoreOffset, bool sharedMode) :
mKey{std::forward<KEY>(key)}, mValue{std::forward<VALUE>(value)}, mCost{cost},
mConflicting{std::forward<std::set<KEY>>(conflictingKeys)},
- mPriority(score, state, isVendorClient, scoreOffset), mOwnerId{ownerId} {}
+ mPriority(score, state, isVendorClient, scoreOffset), mOwnerId{ownerId},
+ mSharedMode{sharedMode} {}
template<class KEY, class VALUE>
ClientDescriptor<KEY, VALUE>::~ClientDescriptor() {}
@@ -253,7 +264,14 @@
template<class KEY, class VALUE>
bool ClientDescriptor<KEY, VALUE>::isConflicting(const KEY& key) const {
- if (key == mKey) return true;
+ if (flags::camera_multi_client()) {
+ // In shared mode, there can be more than one client using the camera.
+ // Hence, having more than one client with the same key is not considered as
+ // conflicting.
+ if (!mSharedMode && key == mKey) return true;
+ } else {
+ if (key == mKey) return true;
+ }
for (const auto& x : mConflicting) {
if (key == x) return true;
}
@@ -266,6 +284,11 @@
}
template<class KEY, class VALUE>
+bool ClientDescriptor<KEY, VALUE>::getSharedMode() const {
+ return mSharedMode;
+}
+
+template<class KEY, class VALUE>
void ClientDescriptor<KEY, VALUE>::setPriority(const ClientPriority& priority) {
// We don't use the usual copy constructor here since we want to remember
// whether a client is a vendor client or not. This could have been wiped
@@ -349,14 +372,19 @@
void removeAll();
/**
- * Remove and return the ClientDescriptor with a given key.
+ * Remove all ClientDescriptors with a given key.
+ */
+ std::vector<std::shared_ptr<ClientDescriptor<KEY, VALUE>>> removeAll(const KEY& key);
+
+ /**
+ * Remove and return the ClientDescriptor with a given key.
*/
std::shared_ptr<ClientDescriptor<KEY, VALUE>> remove(const KEY& key);
/**
* Remove the given ClientDescriptor.
*/
- void remove(const std::shared_ptr<ClientDescriptor<KEY, VALUE>>& value);
+ virtual void remove(const std::shared_ptr<ClientDescriptor<KEY, VALUE>>& value);
/**
* Return a vector of the ClientDescriptors that would be evicted by adding the given
@@ -395,6 +423,8 @@
*/
std::shared_ptr<ClientDescriptor<KEY, VALUE>> get(const KEY& key) const;
+ std::shared_ptr<ClientDescriptor<KEY, VALUE>> getPrimaryClient(const KEY& key) const;
+
/**
* Block until the given client is no longer in the active clients list, or the timeout
* occurred.
@@ -495,6 +525,8 @@
int32_t cost = client->getCost();
ClientPriority priority = client->getPriority();
int32_t owner = client->getOwnerId();
+ bool sharedMode = client->getSharedMode();
+
int64_t totalCost = getCurrentCostLocked() + cost;
@@ -520,9 +552,15 @@
int32_t curCost = i->getCost();
ClientPriority curPriority = i->getPriority();
int32_t curOwner = i->getOwnerId();
-
- bool conflicting = (curKey == key || i->isConflicting(key) ||
- client->isConflicting(curKey));
+ bool curSharedMode = i->getSharedMode();
+ bool conflicting;
+ if (flags::camera_multi_client()) {
+ conflicting = (((!sharedMode || !curSharedMode) && curKey == key)
+ || i->isConflicting(key) || client->isConflicting(curKey));
+ } else {
+ conflicting = (curKey == key || i->isConflicting(key) ||
+ client->isConflicting(curKey));
+ }
if (!returnIncompatibleClients) {
// Find evicted clients
@@ -669,6 +707,25 @@
}
template<class KEY, class VALUE, class LISTENER>
+std::shared_ptr<ClientDescriptor<KEY, VALUE>> ClientManager<KEY, VALUE, LISTENER>::getPrimaryClient(
+ const KEY& key) const {
+ Mutex::Autolock lock(mLock);
+ if (flags::camera_multi_client()) {
+ for (const auto& i : mClients) {
+ bool sharedMode = i->getSharedMode();
+ bool primaryClient;
+ status_t ret = i->getValue()->isPrimaryClient(&primaryClient);
+ if (ret == OK) {
+ if ((i->getKey() == key) && sharedMode && primaryClient) {
+ return i;
+ }
+ }
+ }
+ }
+ return std::shared_ptr<ClientDescriptor<KEY, VALUE>>(nullptr);
+}
+
+template<class KEY, class VALUE, class LISTENER>
void ClientManager<KEY, VALUE, LISTENER>::removeAll() {
Mutex::Autolock lock(mLock);
if (mListener != nullptr) {
@@ -681,6 +738,27 @@
}
template<class KEY, class VALUE, class LISTENER>
+std::vector<std::shared_ptr<ClientDescriptor<KEY, VALUE>>>
+ ClientManager<KEY, VALUE, LISTENER>::removeAll(const KEY& key) {
+ Mutex::Autolock lock(mLock);
+ std::vector<std::shared_ptr<ClientDescriptor<KEY, VALUE>>> clients;
+ if (flags::camera_multi_client()) {
+ for (auto it = mClients.begin(); it != mClients.end();)
+ {
+ if ((*it)->getKey() == key) {
+ if (mListener != nullptr) mListener->onClientRemoved(**it);
+ clients.push_back(*it);
+ it = mClients.erase(it);
+ } else {
+ ++it;
+ }
+ }
+ mRemovedCondition.broadcast();
+ }
+ return clients;
+}
+
+template<class KEY, class VALUE, class LISTENER>
std::shared_ptr<ClientDescriptor<KEY, VALUE>> ClientManager<KEY, VALUE, LISTENER>::remove(
const KEY& key) {
Mutex::Autolock lock(mLock);
diff --git a/services/camera/libcameraservice/utils/SessionConfigurationUtils.cpp b/services/camera/libcameraservice/utils/SessionConfigurationUtils.cpp
index 40ca276..202ab96 100644
--- a/services/camera/libcameraservice/utils/SessionConfigurationUtils.cpp
+++ b/services/camera/libcameraservice/utils/SessionConfigurationUtils.cpp
@@ -17,6 +17,7 @@
#include <cutils/properties.h>
#include "SessionConfigurationUtils.h"
+#include <android/data_space.h>
#include "../api2/DepthCompositeStream.h"
#include "../api2/HeicCompositeStream.h"
#include "aidl/android/hardware/graphics/common/Dataspace.h"
@@ -40,6 +41,7 @@
namespace android {
namespace camera3 {
+namespace flags = com::android::internal::camera::flags;
void StreamConfiguration::getStreamConfigurations(
const CameraMetadata &staticInfo, int configuration,
@@ -167,11 +169,16 @@
getAppropriateModeTag(ANDROID_HEIC_AVAILABLE_HEIC_STREAM_CONFIGURATIONS, maxResolution);
const int32_t jpegRSizesTag = getAppropriateModeTag(
ANDROID_JPEGR_AVAILABLE_JPEG_R_STREAM_CONFIGURATIONS, maxResolution);
+ const int32_t heicUltraHDRSizesTag = getAppropriateModeTag(
+ ANDROID_HEIC_AVAILABLE_HEIC_ULTRA_HDR_STREAM_CONFIGURATIONS, maxResolution);
bool isJpegRDataSpace = (dataSpace == static_cast<android_dataspace_t>(
::aidl::android::hardware::graphics::common::Dataspace::JPEG_R));
+ bool isHeicUltraHDRDataSpace = (dataSpace == static_cast<android_dataspace_t>(
+ ADATASPACE_HEIF_ULTRAHDR));
camera_metadata_ro_entry streamConfigs =
(isJpegRDataSpace) ? info.find(jpegRSizesTag) :
+ (isHeicUltraHDRDataSpace) ? info.find(heicUltraHDRSizesTag) :
(dataSpace == HAL_DATASPACE_DEPTH) ? info.find(depthSizesTag) :
(dataSpace == static_cast<android_dataspace>(HAL_DATASPACE_HEIF)) ?
info.find(heicSizesTag) :
@@ -232,6 +239,8 @@
if (dataSpace == static_cast<android_dataspace_t>(
::aidl::android::hardware::graphics::common::Dataspace::JPEG_R)) {
return true;
+ } else if (dataSpace == static_cast<android_dataspace_t>(ADATASPACE_HEIF_ULTRAHDR)) {
+ return true;
}
return false;
@@ -341,6 +350,9 @@
static_cast<android_dataspace>(
::aidl::android::hardware::graphics::common::Dataspace::JPEG_R)) {
format64 = static_cast<int64_t>(PublicFormat::JPEG_R);
+ } else if (format == HAL_PIXEL_FORMAT_BLOB && dataSpace ==
+ static_cast<android_dataspace>(ADATASPACE_HEIF_ULTRAHDR)) {
+ format64 = static_cast<int64_t>(HEIC_ULTRAHDR);
}
camera_metadata_ro_entry_t entry =
@@ -602,7 +614,6 @@
streamInfo.dynamicRangeProfile = dynamicRangeProfile;
streamInfo.streamUseCase = streamUseCase;
streamInfo.timestampBase = timestampBase;
- streamInfo.mirrorMode = mirrorMode;
streamInfo.colorSpace = colorSpace;
return binder::Status::ok();
}
@@ -848,7 +859,6 @@
int64_t streamUseCase = it.getStreamUseCase();
int timestampBase = it.getTimestampBase();
- int mirrorMode = it.getMirrorMode();
// If the configuration is a deferred consumer, or a not yet completed
// configuration with no buffer producers attached.
if (deferredConsumer || (!isConfigurationComplete && numBufferProducers == 0)) {
@@ -908,6 +918,7 @@
}
for (auto& bufferProducer : bufferProducers) {
+ int mirrorMode = it.getMirrorMode(bufferProducer);
sp<Surface> surface;
res = createSurfaceFromGbp(streamInfo, isStreamInfoValid, surface, bufferProducer,
logicalCameraId, metadataChosen, sensorPixelModesUsed, dynamicRangeProfile,
diff --git a/services/camera/libcameraservice/utils/SessionConfigurationUtilsHost.cpp b/services/camera/libcameraservice/utils/SessionConfigurationUtilsHost.cpp
index 7d344f8..2f4e83a 100644
--- a/services/camera/libcameraservice/utils/SessionConfigurationUtilsHost.cpp
+++ b/services/camera/libcameraservice/utils/SessionConfigurationUtilsHost.cpp
@@ -49,6 +49,12 @@
return ANDROID_HEIC_AVAILABLE_HEIC_MIN_FRAME_DURATIONS_MAXIMUM_RESOLUTION;
case ANDROID_HEIC_AVAILABLE_HEIC_STALL_DURATIONS:
return ANDROID_HEIC_AVAILABLE_HEIC_STALL_DURATIONS_MAXIMUM_RESOLUTION;
+ case ANDROID_HEIC_AVAILABLE_HEIC_ULTRA_HDR_STREAM_CONFIGURATIONS:
+ return ANDROID_HEIC_AVAILABLE_HEIC_ULTRA_HDR_STREAM_CONFIGURATIONS_MAXIMUM_RESOLUTION;
+ case ANDROID_HEIC_AVAILABLE_HEIC_ULTRA_HDR_MIN_FRAME_DURATIONS:
+ return ANDROID_HEIC_AVAILABLE_HEIC_ULTRA_HDR_MIN_FRAME_DURATIONS_MAXIMUM_RESOLUTION;
+ case ANDROID_HEIC_AVAILABLE_HEIC_ULTRA_HDR_STALL_DURATIONS:
+ return ANDROID_HEIC_AVAILABLE_HEIC_ULTRA_HDR_STALL_DURATIONS_MAXIMUM_RESOLUTION;
case ANDROID_JPEGR_AVAILABLE_JPEG_R_STREAM_CONFIGURATIONS:
return ANDROID_JPEGR_AVAILABLE_JPEG_R_STREAM_CONFIGURATIONS_MAXIMUM_RESOLUTION;
case ANDROID_JPEGR_AVAILABLE_JPEG_R_MIN_FRAME_DURATIONS:
diff --git a/services/camera/libcameraservice/utils/Utils.cpp b/services/camera/libcameraservice/utils/Utils.cpp
index e9810c6..5f61de5 100644
--- a/services/camera/libcameraservice/utils/Utils.cpp
+++ b/services/camera/libcameraservice/utils/Utils.cpp
@@ -25,44 +25,7 @@
namespace android {
-namespace flags = com::android::internal::camera::flags;
-
-namespace {
-constexpr const char* LEGACY_VNDK_VERSION_PROP = "ro.vndk.version";
-constexpr const char* BOARD_API_LEVEL_PROP = "ro.board.api_level";
-constexpr int MAX_VENDOR_API_LEVEL = 1000000;
-constexpr int FIRST_VNDK_VERSION = 202404;
-
-int legacyGetVNDKVersionFromProp(int defaultVersion) {
- if (!flags::use_ro_board_api_level_for_vndk_version()) {
- return base::GetIntProperty(LEGACY_VNDK_VERSION_PROP, defaultVersion);
- }
-
- int vndkVersion = base::GetIntProperty(BOARD_API_LEVEL_PROP, MAX_VENDOR_API_LEVEL);
-
- if (vndkVersion == MAX_VENDOR_API_LEVEL) {
- // Couldn't find property
- return defaultVersion;
- }
-
- if (vndkVersion < __ANDROID_API_V__) {
- // VNDK versions below V return the corresponding SDK version.
- return vndkVersion;
- }
-
- // VNDK for Android V and above are of the format YYYYMM starting with 202404 and is bumped
- // up once a year. So V would be 202404 and the next one would be 202504.
- // This is the same assumption as that made in system/core/init/property_service.cpp.
- vndkVersion = (vndkVersion - FIRST_VNDK_VERSION) / 100;
- return __ANDROID_API_V__ + vndkVersion;
-}
-} // anonymous namespace
-
int getVNDKVersionFromProp(int defaultVersion) {
- if (!flags::use_system_api_for_vndk_version()) {
- return legacyGetVNDKVersionFromProp(defaultVersion);
- }
-
int vendorApiLevel = AVendorSupport_getVendorApiLevel();
if (vendorApiLevel == 0) {
// Couldn't find vendor API level, return default
diff --git a/services/camera/virtualcamera/VirtualCameraCaptureResult.cc b/services/camera/virtualcamera/VirtualCameraCaptureResult.cc
index a61f553..da1c208 100644
--- a/services/camera/virtualcamera/VirtualCameraCaptureResult.cc
+++ b/services/camera/virtualcamera/VirtualCameraCaptureResult.cc
@@ -16,6 +16,7 @@
#include "VirtualCameraCaptureResult.h"
#include <cstdint>
+#include <memory>
#include "VirtualCameraCaptureRequest.h"
#include "aidl/android/hardware/camera/device/CameraMetadata.h"
@@ -34,7 +35,7 @@
} // namespace
-CameraMetadata createCaptureResultMetadata(
+std::unique_ptr<CameraMetadata> createCaptureResultMetadata(
const std::chrono::nanoseconds timestamp,
const RequestSettings& requestSettings,
const Resolution reportedSensorSize) {
@@ -109,9 +110,9 @@
if (metadata == nullptr) {
ALOGE("%s: Failed to build capture result metadata", __func__);
- return CameraMetadata();
+ return std::make_unique<CameraMetadata>();
}
- return std::move(*metadata);
+ return metadata;
}
} // namespace virtualcamera
diff --git a/services/camera/virtualcamera/VirtualCameraCaptureResult.h b/services/camera/virtualcamera/VirtualCameraCaptureResult.h
index 9e5b4d7..c3978f7 100644
--- a/services/camera/virtualcamera/VirtualCameraCaptureResult.h
+++ b/services/camera/virtualcamera/VirtualCameraCaptureResult.h
@@ -18,21 +18,10 @@
#define ANDROID_COMPANION_VIRTUALCAMERA_VIRTUALCAMERACAPTURERESULT_H
#include <chrono>
-#include <cstdint>
#include <cstring>
-#include <future>
#include <memory>
-#include <mutex>
-#include <thread>
-#include <utility>
-#include <vector>
-#include "Exif.h"
-#include "GLES/gl.h"
#include "VirtualCameraCaptureRequest.h"
-#include "VirtualCameraDevice.h"
-#include "VirtualCameraRenderThread.h"
-#include "VirtualCameraSessionContext.h"
#include "aidl/android/hardware/camera/device/CameraMetadata.h"
namespace android {
@@ -41,7 +30,7 @@
// Construct the Metadata for the Capture result based on the request
// settings, timestamp and reported sensore size
-::aidl::android::hardware::camera::device::CameraMetadata
+std::unique_ptr<::aidl::android::hardware::camera::device::CameraMetadata>
createCaptureResultMetadata(std::chrono::nanoseconds timestamp,
const RequestSettings& requestSettings,
Resolution reportedSensorSize);
diff --git a/services/camera/virtualcamera/VirtualCameraRenderThread.cc b/services/camera/virtualcamera/VirtualCameraRenderThread.cc
index bf4a45d..58c6549 100644
--- a/services/camera/virtualcamera/VirtualCameraRenderThread.cc
+++ b/services/camera/virtualcamera/VirtualCameraRenderThread.cc
@@ -100,6 +100,9 @@
static constexpr UpdateTextureTask kUpdateTextureTask;
+// The number of nanoseconds to wait for the first frame to be drawn on the input surface
+static constexpr std::chrono::nanoseconds kMaxWaitFirstFrame = 3s;
+
NotifyMsg createShutterNotifyMsg(int frameNumber,
std::chrono::nanoseconds timestamp) {
NotifyMsg msg;
@@ -110,11 +113,13 @@
return msg;
}
-NotifyMsg createBufferErrorNotifyMsg(int frameNumber, int streamId) {
+// Create a NotifyMsg for an error case. The default error is ERROR_BUFFER.
+NotifyMsg createErrorNotifyMsg(int frameNumber, int streamId,
+ ErrorCode errorCode = ErrorCode::ERROR_BUFFER) {
NotifyMsg msg;
msg.set<NotifyMsg::Tag::error>(ErrorMsg{.frameNumber = frameNumber,
.errorStreamId = streamId,
- .errorCode = ErrorCode::ERROR_BUFFER});
+ .errorCode = errorCode});
return msg;
}
@@ -421,10 +426,15 @@
}
// Calculate the maximal amount of time we can afford to wait for next frame.
+ const bool isFirstFrameDrawn = mEglSurfaceTexture->isFirstFrameDrawn();
+ ALOGV("First Frame Drawn: %s", isFirstFrameDrawn ? "Yes" : "No");
+
const std::chrono::nanoseconds maxFrameDuration =
- getMaxFrameDuration(request.getRequestSettings());
+ isFirstFrameDrawn ? getMaxFrameDuration(request.getRequestSettings())
+ : kMaxWaitFirstFrame;
const std::chrono::nanoseconds elapsedDuration =
- timestamp - lastAcquisitionTimestamp;
+ isFirstFrameDrawn ? timestamp - lastAcquisitionTimestamp : 0ns;
+
if (elapsedDuration < maxFrameDuration) {
// We can afford to wait for next frame.
// Note that if there's already new frame in the input Surface, the call
@@ -434,6 +444,17 @@
timestamp = std::chrono::duration_cast<std::chrono::nanoseconds>(
std::chrono::steady_clock::now().time_since_epoch());
if (!gotNewFrame) {
+ if (!mEglSurfaceTexture->isFirstFrameDrawn()) {
+ // We don't have any input ever drawn. This is considered as an error
+ // case. Notify the framework of the failure and return early.
+ ALOGW("Timed out waiting for first frame to be drawn.");
+ std::unique_ptr<CaptureResult> captureResult = createCaptureResult(
+ request.getFrameNumber(), /* metadata = */ nullptr);
+ notifyTimeout(request, *captureResult);
+ submitCaptureResult(std::move(captureResult));
+ return;
+ }
+
ALOGV(
"%s: No new frame received on input surface after waiting for "
"%" PRIu64 "ns, repeating last frame.",
@@ -457,75 +478,20 @@
captureTimestamp.count(), timestamp.count());
}
- CaptureResult captureResult;
- captureResult.fmqResultSize = 0;
- captureResult.frameNumber = request.getFrameNumber();
- // Partial result needs to be set to 1 when metadata are present.
- captureResult.partialResult = 1;
- captureResult.inputBuffer.streamId = -1;
- captureResult.physicalCameraMetadata.resize(0);
- captureResult.result = createCaptureResultMetadata(
- captureTimestamp, request.getRequestSettings(), mReportedSensorSize);
+ std::unique_ptr<CaptureResult> captureResult = createCaptureResult(
+ request.getFrameNumber(),
+ createCaptureResultMetadata(
+ captureTimestamp, request.getRequestSettings(), mReportedSensorSize));
+ renderOutputBuffers(request, *captureResult);
- const std::vector<CaptureRequestBuffer>& buffers = request.getBuffers();
- captureResult.outputBuffers.resize(buffers.size());
-
- for (int i = 0; i < buffers.size(); ++i) {
- const CaptureRequestBuffer& reqBuffer = buffers[i];
- StreamBuffer& resBuffer = captureResult.outputBuffers[i];
- resBuffer.streamId = reqBuffer.getStreamId();
- resBuffer.bufferId = reqBuffer.getBufferId();
- resBuffer.status = BufferStatus::OK;
-
- const std::optional<Stream> streamConfig =
- mSessionContext.getStreamConfig(reqBuffer.getStreamId());
-
- if (!streamConfig.has_value()) {
- resBuffer.status = BufferStatus::ERROR;
- continue;
- }
-
- auto status = streamConfig->format == PixelFormat::BLOB
- ? renderIntoBlobStreamBuffer(
- reqBuffer.getStreamId(), reqBuffer.getBufferId(),
- captureResult.result, request.getRequestSettings(),
- reqBuffer.getFence())
- : renderIntoImageStreamBuffer(reqBuffer.getStreamId(),
- reqBuffer.getBufferId(),
- reqBuffer.getFence());
- if (!status.isOk()) {
- resBuffer.status = BufferStatus::ERROR;
- }
- }
-
- std::vector<NotifyMsg> notifyMsg{
- createShutterNotifyMsg(request.getFrameNumber(), captureTimestamp)};
- for (const StreamBuffer& resBuffer : captureResult.outputBuffers) {
- if (resBuffer.status != BufferStatus::OK) {
- notifyMsg.push_back(createBufferErrorNotifyMsg(request.getFrameNumber(),
- resBuffer.streamId));
- }
- }
-
- auto status = mCameraDeviceCallback->notify(notifyMsg);
+ auto status = notifyShutter(request, *captureResult, captureTimestamp);
if (!status.isOk()) {
ALOGE("%s: notify call failed: %s", __func__,
status.getDescription().c_str());
return;
}
- std::vector<::aidl::android::hardware::camera::device::CaptureResult>
- captureResults(1);
- captureResults[0] = std::move(captureResult);
-
- status = mCameraDeviceCallback->processCaptureResult(captureResults);
- if (!status.isOk()) {
- ALOGE("%s: processCaptureResult call failed: %s", __func__,
- status.getDescription().c_str());
- return;
- }
-
- ALOGV("%s: Successfully called processCaptureResult", __func__);
+ submitCaptureResult(std::move(captureResult));
}
std::chrono::nanoseconds VirtualCameraRenderThread::throttleRendering(
@@ -557,22 +523,125 @@
std::chrono::nanoseconds VirtualCameraRenderThread::getSurfaceTimestamp(
std::chrono::nanoseconds timeSinceLastFrame) {
std::chrono::nanoseconds surfaceTimestamp = mEglSurfaceTexture->getTimestamp();
- if (surfaceTimestamp.count() < 0) {
- uint64_t lastSurfaceTimestamp = mLastSurfaceTimestampNanoseconds.load();
- if (lastSurfaceTimestamp > 0) {
- // The timestamps were provided by the producer but we are
- // repeating the last frame, so we increase the previous timestamp by
- // the elapsed time sinced its capture, otherwise the camera framework
- // will discard the frame.
- surfaceTimestamp = std::chrono::nanoseconds(lastSurfaceTimestamp +
- timeSinceLastFrame.count());
- }
+ uint64_t lastSurfaceTimestamp = mLastSurfaceTimestampNanoseconds.load();
+ if (lastSurfaceTimestamp > 0 &&
+ surfaceTimestamp.count() <= lastSurfaceTimestamp) {
+ // The timestamps were provided by the producer but we are
+ // repeating the last frame, so we increase the previous timestamp by
+ // the elapsed time since its capture, otherwise the camera framework
+ // will discard the frame.
+ surfaceTimestamp = std::chrono::nanoseconds(lastSurfaceTimestamp +
+ timeSinceLastFrame.count());
+ ALOGI(
+ "Surface's timestamp is stall. Artificially increasing the surface "
+ "timestamp by %lld",
+ timeSinceLastFrame.count());
}
mLastSurfaceTimestampNanoseconds.store(surfaceTimestamp.count(),
std::memory_order_relaxed);
return surfaceTimestamp;
}
+std::unique_ptr<CaptureResult> VirtualCameraRenderThread::createCaptureResult(
+ int frameNumber, std::unique_ptr<CameraMetadata> metadata) {
+ std::unique_ptr<CaptureResult> captureResult =
+ std::make_unique<CaptureResult>();
+ captureResult->fmqResultSize = 0;
+ captureResult->frameNumber = frameNumber;
+ // Partial result needs to be set to 1 when metadata are present.
+ captureResult->partialResult = 1;
+ captureResult->inputBuffer.streamId = -1;
+ captureResult->physicalCameraMetadata.resize(0);
+ captureResult->result = metadata != nullptr ? *metadata : CameraMetadata();
+ return captureResult;
+}
+
+void VirtualCameraRenderThread::renderOutputBuffers(
+ const ProcessCaptureRequestTask& request, CaptureResult& captureResult) {
+ const std::vector<CaptureRequestBuffer>& buffers = request.getBuffers();
+ captureResult.outputBuffers.resize(buffers.size());
+
+ for (int i = 0; i < buffers.size(); ++i) {
+ const CaptureRequestBuffer& reqBuffer = buffers[i];
+ StreamBuffer& resBuffer = captureResult.outputBuffers[i];
+ resBuffer.streamId = reqBuffer.getStreamId();
+ resBuffer.bufferId = reqBuffer.getBufferId();
+ resBuffer.status = BufferStatus::OK;
+
+ const std::optional<Stream> streamConfig =
+ mSessionContext.getStreamConfig(reqBuffer.getStreamId());
+
+ if (!streamConfig.has_value()) {
+ resBuffer.status = BufferStatus::ERROR;
+ continue;
+ }
+
+ auto status = streamConfig->format == PixelFormat::BLOB
+ ? renderIntoBlobStreamBuffer(
+ reqBuffer.getStreamId(), reqBuffer.getBufferId(),
+ captureResult.result, request.getRequestSettings(),
+ reqBuffer.getFence())
+ : renderIntoImageStreamBuffer(reqBuffer.getStreamId(),
+ reqBuffer.getBufferId(),
+ reqBuffer.getFence());
+ if (!status.isOk()) {
+ resBuffer.status = BufferStatus::ERROR;
+ }
+ }
+}
+
+::ndk::ScopedAStatus VirtualCameraRenderThread::notifyTimeout(
+ const ProcessCaptureRequestTask& request, CaptureResult& captureResult) {
+ const std::vector<CaptureRequestBuffer>& buffers = request.getBuffers();
+ captureResult.outputBuffers.resize(buffers.size());
+
+ std::vector<NotifyMsg> notifyMsgs;
+
+ for (int i = 0; i < buffers.size(); ++i) {
+ const CaptureRequestBuffer& reqBuffer = buffers[i];
+ StreamBuffer& resBuffer = captureResult.outputBuffers[i];
+ resBuffer.streamId = reqBuffer.getStreamId();
+ resBuffer.bufferId = reqBuffer.getBufferId();
+ resBuffer.status = BufferStatus::ERROR;
+ notifyMsgs.push_back(createErrorNotifyMsg(
+ request.getFrameNumber(), resBuffer.streamId, ErrorCode::ERROR_REQUEST));
+ }
+ return mCameraDeviceCallback->notify(notifyMsgs);
+}
+
+::ndk::ScopedAStatus VirtualCameraRenderThread::notifyShutter(
+ const ProcessCaptureRequestTask& request, const CaptureResult& captureResult,
+ std::chrono::nanoseconds captureTimestamp) {
+ std::vector<NotifyMsg> notifyMsgs{
+ createShutterNotifyMsg(request.getFrameNumber(), captureTimestamp)};
+ for (const StreamBuffer& resBuffer : captureResult.outputBuffers) {
+ if (resBuffer.status != BufferStatus::OK) {
+ notifyMsgs.push_back(
+ createErrorNotifyMsg(request.getFrameNumber(), resBuffer.streamId));
+ }
+ }
+
+ return mCameraDeviceCallback->notify(notifyMsgs);
+}
+
+::ndk::ScopedAStatus VirtualCameraRenderThread::submitCaptureResult(
+ std::unique_ptr<CaptureResult> captureResult) {
+ std::vector<::aidl::android::hardware::camera::device::CaptureResult>
+ captureResults;
+ captureResults.push_back(std::move(*captureResult));
+
+ ::ndk::ScopedAStatus status =
+ mCameraDeviceCallback->processCaptureResult(captureResults);
+ if (!status.isOk()) {
+ ALOGE("%s: processCaptureResult call failed: %s", __func__,
+ status.getDescription().c_str());
+ return status;
+ }
+
+ ALOGV("%s: Successfully called processCaptureResult", __func__);
+ return status;
+}
+
void VirtualCameraRenderThread::flushCaptureRequest(
const ProcessCaptureRequestTask& request) {
CaptureResult captureResult;
diff --git a/services/camera/virtualcamera/VirtualCameraRenderThread.h b/services/camera/virtualcamera/VirtualCameraRenderThread.h
index 1fb4e84..4cad39e 100644
--- a/services/camera/virtualcamera/VirtualCameraRenderThread.h
+++ b/services/camera/virtualcamera/VirtualCameraRenderThread.h
@@ -19,6 +19,7 @@
#include <atomic>
#include <chrono>
+#include <cstddef>
#include <cstdint>
#include <deque>
#include <future>
@@ -205,6 +206,35 @@
std::chrono::nanoseconds getSurfaceTimestamp(
std::chrono::nanoseconds timeSinceLastFrame);
+ // Build a default capture result object populating the metadata from the request.
+ std::unique_ptr<::aidl::android::hardware::camera::device::CaptureResult>
+ createCaptureResult(
+ int frameNumber,
+ std::unique_ptr<aidl::android::hardware::camera::device::CameraMetadata>
+ metadata);
+
+ // Renders the images from the input surface into the request's buffers.
+ void renderOutputBuffers(
+ const ProcessCaptureRequestTask& request,
+ ::aidl::android::hardware::camera::device::CaptureResult& captureResult);
+
+ // Notify a shutter event for all the buffers in this request.
+ ::ndk::ScopedAStatus notifyShutter(
+ const ProcessCaptureRequestTask& request,
+ const ::aidl::android::hardware::camera::device::CaptureResult& captureResult,
+ std::chrono::nanoseconds captureTimestamp);
+
+ // Notify a timeout error for this request. The capture result still needs to
+ // be submitted after this call.
+ ::ndk::ScopedAStatus notifyTimeout(
+ const ProcessCaptureRequestTask& request,
+ ::aidl::android::hardware::camera::device::CaptureResult& captureResult);
+
+ // Submit the capture result to the camera callback.
+ ::ndk::ScopedAStatus submitCaptureResult(
+ std::unique_ptr<::aidl::android::hardware::camera::device::CaptureResult>
+ captureResult);
+
// Camera callback
const std::shared_ptr<
::aidl::android::hardware::camera::device::ICameraDeviceCallback>
diff --git a/services/camera/virtualcamera/VirtualCameraService.cc b/services/camera/virtualcamera/VirtualCameraService.cc
index 7466089..67225c9 100644
--- a/services/camera/virtualcamera/VirtualCameraService.cc
+++ b/services/camera/virtualcamera/VirtualCameraService.cc
@@ -240,11 +240,16 @@
const VirtualCameraConfiguration& configuration,
const std::string& cameraId, const int32_t deviceId, bool* _aidl_return) {
if (!mPermissionProxy.checkCallingPermission(kCreateVirtualDevicePermission)) {
- ALOGE("%s: caller (pid %d, uid %d) doesn't hold %s permission", __func__,
- getpid(), getuid(), kCreateVirtualDevicePermission);
return ndk::ScopedAStatus::fromExceptionCode(EX_SECURITY);
}
+ return registerCameraNoCheck(token, configuration, cameraId, deviceId,
+ _aidl_return);
+}
+ndk::ScopedAStatus VirtualCameraService::registerCameraNoCheck(
+ const ::ndk::SpAIBinder& token,
+ const VirtualCameraConfiguration& configuration,
+ const std::string& cameraId, const int32_t deviceId, bool* _aidl_return) {
if (_aidl_return == nullptr) {
return ndk::ScopedAStatus::fromServiceSpecificError(
Status::EX_ILLEGAL_ARGUMENT);
@@ -390,7 +395,7 @@
status = enableTestCameraCmd(out, err, cmd.optionToValueMap);
break;
case Command::DISABLE_TEST_CAMERA:
- disableTestCameraCmd(out);
+ status = disableTestCameraCmd(out);
break;
}
@@ -481,24 +486,27 @@
configuration.virtualCameraCallback =
ndk::SharedRefBase::make<VirtualCameraTestInstance>(
inputFps.value_or(kTestCameraDefaultInputFps));
- registerCamera(mTestCameraToken, configuration,
- cameraId.value_or(std::to_string(sNextIdNumericalPortion++)),
- kDefaultDeviceId, &ret);
- if (ret) {
- dprintf(out, "Successfully registered test camera %s\n",
- getCamera(mTestCameraToken)->getCameraName().c_str());
- } else {
- dprintf(err, "Failed to create test camera\n");
+ registerCameraNoCheck(
+ mTestCameraToken, configuration,
+ cameraId.value_or(std::to_string(sNextIdNumericalPortion++)),
+ kDefaultDeviceId, &ret);
+ if (!ret) {
+ dprintf(err, "Failed to create test camera (error %d)\n", ret);
+ return ret;
}
+
+ dprintf(out, "Successfully registered test camera %s\n",
+ getCamera(mTestCameraToken)->getCameraName().c_str());
return STATUS_OK;
}
-void VirtualCameraService::disableTestCameraCmd(const int out) {
+binder_status_t VirtualCameraService::disableTestCameraCmd(const int out) {
if (mTestCameraToken == nullptr) {
dprintf(out, "Test camera is not registered.");
}
- unregisterCamera(mTestCameraToken);
+ binder_status_t ret = unregisterCamera(mTestCameraToken).getStatus();
mTestCameraToken.set(nullptr);
+ return ret;
}
} // namespace virtualcamera
diff --git a/services/camera/virtualcamera/VirtualCameraService.h b/services/camera/virtualcamera/VirtualCameraService.h
index 4ef01c7..defa75b 100644
--- a/services/camera/virtualcamera/VirtualCameraService.h
+++ b/services/camera/virtualcamera/VirtualCameraService.h
@@ -85,7 +85,16 @@
binder_status_t enableTestCameraCmd(
int out, int err, const std::map<std::string, std::string>& options);
// Disable and destroy test camera instance if there's one.
- void disableTestCameraCmd(int out);
+ binder_status_t disableTestCameraCmd(int out);
+
+ // Register camera corresponding to the binder token without checking for
+ // caller permission.
+ ndk::ScopedAStatus registerCameraNoCheck(
+ const ::ndk::SpAIBinder& token,
+ const ::aidl::android::companion::virtualcamera::VirtualCameraConfiguration&
+ configuration,
+ const std::string& cameraId, int32_t deviceId, bool* _aidl_return)
+ EXCLUDES(mLock);
std::shared_ptr<VirtualCameraProvider> mVirtualCameraProvider;
bool mVerifyEglExtensions = true;
diff --git a/services/camera/virtualcamera/util/EglSurfaceTexture.cc b/services/camera/virtualcamera/util/EglSurfaceTexture.cc
index be36ec4..fc469a0 100644
--- a/services/camera/virtualcamera/util/EglSurfaceTexture.cc
+++ b/services/camera/virtualcamera/util/EglSurfaceTexture.cc
@@ -105,6 +105,10 @@
return std::chrono::nanoseconds(mGlConsumer->getTimestamp());
}
+bool EglSurfaceTexture::isFirstFrameDrawn() {
+ return mGlConsumer->getFrameNumber() > 0;
+}
+
GLuint EglSurfaceTexture::updateTexture() {
int previousFrameId;
int framesAdvance = 0;
diff --git a/services/camera/virtualcamera/util/EglSurfaceTexture.h b/services/camera/virtualcamera/util/EglSurfaceTexture.h
index c1f1169..9f75315 100644
--- a/services/camera/virtualcamera/util/EglSurfaceTexture.h
+++ b/services/camera/virtualcamera/util/EglSurfaceTexture.h
@@ -86,6 +86,9 @@
// set by the most recent call to updateTexture.
std::chrono::nanoseconds getTimestamp();
+ // Returns true if a frame has ever been drawn on this surface.
+ bool isFirstFrameDrawn();
+
private:
#if !COM_ANDROID_GRAPHICS_LIBGUI_FLAGS(WB_CONSUMER_BASE_OWNS_BQ)
sp<IGraphicBufferProducer> mBufferProducer;
diff --git a/services/camera/virtualcamera/util/Permissions.cc b/services/camera/virtualcamera/util/Permissions.cc
index 634bca3..1c1db32 100644
--- a/services/camera/virtualcamera/util/Permissions.cc
+++ b/services/camera/virtualcamera/util/Permissions.cc
@@ -20,7 +20,7 @@
#include "Permissions.h"
#include "binder/PermissionCache.h"
-#include "log/log.h"
+#include "log/log_main.h"
namespace android {
namespace companion {
@@ -39,8 +39,14 @@
const bool hasPermission = PermissionCache::checkCallingPermission(
String16(permission.c_str()), &pid, &uid);
- ALOGV("%s: Checking %s permission for pid %d uid %d: %s", __func__,
- permission.c_str(), pid, uid, hasPermission ? "granted" : "denied");
+ if (hasPermission) {
+ ALOGV("%s: Checking %s permission for pid %d uid %d: granted", __func__,
+ permission.c_str(), pid, uid);
+ } else {
+ ALOGW("%s: Checking %s permission for pid %d uid %d: denied", __func__,
+ permission.c_str(), pid, uid);
+ }
+
return hasPermission;
}
} // namespace
diff --git a/services/mediametrics/Android.bp b/services/mediametrics/Android.bp
index c90488f..d29aa80 100644
--- a/services/mediametrics/Android.bp
+++ b/services/mediametrics/Android.bp
@@ -159,7 +159,7 @@
},
shared_libs: [
- "mediametricsservice-aidl-cpp",
+ "libaudioutils",
"libbase", // android logging
"libbinder",
"libcutils",
@@ -174,6 +174,7 @@
"libstatspull",
"libstatssocket",
"libutils",
+ "mediametricsservice-aidl-cpp",
"packagemanager_aidl-cpp",
],
diff --git a/services/mediametrics/AudioPowerUsage.cpp b/services/mediametrics/AudioPowerUsage.cpp
index 201d740..095832c 100644
--- a/services/mediametrics/AudioPowerUsage.cpp
+++ b/services/mediametrics/AudioPowerUsage.cpp
@@ -25,6 +25,7 @@
#include <sstream>
#include <string>
#include <audio_utils/clock.h>
+#include <audio_utils/StringUtils.h>
#include <cutils/properties.h>
#include <stats_media_metrics.h>
#include <sys/timerfd.h>
@@ -131,7 +132,7 @@
int32_t AudioPowerUsage::deviceFromStringPairs(const std::string& device_strings) {
int32_t deviceMask = 0;
- const auto devaddrvec = stringutils::getDeviceAddressPairs(device_strings);
+ const auto devaddrvec = audio_utils::stringutils::getDeviceAddressPairs(device_strings);
for (const auto &[device, addr] : devaddrvec) {
int32_t combo_device = 0;
deviceFromString(device, combo_device);
diff --git a/services/mediametrics/AudioTypes.cpp b/services/mediametrics/AudioTypes.cpp
index 353ae12..0eeff2b 100644
--- a/services/mediametrics/AudioTypes.cpp
+++ b/services/mediametrics/AudioTypes.cpp
@@ -17,6 +17,7 @@
#include "AudioTypes.h"
#include "MediaMetricsConstants.h"
#include "StringUtils.h"
+#include <audio_utils/StringUtils.h>
#include <media/TypeConverter.h> // requires libmedia_helper to get the Audio code.
#include <stats_media_metrics.h> // statsd
@@ -349,7 +350,7 @@
template <typename Traits>
int32_t int32FromFlags(const std::string &flags)
{
- const auto result = stringutils::split(flags, "|");
+ const auto result = audio_utils::stringutils::split(flags, "|");
int32_t intFlags = 0;
for (const auto& flag : result) {
typename Traits::Type value;
@@ -364,7 +365,7 @@
template <typename Traits>
std::string stringFromFlags(const std::string &flags, size_t len)
{
- const auto result = stringutils::split(flags, "|");
+ const auto result = audio_utils::stringutils::split(flags, "|");
std::string sFlags;
for (const auto& flag : result) {
typename Traits::Type value;
@@ -383,7 +384,7 @@
{
if (str.empty()) return {};
- const auto result = stringutils::split(str, "|");
+ const auto result = audio_utils::stringutils::split(str, "|");
std::stringstream ss;
for (const auto &s : result) {
if (map.count(s) > 0) {
@@ -399,7 +400,7 @@
{
if (str.empty()) return {};
- const auto result = stringutils::split(str, "|");
+ const auto result = audio_utils::stringutils::split(str, "|");
typename M::mapped_type value{};
for (const auto &s : result) {
auto it = map.find(s);
@@ -416,7 +417,7 @@
if (str.empty()) return v;
- const auto result = stringutils::split(str, "|");
+ const auto result = audio_utils::stringutils::split(str, "|");
for (const auto &s : result) {
auto it = map.find(s);
if (it == map.end()) continue;
@@ -429,7 +430,7 @@
{
std::vector<int64_t> v;
- const auto result = stringutils::split(s, "|");
+ const auto result = audio_utils::stringutils::split(s, "|");
for (const auto &mask : result) {
// 0 if undetected or if actually 0.
int64_t int64Mask = strtoll(mask.c_str(), nullptr, 0);
diff --git a/services/mediametrics/StringUtils.cpp b/services/mediametrics/StringUtils.cpp
index 5766f1c..c4111ae 100644
--- a/services/mediametrics/StringUtils.cpp
+++ b/services/mediametrics/StringUtils.cpp
@@ -19,98 +19,12 @@
#include <utils/Log.h>
#include "StringUtils.h"
-
+#include "AudioTypes.h"
+#include <audio_utils/StringUtils.h>
#include <charconv>
-#include "AudioTypes.h"
-
namespace android::mediametrics::stringutils {
-std::string tokenizer(std::string::const_iterator& it,
- const std::string::const_iterator& end, const char *reserved)
-{
- // consume leading white space
- for (; it != end && std::isspace(*it); ++it);
- if (it == end) return {};
-
- auto start = it;
- // parse until we hit a reserved keyword or space
- if (strchr(reserved, *it)) return {start, ++it};
- for (;;) {
- ++it;
- if (it == end || std::isspace(*it) || strchr(reserved, *it)) return {start, it};
- }
-}
-
-std::vector<std::string> split(const std::string& flags, const char *delim)
-{
- std::vector<std::string> result;
- for (auto it = flags.begin(); ; ) {
- auto flag = tokenizer(it, flags.end(), delim);
- if (flag.empty() || !std::isalnum(flag[0])) return result;
- result.emplace_back(std::move(flag));
-
- // look for the delimeter and discard
- auto token = tokenizer(it, flags.end(), delim);
- if (token.size() != 1 || strchr(delim, token[0]) == nullptr) return result;
- }
-}
-
-bool parseVector(const std::string &str, std::vector<int32_t> *vector) {
- std::vector<int32_t> values;
- const char *p = str.c_str();
- const char *last = p + str.size();
- while (p != last) {
- if (*p == ',' || *p == '{' || *p == '}') {
- p++;
- }
- int32_t value = -1;
- auto [ptr, error] = std::from_chars(p, last, value);
- if (error == std::errc::invalid_argument || error == std::errc::result_out_of_range) {
- return false;
- }
- p = ptr;
- values.push_back(value);
- }
- *vector = std::move(values);
- return true;
-}
-
-std::vector<std::pair<std::string, std::string>> getDeviceAddressPairs(const std::string& devices)
-{
- std::vector<std::pair<std::string, std::string>> result;
-
- // Currently, the device format is EXACTLY
- // (device1, addr1)|(device2, addr2)|...
-
- static constexpr char delim[] = "()|,";
- for (auto it = devices.begin(); ; ) {
- auto token = tokenizer(it, devices.end(), delim);
- if (token != "(") return result;
-
- auto device = tokenizer(it, devices.end(), delim);
- if (device.empty() || !std::isalnum(device[0])) return result;
-
- token = tokenizer(it, devices.end(), delim);
- if (token != ",") return result;
-
- // special handling here for empty addresses
- auto address = tokenizer(it, devices.end(), delim);
- if (address.empty() || !std::isalnum(device[0])) return result;
- if (address == ")") { // no address, just the ")"
- address.clear();
- } else {
- token = tokenizer(it, devices.end(), delim);
- if (token != ")") return result;
- }
-
- result.emplace_back(std::move(device), std::move(address));
-
- token = tokenizer(it, devices.end(), delim);
- if (token != "|") return result; // this includes end of string detection
- }
-}
-
size_t replace(std::string &str, const char *targetChars, const char replaceChar)
{
size_t replaced = 0;
@@ -127,7 +41,7 @@
std::pair<std::string /* external statsd */, std::string /* internal */>
parseDevicePairs(const std::string& devicePairs) {
std::pair<std::string, std::string> result{};
- const auto devaddrvec = stringutils::getDeviceAddressPairs(devicePairs);
+ const auto devaddrvec = audio_utils::stringutils::getDeviceAddressPairs(devicePairs);
for (const auto& [device, addr] : devaddrvec) { // addr ignored for now.
if (!result.second.empty()) {
result.second.append("|"); // delimit devices with '|'.
diff --git a/services/mediametrics/fuzzer/Android.bp b/services/mediametrics/fuzzer/Android.bp
index 99703e3..efea252 100644
--- a/services/mediametrics/fuzzer/Android.bp
+++ b/services/mediametrics/fuzzer/Android.bp
@@ -36,6 +36,7 @@
],
shared_libs: [
+ "libaudioutils",
"libbase",
"libbinder",
"libcutils",
diff --git a/services/mediametrics/include/mediametricsservice/StringUtils.h b/services/mediametrics/include/mediametricsservice/StringUtils.h
index ed2cf2e..3e1cafc 100644
--- a/services/mediametrics/include/mediametricsservice/StringUtils.h
+++ b/services/mediametrics/include/mediametricsservice/StringUtils.h
@@ -61,30 +61,6 @@
}
/**
- * Return string tokens from iterator, separated by spaces and reserved chars.
- */
-std::string tokenizer(std::string::const_iterator& it,
- const std::string::const_iterator& end, const char *reserved);
-
-/**
- * Splits flags string based on delimeters (or, whitespace which is removed).
- */
-std::vector<std::string> split(const std::string& flags, const char *delim);
-
-/**
- * Parses a vector of integers using ',' '{' and '}' as delimeters. Leaves
- * vector unmodified if the parsing fails.
- */
-bool parseVector(const std::string &str, std::vector<int32_t> *vector);
-
-/**
- * Parse the devices string and return a vector of device address pairs.
- *
- * A failure to parse returns early with the contents that were able to be parsed.
- */
-std::vector<std::pair<std::string, std::string>> getDeviceAddressPairs(const std::string &devices);
-
-/**
* Replaces targetChars with replaceChar in string, returns number of chars replaced.
*/
size_t replace(std::string &str, const char *targetChars, const char replaceChar);
diff --git a/services/mediametrics/statsd_codec.cpp b/services/mediametrics/statsd_codec.cpp
index 844f204..2f7c4f9 100644
--- a/services/mediametrics/statsd_codec.cpp
+++ b/services/mediametrics/statsd_codec.cpp
@@ -33,6 +33,7 @@
#include <stats_media_metrics.h>
#include <stats_event.h>
+#include <audio_utils/StringUtils.h>
#include <frameworks/proto_logging/stats/message/mediametrics_message.pb.h>
#include <mediametricsservice/cleaner.h>
#include <mediametricsservice/iface_statsd.h>
@@ -171,7 +172,7 @@
}
static void parseVector(const std::string &str, std::vector<int32_t> *vector) {
- if (!mediametrics::stringutils::parseVector(str, vector)) {
+ if (!audio_utils::stringutils::parseVector(str, vector)) {
ALOGE("failed to parse integer vector from '%s'", str.c_str());
}
}
diff --git a/services/mediametrics/tests/mediametrics_tests.cpp b/services/mediametrics/tests/mediametrics_tests.cpp
index a7684f4..383ed6a 100644
--- a/services/mediametrics/tests/mediametrics_tests.cpp
+++ b/services/mediametrics/tests/mediametrics_tests.cpp
@@ -22,6 +22,7 @@
#include <unordered_set>
#include <vector>
+#include <audio_utils/StringUtils.h>
#include <gtest/gtest.h>
#include <media/MediaMetricsItem.h>
#include <mediametricsservice/AudioTypes.h>
@@ -31,7 +32,7 @@
#include <system/audio.h>
using namespace android;
-using android::mediametrics::stringutils::parseVector;
+using android::audio_utils::stringutils::parseVector;
static size_t countNewlines(const char *s) {
size_t count = 0;
@@ -59,35 +60,6 @@
ASSERT_EQ(false, android::mediametrics::startsWith(s, std::string("est")));
}
-TEST(mediametrics_tests, parseVector) {
- {
- std::vector<int32_t> values;
- EXPECT_EQ(true, parseVector("0{4,300,0,-112343,350}9", &values));
- EXPECT_EQ(values, std::vector<int32_t>({0, 4, 300, 0, -112343, 350, 9}));
- }
- {
- std::vector<int32_t> values;
- EXPECT_EQ(true, parseVector("53", &values));
- EXPECT_EQ(values, std::vector<int32_t>({53}));
- }
- {
- std::vector<int32_t> values;
- EXPECT_EQ(false, parseVector("5{3,6*3}3", &values));
- EXPECT_EQ(values, std::vector<int32_t>({}));
- }
- {
- std::vector<int32_t> values = {1}; // should still be this when parsing fails
- std::vector<int32_t> expected = {1};
- EXPECT_EQ(false, parseVector("51342abcd,1232", &values));
- EXPECT_EQ(values, std::vector<int32_t>({1}));
- }
- {
- std::vector<int32_t> values = {2}; // should still be this when parsing fails
- EXPECT_EQ(false, parseVector("12345678901234,12345678901234", &values));
- EXPECT_EQ(values, std::vector<int32_t>({2}));
- }
-}
-
TEST(mediametrics_tests, defer) {
bool check = false;
{
@@ -934,37 +906,6 @@
}
}
-TEST(mediametrics_tests, device_parsing) {
- auto devaddr = android::mediametrics::stringutils::getDeviceAddressPairs("(DEVICE, )");
- ASSERT_EQ((size_t)1, devaddr.size());
- ASSERT_EQ("DEVICE", devaddr[0].first);
- ASSERT_EQ("", devaddr[0].second);
-
- devaddr = android::mediametrics::stringutils::getDeviceAddressPairs(
- "(DEVICE1, A)|(D, ADDRB)");
- ASSERT_EQ((size_t)2, devaddr.size());
- ASSERT_EQ("DEVICE1", devaddr[0].first);
- ASSERT_EQ("A", devaddr[0].second);
- ASSERT_EQ("D", devaddr[1].first);
- ASSERT_EQ("ADDRB", devaddr[1].second);
-
- devaddr = android::mediametrics::stringutils::getDeviceAddressPairs(
- "(A,B)|(C,D)");
- ASSERT_EQ((size_t)2, devaddr.size());
- ASSERT_EQ("A", devaddr[0].first);
- ASSERT_EQ("B", devaddr[0].second);
- ASSERT_EQ("C", devaddr[1].first);
- ASSERT_EQ("D", devaddr[1].second);
-
- devaddr = android::mediametrics::stringutils::getDeviceAddressPairs(
- " ( A1 , B ) | ( C , D2 ) ");
- ASSERT_EQ((size_t)2, devaddr.size());
- ASSERT_EQ("A1", devaddr[0].first);
- ASSERT_EQ("B", devaddr[0].second);
- ASSERT_EQ("C", devaddr[1].first);
- ASSERT_EQ("D2", devaddr[1].second);
-}
-
TEST(mediametrics_tests, timed_action) {
android::mediametrics::TimedAction timedAction;
std::atomic_int value1 = 0;
diff --git a/services/oboeservice/AAudioEndpointManager.cpp b/services/oboeservice/AAudioEndpointManager.cpp
index b5ee2f2..243f1f1 100644
--- a/services/oboeservice/AAudioEndpointManager.cpp
+++ b/services/oboeservice/AAudioEndpointManager.cpp
@@ -119,8 +119,9 @@
}
}
- ALOGV("findExclusiveEndpoint_l(), found %p for device = %d, sessionId = %d",
- endpoint.get(), configuration.getDeviceId(), configuration.getSessionId());
+ ALOGV("findExclusiveEndpoint_l(), found %p for devices = %s, sessionId = %d",
+ endpoint.get(), toString(configuration.getDeviceIds()).c_str(),
+ configuration.getSessionId());
return endpoint;
}
@@ -137,8 +138,9 @@
}
}
- ALOGV("findSharedEndpoint_l(), found %p for device = %d, sessionId = %d",
- endpoint.get(), configuration.getDeviceId(), configuration.getSessionId());
+ ALOGV("findSharedEndpoint_l(), found %p for devices = %s, sessionId = %d",
+ endpoint.get(), toString(configuration.getDeviceIds()).c_str(),
+ configuration.getSessionId());
return endpoint;
}
@@ -192,8 +194,8 @@
} else {
const sp<AAudioServiceEndpointMMAP> endpointMMap =
new AAudioServiceEndpointMMAP(aaudioService);
- ALOGV("%s(), no match so try to open MMAP %p for dev %d",
- __func__, endpointMMap.get(), configuration.getDeviceId());
+ ALOGV("%s(), no match so try to open MMAP %p for devices %s",
+ __func__, endpointMMap.get(), toString(configuration.getDeviceIds()).c_str());
endpoint = endpointMMap;
const aaudio_result_t result = endpoint->open(request);
@@ -250,8 +252,9 @@
mSharedOpenCount++;
}
}
- ALOGV("%s(), created endpoint %p, requested device = %d, dir = %d",
- __func__, endpoint.get(), configuration.getDeviceId(), (int)direction);
+ ALOGV("%s(), created endpoint %p, requested device = %s, dir = %d",
+ __func__, endpoint.get(), android::toString(configuration.getDeviceIds()).c_str(),
+ (int)direction);
IPCThreadState::self()->restoreCallingIdentity(token);
}
@@ -289,8 +292,9 @@
serviceEndpoint->close();
mExclusiveCloseCount++;
- ALOGV("%s() %p for device %d",
- __func__, serviceEndpoint.get(), serviceEndpoint->getDeviceId());
+ ALOGV("%s() %p for devices %s",
+ __func__, serviceEndpoint.get(),
+ android::toString(serviceEndpoint->getDeviceIds()).c_str());
}
}
@@ -313,7 +317,8 @@
serviceEndpoint->close();
mSharedCloseCount++;
- ALOGV("%s(%p) closed for device %d",
- __func__, serviceEndpoint.get(), serviceEndpoint->getDeviceId());
+ ALOGV("%s(%p) closed for device %s",
+ __func__, serviceEndpoint.get(),
+ android::toString(serviceEndpoint->getDeviceIds()).c_str());
}
}
diff --git a/services/oboeservice/AAudioServiceEndpoint.cpp b/services/oboeservice/AAudioServiceEndpoint.cpp
index e7d14a0..c677619 100644
--- a/services/oboeservice/AAudioServiceEndpoint.cpp
+++ b/services/oboeservice/AAudioServiceEndpoint.cpp
@@ -25,6 +25,7 @@
#include <sstream>
#include <vector>
+#include <system/aaudio/AAudio.h>
#include <utils/Singleton.h>
@@ -56,7 +57,7 @@
result << " Direction: " << ((getDirection() == AAUDIO_DIRECTION_OUTPUT)
? "OUTPUT" : "INPUT") << "\n";
result << " Requested Device Id: " << mRequestedDeviceId << "\n";
- result << " Device Id: " << getDeviceId() << "\n";
+ result << " Device Ids: " << android::toString(getDeviceIds()).c_str() << "\n";
result << " Sample Rate: " << getSampleRate() << "\n";
result << " Channel Count: " << getSamplesPerFrame() << "\n";
result << " Channel Mask: 0x" << std::hex << getChannelMask() << std::dec << "\n";
@@ -154,8 +155,8 @@
if (configuration.getDirection() != getDirection()) {
return false;
}
- if (configuration.getDeviceId() != AAUDIO_UNSPECIFIED &&
- configuration.getDeviceId() != getDeviceId()) {
+ if (!configuration.getDeviceIds().empty() &&
+ !android::areDeviceIdsEqual(configuration.getDeviceIds(), getDeviceIds())) {
return false;
}
if (configuration.getSessionId() != AAUDIO_SESSION_ID_ALLOCATE &&
@@ -195,20 +196,28 @@
? AAudioConvert_inputPresetToAudioSource(params->getInputPreset())
: AUDIO_SOURCE_DEFAULT;
audio_flags_mask_t flags;
+ std::optional<std::string> optTags = {};
if (direction == AAUDIO_DIRECTION_OUTPUT) {
flags = AAudio_computeAudioFlagsMask(
params->getAllowedCapturePolicy(),
params->getSpatializationBehavior(),
params->isContentSpatialized(),
AUDIO_OUTPUT_FLAG_FAST);
+ optTags = params->getTags();
} else {
flags = static_cast<audio_flags_mask_t>(AUDIO_FLAG_LOW_LATENCY
| AAudioConvert_privacySensitiveToAudioFlagsMask(params->isPrivacySensitive()));
}
- return {
+ audio_attributes_t nativeAttributes = {
.content_type = contentType,
.usage = usage,
.source = source,
.flags = flags,
- .tags = "" };
+ .tags = ""
+ };
+ if (optTags.has_value() && !optTags->empty()) {
+ strncpy(nativeAttributes.tags, optTags.value().c_str(), AAUDIO_ATTRIBUTES_TAGS_MAX_SIZE);
+ nativeAttributes.tags[AAUDIO_ATTRIBUTES_TAGS_MAX_SIZE - 1] = '\0';
+ }
+ return nativeAttributes;
}
diff --git a/services/oboeservice/AAudioServiceEndpointMMAP.cpp b/services/oboeservice/AAudioServiceEndpointMMAP.cpp
index d663f37..66918c1 100644
--- a/services/oboeservice/AAudioServiceEndpointMMAP.cpp
+++ b/services/oboeservice/AAudioServiceEndpointMMAP.cpp
@@ -105,7 +105,7 @@
aaudio_result_t result = AAUDIO_OK;
mAudioDataWrapper = std::make_unique<SharedMemoryWrapper>();
copyFrom(request.getConstantConfiguration());
- mRequestedDeviceId = getDeviceId();
+ mRequestedDeviceId = android::getFirstDeviceId(getDeviceIds());
mMmapClient.attributionSource = request.getAttributionSource();
// TODO b/182392769: use attribution source util
@@ -173,11 +173,13 @@
audio_config_base_t* config) {
aaudio_result_t result = AAUDIO_OK;
audio_config_base_t currentConfig = *config;
- audio_port_handle_t deviceId;
+ android::DeviceIdVector deviceIds;
const audio_attributes_t attributes = getAudioAttributesFrom(this);
- deviceId = mRequestedDeviceId;
+ if (mRequestedDeviceId != AAUDIO_UNSPECIFIED) {
+ deviceIds.push_back(mRequestedDeviceId);
+ }
const aaudio_direction_t direction = getDirection();
@@ -202,16 +204,16 @@
// Open HAL stream. Set mMmapStream
ALOGD("%s trying to open MMAP stream with format=%#x, "
- "sample_rate=%u, channel_mask=%#x, device=%d",
+ "sample_rate=%u, channel_mask=%#x, device=%s",
__func__, config->format, config->sample_rate,
- config->channel_mask, deviceId);
+ config->channel_mask, android::toString(deviceIds).c_str());
const std::lock_guard<std::mutex> lock(mMmapStreamLock);
const status_t status = MmapStreamInterface::openMmapStream(streamDirection,
&attributes,
config,
mMmapClient,
- &deviceId,
+ &deviceIds,
&sessionId,
this, // callback
mMmapStream,
@@ -229,10 +231,10 @@
return AAUDIO_ERROR_UNAVAILABLE;
}
- if (deviceId == AAUDIO_UNSPECIFIED) {
- ALOGW("%s() - openMmapStream() failed to set deviceId", __func__);
+ if (deviceIds.empty()) {
+ ALOGW("%s() - openMmapStream() failed to set deviceIds", __func__);
}
- setDeviceId(deviceId);
+ setDeviceIds(deviceIds);
if (sessionId == AUDIO_SESSION_ALLOCATE) {
ALOGW("%s() - openMmapStream() failed to set sessionId", __func__);
@@ -244,8 +246,8 @@
: (aaudio_session_id_t) sessionId;
setSessionId(actualSessionId);
- ALOGD("%s(format = 0x%X) deviceId = %d, sessionId = %d",
- __func__, config->format, getDeviceId(), getSessionId());
+ ALOGD("%s(format = 0x%X) deviceIds = %s, sessionId = %d",
+ __func__, config->format, toString(getDeviceIds()).c_str(), getSessionId());
// Create MMAP/NOIRQ buffer.
result = createMmapBuffer_l();
@@ -274,9 +276,9 @@
mDataReportOffsetNanos = ((int64_t)mTimestampGracePeriodMs) * AAUDIO_NANOS_PER_MILLISECOND;
- ALOGD("%s() got rate = %d, channels = %d channelMask = %#x, deviceId = %d, capacity = %d\n",
+ ALOGD("%s() got rate = %d, channels = %d channelMask = %#x, deviceIds = %s, capacity = %d\n",
__func__, getSampleRate(), getSamplesPerFrame(), getChannelMask(),
- deviceId, getBufferCapacity());
+ android::toString(deviceIds).c_str(), getBufferCapacity());
ALOGD("%s() got format = 0x%X = %s, frame size = %d, burst size = %d",
__func__, getFormat(), audio_format_to_string(getFormat()),
@@ -287,7 +289,11 @@
error:
close_l();
// restore original requests
- setDeviceId(mRequestedDeviceId);
+ android::DeviceIdVector requestedDeviceIds;
+ if (mRequestedDeviceId != AAUDIO_UNSPECIFIED) {
+ requestedDeviceIds.push_back(mRequestedDeviceId);
+ }
+ setDeviceIds(requestedDeviceIds);
setSessionId(requestedSessionId);
return result;
}
@@ -422,9 +428,17 @@
return AAUDIO_ERROR_NULL;
}
struct audio_mmap_position position;
- const status_t status = mMmapStream->getMmapPosition(&position);
+ status_t status = mMmapStream->getMmapPosition(&position);
ALOGV("%s() status= %d, pos = %d, nanos = %lld\n",
__func__, status, position.position_frames, (long long) position.time_nanoseconds);
+ if (status == INVALID_OPERATION) {
+ // The HAL can return INVALID_OPERATION when the position is UNKNOWN.
+ // That can cause SHARED MMAP to break. So coerce it to NOT_ENOUGH_DATA.
+ // That will get converted to AAUDIO_ERROR_UNAVAILABLE.
+ ALOGW("%s(): change INVALID_OPERATION to NOT_ENOUGH_DATA", __func__);
+ status = NOT_ENOUGH_DATA; // see b/376467258
+ }
+
const aaudio_result_t result = AAudioConvert_androidToAAudioResult(status);
if (result == AAUDIO_ERROR_UNAVAILABLE) {
ALOGW("%s(): getMmapPosition() has no position data available", __func__);
@@ -476,27 +490,27 @@
}
};
-void AAudioServiceEndpointMMAP::onRoutingChanged(audio_port_handle_t portHandle) {
- const auto deviceId = static_cast<int32_t>(portHandle);
- ALOGD("%s() called with dev %d, old = %d", __func__, deviceId, getDeviceId());
- if (getDeviceId() != deviceId) {
- if (getDeviceId() != AUDIO_PORT_HANDLE_NONE) {
+void AAudioServiceEndpointMMAP::onRoutingChanged(const android::DeviceIdVector& deviceIds) {
+ ALOGD("%s() called with dev %s, old = %s", __func__, android::toString(deviceIds).c_str(),
+ android::toString(getDeviceIds()).c_str());
+ if (!android::areDeviceIdsEqual(getDeviceIds(), deviceIds)) {
+ if (!getDeviceIds().empty()) {
// When there is a routing changed, mmap stream should be disconnected. Set `mConnected`
- // as false here so that there won't be a new stream connect to this endpoint.
+ // as false here so that there won't be a new stream connected to this endpoint.
mConnected.store(false);
const android::sp<AAudioServiceEndpointMMAP> holdEndpoint(this);
- std::thread asyncTask([holdEndpoint, deviceId]() {
+ std::thread asyncTask([holdEndpoint, deviceIds]() {
ALOGD("onRoutingChanged() asyncTask launched");
// When routing changed, the stream is disconnected and cannot be used except for
// closing. In that case, it should be safe to release all registered streams.
// This can help release service side resource in case the client doesn't close
// the stream after receiving disconnect event.
holdEndpoint->releaseRegisteredStreams();
- holdEndpoint->setDeviceId(deviceId);
+ holdEndpoint->setDeviceIds(deviceIds);
});
asyncTask.detach();
} else {
- setDeviceId(deviceId);
+ setDeviceIds(deviceIds);
}
}
};
diff --git a/services/oboeservice/AAudioServiceEndpointMMAP.h b/services/oboeservice/AAudioServiceEndpointMMAP.h
index 962d390..a4eeba1 100644
--- a/services/oboeservice/AAudioServiceEndpointMMAP.h
+++ b/services/oboeservice/AAudioServiceEndpointMMAP.h
@@ -83,7 +83,7 @@
void onVolumeChanged(float volume) override;
- void onRoutingChanged(audio_port_handle_t portHandle) override;
+ void onRoutingChanged(const android::DeviceIdVector& deviceIds) override;
// ------------------------------------------------------------------------------
aaudio_result_t getDownDataDescription(AudioEndpointParcelable* parcelable);
diff --git a/services/oboeservice/AAudioServiceEndpointShared.cpp b/services/oboeservice/AAudioServiceEndpointShared.cpp
index 5e1e594..37ccf8a 100644
--- a/services/oboeservice/AAudioServiceEndpointShared.cpp
+++ b/services/oboeservice/AAudioServiceEndpointShared.cpp
@@ -64,7 +64,7 @@
const AAudioStreamConfiguration &configuration = request.getConstantConfiguration();
copyFrom(configuration);
- mRequestedDeviceId = configuration.getDeviceId();
+ mRequestedDeviceId = android::getFirstDeviceId(configuration.getDeviceIds());
AudioStreamBuilder builder;
builder.copyFrom(configuration);
@@ -75,11 +75,14 @@
builder.setBufferCapacity(DEFAULT_BUFFER_CAPACITY);
+ // Each shared stream will use its own SRC.
+ builder.setSampleRate(AAUDIO_UNSPECIFIED);
+
result = mStreamInternal->open(builder);
setSampleRate(mStreamInternal->getSampleRate());
setChannelMask(mStreamInternal->getChannelMask());
- setDeviceId(mStreamInternal->getDeviceId());
+ setDeviceIds(mStreamInternal->getDeviceIds());
setSessionId(mStreamInternal->getSessionId());
setFormat(AUDIO_FORMAT_PCM_FLOAT); // force for mixer
setHardwareSampleRate(mStreamInternal->getHardwareSampleRate());
@@ -220,7 +223,7 @@
void AAudioServiceEndpointShared::handleDisconnectRegisteredStreamsAsync() {
android::sp<AAudioServiceEndpointShared> holdEndpoint(this);
// When there is a routing changed, mmap stream should be disconnected. Set `mConnected`
- // as false here so that there won't be a new stream connect to this endpoint.
+ // as false here so that there won't be a new stream connected to this endpoint.
mConnected.store(false);
std::thread asyncTask([holdEndpoint]() {
// When handling disconnection, the service side has disconnected. In that case,
diff --git a/services/oboeservice/AAudioServiceStreamBase.cpp b/services/oboeservice/AAudioServiceStreamBase.cpp
index 78cf706..1c24f18 100644
--- a/services/oboeservice/AAudioServiceStreamBase.cpp
+++ b/services/oboeservice/AAudioServiceStreamBase.cpp
@@ -127,7 +127,8 @@
.set(AMEDIAMETRICS_PROP_DIRECTION,
AudioGlobal_convertDirectionToText(getDirection()))
.set(AMEDIAMETRICS_PROP_ENCODING, toString(getFormat()).c_str())
- .set(AMEDIAMETRICS_PROP_ROUTEDDEVICEID, (int32_t)getDeviceId())
+ .set(AMEDIAMETRICS_PROP_ROUTEDDEVICEID, android::getFirstDeviceId(getDeviceIds()))
+ .set(AMEDIAMETRICS_PROP_ROUTEDDEVICEIDS, android::toString(getDeviceIds()).c_str())
.set(AMEDIAMETRICS_PROP_SAMPLERATE, (int32_t)getSampleRate())
.set(AMEDIAMETRICS_PROP_SESSIONID, (int32_t)getSessionId())
.set(AMEDIAMETRICS_PROP_SOURCE, toString(attributes.source).c_str())
diff --git a/services/oboeservice/AAudioServiceStreamShared.cpp b/services/oboeservice/AAudioServiceStreamShared.cpp
index 04fcd6d..bd58fa2 100644
--- a/services/oboeservice/AAudioServiceStreamShared.cpp
+++ b/services/oboeservice/AAudioServiceStreamShared.cpp
@@ -144,6 +144,9 @@
goto error;
}
+ // Use the sample rate of the endpoint as each shared stream should use its own SRC.
+ setSampleRate(endpoint->getSampleRate());
+
// Is the request compatible with the shared endpoint?
setFormat(configurationInput.getFormat());
if (getFormat() == AUDIO_FORMAT_DEFAULT) {
@@ -154,16 +157,6 @@
goto error;
}
- setSampleRate(configurationInput.getSampleRate());
- if (getSampleRate() == AAUDIO_UNSPECIFIED) {
- setSampleRate(endpoint->getSampleRate());
- } else if (getSampleRate() != endpoint->getSampleRate()) {
- ALOGD("%s() mSampleRate = %d, need %d",
- __func__, getSampleRate(), endpoint->getSampleRate());
- result = AAUDIO_ERROR_INVALID_RATE;
- goto error;
- }
-
setChannelMask(configurationInput.getChannelMask());
if (getChannelMask() == AAUDIO_UNSPECIFIED) {
setChannelMask(endpoint->getChannelMask());
diff --git a/services/oboeservice/Android.bp b/services/oboeservice/Android.bp
index 67b319f..8200ab5 100644
--- a/services/oboeservice/Android.bp
+++ b/services/oboeservice/Android.bp
@@ -89,6 +89,7 @@
"libaaudio_internal",
"libaudioclient",
"libaudioclient_aidl_conversion",
+ "libaudiofoundation",
"libaudioutils",
"libbase",
"libbinder",
diff --git a/services/oboeservice/fuzzer/Android.bp b/services/oboeservice/fuzzer/Android.bp
index 97825b3..8f672e1 100644
--- a/services/oboeservice/fuzzer/Android.bp
+++ b/services/oboeservice/fuzzer/Android.bp
@@ -44,6 +44,7 @@
"libaudioclient",
"libaudioclient_aidl_conversion",
"libaudioflinger",
+ "libaudiofoundation",
"libaudioutils",
"libbase",
"libbinder",
diff --git a/services/oboeservice/fuzzer/oboeservice_fuzzer.cpp b/services/oboeservice/fuzzer/oboeservice_fuzzer.cpp
index f5c2e6c..e80f51d 100644
--- a/services/oboeservice/fuzzer/oboeservice_fuzzer.cpp
+++ b/services/oboeservice/fuzzer/oboeservice_fuzzer.cpp
@@ -372,7 +372,8 @@
request.setAttributionSource(attributionSource);
request.setInService(fdp.ConsumeBool());
- request.getConfiguration().setDeviceId(fdp.ConsumeIntegral<int32_t>());
+ android::DeviceIdVector DeviceIdVector = { fdp.ConsumeIntegral<int32_t>() };
+ request.getConfiguration().setDeviceIds(DeviceIdVector);
request.getConfiguration().setSampleRate(fdp.ConsumeIntegral<int32_t>());
request.getConfiguration().setChannelMask((aaudio_channel_mask_t)(
fdp.ConsumeBool()
diff --git a/services/tuner/TunerFilter.cpp b/services/tuner/TunerFilter.cpp
index 84a2b4e..e393c44 100644
--- a/services/tuner/TunerFilter.cpp
+++ b/services/tuner/TunerFilter.cpp
@@ -335,26 +335,40 @@
/////////////// FilterCallback ///////////////////////
::ndk::ScopedAStatus TunerFilter::FilterCallback::onFilterStatus(DemuxFilterStatus status) {
- Mutex::Autolock _l(mCallbackLock);
- if (mTunerFilterCallback != nullptr) {
- mTunerFilterCallback->onFilterStatus(status);
+ shared_ptr<ITunerFilterCallback> cb(nullptr);
+ {
+ Mutex::Autolock _l(mCallbackLock);
+ cb = mTunerFilterCallback;
+ }
+ if (cb != nullptr) {
+ cb->onFilterStatus(status);
}
return ::ndk::ScopedAStatus::ok();
}
::ndk::ScopedAStatus TunerFilter::FilterCallback::onFilterEvent(
const vector<DemuxFilterEvent>& events) {
- Mutex::Autolock _l(mCallbackLock);
- if (mTunerFilterCallback != nullptr) {
- mTunerFilterCallback->onFilterEvent(events);
+ shared_ptr<ITunerFilterCallback> cb(nullptr);
+ {
+ Mutex::Autolock _l(mCallbackLock);
+ cb = mTunerFilterCallback;
+ }
+ if (cb != nullptr) {
+ cb->onFilterEvent(events);
}
return ::ndk::ScopedAStatus::ok();
}
void TunerFilter::FilterCallback::sendSharedFilterStatus(int32_t status) {
- Mutex::Autolock _l(mCallbackLock);
- if (mTunerFilterCallback != nullptr && mOriginalCallback != nullptr) {
- mTunerFilterCallback->onFilterStatus(static_cast<DemuxFilterStatus>(status));
+ shared_ptr<ITunerFilterCallback> cb(nullptr);
+ shared_ptr<ITunerFilterCallback> orig_cb(nullptr);
+ {
+ Mutex::Autolock _l(mCallbackLock);
+ cb = mTunerFilterCallback;
+ orig_cb = mOriginalCallback;
+ }
+ if (cb != nullptr && orig_cb != nullptr) {
+ cb->onFilterStatus(static_cast<DemuxFilterStatus>(status));
}
}
diff --git a/services/tuner/TunerHelper.cpp b/services/tuner/TunerHelper.cpp
index a03386f..8357a9e 100644
--- a/services/tuner/TunerHelper.cpp
+++ b/services/tuner/TunerHelper.cpp
@@ -73,7 +73,7 @@
// TODO: update Demux, Descrambler.
void TunerHelper::updateTunerResources(const vector<TunerFrontendInfo>& feInfos,
- const vector<int32_t>& lnbHandles) {
+ const vector<int64_t>& lnbHandles) {
::ndk::SpAIBinder binder(AServiceManager_waitForService("tv_tuner_resource_mgr"));
shared_ptr<ITunerResourceManager> tunerRM = ITunerResourceManager::fromBinder(binder);
if (tunerRM == nullptr) {
@@ -85,7 +85,7 @@
}
void TunerHelper::updateTunerResources(const vector<TunerFrontendInfo>& feInfos,
const vector<TunerDemuxInfo>& demuxInfos,
- const vector<int32_t>& lnbHandles) {
+ const vector<int64_t>& lnbHandles) {
::ndk::SpAIBinder binder(AServiceManager_waitForService("tv_tuner_resource_mgr"));
shared_ptr<ITunerResourceManager> tunerRM = ITunerResourceManager::fromBinder(binder);
if (tunerRM == nullptr) {
@@ -101,13 +101,22 @@
}
// TODO: create a map between resource id and handles.
-int TunerHelper::getResourceIdFromHandle(int resourceHandle, int /*type*/) {
- return (resourceHandle & 0x00ff0000) >> 16;
+int TunerHelper::getResourceIdFromHandle(long resourceHandle, int /*type*/) {
+ return (int)((resourceHandle >> RESOURCE_ID_SHIFT) & RESOURCE_ID_MASK);
}
-int TunerHelper::getResourceHandleFromId(int id, int resourceType) {
+/**
+ * Generate resource handle for resourceType and id
+ * Resource Handle Allotment : 64 bits (long)
+ * 8 bits - resourceType
+ * 32 bits - id
+ * 24 bits - resourceRequestCount
+ */
+long TunerHelper::getResourceHandleFromId(int id, int resourceType) {
// TODO: build up randomly generated id to handle mapping
- return (resourceType & 0x000000ff) << 24 | (id << 16) | (sResourceRequestCount++ & 0xffff);
+ return static_cast<int64_t>(resourceType & RESOURCE_TYPE_MASK) << RESOURCE_TYPE_SHIFT |
+ static_cast<int64_t>(id & RESOURCE_ID_MASK) << RESOURCE_ID_SHIFT |
+ (sResourceRequestCount++ & RESOURCE_COUNT_MASK);
}
} // namespace tuner
diff --git a/services/tuner/TunerHelper.h b/services/tuner/TunerHelper.h
index 65a9b0b..74e1662 100644
--- a/services/tuner/TunerHelper.h
+++ b/services/tuner/TunerHelper.h
@@ -56,17 +56,23 @@
// TODO: update Demux, Descrambler.
static void updateTunerResources(const vector<TunerFrontendInfo>& feInfos,
- const vector<int32_t>& lnbHandles);
+ const vector<int64_t>& lnbHandles);
static void updateTunerResources(const vector<TunerFrontendInfo>& feInfos,
const vector<TunerDemuxInfo>& demuxInfos,
- const vector<int32_t>& lnbHandles);
+ const vector<int64_t>& lnbHandles);
// TODO: create a map between resource id and handles.
- static int getResourceIdFromHandle(int resourceHandle, int type);
- static int getResourceHandleFromId(int id, int resourceType);
+ static int getResourceIdFromHandle(long resourceHandle, int type);
+ static long getResourceHandleFromId(int id, int resourceType);
private:
static int32_t sResourceRequestCount;
+
+ static constexpr uint32_t RESOURCE_ID_SHIFT = 24;
+ static constexpr uint32_t RESOURCE_TYPE_SHIFT = 56;
+ static constexpr uint32_t RESOURCE_COUNT_MASK = 0xffffff;
+ static constexpr uint32_t RESOURCE_ID_MASK = 0xffffffff;
+ static constexpr uint32_t RESOURCE_TYPE_MASK = 0xff;
};
} // namespace tuner
diff --git a/services/tuner/TunerService.cpp b/services/tuner/TunerService.cpp
index 9a1e8bb..8cf84e2 100644
--- a/services/tuner/TunerService.cpp
+++ b/services/tuner/TunerService.cpp
@@ -82,7 +82,7 @@
return AServiceManager_addService(tunerService->asBinder().get(), getServiceName());
}
-::ndk::ScopedAStatus TunerService::openDemux(int32_t in_demuxHandle,
+::ndk::ScopedAStatus TunerService::openDemux(int64_t in_demuxHandle,
shared_ptr<ITunerDemux>* _aidl_return) {
ALOGV("openDemux");
shared_ptr<IDemux> demux;
@@ -116,7 +116,7 @@
}
}
-::ndk::ScopedAStatus TunerService::getDemuxInfo(int32_t in_demuxHandle, DemuxInfo* _aidl_return) {
+::ndk::ScopedAStatus TunerService::getDemuxInfo(int64_t in_demuxHandle, DemuxInfo* _aidl_return) {
if (mTunerVersion <= TUNER_HAL_VERSION_2_0) {
return ::ndk::ScopedAStatus::fromServiceSpecificError(
static_cast<int32_t>(Result::UNAVAILABLE));
@@ -169,7 +169,7 @@
return mTuner->getFrontendInfo(id, _aidl_return);
}
-::ndk::ScopedAStatus TunerService::openFrontend(int32_t frontendHandle,
+::ndk::ScopedAStatus TunerService::openFrontend(int64_t frontendHandle,
shared_ptr<ITunerFrontend>* _aidl_return) {
int id = TunerHelper::getResourceIdFromHandle(frontendHandle, FRONTEND);
shared_ptr<IFrontend> frontend;
@@ -181,7 +181,7 @@
return status;
}
-::ndk::ScopedAStatus TunerService::openLnb(int lnbHandle, shared_ptr<ITunerLnb>* _aidl_return) {
+::ndk::ScopedAStatus TunerService::openLnb(int64_t lnbHandle, shared_ptr<ITunerLnb>* _aidl_return) {
shared_ptr<ILnb> lnb;
int id = TunerHelper::getResourceIdFromHandle(lnbHandle, LNB);
auto status = mTuner->openLnbById(id, &lnb);
@@ -204,7 +204,7 @@
return ::ndk::ScopedAStatus::ok();
}
-::ndk::ScopedAStatus TunerService::openDescrambler(int32_t /*descramblerHandle*/,
+::ndk::ScopedAStatus TunerService::openDescrambler(int64_t /*descramblerHandle*/,
shared_ptr<ITunerDescrambler>* _aidl_return) {
shared_ptr<IDescrambler> descrambler;
// int id = TunerHelper::getResourceIdFromHandle(descramblerHandle, DESCRAMBLER);
@@ -310,7 +310,7 @@
continue;
}
TunerFrontendInfo tunerFrontendInfo{
- .handle = TunerHelper::getResourceHandleFromId((int)ids[i], FRONTEND),
+ .handle = TunerHelper::getResourceHandleFromId(ids[i], FRONTEND),
.type = static_cast<int>(frontendInfo.type),
.exclusiveGroupId = frontendInfo.exclusiveGroupId,
};
@@ -336,18 +336,16 @@
for (int i = 0; i < ids.size(); i++) {
DemuxInfo demuxInfo;
mTuner->getDemuxInfo(ids[i], &demuxInfo);
- TunerDemuxInfo tunerDemuxInfo{
- .handle = TunerHelper::getResourceHandleFromId((int)ids[i], DEMUX),
- .filterTypes = static_cast<int>(demuxInfo.filterTypes)
- };
+ TunerDemuxInfo tunerDemuxInfo{.handle = TunerHelper::getResourceHandleFromId(ids[i], DEMUX),
+ .filterTypes = static_cast<int>(demuxInfo.filterTypes)};
infos.push_back(tunerDemuxInfo);
}
return infos;
}
-vector<int32_t> TunerService::getTRMLnbHandles() {
- vector<int32_t> lnbHandles;
+vector<int64_t> TunerService::getTRMLnbHandles() {
+ vector<int64_t> lnbHandles;
if (mTuner != nullptr) {
vector<int32_t> lnbIds;
auto res = mTuner->getLnbIds(&lnbIds);
diff --git a/services/tuner/TunerService.h b/services/tuner/TunerService.h
index 190ccd4..07b414e 100644
--- a/services/tuner/TunerService.h
+++ b/services/tuner/TunerService.h
@@ -61,20 +61,20 @@
virtual ~TunerService();
::ndk::ScopedAStatus getFrontendIds(vector<int32_t>* out_ids) override;
- ::ndk::ScopedAStatus getFrontendInfo(int32_t in_frontendHandle,
+ ::ndk::ScopedAStatus getFrontendInfo(int32_t in_frontendId,
FrontendInfo* _aidl_return) override;
- ::ndk::ScopedAStatus openFrontend(int32_t in_frontendHandle,
+ ::ndk::ScopedAStatus openFrontend(int64_t in_frontendHandle,
shared_ptr<ITunerFrontend>* _aidl_return) override;
- ::ndk::ScopedAStatus openLnb(int32_t in_lnbHandle,
+ ::ndk::ScopedAStatus openLnb(int64_t in_lnbHandle,
shared_ptr<ITunerLnb>* _aidl_return) override;
::ndk::ScopedAStatus openLnbByName(const string& in_lnbName,
shared_ptr<ITunerLnb>* _aidl_return) override;
- ::ndk::ScopedAStatus openDemux(int32_t in_demuxHandle,
+ ::ndk::ScopedAStatus openDemux(int64_t in_demuxHandle,
shared_ptr<ITunerDemux>* _aidl_return) override;
::ndk::ScopedAStatus getDemuxCaps(DemuxCapabilities* _aidl_return) override;
- ::ndk::ScopedAStatus getDemuxInfo(int32_t in_demuxHandle, DemuxInfo* _aidl_return) override;
+ ::ndk::ScopedAStatus getDemuxInfo(int64_t in_demuxHandle, DemuxInfo* _aidl_return) override;
::ndk::ScopedAStatus getDemuxInfoList(vector<DemuxInfo>* _aidl_return) override;
- ::ndk::ScopedAStatus openDescrambler(int32_t in_descramblerHandle,
+ ::ndk::ScopedAStatus openDescrambler(int64_t in_descramblerHandle,
shared_ptr<ITunerDescrambler>* _aidl_return) override;
::ndk::ScopedAStatus getTunerHalVersion(int32_t* _aidl_return) override;
::ndk::ScopedAStatus openSharedFilter(const string& in_filterToken,
@@ -94,7 +94,7 @@
void updateTunerResources();
vector<TunerFrontendInfo> getTRMFrontendInfos();
vector<TunerDemuxInfo> getTRMDemuxInfos();
- vector<int32_t> getTRMLnbHandles();
+ vector<int64_t> getTRMLnbHandles();
shared_ptr<ITuner> mTuner;
int mTunerVersion = TUNER_HAL_VERSION_UNKNOWN;
diff --git a/services/tuner/aidl/android/media/tv/tuner/ITunerService.aidl b/services/tuner/aidl/android/media/tv/tuner/ITunerService.aidl
index 932133e..0d23817 100644
--- a/services/tuner/aidl/android/media/tv/tuner/ITunerService.aidl
+++ b/services/tuner/aidl/android/media/tv/tuner/ITunerService.aidl
@@ -54,7 +54,7 @@
* @param frontendHandle the handle of the frontend granted by TRM.
* @return the aidl interface of the frontend.
*/
- ITunerFrontend openFrontend(in int frontendHandle);
+ ITunerFrontend openFrontend(in long frontendHandle);
/**
* Open a new interface of ITunerLnb given a lnbHandle.
@@ -62,7 +62,7 @@
* @param lnbHandle the handle of the LNB granted by TRM.
* @return a newly created ITunerLnb interface.
*/
- ITunerLnb openLnb(in int lnbHandle);
+ ITunerLnb openLnb(in long lnbHandle);
/**
* Open a new interface of ITunerLnb given a LNB name.
@@ -75,7 +75,7 @@
/**
* Create a new instance of Demux.
*/
- ITunerDemux openDemux(in int demuxHandle);
+ ITunerDemux openDemux(in long demuxHandle);
/**
* Retrieve the supported filter main types
@@ -83,7 +83,7 @@
* @param demuxHandle the handle of the demux to query demux info for
* @return the demux info
*/
- DemuxInfo getDemuxInfo(in int demuxHandle);
+ DemuxInfo getDemuxInfo(in long demuxHandle);
/**
* Retrieve the list of demux info for all the demuxes on the system
@@ -104,7 +104,7 @@
* @param descramblerHandle the handle of the descrambler granted by TRM.
* @return a newly created ITunerDescrambler interface.
*/
- ITunerDescrambler openDescrambler(in int descramblerHandle);
+ ITunerDescrambler openDescrambler(in long descramblerHandle);
/**
* Get an integer that carries the Tuner HIDL version. The high 16 bits are the
diff --git a/services/tuner/hidl/TunerHidlService.cpp b/services/tuner/hidl/TunerHidlService.cpp
index 6bc36be..1b6b032 100644
--- a/services/tuner/hidl/TunerHidlService.cpp
+++ b/services/tuner/hidl/TunerHidlService.cpp
@@ -101,7 +101,7 @@
return AServiceManager_addService(tunerService->asBinder().get(), getServiceName());
}
-::ndk::ScopedAStatus TunerHidlService::openDemux(int32_t /* in_demuxHandle */,
+::ndk::ScopedAStatus TunerHidlService::openDemux(int64_t /* in_demuxHandle */,
shared_ptr<ITunerDemux>* _aidl_return) {
ALOGV("openDemux");
HidlResult res;
@@ -123,7 +123,7 @@
return ::ndk::ScopedAStatus::fromServiceSpecificError(static_cast<int32_t>(res));
}
-::ndk::ScopedAStatus TunerHidlService::getDemuxInfo(int32_t /* in_demuxHandle */,
+::ndk::ScopedAStatus TunerHidlService::getDemuxInfo(int64_t /* in_demuxHandle */,
DemuxInfo* /* _aidl_return */) {
ALOGE("getDemuxInfo is not supported");
return ::ndk::ScopedAStatus::fromServiceSpecificError(
@@ -195,7 +195,7 @@
return ::ndk::ScopedAStatus::ok();
}
-::ndk::ScopedAStatus TunerHidlService::openFrontend(int32_t frontendHandle,
+::ndk::ScopedAStatus TunerHidlService::openFrontend(int64_t frontendHandle,
shared_ptr<ITunerFrontend>* _aidl_return) {
HidlResult status;
sp<HidlIFrontend> frontend;
@@ -221,7 +221,8 @@
return ::ndk::ScopedAStatus::ok();
}
-::ndk::ScopedAStatus TunerHidlService::openLnb(int lnbHandle, shared_ptr<ITunerLnb>* _aidl_return) {
+::ndk::ScopedAStatus TunerHidlService::openLnb(int64_t lnbHandle,
+ shared_ptr<ITunerLnb>* _aidl_return) {
HidlResult status;
sp<HidlILnb> lnb;
int id = TunerHelper::getResourceIdFromHandle(lnbHandle, LNB);
@@ -256,7 +257,7 @@
}
::ndk::ScopedAStatus TunerHidlService::openDescrambler(
- int32_t /*descramblerHandle*/, shared_ptr<ITunerDescrambler>* _aidl_return) {
+ int64_t /*descramblerHandle*/, shared_ptr<ITunerDescrambler>* _aidl_return) {
HidlResult status;
sp<HidlIDescrambler> descrambler;
//int id = TunerHelper::getResourceIdFromHandle(descramblerHandle, DESCRAMBLER);
@@ -410,8 +411,8 @@
return infos;
}
-vector<int32_t> TunerHidlService::getTRMLnbHandles() {
- vector<int32_t> lnbHandles;
+vector<int64_t> TunerHidlService::getTRMLnbHandles() {
+ vector<int64_t> lnbHandles;
if (mTuner != nullptr) {
HidlResult res;
vector<HidlLnbId> lnbIds;
diff --git a/services/tuner/hidl/TunerHidlService.h b/services/tuner/hidl/TunerHidlService.h
index 526c5e6..1973a77 100644
--- a/services/tuner/hidl/TunerHidlService.h
+++ b/services/tuner/hidl/TunerHidlService.h
@@ -73,20 +73,19 @@
virtual ~TunerHidlService();
::ndk::ScopedAStatus getFrontendIds(vector<int32_t>* out_ids) override;
- ::ndk::ScopedAStatus getFrontendInfo(int32_t in_frontendHandle,
- FrontendInfo* _aidl_return) override;
- ::ndk::ScopedAStatus openFrontend(int32_t in_frontendHandle,
+ ::ndk::ScopedAStatus getFrontendInfo(int32_t in_id, FrontendInfo* _aidl_return) override;
+ ::ndk::ScopedAStatus openFrontend(int64_t in_frontendHandle,
shared_ptr<ITunerFrontend>* _aidl_return) override;
- ::ndk::ScopedAStatus openLnb(int32_t in_lnbHandle,
+ ::ndk::ScopedAStatus openLnb(int64_t in_lnbHandle,
shared_ptr<ITunerLnb>* _aidl_return) override;
::ndk::ScopedAStatus openLnbByName(const std::string& in_lnbName,
shared_ptr<ITunerLnb>* _aidl_return) override;
- ::ndk::ScopedAStatus openDemux(int32_t in_demuxHandle,
+ ::ndk::ScopedAStatus openDemux(int64_t in_demuxHandle,
shared_ptr<ITunerDemux>* _aidl_return) override;
::ndk::ScopedAStatus getDemuxCaps(DemuxCapabilities* _aidl_return) override;
- ::ndk::ScopedAStatus getDemuxInfo(int32_t in_demuxHandle, DemuxInfo* _aidl_return) override;
+ ::ndk::ScopedAStatus getDemuxInfo(int64_t in_demuxHandle, DemuxInfo* _aidl_return) override;
::ndk::ScopedAStatus getDemuxInfoList(vector<DemuxInfo>* _aidl_return) override;
- ::ndk::ScopedAStatus openDescrambler(int32_t in_descramblerHandle,
+ ::ndk::ScopedAStatus openDescrambler(int64_t in_descramblerHandle,
shared_ptr<ITunerDescrambler>* _aidl_return) override;
::ndk::ScopedAStatus getTunerHalVersion(int32_t* _aidl_return) override;
::ndk::ScopedAStatus openSharedFilter(const string& in_filterToken,
@@ -106,7 +105,7 @@
private:
void updateTunerResources();
vector<TunerFrontendInfo> getTRMFrontendInfos();
- vector<int32_t> getTRMLnbHandles();
+ vector<int64_t> getTRMLnbHandles();
HidlResult getHidlFrontendIds(hidl_vec<HidlFrontendId>& ids);
HidlResult getHidlFrontendInfo(const int id, HidlFrontendInfo& info);
DemuxCapabilities getAidlDemuxCaps(const HidlDemuxCapabilities& caps);