Merge "Appops refcount fixup" into main
diff --git a/camera/Camera.cpp b/camera/Camera.cpp
index d90f7c9..0bc735f 100644
--- a/camera/Camera.cpp
+++ b/camera/Camera.cpp
@@ -99,23 +99,21 @@
return c->unlock();
}
-// pass the buffered IGraphicBufferProducer to the camera service
-status_t Camera::setPreviewTarget(const sp<IGraphicBufferProducer>& bufferProducer)
-{
- ALOGV("setPreviewTarget(%p)", bufferProducer.get());
- sp <::android::hardware::ICamera> c = mCamera;
+// pass the Surface to the camera service
+status_t Camera::setPreviewTarget(const sp<SurfaceType>& target) {
+ ALOGV("setPreviewTarget(%p)", target.get());
+ sp<::android::hardware::ICamera> c = mCamera;
if (c == 0) return NO_INIT;
- ALOGD_IF(bufferProducer == 0, "app passed NULL surface");
- return c->setPreviewTarget(bufferProducer);
+ ALOGD_IF(target == 0, "app passed NULL surface");
+ return c->setPreviewTarget(target);
}
-status_t Camera::setVideoTarget(const sp<IGraphicBufferProducer>& bufferProducer)
-{
- ALOGV("setVideoTarget(%p)", bufferProducer.get());
- sp <::android::hardware::ICamera> c = mCamera;
+status_t Camera::setVideoTarget(const sp<SurfaceType>& target) {
+ ALOGV("setVideoTarget(%p)", target.get());
+ sp<::android::hardware::ICamera> c = mCamera;
if (c == 0) return NO_INIT;
- ALOGD_IF(bufferProducer == 0, "app passed NULL video surface");
- return c->setVideoTarget(bufferProducer);
+ ALOGD_IF(target == 0, "app passed NULL video surface");
+ return c->setVideoTarget(target);
}
// start preview mode
@@ -272,12 +270,10 @@
c->setPreviewCallbackFlag(flag);
}
-status_t Camera::setPreviewCallbackTarget(
- const sp<IGraphicBufferProducer>& callbackProducer)
-{
- sp <::android::hardware::ICamera> c = mCamera;
+status_t Camera::setPreviewCallbackTarget(const sp<SurfaceType>& target) {
+ sp<::android::hardware::ICamera> c = mCamera;
if (c == 0) return NO_INIT;
- return c->setPreviewCallbackTarget(callbackProducer);
+ return c->setPreviewCallbackTarget(target);
}
status_t Camera::setAudioRestriction(int32_t mode)
diff --git a/camera/ICamera.cpp b/camera/ICamera.cpp
index b83edf7..0b811d2 100644
--- a/camera/ICamera.cpp
+++ b/camera/ICamera.cpp
@@ -17,16 +17,16 @@
//#define LOG_NDEBUG 0
#define LOG_TAG "ICamera"
-#include <utils/Log.h>
-#include <stdint.h>
-#include <sys/types.h>
-#include <binder/Parcel.h>
-#include <camera/CameraUtils.h>
#include <android/hardware/ICamera.h>
#include <android/hardware/ICameraClient.h>
-#include <gui/IGraphicBufferProducer.h>
+#include <binder/Parcel.h>
+#include <camera/CameraUtils.h>
#include <gui/Surface.h>
+#include <gui/view/Surface.h>
#include <media/hardware/HardwareAPI.h>
+#include <stdint.h>
+#include <sys/types.h>
+#include <utils/Log.h>
namespace android {
namespace hardware {
@@ -34,8 +34,14 @@
enum {
DISCONNECT = IBinder::FIRST_CALL_TRANSACTION,
SET_PREVIEW_TARGET,
+#if WB_LIBCAMERASERVICE_WITH_DEPENDENCIES
+ SET_PREVIEW_TARGET_SURFACE,
+#endif
SET_PREVIEW_CALLBACK_FLAG,
SET_PREVIEW_CALLBACK_TARGET,
+#if WB_LIBCAMERASERVICE_WITH_DEPENDENCIES
+ SET_PREVIEW_CALLBACK_TARGET_SURFACE,
+#endif
START_PREVIEW,
STOP_PREVIEW,
AUTO_FOCUS,
@@ -54,6 +60,9 @@
RELEASE_RECORDING_FRAME,
SET_VIDEO_BUFFER_MODE,
SET_VIDEO_BUFFER_TARGET,
+#if WB_LIBCAMERASERVICE_WITH_DEPENDENCIES
+ SET_VIDEO_BUFFER_TARGET_SURFACE,
+#endif
RELEASE_RECORDING_FRAME_HANDLE,
RELEASE_RECORDING_FRAME_HANDLE_BATCH,
SET_AUDIO_RESTRICTION,
@@ -79,15 +88,20 @@
return binder::Status::ok();
}
- // pass the buffered IGraphicBufferProducer to the camera service
- status_t setPreviewTarget(const sp<IGraphicBufferProducer>& bufferProducer)
- {
+ // pass the Surface to the camera service
+ status_t setPreviewTarget(const sp<SurfaceType>& target) {
ALOGV("setPreviewTarget");
Parcel data, reply;
data.writeInterfaceToken(ICamera::getInterfaceDescriptor());
- sp<IBinder> b(IInterface::asBinder(bufferProducer));
+#if WB_LIBCAMERASERVICE_WITH_DEPENDENCIES
+ view::Surface viewSurfaceProducer = view::Surface::fromSurface(target);
+ data.writeParcelable(viewSurfaceProducer);
+ remote()->transact(SET_PREVIEW_TARGET_SURFACE, data, &reply);
+#else
+ sp<IBinder> b(IInterface::asBinder(target));
data.writeStrongBinder(b);
remote()->transact(SET_PREVIEW_TARGET, data, &reply);
+#endif
return reply.readInt32();
}
@@ -102,15 +116,19 @@
remote()->transact(SET_PREVIEW_CALLBACK_FLAG, data, &reply);
}
- status_t setPreviewCallbackTarget(
- const sp<IGraphicBufferProducer>& callbackProducer)
- {
+ status_t setPreviewCallbackTarget(const sp<SurfaceType>& target) {
ALOGV("setPreviewCallbackTarget");
Parcel data, reply;
data.writeInterfaceToken(ICamera::getInterfaceDescriptor());
- sp<IBinder> b(IInterface::asBinder(callbackProducer));
+#if WB_LIBCAMERASERVICE_WITH_DEPENDENCIES
+ view::Surface viewCallbackProducer = view::Surface::fromSurface(target);
+ data.writeParcelable(viewCallbackProducer);
+ remote()->transact(SET_PREVIEW_CALLBACK_TARGET_SURFACE, data, &reply);
+#else
+ sp<IBinder> b(IInterface::asBinder(target));
data.writeStrongBinder(b);
remote()->transact(SET_PREVIEW_CALLBACK_TARGET, data, &reply);
+#endif
return reply.readInt32();
}
@@ -326,14 +344,19 @@
return reply.readInt32();
}
- status_t setVideoTarget(const sp<IGraphicBufferProducer>& bufferProducer)
- {
+ status_t setVideoTarget(const sp<SurfaceType>& target) {
ALOGV("setVideoTarget");
Parcel data, reply;
data.writeInterfaceToken(ICamera::getInterfaceDescriptor());
- sp<IBinder> b(IInterface::asBinder(bufferProducer));
+#if WB_LIBCAMERASERVICE_WITH_DEPENDENCIES
+ view::Surface viewSurfaceProducer = view::Surface::fromSurface(target);
+ data.writeParcelable(viewSurfaceProducer);
+ remote()->transact(SET_VIDEO_BUFFER_TARGET_SURFACE, data, &reply);
+#else
+ sp<IBinder> b(IInterface::asBinder(target));
data.writeStrongBinder(b);
remote()->transact(SET_VIDEO_BUFFER_TARGET, data, &reply);
+#endif
return reply.readInt32();
}
};
@@ -358,9 +381,25 @@
CHECK_INTERFACE(ICamera, data, reply);
sp<IGraphicBufferProducer> st =
interface_cast<IGraphicBufferProducer>(data.readStrongBinder());
+#if WB_LIBCAMERASERVICE_WITH_DEPENDENCIES
+ sp<Surface> sp = new Surface(st);
+ reply->writeInt32(setPreviewTarget(sp));
+#else
+ reply->writeInt32(setPreviewTarget(st));
+#endif
+ return NO_ERROR;
+ } break;
+#if WB_LIBCAMERASERVICE_WITH_DEPENDENCIES
+ case SET_PREVIEW_TARGET_SURFACE: {
+ ALOGV("SET_PREVIEW_TARGET_SURFACE");
+ CHECK_INTERFACE(ICamera, data, reply);
+ view::Surface viewSurface;
+ data.readParcelable(&viewSurface);
+ sp<Surface> st = viewSurface.toSurface();
reply->writeInt32(setPreviewTarget(st));
return NO_ERROR;
} break;
+#endif
case SET_PREVIEW_CALLBACK_FLAG: {
ALOGV("SET_PREVIEW_CALLBACK_TYPE");
CHECK_INTERFACE(ICamera, data, reply);
@@ -373,9 +412,25 @@
CHECK_INTERFACE(ICamera, data, reply);
sp<IGraphicBufferProducer> cp =
interface_cast<IGraphicBufferProducer>(data.readStrongBinder());
+#if WB_LIBCAMERASERVICE_WITH_DEPENDENCIES
+ sp<Surface> sp = new Surface(cp);
+ reply->writeInt32(setPreviewCallbackTarget(sp));
+#else
+ reply->writeInt32(setPreviewCallbackTarget(cp));
+#endif
+ return NO_ERROR;
+ }
+#if WB_LIBCAMERASERVICE_WITH_DEPENDENCIES
+ case SET_PREVIEW_CALLBACK_TARGET_SURFACE: {
+ ALOGV("SET_PREVIEW_CALLBACK_TARGET_SURFACE");
+ CHECK_INTERFACE(ICamera, data, reply);
+ view::Surface viewSurface;
+ data.readParcelable(&viewSurface);
+ sp<Surface> cp = viewSurface.toSurface();
reply->writeInt32(setPreviewCallbackTarget(cp));
return NO_ERROR;
}
+#endif
case START_PREVIEW: {
ALOGV("START_PREVIEW");
CHECK_INTERFACE(ICamera, data, reply);
@@ -508,9 +563,25 @@
CHECK_INTERFACE(ICamera, data, reply);
sp<IGraphicBufferProducer> st =
interface_cast<IGraphicBufferProducer>(data.readStrongBinder());
+#if WB_LIBCAMERASERVICE_WITH_DEPENDENCIES
+ sp<Surface> sp = new Surface(st);
+ reply->writeInt32(setVideoTarget(sp));
+#else
reply->writeInt32(setVideoTarget(st));
+#endif
return NO_ERROR;
} break;
+#if WB_LIBCAMERASERVICE_WITH_DEPENDENCIES
+ case SET_VIDEO_BUFFER_TARGET_SURFACE: {
+ ALOGV("SET_VIDEO_BUFFER_TARGET_SURFACE");
+ CHECK_INTERFACE(ICamera, data, reply);
+ view::Surface viewSurface;
+ data.readParcelable(&viewSurface);
+ sp<Surface> cp = viewSurface.toSurface();
+ reply->writeInt32(setVideoTarget(cp));
+ return NO_ERROR;
+ } break;
+#endif
case SET_AUDIO_RESTRICTION: {
CHECK_INTERFACE(ICamera, data, reply);
int32_t mode = data.readInt32();
diff --git a/camera/camera2/OutputConfiguration.cpp b/camera/camera2/OutputConfiguration.cpp
index 2d1af32..daa2656 100644
--- a/camera/camera2/OutputConfiguration.cpp
+++ b/camera/camera2/OutputConfiguration.cpp
@@ -22,12 +22,14 @@
#include <camera/camera2/OutputConfiguration.h>
#include <camera/StringUtils.h>
+#include <com_android_internal_camera_flags.h>
#include <binder/Parcel.h>
#include <gui/view/Surface.h>
#include <system/camera_metadata.h>
#include <system/graphics.h>
#include <utils/String8.h>
+namespace flags = com::android::internal::camera::flags;
namespace android {
@@ -95,7 +97,27 @@
return mTimestampBase;
}
-int OutputConfiguration::getMirrorMode() const {
+int OutputConfiguration::getMirrorMode(sp<IGraphicBufferProducer> surface) const {
+ if (!flags::mirror_mode_shared_surfaces()) {
+ return mMirrorMode;
+ }
+
+ if (mGbps.size() != mMirrorModeForProducers.size()) {
+ ALOGE("%s: mGbps size doesn't match mMirrorModeForProducers: %zu vs %zu",
+ __FUNCTION__, mGbps.size(), mMirrorModeForProducers.size());
+ return mMirrorMode;
+ }
+
+ // Use per-producer mirror mode if available.
+ for (size_t i = 0; i < mGbps.size(); i++) {
+ if (mGbps[i] == surface) {
+ return mMirrorModeForProducers[i];
+ }
+ }
+
+    // For a surface that doesn't belong to this output configuration, use
+    // mMirrorMode as the default.
+ ALOGW("%s: Surface doesn't belong to this OutputConfiguration!", __FUNCTION__);
return mMirrorMode;
}
@@ -251,6 +273,12 @@
return err;
}
+ std::vector<int> mirrorModeForProducers;
+ if ((err = parcel->readInt32Vector(&mirrorModeForProducers)) != OK) {
+ ALOGE("%s: Failed to read mirroring mode for surfaces from parcel", __FUNCTION__);
+ return err;
+ }
+
int useReadoutTimestamp = 0;
if ((err = parcel->readInt32(&useReadoutTimestamp)) != OK) {
ALOGE("%s: Failed to read useReadoutTimestamp flag from parcel", __FUNCTION__);
@@ -286,6 +314,7 @@
mStreamUseCase = streamUseCase;
mTimestampBase = timestampBase;
mMirrorMode = mirrorMode;
+ mMirrorModeForProducers = std::move(mirrorModeForProducers);
mUseReadoutTimestamp = useReadoutTimestamp != 0;
for (auto& surface : surfaceShims) {
ALOGV("%s: OutputConfiguration: %p, name %s", __FUNCTION__,
@@ -327,6 +356,7 @@
mStreamUseCase = ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_DEFAULT;
mTimestampBase = TIMESTAMP_BASE_DEFAULT;
mMirrorMode = MIRROR_MODE_AUTO;
+ mMirrorModeForProducers.push_back(mMirrorMode);
mUseReadoutTimestamp = false;
mFormat = HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED;
mDataspace = 0;
@@ -344,9 +374,9 @@
mColorSpace(ANDROID_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP_UNSPECIFIED),
mStreamUseCase(ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_DEFAULT),
mTimestampBase(TIMESTAMP_BASE_DEFAULT),
- mMirrorMode(MIRROR_MODE_AUTO), mUseReadoutTimestamp(false),
- mFormat(HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED), mDataspace(0),
- mUsage(0) { }
+ mMirrorMode(MIRROR_MODE_AUTO), mMirrorModeForProducers(gbps.size(), mMirrorMode),
+ mUseReadoutTimestamp(false), mFormat(HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED),
+ mDataspace(0), mUsage(0) { }
status_t OutputConfiguration::writeToParcel(android::Parcel* parcel) const {
@@ -409,6 +439,9 @@
err = parcel->writeInt32(mMirrorMode);
if (err != OK) return err;
+ err = parcel->writeInt32Vector(mMirrorModeForProducers);
+ if (err != OK) return err;
+
err = parcel->writeInt32(mUseReadoutTimestamp ? 1 : 0);
if (err != OK) return err;
@@ -438,6 +471,20 @@
return true;
}
+template <typename T>
+static bool simpleVectorsLessThan(T first, T second) {
+ if (first.size() != second.size()) {
+ return first.size() < second.size();
+ }
+
+ for (size_t i = 0; i < first.size(); i++) {
+ if (first[i] != second[i]) {
+ return first[i] < second[i];
+ }
+ }
+ return false;
+}
+
bool OutputConfiguration::gbpsEqual(const OutputConfiguration& other) const {
const std::vector<sp<IGraphicBufferProducer> >& otherGbps =
other.getGraphicBufferProducers();
@@ -449,20 +496,20 @@
return simpleVectorsEqual(othersensorPixelModesUsed, mSensorPixelModesUsed);
}
+bool OutputConfiguration::mirrorModesEqual(const OutputConfiguration& other) const {
+ const std::vector<int>& otherMirrorModes = other.getMirrorModes();
+ return simpleVectorsEqual(otherMirrorModes, mMirrorModeForProducers);
+
+}
+
bool OutputConfiguration::sensorPixelModesUsedLessThan(const OutputConfiguration& other) const {
const std::vector<int32_t>& spms = other.getSensorPixelModesUsed();
+ return simpleVectorsLessThan(mSensorPixelModesUsed, spms);
+}
- if (mSensorPixelModesUsed.size() != spms.size()) {
- return mSensorPixelModesUsed.size() < spms.size();
- }
-
- for (size_t i = 0; i < spms.size(); i++) {
- if (mSensorPixelModesUsed[i] != spms[i]) {
- return mSensorPixelModesUsed[i] < spms[i];
- }
- }
-
- return false;
+bool OutputConfiguration::mirrorModesLessThan(const OutputConfiguration& other) const {
+ const std::vector<int>& otherMirrorModes = other.getMirrorModes();
+ return simpleVectorsLessThan(mMirrorModeForProducers, otherMirrorModes);
}
bool OutputConfiguration::gbpsLessThan(const OutputConfiguration& other) const {
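
Reviewer note: the new simpleVectorsLessThan helper gives the per-producer mirror-mode vector (and sensorPixelModesUsed) a deterministic lexicographic ordering: shorter vectors sort first, otherwise the first differing element decides, and equal vectors are not less than each other. A minimal standalone sketch of that ordering with plain std::vector<int>, kept separate from the real OutputConfiguration type:

#include <cassert>
#include <cstddef>
#include <vector>

// Same shape as the helper added above: compare sizes first, then the first
// element that differs; equal vectors compare as "not less than".
template <typename T>
static bool simpleVectorsLessThan(T first, T second) {
    if (first.size() != second.size()) {
        return first.size() < second.size();
    }
    for (size_t i = 0; i < first.size(); i++) {
        if (first[i] != second[i]) {
            return first[i] < second[i];
        }
    }
    return false;
}

int main() {
    std::vector<int> a{0, 1};
    std::vector<int> b{0, 2};
    std::vector<int> c{0, 1, 1};
    assert(simpleVectorsLessThan(a, b));   // differs at index 1
    assert(simpleVectorsLessThan(a, c));   // shorter vector sorts first
    assert(!simpleVectorsLessThan(a, a));  // strict weak ordering: not less than itself
    return 0;
}
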
diff --git a/camera/camera_platform.aconfig b/camera/camera_platform.aconfig
index 8e53ca0..323b23a 100644
--- a/camera/camera_platform.aconfig
+++ b/camera/camera_platform.aconfig
@@ -178,3 +178,27 @@
description: "Pass the full AttributionSource chain to PermissionChecker"
bug: "190657833"
}
+
+flag {
+ namespace: "camera_platform"
+ name: "mirror_mode_shared_surfaces"
+ is_exported: true
+ description: "Support setting and getting mirror mode for shared surfaces"
+ bug: "298899993"
+}
+
+flag {
+ namespace: "camera_platform"
+ is_exported: true
+ name: "multiresolution_imagereader_usage_public"
+ description: "Make constructor for MultiResolutionImageReader with usage public"
+ bug: "338621560"
+}
+
+flag {
+ namespace: "camera_platform"
+ name: "color_temperature"
+ description: "Add keys to manually set color temperature and color tint"
+ bug: "359409044"
+}
+
diff --git a/camera/include/camera/Camera.h b/camera/include/camera/Camera.h
index 646b139..fa84b4e 100644
--- a/camera/include/camera/Camera.h
+++ b/camera/include/camera/Camera.h
@@ -21,13 +21,18 @@
#include <android/hardware/ICameraService.h>
-#include <gui/IGraphicBufferProducer.h>
-#include <system/camera.h>
+#include <camera/CameraBase.h>
+#include <camera/CameraUtils.h>
#include <camera/ICameraRecordingProxy.h>
#include <camera/android/hardware/ICamera.h>
#include <camera/android/hardware/ICameraClient.h>
-#include <camera/CameraBase.h>
-#include <camera/CameraUtils.h>
+#include <gui/Flags.h>
+#if WB_LIBCAMERASERVICE_WITH_DEPENDENCIES
+#include <gui/Surface.h>
+#else
+#include <gui/IGraphicBufferProducer.h>
+#endif
+#include <system/camera.h>
namespace android {
@@ -91,8 +96,8 @@
status_t lock();
status_t unlock();
- // pass the buffered IGraphicBufferProducer to the camera service
- status_t setPreviewTarget(const sp<IGraphicBufferProducer>& bufferProducer);
+ // pass the SurfaceType to the camera service
+ status_t setPreviewTarget(const sp<SurfaceType>& target);
// start preview mode, must call setPreviewTarget first
status_t startPreview();
@@ -148,7 +153,7 @@
// Set the video buffer producer for camera to use in VIDEO_BUFFER_MODE_BUFFER_QUEUE
// mode.
- status_t setVideoTarget(const sp<IGraphicBufferProducer>& bufferProducer);
+ status_t setVideoTarget(const sp<SurfaceType>& target);
void setListener(const sp<CameraListener>& listener);
@@ -158,8 +163,7 @@
// disabled by calling it with CAMERA_FRAME_CALLBACK_FLAG_NOOP, and
// Target by calling it with a NULL interface.
void setPreviewCallbackFlags(int preview_callback_flag);
- status_t setPreviewCallbackTarget(
- const sp<IGraphicBufferProducer>& callbackProducer);
+ status_t setPreviewCallbackTarget(const sp<SurfaceType>& target);
sp<ICameraRecordingProxy> getRecordingProxy();
diff --git a/camera/include/camera/android/hardware/ICamera.h b/camera/include/camera/android/hardware/ICamera.h
index ec19e5d..eb887fb 100644
--- a/camera/include/camera/android/hardware/ICamera.h
+++ b/camera/include/camera/android/hardware/ICamera.h
@@ -22,6 +22,7 @@
#include <binder/Parcel.h>
#include <binder/IMemory.h>
#include <binder/Status.h>
+#include <gui/Flags.h>
#include <utils/String8.h>
namespace android {
@@ -61,9 +62,8 @@
// allow other processes to use this ICamera interface
virtual status_t unlock() = 0;
- // pass the buffered IGraphicBufferProducer to the camera service
- virtual status_t setPreviewTarget(
- const sp<IGraphicBufferProducer>& bufferProducer) = 0;
+ // pass the SurfaceType to the camera service
+ virtual status_t setPreviewTarget(const sp<SurfaceType>& bufferProducer) = 0;
// set the preview callback flag to affect how the received frames from
// preview are handled. Enabling preview callback flags disables any active
@@ -73,8 +73,7 @@
// of preview callback buffers. Passing a valid interface here disables any
// active preview callbacks set by setPreviewCallbackFlag(). Passing NULL
// disables the use of the callback target.
- virtual status_t setPreviewCallbackTarget(
- const sp<IGraphicBufferProducer>& callbackProducer) = 0;
+ virtual status_t setPreviewCallbackTarget(const sp<SurfaceType>& callbackProducer) = 0;
// start preview mode, must call setPreviewTarget first
virtual status_t startPreview() = 0;
@@ -138,8 +137,7 @@
virtual status_t setVideoBufferMode(int32_t videoBufferMode) = 0;
// Set the video buffer producer for camera to use in VIDEO_BUFFER_MODE_BUFFER_QUEUE mode.
- virtual status_t setVideoTarget(
- const sp<IGraphicBufferProducer>& bufferProducer) = 0;
+ virtual status_t setVideoTarget(const sp<SurfaceType>& bufferProducer) = 0;
// Set the audio restriction mode
virtual status_t setAudioRestriction(int32_t mode) = 0;
diff --git a/camera/include/camera/camera2/OutputConfiguration.h b/camera/include/camera/camera2/OutputConfiguration.h
index 83ce39d..2049a31 100644
--- a/camera/include/camera/camera2/OutputConfiguration.h
+++ b/camera/include/camera/camera2/OutputConfiguration.h
@@ -72,7 +72,7 @@
bool isMultiResolution() const;
int64_t getStreamUseCase() const;
int getTimestampBase() const;
- int getMirrorMode() const;
+ int getMirrorMode(sp<IGraphicBufferProducer> surface) const;
bool useReadoutTimestamp() const;
int getFormat() const;
int getDataspace() const;
@@ -125,6 +125,7 @@
mStreamUseCase == other.mStreamUseCase &&
mTimestampBase == other.mTimestampBase &&
mMirrorMode == other.mMirrorMode &&
+ mirrorModesEqual(other) &&
mUseReadoutTimestamp == other.mUseReadoutTimestamp &&
mFormat == other.mFormat &&
mDataspace == other.mDataspace &&
@@ -180,6 +181,9 @@
if (mMirrorMode != other.mMirrorMode) {
return mMirrorMode < other.mMirrorMode;
}
+ if (!mirrorModesEqual(other)) {
+ return mirrorModesLessThan(other);
+ }
if (mUseReadoutTimestamp != other.mUseReadoutTimestamp) {
return mUseReadoutTimestamp < other.mUseReadoutTimestamp;
}
@@ -204,6 +208,9 @@
bool sensorPixelModesUsedLessThan(const OutputConfiguration& other) const;
bool gbpsLessThan(const OutputConfiguration& other) const;
void addGraphicProducer(sp<IGraphicBufferProducer> gbp) {mGbps.push_back(gbp);}
+ bool mirrorModesEqual(const OutputConfiguration& other) const;
+ bool mirrorModesLessThan(const OutputConfiguration& other) const;
+ const std::vector<int32_t>& getMirrorModes() const {return mMirrorModeForProducers;}
private:
std::vector<sp<IGraphicBufferProducer>> mGbps;
int mRotation;
@@ -221,6 +228,7 @@
int64_t mStreamUseCase;
int mTimestampBase;
int mMirrorMode;
+ std::vector<int> mMirrorModeForProducers; // 1:1 mapped with mGbps
bool mUseReadoutTimestamp;
int mFormat;
int mDataspace;
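
Reviewer note: mMirrorModeForProducers is kept 1:1 with mGbps, so getMirrorMode(surface) becomes an identity search over the producers, with mMirrorMode as the fallback for size mismatches or surfaces outside the configuration. A self-contained sketch of that lookup using stand-in types (strings instead of IGraphicBufferProducer):

#include <cstdio>
#include <string>
#include <vector>

// Stand-in for the per-producer lookup added to OutputConfiguration::getMirrorMode():
// producers and modes are parallel vectors; unknown producers get the default mode.
int getMirrorMode(const std::vector<std::string>& producers,
                  const std::vector<int>& modesForProducers,
                  int defaultMode,
                  const std::string& surface) {
    if (producers.size() != modesForProducers.size()) {
        return defaultMode;  // defensive: vectors out of sync
    }
    for (size_t i = 0; i < producers.size(); i++) {
        if (producers[i] == surface) {
            return modesForProducers[i];  // per-producer mirror mode
        }
    }
    return defaultMode;  // surface not part of this configuration
}

int main() {
    std::vector<std::string> producers{"preview", "record"};
    std::vector<int> modes{0, 2};  // illustrative mode values only
    std::printf("record  -> %d\n", getMirrorMode(producers, modes, 0, "record"));
    std::printf("unknown -> %d\n", getMirrorMode(producers, modes, 0, "analysis"));
    return 0;
}
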
diff --git a/camera/ndk/impl/ACameraMetadata.cpp b/camera/ndk/impl/ACameraMetadata.cpp
index 69b30f7..fef6443 100644
--- a/camera/ndk/impl/ACameraMetadata.cpp
+++ b/camera/ndk/impl/ACameraMetadata.cpp
@@ -536,6 +536,8 @@
case ACAMERA_COLOR_CORRECTION_TRANSFORM:
case ACAMERA_COLOR_CORRECTION_GAINS:
case ACAMERA_COLOR_CORRECTION_ABERRATION_MODE:
+ case ACAMERA_COLOR_CORRECTION_COLOR_TEMPERATURE:
+ case ACAMERA_COLOR_CORRECTION_COLOR_TINT:
case ACAMERA_CONTROL_AE_ANTIBANDING_MODE:
case ACAMERA_CONTROL_AE_EXPOSURE_COMPENSATION:
case ACAMERA_CONTROL_AE_LOCK:
diff --git a/camera/ndk/include/camera/NdkCameraMetadataTags.h b/camera/ndk/include/camera/NdkCameraMetadataTags.h
index acc3c7c..44aac29 100644
--- a/camera/ndk/include/camera/NdkCameraMetadataTags.h
+++ b/camera/ndk/include/camera/NdkCameraMetadataTags.h
@@ -307,6 +307,100 @@
*/
ACAMERA_COLOR_CORRECTION_AVAILABLE_ABERRATION_MODES = // byte[n]
ACAMERA_COLOR_CORRECTION_START + 4,
+ /**
+ * <p>Specifies the color temperature for CCT mode in Kelvin
+ * to adjust the white balance of the image.</p>
+ *
+ * <p>Type: int32</p>
+ *
+ * <p>This tag may appear in:
+ * <ul>
+ * <li>ACameraMetadata from ACameraCaptureSession_captureCallback_result callbacks</li>
+ * <li>ACaptureRequest</li>
+ * </ul></p>
+ *
+ * <p>Sets the color temperature in Kelvin units for when
+ * ACAMERA_COLOR_CORRECTION_MODE is CCT to adjust the
+ * white balance of the image.</p>
+ * <p>If CCT mode is enabled without a requested color temperature,
+ * a default value will be set by the camera device. The default value can be
+ * retrieved by checking the corresponding capture result. Color temperatures
+ * requested outside the advertised ACAMERA_COLOR_CORRECTION_COLOR_TEMPERATURE_RANGE
+ * will be clamped.</p>
+ *
+ * @see ACAMERA_COLOR_CORRECTION_COLOR_TEMPERATURE_RANGE
+ * @see ACAMERA_COLOR_CORRECTION_MODE
+ */
+ ACAMERA_COLOR_CORRECTION_COLOR_TEMPERATURE = // int32
+ ACAMERA_COLOR_CORRECTION_START + 5,
+ /**
+ * <p>Specifies the color tint for CCT mode to adjust the white
+ * balance of the image.</p>
+ *
+ * <p>Type: int32</p>
+ *
+ * <p>This tag may appear in:
+ * <ul>
+ * <li>ACameraMetadata from ACameraCaptureSession_captureCallback_result callbacks</li>
+ * <li>ACaptureRequest</li>
+ * </ul></p>
+ *
+ * <p>Sets the color tint for when ACAMERA_COLOR_CORRECTION_MODE
+ * is CCT to adjust the white balance of the image.</p>
+ * <p>If CCT mode is enabled without a requested color tint,
+ * a default value will be set by the camera device. The default value can be
+ * retrieved by checking the corresponding capture result. Color tints requested
+ * outside the supported range will be clamped to the nearest limit (-50 or +50).</p>
+ *
+ * @see ACAMERA_COLOR_CORRECTION_MODE
+ */
+ ACAMERA_COLOR_CORRECTION_COLOR_TINT = // int32
+ ACAMERA_COLOR_CORRECTION_START + 6,
+ /**
+ * <p>The range of supported color temperature values for
+ * ACAMERA_COLOR_CORRECTION_COLOR_TEMPERATURE.</p>
+ *
+ * @see ACAMERA_COLOR_CORRECTION_COLOR_TEMPERATURE
+ *
+ * <p>Type: int32[2]</p>
+ *
+ * <p>This tag may appear in:
+ * <ul>
+ * <li>ACameraMetadata from ACameraManager_getCameraCharacteristics</li>
+ * </ul></p>
+ *
+ * <p>This key lists the valid range of color temperature values for
+ * ACAMERA_COLOR_CORRECTION_COLOR_TEMPERATURE supported by this camera device.</p>
+ * <p>This key will be null on devices that do not support CCT mode for
+ * ACAMERA_COLOR_CORRECTION_MODE.</p>
+ *
+ * @see ACAMERA_COLOR_CORRECTION_COLOR_TEMPERATURE
+ * @see ACAMERA_COLOR_CORRECTION_MODE
+ */
+ ACAMERA_COLOR_CORRECTION_COLOR_TEMPERATURE_RANGE = // int32[2]
+ ACAMERA_COLOR_CORRECTION_START + 7,
+ /**
+ * <p>List of color correction modes for ACAMERA_COLOR_CORRECTION_MODE that are
+ * supported by this camera device.</p>
+ *
+ * @see ACAMERA_COLOR_CORRECTION_MODE
+ *
+ * <p>Type: byte[n]</p>
+ *
+ * <p>This tag may appear in:
+ * <ul>
+ * <li>ACameraMetadata from ACameraManager_getCameraCharacteristics</li>
+ * </ul></p>
+ *
+ * <p>This key lists the valid modes for ACAMERA_COLOR_CORRECTION_MODE. If no
+ * color correction modes are available for a device, this key will be null.</p>
+ * <p>Camera devices that have a FULL hardware level will always include at least
+ * FAST, HIGH_QUALITY, and TRANSFORM_MATRIX modes.</p>
+ *
+ * @see ACAMERA_COLOR_CORRECTION_MODE
+ */
+ ACAMERA_COLOR_CORRECTION_AVAILABLE_MODES = // byte[n]
+ ACAMERA_COLOR_CORRECTION_START + 8,
ACAMERA_COLOR_CORRECTION_END,
/**
@@ -8137,6 +8231,20 @@
*/
ACAMERA_COLOR_CORRECTION_MODE_HIGH_QUALITY = 2,
+ /**
+ * <p>Use
+ * ACAMERA_COLOR_CORRECTION_COLOR_TEMPERATURE and
+ * ACAMERA_COLOR_CORRECTION_COLOR_TINT to adjust the white balance based
+ * on correlated color temperature.</p>
+ * <p>If AWB is enabled with <code>ACAMERA_CONTROL_AWB_MODE != OFF</code>, then
+ * CCT is ignored.</p>
+ *
+ * @see ACAMERA_COLOR_CORRECTION_COLOR_TEMPERATURE
+ * @see ACAMERA_COLOR_CORRECTION_COLOR_TINT
+ * @see ACAMERA_CONTROL_AWB_MODE
+ */
+ ACAMERA_COLOR_CORRECTION_MODE_CCT = 3,
+
} acamera_metadata_enum_android_color_correction_mode_t;
// ACAMERA_COLOR_CORRECTION_ABERRATION_MODE
diff --git a/camera/tests/CameraZSLTests.cpp b/camera/tests/CameraZSLTests.cpp
index 2740d09..b06f9b4 100644
--- a/camera/tests/CameraZSLTests.cpp
+++ b/camera/tests/CameraZSLTests.cpp
@@ -20,17 +20,18 @@
#include <gtest/gtest.h>
#include <android/content/AttributionSourceState.h>
+#include <android/hardware/ICameraService.h>
#include <binder/ProcessState.h>
-#include <utils/Errors.h>
-#include <utils/Log.h>
-#include <gui/Surface.h>
-#include <gui/SurfaceComposerClient.h>
-#include <camera/CameraParameters.h>
-#include <camera/CameraMetadata.h>
#include <camera/Camera.h>
+#include <camera/CameraMetadata.h>
+#include <camera/CameraParameters.h>
#include <camera/CameraUtils.h>
#include <camera/StringUtils.h>
-#include <android/hardware/ICameraService.h>
+#include <gui/Flags.h>
+#include <gui/Surface.h>
+#include <gui/SurfaceComposerClient.h>
+#include <utils/Errors.h>
+#include <utils/Log.h>
using namespace android;
using namespace android::hardware;
@@ -276,8 +277,11 @@
previewSurface = surfaceControl->getSurface();
ASSERT_TRUE(previewSurface != NULL);
- ASSERT_EQ(NO_ERROR, cameraDevice->setPreviewTarget(
- previewSurface->getIGraphicBufferProducer()));
+ ASSERT_EQ(NO_ERROR, cameraDevice->setPreviewTarget(previewSurface
+#if !WB_LIBCAMERASERVICE_WITH_DEPENDENCIES
+ ->getIGraphicBufferProducer()
+#endif
+ ));
cameraDevice->setPreviewCallbackFlag(
CAMERA_FRAME_CALLBACK_FLAG_CAMCORDER);
diff --git a/camera/tests/fuzzer/camera_c2OutputConfiguration_fuzzer.cpp b/camera/tests/fuzzer/camera_c2OutputConfiguration_fuzzer.cpp
index 7046075..b6fa817 100644
--- a/camera/tests/fuzzer/camera_c2OutputConfiguration_fuzzer.cpp
+++ b/camera/tests/fuzzer/camera_c2OutputConfiguration_fuzzer.cpp
@@ -120,7 +120,10 @@
[&]() { outputConfiguration->getColorSpace(); },
[&]() { outputConfiguration->getStreamUseCase(); },
[&]() { outputConfiguration->getTimestampBase(); },
- [&]() { outputConfiguration->getMirrorMode(); },
+ [&]() {
+ sp<IGraphicBufferProducer> gbp = createIGraphicBufferProducer();
+ outputConfiguration->getMirrorMode(gbp);
+ },
[&]() { outputConfiguration->useReadoutTimestamp(); },
});
callC2OutputConfAPIs();
diff --git a/camera/tests/fuzzer/camera_fuzzer.cpp b/camera/tests/fuzzer/camera_fuzzer.cpp
index f46d246..f976fe1 100644
--- a/camera/tests/fuzzer/camera_fuzzer.cpp
+++ b/camera/tests/fuzzer/camera_fuzzer.cpp
@@ -20,6 +20,7 @@
#include <android/content/AttributionSourceState.h>
#include <binder/MemoryDealer.h>
#include <fuzzer/FuzzedDataProvider.h>
+#include <gui/Flags.h>
#include <gui/Surface.h>
#include <gui/SurfaceComposerClient.h>
#include "camera2common.h"
@@ -210,7 +211,11 @@
auto callCameraAPIs = mFDP->PickValueInArray<const std::function<void()>>({
[&]() {
if (surfaceControl) {
- mCamera->setPreviewTarget(surface->getIGraphicBufferProducer());
+ mCamera->setPreviewTarget(surface
+#if !WB_LIBCAMERASERVICE_WITH_DEPENDENCIES
+ ->getIGraphicBufferProducer()
+#endif
+ );
}
},
[&]() {
@@ -267,7 +272,11 @@
},
[&]() {
if (surfaceControl) {
- mCamera->setVideoTarget(surface->getIGraphicBufferProducer());
+ mCamera->setVideoTarget(surface
+#if !WB_LIBCAMERASERVICE_WITH_DEPENDENCIES
+ ->getIGraphicBufferProducer()
+#endif
+ );
}
},
[&]() {
@@ -283,7 +292,11 @@
},
[&]() {
if (surfaceControl) {
- mCamera->setPreviewCallbackTarget(surface->getIGraphicBufferProducer());
+ mCamera->setPreviewCallbackTarget(surface
+#if !WB_LIBCAMERASERVICE_WITH_DEPENDENCIES
+ ->getIGraphicBufferProducer()
+#endif
+ );
}
},
[&]() { mCamera->getRecordingProxy(); },
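
Reviewer note: the test and fuzzer updates all rely on the same trick of splicing ->getIGraphicBufferProducer() into the argument expression only when WB_LIBCAMERASERVICE_WITH_DEPENDENCIES is off, so one call site serves both setPreviewTarget signatures. A hedged standalone sketch of that preprocessor splicing; the types are stand-ins and the flag is defined locally only so the sketch compiles (the real definition comes from gui/Flags.h):

#include <cstdio>
#include <memory>

// Hypothetical stand-ins for Surface / IGraphicBufferProducer.
struct Producer { void use() const { std::puts("producer path"); } };
struct Surface {
    std::shared_ptr<Producer> producer = std::make_shared<Producer>();
    std::shared_ptr<Producer> getIGraphicBufferProducer() const { return producer; }
    void use() const { std::puts("surface path"); }
};

#define WB_LIBCAMERASERVICE_WITH_DEPENDENCIES 1  // local stand-in for the build flag

#if WB_LIBCAMERASERVICE_WITH_DEPENDENCIES
void setPreviewTarget(const std::shared_ptr<Surface>& target) { target->use(); }
#else
void setPreviewTarget(const std::shared_ptr<Producer>& target) { target->use(); }
#endif

int main() {
    auto surface = std::make_shared<Surface>();
    // Same splicing as in camera_fuzzer.cpp: the member call is only part of
    // the expression when the flag is disabled.
    setPreviewTarget(surface
#if !WB_LIBCAMERASERVICE_WITH_DEPENDENCIES
            ->getIGraphicBufferProducer()
#endif
            );
    return 0;
}
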
diff --git a/drm/drmserver/Android.bp b/drm/drmserver/Android.bp
index 81c2003..cee44b9 100644
--- a/drm/drmserver/Android.bp
+++ b/drm/drmserver/Android.bp
@@ -78,9 +78,6 @@
"libselinux",
"libstagefright_foundation",
],
- whole_static_libs: [
- "libc++fs",
- ],
cflags: [
"-Wall",
@@ -127,7 +124,6 @@
],
static_libs: [
- "libc++fs",
"libmediautils",
"liblog",
"libdrmframeworkcommon",
diff --git a/drm/libdrmframework/plugins/passthru/Android.bp b/drm/libdrmframework/plugins/passthru/Android.bp
index 0a6cd47..6ac7188 100644
--- a/drm/libdrmframework/plugins/passthru/Android.bp
+++ b/drm/libdrmframework/plugins/passthru/Android.bp
@@ -45,9 +45,6 @@
"libdl",
"libdrmframeworkcommon",
],
- whole_static_libs: [
- "libc++fs",
- ],
local_include_dirs: ["include"],
diff --git a/drm/mediadrm/plugins/clearkey/aidl/Android.bp b/drm/mediadrm/plugins/clearkey/aidl/Android.bp
index 079e075..4132ba2 100644
--- a/drm/mediadrm/plugins/clearkey/aidl/Android.bp
+++ b/drm/mediadrm/plugins/clearkey/aidl/Android.bp
@@ -134,6 +134,7 @@
required: [
"com.android.hardware.drm.clearkey",
],
+ vendor: true,
}
cc_defaults {
diff --git a/media/aconfig/codec_fwk.aconfig b/media/aconfig/codec_fwk.aconfig
index 84c0f48..b5c7edf 100644
--- a/media/aconfig/codec_fwk.aconfig
+++ b/media/aconfig/codec_fwk.aconfig
@@ -13,6 +13,14 @@
}
flag {
+ name: "apv_support"
+ is_exported: true
+ namespace: "codec_fwk"
+ description: "Feature flag for Android support for APV Content"
+ bug: "375464302"
+}
+
+flag {
name: "codec_buffer_state_cleanup"
namespace: "codec_fwk"
description: "Bugfix flag for more buffer state cleanup in MediaCodec"
diff --git a/media/audio/aconfig/Android.bp b/media/audio/aconfig/Android.bp
index 47418cf..2da6758 100644
--- a/media/audio/aconfig/Android.bp
+++ b/media/audio/aconfig/Android.bp
@@ -116,7 +116,10 @@
package: "android.media.audio",
container: "system",
srcs: ["audio_framework.aconfig"],
- visibility: ["//frameworks/base/api"],
+ visibility: [
+ "//frameworks/base/api",
+ "//frameworks/base/core/res",
+ ],
}
aconfig_declarations {
diff --git a/media/audio/aconfig/audio_framework.aconfig b/media/audio/aconfig/audio_framework.aconfig
index 0b434f7..c6479d0 100644
--- a/media/audio/aconfig/audio_framework.aconfig
+++ b/media/audio/aconfig/audio_framework.aconfig
@@ -32,6 +32,25 @@
bug: "374751406"
}
+flag {
+ name: "deprecate_stream_bt_sco"
+ namespace: "media_audio"
+ description: "Deprecate STREAM_BLUETOOTH_SCO"
+ is_exported: true
+ bug: "376756660"
+}
+
+flag {
+ name: "enable_multichannel_group_device"
+ namespace: "media_audio"
+ description:
+        "Enable new audio device type for wireless connected speaker group "
+ "supporting multichannel content."
+ is_exported: true
+ is_fixed_read_only: true
+ bug: "344031109"
+}
+
flag{
name: "enable_ringtone_haptics_customization"
namespace: "media_audio"
@@ -93,6 +112,14 @@
}
flag {
+ name: "hardening_permission_api"
+ is_exported: true
+ namespace: "media_audio"
+ description: "API flag for additional appop/perm constructs for hardening."
+ bug: "376480814"
+}
+
+flag {
name: "loudness_configurator_api"
is_exported: true
namespace: "media_audio"
@@ -163,6 +190,13 @@
bug: "367816690"
}
+flag {
+ name: "speaker_cleanup_usage"
+ namespace: "media_audio"
+ description: "Support new AudioAttributes usage for speaker cleanup"
+ bug: "355050846"
+}
+
# TODO remove
flag {
name: "volume_ringer_api_hardening"
diff --git a/media/audioserver/main_audioserver.cpp b/media/audioserver/main_audioserver.cpp
index d55932d..4f5b95d 100644
--- a/media/audioserver/main_audioserver.cpp
+++ b/media/audioserver/main_audioserver.cpp
@@ -190,7 +190,7 @@
// attempting to call audio flinger on a null pointer could make the process crash
// and attract attentions.
std::vector<AudioMMapPolicyInfo> policyInfos;
- status_t status = sp<IAudioFlinger>::cast(af)->getMmapPolicyInfos(
+ status_t status = AudioSystem::getMmapPolicyInfos(
AudioMMapPolicyType::DEFAULT, &policyInfos);
// Initialize aaudio service when querying mmap policy succeeds and
// any of the policy supports MMAP.
diff --git a/media/codec2/components/mpeg2/C2SoftMpeg2Dec.cpp b/media/codec2/components/mpeg2/C2SoftMpeg2Dec.cpp
index 562dcf5..52920c2 100644
--- a/media/codec2/components/mpeg2/C2SoftMpeg2Dec.cpp
+++ b/media/codec2/components/mpeg2/C2SoftMpeg2Dec.cpp
@@ -743,6 +743,25 @@
}
status_t C2SoftMpeg2Dec::deleteDecoder() {
+    // The IV_CMD_RETRIEVE_MEMREC API call not only retrieves the memory records
+    // but also joins active threads and destroys the condition variables and
+    // mutex locks for each thread.
+ iv_retrieve_mem_rec_ip_t s_retrieve_mem_ip;
+ iv_retrieve_mem_rec_op_t s_retrieve_mem_op;
+
+ s_retrieve_mem_ip.pv_mem_rec_location = (iv_mem_rec_t *)mMemRecords;
+ s_retrieve_mem_ip.e_cmd = IV_CMD_RETRIEVE_MEMREC;
+ s_retrieve_mem_ip.u4_size = sizeof(iv_retrieve_mem_rec_ip_t);
+ s_retrieve_mem_op.u4_size = sizeof(iv_retrieve_mem_rec_op_t);
+
+ IV_API_CALL_STATUS_T status = ivdec_api_function(mDecHandle,
+ &s_retrieve_mem_ip,
+ &s_retrieve_mem_op);
+ if (IV_SUCCESS != status) {
+ ALOGE("error in %s: 0x%x", __func__, s_retrieve_mem_op.u4_error_code);
+ return UNKNOWN_ERROR;
+ }
+
if (mMemRecords) {
iv_mem_rec_t *ps_mem_rec = mMemRecords;
diff --git a/media/codec2/core/include/C2Config.h b/media/codec2/core/include/C2Config.h
index e6782a9..069d6ad 100644
--- a/media/codec2/core/include/C2Config.h
+++ b/media/codec2/core/include/C2Config.h
@@ -430,6 +430,7 @@
_C2_PL_AV1_BASE = 0x9000,
_C2_PL_VP8_BASE = 0xA000,
_C2_PL_MPEGH_BASE = 0xB000, // MPEG-H 3D Audio
+ _C2_PL_APV_BASE = 0xC000, // APV
C2_PROFILE_LEVEL_VENDOR_START = 0x70000000,
};
@@ -597,6 +598,15 @@
PROFILE_MPEGH_HIGH, ///< MPEG-H High
PROFILE_MPEGH_LC, ///< MPEG-H Low-complexity
PROFILE_MPEGH_BASELINE, ///< MPEG-H Baseline
+
+ // Advanced Professional VideoCodec (APV)
+ PROFILE_APV_422_10 = _C2_PL_APV_BASE, ///< APV 422-10 Profile
+ PROFILE_APV_422_12, ///< APV 422-12 Profile
+ PROFILE_APV_444_10, ///< APV 444-10 Profile
+ PROFILE_APV_444_12, ///< APV 444-12 Profile
+ PROFILE_APV_4444_10, ///< APV 4444-10 Profile
+ PROFILE_APV_4444_12, ///< APV 4444-12 Profile
+ PROFILE_APV_400_10, ///< APV 400-10 Profile
};
enum C2Config::level_t : uint32_t {
@@ -752,6 +762,68 @@
LEVEL_MPEGH_3, ///< MPEG-H L3
LEVEL_MPEGH_4, ///< MPEG-H L4
LEVEL_MPEGH_5, ///< MPEG-H L5
+
+    // Advanced Professional VideoCodec (APV) levels/bands
+ LEVEL_APV_1_BAND_0 = _C2_PL_APV_BASE, ///< APV L 1, BAND 0
+ LEVEL_APV_1_1_BAND_0, ///< APV L 1.1, BAND 0
+ LEVEL_APV_2_BAND_0, ///< APV L 2, BAND 0
+ LEVEL_APV_2_1_BAND_0, ///< APV L 2.1, BAND 0
+ LEVEL_APV_3_BAND_0, ///< APV L 3, BAND 0
+ LEVEL_APV_3_1_BAND_0, ///< APV L 3.1, BAND 0
+ LEVEL_APV_4_BAND_0, ///< APV L 4, BAND 0
+ LEVEL_APV_4_1_BAND_0, ///< APV L 4.1, BAND 0
+ LEVEL_APV_5_BAND_0, ///< APV L 5, BAND 0
+ LEVEL_APV_5_1_BAND_0, ///< APV L 5.1, BAND 0
+ LEVEL_APV_6_BAND_0, ///< APV L 6, BAND 0
+ LEVEL_APV_6_1_BAND_0, ///< APV L 6.1, BAND 0
+ LEVEL_APV_7_BAND_0, ///< APV L 7, BAND 0
+ LEVEL_APV_7_1_BAND_0, ///< APV L 7.1, BAND 0
+
+ LEVEL_APV_1_BAND_1 = _C2_PL_APV_BASE + 0x100, ///< APV L 1, BAND 1
+ LEVEL_APV_1_1_BAND_1, ///< APV L 1.1, BAND 1
+ LEVEL_APV_2_BAND_1, ///< APV L 2, BAND 1
+ LEVEL_APV_2_1_BAND_1, ///< APV L 2.1, BAND 1
+ LEVEL_APV_3_BAND_1, ///< APV L 3, BAND 1
+ LEVEL_APV_3_1_BAND_1, ///< APV L 3.1, BAND 1
+ LEVEL_APV_4_BAND_1, ///< APV L 4, BAND 1
+ LEVEL_APV_4_1_BAND_1, ///< APV L 4.1, BAND 1
+ LEVEL_APV_5_BAND_1, ///< APV L 5, BAND 1
+ LEVEL_APV_5_1_BAND_1, ///< APV L 5.1, BAND 1
+ LEVEL_APV_6_BAND_1, ///< APV L 6, BAND 1
+ LEVEL_APV_6_1_BAND_1, ///< APV L 6.1, BAND 1
+ LEVEL_APV_7_BAND_1, ///< APV L 7, BAND 1
+ LEVEL_APV_7_1_BAND_1, ///< APV L 7.1, BAND 1
+
+ LEVEL_APV_1_BAND_2 = _C2_PL_APV_BASE + 0x200, ///< APV L 1, BAND 2
+ LEVEL_APV_1_1_BAND_2, ///< APV L 1.1, BAND 2
+ LEVEL_APV_2_BAND_2, ///< APV L 2, BAND 2
+ LEVEL_APV_2_1_BAND_2, ///< APV L 2.1, BAND 2
+ LEVEL_APV_3_BAND_2, ///< APV L 3, BAND 2
+ LEVEL_APV_3_1_BAND_2, ///< APV L 3.1, BAND 2
+ LEVEL_APV_4_BAND_2, ///< APV L 4, BAND 2
+ LEVEL_APV_4_1_BAND_2, ///< APV L 4.1, BAND 2
+ LEVEL_APV_5_BAND_2, ///< APV L 5, BAND 2
+ LEVEL_APV_5_1_BAND_2, ///< APV L 5.1, BAND 2
+ LEVEL_APV_6_BAND_2, ///< APV L 6, BAND 2
+ LEVEL_APV_6_1_BAND_2, ///< APV L 6.1, BAND 2
+ LEVEL_APV_7_BAND_2, ///< APV L 7, BAND 2
+ LEVEL_APV_7_1_BAND_2, ///< APV L 7.1, BAND 2
+
+ LEVEL_APV_1_BAND_3 = _C2_PL_APV_BASE + 0x300, ///< APV L 1, BAND 3
+ LEVEL_APV_1_1_BAND_3, ///< APV L 1.1, BAND 3
+ LEVEL_APV_2_BAND_3, ///< APV L 2, BAND 3
+ LEVEL_APV_2_1_BAND_3, ///< APV L 2.1, BAND 3
+ LEVEL_APV_3_BAND_3, ///< APV L 3, BAND 3
+ LEVEL_APV_3_1_BAND_3, ///< APV L 3.1, BAND 3
+ LEVEL_APV_4_BAND_3, ///< APV L 4, BAND 3
+ LEVEL_APV_4_1_BAND_3, ///< APV L 4.1, BAND 3
+ LEVEL_APV_5_BAND_3, ///< APV L 5, BAND 3
+ LEVEL_APV_5_1_BAND_3, ///< APV L 5.1, BAND 3
+ LEVEL_APV_6_BAND_3, ///< APV L 6, BAND 3
+ LEVEL_APV_6_1_BAND_3, ///< APV L 6.1, BAND 3
+ LEVEL_APV_7_BAND_3, ///< APV L 7, BAND 3
+ LEVEL_APV_7_1_BAND_3, ///< APV L 7.1, BAND 3
+
};
struct C2ProfileLevelStruct {
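
Reviewer note: the APV level constants follow a simple layout: _C2_PL_APV_BASE (0xC000) plus 0x100 per band, with the fourteen level/sub-level steps (1, 1.1, 2, 2.1, ... 7.1) enumerated consecutively inside each band. A small sketch of that arithmetic; apvLevelValue is an illustrative helper, not part of C2Config.h:

#include <cstdint>

constexpr uint32_t kApvBase = 0xC000;       // _C2_PL_APV_BASE
constexpr uint32_t kApvBandStride = 0x100;  // each band starts 0x100 above the previous one

// stepInBand counts the consecutive entries within a band:
// 0 = L1, 1 = L1.1, 2 = L2, 3 = L2.1, ... 13 = L7.1
constexpr uint32_t apvLevelValue(uint32_t band, uint32_t stepInBand) {
    return kApvBase + band * kApvBandStride + stepInBand;
}

static_assert(apvLevelValue(0, 0) == 0xC000);   // LEVEL_APV_1_BAND_0
static_assert(apvLevelValue(1, 0) == 0xC100);   // LEVEL_APV_1_BAND_1
static_assert(apvLevelValue(2, 13) == 0xC20D);  // LEVEL_APV_7_1_BAND_2

int main() { return 0; }
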
diff --git a/media/codec2/sfplugin/CCodecBufferChannel.cpp b/media/codec2/sfplugin/CCodecBufferChannel.cpp
index 5c46d99..e2b28dc 100644
--- a/media/codec2/sfplugin/CCodecBufferChannel.cpp
+++ b/media/codec2/sfplugin/CCodecBufferChannel.cpp
@@ -41,6 +41,7 @@
#include <android/sysprop/MediaProperties.sysprop.h>
#include <android-base/parseint.h>
#include <android-base/properties.h>
+#include <android-base/no_destructor.h>
#include <android-base/stringprintf.h>
#include <binder/MemoryBase.h>
#include <binder/MemoryDealer.h>
@@ -115,6 +116,109 @@
});
}
+class SurfaceCallbackHandler {
+public:
+ enum callback_type_t {
+ ON_BUFFER_RELEASED = 0,
+ ON_BUFFER_ATTACHED
+ };
+
+ void post(callback_type_t callback,
+ std::shared_ptr<Codec2Client::Component> component,
+ uint32_t generation) {
+ if (!component) {
+            ALOGW("surface callback posted for invalid component");
+ }
+ std::shared_ptr<SurfaceCallbackItem> item =
+ std::make_shared<SurfaceCallbackItem>(callback, component, generation);
+ std::unique_lock<std::mutex> lock(mMutex);
+ mItems.emplace_back(std::move(item));
+ mCv.notify_one();
+ }
+
+ ~SurfaceCallbackHandler() {
+ {
+ std::unique_lock<std::mutex> lock(mMutex);
+ mDone = true;
+ mCv.notify_all();
+ }
+ if (mThread.joinable()) {
+ mThread.join();
+ }
+ }
+
+ static SurfaceCallbackHandler& GetInstance() {
+ static base::NoDestructor<SurfaceCallbackHandler> sSurfaceCallbackHandler{};
+ return *sSurfaceCallbackHandler;
+ }
+
+private:
+ struct SurfaceCallbackItem {
+ callback_type_t mCallback;
+ std::weak_ptr<Codec2Client::Component> mComp;
+ uint32_t mGeneration;
+
+ SurfaceCallbackItem(
+ callback_type_t callback,
+ std::shared_ptr<Codec2Client::Component> comp,
+ uint32_t generation)
+ : mCallback(callback), mComp(comp), mGeneration(generation) {}
+ };
+
+ SurfaceCallbackHandler() { mThread = std::thread(&SurfaceCallbackHandler::run, this); }
+
+ void run() {
+ std::unique_lock<std::mutex> lock(mMutex);
+ while (!mDone) {
+ while (!mItems.empty()) {
+ std::deque<std::shared_ptr<SurfaceCallbackItem>> items = std::move(mItems);
+ mItems.clear();
+ lock.unlock();
+ handle(items);
+ lock.lock();
+ }
+ mCv.wait(lock);
+ }
+ }
+
+ void handle(std::deque<std::shared_ptr<SurfaceCallbackItem>> &items) {
+ while (!items.empty()) {
+ std::shared_ptr<SurfaceCallbackItem> item = items.front();
+ items.pop_front();
+ switch (item->mCallback) {
+ case ON_BUFFER_RELEASED: {
+                    std::shared_ptr<Codec2Client::Component> comp = item->mComp.lock();
+ if (comp) {
+ comp->onBufferReleasedFromOutputSurface(item->mGeneration);
+ }
+ break;
+ }
+ case ON_BUFFER_ATTACHED: {
+ std::shared_ptr<Codec2Client::Component> comp = item->mComp.lock();
+ if (comp) {
+ comp->onBufferAttachedToOutputSurface(item->mGeneration);
+ }
+ break;
+ }
+ default:
+                    ALOGE("Undefined surface callback message");
+ break;
+ }
+ }
+ }
+
+ std::thread mThread;
+ bool mDone = false;
+ std::deque<std::shared_ptr<SurfaceCallbackItem>> mItems;
+ std::mutex mMutex;
+ std::condition_variable mCv;
+
+
+ friend class base::NoDestructor<SurfaceCallbackHandler>;
+
+ DISALLOW_EVIL_CONSTRUCTORS(SurfaceCallbackHandler);
+};
+
} // namespace
CCodecBufferChannel::QueueGuard::QueueGuard(
@@ -231,7 +335,7 @@
void CCodecBufferChannel::setComponent(
const std::shared_ptr<Codec2Client::Component> &component) {
- mComponent = component;
+ std::atomic_store(&mComponent, component);
mComponentName = component->getName() + StringPrintf("#%d", int(uintptr_t(component.get()) % 997));
mName = mComponentName.c_str();
}
@@ -247,7 +351,7 @@
inputSurface->numProcessingBuffersBalance = 0;
inputSurface->surface = surface;
mHasInputSurface = true;
- return inputSurface->surface->connect(mComponent);
+ return inputSurface->surface->connect(std::atomic_load(&mComponent));
}
status_t CCodecBufferChannel::signalEndOfInputStream() {
@@ -443,7 +547,7 @@
now);
}
}
- err = mComponent->queue(&items);
+ err = std::atomic_load(&mComponent)->queue(&items);
}
if (err != C2_OK) {
Mutexed<PipelineWatcher>::Locked watcher(mPipelineWatcher);
@@ -1353,7 +1457,7 @@
qbi.setSurfaceDamage(Region::INVALID_REGION); // we don't have dirty regions
qbi.getFrameTimestamps = true; // we need to know when a frame is rendered
IGraphicBufferProducer::QueueBufferOutput qbo;
- status_t result = mComponent->queueToOutputSurface(block, qbi, &qbo);
+ status_t result = std::atomic_load(&mComponent)->queueToOutputSurface(block, qbi, &qbo);
if (result != OK) {
ALOGI("[%s] queueBuffer failed: %d", mName, result);
if (result == NO_INIT) {
@@ -1492,7 +1596,7 @@
void CCodecBufferChannel::pollForRenderedBuffers() {
FrameEventHistoryDelta delta;
- mComponent->pollForRenderedFrames(&delta);
+ std::atomic_load(&mComponent)->pollForRenderedFrames(&delta);
processRenderedFrames(delta);
}
@@ -1501,9 +1605,10 @@
// knowing the internal state of CCodec/CCodecBufferChannel,
// prevent mComponent from being destroyed by holding the shared reference
// during this interface being executed.
- std::shared_ptr<Codec2Client::Component> comp = mComponent;
+ std::shared_ptr<Codec2Client::Component> comp = std::atomic_load(&mComponent);
if (comp) {
- comp->onBufferReleasedFromOutputSurface(generation);
+ SurfaceCallbackHandler::GetInstance().post(
+ SurfaceCallbackHandler::ON_BUFFER_RELEASED, comp, generation);
}
}
@@ -1512,9 +1617,10 @@
// knowing the internal state of CCodec/CCodecBufferChannel,
// prevent mComponent from being destroyed by holding the shared reference
// during this interface being executed.
- std::shared_ptr<Codec2Client::Component> comp = mComponent;
+ std::shared_ptr<Codec2Client::Component> comp = std::atomic_load(&mComponent);
if (comp) {
- comp->onBufferAttachedToOutputSurface(generation);
+ SurfaceCallbackHandler::GetInstance().post(
+ SurfaceCallbackHandler::ON_BUFFER_ATTACHED, comp, generation);
}
}
@@ -1585,7 +1691,7 @@
C2ActualPipelineDelayTuning pipelineDelay(0);
C2SecureModeTuning secureMode(C2Config::SM_UNPROTECTED);
- c2_status_t err = mComponent->query(
+ c2_status_t err = std::atomic_load(&mComponent)->query(
{
&iStreamFormat,
&oStreamFormat,
@@ -1616,7 +1722,7 @@
size_t numOutputSlots = outputDelayValue + kSmoothnessFactor;
// TODO: get this from input format
- bool secure = mComponent->getName().find(".secure") != std::string::npos;
+ bool secure = std::atomic_load(&mComponent)->getName().find(".secure") != std::string::npos;
// secure mode is a static parameter (shall not change in the executing state)
mSendEncryptedInfoBuffer = secureMode.value == C2Config::SM_READ_PROTECTED_WITH_ENCRYPTED;
@@ -1662,7 +1768,7 @@
channelCount.invalidate();
pcmEncoding.invalidate();
}
- err = mComponent->query(stackParams,
+ err = std::atomic_load(&mComponent)->query(stackParams,
{ C2PortAllocatorsTuning::input::PARAM_TYPE },
C2_DONT_BLOCK,
&params);
@@ -1801,6 +1907,7 @@
outputSurface = output->surface ?
output->surface->getIGraphicBufferProducer() : nullptr;
if (outputSurface) {
+ (void)SurfaceCallbackHandler::GetInstance();
output->surface->setMaxDequeuedBufferCount(output->maxDequeueBuffers);
}
outputGeneration = output->generation;
@@ -1822,7 +1929,7 @@
// query C2PortAllocatorsTuning::output from component, or use default allocator if
// unsuccessful.
std::vector<std::unique_ptr<C2Param>> params;
- err = mComponent->query({ },
+ err = std::atomic_load(&mComponent)->query({ },
{ C2PortAllocatorsTuning::output::PARAM_TYPE },
C2_DONT_BLOCK,
&params);
@@ -1850,7 +1957,7 @@
// if unsuccessful.
if (outputSurface) {
params.clear();
- err = mComponent->query({ },
+ err = std::atomic_load(&mComponent)->query({ },
{ C2PortSurfaceAllocatorTuning::output::PARAM_TYPE },
C2_DONT_BLOCK,
&params);
@@ -1881,7 +1988,7 @@
}
if ((poolMask >> pools->outputAllocatorId) & 1) {
- err = mComponent->createBlockPool(
+ err = std::atomic_load(&mComponent)->createBlockPool(
pools->outputAllocatorId, &pools->outputPoolId, &pools->outputPoolIntf);
ALOGI("[%s] Created output block pool with allocatorID %u => poolID %llu - %s",
mName, pools->outputAllocatorId,
@@ -1902,7 +2009,8 @@
C2PortBlockPoolsTuning::output::AllocUnique({ pools->outputPoolId });
std::vector<std::unique_ptr<C2SettingResult>> failures;
- err = mComponent->config({ poolIdsTuning.get() }, C2_MAY_BLOCK, &failures);
+ err = std::atomic_load(&mComponent)->config(
+ { poolIdsTuning.get() }, C2_MAY_BLOCK, &failures);
ALOGD("[%s] Configured output block pool ids %llu => %s",
mName, (unsigned long long)poolIdsTuning->m.values[0], asString(err));
outputPoolId_ = pools->outputPoolId;
@@ -1910,7 +2018,7 @@
if (prevOutputPoolId != C2BlockPool::BASIC_LINEAR
&& prevOutputPoolId != C2BlockPool::BASIC_GRAPHIC) {
- c2_status_t err = mComponent->destroyBlockPool(prevOutputPoolId);
+ c2_status_t err = std::atomic_load(&mComponent)->destroyBlockPool(prevOutputPoolId);
if (err != C2_OK) {
ALOGW("Failed to clean up previous block pool %llu - %s (%d)\n",
(unsigned long long) prevOutputPoolId, asString(err), err);
@@ -1942,7 +2050,7 @@
// Try to set output surface to created block pool if given.
if (outputSurface) {
- mComponent->setOutputSurface(
+ std::atomic_load(&mComponent)->setOutputSurface(
outputPoolId_,
outputSurface,
outputGeneration,
@@ -1951,7 +2059,7 @@
// configure CPU read consumer usage
C2StreamUsageTuning::output outputUsage{0u, C2MemoryUsage::CPU_READ};
std::vector<std::unique_ptr<C2SettingResult>> failures;
- err = mComponent->config({ &outputUsage }, C2_MAY_BLOCK, &failures);
+ err = std::atomic_load(&mComponent)->config({ &outputUsage }, C2_MAY_BLOCK, &failures);
// do not print error message for now as most components may not yet
// support this setting
ALOGD_IF(err != C2_BAD_INDEX, "[%s] Configured output usage [%#llx]",
@@ -2073,7 +2181,8 @@
}
C2StreamBufferTypeSetting::output oStreamFormat(0u);
C2PrependHeaderModeSetting prepend(PREPEND_HEADER_TO_NONE);
- c2_status_t err = mComponent->query({ &oStreamFormat, &prepend }, {}, C2_DONT_BLOCK, nullptr);
+ c2_status_t err = std::atomic_load(&mComponent)->query(
+ { &oStreamFormat, &prepend }, {}, C2_DONT_BLOCK, nullptr);
if (err != C2_OK && err != C2_BAD_INDEX) {
return UNKNOWN_ERROR;
}
@@ -2091,7 +2200,7 @@
now);
}
}
- err = mComponent->queue(&flushedConfigs);
+ err = std::atomic_load(&mComponent)->queue(&flushedConfigs);
if (err != C2_OK) {
ALOGW("[%s] Error while queueing a flushed config", mName);
return UNKNOWN_ERROR;
@@ -2142,7 +2251,8 @@
Mutexed<BlockPools>::Locked pools(mBlockPools);
outputPoolId = pools->outputPoolId;
}
- if (mComponent) mComponent->stopUsingOutputSurface(outputPoolId);
+ std::shared_ptr<Codec2Client::Component> comp = std::atomic_load(&mComponent);
+ if (comp) comp->stopUsingOutputSurface(outputPoolId);
if (pushBlankBuffer) {
sp<ANativeWindow> anw = static_cast<ANativeWindow *>(surface.get());
@@ -2176,7 +2286,8 @@
void CCodecBufferChannel::release() {
mInfoBuffers.clear();
- mComponent.reset();
+ std::shared_ptr<Codec2Client::Component> nullComp;
+ std::atomic_store(&mComponent, nullComp);
mInputAllocator.reset();
mOutputSurface.lock()->surface.clear();
{
@@ -2498,7 +2609,7 @@
}
}
if (maxDequeueCount > 0) {
- mComponent->setOutputSurfaceMaxDequeueCount(maxDequeueCount);
+ std::atomic_load(&mComponent)->setOutputSurfaceMaxDequeueCount(maxDequeueCount);
}
}
@@ -2727,6 +2838,7 @@
oldSurface = outputSurface->surface;
}
if (newSurface) {
+ (void)SurfaceCallbackHandler::GetInstance();
newSurface->setScalingMode(NATIVE_WINDOW_SCALING_MODE_SCALE_TO_WINDOW);
newSurface->setDequeueTimeout(kDequeueTimeoutNs);
newSurface->setMaxDequeuedBufferCount(maxDequeueCount);
@@ -2745,7 +2857,7 @@
}
if (outputPoolIntf) {
- if (mComponent->setOutputSurface(
+ if (std::atomic_load(&mComponent)->setOutputSurface(
outputPoolId,
producer,
generation,
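
Reviewer note: SurfaceCallbackHandler moves the onBufferReleased/onBufferAttached work off the caller's thread onto a single long-lived worker that drains a deque under a mutex and blocks on a condition variable, while mComponent accesses now go through the std::atomic_load/std::atomic_store free functions for shared_ptr. A stripped-down, self-contained sketch of the same queue-and-drain pattern, using generic callbacks instead of the Codec2 types:

#include <chrono>
#include <condition_variable>
#include <cstdio>
#include <deque>
#include <functional>
#include <mutex>
#include <thread>

// Minimal version of the queue-and-drain worker: post() enqueues work and wakes
// the thread; run() drains the queue outside the lock, then waits again.
class CallbackHandler {
public:
    CallbackHandler() { mThread = std::thread(&CallbackHandler::run, this); }
    ~CallbackHandler() {
        {
            std::unique_lock<std::mutex> lock(mMutex);
            mDone = true;
            mCv.notify_all();
        }
        if (mThread.joinable()) mThread.join();
    }

    void post(std::function<void()> item) {
        std::unique_lock<std::mutex> lock(mMutex);
        mItems.emplace_back(std::move(item));
        mCv.notify_one();
    }

private:
    void run() {
        std::unique_lock<std::mutex> lock(mMutex);
        while (!mDone) {
            while (!mItems.empty()) {
                std::deque<std::function<void()>> items = std::move(mItems);
                mItems.clear();
                lock.unlock();           // run callbacks without holding the lock
                for (auto& f : items) f();
                lock.lock();
            }
            mCv.wait(lock);
        }
    }

    std::thread mThread;
    bool mDone = false;
    std::deque<std::function<void()>> mItems;
    std::mutex mMutex;
    std::condition_variable mCv;
};

int main() {
    CallbackHandler handler;
    handler.post([] { std::puts("ON_BUFFER_RELEASED"); });
    handler.post([] { std::puts("ON_BUFFER_ATTACHED"); });
    std::this_thread::sleep_for(std::chrono::milliseconds(50));  // demo only: let the worker drain
    return 0;  // destructor stops and joins the worker (the patch keeps it alive via NoDestructor)
}

The atomic_load/atomic_store free functions used on mComponent are the pre-C++20 way to read and swap a shared_ptr concurrently; they let setComponent() and release() replace the component while other threads read it, without adding another lock.
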
diff --git a/media/codec2/sfplugin/utils/Codec2Mapper.cpp b/media/codec2/sfplugin/utils/Codec2Mapper.cpp
index 9297520..3841831 100644
--- a/media/codec2/sfplugin/utils/Codec2Mapper.cpp
+++ b/media/codec2/sfplugin/utils/Codec2Mapper.cpp
@@ -436,6 +436,86 @@
{ C2Config::hdr_format_t::HDR10_PLUS, AV1ProfileMain10HDR10Plus },
};
+// APV
+ALookup<C2Config::profile_t, int32_t> sApvProfiles = {
+ { C2Config::PROFILE_APV_422_10, APVProfile422_10 },
+ { C2Config::PROFILE_APV_422_10, APVProfile422_10HDR10 },
+ { C2Config::PROFILE_APV_422_10, APVProfile422_10HDR10Plus },
+};
+
+ALookup<C2Config::profile_t, int32_t> sApvHdrProfiles = {
+ { C2Config::PROFILE_APV_422_10, APVProfile422_10HDR10 },
+};
+
+ALookup<C2Config::profile_t, int32_t> sApvHdr10PlusProfiles = {
+ { C2Config::PROFILE_APV_422_10, APVProfile422_10HDR10Plus },
+};
+
+ALookup<C2Config::level_t, int32_t> sApvLevels = {
+ { C2Config::LEVEL_APV_1_BAND_0, APVLevel1Band0 },
+ { C2Config::LEVEL_APV_1_BAND_1, APVLevel1Band1 },
+ { C2Config::LEVEL_APV_1_BAND_2, APVLevel1Band2 },
+ { C2Config::LEVEL_APV_1_BAND_3, APVLevel1Band3 },
+ { C2Config::LEVEL_APV_1_1_BAND_0, APVLevel11Band0 },
+ { C2Config::LEVEL_APV_1_1_BAND_1, APVLevel11Band1 },
+ { C2Config::LEVEL_APV_1_1_BAND_2, APVLevel11Band2 },
+ { C2Config::LEVEL_APV_1_1_BAND_3, APVLevel11Band3 },
+ { C2Config::LEVEL_APV_2_BAND_0, APVLevel2Band0 },
+ { C2Config::LEVEL_APV_2_BAND_1, APVLevel2Band1 },
+ { C2Config::LEVEL_APV_2_BAND_2, APVLevel2Band2 },
+ { C2Config::LEVEL_APV_2_BAND_3, APVLevel2Band3 },
+ { C2Config::LEVEL_APV_2_1_BAND_0, APVLevel21Band0 },
+ { C2Config::LEVEL_APV_2_1_BAND_1, APVLevel21Band1 },
+ { C2Config::LEVEL_APV_2_1_BAND_2, APVLevel21Band2 },
+ { C2Config::LEVEL_APV_2_1_BAND_3, APVLevel21Band3 },
+ { C2Config::LEVEL_APV_3_BAND_0, APVLevel3Band0 },
+ { C2Config::LEVEL_APV_3_BAND_1, APVLevel3Band1 },
+ { C2Config::LEVEL_APV_3_BAND_2, APVLevel3Band2 },
+ { C2Config::LEVEL_APV_3_BAND_3, APVLevel3Band3 },
+ { C2Config::LEVEL_APV_3_1_BAND_0, APVLevel31Band0 },
+ { C2Config::LEVEL_APV_3_1_BAND_1, APVLevel31Band1 },
+ { C2Config::LEVEL_APV_3_1_BAND_2, APVLevel31Band2 },
+ { C2Config::LEVEL_APV_3_1_BAND_3, APVLevel31Band3 },
+ { C2Config::LEVEL_APV_4_BAND_0, APVLevel4Band0 },
+ { C2Config::LEVEL_APV_4_BAND_1, APVLevel4Band1 },
+ { C2Config::LEVEL_APV_4_BAND_2, APVLevel4Band2 },
+ { C2Config::LEVEL_APV_4_BAND_3, APVLevel4Band3 },
+ { C2Config::LEVEL_APV_4_1_BAND_0, APVLevel41Band0 },
+ { C2Config::LEVEL_APV_4_1_BAND_1, APVLevel41Band1 },
+ { C2Config::LEVEL_APV_4_1_BAND_2, APVLevel41Band2 },
+ { C2Config::LEVEL_APV_4_1_BAND_3, APVLevel41Band3 },
+ { C2Config::LEVEL_APV_5_BAND_0, APVLevel5Band0 },
+ { C2Config::LEVEL_APV_5_BAND_1, APVLevel5Band1 },
+ { C2Config::LEVEL_APV_5_BAND_2, APVLevel5Band2 },
+ { C2Config::LEVEL_APV_5_BAND_3, APVLevel5Band3 },
+ { C2Config::LEVEL_APV_5_1_BAND_0, APVLevel51Band0 },
+ { C2Config::LEVEL_APV_5_1_BAND_1, APVLevel51Band1 },
+ { C2Config::LEVEL_APV_5_1_BAND_2, APVLevel51Band2 },
+ { C2Config::LEVEL_APV_5_1_BAND_3, APVLevel51Band3 },
+ { C2Config::LEVEL_APV_6_BAND_0, APVLevel6Band0 },
+ { C2Config::LEVEL_APV_6_BAND_1, APVLevel6Band1 },
+ { C2Config::LEVEL_APV_6_BAND_2, APVLevel6Band2 },
+ { C2Config::LEVEL_APV_6_BAND_3, APVLevel6Band3 },
+ { C2Config::LEVEL_APV_6_1_BAND_0, APVLevel61Band0 },
+ { C2Config::LEVEL_APV_6_1_BAND_1, APVLevel61Band1 },
+ { C2Config::LEVEL_APV_6_1_BAND_2, APVLevel61Band2 },
+ { C2Config::LEVEL_APV_6_1_BAND_3, APVLevel61Band3 },
+ { C2Config::LEVEL_APV_7_BAND_0, APVLevel7Band0 },
+ { C2Config::LEVEL_APV_7_BAND_1, APVLevel7Band1 },
+ { C2Config::LEVEL_APV_7_BAND_2, APVLevel7Band2 },
+ { C2Config::LEVEL_APV_7_BAND_3, APVLevel7Band3 },
+ { C2Config::LEVEL_APV_7_1_BAND_0, APVLevel71Band0 },
+ { C2Config::LEVEL_APV_7_1_BAND_1, APVLevel71Band1 },
+ { C2Config::LEVEL_APV_7_1_BAND_2, APVLevel71Band2 },
+ { C2Config::LEVEL_APV_7_1_BAND_3, APVLevel71Band3 },
+};
+
+ALookup<C2Config::hdr_format_t, int32_t> sApvHdrFormats = {
+ { C2Config::hdr_format_t::HLG, APVProfile422_10 },
+ { C2Config::hdr_format_t::HDR10, APVProfile422_10HDR10 },
+ { C2Config::hdr_format_t::HDR10_PLUS, APVProfile422_10HDR10Plus },
+};
+
// HAL_PIXEL_FORMAT_* -> COLOR_Format*
ALookup<uint32_t, int32_t> sPixelFormats = {
{ HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED, COLOR_FormatSurface },
@@ -720,6 +800,37 @@
int32_t mBitDepth;
};
+// APV
+struct ApvProfileLevelMapper : ProfileLevelMapperHelper {
+ ApvProfileLevelMapper(bool isHdr = false, bool isHdr10Plus = false) :
+ ProfileLevelMapperHelper(),
+ mIsHdr(isHdr), mIsHdr10Plus(isHdr10Plus) {}
+
+ virtual bool simpleMap(C2Config::level_t from, int32_t *to) {
+ return sApvLevels.map(from, to);
+ }
+ virtual bool simpleMap(int32_t from, C2Config::level_t *to) {
+ return sApvLevels.map(from, to);
+ }
+ virtual bool simpleMap(C2Config::profile_t from, int32_t *to) {
+ return mIsHdr10Plus ? sApvHdr10PlusProfiles.map(from, to) :
+ mIsHdr ? sApvHdrProfiles.map(from, to) :
+ sApvProfiles.map(from, to);
+ }
+ virtual bool simpleMap(int32_t from, C2Config::profile_t *to) {
+ return mIsHdr10Plus ? sApvHdr10PlusProfiles.map(from, to) :
+ mIsHdr ? sApvHdrProfiles.map(from, to) :
+ sApvProfiles.map(from, to);
+ }
+ virtual bool mapHdrFormat(int32_t from, C2Config::hdr_format_t *to) override {
+ return sApvHdrFormats.map(from, to);
+ }
+
+private:
+ bool mIsHdr;
+ bool mIsHdr10Plus;
+};
+
} // namespace
// the default mapper is used for media types that do not support HDR
@@ -753,6 +864,8 @@
return std::make_shared<Vp9ProfileLevelMapper>();
} else if (mediaType == MIMETYPE_VIDEO_AV1) {
return std::make_shared<Av1ProfileLevelMapper>();
+ } else if (mediaType == MIMETYPE_VIDEO_APV) {
+ return std::make_shared<ApvProfileLevelMapper>();
}
return nullptr;
}
@@ -767,6 +880,8 @@
return std::make_shared<Vp9ProfileLevelMapper>(true, isHdr10Plus);
} else if (mediaType == MIMETYPE_VIDEO_AV1) {
return std::make_shared<Av1ProfileLevelMapper>(true, isHdr10Plus);
+ } else if (mediaType == MIMETYPE_VIDEO_APV) {
+ return std::make_shared<ApvProfileLevelMapper>(true, isHdr10Plus);
}
return nullptr;
}
@@ -779,6 +894,8 @@
return GetProfileLevelMapper(mediaType);
} else if (mediaType == MIMETYPE_VIDEO_AV1 && bitDepth == 10) {
return std::make_shared<Av1ProfileLevelMapper>(false, false, bitDepth);
+ } else if (mediaType == MIMETYPE_VIDEO_APV) {
+ return std::make_shared<ApvProfileLevelMapper>();
}
return nullptr;
}
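
Reviewer note: the new sApv* tables follow the existing ALookup pattern: one table of (C2 value, SDK value) pairs that can be mapped in either direction, with the HDR/HDR10+ variants simply restricting which pairs are used. A rough standalone sketch of that two-way pairing; ALookup itself is an Android utility, and both the stand-in type and the constants below are illustrative only:

#include <cstdint>
#include <cstdio>
#include <utility>
#include <vector>

// Stand-in for ALookup<A, B>: a flat list of pairs searchable from either side.
template <typename A, typename B>
struct PairLookup {
    std::vector<std::pair<A, B>> entries;
    bool map(A from, B* to) const {
        for (const auto& e : entries) { if (e.first == from) { *to = e.second; return true; } }
        return false;
    }
    bool map(B from, A* to) const {
        for (const auto& e : entries) { if (e.second == from) { *to = e.first; return true; } }
        return false;
    }
};

// Illustrative constants only; the real values live in C2Config.h and the SDK.
enum class C2Profile : uint32_t { APV_422_10 = 0xC000 };
constexpr int32_t kAPVProfile422_10 = 1;
constexpr int32_t kAPVProfile422_10HDR10 = 2;

int main() {
    PairLookup<C2Profile, int32_t> profiles{{
        { C2Profile::APV_422_10, kAPVProfile422_10 },
        { C2Profile::APV_422_10, kAPVProfile422_10HDR10 },  // HDR alias of the same C2 profile
    }};
    int32_t sdk = 0;
    C2Profile c2{};
    profiles.map(C2Profile::APV_422_10, &sdk);   // C2 -> SDK: first matching entry
    profiles.map(kAPVProfile422_10HDR10, &c2);   // SDK -> C2: alias maps back to 422_10
    std::printf("sdk=%d, roundtrip=%d\n", sdk, int(c2 == C2Profile::APV_422_10));
    return 0;
}
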
diff --git a/media/janitors/media_solutions_OWNERS b/media/janitors/media_solutions_OWNERS
index 17bc7dd..95c2b97 100644
--- a/media/janitors/media_solutions_OWNERS
+++ b/media/janitors/media_solutions_OWNERS
@@ -1,22 +1,19 @@
# Bug component: 1344
# go/android-fwk-media-solutions for info on areas of ownership.
-# MediaRouter and native mirroring only:
-adadukin@google.com
-aquilescanta@google.com
-bishoygendy@google.com
-ivanbuper@google.com
-
-# MediaMuxer, MediaRecorder, and seamless transcoding only:
andrewlewis@google.com
-claincly@google.com
-
-# Everything in go/android-fwk-media-solutions not covered above:
bachinger@google.com
-christosts@google.com
+claincly@google.com
+dancho@google.com
ibaker@google.com
+ivanbuper@google.com
jbibik@google.com
michaelkatz@google.com
rohks@google.com
+sheenachhabra@google.com
+simakova@google.com
tianyifeng@google.com
tonihei@google.com
+
+# MediaRouter and native mirroring only:
+aquilescanta@google.com
diff --git a/media/libaaudio/fuzzer/Android.bp b/media/libaaudio/fuzzer/Android.bp
index f4637e3..a1ed6a0 100644
--- a/media/libaaudio/fuzzer/Android.bp
+++ b/media/libaaudio/fuzzer/Android.bp
@@ -37,12 +37,16 @@
],
shared_libs: [
"com.android.media.aaudio-aconfig-cc",
+ "libaconfig_storage_read_api_cc",
"libaudio_aidl_conversion_common_cpp",
"libaudioclient_aidl_conversion",
"libaudiomanager",
"libaudiopolicy",
"libbinder",
+ "libbinder_ndk",
+ "libmediautils",
"libutils",
+ "server_configurable_flags",
],
static_libs: [
"aaudio-aidl-cpp",
diff --git a/media/libaaudio/include/aaudio/AAudioTesting.h b/media/libaaudio/include/aaudio/AAudioTesting.h
index d67ec70..16d6c33 100644
--- a/media/libaaudio/include/aaudio/AAudioTesting.h
+++ b/media/libaaudio/include/aaudio/AAudioTesting.h
@@ -31,24 +31,225 @@
* They may change or be removed at any time.
************************************************************************************/
+/**
+ * When audio is played or recorded via the AAudio MMAP data path, apps write to or read from
+ * shared memory that is also accessed directly by the hardware, which reduces audio latency.
+ * The following values describe how AAudio MMAP is supported.
+ */
enum {
/**
- * Related feature is disabled and never used.
+ * AAudio MMAP is disabled and never used.
*/
AAUDIO_POLICY_NEVER = 1,
/**
- * If related feature works then use it. Otherwise fall back to something else.
+ * AAudio MMAP is used when the device supports it; otherwise the stream falls back
+ * to the normal path, where audio data is delivered via the audio framework
+ * data pipeline.
*/
- AAUDIO_POLICY_AUTO,
+ AAUDIO_POLICY_AUTO,
/**
- * Related feature must be used. If not available then fail.
+ * AAudio MMAP must be used. If it is not available, opening the stream will fail.
*/
AAUDIO_POLICY_ALWAYS
};
typedef int32_t aaudio_policy_t;
+// These values are copied from the Java SDK device types defined in
+// android/media/AudioDeviceInfo.java. When a new value is added there, it should be added here
+// and handled by the conversion in AAudioConvert_aaudioToAndroidDeviceType.
+typedef enum AAudio_DeviceType : int32_t {
+ /**
+ * A device type describing the attached earphone speaker.
+ */
+ AAUDIO_DEVICE_BUILTIN_EARPIECE = 1,
+
+ /**
+ * A device type describing the speaker system (i.e. a mono speaker or stereo speakers) built
+ * in a device.
+ */
+ AAUDIO_DEVICE_BUILTIN_SPEAKER = 2,
+
+ /**
+ * A device type describing a headset, which is the combination of a headphones and microphone.
+ */
+ AAUDIO_DEVICE_WIRED_HEADSET = 3,
+
+ /**
+ * A device type describing a pair of wired headphones.
+ */
+ AAUDIO_DEVICE_WIRED_HEADPHONES = 4,
+
+ /**
+ * A device type describing an analog line-level connection.
+ */
+ AAUDIO_DEVICE_LINE_ANALOG = 5,
+
+ /**
+ * A device type describing a digital line connection (e.g. SPDIF).
+ */
+ AAUDIO_DEVICE_LINE_DIGITAL = 6,
+
+ /**
+ * A device type describing a Bluetooth device typically used for telephony.
+ */
+ AAUDIO_DEVICE_BLUETOOTH_SCO = 7,
+
+ /**
+ * A device type describing a Bluetooth device supporting the A2DP profile.
+ */
+ AAUDIO_DEVICE_BLUETOOTH_A2DP = 8,
+
+ /**
+ * A device type describing an HDMI connection.
+ */
+ AAUDIO_DEVICE_HDMI = 9,
+
+ /**
+ * A device type describing the Audio Return Channel of an HDMI connection.
+ */
+ AAUDIO_DEVICE_HDMI_ARC = 10,
+
+ /**
+ * A device type describing a USB audio device.
+ */
+ AAUDIO_DEVICE_USB_DEVICE = 11,
+
+ /**
+ * A device type describing a USB audio device in accessory mode.
+ */
+ AAUDIO_DEVICE_USB_ACCESSORY = 12,
+
+ /**
+ * A device type describing the audio device associated with a dock.
+ * Starting at API 34, this device type only represents digital docks, while docks with an
+ * analog connection are represented with {@link #AAUDIO_DEVICE_DOCK_ANALOG}.
+ */
+ AAUDIO_DEVICE_DOCK = 13,
+
+ /**
+ * A device type associated with the transmission of audio signals over FM.
+ */
+ AAUDIO_DEVICE_FM = 14,
+
+ /**
+ * A device type describing the microphone(s) built in a device.
+ */
+ AAUDIO_DEVICE_BUILTIN_MIC = 15,
+
+ /**
+ * A device type for accessing the audio content transmitted over FM.
+ */
+ AAUDIO_DEVICE_FM_TUNER = 16,
+
+ /**
+ * A device type for accessing the audio content transmitted over the TV tuner system.
+ */
+ AAUDIO_DEVICE_TV_TUNER = 17,
+
+ /**
+ * A device type describing the transmission of audio signals over the telephony network.
+ */
+ AAUDIO_DEVICE_TELEPHONY = 18,
+
+ /**
+ * A device type describing the auxiliary line-level connectors.
+ */
+ AAUDIO_DEVICE_AUX_LINE = 19,
+
+ /**
+ * A device type connected over IP.
+ */
+ AAUDIO_DEVICE_IP = 20,
+
+ /**
+ * A type-agnostic device used for communication with external audio systems.
+ */
+ AAUDIO_DEVICE_BUS = 21,
+
+ /**
+ * A device type describing a USB audio headset.
+ */
+ AAUDIO_DEVICE_USB_HEADSET = 22,
+
+ /**
+ * A device type describing a Hearing Aid.
+ */
+ AAUDIO_DEVICE_HEARING_AID = 23,
+
+ /**
+ * A device type describing the speaker system (i.e. a mono speaker or stereo speakers) built
+ * in a device, that is specifically tuned for outputting sounds like notifications and alarms
+ * (i.e. sounds the user couldn't necessarily anticipate).
+ * <p>Note that this physical audio device may be the same as {@link #AAUDIO_DEVICE_BUILTIN_SPEAKER}
+ * but is driven differently to safely accommodate the different use case.</p>
+ */
+ AAUDIO_DEVICE_BUILTIN_SPEAKER_SAFE = 24,
+
+ /**
+ * A device type for rerouting audio within the Android framework between mixes and
+ * system applications.
+ */
+ AAUDIO_DEVICE_REMOTE_SUBMIX = 25,
+
+ /**
+ * A device type describing a Bluetooth Low Energy (BLE) audio headset or headphones.
+ * Headphones are grouped with headsets when the device is a sink:
+ * the features of headsets and headphones with regard to playback are the same.
+ */
+ AAUDIO_DEVICE_BLE_HEADSET = 26,
+
+ /**
+ * A device type describing a Bluetooth Low Energy (BLE) audio speaker.
+ */
+ AAUDIO_DEVICE_BLE_SPEAKER = 27,
+
+ /**
+ * A device type describing an Echo Canceller loopback Reference.
+ * This device is only used when capturing with MediaRecorder.AudioSource.ECHO_REFERENCE,
+ * which requires privileged permission
+ * {@link android.Manifest.permission#CAPTURE_AUDIO_OUTPUT}.
+ *
+ * Note that this is not exposed as it is a system API that requires privileged permission.
+ */
+ // AAUDIO_DEVICE_ECHO_REFERENCE = 28,
+
+ /**
+ * A device type describing the Enhanced Audio Return Channel of an HDMI connection.
+ */
+ AAUDIO_DEVICE_HDMI_EARC = 29,
+
+ /**
+ * A device type describing a Bluetooth Low Energy (BLE) broadcast group.
+ */
+ AAUDIO_DEVICE_BLE_BROADCAST = 30,
+
+ /**
+ * A device type describing the audio device associated with a dock using an analog connection.
+ */
+ AAUDIO_DEVICE_DOCK_ANALOG = 31
+} AAudio_DeviceType;
+
+/**
+ * Query how AAudio MMAP is supported for the given device type.
+ *
+ * @param device the device type
+ * @param direction {@link AAUDIO_DIRECTION_OUTPUT} or {@link AAUDIO_DIRECTION_INPUT}
+ * @return the MMAP policy or a negative error code
+ */
+AAUDIO_API aaudio_policy_t AAudio_getPlatformMMapPolicy(
+ AAudio_DeviceType device, aaudio_direction_t direction) __INTRODUCED_IN(36);
+
+/**
+ * Query how AAudio exclusive MMAP is supported for the given device type.
+ *
+ * @param device the device type
+ * @param direction {@link AAUDIO_DIRECTION_OUTPUT} or {@link AAUDIO_DIRECTION_INPUT}
+ * @return the exclusive MMAP policy or a negative error code
+ */
+AAUDIO_API aaudio_policy_t AAudio_getPlatformMMapExclusivePolicy(
+ AAudio_DeviceType device, aaudio_direction_t direction) __INTRODUCED_IN(36);
+
/**
* Control whether AAudioStreamBuilder_openStream() will use the new MMAP data path
* or the older "Legacy" data path.
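As a rough usage sketch for the policy queries added above (assuming only the constants and
declarations from this header plus the standard AAudio direction values), a client could gate
a low-latency request on the platform MMAP policy:

    #include <aaudio/AAudio.h>
    #include <aaudio/AAudioTesting.h>

    // Illustrative only: check whether the platform allows MMAP output on the
    // built-in speaker before requesting a low-latency stream.
    static bool speakerMayUseMMap() {
        const aaudio_policy_t policy = AAudio_getPlatformMMapPolicy(
                AAUDIO_DEVICE_BUILTIN_SPEAKER, AAUDIO_DIRECTION_OUTPUT);
        // Negative values are error codes (e.g. an invalid device/direction pair).
        return policy == AAUDIO_POLICY_AUTO || policy == AAUDIO_POLICY_ALWAYS;
    }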
diff --git a/media/libaaudio/include/system/aaudio/AAudio.h b/media/libaaudio/include/system/aaudio/AAudio.h
new file mode 100644
index 0000000..933ad35
--- /dev/null
+++ b/media/libaaudio/include/system/aaudio/AAudio.h
@@ -0,0 +1,80 @@
+/*
+ * Copyright (C) 2024 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+/**
+ * System APIs for AAudio.
+ */
+#ifndef SYSTEM_AAUDIO_H
+#define SYSTEM_AAUDIO_H
+
+#include <aaudio/AAudio.h>
+
+#ifdef __cplusplus
+extern "C" {
+#endif
+
+/**
+ * The tags string attribute allows OEMs to extend the
+ * <a href="/reference/android/media/AudioAttributes">AudioAttributes</a>.
+ *
+ * Note that the maximum length includes all tags combined, with delimiters and the null
+ * terminator.
+ *
+ * Note that it matches the equivalent value
+ * <a href="/reference/android/system/media/audio">AUDIO_ATTRIBUTES_TAGS_MAX_SIZE</a>
+ * in the Android native API.
+ */
+#define AAUDIO_ATTRIBUTES_TAGS_MAX_SIZE 256
+
+/**
+ * Set one or more vendor extension tags that the output stream will carry.
+ *
+ * The tags can be used by the audio policy engine for routing purposes.
+ * Routing is based on audio attributes, translated into a legacy stream type.
+ * Since stream types cannot be extended, product strategies were introduced to allow
+ * vendor extensions of the routing capabilities.
+ * This could, for example, affect how volume and routing are handled for the stream.
+ *
+ * The tags can also be used by a System App to pass vendor specific information through the
+ * framework to the HAL. That info could affect routing, ducking or other audio behavior in the HAL.
+ *
+ * By default, audio attributes tags are empty if this method is not called.
+ *
+ * @param builder reference provided by AAudio_createStreamBuilder()
+ * @param tags the desired tags to add, which must be in UTF-8 format and null-terminated. The size
+ * of the tags must be at most {@link #AAUDIO_ATTRIBUTES_TAGS_MAX_SIZE}. Multiple tags
+ * must be separated by semicolons.
+ * @return {@link #AAUDIO_OK} on success or {@link #AAUDIO_ERROR_ILLEGAL_ARGUMENT} if the given
+ * tags is null or its size (including the null terminator) exceeds
+ * {@link #AAUDIO_ATTRIBUTES_TAGS_MAX_SIZE}.
+ */
+aaudio_result_t AAudioStreamBuilder_setTags(AAudioStreamBuilder* _Nonnull builder,
+ const char* _Nonnull tags);
+
+/**
+ * Read the audio attributes' tags of the stream into a buffer.
+ * The caller is responsible for allocating and freeing the buffer.
+ *
+ * @param stream reference provided by AAudioStreamBuilder_openStream()
+ * @param tags buffer to receive the UTF-8 string containing the OEM extension tags. It must
+ * be sized with {@link #AAUDIO_ATTRIBUTES_TAGS_MAX_SIZE}.
+ * @return {@link #AAUDIO_OK} or {@link #AAUDIO_ERROR_ILLEGAL_ARGUMENT} if the given buffer is null.
+ */
+aaudio_result_t AAudioStream_getTags(AAudioStream* _Nonnull stream, char* _Nonnull tags);
+
+#ifdef __cplusplus
+}
+#endif
+
+#endif //SYSTEM_AAUDIO_H
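A minimal usage sketch of the two system APIs declared in this new header, assuming the standard
AAudio builder/stream lifecycle and a stream that opens successfully:

    #include <aaudio/AAudio.h>
    #include <system/aaudio/AAudio.h>

    // Illustrative only: tag a builder with an OEM routing extension, then read
    // the tags back from the opened stream.
    void tagStreamSketch() {
        AAudioStreamBuilder *builder = nullptr;
        AAudioStream *stream = nullptr;
        if (AAudio_createStreamBuilder(&builder) != AAUDIO_OK) return;
        // Rejected with AAUDIO_ERROR_ILLEGAL_ARGUMENT if the string is null or too long.
        if (AAudioStreamBuilder_setTags(builder, "oem=routing_extension") == AAUDIO_OK &&
                AAudioStreamBuilder_openStream(builder, &stream) == AAUDIO_OK) {
            char tags[AAUDIO_ATTRIBUTES_TAGS_MAX_SIZE] = {};
            AAudioStream_getTags(stream, tags);  // expected to yield "oem=routing_extension"
            AAudioStream_close(stream);
        }
        AAudioStreamBuilder_delete(builder);
    }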
diff --git a/media/libaaudio/src/binding/AAudioStreamConfiguration.cpp b/media/libaaudio/src/binding/AAudioStreamConfiguration.cpp
index c4692ce..c53a897 100644
--- a/media/libaaudio/src/binding/AAudioStreamConfiguration.cpp
+++ b/media/libaaudio/src/binding/AAudioStreamConfiguration.cpp
@@ -50,7 +50,7 @@
setUsage(parcelable.usage);
static_assert(sizeof(aaudio_content_type_t) == sizeof(parcelable.contentType));
setContentType(parcelable.contentType);
-
+ setTags(parcelable.tags);
static_assert(sizeof(aaudio_spatialization_behavior_t) ==
sizeof(parcelable.spatializationBehavior));
setSpatializationBehavior(parcelable.spatializationBehavior);
@@ -106,6 +106,8 @@
result.usage = getUsage();
static_assert(sizeof(aaudio_content_type_t) == sizeof(result.contentType));
result.contentType = getContentType();
+ std::optional<std::string> tags = getTags();
+ result.tags = tags.has_value() ? tags.value() : "";
static_assert(
sizeof(aaudio_spatialization_behavior_t) == sizeof(result.spatializationBehavior));
result.spatializationBehavior = getSpatializationBehavior();
diff --git a/media/libaaudio/src/binding/aidl/aaudio/StreamParameters.aidl b/media/libaaudio/src/binding/aidl/aaudio/StreamParameters.aidl
index fa46e0d..a301da8 100644
--- a/media/libaaudio/src/binding/aidl/aaudio/StreamParameters.aidl
+++ b/media/libaaudio/src/binding/aidl/aaudio/StreamParameters.aidl
@@ -27,6 +27,7 @@
int /* aaudio_direction_t */ direction; // = AAUDIO_DIRECTION_OUTPUT;
int /* aaudio_usage_t */ usage; // = AAUDIO_UNSPECIFIED;
int /* aaudio_content_type_t */ contentType; // = AAUDIO_UNSPECIFIED;
+ @utf8InCpp String tags; /* UTF8 */
int /* aaudio_spatialization_behavior_t */spatializationBehavior; //= AAUDIO_UNSPECIFIED;
boolean isContentSpatialized; // = false;
int /* aaudio_input_preset_t */ inputPreset; // = AAUDIO_UNSPECIFIED;
diff --git a/media/libaaudio/src/client/AudioStreamInternal.cpp b/media/libaaudio/src/client/AudioStreamInternal.cpp
index fa3f5a0..99b90e2 100644
--- a/media/libaaudio/src/client/AudioStreamInternal.cpp
+++ b/media/libaaudio/src/client/AudioStreamInternal.cpp
@@ -129,6 +129,7 @@
request.getConfiguration().setUsage(getUsage());
request.getConfiguration().setContentType(getContentType());
+ request.getConfiguration().setTags(getTags());
request.getConfiguration().setSpatializationBehavior(getSpatializationBehavior());
request.getConfiguration().setIsContentSpatialized(isContentSpatialized());
request.getConfiguration().setInputPreset(getInputPreset());
@@ -185,6 +186,7 @@
setUsage(configurationOutput.getUsage());
setContentType(configurationOutput.getContentType());
+ setTags(configurationOutput.getTags());
setSpatializationBehavior(configurationOutput.getSpatializationBehavior());
setIsContentSpatialized(configurationOutput.isContentSpatialized());
setInputPreset(configurationOutput.getInputPreset());
diff --git a/media/libaaudio/src/core/AAudioAudio.cpp b/media/libaaudio/src/core/AAudioAudio.cpp
index 1e8ac8d..fb87dd9 100644
--- a/media/libaaudio/src/core/AAudioAudio.cpp
+++ b/media/libaaudio/src/core/AAudioAudio.cpp
@@ -25,6 +25,7 @@
#include <aaudio/AAudio.h>
#include <aaudio/AAudioTesting.h>
+#include <system/aaudio/AAudio.h>
#include "AudioClock.h"
#include "AudioGlobal.h"
#include "AudioStreamBuilder.h"
@@ -53,6 +54,16 @@
return AudioGlobal_convertStreamStateToText(state);
}
+AAUDIO_API aaudio_policy_t AAudio_getPlatformMMapPolicy(
+ AAudio_DeviceType device, aaudio_direction_t direction) {
+ return AudioGlobal_getPlatformMMapPolicy(device, direction);
+}
+
+AAUDIO_API aaudio_policy_t AAudio_getPlatformMMapExclusivePolicy(
+ AAudio_DeviceType device, aaudio_direction_t direction) {
+ return AudioGlobal_getPlatformMMapExclusivePolicy(device, direction);
+}
+
static AudioStream *convertAAudioStreamToAudioStream(AAudioStream* stream)
{
return (AudioStream*) stream;
@@ -167,6 +178,17 @@
streamBuilder->setContentType(contentType);
}
+AAUDIO_API aaudio_result_t AAudioStreamBuilder_setTags(AAudioStreamBuilder* builder,
+ const char* tags) {
+ if (tags == nullptr || strlen(tags) >= AAUDIO_ATTRIBUTES_TAGS_MAX_SIZE) {
+ return AAUDIO_ERROR_ILLEGAL_ARGUMENT;
+ }
+ AudioStreamBuilder *streamBuilder = convertAAudioBuilderToStreamBuilder(builder);
+ std::optional<std::string> optionalTags = std::string(tags);
+ streamBuilder->setTags(optionalTags);
+ return AAUDIO_OK;
+}
+
AAUDIO_API void AAudioStreamBuilder_setSpatializationBehavior(AAudioStreamBuilder* builder,
aaudio_spatialization_behavior_t spatializationBehavior) {
AudioStreamBuilder *streamBuilder = convertAAudioBuilderToStreamBuilder(builder);
@@ -536,6 +558,22 @@
return audioStream->getContentType();
}
+AAUDIO_API aaudio_result_t AAudioStream_getTags(AAudioStream* stream, char* tags)
+{
+ if (tags == nullptr) {
+ return AAUDIO_ERROR_ILLEGAL_ARGUMENT;
+ }
+ AudioStream *audioStream = convertAAudioStreamToAudioStream(stream);
+ std::optional<std::string> optTags = audioStream->getTags();
+ if (optTags.has_value() && !optTags->empty()) {
+ strncpy(tags, optTags.value().c_str(), AAUDIO_ATTRIBUTES_TAGS_MAX_SIZE);
+ tags[AAUDIO_ATTRIBUTES_TAGS_MAX_SIZE-1] = '\0';
+ } else {
+ tags[0] = '\0';
+ }
+ return AAUDIO_OK;
+}
+
AAUDIO_API aaudio_spatialization_behavior_t AAudioStream_getSpatializationBehavior(
AAudioStream* stream)
{
diff --git a/media/libaaudio/src/core/AAudioStreamParameters.cpp b/media/libaaudio/src/core/AAudioStreamParameters.cpp
index 67fc668..056918a 100644
--- a/media/libaaudio/src/core/AAudioStreamParameters.cpp
+++ b/media/libaaudio/src/core/AAudioStreamParameters.cpp
@@ -18,6 +18,7 @@
#define LOG_TAG "AAudioStreamParameters"
#include <utils/Log.h>
#include <system/audio.h>
+#include <system/aaudio/AAudio.h>
#include "AAudioStreamParameters.h"
@@ -34,6 +35,7 @@
mBufferCapacity = other.mBufferCapacity;
mUsage = other.mUsage;
mContentType = other.mContentType;
+ mTags = other.mTags;
mSpatializationBehavior = other.mSpatializationBehavior;
mIsContentSpatialized = other.mIsContentSpatialized;
mInputPreset = other.mInputPreset;
@@ -199,6 +201,10 @@
// break;
}
+ if (mTags.has_value() && mTags->size() >= AAUDIO_ATTRIBUTES_TAGS_MAX_SIZE) {
+ return AAUDIO_ERROR_ILLEGAL_ARGUMENT;
+ }
+
return validateChannelMask();
}
@@ -301,6 +307,7 @@
ALOGD("mBufferCapacity = %6d", mBufferCapacity);
ALOGD("mUsage = %6d", mUsage);
ALOGD("mContentType = %6d", mContentType);
+ ALOGD("mTags = %s", mTags.has_value() ? mTags.value().c_str() : "");
ALOGD("mSpatializationBehavior = %6d", mSpatializationBehavior);
ALOGD("mIsContentSpatialized = %s", mIsContentSpatialized ? "true" : "false");
ALOGD("mInputPreset = %6d", mInputPreset);
diff --git a/media/libaaudio/src/core/AAudioStreamParameters.h b/media/libaaudio/src/core/AAudioStreamParameters.h
index 7c78f03..cad27a7 100644
--- a/media/libaaudio/src/core/AAudioStreamParameters.h
+++ b/media/libaaudio/src/core/AAudioStreamParameters.h
@@ -97,6 +97,14 @@
mContentType = contentType;
}
+ void setTags(const std::optional<std::string>& tags) {
+ mTags = tags;
+ }
+
+ const std::optional<std::string> getTags() const {
+ return mTags;
+ }
+
aaudio_spatialization_behavior_t getSpatializationBehavior() const {
return mSpatializationBehavior;
}
@@ -223,6 +231,7 @@
aaudio_direction_t mDirection = AAUDIO_DIRECTION_OUTPUT;
aaudio_usage_t mUsage = AAUDIO_UNSPECIFIED;
aaudio_content_type_t mContentType = AAUDIO_UNSPECIFIED;
+ std::optional<std::string> mTags = {};
aaudio_spatialization_behavior_t mSpatializationBehavior
= AAUDIO_UNSPECIFIED;
bool mIsContentSpatialized = false;
diff --git a/media/libaaudio/src/core/AudioGlobal.cpp b/media/libaaudio/src/core/AudioGlobal.cpp
index 30f9677..3268488 100644
--- a/media/libaaudio/src/core/AudioGlobal.cpp
+++ b/media/libaaudio/src/core/AudioGlobal.cpp
@@ -15,6 +15,13 @@
*/
#include <aaudio/AAudio.h>
#include <aaudio/AAudioTesting.h>
+#include <android/media/audio/common/AudioDevice.h>
+#include <android/media/audio/common/AudioMMapPolicyInfo.h>
+#include <android/media/audio/common/AudioMMapPolicyType.h>
+#include <media/AidlConversionCppNdk.h>
+#include <media/AudioSystem.h>
+#include <system/audio-hal-enums.h>
+#include <utility/AAudioUtilities.h>
#include "AudioGlobal.h"
@@ -23,6 +30,10 @@
*/
namespace aaudio {
+using android::media::audio::common::AudioDevice;
+using android::media::audio::common::AudioMMapPolicyInfo;
+using android::media::audio::common::AudioMMapPolicyType;
+
static aaudio_policy_t g_MMapPolicy = AAUDIO_UNSPECIFIED;
aaudio_policy_t AudioGlobal_getMMapPolicy() {
@@ -132,6 +143,39 @@
return "Unrecognized";
}
+namespace {
+
+aaudio_policy_t getPlatformMMapPolicy(AudioMMapPolicyType policyType, AAudio_DeviceType device,
+ aaudio_direction_t direction) {
+ if (direction != AAUDIO_DIRECTION_INPUT && direction != AAUDIO_DIRECTION_OUTPUT) {
+ return AAUDIO_ERROR_ILLEGAL_ARGUMENT;
+ }
+ const audio_devices_t deviceType = AAudioConvert_aaudioToAndroidDeviceType(device, direction);
+ if (deviceType == AUDIO_DEVICE_NONE) {
+ return AAUDIO_ERROR_ILLEGAL_ARGUMENT;
+ }
+
+ AudioMMapPolicyInfo policyInfo;
+ if (android::status_t status = android::AudioSystem::getMmapPolicyForDevice(
+ policyType, deviceType, &policyInfo);
+ status != android::NO_ERROR) {
+ return AAudioConvert_androidToAAudioResult(status);
+ }
+ return AAudioConvert_androidToAAudioMMapPolicy(policyInfo.mmapPolicy);
+}
+
+} // namespace
+
+aaudio_policy_t AudioGlobal_getPlatformMMapPolicy(
+ AAudio_DeviceType device, aaudio_direction_t direction) {
+ return getPlatformMMapPolicy(AudioMMapPolicyType::DEFAULT, device, direction);
+}
+
+aaudio_policy_t AudioGlobal_getPlatformMMapExclusivePolicy(
+ AAudio_DeviceType device, aaudio_direction_t direction) {
+ return getPlatformMMapPolicy(AudioMMapPolicyType::EXCLUSIVE, device, direction);
+}
+
#undef AAUDIO_CASE_ENUM
} // namespace aaudio
diff --git a/media/libaaudio/src/core/AudioGlobal.h b/media/libaaudio/src/core/AudioGlobal.h
index 8af49b4..7ff344b 100644
--- a/media/libaaudio/src/core/AudioGlobal.h
+++ b/media/libaaudio/src/core/AudioGlobal.h
@@ -40,6 +40,11 @@
const char* AudioGlobal_convertSharingModeToText(aaudio_sharing_mode_t mode);
const char* AudioGlobal_convertStreamStateToText(aaudio_stream_state_t state);
+aaudio_policy_t AudioGlobal_getPlatformMMapPolicy(
+ AAudio_DeviceType device, aaudio_direction_t direction);
+aaudio_policy_t AudioGlobal_getPlatformMMapExclusivePolicy(
+ AAudio_DeviceType device, aaudio_direction_t direction);
+
} // namespace aaudio
#endif // AAUDIO_AUDIOGLOBAL_H
diff --git a/media/libaaudio/src/core/AudioStream.cpp b/media/libaaudio/src/core/AudioStream.cpp
index e0fd325..a75a2a1 100644
--- a/media/libaaudio/src/core/AudioStream.cpp
+++ b/media/libaaudio/src/core/AudioStream.cpp
@@ -93,6 +93,7 @@
if (mContentType == AAUDIO_UNSPECIFIED) {
mContentType = AAUDIO_CONTENT_TYPE_MUSIC;
}
+ mTags = builder.getTags();
mSpatializationBehavior = builder.getSpatializationBehavior();
// for consistency with other properties, note UNSPECIFIED is the same as AUTO
if (mSpatializationBehavior == AAUDIO_UNSPECIFIED) {
diff --git a/media/libaaudio/src/core/AudioStream.h b/media/libaaudio/src/core/AudioStream.h
index 49a63c4..3271882 100644
--- a/media/libaaudio/src/core/AudioStream.h
+++ b/media/libaaudio/src/core/AudioStream.h
@@ -290,6 +290,10 @@
return mContentType;
}
+ const std::optional<std::string> getTags() const {
+ return mTags;
+ }
+
aaudio_spatialization_behavior_t getSpatializationBehavior() const {
return mSpatializationBehavior;
}
@@ -687,6 +691,13 @@
mContentType = contentType;
}
+ /**
+ * This should not be called after the open() call.
+ */
+ void setTags(const std::optional<std::string> &tags) {
+ mTags = tags;
+ }
+
void setSpatializationBehavior(aaudio_spatialization_behavior_t spatializationBehavior) {
mSpatializationBehavior = spatializationBehavior;
}
@@ -776,6 +787,7 @@
aaudio_usage_t mUsage = AAUDIO_UNSPECIFIED;
aaudio_content_type_t mContentType = AAUDIO_UNSPECIFIED;
+ std::optional<std::string> mTags = {};
aaudio_spatialization_behavior_t mSpatializationBehavior = AAUDIO_UNSPECIFIED;
bool mIsContentSpatialized = false;
aaudio_input_preset_t mInputPreset = AAUDIO_UNSPECIFIED;
diff --git a/media/libaaudio/src/core/AudioStreamBuilder.cpp b/media/libaaudio/src/core/AudioStreamBuilder.cpp
index 01f0038..b0dc669 100644
--- a/media/libaaudio/src/core/AudioStreamBuilder.cpp
+++ b/media/libaaudio/src/core/AudioStreamBuilder.cpp
@@ -110,7 +110,7 @@
std::vector<AudioMMapPolicyInfo> policyInfos;
aaudio_policy_t mmapPolicy = AudioGlobal_getMMapPolicy();
ALOGD("%s, global mmap policy is %d", __func__, mmapPolicy);
- if (status_t status = android::AudioSystem::getMmapPolicyInfo(
+ if (status_t status = android::AudioSystem::getMmapPolicyInfos(
AudioMMapPolicyType::DEFAULT, &policyInfos); status == NO_ERROR) {
aaudio_policy_t systemMmapPolicy = AAudio_getAAudioPolicy(
policyInfos, AAUDIO_MMAP_POLICY_DEFAULT_AIDL);
@@ -143,7 +143,7 @@
policyInfos.clear();
aaudio_policy_t mmapExclusivePolicy = AAUDIO_UNSPECIFIED;
- if (status_t status = android::AudioSystem::getMmapPolicyInfo(
+ if (status_t status = android::AudioSystem::getMmapPolicyInfos(
AudioMMapPolicyType::EXCLUSIVE, &policyInfos); status == NO_ERROR) {
mmapExclusivePolicy = AAudio_getAAudioPolicy(
policyInfos, AAUDIO_MMAP_EXCLUSIVE_POLICY_DEFAULT_AIDL);
diff --git a/media/libaaudio/src/legacy/AudioStreamTrack.cpp b/media/libaaudio/src/legacy/AudioStreamTrack.cpp
index d729047..16c0bcd 100644
--- a/media/libaaudio/src/legacy/AudioStreamTrack.cpp
+++ b/media/libaaudio/src/legacy/AudioStreamTrack.cpp
@@ -146,14 +146,14 @@
builder.isContentSpatialized(),
flags);
- const audio_attributes_t attributes = {
- .content_type = contentType,
- .usage = usage,
- .source = AUDIO_SOURCE_DEFAULT, // only used for recording
- .flags = attributesFlags,
- .tags = ""
- };
-
+ const std::optional<std::string> tags = builder.getTags();
+ audio_attributes_t attributes = AUDIO_ATTRIBUTES_INITIALIZER;
+ attributes.content_type = contentType;
+ attributes.usage = usage;
+ attributes.flags = attributesFlags;
+ if (tags.has_value() && !tags.value().empty()) {
+ strcpy(attributes.tags, tags.value().c_str());
+ }
mAudioTrack = new AudioTrack();
// TODO b/182392769: use attribution source util
mAudioTrack->set(
diff --git a/media/libaaudio/src/libaaudio.map.txt b/media/libaaudio/src/libaaudio.map.txt
index e28dcb4..13c19a1 100644
--- a/media/libaaudio/src/libaaudio.map.txt
+++ b/media/libaaudio/src/libaaudio.map.txt
@@ -70,6 +70,11 @@
AAudioStream_getHardwareChannelCount; # introduced=UpsideDownCake
AAudioStream_getHardwareFormat; # introduced=UpsideDownCake
AAudioStream_getHardwareSampleRate; # introduced=UpsideDownCake
+ AAudio_getPlatformMMapPolicy; # introduced=36
+ AAudio_getPlatformMMapExclusivePolicy; # introduced=36
+
+ AAudioStreamBuilder_setTags; # systemapi
+ AAudioStream_getTags; # systemapi
local:
*;
};
diff --git a/media/libaaudio/src/utility/AAudioUtilities.cpp b/media/libaaudio/src/utility/AAudioUtilities.cpp
index 3df23ee..c741946 100644
--- a/media/libaaudio/src/utility/AAudioUtilities.cpp
+++ b/media/libaaudio/src/utility/AAudioUtilities.cpp
@@ -693,3 +693,128 @@
}
return aidl2legacy_aaudio_policy(policy);
}
+
+audio_devices_t AAudioConvert_aaudioToAndroidDeviceType(AAudio_DeviceType device,
+ aaudio_direction_t direction) {
+ if (direction == AAUDIO_DIRECTION_INPUT) {
+ switch (device) {
+ case AAUDIO_DEVICE_BUILTIN_MIC:
+ return AUDIO_DEVICE_IN_BUILTIN_MIC;
+ case AAUDIO_DEVICE_BLUETOOTH_SCO:
+ return AUDIO_DEVICE_IN_BLUETOOTH_SCO_HEADSET;
+ case AAUDIO_DEVICE_WIRED_HEADSET:
+ return AUDIO_DEVICE_IN_WIRED_HEADSET;
+ case AAUDIO_DEVICE_HDMI:
+ return AUDIO_DEVICE_IN_HDMI;
+ case AAUDIO_DEVICE_TELEPHONY:
+ return AUDIO_DEVICE_IN_TELEPHONY_RX;
+ case AAUDIO_DEVICE_DOCK:
+ return AUDIO_DEVICE_IN_DGTL_DOCK_HEADSET;
+ case AAUDIO_DEVICE_DOCK_ANALOG:
+ return AUDIO_DEVICE_IN_ANLG_DOCK_HEADSET;
+ case AAUDIO_DEVICE_USB_ACCESSORY:
+ return AUDIO_DEVICE_IN_USB_ACCESSORY;
+ case AAUDIO_DEVICE_USB_DEVICE:
+ return AUDIO_DEVICE_IN_USB_DEVICE;
+ case AAUDIO_DEVICE_USB_HEADSET:
+ return AUDIO_DEVICE_IN_USB_HEADSET;
+ case AAUDIO_DEVICE_FM_TUNER:
+ return AUDIO_DEVICE_IN_FM_TUNER;
+ case AAUDIO_DEVICE_TV_TUNER:
+ return AUDIO_DEVICE_IN_TV_TUNER;
+ case AAUDIO_DEVICE_LINE_ANALOG:
+ return AUDIO_DEVICE_IN_LINE;
+ case AAUDIO_DEVICE_LINE_DIGITAL:
+ return AUDIO_DEVICE_IN_SPDIF;
+ case AAUDIO_DEVICE_BLUETOOTH_A2DP:
+ return AUDIO_DEVICE_IN_BLUETOOTH_A2DP;
+ case AAUDIO_DEVICE_IP:
+ return AUDIO_DEVICE_IN_IP;
+ case AAUDIO_DEVICE_BUS:
+ return AUDIO_DEVICE_IN_BUS;
+ case AAUDIO_DEVICE_REMOTE_SUBMIX:
+ return AUDIO_DEVICE_IN_REMOTE_SUBMIX;
+ case AAUDIO_DEVICE_BLE_HEADSET:
+ return AUDIO_DEVICE_IN_BLE_HEADSET;
+ case AAUDIO_DEVICE_HDMI_ARC:
+ return AUDIO_DEVICE_IN_HDMI_ARC;
+ case AAUDIO_DEVICE_HDMI_EARC:
+ return AUDIO_DEVICE_IN_HDMI_EARC;
+ default:
+ break;
+ }
+ } else {
+ switch (device) {
+ case AAUDIO_DEVICE_BUILTIN_EARPIECE:
+ return AUDIO_DEVICE_OUT_EARPIECE;
+ case AAUDIO_DEVICE_BUILTIN_SPEAKER:
+ return AUDIO_DEVICE_OUT_SPEAKER;
+ case AAUDIO_DEVICE_WIRED_HEADSET:
+ return AUDIO_DEVICE_OUT_WIRED_HEADSET;
+ case AAUDIO_DEVICE_WIRED_HEADPHONES:
+ return AUDIO_DEVICE_OUT_WIRED_HEADPHONE;
+ case AAUDIO_DEVICE_LINE_ANALOG:
+ return AUDIO_DEVICE_OUT_LINE;
+ case AAUDIO_DEVICE_LINE_DIGITAL:
+ return AUDIO_DEVICE_OUT_SPDIF;
+ case AAUDIO_DEVICE_BLUETOOTH_SCO:
+ return AUDIO_DEVICE_OUT_BLUETOOTH_SCO;
+ case AAUDIO_DEVICE_BLUETOOTH_A2DP:
+ return AUDIO_DEVICE_OUT_BLUETOOTH_A2DP;
+ case AAUDIO_DEVICE_HDMI:
+ return AUDIO_DEVICE_OUT_HDMI;
+ case AAUDIO_DEVICE_HDMI_ARC:
+ return AUDIO_DEVICE_OUT_HDMI_ARC;
+ case AAUDIO_DEVICE_HDMI_EARC:
+ return AUDIO_DEVICE_OUT_HDMI_EARC;
+ case AAUDIO_DEVICE_USB_DEVICE:
+ return AUDIO_DEVICE_OUT_USB_DEVICE;
+ case AAUDIO_DEVICE_USB_HEADSET:
+ return AUDIO_DEVICE_OUT_USB_HEADSET;
+ case AAUDIO_DEVICE_USB_ACCESSORY:
+ return AUDIO_DEVICE_OUT_USB_ACCESSORY;
+ case AAUDIO_DEVICE_DOCK:
+ return AUDIO_DEVICE_OUT_DGTL_DOCK_HEADSET;
+ case AAUDIO_DEVICE_DOCK_ANALOG:
+ return AUDIO_DEVICE_OUT_ANLG_DOCK_HEADSET;
+ case AAUDIO_DEVICE_FM:
+ return AUDIO_DEVICE_OUT_FM;
+ case AAUDIO_DEVICE_TELEPHONY:
+ return AUDIO_DEVICE_OUT_TELEPHONY_TX;
+ case AAUDIO_DEVICE_AUX_LINE:
+ return AUDIO_DEVICE_OUT_AUX_LINE;
+ case AAUDIO_DEVICE_IP:
+ return AUDIO_DEVICE_OUT_IP;
+ case AAUDIO_DEVICE_BUS:
+ return AUDIO_DEVICE_OUT_BUS;
+ case AAUDIO_DEVICE_HEARING_AID:
+ return AUDIO_DEVICE_OUT_HEARING_AID;
+ case AAUDIO_DEVICE_BUILTIN_SPEAKER_SAFE:
+ return AUDIO_DEVICE_OUT_SPEAKER_SAFE;
+ case AAUDIO_DEVICE_REMOTE_SUBMIX:
+ return AUDIO_DEVICE_OUT_REMOTE_SUBMIX;
+ case AAUDIO_DEVICE_BLE_HEADSET:
+ return AUDIO_DEVICE_OUT_BLE_HEADSET;
+ case AAUDIO_DEVICE_BLE_SPEAKER:
+ return AUDIO_DEVICE_OUT_BLE_SPEAKER;
+ case AAUDIO_DEVICE_BLE_BROADCAST:
+ return AUDIO_DEVICE_OUT_BLE_BROADCAST;
+ default:
+ break;
+ }
+ }
+ return AUDIO_DEVICE_NONE;
+}
+
+aaudio_policy_t AAudioConvert_androidToAAudioMMapPolicy(AudioMMapPolicy policy) {
+ switch (policy) {
+ case AudioMMapPolicy::AUTO:
+ return AAUDIO_POLICY_AUTO;
+ case AudioMMapPolicy::ALWAYS:
+ return AAUDIO_POLICY_ALWAYS;
+ case AudioMMapPolicy::NEVER:
+ case AudioMMapPolicy::UNSPECIFIED:
+ default:
+ return AAUDIO_POLICY_NEVER;
+ }
+}
diff --git a/media/libaaudio/src/utility/AAudioUtilities.h b/media/libaaudio/src/utility/AAudioUtilities.h
index 7c351e1..d5069f5 100644
--- a/media/libaaudio/src/utility/AAudioUtilities.h
+++ b/media/libaaudio/src/utility/AAudioUtilities.h
@@ -363,4 +363,14 @@
android::media::audio::common::AudioMMapPolicy defaultPolicy =
android::media::audio::common::AudioMMapPolicy::NEVER);
+/**
+ * Convert the aaudio device type to android device type. Returns AUDIO_DEVICE_NONE if
+ * the given device is not a valid one.
+ */
+audio_devices_t AAudioConvert_aaudioToAndroidDeviceType(
+ AAudio_DeviceType device, aaudio_direction_t direction);
+
+aaudio_policy_t AAudioConvert_androidToAAudioMMapPolicy(
+ android::media::audio::common::AudioMMapPolicy policy);
+
#endif //UTILITY_AAUDIO_UTILITIES_H
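For reference, a brief sketch of how the conversion helper declared above is meant to be used,
mirroring its use in AudioGlobal.cpp earlier in this change:

    // Illustrative only: map an AAudio device/direction pair to the legacy type
    // and reject unsupported combinations.
    const audio_devices_t deviceType = AAudioConvert_aaudioToAndroidDeviceType(
            AAUDIO_DEVICE_BLE_HEADSET, AAUDIO_DIRECTION_OUTPUT);  // AUDIO_DEVICE_OUT_BLE_HEADSET
    if (deviceType == AUDIO_DEVICE_NONE) {
        // Invalid device for this direction; callers report AAUDIO_ERROR_ILLEGAL_ARGUMENT.
    }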
diff --git a/media/libaaudio/tests/test_attributes.cpp b/media/libaaudio/tests/test_attributes.cpp
index e5676a7..045c236 100644
--- a/media/libaaudio/tests/test_attributes.cpp
+++ b/media/libaaudio/tests/test_attributes.cpp
@@ -26,6 +26,8 @@
#include <aaudio/AAudio.h>
#include <gtest/gtest.h>
+#include <system/audio.h>
+#include <system/aaudio/AAudio.h>
constexpr int64_t kNanosPerSecond = 1000000000;
constexpr int kNumFrames = 256;
@@ -36,6 +38,7 @@
static void checkAttributes(aaudio_performance_mode_t perfMode,
aaudio_usage_t usage,
aaudio_content_type_t contentType,
+ const char * tags = nullptr,
aaudio_input_preset_t preset = DONT_SET,
aaudio_allowed_capture_policy_t capturePolicy = DONT_SET,
int privacyMode = DONT_SET,
@@ -45,6 +48,7 @@
AAudioStreamBuilder *aaudioBuilder = nullptr;
AAudioStream *aaudioStream = nullptr;
+ aaudio_result_t expectedSetTagsResult = AAUDIO_OK;
// Use an AAudioStreamBuilder to contain requested parameters.
ASSERT_EQ(AAUDIO_OK, AAudio_createStreamBuilder(&aaudioBuilder));
@@ -60,6 +64,12 @@
if (contentType != DONT_SET) {
AAudioStreamBuilder_setContentType(aaudioBuilder, contentType);
}
+ if (tags != nullptr) {
+ aaudio_result_t result = AAudioStreamBuilder_setTags(aaudioBuilder, tags);
+ expectedSetTagsResult = (strlen(tags) >= AUDIO_ATTRIBUTES_TAGS_MAX_SIZE) ?
+ AAUDIO_ERROR_ILLEGAL_ARGUMENT : AAUDIO_OK;
+ EXPECT_EQ(result, expectedSetTagsResult);
+ }
if (preset != DONT_SET) {
AAudioStreamBuilder_setInputPreset(aaudioBuilder, preset);
}
@@ -87,6 +97,20 @@
: contentType;
EXPECT_EQ(expectedContentType, AAudioStream_getContentType(aaudioStream));
+ char readTags[AAUDIO_ATTRIBUTES_TAGS_MAX_SIZE] = {};
+ EXPECT_EQ(AAUDIO_OK, AAudioStream_getTags(aaudioStream, readTags))
+ << "Expected tags=" << (tags != nullptr ? tags : "null") << ", got tags=" << readTags;;
+ EXPECT_LT(strlen(readTags), AUDIO_ATTRIBUTES_TAGS_MAX_SIZE)
+ << "expected tags len " << strlen(readTags) << " less than "
+ << AUDIO_ATTRIBUTES_TAGS_MAX_SIZE;
+
+ // If tags were null or setting them failed, expect empty tags (the default initializer)
+ const char * expectedTags = tags == nullptr ?
+ "" : (expectedSetTagsResult != AAUDIO_OK ? "" : tags);
+ // Oversized tags will be discarded
+ EXPECT_TRUE(std::strcmp(expectedTags, readTags) == 0)
+ << "Expected tags=" << expectedTags << ", got tags=" << readTags;
+
aaudio_input_preset_t expectedPreset =
(preset == DONT_SET || preset == AAUDIO_UNSPECIFIED)
? AAUDIO_INPUT_PRESET_VOICE_RECOGNITION // default
@@ -139,6 +163,21 @@
// Note that the AAUDIO_SYSTEM_USAGE_* values requires special permission.
};
+static const std::string oversizedTags2 = std::string(AUDIO_ATTRIBUTES_TAGS_MAX_SIZE + 1, 'A');
+static const std::string oversizedTags = std::string(AUDIO_ATTRIBUTES_TAGS_MAX_SIZE, 'B');
+static const std::string maxSizeTags = std::string(AUDIO_ATTRIBUTES_TAGS_MAX_SIZE - 1, 'C');
+
+static const char * const sTags[] = {
+ nullptr,
+ "",
+ "oem=routing_extension",
+ "VX_OEM_ROUTING_EXTENSION",
+ maxSizeTags.c_str(),
+ // intentionally use oversized tags
+ oversizedTags.c_str(),
+ oversizedTags2.c_str()
+};
+
static const aaudio_content_type_t sContentypes[] = {
DONT_SET,
AAUDIO_UNSPECIFIED,
@@ -185,11 +224,18 @@
}
}
+static void checkAttributesTags(aaudio_performance_mode_t perfMode) {
+ for (const char * const tags : sTags) {
+ checkAttributes(perfMode, DONT_SET, DONT_SET, tags);
+ }
+}
+
static void checkAttributesInputPreset(aaudio_performance_mode_t perfMode) {
for (aaudio_input_preset_t inputPreset : sInputPresets) {
checkAttributes(perfMode,
DONT_SET,
DONT_SET,
+ nullptr,
inputPreset,
DONT_SET,
DONT_SET,
@@ -202,6 +248,7 @@
checkAttributes(perfMode,
DONT_SET,
DONT_SET,
+ nullptr,
DONT_SET,
policy,
AAUDIO_DIRECTION_INPUT);
@@ -213,6 +260,7 @@
checkAttributes(perfMode,
DONT_SET,
DONT_SET,
+ nullptr,
DONT_SET,
DONT_SET,
privacyMode,
@@ -228,6 +276,10 @@
checkAttributesContentType(AAUDIO_PERFORMANCE_MODE_NONE);
}
+TEST(test_attributes, aaudio_tags_perfnone) {
+ checkAttributesTags(AAUDIO_PERFORMANCE_MODE_NONE);
+}
+
TEST(test_attributes, aaudio_input_preset_perfnone) {
checkAttributesInputPreset(AAUDIO_PERFORMANCE_MODE_NONE);
}
@@ -244,6 +296,10 @@
checkAttributesContentType(AAUDIO_PERFORMANCE_MODE_LOW_LATENCY);
}
+TEST(test_attributes, aaudio_tags_lowlat) {
+ checkAttributesTags(AAUDIO_PERFORMANCE_MODE_LOW_LATENCY);
+}
+
TEST(test_attributes, aaudio_input_preset_lowlat) {
checkAttributesInputPreset(AAUDIO_PERFORMANCE_MODE_LOW_LATENCY);
}
diff --git a/media/libaaudio/tests/test_mmap_path.cpp b/media/libaaudio/tests/test_mmap_path.cpp
index c8376f6..6ad694f 100644
--- a/media/libaaudio/tests/test_mmap_path.cpp
+++ b/media/libaaudio/tests/test_mmap_path.cpp
@@ -40,7 +40,7 @@
*/
static void openStreamAndVerify(aaudio_direction_t direction) {
std::vector<AudioMMapPolicyInfo> policyInfos;
- ASSERT_EQ(android::NO_ERROR, android::AudioSystem::getMmapPolicyInfo(
+ ASSERT_EQ(android::NO_ERROR, android::AudioSystem::getMmapPolicyInfos(
AudioMMapPolicyType::DEFAULT, &policyInfos));
if (AAudio_getAAudioPolicy(policyInfos) == AAUDIO_POLICY_NEVER) {
// Query the system MMAP policy, if it is NEVER, it indicates there is no MMAP support.
diff --git a/media/libaudioclient/AudioRecord.cpp b/media/libaudioclient/AudioRecord.cpp
index b193950..1e6be68 100644
--- a/media/libaudioclient/AudioRecord.cpp
+++ b/media/libaudioclient/AudioRecord.cpp
@@ -1577,11 +1577,6 @@
const int INITIAL_RETRIES = 3;
int retries = INITIAL_RETRIES;
retry:
- if (retries < INITIAL_RETRIES) {
- // refresh the audio configuration cache in this process to make sure we get new
- // input parameters and new IAudioRecord in createRecord_l()
- AudioSystem::clearAudioConfigCache();
- }
mFlags = mOrigFlags;
// if the new IAudioRecord is created, createRecord_l() will modify the
diff --git a/media/libaudioclient/AudioSystem.cpp b/media/libaudioclient/AudioSystem.cpp
index 769475c..483a1ef 100644
--- a/media/libaudioclient/AudioSystem.cpp
+++ b/media/libaudioclient/AudioSystem.cpp
@@ -26,12 +26,14 @@
#include <binder/IServiceManager.h>
#include <binder/ProcessState.h>
#include <binder/IPCThreadState.h>
+#include <cutils/properties.h>
#include <media/AidlConversion.h>
#include <media/AudioResamplerPublic.h>
#include <media/AudioSystem.h>
#include <media/IAudioFlinger.h>
#include <media/PolicyAidlConversion.h>
#include <media/TypeConverter.h>
+#include <mediautils/ServiceSingleton.h>
#include <math.h>
#include <system/audio.h>
@@ -80,172 +82,215 @@
std::mutex AudioSystem::gSoundTriggerMutex;
sp<CaptureStateListenerImpl> AudioSystem::gSoundTriggerCaptureStateListener;
-// Sets the Binder for the AudioFlinger service, passed to this client process
-// from the system server.
-// This allows specific isolated processes to access the audio system. Currently used only for the
-// HotwordDetectionService.
-template <typename ServiceInterface, typename Client, typename AidlInterface,
- typename ServiceTraits>
-class ServiceHandler {
+// ----------------------------
+
+// AudioSystem is the client side interface to AudioFlinger (AF) and AudioPolicy (AP).
+//
+// For clients:
+// We use the ServiceSingleton class in mediautils to fetch the AF/AP service.
+// The ServiceSingleton offers service prefetch, automatic
+// new service notification, automatic binder death notification.
+//
+// AudioFlingerServiceTraits and AudioPolicyServiceTraits are passed into
+// ServiceSingleton to provide interaction with the service notifications and
+// binder death notifications.
+//
+// If the AF/AP service is unavailable for kServiceWaitMs from ServiceManager,
+// ServiceSingleton will return a nullptr service handle resulting in the same dead object error
+// as if the service died (which it did, otherwise we'd be returning the cached handle).
+//
+// Potential deadlock sequence:
+// 1) audioserver reboots.
+// 2) App clients call into AudioService (system server) obtaining binder threads,
+// these calls blocking for audioserver reboot completion (or waiting for a mutex
+// held by those blocked threads).
+// 3) AudioFlinger and AudioPolicyManager services need to call into system server
+// during initialization. It can't because app clients hold all the binder threads
+// in the threadpool.
+// 4) We have a resource deadlock between (2) and (3) potentially causing an ANR and
+// further reinitialization.
+// 5) However, after the service wait timeout kServiceWaitMs, the calls in (2) will
+// return an error and resolve themselves, breaking the resource deadlock in (4).
+//
+// At this time, it is a matter of experimentation whether the service timeout should be
+// applied only to the system server, letting other clients block indefinitely.
+//
+// For audio services:
+// AudioFlinger and AudioPolicy may call back into AudioSystem. When doing
+// so it should not hold any mutexes. There is no service wait as AudioFlinger
+// and AudioPolicy are in-process with each other, and the call proceeds without
+// binder. The setLocalService() method is used to set the service interfaces
+// within audioserver to bypass the ServiceManager lookup.
+//
+
+// Wait timeout for AudioFlinger or AudioPolicy service before returning with null.
+// Such an audioserver failure is considered benign as the ground truth is stored in
+// the Java AudioService and can be restored once audioserver has finished initialization.
+//
+// TODO(b/375691003) We use 10s as a conservative timeout value, and will tune closer to 3s.
+// Too small a value (i.e. less than 1s) would churn repeated calls to get the service.
+static constexpr int32_t kServiceWaitMs = 10'000;
+
+static constexpr const char kServiceWaitProperty[] = "audio.service.wait_ms";
+
+// AudioFlingerServiceTraits is a collection of methods that parameterize the
+// ServiceSingleton handler for IAudioFlinger
+
+class AudioFlingerServiceTraits {
public:
- sp<ServiceInterface> getService()
- EXCLUDES(mMutex) NO_THREAD_SAFETY_ANALYSIS { // std::unique_ptr
- sp<ServiceInterface> service;
- sp<Client> client;
+ // ------- required by ServiceSingleton
- bool reportNoError = false;
+ static constexpr const char* getServiceName() { return "media.audio_flinger"; }
+
+ static void onNewService(const sp<media::IAudioFlingerService>& afs) {
+ onNewServiceWithAdapter(createServiceAdapter(afs));
+ }
+
+ static void onServiceDied(const sp<media::IAudioFlingerService>&) {
+ ALOGW("%s: %s service died", __func__, getServiceName());
{
- std::lock_guard _l(mMutex);
- if (mService != nullptr) {
- return mService;
- }
+ std::lock_guard l(mMutex);
+ mValid = false;
+ mClient->clearIoCache();
}
+ AudioSystem::reportError(DEAD_OBJECT);
+ }
- std::unique_lock ul_only1thread(mSingleGetter);
- std::unique_lock ul(mMutex);
- if (mService != nullptr) {
- return mService;
- }
- if (mClient == nullptr) {
- mClient = sp<Client>::make();
- } else {
- reportNoError = true;
- }
- while (true) {
- mService = mLocalService;
- if (mService != nullptr) break;
+ static constexpr mediautils::ServiceOptions options() {
+ return mediautils::ServiceOptions::kNone;
+ }
- sp<IBinder> binder = mBinder;
- if (binder == nullptr) {
- sp <IServiceManager> sm = defaultServiceManager();
- binder = sm->checkService(String16(ServiceTraits::SERVICE_NAME));
- if (binder == nullptr) {
- ALOGD("%s: waiting for %s", __func__, ServiceTraits::SERVICE_NAME);
+ // ------- required by AudioSystem
- // if the condition variable is present, setLocalService() and
- // setBinder() is allowed to use it to notify us.
- if (mCvGetter == nullptr) {
- mCvGetter = std::make_shared<std::condition_variable>();
- }
- mCvGetter->wait_for(ul, std::chrono::seconds(1));
- continue;
- }
+ static sp<IAudioFlinger> getService(
+ std::chrono::milliseconds waitMs = std::chrono::milliseconds{-1}) {
+ static bool init = false;
+ audio_utils::unique_lock ul(mMutex);
+ if (!init) {
+ if (!mDisableThreadPoolStart) {
+ ProcessState::self()->startThreadPool();
}
- binder->linkToDeath(mClient);
- auto aidlInterface = interface_cast<AidlInterface>(binder);
- LOG_ALWAYS_FATAL_IF(aidlInterface == nullptr);
- if constexpr (std::is_same_v<ServiceInterface, AidlInterface>) {
- mService = std::move(aidlInterface);
- } else /* constexpr */ {
- mService = ServiceTraits::createServiceAdapter(aidlInterface);
- }
- break;
+ mediautils::initService<media::IAudioFlingerService, AudioFlingerServiceTraits>();
+ mWaitMs = std::chrono::milliseconds(
+ property_get_int32(kServiceWaitProperty, kServiceWaitMs));
+ init = true;
}
- if (mCvGetter) mCvGetter.reset(); // remove condition variable.
- client = mClient;
- service = mService;
- // Make sure callbacks can be received by the client
- if (mCanStartThreadPool) {
- ProcessState::self()->startThreadPool();
- }
+ if (mValid) return mService;
+ if (waitMs.count() < 0) waitMs = mWaitMs;
ul.unlock();
- ul_only1thread.unlock();
- ServiceTraits::onServiceCreate(service, client);
- if (reportNoError) AudioSystem::reportError(NO_ERROR);
- return service;
+
+ // mediautils::getService() installs a persistent new service notification.
+ auto service = mediautils::getService<
+ media::IAudioFlingerService>(waitMs);
+ ALOGD("%s: checking for service %s: %p", __func__, getServiceName(), service.get());
+
+ ul.lock();
+ // return the IAudioFlinger interface which is adapted
+ // from the media::IAudioFlingerService.
+ return mService;
}
- status_t setLocalService(const sp<ServiceInterface>& service) EXCLUDES(mMutex) {
- std::lock_guard _l(mMutex);
- // we allow clearing once set, but not a double non-null set.
- if (mService != nullptr && service != nullptr) return INVALID_OPERATION;
- mLocalService = service;
- if (mCvGetter) mCvGetter->notify_one();
- return OK;
- }
+ static sp<AudioSystem::AudioFlingerClient> getClient() {
+ audio_utils::unique_lock ul(mMutex);
+ if (mValid) return mClient;
+ ul.unlock();
- sp<Client> getClient() EXCLUDES(mMutex) {
- const auto service = getService();
- if (service == nullptr) return nullptr;
- std::lock_guard _l(mMutex);
+ auto service = getService();
+ ALOGD("%s: checking for service: %p", __func__, service.get());
+
+ ul.lock();
return mClient;
}
- void setBinder(const sp<IBinder>& binder) EXCLUDES(mMutex) {
- std::lock_guard _l(mMutex);
- if (mService != nullptr) {
- ALOGW("%s: ignoring; %s connection already established.",
- __func__, ServiceTraits::SERVICE_NAME);
- return;
+ static void setBinder(const sp<IBinder>& binder) {
+ setLocalService(createServiceAdapter(
+ mediautils::interfaceFromBinder<media::IAudioFlingerService>(binder)));
+ }
+
+ static status_t setLocalService(const sp<IAudioFlinger>& af) {
+ mediautils::skipService<media::IAudioFlingerService>();
+ sp<IAudioFlinger> old;
+ {
+ std::lock_guard l(mMutex);
+ old = mService;
+ mService = af;
}
- mBinder = binder;
- if (mCvGetter) mCvGetter->notify_one();
+ if (old) onServiceDied({});
+ if (af) onNewServiceWithAdapter(af);
+ return OK;
}
- void clearService() EXCLUDES(mMutex) {
- std::lock_guard _l(mMutex);
- mService.clear();
- if (mClient) ServiceTraits::onClearService(mClient);
+ static void disableThreadPoolStart() {
+ mDisableThreadPoolStart = true;
}
- void disableThreadPool() {
- mCanStartThreadPool = false;
+ static bool isValid() {
+ audio_utils::unique_lock ul(mMutex);
+ if (mValid) return true;
+ ul.unlock();
+ (void)getService({});
+ ul.lock();
+ return mValid;
+ }
+
+ // called to determine error on nullptr service return.
+ static constexpr status_t getError() {
+ return DEAD_OBJECT;
}
private:
- std::mutex mSingleGetter;
- std::mutex mMutex;
- std::shared_ptr<std::condition_variable> mCvGetter GUARDED_BY(mMutex);
- sp<IBinder> mBinder GUARDED_BY(mMutex);
- sp<ServiceInterface> mLocalService GUARDED_BY(mMutex);
- sp<ServiceInterface> mService GUARDED_BY(mMutex);
- sp<Client> mClient GUARDED_BY(mMutex);
- std::atomic<bool> mCanStartThreadPool = true;
-};
-struct AudioFlingerTraits {
- static void onServiceCreate(
- const sp<IAudioFlinger>& af, const sp<AudioSystem::AudioFlingerClient>& afc) {
+ static void onNewServiceWithAdapter(const sp<IAudioFlinger>& service) {
+ ALOGD("%s: %s service obtained %p", __func__, getServiceName(), service.get());
+ sp<AudioSystem::AudioFlingerClient> client;
+ bool reportNoError = false;
+ {
+ std::lock_guard l(mMutex);
+ if (mClient == nullptr) {
+ mClient = sp<AudioSystem::AudioFlingerClient>::make();
+ } else {
+ mClient->clearIoCache();
+ reportNoError = true;
+ }
+ mService = service;
+ client = mClient;
+ mValid = true;
+ }
+ // TODO(b/375280520) consider registerClient() within mMutex lock.
const int64_t token = IPCThreadState::self()->clearCallingIdentity();
- af->registerClient(afc);
+ service->registerClient(client);
IPCThreadState::self()->restoreCallingIdentity(token);
+
+ if (reportNoError) AudioSystem::reportError(NO_ERROR);
}
static sp<IAudioFlinger> createServiceAdapter(
- const sp<media::IAudioFlingerService>& aidlInterface) {
- return sp<AudioFlingerClientAdapter>::make(aidlInterface);
+ const sp<media::IAudioFlingerService>& af) {
+ return sp<AudioFlingerClientAdapter>::make(af);
}
- static void onClearService(const sp<AudioSystem::AudioFlingerClient>& afc) {
- afc->clearIoCache();
- }
-
- static constexpr const char* SERVICE_NAME = IAudioFlinger::DEFAULT_SERVICE_NAME;
+ static inline constinit std::mutex mMutex;
+ static inline constinit sp<AudioSystem::AudioFlingerClient> mClient GUARDED_BY(mMutex);
+ static inline constinit sp<IAudioFlinger> mService GUARDED_BY(mMutex);
+ static inline constinit std::chrono::milliseconds mWaitMs GUARDED_BY(mMutex) {kServiceWaitMs};
+ static inline constinit bool mValid GUARDED_BY(mMutex) = false;
+ static inline constinit std::atomic_bool mDisableThreadPoolStart = false;
};
-[[clang::no_destroy]] static constinit ServiceHandler<IAudioFlinger,
- AudioSystem::AudioFlingerClient, media::IAudioFlingerService,
- AudioFlingerTraits> gAudioFlingerServiceHandler;
-
sp<IAudioFlinger> AudioSystem::get_audio_flinger() {
- return gAudioFlingerServiceHandler.getService();
+ return AudioFlingerServiceTraits::getService();
}
sp<AudioSystem::AudioFlingerClient> AudioSystem::getAudioFlingerClient() {
- return gAudioFlingerServiceHandler.getClient();
+ return AudioFlingerServiceTraits::getClient();
}
void AudioSystem::setAudioFlingerBinder(const sp<IBinder>& audioFlinger) {
- if (audioFlinger->getInterfaceDescriptor() != media::IAudioFlingerService::descriptor) {
- ALOGE("%s: received a binder of type %s",
- __func__, String8(audioFlinger->getInterfaceDescriptor()).c_str());
- return;
- }
- gAudioFlingerServiceHandler.setBinder(audioFlinger);
+ AudioFlingerServiceTraits::setBinder(audioFlinger);
}
status_t AudioSystem::setLocalAudioFlinger(const sp<IAudioFlinger>& af) {
- return gAudioFlingerServiceHandler.setLocalService(af);
+ return AudioFlingerServiceTraits::setLocalService(af);
}
sp<AudioIoDescriptor> AudioSystem::getIoDescriptor(audio_io_handle_t ioHandle) {
@@ -258,9 +303,7 @@
}
/* static */ status_t AudioSystem::checkAudioFlinger() {
- if (defaultServiceManager()->checkService(String16("media.audio_flinger")) != 0) {
- return NO_ERROR;
- }
+ if (AudioFlingerServiceTraits::isValid()) return OK;
return DEAD_OBJECT;
}
@@ -268,41 +311,41 @@
status_t AudioSystem::muteMicrophone(bool state) {
const sp<IAudioFlinger> af = get_audio_flinger();
- if (af == 0) return PERMISSION_DENIED;
+ if (af == nullptr) return AudioFlingerServiceTraits::getError();
return af->setMicMute(state);
}
status_t AudioSystem::isMicrophoneMuted(bool* state) {
const sp<IAudioFlinger> af = get_audio_flinger();
- if (af == 0) return PERMISSION_DENIED;
+ if (af == nullptr) return AudioFlingerServiceTraits::getError();
*state = af->getMicMute();
return NO_ERROR;
}
status_t AudioSystem::setMasterVolume(float value) {
const sp<IAudioFlinger> af = get_audio_flinger();
- if (af == 0) return PERMISSION_DENIED;
+ if (af == nullptr) return AudioFlingerServiceTraits::getError();
af->setMasterVolume(value);
return NO_ERROR;
}
status_t AudioSystem::setMasterMute(bool mute) {
const sp<IAudioFlinger> af = get_audio_flinger();
- if (af == 0) return PERMISSION_DENIED;
+ if (af == nullptr) return AudioFlingerServiceTraits::getError();
af->setMasterMute(mute);
return NO_ERROR;
}
status_t AudioSystem::getMasterVolume(float* volume) {
const sp<IAudioFlinger> af = get_audio_flinger();
- if (af == 0) return PERMISSION_DENIED;
+ if (af == nullptr) return AudioFlingerServiceTraits::getError();
*volume = af->masterVolume();
return NO_ERROR;
}
status_t AudioSystem::getMasterMute(bool* mute) {
const sp<IAudioFlinger> af = get_audio_flinger();
- if (af == 0) return PERMISSION_DENIED;
+ if (af == nullptr) return AudioFlingerServiceTraits::getError();
*mute = af->masterMute();
return NO_ERROR;
}
@@ -311,7 +354,7 @@
bool muted, audio_io_handle_t output) {
if (uint32_t(stream) >= AUDIO_STREAM_CNT) return BAD_VALUE;
const sp<IAudioFlinger> af = get_audio_flinger();
- if (af == 0) return PERMISSION_DENIED;
+ if (af == nullptr) return AudioFlingerServiceTraits::getError();
af->setStreamVolume(stream, value, muted, output);
return NO_ERROR;
}
@@ -319,7 +362,7 @@
status_t AudioSystem::setStreamMute(audio_stream_type_t stream, bool mute) {
if (uint32_t(stream) >= AUDIO_STREAM_CNT) return BAD_VALUE;
const sp<IAudioFlinger> af = get_audio_flinger();
- if (af == 0) return PERMISSION_DENIED;
+ if (af == nullptr) return AudioFlingerServiceTraits::getError();
af->setStreamMute(stream, mute);
return NO_ERROR;
}
@@ -328,7 +371,7 @@
const std::vector<audio_port_handle_t>& portIds, float volume, bool muted,
audio_io_handle_t output) {
const sp<IAudioFlinger> af = get_audio_flinger();
- if (af == 0) return PERMISSION_DENIED;
+ if (af == nullptr) return AudioFlingerServiceTraits::getError();
std::vector<int32_t> portIdsAidl = VALUE_OR_RETURN_STATUS(
convertContainer<std::vector<int32_t>>(
portIds, legacy2aidl_audio_port_handle_t_int32_t));
@@ -340,26 +383,26 @@
status_t AudioSystem::setMode(audio_mode_t mode) {
if (uint32_t(mode) >= AUDIO_MODE_CNT) return BAD_VALUE;
const sp<IAudioFlinger> af = get_audio_flinger();
- if (af == 0) return PERMISSION_DENIED;
+ if (af == nullptr) return AudioFlingerServiceTraits::getError();
return af->setMode(mode);
}
status_t AudioSystem::setSimulateDeviceConnections(bool enabled) {
const sp<IAudioFlinger> af = get_audio_flinger();
- if (af == 0) return PERMISSION_DENIED;
+ if (af == nullptr) return AudioFlingerServiceTraits::getError();
return af->setSimulateDeviceConnections(enabled);
}
status_t AudioSystem::setParameters(audio_io_handle_t ioHandle, const String8& keyValuePairs) {
const sp<IAudioFlinger> af = get_audio_flinger();
- if (af == 0) return PERMISSION_DENIED;
+ if (af == nullptr) return AudioFlingerServiceTraits::getError();
return af->setParameters(ioHandle, keyValuePairs);
}
String8 AudioSystem::getParameters(audio_io_handle_t ioHandle, const String8& keys) {
const sp<IAudioFlinger> af = get_audio_flinger();
String8 result = String8("");
- if (af == 0) return result;
+ if (af == nullptr) return result;
result = af->getParameters(ioHandle, keys);
return result;
@@ -438,7 +481,7 @@
status_t AudioSystem::getSamplingRate(audio_io_handle_t ioHandle,
uint32_t* samplingRate) {
const sp<IAudioFlinger> af = get_audio_flinger();
- if (af == 0) return PERMISSION_DENIED;
+ if (af == nullptr) return AudioFlingerServiceTraits::getError();
sp<AudioIoDescriptor> desc = getIoDescriptor(ioHandle);
if (desc == 0) {
*samplingRate = af->sampleRate(ioHandle);
@@ -473,7 +516,7 @@
status_t AudioSystem::getFrameCount(audio_io_handle_t ioHandle,
size_t* frameCount) {
const sp<IAudioFlinger> af = get_audio_flinger();
- if (af == 0) return PERMISSION_DENIED;
+ if (af == nullptr) return AudioFlingerServiceTraits::getError();
sp<AudioIoDescriptor> desc = getIoDescriptor(ioHandle);
if (desc == 0) {
*frameCount = af->frameCount(ioHandle);
@@ -508,7 +551,7 @@
status_t AudioSystem::getLatency(audio_io_handle_t output,
uint32_t* latency) {
const sp<IAudioFlinger> af = get_audio_flinger();
- if (af == 0) return PERMISSION_DENIED;
+ if (af == nullptr) return AudioFlingerServiceTraits::getError();
sp<AudioIoDescriptor> outputDesc = getIoDescriptor(output);
if (outputDesc == 0) {
*latency = af->latency(output);
@@ -532,14 +575,14 @@
status_t AudioSystem::setVoiceVolume(float value) {
const sp<IAudioFlinger> af = get_audio_flinger();
- if (af == 0) return PERMISSION_DENIED;
+ if (af == nullptr) return AudioFlingerServiceTraits::getError();
return af->setVoiceVolume(value);
}
status_t AudioSystem::getRenderPosition(audio_io_handle_t output, uint32_t* halFrames,
uint32_t* dspFrames) {
const sp<IAudioFlinger> af = get_audio_flinger();
- if (af == 0) return PERMISSION_DENIED;
+ if (af == nullptr) return AudioFlingerServiceTraits::getError();
return af->getRenderPosition(halFrames, dspFrames, output);
}
@@ -547,7 +590,7 @@
uint32_t AudioSystem::getInputFramesLost(audio_io_handle_t ioHandle) {
const sp<IAudioFlinger> af = get_audio_flinger();
uint32_t result = 0;
- if (af == 0) return result;
+ if (af == nullptr) return result;
if (ioHandle == AUDIO_IO_HANDLE_NONE) return result;
result = af->getInputFramesLost(ioHandle);
@@ -557,7 +600,7 @@
audio_unique_id_t AudioSystem::newAudioUniqueId(audio_unique_id_use_t use) {
// Must not use AF as IDs will re-roll on audioserver restart, b/130369529.
const sp<IAudioFlinger> af = get_audio_flinger();
- if (af == 0) return AUDIO_UNIQUE_ID_ALLOCATE;
+ if (af == nullptr) return AUDIO_UNIQUE_ID_ALLOCATE;
return af->newAudioUniqueId(use);
}
@@ -577,26 +620,26 @@
audio_hw_sync_t AudioSystem::getAudioHwSyncForSession(audio_session_t sessionId) {
const sp<IAudioFlinger> af = get_audio_flinger();
- if (af == 0) return AUDIO_HW_SYNC_INVALID;
+ if (af == nullptr) return AUDIO_HW_SYNC_INVALID;
return af->getAudioHwSyncForSession(sessionId);
}
status_t AudioSystem::systemReady() {
const sp<IAudioFlinger> af = get_audio_flinger();
- if (af == 0) return NO_INIT;
+ if (af == nullptr) return NO_INIT;
return af->systemReady();
}
status_t AudioSystem::audioPolicyReady() {
const sp<IAudioFlinger> af = get_audio_flinger();
- if (af == 0) return NO_INIT;
+ if (af == nullptr) return NO_INIT;
return af->audioPolicyReady();
}
status_t AudioSystem::getFrameCountHAL(audio_io_handle_t ioHandle,
size_t* frameCount) {
const sp<IAudioFlinger> af = get_audio_flinger();
- if (af == 0) return PERMISSION_DENIED;
+ if (af == nullptr) return AudioFlingerServiceTraits::getError();
sp<AudioIoDescriptor> desc = getIoDescriptor(ioHandle);
if (desc == 0) {
*frameCount = af->frameCountHAL(ioHandle);
@@ -625,13 +668,6 @@
mInChannelMask = AUDIO_CHANNEL_NONE;
}
-void AudioSystem::AudioFlingerClient::binderDied(const wp<IBinder>& who __unused) {
- gAudioFlingerServiceHandler.clearService();
- reportError(DEAD_OBJECT);
-
- ALOGW("AudioFlinger server died!");
-}
-
Status AudioSystem::AudioFlingerClient::ioConfigChanged(
media::AudioIoConfigEvent _event,
const media::AudioIoDescriptor& _ioDesc) {
@@ -788,9 +824,7 @@
uint32_t sampleRate, audio_format_t format,
audio_channel_mask_t channelMask, size_t* buffSize) {
const sp<IAudioFlinger> af = get_audio_flinger();
- if (af == 0) {
- return PERMISSION_DENIED;
- }
+ if (af == nullptr) return AudioFlingerServiceTraits::getError();
std::lock_guard _l(mMutex);
// Do we have a stale mInBuffSize or are we requesting the input buffer size for new values
if ((mInBuffSize == 0) || (sampleRate != mInSamplingRate) || (format != mInFormat)
@@ -925,47 +959,145 @@
gVolRangeInitReqCallback = cb;
}
-struct AudioPolicyTraits {
- static void onServiceCreate(const sp<IAudioPolicyService>& ap,
- const sp<AudioSystem::AudioPolicyServiceClient>& apc) {
+
+// AudioPolicyServiceTraits is a collection of methods that parameterize the
+// ServiceSingleton class implementation of IAudioPolicyService.
+
+class AudioPolicyServiceTraits {
+public:
+ // ------- methods required by ServiceSingleton
+
+ static constexpr const char* getServiceName() { return "media.audio_policy"; }
+
+ static void onNewService(const sp<IAudioPolicyService>& aps) {
+ ALOGD("%s: %s service obtained %p", __func__, getServiceName(), aps.get());
+ sp<AudioSystem::AudioPolicyServiceClient> client;
+ {
+ std::lock_guard l(mMutex);
+ if (mClient == nullptr) {
+ mClient = sp<AudioSystem::AudioPolicyServiceClient>::make();
+ }
+ client = mClient;
+ mService = aps;
+ mValid = true;
+ }
+ // TODO(b/375280520) consider registerClient() within mMutex lock.
const int64_t token = IPCThreadState::self()->clearCallingIdentity();
- ap->registerClient(apc);
- ap->setAudioPortCallbacksEnabled(apc->isAudioPortCbEnabled());
- ap->setAudioVolumeGroupCallbacksEnabled(apc->isAudioVolumeGroupCbEnabled());
+ aps->registerClient(client);
IPCThreadState::self()->restoreCallingIdentity(token);
}
- static void onClearService(const sp<AudioSystem::AudioPolicyServiceClient>&) {}
+ static void onServiceDied(const sp<IAudioPolicyService>&) {
+ ALOGW("%s: %s service died", __func__, getServiceName());
+ sp<AudioSystem::AudioPolicyServiceClient> client;
+ {
+ std::lock_guard l(mMutex);
+ mValid = false;
+ client = mClient;
+ }
+ client->onServiceDied();
+ }
- static constexpr const char *SERVICE_NAME = "media.audio_policy";
+ static constexpr mediautils::ServiceOptions options() {
+ return mediautils::ServiceOptions::kNone;
+ }
+
+ // ------- methods required by AudioSystem
+
+ static sp<IAudioPolicyService> getService(
+ std::chrono::milliseconds waitMs = std::chrono::milliseconds{-1}) {
+ static bool init = false;
+ audio_utils::unique_lock ul(mMutex);
+ if (!init) {
+ if (!mDisableThreadPoolStart) {
+ ProcessState::self()->startThreadPool();
+ }
+ mediautils::initService<IAudioPolicyService, AudioPolicyServiceTraits>();
+ mWaitMs = std::chrono::milliseconds(
+ property_get_int32(kServiceWaitProperty, kServiceWaitMs));
+ init = true;
+ }
+ if (mValid) return mService;
+ if (waitMs.count() < 0) waitMs = mWaitMs;
+ ul.unlock();
+
+ auto service = mediautils::getService<
+ media::IAudioPolicyService>(waitMs);
+ ALOGD("%s: checking for service %s: %p", __func__, getServiceName(), service.get());
+
+ // mediautils::getService() will return early if setLocalService() is called
+ // (whereupon mService contains the actual local service pointer to use).
+ // We should always return mService.
+ ul.lock();
+ return mService;
+ }
+
+ static sp<AudioSystem::AudioPolicyServiceClient> getClient() {
+ audio_utils::unique_lock ul(mMutex);
+ if (mValid) return mClient;
+ ul.unlock();
+
+ auto service = getService();
+ ALOGD("%s: checking for service: %p", __func__, service.get());
+
+ ul.lock();
+ return mClient;
+ }
+
+ static status_t setLocalService(const sp<IAudioPolicyService>& aps) {
+ mediautils::skipService<IAudioPolicyService>();
+ sp<IAudioPolicyService> old;
+ {
+ std::lock_guard l(mMutex);
+ old = mService;
+ mService = aps;
+ }
+ if (old) onServiceDied(old);
+ if (aps) onNewService(aps);
+ return OK;
+ }
+
+ static void disableThreadPoolStart() {
+ mDisableThreadPoolStart = true;
+ }
+
+ // called to determine error on nullptr service return.
+ static constexpr status_t getError() {
+ return DEAD_OBJECT;
+ }
+private:
+
+ static inline constinit std::mutex mMutex;
+ static inline constinit sp<AudioSystem::AudioPolicyServiceClient> mClient GUARDED_BY(mMutex);
+ static inline constinit sp<IAudioPolicyService> mService GUARDED_BY(mMutex);
+ static inline constinit bool mValid GUARDED_BY(mMutex) = false;
+ static inline constinit std::chrono::milliseconds mWaitMs GUARDED_BY(mMutex) {kServiceWaitMs};
+ static inline constinit std::atomic_bool mDisableThreadPoolStart = false;
};
-[[clang::no_destroy]] static constinit ServiceHandler<IAudioPolicyService,
- AudioSystem::AudioPolicyServiceClient, IAudioPolicyService,
- AudioPolicyTraits> gAudioPolicyServiceHandler;
-
-status_t AudioSystem::setLocalAudioPolicyService(const sp<IAudioPolicyService>& aps) {
- return gAudioPolicyServiceHandler.setLocalService(aps);
-}
sp<IAudioPolicyService> AudioSystem::get_audio_policy_service() {
- return gAudioPolicyServiceHandler.getService();
+ return AudioPolicyServiceTraits::getService();
}
-void AudioSystem::clearAudioPolicyService() {
- gAudioPolicyServiceHandler.clearService();
+status_t AudioSystem::setLocalAudioPolicyService(const sp<IAudioPolicyService>& aps) {
+ return AudioPolicyServiceTraits::setLocalService(aps);
+}
+
+sp<AudioSystem::AudioPolicyServiceClient> AudioSystem::getAudioPolicyClient() {
+ return AudioPolicyServiceTraits::getClient();
}
void AudioSystem::disableThreadPool() {
- gAudioFlingerServiceHandler.disableThreadPool();
- gAudioPolicyServiceHandler.disableThreadPool();
+ AudioFlingerServiceTraits::disableThreadPoolStart();
+ AudioPolicyServiceTraits::disableThreadPoolStart();
}
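For context, a minimal sketch of how the new accessors are exercised once AudioPolicyServiceTraits replaces the removed ServiceHandler; the helper function name and the sample error handling are illustrative, and only the AudioSystem calls come from this change:

status_t examplePolicyAccess(const sp<media::IAudioPolicyService>& inProcessAps) {
    // audioserver main() may install its in-process implementation before any
    // binder lookup; later get_audio_policy_service() calls then return it directly.
    if (inProcessAps != nullptr) {
        AudioSystem::setLocalAudioPolicyService(inProcessAps);
    }
    // Regular clients keep using the same accessor as before.
    const sp<media::IAudioPolicyService> aps = AudioSystem::get_audio_policy_service();
    // A missing service now maps to AudioPolicyServiceTraits::getError() (DEAD_OBJECT)
    // in the wrappers below, rather than PERMISSION_DENIED.
    return aps == nullptr ? DEAD_OBJECT : OK;
}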
// ---------------------------------------------------------------------------
void AudioSystem::onNewAudioModulesAvailable() {
const sp<IAudioPolicyService> aps = get_audio_policy_service();
- if (aps == 0) return;
+ if (aps == nullptr) return;
aps->onNewAudioModulesAvailable();
}
@@ -974,7 +1106,7 @@
audio_format_t encodedFormat) {
const sp<IAudioPolicyService> aps = get_audio_policy_service();
- if (aps == 0) return PERMISSION_DENIED;
+ if (aps == nullptr) return AudioPolicyServiceTraits::getError();
return statusTFromBinderStatus(
aps->setDeviceConnectionState(
@@ -988,7 +1120,7 @@
audio_policy_dev_state_t AudioSystem::getDeviceConnectionState(audio_devices_t device,
const char* device_address) {
const sp<IAudioPolicyService> aps = get_audio_policy_service();
- if (aps == 0) return AUDIO_POLICY_DEVICE_STATE_UNAVAILABLE;
+ if (aps == nullptr) return AUDIO_POLICY_DEVICE_STATE_UNAVAILABLE;
auto result = [&]() -> ConversionResult<audio_policy_dev_state_t> {
AudioDevice deviceAidl = VALUE_OR_RETURN(
@@ -1011,7 +1143,7 @@
const char* address = "";
const char* name = "";
- if (aps == 0) return PERMISSION_DENIED;
+ if (aps == nullptr) return AudioPolicyServiceTraits::getError();
if (device_address != NULL) {
address = device_address;
@@ -1031,7 +1163,7 @@
status_t AudioSystem::setPhoneState(audio_mode_t state, uid_t uid) {
if (uint32_t(state) >= AUDIO_MODE_CNT) return BAD_VALUE;
const sp<IAudioPolicyService> aps = get_audio_policy_service();
- if (aps == 0) return PERMISSION_DENIED;
+ if (aps == nullptr) return AudioPolicyServiceTraits::getError();
return statusTFromBinderStatus(aps->setPhoneState(
VALUE_OR_RETURN_STATUS(legacy2aidl_audio_mode_t_AudioMode(state)),
@@ -1041,7 +1173,7 @@
status_t
AudioSystem::setForceUse(audio_policy_force_use_t usage, audio_policy_forced_cfg_t config) {
const sp<IAudioPolicyService> aps = get_audio_policy_service();
- if (aps == 0) return PERMISSION_DENIED;
+ if (aps == nullptr) return AudioPolicyServiceTraits::getError();
return statusTFromBinderStatus(
aps->setForceUse(
@@ -1054,7 +1186,7 @@
audio_policy_forced_cfg_t AudioSystem::getForceUse(audio_policy_force_use_t usage) {
const sp<IAudioPolicyService> aps = get_audio_policy_service();
- if (aps == 0) return AUDIO_POLICY_FORCE_NONE;
+ if (aps == nullptr) return AUDIO_POLICY_FORCE_NONE;
auto result = [&]() -> ConversionResult<audio_policy_forced_cfg_t> {
AudioPolicyForceUse usageAidl = VALUE_OR_RETURN(
@@ -1071,7 +1203,7 @@
audio_io_handle_t AudioSystem::getOutput(audio_stream_type_t stream) {
const sp<IAudioPolicyService> aps = get_audio_policy_service();
- if (aps == 0) return AUDIO_IO_HANDLE_NONE;
+ if (aps == nullptr) return AUDIO_IO_HANDLE_NONE;
auto result = [&]() -> ConversionResult<audio_io_handle_t> {
AudioStreamType streamAidl = VALUE_OR_RETURN(
@@ -1121,7 +1253,7 @@
}
const sp<IAudioPolicyService> aps = get_audio_policy_service();
- if (aps == 0) return NO_INIT;
+ if (aps == nullptr) return NO_INIT;
media::audio::common::AudioAttributes attrAidl = VALUE_OR_RETURN_STATUS(
legacy2aidl_audio_attributes_t_AudioAttributes(*attr));
@@ -1172,7 +1304,7 @@
status_t AudioSystem::startOutput(audio_port_handle_t portId) {
const sp<IAudioPolicyService> aps = get_audio_policy_service();
- if (aps == 0) return PERMISSION_DENIED;
+ if (aps == nullptr) return AudioPolicyServiceTraits::getError();
int32_t portIdAidl = VALUE_OR_RETURN_STATUS(legacy2aidl_audio_port_handle_t_int32_t(portId));
return statusTFromBinderStatus(aps->startOutput(portIdAidl));
@@ -1180,7 +1312,7 @@
status_t AudioSystem::stopOutput(audio_port_handle_t portId) {
const sp<IAudioPolicyService> aps = get_audio_policy_service();
- if (aps == 0) return PERMISSION_DENIED;
+ if (aps == nullptr) return AudioPolicyServiceTraits::getError();
int32_t portIdAidl = VALUE_OR_RETURN_STATUS(legacy2aidl_audio_port_handle_t_int32_t(portId));
return statusTFromBinderStatus(aps->stopOutput(portIdAidl));
@@ -1188,7 +1320,7 @@
void AudioSystem::releaseOutput(audio_port_handle_t portId) {
const sp<IAudioPolicyService> aps = get_audio_policy_service();
- if (aps == 0) return;
+ if (aps == nullptr) return;
auto status = [&]() -> status_t {
int32_t portIdAidl = VALUE_OR_RETURN_STATUS(
@@ -1228,7 +1360,7 @@
}
const sp<IAudioPolicyService> aps = get_audio_policy_service();
- if (aps == 0) return NO_INIT;
+ if (aps == nullptr) return NO_INIT;
media::audio::common::AudioAttributes attrAidl = VALUE_OR_RETURN_STATUS(
legacy2aidl_audio_attributes_t_AudioAttributes(*attr));
@@ -1262,7 +1394,7 @@
status_t AudioSystem::startInput(audio_port_handle_t portId) {
const sp<IAudioPolicyService> aps = get_audio_policy_service();
- if (aps == 0) return PERMISSION_DENIED;
+ if (aps == nullptr) return AudioPolicyServiceTraits::getError();
int32_t portIdAidl = VALUE_OR_RETURN_STATUS(legacy2aidl_audio_port_handle_t_int32_t(portId));
return statusTFromBinderStatus(aps->startInput(portIdAidl));
@@ -1270,7 +1402,7 @@
status_t AudioSystem::stopInput(audio_port_handle_t portId) {
const sp<IAudioPolicyService> aps = get_audio_policy_service();
- if (aps == 0) return PERMISSION_DENIED;
+ if (aps == nullptr) return AudioPolicyServiceTraits::getError();
int32_t portIdAidl = VALUE_OR_RETURN_STATUS(legacy2aidl_audio_port_handle_t_int32_t(portId));
return statusTFromBinderStatus(aps->stopInput(portIdAidl));
@@ -1278,7 +1410,7 @@
void AudioSystem::releaseInput(audio_port_handle_t portId) {
const sp<IAudioPolicyService> aps = get_audio_policy_service();
- if (aps == 0) return;
+ if (aps == nullptr) return;
auto status = [&]() -> status_t {
int32_t portIdAidl = VALUE_OR_RETURN_STATUS(
@@ -1296,7 +1428,7 @@
bool enabled,
audio_stream_type_t streamToDriveAbs) {
const sp<IAudioPolicyService> aps = get_audio_policy_service();
- if (aps == nullptr) return PERMISSION_DENIED;
+ if (aps == nullptr) return AudioPolicyServiceTraits::getError();
AudioDevice deviceAidl = VALUE_OR_RETURN_STATUS(
legacy2aidl_audio_device_AudioDevice(deviceType, address));
@@ -1310,7 +1442,7 @@
int indexMin,
int indexMax) {
const sp<IAudioPolicyService> aps = get_audio_policy_service();
- if (aps == 0) return PERMISSION_DENIED;
+ if (aps == nullptr) return AudioPolicyServiceTraits::getError();
AudioStreamType streamAidl = VALUE_OR_RETURN_STATUS(
legacy2aidl_audio_stream_type_t_AudioStreamType(stream));
@@ -1318,12 +1450,6 @@
int32_t indexMaxAidl = VALUE_OR_RETURN_STATUS(convertIntegral<int32_t>(indexMax));
status_t status = statusTFromBinderStatus(
aps->initStreamVolume(streamAidl, indexMinAidl, indexMaxAidl));
- if (status == DEAD_OBJECT) {
- // This is a critical operation since w/o proper stream volumes no audio
- // will be heard. Make sure we recover from a failure in any case.
- ALOGE("Received DEAD_OBJECT from APS, clearing the client");
- clearAudioPolicyService();
- }
return status;
}
@@ -1332,7 +1458,7 @@
bool muted,
audio_devices_t device) {
const sp<IAudioPolicyService> aps = get_audio_policy_service();
- if (aps == 0) return PERMISSION_DENIED;
+ if (aps == nullptr) return AudioPolicyServiceTraits::getError();
AudioStreamType streamAidl = VALUE_OR_RETURN_STATUS(
legacy2aidl_audio_stream_type_t_AudioStreamType(stream));
@@ -1347,7 +1473,7 @@
int* index,
audio_devices_t device) {
const sp<IAudioPolicyService> aps = get_audio_policy_service();
- if (aps == 0) return PERMISSION_DENIED;
+ if (aps == nullptr) return AudioPolicyServiceTraits::getError();
AudioStreamType streamAidl = VALUE_OR_RETURN_STATUS(
legacy2aidl_audio_stream_type_t_AudioStreamType(stream));
@@ -1367,7 +1493,7 @@
bool muted,
audio_devices_t device) {
const sp<IAudioPolicyService> aps = get_audio_policy_service();
- if (aps == 0) return PERMISSION_DENIED;
+ if (aps == nullptr) return AudioPolicyServiceTraits::getError();
media::audio::common::AudioAttributes attrAidl = VALUE_OR_RETURN_STATUS(
legacy2aidl_audio_attributes_t_AudioAttributes(attr));
@@ -1382,7 +1508,7 @@
int& index,
audio_devices_t device) {
const sp<IAudioPolicyService> aps = get_audio_policy_service();
- if (aps == 0) return PERMISSION_DENIED;
+ if (aps == nullptr) return AudioPolicyServiceTraits::getError();
media::audio::common::AudioAttributes attrAidl = VALUE_OR_RETURN_STATUS(
legacy2aidl_audio_attributes_t_AudioAttributes(attr));
@@ -1397,7 +1523,7 @@
status_t AudioSystem::getMaxVolumeIndexForAttributes(const audio_attributes_t& attr, int& index) {
const sp<IAudioPolicyService> aps = get_audio_policy_service();
- if (aps == 0) return PERMISSION_DENIED;
+ if (aps == nullptr) return AudioPolicyServiceTraits::getError();
media::audio::common::AudioAttributes attrAidl = VALUE_OR_RETURN_STATUS(
legacy2aidl_audio_attributes_t_AudioAttributes(attr));
@@ -1410,7 +1536,7 @@
status_t AudioSystem::getMinVolumeIndexForAttributes(const audio_attributes_t& attr, int& index) {
const sp<IAudioPolicyService> aps = get_audio_policy_service();
- if (aps == 0) return PERMISSION_DENIED;
+ if (aps == nullptr) return AudioPolicyServiceTraits::getError();
media::audio::common::AudioAttributes attrAidl = VALUE_OR_RETURN_STATUS(
legacy2aidl_audio_attributes_t_AudioAttributes(attr));
@@ -1423,7 +1549,7 @@
product_strategy_t AudioSystem::getStrategyForStream(audio_stream_type_t stream) {
const sp<IAudioPolicyService> aps = get_audio_policy_service();
- if (aps == 0) return PRODUCT_STRATEGY_NONE;
+ if (aps == nullptr) return PRODUCT_STRATEGY_NONE;
auto result = [&]() -> ConversionResult<product_strategy_t> {
AudioStreamType streamAidl = VALUE_OR_RETURN(
@@ -1443,7 +1569,7 @@
return BAD_VALUE;
}
const sp<IAudioPolicyService> aps = get_audio_policy_service();
- if (aps == 0) return PERMISSION_DENIED;
+ if (aps == nullptr) return AudioPolicyServiceTraits::getError();
media::audio::common::AudioAttributes aaAidl = VALUE_OR_RETURN_STATUS(
legacy2aidl_audio_attributes_t_AudioAttributes(aa));
@@ -1460,7 +1586,7 @@
audio_io_handle_t AudioSystem::getOutputForEffect(const effect_descriptor_t* desc) {
const sp<IAudioPolicyService> aps = get_audio_policy_service();
// FIXME change return type to status_t, and return PERMISSION_DENIED here
- if (aps == 0) return AUDIO_IO_HANDLE_NONE;
+ if (aps == nullptr) return AUDIO_IO_HANDLE_NONE;
auto result = [&]() -> ConversionResult<audio_io_handle_t> {
media::EffectDescriptor descAidl = VALUE_OR_RETURN(
@@ -1480,7 +1606,7 @@
audio_session_t session,
int id) {
const sp<IAudioPolicyService> aps = get_audio_policy_service();
- if (aps == 0) return PERMISSION_DENIED;
+ if (aps == nullptr) return AudioPolicyServiceTraits::getError();
media::EffectDescriptor descAidl = VALUE_OR_RETURN_STATUS(
legacy2aidl_effect_descriptor_t_EffectDescriptor(*desc));
@@ -1494,7 +1620,7 @@
status_t AudioSystem::unregisterEffect(int id) {
const sp<IAudioPolicyService> aps = get_audio_policy_service();
- if (aps == 0) return PERMISSION_DENIED;
+ if (aps == nullptr) return AudioPolicyServiceTraits::getError();
int32_t idAidl = VALUE_OR_RETURN_STATUS(convertReinterpret<int32_t>(id));
return statusTFromBinderStatus(
@@ -1503,7 +1629,7 @@
status_t AudioSystem::setEffectEnabled(int id, bool enabled) {
const sp<IAudioPolicyService> aps = get_audio_policy_service();
- if (aps == 0) return PERMISSION_DENIED;
+ if (aps == nullptr) return AudioPolicyServiceTraits::getError();
int32_t idAidl = VALUE_OR_RETURN_STATUS(convertReinterpret<int32_t>(id));
return statusTFromBinderStatus(
@@ -1512,7 +1638,7 @@
status_t AudioSystem::moveEffectsToIo(const std::vector<int>& ids, audio_io_handle_t io) {
const sp<IAudioPolicyService> aps = get_audio_policy_service();
- if (aps == 0) return PERMISSION_DENIED;
+ if (aps == nullptr) return AudioPolicyServiceTraits::getError();
std::vector<int32_t> idsAidl = VALUE_OR_RETURN_STATUS(
convertContainer<std::vector<int32_t>>(ids, convertReinterpret<int32_t, int>));
@@ -1522,7 +1648,7 @@
status_t AudioSystem::isStreamActive(audio_stream_type_t stream, bool* state, uint32_t inPastMs) {
const sp<IAudioPolicyService> aps = get_audio_policy_service();
- if (aps == 0) return PERMISSION_DENIED;
+ if (aps == nullptr) return AudioPolicyServiceTraits::getError();
if (state == NULL) return BAD_VALUE;
AudioStreamType streamAidl = VALUE_OR_RETURN_STATUS(
@@ -1536,7 +1662,7 @@
status_t AudioSystem::isStreamActiveRemotely(audio_stream_type_t stream, bool* state,
uint32_t inPastMs) {
const sp<IAudioPolicyService> aps = get_audio_policy_service();
- if (aps == 0) return PERMISSION_DENIED;
+ if (aps == nullptr) return AudioPolicyServiceTraits::getError();
if (state == NULL) return BAD_VALUE;
AudioStreamType streamAidl = VALUE_OR_RETURN_STATUS(
@@ -1549,7 +1675,7 @@
status_t AudioSystem::isSourceActive(audio_source_t stream, bool* state) {
const sp<IAudioPolicyService> aps = get_audio_policy_service();
- if (aps == 0) return PERMISSION_DENIED;
+ if (aps == nullptr) return AudioPolicyServiceTraits::getError();
if (state == NULL) return BAD_VALUE;
AudioSource streamAidl = VALUE_OR_RETURN_STATUS(
@@ -1561,32 +1687,25 @@
uint32_t AudioSystem::getPrimaryOutputSamplingRate() {
const sp<IAudioFlinger> af = get_audio_flinger();
- if (af == 0) return 0;
+ if (af == nullptr) return 0;
return af->getPrimaryOutputSamplingRate();
}
size_t AudioSystem::getPrimaryOutputFrameCount() {
const sp<IAudioFlinger> af = get_audio_flinger();
- if (af == 0) return 0;
+ if (af == nullptr) return 0;
return af->getPrimaryOutputFrameCount();
}
status_t AudioSystem::setLowRamDevice(bool isLowRamDevice, int64_t totalMemory) {
const sp<IAudioFlinger> af = get_audio_flinger();
- if (af == 0) return PERMISSION_DENIED;
+ if (af == nullptr) return AudioFlingerServiceTraits::getError();
return af->setLowRamDevice(isLowRamDevice, totalMemory);
}
-void AudioSystem::clearAudioConfigCache() {
- // called by restoreTrack_l(), which needs new IAudioFlinger and IAudioPolicyService instances
- ALOGV("clearAudioConfigCache()");
- gAudioFlingerServiceHandler.clearService();
- clearAudioPolicyService();
-}
-
status_t AudioSystem::setSupportedSystemUsages(const std::vector<audio_usage_t>& systemUsages) {
const sp<IAudioPolicyService> aps = get_audio_policy_service();
- if (aps == nullptr) return PERMISSION_DENIED;
+ if (aps == nullptr) return AudioPolicyServiceTraits::getError();
std::vector<AudioUsage> systemUsagesAidl = VALUE_OR_RETURN_STATUS(
convertContainer<std::vector<AudioUsage>>(systemUsages,
@@ -1596,7 +1715,7 @@
status_t AudioSystem::setAllowedCapturePolicy(uid_t uid, audio_flags_mask_t capturePolicy) {
const sp<IAudioPolicyService> aps = get_audio_policy_service();
- if (aps == nullptr) return PERMISSION_DENIED;
+ if (aps == nullptr) return AudioPolicyServiceTraits::getError();
int32_t uidAidl = VALUE_OR_RETURN_STATUS(legacy2aidl_uid_t_int32_t(uid));
int32_t capturePolicyAidl = VALUE_OR_RETURN_STATUS(
@@ -1607,7 +1726,7 @@
audio_offload_mode_t AudioSystem::getOffloadSupport(const audio_offload_info_t& info) {
ALOGV("%s", __func__);
const sp<IAudioPolicyService> aps = get_audio_policy_service();
- if (aps == 0) return AUDIO_OFFLOAD_NOT_SUPPORTED;
+ if (aps == nullptr) return AUDIO_OFFLOAD_NOT_SUPPORTED;
auto result = [&]() -> ConversionResult<audio_offload_mode_t> {
AudioOffloadInfo infoAidl = VALUE_OR_RETURN(
@@ -1632,7 +1751,7 @@
}
const sp<IAudioPolicyService> aps = get_audio_policy_service();
- if (aps == 0) return PERMISSION_DENIED;
+ if (aps == nullptr) return AudioPolicyServiceTraits::getError();
media::AudioPortRole roleAidl = VALUE_OR_RETURN_STATUS(
legacy2aidl_audio_port_role_t_AudioPortRole(role));
@@ -1656,7 +1775,7 @@
std::vector<media::AudioPortFw>* result) {
if (result == nullptr) return BAD_VALUE;
const sp<IAudioPolicyService> aps = get_audio_policy_service();
- if (aps == 0) return PERMISSION_DENIED;
+ if (aps == nullptr) return AudioPolicyServiceTraits::getError();
RETURN_STATUS_IF_ERROR(statusTFromBinderStatus(aps->listDeclaredDevicePorts(role, result)));
return OK;
}
@@ -1666,7 +1785,7 @@
return BAD_VALUE;
}
const sp<IAudioPolicyService> aps = get_audio_policy_service();
- if (aps == 0) return PERMISSION_DENIED;
+ if (aps == nullptr) return AudioPolicyServiceTraits::getError();
media::AudioPortFw portAidl;
RETURN_STATUS_IF_ERROR(
@@ -1682,7 +1801,7 @@
}
const sp<IAudioPolicyService> aps = get_audio_policy_service();
- if (aps == 0) return PERMISSION_DENIED;
+ if (aps == nullptr) return AudioPolicyServiceTraits::getError();
media::AudioPatchFw patchAidl = VALUE_OR_RETURN_STATUS(
legacy2aidl_audio_patch_AudioPatchFw(*patch));
@@ -1695,7 +1814,7 @@
status_t AudioSystem::releaseAudioPatch(audio_patch_handle_t handle) {
const sp<IAudioPolicyService> aps = get_audio_policy_service();
- if (aps == 0) return PERMISSION_DENIED;
+ if (aps == nullptr) return AudioPolicyServiceTraits::getError();
int32_t handleAidl = VALUE_OR_RETURN_STATUS(legacy2aidl_audio_patch_handle_t_int32_t(handle));
return statusTFromBinderStatus(aps->releaseAudioPatch(handleAidl));
@@ -1710,7 +1829,7 @@
}
const sp<IAudioPolicyService> aps = get_audio_policy_service();
- if (aps == 0) return PERMISSION_DENIED;
+ if (aps == nullptr) return AudioPolicyServiceTraits::getError();
Int numPatchesAidl;
@@ -1733,7 +1852,7 @@
}
const sp<IAudioPolicyService> aps = get_audio_policy_service();
- if (aps == 0) return PERMISSION_DENIED;
+ if (aps == nullptr) return AudioPolicyServiceTraits::getError();
media::AudioPortConfigFw configAidl = VALUE_OR_RETURN_STATUS(
legacy2aidl_audio_port_config_AudioPortConfigFw(*config));
@@ -1742,8 +1861,8 @@
status_t AudioSystem::addAudioPortCallback(const sp<AudioPortCallback>& callback) {
const sp<IAudioPolicyService> aps = get_audio_policy_service();
- if (aps == 0) return PERMISSION_DENIED;
- const auto apc = gAudioPolicyServiceHandler.getClient();
+ if (aps == nullptr) return AudioPolicyServiceTraits::getError();
+ const auto apc = getAudioPolicyClient();
if (apc == nullptr) return NO_INIT;
std::lock_guard _l(gApsCallbackMutex);
@@ -1757,8 +1876,8 @@
/*static*/
status_t AudioSystem::removeAudioPortCallback(const sp<AudioPortCallback>& callback) {
const sp<IAudioPolicyService> aps = get_audio_policy_service();
- if (aps == 0) return PERMISSION_DENIED;
- const auto apc = gAudioPolicyServiceHandler.getClient();
+ if (aps == nullptr) return AudioPolicyServiceTraits::getError();
+ const auto apc = AudioSystem::getAudioPolicyClient();
if (apc == nullptr) return NO_INIT;
std::lock_guard _l(gApsCallbackMutex);
@@ -1771,8 +1890,8 @@
status_t AudioSystem::addAudioVolumeGroupCallback(const sp<AudioVolumeGroupCallback>& callback) {
const sp<IAudioPolicyService> aps = get_audio_policy_service();
- if (aps == 0) return PERMISSION_DENIED;
- const auto apc = gAudioPolicyServiceHandler.getClient();
+ if (aps == nullptr) return AudioPolicyServiceTraits::getError();
+ const auto apc = AudioSystem::getAudioPolicyClient();
if (apc == nullptr) return NO_INIT;
std::lock_guard _l(gApsCallbackMutex);
@@ -1785,8 +1904,8 @@
status_t AudioSystem::removeAudioVolumeGroupCallback(const sp<AudioVolumeGroupCallback>& callback) {
const sp<IAudioPolicyService> aps = get_audio_policy_service();
- if (aps == 0) return PERMISSION_DENIED;
- const auto apc = gAudioPolicyServiceHandler.getClient();
+ if (aps == nullptr) return AudioPolicyServiceTraits::getError();
+ const auto apc = AudioSystem::getAudioPolicyClient();
if (apc == nullptr) return NO_INIT;
std::lock_guard _l(gApsCallbackMutex);
@@ -1844,7 +1963,7 @@
audio_port_handle_t AudioSystem::getDeviceIdForIo(audio_io_handle_t audioIo) {
const sp<IAudioFlinger> af = get_audio_flinger();
- if (af == 0) return PERMISSION_DENIED;
+ if (af == nullptr) return AudioFlingerServiceTraits::getError();
const sp<AudioIoDescriptor> desc = getIoDescriptor(audioIo);
if (desc == 0) {
return AUDIO_PORT_HANDLE_NONE;
@@ -1859,7 +1978,7 @@
return BAD_VALUE;
}
const sp<IAudioPolicyService> aps = get_audio_policy_service();
- if (aps == 0) return PERMISSION_DENIED;
+ if (aps == nullptr) return AudioPolicyServiceTraits::getError();
media::SoundTriggerSession retAidl;
RETURN_STATUS_IF_ERROR(
@@ -1873,7 +1992,7 @@
status_t AudioSystem::releaseSoundTriggerSession(audio_session_t session) {
const sp<IAudioPolicyService> aps = get_audio_policy_service();
- if (aps == 0) return PERMISSION_DENIED;
+ if (aps == nullptr) return AudioPolicyServiceTraits::getError();
int32_t sessionAidl = VALUE_OR_RETURN_STATUS(legacy2aidl_audio_session_t_int32_t(session));
return statusTFromBinderStatus(aps->releaseSoundTriggerSession(sessionAidl));
@@ -1881,7 +2000,7 @@
audio_mode_t AudioSystem::getPhoneState() {
const sp<IAudioPolicyService> aps = get_audio_policy_service();
- if (aps == 0) return AUDIO_MODE_INVALID;
+ if (aps == nullptr) return AUDIO_MODE_INVALID;
auto result = [&]() -> ConversionResult<audio_mode_t> {
media::audio::common::AudioMode retAidl;
@@ -1894,7 +2013,7 @@
status_t AudioSystem::registerPolicyMixes(const Vector<AudioMix>& mixes, bool registration) {
const sp<IAudioPolicyService> aps = get_audio_policy_service();
- if (aps == 0) return PERMISSION_DENIED;
+ if (aps == nullptr) return AudioPolicyServiceTraits::getError();
size_t mixesSize = std::min(mixes.size(), size_t{MAX_MIXES_PER_POLICY});
std::vector<media::AudioMix> mixesAidl;
@@ -1910,7 +2029,7 @@
}
const sp<IAudioPolicyService> aps = AudioSystem::get_audio_policy_service();
- if (aps == nullptr) return PERMISSION_DENIED;
+ if (aps == nullptr) return AudioPolicyServiceTraits::getError();
std::vector<::android::media::AudioMix> aidlMixes;
Status status = aps->getRegisteredPolicyMixes(&aidlMixes);
@@ -1927,7 +2046,7 @@
const std::vector<std::pair<AudioMix, std::vector<AudioMixMatchCriterion>>>&
mixesWithUpdates) {
const sp<IAudioPolicyService> aps = get_audio_policy_service();
- if (aps == 0) return PERMISSION_DENIED;
+ if (aps == nullptr) return AudioPolicyServiceTraits::getError();
std::vector<media::AudioMixUpdate> updatesAidl;
updatesAidl.reserve(mixesWithUpdates.size());
@@ -1946,7 +2065,7 @@
status_t AudioSystem::setUidDeviceAffinities(uid_t uid, const AudioDeviceTypeAddrVector& devices) {
const sp<IAudioPolicyService> aps = get_audio_policy_service();
- if (aps == 0) return PERMISSION_DENIED;
+ if (aps == nullptr) return AudioPolicyServiceTraits::getError();
int32_t uidAidl = VALUE_OR_RETURN_STATUS(legacy2aidl_uid_t_int32_t(uid));
std::vector<AudioDevice> devicesAidl = VALUE_OR_RETURN_STATUS(
@@ -1957,7 +2076,7 @@
status_t AudioSystem::removeUidDeviceAffinities(uid_t uid) {
const sp<IAudioPolicyService> aps = get_audio_policy_service();
- if (aps == 0) return PERMISSION_DENIED;
+ if (aps == nullptr) return AudioPolicyServiceTraits::getError();
int32_t uidAidl = VALUE_OR_RETURN_STATUS(legacy2aidl_uid_t_int32_t(uid));
return statusTFromBinderStatus(aps->removeUidDeviceAffinities(uidAidl));
@@ -1966,7 +2085,7 @@
status_t AudioSystem::setUserIdDeviceAffinities(int userId,
const AudioDeviceTypeAddrVector& devices) {
const sp<IAudioPolicyService> aps = get_audio_policy_service();
- if (aps == 0) return PERMISSION_DENIED;
+ if (aps == nullptr) return AudioPolicyServiceTraits::getError();
int32_t userIdAidl = VALUE_OR_RETURN_STATUS(convertReinterpret<int32_t>(userId));
std::vector<AudioDevice> devicesAidl = VALUE_OR_RETURN_STATUS(
@@ -1978,7 +2097,7 @@
status_t AudioSystem::removeUserIdDeviceAffinities(int userId) {
const sp<IAudioPolicyService> aps = get_audio_policy_service();
- if (aps == 0) return PERMISSION_DENIED;
+ if (aps == nullptr) return AudioPolicyServiceTraits::getError();
int32_t userIdAidl = VALUE_OR_RETURN_STATUS(convertReinterpret<int32_t>(userId));
return statusTFromBinderStatus(aps->removeUserIdDeviceAffinities(userIdAidl));
}
@@ -1990,7 +2109,7 @@
return BAD_VALUE;
}
const sp<IAudioPolicyService> aps = get_audio_policy_service();
- if (aps == 0) return PERMISSION_DENIED;
+ if (aps == nullptr) return AudioPolicyServiceTraits::getError();
media::AudioPortConfigFw sourceAidl = VALUE_OR_RETURN_STATUS(
legacy2aidl_audio_port_config_AudioPortConfigFw(*source));
@@ -2005,7 +2124,7 @@
status_t AudioSystem::stopAudioSource(audio_port_handle_t portId) {
const sp<IAudioPolicyService> aps = get_audio_policy_service();
- if (aps == 0) return PERMISSION_DENIED;
+ if (aps == nullptr) return AudioPolicyServiceTraits::getError();
int32_t portIdAidl = VALUE_OR_RETURN_STATUS(legacy2aidl_audio_port_handle_t_int32_t(portId));
return statusTFromBinderStatus(aps->stopAudioSource(portIdAidl));
@@ -2013,7 +2132,7 @@
status_t AudioSystem::setMasterMono(bool mono) {
const sp<IAudioPolicyService> aps = get_audio_policy_service();
- if (aps == 0) return PERMISSION_DENIED;
+ if (aps == nullptr) return AudioPolicyServiceTraits::getError();
return statusTFromBinderStatus(aps->setMasterMono(mono));
}
@@ -2022,26 +2141,26 @@
return BAD_VALUE;
}
const sp<IAudioPolicyService> aps = get_audio_policy_service();
- if (aps == 0) return PERMISSION_DENIED;
+ if (aps == nullptr) return AudioPolicyServiceTraits::getError();
return statusTFromBinderStatus(aps->getMasterMono(mono));
}
status_t AudioSystem::setMasterBalance(float balance) {
const sp<IAudioFlinger> af = get_audio_flinger();
- if (af == 0) return PERMISSION_DENIED;
+ if (af == nullptr) return AudioFlingerServiceTraits::getError();
return af->setMasterBalance(balance);
}
status_t AudioSystem::getMasterBalance(float* balance) {
const sp<IAudioFlinger> af = get_audio_flinger();
- if (af == 0) return PERMISSION_DENIED;
+ if (af == nullptr) return AudioFlingerServiceTraits::getError();
return af->getMasterBalance(balance);
}
float
AudioSystem::getStreamVolumeDB(audio_stream_type_t stream, int index, audio_devices_t device) {
const sp<IAudioPolicyService> aps = get_audio_policy_service();
- if (aps == 0) return NAN;
+ if (aps == nullptr) return NAN;
auto result = [&]() -> ConversionResult<float> {
AudioStreamType streamAidl = VALUE_OR_RETURN(
@@ -2059,13 +2178,13 @@
status_t AudioSystem::getMicrophones(std::vector<media::MicrophoneInfoFw>* microphones) {
const sp<IAudioFlinger> af = get_audio_flinger();
- if (af == 0) return PERMISSION_DENIED;
+ if (af == nullptr) return AudioFlingerServiceTraits::getError();
return af->getMicrophones(microphones);
}
status_t AudioSystem::setAudioHalPids(const std::vector<pid_t>& pids) {
const sp<IAudioFlinger> af = get_audio_flinger();
- if (af == nullptr) return PERMISSION_DENIED;
+ if (af == nullptr) return AudioFlingerServiceTraits::getError();
return af->setAudioHalPids(pids);
}
@@ -2079,7 +2198,7 @@
}
const sp<IAudioPolicyService> aps = get_audio_policy_service();
- if (aps == 0) return PERMISSION_DENIED;
+ if (aps == nullptr) return AudioPolicyServiceTraits::getError();
Int numSurroundFormatsAidl;
numSurroundFormatsAidl.value =
VALUE_OR_RETURN_STATUS(convertIntegral<int32_t>(*numSurroundFormats));
@@ -2106,7 +2225,7 @@
}
const sp<IAudioPolicyService> aps = get_audio_policy_service();
- if (aps == 0) return PERMISSION_DENIED;
+ if (aps == nullptr) return AudioPolicyServiceTraits::getError();
Int numSurroundFormatsAidl;
numSurroundFormatsAidl.value =
VALUE_OR_RETURN_STATUS(convertIntegral<int32_t>(*numSurroundFormats));
@@ -2124,7 +2243,7 @@
status_t AudioSystem::setSurroundFormatEnabled(audio_format_t audioFormat, bool enabled) {
const sp<IAudioPolicyService> aps = get_audio_policy_service();
- if (aps == 0) return PERMISSION_DENIED;
+ if (aps == nullptr) return AudioPolicyServiceTraits::getError();
AudioFormatDescription audioFormatAidl = VALUE_OR_RETURN_STATUS(
legacy2aidl_audio_format_t_AudioFormatDescription(audioFormat));
@@ -2134,7 +2253,7 @@
status_t AudioSystem::setAssistantServicesUids(const std::vector<uid_t>& uids) {
const sp<IAudioPolicyService> aps = get_audio_policy_service();
- if (aps == 0) return PERMISSION_DENIED;
+ if (aps == nullptr) return AudioPolicyServiceTraits::getError();
std::vector<int32_t> uidsAidl = VALUE_OR_RETURN_STATUS(
convertContainer<std::vector<int32_t>>(uids, legacy2aidl_uid_t_int32_t));
@@ -2143,7 +2262,7 @@
status_t AudioSystem::setActiveAssistantServicesUids(const std::vector<uid_t>& activeUids) {
const sp<IAudioPolicyService> aps = get_audio_policy_service();
- if (aps == 0) return PERMISSION_DENIED;
+ if (aps == nullptr) return AudioPolicyServiceTraits::getError();
std::vector<int32_t> activeUidsAidl = VALUE_OR_RETURN_STATUS(
convertContainer<std::vector<int32_t>>(activeUids, legacy2aidl_uid_t_int32_t));
@@ -2152,7 +2271,7 @@
status_t AudioSystem::setA11yServicesUids(const std::vector<uid_t>& uids) {
const sp<IAudioPolicyService> aps = get_audio_policy_service();
- if (aps == 0) return PERMISSION_DENIED;
+ if (aps == nullptr) return AudioPolicyServiceTraits::getError();
std::vector<int32_t> uidsAidl = VALUE_OR_RETURN_STATUS(
convertContainer<std::vector<int32_t>>(uids, legacy2aidl_uid_t_int32_t));
@@ -2161,7 +2280,7 @@
status_t AudioSystem::setCurrentImeUid(uid_t uid) {
const sp<IAudioPolicyService> aps = get_audio_policy_service();
- if (aps == 0) return PERMISSION_DENIED;
+ if (aps == nullptr) return AudioPolicyServiceTraits::getError();
int32_t uidAidl = VALUE_OR_RETURN_STATUS(legacy2aidl_uid_t_int32_t(uid));
return statusTFromBinderStatus(aps->setCurrentImeUid(uidAidl));
@@ -2169,7 +2288,7 @@
bool AudioSystem::isHapticPlaybackSupported() {
const sp<IAudioPolicyService> aps = get_audio_policy_service();
- if (aps == 0) return false;
+ if (aps == nullptr) return false;
auto result = [&]() -> ConversionResult<bool> {
bool retVal;
@@ -2182,7 +2301,7 @@
bool AudioSystem::isUltrasoundSupported() {
const sp<IAudioPolicyService> aps = get_audio_policy_service();
- if (aps == 0) return false;
+ if (aps == nullptr) return false;
auto result = [&]() -> ConversionResult<bool> {
bool retVal;
@@ -2200,7 +2319,7 @@
}
const sp<IAudioPolicyService> aps = get_audio_policy_service();
- if (aps == 0) return PERMISSION_DENIED;
+ if (aps == nullptr) return AudioPolicyServiceTraits::getError();
std::vector<AudioFormatDescription> formatsAidl;
AudioDeviceDescription deviceAidl = VALUE_OR_RETURN_STATUS(
@@ -2216,7 +2335,7 @@
status_t AudioSystem::listAudioProductStrategies(AudioProductStrategyVector& strategies) {
const sp<IAudioPolicyService> aps = get_audio_policy_service();
- if (aps == 0) return PERMISSION_DENIED;
+ if (aps == nullptr) return AudioPolicyServiceTraits::getError();
std::vector<media::AudioProductStrategy> strategiesAidl;
RETURN_STATUS_IF_ERROR(statusTFromBinderStatus(
@@ -2278,7 +2397,7 @@
product_strategy_t& productStrategy,
bool fallbackOnDefault) {
const sp<IAudioPolicyService> aps = get_audio_policy_service();
- if (aps == 0) return PERMISSION_DENIED;
+ if (aps == nullptr) return AudioPolicyServiceTraits::getError();
media::audio::common::AudioAttributes aaAidl = VALUE_OR_RETURN_STATUS(
legacy2aidl_audio_attributes_t_AudioAttributes(aa));
@@ -2294,7 +2413,7 @@
status_t AudioSystem::listAudioVolumeGroups(AudioVolumeGroupVector& groups) {
const sp<IAudioPolicyService> aps = get_audio_policy_service();
- if (aps == 0) return PERMISSION_DENIED;
+ if (aps == nullptr) return AudioPolicyServiceTraits::getError();
std::vector<media::AudioVolumeGroup> groupsAidl;
RETURN_STATUS_IF_ERROR(
@@ -2308,7 +2427,7 @@
volume_group_t& volumeGroup,
bool fallbackOnDefault) {
const sp<IAudioPolicyService> aps = get_audio_policy_service();
- if (aps == 0) return PERMISSION_DENIED;
+ if (aps == nullptr) return AudioPolicyServiceTraits::getError();
media::audio::common::AudioAttributes aaAidl = VALUE_OR_RETURN_STATUS(
legacy2aidl_audio_attributes_t_AudioAttributes(aa));
@@ -2321,13 +2440,13 @@
status_t AudioSystem::setRttEnabled(bool enabled) {
const sp<IAudioPolicyService> aps = get_audio_policy_service();
- if (aps == 0) return PERMISSION_DENIED;
+ if (aps == nullptr) return AudioPolicyServiceTraits::getError();
return statusTFromBinderStatus(aps->setRttEnabled(enabled));
}
bool AudioSystem::isCallScreenModeSupported() {
const sp<IAudioPolicyService> aps = get_audio_policy_service();
- if (aps == 0) return false;
+ if (aps == nullptr) return false;
auto result = [&]() -> ConversionResult<bool> {
bool retAidl;
@@ -2342,9 +2461,7 @@
device_role_t role,
const AudioDeviceTypeAddrVector& devices) {
const sp<IAudioPolicyService> aps = get_audio_policy_service();
- if (aps == 0) {
- return PERMISSION_DENIED;
- }
+ if (aps == nullptr) return AudioPolicyServiceTraits::getError();
int32_t strategyAidl = VALUE_OR_RETURN_STATUS(legacy2aidl_product_strategy_t_int32_t(strategy));
media::DeviceRole roleAidl = VALUE_OR_RETURN_STATUS(legacy2aidl_device_role_t_DeviceRole(role));
@@ -2359,9 +2476,7 @@
device_role_t role,
const AudioDeviceTypeAddrVector& devices) {
const sp<IAudioPolicyService> aps = get_audio_policy_service();
- if (aps == 0) {
- return PERMISSION_DENIED;
- }
+ if (aps == nullptr) return AudioPolicyServiceTraits::getError();
int32_t strategyAidl = VALUE_OR_RETURN_STATUS(legacy2aidl_product_strategy_t_int32_t(strategy));
media::DeviceRole roleAidl = VALUE_OR_RETURN_STATUS(legacy2aidl_device_role_t_DeviceRole(role));
@@ -2375,9 +2490,8 @@
status_t
AudioSystem::clearDevicesRoleForStrategy(product_strategy_t strategy, device_role_t role) {
const sp<IAudioPolicyService> aps = get_audio_policy_service();
- if (aps == 0) {
- return PERMISSION_DENIED;
- }
+ if (aps == nullptr) return AudioPolicyServiceTraits::getError();
+
int32_t strategyAidl = VALUE_OR_RETURN_STATUS(legacy2aidl_product_strategy_t_int32_t(strategy));
media::DeviceRole roleAidl = VALUE_OR_RETURN_STATUS(legacy2aidl_device_role_t_DeviceRole(role));
return statusTFromBinderStatus(
@@ -2388,9 +2502,8 @@
device_role_t role,
AudioDeviceTypeAddrVector& devices) {
const sp<IAudioPolicyService> aps = get_audio_policy_service();
- if (aps == 0) {
- return PERMISSION_DENIED;
- }
+ if (aps == nullptr) return AudioPolicyServiceTraits::getError();
+
int32_t strategyAidl = VALUE_OR_RETURN_STATUS(legacy2aidl_product_strategy_t_int32_t(strategy));
media::DeviceRole roleAidl = VALUE_OR_RETURN_STATUS(legacy2aidl_device_role_t_DeviceRole(role));
std::vector<AudioDevice> devicesAidl;
@@ -2406,9 +2519,7 @@
device_role_t role,
const AudioDeviceTypeAddrVector& devices) {
const sp<IAudioPolicyService> aps = get_audio_policy_service();
- if (aps == 0) {
- return PERMISSION_DENIED;
- }
+ if (aps == nullptr) return AudioPolicyServiceTraits::getError();
AudioSource audioSourceAidl = VALUE_OR_RETURN_STATUS(
legacy2aidl_audio_source_t_AudioSource(audioSource));
@@ -2424,9 +2535,8 @@
device_role_t role,
const AudioDeviceTypeAddrVector& devices) {
const sp<IAudioPolicyService> aps = get_audio_policy_service();
- if (aps == 0) {
- return PERMISSION_DENIED;
- }
+ if (aps == nullptr) return AudioPolicyServiceTraits::getError();
+
AudioSource audioSourceAidl = VALUE_OR_RETURN_STATUS(
legacy2aidl_audio_source_t_AudioSource(audioSource));
media::DeviceRole roleAidl = VALUE_OR_RETURN_STATUS(legacy2aidl_device_role_t_DeviceRole(role));
@@ -2440,9 +2550,8 @@
status_t AudioSystem::removeDevicesRoleForCapturePreset(
audio_source_t audioSource, device_role_t role, const AudioDeviceTypeAddrVector& devices) {
const sp<IAudioPolicyService> aps = get_audio_policy_service();
- if (aps == 0) {
- return PERMISSION_DENIED;
- }
+ if (aps == nullptr) return AudioPolicyServiceTraits::getError();
+
AudioSource audioSourceAidl = VALUE_OR_RETURN_STATUS(
legacy2aidl_audio_source_t_AudioSource(audioSource));
media::DeviceRole roleAidl = VALUE_OR_RETURN_STATUS(legacy2aidl_device_role_t_DeviceRole(role));
@@ -2456,9 +2565,8 @@
status_t AudioSystem::clearDevicesRoleForCapturePreset(audio_source_t audioSource,
device_role_t role) {
const sp<IAudioPolicyService> aps = get_audio_policy_service();
- if (aps == 0) {
- return PERMISSION_DENIED;
- }
+ if (aps == nullptr) return AudioPolicyServiceTraits::getError();
+
AudioSource audioSourceAidl = VALUE_OR_RETURN_STATUS(
legacy2aidl_audio_source_t_AudioSource(audioSource));
media::DeviceRole roleAidl = VALUE_OR_RETURN_STATUS(legacy2aidl_device_role_t_DeviceRole(role));
@@ -2470,9 +2578,7 @@
device_role_t role,
AudioDeviceTypeAddrVector& devices) {
const sp<IAudioPolicyService> aps = get_audio_policy_service();
- if (aps == 0) {
- return PERMISSION_DENIED;
- }
+ if (aps == nullptr) return AudioPolicyServiceTraits::getError();
AudioSource audioSourceAidl = VALUE_OR_RETURN_STATUS(
legacy2aidl_audio_source_t_AudioSource(audioSource));
media::DeviceRole roleAidl = VALUE_OR_RETURN_STATUS(legacy2aidl_device_role_t_DeviceRole(role));
@@ -2491,9 +2597,7 @@
if (spatializer == nullptr) {
return BAD_VALUE;
}
- if (aps == 0) {
- return PERMISSION_DENIED;
- }
+ if (aps == nullptr) return AudioPolicyServiceTraits::getError();
media::GetSpatializerResponse response;
RETURN_STATUS_IF_ERROR(statusTFromBinderStatus(
aps->getSpatializer(callback, &response)));
@@ -2510,9 +2614,7 @@
if (canBeSpatialized == nullptr) {
return BAD_VALUE;
}
- if (aps == 0) {
- return PERMISSION_DENIED;
- }
+ if (aps == nullptr) return AudioPolicyServiceTraits::getError();
audio_attributes_t attributes = attr != nullptr ? *attr : AUDIO_ATTRIBUTES_INITIALIZER;
audio_config_t configuration = config != nullptr ? *config : AUDIO_CONFIG_INITIALIZER;
@@ -2531,9 +2633,7 @@
status_t AudioSystem::getSoundDoseInterface(const sp<media::ISoundDoseCallback>& callback,
sp<media::ISoundDose>* soundDose) {
const sp<IAudioFlinger> af = get_audio_flinger();
- if (af == nullptr) {
- return PERMISSION_DENIED;
- }
+ if (af == nullptr) return AudioFlingerServiceTraits::getError();
if (soundDose == nullptr) {
return BAD_VALUE;
}
@@ -2550,9 +2650,7 @@
}
const sp<IAudioPolicyService> aps = get_audio_policy_service();
- if (aps == 0) {
- return PERMISSION_DENIED;
- }
+ if (aps == nullptr) return AudioPolicyServiceTraits::getError();
media::audio::common::AudioAttributes attrAidl = VALUE_OR_RETURN_STATUS(
legacy2aidl_audio_attributes_t_AudioAttributes(*attr));
@@ -2574,9 +2672,7 @@
}
const sp<IAudioPolicyService> aps = get_audio_policy_service();
- if (aps == 0) {
- return PERMISSION_DENIED;
- }
+ if (aps == nullptr) return AudioPolicyServiceTraits::getError();
media::audio::common::AudioAttributes attrAidl = VALUE_OR_RETURN_STATUS(
legacy2aidl_audio_attributes_t_AudioAttributes(*attr));
@@ -2593,52 +2689,40 @@
status_t AudioSystem::setRequestedLatencyMode(
audio_io_handle_t output, audio_latency_mode_t mode) {
const sp<IAudioFlinger> af = get_audio_flinger();
- if (af == nullptr) {
- return PERMISSION_DENIED;
- }
+ if (af == nullptr) return AudioFlingerServiceTraits::getError();
return af->setRequestedLatencyMode(output, mode);
}
status_t AudioSystem::getSupportedLatencyModes(audio_io_handle_t output,
std::vector<audio_latency_mode_t>* modes) {
const sp<IAudioFlinger> af = get_audio_flinger();
- if (af == nullptr) {
- return PERMISSION_DENIED;
- }
+ if (af == nullptr) return AudioFlingerServiceTraits::getError();
return af->getSupportedLatencyModes(output, modes);
}
status_t AudioSystem::setBluetoothVariableLatencyEnabled(bool enabled) {
const sp<IAudioFlinger> af = get_audio_flinger();
- if (af == nullptr) {
- return PERMISSION_DENIED;
- }
+ if (af == nullptr) return AudioFlingerServiceTraits::getError();
return af->setBluetoothVariableLatencyEnabled(enabled);
}
status_t AudioSystem::isBluetoothVariableLatencyEnabled(
bool *enabled) {
const sp<IAudioFlinger> af = get_audio_flinger();
- if (af == nullptr) {
- return PERMISSION_DENIED;
- }
+ if (af == nullptr) return AudioFlingerServiceTraits::getError();
return af->isBluetoothVariableLatencyEnabled(enabled);
}
status_t AudioSystem::supportsBluetoothVariableLatency(
bool *support) {
const sp<IAudioFlinger> af = get_audio_flinger();
- if (af == nullptr) {
- return PERMISSION_DENIED;
- }
+ if (af == nullptr) return AudioFlingerServiceTraits::getError();
return af->supportsBluetoothVariableLatency(support);
}
status_t AudioSystem::getAudioPolicyConfig(media::AudioPolicyConfig *config) {
const sp<IAudioFlinger> af = get_audio_flinger();
- if (af == nullptr) {
- return PERMISSION_DENIED;
- }
+ if (af == nullptr) return AudioFlingerServiceTraits::getError();
return af->getAudioPolicyConfig(config);
}
@@ -2685,9 +2769,7 @@
LOG_ALWAYS_FATAL_IF(listener == nullptr);
const sp<IAudioPolicyService> aps = get_audio_policy_service();
- if (aps == 0) {
- return PERMISSION_DENIED;
- }
+ if (aps == nullptr) return AudioPolicyServiceTraits::getError();
std::lock_guard _l(AudioSystem::gSoundTriggerMutex);
gSoundTriggerCaptureStateListener = new CaptureStateListenerImpl(aps, listener);
@@ -2699,43 +2781,33 @@
status_t AudioSystem::setVibratorInfos(
const std::vector<media::AudioVibratorInfo>& vibratorInfos) {
const sp<IAudioFlinger> af = get_audio_flinger();
- if (af == nullptr) {
- return PERMISSION_DENIED;
- }
+ if (af == nullptr) return AudioFlingerServiceTraits::getError();
return af->setVibratorInfos(vibratorInfos);
}
-status_t AudioSystem::getMmapPolicyInfo(
+status_t AudioSystem::getMmapPolicyInfos(
AudioMMapPolicyType policyType, std::vector<AudioMMapPolicyInfo> *policyInfos) {
- const sp<IAudioFlinger> af = get_audio_flinger();
- if (af == nullptr) {
- return PERMISSION_DENIED;
- }
- return af->getMmapPolicyInfos(policyType, policyInfos);
+ const sp<IAudioPolicyService> aps = get_audio_policy_service();
+ if (aps == nullptr) return AudioPolicyServiceTraits::getError();
+ return statusTFromBinderStatus(aps->getMmapPolicyInfos(policyType, policyInfos));
}
int32_t AudioSystem::getAAudioMixerBurstCount() {
const sp<IAudioFlinger> af = get_audio_flinger();
- if (af == nullptr) {
- return PERMISSION_DENIED;
- }
+ if (af == nullptr) return AudioFlingerServiceTraits::getError();
return af->getAAudioMixerBurstCount();
}
int32_t AudioSystem::getAAudioHardwareBurstMinUsec() {
const sp<IAudioFlinger> af = get_audio_flinger();
- if (af == nullptr) {
- return PERMISSION_DENIED;
- }
+ if (af == nullptr) return AudioFlingerServiceTraits::getError();
return af->getAAudioHardwareBurstMinUsec();
}
status_t AudioSystem::getSupportedMixerAttributes(
audio_port_handle_t portId, std::vector<audio_mixer_attributes_t> *mixerAttrs) {
const sp<IAudioPolicyService> aps = get_audio_policy_service();
- if (aps == nullptr) {
- return PERMISSION_DENIED;
- }
+ if (aps == nullptr) return AudioPolicyServiceTraits::getError();
int32_t portIdAidl = VALUE_OR_RETURN_STATUS(legacy2aidl_audio_port_handle_t_int32_t(portId));
std::vector<media::AudioMixerAttributesInternal> _aidlReturn;
@@ -2753,9 +2825,7 @@
uid_t uid,
const audio_mixer_attributes_t *mixerAttr) {
const sp<IAudioPolicyService> aps = get_audio_policy_service();
- if (aps == nullptr) {
- return PERMISSION_DENIED;
- }
+ if (aps == nullptr) return AudioPolicyServiceTraits::getError();
media::audio::common::AudioAttributes attrAidl = VALUE_OR_RETURN_STATUS(
legacy2aidl_audio_attributes_t_AudioAttributes(*attr));
@@ -2773,9 +2843,7 @@
audio_port_handle_t portId,
std::optional<audio_mixer_attributes_t> *mixerAttr) {
const sp<IAudioPolicyService> aps = get_audio_policy_service();
- if (aps == nullptr) {
- return PERMISSION_DENIED;
- }
+ if (aps == nullptr) return AudioPolicyServiceTraits::getError();
media::audio::common::AudioAttributes attrAidl = VALUE_OR_RETURN_STATUS(
legacy2aidl_audio_attributes_t_AudioAttributes(*attr));
@@ -2796,9 +2864,7 @@
audio_port_handle_t portId,
uid_t uid) {
const sp<IAudioPolicyService> aps = get_audio_policy_service();
- if (aps == nullptr) {
- return PERMISSION_DENIED;
- }
+ if (aps == nullptr) return AudioPolicyServiceTraits::getError();
media::audio::common::AudioAttributes attrAidl = VALUE_OR_RETURN_STATUS(
legacy2aidl_audio_attributes_t_AudioAttributes(*attr));
@@ -2808,6 +2874,18 @@
aps->clearPreferredMixerAttributes(attrAidl, portIdAidl, uidAidl));
}
+status_t AudioSystem::getMmapPolicyForDevice(AudioMMapPolicyType policyType,
+ audio_devices_t device,
+ AudioMMapPolicyInfo *policyInfo) {
+ const sp<IAudioPolicyService> aps = get_audio_policy_service();
+ if (aps == nullptr) return AudioPolicyServiceTraits::getError();
+ policyInfo->device.type = VALUE_OR_RETURN_STATUS(
+ legacy2aidl_audio_devices_t_AudioDeviceDescription(device));
+ return statusTFromBinderStatus(aps->getMmapPolicyForDevice(policyType, policyInfo));
+}
+
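A hedged usage sketch for the relocated mmap policy queries; the wrapper function name, the DEFAULT policy type, and the speaker device chosen here are illustrative:

status_t exampleQueryMmapPolicies() {
    using media::audio::common::AudioMMapPolicyInfo;
    using media::audio::common::AudioMMapPolicyType;
    std::vector<AudioMMapPolicyInfo> infos;
    // Served by IAudioPolicyService rather than IAudioFlinger after this change.
    RETURN_STATUS_IF_ERROR(AudioSystem::getMmapPolicyInfos(
            AudioMMapPolicyType::DEFAULT, &infos));
    // Per-device query added alongside it.
    AudioMMapPolicyInfo speakerPolicy;
    return AudioSystem::getMmapPolicyForDevice(
            AudioMMapPolicyType::DEFAULT, AUDIO_DEVICE_OUT_SPEAKER, &speakerPolicy);
}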
// ---------------------------------------------------------------------------
int AudioSystem::AudioPolicyServiceClient::addAudioPortCallback(
@@ -2952,19 +3030,14 @@
return Status::ok();
}
-void AudioSystem::AudioPolicyServiceClient::binderDied(const wp<IBinder>& who __unused) {
- {
- std::lock_guard _l(mMutex);
- for (const auto& callback : mAudioPortCallbacks) {
- callback->onServiceDied();
- }
- for (const auto& callback : mAudioVolumeGroupCallbacks) {
- callback->onServiceDied();
- }
+void AudioSystem::AudioPolicyServiceClient::onServiceDied() {
+ std::lock_guard _l(mMutex);
+ for (const auto& callback : mAudioPortCallbacks) {
+ callback->onServiceDied();
}
- AudioSystem::clearAudioPolicyService();
-
- ALOGW("AudioPolicyService server died!");
+ for (const auto& callback : mAudioVolumeGroupCallbacks) {
+ callback->onServiceDied();
+ }
}
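As an illustration of the new death-notification path (class name and body are hypothetical), a registered AudioPortCallback now learns about service death through onServiceDied() instead of a binder DeathRecipient:

class ExamplePortCallback : public AudioSystem::AudioPortCallback {
    void onAudioPortListUpdate() override { /* refresh any cached port list */ }
    void onAudioPatchListUpdate() override { /* refresh any cached patches */ }
    void onServiceDied() override { ALOGW("audio policy service died"); }
};
// Registration itself is unchanged:
// AudioSystem::addAudioPortCallback(sp<ExamplePortCallback>::make());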
ConversionResult<record_client_info_t>
diff --git a/media/libaudioclient/AudioTrack.cpp b/media/libaudioclient/AudioTrack.cpp
index e0c5e92..a9409eb 100644
--- a/media/libaudioclient/AudioTrack.cpp
+++ b/media/libaudioclient/AudioTrack.cpp
@@ -2875,10 +2875,6 @@
__func__, mPortId, isOffloadedOrDirect_l() ? "Offloaded or Direct" : "PCM", from);
++mSequence;
- // refresh the audio configuration cache in this process to make sure we get new
- // output parameters and new IAudioFlinger in createTrack_l()
- AudioSystem::clearAudioConfigCache();
-
if (!forceRestore &&
(isOffloadedOrDirect_l() || mDoNotReconnect)) {
// FIXME re-creation of offloaded and direct tracks is not yet implemented;
@@ -2911,10 +2907,6 @@
const int INITIAL_RETRIES = 3;
int retries = INITIAL_RETRIES;
retry:
- if (retries < INITIAL_RETRIES) {
- // See the comment for clearAudioConfigCache at the start of the function.
- AudioSystem::clearAudioConfigCache();
- }
mFlags = mOrigFlags;
// If a new IAudioTrack is successfully created, createTrack_l() will modify the
diff --git a/media/libaudioclient/aidl/android/media/IAudioPolicyService.aidl b/media/libaudioclient/aidl/android/media/IAudioPolicyService.aidl
index 40ab938..956acce 100644
--- a/media/libaudioclient/aidl/android/media/IAudioPolicyService.aidl
+++ b/media/libaudioclient/aidl/android/media/IAudioPolicyService.aidl
@@ -46,6 +46,8 @@
import android.media.audio.common.AudioDevice;
import android.media.audio.common.AudioDeviceDescription;
import android.media.audio.common.AudioFormatDescription;
+import android.media.audio.common.AudioMMapPolicyInfo;
+import android.media.audio.common.AudioMMapPolicyType;
import android.media.audio.common.AudioMode;
import android.media.audio.common.AudioPolicyForcedConfig;
import android.media.audio.common.AudioPolicyForceUse;
@@ -482,6 +484,17 @@
* required to control audio access.
*/
INativePermissionController getPermissionController();
+
+ /**
+ * Query mmap policy information.
+ */
+ AudioMMapPolicyInfo[] getMmapPolicyInfos(AudioMMapPolicyType policyType);
+
+ /**
+ * Query the mmap policy for the given device.
+ */
+ void getMmapPolicyForDevice(AudioMMapPolicyType policyType,
+ inout AudioMMapPolicyInfo policyInfo);
// When adding a new method, please review and update
// AudioPolicyService.cpp AudioPolicyService::onTransact()
// AudioPolicyService.cpp IAUDIOPOLICYSERVICE_BINDER_METHOD_MACRO_LIST
diff --git a/media/libaudioclient/include/media/AudioSystem.h b/media/libaudioclient/include/media/AudioSystem.h
index 31e4f05..5565281 100644
--- a/media/libaudioclient/include/media/AudioSystem.h
+++ b/media/libaudioclient/include/media/AudioSystem.h
@@ -103,6 +103,7 @@
template <typename ServiceInterface, typename Client, typename AidlInterface,
typename ServiceTraits>
friend class ServiceHandler;
+ friend class AudioFlingerServiceTraits;
public:
@@ -426,17 +427,12 @@
static status_t setEffectEnabled(int id, bool enabled);
static status_t moveEffectsToIo(const std::vector<int>& ids, audio_io_handle_t io);
- // clear stream to output mapping cache (gStreamOutputMap)
- // and output configuration cache (gOutputs)
- static void clearAudioConfigCache();
-
// Sets a local AudioPolicyService interface to be used by AudioSystem.
// This is used by audioserver main() to allow client object initialization
// before exposing any interfaces to ServiceManager.
static status_t setLocalAudioPolicyService(const sp<media::IAudioPolicyService>& aps);
static sp<media::IAudioPolicyService> get_audio_policy_service();
- static void clearAudioPolicyService();
// helpers for android.media.AudioManager.getProperty(), see description there for meaning
static uint32_t getPrimaryOutputSamplingRate();
@@ -801,7 +797,7 @@
static status_t setVibratorInfos(const std::vector<media::AudioVibratorInfo>& vibratorInfos);
- static status_t getMmapPolicyInfo(
+ static status_t getMmapPolicyInfos(
media::audio::common::AudioMMapPolicyType policyType,
std::vector<media::audio::common::AudioMMapPolicyInfo> *policyInfos);
@@ -809,7 +805,11 @@
static int32_t getAAudioHardwareBurstMinUsec();
- class AudioFlingerClient: public IBinder::DeathRecipient, public media::BnAudioFlingerClient
+ static status_t getMmapPolicyForDevice(
+ media::audio::common::AudioMMapPolicyType policyType, audio_devices_t device,
+ media::audio::common::AudioMMapPolicyInfo *policyInfo);
+
+ class AudioFlingerClient: public media::BnAudioFlingerClient
{
public:
AudioFlingerClient() = default;
@@ -819,9 +819,6 @@
audio_channel_mask_t channelMask, size_t* buffSize) EXCLUDES(mMutex);
sp<AudioIoDescriptor> getIoDescriptor(audio_io_handle_t ioHandle) EXCLUDES(mMutex);
- // DeathRecipient
- void binderDied(const wp<IBinder>& who) final;
-
// IAudioFlingerClient
// indicate a change in the configuration of an output or input: keeps the cached
@@ -866,8 +863,7 @@
sp<AudioIoDescriptor> getIoDescriptor_l(audio_io_handle_t ioHandle) REQUIRES(mMutex);
};
- class AudioPolicyServiceClient: public IBinder::DeathRecipient,
- public media::BnAudioPolicyServiceClient {
+ class AudioPolicyServiceClient: public media::BnAudioPolicyServiceClient {
public:
AudioPolicyServiceClient() = default;
@@ -891,8 +887,7 @@
return !mAudioVolumeGroupCallbacks.empty();
}
- // DeathRecipient
- void binderDied(const wp<IBinder>& who) final;
+ void onServiceDied();
// IAudioPolicyServiceClient
binder::Status onAudioVolumeGroupChanged(int32_t group, int32_t flags) override;
@@ -922,6 +917,7 @@
static audio_io_handle_t getOutput(audio_stream_type_t stream);
static sp<AudioFlingerClient> getAudioFlingerClient();
+ static sp<AudioPolicyServiceClient> getAudioPolicyClient();
static sp<AudioIoDescriptor> getIoDescriptor(audio_io_handle_t ioHandle);
// Invokes all registered error callbacks with the given error code.
diff --git a/media/libaudiofoundation/AudioContainers.cpp b/media/libaudiofoundation/AudioContainers.cpp
index e1265cf..6727562 100644
--- a/media/libaudiofoundation/AudioContainers.cpp
+++ b/media/libaudiofoundation/AudioContainers.cpp
@@ -130,6 +130,27 @@
return ss.str();
}
+std::string toString(const DeviceIdSet& deviceIds) {
+ if (deviceIds.empty()) {
+ return "Empty device ids";
+ }
+ std::stringstream ss;
+ for (auto it = deviceIds.begin(); it != deviceIds.end(); ++it) {
+ if (it != deviceIds.begin()) {
+ ss << ", ";
+ }
+ ss << *it;
+ }
+ return ss.str();
+}
+
+audio_port_handle_t getFirstDeviceId(const DeviceIdSet& deviceIds) {
+ if (deviceIds.empty()) {
+ return AUDIO_PORT_HANDLE_NONE;
+ }
+ return *(deviceIds.begin());
+}
+
AudioProfileAttributesMultimap createAudioProfilesAttrMap(audio_profile profiles[],
uint32_t first,
uint32_t last) {
diff --git a/media/libaudiofoundation/include/media/AudioContainers.h b/media/libaudiofoundation/include/media/AudioContainers.h
index 46fd620..3673871 100644
--- a/media/libaudiofoundation/include/media/AudioContainers.h
+++ b/media/libaudiofoundation/include/media/AudioContainers.h
@@ -33,6 +33,7 @@
using FormatSet = std::set<audio_format_t>;
using SampleRateSet = std::set<uint32_t>;
using MixerBehaviorSet = std::set<audio_mixer_behavior_t>;
+using DeviceIdSet = std::set<audio_port_handle_t>;
using FormatVector = std::vector<audio_format_t>;
using AudioProfileAttributesMultimap =
@@ -139,6 +140,16 @@
}
/**
+ * Returns a human readable string for a set of device ids.
+ */
+std::string toString(const DeviceIdSet& deviceIds);
+
+/**
+ * Returns the first device id of a set of device ids, or AUDIO_PORT_HANDLE_NONE when it is empty.
+ */
+audio_port_handle_t getFirstDeviceId(const DeviceIdSet& deviceIds);
+
+/**
* Create audio profile attributes map by given audio profile array from the range of [first, last).
*
* @param profiles the array of audio profiles.
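
A short usage sketch for the two helpers declared above, assuming only this header; the logging macro is a stand-in for whatever facility the caller already uses:

```cpp
#include <media/AudioContainers.h>
#include <log/log.h>  // assumption: ALOGI is available to the caller

void logDeviceIds(const android::DeviceIdSet& ids) {
    // getFirstDeviceId() returns AUDIO_PORT_HANDLE_NONE for an empty set,
    // so no separate emptiness check is needed here.
    const audio_port_handle_t first = android::getFirstDeviceId(ids);
    ALOGI("first device id: %d, all ids: %s", first, android::toString(ids).c_str());
}
```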
diff --git a/media/libaudiohal/Android.bp b/media/libaudiohal/Android.bp
index 75e2c11..74a64bf 100644
--- a/media/libaudiohal/Android.bp
+++ b/media/libaudiohal/Android.bp
@@ -23,7 +23,6 @@
],
required: [
- "libaudiohal@5.0",
"libaudiohal@6.0",
"libaudiohal@7.0",
"libaudiohal@7.1",
diff --git a/media/libaudiohal/FactoryHal.cpp b/media/libaudiohal/FactoryHal.cpp
index 15cb297..2c30693 100644
--- a/media/libaudiohal/FactoryHal.cpp
+++ b/media/libaudiohal/FactoryHal.cpp
@@ -50,12 +50,11 @@
* This list need to keep sync with AudioHalVersionInfo.VERSIONS in
* media/java/android/media/AudioHalVersionInfo.java.
*/
-static const std::array<AudioHalVersionInfo, 5> sAudioHALVersions = {
+static const std::array<AudioHalVersionInfo, 4> sAudioHALVersions = {
AudioHalVersionInfo(AudioHalVersionInfo::Type::AIDL, 1, 0),
AudioHalVersionInfo(AudioHalVersionInfo::Type::HIDL, 7, 1),
AudioHalVersionInfo(AudioHalVersionInfo::Type::HIDL, 7, 0),
AudioHalVersionInfo(AudioHalVersionInfo::Type::HIDL, 6, 0),
- AudioHalVersionInfo(AudioHalVersionInfo::Type::HIDL, 5, 0),
};
static const std::map<AudioHalVersionInfo::Type, InterfaceName> sDevicesHALInterfaces = {
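
With the @5.0 entry removed, sAudioHALVersions is a four-element priority list (AIDL first, then HIDL 7.1, 7.0, 6.0). The sketch below shows the generic "first version that instantiates wins" pattern such a list supports; it is illustrative only, and the factory callable stands in for whatever lookup FactoryHal.cpp actually performs:

```cpp
#include <array>
#include <functional>
#include <memory>

// Illustrative: walk a priority-ordered version list and keep the first HAL
// that can actually be instantiated on this device.
template <typename Factory, typename VersionInfo, std::size_t N>
std::shared_ptr<Factory> pickFirstAvailable(
        const std::array<VersionInfo, N>& versions,
        const std::function<std::shared_ptr<Factory>(const VersionInfo&)>& create) {
    for (const auto& version : versions) {
        if (auto factory = create(version)) {
            return factory;  // highest-priority HAL that is present
        }
    }
    return nullptr;  // no supported audio HAL found
}
```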
diff --git a/media/libaudiohal/impl/Android.bp b/media/libaudiohal/impl/Android.bp
index 1a6b949..f5dec56 100644
--- a/media/libaudiohal/impl/Android.bp
+++ b/media/libaudiohal/impl/Android.bp
@@ -83,32 +83,6 @@
}
cc_library_shared {
- name: "libaudiohal@5.0",
- defaults: [
- "libaudiohal_default",
- "libaudiohal_hidl_default",
- ],
- srcs: [
- ":audio_core_hal_client_sources",
- ":audio_effect_hidl_hal_client_sources",
- "EffectsFactoryHalEntry.cpp",
- ],
- shared_libs: [
- "android.hardware.audio.common@5.0",
- "android.hardware.audio.common@5.0-util",
- "android.hardware.audio.effect@5.0",
- "android.hardware.audio.effect@5.0-util",
- "android.hardware.audio@5.0",
- "android.hardware.audio@5.0-util",
- ],
- cflags: [
- "-DMAJOR_VERSION=5",
- "-DMINOR_VERSION=0",
- "-include common/all-versions/VersionMacro.h",
- ],
-}
-
-cc_library_shared {
name: "libaudiohal@6.0",
defaults: [
"libaudiohal_default",
diff --git a/media/libaudiohal/impl/StreamHalAidl.cpp b/media/libaudiohal/impl/StreamHalAidl.cpp
index c4e4ae8..0d65f8c 100644
--- a/media/libaudiohal/impl/StreamHalAidl.cpp
+++ b/media/libaudiohal/impl/StreamHalAidl.cpp
@@ -367,8 +367,12 @@
if (!mStream) return NO_INIT;
StreamDescriptor::Reply reply;
RETURN_STATUS_IF_ERROR(updateCountersIfNeeded(&reply, statePositions));
- *frames = std::max<int64_t>(0, reply.observable.frames);
- *timestamp = std::max<int64_t>(0, reply.observable.timeNs);
+ if (reply.observable.frames == StreamDescriptor::Position::UNKNOWN ||
+ reply.observable.timeNs == StreamDescriptor::Position::UNKNOWN) {
+ return INVALID_OPERATION;
+ }
+ *frames = reply.observable.frames;
+ *timestamp = reply.observable.timeNs;
return OK;
}
@@ -377,8 +381,12 @@
if (!mStream) return NO_INIT;
StreamDescriptor::Reply reply;
RETURN_STATUS_IF_ERROR(updateCountersIfNeeded(&reply));
- *frames = std::max<int64_t>(0, reply.hardware.frames);
- *timestamp = std::max<int64_t>(0, reply.hardware.timeNs);
+ if (reply.hardware.frames == StreamDescriptor::Position::UNKNOWN ||
+ reply.hardware.timeNs == StreamDescriptor::Position::UNKNOWN) {
+ return INVALID_OPERATION;
+ }
+ *frames = reply.hardware.frames;
+ *timestamp = reply.hardware.timeNs;
return OK;
}
@@ -387,7 +395,10 @@
if (!mStream) return NO_INIT;
StreamDescriptor::Reply reply;
RETURN_STATUS_IF_ERROR(updateCountersIfNeeded(&reply));
- *frames = std::max<int32_t>(0, reply.xrunFrames);
+ if (reply.xrunFrames == StreamDescriptor::Position::UNKNOWN) {
+ return INVALID_OPERATION;
+ }
+ *frames = reply.xrunFrames;
return OK;
}
@@ -577,7 +588,9 @@
// For compatibility with HIDL behavior, apply a "soft" position reset
// after receiving the "drain ready" callback.
std::lock_guard l(mLock);
- mStatePositions.framesAtFlushOrDrain = mLastReply.observable.frames;
+ if (mLastReply.observable.frames != StreamDescriptor::Position::UNKNOWN) {
+ mStatePositions.framesAtFlushOrDrain = mLastReply.observable.frames;
+ }
} else {
AUGMENT_LOG(W, "unexpected onDrainReady in the state %s", toString(state).c_str());
}
@@ -670,7 +683,8 @@
}
mLastReply = *reply;
mLastReplyExpirationNs = uptimeNanos() + mLastReplyLifeTimeNs;
- if (!mIsInput && reply->status == STATUS_OK) {
+ if (!mIsInput && reply->status == STATUS_OK &&
+ reply->observable.frames != StreamDescriptor::Position::UNKNOWN) {
if (command.getTag() == StreamDescriptor::Command::standby &&
reply->state == StreamDescriptor::State::STANDBY) {
mStatePositions.framesAtStandby = reply->observable.frames;
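
The position getters above now surface StreamDescriptor::Position::UNKNOWN as INVALID_OPERATION instead of silently clamping to zero, so callers can tell "no position yet" apart from a real position of 0. A hedged sketch of the resulting caller-side pattern; the getter is passed in as a callable because the exact method names are not part of this excerpt:

```cpp
#include <utils/Errors.h>
#include <cstdint>
#include <functional>

// Illustrative: refresh a cached position, keeping the previous value when the
// HAL has not reported a valid position yet (INVALID_OPERATION).
bool refreshPosition(const std::function<android::status_t(int64_t*, int64_t*)>& getPosition,
                     int64_t* lastFrames, int64_t* lastTimeNs) {
    int64_t frames = 0, timeNs = 0;
    const android::status_t status = getPosition(&frames, &timeNs);
    if (status == android::OK) {
        *lastFrames = frames;   // fresh, valid position
        *lastTimeNs = timeNs;
        return true;
    }
    // INVALID_OPERATION: position currently unknown; any other error (e.g. NO_INIT)
    // means the stream itself is unusable and should be handled by the caller.
    return false;
}
```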
diff --git a/media/libaudiohal/tests/EffectHalVersionCompatibility_test.cpp b/media/libaudiohal/tests/EffectHalVersionCompatibility_test.cpp
index e8731ea..c11f908 100644
--- a/media/libaudiohal/tests/EffectHalVersionCompatibility_test.cpp
+++ b/media/libaudiohal/tests/EffectHalVersionCompatibility_test.cpp
@@ -83,6 +83,7 @@
{Parameter::Id::visualizerTag, 1},
{Parameter::Id::volumeTag, 1},
{Parameter::Id::spatializerTag, 2},
+ {Parameter::Id::eraserTag, 3},
};
// Tags defined Parameter::Specific union.
static const std::unordered_map<Parameter::Specific::Tag, int /* version */>
@@ -104,6 +105,7 @@
{Parameter::Specific::visualizer, 1},
{Parameter::Specific::volume, 1},
{Parameter::Specific::spatializer, 2},
+ {Parameter::Specific::eraser, 3},
};
class MockFactory : public IFactory {
@@ -223,6 +225,7 @@
case Parameter::Id::virtualizerTag:
case Parameter::Id::visualizerTag:
case Parameter::Id::volumeTag:
+ case Parameter::Id::eraserTag:
FALLTHROUGH_INTENDED;
case Parameter::Id::spatializerTag: {
if (kParamIdEffectVersionMap.find(idTag) != kParamIdEffectVersionMap.end() &&
diff --git a/media/libeffects/dynamicsproc/aidl/DynamicsProcessingContext.cpp b/media/libeffects/dynamicsproc/aidl/DynamicsProcessingContext.cpp
index fd4e615..dd14ac2 100644
--- a/media/libeffects/dynamicsproc/aidl/DynamicsProcessingContext.cpp
+++ b/media/libeffects/dynamicsproc/aidl/DynamicsProcessingContext.cpp
@@ -52,6 +52,7 @@
if (mDpFreq != nullptr) {
mDpFreq->reset();
}
+ mEngineInited = false;
return RetCode::SUCCESS;
}
diff --git a/media/libmedia/AudioCapabilities.cpp b/media/libmedia/AudioCapabilities.cpp
index e8cf517..1a92307 100644
--- a/media/libmedia/AudioCapabilities.cpp
+++ b/media/libmedia/AudioCapabilities.cpp
@@ -26,7 +26,7 @@
namespace android {
-const Range<int>& AudioCapabilities::getBitrateRange() const {
+const Range<int32_t>& AudioCapabilities::getBitrateRange() const {
return mBitrateRange;
}
@@ -86,7 +86,7 @@
}
void AudioCapabilities::initWithPlatformLimits() {
- mBitrateRange = Range<int>(0, INT_MAX);
+ mBitrateRange = Range<int>(0, INT32_MAX);
mInputChannelRanges.push_back(Range<int>(1, MAX_INPUT_CHANNEL_COUNT));
const int minSampleRate = base::GetIntProperty("ro.mediacodec.min_sample_rate", 7350);
@@ -94,30 +94,31 @@
mSampleRateRanges.push_back(Range<int>(minSampleRate, maxSampleRate));
}
-bool AudioCapabilities::supports(int sampleRate, int inputChannels) {
+bool AudioCapabilities::supports(std::optional<int> sampleRate,
+ std::optional<int> inputChannels) {
// channels and sample rates are checked orthogonally
- if (inputChannels != 0
+ if (inputChannels
&& !std::any_of(mInputChannelRanges.begin(), mInputChannelRanges.end(),
- [inputChannels](const Range<int> &a) { return a.contains(inputChannels); })) {
+ [inputChannels](const Range<int> &a) { return a.contains(inputChannels.value()); })) {
return false;
}
- if (sampleRate != 0
+ if (sampleRate
&& !std::any_of(mSampleRateRanges.begin(), mSampleRateRanges.end(),
- [sampleRate](const Range<int> &a) { return a.contains(sampleRate); })) {
+ [sampleRate](const Range<int> &a) { return a.contains(sampleRate.value()); })) {
return false;
}
return true;
}
bool AudioCapabilities::isSampleRateSupported(int sampleRate) {
- return supports(sampleRate, 0);
+ return supports(std::make_optional<int>(sampleRate), std::nullopt);
}
void AudioCapabilities::limitSampleRates(std::vector<int> rates) {
std::vector<Range<int>> sampleRateRanges;
std::sort(rates.begin(), rates.end());
for (int rate : rates) {
- if (supports(rate, 0 /* channels */)) {
+ if (supports(std::make_optional<int>(rate), std::nullopt /* channels */)) {
sampleRateRanges.push_back(Range<int>(rate, rate));
}
}
@@ -280,7 +281,7 @@
void AudioCapabilities::applyLimits(
const std::vector<Range<int>> &inputChannels,
- const std::optional<Range<int>> &bitRates) {
+ const std::optional<Range<int32_t>> &bitRates) {
// clamp & make a local copy
std::vector<Range<int>> inputChannelsCopy(inputChannels.size());
for (int i = 0; i < inputChannels.size(); i++) {
@@ -301,7 +302,7 @@
void AudioCapabilities::parseFromInfo(const sp<AMessage> &format) {
int maxInputChannels = MAX_INPUT_CHANNEL_COUNT;
std::vector<Range<int>> channels = { Range<int>(1, maxInputChannels) };
- std::optional<Range<int>> bitRates = POSITIVE_INTEGERS;
+ std::optional<Range<int32_t>> bitRates = POSITIVE_INT32;
AString rateAString;
if (format->findString("sample-rate-ranges", &rateAString)) {
@@ -348,7 +349,7 @@
}
if (format->findString("bitrate-range", &valueStr)) {
- std::optional<Range<int>> parsedBitrate = ParseIntRange(valueStr.c_str());
+ std::optional<Range<int32_t>> parsedBitrate = ParseIntRange(valueStr.c_str());
if (parsedBitrate) {
bitRates = bitRates.value().intersect(parsedBitrate.value());
}
@@ -372,10 +373,12 @@
}
bool AudioCapabilities::supportsFormat(const sp<AMessage> &format) {
- int32_t sampleRate;
- format->findInt32(KEY_SAMPLE_RATE, &sampleRate);
- int32_t channels;
- format->findInt32(KEY_CHANNEL_COUNT, &channels);
+ int32_t sampleRateValue;
+ std::optional<int> sampleRate = format->findInt32(KEY_SAMPLE_RATE, &sampleRateValue)
+ ? std::make_optional<int>(sampleRateValue) : std::nullopt;
+ int32_t channelsValue;
+ std::optional<int> channels = format->findInt32(KEY_CHANNEL_COUNT, &channelsValue)
+ ? std::make_optional<int>(channelsValue) : std::nullopt;
if (!supports(sampleRate, channels)) {
return false;
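
supports() now takes std::optional parameters, so "not constrained" is expressed as std::nullopt instead of the old magic value 0. A standalone sketch of that behavior (plain pairs instead of Range<int>, so it compiles without the class):

```cpp
#include <algorithm>
#include <optional>
#include <utility>
#include <vector>

// Illustrative re-statement of the optional-based check in AudioCapabilities::supports():
// std::nullopt skips the dimension entirely; a present value must fall in some range.
bool supportsSampleRate(const std::vector<std::pair<int, int>>& sampleRateRanges,
                        std::optional<int> sampleRate) {
    if (!sampleRate) {
        return true;  // caller did not constrain the sample rate
    }
    return std::any_of(sampleRateRanges.begin(), sampleRateRanges.end(),
                       [&](const std::pair<int, int>& r) {
                           return r.first <= *sampleRate && *sampleRate <= r.second;
                       });
}
```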
diff --git a/media/libmedia/CodecCapabilities.cpp b/media/libmedia/CodecCapabilities.cpp
index 5bed1c4..87eb4bc 100644
--- a/media/libmedia/CodecCapabilities.cpp
+++ b/media/libmedia/CodecCapabilities.cpp
@@ -25,7 +25,7 @@
namespace android {
-bool CodecCapabilities::SupportsBitrate(Range<int> bitrateRange,
+bool CodecCapabilities::SupportsBitrate(Range<int32_t> bitrateRange,
const sp<AMessage> &format) {
// consider max bitrate over average bitrate for support
int32_t maxBitrate = 0;
diff --git a/media/libmedia/include/media/AudioCapabilities.h b/media/libmedia/include/media/AudioCapabilities.h
index 2bc3335..d2bd9d7 100644
--- a/media/libmedia/include/media/AudioCapabilities.h
+++ b/media/libmedia/include/media/AudioCapabilities.h
@@ -37,7 +37,7 @@
/**
* Returns the range of supported bitrates in bits/second.
*/
- const Range<int>& getBitrateRange() const;
+ const Range<int32_t>& getBitrateRange() const;
/**
* Returns the array of supported sample rates if the codec
@@ -110,7 +110,7 @@
std::string mMediaType;
std::vector<ProfileLevel> mProfileLevels;
- Range<int> mBitrateRange;
+ Range<int32_t> mBitrateRange;
std::vector<int> mSampleRates;
std::vector<Range<int>> mSampleRateRanges;
@@ -121,13 +121,13 @@
void init(std::string mediaType, std::vector<ProfileLevel> profLevs,
const sp<AMessage> &format);
void initWithPlatformLimits();
- bool supports(int sampleRate, int inputChannels);
+ bool supports(std::optional<int> sampleRate, std::optional<int> inputChannels);
void limitSampleRates(std::vector<int> rates);
void createDiscreteSampleRates();
void limitSampleRates(std::vector<Range<int>> rateRanges);
void applyLevelLimits();
void applyLimits(const std::vector<Range<int>> &inputChannels,
- const std::optional<Range<int>> &bitRates);
+ const std::optional<Range<int32_t>> &bitRates);
void parseFromInfo(const sp<AMessage> &format);
friend struct CodecCapabilities;
diff --git a/media/libmedia/include/media/CodecCapabilities.h b/media/libmedia/include/media/CodecCapabilities.h
index 9d1c4ea..570c8b5 100644
--- a/media/libmedia/include/media/CodecCapabilities.h
+++ b/media/libmedia/include/media/CodecCapabilities.h
@@ -34,7 +34,7 @@
struct CodecCapabilities {
- static bool SupportsBitrate(Range<int> bitrateRange,
+ static bool SupportsBitrate(Range<int32_t> bitrateRange,
const sp<AMessage> &format);
/**
diff --git a/media/libmedia/include/media/CodecCapabilitiesUtils.h b/media/libmedia/include/media/CodecCapabilitiesUtils.h
index 2bf822a..89a452c 100644
--- a/media/libmedia/include/media/CodecCapabilitiesUtils.h
+++ b/media/libmedia/include/media/CodecCapabilitiesUtils.h
@@ -118,7 +118,7 @@
T upper_;
};
-static const Range<int> POSITIVE_INTEGERS = Range<int>(1, INT_MAX);
+static const Range<int32_t> POSITIVE_INT32 = Range<int32_t>(1, INT32_MAX);
// found stuff that is not supported by framework (=> this should not happen)
constexpr int ERROR_CAPABILITIES_UNRECOGNIZED = (1 << 0);
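
POSITIVE_INT32 replaces POSITIVE_INTEGERS and fixes the bitrate range to int32_t. A small sketch of how it gets narrowed, mirroring the intersect() call in AudioCapabilities::parseFromInfo() above; this assumes ParseIntRange and Range<> are visible from this header, as their use in AudioCapabilities.cpp suggests:

```cpp
#include <media/CodecCapabilitiesUtils.h>
#include <optional>

namespace android {

// Illustrative: start from the full positive int32_t range and narrow it with a
// parsed "bitrate-range" string, as AudioCapabilities::parseFromInfo() does.
Range<int32_t> narrowBitrateRange(const char* rangeStr) {
    std::optional<Range<int32_t>> bitRates = POSITIVE_INT32;
    std::optional<Range<int32_t>> parsed = ParseIntRange(rangeStr);
    if (parsed) {
        bitRates = bitRates.value().intersect(parsed.value());
    }
    return bitRates.value();  // e.g. "8000-510000" yields [8000, 510000]
}

}  // namespace android
```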
diff --git a/media/libmedia/tests/codeccapabilities/CodecCapabilitiesTest.cpp b/media/libmedia/tests/codeccapabilities/CodecCapabilitiesTest.cpp
index 89c9739..02e43a4 100644
--- a/media/libmedia/tests/codeccapabilities/CodecCapabilitiesTest.cpp
+++ b/media/libmedia/tests/codeccapabilities/CodecCapabilitiesTest.cpp
@@ -64,7 +64,7 @@
};
TEST_F(AudioCapsAacTest, AudioCaps_Aac_Bitrate) {
- const Range<int>& bitrateRange = audioCaps->getBitrateRange();
+ const Range<int32_t>& bitrateRange = audioCaps->getBitrateRange();
EXPECT_EQ(bitrateRange.lower(), 8000) << "bitrate range1 does not match. lower: "
<< bitrateRange.lower();
EXPECT_EQ(bitrateRange.upper(), 510000) << "bitrate range1 does not match. upper: "
@@ -114,7 +114,7 @@
};
TEST_F(AudioCapsRawTest, AudioCaps_Raw_Bitrate) {
- const Range<int>& bitrateRange = audioCaps->getBitrateRange();
+ const Range<int32_t>& bitrateRange = audioCaps->getBitrateRange();
EXPECT_EQ(bitrateRange.lower(), 1);
EXPECT_EQ(bitrateRange.upper(), 10000000);
}
diff --git a/media/libmediaplayerservice/StagefrightRecorder.cpp b/media/libmediaplayerservice/StagefrightRecorder.cpp
index 086baa3..23e7a47 100644
--- a/media/libmediaplayerservice/StagefrightRecorder.cpp
+++ b/media/libmediaplayerservice/StagefrightRecorder.cpp
@@ -59,6 +59,7 @@
#include <media/stagefright/PersistentSurface.h>
#include <media/MediaProfiles.h>
#include <camera/CameraParameters.h>
+#include <gui/Flags.h>
#include <utils/Errors.h>
#include <sys/types.h>
@@ -1932,16 +1933,32 @@
return BAD_VALUE;
}
+#if WB_LIBCAMERASERVICE_WITH_DEPENDENCIES
+ sp<Surface> surface = new Surface(mPreviewSurface);
+ mCameraSourceTimeLapse = CameraSourceTimeLapse::CreateFromCamera(
+ mCamera, mCameraProxy, mCameraId, clientName, uid, pid,
+ videoSize, mFrameRate, surface,
+ std::llround(1e6 / mCaptureFps));
+#else
mCameraSourceTimeLapse = CameraSourceTimeLapse::CreateFromCamera(
mCamera, mCameraProxy, mCameraId, clientName, uid, pid,
videoSize, mFrameRate, mPreviewSurface,
std::llround(1e6 / mCaptureFps));
+#endif
*cameraSource = mCameraSourceTimeLapse;
} else {
+#if WB_LIBCAMERASERVICE_WITH_DEPENDENCIES
+ sp<Surface> surface = new Surface(mPreviewSurface);
+ *cameraSource = CameraSource::CreateFromCamera(
+ mCamera, mCameraProxy, mCameraId, clientName, uid, pid,
+ videoSize, mFrameRate,
+ surface);
+#else
*cameraSource = CameraSource::CreateFromCamera(
mCamera, mCameraProxy, mCameraId, clientName, uid, pid,
videoSize, mFrameRate,
mPreviewSurface);
+#endif
}
mCamera.clear();
mCameraProxy.clear();
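
Both branches above create the same camera source; the only difference is how the preview target is expressed once WB_LIBCAMERASERVICE_WITH_DEPENDENCIES flips the camera APIs from IGraphicBufferProducer to Surface. A compressed, illustrative sketch of that flag-gated adaptation (this helper is not part of StagefrightRecorder):

```cpp
#include <gui/Flags.h>
#include <gui/IGraphicBufferProducer.h>
#include <gui/Surface.h>

namespace android {

// Illustrative: adapt a raw buffer producer to whatever SurfaceType the
// camera sources expect under the current build flag.
static sp<SurfaceType> asCameraTarget(const sp<IGraphicBufferProducer>& producer) {
#if WB_LIBCAMERASERVICE_WITH_DEPENDENCIES
    return new Surface(producer);  // flag on: camera APIs take Surfaces
#else
    return producer;               // flag off: SurfaceType is IGraphicBufferProducer
#endif
}

}  // namespace android
```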
diff --git a/media/libmediaplayerservice/fuzzer/mediarecorder_fuzzer.cpp b/media/libmediaplayerservice/fuzzer/mediarecorder_fuzzer.cpp
index 2518c21..3339ae8 100644
--- a/media/libmediaplayerservice/fuzzer/mediarecorder_fuzzer.cpp
+++ b/media/libmediaplayerservice/fuzzer/mediarecorder_fuzzer.cpp
@@ -24,6 +24,7 @@
#include <fakeservicemanager/FakeServiceManager.h>
#include <gui/IGraphicBufferProducer.h>
#include <gui/Surface.h>
+#include <gui/Flags.h>
#include <gui/SurfaceComposerClient.h>
#include <media/stagefright/PersistentSurface.h>
#include <media/stagefright/foundation/AString.h>
@@ -126,14 +127,9 @@
status_t connect(const sp<ICameraClient> & /*client*/) override { return 0; };
status_t lock() override { return 0; };
status_t unlock() override { return 0; };
- status_t setPreviewTarget(const sp<IGraphicBufferProducer> & /*bufferProducer*/) override {
- return 0;
- };
+ status_t setPreviewTarget(const sp<SurfaceType> & /*target*/) override { return 0; };
+ status_t setPreviewCallbackTarget(const sp<SurfaceType> & /*target*/) override { return 0; };
void setPreviewCallbackFlag(int /*flag*/) override{};
- status_t setPreviewCallbackTarget(
- const sp<IGraphicBufferProducer> & /*callbackProducer*/) override {
- return 0;
- };
status_t startPreview() override { return 0; };
void stopPreview() override{};
bool previewEnabled() override { return true; };
@@ -152,9 +148,7 @@
return 0;
};
status_t setVideoBufferMode(int32_t /*videoBufferMode*/) override { return 0; };
- status_t setVideoTarget(const sp<IGraphicBufferProducer> & /*bufferProducer*/) override {
- return 0;
- };
+ status_t setVideoTarget(const sp<SurfaceType> & /*target*/) override { return 0; };
status_t setAudioRestriction(int32_t /*mode*/) override { return 0; };
int32_t getGlobalAudioRestriction() override { return 0; };
IBinder *onAsBinder() override { return reinterpret_cast<IBinder *>(this); };
diff --git a/media/libstagefright/CameraSource.cpp b/media/libstagefright/CameraSource.cpp
index e26f189..81a5508 100644
--- a/media/libstagefright/CameraSource.cpp
+++ b/media/libstagefright/CameraSource.cpp
@@ -35,6 +35,7 @@
#include <camera/StringUtils.h>
#include <com_android_graphics_libgui_flags.h>
#include <gui/Surface.h>
+#include <gui/Flags.h>
#include <utils/String8.h>
#include <cutils/properties.h>
@@ -99,7 +100,7 @@
pid_t clientPid,
Size videoSize,
int32_t frameRate,
- const sp<IGraphicBufferProducer>& surface) {
+ const sp<SurfaceType>& surface) {
CameraSource *source = new CameraSource(camera, proxy, cameraId,
clientName, clientUid, clientPid, videoSize, frameRate, surface);
@@ -115,7 +116,7 @@
pid_t clientPid,
Size videoSize,
int32_t frameRate,
- const sp<IGraphicBufferProducer>& surface)
+ const sp<SurfaceType>& surface)
: mCameraFlags(0),
mNumInputBuffers(0),
mVideoFrameRate(-1),
@@ -490,11 +491,23 @@
#if COM_ANDROID_GRAPHICS_LIBGUI_FLAGS(WB_CONSUMER_BASE_OWNS_BQ)
mVideoBufferConsumer = new BufferItemConsumer(usage, bufferCount);
mVideoBufferConsumer->setName(String8::format("StageFright-CameraSource"));
+
+#if WB_LIBCAMERASERVICE_WITH_DEPENDENCIES
+ mVideoBufferProducer = mVideoBufferConsumer->getSurface();
+#else
mVideoBufferProducer = mVideoBufferConsumer->getSurface()->getIGraphicBufferProducer();
+#endif // WB_LIBCAMERASERVICE_WITH_DEPENDENCIES
+
#else
mVideoBufferConsumer = new BufferItemConsumer(consumer, usage, bufferCount);
mVideoBufferConsumer->setName(String8::format("StageFright-CameraSource"));
+
+#if WB_LIBCAMERASERVICE_WITH_DEPENDENCIES
+ mVideoBufferProducer = new Surface(producer);
+#else
mVideoBufferProducer = producer;
+#endif // WB_LIBCAMERASERVICE_WITH_DEPENDENCIES
+
#endif // COM_ANDROID_GRAPHICS_LIBGUI_FLAGS(WB_CONSUMER_BASE_OWNS_BQ)
status_t res = mVideoBufferConsumer->setDefaultBufferSize(width, height);
diff --git a/media/libstagefright/CameraSourceTimeLapse.cpp b/media/libstagefright/CameraSourceTimeLapse.cpp
index 50a512f..b1a005b 100644
--- a/media/libstagefright/CameraSourceTimeLapse.cpp
+++ b/media/libstagefright/CameraSourceTimeLapse.cpp
@@ -29,6 +29,7 @@
#include <media/stagefright/MetaData.h>
#include <camera/Camera.h>
#include <camera/CameraParameters.h>
+#include <gui/Flags.h>
#include <utils/String8.h>
#include <utils/Vector.h>
@@ -44,7 +45,11 @@
pid_t clientPid,
Size videoSize,
int32_t videoFrameRate,
+#if WB_LIBCAMERASERVICE_WITH_DEPENDENCIES
+ const sp<Surface>& surface,
+#else
const sp<IGraphicBufferProducer>& surface,
+#endif
int64_t timeBetweenFrameCaptureUs) {
CameraSourceTimeLapse *source = new
@@ -71,7 +76,11 @@
pid_t clientPid,
Size videoSize,
int32_t videoFrameRate,
+#if WB_LIBCAMERASERVICE_WITH_DEPENDENCIES
+ const sp<Surface>& surface,
+#else
const sp<IGraphicBufferProducer>& surface,
+#endif
int64_t timeBetweenFrameCaptureUs)
: CameraSource(camera, proxy, cameraId, clientName, clientUid, clientPid,
videoSize, videoFrameRate, surface),
diff --git a/media/libstagefright/include/media/stagefright/CameraSource.h b/media/libstagefright/include/media/stagefright/CameraSource.h
index f42e315..54e4f18 100644
--- a/media/libstagefright/include/media/stagefright/CameraSource.h
+++ b/media/libstagefright/include/media/stagefright/CameraSource.h
@@ -25,6 +25,8 @@
#include <camera/ICameraRecordingProxy.h>
#include <camera/CameraParameters.h>
#include <gui/BufferItemConsumer.h>
+#include <gui/Surface.h>
+#include <gui/Flags.h>
#include <utils/List.h>
#include <utils/RefBase.h>
#include <utils/String16.h>
@@ -77,7 +79,7 @@
pid_t clientPid,
Size videoSize,
int32_t frameRate,
- const sp<IGraphicBufferProducer>& surface);
+ const sp<SurfaceType>& surface);
virtual ~CameraSource();
@@ -165,7 +167,7 @@
sp<Camera> mCamera;
sp<ICameraRecordingProxy> mCameraRecordingProxy;
sp<DeathNotifier> mDeathNotifier;
- sp<IGraphicBufferProducer> mSurface;
+ sp<SurfaceType> mSurface;
sp<MetaData> mMeta;
int64_t mStartTimeUs;
@@ -180,8 +182,7 @@
CameraSource(const sp<hardware::ICamera>& camera, const sp<ICameraRecordingProxy>& proxy,
int32_t cameraId, const String16& clientName, uid_t clientUid, pid_t clientPid,
- Size videoSize, int32_t frameRate,
- const sp<IGraphicBufferProducer>& surface);
+ Size videoSize, int32_t frameRate, const sp<SurfaceType> & surface);
virtual status_t startCameraRecording();
virtual void releaseRecordingFrame(const sp<IMemory>& frame);
@@ -221,7 +222,7 @@
static const nsecs_t kMemoryBaseAvailableTimeoutNs = 200000000; // 200ms
// Consumer and producer of the buffer queue between this class and camera.
sp<BufferItemConsumer> mVideoBufferConsumer;
- sp<IGraphicBufferProducer> mVideoBufferProducer;
+ sp<SurfaceType> mVideoBufferProducer;
// Memory used to send the buffers to encoder, where sp<IMemory> stores VideoNativeMetadata.
sp<IMemoryHeap> mMemoryHeapBase;
List<sp<IMemory>> mMemoryBases;
diff --git a/media/libstagefright/include/media/stagefright/CameraSourceTimeLapse.h b/media/libstagefright/include/media/stagefright/CameraSourceTimeLapse.h
index 3c311cf..a789b12 100644
--- a/media/libstagefright/include/media/stagefright/CameraSourceTimeLapse.h
+++ b/media/libstagefright/include/media/stagefright/CameraSourceTimeLapse.h
@@ -23,6 +23,7 @@
#include <utils/RefBase.h>
#include <utils/threads.h>
#include <utils/String16.h>
+#include <gui/Flags.h>
namespace android {
@@ -44,7 +45,7 @@
pid_t clientPid,
Size videoSize,
int32_t videoFrameRate,
- const sp<IGraphicBufferProducer>& surface,
+ const sp<SurfaceType>& surface,
int64_t timeBetweenTimeLapseFrameCaptureUs);
virtual ~CameraSourceTimeLapse();
@@ -120,7 +121,7 @@
pid_t clientPid,
Size videoSize,
int32_t videoFrameRate,
- const sp<IGraphicBufferProducer>& surface,
+ const sp<SurfaceType>& surface,
int64_t timeBetweenTimeLapseFrameCaptureUs);
// Wrapper over CameraSource::signalBufferReturned() to implement quick stop.
diff --git a/media/libstagefright/include/media/stagefright/MediaCodecConstants.h b/media/libstagefright/include/media/stagefright/MediaCodecConstants.h
index b1cf665..c9e0a97 100644
--- a/media/libstagefright/include/media/stagefright/MediaCodecConstants.h
+++ b/media/libstagefright/include/media/stagefright/MediaCodecConstants.h
@@ -586,6 +586,139 @@
}
}
+// APV ProfileLevel
+inline constexpr int32_t APVProfile422_10 = 0x01;
+inline constexpr int32_t APVProfile422_10HDR10 = 0x1000;
+inline constexpr int32_t APVProfile422_10HDR10Plus = 0x2000;
+
+inline static const char *asString_APVProfile(int32_t i, const char *def = "??") {
+ switch (i) {
+ case APVProfile422_10: return "APVProfile422_10";
+ case APVProfile422_10HDR10: return "APVProfile422_10HDR10";
+ case APVProfile422_10HDR10Plus: return "APVProfile422_10HDR10Plus";
+ default: return def;
+ }
+}
+
+inline constexpr int32_t APVLevel1Band0 = 0x101;
+inline constexpr int32_t APVLevel1Band1 = 0x102;
+inline constexpr int32_t APVLevel1Band2 = 0x104;
+inline constexpr int32_t APVLevel1Band3 = 0x108;
+inline constexpr int32_t APVLevel11Band0 = 0x201;
+inline constexpr int32_t APVLevel11Band1 = 0x202;
+inline constexpr int32_t APVLevel11Band2 = 0x204;
+inline constexpr int32_t APVLevel11Band3 = 0x208;
+inline constexpr int32_t APVLevel2Band0 = 0x401;
+inline constexpr int32_t APVLevel2Band1 = 0x402;
+inline constexpr int32_t APVLevel2Band2 = 0x404;
+inline constexpr int32_t APVLevel2Band3 = 0x408;
+inline constexpr int32_t APVLevel21Band0 = 0x801;
+inline constexpr int32_t APVLevel21Band1 = 0x802;
+inline constexpr int32_t APVLevel21Band2 = 0x804;
+inline constexpr int32_t APVLevel21Band3 = 0x808;
+inline constexpr int32_t APVLevel3Band0 = 0x1001;
+inline constexpr int32_t APVLevel3Band1 = 0x1002;
+inline constexpr int32_t APVLevel3Band2 = 0x1004;
+inline constexpr int32_t APVLevel3Band3 = 0x1008;
+inline constexpr int32_t APVLevel31Band0 = 0x2001;
+inline constexpr int32_t APVLevel31Band1 = 0x2002;
+inline constexpr int32_t APVLevel31Band2 = 0x2004;
+inline constexpr int32_t APVLevel31Band3 = 0x2008;
+inline constexpr int32_t APVLevel4Band0 = 0x4001;
+inline constexpr int32_t APVLevel4Band1 = 0x4002;
+inline constexpr int32_t APVLevel4Band2 = 0x4004;
+inline constexpr int32_t APVLevel4Band3 = 0x4008;
+inline constexpr int32_t APVLevel41Band0 = 0x8001;
+inline constexpr int32_t APVLevel41Band1 = 0x8002;
+inline constexpr int32_t APVLevel41Band2 = 0x8004;
+inline constexpr int32_t APVLevel41Band3 = 0x8008;
+inline constexpr int32_t APVLevel5Band0 = 0x10001;
+inline constexpr int32_t APVLevel5Band1 = 0x10002;
+inline constexpr int32_t APVLevel5Band2 = 0x10004;
+inline constexpr int32_t APVLevel5Band3 = 0x10008;
+inline constexpr int32_t APVLevel51Band0 = 0x20001;
+inline constexpr int32_t APVLevel51Band1 = 0x20002;
+inline constexpr int32_t APVLevel51Band2 = 0x20004;
+inline constexpr int32_t APVLevel51Band3 = 0x20008;
+inline constexpr int32_t APVLevel6Band0 = 0x40001;
+inline constexpr int32_t APVLevel6Band1 = 0x40002;
+inline constexpr int32_t APVLevel6Band2 = 0x40004;
+inline constexpr int32_t APVLevel6Band3 = 0x40008;
+inline constexpr int32_t APVLevel61Band0 = 0x80001;
+inline constexpr int32_t APVLevel61Band1 = 0x80002;
+inline constexpr int32_t APVLevel61Band2 = 0x80004;
+inline constexpr int32_t APVLevel61Band3 = 0x80008;
+inline constexpr int32_t APVLevel7Band0 = 0x100001;
+inline constexpr int32_t APVLevel7Band1 = 0x100002;
+inline constexpr int32_t APVLevel7Band2 = 0x100004;
+inline constexpr int32_t APVLevel7Band3 = 0x100008;
+inline constexpr int32_t APVLevel71Band0 = 0x200001;
+inline constexpr int32_t APVLevel71Band1 = 0x200002;
+inline constexpr int32_t APVLevel71Band2 = 0x200004;
+inline constexpr int32_t APVLevel71Band3 = 0x200008;
+
+inline static const char *asString_APVBandLevel(int32_t i, const char *def = "??") {
+ switch (i) {
+ case APVLevel1Band0: return "Level 1, Band 0";
+ case APVLevel1Band1: return "Level 1, Band 1";
+ case APVLevel1Band2: return "Level 1, Band 2";
+ case APVLevel1Band3: return "Level 1, Band 3";
+ case APVLevel11Band0: return "Level 1.1, Band 0";
+ case APVLevel11Band1: return "Level 1.1, Band 1";
+ case APVLevel11Band2: return "Level 1.1, Band 2";
+ case APVLevel11Band3: return "Level 1.1, Band 3";
+ case APVLevel2Band0: return "Level 2, Band 0";
+ case APVLevel2Band1: return "Level 2, Band 1";
+ case APVLevel2Band2: return "Level 2, Band 2";
+ case APVLevel2Band3: return "Level 2, Band 3";
+ case APVLevel21Band0: return "Level 2.1, Band 0";
+ case APVLevel21Band1: return "Level 2.1, Band 1";
+ case APVLevel21Band2: return "Level 2.1, Band 2";
+ case APVLevel21Band3: return "Level 2.1, Band 3";
+ case APVLevel3Band0: return "Level 3, Band 0";
+ case APVLevel3Band1: return "Level 3, Band 1";
+ case APVLevel3Band2: return "Level 3, Band 2";
+ case APVLevel3Band3: return "Level 3, Band 3";
+ case APVLevel31Band0: return "Level 3.1, Band 0";
+ case APVLevel31Band1: return "Level 3.1, Band 1";
+ case APVLevel31Band2: return "Level 3.1, Band 2";
+ case APVLevel31Band3: return "Level 3.1, Band 3";
+ case APVLevel4Band0: return "Level 4, Band 0";
+ case APVLevel4Band1: return "Level 4, Band 1";
+ case APVLevel4Band2: return "Level 4, Band 2";
+ case APVLevel4Band3: return "Level 4, Band 3";
+ case APVLevel41Band0: return "Level 4.1, Band 0";
+ case APVLevel41Band1: return "Level 4.1, Band 1";
+ case APVLevel41Band2: return "Level 4.1, Band 2";
+ case APVLevel41Band3: return "Level 4.1, Band 3";
+ case APVLevel5Band0: return "Level 5, Band 0";
+ case APVLevel5Band1: return "Level 5, Band 1";
+ case APVLevel5Band2: return "Level 5, Band 2";
+ case APVLevel5Band3: return "Level 5, Band 3";
+ case APVLevel51Band0: return "Level 5.1, Band 0";
+ case APVLevel51Band1: return "Level 5.1, Band 1";
+ case APVLevel51Band2: return "Level 5.1, Band 2";
+ case APVLevel51Band3: return "Level 5.1, Band 3";
+ case APVLevel6Band0: return "Level 6, Band 0";
+ case APVLevel6Band1: return "Level 6, Band 1";
+ case APVLevel6Band2: return "Level 6, Band 2";
+ case APVLevel6Band3: return "Level 6, Band 3";
+ case APVLevel61Band0: return "Level 6.1, Band 0";
+ case APVLevel61Band1: return "Level 6.1, Band 1";
+ case APVLevel61Band2: return "Level 6.1, Band 2";
+ case APVLevel61Band3: return "Level 6.1, Band 3";
+ case APVLevel7Band0: return "Level 7, Band 0";
+ case APVLevel7Band1: return "Level 7, Band 1";
+ case APVLevel7Band2: return "Level 7, Band 2";
+ case APVLevel7Band3: return "Level 7, Band 3";
+ case APVLevel71Band0: return "Level 7.1, Band 0";
+ case APVLevel71Band1: return "Level 7.1, Band 1";
+ case APVLevel71Band2: return "Level 7.1, Band 2";
+ case APVLevel71Band3: return "Level 7.1, Band 3";
+ default: return def;
+ }
+}
+
inline constexpr int32_t BITRATE_MODE_CBR = 2;
inline constexpr int32_t BITRATE_MODE_CBR_FD = 3;
inline constexpr int32_t BITRATE_MODE_CQ = 0;
@@ -654,6 +787,7 @@
inline constexpr int32_t COLOR_FormatYUV444Flexible = 0x7F444888;
inline constexpr int32_t COLOR_FormatYUV444Interleaved = 29;
inline constexpr int32_t COLOR_FormatYUVP010 = 54;
+inline constexpr int32_t COLOR_FormatYUVP210 = 60;
inline constexpr int32_t COLOR_QCOM_FormatYUV420SemiPlanar = 0x7fa30c00;
inline constexpr int32_t COLOR_TI_FormatYUV420PackedSemiPlanar = 0x7f000100;
@@ -712,6 +846,7 @@
case COLOR_FormatYUV444Flexible: return "YUV444Flexible";
case COLOR_FormatYUV444Interleaved: return "YUV444Interleaved";
case COLOR_FormatYUVP010: return "YUVP010";
+ case COLOR_FormatYUVP210: return "YUVP210";
case COLOR_QCOM_FormatYUV420SemiPlanar: return "QCOM_YUV420SemiPlanar";
case COLOR_TI_FormatYUV420PackedSemiPlanar: return "TI_YUV420PackedSemiPlanar";
default: return def;
@@ -731,6 +866,7 @@
inline constexpr char MIMETYPE_VIDEO_VP8[] = "video/x-vnd.on2.vp8";
inline constexpr char MIMETYPE_VIDEO_VP9[] = "video/x-vnd.on2.vp9";
inline constexpr char MIMETYPE_VIDEO_AV1[] = "video/av01";
+inline constexpr char MIMETYPE_VIDEO_APV[] = "video/apv";
inline constexpr char MIMETYPE_VIDEO_AVC[] = "video/avc";
inline constexpr char MIMETYPE_VIDEO_HEVC[] = "video/hevc";
inline constexpr char MIMETYPE_VIDEO_MPEG4[] = "video/mp4v-es";
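
The APV additions above follow this header's existing profile/level scheme: one helper returns the profile name, the other the level/band name, both falling back to the caller-supplied default. A quick usage sketch:

```cpp
#include <media/stagefright/MediaCodecConstants.h>
#include <cstdio>

namespace android {

// Expected output, per the switch statements added above:
//   APVProfile422_10HDR10
//   Level 4.1, Band 2
//   unknown
void printApvExamples() {
    std::printf("%s\n", asString_APVProfile(APVProfile422_10HDR10));
    std::printf("%s\n", asString_APVBandLevel(APVLevel41Band2));
    std::printf("%s\n", asString_APVBandLevel(0x12345, "unknown"));
}

}  // namespace android
```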
diff --git a/media/libstagefright/omx/OMXUtils.cpp b/media/libstagefright/omx/OMXUtils.cpp
index 49b2dec..d62e1ed 100644
--- a/media/libstagefright/omx/OMXUtils.cpp
+++ b/media/libstagefright/omx/OMXUtils.cpp
@@ -140,6 +140,8 @@
"audio_decoder.g711alaw", "audio_encoder.g711alaw" },
{ MEDIA_MIMETYPE_VIDEO_AVC,
"video_decoder.avc", "video_encoder.avc" },
+ { MEDIA_MIMETYPE_VIDEO_APV,
+ "video_decoder.apv", "video_encoder.apv" },
{ MEDIA_MIMETYPE_VIDEO_HEVC,
"video_decoder.hevc", "video_encoder.hevc" },
{ MEDIA_MIMETYPE_VIDEO_MPEG4,
diff --git a/media/module/foundation/MediaDefs.cpp b/media/module/foundation/MediaDefs.cpp
index 7abab63..a890696 100644
--- a/media/module/foundation/MediaDefs.cpp
+++ b/media/module/foundation/MediaDefs.cpp
@@ -25,6 +25,7 @@
const char *MEDIA_MIMETYPE_VIDEO_VP8 = "video/x-vnd.on2.vp8";
const char *MEDIA_MIMETYPE_VIDEO_VP9 = "video/x-vnd.on2.vp9";
const char *MEDIA_MIMETYPE_VIDEO_AV1 = "video/av01";
+const char *MEDIA_MIMETYPE_VIDEO_APV = "video/apv";
const char *MEDIA_MIMETYPE_VIDEO_AVC = "video/avc";
const char *MEDIA_MIMETYPE_VIDEO_HEVC = "video/hevc";
const char *MEDIA_MIMETYPE_VIDEO_MPEG4 = "video/mp4v-es";
diff --git a/media/module/foundation/include/media/stagefright/foundation/MediaDefs.h b/media/module/foundation/include/media/stagefright/foundation/MediaDefs.h
index 05ee7fc..2b3f446 100644
--- a/media/module/foundation/include/media/stagefright/foundation/MediaDefs.h
+++ b/media/module/foundation/include/media/stagefright/foundation/MediaDefs.h
@@ -27,6 +27,7 @@
extern const char *MEDIA_MIMETYPE_VIDEO_VP8;
extern const char *MEDIA_MIMETYPE_VIDEO_VP9;
extern const char *MEDIA_MIMETYPE_VIDEO_AV1;
+extern const char *MEDIA_MIMETYPE_VIDEO_APV;
extern const char *MEDIA_MIMETYPE_VIDEO_AVC;
extern const char *MEDIA_MIMETYPE_VIDEO_HEVC;
extern const char *MEDIA_MIMETYPE_VIDEO_MPEG4;
diff --git a/media/psh_utils/Android.bp b/media/psh_utils/Android.bp
index dafa63b..803de94 100644
--- a/media/psh_utils/Android.bp
+++ b/media/psh_utils/Android.bp
@@ -10,7 +10,7 @@
// libraries that are included whole_static for test apps
ndk_libs = [
"android.hardware.health-V3-ndk",
- "android.hardware.power.stats-V1-ndk",
+ "android.hardware.power.stats-V1-cpp",
]
// Power, System, Health utils
@@ -32,6 +32,7 @@
"com.android.media.audio-aconfig-cc",
"libaudioutils",
"libbase",
+ "libbinder",
"libbinder_ndk",
"libcutils",
"liblog",
diff --git a/media/psh_utils/HealthStatsProvider.cpp b/media/psh_utils/HealthStatsProvider.cpp
index de72463..611c424 100644
--- a/media/psh_utils/HealthStatsProvider.cpp
+++ b/media/psh_utils/HealthStatsProvider.cpp
@@ -18,7 +18,7 @@
#include <aidl/android/hardware/health/IHealth.h>
#include <android-base/logging.h>
#include <android/binder_manager.h>
-#include <psh_utils/ServiceSingleton.h>
+#include <mediautils/ServiceSingleton.h>
using ::aidl::android::hardware::health::HealthInfo;
using ::aidl::android::hardware::health::IHealth;
@@ -26,7 +26,7 @@
namespace android::media::psh_utils {
static auto getHealthService() {
- return getServiceSingleton<IHealth>();
+ return mediautils::getService<IHealth>();
}
status_t HealthStatsDataProvider::fill(PowerStats* stat) const {
diff --git a/media/psh_utils/PowerStats.cpp b/media/psh_utils/PowerStats.cpp
index f8f87c5..89e36e2 100644
--- a/media/psh_utils/PowerStats.cpp
+++ b/media/psh_utils/PowerStats.cpp
@@ -233,14 +233,14 @@
health_stats += other.health_stats;
if (power_entity_state_residency.empty()) {
power_entity_state_residency = other.power_entity_state_residency;
- } else {
+ } else if (power_entity_state_residency.size() == other.power_entity_state_residency.size()) {
for (size_t i = 0; i < power_entity_state_residency.size(); ++i) {
power_entity_state_residency[i] += other.power_entity_state_residency[i];
}
}
if (rail_energy.empty()) {
rail_energy = other.rail_energy;
- } else {
+ } else if (rail_energy.size() == other.rail_energy.size()) {
for (size_t i = 0; i < rail_energy.size(); ++i) {
rail_energy[i] += other.rail_energy[i];
}
@@ -253,14 +253,14 @@
health_stats -= other.health_stats;
if (power_entity_state_residency.empty()) {
power_entity_state_residency = other.power_entity_state_residency;
- } else {
+ } else if (power_entity_state_residency.size() == other.power_entity_state_residency.size()) {
for (size_t i = 0; i < power_entity_state_residency.size(); ++i) {
power_entity_state_residency[i] -= other.power_entity_state_residency[i];
}
}
if (rail_energy.empty()) {
rail_energy = other.rail_energy;
- } else {
+ } else if (rail_energy.size() == other.rail_energy.size()) {
for (size_t i = 0; i < rail_energy.size(); ++i) {
rail_energy[i] -= other.rail_energy[i];
}
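
The accumulate/subtract operators above now combine the residency and rail-energy vectors element-wise only when both sides have the same length; a length mismatch leaves the destination unchanged instead of indexing past the shorter vector. A standalone sketch of that guard:

```cpp
#include <cstdint>
#include <vector>

// Illustrative: element-wise accumulate only when the shapes match,
// mirroring the guarded loops in PowerStats::operator+= / operator-=.
void accumulate(std::vector<int64_t>& dst, const std::vector<int64_t>& src) {
    if (dst.empty()) {
        dst = src;                       // first sample: adopt the other side wholesale
    } else if (dst.size() == src.size()) {
        for (size_t i = 0; i < dst.size(); ++i) {
            dst[i] += src[i];            // shapes agree: safe to combine element-wise
        }
    }
    // else: mismatched shapes are skipped to avoid out-of-bounds access
}
```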
diff --git a/media/psh_utils/PowerStatsProvider.cpp b/media/psh_utils/PowerStatsProvider.cpp
index 112c323..033ad95 100644
--- a/media/psh_utils/PowerStatsProvider.cpp
+++ b/media/psh_utils/PowerStatsProvider.cpp
@@ -15,17 +15,17 @@
*/
#include "PowerStatsProvider.h"
-#include <aidl/android/hardware/power/stats/IPowerStats.h>
+#include <android/hardware/power/stats/IPowerStats.h>
#include <android-base/logging.h>
-#include <psh_utils/ServiceSingleton.h>
+#include <mediautils/ServiceSingleton.h>
#include <unordered_map>
-using ::aidl::android::hardware::power::stats::IPowerStats;
+using ::android::hardware::power::stats::IPowerStats;
namespace android::media::psh_utils {
static auto getPowerStatsService() {
- return getServiceSingleton<IPowerStats>();
+ return mediautils::getService<IPowerStats>();
}
status_t RailEnergyDataProvider::fill(PowerStats *stat) const {
@@ -35,9 +35,9 @@
return NO_INIT;
}
- std::unordered_map<int32_t, ::aidl::android::hardware::power::stats::Channel> channelMap;
+ std::unordered_map<int32_t, ::android::hardware::power::stats::Channel> channelMap;
{
- std::vector<::aidl::android::hardware::power::stats::Channel> channels;
+ std::vector<::android::hardware::power::stats::Channel> channels;
if (!powerStatsService->getEnergyMeterInfo(&channels).isOk()) {
LOG(ERROR) << "unable to get energy meter info";
return INVALID_OPERATION;
@@ -47,7 +47,7 @@
}
}
- std::vector<::aidl::android::hardware::power::stats::EnergyMeasurement> measurements;
+ std::vector<::android::hardware::power::stats::EnergyMeasurement> measurements;
if (!powerStatsService->readEnergyMeter({}, &measurements).isOk()) {
LOG(ERROR) << "unable to get energy measurements";
return INVALID_OPERATION;
@@ -86,7 +86,7 @@
std::vector<int32_t> powerEntityIds; // ids to use
{
- std::vector<::aidl::android::hardware::power::stats::PowerEntity> entities;
+ std::vector<::android::hardware::power::stats::PowerEntity> entities;
if (!powerStatsService->getPowerEntityInfo(&entities).isOk()) {
LOG(ERROR) << __func__ << ": unable to get entity info";
return INVALID_OPERATION;
@@ -108,7 +108,7 @@
}
}
- std::vector<::aidl::android::hardware::power::stats::StateResidencyResult> results;
+ std::vector<::android::hardware::power::stats::StateResidencyResult> results;
if (!powerStatsService->getStateResidency(powerEntityIds, &results).isOk()) {
LOG(ERROR) << __func__ << ": Unable to get state residency";
return INVALID_OPERATION;
diff --git a/media/psh_utils/benchmarks/Android.bp b/media/psh_utils/benchmarks/Android.bp
index 2382c69..066771b 100644
--- a/media/psh_utils/benchmarks/Android.bp
+++ b/media/psh_utils/benchmarks/Android.bp
@@ -8,10 +8,9 @@
default_applicable_licenses: ["frameworks_av_license"],
}
-cc_benchmark {
- name: "audio_powerstats_benchmark",
+cc_defaults {
+ name: "audio_psh_utils_benchmark_defaults",
- srcs: ["audio_powerstats_benchmark.cpp"],
cflags: [
"-Wall",
"-Werror",
@@ -22,6 +21,7 @@
shared_libs: [
"libaudioutils",
"libbase",
+ "libbinder",
"libbinder_ndk",
"libcutils",
"liblog",
@@ -31,45 +31,25 @@
}
cc_benchmark {
+ name: "audio_powerstats_benchmark",
+
+ defaults: ["audio_psh_utils_benchmark_defaults"],
+
+ srcs: ["audio_powerstats_benchmark.cpp"],
+}
+
+cc_benchmark {
name: "audio_powerstatscollector_benchmark",
+ defaults: ["audio_psh_utils_benchmark_defaults"],
+
srcs: ["audio_powerstatscollector_benchmark.cpp"],
- cflags: [
- "-Wall",
- "-Werror",
- ],
- static_libs: [
- "libpshutils",
- ],
- shared_libs: [
- "libaudioutils",
- "libbase",
- "libbinder_ndk",
- "libcutils",
- "liblog",
- "libmediautils",
- "libutils",
- ],
}
cc_benchmark {
name: "audio_token_benchmark",
+ defaults: ["audio_psh_utils_benchmark_defaults"],
+
srcs: ["audio_token_benchmark.cpp"],
- cflags: [
- "-Wall",
- "-Werror",
- ],
- static_libs: [
- "libpshutils",
- ],
- shared_libs: [
- "libaudioutils",
- "libbase",
- "libbinder_ndk",
- "libcutils",
- "liblog",
- "libmediautils",
- "libutils",
- ],
}
diff --git a/media/psh_utils/include/psh_utils/ServiceSingleton.h b/media/psh_utils/include/psh_utils/ServiceSingleton.h
deleted file mode 100644
index d0cd6d2..0000000
--- a/media/psh_utils/include/psh_utils/ServiceSingleton.h
+++ /dev/null
@@ -1,67 +0,0 @@
-/*
- * Copyright (C) 2024 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#pragma once
-
-#include <android/binder_auto_utils.h>
-#include <android/binder_manager.h>
-#include <android-base/thread_annotations.h>
-#include <mutex>
-#include <utils/Log.h>
-#include <utils/Timers.h>
-
-namespace android::media::psh_utils {
-
-struct DefaultServiceTraits {
- static constexpr int64_t kThresholdRetryNs = 1'000'000'000;
- static constexpr int64_t kMaxRetries = 5;
- static constexpr const char* kServiceVersion = "/default";
- static constexpr bool kShowLog = true;
-};
-
-template<typename Service, typename ServiceTraits = DefaultServiceTraits>
-std::shared_ptr<Service> getServiceSingleton() {
- [[clang::no_destroy]] static constinit std::mutex m;
- [[clang::no_destroy]] static constinit std::shared_ptr<Service> service GUARDED_BY(m);
- static int64_t nextTryNs GUARDED_BY(m) = 0;
- static int64_t tries GUARDED_BY(m) = 0;
-
- std::lock_guard l(m);
- if (service
- || tries > ServiceTraits::kMaxRetries // try too many times
- || systemTime(SYSTEM_TIME_BOOTTIME) < nextTryNs) { // try too frequently.
- return service;
- }
-
- const auto serviceName = std::string(Service::descriptor)
- .append(ServiceTraits::kServiceVersion);
- service = Service::fromBinder(
- ::ndk::SpAIBinder(AServiceManager_checkService(serviceName.c_str())));
-
- if (!service) {
- // If failed, set a time limit before retry.
- // No need to log an error, it is already done.
- nextTryNs = systemTime(SYSTEM_TIME_BOOTTIME) + ServiceTraits::kThresholdRetryNs;
- ALOGV_IF(ServiceTraits::kShowLog, "service:%s retries:%lld of %lld nextTryNs:%lld",
- Service::descriptor, (long long)tries,
- (long long)kMaxRetries, (long long)nextTryNs);
- ++tries;
- }
-
- return service;
-}
-
-} // namespace android::media::psh_utils
diff --git a/media/psh_utils/tests/Android.bp b/media/psh_utils/tests/Android.bp
index 74589f8..64fc971 100644
--- a/media/psh_utils/tests/Android.bp
+++ b/media/psh_utils/tests/Android.bp
@@ -15,9 +15,11 @@
],
shared_libs: [
"libbase",
+ "libbinder",
"libbinder_ndk",
"libcutils",
"liblog",
+ "libmediautils",
"libutils",
],
static_libs: [
diff --git a/media/tests/benchmark/MediaBenchmarkTest/src/main/java/com/android/media/benchmark/library/MultiAccessUnitBlockModelDecoder.java b/media/tests/benchmark/MediaBenchmarkTest/src/main/java/com/android/media/benchmark/library/MultiAccessUnitBlockModelDecoder.java
index ed2defe..d1a5d79 100644
--- a/media/tests/benchmark/MediaBenchmarkTest/src/main/java/com/android/media/benchmark/library/MultiAccessUnitBlockModelDecoder.java
+++ b/media/tests/benchmark/MediaBenchmarkTest/src/main/java/com/android/media/benchmark/library/MultiAccessUnitBlockModelDecoder.java
@@ -72,8 +72,11 @@
final String mime = format.getString(MediaFormat.KEY_MIME);
final int maxOutputSize = format.getNumber(
MediaFormat.KEY_BUFFER_BATCH_MAX_OUTPUT_SIZE, 0).intValue();
- final int maxInputSizeInBytes = format.getInteger(
- MediaFormat.KEY_MAX_INPUT_SIZE);
+ int maxInputSizeInBytes = 0;
+ if (format.containsKey(MediaFormat.KEY_MAX_INPUT_SIZE)) {
+ maxInputSizeInBytes = format.getNumber(
+ MediaFormat.KEY_MAX_INPUT_SIZE, 0).intValue();
+ }
mMaxInputSize = Math.max(maxInputSizeInBytes,
(int) (maxOutputSize * CodecUtils.getCompressionRatio(mime)));
}
diff --git a/media/utils/Android.bp b/media/utils/Android.bp
index e340b40..762984e 100644
--- a/media/utils/Android.bp
+++ b/media/utils/Android.bp
@@ -53,6 +53,7 @@
"Process.cpp",
"ProcessInfo.cpp",
"SchedulingPolicyService.cpp",
+ "ServiceSingleton.cpp",
"ServiceUtilities.cpp",
"ThreadSnapshot.cpp",
"TimeCheck.cpp",
@@ -89,6 +90,7 @@
"libaudioutils", // for clock.h, Statistics.h
"libbase",
"libbinder",
+ "libbinder_ndk",
"libcutils",
"libhidlbase",
"liblog",
@@ -112,6 +114,8 @@
],
export_shared_lib_headers: [
+ "libaudioutils",
+ "libbinder_ndk",
"libpermission",
"packagemanager_aidl-cpp",
],
diff --git a/media/utils/ServiceSingleton.cpp b/media/utils/ServiceSingleton.cpp
new file mode 100644
index 0000000..ade7a3e
--- /dev/null
+++ b/media/utils/ServiceSingleton.cpp
@@ -0,0 +1,66 @@
+/*
+ * Copyright (C) 2024 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "ServiceSingleton"
+
+#include <mediautils/ServiceSingleton.h>
+
+namespace android::mediautils {
+
+namespace details {
+
+// To prevent multiple instances in different linkages,
+// we anchor the singleton in a .cpp file instead of inlining it in the header.
+
+template<typename T>
+requires (std::is_same_v<T, const char*> || std::is_same_v<T, String16>)
+std::shared_ptr<ServiceHandler> ServiceHandler::getInstance(const T& name) {
+ using Key = std::conditional_t<std::is_same_v<T, String16>, String16, std::string>;
+ [[clang::no_destroy]] static constinit std::mutex mutex;
+ [[clang::no_destroy]] static constinit std::shared_ptr<
+ std::map<Key, std::shared_ptr<ServiceHandler>>> map GUARDED_BY(mutex);
+ static constinit bool init GUARDED_BY(mutex) = false;
+
+ std::lock_guard l(mutex);
+ if (!init) {
+ map = std::make_shared<std::map<Key, std::shared_ptr<ServiceHandler>>>();
+ init = true;
+ }
+
+ auto& handler = (*map)[name];
+ if (!handler) {
+ handler = std::make_shared<ServiceHandler>();
+ if constexpr (std::is_same_v<T, String16>) {
+ handler->init_cpp();
+ } else /* constexpr */ {
+ handler->init_ndk();
+ }
+ }
+ return handler;
+}
+
+// Explicit template function instantiation.
+template
+std::shared_ptr<ServiceHandler> ServiceHandler::getInstance<const char*>(const char* const& name);
+
+template
+std::shared_ptr<ServiceHandler> ServiceHandler::getInstance<String16>(const String16& name);
+
+} // details
+
+} // namespace android::mediautils
+
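
The explicit instantiations above anchor ServiceHandler::getInstance() for both key types (const char* for NDK interfaces, String16 for CPP ones) in one translation unit, as the comment explains. Call sites elsewhere in this change then reduce to a one-liner; a sketch mirroring HealthStatsProvider.cpp:

```cpp
#include <aidl/android/hardware/health/IHealth.h>
#include <mediautils/ServiceSingleton.h>

// Illustrative: fetch (and cache) the default health service through the new
// mediautils singleton, as HealthStatsProvider.cpp now does.
static auto getHealthService() {
    return android::mediautils::getService<::aidl::android::hardware::health::IHealth>();
}
```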
diff --git a/media/utils/include/mediautils/BinderGenericUtils.h b/media/utils/include/mediautils/BinderGenericUtils.h
new file mode 100644
index 0000000..c2bbde1
--- /dev/null
+++ b/media/utils/include/mediautils/BinderGenericUtils.h
@@ -0,0 +1,388 @@
+/*
+ * Copyright (C) 2024 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#pragma once
+
+#include <android/binder_auto_utils.h>
+#include <android/binder_interface_utils.h>
+#include <android/binder_manager.h>
+#include <binder/IServiceManager.h>
+
+namespace android::mediautils {
+// General Template Binder Utilities.
+//
+// In order to write generic Template methods, we need to have utility methods
+// that provide seamless template overload resolution between NDK and CPP variants.
+//
+
+// Returns true or false based on whether the Interface is a NDK Interface.
+template<typename Interface>
+inline constexpr bool is_ndk = std::derived_from<Interface, ::ndk::ICInterface>;
+
+// Returns the Interface ptr type (shared_ptr or sp) based on the Interface.
+template<typename Interface>
+using InterfaceType =
+        std::conditional_t<is_ndk<Interface>, std::shared_ptr<Interface>, sp<Interface>>;
+
+template<typename Interface>
+using BaseInterfaceType = std::conditional_t<is_ndk<Interface>,
+        std::shared_ptr<::ndk::ICInterface>, sp<::android::IInterface>>;
+
+/**
+ * Returns either a sp<IBinder> or an SpAIBinder object
+ * for the AIDL interface given.
+ *
+ * A -cpp interface will return sp<IBinder>.
+ * A -ndk interface will return SpAIBinder
+ */
+template<typename Interface>
+sp<IBinder> binderFromInterface(const sp<Interface> &interface) {
+ return IInterface::asBinder(interface);
+}
+
+template<typename Interface>
+::ndk::SpAIBinder binderFromInterface(const std::shared_ptr<Interface> &interface) {
+ return interface->asBinder();
+}
+
+/**
+ * Returns either a sp<Interface> or a std::shared_ptr<Interface> from a Binder object.
+ *
+ * A -cpp interface will return sp<Interface>.
+ * A -ndk interface will return std::shared_ptr<Interface>
+ */
+template<typename Interface>
+sp<Interface> interfaceFromBinder(const sp<IBinder> &binder) {
+ return interface_cast<Interface>(binder);
+}
+
+template<typename Interface>
+std::shared_ptr<Interface> interfaceFromBinder(const ::ndk::SpAIBinder &binder) {
+ return Interface::fromBinder(binder);
+}
+
+/**
+ * Returns either a sp<Interface> or a std::shared_ptr<Interface> from
+ * the NDK/CPP base interface class.
+ */
+template<typename Interface>
+sp<Interface> interfaceFromBase(const sp<::android::IInterface> &interface) {
+ // this is unvalidated, though one could verify getInterfaceDescriptor() == Interface::descriptor
+ return sp<Interface>::cast(interface);
+}
+
+template<typename Interface>
+std::shared_ptr<Interface> interfaceFromBase(
+ const std::shared_ptr<::ndk::ICInterface> &interface) {
+ // this is unvalidated, though one could verify
+ // !strcmp(AIBinder_Class_getDescriptor(AIBinder_getClass(...), Interface::descriptor)
+ return std::static_pointer_cast<Interface>(interface);
+}
+
+/**
+ * Returns a fully qualified service name.
+ *
+ * @param name
+ * If name is empty, it returns the name from the Service descriptor.
+ * If name starts with '/', it appends the name as a version to the Service descriptor,
+ * e.g. "/default".
+ * Otherwise the name is assumed to be the full Service name, overriding the
+ * Service descriptor.
+ */
+template<typename Service>
+auto fullyQualifiedServiceName(const char* const name) {
+ using StringType = std::conditional_t<is_ndk<Service>, std::string, String16>;
+ return name == nullptr ? StringType(Service::descriptor)
+ : name[0] != 0 && name[0] != '/' ? StringType(name)
+ : StringType(Service::descriptor) + StringType(name);
+}
+
+/**
+ * Returns either a std::shared_ptr<Interface> or sp<Interface>
+ * for the AIDL interface given.
+ *
+ * A -cpp interface will return sp<Service>.
+ * A -ndk interface will return std::shared_ptr<Service>
+ *
+ * @param name if non-empty should contain either a suffix if it starts
+ * with a '/' such as "/default", or the full service name.
+ */
+template<typename Service>
+auto checkServicePassThrough(const char *const name = "") {
+ if constexpr(is_ndk<Service>)
+ {
+ const auto serviceName = fullyQualifiedServiceName<Service>(name);
+ return Service::fromBinder(
+ ::ndk::SpAIBinder(AServiceManager_checkService(serviceName.c_str())));
+ } else /* constexpr */ {
+ const auto serviceName = fullyQualifiedServiceName<Service>(name);
+ auto binder = defaultServiceManager()->checkService(serviceName);
+ return interface_cast<Service>(binder);
+ }
+}
+
+template<typename Service>
+void addService(const std::shared_ptr<Service> &service) {
+ AServiceManager_addService(binderFromInterface(service), Service::descriptor);
+}
+
+template<typename Service>
+void addService(const sp<Service> &service) {
+ defaultServiceManager()->addService(Service::descriptor, binderFromInterface(service));
+}
+
+namespace details {
+
+// Use the APIs below, not the details here.
+
+/**
+ * RequestServiceManagerCallback(Cpp|Ndk) is a RAII class that
+ * requests a ServiceManager callback.
+ *
+ * Note the ServiceManager is a single threaded "apartment" and only one
+ * transaction is active, hence:
+ *
+ * 1) After the RequestServiceManagerCallback object is destroyed, no
+ * calls to the onService callback are pending or will occur.
+ * 2) To prevent deadlock, do not construct or destroy the class with
+ * a lock held that the onService function also requires.
+ */
+template<typename Service>
+class RequestServiceManagerCallbackCpp {
+public:
+ explicit RequestServiceManagerCallbackCpp(
+ std::function<void(const sp<Service> &)> &&onService,
+ const char *const serviceName = ""
+ )
+ : mServiceName{fullyQualifiedServiceName<Service>(serviceName)},
+ mWaiter{sp<Waiter>::make(std::move(onService))},
+ mStatus{defaultServiceManager()->registerForNotifications(mServiceName,
+ mWaiter)} {
+ }
+
+ ~RequestServiceManagerCallbackCpp() {
+ if (mStatus == OK) {
+ defaultServiceManager()->unregisterForNotifications(mServiceName, mWaiter);
+ }
+ }
+
+ status_t getStatus() const {
+ return mStatus;
+ }
+
+private:
+ const String16 mServiceName;
+ const sp<IServiceManager::LocalRegistrationCallback> mWaiter;
+ const status_t mStatus;
+
+ // With some work here, we could make this a singleton to improve
+ // performance and reduce binder clutter.
+ class Waiter : public IServiceManager::LocalRegistrationCallback {
+ public:
+ explicit Waiter(std::function<void(const sp<Service> &)> &&onService)
+ : mOnService{std::move(onService)} {}
+
+ private:
+ void onServiceRegistration(
+ const String16 & /*name*/, const sp<IBinder> &binder) final {
+ mOnService(interface_cast<Service>(binder));
+ }
+
+ const std::function<void(const sp<Service> &)> mOnService;
+ };
+};
+
+template<typename Service>
+class RequestServiceManagerCallbackNdk {
+public:
+ explicit RequestServiceManagerCallbackNdk(
+ std::function<void(const std::shared_ptr<Service> &)> &&onService,
+ const char *const serviceName = ""
+ )
+ : mServiceName{fullyQualifiedServiceName<Service>(serviceName)},
+ mOnService{std::move(onService)},
+ mWaiter{AServiceManager_registerForServiceNotifications(
+ mServiceName.c_str(),
+ onRegister, this)} // must be registered after mOnService.
+ {}
+
+ ~RequestServiceManagerCallbackNdk() {
+ if (mWaiter) {
+ AServiceManager_NotificationRegistration_delete(mWaiter);
+ }
+ }
+
+ status_t getStatus() const {
+ return mWaiter != nullptr ? OK : INVALID_OPERATION;
+ }
+
+private:
+ const std::string mServiceName; // must keep a local copy.
+ const std::function<void(const std::shared_ptr<Service> &)> mOnService;
+ AServiceManager_NotificationRegistration *const mWaiter; // last.
+
+ static void onRegister(const char *instance, AIBinder *registered, void *cookie) {
+ (void) instance;
+ auto *callbackHandler = static_cast<RequestServiceManagerCallbackNdk<Service> *>(cookie);
+ callbackHandler->mOnService(Service::fromBinder(::ndk::SpAIBinder(registered)));
+ }
+};
+
+/**
+ * RequestDeathNotification(Cpp|Ndk) is a RAII class that
+ * requests a death notification.
+ *
+ * Note the ServiceManager is a single threaded "apartment" and only one
+ * transaction is active, hence:
+ *
+ * 1) After the RequestDeathNotification object is destroyed, no
+ *    calls to the onBinderDied callback are pending or will occur.
+ * 2) To prevent deadlock, do not construct or destroy the class with
+ * a lock held that the onBinderDied function also requires.
+ */
+
+class RequestDeathNotificationCpp {
+ class DeathRecipientHelper : public IBinder::DeathRecipient {
+ public:
+ explicit DeathRecipientHelper(std::function<void()> &&onBinderDied)
+ : mOnBinderDied{std::move(onBinderDied)} {
+ }
+
+ void binderDied(const wp<IBinder> &weakBinder) final {
+ (void) weakBinder;
+ mOnBinderDied();
+ }
+
+ private:
+ const std::function<void()> mOnBinderDied;
+ };
+
+public:
+ RequestDeathNotificationCpp(const sp<IBinder> &binder,
+ std::function<void()> &&onBinderDied)
+ : mHelper{sp<DeathRecipientHelper>::make(std::move(onBinderDied))},
+ mWeakBinder{binder}, mStatus{binder->linkToDeath(mHelper)} {
+ ALOGW_IF(mStatus != OK, "%s: linkToDeath status:%d", __func__, mStatus);
+ }
+
+ ~RequestDeathNotificationCpp() {
+ if (mStatus == OK) {
+ const auto binder = mWeakBinder.promote();
+ if (binder) binder->unlinkToDeath(mHelper);
+ }
+ }
+
+ status_t getStatus() const {
+ return mStatus;
+ }
+
+private:
+ const sp<DeathRecipientHelper> mHelper;
+ const wp<IBinder> mWeakBinder;
+ const status_t mStatus;
+};
+
+class RequestDeathNotificationNdk {
+public:
+ RequestDeathNotificationNdk(
+ const ::ndk::SpAIBinder &binder, std::function<void()> &&onBinderDied)
+ : mOnBinderDied(std::move(onBinderDied)),
+ mRecipient(::AIBinder_DeathRecipient_new(OnBinderDiedStatic),
+ &AIBinder_DeathRecipient_delete), mStatus{AIBinder_linkToDeath(
+ binder.get(), mRecipient.get(), /* cookie */ this)} {
+ ALOGW_IF(mStatus != OK, "%s: AIBinder_linkToDeath status:%d", __func__, mStatus);
+ // We do not use AIBinder_DeathRecipient_setOnUnlinked() to do resource deallocation
+ // as the functor mOnBinderDied is kept alive by this class.
+ }
+
+ ~RequestDeathNotificationNdk() {
+ // The AIBinder_DeathRecipient dtor automatically unlinks all registered notifications,
+        // so AIBinder_unlinkToDeath() is not needed here (otherwise we would need to keep
+        // an AIBinder_Weak here).
+ }
+
+ status_t getStatus() const {
+ return mStatus;
+ }
+
+private:
+ void onBinderDied() {
+ mOnBinderDied();
+ }
+
+ static void OnBinderDiedStatic(void *cookie) {
+ reinterpret_cast<RequestDeathNotificationNdk *>(cookie)->onBinderDied();
+ }
+
+ const std::function<void()> mOnBinderDied;
+ const std::unique_ptr<AIBinder_DeathRecipient, decltype(
+ &AIBinder_DeathRecipient_delete)>
+ mRecipient;
+ const status_t mStatus; // binder_status_t is a limited subset of status_t
+};
+
+} // namespace details
+
+/**
+ * Requests a notification that service is available.
+ *
+ * An opaque handle is returned - after clearing it is guaranteed that
+ * no callback will occur.
+ *
+ * The callback will be of form:
+ * onService(const sp<Service>& service);
+ * onService(const std::shared_ptr<Service>& service);
+ */
+template<typename Service, typename F>
+std::shared_ptr<void> requestServiceNotification(
+ F onService, const char *const serviceName = "") {
+    // Select the NDK or CPP callback implementation based on the Service type.
+ using RequestServiceManagerCallback = std::conditional_t<is_ndk<Service>,
+ details::RequestServiceManagerCallbackNdk<Service>,
+ details::RequestServiceManagerCallbackCpp<Service>>;
+ const auto ptr = std::make_shared<RequestServiceManagerCallback>(
+ onService, serviceName);
+ const auto status = ptr->getStatus();
+ return status == OK ? ptr : nullptr;
+}
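+
+// Illustrative sketch (not part of this change): a hypothetical caller asking to be
+// told when "IMyService" (CPP backend) comes up. The returned opaque handle must be
+// kept alive; resetting it guarantees no further callbacks.
+//
+//   std::shared_ptr<void> handle = mediautils::requestServiceNotification<IMyService>(
+//           [](const sp<IMyService>& service) { ALOGI("IMyService is available"); });
+//   ...
+//   handle.reset();  // unregisters; no callback is pending or will occur afterwards.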
+
+/**
+ * Requests a death notification.
+ *
+ * An opaque handle is returned - after clearing it is guaranteed that
+ * no notification will occur.
+ *
+ * The callback will be of form void onBinderDied();
+ */
+template<typename Service>
+std::shared_ptr<void> requestDeathNotification(
+ const sp<Service> &service, std::function<void()> &&onBinderDied) {
+ const auto ptr = std::make_shared<details::RequestDeathNotificationCpp>(
+ binderFromInterface(service), std::move(onBinderDied));
+ const auto status = ptr->getStatus();
+ return status == OK ? ptr : nullptr;
+}
+
+template<typename Service>
+std::shared_ptr<void> requestDeathNotification(
+ const std::shared_ptr<Service> &service, std::function<void()> &&onBinderDied) {
+ const auto ptr = std::make_shared<details::RequestDeathNotificationNdk>(
+ binderFromInterface(service), std::move(onBinderDied));
+ const auto status = ptr->getStatus();
+ return status == OK ? ptr : nullptr;
+}
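+
+// Illustrative sketch (not part of this change): requesting a death notification for a
+// hypothetical service obtained elsewhere. The returned opaque handle must be kept
+// alive; resetting it cancels the notification.
+//
+//   sp<IMyService> service = ...;  // previously obtained interface
+//   std::shared_ptr<void> deathHandle = mediautils::requestDeathNotification(
+//           service, [] { ALOGW("IMyService died"); });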
+
+} // namespace android::mediautils
diff --git a/media/utils/include/mediautils/ServiceSingleton.h b/media/utils/include/mediautils/ServiceSingleton.h
new file mode 100644
index 0000000..644d9cd
--- /dev/null
+++ b/media/utils/include/mediautils/ServiceSingleton.h
@@ -0,0 +1,464 @@
+/*
+ * Copyright (C) 2024 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#pragma once
+
+#include "BinderGenericUtils.h"
+
+#include <android-base/thread_annotations.h>
+#include <audio_utils/mutex.h>
+#include <chrono>
+#include <map>
+#include <mutex>
+#include <utils/Log.h>
+#include <utils/Timers.h>
+
+/**
+ * ServiceSingleton provides a non-blocking NDK/CPP compatible service cache.
+ *
+ * This is a specialized cache that allows per-service configuration.
+ *
+ * Features:
+ *
+ * 1) Seamless compatibility with NDK and CPP based interfaces.
+ * 2) Time-out based service acquisition.
+ * Set the maximum time to wait for any service.
+ * 3) Service prefetch:
+ *    Reduces start-up latency by prefetching the service in advance (not on demand).
+ * Prefetch is automatically installed by getService().
+ * 4) Manual interface setting for test and non-service manager acquisition support.
+ *
+ * If both NDK and CPP interfaces are available, we prefer the CPP version
+ * for the following reasons:
+ * 1) Established sp<> reference counting avoids mistakes. NDK tends to be error-prone.
+ * 2) Possible reduced binder object clutter by a singleton notification binder object.
+ * Fewer binder objects are more efficient for the binder driver and ServiceManager.
+ * For example, fewer binder deaths means less ServiceManager (linear time) cleanup.
+ * A single binder object also offers binder access serialization.
+ * 3) CPP offers slightly better efficiency as it is closer to the
+ *    actual implementation, though this is a minor effect.
+ *
+ * We use a per-service ServiceHandler object to collect methods and implementation details.
+ * Currently this is separate for NDK and CPP interfaces to the same service;
+ * unification is possible by using ibinder_internals.h.
+ */
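+
+// Illustrative usage sketch (not part of this change), assuming a hypothetical AIDL
+// interface IMyService and a hypothetical MyServiceTraits (see DefaultServiceTraits below):
+//
+//   // Optional: install traits (callbacks on service arrival/death) before the first fetch.
+//   mediautils::initService<IMyService, MyServiceTraits<IMyService>>({});
+//
+//   // Non-blocking prefetch: installs a callback that caches the service when it appears.
+//   mediautils::getService<IMyService>();
+//
+//   // Later: wait up to 2 seconds; returns sp<> (CPP) or std::shared_ptr<> (NDK).
+//   auto service = mediautils::getService<IMyService>(std::chrono::seconds(2));
+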
+namespace android::mediautils {
+
+enum ServiceOptions {
+ kNone = 0,
+ kNonNull = (1 << 0), // don't return a null interface unless disabled.
+ // partially implemented and experimental.
+};
+
+// Traits may come through a constexpr static function collection.
+// This participates in small buffer optimization SBO in std::function impl.
+template <typename Service>
+struct DefaultServiceTraits {
+ // getServiceName() returns the name associated with Service.
+ //
+ // If name is empty, it returns the name from the Service descriptor.
+ // If name starts with '/', it appends the name as a version to the Service descriptor,
+ // e.g. "/default".
+ // Otherwise the name is assumed to be the Service name.
+ static constexpr const char* getServiceName() { return "/default"; }
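+    //
+    // Illustrative examples (assuming a descriptor of "android.media.IMyService"):
+    //   ""          -> "android.media.IMyService"
+    //   "/default"  -> "android.media.IMyService/default"
+    //   "my.name"   -> "my.name"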
+
+ // This callback is called when a new service is received.
+ // The callback requires at least one thread in the Binder threadpool.
+ static constexpr void onNewService(const InterfaceType<Service>&) {}
+
+ // This callback is called if the service has died.
+ // The callback requires at least one thread in the Binder threadpool.
+ static constexpr void onServiceDied(const InterfaceType<Service>&) {}
+
+ // ServiceOptions configured for the Service.
+ static constexpr ServiceOptions options() { return ServiceOptions::kNone; }
+};
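+
+// Illustrative sketch (not part of this change): a hypothetical traits override that logs
+// service arrival and death; pass it as initService<IMyService, MyServiceTraits<IMyService>>({}).
+//
+//   template <typename Service>
+//   struct MyServiceTraits : public mediautils::DefaultServiceTraits<Service> {
+//       static constexpr const char* getServiceName() { return ""; }  // use the descriptor name
+//       static constexpr void onNewService(const mediautils::InterfaceType<Service>&) {
+//           ALOGI("service available");
+//       }
+//       static constexpr void onServiceDied(const mediautils::InterfaceType<Service>&) {
+//           ALOGW("service died");
+//       }
+//   };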
+
+// We store the traits as functors.
+template <typename Service>
+struct FunctionalServiceTraits {
+ template <typename ServiceTraits>
+ explicit FunctionalServiceTraits(const ServiceTraits& serviceTraits)
+ : getServiceName{serviceTraits.getServiceName}
+ , onNewService{serviceTraits.onNewService}
+ , onServiceDied{serviceTraits.onServiceDied}
+ , options{serviceTraits.options} {
+ }
+ std::function<const char*()> getServiceName;
+ std::function<void(const InterfaceType<Service>& service)> onNewService;
+ std::function<void(const InterfaceType<Service>& service)> onServiceDied;
+ std::function<ServiceOptions()> options;
+};
+
+namespace details {
+
+class ServiceHandler
+{
+public:
+ /**
+ * Returns a ServiceHandler, templated type T is String16 for the native type
+ * of the CPP service descriptors and const char* for the native type of the NDK
+ * service descriptors.
+ */
+ template<typename T>
+ requires (std::is_same_v<T, const char*> || std::is_same_v<T, String16>)
+ static std::shared_ptr<ServiceHandler> getInstance(const T& name);
+
+ /**
+ * Initializes the service handler with new service traits
+ * (methods that are triggered on service events).
+ *
+ * This is optional. Default construction of traits is allowed for
+ * services that do not require special handling.
+ *
+     * @param serviceTraits the traits (service event callbacks) to install.
+ * @return true if the service handler had been previously initialized.
+ */
+ template<typename Service, typename ServiceTraits>
+ bool init(const ServiceTraits& serviceTraits) {
+ auto traits = std::make_shared<FunctionalServiceTraits<Service>>(serviceTraits);
+ std::shared_ptr<void> oldTraits;
+ std::lock_guard l(mMutex);
+ std::swap(oldTraits, mTraits);
+ const bool existing = oldTraits != nullptr;
+ mTraits = std::move(traits);
+ mSkip = false;
+ return existing;
+ }
+
+ /**
+ * Returns the service based on a timeout.
+ *
+ * @param waitNs the time to wait, internally clamped to (0, INT64_MAX / 2) to
+ * avoid numeric overflow.
+ * @param useCallback installs a callback instead of polling.
+     *        The callback persists if the call times out. A callback requires
+ * at least one thread in the threadpool.
+ * @return Service interface.
+ */
+ template <typename Service>
+ auto get(std::chrono::nanoseconds waitNs, bool useCallback) {
+ audio_utils::unique_lock ul(mMutex);
+ auto& service = std::get<BaseInterfaceType<Service>>(mService);
+
+ if (mSkip || (service && mValid)) return service; // early check.
+
+ // clamp to avoid numeric overflow. INT64_MAX / 2 is effectively forever for a device.
+ std::chrono::nanoseconds kWaitLimitNs(
+ std::numeric_limits<decltype(waitNs.count())>::max() / 2);
+ waitNs = std::clamp(waitNs, decltype(waitNs)(0), kWaitLimitNs);
+ const auto end = std::chrono::steady_clock::now() + waitNs;
+
+ for (bool first = true; true; first = false) {
+ // we may have released mMutex, so see if service has been obtained.
+ if (mSkip || (service && mValid)) return service;
+
+ const auto traits = getTraits_l<Service>();
+
+ // first time or not using callback, check the service.
+ if (first || !useCallback) {
+ auto service_new = checkServicePassThrough<Service>(
+ traits->getServiceName());
+ if (service_new) {
+ mValid = true;
+ service = std::move(service_new);
+ setDeathNotifier_l<Service>();
+ auto service_fixed = service; // we're releasing the mutex.
+ ul.unlock();
+ traits->onNewService(interfaceFromBase<Service>(service_fixed));
+ mCv.notify_all();
+ return service_fixed;
+ }
+ }
+
+ // install service callback if needed.
+ if (useCallback && !mServiceNotificationHandle) {
+ setServiceNotifier_l<Service>();
+ }
+
+ // check time expiration.
+ const auto now = std::chrono::steady_clock::now();
+ if (now >= end
+ && (service || !(traits->options() & ServiceOptions::kNonNull))) {
+ return service;
+ }
+
+ // compute time to wait, then wait.
+ if (mServiceNotificationHandle) {
+ mCv.wait_until(ul, end);
+ } else {
+ const auto target = now + kPollTime;
+ mCv.wait_until(ul, std::min(target, end));
+ }
+ // loop back to see if we have any state change.
+ }
+ }
+
+ /**
+ * Sets an externally provided service override.
+ *
+     * @tparam Service the service interface type.
+     * @param service_new the service interface to install (may be null to clear).
+ */
+ template<typename Service>
+ void set(const InterfaceType<Service>& service_new) {
+ audio_utils::unique_lock ul(mMutex);
+ auto& service = std::get<BaseInterfaceType<Service>>(mService);
+ const auto traits = getTraits_l<Service>();
+ if (service) {
+ auto orig_service = service;
+ invalidateService_l<Service>();
+ ul.unlock();
+ traits->onServiceDied(interfaceFromBase<Service>(orig_service));
+ }
+ service = service_new;
+ ul.unlock();
+ // should we set the death notifier? It could be a local service.
+ if (service_new) traits->onNewService(service_new);
+ mCv.notify_all();
+ }
+
+ /**
+ * Disables cache management in the ServiceHandler. init() needs to be
+ * called to restart.
+ *
+     * All notifiers are removed.
+     * The service pointer is released.
+ */
+ template<typename Service>
+ void skip() {
+ audio_utils::unique_lock ul(mMutex);
+ mSkip = true;
+        // Remove notifiers. Holding the lock is OK here, as notifications are assumed
+        // to be one-way or triggered manually outside of the lock.
+ mDeathNotificationHandle.reset();
+ mServiceNotificationHandle.reset();
+ auto& service = std::get<BaseInterfaceType<Service>>(mService);
+ const auto traits = getTraits_l<Service>();
+ std::shared_ptr<void> oldTraits;
+ std::swap(oldTraits, mTraits); // destroyed outside of lock.
+ if (service) {
+ auto orig_service = service; // keep reference to service to manually notify death.
+ invalidateService_l<Service>(); // sets service to nullptr
+ ul.unlock();
+ traits->onServiceDied(interfaceFromBase<Service>(orig_service));
+ } else {
+ ul.unlock();
+ }
+ mCv.notify_all();
+ }
+
+private:
+
+ // invalidateService_l is called to remove the old death notifier,
+ // invalidate the service, and optionally clear the service pointer.
+ template <typename Service>
+ void invalidateService_l() REQUIRES(mMutex) {
+ mDeathNotificationHandle.reset();
+ const auto traits = getTraits_l<Service>();
+ mValid = false;
+ if (!(traits->options() & ServiceOptions::kNonNull) || mSkip) {
+ auto &service = std::get<BaseInterfaceType<Service>>(mService);
+ service = nullptr;
+ }
+ }
+
+ // gets the traits set by init(), initializes with default if init() not called.
+ template <typename Service>
+ std::shared_ptr<FunctionalServiceTraits<Service>> getTraits_l() REQUIRES(mMutex) {
+ if (!mTraits) {
+ mTraits = std::make_shared<FunctionalServiceTraits<Service>>(
+ DefaultServiceTraits<Service>{});
+ }
+ return std::static_pointer_cast<FunctionalServiceTraits<Service>>(mTraits);
+ }
+
+ // sets the service notification
+ template <typename Service>
+ void setServiceNotifier_l() REQUIRES(mMutex) {
+ const auto traits = getTraits_l<Service>();
+ mServiceNotificationHandle = requestServiceNotification<Service>(
+ [traits, this](const InterfaceType<Service>& service) {
+ audio_utils::unique_lock ul(mMutex);
+ auto originalService = std::get<BaseInterfaceType<Service>>(mService);
+ if (originalService != service) {
+ mService = service;
+ mValid = true;
+ setDeathNotifier_l<Service>();
+ traits->onNewService(service);
+ }
+ ul.unlock();
+ mCv.notify_all();
+ }, traits->getServiceName());
+ ALOGW_IF(!mServiceNotificationHandle, "%s: cannot register service notification %s"
+ " (do we have permission?)",
+ __func__, toString(Service::descriptor).c_str());
+ }
+
+ // sets the death notifier for mService (mService must be non-null).
+ template <typename Service>
+ void setDeathNotifier_l() REQUIRES(mMutex) {
+ auto base = std::get<BaseInterfaceType<Service>>(mService);
+ auto service = interfaceFromBase<Service>(base);
+ const auto binder = binderFromInterface(service);
+ if (binder.get()) {
+ auto traits = getTraits_l<Service>();
+ mDeathNotificationHandle = requestDeathNotification(
+ base, [traits, service, this]() {
+ // as only one death notification is dispatched,
+                        // we do not need a generation count.
+ {
+ std::lock_guard l(mMutex);
+ invalidateService_l<Service>();
+ }
+ traits->onServiceDied(service);
+ });
+ ALOGW_IF(!mDeathNotificationHandle, "%s: cannot register death notification %s"
+ " (already died?)",
+ __func__, toString(Service::descriptor).c_str());
+ }
+ }
+
+ // initializes the variant for NDK use (called on first creation in the cache map).
+ void init_ndk() EXCLUDES(mMutex) {
+ std::lock_guard l(mMutex);
+ mService = std::shared_ptr<::ndk::ICInterface>{};
+ }
+
+ // initializes the variant for CPP use (called on first creation in the cache map).
+ void init_cpp() EXCLUDES(mMutex) {
+ std::lock_guard l(mMutex);
+ mService = sp<::android::IInterface>{};
+ }
+
+ static std::string toString(const std::string& s) { return s; }
+ static std::string toString(const String16& s) { return String8(s).c_str(); }
+
+ mutable std::mutex mMutex;
+ std::condition_variable mCv;
+ static constexpr auto kPollTime = std::chrono::seconds(1);
+
+ std::variant<std::shared_ptr<::ndk::ICInterface>,
+ sp<::android::IInterface>> mService GUARDED_BY(mMutex);
+ // aesthetically we place these last, but a ServiceHandler is never deleted in
+ // current operation, so there is no deadlock on destruction.
+ std::shared_ptr<void> mDeathNotificationHandle GUARDED_BY(mMutex);
+ std::shared_ptr<void> mServiceNotificationHandle GUARDED_BY(mMutex);
+ std::shared_ptr<void> mTraits GUARDED_BY(mMutex);
+
+ // mValid is true iff the service is non-null and alive.
+ bool mValid GUARDED_BY(mMutex) = false;
+
+ // mSkip indicates that the service is not cached.
+ bool mSkip GUARDED_BY(mMutex) = false;
+};
+
+} // namespace details
+
+//----------------------------------
+// ServiceSingleton API
+//
+
+/*
+ * Implementation detail:
+ *
+ * Each CPP or NDK service interface has a unique ServiceHandler that
+ * is stored in a singleton cache. The cache key is based on the service descriptor string
+ * so only one version can be chosen. (The particular version may be changed using
+ * ServiceTraits::getServiceName()).
+ */
+
+/**
+ * Sets the service trait parameters for acquiring the Service interface.
+ *
+ * If this is not set before the first service fetch, then default service traits are used.
+ *
+ * @return true if preexisting traits (including previously set defaults) were present.
+ */
+template<typename Service, typename ServiceTraits>
+bool initService(const ServiceTraits& serviceTraits = {}) {
+ const auto serviceHandler = details::ServiceHandler::getInstance(Service::descriptor);
+ return serviceHandler->template init<Service>(serviceTraits);
+}
+
+/**
+ * Returns either a std::shared_ptr<Interface> or sp<Interface>
+ * for the AIDL service. If the service is not available within waitNs,
+ * the method will return nullptr
+ * (or the previously invalidated service if the traits' options() include kNonNull).
+ *
+ * This method installs a callback to obtain the service, so with waitNs == 0, it may be used to
+ * prefetch the service before it is actually needed.
+ *
+ * @param waitNs wait time for the service to become available.
+ * @return
+ * a sp<> for a CPP interface
+ *      a std::shared_ptr<> for an NDK interface
+ *
+ */
+template<typename Service>
+auto getService(std::chrono::nanoseconds waitNs = {}) {
+ const auto serviceHandler = details::ServiceHandler::getInstance(Service::descriptor);
+ return interfaceFromBase<Service>(serviceHandler->template get<Service>(
+ waitNs, true /* useCallback */));
+}
+
+/**
+ * Returns either a std::shared_ptr<Interface> or sp<Interface>
+ * for the AIDL service. If the service is not available within waitNs,
+ * the method will return nullptr
+ * (or the previously invalidated service if the traits' options() include kNonNull).
+ *
+ * This method polls to obtain the service, which
+ * is useful if the service is restricted due to permissions or
+ * one is concerned about ThreadPool starvation.
+ *
+ * @param waitNs wait time for the service to become available.
+ * @return
+ * a sp<> for a CPP interface
+ *      a std::shared_ptr<> for an NDK interface
+ */
+template<typename Service>
+auto checkService(std::chrono::nanoseconds waitNs = {}) {
+ const auto serviceHandler = details::ServiceHandler::getInstance(Service::descriptor);
+ return interfaceFromBase<Service>(serviceHandler->template get<Service>(
+ waitNs, false /* useCallback */));
+}
+
+/**
+ * Sets a service implementation override, replacing any fetched service from ServiceManager.
+ *
+ * An empty service clears the cache.
+ */
+template<typename Service>
+void setService(const InterfaceType<Service>& service) {
+ const auto serviceHandler = details::ServiceHandler::getInstance(Service::descriptor);
+ serviceHandler->template set<Service>(service);
+}
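+
+// Illustrative sketch (not part of this change): a test may inject a local (fake)
+// implementation for a hypothetical IMyService, bypassing ServiceManager, and later
+// disable the cache with skipService() (declared below).
+//
+//   sp<IMyService> fake = sp<FakeMyService>::make();   // hypothetical fake
+//   mediautils::setService<IMyService>(fake);
+//   // ... exercise code that calls mediautils::getService<IMyService>() ...
+//   mediautils::skipService<IMyService>();             // release the cache and notifiers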
+
+/**
+ * Disables the service cache.
+ *
+ * This releases any service and notification callbacks. After this,
+ * another initService() can be called seamlessly.
+ */
+template<typename Service>
+void skipService() {
+ const auto serviceHandler = details::ServiceHandler::getInstance(Service::descriptor);
+ serviceHandler->template skip<Service>();
+}
+
+} // namespace android::mediautils
diff --git a/media/utils/tests/Android.bp b/media/utils/tests/Android.bp
index ff11b42..4456df2 100644
--- a/media/utils/tests/Android.bp
+++ b/media/utils/tests/Android.bp
@@ -12,8 +12,6 @@
cc_defaults {
name: "libmediautils_tests_config",
- host_supported: true,
-
cflags: [
"-Wall",
"-Werror",
@@ -67,6 +65,22 @@
],
}
+aidl_interface {
+ name: "ServiceSingletonTestInterface",
+ unstable: true,
+ srcs: [
+ "IServiceSingletonTest.aidl",
+ ],
+ backend: {
+ cpp: {
+ enabled: true,
+ },
+ ndk: {
+ enabled: true,
+ },
+ },
+}
+
cc_test_library {
name: "libsharedtest",
@@ -178,6 +192,34 @@
}
cc_test {
+ name: "service_singleton_tests",
+
+ defaults: ["libmediautils_tests_config"],
+
+ // to add and get services, we need to be root.
+ require_root: true,
+ host_supported: false,
+
+ srcs: [
+ "service_singleton_tests.cpp",
+ ],
+
+ shared_libs: [
+ "libaudioutils",
+ "libbinder",
+ "libbinder_ndk",
+ "liblog",
+ "libmediautils",
+ "libutils",
+ ],
+
+ static_libs: [
+ "ServiceSingletonTestInterface-cpp",
+ "ServiceSingletonTestInterface-ndk",
+ ],
+}
+
+cc_test {
name: "static_string_tests",
defaults: ["libmediautils_tests_defaults"],
diff --git a/media/utils/tests/IServiceSingletonTest.aidl b/media/utils/tests/IServiceSingletonTest.aidl
new file mode 100644
index 0000000..9f889a6
--- /dev/null
+++ b/media/utils/tests/IServiceSingletonTest.aidl
@@ -0,0 +1,19 @@
+/*
+ * Copyright (C) 2024 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+interface IServiceSingletonTest {
+ int inc();
+}
diff --git a/media/utils/tests/service_singleton_tests.cpp b/media/utils/tests/service_singleton_tests.cpp
new file mode 100644
index 0000000..8656a20
--- /dev/null
+++ b/media/utils/tests/service_singleton_tests.cpp
@@ -0,0 +1,365 @@
+/*
+ * Copyright (C) 2024 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#define LOG_TAG "service_singleton_tests"
+
+#include <mediautils/ServiceSingleton.h>
+
+#include "BnServiceSingletonTest.h"
+#include "aidl/BnServiceSingletonTest.h"
+#include <audio_utils/RunRemote.h>
+#include <binder/IPCThreadState.h>
+#include <binder/ProcessState.h>
+#include <gtest/gtest.h>
+#include <utils/Log.h>
+
+using namespace android;
+
+/**
+ * Service Singleton Test uses a worker process to spawn new binder services.
+ *
+ * A worker process is required since we cannot fork after registering
+ * with the binder driver.
+ *
+ * Test Process -> Worker_Process -> Service Process(1)
+ * -> Service Process(2)
+ * -> ....
+ */
+
+// Service implementation.
+class ServiceSingletonTestCpp : public BnServiceSingletonTest {
+public:
+ binder::Status inc(int32_t* _aidl_return) final {
+ *_aidl_return = ++mValue;
+ return binder::Status::ok();
+ }
+ std::atomic_int32_t mValue = 0;
+};
+
+// The service traits increment static atomic counters, which
+// validates that the trait callbacks are invoked.
+static std::atomic_int32_t sNewService = 0;
+static std::atomic_int32_t sServiceDied = 0;
+
+template <typename Service>
+struct TestServiceTraits : public mediautils::DefaultServiceTraits<Service> {
+ static constexpr const char* getServiceName() { return ""; }
+ static constexpr void onNewService(const mediautils::InterfaceType<Service>&) {
+ ++sNewService;
+ }
+ static constexpr void onServiceDied(const mediautils::InterfaceType<Service>&) {
+ ++sServiceDied;
+ }
+};
+
+// Here we have an alternative set of service traits,
+// used to validate that we can switch traits for the service singleton.
+static std::atomic_int32_t sNewService2 = 0;
+static std::atomic_int32_t sServiceDied2 = 0;
+
+template <typename Service>
+struct TestServiceTraits2 : public mediautils::DefaultServiceTraits<Service> {
+ static constexpr const char* getServiceName() { return ""; }
+ static constexpr void onNewService(const mediautils::InterfaceType<Service>&) {
+ ++sNewService2;
+ }
+ static constexpr void onServiceDied(const mediautils::InterfaceType<Service>&) {
+ ++sServiceDied2;
+ }
+};
+
+/*
+ * ServiceThreads run in a remote process.
+ *
+ * The WorkerThread is used to launch and kill the ServiceThread in a remote process.
+ */
+static void ServiceThread(audio_utils::RunRemote& runRemote) {
+ int c = runRemote.getc(); // requires any character to launch
+ auto service = sp<IServiceSingletonTest>::cast(sp<ServiceSingletonTestCpp>::make());
+ mediautils::addService(service);
+ ProcessState::self()->startThreadPool();
+ runRemote.putc(c); // echo character.
+ IPCThreadState::self()->joinThreadPool();
+}
+
+/*
+ * The WorkerThread is run in a remote process from the test. It communicates with
+ * the test process through pipes.
+ */
+static void WorkerThread(audio_utils::RunRemote& runRemote) {
+ std::shared_ptr<audio_utils::RunRemote> remoteService;
+ while (true) {
+ const int c = runRemote.getc();
+ switch (c) {
+ case 'a': // launch a new service.
+ // if the old service isn't destroyed, it will be destroyed here
+ // when the RunRemote is replaced.
+ remoteService = std::make_shared<audio_utils::RunRemote>(ServiceThread);
+ remoteService->run();
+ remoteService->putc('a'); // create service.
+ (void)remoteService->getc(); // ensure it is created.
+ runRemote.putc(c); // echo
+ break;
+ case 'b': // destroys the old service.
+ remoteService.reset(); // this kills the service.
+ runRemote.putc(c); // echo
+ break;
+ default: // respond that we don't know what happened!
+ runRemote.putc('?');
+ break;
+ }
+ }
+}
+
+// This is a monolithic test.
+TEST(service_singleton_tests, one_and_only) {
+ std::atomic_int32_t listenerServiceCreated = 0;
+ std::atomic_int32_t listenerServiceDied = 0;
+
+ // initialize the service cache with a custom handler.
+ mediautils::initService<
+ IServiceSingletonTest, TestServiceTraits<IServiceSingletonTest>>({});
+ mediautils::initService<
+ aidl::IServiceSingletonTest, TestServiceTraits<aidl::IServiceSingletonTest>>({});
+
+ // start the worker thread that spawns the services.
+ auto remoteWorker = std::make_shared<audio_utils::RunRemote>(WorkerThread);
+ remoteWorker->run();
+
+ // now we are ready for binder.
+ ProcessState::self()->startThreadPool();
+
+ // check that our service isn't preexisting.
+ {
+ auto service = mediautils::checkServicePassThrough<IServiceSingletonTest>();
+ EXPECT_FALSE(service);
+
+ auto service2 = mediautils::checkServicePassThrough<aidl::IServiceSingletonTest>();
+ EXPECT_FALSE(service2);
+ }
+ EXPECT_EQ(0, sNewService);
+ EXPECT_EQ(0, sServiceDied);
+
+ {
+ auto service = mediautils::checkService<IServiceSingletonTest>();
+ EXPECT_FALSE(service);
+
+ auto service2 = mediautils::checkService<aidl::IServiceSingletonTest>();
+ EXPECT_FALSE(service2);
+ }
+ EXPECT_EQ(0, sNewService);
+ EXPECT_EQ(0, sServiceDied);
+
+ // getService will register a notification handler that fetches the
+ // service in the background.
+ {
+ auto service = mediautils::getService<IServiceSingletonTest>();
+ EXPECT_FALSE(service);
+
+ auto service2 = mediautils::getService<aidl::IServiceSingletonTest>();
+ EXPECT_FALSE(service2);
+ }
+ EXPECT_EQ(0, sNewService);
+ EXPECT_EQ(0, sServiceDied);
+
+ // now spawn the service.
+ remoteWorker->putc('a');
+ EXPECT_EQ('a', remoteWorker->getc());
+
+ sleep(1); // In the background, 2 services were fetched.
+
+ EXPECT_EQ(2, sNewService);
+ EXPECT_EQ(0, sServiceDied);
+
+ // we repeat the prior checks, but the service is cached now.
+ {
+ auto service = mediautils::checkServicePassThrough<IServiceSingletonTest>();
+ EXPECT_TRUE(service);
+
+ auto service2 = mediautils::checkServicePassThrough<aidl::IServiceSingletonTest>();
+ EXPECT_TRUE(service2);
+ }
+ EXPECT_EQ(2, sNewService);
+ EXPECT_EQ(0, sServiceDied);
+
+ {
+ auto service = mediautils::checkService<IServiceSingletonTest>();
+ EXPECT_TRUE(service);
+
+ auto service2 = mediautils::checkService<aidl::IServiceSingletonTest>();
+ EXPECT_TRUE(service2);
+ }
+ EXPECT_EQ(2, sNewService);
+ EXPECT_EQ(0, sServiceDied);
+
+ {
+ auto service = mediautils::getService<IServiceSingletonTest>();
+ EXPECT_TRUE(service);
+
+ auto service2 = mediautils::getService<aidl::IServiceSingletonTest>();
+ EXPECT_TRUE(service2);
+ }
+ EXPECT_EQ(2, sNewService);
+ EXPECT_EQ(0, sServiceDied);
+
+ // destroy the service.
+ remoteWorker->putc('b');
+ EXPECT_EQ('b', remoteWorker->getc());
+
+ sleep(1);
+
+ // We expect the died callbacks.
+ EXPECT_EQ(2, sNewService);
+ EXPECT_EQ(2, sServiceDied);
+
+ // we can also manually check whether there is a new service by
+ // requesting service notifications. This is outside of the service singleton
+ // traits.
+ auto handle1 = mediautils::requestServiceNotification<IServiceSingletonTest>(
+ [&](const sp<IServiceSingletonTest>&) { ++listenerServiceCreated; });
+ auto handle2 = mediautils::requestServiceNotification<aidl::IServiceSingletonTest>(
+ [&](const std::shared_ptr<aidl::IServiceSingletonTest>&) {
+ ++listenerServiceCreated; });
+
+ // Spawn the service again.
+ remoteWorker->putc('a');
+ EXPECT_EQ('a', remoteWorker->getc());
+
+ sleep(1); // In the background, 2 services were fetched.
+
+ EXPECT_EQ(4, sNewService);
+ EXPECT_EQ(2, sServiceDied);
+
+ EXPECT_EQ(2, listenerServiceCreated); // our listener picked up the service creation.
+
+ std::shared_ptr<void> handle3, handle4;
+ std::shared_ptr<aidl::IServiceSingletonTest> keepAlive; // NDK Workaround!
+ {
+ auto service = mediautils::getService<IServiceSingletonTest>();
+ EXPECT_TRUE(service);
+
+ auto service2 = mediautils::getService<aidl::IServiceSingletonTest>();
+ EXPECT_TRUE(service2);
+
+ keepAlive = service2;
+
+ // we can also request our own death notifications (outside of the service traits).
+ handle3 = mediautils::requestDeathNotification(service, [&] { ++listenerServiceDied; });
+ handle4 = mediautils::requestDeathNotification(service2, [&] { ++listenerServiceDied; });
+ }
+
+ EXPECT_EQ(4, sNewService);
+ EXPECT_EQ(2, sServiceDied);
+
+ // destroy the service.
+
+ remoteWorker->putc('b');
+ EXPECT_EQ('b', remoteWorker->getc());
+
+ sleep(1);
+
+ // We expect the died callbacks.
+ EXPECT_EQ(4, sNewService);
+ EXPECT_EQ(4, sServiceDied);
+
+ EXPECT_EQ(2, listenerServiceCreated);
+ EXPECT_EQ(2, listenerServiceDied); // NDK Workaround - without keepAlive, this is 1.
+ // the death notification is invalidated without a
+ // pointer to the binder object.
+
+ keepAlive.reset();
+
+ // Cancel the singleton cache.
+ mediautils::skipService<IServiceSingletonTest>();
+ mediautils::skipService<aidl::IServiceSingletonTest>();
+
+ // Spawn the service again.
+ remoteWorker->putc('a');
+ EXPECT_EQ('a', remoteWorker->getc());
+
+ sleep(1);
+
+ // We expect no change from the service traits (service not cached).
+ EXPECT_EQ(4, sNewService);
+ EXPECT_EQ(4, sServiceDied);
+ EXPECT_EQ(4, listenerServiceCreated); // our listener picks it up.
+
+ // remove service
+ remoteWorker->putc('b');
+ EXPECT_EQ('b', remoteWorker->getc());
+
+ sleep(1);
+
+ // We expect no change from the service traits (service not cached).
+ EXPECT_EQ(4, sNewService);
+ EXPECT_EQ(4, sServiceDied);
+ EXPECT_EQ(4, listenerServiceCreated);
+ EXPECT_EQ(2, listenerServiceDied); // binder died is associated with the actual handle.
+
+ // replace the service traits.
+ {
+ auto previous = mediautils::initService<
+ IServiceSingletonTest, TestServiceTraits2<IServiceSingletonTest>>({});
+ auto previous2 = mediautils::initService<
+ aidl::IServiceSingletonTest, TestServiceTraits2<aidl::IServiceSingletonTest>>({});
+
+ EXPECT_FALSE(previous);
+ EXPECT_FALSE(previous2);
+ }
+
+ // We expect no change with old counters.
+ EXPECT_EQ(4, sNewService);
+ EXPECT_EQ(4, sServiceDied);
+ EXPECT_EQ(0, sNewService2);
+ EXPECT_EQ(0, sServiceDied2);
+
+ {
+ auto service = mediautils::getService<IServiceSingletonTest>();
+ EXPECT_FALSE(service);
+
+ auto service2 = mediautils::getService<aidl::IServiceSingletonTest>();
+ EXPECT_FALSE(service2);
+ }
+
+ EXPECT_EQ(4, sNewService);
+ EXPECT_EQ(4, sServiceDied);
+ EXPECT_EQ(0, sNewService2);
+ EXPECT_EQ(0, sServiceDied2);
+
+ // Spawn the service again.
+ remoteWorker->putc('a');
+ EXPECT_EQ('a', remoteWorker->getc());
+
+ sleep(1);
+
+ EXPECT_EQ(4, sNewService); // old counters do not change.
+ EXPECT_EQ(4, sServiceDied);
+ EXPECT_EQ(2, sNewService2); // new counters change
+ EXPECT_EQ(0, sServiceDied2);
+
+ EXPECT_EQ(6, listenerServiceCreated); // listener associated with service name picks up info.
+
+ // Release the service.
+ remoteWorker->putc('b');
+ EXPECT_EQ('b', remoteWorker->getc());
+
+ sleep(1);
+
+ EXPECT_EQ(4, sNewService); // old counters do not change.
+ EXPECT_EQ(4, sServiceDied);
+ EXPECT_EQ(2, sNewService2); // new counters change
+ EXPECT_EQ(2, sServiceDied2);
+}
diff --git a/services/audioflinger/IAfTrack.h b/services/audioflinger/IAfTrack.h
index 1b10f81..d27d52a 100644
--- a/services/audioflinger/IAfTrack.h
+++ b/services/audioflinger/IAfTrack.h
@@ -299,7 +299,7 @@
return "Type Id Active Client(pid/uid) Session Port Id S Flags "
" Format Chn mask SRate "
"ST Usg CT "
- " G db L dB R dB VS dB PortVol dB PortMuted"
+ " G db L dB R dB VS dB PortVol dB PortMuted "
" Server FrmCnt FrmRdy F Underruns Flushed BitPerfect InternalMute"
" Latency\n"sv;
}
diff --git a/services/audioflinger/MmapTracks.h b/services/audioflinger/MmapTracks.h
index 30bbd5d..0210bc2 100644
--- a/services/audioflinger/MmapTracks.h
+++ b/services/audioflinger/MmapTracks.h
@@ -73,10 +73,10 @@
mVolume = volume;
}
void setPortMute(bool muted) override {
- mMuteState.muteFromPortVolume = muted;
+ mMutedFromPort = muted;
}
float getPortVolume() const override { return mVolume; }
- bool getPortMute() const override { return mMuteState.muteFromPortVolume; }
+ bool getPortMute() const override { return mMutedFromPort; }
private:
DISALLOW_COPY_AND_ASSIGN(MmapTrack);
@@ -101,6 +101,7 @@
/* GUARDED_BY(MmapPlaybackThread::mLock) */;
mute_state_t mMuteState
/* GUARDED_BY(MmapPlaybackThread::mLock) */;
+ bool mMutedFromPort;
float mVolume = 0.0f;
}; // end of Track
diff --git a/services/audioflinger/PlaybackTracks.h b/services/audioflinger/PlaybackTracks.h
index 70bab6a..2c3212c 100644
--- a/services/audioflinger/PlaybackTracks.h
+++ b/services/audioflinger/PlaybackTracks.h
@@ -229,7 +229,7 @@
void setPortVolume(float volume) override;
void setPortMute(bool muted) override;
float getPortVolume() const override { return mVolume; }
- bool getPortMute() const override { return mMuteState.load().muteFromPortVolume; }
+ bool getPortMute() const override { return mMutedFromPort; }
protected:
@@ -414,6 +414,7 @@
// access these two variables only when holding player thread lock.
std::unique_ptr<os::PersistableBundle> mMuteEventExtras;
std::atomic<mute_state_t> mMuteState;
+ std::atomic<bool> mMutedFromPort;
bool mInternalMute = false;
std::atomic<float> mVolume = 0.0f;
}; // end of Track
diff --git a/services/audioflinger/RecordTracks.h b/services/audioflinger/RecordTracks.h
index 3de9968..83cd024 100644
--- a/services/audioflinger/RecordTracks.h
+++ b/services/audioflinger/RecordTracks.h
@@ -73,7 +73,7 @@
bool isDirect() const final
{ return (mFlags & AUDIO_INPUT_FLAG_DIRECT) != 0; }
- void setSilenced(bool silenced) final { if (!isPatchTrack()) mSilenced = silenced; }
+ void setSilenced(bool silenced) final;
bool isSilenced() const final { return mSilenced; }
status_t getActiveMicrophones(
diff --git a/services/audioflinger/Threads.cpp b/services/audioflinger/Threads.cpp
index 1c0b749..060c72b 100644
--- a/services/audioflinger/Threads.cpp
+++ b/services/audioflinger/Threads.cpp
@@ -3560,26 +3560,8 @@
void PlaybackThread::checkSilentMode_l()
{
- if (!mMasterMute) {
- char value[PROPERTY_VALUE_MAX];
- if (mOutDeviceTypeAddrs.empty()) {
- ALOGD("ro.audio.silent is ignored since no output device is set");
- return;
- }
- if (isSingleDeviceType(outDeviceTypes_l(), AUDIO_DEVICE_OUT_REMOTE_SUBMIX)) {
- ALOGD("ro.audio.silent will be ignored for threads on AUDIO_DEVICE_OUT_REMOTE_SUBMIX");
- return;
- }
- if (property_get("ro.audio.silent", value, "0") > 0) {
- char *endptr;
- unsigned long ul = strtoul(value, &endptr, 0);
- if (*endptr == '\0' && ul != 0) {
- ALOGW("%s: mute from ro.audio.silent. Silence is golden", __func__);
- // The setprop command will not allow a property to be changed after
- // the first time it is set, so we don't have to worry about un-muting.
- setMasterMute_l(true);
- }
- }
+ if (property_get_bool("ro.audio.silent", false)) {
+ ALOGW("ro.audio.silent is now ignored");
}
}
@@ -7872,6 +7854,7 @@
ssize_t DuplicatingThread::threadLoop_write()
{
+ ATRACE_BEGIN("write");
for (size_t i = 0; i < outputTracks.size(); i++) {
const ssize_t actualWritten = outputTracks[i]->write(mSinkBuffer, writeFrames);
@@ -7890,6 +7873,7 @@
// TODO: Report correction for the other output tracks and show in the dump.
}
+ ATRACE_END();
if (mStandby) {
mThreadMetrics.logBeginInterval();
mThreadSnapshot.onBegin();
@@ -11438,18 +11422,8 @@
void MmapPlaybackThread::checkSilentMode_l()
{
- if (!mMasterMute) {
- char value[PROPERTY_VALUE_MAX];
- if (property_get("ro.audio.silent", value, "0") > 0) {
- char *endptr;
- unsigned long ul = strtoul(value, &endptr, 0);
- if (*endptr == '\0' && ul != 0) {
- ALOGW("%s: mute from ro.audio.silent. Silence is golden", __func__);
- // The setprop command will not allow a property to be changed after
- // the first time it is set, so we don't have to worry about un-muting.
- setMasterMute_l(true);
- }
- }
+ if (property_get_bool("ro.audio.silent", false)) {
+ ALOGW("ro.audio.silent is now ignored");
}
}
diff --git a/services/audioflinger/Tracks.cpp b/services/audioflinger/Tracks.cpp
index 0ddbaec..867561a 100644
--- a/services/audioflinger/Tracks.cpp
+++ b/services/audioflinger/Tracks.cpp
@@ -864,16 +864,14 @@
populateUsageAndContentTypeFromStreamType();
- mute_state_t newMuteState = mMuteState.load();
- newMuteState.muteFromPortVolume = muted;
+ mMutedFromPort = muted;
// Audio patch and call assistant volume are always max
if (mAttr.usage == AUDIO_USAGE_CALL_ASSISTANT
|| mAttr.usage == AUDIO_USAGE_VIRTUAL_SOURCE) {
mVolume = 1.0f;
- newMuteState.muteFromPortVolume = false;
+ mMutedFromPort = false;
}
- mMuteState.store(newMuteState);
mServerLatencySupported = checkServerLatencySupported(format, flags);
#ifdef TEE_SINK
@@ -1092,7 +1090,7 @@
result.appendFormat("%7s %7u/%7u %7u %7u %2s 0x%03X "
"%08X %08X %6u "
"%2u %3x %2x "
- "%5.2g %5.2g %5.2g %5.2g%c %11.2g %12s"
+ "%5.2g %5.2g %5.2g %5.2g%c %11.2g %10s "
"%08X %6zu%c %6zu %c %9u%c %7u %10s %12s",
active ? "yes" : "no",
mClient ? mClient->pid() : getpid() ,
@@ -1630,12 +1628,10 @@
}
void Track::setPortMute(bool muted) {
- mute_state_t newMuteState = mMuteState.load();
- if (newMuteState.muteFromPortVolume == muted) {
+ if (mMutedFromPort == muted) {
return;
}
- newMuteState.muteFromPortVolume = muted;
- mMuteState.store(newMuteState);
+ mMutedFromPort = muted;
if (mType != TYPE_PATCH) {
// Do not recursively propagate a PatchTrack setPortVolume to
// downstream PatchTracks.
@@ -3201,6 +3197,14 @@
*backInserter++ = metadata;
}
+void RecordTrack::setSilenced(bool silenced) {
+ if (!isPatchTrack() && mSilenced != silenced) {
+ mSilenced = silenced;
+ ALOGD("%s: track with port id: %d, (%s)", __func__, mPortId,
+ mSilenced ? "silenced" : "unsilenced");
+ }
+}
+
// ----------------------------------------------------------------------------
#undef LOG_TAG
#define LOG_TAG "AF::PatchRecord"
@@ -3591,14 +3595,14 @@
mUid(VALUE_OR_FATAL(aidl2legacy_int32_t_uid_t(attributionSource.uid))),
mSilenced(false), mSilencedNotified(false), mVolume(volume)
{
- mMuteState.muteFromPortVolume = muted;
+ mMutedFromPort = muted;
// Once this item is logged by the server, the client can add properties.
mTrackMetrics.logConstructor(creatorPid, uid(), id());
if (isOut && (attr.usage == AUDIO_USAGE_CALL_ASSISTANT
|| attr.usage == AUDIO_USAGE_VIRTUAL_SOURCE)) {
// Audio patch and call assistant volume are always max
mVolume = 1.0f;
- mMuteState.muteFromPortVolume = false;
+ mMutedFromPort = false;
}
}
@@ -3696,7 +3700,7 @@
if (isOut()) {
result.appendFormat("%4x %2x", mAttr.usage, mAttr.content_type);
result.appendFormat("%11.2g", 20.0 * log10(mVolume));
- result.appendFormat("%12s", mMuteState.muteFromPortVolume ? "true" : "false");
+ result.appendFormat("%12s", mMutedFromPort ? "true" : "false");
} else {
result.appendFormat("%7x", mAttr.source);
}
diff --git a/services/audioflinger/timing/MonotonicFrameCounter.cpp b/services/audioflinger/timing/MonotonicFrameCounter.cpp
index 286f549..175e2f5 100644
--- a/services/audioflinger/timing/MonotonicFrameCounter.cpp
+++ b/services/audioflinger/timing/MonotonicFrameCounter.cpp
@@ -26,9 +26,9 @@
int64_t newFrameCount, int64_t newTime) {
if (newFrameCount < 0 || newTime < 0) {
const auto result = getLastReportedFrameCount();
- ALOGW("%s: invalid (frame, time) pair newFrameCount:%lld newFrameCount:%lld,"
+ ALOGW("%s: invalid (frame, time) pair newFrameCount:%lld newTime:%lld,"
" using %lld as frameCount",
- __func__, (long long) newFrameCount, (long long)newFrameCount,
+ __func__, (long long)newFrameCount, (long long)newTime,
(long long)result);
return result;
}
diff --git a/services/audiopolicy/AudioPolicyInterface.h b/services/audiopolicy/AudioPolicyInterface.h
index 3539f00..c047a89 100644
--- a/services/audiopolicy/AudioPolicyInterface.h
+++ b/services/audiopolicy/AudioPolicyInterface.h
@@ -19,6 +19,8 @@
#include <android/media/DeviceConnectedState.h>
#include <android/media/TrackInternalMuteInfo.h>
+#include <android/media/audio/common/AudioMMapPolicyInfo.h>
+#include <android/media/audio/common/AudioMMapPolicyType.h>
#include <media/AudioCommonTypes.h>
#include <media/AudioContainers.h>
#include <media/AudioDeviceTypeAddr.h>
@@ -447,6 +449,13 @@
virtual status_t clearPreferredMixerAttributes(const audio_attributes_t* attr,
audio_port_handle_t portId,
uid_t uid) = 0;
+
+ virtual status_t getMmapPolicyInfos(
+ media::audio::common::AudioMMapPolicyType policyType,
+ std::vector<media::audio::common::AudioMMapPolicyInfo> *policyInfos) = 0;
+ virtual status_t getMmapPolicyForDevice(
+ media::audio::common::AudioMMapPolicyType policyType,
+ media::audio::common::AudioMMapPolicyInfo *policyInfo) = 0;
};
// Audio Policy client Interface
@@ -612,6 +621,10 @@
virtual status_t setTracksInternalMute(
const std::vector<media::TrackInternalMuteInfo>& tracksInternalMute) = 0;
+
+ virtual status_t getMmapPolicyInfos(
+ media::audio::common::AudioMMapPolicyType policyType,
+ std::vector<media::audio::common::AudioMMapPolicyInfo> *policyInfos) = 0;
};
// These are the signatures of createAudioPolicyManager/destroyAudioPolicyManager
diff --git a/services/audiopolicy/common/managerdefinitions/include/IOProfile.h b/services/audiopolicy/common/managerdefinitions/include/IOProfile.h
index 688772c..c2ee5f6 100644
--- a/services/audiopolicy/common/managerdefinitions/include/IOProfile.h
+++ b/services/audiopolicy/common/managerdefinitions/include/IOProfile.h
@@ -73,7 +73,8 @@
enum CompatibilityScore{
NO_MATCH = 0,
PARTIAL_MATCH = 1,
- EXACT_MATCH = 2
+ PARTIAL_MATCH_WITH_FLAG = 2,
+ EXACT_MATCH = 3
};
/**
@@ -92,7 +93,6 @@
* @param channelMask to be checked for compatibility. Must be specified
* @param updatedChannelMask if non-NULL, it is assigned the actual channel mask
* @param flags to be checked for compatibility
- * @param exactMatchRequiredForInputFlags true if exact match is required on flags
* @return how the IO profile is compatible with the given parameters.
*/
CompatibilityScore getCompatibilityScore(const DeviceVector &devices,
@@ -103,8 +103,7 @@
audio_channel_mask_t channelMask,
audio_channel_mask_t *updatedChannelMask,
// FIXME parameter type
- uint32_t flags,
- bool exactMatchRequiredForInputFlags = false) const;
+ uint32_t flags) const;
/**
* @brief areAllDevicesSupported: Checks if the given devices are supported by the IO profile.
@@ -119,11 +118,9 @@
* specified flags.
*
* @param flags to be checked for compatibility
- * @param exactMatchRequiredForInputFlags true if exact match is required on flags
* @return true if the profile is compatible, false otherwise.
*/
- bool isCompatibleProfileForFlags(uint32_t flags,
- bool exactMatchRequiredForInputFlags = false) const;
+ bool isCompatibleProfileForFlags(uint32_t flags) const;
void dump(String8 *dst, int spaces) const;
void log();
@@ -235,6 +232,7 @@
private:
void refreshMixerBehaviors();
+ CompatibilityScore getFlagsCompatibleScore(uint32_t flags) const;
DeviceVector mSupportedDevices; // supported devices: this input/output can be routed from/to
diff --git a/services/audiopolicy/common/managerdefinitions/src/IOProfile.cpp b/services/audiopolicy/common/managerdefinitions/src/IOProfile.cpp
index 991b103..bc9eb20 100644
--- a/services/audiopolicy/common/managerdefinitions/src/IOProfile.cpp
+++ b/services/audiopolicy/common/managerdefinitions/src/IOProfile.cpp
@@ -42,15 +42,14 @@
audio_channel_mask_t channelMask,
audio_channel_mask_t *updatedChannelMask,
// FIXME type punning here
- uint32_t flags,
- bool exactMatchRequiredForInputFlags) const {
+ uint32_t flags) const {
const bool isPlaybackThread =
getType() == AUDIO_PORT_TYPE_MIX && getRole() == AUDIO_PORT_ROLE_SOURCE;
const bool isRecordThread =
getType() == AUDIO_PORT_TYPE_MIX && getRole() == AUDIO_PORT_ROLE_SINK;
ALOG_ASSERT(isPlaybackThread != isRecordThread);
- if (!areAllDevicesSupported(devices) ||
- !isCompatibleProfileForFlags(flags, exactMatchRequiredForInputFlags)) {
+ const auto flagsCompatibleScore = getFlagsCompatibleScore(flags);
+ if (!areAllDevicesSupported(devices) || flagsCompatibleScore == NO_MATCH) {
return NO_MATCH;
}
@@ -81,7 +80,11 @@
result = EXACT_MATCH;
} else if (checkCompatibleAudioProfile(
myUpdatedSamplingRate, myUpdatedChannelMask, myUpdatedFormat) == NO_ERROR) {
- result = PARTIAL_MATCH;
+ if (flagsCompatibleScore == EXACT_MATCH) {
+ result = PARTIAL_MATCH_WITH_FLAG;
+ } else {
+ result = PARTIAL_MATCH;
+ }
} else {
return result;
}
@@ -118,32 +121,8 @@
return mSupportedDevices.containsAllDevices(devices);
}
-bool IOProfile::isCompatibleProfileForFlags(uint32_t flags,
- bool exactMatchRequiredForInputFlags) const {
- const bool isPlaybackThread =
- getType() == AUDIO_PORT_TYPE_MIX && getRole() == AUDIO_PORT_ROLE_SOURCE;
- const bool isRecordThread =
- getType() == AUDIO_PORT_TYPE_MIX && getRole() == AUDIO_PORT_ROLE_SINK;
- ALOG_ASSERT(isPlaybackThread != isRecordThread);
-
- const uint32_t mustMatchOutputFlags =
- AUDIO_OUTPUT_FLAG_DIRECT|AUDIO_OUTPUT_FLAG_HW_AV_SYNC|AUDIO_OUTPUT_FLAG_MMAP_NOIRQ;
- if (isPlaybackThread &&
- !audio_output_flags_is_subset((audio_output_flags_t)getFlags(),
- (audio_output_flags_t)flags,
- mustMatchOutputFlags)) {
- return false;
- }
- // The only input flag that is allowed to be different is the fast flag.
- // An existing fast stream is compatible with a normal track request.
- // An existing normal stream is compatible with a fast track request,
- // but the fast request will be denied by AudioFlinger and converted to normal track.
- if (isRecordThread && ((getFlags() ^ flags) &
- ~(exactMatchRequiredForInputFlags ? AUDIO_INPUT_FLAG_NONE : AUDIO_INPUT_FLAG_FAST))) {
- return false;
- }
-
- return true;
+bool IOProfile::isCompatibleProfileForFlags(uint32_t flags) const {
+ return getFlagsCompatibleScore(flags) != NO_MATCH;
}
bool IOProfile::containsSingleDeviceSupportingEncodedFormats(
@@ -228,6 +207,39 @@
}
}
+IOProfile::CompatibilityScore IOProfile::getFlagsCompatibleScore(uint32_t flags) const {
+ const bool isPlaybackThread =
+ getType() == AUDIO_PORT_TYPE_MIX && getRole() == AUDIO_PORT_ROLE_SOURCE;
+ const bool isRecordThread =
+ getType() == AUDIO_PORT_TYPE_MIX && getRole() == AUDIO_PORT_ROLE_SINK;
+ ALOG_ASSERT(isPlaybackThread != isRecordThread);
+
+ const uint32_t mustMatchOutputFlags =
+ AUDIO_OUTPUT_FLAG_DIRECT|AUDIO_OUTPUT_FLAG_HW_AV_SYNC|AUDIO_OUTPUT_FLAG_MMAP_NOIRQ;
+ if (isPlaybackThread &&
+ !audio_output_flags_is_subset((audio_output_flags_t)getFlags(),
+ (audio_output_flags_t)flags,
+ mustMatchOutputFlags)) {
+ return NO_MATCH;
+ }
+ // The only input flag that is allowed to be different is the fast flag.
+ // An existing fast stream is compatible with a normal track request.
+ // An existing normal stream is compatible with a fast track request,
+ // but the fast request will be denied by AudioFlinger and converted to normal track.
+ if (isRecordThread) {
+ const auto unmatchedFlag = getFlags() ^ flags;
+ if (unmatchedFlag == AUDIO_INPUT_FLAG_NONE) {
+ return EXACT_MATCH;
+ } else if (unmatchedFlag == AUDIO_INPUT_FLAG_FAST) {
+ return PARTIAL_MATCH;
+ } else {
+ return NO_MATCH;
+ }
+ }
+
+ return EXACT_MATCH;
+}
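+
+// Illustrative example (not part of this change): for a record profile opened with
+// AUDIO_INPUT_FLAG_FAST, a request with no flags scores PARTIAL_MATCH, a request with
+// AUDIO_INPUT_FLAG_FAST scores EXACT_MATCH, and a request that adds any other flag
+// (e.g. AUDIO_INPUT_FLAG_MMAP_NOIRQ) scores NO_MATCH.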
+
void IOProfile::dump(String8 *dst, int spaces) const
{
String8 extraInfo;
diff --git a/services/audiopolicy/enginedefault/src/Engine.cpp b/services/audiopolicy/enginedefault/src/Engine.cpp
index 7de6939..1082d31 100644
--- a/services/audiopolicy/enginedefault/src/Engine.cpp
+++ b/services/audiopolicy/enginedefault/src/Engine.cpp
@@ -156,40 +156,21 @@
return EngineBase::setForceUse(usage, config);
}
-bool Engine::isBtScoActive(DeviceVector& availableOutputDevices,
- const SwAudioOutputCollection &outputs) const {
+bool Engine::isBtScoActive(DeviceVector& availableOutputDevices) const {
+ // SCO is considered active if:
+ // 1) a SCO device is connected
+    // 2) the preferred device for PHONE strategy is BT SCO: this is controlled only by Java
+    //    AudioService and is only true if the SCO audio link has been confirmed active by BT.
if (availableOutputDevices.getDevicesFromTypes(getAudioDeviceOutAllScoSet()).isEmpty()) {
return false;
}
- // SCO is active if:
- // 1) we are in a call and SCO is the preferred device for PHONE strategy
- if (isInCall() && audio_is_bluetooth_out_sco_device(
+
+ if (!audio_is_bluetooth_out_sco_device(
getPreferredDeviceTypeForLegacyStrategy(availableOutputDevices, STRATEGY_PHONE))) {
- return true;
+ return false;
}
- // 2) A strategy for which the preferred device is SCO is active
- for (const auto &ps : getOrderedProductStrategies()) {
- if (outputs.isStrategyActive(ps) &&
- !getPreferredAvailableDevicesForProductStrategy(availableOutputDevices, ps)
- .getDevicesFromTypes(getAudioDeviceOutAllScoSet()).isEmpty()) {
- return true;
- }
- }
- // 3) a ringtone is active and SCO is used for ringing
- if (outputs.isActiveLocally(toVolumeSource(AUDIO_STREAM_RING))
- && (getForceUse(AUDIO_POLICY_FORCE_FOR_VIBRATE_RINGING)
- == AUDIO_POLICY_FORCE_BT_SCO)) {
- return true;
- }
- // 4) an active input is routed from SCO
- DeviceVector availableInputDevices = getApmObserver()->getAvailableInputDevices();
- const auto &inputs = getApmObserver()->getInputs();
- if (inputs.activeInputsCountOnDevices(availableInputDevices.getDevicesFromType(
- AUDIO_DEVICE_IN_BLUETOOTH_SCO_HEADSET)) > 0) {
- return true;
- }
- return false;
+ return true;
}
void Engine::filterOutputDevicesForStrategy(legacy_strategy strategy,
@@ -200,7 +181,7 @@
if (com::android::media::audioserver::use_bt_sco_for_media()) {
// remove A2DP and LE Audio devices whenever BT SCO is in use
- if (isBtScoActive(availableOutputDevices, outputs)) {
+ if (isBtScoActive(availableOutputDevices)) {
availableOutputDevices.remove(
availableOutputDevices.getDevicesFromTypes(getAudioDeviceOutAllA2dpSet()));
availableOutputDevices.remove(
@@ -372,69 +353,58 @@
// if SCO headset is connected and we are told to use it, play ringtone over
// speaker and BT SCO
- if (!availableOutputDevices.getDevicesFromTypes(getAudioDeviceOutAllScoSet()).isEmpty()) {
- DeviceVector devices2;
- devices2 = availableOutputDevices.getFirstDevicesFromTypes({
+ if (!availableOutputDevices.getDevicesFromTypes(getAudioDeviceOutAllScoSet()).isEmpty()
+ && audio_is_bluetooth_out_sco_device(getPreferredDeviceTypeForLegacyStrategy(
+ availableOutputDevices, STRATEGY_PHONE))) {
+ DeviceVector devices2 = availableOutputDevices.getFirstDevicesFromTypes({
AUDIO_DEVICE_OUT_BLUETOOTH_SCO_CARKIT, AUDIO_DEVICE_OUT_BLUETOOTH_SCO_HEADSET,
AUDIO_DEVICE_OUT_BLUETOOTH_SCO});
+ // devices2 cannot be empty at this point
// Use ONLY Bluetooth SCO output when ringing in vibration mode
if (!((getForceUse(AUDIO_POLICY_FORCE_FOR_SYSTEM) == AUDIO_POLICY_FORCE_SYSTEM_ENFORCED)
- && (strategy == STRATEGY_ENFORCED_AUDIBLE))) {
- if (getForceUse(AUDIO_POLICY_FORCE_FOR_VIBRATE_RINGING)
- == AUDIO_POLICY_FORCE_BT_SCO) {
- if (!devices2.isEmpty()) {
- devices = devices2;
- break;
- }
- }
+ && (strategy == STRATEGY_ENFORCED_AUDIBLE))
+ && (getForceUse(AUDIO_POLICY_FORCE_FOR_VIBRATE_RINGING)
+ == AUDIO_POLICY_FORCE_BT_SCO)) {
+ devices = devices2;
+ break;
}
// Use both Bluetooth SCO and phone default output when ringing in normal mode
- if (audio_is_bluetooth_out_sco_device(getPreferredDeviceTypeForLegacyStrategy(
- availableOutputDevices, STRATEGY_PHONE))) {
- if (strategy == STRATEGY_SONIFICATION) {
- devices.replaceDevicesByType(
- AUDIO_DEVICE_OUT_SPEAKER,
- availableOutputDevices.getDevicesFromType(
- AUDIO_DEVICE_OUT_SPEAKER_SAFE));
- }
- if (!devices2.isEmpty()) {
- devices.add(devices2);
- break;
- }
+ if (strategy == STRATEGY_SONIFICATION) {
+ devices.replaceDevicesByType(
+ AUDIO_DEVICE_OUT_SPEAKER,
+ availableOutputDevices.getDevicesFromType(
+ AUDIO_DEVICE_OUT_SPEAKER_SAFE));
}
+ devices.add(devices2);
+ break;
}
// if LEA headset is connected and we are told to use it, play ringtone over
// speaker and BT LEA
- if (!availableOutputDevices.getDevicesFromTypes(getAudioDeviceOutAllBleSet()).isEmpty()) {
+ if (!availableOutputDevices.getDevicesFromTypes(getAudioDeviceOutAllBleSet()).isEmpty()
+ && audio_is_ble_out_device(getPreferredDeviceTypeForLegacyStrategy(
+ availableOutputDevices, STRATEGY_PHONE))) {
DeviceVector devices2;
devices2 = availableOutputDevices.getFirstDevicesFromTypes({
AUDIO_DEVICE_OUT_BLE_HEADSET, AUDIO_DEVICE_OUT_BLE_SPEAKER});
+ // devices2 cannot be empty at this point
// Use ONLY Bluetooth LEA output when ringing in vibration mode
if (!((getForceUse(AUDIO_POLICY_FORCE_FOR_SYSTEM) == AUDIO_POLICY_FORCE_SYSTEM_ENFORCED)
- && (strategy == STRATEGY_ENFORCED_AUDIBLE))) {
- if (getForceUse(AUDIO_POLICY_FORCE_FOR_VIBRATE_RINGING)
- == AUDIO_POLICY_FORCE_BT_BLE) {
- if (!devices2.isEmpty()) {
- devices = devices2;
- break;
- }
- }
+ && (strategy == STRATEGY_ENFORCED_AUDIBLE))
+ && (getForceUse(AUDIO_POLICY_FORCE_FOR_VIBRATE_RINGING)
+ == AUDIO_POLICY_FORCE_BT_BLE)) {
+ devices = devices2;
+ break;
}
// Use both Bluetooth LEA and phone default output when ringing in normal mode
- if (audio_is_ble_out_device(getPreferredDeviceTypeForLegacyStrategy(
- availableOutputDevices, STRATEGY_PHONE))) {
- if (strategy == STRATEGY_SONIFICATION) {
- devices.replaceDevicesByType(
- AUDIO_DEVICE_OUT_SPEAKER,
- availableOutputDevices.getDevicesFromType(
- AUDIO_DEVICE_OUT_SPEAKER_SAFE));
- }
- if (!devices2.isEmpty()) {
- devices.add(devices2);
- break;
- }
+ if (strategy == STRATEGY_SONIFICATION) {
+ devices.replaceDevicesByType(
+ AUDIO_DEVICE_OUT_SPEAKER,
+ availableOutputDevices.getDevicesFromType(
+ AUDIO_DEVICE_OUT_SPEAKER_SAFE));
}
+ devices.add(devices2);
+ break;
}
// The second device used for sonification is the same as the device used by media strategy
@@ -497,6 +467,18 @@
// Get the last connected device of wired and bluetooth a2dp
devices2 = availableOutputDevices.getFirstDevicesFromTypes(
getLastRemovableMediaDevices(GROUP_NONE, excludedDevices));
+ if (com::android::media::audioserver::use_bt_sco_for_media()) {
+ if (isBtScoActive(availableOutputDevices)
+ && !(devices2.getDevicesFromTypes(
+ getAudioDeviceOutAllA2dpSet()).isEmpty()
+ && devices2.getDevicesFromTypes(
+ getAudioDeviceOutAllBleSet()).isEmpty())) {
+ devices2 = availableOutputDevices.getFirstDevicesFromTypes(
+ { AUDIO_DEVICE_OUT_BLUETOOTH_SCO_CARKIT,
+ AUDIO_DEVICE_OUT_BLUETOOTH_SCO_HEADSET,
+ AUDIO_DEVICE_OUT_BLUETOOTH_SCO});
+ }
+ }
} else {
// Get the last connected device of wired except bluetooth a2dp
devices2 = availableOutputDevices.getFirstDevicesFromTypes(
@@ -504,15 +486,6 @@
}
}
- if (com::android::media::audioserver::use_bt_sco_for_media()) {
- if (devices2.isEmpty() && isBtScoActive(availableOutputDevices, outputs)) {
- devices2 = availableOutputDevices.getFirstDevicesFromTypes(
- { AUDIO_DEVICE_OUT_BLUETOOTH_SCO_CARKIT,
- AUDIO_DEVICE_OUT_BLUETOOTH_SCO_HEADSET,
- AUDIO_DEVICE_OUT_BLUETOOTH_SCO});
- }
- }
-
if ((devices2.isEmpty()) &&
(getForceUse(AUDIO_POLICY_FORCE_FOR_DOCK) == AUDIO_POLICY_FORCE_ANALOG_DOCK)) {
devices2 = availableOutputDevices.getDevicesFromType(
diff --git a/services/audiopolicy/enginedefault/src/Engine.h b/services/audiopolicy/enginedefault/src/Engine.h
index 862b5fd..e9c71dd 100644
--- a/services/audiopolicy/enginedefault/src/Engine.h
+++ b/services/audiopolicy/enginedefault/src/Engine.h
@@ -95,8 +95,7 @@
DeviceVector getDisabledDevicesForInputSource(
const DeviceVector& availableInputDevices, audio_source_t inputSource) const;
- bool isBtScoActive(DeviceVector& availableOutputDevices,
- const SwAudioOutputCollection &outputs) const;
+ bool isBtScoActive(DeviceVector& availableOutputDevices) const;
std::map<product_strategy_t, legacy_strategy> mLegacyStrategyMap;
};
diff --git a/services/audiopolicy/managerdefault/AudioPolicyManager.cpp b/services/audiopolicy/managerdefault/AudioPolicyManager.cpp
index 62cb6c7..000b571 100644
--- a/services/audiopolicy/managerdefault/AudioPolicyManager.cpp
+++ b/services/audiopolicy/managerdefault/AudioPolicyManager.cpp
@@ -40,6 +40,7 @@
#include <vector>
#include <Serializer.h>
+#include <android/media/audio/common/AudioMMapPolicy.h>
#include <android/media/audio/common/AudioPort.h>
#include <com_android_media_audio.h>
#include <android_media_audiopolicy.h>
@@ -64,6 +65,10 @@
using android::media::audio::common::AudioDevice;
using android::media::audio::common::AudioDeviceAddress;
+using android::media::audio::common::AudioDeviceDescription;
+using android::media::audio::common::AudioMMapPolicy;
+using android::media::audio::common::AudioMMapPolicyInfo;
+using android::media::audio::common::AudioMMapPolicyType;
using android::media::audio::common::AudioPortDeviceExt;
using android::media::audio::common::AudioPortExt;
using com::android::media::audioserver::fix_call_audio_patch;
@@ -3559,19 +3564,26 @@
ALOGI("%s: deviceType 0x%X, enabled %d, streamToDriveAbs %d", __func__, deviceType, enabled,
streamToDriveAbs);
- if (!enabled) {
- mAbsoluteVolumeDrivingStreams.erase(deviceType);
- return NO_ERROR;
- }
-
audio_attributes_t attributesToDriveAbs = mEngine->getAttributesForStreamType(streamToDriveAbs);
- if (attributesToDriveAbs == AUDIO_ATTRIBUTES_INITIALIZER) {
- ALOGW("%s: no attributes for stream %s, bailing out", __func__,
- toString(streamToDriveAbs).c_str());
- return BAD_VALUE;
+ if (enabled) {
+ if (attributesToDriveAbs == AUDIO_ATTRIBUTES_INITIALIZER) {
+ ALOGW("%s: no attributes for stream %s, bailing out", __func__,
+ toString(streamToDriveAbs).c_str());
+ return BAD_VALUE;
+ }
+
+ mAbsoluteVolumeDrivingStreams[deviceType] = attributesToDriveAbs;
+ } else {
+ mAbsoluteVolumeDrivingStreams.erase(deviceType);
}
- mAbsoluteVolumeDrivingStreams[deviceType] = attributesToDriveAbs;
+    // re-apply the stream volumes to all outputs to account for the new absolute volume mode
+ for (size_t i = 0; i < mOutputs.size(); i++) {
+ sp<SwAudioOutputDescriptor> desc = mOutputs.valueAt(i);
+ ALOGV("%s: apply stream volumes for portId %d", __func__, desc->getId());
+ applyStreamVolumes(desc, {deviceType}, static_cast<int>(desc->latency()) * 2);
+ }
+
return NO_ERROR;
}
@@ -4721,6 +4733,18 @@
dumpDeviceTypes({it.first}).c_str(),
mEngine->getVolumeGroupForAttributes(it.second));
}
+
+ // dump mmap policy by device
+ dst->appendFormat("\nMmap policy:\n");
+ for (const auto& [policyType, policyByDevice] : mMmapPolicyByDeviceType) {
+ std::stringstream ss;
+ ss << '{';
+ for (const auto& [deviceType, policy] : policyByDevice) {
+ ss << deviceType.toString() << ":" << toString(policy) << " ";
+ }
+ ss << '}';
+ dst->appendFormat(" - %s: %s\n", toString(policyType).c_str(), ss.str().c_str());
+ }
}
status_t AudioPolicyManager::dump(int fd)
@@ -5025,8 +5049,7 @@
nullptr /*updatedFormat*/,
mixerAttributes->config.channel_mask,
nullptr /*updatedChannelMask*/,
- flags,
- false /*exactMatchRequiredForInputFlags*/)
+ flags)
!= IOProfile::NO_MATCH) {
profile = curProfile;
break;
@@ -8236,7 +8259,7 @@
const underlying_input_flag_t oriFlags = flags;
for (;;) {
- sp<IOProfile> firstInexact = nullptr;
+ sp<IOProfile> inexact = nullptr;
uint32_t inexactSamplingRate = 0;
audio_format_t inexactFormat = AUDIO_FORMAT_INVALID;
audio_channel_mask_t inexactChannelMask = AUDIO_CHANNEL_INVALID;
@@ -8247,7 +8270,7 @@
for (const auto& profile : hwModule->getInputProfiles()) {
// profile->log();
//updatedFormat = format;
- if (profile->getCompatibilityScore(
+ auto compatibleScore = profile->getCompatibilityScore(
DeviceVector(device),
samplingRate,
&updatedSamplingRate,
@@ -8256,27 +8279,16 @@
channelMask,
&updatedChannelMask,
// FIXME ugly cast
- (audio_output_flags_t) flags,
- true /*exactMatchRequiredForInputFlags*/) == IOProfile::EXACT_MATCH) {
+ (audio_output_flags_t) flags);
+ if (compatibleScore == IOProfile::EXACT_MATCH) {
samplingRate = updatedSamplingRate;
format = updatedFormat;
channelMask = updatedChannelMask;
return profile;
- }
- if (firstInexact == nullptr
- && profile->getCompatibilityScore(
- DeviceVector(device),
- samplingRate,
- &updatedSamplingRate,
- format,
- &updatedFormat,
- channelMask,
- &updatedChannelMask,
- // FIXME ugly cast
- (audio_output_flags_t) flags,
- false /*exactMatchRequiredForInputFlags*/)
- != IOProfile::NO_MATCH) {
- firstInexact = profile;
+ } else if ((flags != AUDIO_INPUT_FLAG_NONE
+ && compatibleScore == IOProfile::PARTIAL_MATCH_WITH_FLAG)
+ || (inexact == nullptr && compatibleScore != IOProfile::NO_MATCH)) {
+ inexact = profile;
inexactSamplingRate = updatedSamplingRate;
inexactFormat = updatedFormat;
inexactChannelMask = updatedChannelMask;
@@ -8284,11 +8296,11 @@
}
}
- if (firstInexact != nullptr) {
+ if (inexact != nullptr) {
samplingRate = inexactSamplingRate;
format = inexactFormat;
channelMask = inexactChannelMask;
- return firstInexact;
+ return inexact;
} else if (flags & AUDIO_INPUT_FLAG_RAW) {
flags = (audio_input_flags_t) (flags & ~AUDIO_INPUT_FLAG_RAW); // retry
} else if ((flags & mustMatchFlag) == AUDIO_INPUT_FLAG_NONE &&
@@ -9261,8 +9273,7 @@
: hwModule->getOutputProfiles();
for (const auto& profile : ioProfiles) {
if (!profile->areAllDevicesSupported(devices) ||
- !profile->isCompatibleProfileForFlags(
- flags, false /*exactMatchRequiredForInputFlags*/)) {
+ !profile->isCompatibleProfileForFlags(flags)) {
continue;
}
audioProfiles.addAllValidProfiles(profile->asAudioPort()->getAudioProfiles());
@@ -9388,4 +9399,88 @@
}
}
+status_t AudioPolicyManager::getMmapPolicyInfos(AudioMMapPolicyType policyType,
+ std::vector<AudioMMapPolicyInfo> *policyInfos) {
+ if (policyType != AudioMMapPolicyType::DEFAULT &&
+ policyType != AudioMMapPolicyType::EXCLUSIVE) {
+ return BAD_VALUE;
+ }
+ if (mMmapPolicyByDeviceType.count(policyType) == 0) {
+ if (status_t status = updateMmapPolicyInfos(policyType); status != NO_ERROR) {
+ return status;
+ }
+ }
+ *policyInfos = mMmapPolicyInfos[policyType];
+ return NO_ERROR;
+}
+
+status_t AudioPolicyManager::getMmapPolicyForDevice(
+ AudioMMapPolicyType policyType, AudioMMapPolicyInfo *policyInfo) {
+ if (policyType != AudioMMapPolicyType::DEFAULT &&
+ policyType != AudioMMapPolicyType::EXCLUSIVE) {
+ return BAD_VALUE;
+ }
+ if (mMmapPolicyByDeviceType.count(policyType) == 0) {
+ if (status_t status = updateMmapPolicyInfos(policyType); status != NO_ERROR) {
+ return status;
+ }
+ }
+ auto it = mMmapPolicyByDeviceType[policyType].find(policyInfo->device.type);
+ policyInfo->mmapPolicy = it == mMmapPolicyByDeviceType[policyType].end()
+ ? AudioMMapPolicy::NEVER : it->second;
+ return NO_ERROR;
+}
+
+status_t AudioPolicyManager::updateMmapPolicyInfos(AudioMMapPolicyType policyType) {
+ std::vector<AudioMMapPolicyInfo> policyInfos;
+ if (status_t status = mpClientInterface->getMmapPolicyInfos(policyType, &policyInfos);
+ status != NO_ERROR) {
+ ALOGE("%s, failed, error = %d", __func__, status);
+ return status;
+ }
+ std::map<AudioDeviceDescription, AudioMMapPolicy> mmapPolicyByDeviceType;
+ if (policyInfos.size() == 1 && policyInfos[0].device == AudioDevice()) {
+ // When there is only one AudioMMapPolicyInfo instance and the device is a default value,
+ // it indicates the mmap policy is reported via system property. In that case, use the
+ // routing information to fill details for how mmap is supported for a particular device.
+ for (const auto &hwModule: mHwModules) {
+ for (const auto &profile: hwModule->getInputProfiles()) {
+ if ((profile->getFlags() & AUDIO_INPUT_FLAG_MMAP_NOIRQ)
+ != AUDIO_INPUT_FLAG_MMAP_NOIRQ) {
+ continue;
+ }
+ for (const auto &device: profile->getSupportedDevices()) {
+ auto deviceDesc =
+ legacy2aidl_audio_devices_t_AudioDeviceDescription(device->type());
+ if (deviceDesc.ok()) {
+ mmapPolicyByDeviceType.emplace(
+ deviceDesc.value(), policyInfos[0].mmapPolicy);
+ }
+ }
+ }
+ for (const auto &profile: hwModule->getOutputProfiles()) {
+ if ((profile->getFlags() & AUDIO_OUTPUT_FLAG_MMAP_NOIRQ)
+ != AUDIO_OUTPUT_FLAG_MMAP_NOIRQ) {
+ continue;
+ }
+ for (const auto &device: profile->getSupportedDevices()) {
+ auto deviceDesc =
+ legacy2aidl_audio_devices_t_AudioDeviceDescription(device->type());
+ if (deviceDesc.ok()) {
+ mmapPolicyByDeviceType.emplace(
+ deviceDesc.value(), policyInfos[0].mmapPolicy);
+ }
+ }
+ }
+ }
+ } else {
+ for (const auto &info: policyInfos) {
+ mmapPolicyByDeviceType[info.device.type] = info.mmapPolicy;
+ }
+ }
+ mMmapPolicyByDeviceType.emplace(policyType, mmapPolicyByDeviceType);
+ mMmapPolicyInfos.emplace(policyType, policyInfos);
+ return NO_ERROR;
+}
+
} // namespace android
diff --git a/services/audiopolicy/managerdefault/AudioPolicyManager.h b/services/audiopolicy/managerdefault/AudioPolicyManager.h
index 20f7b12..1ca0c32 100644
--- a/services/audiopolicy/managerdefault/AudioPolicyManager.h
+++ b/services/audiopolicy/managerdefault/AudioPolicyManager.h
@@ -440,6 +440,13 @@
void onNewAudioModulesAvailable() override;
+ status_t getMmapPolicyInfos(
+ media::audio::common::AudioMMapPolicyType policyType,
+ std::vector<media::audio::common::AudioMMapPolicyInfo> *policyInfos) override;
+ status_t getMmapPolicyForDevice(
+ media::audio::common::AudioMMapPolicyType policyType,
+ media::audio::common::AudioMMapPolicyInfo *policyInfo) override;
+
status_t initialize();
protected:
@@ -1413,9 +1420,17 @@
int index,
const DeviceTypeSet &deviceTypes);
+ status_t updateMmapPolicyInfos(media::audio::common::AudioMMapPolicyType policyType);
+
// For devices that support absolute volume, contains the audio attributes
// corresponding to the streams that drive the volume changes
std::unordered_map<audio_devices_t, audio_attributes_t> mAbsoluteVolumeDrivingStreams;
+
+ std::map<media::audio::common::AudioMMapPolicyType,
+ const std::vector<media::audio::common::AudioMMapPolicyInfo>> mMmapPolicyInfos;
+ std::map<media::audio::common::AudioMMapPolicyType,
+ const std::map<media::audio::common::AudioDeviceDescription,
+ media::audio::common::AudioMMapPolicy>> mMmapPolicyByDeviceType;
};
};
diff --git a/services/audiopolicy/service/AudioPolicyClientImpl.cpp b/services/audiopolicy/service/AudioPolicyClientImpl.cpp
index 3ed247b..765928e 100644
--- a/services/audiopolicy/service/AudioPolicyClientImpl.cpp
+++ b/services/audiopolicy/service/AudioPolicyClientImpl.cpp
@@ -374,4 +374,14 @@
return af->setTracksInternalMute(tracksInternalMute);
}
+status_t AudioPolicyService::AudioPolicyClient::getMmapPolicyInfos(
+ media::audio::common::AudioMMapPolicyType policyType,
+ std::vector<media::audio::common::AudioMMapPolicyInfo> *policyInfos) {
+ sp<IAudioFlinger> af = AudioSystem::get_audio_flinger();
+ if (af == nullptr) {
+ return PERMISSION_DENIED;
+ }
+ return af->getMmapPolicyInfos(policyType, policyInfos);
+}
+
} // namespace android
diff --git a/services/audiopolicy/service/AudioPolicyInterfaceImpl.cpp b/services/audiopolicy/service/AudioPolicyInterfaceImpl.cpp
index b27c017..f298541 100644
--- a/services/audiopolicy/service/AudioPolicyInterfaceImpl.cpp
+++ b/services/audiopolicy/service/AudioPolicyInterfaceImpl.cpp
@@ -2817,4 +2817,24 @@
return Status::ok();
}
+Status AudioPolicyService::getMmapPolicyInfos(
+ AudioMMapPolicyType policyType, std::vector<AudioMMapPolicyInfo> *_aidl_return) {
+ if (mAudioPolicyManager == nullptr) {
+ return binderStatusFromStatusT(NO_INIT);
+ }
+ audio_utils::lock_guard _l(mMutex);
+ return binderStatusFromStatusT(
+ mAudioPolicyManager->getMmapPolicyInfos(policyType, _aidl_return));
+}
+
+Status AudioPolicyService::getMmapPolicyForDevice(
+ AudioMMapPolicyType policyType, AudioMMapPolicyInfo *policyInfo) {
+ if (mAudioPolicyManager == nullptr) {
+ return binderStatusFromStatusT(NO_INIT);
+ }
+ audio_utils::lock_guard _l(mMutex);
+ return binderStatusFromStatusT(
+ mAudioPolicyManager->getMmapPolicyForDevice(policyType, policyInfo));
+}
+
} // namespace android
diff --git a/services/audiopolicy/service/AudioPolicyService.cpp b/services/audiopolicy/service/AudioPolicyService.cpp
index 290a036..80ee34e 100644
--- a/services/audiopolicy/service/AudioPolicyService.cpp
+++ b/services/audiopolicy/service/AudioPolicyService.cpp
@@ -170,6 +170,8 @@
BINDER_METHOD_ENTRY(clearPreferredMixerAttributes) \
BINDER_METHOD_ENTRY(getRegisteredPolicyMixes) \
BINDER_METHOD_ENTRY(getPermissionController) \
+BINDER_METHOD_ENTRY(getMmapPolicyInfos) \
+BINDER_METHOD_ENTRY(getMmapPolicyForDevice) \
\
// singleton for Binder Method Statistics for IAudioPolicyService
static auto& getIAudioPolicyServiceStatistics() {
diff --git a/services/audiopolicy/service/AudioPolicyService.h b/services/audiopolicy/service/AudioPolicyService.h
index 2ce82c0..44a0e7d 100644
--- a/services/audiopolicy/service/AudioPolicyService.h
+++ b/services/audiopolicy/service/AudioPolicyService.h
@@ -58,6 +58,8 @@
using media::audio::common::AudioDevice;
using media::audio::common::AudioDeviceDescription;
using media::audio::common::AudioFormatDescription;
+using media::audio::common::AudioMMapPolicyInfo;
+using media::audio::common::AudioMMapPolicyType;
using media::audio::common::AudioMode;
using media::audio::common::AudioSource;
using media::audio::common::AudioStreamType;
@@ -328,6 +330,13 @@
// Should only be called by AudioService to push permission data down to audioserver
binder::Status getPermissionController(sp<INativePermissionController>* out) override;
+ binder::Status getMmapPolicyInfos(
+ AudioMMapPolicyType policyType,
+ std::vector<AudioMMapPolicyInfo>* _aidl_return) override;
+ binder::Status getMmapPolicyForDevice(
+ AudioMMapPolicyType policyType,
+ AudioMMapPolicyInfo* policyInfo) override;
+
status_t onTransact(uint32_t code, const Parcel& data, Parcel* reply, uint32_t flags) override;
// -- IAudioPolicyLocal methods
@@ -945,6 +954,10 @@
status_t setTracksInternalMute(
const std::vector<media::TrackInternalMuteInfo>& tracksInternalMute) override;
+ status_t getMmapPolicyInfos(
+ media::audio::common::AudioMMapPolicyType policyType,
+ std::vector<media::audio::common::AudioMMapPolicyInfo> *policyInfos) override;
+
private:
AudioPolicyService *mAudioPolicyService;
};
diff --git a/services/audiopolicy/tests/AudioPolicyTestClient.h b/services/audiopolicy/tests/AudioPolicyTestClient.h
index 5290da2..33dc5fe 100644
--- a/services/audiopolicy/tests/AudioPolicyTestClient.h
+++ b/services/audiopolicy/tests/AudioPolicyTestClient.h
@@ -122,6 +122,12 @@
const std::vector<media::TrackInternalMuteInfo>& /*tracksInternalMute*/) override {
return INVALID_OPERATION;
}
+
+ status_t getMmapPolicyInfos(
+ media::audio::common::AudioMMapPolicyType /*policyType*/,
+ std::vector<media::audio::common::AudioMMapPolicyInfo>* /*policyInfos*/) override {
+ return INVALID_OPERATION;
+ }
};
} // namespace android
diff --git a/services/audiopolicy/tests/audiopolicymanager_tests.cpp b/services/audiopolicy/tests/audiopolicymanager_tests.cpp
index d83a277..e901cfd 100644
--- a/services/audiopolicy/tests/audiopolicymanager_tests.cpp
+++ b/services/audiopolicy/tests/audiopolicymanager_tests.cpp
@@ -1276,6 +1276,33 @@
EXPECT_EQ(expectedChannelMask, requestedChannelMask);
}
+TEST_F(AudioPolicyManagerTestWithConfigurationFile, MatchesMoreInputFlagsWhenPossible) {
+ const audio_format_t expectedFormat = AUDIO_FORMAT_PCM_16_BIT;
+ const uint32_t expectedSampleRate = 48000;
+ const audio_channel_mask_t expectedChannelMask = AUDIO_CHANNEL_IN_STEREO;
+ const std::string expectedIOProfile = "mixport_fast_input";
+
+ auto devices = mManager->getAvailableInputDevices();
+ sp<DeviceDescriptor> mic = nullptr;
+ for (auto device : devices) {
+ if (device->type() == AUDIO_DEVICE_IN_BUILTIN_MIC) {
+ mic = device;
+ break;
+ }
+ }
+ EXPECT_NE(nullptr, mic);
+
+ audio_format_t requestedFormat = AUDIO_FORMAT_PCM_24_BIT_PACKED;
+ uint32_t requestedSampleRate = 48000;
+ audio_channel_mask_t requestedChannelMask = AUDIO_CHANNEL_IN_STEREO;
+ auto profile = mManager->getInputProfile(
+ mic, requestedSampleRate, requestedFormat, requestedChannelMask, AUDIO_INPUT_FLAG_FAST);
+ EXPECT_EQ(expectedIOProfile, profile->getName());
+ EXPECT_EQ(expectedFormat, requestedFormat);
+ EXPECT_EQ(expectedSampleRate, requestedSampleRate);
+ EXPECT_EQ(expectedChannelMask, requestedChannelMask);
+}
+
class AudioPolicyManagerTestDynamicPolicy : public AudioPolicyManagerTestWithConfigurationFile {
protected:
void TearDown() override;
diff --git a/services/audiopolicy/tests/resources/test_audio_policy_configuration.xml b/services/audiopolicy/tests/resources/test_audio_policy_configuration.xml
index 3c64898..9cb3608 100644
--- a/services/audiopolicy/tests/resources/test_audio_policy_configuration.xml
+++ b/services/audiopolicy/tests/resources/test_audio_policy_configuration.xml
@@ -71,6 +71,11 @@
samplingRates="48000"
channelMasks="AUDIO_CHANNEL_IN_5POINT1"/>
</mixPort>
+ <mixPort name="mixport_fast_input" role="sink" flags="AUDIO_INPUT_FLAG_FAST">
+ <profile name="" format="AUDIO_FORMAT_PCM_16_BIT"
+ samplingRates="48000"
+ channelMasks="AUDIO_CHANNEL_IN_STEREO"/>
+ </mixPort>
</mixPorts>
<devicePorts>
<devicePort tagName="Speaker" type="AUDIO_DEVICE_OUT_SPEAKER" role="sink">
@@ -121,6 +126,8 @@
sources="USB Device In" />
<route type="mix" sink="multiple_channels_input"
sources="Built-In Mic" />
+ <route type="mix" sink="mixport_fast_input"
+ sources="Built-In Mic"/>
</routes>
</module>
diff --git a/services/camera/libcameraservice/CameraService.cpp b/services/camera/libcameraservice/CameraService.cpp
index 17ec41e..6da1606 100644
--- a/services/camera/libcameraservice/CameraService.cpp
+++ b/services/camera/libcameraservice/CameraService.cpp
@@ -105,6 +105,20 @@
const auto &deviceIdEntry = cameraInfo.find(ANDROID_INFO_DEVICE_ID);
return deviceIdEntry.data.i32[0];
}
+
+ static android::PermissionChecker::PermissionResult appOpModeToPermissionResult(int32_t res) {
+ switch (res) {
+ case android::AppOpsManager::MODE_ERRORED:
+ return android::PermissionChecker::PERMISSION_HARD_DENIED;
+ case android::AppOpsManager::MODE_IGNORED:
+ return android::PermissionChecker::PERMISSION_SOFT_DENIED;
+ case android::AppOpsManager::MODE_ALLOWED:
+ return android::PermissionChecker::PERMISSION_GRANTED;
+ }
+
+ ALOGE("%s: Unexpected appOpMode %d", __FUNCTION__, res);
+ return android::PermissionChecker::PERMISSION_HARD_DENIED;
+ }
} // namespace anonymous
namespace android {
@@ -1464,14 +1478,14 @@
}
}
-Status CameraService::makeClient(const sp<CameraService>& cameraService,
- const sp<IInterface>& cameraCb, const std::string& packageName, bool systemNativeClient,
- const std::optional<std::string>& featureId, const std::string& cameraId,
- int api1CameraId, int facing, int sensorOrientation, int clientPid, uid_t clientUid,
+Status CameraService::makeClient(
+ const sp<CameraService>& cameraService, const sp<IInterface>& cameraCb,
+ const AttributionSourceState& clientAttribution, int callingPid, bool systemNativeClient,
+ const std::string& cameraId, int api1CameraId, int facing, int sensorOrientation,
int servicePid, std::pair<int, IPCTransport> deviceVersionAndTransport,
apiLevel effectiveApiLevel, bool overrideForPerfClass, int rotationOverride,
bool forceSlowJpegMode, const std::string& originalCameraId,
- /*out*/sp<BasicClient>* client) {
+ /*out*/ sp<BasicClient>* client) {
// For HIDL devices
if (deviceVersionAndTransport.second == IPCTransport::HIDL) {
// Create CameraClient based on device version reported by the HAL.
@@ -1503,19 +1517,19 @@
if (effectiveApiLevel == API_1) { // Camera1 API route
sp<ICameraClient> tmp = static_cast<ICameraClient*>(cameraCb.get());
*client = new Camera2Client(cameraService, tmp, cameraService->mCameraServiceProxyWrapper,
- cameraService->mAttributionAndPermissionUtils, packageName, featureId, cameraId,
- api1CameraId, facing, sensorOrientation,
- clientPid, clientUid, servicePid, overrideForPerfClass, rotationOverride,
- forceSlowJpegMode);
+ cameraService->mAttributionAndPermissionUtils,
+ clientAttribution, callingPid, cameraId, api1CameraId, facing,
+ sensorOrientation, servicePid, overrideForPerfClass,
+ rotationOverride, forceSlowJpegMode);
ALOGI("%s: Camera1 API (legacy), rotationOverride %d, forceSlowJpegMode %d",
__FUNCTION__, rotationOverride, forceSlowJpegMode);
} else { // Camera2 API route
sp<hardware::camera2::ICameraDeviceCallbacks> tmp =
static_cast<hardware::camera2::ICameraDeviceCallbacks*>(cameraCb.get());
- *client = new CameraDeviceClient(cameraService, tmp,
- cameraService->mCameraServiceProxyWrapper,
- cameraService->mAttributionAndPermissionUtils, packageName, systemNativeClient,
- featureId, cameraId, facing, sensorOrientation, clientPid, clientUid, servicePid,
+ *client = new CameraDeviceClient(
+ cameraService, tmp, cameraService->mCameraServiceProxyWrapper,
+ cameraService->mAttributionAndPermissionUtils, clientAttribution, callingPid,
+ systemNativeClient, cameraId, facing, sensorOrientation, servicePid,
overrideForPerfClass, rotationOverride, originalCameraId);
ALOGI("%s: Camera2 API, rotationOverride %d", __FUNCTION__, rotationOverride);
}
@@ -2483,11 +2497,11 @@
// Only use passed in clientPid to check permission. Use calling PID as the client PID
// that's connected to camera service directly.
- if (!(ret = makeClient(this, cameraCb, clientPackageName, systemNativeClient,
- clientAttribution.attributionTag, cameraId, api1CameraId, facing,
- orientation, getCallingPid(), clientAttribution.uid, getpid(),
- deviceVersionAndTransport, effectiveApiLevel, overrideForPerfClass,
- rotationOverride, forceSlowJpegMode, originalCameraId,
+ if (!(ret = makeClient(this, cameraCb, clientAttribution, getCallingPid(),
+ systemNativeClient, cameraId, api1CameraId, facing, orientation,
+ getpid(), deviceVersionAndTransport, effectiveApiLevel,
+ overrideForPerfClass, rotationOverride, forceSlowJpegMode,
+ originalCameraId,
/*out*/ &tmp))
.isOk()) {
return ret;
@@ -2716,7 +2730,7 @@
if (lock == nullptr) {
ALOGE("%s: (PID %d) rejected (too many other clients connecting)."
- , __FUNCTION__, offlineClient->getClientPid());
+ , __FUNCTION__, offlineClient->getClientCallingPid());
return TIMED_OUT;
}
@@ -4021,25 +4035,17 @@
// ----------------------------------------------------------------------------
-CameraService::Client::Client(const sp<CameraService>& cameraService,
- const sp<ICameraClient>& cameraClient,
+CameraService::Client::Client(
+ const sp<CameraService>& cameraService, const sp<ICameraClient>& cameraClient,
std::shared_ptr<AttributionAndPermissionUtils> attributionAndPermissionUtils,
- const std::string& clientPackageName, bool systemNativeClient,
- const std::optional<std::string>& clientFeatureId,
- const std::string& cameraIdStr,
- int api1CameraId, int cameraFacing, int sensorOrientation,
- int clientPid, uid_t clientUid,
- int servicePid, int rotationOverride) :
- CameraService::BasicClient(cameraService,
- IInterface::asBinder(cameraClient),
- attributionAndPermissionUtils,
- clientPackageName, systemNativeClient, clientFeatureId,
- cameraIdStr, cameraFacing, sensorOrientation,
- clientPid, clientUid,
- servicePid, rotationOverride),
- mCameraId(api1CameraId)
-{
- int callingPid = getCallingPid();
+ const AttributionSourceState& clientAttribution, int callingPid, bool systemNativeClient,
+ const std::string& cameraIdStr, int api1CameraId, int cameraFacing, int sensorOrientation,
+ int servicePid, int rotationOverride)
+ : CameraService::BasicClient(cameraService, IInterface::asBinder(cameraClient),
+ attributionAndPermissionUtils, clientAttribution, callingPid,
+ systemNativeClient, cameraIdStr, cameraFacing, sensorOrientation,
+ servicePid, rotationOverride),
+ mCameraId(api1CameraId) {
LOG1("Client::Client E (pid %d, id %d)", callingPid, mCameraId);
mRemoteCallback = cameraClient;
@@ -4061,27 +4067,28 @@
sp<CameraService> CameraService::BasicClient::BasicClient::sCameraService;
-CameraService::BasicClient::BasicClient(const sp<CameraService>& cameraService,
- const sp<IBinder>& remoteCallback,
+CameraService::BasicClient::BasicClient(
+ const sp<CameraService>& cameraService, const sp<IBinder>& remoteCallback,
std::shared_ptr<AttributionAndPermissionUtils> attributionAndPermissionUtils,
- const std::string& clientPackageName, bool nativeClient,
- const std::optional<std::string>& clientFeatureId, const std::string& cameraIdStr,
- int cameraFacing, int sensorOrientation, int clientPid, uid_t clientUid,
- int servicePid, int rotationOverride):
- AttributionAndPermissionUtilsEncapsulator(attributionAndPermissionUtils),
- mDestructionStarted(false),
- mCameraIdStr(cameraIdStr), mCameraFacing(cameraFacing), mOrientation(sensorOrientation),
- mClientPackageName(clientPackageName), mSystemNativeClient(nativeClient),
- mClientFeatureId(clientFeatureId),
- mClientPid(clientPid), mClientUid(clientUid),
- mServicePid(servicePid),
- mDisconnected(false), mUidIsTrusted(false),
- mRotationOverride(rotationOverride),
- mAudioRestriction(hardware::camera2::ICameraDeviceUser::AUDIO_RESTRICTION_NONE),
- mRemoteBinder(remoteCallback),
- mOpsActive(false),
- mOpsStreaming(false)
-{
+ const AttributionSourceState& clientAttribution, int callingPid, bool nativeClient,
+ const std::string& cameraIdStr, int cameraFacing, int sensorOrientation, int servicePid,
+ int rotationOverride)
+ : AttributionAndPermissionUtilsEncapsulator(attributionAndPermissionUtils),
+ mDestructionStarted(false),
+ mCameraIdStr(cameraIdStr),
+ mCameraFacing(cameraFacing),
+ mOrientation(sensorOrientation),
+ mClientAttribution(clientAttribution),
+ mCallingPid(callingPid),
+ mSystemNativeClient(nativeClient),
+ mServicePid(servicePid),
+ mDisconnected(false),
+ mUidIsTrusted(false),
+ mRotationOverride(rotationOverride),
+ mAudioRestriction(hardware::camera2::ICameraDeviceUser::AUDIO_RESTRICTION_NONE),
+ mRemoteBinder(remoteCallback),
+ mCameraOpen(false),
+ mCameraStreaming(false) {
if (sCameraService == nullptr) {
sCameraService = cameraService;
}
@@ -4101,7 +4108,7 @@
mAppOpsManager = std::make_unique<AppOpsManager>();
}
- mUidIsTrusted = isTrustedCallingUid(mClientUid);
+ mUidIsTrusted = isTrustedCallingUid(getClientUid());
}
CameraService::BasicClient::~BasicClient() {
@@ -4117,7 +4124,7 @@
mDisconnected = true;
sCameraService->removeByClient(this);
- sCameraService->logDisconnected(mCameraIdStr, mClientPid, mClientPackageName);
+ sCameraService->logDisconnected(mCameraIdStr, mCallingPid, getPackageName());
sCameraService->mCameraProviderManager->removeRef(CameraProviderManager::DeviceMode::CAMERA,
mCameraIdStr);
@@ -4126,14 +4133,14 @@
remote->unlinkToDeath(sCameraService);
}
- finishCameraOps();
+ notifyCameraClosing();
// Notify flashlight that a camera device is closed.
sCameraService->mFlashlight->deviceClosed(mCameraIdStr);
ALOGI("%s: Disconnected client for camera %s for PID %d", __FUNCTION__, mCameraIdStr.c_str(),
- mClientPid);
+ mCallingPid);
// client shouldn't be able to call into us anymore
- mClientPid = 0;
+ mCallingPid = 0;
const auto& mActivityManager = getActivityManager();
if (mActivityManager) {
@@ -4169,7 +4176,7 @@
}
std::string CameraService::BasicClient::getPackageName() const {
- return mClientPackageName;
+ return mClientAttribution.packageName.value_or(kUnknownPackageName);
}
int CameraService::BasicClient::getCameraFacing() const {
@@ -4180,12 +4187,16 @@
return mOrientation;
}
-int CameraService::BasicClient::getClientPid() const {
- return mClientPid;
+int CameraService::BasicClient::getClientCallingPid() const {
+ return mCallingPid;
}
uid_t CameraService::BasicClient::getClientUid() const {
- return mClientUid;
+ return mClientAttribution.uid;
+}
+
+const std::optional<std::string>& CameraService::BasicClient::getClientAttributionTag() const {
+ return mClientAttribution.attributionTag;
}
bool CameraService::BasicClient::canCastToApiClient(apiLevel level) const {
@@ -4222,21 +4233,22 @@
}
}
-status_t CameraService::BasicClient::handleAppOpMode(int32_t mode) {
- if (mode == AppOpsManager::MODE_ERRORED) {
- ALOGI("Camera %s: Access for \"%s\" has been revoked",
- mCameraIdStr.c_str(), mClientPackageName.c_str());
+status_t CameraService::BasicClient::handlePermissionResult(
+ PermissionChecker::PermissionResult result) {
+ if (result == PermissionChecker::PERMISSION_HARD_DENIED) {
+ ALOGI("Camera %s: Access for \"%s\" has been revoked", mCameraIdStr.c_str(),
+ getPackageName().c_str());
return PERMISSION_DENIED;
- } else if (!mUidIsTrusted && mode == AppOpsManager::MODE_IGNORED) {
- // If the calling Uid is trusted (a native service), the AppOpsManager could
- // return MODE_IGNORED. Do not treat such case as error.
- bool isUidActive = sCameraService->mUidPolicy->isUidActive(mClientUid,
- mClientPackageName);
+ } else if (!mUidIsTrusted && result == PermissionChecker::PERMISSION_SOFT_DENIED) {
+ // If the calling Uid is trusted (a native service), the AppOpsManager/PermissionChecker
+ // could return MODE_IGNORED/PERMISSION_SOFT_DENIED. Do not treat such case as error.
+ bool isUidActive =
+ sCameraService->mUidPolicy->isUidActive(getClientUid(), getPackageName());
bool isCameraPrivacyEnabled;
if (flags::camera_privacy_allowlist()) {
isCameraPrivacyEnabled = sCameraService->isCameraPrivacyEnabled(
- toString16(mClientPackageName), std::string(), mClientPid, mClientUid);
+ toString16(getPackageName()), std::string(), mCallingPid, getClientUid());
} else {
isCameraPrivacyEnabled =
sCameraService->mSensorPrivacyPolicy->isCameraPrivacyEnabled();
@@ -4248,9 +4260,9 @@
// capabilities are unknown.
if (!isUidActive || !isCameraPrivacyEnabled) {
ALOGI("Camera %s: Access for \"%s\" has been restricted."
- "uid active: %s, privacy enabled: %s", mCameraIdStr.c_str(),
- mClientPackageName.c_str(), isUidActive ? "true" : "false",
- isCameraPrivacyEnabled ? "true" : "false");
+ "uid active: %s, privacy enabled: %s",
+ mCameraIdStr.c_str(), getPackageName().c_str(), isUidActive ? "true" : "false",
+ isCameraPrivacyEnabled ? "true" : "false");
// Return the same error as for device policy manager rejection
return -EACCES;
}
@@ -4258,40 +4270,49 @@
return OK;
}
-status_t CameraService::BasicClient::startCameraOps() {
+status_t CameraService::BasicClient::handleAppOpMode(int32_t mode) {
+ return handlePermissionResult(appOpModeToPermissionResult(mode));
+}
+
+status_t CameraService::BasicClient::notifyCameraOpening() {
ATRACE_CALL();
- {
- ALOGV("%s: Start camera ops, package name = %s, client UID = %d",
- __FUNCTION__, mClientPackageName.c_str(), mClientUid);
- }
- if (mAppOpsManager != nullptr) {
- // Notify app ops that the camera is not available
- mOpsCallback = new OpsCallback(this);
+ // Don't start watching until we're streaming when using permissionChecker for data delivery
+ if (!flags::check_full_attribution_source_chain()) {
+ ALOGD("%s: Start camera ops, package name = %s, client UID = %d", __FUNCTION__,
+ getPackageName().c_str(), getClientUid());
- mAppOpsManager->startWatchingMode(AppOpsManager::OP_CAMERA,
- toString16(mClientPackageName),
- AppOpsManager::WATCH_FOREGROUND_CHANGES, mOpsCallback);
+ if (mAppOpsManager != nullptr) {
+ // Notify app ops that the camera is not available
+ mOpsCallback = new OpsCallback(this);
- // Just check for camera acccess here on open - delay startOp until
- // camera frames start streaming in startCameraStreamingOps
- int32_t mode = mAppOpsManager->checkOp(AppOpsManager::OP_CAMERA, mClientUid,
- toString16(mClientPackageName));
- status_t res = handleAppOpMode(mode);
- if (res != OK) {
- return res;
+ mAppOpsManager->startWatchingMode(
+ AppOpsManager::OP_CAMERA, toString16(getPackageName()),
+ AppOpsManager::WATCH_FOREGROUND_CHANGES, mOpsCallback);
+
+ // Just check for camera access here on open - delay startOp until
+ // camera frames start streaming in startCameraStreamingOps
+ int32_t mode = mAppOpsManager->checkOp(AppOpsManager::OP_CAMERA, getClientUid(),
+ toString16(getPackageName()));
+ status_t res = handleAppOpMode(mode);
+ if (res != OK) {
+ return res;
+ }
}
+ } else {
+ // TODO: Remove when removing the check_full_attribution_source_chain flag
+ ALOGD("%s: Bypassing checkOp for uid %d", __FUNCTION__, getClientUid());
}
- mOpsActive = true;
+ mCameraOpen = true;
// Transition device availability listeners from PRESENT -> NOT_AVAILABLE
sCameraService->updateStatus(StatusInternal::NOT_AVAILABLE, mCameraIdStr);
- sCameraService->mUidPolicy->registerMonitorUid(mClientUid, /*openCamera*/true);
+ sCameraService->mUidPolicy->registerMonitorUid(getClientUid(), /*openCamera*/ true);
// Notify listeners of camera open/close status
- sCameraService->updateOpenCloseStatus(mCameraIdStr, true/*open*/, mClientPackageName);
+ sCameraService->updateOpenCloseStatus(mCameraIdStr, true /*open*/, getPackageName());
return OK;
}
@@ -4299,30 +4320,52 @@
status_t CameraService::BasicClient::startCameraStreamingOps() {
ATRACE_CALL();
- if (!mOpsActive) {
+ if (!mCameraOpen) {
ALOGE("%s: Calling streaming start when not yet active", __FUNCTION__);
return INVALID_OPERATION;
}
- if (mOpsStreaming) {
+
+ if (mCameraStreaming) {
ALOGV("%s: Streaming already active!", __FUNCTION__);
return OK;
}
- ALOGV("%s: Start camera streaming ops, package name = %s, client UID = %d",
- __FUNCTION__, mClientPackageName.c_str(), mClientUid);
+ ALOGV("%s: Start camera streaming ops, package name = %s, client UID = %d", __FUNCTION__,
+ getPackageName().c_str(), getClientUid());
if (mAppOpsManager != nullptr) {
- int32_t mode = mAppOpsManager->startOpNoThrow(AppOpsManager::OP_CAMERA, mClientUid,
- toString16(mClientPackageName), /*startIfModeDefault*/ false,
- toString16(mClientFeatureId),
- toString16("start camera ") + toString16(mCameraIdStr));
- status_t res = handleAppOpMode(mode);
- if (res != OK) {
- return res;
+ if (flags::check_full_attribution_source_chain()) {
+ ALOGD("%s: Start data delivery for uid %d", __FUNCTION__, getClientUid());
+
+ const PermissionChecker::PermissionResult result =
+ checkPermissionsForCameraForStartDataDelivery(mCameraIdStr, mClientAttribution);
+ status_t res = handlePermissionResult(result);
+ if (res != OK) {
+ return res;
+ }
+
+ mOpsCallback = new OpsCallback(this);
+ std::for_each(AttrSourceItr{mClientAttribution}, AttrSourceItr::end(),
+ [&](const auto& attr) {
+ mAppOpsManager->startWatchingMode(
+ AppOpsManager::OP_CAMERA,
+ toString16(attr.packageName.value_or("")),
+ AppOpsManager::WATCH_FOREGROUND_CHANGES, mOpsCallback);
+ });
+ } else {
+ ALOGD("%s: startOp for uid %d", __FUNCTION__, getClientUid());
+ int32_t mode = mAppOpsManager->startOpNoThrow(
+ AppOpsManager::OP_CAMERA, getClientUid(), toString16(getPackageName()),
+ /*startIfModeDefault*/ false, toString16(getClientAttributionTag()),
+ toString16("start camera ") + toString16(mCameraIdStr));
+ status_t res = handleAppOpMode(mode);
+ if (res != OK) {
+ return res;
+ }
}
}
- mOpsStreaming = true;
+ mCameraStreaming = true;
return OK;
}
@@ -4330,14 +4373,20 @@
status_t CameraService::BasicClient::noteAppOp() {
ATRACE_CALL();
- ALOGV("%s: Start camera noteAppOp, package name = %s, client UID = %d",
- __FUNCTION__, mClientPackageName.c_str(), mClientUid);
+ ALOGV("%s: Start camera noteAppOp, package name = %s, client UID = %d", __FUNCTION__,
+ getPackageName().c_str(), getClientUid());
// noteAppOp is only used for when camera mute is not supported, in order
// to trigger the sensor privacy "Unblock" dialog
- if (mAppOpsManager != nullptr) {
- int32_t mode = mAppOpsManager->noteOp(AppOpsManager::OP_CAMERA, mClientUid,
- toString16(mClientPackageName), toString16(mClientFeatureId),
+ if (flags::check_full_attribution_source_chain()) {
+ // Ignore the result, since we're only triggering the dialog
+ ALOGD("%s: Check data delivery permissions for uid %d", __FUNCTION__, getClientUid());
+ hasPermissionsForCameraForDataDelivery(std::string(), mClientAttribution);
+ } else if (mAppOpsManager != nullptr) {
+ ALOGD("%s: noteOp for uid %d", __FUNCTION__, getClientUid());
+ int32_t mode = mAppOpsManager->noteOp(
+ AppOpsManager::OP_CAMERA, getClientUid(), toString16(getPackageName()),
+ toString16(getClientAttributionTag()),
toString16("start camera ") + toString16(mCameraIdStr));
status_t res = handleAppOpMode(mode);
if (res != OK) {
@@ -4351,35 +4400,48 @@
status_t CameraService::BasicClient::finishCameraStreamingOps() {
ATRACE_CALL();
- if (!mOpsActive) {
+ if (!mCameraOpen) {
ALOGE("%s: Calling streaming start when not yet active", __FUNCTION__);
return INVALID_OPERATION;
}
- if (!mOpsStreaming) {
+ if (!mCameraStreaming) {
ALOGV("%s: Streaming not active!", __FUNCTION__);
return OK;
}
if (mAppOpsManager != nullptr) {
- mAppOpsManager->finishOp(AppOpsManager::OP_CAMERA, mClientUid,
- toString16(mClientPackageName), toString16(mClientFeatureId));
- mOpsStreaming = false;
+ if (flags::check_full_attribution_source_chain()) {
+ ALOGD("%s: finishDataDelivery for uid %d", __FUNCTION__, getClientUid());
+ finishDataDelivery(mClientAttribution);
+
+            // Stop watching app op changes once streaming has stopped
+ if (mOpsCallback != nullptr) {
+ mAppOpsManager->stopWatchingMode(mOpsCallback);
+ mOpsCallback.clear();
+ }
+ } else {
+ ALOGD("%s: finishOp for uid %d", __FUNCTION__, getClientUid());
+ mAppOpsManager->finishOp(AppOpsManager::OP_CAMERA, getClientUid(),
+ toString16(getPackageName()),
+ toString16(getClientAttributionTag()));
+ }
+ mCameraStreaming = false;
}
return OK;
}
-status_t CameraService::BasicClient::finishCameraOps() {
+status_t CameraService::BasicClient::notifyCameraClosing() {
ATRACE_CALL();
- if (mOpsStreaming) {
+ if (mCameraStreaming) {
// Make sure we've notified everyone about camera stopping
finishCameraStreamingOps();
}
- // Check if startCameraOps succeeded, and if so, finish the camera op
- if (mOpsActive) {
- mOpsActive = false;
+ // Check if notifyCameraOpening succeeded, and if so, finish the camera op if necessary
+ if (mCameraOpen) {
+ mCameraOpen = false;
// This function is called when a client disconnects. This should
// release the camera, but actually only if it was in a proper
@@ -4391,16 +4453,20 @@
sCameraService->updateStatus(StatusInternal::PRESENT,
mCameraIdStr, rejected);
}
- // Always stop watching, even if no camera op is active
- if (mOpsCallback != nullptr && mAppOpsManager != nullptr) {
- mAppOpsManager->stopWatchingMode(mOpsCallback);
- }
- mOpsCallback.clear();
- sCameraService->mUidPolicy->unregisterMonitorUid(mClientUid, /*closeCamera*/true);
+ // When using the data delivery permission checks, the open state does not involve AppOps
+ if (!flags::check_full_attribution_source_chain()) {
+ // Always stop watching, even if no camera op is active
+ if (mOpsCallback != nullptr && mAppOpsManager != nullptr) {
+ mAppOpsManager->stopWatchingMode(mOpsCallback);
+ }
+ mOpsCallback.clear();
+ }
+
+ sCameraService->mUidPolicy->unregisterMonitorUid(getClientUid(), /*closeCamera*/ true);
// Notify listeners of camera open/close status
- sCameraService->updateOpenCloseStatus(mCameraIdStr, false/*open*/, mClientPackageName);
+ sCameraService->updateOpenCloseStatus(mCameraIdStr, false /*open*/, getPackageName());
return OK;
}
@@ -4416,40 +4482,75 @@
return;
}
- int32_t res;
- res = mAppOpsManager->checkOp(AppOpsManager::OP_CAMERA,
- mClientUid, toString16(mClientPackageName));
- ALOGV("checkOp returns: %d, %s ", res,
- res == AppOpsManager::MODE_ALLOWED ? "ALLOWED" :
- res == AppOpsManager::MODE_IGNORED ? "IGNORED" :
- res == AppOpsManager::MODE_ERRORED ? "ERRORED" :
- "UNKNOWN");
+ PermissionChecker::PermissionResult res;
+ if (flags::check_full_attribution_source_chain()) {
+ int32_t appOpMode = AppOpsManager::MODE_ALLOWED;
+ std::for_each(AttrSourceItr{mClientAttribution}, AttrSourceItr::end(),
+ [&](const auto& attr) {
+ appOpMode = std::max(appOpMode, mAppOpsManager->checkOp(
+ AppOpsManager::OP_CAMERA, attr.uid,
+ toString16(attr.packageName.value_or(""))));
+ });
+ ALOGV("checkOp returns: %d, %s ", res,
+ appOpMode == AppOpsManager::MODE_ALLOWED ? "ALLOWED"
+ : appOpMode == AppOpsManager::MODE_IGNORED ? "IGNORED"
+ : appOpMode == AppOpsManager::MODE_ERRORED ? "ERRORED"
+ : "UNKNOWN");
+ res = appOpModeToPermissionResult(appOpMode);
+ } else {
+ int32_t appOpMode = mAppOpsManager->checkOp(AppOpsManager::OP_CAMERA, getClientUid(),
+ toString16(getPackageName()));
+ ALOGV("checkOp returns: %d, %s ", res,
+ appOpMode == AppOpsManager::MODE_ALLOWED ? "ALLOWED"
+ : appOpMode == AppOpsManager::MODE_IGNORED ? "IGNORED"
+ : appOpMode == AppOpsManager::MODE_ERRORED ? "ERRORED"
+ : "UNKNOWN");
+ res = appOpModeToPermissionResult(appOpMode);
+ }
- if (res == AppOpsManager::MODE_ERRORED) {
+ if (res == PermissionChecker::PERMISSION_HARD_DENIED) {
ALOGI("Camera %s: Access for \"%s\" revoked", mCameraIdStr.c_str(),
- mClientPackageName.c_str());
+ getPackageName().c_str());
block();
- } else if (res == AppOpsManager::MODE_IGNORED) {
- bool isUidActive = sCameraService->mUidPolicy->isUidActive(mClientUid, mClientPackageName);
+ } else if (res == PermissionChecker::PERMISSION_SOFT_DENIED) {
+ bool isUidActive =
+ sCameraService->mUidPolicy->isUidActive(getClientUid(), getPackageName());
// Uid may be active, but not visible to the user (e.g. PROCESS_STATE_FOREGROUND_SERVICE).
// If not visible, but still active, then we want to block instead of muting the camera.
- int32_t procState = sCameraService->mUidPolicy->getProcState(mClientUid);
+ int32_t procState = ActivityManager::PROCESS_STATE_NONEXISTENT;
+ if (flags::check_full_attribution_source_chain()) {
+ // Use the proc state of the last uid in the chain (ultimately receiving the data)
+ // when determining whether to mute or block
+ int32_t uid = -1;
+ std::for_each(AttrSourceItr{mClientAttribution}, AttrSourceItr::end(),
+ [&](const auto& attr) {
+ uid = static_cast<uid_t>(attr.uid);
+ });
+ const auto& activityManager = getActivityManager();
+ if (activityManager != nullptr) {
+ procState = activityManager->getUidProcessState(uid, toString16(kServiceName));
+ } else {
+ ALOGD("%s: getActivityManager returned nullptr.", __FUNCTION__);
+ }
+ } else {
+ procState = sCameraService->mUidPolicy->getProcState(getClientUid());
+ }
bool isUidVisible = (procState <= ActivityManager::PROCESS_STATE_BOUND_TOP);
bool isCameraPrivacyEnabled;
if (flags::camera_privacy_allowlist()) {
isCameraPrivacyEnabled = sCameraService->isCameraPrivacyEnabled(
- toString16(mClientPackageName),std::string(),mClientPid,mClientUid);
+ toString16(getPackageName()), std::string(), mCallingPid, getClientUid());
} else {
isCameraPrivacyEnabled =
sCameraService->mSensorPrivacyPolicy->isCameraPrivacyEnabled();
}
ALOGI("Camera %s: Access for \"%s\" has been restricted, isUidTrusted %d, isUidActive %d"
- " isUidVisible %d, isCameraPrivacyEnabled %d", mCameraIdStr.c_str(),
- mClientPackageName.c_str(), mUidIsTrusted, isUidActive, isUidVisible,
- isCameraPrivacyEnabled);
+ " isUidVisible %d, isCameraPrivacyEnabled %d procState %d",
+ mCameraIdStr.c_str(), getPackageName().c_str(), mUidIsTrusted, isUidActive,
+ isUidVisible, isCameraPrivacyEnabled, procState);
// If the calling Uid is trusted (a native service), or the client Uid is active / visible
// (WAR for b/175320666) the AppOpsManager could return MODE_IGNORED. Do not treat such
// cases as error.
@@ -4460,7 +4561,7 @@
block();
}
}
- } else if (res == AppOpsManager::MODE_ALLOWED) {
+ } else if (res == PermissionChecker::PERMISSION_GRANTED) {
setCameraMute(sCameraService->mOverrideCameraMuteMode);
}
}
@@ -4470,7 +4571,7 @@
// Reset the client PID to allow server-initiated disconnect,
// and to prevent further calls by client.
- mClientPid = getCallingPid();
+ mCallingPid = getCallingPid();
CaptureResultExtras resultExtras; // a dummy result (invalid)
notifyError(hardware::camera2::ICameraDeviceCallbacks::ERROR_CAMERA_DISABLED, resultExtras);
disconnect();
diff --git a/services/camera/libcameraservice/CameraService.h b/services/camera/libcameraservice/CameraService.h
index 07c9d00..4c93ae1 100644
--- a/services/camera/libcameraservice/CameraService.h
+++ b/services/camera/libcameraservice/CameraService.h
@@ -24,18 +24,19 @@
#include <android/hardware/camera2/BnCameraInjectionSession.h>
#include <android/hardware/camera2/ICameraInjectionCallback.h>
-#include <cutils/multiuser.h>
-#include <utils/Vector.h>
-#include <utils/KeyedVector.h>
#include <binder/ActivityManager.h>
#include <binder/AppOpsManager.h>
#include <binder/BinderService.h>
-#include <binder/IServiceManager.h>
#include <binder/IActivityManager.h>
#include <binder/IAppOpsCallback.h>
+#include <binder/IServiceManager.h>
#include <binder/IUidObserver.h>
+#include <cutils/multiuser.h>
+#include <gui/Flags.h>
#include <hardware/camera.h>
#include <sensorprivacy/SensorPrivacyManager.h>
+#include <utils/KeyedVector.h>
+#include <utils/Vector.h>
#include <android/hardware/camera/common/1.0/types.h>
@@ -386,8 +387,11 @@
// Get the UID of the application client using this
virtual uid_t getClientUid() const;
- // Get the PID of the application client using this
- virtual int getClientPid() const;
+ // Get the calling PID of the application client using this
+ virtual int getClientCallingPid() const;
+
+ // Get the attribution tag (previously featureId) of the application client using this
+ virtual const std::optional<std::string>& getClientAttributionTag() const;
// Check what API level is used for this client. This is used to determine which
// superclass this can be cast to.
@@ -450,38 +454,28 @@
const hardware::camera2::impl::CameraMetadataNative& sessionParams) = 0;
protected:
- BasicClient(const sp<CameraService>& cameraService,
- const sp<IBinder>& remoteCallback,
- std::shared_ptr<AttributionAndPermissionUtils> attributionAndPermissionUtils,
- const std::string& clientPackageName,
- bool nativeClient,
- const std::optional<std::string>& clientFeatureId,
- const std::string& cameraIdStr,
- int cameraFacing,
- int sensorOrientation,
- int clientPid,
- uid_t clientUid,
- int servicePid,
- int rotationOverride);
+ BasicClient(const sp<CameraService>& cameraService, const sp<IBinder>& remoteCallback,
+ std::shared_ptr<AttributionAndPermissionUtils> attributionAndPermissionUtils,
+ const AttributionSourceState& clientAttribution, int callingPid,
+ bool nativeClient, const std::string& cameraIdStr, int cameraFacing,
+ int sensorOrientation, int servicePid, int rotationOverride);
virtual ~BasicClient();
- // the instance is in the middle of destruction. When this is set,
+ // The instance is in the middle of destruction. When this is set,
// the instance should not be accessed from callback.
// CameraService's mClientLock should be acquired to access this.
// - subclasses should set this to true in their destructors.
- bool mDestructionStarted;
+ bool mDestructionStarted;
- // these are initialized in the constructor.
+ // These are initialized in the constructor.
static sp<CameraService> sCameraService;
const std::string mCameraIdStr;
const int mCameraFacing;
const int mOrientation;
- std::string mClientPackageName;
+ AttributionSourceState mClientAttribution;
+ int mCallingPid;
bool mSystemNativeClient;
- std::optional<std::string> mClientFeatureId;
- pid_t mClientPid;
- const uid_t mClientUid;
const pid_t mServicePid;
bool mDisconnected;
bool mUidIsTrusted;
@@ -491,20 +485,22 @@
int32_t mAudioRestriction;
// - The app-side Binder interface to receive callbacks from us
- sp<IBinder> mRemoteBinder; // immutable after constructor
+ sp<IBinder> mRemoteBinder; // immutable after constructor
// Permissions management methods for camera lifecycle
- // Notify rest of system/apps about camera opening, and check appops
- virtual status_t startCameraOps();
+ // Notify rest of system/apps about camera opening, and (legacy) check appops
+ virtual status_t notifyCameraOpening();
// Notify rest of system/apps about camera starting to stream data, and confirm appops
virtual status_t startCameraStreamingOps();
// Notify rest of system/apps about camera stopping streaming data
virtual status_t finishCameraStreamingOps();
// Notify rest of system/apps about camera closing
- virtual status_t finishCameraOps();
- // Handle errors for start/checkOps
+ virtual status_t notifyCameraClosing();
+ // Handle errors for start/checkOps, startDataDelivery
virtual status_t handleAppOpMode(int32_t mode);
+ virtual status_t handlePermissionResult(
+ PermissionChecker::PermissionResult result);
// Just notify camera appops to trigger unblocking dialog if sensor
// privacy is enabled and camera mute is not supported
virtual status_t noteAppOp();
@@ -522,12 +518,10 @@
}; // class OpsCallback
sp<OpsCallback> mOpsCallback;
- // Track whether checkOps was called successfully, to avoid
- // finishing what we didn't start, on camera open.
- bool mOpsActive;
- // Track whether startOps was called successfully on start of
- // camera streaming.
- bool mOpsStreaming;
+ // Track if the camera is currently active.
+ bool mCameraOpen;
+ // Track if the camera is currently streaming.
+ bool mCameraStreaming;
// IAppOpsCallback interface, indirected through opListener
virtual void opChanged(int32_t op, const String16& packageName);
@@ -543,10 +537,9 @@
virtual status_t connect(const sp<hardware::ICameraClient>& client) = 0;
virtual status_t lock() = 0;
virtual status_t unlock() = 0;
- virtual status_t setPreviewTarget(const sp<IGraphicBufferProducer>& bufferProducer)=0;
+ virtual status_t setPreviewTarget(const sp<SurfaceType>& target) = 0;
virtual void setPreviewCallbackFlag(int flag) = 0;
- virtual status_t setPreviewCallbackTarget(
- const sp<IGraphicBufferProducer>& callbackProducer) = 0;
+ virtual status_t setPreviewCallbackTarget(const sp<SurfaceType>& target) = 0;
virtual status_t startPreview() = 0;
virtual void stopPreview() = 0;
virtual bool previewEnabled() = 0;
@@ -561,23 +554,15 @@
virtual status_t setParameters(const String8& params) = 0;
virtual String8 getParameters() const = 0;
virtual status_t sendCommand(int32_t cmd, int32_t arg1, int32_t arg2) = 0;
- virtual status_t setVideoTarget(const sp<IGraphicBufferProducer>& bufferProducer) = 0;
+ virtual status_t setVideoTarget(const sp<SurfaceType>& target) = 0;
// Interface used by CameraService
Client(const sp<CameraService>& cameraService,
- const sp<hardware::ICameraClient>& cameraClient,
- std::shared_ptr<AttributionAndPermissionUtils> attributionAndPermissionUtils,
- const std::string& clientPackageName,
- bool systemNativeClient,
- const std::optional<std::string>& clientFeatureId,
- const std::string& cameraIdStr,
- int api1CameraId,
- int cameraFacing,
- int sensorOrientation,
- int clientPid,
- uid_t clientUid,
- int servicePid,
- int rotationOverride);
+ const sp<hardware::ICameraClient>& cameraClient,
+ std::shared_ptr<AttributionAndPermissionUtils> attributionAndPermissionUtils,
+ const AttributionSourceState& clientAttribution, int callingPid,
+ bool systemNativeClient, const std::string& cameraIdStr, int api1CameraId,
+ int cameraFacing, int sensorOrientation, int servicePid, int rotationOverride);
~Client();
// return our camera client
@@ -1467,14 +1452,16 @@
static std::string getFormattedCurrentTime();
static binder::Status makeClient(const sp<CameraService>& cameraService,
- const sp<IInterface>& cameraCb, const std::string& packageName,
- bool systemNativeClient, const std::optional<std::string>& featureId,
- const std::string& cameraId, int api1CameraId, int facing, int sensorOrientation,
- int clientPid, uid_t clientUid, int servicePid,
- std::pair<int, IPCTransport> deviceVersionAndIPCTransport, apiLevel effectiveApiLevel,
- bool overrideForPerfClass, int rotationOverride, bool forceSlowJpegMode,
- const std::string& originalCameraId,
- /*out*/ sp<BasicClient>* client);
+ const sp<IInterface>& cameraCb,
+ const AttributionSourceState& clientAttribution,
+ int callingPid, bool systemNativeClient,
+ const std::string& cameraId, int api1CameraId, int facing,
+ int sensorOrientation, int servicePid,
+ std::pair<int, IPCTransport> deviceVersionAndIPCTransport,
+ apiLevel effectiveApiLevel, bool overrideForPerfClass,
+ int rotationOverride, bool forceSlowJpegMode,
+ const std::string& originalCameraId,
+ /*out*/ sp<BasicClient>* client);
static std::string toString(std::set<userid_t> intSet);
static int32_t mapToInterface(TorchModeStatus status);
diff --git a/services/camera/libcameraservice/aidl/VndkVersionMetadataTags.h b/services/camera/libcameraservice/aidl/VndkVersionMetadataTags.h
index b07d8d5..158ee69 100644
--- a/services/camera/libcameraservice/aidl/VndkVersionMetadataTags.h
+++ b/services/camera/libcameraservice/aidl/VndkVersionMetadataTags.h
@@ -93,6 +93,10 @@
ANDROID_FLASH_TORCH_STRENGTH_MAX_LEVEL,
ANDROID_INFO_SESSION_CONFIGURATION_QUERY_VERSION,
} },
+ {36, {
+ ANDROID_COLOR_CORRECTION_AVAILABLE_MODES,
+ ANDROID_COLOR_CORRECTION_COLOR_TEMPERATURE_RANGE,
+ } },
};
/**
@@ -125,4 +129,8 @@
ANDROID_STATISTICS_LENS_INTRINSIC_SAMPLES,
ANDROID_STATISTICS_LENS_INTRINSIC_TIMESTAMPS,
} },
+ {36, {
+ ANDROID_COLOR_CORRECTION_COLOR_TEMPERATURE,
+ ANDROID_COLOR_CORRECTION_COLOR_TINT,
+ } },
};
diff --git a/services/camera/libcameraservice/api1/Camera2Client.cpp b/services/camera/libcameraservice/api1/Camera2Client.cpp
index 861414f..45b7c3b 100644
--- a/services/camera/libcameraservice/api1/Camera2Client.cpp
+++ b/services/camera/libcameraservice/api1/Camera2Client.cpp
@@ -30,6 +30,7 @@
#include <com_android_internal_camera_flags.h>
#include <cutils/properties.h>
#include <gui/Surface.h>
+#include <gui/view/Surface.h>
#include "api1/Camera2Client.h"
@@ -55,32 +56,23 @@
// Interface used by CameraService
-Camera2Client::Camera2Client(const sp<CameraService>& cameraService,
- const sp<hardware::ICameraClient>& cameraClient,
+Camera2Client::Camera2Client(
+ const sp<CameraService>& cameraService, const sp<hardware::ICameraClient>& cameraClient,
std::shared_ptr<CameraServiceProxyWrapper> cameraServiceProxyWrapper,
std::shared_ptr<AttributionAndPermissionUtils> attributionAndPermissionUtils,
- const std::string& clientPackageName,
- const std::optional<std::string>& clientFeatureId,
- const std::string& cameraDeviceId,
- int api1CameraId,
- int cameraFacing,
- int sensorOrientation,
- int clientPid,
- uid_t clientUid,
- int servicePid,
- bool overrideForPerfClass,
- int rotationOverride,
- bool forceSlowJpegMode):
- Camera2ClientBase(cameraService, cameraClient, cameraServiceProxyWrapper,
- attributionAndPermissionUtils, clientPackageName,
- false/*systemNativeClient - since no ndk for api1*/, clientFeatureId,
- cameraDeviceId, api1CameraId, cameraFacing, sensorOrientation, clientPid,
- clientUid, servicePid, overrideForPerfClass, rotationOverride,
- /*legacyClient*/ true),
- mParameters(api1CameraId, cameraFacing),
- mLatestRequestIds(kMaxRequestIds),
- mLatestFailedRequestIds(kMaxRequestIds)
-{
+ const AttributionSourceState& clientAttribution, int callingPid,
+ const std::string& cameraDeviceId, int api1CameraId, int cameraFacing,
+ int sensorOrientation, int servicePid, bool overrideForPerfClass, int rotationOverride,
+ bool forceSlowJpegMode)
+ : Camera2ClientBase(cameraService, cameraClient, cameraServiceProxyWrapper,
+ attributionAndPermissionUtils, clientAttribution, callingPid,
+ false /*systemNativeClient - since no ndk for api1*/, cameraDeviceId,
+ api1CameraId, cameraFacing, sensorOrientation, servicePid,
+ overrideForPerfClass, rotationOverride,
+ /*legacyClient*/ true),
+ mParameters(api1CameraId, cameraFacing),
+ mLatestRequestIds(kMaxRequestIds),
+ mLatestFailedRequestIds(kMaxRequestIds) {
ATRACE_CALL();
mRotateAndCropMode = ANDROID_SCALER_ROTATE_AND_CROP_NONE;
@@ -218,7 +210,7 @@
result << fmt::sprintf("Client2[%d] (%p) PID: %d, dump:\n", mCameraId,
(getRemoteCallback() != NULL ?
(void *) (IInterface::asBinder(getRemoteCallback()).get()) : NULL),
- mClientPid);
+ mCallingPid);
result << " State: ";
#define CASE_APPEND_ENUM(x) case x: result << #x "\n"; break;
@@ -449,7 +441,7 @@
binder::Status res = binder::Status::ok();
// Allow both client and the cameraserver to disconnect at all times
int callingPid = getCallingPid();
- if (callingPid != mClientPid && callingPid != mServicePid) return res;
+ if (callingPid != mCallingPid && callingPid != mServicePid) return res;
if (mDevice == 0) return res;
@@ -526,14 +518,14 @@
ALOGV("%s: E", __FUNCTION__);
Mutex::Autolock icl(mBinderSerializationLock);
- if (mClientPid != 0 && getCallingPid() != mClientPid) {
+ if (mCallingPid != 0 && getCallingPid() != mCallingPid) {
ALOGE("%s: Camera %d: Connection attempt from pid %d; "
"current locked to pid %d", __FUNCTION__,
- mCameraId, getCallingPid(), mClientPid);
+ mCameraId, getCallingPid(), mCallingPid);
return BAD_VALUE;
}
- mClientPid = getCallingPid();
+ mCallingPid = getCallingPid();
mRemoteCallback = client;
mSharedCameraCallbacks = client;
@@ -546,16 +538,16 @@
ALOGV("%s: E", __FUNCTION__);
Mutex::Autolock icl(mBinderSerializationLock);
ALOGV("%s: Camera %d: Lock call from pid %d; current client pid %d",
- __FUNCTION__, mCameraId, getCallingPid(), mClientPid);
+ __FUNCTION__, mCameraId, getCallingPid(), mCallingPid);
- if (mClientPid == 0) {
- mClientPid = getCallingPid();
+ if (mCallingPid == 0) {
+ mCallingPid = getCallingPid();
return OK;
}
- if (mClientPid != getCallingPid()) {
+ if (mCallingPid != getCallingPid()) {
ALOGE("%s: Camera %d: Lock call from pid %d; currently locked to pid %d",
- __FUNCTION__, mCameraId, getCallingPid(), mClientPid);
+ __FUNCTION__, mCameraId, getCallingPid(), mCallingPid);
return EBUSY;
}
@@ -567,46 +559,76 @@
ALOGV("%s: E", __FUNCTION__);
Mutex::Autolock icl(mBinderSerializationLock);
ALOGV("%s: Camera %d: Unlock call from pid %d; current client pid %d",
- __FUNCTION__, mCameraId, getCallingPid(), mClientPid);
+ __FUNCTION__, mCameraId, getCallingPid(), mCallingPid);
- if (mClientPid == getCallingPid()) {
+ if (mCallingPid == getCallingPid()) {
SharedParameters::Lock l(mParameters);
if (l.mParameters.state == Parameters::RECORD ||
l.mParameters.state == Parameters::VIDEO_SNAPSHOT) {
ALOGD("Not allowed to unlock camera during recording.");
return INVALID_OPERATION;
}
- mClientPid = 0;
+ mCallingPid = 0;
mRemoteCallback.clear();
mSharedCameraCallbacks.clear();
return OK;
}
ALOGE("%s: Camera %d: Unlock call from pid %d; currently locked to pid %d",
- __FUNCTION__, mCameraId, getCallingPid(), mClientPid);
+ __FUNCTION__, mCameraId, getCallingPid(), mCallingPid);
return EBUSY;
}
-status_t Camera2Client::setPreviewTarget(
- const sp<IGraphicBufferProducer>& bufferProducer) {
+status_t Camera2Client::setPreviewTarget(const sp<SurfaceType>& target) {
ATRACE_CALL();
ALOGV("%s: E", __FUNCTION__);
Mutex::Autolock icl(mBinderSerializationLock);
status_t res;
- if ( (res = checkPid(__FUNCTION__) ) != OK) return res;
+ if ((res = checkPid(__FUNCTION__)) != OK) return res;
- sp<IBinder> binder;
- sp<Surface> window;
- if (bufferProducer != 0) {
- binder = IInterface::asBinder(bufferProducer);
+#if WB_LIBCAMERASERVICE_WITH_DEPENDENCIES
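+ // With WB_LIBCAMERASERVICE_WITH_DEPENDENCIES the target is already a Surface; wrap its
+ // buffer producer and convert it to a view::Surface so it can be tracked by unique ID.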
+ sp<Surface> surface;
+ view::Surface viewSurface;
+ if (target != nullptr) {
// Using controlledByApp flag to ensure that the buffer queue remains in
// async mode for the old camera API, where many applications depend
// on that behavior.
- window = new Surface(bufferProducer, /*controlledByApp*/ true);
+ surface = new Surface(target->getIGraphicBufferProducer(), true);
+ viewSurface = view::Surface::fromSurface(surface);
+ }
+ return setPreviewWindowL(viewSurface, surface);
+#else
+ sp<IBinder> binder;
+ sp<Surface> window;
+ if (target != 0) {
+ binder = IInterface::asBinder(target);
+ // Using controlledByApp flag to ensure that the buffer queue remains in
+ // async mode for the old camera API, where many applications depend
+ // on that behavior.
+ window = new Surface(target, /*controlledByApp*/ true);
}
return setPreviewWindowL(binder, window);
+#endif
}
+#if WB_LIBCAMERASERVICE_WITH_DEPENDENCIES
+status_t Camera2Client::setPreviewWindowL(const view::Surface& viewSurface,
+ const sp<Surface>& window) {
+ ATRACE_CALL();
+ status_t res;
+
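+ // Compare the incoming preview target with the current one by Surface unique ID, since
+ // this path no longer carries an IBinder token to compare.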
+ uint64_t viewSurfaceID;
+ res = viewSurface.getUniqueId(&viewSurfaceID);
+ if (res != OK) {
+ ALOGE("%s: Camera %d: Could not getUniqueId.", __FUNCTION__, mCameraId);
+ return res;
+ }
+
+ if (viewSurfaceID == mPreviewViewSurfaceID) {
+ ALOGV("%s: Camera %d: New window is same as old window", __FUNCTION__, mCameraId);
+ return NO_ERROR;
+ }
+#else
status_t Camera2Client::setPreviewWindowL(const sp<IBinder>& binder,
const sp<Surface>& window) {
ATRACE_CALL();
@@ -617,6 +639,7 @@
__FUNCTION__, mCameraId);
return NO_ERROR;
}
+#endif
Parameters::State state;
{
@@ -628,9 +651,8 @@
case Parameters::RECORD:
case Parameters::STILL_CAPTURE:
case Parameters::VIDEO_SNAPSHOT:
- ALOGE("%s: Camera %d: Cannot set preview display while in state %s",
- __FUNCTION__, mCameraId,
- Parameters::getStateName(state));
+ ALOGE("%s: Camera %d: Cannot set preview display while in state %s", __FUNCTION__,
+ mCameraId, Parameters::getStateName(state));
return INVALID_OPERATION;
case Parameters::STOPPED:
case Parameters::WAITING_FOR_PREVIEW_WINDOW:
@@ -640,19 +662,23 @@
// Already running preview - need to stop and create a new stream
res = stopStream();
if (res != OK) {
- ALOGE("%s: Unable to stop preview to swap windows: %s (%d)",
- __FUNCTION__, strerror(-res), res);
+ ALOGE("%s: Unable to stop preview to swap windows: %s (%d)", __FUNCTION__,
+ strerror(-res), res);
return res;
}
state = Parameters::WAITING_FOR_PREVIEW_WINDOW;
break;
}
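+ // Record the new preview target so a later call with the same surface is a no-op.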
+#if WB_LIBCAMERASERVICE_WITH_DEPENDENCIES
+ mPreviewViewSurfaceID = viewSurfaceID;
+#else
mPreviewSurface = binder;
+#endif
+
res = mStreamingProcessor->setPreviewWindow(window);
if (res != OK) {
- ALOGE("%s: Unable to set new preview window: %s (%d)",
- __FUNCTION__, strerror(-res), res);
+ ALOGE("%s: Unable to set new preview window: %s (%d)", __FUNCTION__, strerror(-res), res);
return res;
}
@@ -725,23 +751,26 @@
}
}
-status_t Camera2Client::setPreviewCallbackTarget(
- const sp<IGraphicBufferProducer>& callbackProducer) {
+status_t Camera2Client::setPreviewCallbackTarget(const sp<SurfaceType>& target) {
ATRACE_CALL();
ALOGV("%s: E", __FUNCTION__);
Mutex::Autolock icl(mBinderSerializationLock);
status_t res;
- if ( (res = checkPid(__FUNCTION__) ) != OK) return res;
+ if ((res = checkPid(__FUNCTION__)) != OK) return res;
+#if WB_LIBCAMERASERVICE_WITH_DEPENDENCIES
+ sp<Surface> window = target;
+#else
sp<Surface> window;
- if (callbackProducer != 0) {
- window = new Surface(callbackProducer);
+ if (target != 0) {
+ window = new Surface(target);
}
+#endif
res = mCallbackProcessor->setCallbackWindow(window);
if (res != OK) {
- ALOGE("%s: Camera %d: Unable to set preview callback surface: %s (%d)",
- __FUNCTION__, mCameraId, strerror(-res), res);
+ ALOGE("%s: Camera %d: Unable to set preview callback surface: %s (%d)", __FUNCTION__,
+ mCameraId, strerror(-res), res);
return res;
}
@@ -757,7 +786,7 @@
l.mParameters.previewCallbackSurface = false;
}
- switch(l.mParameters.state) {
+ switch (l.mParameters.state) {
case Parameters::PREVIEW:
res = startPreviewL(l.mParameters, true);
break;
@@ -769,15 +798,13 @@
break;
}
if (res != OK) {
- ALOGE("%s: Camera %d: Unable to refresh request in state %s",
- __FUNCTION__, mCameraId,
- Parameters::getStateName(l.mParameters.state));
+ ALOGE("%s: Camera %d: Unable to refresh request in state %s", __FUNCTION__, mCameraId,
+ Parameters::getStateName(l.mParameters.state));
}
return OK;
}
-
status_t Camera2Client::startPreview() {
ATRACE_CALL();
ALOGV("%s: E", __FUNCTION__);
@@ -2266,29 +2293,47 @@
return res;
}
-status_t Camera2Client::setVideoTarget(const sp<IGraphicBufferProducer>& bufferProducer) {
+status_t Camera2Client::setVideoTarget(const sp<SurfaceType>& target) {
ATRACE_CALL();
ALOGV("%s: E", __FUNCTION__);
Mutex::Autolock icl(mBinderSerializationLock);
status_t res;
if ( (res = checkPid(__FUNCTION__) ) != OK) return res;
- sp<IBinder> binder = IInterface::asBinder(bufferProducer);
+#if WB_LIBCAMERASERVICE_WITH_DEPENDENCIES
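+ // Identify the video target by Surface unique ID rather than by binder token.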
+ uint64_t videoSurfaceID;
+ res = target->getUniqueId(&videoSurfaceID);
+ if (res != OK) {
+ ALOGE("%s: Camera %d: Could not getUniqueId in setVideoTarget.", __FUNCTION__, mCameraId);
+ return res;
+ }
+ if (videoSurfaceID == mVideoSurfaceID) {
+ ALOGE("%s: Camera %d: New video window is same as old video window", __FUNCTION__,
+ mCameraId);
+ return NO_ERROR;
+ }
+#else
+ sp<IBinder> binder = IInterface::asBinder(target);
if (binder == mVideoSurface) {
ALOGV("%s: Camera %d: New video window is same as old video window",
__FUNCTION__, mCameraId);
return NO_ERROR;
}
+#endif
sp<Surface> window;
int format;
android_dataspace dataSpace;
- if (bufferProducer != nullptr) {
+ if (target != nullptr) {
// Using controlledByApp flag to ensure that the buffer queue remains in
// async mode for the old camera API, where many applications depend
// on that behavior.
- window = new Surface(bufferProducer, /*controlledByApp*/ true);
+#if WB_LIBCAMERASERVICE_WITH_DEPENDENCIES
+ window = new Surface(target->getIGraphicBufferProducer(), /*controlledByApp*/ true);
+#else
+ window = new Surface(target, /*controlledByApp*/ true);
+#endif
ANativeWindow *anw = window.get();
@@ -2327,7 +2372,11 @@
return INVALID_OPERATION;
}
+#if WB_LIBCAMERASERVICE_WITH_DEPENDENCIES
+ mVideoSurfaceID = videoSurfaceID;
+#else
mVideoSurface = binder;
+#endif
res = mStreamingProcessor->setRecordingWindow(window);
if (res != OK) {
ALOGE("%s: Unable to set new recording window: %s (%d)",
diff --git a/services/camera/libcameraservice/api1/Camera2Client.h b/services/camera/libcameraservice/api1/Camera2Client.h
index a0c9f2d..345494b 100644
--- a/services/camera/libcameraservice/api1/Camera2Client.h
+++ b/services/camera/libcameraservice/api1/Camera2Client.h
@@ -17,12 +17,14 @@
#ifndef ANDROID_SERVERS_CAMERA_CAMERA2CLIENT_H
#define ANDROID_SERVERS_CAMERA_CAMERA2CLIENT_H
-#include "CameraService.h"
-#include "common/CameraDeviceBase.h"
-#include "common/Camera2ClientBase.h"
-#include "api1/client2/Parameters.h"
-#include "api1/client2/FrameProcessor.h"
+#include <gui/Flags.h>
+#include <gui/view/Surface.h>
#include <media/RingBuffer.h>
+#include "CameraService.h"
+#include "api1/client2/FrameProcessor.h"
+#include "api1/client2/Parameters.h"
+#include "common/Camera2ClientBase.h"
+#include "common/CameraDeviceBase.h"
namespace android {
@@ -53,11 +55,9 @@
virtual status_t connect(const sp<hardware::ICameraClient>& client);
virtual status_t lock();
virtual status_t unlock();
- virtual status_t setPreviewTarget(
- const sp<IGraphicBufferProducer>& bufferProducer);
+ virtual status_t setPreviewTarget(const sp<SurfaceType>& target);
virtual void setPreviewCallbackFlag(int flag);
- virtual status_t setPreviewCallbackTarget(
- const sp<IGraphicBufferProducer>& callbackProducer);
+ virtual status_t setPreviewCallbackTarget(const sp<SurfaceType>& target);
virtual status_t startPreview();
virtual void stopPreview();
@@ -78,7 +78,7 @@
virtual status_t sendCommand(int32_t cmd, int32_t arg1, int32_t arg2);
virtual void notifyError(int32_t errorCode,
const CaptureResultExtras& resultExtras);
- virtual status_t setVideoTarget(const sp<IGraphicBufferProducer>& bufferProducer);
+ virtual status_t setVideoTarget(const sp<SurfaceType>& target);
virtual status_t setAudioRestriction(int mode);
virtual int32_t getGlobalAudioRestriction();
virtual status_t setRotateAndCropOverride(uint8_t rotateAndCrop, bool fromHal = false);
@@ -101,21 +101,13 @@
*/
Camera2Client(const sp<CameraService>& cameraService,
- const sp<hardware::ICameraClient>& cameraClient,
- std::shared_ptr<CameraServiceProxyWrapper> cameraServiceProxyWrapper,
- std::shared_ptr<AttributionAndPermissionUtils> attributionAndPermissionUtils,
- const std::string& clientPackageName,
- const std::optional<std::string>& clientFeatureId,
- const std::string& cameraDeviceId,
- int api1CameraId,
- int cameraFacing,
- int sensorOrientation,
- int clientPid,
- uid_t clientUid,
- int servicePid,
- bool overrideForPerfClass,
- int rotationOverride,
- bool forceSlowJpegMode);
+ const sp<hardware::ICameraClient>& cameraClient,
+ std::shared_ptr<CameraServiceProxyWrapper> cameraServiceProxyWrapper,
+ std::shared_ptr<AttributionAndPermissionUtils> attributionAndPermissionUtils,
+ const AttributionSourceState& clientAttribution, int callingPid,
+ const std::string& cameraDeviceId, int api1CameraId, int cameraFacing,
+ int sensorOrientation, int servicePid, bool overrideForPerfClass,
+ int rotationOverride, bool forceSlowJpegMode);
virtual ~Camera2Client();
@@ -183,8 +175,12 @@
/** ICamera interface-related private members */
typedef camera2::Parameters Parameters;
+#if WB_LIBCAMERASERVICE_WITH_DEPENDENCIES
+ status_t setPreviewWindowL(const view::Surface& viewSurface, const sp<Surface>& window);
+#else
status_t setPreviewWindowL(const sp<IBinder>& binder,
const sp<Surface>& window);
+#endif
status_t startPreviewL(Parameters &params, bool restart);
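PLACEHOLDER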
void stopPreviewL();
status_t startRecordingL(Parameters &params, bool restart);
@@ -221,8 +217,13 @@
/* Preview/Recording related members */
+#if WB_LIBCAMERASERVICE_WITH_DEPENDENCIES
+ uint64_t mPreviewViewSurfaceID;
+ uint64_t mVideoSurfaceID;
+#else
sp<IBinder> mPreviewSurface;
sp<IBinder> mVideoSurface;
+#endif
sp<camera2::StreamingProcessor> mStreamingProcessor;
/** Preview callback related members */
diff --git a/services/camera/libcameraservice/api2/CameraDeviceClient.cpp b/services/camera/libcameraservice/api2/CameraDeviceClient.cpp
index f469aad..17a6dc3 100644
--- a/services/camera/libcameraservice/api2/CameraDeviceClient.cpp
+++ b/services/camera/libcameraservice/api2/CameraDeviceClient.cpp
@@ -61,61 +61,33 @@
const sp<CameraService>& cameraService,
const sp<hardware::camera2::ICameraDeviceCallbacks>& remoteCallback,
std::shared_ptr<AttributionAndPermissionUtils> attributionAndPermissionUtils,
- const std::string& clientPackageName,
- bool systemNativeClient,
- const std::optional<std::string>& clientFeatureId,
- const std::string& cameraId,
- [[maybe_unused]] int api1CameraId,
- int cameraFacing,
- int sensorOrientation,
- int clientPid,
- uid_t clientUid,
- int servicePid,
- int rotationOverride) :
- BasicClient(cameraService,
- IInterface::asBinder(remoteCallback),
- attributionAndPermissionUtils,
- clientPackageName,
- systemNativeClient,
- clientFeatureId,
- cameraId,
- cameraFacing,
- sensorOrientation,
- clientPid,
- clientUid,
- servicePid,
- rotationOverride),
- mRemoteCallback(remoteCallback) {
-}
+ const AttributionSourceState& clientAttribution, int callingPid, bool systemNativeClient,
+ const std::string& cameraId, [[maybe_unused]] int api1CameraId, int cameraFacing,
+ int sensorOrientation, int servicePid, int rotationOverride)
+ : BasicClient(cameraService, IInterface::asBinder(remoteCallback),
+ attributionAndPermissionUtils, clientAttribution, callingPid, systemNativeClient,
+ cameraId, cameraFacing, sensorOrientation, servicePid, rotationOverride),
+ mRemoteCallback(remoteCallback) {}
// Interface used by CameraService
-CameraDeviceClient::CameraDeviceClient(const sp<CameraService>& cameraService,
+CameraDeviceClient::CameraDeviceClient(
+ const sp<CameraService>& cameraService,
const sp<hardware::camera2::ICameraDeviceCallbacks>& remoteCallback,
std::shared_ptr<CameraServiceProxyWrapper> cameraServiceProxyWrapper,
std::shared_ptr<AttributionAndPermissionUtils> attributionAndPermissionUtils,
- const std::string& clientPackageName,
- bool systemNativeClient,
- const std::optional<std::string>& clientFeatureId,
- const std::string& cameraId,
- int cameraFacing,
- int sensorOrientation,
- int clientPid,
- uid_t clientUid,
- int servicePid,
- bool overrideForPerfClass,
- int rotationOverride,
- const std::string& originalCameraId) :
- Camera2ClientBase(cameraService, remoteCallback, cameraServiceProxyWrapper,
- attributionAndPermissionUtils, clientPackageName,
- systemNativeClient, clientFeatureId, cameraId, /*API1 camera ID*/ -1, cameraFacing,
- sensorOrientation, clientPid, clientUid, servicePid, overrideForPerfClass,
- rotationOverride),
- mInputStream(),
- mStreamingRequestId(REQUEST_ID_NONE),
- mRequestIdCounter(0),
- mOverrideForPerfClass(overrideForPerfClass),
- mOriginalCameraId(originalCameraId) {
+ const AttributionSourceState& clientAttribution, int callingPid, bool systemNativeClient,
+ const std::string& cameraId, int cameraFacing, int sensorOrientation, int servicePid,
+ bool overrideForPerfClass, int rotationOverride, const std::string& originalCameraId)
+ : Camera2ClientBase(cameraService, remoteCallback, cameraServiceProxyWrapper,
+ attributionAndPermissionUtils, clientAttribution, callingPid,
+ systemNativeClient, cameraId, /*API1 camera ID*/ -1, cameraFacing,
+ sensorOrientation, servicePid, overrideForPerfClass, rotationOverride),
+ mInputStream(),
+ mStreamingRequestId(REQUEST_ID_NONE),
+ mRequestIdCounter(0),
+ mOverrideForPerfClass(overrideForPerfClass),
+ mOriginalCameraId(originalCameraId) {
ATRACE_CALL();
ALOGI("CameraDeviceClient %s: Opened", cameraId.c_str());
}
@@ -908,7 +880,6 @@
int64_t dynamicRangeProfile = outputConfiguration.getDynamicRangeProfile();
int64_t streamUseCase = outputConfiguration.getStreamUseCase();
int timestampBase = outputConfiguration.getTimestampBase();
- int mirrorMode = outputConfiguration.getMirrorMode();
int32_t colorSpace = outputConfiguration.getColorSpace();
bool useReadoutTimestamp = outputConfiguration.useReadoutTimestamp();
@@ -927,7 +898,7 @@
return res;
}
- std::vector<sp<Surface>> surfaces;
+ std::vector<SurfaceHolder> surfaces;
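+ // SurfaceHolder pairs each output Surface with its per-surface mirror mode.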
std::vector<sp<IBinder>> binders;
status_t err;
@@ -952,6 +923,7 @@
return STATUS_ERROR(CameraService::ERROR_ALREADY_EXISTS, msg.c_str());
}
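+ // Mirror mode is now queried per output surface instead of once per output configuration.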
+ int mirrorMode = outputConfiguration.getMirrorMode(bufferProducer);
sp<Surface> surface;
res = SessionConfigurationUtils::createSurfaceFromGbp(streamInfo,
isStreamInfoValid, surface, bufferProducer, mCameraIdStr,
@@ -966,7 +938,7 @@
}
binders.push_back(IInterface::asBinder(bufferProducer));
- surfaces.push_back(surface);
+ surfaces.push_back({surface, mirrorMode});
}
// If mOverrideForPerfClass is true, do not fail createStream() for small
@@ -976,10 +948,11 @@
int streamId = camera3::CAMERA3_STREAM_ID_INVALID;
std::vector<int> surfaceIds;
bool isDepthCompositeStream =
- camera3::DepthCompositeStream::isDepthCompositeStream(surfaces[0]);
- bool isHeicCompositeStream = camera3::HeicCompositeStream::isHeicCompositeStream(surfaces[0]);
+ camera3::DepthCompositeStream::isDepthCompositeStream(surfaces[0].mSurface);
+ bool isHeicCompositeStream = camera3::HeicCompositeStream::isHeicCompositeStream(
+ surfaces[0].mSurface);
bool isJpegRCompositeStream =
- camera3::JpegRCompositeStream::isJpegRCompositeStream(surfaces[0]) &&
+ camera3::JpegRCompositeStream::isJpegRCompositeStream(surfaces[0].mSurface) &&
!mDevice->isCompositeJpegRDisabled();
if (isDepthCompositeStream || isHeicCompositeStream || isJpegRCompositeStream) {
sp<CompositeStream> compositeStream;
@@ -1000,7 +973,8 @@
useReadoutTimestamp);
if (err == OK) {
Mutex::Autolock l(mCompositeLock);
- mCompositeStreamMap.add(IInterface::asBinder(surfaces[0]->getIGraphicBufferProducer()),
+ mCompositeStreamMap.add(
+ IInterface::asBinder(surfaces[0].mSurface->getIGraphicBufferProducer()),
compositeStream);
}
} else {
@@ -1010,8 +984,7 @@
&streamId, physicalCameraId, streamInfo.sensorPixelModesUsed, &surfaceIds,
outputConfiguration.getSurfaceSetID(), isShared, isMultiResolution,
/*consumerUsage*/0, streamInfo.dynamicRangeProfile, streamInfo.streamUseCase,
- streamInfo.timestampBase, streamInfo.mirrorMode, streamInfo.colorSpace,
- useReadoutTimestamp);
+ streamInfo.timestampBase, streamInfo.colorSpace, useReadoutTimestamp);
}
if (err != OK) {
@@ -1036,9 +1009,6 @@
__FUNCTION__, mCameraIdStr.c_str(), streamId, streamInfo.width,
streamInfo.height, streamInfo.format);
- // Set transform flags to ensure preview to be rotated correctly.
- res = setStreamTransformLocked(streamId, streamInfo.mirrorMode);
-
// Fill in mHighResolutionCameraIdToStreamIdSet map
const std::string &cameraIdUsed =
physicalCameraId.size() != 0 ? physicalCameraId : mCameraIdStr;
@@ -1087,7 +1057,7 @@
consumerUsage |= GraphicBuffer::USAGE_HW_COMPOSER;
}
int streamId = camera3::CAMERA3_STREAM_ID_INVALID;
- std::vector<sp<Surface>> noSurface;
+ std::vector<SurfaceHolder> noSurface;
std::vector<int> surfaceIds;
const std::string &physicalCameraId = outputConfiguration.getPhysicalCameraId();
const std::string &cameraIdUsed =
@@ -1113,7 +1083,6 @@
outputConfiguration.isMultiResolution(), consumerUsage,
outputConfiguration.getDynamicRangeProfile(),
outputConfiguration.getStreamUseCase(),
- outputConfiguration.getMirrorMode(),
outputConfiguration.useReadoutTimestamp());
if (err != OK) {
@@ -1132,16 +1101,12 @@
outputConfiguration.getDynamicRangeProfile(),
outputConfiguration.getStreamUseCase(),
outputConfiguration.getTimestampBase(),
- outputConfiguration.getMirrorMode(),
colorSpace));
ALOGV("%s: Camera %s: Successfully created a new stream ID %d for a deferred surface"
" (%d x %d) stream with format 0x%x.",
__FUNCTION__, mCameraIdStr.c_str(), streamId, width, height, format);
- // Set transform flags to ensure preview to be rotated correctly.
- res = setStreamTransformLocked(streamId, outputConfiguration.getMirrorMode());
-
*newStreamId = streamId;
// Fill in mHighResolutionCameraIdToStreamIdSet
// Only needed for high resolution sensors
@@ -1153,33 +1118,6 @@
return res;
}
-binder::Status CameraDeviceClient::setStreamTransformLocked(int streamId, int mirrorMode) {
- int32_t transform = 0;
- status_t err;
- binder::Status res;
-
- if (!mDevice.get()) {
- return STATUS_ERROR(CameraService::ERROR_DISCONNECTED, "Camera device no longer alive");
- }
-
- err = getRotationTransformLocked(mirrorMode, &transform);
- if (err != OK) {
- // Error logged by getRotationTransformLocked.
- return STATUS_ERROR(CameraService::ERROR_INVALID_OPERATION,
- "Unable to calculate rotation transform for new stream");
- }
-
- err = mDevice->setStreamTransform(streamId, transform);
- if (err != OK) {
- std::string msg = fmt::sprintf("Failed to set stream transform (stream id %d)",
- streamId);
- ALOGE("%s: %s", __FUNCTION__, msg.c_str());
- return STATUS_ERROR(CameraService::ERROR_INVALID_OPERATION, msg.c_str());
- }
-
- return res;
-}
-
binder::Status CameraDeviceClient::createInputStream(
int width, int height, int format, bool isMultiResolution,
/*out*/
@@ -1312,7 +1250,7 @@
std::vector<size_t> removedSurfaceIds;
std::vector<sp<IBinder>> removedOutputs;
- std::vector<sp<Surface>> newOutputs;
+ std::vector<SurfaceHolder> newOutputs;
std::vector<OutputStreamInfo> streamInfos;
KeyedVector<sp<IBinder>, sp<IGraphicBufferProducer>> newOutputsMap;
for (auto &it : bufferProducers) {
@@ -1341,11 +1279,11 @@
int timestampBase = outputConfiguration.getTimestampBase();
int64_t dynamicRangeProfile = outputConfiguration.getDynamicRangeProfile();
int32_t colorSpace = outputConfiguration.getColorSpace();
- int mirrorMode = outputConfiguration.getMirrorMode();
for (size_t i = 0; i < newOutputsMap.size(); i++) {
OutputStreamInfo outInfo;
sp<Surface> surface;
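+ // Query the mirror mode for this specific producer when rebuilding the output list.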
+ int mirrorMode = outputConfiguration.getMirrorMode(newOutputsMap.valueAt(i));
res = SessionConfigurationUtils::createSurfaceFromGbp(outInfo,
/*isStreamInfoValid*/ false, surface, newOutputsMap.valueAt(i), mCameraIdStr,
mDevice->infoPhysical(physicalCameraId), sensorPixelModesUsed, dynamicRangeProfile,
@@ -1354,7 +1292,7 @@
return res;
streamInfos.push_back(outInfo);
- newOutputs.push_back(surface);
+ newOutputs.push_back({surface, mirrorMode});
}
//Trivial case no changes required
@@ -1711,14 +1649,13 @@
return STATUS_ERROR(CameraService::ERROR_DISCONNECTED, "Camera device no longer alive");
}
- std::vector<sp<Surface>> consumerSurfaces;
+ std::vector<SurfaceHolder> consumerSurfaceHolders;
const std::vector<int32_t> &sensorPixelModesUsed =
outputConfiguration.getSensorPixelModesUsed();
int64_t dynamicRangeProfile = outputConfiguration.getDynamicRangeProfile();
int32_t colorSpace = outputConfiguration.getColorSpace();
int64_t streamUseCase = outputConfiguration.getStreamUseCase();
int timestampBase = outputConfiguration.getTimestampBase();
- int mirrorMode = outputConfiguration.getMirrorMode();
for (auto& bufferProducer : bufferProducers) {
// Don't create multiple streams for the same target surface
ssize_t index = mStreamMap.indexOfKey(IInterface::asBinder(bufferProducer));
@@ -1729,6 +1666,7 @@
}
sp<Surface> surface;
+ int mirrorMode = outputConfiguration.getMirrorMode(bufferProducer);
res = SessionConfigurationUtils::createSurfaceFromGbp(mStreamInfoMap[streamId],
true /*isStreamInfoValid*/, surface, bufferProducer, mCameraIdStr,
mDevice->infoPhysical(physicalId), sensorPixelModesUsed, dynamicRangeProfile,
@@ -1737,12 +1675,12 @@
if (!res.isOk())
return res;
- consumerSurfaces.push_back(surface);
+ consumerSurfaceHolders.push_back({surface, mirrorMode});
}
// Gracefully handle case where finalizeOutputConfigurations is called
// without any new surface.
- if (consumerSurfaces.size() == 0) {
+ if (consumerSurfaceHolders.size() == 0) {
mStreamInfoMap[streamId].finalized = true;
return res;
}
@@ -1750,11 +1688,11 @@
// Finish the deferred stream configuration with the surface.
status_t err;
std::vector<int> consumerSurfaceIds;
- err = mDevice->setConsumerSurfaces(streamId, consumerSurfaces, &consumerSurfaceIds);
+ err = mDevice->setConsumerSurfaces(streamId, consumerSurfaceHolders, &consumerSurfaceIds);
if (err == OK) {
- for (size_t i = 0; i < consumerSurfaces.size(); i++) {
+ for (size_t i = 0; i < consumerSurfaceHolders.size(); i++) {
sp<IBinder> binder = IInterface::asBinder(
- consumerSurfaces[i]->getIGraphicBufferProducer());
+ consumerSurfaceHolders[i].mSurface->getIGraphicBufferProducer());
ALOGV("%s: mStreamMap add binder %p streamId %d, surfaceId %d", __FUNCTION__,
binder.get(), streamId, consumerSurfaceIds[i]);
mStreamMap.add(binder, StreamSurfaceId(streamId, consumerSurfaceIds[i]));
@@ -1934,10 +1872,10 @@
sp<CameraOfflineSessionClient> offlineClient;
if (offlineSession.get() != nullptr) {
- offlineClient = new CameraOfflineSessionClient(sCameraService,
- offlineSession, offlineCompositeStreamMap, cameraCb, mAttributionAndPermissionUtils,
- mClientPackageName, mClientFeatureId, mCameraIdStr, mCameraFacing, mOrientation,
- mClientPid, mClientUid, mServicePid);
+ offlineClient = new CameraOfflineSessionClient(
+ sCameraService, offlineSession, offlineCompositeStreamMap, cameraCb,
+ mAttributionAndPermissionUtils, mClientAttribution, mCallingPid, mCameraIdStr,
+ mCameraFacing, mOrientation, mServicePid);
ret = sCameraService->addOfflineClient(mCameraIdStr, offlineClient);
}
@@ -1984,7 +1922,7 @@
mCameraIdStr.c_str(),
(getRemoteCallback() != NULL ?
IInterface::asBinder(getRemoteCallback()).get() : NULL) );
- dprintf(fd, " Current client UID %u\n", mClientUid);
+ dprintf(fd, " Current client UID %u\n", getClientUid());
dprintf(fd, " State:\n");
dprintf(fd, " Request ID counter: %d\n", mRequestIdCounter);
@@ -2271,14 +2209,6 @@
return true;
}
-status_t CameraDeviceClient::getRotationTransformLocked(int mirrorMode,
- int32_t* transform) {
- ALOGV("%s: begin", __FUNCTION__);
-
- const CameraMetadata& staticInfo = mDevice->info();
- return CameraUtils::getRotationTransform(staticInfo, mirrorMode, transform);
-}
-
const CameraMetadata &CameraDeviceClient::getStaticInfo(const std::string &cameraId) {
if (mDevice->getId() == cameraId) {
return mDevice->info();
diff --git a/services/camera/libcameraservice/api2/CameraDeviceClient.h b/services/camera/libcameraservice/api2/CameraDeviceClient.h
index 42f2752..0858633 100644
--- a/services/camera/libcameraservice/api2/CameraDeviceClient.h
+++ b/services/camera/libcameraservice/api2/CameraDeviceClient.h
@@ -48,20 +48,13 @@
}
protected:
- CameraDeviceClientBase(const sp<CameraService>& cameraService,
+ CameraDeviceClientBase(
+ const sp<CameraService>& cameraService,
const sp<hardware::camera2::ICameraDeviceCallbacks>& remoteCallback,
std::shared_ptr<AttributionAndPermissionUtils> attributionAndPermissionUtils,
- const std::string& clientPackageName,
- bool systemNativeClient,
- const std::optional<std::string>& clientFeatureId,
- const std::string& cameraId,
- int api1CameraId,
- int cameraFacing,
- int sensorOrientation,
- int clientPid,
- uid_t clientUid,
- int servicePid,
- int rotationOverride);
+ const AttributionSourceState& clientAttribution, int callingPid,
+ bool systemNativeClient, const std::string& cameraId, int api1CameraId,
+ int cameraFacing, int sensorOrientation, int servicePid, int rotationOverride);
sp<hardware::camera2::ICameraDeviceCallbacks> mRemoteCallback;
};
@@ -180,21 +173,13 @@
*/
CameraDeviceClient(const sp<CameraService>& cameraService,
- const sp<hardware::camera2::ICameraDeviceCallbacks>& remoteCallback,
- std::shared_ptr<CameraServiceProxyWrapper> cameraServiceProxyWrapper,
- std::shared_ptr<AttributionAndPermissionUtils> attributionAndPermissionUtils,
- const std::string& clientPackageName,
- bool clientPackageOverride,
- const std::optional<std::string>& clientFeatureId,
- const std::string& cameraId,
- int cameraFacing,
- int sensorOrientation,
- int clientPid,
- uid_t clientUid,
- int servicePid,
- bool overrideForPerfClass,
- int rotationOverride,
- const std::string& originalCameraId);
+ const sp<hardware::camera2::ICameraDeviceCallbacks>& remoteCallback,
+ std::shared_ptr<CameraServiceProxyWrapper> cameraServiceProxyWrapper,
+ std::shared_ptr<AttributionAndPermissionUtils> attributionAndPermissionUtils,
+ const AttributionSourceState& clientAttribution, int callingPid,
+ bool clientPackageOverride, const std::string& cameraId, int cameraFacing,
+ int sensorOrientation, int servicePid, bool overrideForPerfClass,
+ int rotationOverride, const std::string& originalCameraId);
virtual ~CameraDeviceClient();
virtual status_t initialize(sp<CameraProviderManager> manager,
@@ -247,9 +232,6 @@
virtual void onResultAvailable(const CaptureResult& result);
virtual void detachDevice();
- // Calculate the ANativeWindow transform from android.sensor.orientation
- status_t getRotationTransformLocked(int mirrorMode, /*out*/int32_t* transform);
-
bool supportsUltraHighResolutionCapture(const std::string &cameraId);
bool isSensorPixelModeConsistent(const std::list<int> &streamIdList,
@@ -306,10 +288,6 @@
bool isShared,
int* newStreamId = NULL);
- // Set the stream transform flags to automatically rotate the camera stream for preview use
- // cases.
- binder::Status setStreamTransformLocked(int streamId, int mirrorMode);
-
// Utility method to insert the surface into SurfaceMap
binder::Status insertGbpLocked(const sp<IGraphicBufferProducer>& gbp,
/*out*/SurfaceMap* surfaceMap, /*out*/Vector<int32_t>* streamIds,
diff --git a/services/camera/libcameraservice/api2/CameraOfflineSessionClient.cpp b/services/camera/libcameraservice/api2/CameraOfflineSessionClient.cpp
index 9a1fdd6..e783cbc 100644
--- a/services/camera/libcameraservice/api2/CameraOfflineSessionClient.cpp
+++ b/services/camera/libcameraservice/api2/CameraOfflineSessionClient.cpp
@@ -34,8 +34,8 @@
return OK;
}
- // Verify ops permissions
- auto res = startCameraOps();
+ // Verify ops permissions and/or open camera
+ auto res = notifyCameraOpening();
if (res != OK) {
return res;
}
@@ -163,7 +163,7 @@
}
// Allow both client and the media server to disconnect at all times
int callingPid = getCallingPid();
- if (callingPid != mClientPid &&
+ if (callingPid != mCallingPid &&
callingPid != mServicePid) {
return res;
}
@@ -171,7 +171,7 @@
mDisconnected = true;
sCameraService->removeByClient(this);
- sCameraService->logDisconnectedOffline(mCameraIdStr, mClientPid, mClientPackageName);
+ sCameraService->logDisconnectedOffline(mCameraIdStr, mCallingPid, getPackageName());
sp<IBinder> remote = getRemote();
if (remote != nullptr) {
@@ -184,12 +184,12 @@
mFrameProcessor->requestExit();
mFrameProcessor->join();
- finishCameraOps();
+ notifyCameraClosing();
ALOGI("%s: Disconnected client for offline camera %s for PID %d", __FUNCTION__,
- mCameraIdStr.c_str(), mClientPid);
+ mCameraIdStr.c_str(), mCallingPid);
// client shouldn't be able to call into us anymore
- mClientPid = 0;
+ mCallingPid = 0;
if (mOfflineSession.get() != nullptr) {
auto ret = mOfflineSession->disconnect();
@@ -227,11 +227,11 @@
}
}
-status_t CameraOfflineSessionClient::startCameraOps() {
+status_t CameraOfflineSessionClient::notifyCameraOpening() {
ATRACE_CALL();
{
- ALOGV("%s: Start camera ops, package name = %s, client UID = %d",
- __FUNCTION__, mClientPackageName.c_str(), mClientUid);
+ ALOGV("%s: Notify camera opening, package name = %s, client UID = %d", __FUNCTION__,
+ getPackageName().c_str(), getClientUid());
}
if (mAppOpsManager != nullptr) {
@@ -239,47 +239,48 @@
mOpsCallback = new OpsCallback(this);
int32_t res;
// TODO : possibly change this to OP_OFFLINE_CAMERA_SESSION
- mAppOpsManager->startWatchingMode(AppOpsManager::OP_CAMERA,
- toString16(mClientPackageName), mOpsCallback);
+ mAppOpsManager->startWatchingMode(AppOpsManager::OP_CAMERA, toString16(getPackageName()),
+ mOpsCallback);
// TODO : possibly change this to OP_OFFLINE_CAMERA_SESSION
- res = mAppOpsManager->startOpNoThrow(AppOpsManager::OP_CAMERA,
- mClientUid, toString16(mClientPackageName), /*startIfModeDefault*/ false);
+ res = mAppOpsManager->startOpNoThrow(AppOpsManager::OP_CAMERA, getClientUid(),
+ toString16(getPackageName()),
+ /*startIfModeDefault*/ false);
if (res == AppOpsManager::MODE_ERRORED) {
- ALOGI("Offline Camera %s: Access for \"%s\" has been revoked",
- mCameraIdStr.c_str(), mClientPackageName.c_str());
+ ALOGI("Offline Camera %s: Access for \"%s\" has been revoked", mCameraIdStr.c_str(),
+ getPackageName().c_str());
return PERMISSION_DENIED;
}
// If the calling Uid is trusted (a native service), the AppOpsManager could
// return MODE_IGNORED. Do not treat such case as error.
if (!mUidIsTrusted && res == AppOpsManager::MODE_IGNORED) {
- ALOGI("Offline Camera %s: Access for \"%s\" has been restricted",
- mCameraIdStr.c_str(), mClientPackageName.c_str());
+ ALOGI("Offline Camera %s: Access for \"%s\" has been restricted", mCameraIdStr.c_str(),
+ getPackageName().c_str());
// Return the same error as for device policy manager rejection
return -EACCES;
}
}
- mOpsActive = true;
+ mCameraOpen = true;
// Transition device state to OPEN
- sCameraService->mUidPolicy->registerMonitorUid(mClientUid, /*openCamera*/true);
+ sCameraService->mUidPolicy->registerMonitorUid(getClientUid(), /*openCamera*/ true);
return OK;
}
-status_t CameraOfflineSessionClient::finishCameraOps() {
+status_t CameraOfflineSessionClient::notifyCameraClosing() {
ATRACE_CALL();
- // Check if startCameraOps succeeded, and if so, finish the camera op
- if (mOpsActive) {
+ // Check if notifyCameraOpening succeeded, and if so, finish the camera op if necessary
+ if (mCameraOpen) {
// Notify app ops that the camera is available again
if (mAppOpsManager != nullptr) {
- // TODO : possibly change this to OP_OFFLINE_CAMERA_SESSION
- mAppOpsManager->finishOp(AppOpsManager::OP_CAMERA, mClientUid,
- toString16(mClientPackageName));
- mOpsActive = false;
+ // TODO : possibly change this to OP_OFFLINE_CAMERA_SESSION
+ mAppOpsManager->finishOp(AppOpsManager::OP_CAMERA, getClientUid(),
+ toString16(getPackageName()));
+ mCameraOpen = false;
}
}
// Always stop watching, even if no camera op is active
@@ -288,7 +289,7 @@
}
mOpsCallback.clear();
- sCameraService->mUidPolicy->unregisterMonitorUid(mClientUid, /*closeCamera*/true);
+ sCameraService->mUidPolicy->unregisterMonitorUid(getClientUid(), /*closeCamera*/ true);
return OK;
}
diff --git a/services/camera/libcameraservice/api2/CameraOfflineSessionClient.h b/services/camera/libcameraservice/api2/CameraOfflineSessionClient.h
index 77de874..574ff9a 100644
--- a/services/camera/libcameraservice/api2/CameraOfflineSessionClient.h
+++ b/services/camera/libcameraservice/api2/CameraOfflineSessionClient.h
@@ -44,25 +44,21 @@
{
public:
CameraOfflineSessionClient(
- const sp<CameraService>& cameraService,
- sp<CameraOfflineSessionBase> session,
+ const sp<CameraService>& cameraService, sp<CameraOfflineSessionBase> session,
const KeyedVector<sp<IBinder>, sp<CompositeStream>>& offlineCompositeStreamMap,
const sp<ICameraDeviceCallbacks>& remoteCallback,
std::shared_ptr<AttributionAndPermissionUtils> attributionAndPermissionUtils,
- const std::string& clientPackageName,
- const std::optional<std::string>& clientFeatureId,
- const std::string& cameraIdStr, int cameraFacing, int sensorOrientation,
- int clientPid, uid_t clientUid, int servicePid) :
- CameraService::BasicClient(
- cameraService,
- IInterface::asBinder(remoteCallback),
- attributionAndPermissionUtils,
- // (v)ndk doesn't have offline session support
- clientPackageName, /*overridePackageName*/false, clientFeatureId,
- cameraIdStr, cameraFacing, sensorOrientation, clientPid, clientUid, servicePid,
- hardware::ICameraService::ROTATION_OVERRIDE_NONE),
- mRemoteCallback(remoteCallback), mOfflineSession(session),
- mCompositeStreamMap(offlineCompositeStreamMap) {}
+ const AttributionSourceState& clientAttribution, int callingPid,
+ const std::string& cameraIdStr, int cameraFacing, int sensorOrientation, int servicePid)
+ : CameraService::BasicClient(cameraService, IInterface::asBinder(remoteCallback),
+ attributionAndPermissionUtils,
+ // (v)ndk doesn't have offline session support
+ clientAttribution, callingPid, /*overridePackageName*/ false,
+ cameraIdStr, cameraFacing, sensorOrientation, servicePid,
+ hardware::ICameraService::ROTATION_OVERRIDE_NONE),
+ mRemoteCallback(remoteCallback),
+ mOfflineSession(session),
+ mCompositeStreamMap(offlineCompositeStreamMap) {}
virtual ~CameraOfflineSessionClient() {}
@@ -102,8 +98,8 @@
status_t setZoomOverride(int32_t zoomOverride) override;
// permissions management
- status_t startCameraOps() override;
- status_t finishCameraOps() override;
+ status_t notifyCameraOpening() override;
+ status_t notifyCameraClosing() override;
// FilteredResultListener API
void onResultAvailable(const CaptureResult& result) override;
diff --git a/services/camera/libcameraservice/api2/CompositeStream.cpp b/services/camera/libcameraservice/api2/CompositeStream.cpp
index 8f53458..6d7fabd 100644
--- a/services/camera/libcameraservice/api2/CompositeStream.cpp
+++ b/services/camera/libcameraservice/api2/CompositeStream.cpp
@@ -44,7 +44,7 @@
}
}
-status_t CompositeStream::createStream(const std::vector<sp<Surface>>& consumers,
+status_t CompositeStream::createStream(const std::vector<SurfaceHolder>& consumers,
bool hasDeferredConsumer, uint32_t width, uint32_t height, int format,
camera_stream_rotation_t rotation, int * id, const std::string& physicalCameraId,
const std::unordered_set<int32_t> &sensorPixelModesUsed,
diff --git a/services/camera/libcameraservice/api2/CompositeStream.h b/services/camera/libcameraservice/api2/CompositeStream.h
index fa569ce..2b158c9 100644
--- a/services/camera/libcameraservice/api2/CompositeStream.h
+++ b/services/camera/libcameraservice/api2/CompositeStream.h
@@ -41,7 +41,7 @@
CompositeStream(sp<CameraDeviceBase> device, wp<hardware::camera2::ICameraDeviceCallbacks> cb);
virtual ~CompositeStream() {}
- status_t createStream(const std::vector<sp<Surface>>& consumers,
+ status_t createStream(const std::vector<SurfaceHolder>& consumers,
bool hasDeferredConsumer, uint32_t width, uint32_t height, int format,
camera_stream_rotation_t rotation, int *id, const std::string& physicalCameraId,
const std::unordered_set<int32_t> &sensorPixelModesUsed,
@@ -55,7 +55,7 @@
void switchToOffline();
// Create and register all internal camera streams.
- virtual status_t createInternalStreams(const std::vector<sp<Surface>>& consumers,
+ virtual status_t createInternalStreams(const std::vector<SurfaceHolder>& consumers,
bool hasDeferredConsumer, uint32_t width, uint32_t height, int format,
camera_stream_rotation_t rotation, int *id, const std::string& physicalCameraId,
const std::unordered_set<int32_t> &sensorPixelModesUsed,
diff --git a/services/camera/libcameraservice/api2/DepthCompositeStream.cpp b/services/camera/libcameraservice/api2/DepthCompositeStream.cpp
index 244a1e5..14618c4 100644
--- a/services/camera/libcameraservice/api2/DepthCompositeStream.cpp
+++ b/services/camera/libcameraservice/api2/DepthCompositeStream.cpp
@@ -588,7 +588,7 @@
}
-status_t DepthCompositeStream::createInternalStreams(const std::vector<sp<Surface>>& consumers,
+status_t DepthCompositeStream::createInternalStreams(const std::vector<SurfaceHolder>& consumers,
bool /*hasDeferredConsumer*/, uint32_t width, uint32_t height, int format,
camera_stream_rotation_t rotation, int *id, const std::string& physicalCameraId,
const std::unordered_set<int32_t> &sensorPixelModesUsed,
@@ -643,7 +643,7 @@
if (ret == OK) {
mBlobStreamId = *id;
mBlobSurfaceId = (*surfaceIds)[0];
- mOutputSurface = consumers[0];
+ mOutputSurface = consumers[0].mSurface;
} else {
return ret;
}
diff --git a/services/camera/libcameraservice/api2/DepthCompositeStream.h b/services/camera/libcameraservice/api2/DepthCompositeStream.h
index 75deef7..9c0311e 100644
--- a/services/camera/libcameraservice/api2/DepthCompositeStream.h
+++ b/services/camera/libcameraservice/api2/DepthCompositeStream.h
@@ -49,7 +49,7 @@
static bool isDepthCompositeStreamInfo(const OutputStreamInfo& streamInfo);
// CompositeStream overrides
- status_t createInternalStreams(const std::vector<sp<Surface>>& consumers,
+ status_t createInternalStreams(const std::vector<SurfaceHolder>& consumers,
bool hasDeferredConsumer, uint32_t width, uint32_t height, int format,
camera_stream_rotation_t rotation, int *id, const std::string& physicalCameraId,
const std::unordered_set<int32_t> &sensorPixelModesUsed,
diff --git a/services/camera/libcameraservice/api2/HeicCompositeStream.cpp b/services/camera/libcameraservice/api2/HeicCompositeStream.cpp
index 3af673b..0f4ba65 100644
--- a/services/camera/libcameraservice/api2/HeicCompositeStream.cpp
+++ b/services/camera/libcameraservice/api2/HeicCompositeStream.cpp
@@ -123,7 +123,7 @@
return ((format == HAL_PIXEL_FORMAT_BLOB) && (dataspace == HAL_DATASPACE_HEIF));
}
-status_t HeicCompositeStream::createInternalStreams(const std::vector<sp<Surface>>& consumers,
+status_t HeicCompositeStream::createInternalStreams(const std::vector<SurfaceHolder>& consumers,
bool /*hasDeferredConsumer*/, uint32_t width, uint32_t height, int format,
camera_stream_rotation_t rotation, int *id, const std::string& physicalCameraId,
const std::unordered_set<int32_t> &sensorPixelModesUsed,
@@ -228,7 +228,7 @@
return res;
}
- mOutputSurface = consumers[0];
+ mOutputSurface = consumers[0].mSurface;
res = registerCompositeStreamListener(mMainImageStreamId);
if (res != OK) {
ALOGE("%s: Failed to register HAL main image stream: %s (%d)", __FUNCTION__,
diff --git a/services/camera/libcameraservice/api2/HeicCompositeStream.h b/services/camera/libcameraservice/api2/HeicCompositeStream.h
index ba10e05..fad968a 100644
--- a/services/camera/libcameraservice/api2/HeicCompositeStream.h
+++ b/services/camera/libcameraservice/api2/HeicCompositeStream.h
@@ -43,7 +43,7 @@
static bool isHeicCompositeStream(const sp<Surface> &surface);
static bool isHeicCompositeStreamInfo(const OutputStreamInfo& streamInfo);
- status_t createInternalStreams(const std::vector<sp<Surface>>& consumers,
+ status_t createInternalStreams(const std::vector<SurfaceHolder>& consumers,
bool hasDeferredConsumer, uint32_t width, uint32_t height, int format,
camera_stream_rotation_t rotation, int *id, const std::string& physicalCameraId,
const std::unordered_set<int32_t> &sensorPixelModesUsed,
diff --git a/services/camera/libcameraservice/api2/JpegRCompositeStream.cpp b/services/camera/libcameraservice/api2/JpegRCompositeStream.cpp
index c5bd7a9..e0d7604 100644
--- a/services/camera/libcameraservice/api2/JpegRCompositeStream.cpp
+++ b/services/camera/libcameraservice/api2/JpegRCompositeStream.cpp
@@ -557,7 +557,7 @@
}
-status_t JpegRCompositeStream::createInternalStreams(const std::vector<sp<Surface>>& consumers,
+status_t JpegRCompositeStream::createInternalStreams(const std::vector<SurfaceHolder>& consumers,
bool /*hasDeferredConsumer*/, uint32_t width, uint32_t height, int format,
camera_stream_rotation_t rotation, int *id, const std::string& physicalCameraId,
const std::unordered_set<int32_t> &sensorPixelModesUsed,
@@ -600,7 +600,7 @@
if (ret == OK) {
mP010StreamId = *id;
mP010SurfaceId = (*surfaceIds)[0];
- mOutputSurface = consumers[0];
+ mOutputSurface = consumers[0].mSurface;
} else {
return ret;
}
diff --git a/services/camera/libcameraservice/api2/JpegRCompositeStream.h b/services/camera/libcameraservice/api2/JpegRCompositeStream.h
index d3ab19c..efd31da 100644
--- a/services/camera/libcameraservice/api2/JpegRCompositeStream.h
+++ b/services/camera/libcameraservice/api2/JpegRCompositeStream.h
@@ -46,7 +46,7 @@
static bool isJpegRCompositeStreamInfo(const OutputStreamInfo& streamInfo);
// CompositeStream overrides
- status_t createInternalStreams(const std::vector<sp<Surface>>& consumers,
+ status_t createInternalStreams(const std::vector<SurfaceHolder>& consumers,
bool hasDeferredConsumer, uint32_t width, uint32_t height, int format,
camera_stream_rotation_t rotation, int *id, const std::string& physicalCameraId,
const std::unordered_set<int32_t> &sensorPixelModesUsed,
diff --git a/services/camera/libcameraservice/common/Camera2ClientBase.cpp b/services/camera/libcameraservice/common/Camera2ClientBase.cpp
index 18069fe..f6b1e80 100644
--- a/services/camera/libcameraservice/common/Camera2ClientBase.cpp
+++ b/services/camera/libcameraservice/common/Camera2ClientBase.cpp
@@ -50,34 +50,24 @@
template <typename TClientBase>
Camera2ClientBase<TClientBase>::Camera2ClientBase(
- const sp<CameraService>& cameraService,
- const sp<TCamCallbacks>& remoteCallback,
+ const sp<CameraService>& cameraService, const sp<TCamCallbacks>& remoteCallback,
std::shared_ptr<CameraServiceProxyWrapper> cameraServiceProxyWrapper,
std::shared_ptr<AttributionAndPermissionUtils> attributionAndPermissionUtils,
- const std::string& clientPackageName,
- bool systemNativeClient,
- const std::optional<std::string>& clientFeatureId,
- const std::string& cameraId,
- int api1CameraId,
- int cameraFacing,
- int sensorOrientation,
- int clientPid,
- uid_t clientUid,
- int servicePid,
- bool overrideForPerfClass,
- int rotationOverride,
- bool legacyClient):
- TClientBase(cameraService, remoteCallback, attributionAndPermissionUtils, clientPackageName,
- systemNativeClient, clientFeatureId, cameraId, api1CameraId, cameraFacing,
- sensorOrientation, clientPid, clientUid, servicePid, rotationOverride),
- mSharedCameraCallbacks(remoteCallback),
- mCameraServiceProxyWrapper(cameraServiceProxyWrapper),
- mDeviceActive(false), mApi1CameraId(api1CameraId)
-{
+ const AttributionSourceState& clientAttribution, int callingPid, bool systemNativeClient,
+ const std::string& cameraId, int api1CameraId, int cameraFacing, int sensorOrientation,
+ int servicePid, bool overrideForPerfClass, int rotationOverride, bool legacyClient)
+ : TClientBase(cameraService, remoteCallback, attributionAndPermissionUtils, clientAttribution,
+ callingPid, systemNativeClient, cameraId, api1CameraId, cameraFacing,
+ sensorOrientation, servicePid, rotationOverride),
+ mSharedCameraCallbacks(remoteCallback),
+ mCameraServiceProxyWrapper(cameraServiceProxyWrapper),
+ mDeviceActive(false),
+ mApi1CameraId(api1CameraId) {
ALOGI("Camera %s: Opened. Client: %s (PID %d, UID %d)", cameraId.c_str(),
- clientPackageName.c_str(), clientPid, clientUid);
+ TClientBase::getPackageName().c_str(), TClientBase::mCallingPid,
+ TClientBase::getClientUid());
- mInitialClientPid = clientPid;
+ mInitialClientPid = TClientBase::mCallingPid;
mOverrideForPerfClass = overrideForPerfClass;
mLegacyClient = legacyClient;
}
@@ -87,10 +77,10 @@
const {
int callingPid = TClientBase::getCallingPid();
- if (callingPid == TClientBase::mClientPid) return NO_ERROR;
+ if (callingPid == TClientBase::mCallingPid) return NO_ERROR;
ALOGE("%s: attempt to use a locked camera from a different process"
- " (old pid %d, new pid %d)", checkLocation, TClientBase::mClientPid, callingPid);
+ " (old pid %d, new pid %d)", checkLocation, TClientBase::mCallingPid, callingPid);
return PERMISSION_DENIED;
}
@@ -141,10 +131,10 @@
return NO_INIT;
}
- // Verify ops permissions
- res = TClientBase::startCameraOps();
+ // Notify camera opening (check op if check_full_attribution_source_chain flag is off).
+ res = TClientBase::notifyCameraOpening();
if (res != OK) {
- TClientBase::finishCameraOps();
+ TClientBase::notifyCameraClosing();
return res;
}
@@ -152,7 +142,7 @@
if (res != OK) {
ALOGE("%s: Camera %s: unable to initialize device: %s (%d)",
__FUNCTION__, TClientBase::mCameraIdStr.c_str(), strerror(-res), res);
- TClientBase::finishCameraOps();
+ TClientBase::notifyCameraClosing();
return res;
}
@@ -176,9 +166,8 @@
disconnect();
ALOGI("%s: Client object's dtor for Camera Id %s completed. Client was: %s (PID %d, UID %u)",
- __FUNCTION__, TClientBase::mCameraIdStr.c_str(),
- TClientBase::mClientPackageName.c_str(),
- mInitialClientPid, TClientBase::mClientUid);
+ __FUNCTION__, TClientBase::mCameraIdStr.c_str(), TClientBase::getPackageName().c_str(),
+ mInitialClientPid, TClientBase::getClientUid());
}
template <typename TClientBase>
@@ -189,7 +178,7 @@
TClientBase::mCameraIdStr.c_str(),
(TClientBase::getRemoteCallback() != NULL ?
(void *)IInterface::asBinder(TClientBase::getRemoteCallback()).get() : NULL),
- TClientBase::mClientPid);
+ TClientBase::mCallingPid);
result += " State: ";
write(fd, result.c_str(), result.size());
@@ -274,7 +263,7 @@
binder::Status res = binder::Status::ok();
// Allow both client and the media server to disconnect at all times
int callingPid = TClientBase::getCallingPid();
- if (callingPid != TClientBase::mClientPid &&
+ if (callingPid != TClientBase::mCallingPid &&
callingPid != TClientBase::mServicePid) return res;
ALOGD("Camera %s: Shutting down", TClientBase::mCameraIdStr.c_str());
@@ -311,19 +300,19 @@
ALOGV("%s: E", __FUNCTION__);
Mutex::Autolock icl(mBinderSerializationLock);
- if (TClientBase::mClientPid != 0 &&
- TClientBase::getCallingPid() != TClientBase::mClientPid) {
+ if (TClientBase::mCallingPid != 0 &&
+ TClientBase::getCallingPid() != TClientBase::mCallingPid) {
ALOGE("%s: Camera %s: Connection attempt from pid %d; "
"current locked to pid %d",
__FUNCTION__,
TClientBase::mCameraIdStr.c_str(),
TClientBase::getCallingPid(),
- TClientBase::mClientPid);
+ TClientBase::mCallingPid);
return BAD_VALUE;
}
- TClientBase::mClientPid = TClientBase::getCallingPid();
+ TClientBase::mCallingPid = TClientBase::getCallingPid();
TClientBase::mRemoteCallback = client;
mSharedCameraCallbacks = client;
diff --git a/services/camera/libcameraservice/common/Camera2ClientBase.h b/services/camera/libcameraservice/common/Camera2ClientBase.h
index c9d5735..e231f1f 100644
--- a/services/camera/libcameraservice/common/Camera2ClientBase.h
+++ b/services/camera/libcameraservice/common/Camera2ClientBase.h
@@ -53,19 +53,10 @@
const sp<TCamCallbacks>& remoteCallback,
std::shared_ptr<CameraServiceProxyWrapper> cameraServiceProxyWrapper,
std::shared_ptr<AttributionAndPermissionUtils> attributionAndPermissionUtils,
- const std::string& clientPackageName,
- bool systemNativeClient,
- const std::optional<std::string>& clientFeatureId,
- const std::string& cameraId,
- int api1CameraId,
- int cameraFacing,
- int sensorOrientation,
- int clientPid,
- uid_t clientUid,
- int servicePid,
- bool overrideForPerfClass,
- int rotationOverride,
- bool legacyClient = false);
+ const AttributionSourceState& clientAttribution, int callingPid,
+ bool systemNativeClient, const std::string& cameraId, int api1CameraId,
+ int cameraFacing, int sensorOrientation, int servicePid,
+ bool overrideForPerfClass, int rotationOverride, bool legacyClient = false);
virtual ~Camera2ClientBase();
virtual status_t initialize(sp<CameraProviderManager> manager,
diff --git a/services/camera/libcameraservice/common/CameraDeviceBase.h b/services/camera/libcameraservice/common/CameraDeviceBase.h
index 9c8f5ad..f5e960b 100644
--- a/services/camera/libcameraservice/common/CameraDeviceBase.h
+++ b/services/camera/libcameraservice/common/CameraDeviceBase.h
@@ -68,6 +68,7 @@
using camera3::camera_request_template_t;
using camera3::camera_stream_configuration_mode_t;
using camera3::camera_stream_rotation_t;
+using camera3::SurfaceHolder;
class CameraProviderManager;
@@ -200,7 +201,7 @@
* For HAL_PIXEL_FORMAT_BLOB formats, the width and height should be the
* logical dimensions of the buffer, not the number of bytes.
*/
- virtual status_t createStream(const std::vector<sp<Surface>>& consumers,
+ virtual status_t createStream(const std::vector<SurfaceHolder>& consumers,
bool hasDeferredConsumer, uint32_t width, uint32_t height, int format,
android_dataspace dataSpace, camera_stream_rotation_t rotation, int *id,
const std::string& physicalCameraId,
@@ -212,7 +213,6 @@
int64_t dynamicProfile = ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD,
int64_t streamUseCase = ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_DEFAULT,
int timestampBase = OutputConfiguration::TIMESTAMP_BASE_DEFAULT,
- int mirrorMode = OutputConfiguration::MIRROR_MODE_AUTO,
int32_t colorSpace = ANDROID_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP_UNSPECIFIED,
bool useReadoutTimestamp = false)
= 0;
@@ -404,12 +404,12 @@
* Set the deferred consumer surface and finish the rest of the stream configuration.
*/
virtual status_t setConsumerSurfaces(int streamId,
- const std::vector<sp<Surface>>& consumers, std::vector<int> *surfaceIds /*out*/) = 0;
+ const std::vector<SurfaceHolder>& consumers, std::vector<int> *surfaceIds /*out*/) = 0;
/**
* Update a given stream.
*/
- virtual status_t updateStream(int streamId, const std::vector<sp<Surface>> &newSurfaces,
+ virtual status_t updateStream(int streamId, const std::vector<SurfaceHolder> &newSurfaces,
const std::vector<android::camera3::OutputStreamInfo> &outputInfo,
const std::vector<size_t> &removedSurfaceIds,
KeyedVector<sp<Surface>, size_t> *outputMap/*out*/) = 0;
diff --git a/services/camera/libcameraservice/common/CameraProviderManager.cpp b/services/camera/libcameraservice/common/CameraProviderManager.cpp
index a03d199..6394ec1 100644
--- a/services/camera/libcameraservice/common/CameraProviderManager.cpp
+++ b/services/camera/libcameraservice/common/CameraProviderManager.cpp
@@ -1833,6 +1833,67 @@
return res;
}
+status_t CameraProviderManager::ProviderInfo::DeviceInfo3::addColorCorrectionAvailableModesTag(
+ CameraMetadata& c) {
+ status_t res = OK;
+
+ // The COLOR_CORRECTION_AVAILABLE_MODES key advertises the supported
+ // color correction modes. Previously, if color correction was supported
+ // (COLOR_CORRECTION_MODE was listed as an available request key), all
+ // existing options, TRANSFORM_MATRIX, FAST, and HIGH_QUALITY, were assumed
+ // to be supported. However, a new optional mode, CCT, has been introduced.
+ // To indicate whether CCT is supported, the camera device must now
+ // explicitly list all available modes using the
+ // COLOR_CORRECTION_AVAILABLE_MODES key. If the camera device doesn't set
+ // COLOR_CORRECTION_AVAILABLE_MODES, this method falls back to checking for
+ // the COLOR_CORRECTION_MODE request key and, if present, adds the three
+ // mandatory modes: TRANSFORM_MATRIX, FAST, and HIGH_QUALITY.
+ auto entry = c.find(ANDROID_COLOR_CORRECTION_AVAILABLE_MODES);
+ if (entry.count != 0) {
+ return res;
+ }
+
+ auto reqKeys = c.find(ANDROID_REQUEST_AVAILABLE_REQUEST_KEYS);
+ if (reqKeys.count == 0) {
+ ALOGE("%s: No supported camera request keys!", __FUNCTION__);
+ return BAD_VALUE;
+ }
+
+ bool colorCorrectionModeAvailable = false;
+ for (size_t i = 0; i < reqKeys.count; i++) {
+ if (reqKeys.data.i32[i] == ANDROID_COLOR_CORRECTION_MODE) {
+ colorCorrectionModeAvailable = true;
+ break;
+ }
+ }
+
+ if (!colorCorrectionModeAvailable) {
+ return res;
+ }
+
+ std::vector<int32_t> supportedChTags;
+ auto chTags = c.find(ANDROID_REQUEST_AVAILABLE_CHARACTERISTICS_KEYS);
+ if (chTags.count == 0) {
+ ALOGE("%s: No supported camera characteristics keys!", __FUNCTION__);
+ return BAD_VALUE;
+ }
+
+ std::vector<uint8_t> colorCorrectionAvailableModes = {
+ ANDROID_COLOR_CORRECTION_MODE_TRANSFORM_MATRIX,
+ ANDROID_COLOR_CORRECTION_MODE_FAST,
+ ANDROID_COLOR_CORRECTION_MODE_HIGH_QUALITY };
+ supportedChTags.reserve(chTags.count + 1);
+ supportedChTags.insert(supportedChTags.end(), chTags.data.i32,
+ chTags.data.i32 + chTags.count);
+ supportedChTags.push_back(ANDROID_COLOR_CORRECTION_AVAILABLE_MODES);
+ c.update(ANDROID_COLOR_CORRECTION_AVAILABLE_MODES,
+ colorCorrectionAvailableModes.data(), colorCorrectionAvailableModes.size());
+ c.update(ANDROID_REQUEST_AVAILABLE_CHARACTERISTICS_KEYS, supportedChTags.data(),
+ supportedChTags.size());
+
+ return res;
+}
+
status_t CameraProviderManager::ProviderInfo::DeviceInfo3::addSessionConfigQueryVersionTag() {
sp<ProviderInfo> parentProvider = mParentProvider.promote();
if (parentProvider == nullptr) {
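For illustration, a minimal sketch of how a consumer of these characteristics could detect CCT support once addColorCorrectionAvailableModesTag() has published the key; the helper name supportsCctColorCorrection and the ANDROID_COLOR_CORRECTION_MODE_CCT constant are assumptions of the sketch, while the availability key and the find() call match the code above.

// Sketch only: after the provider publishes COLOR_CORRECTION_AVAILABLE_MODES, CCT
// support is detected by scanning the key rather than assuming the legacy modes.
// The CCT enum name below is an assumption of this sketch.
static bool supportsCctColorCorrection(const CameraMetadata& chars) {
    camera_metadata_ro_entry entry = chars.find(ANDROID_COLOR_CORRECTION_AVAILABLE_MODES);
    for (size_t i = 0; i < entry.count; i++) {
        if (entry.data.u8[i] == ANDROID_COLOR_CORRECTION_MODE_CCT) {
            return true;
        }
    }
    return false;
}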
diff --git a/services/camera/libcameraservice/common/CameraProviderManager.h b/services/camera/libcameraservice/common/CameraProviderManager.h
index b686a58..f0db8bc 100644
--- a/services/camera/libcameraservice/common/CameraProviderManager.h
+++ b/services/camera/libcameraservice/common/CameraProviderManager.h
@@ -761,6 +761,7 @@
status_t addAutoframingTags();
status_t addPreCorrectionActiveArraySize();
status_t addReadoutTimestampTag(bool readoutTimestampSupported = true);
+ status_t addColorCorrectionAvailableModesTag(CameraMetadata& ch);
status_t addSessionConfigQueryVersionTag();
static void getSupportedSizes(const CameraMetadata& ch, uint32_t tag,
diff --git a/services/camera/libcameraservice/common/aidl/AidlProviderInfo.cpp b/services/camera/libcameraservice/common/aidl/AidlProviderInfo.cpp
index 4bfe11d..e1efd90 100644
--- a/services/camera/libcameraservice/common/aidl/AidlProviderInfo.cpp
+++ b/services/camera/libcameraservice/common/aidl/AidlProviderInfo.cpp
@@ -596,6 +596,14 @@
__FUNCTION__, strerror(-res), res);
}
+ if (flags::color_temperature()) {
+ res = addColorCorrectionAvailableModesTag(mCameraCharacteristics);
+ if (OK != res) {
+ ALOGE("%s: Unable to add COLOR_CORRECTION_AVAILABLE_MODES tag: %s (%d)",
+ __FUNCTION__, strerror(-res), res);
+ }
+ }
+
camera_metadata_entry flashAvailable =
mCameraCharacteristics.find(ANDROID_FLASH_INFO_AVAILABLE);
if (flashAvailable.count == 1 &&
@@ -683,6 +691,14 @@
__FUNCTION__, strerror(-res), res);
return;
}
+
+ if (flags::color_temperature()) {
+ res = addColorCorrectionAvailableModesTag(mPhysicalCameraCharacteristics[id]);
+ if (OK != res) {
+ ALOGE("%s: Unable to add COLOR_CORRECTION_AVAILABLE_MODES tag: %s (%d)",
+ __FUNCTION__, strerror(-res), res);
+ }
+ }
}
}
diff --git a/services/camera/libcameraservice/common/hidl/HidlProviderInfo.cpp b/services/camera/libcameraservice/common/hidl/HidlProviderInfo.cpp
index 6cedb04..edaee6e 100644
--- a/services/camera/libcameraservice/common/hidl/HidlProviderInfo.cpp
+++ b/services/camera/libcameraservice/common/hidl/HidlProviderInfo.cpp
@@ -675,6 +675,13 @@
ALOGE("%s: Unable to add sensorReadoutTimestamp tag: %s (%d)",
__FUNCTION__, strerror(-res), res);
}
+ if (flags::color_temperature()) {
+ res = addColorCorrectionAvailableModesTag(mCameraCharacteristics);
+ if (OK != res) {
+ ALOGE("%s: Unable to add COLOR_CORRECTION_AVAILABLE_MODES tag: %s (%d)",
+ __FUNCTION__, strerror(-res), res);
+ }
+ }
camera_metadata_entry flashAvailable =
mCameraCharacteristics.find(ANDROID_FLASH_INFO_AVAILABLE);
@@ -785,6 +792,14 @@
__FUNCTION__, strerror(-res), res);
return;
}
+
+ if (flags::color_temperature()) {
+ res = addColorCorrectionAvailableModesTag(mPhysicalCameraCharacteristics[id]);
+ if (OK != res) {
+ ALOGE("%s: Unable to add COLOR_CORRECTION_AVAILABLE_MODES tag: %s (%d)",
+ __FUNCTION__, strerror(-res), res);
+ }
+ }
}
}
}
diff --git a/services/camera/libcameraservice/device3/Camera3Device.cpp b/services/camera/libcameraservice/device3/Camera3Device.cpp
index 5721745..eb8cb9d 100644
--- a/services/camera/libcameraservice/device3/Camera3Device.cpp
+++ b/services/camera/libcameraservice/device3/Camera3Device.cpp
@@ -50,6 +50,7 @@
#include <utils/Trace.h>
#include <utils/Timers.h>
#include <cutils/properties.h>
+#include <camera/CameraUtils.h>
#include <camera/StringUtils.h>
#include <android-base/properties.h>
@@ -1046,13 +1047,13 @@
return BAD_VALUE;
}
- std::vector<sp<Surface>> consumers;
- consumers.push_back(consumer);
+ std::vector<SurfaceHolder> consumers;
+ consumers.push_back(SurfaceHolder{consumer, mirrorMode});
return createStream(consumers, /*hasDeferredConsumer*/ false, width, height,
format, dataSpace, rotation, id, physicalCameraId, sensorPixelModesUsed, surfaceIds,
streamSetId, isShared, isMultiResolution, consumerUsage, dynamicRangeProfile,
- streamUseCase, timestampBase, mirrorMode, colorSpace, useReadoutTimestamp);
+ streamUseCase, timestampBase, colorSpace, useReadoutTimestamp);
}
static bool isRawFormat(int format) {
@@ -1067,14 +1068,14 @@
}
}
-status_t Camera3Device::createStream(const std::vector<sp<Surface>>& consumers,
+status_t Camera3Device::createStream(const std::vector<SurfaceHolder>& consumers,
bool hasDeferredConsumer, uint32_t width, uint32_t height, int format,
android_dataspace dataSpace, camera_stream_rotation_t rotation, int *id,
const std::string& physicalCameraId,
const std::unordered_set<int32_t> &sensorPixelModesUsed,
std::vector<int> *surfaceIds, int streamSetId, bool isShared, bool isMultiResolution,
uint64_t consumerUsage, int64_t dynamicRangeProfile, int64_t streamUseCase,
- int timestampBase, int mirrorMode, int32_t colorSpace, bool useReadoutTimestamp) {
+ int timestampBase, int32_t colorSpace, bool useReadoutTimestamp) {
ATRACE_CALL();
Mutex::Autolock il(mInterfaceLock);
@@ -1083,10 +1084,10 @@
ALOGV("Camera %s: Creating new stream %d: %d x %d, format %d, dataspace %d rotation %d"
" consumer usage %" PRIu64 ", isShared %d, physicalCameraId %s, isMultiResolution %d"
" dynamicRangeProfile 0x%" PRIx64 ", streamUseCase %" PRId64 ", timestampBase %d,"
- " mirrorMode %d, colorSpace %d, useReadoutTimestamp %d",
+ " colorSpace %d, useReadoutTimestamp %d",
mId.c_str(), mNextStreamId, width, height, format, dataSpace, rotation,
consumerUsage, isShared, physicalCameraId.c_str(), isMultiResolution,
- dynamicRangeProfile, streamUseCase, timestampBase, mirrorMode, colorSpace,
+ dynamicRangeProfile, streamUseCase, timestampBase, colorSpace,
useReadoutTimestamp);
status_t res;
@@ -1155,11 +1156,11 @@
return BAD_VALUE;
}
}
- newStream = new Camera3OutputStream(mNextStreamId, consumers[0],
+ newStream = new Camera3OutputStream(mNextStreamId, consumers[0].mSurface,
width, height, blobBufferSize, format, dataSpace, rotation,
mTimestampOffset, physicalCameraId, sensorPixelModesUsed, transport, streamSetId,
isMultiResolution, dynamicRangeProfile, streamUseCase, mDeviceTimeBaseIsRealtime,
- timestampBase, mirrorMode, colorSpace, useReadoutTimestamp);
+ timestampBase, consumers[0].mMirrorMode, colorSpace, useReadoutTimestamp);
} else if (format == HAL_PIXEL_FORMAT_RAW_OPAQUE) {
bool maxResolution =
sensorPixelModesUsed.find(ANDROID_SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION) !=
@@ -1170,34 +1171,34 @@
SET_ERR_L("Invalid RAW opaque buffer size %zd", rawOpaqueBufferSize);
return BAD_VALUE;
}
- newStream = new Camera3OutputStream(mNextStreamId, consumers[0],
+ newStream = new Camera3OutputStream(mNextStreamId, consumers[0].mSurface,
width, height, rawOpaqueBufferSize, format, dataSpace, rotation,
mTimestampOffset, physicalCameraId, sensorPixelModesUsed, transport, streamSetId,
isMultiResolution, dynamicRangeProfile, streamUseCase, mDeviceTimeBaseIsRealtime,
- timestampBase, mirrorMode, colorSpace, useReadoutTimestamp);
+ timestampBase, consumers[0].mMirrorMode, colorSpace, useReadoutTimestamp);
} else if (isShared) {
newStream = new Camera3SharedOutputStream(mNextStreamId, consumers,
width, height, format, consumerUsage, dataSpace, rotation,
mTimestampOffset, physicalCameraId, sensorPixelModesUsed, transport, streamSetId,
mUseHalBufManager, dynamicRangeProfile, streamUseCase, mDeviceTimeBaseIsRealtime,
- timestampBase, mirrorMode, colorSpace, useReadoutTimestamp);
+ timestampBase, colorSpace, useReadoutTimestamp);
} else if (consumers.size() == 0 && hasDeferredConsumer) {
newStream = new Camera3OutputStream(mNextStreamId,
width, height, format, consumerUsage, dataSpace, rotation,
mTimestampOffset, physicalCameraId, sensorPixelModesUsed, transport, streamSetId,
isMultiResolution, dynamicRangeProfile, streamUseCase, mDeviceTimeBaseIsRealtime,
- timestampBase, mirrorMode, colorSpace, useReadoutTimestamp);
+ timestampBase, colorSpace, useReadoutTimestamp);
} else {
- newStream = new Camera3OutputStream(mNextStreamId, consumers[0],
+ newStream = new Camera3OutputStream(mNextStreamId, consumers[0].mSurface,
width, height, format, dataSpace, rotation,
mTimestampOffset, physicalCameraId, sensorPixelModesUsed, transport, streamSetId,
isMultiResolution, dynamicRangeProfile, streamUseCase, mDeviceTimeBaseIsRealtime,
- timestampBase, mirrorMode, colorSpace, useReadoutTimestamp);
+ timestampBase, consumers[0].mMirrorMode, colorSpace, useReadoutTimestamp);
}
size_t consumerCount = consumers.size();
for (size_t i = 0; i < consumerCount; i++) {
- int id = newStream->getSurfaceId(consumers[i]);
+ int id = newStream->getSurfaceId(consumers[i].mSurface);
if (id < 0) {
SET_ERR_L("Invalid surface id");
return BAD_VALUE;
@@ -1205,6 +1206,11 @@
if (surfaceIds != nullptr) {
surfaceIds->push_back(id);
}
+
+ res = deriveAndSetTransformLocked(*newStream, consumers[i].mMirrorMode, id);
+ if (res < 0) {
+ return res;
+ }
}
newStream->setStatusTracker(mStatusTracker);
@@ -2038,7 +2044,7 @@
}
status_t Camera3Device::setConsumerSurfaces(int streamId,
- const std::vector<sp<Surface>>& consumers, std::vector<int> *surfaceIds) {
+ const std::vector<SurfaceHolder>& consumers, std::vector<int> *surfaceIds) {
ATRACE_CALL();
ALOGV("%s: Camera %s: set consumer surface for stream %d",
__FUNCTION__, mId.c_str(), streamId);
@@ -2070,12 +2076,17 @@
}
for (auto &consumer : consumers) {
- int id = stream->getSurfaceId(consumer);
+ int id = stream->getSurfaceId(consumer.mSurface);
if (id < 0) {
CLOGE("Invalid surface id!");
return BAD_VALUE;
}
surfaceIds->push_back(id);
+
+ res = deriveAndSetTransformLocked(*stream, consumer.mMirrorMode, id);
+ if (res != OK) {
+ return res;
+ }
}
if (isDeferred) {
@@ -2101,7 +2112,7 @@
return OK;
}
-status_t Camera3Device::updateStream(int streamId, const std::vector<sp<Surface>> &newSurfaces,
+status_t Camera3Device::updateStream(int streamId, const std::vector<SurfaceHolder> &newSurfaces,
const std::vector<OutputStreamInfo> &outputInfo,
const std::vector<size_t> &removedSurfaceIds, KeyedVector<sp<Surface>, size_t> *outputMap) {
Mutex::Autolock il(mInterfaceLock);
@@ -2131,6 +2142,14 @@
return res;
}
+ for (size_t i = 0; i < outputMap->size(); i++) {
+ res = deriveAndSetTransformLocked(
+ *stream, newSurfaces[i].mMirrorMode, outputMap->valueAt(i));
+ if (res != OK) {
+ return res;
+ }
+ }
+
return res;
}
@@ -5786,4 +5805,15 @@
}
}
+status_t Camera3Device::deriveAndSetTransformLocked(
+ Camera3OutputStreamInterface& stream, int mirrorMode, int surfaceId) {
+ int transform = -1;
+ int res = CameraUtils::getRotationTransform(mDeviceInfo, mirrorMode, &transform);
+ if (res != OK) {
+ return res;
+ }
+ stream.setTransform(transform, false /*mayChangeMirror*/, surfaceId);
+ return OK;
+}
+
}; // namespace android
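For illustration, a minimal sketch of the new createStream() calling convention, where the mirror mode now travels per surface inside SurfaceHolder instead of as a stream-wide argument; previewSurface, recordingSurface, device, width and height are placeholders, and the trailing parameters are assumed to keep their defaults.

// Sketch only: two consumers on one stream, each with its own mirroring behavior.
std::vector<SurfaceHolder> consumers;
consumers.push_back(SurfaceHolder{previewSurface, OutputConfiguration::MIRROR_MODE_AUTO});
consumers.push_back(SurfaceHolder{recordingSurface, OutputConfiguration::MIRROR_MODE_NONE});

int streamId = -1;
std::vector<int> surfaceIds;
status_t res = device->createStream(consumers, /*hasDeferredConsumer*/ false,
        width, height, HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED, HAL_DATASPACE_UNKNOWN,
        CAMERA_STREAM_ROTATION_0, &streamId, /*physicalCameraId*/ std::string(),
        /*sensorPixelModesUsed*/ {}, &surfaceIds);
// createStream() then calls deriveAndSetTransformLocked() for every returned surface id,
// so each output receives a transform consistent with its own mirror mode.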
diff --git a/services/camera/libcameraservice/device3/Camera3Device.h b/services/camera/libcameraservice/device3/Camera3Device.h
index 3c45c1a..397ec5c 100644
--- a/services/camera/libcameraservice/device3/Camera3Device.h
+++ b/services/camera/libcameraservice/device3/Camera3Device.h
@@ -63,6 +63,7 @@
using android::camera3::camera_stream_configuration_mode_t;
using android::camera3::CAMERA_TEMPLATE_COUNT;
using android::camera3::OutputStreamInfo;
+using android::camera3::SurfaceHolder;
namespace android {
@@ -168,7 +169,7 @@
bool useReadoutTimestamp = false)
override;
- status_t createStream(const std::vector<sp<Surface>>& consumers,
+ status_t createStream(const std::vector<SurfaceHolder>& consumers,
bool hasDeferredConsumer, uint32_t width, uint32_t height, int format,
android_dataspace dataSpace, camera_stream_rotation_t rotation, int *id,
const std::string& physicalCameraId,
@@ -181,7 +182,6 @@
ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD,
int64_t streamUseCase = ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_DEFAULT,
int timestampBase = OutputConfiguration::TIMESTAMP_BASE_DEFAULT,
- int mirrorMode = OutputConfiguration::MIRROR_MODE_AUTO,
int32_t colorSpace = ANDROID_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP_UNSPECIFIED,
bool useReadoutTimestamp = false)
override;
@@ -247,13 +247,13 @@
* consumer configuration.
*/
status_t setConsumerSurfaces(
- int streamId, const std::vector<sp<Surface>>& consumers,
+ int streamId, const std::vector<SurfaceHolder>& consumers,
std::vector<int> *surfaceIds /*out*/) override;
/**
* Update a given stream.
*/
- status_t updateStream(int streamId, const std::vector<sp<Surface>> &newSurfaces,
+ status_t updateStream(int streamId, const std::vector<SurfaceHolder> &newSurfaces,
const std::vector<OutputStreamInfo> &outputInfo,
const std::vector<size_t> &removedSurfaceIds,
KeyedVector<sp<Surface>, size_t> *outputMap/*out*/);
@@ -1644,6 +1644,8 @@
sp<Camera3DeviceInjectionMethods> mInjectionMethods;
void overrideStreamUseCaseLocked();
+ status_t deriveAndSetTransformLocked(camera3::Camera3OutputStreamInterface& stream,
+ int mirrorMode, int surfaceId);
}; // class Camera3Device
diff --git a/services/camera/libcameraservice/device3/Camera3FakeStream.cpp b/services/camera/libcameraservice/device3/Camera3FakeStream.cpp
index 55467c3..79b88f8 100644
--- a/services/camera/libcameraservice/device3/Camera3FakeStream.cpp
+++ b/services/camera/libcameraservice/device3/Camera3FakeStream.cpp
@@ -76,7 +76,7 @@
Camera3IOStreamBase::dump(fd, args);
}
-status_t Camera3FakeStream::setTransform(int, bool) {
+status_t Camera3FakeStream::setTransform(int, bool, int) {
ATRACE_CALL();
// Do nothing
return OK;
@@ -120,13 +120,13 @@
return FAKE_ID;
}
-status_t Camera3FakeStream::setConsumers(const std::vector<sp<Surface>>& /*consumers*/) {
+status_t Camera3FakeStream::setConsumers(const std::vector<SurfaceHolder>& /*consumers*/) {
ALOGE("%s: Stream %d: Fake stream doesn't support set consumer surface!",
__FUNCTION__, mId);
return INVALID_OPERATION;
}
-status_t Camera3FakeStream::updateStream(const std::vector<sp<Surface>> &/*outputSurfaces*/,
+status_t Camera3FakeStream::updateStream(const std::vector<SurfaceHolder> &/*outputSurfaces*/,
const std::vector<OutputStreamInfo> &/*outputInfo*/,
const std::vector<size_t> &/*removedSurfaceIds*/,
KeyedVector<sp<Surface>, size_t> * /*outputMap*/) {
diff --git a/services/camera/libcameraservice/device3/Camera3FakeStream.h b/services/camera/libcameraservice/device3/Camera3FakeStream.h
index 7addb90..9291bd0 100644
--- a/services/camera/libcameraservice/device3/Camera3FakeStream.h
+++ b/services/camera/libcameraservice/device3/Camera3FakeStream.h
@@ -52,7 +52,7 @@
virtual void dump(int fd, const Vector<String16> &args);
- status_t setTransform(int transform, bool mayChangeMirror);
+ status_t setTransform(int transform, bool mayChangeMirror, int surfaceId);
virtual status_t detachBuffer(sp<GraphicBuffer>* buffer, int* fenceFd);
@@ -80,7 +80,7 @@
/**
* Set the consumer surfaces to the output stream.
*/
- virtual status_t setConsumers(const std::vector<sp<Surface>>& consumers);
+ virtual status_t setConsumers(const std::vector<SurfaceHolder>& consumers);
/**
* Query the output surface id.
@@ -93,7 +93,7 @@
/**
* Update the stream output surfaces.
*/
- virtual status_t updateStream(const std::vector<sp<Surface>> &outputSurfaces,
+ virtual status_t updateStream(const std::vector<SurfaceHolder> &outputSurfaces,
const std::vector<OutputStreamInfo> &outputInfo,
const std::vector<size_t> &removedSurfaceIds,
KeyedVector<sp<Surface>, size_t> *outputMap/*out*/);
diff --git a/services/camera/libcameraservice/device3/Camera3OutputStream.cpp b/services/camera/libcameraservice/device3/Camera3OutputStream.cpp
index 83c8a38..dc663f3 100644
--- a/services/camera/libcameraservice/device3/Camera3OutputStream.cpp
+++ b/services/camera/libcameraservice/device3/Camera3OutputStream.cpp
@@ -136,7 +136,7 @@
const std::unordered_set<int32_t> &sensorPixelModesUsed, IPCTransport transport,
int setId, bool isMultiResolution, int64_t dynamicRangeProfile,
int64_t streamUseCase, bool deviceTimeBaseIsRealtime, int timestampBase,
- int mirrorMode, int32_t colorSpace, bool useReadoutTimestamp) :
+ int32_t colorSpace, bool useReadoutTimestamp) :
Camera3IOStreamBase(id, CAMERA_STREAM_OUTPUT, width, height,
/*maxSize*/0, format, dataSpace, rotation,
physicalCameraId, sensorPixelModesUsed, setId, isMultiResolution,
@@ -150,7 +150,7 @@
mUseReadoutTime(useReadoutTimestamp),
mConsumerUsage(consumerUsage),
mDropBuffers(false),
- mMirrorMode(mirrorMode),
+ mMirrorMode(OutputConfiguration::MIRROR_MODE_AUTO),
mDequeueBufferLatency(kDequeueLatencyBinSize),
mIPCTransport(transport) {
// Deferred consumer only support preview surface format now.
@@ -184,8 +184,7 @@
int setId, bool isMultiResolution,
int64_t dynamicRangeProfile, int64_t streamUseCase,
bool deviceTimeBaseIsRealtime, int timestampBase,
- int mirrorMode, int32_t colorSpace,
- bool useReadoutTimestamp) :
+ int32_t colorSpace, bool useReadoutTimestamp) :
Camera3IOStreamBase(id, type, width, height,
/*maxSize*/0,
format, dataSpace, rotation,
@@ -199,7 +198,7 @@
mUseReadoutTime(useReadoutTimestamp),
mConsumerUsage(consumerUsage),
mDropBuffers(false),
- mMirrorMode(mirrorMode),
+ mMirrorMode(OutputConfiguration::MIRROR_MODE_AUTO),
mDequeueBufferLatency(kDequeueLatencyBinSize),
mIPCTransport(transport) {
@@ -479,21 +478,23 @@
" DequeueBuffer latency histogram:");
}
-status_t Camera3OutputStream::setTransform(int transform, bool mayChangeMirror) {
+status_t Camera3OutputStream::setTransform(int transform, bool mayChangeMirror, int surfaceId) {
ATRACE_CALL();
Mutex::Autolock l(mLock);
+
if (mMirrorMode != OutputConfiguration::MIRROR_MODE_AUTO && mayChangeMirror) {
// If the mirroring mode is not AUTO, do not allow transform update
// which may change mirror.
return OK;
}
- return setTransformLocked(transform);
-}
-
-status_t Camera3OutputStream::setTransformLocked(int transform) {
status_t res = OK;
+ if (surfaceId != 0) {
+ ALOGE("%s: Invalid surfaceId %d", __FUNCTION__, surfaceId);
+ return BAD_VALUE;
+ }
+
if (transform == -1) return res;
if (mState == STATE_ERROR) {
@@ -525,6 +526,12 @@
return res;
}
+ if ((res = native_window_set_buffers_transform(mConsumer.get(), mTransform)) != OK) {
+ ALOGE("%s: Unable to configure stream transform to %x: %s (%d)",
+ __FUNCTION__, mTransform, strerror(-res), res);
+ return res;
+ }
+
// Set dequeueBuffer/attachBuffer timeout if the consumer is not hw composer or hw texture.
// We need skip these cases as timeout will disable the non-blocking (async) mode.
if (!(isConsumedByHWComposer() || isConsumedByHWTexture())) {
@@ -694,14 +701,6 @@
return res;
}
- res = native_window_set_buffers_transform(mConsumer.get(),
- mTransform);
- if (res != OK) {
- ALOGE("%s: Unable to configure stream transform to %x: %s (%d)",
- __FUNCTION__, mTransform, strerror(-res), res);
- return res;
- }
-
/**
* Camera3 Buffer manager is only supported by HAL3.3 onwards, as the older HALs requires
* buffers to be statically allocated for internal static buffer registration, while the
@@ -1069,7 +1068,7 @@
return OK;
}
-status_t Camera3OutputStream::updateStream(const std::vector<sp<Surface>> &/*outputSurfaces*/,
+status_t Camera3OutputStream::updateStream(const std::vector<SurfaceHolder> &/*outputSurfaces*/,
const std::vector<OutputStreamInfo> &/*outputInfo*/,
const std::vector<size_t> &/*removedSurfaceIds*/,
KeyedVector<sp<Surface>, size_t> * /*outputMapo*/) {
@@ -1206,14 +1205,14 @@
return mConsumer == nullptr;
}
-status_t Camera3OutputStream::setConsumers(const std::vector<sp<Surface>>& consumers) {
+status_t Camera3OutputStream::setConsumers(const std::vector<SurfaceHolder>& consumers) {
Mutex::Autolock l(mLock);
if (consumers.size() != 1) {
ALOGE("%s: it's illegal to set %zu consumer surfaces!",
__FUNCTION__, consumers.size());
return INVALID_OPERATION;
}
- if (consumers[0] == nullptr) {
+ if (consumers[0].mSurface == nullptr) {
ALOGE("%s: it's illegal to set null consumer surface!", __FUNCTION__);
return INVALID_OPERATION;
}
@@ -1223,7 +1222,8 @@
return INVALID_OPERATION;
}
- mConsumer = consumers[0];
+ mConsumer = consumers[0].mSurface;
+ mMirrorMode = consumers[0].mMirrorMode;
return OK;
}
diff --git a/services/camera/libcameraservice/device3/Camera3OutputStream.h b/services/camera/libcameraservice/device3/Camera3OutputStream.h
index f8b78c1..a547f82 100644
--- a/services/camera/libcameraservice/device3/Camera3OutputStream.h
+++ b/services/camera/libcameraservice/device3/Camera3OutputStream.h
@@ -134,7 +134,6 @@
int64_t streamUseCase = ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_DEFAULT,
bool deviceTimeBaseIsRealtime = false,
int timestampBase = OutputConfiguration::TIMESTAMP_BASE_DEFAULT,
- int mirrorMode = OutputConfiguration::MIRROR_MODE_AUTO,
int32_t colorSpace = ANDROID_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP_UNSPECIFIED,
bool useReadoutTimestamp = false);
@@ -150,7 +149,7 @@
* Set the transform on the output stream; one of the
* HAL_TRANSFORM_* / NATIVE_WINDOW_TRANSFORM_* constants.
*/
- status_t setTransform(int transform, bool mayChangeMirror);
+ virtual status_t setTransform(int transform, bool mayChangeMirror, int surfaceId = 0);
/**
* Return if this output stream is for video encoding.
@@ -179,7 +178,7 @@
/**
* Set the consumer surfaces to the output stream.
*/
- virtual status_t setConsumers(const std::vector<sp<Surface>>& consumers);
+ virtual status_t setConsumers(const std::vector<SurfaceHolder>& consumers);
class BufferProducerListener : public SurfaceListener {
public:
@@ -236,7 +235,7 @@
/**
* Update the stream output surfaces.
*/
- virtual status_t updateStream(const std::vector<sp<Surface>> &outputSurfaces,
+ virtual status_t updateStream(const std::vector<SurfaceHolder> &outputSurfaces,
const std::vector<OutputStreamInfo> &outputInfo,
const std::vector<size_t> &removedSurfaceIds,
KeyedVector<sp<Surface>, size_t> *outputMap/*out*/);
@@ -286,7 +285,6 @@
int64_t streamUseCase = ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_DEFAULT,
bool deviceTimeBaseIsRealtime = false,
int timestampBase = OutputConfiguration::TIMESTAMP_BASE_DEFAULT,
- int mirrorMode = OutputConfiguration::MIRROR_MODE_AUTO,
int32_t colorSpace = ANDROID_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP_UNSPECIFIED,
bool useReadoutTimestamp = false);
@@ -323,8 +321,6 @@
int mTransform;
- virtual status_t setTransformLocked(int transform);
-
bool mTraceFirstBuffer;
/**
@@ -383,7 +379,7 @@
std::vector<Surface::BatchBuffer> mBatchedBuffers;
// ---- End of mBatchLock protected scope ----
- const int mMirrorMode;
+ int mMirrorMode;
/**
* Internal Camera3Stream interface
diff --git a/services/camera/libcameraservice/device3/Camera3OutputStreamInterface.h b/services/camera/libcameraservice/device3/Camera3OutputStreamInterface.h
index 77edfbe..ff7ad56 100644
--- a/services/camera/libcameraservice/device3/Camera3OutputStreamInterface.h
+++ b/services/camera/libcameraservice/device3/Camera3OutputStreamInterface.h
@@ -34,7 +34,7 @@
* Set the transform on the output stream; one of the
* HAL_TRANSFORM_* / NATIVE_WINDOW_TRANSFORM_* constants.
*/
- virtual status_t setTransform(int transform, bool mayChangeMirror) = 0;
+ virtual status_t setTransform(int transform, bool mayChangeMirror, int surfaceId = 0) = 0;
/**
* Return if this output stream is for video encoding.
@@ -49,7 +49,7 @@
/**
* Set the consumer surfaces to the output stream.
*/
- virtual status_t setConsumers(const std::vector<sp<Surface>>& consumers) = 0;
+ virtual status_t setConsumers(const std::vector<SurfaceHolder>& consumers) = 0;
/**
* Detach an unused buffer from the stream.
@@ -81,7 +81,7 @@
/**
* Update the stream output surfaces.
*/
- virtual status_t updateStream(const std::vector<sp<Surface>> &outputSurfaces,
+ virtual status_t updateStream(const std::vector<SurfaceHolder> &outputSurfaces,
const std::vector<OutputStreamInfo> &outputInfo,
const std::vector<size_t> &removedSurfaceIds,
KeyedVector<sp<Surface>, size_t> *outputMap/*out*/) = 0;
diff --git a/services/camera/libcameraservice/device3/Camera3SharedOutputStream.cpp b/services/camera/libcameraservice/device3/Camera3SharedOutputStream.cpp
index 187bd93..b436d2e 100644
--- a/services/camera/libcameraservice/device3/Camera3SharedOutputStream.cpp
+++ b/services/camera/libcameraservice/device3/Camera3SharedOutputStream.cpp
@@ -18,6 +18,8 @@
#define ATRACE_TAG ATRACE_TAG_CAMERA
//#define LOG_NDEBUG 0
+#include <utils/Trace.h>
+
#include "Flags.h"
#include "Camera3SharedOutputStream.h"
@@ -29,7 +31,7 @@
const size_t Camera3SharedOutputStream::kMaxOutputs;
Camera3SharedOutputStream::Camera3SharedOutputStream(int id,
- const std::vector<sp<Surface>>& surfaces,
+ const std::vector<SurfaceHolder>& surfaces,
uint32_t width, uint32_t height, int format,
uint64_t consumerUsage, android_dataspace dataSpace,
camera_stream_rotation_t rotation,
@@ -37,12 +39,12 @@
const std::unordered_set<int32_t> &sensorPixelModesUsed, IPCTransport transport,
int setId, bool useHalBufManager, int64_t dynamicProfile,
int64_t streamUseCase, bool deviceTimeBaseIsRealtime, int timestampBase,
- int mirrorMode, int32_t colorSpace, bool useReadoutTimestamp) :
+ int32_t colorSpace, bool useReadoutTimestamp) :
Camera3OutputStream(id, CAMERA_STREAM_OUTPUT, width, height,
format, dataSpace, rotation, physicalCameraId, sensorPixelModesUsed,
transport, consumerUsage, timestampOffset, setId,
/*isMultiResolution*/false, dynamicProfile, streamUseCase,
- deviceTimeBaseIsRealtime, timestampBase, mirrorMode, colorSpace,
+ deviceTimeBaseIsRealtime, timestampBase, colorSpace,
useReadoutTimestamp),
mUseHalBufManager(useHalBufManager) {
size_t consumerCount = std::min(surfaces.size(), kMaxOutputs);
@@ -50,7 +52,7 @@
ALOGE("%s: Trying to add more consumers than the maximum ", __func__);
}
for (size_t i = 0; i < consumerCount; i++) {
- mSurfaceUniqueIds[i] = std::make_pair(surfaces[i], mNextUniqueSurfaceId++);
+ mSurfaceUniqueIds[i] = SurfaceHolderUniqueId{surfaces[i], mNextUniqueSurfaceId++};
}
}
@@ -72,8 +74,8 @@
std::unordered_map<size_t, sp<Surface>> initialSurfaces;
for (size_t i = 0; i < kMaxOutputs; i++) {
- if (mSurfaceUniqueIds[i].first != nullptr) {
- initialSurfaces.emplace(i, mSurfaceUniqueIds[i].first);
+ if (mSurfaceUniqueIds[i].mSurfaceHolder.mSurface != nullptr) {
+ initialSurfaces.emplace(i, mSurfaceUniqueIds[i].mSurfaceHolder.mSurface);
}
}
@@ -142,19 +144,19 @@
return true;
}
- return (mSurfaceUniqueIds[surface_id].first == nullptr);
+ return (mSurfaceUniqueIds[surface_id].mSurfaceHolder.mSurface == nullptr);
}
-status_t Camera3SharedOutputStream::setConsumers(const std::vector<sp<Surface>>& surfaces) {
+status_t Camera3SharedOutputStream::setConsumers(const std::vector<SurfaceHolder>& surfaceHolders) {
Mutex::Autolock l(mLock);
- if (surfaces.size() == 0) {
+ if (surfaceHolders.size() == 0) {
ALOGE("%s: it's illegal to set zero consumer surfaces!", __FUNCTION__);
return INVALID_OPERATION;
}
status_t ret = OK;
- for (auto& surface : surfaces) {
- if (surface == nullptr) {
+ for (auto& surfaceHolder : surfaceHolders) {
+ if (surfaceHolder.mSurface == nullptr) {
ALOGE("%s: it's illegal to set a null consumer surface!", __FUNCTION__);
return INVALID_OPERATION;
}
@@ -165,11 +167,11 @@
return NO_MEMORY;
}
- mSurfaceUniqueIds[id] = std::make_pair(surface, mNextUniqueSurfaceId++);
+ mSurfaceUniqueIds[id] = SurfaceHolderUniqueId{surfaceHolder, mNextUniqueSurfaceId++};
// Only call addOutput if the splitter has been connected.
if (mStreamSplitter != nullptr) {
- ret = mStreamSplitter->addOutput(id, surface);
+ ret = mStreamSplitter->addOutput(id, surfaceHolder.mSurface);
if (ret != OK) {
ALOGE("%s: addOutput failed with error code %d", __FUNCTION__, ret);
return ret;
@@ -222,7 +224,7 @@
for (const auto& uniqueId : uniqueSurfaceIds) {
bool uniqueIdFound = false;
for (size_t i = 0; i < kMaxOutputs; i++) {
- if (mSurfaceUniqueIds[i].second == uniqueId) {
+ if (mSurfaceUniqueIds[i].mId == uniqueId) {
surfaceIds.push_back(i);
uniqueIdFound = true;
break;
@@ -275,6 +277,23 @@
return res;
}
+ // Set buffer transform for all configured surfaces
+ for (const auto& surfaceUniqueId : mSurfaceUniqueIds) {
+ const sp<Surface>& surface = surfaceUniqueId.mSurfaceHolder.mSurface;
+ int surfaceId = surfaceUniqueId.mId;
+ int32_t transform = surfaceUniqueId.mTransform;
+ if (transform == -1 || surface == nullptr) {
+ continue;
+ }
+
+ res = mStreamSplitter->setTransform(surfaceId, transform);
+ if (res != OK) {
+ ALOGE("%s: StreamSplitter failed to setTransform: %s(%d)",
+ __FUNCTION__, strerror(-res), res);
+ return res;
+ }
+ }
+
return OK;
}
@@ -299,8 +318,9 @@
*usage = getPresetConsumerUsage();
for (size_t id = 0; id < kMaxOutputs; id++) {
- if (mSurfaceUniqueIds[id].first != nullptr) {
- res = getEndpointUsageForSurface(&u, mSurfaceUniqueIds[id].first);
+ const auto& surface = mSurfaceUniqueIds[id].mSurfaceHolder.mSurface;
+ if (surface != nullptr) {
+ res = getEndpointUsageForSurface(&u, surface);
*usage |= u;
}
}
@@ -316,7 +336,7 @@
ssize_t Camera3SharedOutputStream::getNextSurfaceIdLocked() {
ssize_t id = -1;
for (size_t i = 0; i < kMaxOutputs; i++) {
- if (mSurfaceUniqueIds[i].first == nullptr) {
+ if (mSurfaceUniqueIds[i].mSurfaceHolder.mSurface == nullptr) {
id = i;
break;
}
@@ -329,7 +349,7 @@
Mutex::Autolock l(mLock);
ssize_t id = -1;
for (size_t i = 0; i < kMaxOutputs; i++) {
- if (mSurfaceUniqueIds[i].first == surface) {
+ if (mSurfaceUniqueIds[i].mSurfaceHolder.mSurface == surface) {
id = i;
break;
}
@@ -353,13 +373,13 @@
if (surfaceId >= kMaxOutputs) {
return BAD_VALUE;
}
- outUniqueIds->push_back(mSurfaceUniqueIds[surfaceId].second);
+ outUniqueIds->push_back(mSurfaceUniqueIds[surfaceId].mId);
}
return OK;
}
status_t Camera3SharedOutputStream::revertPartialUpdateLocked(
- const KeyedVector<sp<Surface>, size_t> &removedSurfaces,
+ const KeyedVector<size_t, SurfaceHolder> &removedSurfaces,
const KeyedVector<sp<Surface>, size_t> &attachedSurfaces) {
status_t ret = OK;
@@ -371,25 +391,25 @@
return UNKNOWN_ERROR;
}
}
- mSurfaceUniqueIds[index] = std::make_pair(nullptr, mNextUniqueSurfaceId++);
+ mSurfaceUniqueIds[index] = SurfaceHolderUniqueId{mNextUniqueSurfaceId++};
}
for (size_t i = 0; i < removedSurfaces.size(); i++) {
- size_t index = removedSurfaces.valueAt(i);
+ size_t index = removedSurfaces.keyAt(i);
if (mStreamSplitter != nullptr) {
- ret = mStreamSplitter->addOutput(index, removedSurfaces.keyAt(i));
+ ret = mStreamSplitter->addOutput(index, removedSurfaces.valueAt(i).mSurface);
if (ret != OK) {
return UNKNOWN_ERROR;
}
}
- mSurfaceUniqueIds[index] = std::make_pair(
- removedSurfaces.keyAt(i), mNextUniqueSurfaceId++);
+ mSurfaceUniqueIds[index] = SurfaceHolderUniqueId{removedSurfaces.valueAt(i),
+ mNextUniqueSurfaceId++};
}
return ret;
}
-status_t Camera3SharedOutputStream::updateStream(const std::vector<sp<Surface>> &outputSurfaces,
+status_t Camera3SharedOutputStream::updateStream(const std::vector<SurfaceHolder> &outputSurfaces,
const std::vector<OutputStreamInfo> &outputInfo,
const std::vector<size_t> &removedSurfaceIds,
KeyedVector<sp<Surface>, size_t> *outputMap) {
@@ -403,7 +423,7 @@
uint64_t usage;
getEndpointUsage(&usage);
- KeyedVector<sp<Surface>, size_t> removedSurfaces;
+ KeyedVector<size_t, SurfaceHolder> removedSurfaces;
//Check whether the new surfaces are compatible.
for (const auto &infoIt : outputInfo) {
bool imgReaderUsage = (infoIt.consumerUsage & GRALLOC_USAGE_SW_READ_OFTEN) ? true : false;
@@ -437,8 +457,8 @@
}
}
- removedSurfaces.add(mSurfaceUniqueIds[it].first, it);
- mSurfaceUniqueIds[it] = std::make_pair(nullptr, mNextUniqueSurfaceId++);
+ removedSurfaces.add(it, mSurfaceUniqueIds[it].mSurfaceHolder);
+ mSurfaceUniqueIds[it] = SurfaceHolderUniqueId{mNextUniqueSurfaceId++};
}
//Next add the new outputs
@@ -453,7 +473,7 @@
return NO_MEMORY;
}
if (mStreamSplitter != nullptr) {
- ret = mStreamSplitter->addOutput(surfaceId, it);
+ ret = mStreamSplitter->addOutput(surfaceId, it.mSurface);
if (ret != OK) {
ALOGE("%s: failed with error code %d", __FUNCTION__, ret);
status_t res = revertPartialUpdateLocked(removedSurfaces, *outputMap);
@@ -463,13 +483,54 @@
return ret;
}
}
- mSurfaceUniqueIds[surfaceId] = std::make_pair(it, mNextUniqueSurfaceId++);
- outputMap->add(it, surfaceId);
+ mSurfaceUniqueIds[surfaceId] = SurfaceHolderUniqueId{it, mNextUniqueSurfaceId++};
+ outputMap->add(it.mSurface, surfaceId);
}
return ret;
}
+status_t Camera3SharedOutputStream::setTransform(
+ int transform, bool mayChangeMirror, int surfaceId) {
+ ATRACE_CALL();
+ Mutex::Autolock l(mLock);
+
+ status_t res = OK;
+
+ if (surfaceId < 0 || (size_t)surfaceId >= mSurfaceUniqueIds.size()) {
+ ALOGE("%s: Invalid surfaceId %d", __FUNCTION__, surfaceId);
+ return BAD_VALUE;
+ }
+ if (transform == -1) return res;
+
+ if (mState == STATE_ERROR) {
+ ALOGE("%s: Stream in error state", __FUNCTION__);
+ return INVALID_OPERATION;
+ }
+
+ auto& surfaceHolderForId = mSurfaceUniqueIds[surfaceId];
+ if (surfaceHolderForId.mSurfaceHolder.mMirrorMode != OutputConfiguration::MIRROR_MODE_AUTO &&
+ mayChangeMirror) {
+ // If the mirroring mode is not AUTO, do not allow transform update
+ // which may change mirror.
+ return OK;
+ }
+
+ surfaceHolderForId.mTransform = transform;
+ if (mState == STATE_CONFIGURED) {
+ sp<Surface> surface = surfaceHolderForId.mSurfaceHolder.mSurface;
+ if (surface != nullptr) {
+ res = mStreamSplitter->setTransform(surfaceId, transform);
+ if (res != OK) {
+ ALOGE("%s: StreamSplitter fails to setTransform: %s(%d)",
+ __FUNCTION__, strerror(-res), res);
+ return res;
+ }
+ }
+ }
+ return res;
+}
+
} // namespace camera3
} // namespace android
diff --git a/services/camera/libcameraservice/device3/Camera3SharedOutputStream.h b/services/camera/libcameraservice/device3/Camera3SharedOutputStream.h
index ae11507..1fd676c 100644
--- a/services/camera/libcameraservice/device3/Camera3SharedOutputStream.h
+++ b/services/camera/libcameraservice/device3/Camera3SharedOutputStream.h
@@ -41,21 +41,15 @@
* surfaces. A valid stream set id needs to be set to support buffer
* sharing between multiple streams.
*/
- Camera3SharedOutputStream(int id, const std::vector<sp<Surface>>& surfaces,
+ Camera3SharedOutputStream(int id, const std::vector<SurfaceHolder>& surfaces,
uint32_t width, uint32_t height, int format,
uint64_t consumerUsage, android_dataspace dataSpace,
camera_stream_rotation_t rotation, nsecs_t timestampOffset,
const std::string& physicalCameraId,
const std::unordered_set<int32_t> &sensorPixelModesUsed, IPCTransport transport,
- int setId = CAMERA3_STREAM_SET_ID_INVALID,
- bool useHalBufManager = false,
- int64_t dynamicProfile = ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD,
- int64_t streamUseCase = ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_DEFAULT,
- bool deviceTimeBaseIsRealtime = false,
- int timestampBase = OutputConfiguration::TIMESTAMP_BASE_DEFAULT,
- int mirrorMode = OutputConfiguration::MIRROR_MODE_AUTO,
- int32_t colorSpace = ANDROID_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP_UNSPECIFIED,
- bool useReadoutTimestamp = false);
+ int setId, bool useHalBufManager, int64_t dynamicProfile, int64_t streamUseCase,
+ bool deviceTimeBaseIsRealtime, int timestampBase,
+ int32_t colorSpace, bool useReadoutTimestamp);
virtual ~Camera3SharedOutputStream();
@@ -65,7 +59,7 @@
virtual bool isConsumerConfigurationDeferred(size_t surface_id) const;
- virtual status_t setConsumers(const std::vector<sp<Surface>>& consumers);
+ virtual status_t setConsumers(const std::vector<SurfaceHolder>& consumers);
virtual ssize_t getSurfaceId(const sp<Surface> &surface);
@@ -78,7 +72,7 @@
virtual status_t getUniqueSurfaceIds(const std::vector<size_t>& surfaceIds,
/*out*/std::vector<size_t>* outUniqueIds) override;
- virtual status_t updateStream(const std::vector<sp<Surface>> &outputSurfaces,
+ virtual status_t updateStream(const std::vector<SurfaceHolder> &outputSurfaces,
const std::vector<OutputStreamInfo> &outputInfo,
const std::vector<size_t> &removedSurfaceIds,
KeyedVector<sp<Surface>, size_t> *outputMap/*out*/);
@@ -89,6 +83,8 @@
return false;
}
+ virtual status_t setTransform(int transform, bool mayChangeMirror, int surfaceId);
+
private:
static const size_t kMaxOutputs = 4;
@@ -97,17 +93,26 @@
// depends on this flag.
bool mUseHalBufManager;
- // Pair of an output Surface and its unique ID
- typedef std::pair<sp<Surface>, size_t> SurfaceUniqueId;
+ // An output SurfaceHolder together with its pending transform and its unique ID
+ struct SurfaceHolderUniqueId {
+ SurfaceHolder mSurfaceHolder;
+ int mTransform = -1;
+ size_t mId = -1;
- // Map surfaceId -> (output surface, unique surface ID)
- std::array<SurfaceUniqueId, kMaxOutputs> mSurfaceUniqueIds;
+ SurfaceHolderUniqueId() = default;
+ SurfaceHolderUniqueId(size_t id) : mId(id) {}
+ SurfaceHolderUniqueId(const SurfaceHolder& holder, size_t id) :
+ mSurfaceHolder(holder), mId(id) {}
+ };
+
+ // Map surfaceId -> SurfaceHolderUniqueId
+ std::array<SurfaceHolderUniqueId, kMaxOutputs> mSurfaceUniqueIds;
size_t mNextUniqueSurfaceId = 0;
ssize_t getNextSurfaceIdLocked();
- status_t revertPartialUpdateLocked(const KeyedVector<sp<Surface>, size_t> &removedSurfaces,
+ status_t revertPartialUpdateLocked(const KeyedVector<size_t, SurfaceHolder> &removedSurfaces,
const KeyedVector<sp<Surface>, size_t> &attachedSurfaces);
/**
diff --git a/services/camera/libcameraservice/device3/Camera3StreamInterface.h b/services/camera/libcameraservice/device3/Camera3StreamInterface.h
index 0786622..8f3249d 100644
--- a/services/camera/libcameraservice/device3/Camera3StreamInterface.h
+++ b/services/camera/libcameraservice/device3/Camera3StreamInterface.h
@@ -115,7 +115,6 @@
int64_t dynamicRangeProfile;
int64_t streamUseCase;
int timestampBase;
- int mirrorMode;
int32_t colorSpace;
OutputStreamInfo() :
width(-1), height(-1), format(-1), dataSpace(HAL_DATASPACE_UNKNOWN),
@@ -123,17 +122,21 @@
dynamicRangeProfile(ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD),
streamUseCase(ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_DEFAULT),
timestampBase(OutputConfiguration::TIMESTAMP_BASE_DEFAULT),
- mirrorMode(OutputConfiguration::MIRROR_MODE_AUTO),
colorSpace(ANDROID_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP_UNSPECIFIED) {}
OutputStreamInfo(int _width, int _height, int _format, android_dataspace _dataSpace,
uint64_t _consumerUsage, const std::unordered_set<int32_t>& _sensorPixelModesUsed,
- int64_t _dynamicRangeProfile, int _streamUseCase, int _timestampBase, int _mirrorMode,
+ int64_t _dynamicRangeProfile, int _streamUseCase, int _timestampBase,
int32_t _colorSpace) :
width(_width), height(_height), format(_format),
dataSpace(_dataSpace), consumerUsage(_consumerUsage),
sensorPixelModesUsed(_sensorPixelModesUsed), dynamicRangeProfile(_dynamicRangeProfile),
- streamUseCase(_streamUseCase), timestampBase(_timestampBase), mirrorMode(_mirrorMode),
- colorSpace(_colorSpace) {}
+ streamUseCase(_streamUseCase), timestampBase(_timestampBase), colorSpace(_colorSpace) {}
+};
+
+// A holder containing a surface and its corresponding mirroring mode
+struct SurfaceHolder {
+ sp<Surface> mSurface;
+ int mMirrorMode = OutputConfiguration::MIRROR_MODE_AUTO;
};
// Utility class to lock and unlock a GraphicBuffer
diff --git a/services/camera/libcameraservice/device3/Camera3StreamSplitter.cpp b/services/camera/libcameraservice/device3/Camera3StreamSplitter.cpp
index 7090545..a360abf 100644
--- a/services/camera/libcameraservice/device3/Camera3StreamSplitter.cpp
+++ b/services/camera/libcameraservice/device3/Camera3StreamSplitter.cpp
@@ -201,6 +201,17 @@
mUseHalBufManager = enabled;
}
+status_t Camera3StreamSplitter::setTransform(size_t surfaceId, int transform) {
+ Mutex::Autolock lock(mMutex);
+ if (!mOutputSurfaces.contains(surfaceId) || mOutputSurfaces[surfaceId] == nullptr) {
+ SP_LOGE("%s: No surface at id %zu", __FUNCTION__, surfaceId);
+ return BAD_VALUE;
+ }
+
+ mOutputTransforms[surfaceId] = transform;
+ return OK;
+}
+
status_t Camera3StreamSplitter::addOutputLocked(size_t surfaceId, const sp<Surface>& outputQueue) {
ATRACE_CALL();
if (outputQueue == nullptr) {
@@ -374,7 +385,12 @@
output->setBuffersDataSpace(static_cast<ui::Dataspace>(bufferItem.mDataSpace));
output->setCrop(&bufferItem.mCrop);
output->setScalingMode(bufferItem.mScalingMode);
- output->setBuffersTransform(bufferItem.mTransform);
+
+ int transform = bufferItem.mTransform;
+ if (mOutputTransforms.contains(surfaceId)) {
+ transform = mOutputTransforms[surfaceId];
+ }
+ output->setBuffersTransform(transform);
// In case the output BufferQueue has its own lock, if we hold splitter lock while calling
// queueBuffer (which will try to acquire the output lock), the output could be holding its
diff --git a/services/camera/libcameraservice/device3/Camera3StreamSplitter.h b/services/camera/libcameraservice/device3/Camera3StreamSplitter.h
index 0440e08..6e5d8f7 100644
--- a/services/camera/libcameraservice/device3/Camera3StreamSplitter.h
+++ b/services/camera/libcameraservice/device3/Camera3StreamSplitter.h
@@ -96,6 +96,7 @@
void setHalBufferManager(bool enabled);
+ status_t setTransform(size_t surfaceId, int transform);
private:
// From BufferItemConsumer::FrameAvailableListener
//
@@ -237,6 +238,9 @@
//Map surface ids -> gbp outputs
std::unordered_map<int, sp<Surface>> mOutputSurfaces;
+ // Map surface ids -> transform
+ std::unordered_map<int, int> mOutputTransforms;
+
//Map surface ids -> consumer buffer count
std::unordered_map<int, size_t > mConsumerBufferCount;
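For illustration, a minimal sketch of the new per-output transform override on the splitter; the splitter pointer and surface id are placeholders. Once recorded, the override replaces the BufferItem's transform whenever the splitter queues buffers to that output.

// Sketch only: force a horizontal flip on shared-stream output 1. Subsequent buffers
// forwarded to that output are queued with this transform instead of
// bufferItem.mTransform.
status_t res = splitter->setTransform(/*surfaceId*/ 1, NATIVE_WINDOW_TRANSFORM_FLIP_H);
if (res != OK) {
    ALOGE("Failed to override transform for shared output 1: %s (%d)", strerror(-res), res);
}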
diff --git a/services/camera/libcameraservice/device3/deprecated/DeprecatedCamera3StreamSplitter.cpp b/services/camera/libcameraservice/device3/deprecated/DeprecatedCamera3StreamSplitter.cpp
index c1113e5..00bbde3 100644
--- a/services/camera/libcameraservice/device3/deprecated/DeprecatedCamera3StreamSplitter.cpp
+++ b/services/camera/libcameraservice/device3/deprecated/DeprecatedCamera3StreamSplitter.cpp
@@ -189,6 +189,17 @@
mUseHalBufManager = enabled;
}
+status_t DeprecatedCamera3StreamSplitter::setTransform(size_t surfaceId, int transform) {
+ Mutex::Autolock lock(mMutex);
+ if (!mOutputs.contains(surfaceId) || mOutputs[surfaceId] == nullptr) {
+ SP_LOGE("%s: No surface at id %zu", __FUNCTION__, surfaceId);
+ return BAD_VALUE;
+ }
+
+ mOutputTransforms[surfaceId] = transform;
+ return OK;
+}
+
status_t DeprecatedCamera3StreamSplitter::addOutputLocked(size_t surfaceId,
const sp<Surface>& outputQueue) {
ATRACE_CALL();
@@ -355,9 +366,13 @@
const sp<IGraphicBufferProducer>& output, const BufferItem& bufferItem, size_t surfaceId) {
ATRACE_CALL();
status_t res;
+ int transform = bufferItem.mTransform;
+ if (mOutputTransforms.contains(surfaceId)) {
+ transform = mOutputTransforms[surfaceId];
+ }
IGraphicBufferProducer::QueueBufferInput queueInput(
bufferItem.mTimestamp, bufferItem.mIsAutoTimestamp, bufferItem.mDataSpace,
- bufferItem.mCrop, static_cast<int32_t>(bufferItem.mScalingMode), bufferItem.mTransform,
+ bufferItem.mCrop, static_cast<int32_t>(bufferItem.mScalingMode), transform,
bufferItem.mFence);
IGraphicBufferProducer::QueueBufferOutput queueOutput;
diff --git a/services/camera/libcameraservice/device3/deprecated/DeprecatedCamera3StreamSplitter.h b/services/camera/libcameraservice/device3/deprecated/DeprecatedCamera3StreamSplitter.h
index 4610985..61b43a8 100644
--- a/services/camera/libcameraservice/device3/deprecated/DeprecatedCamera3StreamSplitter.h
+++ b/services/camera/libcameraservice/device3/deprecated/DeprecatedCamera3StreamSplitter.h
@@ -95,6 +95,7 @@
void setHalBufferManager(bool enabled);
+ status_t setTransform(size_t surfaceId, int transform);
private:
// From IConsumerListener
//
@@ -259,6 +260,9 @@
// Map surface ids -> gbp outputs
std::unordered_map<int, sp<Surface>> mOutputSurfaces;
+ // Map surface ids -> transform
+ std::unordered_map<int, int> mOutputTransforms;
+
// Map surface ids -> consumer buffer count
std::unordered_map<int, size_t> mConsumerBufferCount;
diff --git a/services/camera/libcameraservice/libcameraservice_fuzzer/camera_service_fuzzer.cpp b/services/camera/libcameraservice/libcameraservice_fuzzer/camera_service_fuzzer.cpp
index 12ac33f..86e2c70 100644
--- a/services/camera/libcameraservice/libcameraservice_fuzzer/camera_service_fuzzer.cpp
+++ b/services/camera/libcameraservice/libcameraservice_fuzzer/camera_service_fuzzer.cpp
@@ -40,6 +40,7 @@
#include <fakeservicemanager/FakeServiceManager.h>
#include <fuzzbinder/random_binder.h>
#include <gui/BufferItemConsumer.h>
+#include <gui/Flags.h>
#include <gui/IGraphicBufferProducer.h>
#include <gui/Surface.h>
#include <gui/SurfaceComposerClient.h>
@@ -620,7 +621,11 @@
previewSurface = surfaceControl->getSurface();
if (previewSurface.get()) {
- cameraDevice->setPreviewTarget(previewSurface->getIGraphicBufferProducer());
+ cameraDevice->setPreviewTarget(previewSurface
+#if !WB_LIBCAMERASERVICE_WITH_DEPENDENCIES
+ ->getIGraphicBufferProducer()
+#endif
+ );
}
}
cameraDevice->setPreviewCallbackFlag(CAMERA_FRAME_CALLBACK_FLAG_CAMCORDER);
@@ -675,7 +680,11 @@
.apply();
sp<Surface> previewSurfaceVideo = surfaceControlVideo->getSurface();
if (previewSurfaceVideo.get()) {
- cameraDevice->setVideoTarget(previewSurfaceVideo->getIGraphicBufferProducer());
+ cameraDevice->setVideoTarget(previewSurfaceVideo
+#if !WB_LIBCAMERASERVICE_WITH_DEPENDENCIES
+ ->getIGraphicBufferProducer()
+#endif
+ );
}
}
cameraDevice->stopPreview();
diff --git a/services/camera/libcameraservice/utils/AttributionAndPermissionUtils.cpp b/services/camera/libcameraservice/utils/AttributionAndPermissionUtils.cpp
index b213218..4b63704 100644
--- a/services/camera/libcameraservice/utils/AttributionAndPermissionUtils.cpp
+++ b/services/camera/libcameraservice/utils/AttributionAndPermissionUtils.cpp
@@ -132,26 +132,78 @@
return binder::Status::ok();
}
-bool AttributionAndPermissionUtils::checkPermissionForPreflight(
+PermissionChecker::PermissionResult AttributionAndPermissionUtils::checkPermission(
const std::string& cameraId, const std::string& permission,
const AttributionSourceState& attributionSource, const std::string& message,
- int32_t attributedOpCode) {
+ int32_t attributedOpCode, bool forDataDelivery, bool startDataDelivery,
+ bool checkAutomotive) {
AttributionSourceState clientAttribution = attributionSource;
if (!flags::check_full_attribution_source_chain() && !clientAttribution.next.empty()) {
clientAttribution.next.clear();
}
- if (checkAutomotivePrivilegedClient(cameraId, clientAttribution)) {
- return true;
+ if (checkAutomotive && checkAutomotivePrivilegedClient(cameraId, clientAttribution)) {
+ return PermissionChecker::PERMISSION_GRANTED;
}
- PermissionChecker::PermissionResult result = mPermissionChecker->checkPermissionForPreflight(
- toString16(permission), clientAttribution, toString16(message), attributedOpCode);
+ PermissionChecker::PermissionResult result;
+ if (forDataDelivery) {
+ if (startDataDelivery) {
+ result = mPermissionChecker->checkPermissionForStartDataDeliveryFromDatasource(
+ toString16(permission), clientAttribution, toString16(message),
+ attributedOpCode);
+ } else {
+ result = mPermissionChecker->checkPermissionForDataDeliveryFromDatasource(
+ toString16(permission), clientAttribution, toString16(message),
+ attributedOpCode);
+ }
+ } else {
+ result = mPermissionChecker->checkPermissionForPreflight(
+ toString16(permission), clientAttribution, toString16(message), attributedOpCode);
+ }
+
if (result == PermissionChecker::PERMISSION_HARD_DENIED) {
- ALOGE("%s: Permission denied for client attribution %s", __FUNCTION__,
+ ALOGI("%s (forDataDelivery %d startDataDelivery %d): Permission hard denied "
+ "for client attribution %s",
+ __FUNCTION__, forDataDelivery, startDataDelivery,
+ getAttributionString(clientAttribution).c_str());
+ } else if (result == PermissionChecker::PERMISSION_SOFT_DENIED) {
+ ALOGI("%s checkPermission (forDataDelivery %d startDataDelivery %d): Permission soft "
+ "denied "
+ "for client attribution %s",
+ __FUNCTION__, forDataDelivery, startDataDelivery,
getAttributionString(clientAttribution).c_str());
}
- return result != PermissionChecker::PERMISSION_HARD_DENIED;
+ return result;
+}
+
+bool AttributionAndPermissionUtils::checkPermissionForPreflight(
+ const std::string& cameraId, const std::string& permission,
+ const AttributionSourceState& attributionSource, const std::string& message,
+ int32_t attributedOpCode) {
+ return checkPermission(cameraId, permission, attributionSource, message, attributedOpCode,
+ /* forDataDelivery */ false, /* startDataDelivery */ false,
+ /* checkAutomotive */ true) != PermissionChecker::PERMISSION_HARD_DENIED;
+}
+
+bool AttributionAndPermissionUtils::checkPermissionForDataDelivery(
+ const std::string& cameraId, const std::string& permission,
+ const AttributionSourceState& attributionSource, const std::string& message,
+ int32_t attributedOpCode) {
+ return checkPermission(cameraId, permission, attributionSource, message, attributedOpCode,
+ /* forDataDelivery */ true, /* startDataDelivery */ false,
+ /* checkAutomotive */ false) !=
+ PermissionChecker::PERMISSION_HARD_DENIED;
+}
+
+PermissionChecker::PermissionResult
+AttributionAndPermissionUtils::checkPermissionForStartDataDelivery(
+ const std::string& cameraId, const std::string& permission,
+ const AttributionSourceState& attributionSource, const std::string& message,
+ int32_t attributedOpCode) {
+ return checkPermission(cameraId, permission, attributionSource, message, attributedOpCode,
+ /* forDataDelivery */ true, /* startDataDelivery */ true,
+ /* checkAutomotive */ false);
}
// Can camera service trust the caller based on the calling UID?
@@ -244,9 +296,35 @@
}
bool AttributionAndPermissionUtils::hasPermissionsForCamera(
+ const std::string& cameraId, const AttributionSourceState& attributionSource,
+ bool forDataDelivery, bool checkAutomotive) {
+ return checkPermission(cameraId, sCameraPermission, attributionSource, std::string(),
+ AppOpsManager::OP_NONE, forDataDelivery, /* startDataDelivery */ false,
+ checkAutomotive) != PermissionChecker::PERMISSION_HARD_DENIED;
+}
+
+PermissionChecker::PermissionResult
+AttributionAndPermissionUtils::checkPermissionsForCameraForPreflight(
const std::string& cameraId, const AttributionSourceState& attributionSource) {
- return checkPermissionForPreflight(cameraId, sCameraPermission, attributionSource,
- std::string(), AppOpsManager::OP_NONE);
+ return checkPermission(cameraId, sCameraPermission, attributionSource, std::string(),
+ AppOpsManager::OP_NONE, /* forDataDelivery */ false,
+ /* startDataDelivery */ false, /* checkAutomotive */ false);
+}
+
+PermissionChecker::PermissionResult
+AttributionAndPermissionUtils::checkPermissionsForCameraForDataDelivery(
+ const std::string& cameraId, const AttributionSourceState& attributionSource) {
+ return checkPermission(cameraId, sCameraPermission, attributionSource, std::string(),
+ AppOpsManager::OP_NONE, /* forDataDelivery */ true,
+ /* startDataDelivery */ false, /* checkAutomotive */ false);
+}
+
+PermissionChecker::PermissionResult
+AttributionAndPermissionUtils::checkPermissionsForCameraForStartDataDelivery(
+ const std::string& cameraId, const AttributionSourceState& attributionSource) {
+ return checkPermission(cameraId, sCameraPermission, attributionSource, std::string(),
+ AppOpsManager::OP_NONE, /* forDataDelivery */ true,
+ /* startDataDelivery */ true, /* checkAutomotive */ false);
}
bool AttributionAndPermissionUtils::hasPermissionsForSystemCamera(
@@ -277,6 +355,12 @@
attributionSource, std::string(), AppOpsManager::OP_NONE);
}
+void AttributionAndPermissionUtils::finishDataDelivery(
+ const AttributionSourceState& attributionSource) {
+ mPermissionChecker->finishDataDeliveryFromDatasource(AppOpsManager::OP_CAMERA,
+ attributionSource);
+}
+
bool AttributionAndPermissionUtils::checkAutomotivePrivilegedClient(
const std::string& cameraId, const AttributionSourceState& attributionSource) {
if (isAutomotivePrivilegedClient(attributionSource.uid)) {
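For illustration, a minimal sketch of the intended pairing between the new start-data-delivery check and finishDataDelivery(); utils, cameraId and clientAttribution are placeholders, and soft-denial handling is omitted.

// Sketch only: a successful start-data-delivery check notes the CAMERA app op for the
// attribution chain, so it must be balanced with finishDataDelivery() when the client
// stops receiving frames.
PermissionChecker::PermissionResult result =
        utils->checkPermissionsForCameraForStartDataDelivery(cameraId, clientAttribution);
if (result == PermissionChecker::PERMISSION_HARD_DENIED) {
    return; // no camera data may be delivered
}
// deliver camera frames to the client here
utils->finishDataDelivery(clientAttribution);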
diff --git a/services/camera/libcameraservice/utils/AttributionAndPermissionUtils.h b/services/camera/libcameraservice/utils/AttributionAndPermissionUtils.h
index 9ed7fa2..3361eaa 100644
--- a/services/camera/libcameraservice/utils/AttributionAndPermissionUtils.h
+++ b/services/camera/libcameraservice/utils/AttributionAndPermissionUtils.h
@@ -29,6 +29,37 @@
using content::AttributionSourceState;
using permission::PermissionChecker;
+class AttrSourceItr {
+ public:
+ using value_type = AttributionSourceState;
+ using pointer = const value_type*;
+ using reference = const value_type&;
+
+ AttrSourceItr() : mAttr(nullptr) {}
+
+ AttrSourceItr(const AttributionSourceState& attr) : mAttr(&attr) {}
+
+ reference operator*() const { return *mAttr; }
+ pointer operator->() const { return mAttr; }
+
+ AttrSourceItr& operator++() {
+ mAttr = !mAttr->next.empty() ? mAttr->next.data() : nullptr;
+ return *this;
+ }
+
+ AttrSourceItr operator++(int) {
+ AttrSourceItr tmp = *this;
+ ++(*this);
+ return tmp;
+ }
+
+ friend bool operator==(const AttrSourceItr& a, const AttrSourceItr& b) = default;
+
+ static AttrSourceItr end() { return AttrSourceItr{}; }
+ private:
+ const AttributionSourceState * mAttr;
+};
+
/**
* Utility class consolidating methods/data for verifying permissions and the identity of the
* caller.
@@ -87,6 +118,15 @@
const std::string& permission,
const AttributionSourceState& attributionSource,
const std::string& message, int32_t attributedOpCode);
+ virtual bool checkPermissionForDataDelivery(const std::string& cameraId,
+ const std::string& permission,
+ const AttributionSourceState& attributionSource,
+ const std::string& message,
+ int32_t attributedOpCode);
+ virtual PermissionChecker::PermissionResult checkPermissionForStartDataDelivery(
+ const std::string& cameraId, const std::string& permission,
+ const AttributionSourceState& attributionSource, const std::string& message,
+ int32_t attributedOpCode);
// Can camera service trust the caller based on the calling UID?
virtual bool isTrustedCallingUid(uid_t uid);
@@ -114,7 +154,14 @@
// Utils for checking specific permissions
virtual bool hasPermissionsForCamera(const std::string& cameraId,
- const AttributionSourceState& attributionSource);
+ const AttributionSourceState& attributionSource,
+ bool forDataDelivery = false, bool checkAutomotive = true);
+ virtual PermissionChecker::PermissionResult checkPermissionsForCameraForPreflight(
+ const std::string& cameraId, const AttributionSourceState& attributionSource);
+ virtual PermissionChecker::PermissionResult checkPermissionsForCameraForDataDelivery(
+ const std::string& cameraId, const AttributionSourceState& attributionSource);
+ virtual PermissionChecker::PermissionResult checkPermissionsForCameraForStartDataDelivery(
+ const std::string& cameraId, const AttributionSourceState& attributionSource);
virtual bool hasPermissionsForSystemCamera(const std::string& cameraId,
const AttributionSourceState& attributionSource,
bool checkCameraPermissions = true);
@@ -125,6 +172,8 @@
virtual bool hasPermissionsForOpenCloseListener(
const AttributionSourceState& attributionSource);
+ virtual void finishDataDelivery(const AttributionSourceState& attributionSource);
+
static const std::string sDumpPermission;
static const std::string sManageCameraPermission;
static const std::string sCameraPermission;
@@ -156,6 +205,12 @@
private:
virtual const sp<IPermissionController>& getPermissionController() const;
+ virtual PermissionChecker::PermissionResult checkPermission(
+ const std::string& cameraId, const std::string& permission,
+ const AttributionSourceState& attributionSource, const std::string& message,
+ int32_t attributedOpCode, bool forDataDelivery, bool startDataDelivery,
+ bool checkAutomotive);
+
std::unique_ptr<permission::PermissionChecker> mPermissionChecker =
std::make_unique<permission::PermissionChecker>();
};
@@ -230,12 +285,39 @@
bool hasPermissionsForCamera(const std::string& cameraId, int callingPid, int callingUid,
int32_t deviceId) const {
auto attributionSource = buildAttributionSource(callingPid, callingUid, deviceId);
- return mAttributionAndPermissionUtils->hasPermissionsForCamera(cameraId, attributionSource);
+ return hasPermissionsForCamera(cameraId, attributionSource);
}
bool hasPermissionsForCamera(const std::string& cameraId,
const AttributionSourceState& clientAttribution) const {
- return mAttributionAndPermissionUtils->hasPermissionsForCamera(cameraId, clientAttribution);
+ return mAttributionAndPermissionUtils->hasPermissionsForCamera(cameraId, clientAttribution,
+ /* forDataDelivery */ false,
+ /* checkAutomotive */ true);
+ }
+
+ bool hasPermissionsForCameraForDataDelivery(
+ const std::string& cameraId, const AttributionSourceState& clientAttribution) const {
+ return mAttributionAndPermissionUtils->hasPermissionsForCamera(cameraId, clientAttribution,
+ /* forDataDelivery */ true,
+ /* checkAutomotive */ false);
+ }
+
+ PermissionChecker::PermissionResult checkPermissionsForCameraForPreflight(
+ const std::string& cameraId, const AttributionSourceState& clientAttribution) const {
+ return mAttributionAndPermissionUtils->checkPermissionsForCameraForPreflight(
+ cameraId, clientAttribution);
+ }
+
+ PermissionChecker::PermissionResult checkPermissionsForCameraForDataDelivery(
+ const std::string& cameraId, const AttributionSourceState& clientAttribution) const {
+ return mAttributionAndPermissionUtils->checkPermissionsForCameraForDataDelivery(
+ cameraId, clientAttribution);
+ }
+
+ PermissionChecker::PermissionResult checkPermissionsForCameraForStartDataDelivery(
+ const std::string& cameraId, const AttributionSourceState& clientAttribution) const {
+ return mAttributionAndPermissionUtils->checkPermissionsForCameraForStartDataDelivery(
+ cameraId, clientAttribution);
}
bool hasPermissionsForSystemCamera(const std::string& cameraId, int callingPid, int callingUid,
@@ -264,6 +346,10 @@
attributionSource);
}
+ void finishDataDelivery(const AttributionSourceState& attributionSource) {
+ mAttributionAndPermissionUtils->finishDataDelivery(attributionSource);
+ }
+
bool isAutomotiveDevice() const { return mAttributionAndPermissionUtils->isAutomotiveDevice(); }
bool isAutomotivePrivilegedClient(int32_t uid) const {
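
For illustration (not part of the patch), the iteration pattern behind the new AttrSourceItr can be sketched standalone. MiniAttributionSource below is a simplified stand-in for the AIDL AttributionSourceState, whose "next" vector carries either zero elements or a single delegated attribution source:

#include <cstdint>
#include <iostream>
#include <vector>

// Simplified stand-in for the AIDL AttributionSourceState.
struct MiniAttributionSource {
  int32_t uid = -1;
  std::vector<MiniAttributionSource> next;  // empty, or one delegated source
};

// Same forward-iteration pattern as AttrSourceItr: dereference the current
// source, advance into next.data(), and compare against a null end() sentinel.
class MiniAttrSourceItr {
 public:
  MiniAttrSourceItr() : mAttr(nullptr) {}
  explicit MiniAttrSourceItr(const MiniAttributionSource& attr) : mAttr(&attr) {}

  const MiniAttributionSource& operator*() const { return *mAttr; }
  const MiniAttributionSource* operator->() const { return mAttr; }

  MiniAttrSourceItr& operator++() {
    mAttr = !mAttr->next.empty() ? mAttr->next.data() : nullptr;
    return *this;
  }

  friend bool operator==(const MiniAttrSourceItr& a,
                         const MiniAttrSourceItr& b) = default;

  static MiniAttrSourceItr end() { return MiniAttrSourceItr{}; }

 private:
  const MiniAttributionSource* mAttr;
};

int main() {
  // A trusted service (uid 1000) attributing work to an app (uid 10001).
  MiniAttributionSource app;
  app.uid = 10001;
  MiniAttributionSource service;
  service.uid = 1000;
  service.next.push_back(app);

  for (auto it = MiniAttrSourceItr(service); it != MiniAttrSourceItr::end(); ++it) {
    std::cout << "uid in attribution chain: " << it->uid << '\n';
  }
  return 0;
}
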
diff --git a/services/camera/libcameraservice/utils/SessionConfigurationUtils.cpp b/services/camera/libcameraservice/utils/SessionConfigurationUtils.cpp
index 40ca276..d937fe9 100644
--- a/services/camera/libcameraservice/utils/SessionConfigurationUtils.cpp
+++ b/services/camera/libcameraservice/utils/SessionConfigurationUtils.cpp
@@ -602,7 +602,6 @@
streamInfo.dynamicRangeProfile = dynamicRangeProfile;
streamInfo.streamUseCase = streamUseCase;
streamInfo.timestampBase = timestampBase;
- streamInfo.mirrorMode = mirrorMode;
streamInfo.colorSpace = colorSpace;
return binder::Status::ok();
}
@@ -848,7 +847,6 @@
int64_t streamUseCase = it.getStreamUseCase();
int timestampBase = it.getTimestampBase();
- int mirrorMode = it.getMirrorMode();
// If the configuration is a deferred consumer, or a not yet completed
// configuration with no buffer producers attached.
if (deferredConsumer || (!isConfigurationComplete && numBufferProducers == 0)) {
@@ -908,6 +906,7 @@
}
for (auto& bufferProducer : bufferProducers) {
+ int mirrorMode = it.getMirrorMode(bufferProducer);
sp<Surface> surface;
res = createSurfaceFromGbp(streamInfo, isStreamInfoValid, surface, bufferProducer,
logicalCameraId, metadataChosen, sensorPixelModesUsed, dynamicRangeProfile,
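
For illustration (not part of the patch), why the mirror-mode lookup moves inside the per-surface loop: with shared surfaces, each surface may carry its own mirror-mode override, so the value has to be resolved per buffer producer rather than once per output configuration. A minimal sketch with stand-in types:

#include <cstdio>
#include <unordered_map>
#include <vector>

enum MirrorMode { MIRROR_MODE_AUTO = 0, MIRROR_MODE_NONE = 1, MIRROR_MODE_H = 2 };

// Simplified stand-in for OutputConfiguration with per-surface overrides.
struct MiniOutputConfiguration {
  MirrorMode defaultMirrorMode = MIRROR_MODE_AUTO;
  std::unordered_map<int, MirrorMode> perSurfaceMirrorMode;  // surfaceId -> override
  std::vector<int> surfaceIds;

  MirrorMode getMirrorMode(int surfaceId) const {
    auto it = perSurfaceMirrorMode.find(surfaceId);
    return it != perSurfaceMirrorMode.end() ? it->second : defaultMirrorMode;
  }
};

int main() {
  MiniOutputConfiguration config;
  config.surfaceIds = {0, 1};
  config.perSurfaceMirrorMode[1] = MIRROR_MODE_H;  // only the second surface mirrors

  for (int surfaceId : config.surfaceIds) {
    // Mirrors the patched loop: resolve the mode per surface, not per config.
    std::printf("surface %d -> mirrorMode %d\n", surfaceId,
                config.getMirrorMode(surfaceId));
  }
  return 0;
}
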
diff --git a/services/camera/virtualcamera/VirtualCameraCaptureResult.cc b/services/camera/virtualcamera/VirtualCameraCaptureResult.cc
index a61f553..da1c208 100644
--- a/services/camera/virtualcamera/VirtualCameraCaptureResult.cc
+++ b/services/camera/virtualcamera/VirtualCameraCaptureResult.cc
@@ -16,6 +16,7 @@
#include "VirtualCameraCaptureResult.h"
#include <cstdint>
+#include <memory>
#include "VirtualCameraCaptureRequest.h"
#include "aidl/android/hardware/camera/device/CameraMetadata.h"
@@ -34,7 +35,7 @@
} // namespace
-CameraMetadata createCaptureResultMetadata(
+std::unique_ptr<CameraMetadata> createCaptureResultMetadata(
const std::chrono::nanoseconds timestamp,
const RequestSettings& requestSettings,
const Resolution reportedSensorSize) {
@@ -109,9 +110,9 @@
if (metadata == nullptr) {
ALOGE("%s: Failed to build capture result metadata", __func__);
- return CameraMetadata();
+ return std::make_unique<CameraMetadata>();
}
- return std::move(*metadata);
+ return metadata;
}
} // namespace virtualcamera
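
For illustration (not part of the patch), the shape of the createCaptureResultMetadata() signature change: returning std::unique_ptr hands the heap-allocated metadata to the caller without moving it out of its owner, and an empty object signals a failed build while still letting the caller fall back to a default. A minimal sketch with a stand-in metadata type:

#include <cstdio>
#include <memory>
#include <vector>

// Stand-in for the AIDL CameraMetadata blob.
struct MiniMetadata {
  std::vector<unsigned char> blob;
};

std::unique_ptr<MiniMetadata> buildMetadata(bool simulateFailure) {
  if (simulateFailure) {
    // On failure, return a default-constructed object instead of dying, so the
    // pipeline can still submit a (metadata-less) capture result.
    return std::make_unique<MiniMetadata>();
  }
  auto metadata = std::make_unique<MiniMetadata>();
  metadata->blob = {0x01, 0x02, 0x03};
  return metadata;
}

int main() {
  std::unique_ptr<MiniMetadata> metadata = buildMetadata(/*simulateFailure=*/false);
  // Caller-side fallback, as createCaptureResult() does in the render thread:
  MiniMetadata result = metadata != nullptr ? *metadata : MiniMetadata();
  std::printf("metadata entries: %zu\n", result.blob.size());
  return 0;
}
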
diff --git a/services/camera/virtualcamera/VirtualCameraCaptureResult.h b/services/camera/virtualcamera/VirtualCameraCaptureResult.h
index 9e5b4d7..c3978f7 100644
--- a/services/camera/virtualcamera/VirtualCameraCaptureResult.h
+++ b/services/camera/virtualcamera/VirtualCameraCaptureResult.h
@@ -18,21 +18,10 @@
#define ANDROID_COMPANION_VIRTUALCAMERA_VIRTUALCAMERACAPTURERESULT_H
#include <chrono>
-#include <cstdint>
#include <cstring>
-#include <future>
#include <memory>
-#include <mutex>
-#include <thread>
-#include <utility>
-#include <vector>
-#include "Exif.h"
-#include "GLES/gl.h"
#include "VirtualCameraCaptureRequest.h"
-#include "VirtualCameraDevice.h"
-#include "VirtualCameraRenderThread.h"
-#include "VirtualCameraSessionContext.h"
#include "aidl/android/hardware/camera/device/CameraMetadata.h"
namespace android {
@@ -41,7 +30,7 @@
// Construct the Metadata for the Capture result based on the request
// settings, timestamp and reported sensore size
-::aidl::android::hardware::camera::device::CameraMetadata
+std::unique_ptr<::aidl::android::hardware::camera::device::CameraMetadata>
createCaptureResultMetadata(std::chrono::nanoseconds timestamp,
const RequestSettings& requestSettings,
Resolution reportedSensorSize);
diff --git a/services/camera/virtualcamera/VirtualCameraRenderThread.cc b/services/camera/virtualcamera/VirtualCameraRenderThread.cc
index becba90..58c6549 100644
--- a/services/camera/virtualcamera/VirtualCameraRenderThread.cc
+++ b/services/camera/virtualcamera/VirtualCameraRenderThread.cc
@@ -100,6 +100,9 @@
static constexpr UpdateTextureTask kUpdateTextureTask;
+// The number of nanoseconds to wait for the first frame to be drawn on the input surface.
+static constexpr std::chrono::nanoseconds kMaxWaitFirstFrame = 3s;
+
NotifyMsg createShutterNotifyMsg(int frameNumber,
std::chrono::nanoseconds timestamp) {
NotifyMsg msg;
@@ -110,11 +113,13 @@
return msg;
}
-NotifyMsg createBufferErrorNotifyMsg(int frameNumber, int streamId) {
+// Create a NotifyMsg for an error case. The default error is ERROR_BUFFER.
+NotifyMsg createErrorNotifyMsg(int frameNumber, int streamId,
+ ErrorCode errorCode = ErrorCode::ERROR_BUFFER) {
NotifyMsg msg;
msg.set<NotifyMsg::Tag::error>(ErrorMsg{.frameNumber = frameNumber,
.errorStreamId = streamId,
- .errorCode = ErrorCode::ERROR_BUFFER});
+ .errorCode = errorCode});
return msg;
}
@@ -421,10 +426,15 @@
}
// Calculate the maximal amount of time we can afford to wait for next frame.
+ const bool isFirstFrameDrawn = mEglSurfaceTexture->isFirstFrameDrawn();
+ ALOGV("First Frame Drawn: %s", isFirstFrameDrawn ? "Yes" : "No");
+
const std::chrono::nanoseconds maxFrameDuration =
- getMaxFrameDuration(request.getRequestSettings());
+ isFirstFrameDrawn ? getMaxFrameDuration(request.getRequestSettings())
+ : kMaxWaitFirstFrame;
const std::chrono::nanoseconds elapsedDuration =
- timestamp - lastAcquisitionTimestamp;
+ isFirstFrameDrawn ? timestamp - lastAcquisitionTimestamp : 0ns;
+
if (elapsedDuration < maxFrameDuration) {
// We can afford to wait for next frame.
// Note that if there's already new frame in the input Surface, the call
@@ -434,6 +444,17 @@
timestamp = std::chrono::duration_cast<std::chrono::nanoseconds>(
std::chrono::steady_clock::now().time_since_epoch());
if (!gotNewFrame) {
+ if (!mEglSurfaceTexture->isFirstFrameDrawn()) {
+        // No frame has ever been drawn on the input surface. This is considered
+        // an error case: notify the framework of the failure and return early.
+ ALOGW("Timed out waiting for first frame to be drawn.");
+ std::unique_ptr<CaptureResult> captureResult = createCaptureResult(
+ request.getFrameNumber(), /* metadata = */ nullptr);
+ notifyTimeout(request, *captureResult);
+ submitCaptureResult(std::move(captureResult));
+ return;
+ }
+
ALOGV(
"%s: No new frame received on input surface after waiting for "
"%" PRIu64 "ns, repeating last frame.",
@@ -457,75 +478,20 @@
captureTimestamp.count(), timestamp.count());
}
- CaptureResult captureResult;
- captureResult.fmqResultSize = 0;
- captureResult.frameNumber = request.getFrameNumber();
- // Partial result needs to be set to 1 when metadata are present.
- captureResult.partialResult = 1;
- captureResult.inputBuffer.streamId = -1;
- captureResult.physicalCameraMetadata.resize(0);
- captureResult.result = createCaptureResultMetadata(
- captureTimestamp, request.getRequestSettings(), mReportedSensorSize);
+ std::unique_ptr<CaptureResult> captureResult = createCaptureResult(
+ request.getFrameNumber(),
+ createCaptureResultMetadata(
+ captureTimestamp, request.getRequestSettings(), mReportedSensorSize));
+ renderOutputBuffers(request, *captureResult);
- const std::vector<CaptureRequestBuffer>& buffers = request.getBuffers();
- captureResult.outputBuffers.resize(buffers.size());
-
- for (int i = 0; i < buffers.size(); ++i) {
- const CaptureRequestBuffer& reqBuffer = buffers[i];
- StreamBuffer& resBuffer = captureResult.outputBuffers[i];
- resBuffer.streamId = reqBuffer.getStreamId();
- resBuffer.bufferId = reqBuffer.getBufferId();
- resBuffer.status = BufferStatus::OK;
-
- const std::optional<Stream> streamConfig =
- mSessionContext.getStreamConfig(reqBuffer.getStreamId());
-
- if (!streamConfig.has_value()) {
- resBuffer.status = BufferStatus::ERROR;
- continue;
- }
-
- auto status = streamConfig->format == PixelFormat::BLOB
- ? renderIntoBlobStreamBuffer(
- reqBuffer.getStreamId(), reqBuffer.getBufferId(),
- captureResult.result, request.getRequestSettings(),
- reqBuffer.getFence())
- : renderIntoImageStreamBuffer(reqBuffer.getStreamId(),
- reqBuffer.getBufferId(),
- reqBuffer.getFence());
- if (!status.isOk()) {
- resBuffer.status = BufferStatus::ERROR;
- }
- }
-
- std::vector<NotifyMsg> notifyMsg{
- createShutterNotifyMsg(request.getFrameNumber(), captureTimestamp)};
- for (const StreamBuffer& resBuffer : captureResult.outputBuffers) {
- if (resBuffer.status != BufferStatus::OK) {
- notifyMsg.push_back(createBufferErrorNotifyMsg(request.getFrameNumber(),
- resBuffer.streamId));
- }
- }
-
- auto status = mCameraDeviceCallback->notify(notifyMsg);
+ auto status = notifyShutter(request, *captureResult, captureTimestamp);
if (!status.isOk()) {
ALOGE("%s: notify call failed: %s", __func__,
status.getDescription().c_str());
return;
}
- std::vector<::aidl::android::hardware::camera::device::CaptureResult>
- captureResults(1);
- captureResults[0] = std::move(captureResult);
-
- status = mCameraDeviceCallback->processCaptureResult(captureResults);
- if (!status.isOk()) {
- ALOGE("%s: processCaptureResult call failed: %s", __func__,
- status.getDescription().c_str());
- return;
- }
-
- ALOGV("%s: Successfully called processCaptureResult", __func__);
+ submitCaptureResult(std::move(captureResult));
}
std::chrono::nanoseconds VirtualCameraRenderThread::throttleRendering(
@@ -558,22 +524,124 @@
std::chrono::nanoseconds timeSinceLastFrame) {
std::chrono::nanoseconds surfaceTimestamp = mEglSurfaceTexture->getTimestamp();
uint64_t lastSurfaceTimestamp = mLastSurfaceTimestampNanoseconds.load();
- if (surfaceTimestamp.count() < 0 ||
- surfaceTimestamp.count() == lastSurfaceTimestamp) {
- if (lastSurfaceTimestamp > 0) {
- // The timestamps were provided by the producer but we are
- // repeating the last frame, so we increase the previous timestamp by
- // the elapsed time sinced its capture, otherwise the camera framework
- // will discard the frame.
- surfaceTimestamp = std::chrono::nanoseconds(lastSurfaceTimestamp +
- timeSinceLastFrame.count());
- }
+ if (lastSurfaceTimestamp > 0 &&
+ surfaceTimestamp.count() <= lastSurfaceTimestamp) {
+ // The timestamps were provided by the producer but we are
+ // repeating the last frame, so we increase the previous timestamp by
+    // the elapsed time since its capture, otherwise the camera framework
+ // will discard the frame.
+ surfaceTimestamp = std::chrono::nanoseconds(lastSurfaceTimestamp +
+ timeSinceLastFrame.count());
+ ALOGI(
+        "Surface's timestamp is stale. Artificially increasing the surface "
+        "timestamp by %lld ns",
+ timeSinceLastFrame.count());
}
mLastSurfaceTimestampNanoseconds.store(surfaceTimestamp.count(),
std::memory_order_relaxed);
return surfaceTimestamp;
}
+std::unique_ptr<CaptureResult> VirtualCameraRenderThread::createCaptureResult(
+ int frameNumber, std::unique_ptr<CameraMetadata> metadata) {
+ std::unique_ptr<CaptureResult> captureResult =
+ std::make_unique<CaptureResult>();
+ captureResult->fmqResultSize = 0;
+ captureResult->frameNumber = frameNumber;
+  // Partial result needs to be set to 1 when metadata is present.
+ captureResult->partialResult = 1;
+ captureResult->inputBuffer.streamId = -1;
+ captureResult->physicalCameraMetadata.resize(0);
+ captureResult->result = metadata != nullptr ? *metadata : CameraMetadata();
+ return captureResult;
+}
+
+void VirtualCameraRenderThread::renderOutputBuffers(
+ const ProcessCaptureRequestTask& request, CaptureResult& captureResult) {
+ const std::vector<CaptureRequestBuffer>& buffers = request.getBuffers();
+ captureResult.outputBuffers.resize(buffers.size());
+
+ for (int i = 0; i < buffers.size(); ++i) {
+ const CaptureRequestBuffer& reqBuffer = buffers[i];
+ StreamBuffer& resBuffer = captureResult.outputBuffers[i];
+ resBuffer.streamId = reqBuffer.getStreamId();
+ resBuffer.bufferId = reqBuffer.getBufferId();
+ resBuffer.status = BufferStatus::OK;
+
+ const std::optional<Stream> streamConfig =
+ mSessionContext.getStreamConfig(reqBuffer.getStreamId());
+
+ if (!streamConfig.has_value()) {
+ resBuffer.status = BufferStatus::ERROR;
+ continue;
+ }
+
+ auto status = streamConfig->format == PixelFormat::BLOB
+ ? renderIntoBlobStreamBuffer(
+ reqBuffer.getStreamId(), reqBuffer.getBufferId(),
+ captureResult.result, request.getRequestSettings(),
+ reqBuffer.getFence())
+ : renderIntoImageStreamBuffer(reqBuffer.getStreamId(),
+ reqBuffer.getBufferId(),
+ reqBuffer.getFence());
+ if (!status.isOk()) {
+ resBuffer.status = BufferStatus::ERROR;
+ }
+ }
+}
+
+::ndk::ScopedAStatus VirtualCameraRenderThread::notifyTimeout(
+ const ProcessCaptureRequestTask& request, CaptureResult& captureResult) {
+ const std::vector<CaptureRequestBuffer>& buffers = request.getBuffers();
+ captureResult.outputBuffers.resize(buffers.size());
+
+ std::vector<NotifyMsg> notifyMsgs;
+
+ for (int i = 0; i < buffers.size(); ++i) {
+ const CaptureRequestBuffer& reqBuffer = buffers[i];
+ StreamBuffer& resBuffer = captureResult.outputBuffers[i];
+ resBuffer.streamId = reqBuffer.getStreamId();
+ resBuffer.bufferId = reqBuffer.getBufferId();
+ resBuffer.status = BufferStatus::ERROR;
+ notifyMsgs.push_back(createErrorNotifyMsg(
+ request.getFrameNumber(), resBuffer.streamId, ErrorCode::ERROR_REQUEST));
+ }
+ return mCameraDeviceCallback->notify(notifyMsgs);
+}
+
+::ndk::ScopedAStatus VirtualCameraRenderThread::notifyShutter(
+ const ProcessCaptureRequestTask& request, const CaptureResult& captureResult,
+ std::chrono::nanoseconds captureTimestamp) {
+ std::vector<NotifyMsg> notifyMsgs{
+ createShutterNotifyMsg(request.getFrameNumber(), captureTimestamp)};
+ for (const StreamBuffer& resBuffer : captureResult.outputBuffers) {
+ if (resBuffer.status != BufferStatus::OK) {
+ notifyMsgs.push_back(
+ createErrorNotifyMsg(request.getFrameNumber(), resBuffer.streamId));
+ }
+ }
+
+ return mCameraDeviceCallback->notify(notifyMsgs);
+}
+
+::ndk::ScopedAStatus VirtualCameraRenderThread::submitCaptureResult(
+ std::unique_ptr<CaptureResult> captureResult) {
+ std::vector<::aidl::android::hardware::camera::device::CaptureResult>
+ captureResults;
+ captureResults.push_back(std::move(*captureResult));
+
+ ::ndk::ScopedAStatus status =
+ mCameraDeviceCallback->processCaptureResult(captureResults);
+ if (!status.isOk()) {
+ ALOGE("%s: processCaptureResult call failed: %s", __func__,
+ status.getDescription().c_str());
+ return status;
+ }
+
+ ALOGV("%s: Successfully called processCaptureResult", __func__);
+ return status;
+}
+
void VirtualCameraRenderThread::flushCaptureRequest(
const ProcessCaptureRequestTask& request) {
CaptureResult captureResult;
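
For illustration (not part of the patch), the wait-budget logic added around the first frame in processCaptureRequest(): until the input surface has ever been drawn to, the thread waits up to kMaxWaitFirstFrame; afterwards it waits only the remainder of the per-request frame duration and repeats the last frame on timeout. A minimal sketch with local stand-in names:

#include <chrono>
#include <cstdio>

using namespace std::chrono_literals;

static constexpr std::chrono::nanoseconds kMaxWaitFirstFrame = 3s;

std::chrono::nanoseconds waitBudget(bool isFirstFrameDrawn,
                                    std::chrono::nanoseconds maxFrameDuration,
                                    std::chrono::nanoseconds elapsedSinceLastFrame) {
  // Before the first frame: a generous fixed budget, and no elapsed time is
  // subtracted because there is no previous acquisition to measure from.
  const std::chrono::nanoseconds maxWait =
      isFirstFrameDrawn ? maxFrameDuration : kMaxWaitFirstFrame;
  const std::chrono::nanoseconds elapsed =
      isFirstFrameDrawn ? elapsedSinceLastFrame : 0ns;
  return elapsed < maxWait ? maxWait - elapsed : 0ns;
}

int main() {
  // Steady state: a 30 fps request with 10 ms already elapsed since the last frame.
  std::printf("steady state wait: %lld ns\n",
              static_cast<long long>(waitBudget(true, 33'333'333ns, 10ms).count()));
  // No frame ever drawn: wait the full first-frame budget.
  std::printf("first frame wait:  %lld ns\n",
              static_cast<long long>(waitBudget(false, 33'333'333ns, 10ms).count()));
  return 0;
}
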
diff --git a/services/camera/virtualcamera/VirtualCameraRenderThread.h b/services/camera/virtualcamera/VirtualCameraRenderThread.h
index 1fb4e84..4cad39e 100644
--- a/services/camera/virtualcamera/VirtualCameraRenderThread.h
+++ b/services/camera/virtualcamera/VirtualCameraRenderThread.h
@@ -19,6 +19,7 @@
#include <atomic>
#include <chrono>
+#include <cstddef>
#include <cstdint>
#include <deque>
#include <future>
@@ -205,6 +206,35 @@
std::chrono::nanoseconds getSurfaceTimestamp(
std::chrono::nanoseconds timeSinceLastFrame);
+ // Build a default capture result object populating the metadata from the request.
+ std::unique_ptr<::aidl::android::hardware::camera::device::CaptureResult>
+ createCaptureResult(
+ int frameNumber,
+ std::unique_ptr<aidl::android::hardware::camera::device::CameraMetadata>
+ metadata);
+
+ // Renders the images from the input surface into the request's buffers.
+ void renderOutputBuffers(
+ const ProcessCaptureRequestTask& request,
+ ::aidl::android::hardware::camera::device::CaptureResult& captureResult);
+
+ // Notify a shutter event for all the buffers in this request.
+ ::ndk::ScopedAStatus notifyShutter(
+ const ProcessCaptureRequestTask& request,
+ const ::aidl::android::hardware::camera::device::CaptureResult& captureResult,
+ std::chrono::nanoseconds captureTimestamp);
+
+ // Notify a timeout error for this request. The capture result still needs to
+ // be submitted after this call.
+ ::ndk::ScopedAStatus notifyTimeout(
+ const ProcessCaptureRequestTask& request,
+ ::aidl::android::hardware::camera::device::CaptureResult& captureResult);
+
+ // Submit the capture result to the camera callback.
+ ::ndk::ScopedAStatus submitCaptureResult(
+ std::unique_ptr<::aidl::android::hardware::camera::device::CaptureResult>
+ captureResult);
+
// Camera callback
const std::shared_ptr<
::aidl::android::hardware::camera::device::ICameraDeviceCallback>
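
For illustration (not part of the patch), the stale-timestamp handling in getSurfaceTimestamp() above: when the producer's timestamp has not advanced because the last frame is being repeated, the previous timestamp is pushed forward by the elapsed time so the camera framework does not discard the repeated frame. A minimal sketch with local stand-in names:

#include <chrono>
#include <cstdio>

using namespace std::chrono_literals;

std::chrono::nanoseconds adjustSurfaceTimestamp(
    std::chrono::nanoseconds surfaceTimestamp,
    std::chrono::nanoseconds lastSurfaceTimestamp,
    std::chrono::nanoseconds timeSinceLastFrame) {
  if (lastSurfaceTimestamp > 0ns && surfaceTimestamp <= lastSurfaceTimestamp) {
    // Repeated frame: synthesize a monotonically increasing timestamp.
    return lastSurfaceTimestamp + timeSinceLastFrame;
  }
  return surfaceTimestamp;
}

int main() {
  // Producer stalled at t = 100 ms; 33 ms have passed since the last capture.
  auto adjusted = adjustSurfaceTimestamp(100ms, 100ms, 33ms);
  std::printf("adjusted timestamp: %lld ns\n",
              static_cast<long long>(adjusted.count()));
  return 0;
}
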
diff --git a/services/camera/virtualcamera/VirtualCameraService.cc b/services/camera/virtualcamera/VirtualCameraService.cc
index 7466089..67225c9 100644
--- a/services/camera/virtualcamera/VirtualCameraService.cc
+++ b/services/camera/virtualcamera/VirtualCameraService.cc
@@ -240,11 +240,16 @@
const VirtualCameraConfiguration& configuration,
const std::string& cameraId, const int32_t deviceId, bool* _aidl_return) {
if (!mPermissionProxy.checkCallingPermission(kCreateVirtualDevicePermission)) {
- ALOGE("%s: caller (pid %d, uid %d) doesn't hold %s permission", __func__,
- getpid(), getuid(), kCreateVirtualDevicePermission);
return ndk::ScopedAStatus::fromExceptionCode(EX_SECURITY);
}
+ return registerCameraNoCheck(token, configuration, cameraId, deviceId,
+ _aidl_return);
+}
+ndk::ScopedAStatus VirtualCameraService::registerCameraNoCheck(
+ const ::ndk::SpAIBinder& token,
+ const VirtualCameraConfiguration& configuration,
+ const std::string& cameraId, const int32_t deviceId, bool* _aidl_return) {
if (_aidl_return == nullptr) {
return ndk::ScopedAStatus::fromServiceSpecificError(
Status::EX_ILLEGAL_ARGUMENT);
@@ -390,7 +395,7 @@
status = enableTestCameraCmd(out, err, cmd.optionToValueMap);
break;
case Command::DISABLE_TEST_CAMERA:
- disableTestCameraCmd(out);
+ status = disableTestCameraCmd(out);
break;
}
@@ -481,24 +486,27 @@
configuration.virtualCameraCallback =
ndk::SharedRefBase::make<VirtualCameraTestInstance>(
inputFps.value_or(kTestCameraDefaultInputFps));
- registerCamera(mTestCameraToken, configuration,
- cameraId.value_or(std::to_string(sNextIdNumericalPortion++)),
- kDefaultDeviceId, &ret);
- if (ret) {
- dprintf(out, "Successfully registered test camera %s\n",
- getCamera(mTestCameraToken)->getCameraName().c_str());
- } else {
- dprintf(err, "Failed to create test camera\n");
+ registerCameraNoCheck(
+ mTestCameraToken, configuration,
+ cameraId.value_or(std::to_string(sNextIdNumericalPortion++)),
+ kDefaultDeviceId, &ret);
+ if (!ret) {
+ dprintf(err, "Failed to create test camera (error %d)\n", ret);
+ return ret;
}
+
+ dprintf(out, "Successfully registered test camera %s\n",
+ getCamera(mTestCameraToken)->getCameraName().c_str());
return STATUS_OK;
}
-void VirtualCameraService::disableTestCameraCmd(const int out) {
+binder_status_t VirtualCameraService::disableTestCameraCmd(const int out) {
if (mTestCameraToken == nullptr) {
dprintf(out, "Test camera is not registered.");
}
- unregisterCamera(mTestCameraToken);
+ binder_status_t ret = unregisterCamera(mTestCameraToken).getStatus();
mTestCameraToken.set(nullptr);
+ return ret;
}
} // namespace virtualcamera
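
For illustration (not part of the patch), the registerCamera()/registerCameraNoCheck() split: the public binder entry point enforces the caller permission and delegates to an internal helper, while the shell test-camera path (already gated by adb) calls the helper directly. A minimal sketch with simplified stand-ins:

#include <cstdio>
#include <string>

class MiniCameraService {
 public:
  // Public entry point: enforce the caller permission, then delegate.
  bool registerCamera(const std::string& cameraId, bool callerHasPermission) {
    if (!callerHasPermission) {
      std::fprintf(stderr, "registerCamera: caller lacks permission\n");
      return false;
    }
    return registerCameraNoCheck(cameraId);
  }

  // Internal path used by the shell command: no permission check here.
  bool registerCameraNoCheck(const std::string& cameraId) {
    std::printf("registered camera %s\n", cameraId.c_str());
    return true;
  }
};

int main() {
  MiniCameraService service;
  service.registerCamera("42", /*callerHasPermission=*/false);  // rejected
  service.registerCameraNoCheck("43");                          // shell path
  return 0;
}
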
diff --git a/services/camera/virtualcamera/VirtualCameraService.h b/services/camera/virtualcamera/VirtualCameraService.h
index 4ef01c7..defa75b 100644
--- a/services/camera/virtualcamera/VirtualCameraService.h
+++ b/services/camera/virtualcamera/VirtualCameraService.h
@@ -85,7 +85,16 @@
binder_status_t enableTestCameraCmd(
int out, int err, const std::map<std::string, std::string>& options);
// Disable and destroy test camera instance if there's one.
- void disableTestCameraCmd(int out);
+ binder_status_t disableTestCameraCmd(int out);
+
+  // Register a camera corresponding to the binder token without checking the
+  // caller's permission.
+ ndk::ScopedAStatus registerCameraNoCheck(
+ const ::ndk::SpAIBinder& token,
+ const ::aidl::android::companion::virtualcamera::VirtualCameraConfiguration&
+ configuration,
+ const std::string& cameraId, int32_t deviceId, bool* _aidl_return)
+ EXCLUDES(mLock);
std::shared_ptr<VirtualCameraProvider> mVirtualCameraProvider;
bool mVerifyEglExtensions = true;
diff --git a/services/camera/virtualcamera/util/EglSurfaceTexture.cc b/services/camera/virtualcamera/util/EglSurfaceTexture.cc
index be36ec4..fc469a0 100644
--- a/services/camera/virtualcamera/util/EglSurfaceTexture.cc
+++ b/services/camera/virtualcamera/util/EglSurfaceTexture.cc
@@ -105,6 +105,10 @@
return std::chrono::nanoseconds(mGlConsumer->getTimestamp());
}
+bool EglSurfaceTexture::isFirstFrameDrawn() {
+ return mGlConsumer->getFrameNumber() > 0;
+}
+
GLuint EglSurfaceTexture::updateTexture() {
int previousFrameId;
int framesAdvance = 0;
diff --git a/services/camera/virtualcamera/util/EglSurfaceTexture.h b/services/camera/virtualcamera/util/EglSurfaceTexture.h
index c1f1169..9f75315 100644
--- a/services/camera/virtualcamera/util/EglSurfaceTexture.h
+++ b/services/camera/virtualcamera/util/EglSurfaceTexture.h
@@ -86,6 +86,9 @@
// set by the most recent call to updateTexture.
std::chrono::nanoseconds getTimestamp();
+  // Returns true if a frame has ever been drawn on this surface.
+ bool isFirstFrameDrawn();
+
private:
#if !COM_ANDROID_GRAPHICS_LIBGUI_FLAGS(WB_CONSUMER_BASE_OWNS_BQ)
sp<IGraphicBufferProducer> mBufferProducer;
diff --git a/services/camera/virtualcamera/util/Permissions.cc b/services/camera/virtualcamera/util/Permissions.cc
index 634bca3..1c1db32 100644
--- a/services/camera/virtualcamera/util/Permissions.cc
+++ b/services/camera/virtualcamera/util/Permissions.cc
@@ -20,7 +20,7 @@
#include "Permissions.h"
#include "binder/PermissionCache.h"
-#include "log/log.h"
+#include "log/log_main.h"
namespace android {
namespace companion {
@@ -39,8 +39,14 @@
const bool hasPermission = PermissionCache::checkCallingPermission(
String16(permission.c_str()), &pid, &uid);
- ALOGV("%s: Checking %s permission for pid %d uid %d: %s", __func__,
- permission.c_str(), pid, uid, hasPermission ? "granted" : "denied");
+ if (hasPermission) {
+ ALOGV("%s: Checking %s permission for pid %d uid %d: granted", __func__,
+ permission.c_str(), pid, uid);
+ } else {
+ ALOGW("%s: Checking %s permission for pid %d uid %d: denied", __func__,
+ permission.c_str(), pid, uid);
+ }
+
return hasPermission;
}
} // namespace
diff --git a/services/oboeservice/AAudioServiceEndpoint.cpp b/services/oboeservice/AAudioServiceEndpoint.cpp
index e7d14a0..e49e9e7 100644
--- a/services/oboeservice/AAudioServiceEndpoint.cpp
+++ b/services/oboeservice/AAudioServiceEndpoint.cpp
@@ -25,6 +25,7 @@
#include <sstream>
#include <vector>
+#include <system/aaudio/AAudio.h>
#include <utils/Singleton.h>
@@ -195,20 +196,28 @@
? AAudioConvert_inputPresetToAudioSource(params->getInputPreset())
: AUDIO_SOURCE_DEFAULT;
audio_flags_mask_t flags;
+ std::optional<std::string> optTags = {};
if (direction == AAUDIO_DIRECTION_OUTPUT) {
flags = AAudio_computeAudioFlagsMask(
params->getAllowedCapturePolicy(),
params->getSpatializationBehavior(),
params->isContentSpatialized(),
AUDIO_OUTPUT_FLAG_FAST);
+ optTags = params->getTags();
} else {
flags = static_cast<audio_flags_mask_t>(AUDIO_FLAG_LOW_LATENCY
| AAudioConvert_privacySensitiveToAudioFlagsMask(params->isPrivacySensitive()));
}
- return {
+ audio_attributes_t nativeAttributes = {
.content_type = contentType,
.usage = usage,
.source = source,
.flags = flags,
- .tags = "" };
+ .tags = ""
+ };
+ if (optTags.has_value() && !optTags->empty()) {
+ strncpy(nativeAttributes.tags, optTags.value().c_str(), AAUDIO_ATTRIBUTES_TAGS_MAX_SIZE);
+ nativeAttributes.tags[AAUDIO_ATTRIBUTES_TAGS_MAX_SIZE - 1] = '\0';
+ }
+ return nativeAttributes;
}
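
For illustration (not part of the patch), the bounded copy of the optional AAudio tags into the fixed-size audio_attributes_t::tags field: strncpy does not null-terminate when the source fills the buffer, so the last byte is forced to '\0'. A minimal sketch where kTagsMaxSize stands in for AAUDIO_ATTRIBUTES_TAGS_MAX_SIZE:

#include <cstddef>
#include <cstdio>
#include <cstring>
#include <optional>
#include <string>

constexpr std::size_t kTagsMaxSize = 256;  // stand-in for AAUDIO_ATTRIBUTES_TAGS_MAX_SIZE

struct MiniAudioAttributes {
  char tags[kTagsMaxSize] = "";
};

MiniAudioAttributes makeAttributes(const std::optional<std::string>& optTags) {
  MiniAudioAttributes attributes;  // tags default to the empty string
  if (optTags.has_value() && !optTags->empty()) {
    std::strncpy(attributes.tags, optTags->c_str(), kTagsMaxSize);
    attributes.tags[kTagsMaxSize - 1] = '\0';  // guarantee termination on truncation
  }
  return attributes;
}

int main() {
  MiniAudioAttributes attributes =
      makeAttributes(std::string("oem=spatial;profile=movie"));
  std::printf("tags: %s\n", attributes.tags);
  return 0;
}
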