Merge "GraphicsTracker: handle dequeueBuffer error while switching Surfaces" into main
diff --git a/camera/Android.bp b/camera/Android.bp
index 9e1efae..d91fcb2 100644
--- a/camera/Android.bp
+++ b/camera/Android.bp
@@ -75,6 +75,7 @@
local_include_dirs: ["aidl"],
include_dirs: [
"frameworks/native/aidl/gui",
+ "frameworks/native/libs/permission/aidl",
],
},
@@ -106,6 +107,7 @@
shared_libs: [
"camera_platform_flags_c_lib",
+ "framework-permission-aidl-cpp",
"lib-platform-compat-native-api",
"libbase",
"libbinder",
@@ -114,6 +116,7 @@
"libgui",
"liblog",
"libnativewindow",
+ "libpermission",
"libutils",
],
@@ -126,6 +129,7 @@
"include/camera",
],
export_shared_lib_headers: [
+ "framework-permission-aidl-cpp",
"libcamera_metadata",
"libgui",
"libnativewindow",
diff --git a/camera/Camera.cpp b/camera/Camera.cpp
index 8018390..d90f7c9 100644
--- a/camera/Camera.cpp
+++ b/camera/Camera.cpp
@@ -69,13 +69,12 @@
// deadlock if we call any method of ICamera here.
}
-sp<Camera> Camera::connect(int cameraId, const std::string& clientPackageName,
- int clientUid, int clientPid, int targetSdkVersion, int rotationOverride,
- bool forceSlowJpegMode, int32_t deviceId, int32_t devicePolicy)
+sp<Camera> Camera::connect(int cameraId, int targetSdkVersion, int rotationOverride,
+ bool forceSlowJpegMode, const AttributionSourceState& clientAttribution,
+ int32_t devicePolicy)
{
- return CameraBaseT::connect(cameraId, clientPackageName, clientUid,
- clientPid, targetSdkVersion, rotationOverride, forceSlowJpegMode, deviceId,
- devicePolicy);
+ return CameraBaseT::connect(cameraId, targetSdkVersion, rotationOverride,
+ forceSlowJpegMode, clientAttribution, devicePolicy);
}
status_t Camera::reconnect()
diff --git a/camera/CameraBase.cpp b/camera/CameraBase.cpp
index d7415a3..774db25 100644
--- a/camera/CameraBase.cpp
+++ b/camera/CameraBase.cpp
@@ -161,10 +161,10 @@
template <typename TCam, typename TCamTraits>
sp<TCam> CameraBase<TCam, TCamTraits>::connect(int cameraId,
- const std::string& clientPackageName,
- int clientUid, int clientPid, int targetSdkVersion,
- int rotationOverride, bool forceSlowJpegMode,
- int32_t deviceId, int32_t devicePolicy)
+ int targetSdkVersion, int rotationOverride,
+ bool forceSlowJpegMode,
+ const AttributionSourceState& clientAttribution,
+ int32_t devicePolicy)
{
ALOGV("%s: connect", __FUNCTION__);
sp<TCam> c = new TCam(cameraId);
@@ -176,9 +176,9 @@
TCamConnectService fnConnectService = TCamTraits::fnConnectService;
ALOGI("Connect camera (legacy API) - rotationOverride %d, forceSlowJpegMode %d",
rotationOverride, forceSlowJpegMode);
- ret = (cs.get()->*fnConnectService)(cl, cameraId, clientPackageName, clientUid,
- clientPid, targetSdkVersion, rotationOverride, forceSlowJpegMode, deviceId,
- devicePolicy, /*out*/ &c->mCamera);
+ ret = (cs.get()->*fnConnectService)(cl, cameraId, targetSdkVersion,
+ rotationOverride, forceSlowJpegMode, clientAttribution, devicePolicy,
+ /*out*/ &c->mCamera);
}
if (ret.isOk() && c->mCamera != nullptr) {
IInterface::asBinder(c->mCamera)->linkToDeath(c);
@@ -257,7 +257,8 @@
}
template <typename TCam, typename TCamTraits>
-int CameraBase<TCam, TCamTraits>::getNumberOfCameras(int32_t deviceId, int32_t devicePolicy) {
+int CameraBase<TCam, TCamTraits>::getNumberOfCameras(
+ const AttributionSourceState& clientAttribution, int32_t devicePolicy) {
const sp<::android::hardware::ICameraService> cs = getCameraService();
if (!cs.get()) {
@@ -266,7 +267,7 @@
}
int32_t count;
binder::Status res = cs->getNumberOfCameras(
- ::android::hardware::ICameraService::CAMERA_TYPE_BACKWARD_COMPATIBLE, deviceId,
+ ::android::hardware::ICameraService::CAMERA_TYPE_BACKWARD_COMPATIBLE, clientAttribution,
devicePolicy, &count);
if (!res.isOk()) {
ALOGE("Error reading number of cameras: %s",
@@ -279,12 +280,12 @@
// this can be in BaseCamera but it should be an instance method
template <typename TCam, typename TCamTraits>
status_t CameraBase<TCam, TCamTraits>::getCameraInfo(int cameraId,
- int rotationOverride, int32_t deviceId, int32_t devicePolicy,
+ int rotationOverride, const AttributionSourceState& clientAttribution, int32_t devicePolicy,
struct hardware::CameraInfo* cameraInfo) {
const sp<::android::hardware::ICameraService> cs = getCameraService();
if (cs == 0) return UNKNOWN_ERROR;
- binder::Status res = cs->getCameraInfo(cameraId, rotationOverride, deviceId, devicePolicy,
- cameraInfo);
+ binder::Status res = cs->getCameraInfo(cameraId, rotationOverride, clientAttribution,
+ devicePolicy, cameraInfo);
return res.isOk() ? OK : res.serviceSpecificErrorCode();
}
diff --git a/camera/aidl/android/hardware/ICameraService.aidl b/camera/aidl/android/hardware/ICameraService.aidl
index d9a0934..ce6c2d3 100644
--- a/camera/aidl/android/hardware/ICameraService.aidl
+++ b/camera/aidl/android/hardware/ICameraService.aidl
@@ -16,6 +16,7 @@
package android.hardware;
+import android.content.AttributionSourceState;
import android.hardware.ICamera;
import android.hardware.ICameraClient;
import android.hardware.camera2.ICameraDeviceUser;
@@ -66,13 +67,13 @@
*
* @param type The type of the camera, can be either CAMERA_TYPE_BACKWARD_COMPATIBLE
* or CAMERA_TYPE_ALL.
- * @param deviceId The device id of the context associated with the caller.
+ * @param clientAttribution The AttributionSource of the client.
* @param devicePolicy The camera policy of the device of the associated context (default
* policy for default device context). Only virtual cameras would be exposed
* only for custom policy and only real cameras would be exposed for default
* policy.
*/
- int getNumberOfCameras(int type, int deviceId, int devicePolicy);
+ int getNumberOfCameras(int type, in AttributionSourceState clientAttribution, int devicePolicy);
/**
* If changed, reflect in
@@ -97,19 +98,20 @@
* will override the sensor orientation and rotate and crop, while {@link
* ICameraService#ROTATION_OVERRIDE_ROTATION_ONLY} will rotate and crop the camera feed
* without changing the sensor orientation.
- * @param deviceId The device id of the context associated with the caller.
+ * @param clientAttribution The AttributionSource of the client.
* @param devicePolicy The camera policy of the device of the associated context (default
* policy for default device context). Only virtual cameras would be exposed
* only for custom policy and only real cameras would be exposed for default
* policy.
* @return CameraInfo for the camera.
*/
- CameraInfo getCameraInfo(int cameraId, int rotationOverride, int deviceId,
- int devicePolicy);
+ CameraInfo getCameraInfo(int cameraId, int rotationOverride,
+ in AttributionSourceState clientAttribution, int devicePolicy);
/**
- * Default UID/PID values for non-privileged callers of
- * connect() and connectDevice()
+ * Default UID/PID values for non-privileged callers of connect() and connectDevice(). Can be
+ * used to set the pid/uid fields of AttributionSourceState to indicate the calling uid/pid
+ * should be used.
*/
const int USE_CALLING_UID = -1;
const int USE_CALLING_PID = -1;
@@ -118,9 +120,6 @@
* Open a camera device through the old camera API.
*
* @param cameraId The ID of the camera to open.
- * @param opPackageName The package name to report for the app-ops.
- * @param clientUid UID for the calling client.
- * @param clientPid PID for the calling client.
* @param targetSdkVersion the target sdk level of the application calling this function.
* @param rotationOverride Whether to override the sensor orientation information to
* correspond to portrait: {@link ICameraService#ROTATION_OVERRIDE_OVERRIDE_TO_PORTRAIT}
@@ -128,7 +127,7 @@
* ICameraService#ROTATION_OVERRIDE_ROTATION_ONLY} will rotate and crop the camera feed
* without changing the sensor orientation.
* @param forceSlowJpegMode Whether to force slow jpeg mode.
- * @param deviceId The device id of the context associated with the caller.
+ * @param clientAttribution The AttributionSource of the client.
* @param devicePolicy The camera policy of the device of the associated context (default
* policy for default device context). Only virtual cameras would be exposed
* only for custom policy and only real cameras would be exposed for default
@@ -136,12 +135,10 @@
*/
ICamera connect(ICameraClient client,
int cameraId,
- @utf8InCpp String opPackageName,
- int clientUid, int clientPid,
int targetSdkVersion,
int rotationOverride,
boolean forceSlowJpegMode,
- int deviceId,
+ in AttributionSourceState clientAttribution,
int devicePolicy);
/**
@@ -149,15 +146,13 @@
* Only supported for device HAL versions >= 3.2.
*
* @param cameraId The ID of the camera to open.
- * @param opPackageName The package name to report for the app-ops.
- * @param clientUid UID for the calling client.
* @param targetSdkVersion the target sdk level of the application calling this function.
* @param rotationOverride Whether to override the sensor orientation information to
* correspond to portrait: {@link ICameraService#ROTATION_OVERRIDE_OVERRIDE_TO_PORTRAIT}
* will override the sensor orientation and rotate and crop, while {@link
* ICameraService#ROTATION_OVERRIDE_ROTATION_ONLY} will rotate and crop the camera feed
* without changing the sensor orientation.
- * @param deviceId The device id of the context associated with the caller.
+ * @param clientAttribution The AttributionSource of the client.
* @param devicePolicy The camera policy of the device of the associated context (default
* policy for default device context). Only virtual cameras would be exposed
* only for custom policy and only real cameras would be exposed for default
@@ -165,12 +160,10 @@
*/
ICameraDeviceUser connectDevice(ICameraDeviceCallbacks callbacks,
@utf8InCpp String cameraId,
- @utf8InCpp String opPackageName,
- @nullable @utf8InCpp String featureId,
- int clientUid, int oomScoreOffset,
+ int oomScoreOffset,
int targetSdkVersion,
int rotationOverride,
- int deviceId,
+ in AttributionSourceState clientAttribution,
int devicePolicy);
/**
@@ -194,7 +187,7 @@
*
* @param sessions the set of camera id and session configuration pairs to be queried.
* @param targetSdkVersion the target sdk level of the application calling this function.
- * @param deviceId The device id of the context associated with the caller.
+ * @param clientAttribution The AttributionSource of the client.
* @param devicePolicy The camera policy of the device of the associated context (default
* policy for default device context). Only virtual cameras would be exposed
* only for custom policy and only real cameras would be exposed for default
@@ -206,7 +199,7 @@
*/
boolean isConcurrentSessionConfigurationSupported(
in CameraIdAndSessionConfiguration[] sessions,
- int targetSdkVersion, int deviceId, int devicePolicy);
+ int targetSdkVersion, in AttributionSourceState clientAttribution, int devicePolicy);
/**
* Inject Session Params into an existing camera session.
@@ -236,7 +229,7 @@
* will override the sensor orientation and rotate and crop, while {@link
* ICameraService#ROTATION_OVERRIDE_ROTATION_ONLY} will rotate and crop the camera feed
* without changing the sensor orientation.
- * @param deviceId The device id of the context associated with the caller.
+ * @param clientAttribution The AttributionSource of the client.
* @param devicePolicy The camera policy of the device of the associated context (default
* policy for default device context). Only virtual cameras would be exposed
* only for custom policy and only real cameras would be exposed for default
@@ -244,7 +237,7 @@
* @return Characteristics for the given camera.
*/
CameraMetadataNative getCameraCharacteristics(@utf8InCpp String cameraId, int targetSdkVersion,
- int rotationOverride, int deviceId, int devicePolicy);
+ int rotationOverride, in AttributionSourceState clientAttribution, int devicePolicy);
/**
* Read in the vendor tag descriptors from the camera module HAL.
@@ -284,14 +277,14 @@
* Set the torch mode for a camera device.
*
* @param cameraId The ID of the camera to set torch mode for.
- * @param deviceId The device id of the context associated with the caller.
+ * @param clientAttribution The AttributionSource of the client.
* @param devicePolicy The camera policy of the device of the associated context (default
* policy for default device context). Only virtual cameras would be exposed
* only for custom policy and only real cameras would be exposed for default
* policy.
*/
void setTorchMode(@utf8InCpp String cameraId, boolean enabled, IBinder clientBinder,
- int deviceId, int devicePolicy);
+ in AttributionSourceState clientAttribution, int devicePolicy);
/**
* Change the brightness level of the flash unit associated with cameraId to strengthLevel.
@@ -299,27 +292,28 @@
*
* @param cameraId The ID of the camera.
* @param strengthLevel The torch strength level to set for the camera.
- * @param deviceId The device id of the context associated with the caller.
+ * @param clientAttribution The AttributionSource of the client.
* @param devicePolicy The camera policy of the device of the associated context (default
* policy for default device context). Only virtual cameras would be exposed
* only for custom policy and only real cameras would be exposed for default
* policy.
*/
void turnOnTorchWithStrengthLevel(@utf8InCpp String cameraId, int strengthLevel,
- IBinder clientBinder, int deviceId, int devicePolicy);
+ IBinder clientBinder, in AttributionSourceState clientAttribution, int devicePolicy);
/**
* Get the brightness level of the flash unit associated with cameraId.
*
* @param cameraId The ID of the camera.
- * @param deviceId The device id of the context associated with the caller.
+ * @param clientAttribution The AttributionSource of the client.
* @param devicePolicy The camera policy of the device of the associated context (default
* policy for default device context). Only virtual cameras would be exposed
* only for custom policy and only real cameras would be exposed for default
* policy.
* @return Torch strength level for the camera.
*/
- int getTorchStrengthLevel(@utf8InCpp String cameraId, int deviceId, int devicePolicy);
+ int getTorchStrengthLevel(@utf8InCpp String cameraId,
+ in AttributionSourceState clientAttribution, int devicePolicy);
/**
* Notify the camera service of a system event. Should only be called from system_server.
@@ -385,7 +379,7 @@
*
* @param cameraId The camera id to create the CaptureRequest for.
* @param templateId The template id create the CaptureRequest for.
- * @param deviceId the device id of the context associated with the caller.
+ * @param clientAttribution The AttributionSource of the client.
* @param devicePolicy The camera policy of the device of the associated context (default
* policy for default device context). Only virtual cameras would be exposed
* only for custom policy and only real cameras would be exposed for default
@@ -393,7 +387,7 @@
* @return Metadata representing the CaptureRequest.
*/
CameraMetadataNative createDefaultRequest(@utf8InCpp String cameraId, int templateId,
- int deviceId, int devicePolicy);
+ in AttributionSourceState clientAttribution, int devicePolicy);
/**
* Check whether a particular session configuration with optional session parameters
@@ -402,7 +396,7 @@
* @param cameraId The camera id to query session configuration for
* @param targetSdkVersion the target sdk level of the application calling this function.
* @param sessionConfiguration Specific session configuration to be verified.
- * @param deviceId The device id of the context associated with the caller.
+ * @param clientAttribution The AttributionSource of the client.
* @param devicePolicy The camera policy of the device of the associated context (default
* policy for default device context). Only virtual cameras would be exposed
* only for custom policy and only real cameras would be exposed for default
@@ -412,7 +406,7 @@
*/
boolean isSessionConfigurationWithParametersSupported(@utf8InCpp String cameraId,
int targetSdkVersion, in SessionConfiguration sessionConfiguration,
- int deviceId, int devicePolicy);
+ in AttributionSourceState clientAttribution, int devicePolicy);
/**
* Get the camera characteristics for a particular session configuration for
@@ -427,7 +421,7 @@
* without changing the sensor orientation.
* @param sessionConfiguration Session configuration for which the characteristics
* must be fetched.
- * @param deviceId The device id of the context associated with the caller.
+ * @param clientAttribution The AttributionSource of the client.
* @param devicePolicy The camera policy of the device of the associated context (default
* policy for default device context). Only virtual cameras would be exposed
* only for custom policy and only real cameras would be exposed for default
@@ -438,6 +432,6 @@
int targetSdkVersion,
int rotationOverride,
in SessionConfiguration sessionConfiguration,
- int deviceId,
+ in AttributionSourceState clientAttribution,
int devicePolicy);
}
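
Editor's note on the reworked calling convention: the per-argument identity fields (opPackageName, clientUid, clientPid, deviceId) are folded into a single AttributionSourceState parameter. The sketch below is illustrative only, mirroring the pattern used later in this change (ACameraManager.cpp and the camera tests); the helper name makeCallingAttribution is hypothetical.

```cpp
// Sketch only: how a native, non-privileged caller might build the new
// clientAttribution argument. USE_CALLING_UID / USE_CALLING_PID tell the
// service to resolve the caller's uid/pid itself; an empty packageName lets
// the service derive the package from the UID.
#include <optional>

#include <android/content/AttributionSourceState.h>
#include <android/hardware/ICameraService.h>

using ::android::content::AttributionSourceState;
using ::android::hardware::ICameraService;

AttributionSourceState makeCallingAttribution(int32_t deviceId) {  // hypothetical helper
    AttributionSourceState attribution;
    attribution.uid = ICameraService::USE_CALLING_UID;
    attribution.pid = ICameraService::USE_CALLING_PID;
    attribution.deviceId = deviceId;          // replaces the old per-call deviceId parameter
    attribution.packageName = "";             // empty: the camera service derives it from the UID
    attribution.attributionTag = std::nullopt;
    return attribution;
}
```

The resulting struct is then passed where the old deviceId argument used to go, e.g. getNumberOfCameras(CAMERA_TYPE_ALL, attribution, devicePolicy).
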
diff --git a/camera/include/camera/Camera.h b/camera/include/camera/Camera.h
index 3ecd10d..646b139 100644
--- a/camera/include/camera/Camera.h
+++ b/camera/include/camera/Camera.h
@@ -59,7 +59,7 @@
typedef ::android::hardware::ICameraClient TCamCallbacks;
typedef ::android::binder::Status (::android::hardware::ICameraService::*TCamConnectService)
(const sp<::android::hardware::ICameraClient>&,
- int, const std::string&, int, int, int, int, bool, int32_t, int32_t,
+ int, int, int, bool, const AttributionSourceState&, int32_t,
/*out*/
sp<::android::hardware::ICamera>*);
static TCamConnectService fnConnectService;
@@ -81,10 +81,9 @@
// construct a camera client from an existing remote
static sp<Camera> create(const sp<::android::hardware::ICamera>& camera);
static sp<Camera> connect(int cameraId,
- const std::string& clientPackageName,
- int clientUid, int clientPid, int targetSdkVersion,
- int rotationOverride, bool forceSlowJpegMode,
- int32_t deviceId = kDefaultDeviceId, int32_t devicePolicy = 0);
+ int targetSdkVersion, int rotationOverride, bool forceSlowJpegMode,
+ const AttributionSourceState& clientAttribution,
+ int32_t devicePolicy = 0);
virtual ~Camera();
diff --git a/camera/include/camera/CameraBase.h b/camera/include/camera/CameraBase.h
index 3370b3d..d98abe4 100644
--- a/camera/include/camera/CameraBase.h
+++ b/camera/include/camera/CameraBase.h
@@ -17,6 +17,7 @@
#ifndef ANDROID_HARDWARE_CAMERA_BASE_H
#define ANDROID_HARDWARE_CAMERA_BASE_H
+#include <android/content/AttributionSourceState.h>
#include <android/hardware/ICameraServiceListener.h>
#include <utils/Mutex.h>
@@ -107,6 +108,7 @@
} // namespace hardware
+using content::AttributionSourceState;
using hardware::CameraInfo;
template <typename TCam>
@@ -123,19 +125,19 @@
typedef typename TCamTraits::TCamConnectService TCamConnectService;
static sp<TCam> connect(int cameraId,
- const std::string& clientPackageName,
- int clientUid, int clientPid, int targetSdkVersion,
- int rotationOverride, bool forceSlowJpegMode,
- int32_t deviceId, int32_t devicePolicy);
+ int targetSdkVersion, int rotationOverride, bool forceSlowJpegMode,
+ const AttributionSourceState &clientAttribution,
+ int32_t devicePolicy);
virtual void disconnect();
void setListener(const sp<TCamListener>& listener);
- static int getNumberOfCameras(int32_t deviceId, int32_t devicePolicy);
+ static int getNumberOfCameras(const AttributionSourceState& clientAttribution,
+ int32_t devicePolicy);
static status_t getCameraInfo(int cameraId,
int rotationOverride,
- int32_t deviceId,
+ const AttributionSourceState& clientAttribution,
int32_t devicePolicy,
/*out*/
struct hardware::CameraInfo* cameraInfo);
diff --git a/camera/ndk/Android.bp b/camera/ndk/Android.bp
index a603659..379c0b5 100644
--- a/camera/ndk/Android.bp
+++ b/camera/ndk/Android.bp
@@ -79,6 +79,7 @@
shared_libs: [
"android.companion.virtual.virtualdevice_aidl-cpp",
"android.companion.virtualdevice.flags-aconfig-cc",
+ "framework-permission-aidl-cpp",
"libandroid_runtime",
"libbinder",
"libcamera_client",
diff --git a/camera/ndk/impl/ACameraManager.cpp b/camera/ndk/impl/ACameraManager.cpp
index f36a743..c3bec0a 100644
--- a/camera/ndk/impl/ACameraManager.cpp
+++ b/camera/ndk/impl/ACameraManager.cpp
@@ -810,9 +810,15 @@
CameraMetadata rawMetadata;
int targetSdkVersion = android_get_application_target_sdk_version();
+
+ AttributionSourceState clientAttribution;
+ clientAttribution.uid = hardware::ICameraService::USE_CALLING_UID;
+ clientAttribution.pid = hardware::ICameraService::USE_CALLING_PID;
+ clientAttribution.deviceId = mDeviceContext.deviceId;
+
binder::Status serviceRet = cs->getCameraCharacteristics(cameraIdStr,
targetSdkVersion, /*rotationOverride*/hardware::ICameraService::ROTATION_OVERRIDE_NONE,
- mDeviceContext.deviceId, static_cast<int32_t>(mDeviceContext.policy),
+ clientAttribution, static_cast<int32_t>(mDeviceContext.policy),
&rawMetadata);
if (!serviceRet.isOk()) {
switch(serviceRet.serviceSpecificErrorCode()) {
@@ -860,13 +866,20 @@
sp<hardware::camera2::ICameraDeviceCallbacks> callbacks = device->getServiceCallback();
sp<hardware::camera2::ICameraDeviceUser> deviceRemote;
int targetSdkVersion = android_get_application_target_sdk_version();
+
+ AttributionSourceState clientAttribution;
+ clientAttribution.uid = hardware::ICameraService::USE_CALLING_UID;
+ clientAttribution.pid = hardware::ICameraService::USE_CALLING_PID;
+ clientAttribution.deviceId = mDeviceContext.deviceId;
+ clientAttribution.packageName = "";
+ clientAttribution.attributionTag = std::nullopt;
+
// No way to get package name from native.
// Send a zero length package name and let camera service figure it out from UID
binder::Status serviceRet = cs->connectDevice(
- callbacks, cameraId, "", {},
- hardware::ICameraService::USE_CALLING_UID, /*oomScoreOffset*/0,
+ callbacks, cameraId, /*oomScoreOffset*/0,
targetSdkVersion, /*rotationOverride*/hardware::ICameraService::ROTATION_OVERRIDE_NONE,
- mDeviceContext.deviceId, static_cast<int32_t>(mDeviceContext.policy),
+ clientAttribution, static_cast<int32_t>(mDeviceContext.policy),
/*out*/&deviceRemote);
if (!serviceRet.isOk()) {
diff --git a/camera/tests/Android.bp b/camera/tests/Android.bp
index 9aaac6a..484335a 100644
--- a/camera/tests/Android.bp
+++ b/camera/tests/Android.bp
@@ -29,6 +29,7 @@
"CameraCharacteristicsPermission.cpp",
],
shared_libs: [
+ "framework-permission-aidl-cpp",
"liblog",
"libutils",
"libcutils",
diff --git a/camera/tests/CameraBinderTests.cpp b/camera/tests/CameraBinderTests.cpp
index e5f99be..d21513c 100644
--- a/camera/tests/CameraBinderTests.cpp
+++ b/camera/tests/CameraBinderTests.cpp
@@ -33,6 +33,7 @@
#include <hardware/gralloc.h>
#include <camera/CameraMetadata.h>
+#include <android/content/AttributionSourceState.h>
#include <android/hardware/ICameraService.h>
#include <android/hardware/ICameraServiceListener.h>
#include <android/hardware/BnCameraServiceListener.h>
@@ -347,7 +348,11 @@
binder::Status res;
int32_t numCameras = 0;
- res = service->getNumberOfCameras(hardware::ICameraService::CAMERA_TYPE_ALL, kDefaultDeviceId,
+ AttributionSourceState clientAttribution;
+ clientAttribution.deviceId = kDefaultDeviceId;
+ clientAttribution.uid = hardware::ICameraService::USE_CALLING_UID;
+ clientAttribution.packageName = "meeeeeeeee!";
+ res = service->getNumberOfCameras(hardware::ICameraService::CAMERA_TYPE_ALL, clientAttribution,
/*devicePolicy*/0, &numCameras);
EXPECT_TRUE(res.isOk()) << res;
EXPECT_LE(0, numCameras);
@@ -360,7 +365,7 @@
EXPECT_EQ(numCameras, static_cast<const int>(statuses.size()));
for (const auto &it : statuses) {
- listener->onStatusChanged(it.status, it.cameraId, kDefaultDeviceId);
+ listener->onStatusChanged(it.status, it.cameraId, clientAttribution.deviceId);
}
for (int32_t i = 0; i < numCameras; i++) {
@@ -379,7 +384,7 @@
CameraMetadata metadata;
res = service->getCameraCharacteristics(cameraId,
/*targetSdkVersion*/__ANDROID_API_FUTURE__, /*overrideToPortrait*/false,
- kDefaultDeviceId, /*devicePolicy*/0, &metadata);
+ clientAttribution, /*devicePolicy*/0, &metadata);
EXPECT_TRUE(res.isOk()) << res;
EXPECT_FALSE(metadata.isEmpty());
@@ -393,10 +398,10 @@
// Check connect binder calls
sp<TestCameraDeviceCallbacks> callbacks(new TestCameraDeviceCallbacks());
sp<hardware::camera2::ICameraDeviceUser> device;
- res = service->connectDevice(callbacks, cameraId, "meeeeeeeee!",
- {}, hardware::ICameraService::USE_CALLING_UID, /*oomScoreOffset*/ 0,
+ res = service->connectDevice(callbacks, cameraId,
+ /*oomScoreOffset*/ 0,
/*targetSdkVersion*/__ANDROID_API_FUTURE__,
- /*overrideToPortrait*/false, kDefaultDeviceId, /*devicePolicy*/0, /*out*/&device);
+ /*overrideToPortrait*/false, clientAttribution, /*devicePolicy*/0, /*out*/&device);
EXPECT_TRUE(res.isOk()) << res;
ASSERT_NE(nullptr, device.get());
device->disconnect();
@@ -406,12 +411,12 @@
if (torchStatus == hardware::ICameraServiceListener::TORCH_STATUS_AVAILABLE_OFF) {
// Check torch calls
res = service->setTorchMode(cameraId,
- /*enabled*/true, callbacks, kDefaultDeviceId, /*devicePolicy*/0);
+ /*enabled*/true, callbacks, clientAttribution, /*devicePolicy*/0);
EXPECT_TRUE(res.isOk()) << res;
EXPECT_TRUE(listener->waitForTorchState(
hardware::ICameraServiceListener::TORCH_STATUS_AVAILABLE_ON, i));
res = service->setTorchMode(cameraId,
- /*enabled*/false, callbacks, kDefaultDeviceId, /*devicePolicy*/0);
+ /*enabled*/false, callbacks, clientAttribution, /*devicePolicy*/0);
EXPECT_TRUE(res.isOk()) << res;
EXPECT_TRUE(listener->waitForTorchState(
hardware::ICameraServiceListener::TORCH_STATUS_AVAILABLE_OFF, i));
@@ -437,10 +442,14 @@
sp<hardware::camera2::ICameraDeviceUser> device;
{
SCOPED_TRACE("openNewDevice");
- binder::Status res = service->connectDevice(callbacks, deviceId, "meeeeeeeee!",
- {}, hardware::ICameraService::USE_CALLING_UID, /*oomScoreOffset*/ 0,
+ AttributionSourceState clientAttribution;
+ clientAttribution.deviceId = kDefaultDeviceId;
+ clientAttribution.uid = hardware::ICameraService::USE_CALLING_UID;
+ clientAttribution.packageName = "meeeeeeeee!";
+ binder::Status res = service->connectDevice(callbacks, deviceId,
+ /*oomScoreOffset*/ 0,
/*targetSdkVersion*/__ANDROID_API_FUTURE__,
- /*overrideToPortrait*/false, kDefaultDeviceId, /*devicePolicy*/0,
+ /*overrideToPortrait*/false, clientAttribution, /*devicePolicy*/0,
/*out*/&device);
EXPECT_TRUE(res.isOk()) << res;
}
@@ -473,11 +482,13 @@
serviceListener = new TestCameraServiceListener();
std::vector<hardware::CameraStatus> statuses;
service->addListener(serviceListener, &statuses);
+ AttributionSourceState clientAttribution;
+ clientAttribution.deviceId = kDefaultDeviceId;
for (const auto &it : statuses) {
- serviceListener->onStatusChanged(it.status, it.cameraId, kDefaultDeviceId);
+ serviceListener->onStatusChanged(it.status, it.cameraId, clientAttribution.deviceId);
}
service->getNumberOfCameras(hardware::ICameraService::CAMERA_TYPE_BACKWARD_COMPATIBLE,
- kDefaultDeviceId, /*devicePolicy*/0, &numCameras);
+ clientAttribution, /*devicePolicy*/0, &numCameras);
}
virtual void TearDown() {
diff --git a/camera/tests/CameraCharacteristicsPermission.cpp b/camera/tests/CameraCharacteristicsPermission.cpp
index 10f7f22..9204eb1 100644
--- a/camera/tests/CameraCharacteristicsPermission.cpp
+++ b/camera/tests/CameraCharacteristicsPermission.cpp
@@ -19,6 +19,7 @@
#include <gtest/gtest.h>
+#include <android/content/AttributionSourceState.h>
#include <binder/ProcessState.h>
#include <utils/Errors.h>
#include <utils/Log.h>
@@ -47,8 +48,10 @@
sp<IServiceManager> sm = defaultServiceManager();
sp<IBinder> binder = sm->getService(String16("media.camera"));
mCameraService = interface_cast<ICameraService>(binder);
+ AttributionSourceState clientAttribution;
+ clientAttribution.deviceId = kDefaultDeviceId;
rc = mCameraService->getNumberOfCameras(
- hardware::ICameraService::CAMERA_TYPE_ALL, kDefaultDeviceId, /*devicePolicy*/0,
+ hardware::ICameraService::CAMERA_TYPE_ALL, clientAttribution, /*devicePolicy*/0,
&numCameras);
EXPECT_TRUE(rc.isOk());
}
@@ -73,9 +76,11 @@
CameraMetadata metadata;
std::vector<int32_t> tagsNeedingPermission;
+ AttributionSourceState clientAttribution;
+ clientAttribution.deviceId = kDefaultDeviceId;
rc = mCameraService->getCameraCharacteristics(cameraIdStr,
/*targetSdkVersion*/__ANDROID_API_FUTURE__,
- /*overrideToPortrait*/false, kDefaultDeviceId, /*devicePolicy*/0, &metadata);
+ /*overrideToPortrait*/false, clientAttribution, /*devicePolicy*/0, &metadata);
ASSERT_TRUE(rc.isOk());
EXPECT_FALSE(metadata.isEmpty());
EXPECT_EQ(metadata.removePermissionEntries(CAMERA_METADATA_INVALID_VENDOR_ID,
diff --git a/camera/tests/CameraZSLTests.cpp b/camera/tests/CameraZSLTests.cpp
index 56fcfa4..2740d09 100644
--- a/camera/tests/CameraZSLTests.cpp
+++ b/camera/tests/CameraZSLTests.cpp
@@ -19,6 +19,7 @@
#include <gtest/gtest.h>
+#include <android/content/AttributionSourceState.h>
#include <binder/ProcessState.h>
#include <utils/Errors.h>
#include <utils/Log.h>
@@ -84,8 +85,10 @@
sp<IServiceManager> sm = defaultServiceManager();
sp<IBinder> binder = sm->getService(String16("media.camera"));
mCameraService = interface_cast<ICameraService>(binder);
+ AttributionSourceState clientAttribution;
+ clientAttribution.deviceId = kDefaultDeviceId;
rc = mCameraService->getNumberOfCameras(
- hardware::ICameraService::CAMERA_TYPE_ALL, kDefaultDeviceId, /*devicePolicy*/0,
+ hardware::ICameraService::CAMERA_TYPE_ALL, clientAttribution, /*devicePolicy*/0,
&numCameras);
EXPECT_TRUE(rc.isOk());
@@ -183,9 +186,11 @@
}
CameraMetadata metadata;
+ AttributionSourceState clientAttribution;
+ clientAttribution.deviceId = kDefaultDeviceId;
rc = mCameraService->getCameraCharacteristics(cameraIdStr,
/*targetSdkVersion*/__ANDROID_API_FUTURE__, /*overrideToPortrait*/false,
- kDefaultDeviceId, /*devicePolicy*/0, &metadata);
+ clientAttribution, /*devicePolicy*/0, &metadata);
if (!rc.isOk()) {
// The test is relevant only for cameras with Hal 3.x
// support.
@@ -209,11 +214,12 @@
continue;
}
+ clientAttribution.uid = hardware::ICameraService::USE_CALLING_UID;
+ clientAttribution.pid = hardware::ICameraService::USE_CALLING_PID;
+ clientAttribution.packageName = "ZSLTest";
rc = mCameraService->connect(this, cameraId,
- "ZSLTest", hardware::ICameraService::USE_CALLING_UID,
- hardware::ICameraService::USE_CALLING_PID,
/*targetSdkVersion*/__ANDROID_API_FUTURE__,
- /*overrideToPortrait*/false, /*forceSlowJpegMode*/false, kDefaultDeviceId,
+ /*overrideToPortrait*/false, /*forceSlowJpegMode*/false, clientAttribution,
/*devicePolicy*/0, &cameraDevice);
EXPECT_TRUE(rc.isOk());
diff --git a/camera/tests/fuzzer/Android.bp b/camera/tests/fuzzer/Android.bp
index bd97c39..3b6413c 100644
--- a/camera/tests/fuzzer/Android.bp
+++ b/camera/tests/fuzzer/Android.bp
@@ -31,6 +31,7 @@
],
shared_libs: [
"camera_platform_flags_c_lib",
+ "framework-permission-aidl-cpp",
"libbase",
"libcutils",
"libutils",
diff --git a/camera/tests/fuzzer/camera_fuzzer.cpp b/camera/tests/fuzzer/camera_fuzzer.cpp
index b0f59f1..f46d246 100644
--- a/camera/tests/fuzzer/camera_fuzzer.cpp
+++ b/camera/tests/fuzzer/camera_fuzzer.cpp
@@ -17,6 +17,7 @@
#include <Camera.h>
#include <CameraParameters.h>
#include <CameraUtils.h>
+#include <android/content/AttributionSourceState.h>
#include <binder/MemoryDealer.h>
#include <fuzzer/FuzzedDataProvider.h>
#include <gui/Surface.h>
@@ -123,21 +124,24 @@
sp<ICameraService> cameraService = nullptr;
cameraService = interface_cast<ICameraService>(binder);
sp<ICamera> cameraDevice = nullptr;
+ AttributionSourceState clientAttribution;
+ clientAttribution.deviceId = kDefaultDeviceId;
if (mFDP->ConsumeBool()) {
- cameraService->connect(this, mFDP->ConsumeIntegral<int32_t>() /* cameraId */, "CAMERAFUZZ",
- hardware::ICameraService::USE_CALLING_UID,
- hardware::ICameraService::USE_CALLING_PID,
+ clientAttribution.uid = hardware::ICameraService::USE_CALLING_UID;
+ clientAttribution.pid = hardware::ICameraService::USE_CALLING_PID;
+ clientAttribution.packageName = "CAMERAFUZZ";
+ cameraService->connect(this, mFDP->ConsumeIntegral<int32_t>() /* cameraId */,
/*targetSdkVersion*/ __ANDROID_API_FUTURE__,
/*overrideToPortrait*/ false, /*forceSlowJpegMode*/ false,
- kDefaultDeviceId, /*devicePolicy*/0, &cameraDevice);
+ clientAttribution, /*devicePolicy*/0, &cameraDevice);
} else {
+ clientAttribution.uid = mFDP->ConsumeIntegral<int8_t>();
+ clientAttribution.pid = mFDP->ConsumeIntegral<int8_t>();
+ clientAttribution.packageName = mFDP->ConsumeRandomLengthString(kMaxBytes).c_str();
cameraService->connect(this, mFDP->ConsumeIntegral<int32_t>() /* cameraId */,
- mFDP->ConsumeRandomLengthString(kMaxBytes).c_str(),
- mFDP->ConsumeIntegral<int8_t>() /* clientUid */,
- mFDP->ConsumeIntegral<int8_t>() /* clientPid */,
/*targetSdkVersion*/ mFDP->ConsumeIntegral<int32_t>(),
/*overrideToPortrait*/ mFDP->ConsumeBool(),
- /*forceSlowJpegMode*/ mFDP->ConsumeBool(), kDefaultDeviceId,
+ /*forceSlowJpegMode*/ mFDP->ConsumeBool(), clientAttribution,
/*devicePolicy*/0, &cameraDevice);
}
@@ -165,13 +169,15 @@
}
int32_t cameraId = mFDP->ConsumeIntegral<int32_t>();
- Camera::getNumberOfCameras(kDefaultDeviceId, /*devicePolicy*/0);
+ AttributionSourceState clientAttribution;
+ clientAttribution.deviceId = kDefaultDeviceId;
+ Camera::getNumberOfCameras(clientAttribution, /*devicePolicy*/0);
CameraInfo cameraInfo;
cameraInfo.facing = mFDP->ConsumeBool() ? mFDP->PickValueInArray(kValidFacing)
: mFDP->ConsumeIntegral<int32_t>();
cameraInfo.orientation = mFDP->ConsumeBool() ? mFDP->PickValueInArray(kValidOrientation)
: mFDP->ConsumeIntegral<int32_t>();
- Camera::getCameraInfo(cameraId, /*overrideToPortrait*/false, kDefaultDeviceId,
+ Camera::getCameraInfo(cameraId, /*overrideToPortrait*/false, clientAttribution,
/*devicePolicy*/0, &cameraInfo);
mCamera->reconnect();
diff --git a/drm/libdrmframework/plugins/forward-lock/FwdLockEngine/Android.bp b/drm/libdrmframework/plugins/forward-lock/FwdLockEngine/Android.bp
index 16ea15e..6e55a16 100644
--- a/drm/libdrmframework/plugins/forward-lock/FwdLockEngine/Android.bp
+++ b/drm/libdrmframework/plugins/forward-lock/FwdLockEngine/Android.bp
@@ -64,10 +64,6 @@
"libfwdlock-decoder",
],
- whole_static_libs: [
- "libc++fs",
- ],
-
local_include_dirs: ["include"],
relative_install_path: "drm",
diff --git a/media/audioaidlconversion/include/media/AidlConversionEffect.h b/media/audioaidlconversion/include/media/AidlConversionEffect.h
index b03d06b..e51bf8b 100644
--- a/media/audioaidlconversion/include/media/AidlConversionEffect.h
+++ b/media/audioaidlconversion/include/media/AidlConversionEffect.h
@@ -72,9 +72,6 @@
MAKE_EXTENSION_PARAMETER_ID(_effect, _tag##Tag, _extId); \
aidl::android::hardware::audio::effect::Parameter _aidlParam; \
RETURN_STATUS_IF_ERROR(statusTFromBinderStatus(mEffect->getParameter(_id, &_aidlParam))); \
- aidl::android::hardware::audio::effect::VendorExtension _ext = \
- VALUE_OR_RETURN_STATUS(GET_PARAMETER_SPECIFIC_FIELD( \
- _aidlParam, _effect, _tag, _effect::vendor, VendorExtension)); \
return VALUE_OR_RETURN_STATUS( \
aidl::android::aidl2legacy_Parameter_EffectParameterWriter(_aidlParam, _param)); \
}
diff --git a/media/codec2/hal/common/MultiAccessUnitHelper.cpp b/media/codec2/hal/common/MultiAccessUnitHelper.cpp
index b1fa82f..55bf7a9 100644
--- a/media/codec2/hal/common/MultiAccessUnitHelper.cpp
+++ b/media/codec2/hal/common/MultiAccessUnitHelper.cpp
@@ -497,9 +497,10 @@
// This is to take care of the last bytes and to decide to send with
// FLAG_INCOMPLETE or not.
if ((frame->mWview
- && (frame->mWview->offset() > frame->mLargeFrameTuning.thresholdSize))
+ && (frame->mWview->offset() >= frame->mLargeFrameTuning.thresholdSize))
|| frame->mComponentFrameIds.empty()) {
if (frame->mLargeWork) {
+ frame->mLargeWork->result = C2_OK;
finalizeWork(*frame);
addOutWork(frame->mLargeWork);
frame->reset();
@@ -558,12 +559,15 @@
c2_status_t ret = C2_OK;
if (frame.mLargeWork == nullptr) {
frame.mLargeWork.reset(new C2Work);
+ frame.mLargeWork->result = C2_OK;
+ frame.mLargeWork->input.flags = (C2FrameData::flags_t)0;
frame.mLargeWork->input.ordinal = frame.inOrdinal;
frame.mLargeWork->input.ordinal.frameIndex = frame.inOrdinal.frameIndex;
}
if (allocateWorket) {
if (frame.mLargeWork->worklets.size() == 0) {
frame.mLargeWork->worklets.emplace_back(new C2Worklet);
+ frame.mLargeWork->worklets.back()->output.flags = (C2FrameData::flags_t)0;
}
}
if (allocateBuffer) {
@@ -611,6 +615,9 @@
if (c2ret != C2_OK) {
return c2ret;
}
+ uint32_t flags = work->input.flags;
+ flags |= frame.mLargeWork->input.flags;
+ frame.mLargeWork->input.flags = (C2FrameData::flags_t)flags;
C2FrameData& outputFramedata = frame.mLargeWork->worklets.front()->output;
if (!(*worklet)->output.configUpdate.empty()) {
for (auto& configUpdate : (*worklet)->output.configUpdate) {
@@ -678,6 +685,9 @@
}
}
allocateWork(frame, true, true);
+ uint32_t flags = work->input.flags;
+ flags |= frame.mLargeWork->input.flags;
+ frame.mLargeWork->input.flags = (C2FrameData::flags_t)flags;
C2ReadView rView = blocks.front().map().get();
if (rView.error()) {
LOG(ERROR) << "Buffer read view error";
@@ -744,7 +754,8 @@
}
LOG(DEBUG) << "Finalizing work with input Idx "
<< frame.mLargeWork->input.ordinal.frameIndex.peekull()
- << " timestamp " << timeStampUs;
+ << " timestamp " << timeStampUs
+ << " inFlags " << inFlags;
uint32_t finalFlags = 0;
if ((!forceComplete)
&& (frame.mLargeWork->result == C2_OK)
diff --git a/media/codec2/vndk/include/C2BqBufferPriv.h b/media/codec2/vndk/include/C2BqBufferPriv.h
index 1e8dd40..806932c 100644
--- a/media/codec2/vndk/include/C2BqBufferPriv.h
+++ b/media/codec2/vndk/include/C2BqBufferPriv.h
@@ -44,7 +44,77 @@
*
* If a specific HGBP is configured, the HGBP acts as an allocator for creating graphic blocks.
*
- * TODO: add more ducumentation(graphic block life-cycle, waitable object and workaounds)
+ *
+ * HGBP/IGBP and the BlockPool
+ *
+ * GraphicBuffer(s) from a BufferQueue (IGBP/IGBC) are based on slot id.
+ * A created GraphicBuffer occupies a slot (so the GraphicBuffer has a slot-id).
+ * A GraphicBuffer is produced, consumed, and recycled based on the slot-id
+ * w.r.t. the BufferQueue.
+ *
+ * HGBP::dequeueBuffer() returns a slot id where the slot has an available GraphicBuffer.
+ * If necessary, HGBP allocates a new GraphicBuffer for the slot and indicates
+ * that a new buffer was allocated via a return flag.
+ * To retrieve the GraphicBuffer, HGBP::requestBuffer() along with the slot id
+ * is required. In order to save HGBP remote calls, the blockpool caches the
+ * allocated GraphicBuffer(s) along with the slot information.
+ *
+ * The blockpool provides C2GraphicBlock upon \fetchGraphicBlock().
+ * The C2GraphicBlock has a native handle, which is extracted from a GraphicBuffer
+ * and then cloned so that it has a life-cycle independent of the GraphicBuffer. The GraphicBuffer
+ * is allocated by HGBP::dequeueBuffer() and retrieved by HGBP::requestBuffer()
+ * if there is a HGBP configured.
+ *
+ *
+ * Life-cycle of C2GraphicBlock
+ *
+ * The decoder HAL writes a decoded frame into C2GraphicBlock. Upon
+ * completion, the component sends the block to the client in the remote process
+ * (i.e. to MediaCodec). The remote process renders the frame into the output surface
+ * via IGBP::queueBuffer() (Note: this is not hidlized.).
+ *
+ * If the decoder HAL destroys the C2GraphicBlock without transferring to the
+ * client, the destroy request goes to the BlockPool. Then
+ * the BlockPool frees the associated GraphicBuffer from its slot back to
+ * HGBP via HGBP::cancelBuffer() so that it can be recycled.
+ *
+ *
+ * Clearing the Cache (GraphicBuffer)
+ *
+ * When the output surface is switched to a new surface, the GraphicBuffers from
+ * the old surface are either migrated or cleared.
+ *
+ * The GraphicBuffer(s) still in use are migrated to a new surface during
+ * configuration via HGBP::attachBuffer(). The GraphicBuffer(s) not in use are
+ * cleared from the cache inside the BlockPool.
+ *
+ * When the surface is switched to a null surface, all the
+ * GraphicBuffers in the cache are cleared.
+ *
+ *
+ * Workaround w.r.t. b/322731059 (Deferring cleaning the cache)
+ *
+ * Some vendor devices have issues with graphic buffer lifecycle management,
+ * where the graphic buffers get released even when the cloned native handles
+ * in the remote process are not closed yet. This issue led to rare crashes
+ * for those devices when the cache is cleared early.
+ *
+ * We worked around the crash by deferring the cleaning of the cache.
+ * The workaround is not enabled by default, and can be enabled via a
+ * system property as shown below:
+ *
+ * 'debug.codec2.bqpool_dealloc_after_stop' = 1
+ *
+ * Configuring the debug flag will call \::setDeferDeallocationAfterStop()
+ * after the blockpool is created. This enables the deferral.
+ *
+ * After enabling the deferral, clearing the GraphicBuffer(s) is delayed until
+ * 1) \::clearDeferredBlocks() is called,
+ *    typically after the HAL processes a stop() request, or
+ * 2) a new \::fetchGraphicBlock() is called.
+ *
+ * Since the deferral delays the deallocation, it results in higher memory
+ * consumption for a brief period.
*/
class C2BufferQueueBlockPool : public C2BlockPool {
public:
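
Editor's note: the new header comment above documents the opt-in deferred-deallocation workaround gated by the debug.codec2.bqpool_dealloc_after_stop property. The following sketch is hedged: it assumes setDeferDeallocationAfterStop() is a method on the pool, as the comment's \::setDeferDeallocationAfterStop() reference suggests, and that the caller links libbase for the property helper; the actual call site is not part of this change.

```cpp
// Sketch only, under the assumptions stated above.
#include <memory>

#include <C2BqBufferPriv.h>
#include <android-base/properties.h>

void maybeDeferBqPoolDealloc(const std::shared_ptr<C2BufferQueueBlockPool>& pool) {
    // Opt-in workaround for b/322731059: defer clearing cached GraphicBuffers
    // until clearDeferredBlocks() or the next fetchGraphicBlock().
    if (::android::base::GetBoolProperty("debug.codec2.bqpool_dealloc_after_stop", false)) {
        pool->setDeferDeallocationAfterStop();  // assumed method name, per the comment above
    }
}
```
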
diff --git a/media/libaaudio/include/aaudio/AAudio.h b/media/libaaudio/include/aaudio/AAudio.h
index 21321b9..e19d526 100644
--- a/media/libaaudio/include/aaudio/AAudio.h
+++ b/media/libaaudio/include/aaudio/AAudio.h
@@ -574,7 +574,7 @@
* For privacy, the following usages can not be recorded: AAUDIO_VOICE_COMMUNICATION*,
* AAUDIO_USAGE_NOTIFICATION*, AAUDIO_USAGE_ASSISTANCE* and {@link #AAUDIO_USAGE_ASSISTANT}.
*
- * On <a href="/reference/android/os/Build.VERSION_CODES#Q">Build.VERSION_CODES</a>,
+ * On <a href="/reference/android/os/Build.VERSION_CODES#Q">Q</a>,
* this means only {@link #AAUDIO_USAGE_MEDIA} and {@link #AAUDIO_USAGE_GAME} may be captured.
*
* See <a href="/reference/android/media/AudioAttributes.html#ALLOW_CAPTURE_BY_ALL">
diff --git a/media/libaudioclient/tests/audio_test_utils.cpp b/media/libaudioclient/tests/audio_test_utils.cpp
index 745c7d1..1599839 100644
--- a/media/libaudioclient/tests/audio_test_utils.cpp
+++ b/media/libaudioclient/tests/audio_test_utils.cpp
@@ -294,6 +294,7 @@
ts.getBestTimestamp(&position, &timeNs, ExtendedTimestamp::TIMEBASE_MONOTONIC, &location) ==
OK) {
// Use audio timestamp.
+ std::lock_guard l(mMutex);
timeUs = timeNs / 1000 -
(position - mNumFramesReceived + mNumFramesLost) * usPerSec / mSampleRate;
} else {
@@ -322,6 +323,7 @@
} else {
numLostBytes = 0;
}
+ std::lock_guard l(mMutex);
const int64_t timestampUs =
((1000000LL * mNumFramesReceived) + (mRecord->getSampleRate() >> 1)) /
mRecord->getSampleRate();
@@ -335,6 +337,7 @@
if (buffer.size() == 0) {
ALOGW("Nothing is available from AudioRecord callback buffer");
} else {
+ std::lock_guard l(mMutex);
const size_t bufferSize = buffer.size();
const int64_t timestampUs =
((1000000LL * mNumFramesReceived) + (mRecord->getSampleRate() >> 1)) /
@@ -359,17 +362,24 @@
void AudioCapture::onOverrun() {
ALOGV("received event overrun");
- mBufferOverrun = true;
}
void AudioCapture::onMarker(uint32_t markerPosition) {
ALOGV("received Callback at position %d", markerPosition);
- mReceivedCbMarkerAtPosition = markerPosition;
+ {
+ std::lock_guard l(mMutex);
+ mReceivedCbMarkerAtPosition = markerPosition;
+ }
+ mMarkerCondition.notify_all();
}
void AudioCapture::onNewPos(uint32_t markerPosition) {
ALOGV("received Callback at position %d", markerPosition);
- mReceivedCbMarkerCount++;
+ {
+ std::lock_guard l(mMutex);
+ mReceivedCbMarkerCount = mReceivedCbMarkerCount.value_or(0) + 1;
+ }
+ mMarkerCondition.notify_all();
}
void AudioCapture::onNewIAudioRecord() {
@@ -387,20 +397,7 @@
mFlags(flags),
mSessionId(sessionId),
mTransferType(transferType),
- mAttributes(attributes) {
- mFrameCount = 0;
- mNotificationFrames = 0;
- mNumFramesToRecord = 0;
- mNumFramesReceived = 0;
- mNumFramesLost = 0;
- mBufferOverrun = false;
- mMarkerPosition = 0;
- mMarkerPeriod = 0;
- mReceivedCbMarkerAtPosition = -1;
- mReceivedCbMarkerCount = 0;
- mState = REC_NO_INIT;
- mStopRecording = false;
-}
+ mAttributes(attributes) {}
AudioCapture::~AudioCapture() {
if (mOutFileFd > 0) close(mOutFileFd);
@@ -531,25 +528,32 @@
const int maxTries = MAX_WAIT_TIME_MS / WAIT_PERIOD_MS;
int counter = 0;
size_t nonContig = 0;
- while (mNumFramesReceived < mNumFramesToRecord) {
+ int64_t numFramesReceived;
+ {
+ std::lock_guard l(mMutex);
+ numFramesReceived = mNumFramesReceived;
+ }
+ while (numFramesReceived < mNumFramesToRecord) {
AudioRecord::Buffer recordBuffer;
recordBuffer.frameCount = mNotificationFrames;
status_t status = mRecord->obtainBuffer(&recordBuffer, 1, &nonContig);
if (OK == status) {
const int64_t timestampUs =
- ((1000000LL * mNumFramesReceived) + (mRecord->getSampleRate() >> 1)) /
+ ((1000000LL * numFramesReceived) + (mRecord->getSampleRate() >> 1)) /
mRecord->getSampleRate();
RawBuffer buff{-1, timestampUs, static_cast<int32_t>(recordBuffer.size())};
memcpy(buff.mData.get(), recordBuffer.data(), recordBuffer.size());
buffer = std::move(buff);
- mNumFramesReceived += recordBuffer.size() / mRecord->frameSize();
+ numFramesReceived += recordBuffer.size() / mRecord->frameSize();
mRecord->releaseBuffer(&recordBuffer);
counter = 0;
} else if (WOULD_BLOCK == status) {
// if not received a buffer for MAX_WAIT_TIME_MS, something has gone wrong
- if (counter == maxTries) return TIMED_OUT;
- counter++;
+ if (counter++ == maxTries) status = TIMED_OUT;
}
+ std::lock_guard l(mMutex);
+ mNumFramesReceived = numFramesReceived;
+ if (TIMED_OUT == status) return status;
}
return OK;
}
@@ -577,7 +581,12 @@
status_t AudioCapture::audioProcess() {
RawBuffer buffer;
status_t status = OK;
- while (mNumFramesReceived < mNumFramesToRecord && status == OK) {
+ int64_t numFramesReceived;
+ {
+ std::lock_guard l(mMutex);
+ numFramesReceived = mNumFramesReceived;
+ }
+ while (numFramesReceived < mNumFramesToRecord && status == OK) {
if (mTransferType == AudioRecord::TRANSFER_CALLBACK)
status = obtainBufferCb(buffer);
else
@@ -586,10 +595,52 @@
const char* ptr = static_cast<const char*>(static_cast<void*>(buffer.mData.get()));
write(mOutFileFd, ptr, buffer.mCapacity);
}
+ std::lock_guard l(mMutex);
+ numFramesReceived = mNumFramesReceived;
}
return OK;
}
+uint32_t AudioCapture::getMarkerPeriod() const {
+ std::lock_guard l(mMutex);
+ return mMarkerPeriod;
+}
+
+uint32_t AudioCapture::getMarkerPosition() const {
+ std::lock_guard l(mMutex);
+ return mMarkerPosition;
+}
+
+void AudioCapture::setMarkerPeriod(uint32_t markerPeriod) {
+ std::lock_guard l(mMutex);
+ mMarkerPeriod = markerPeriod;
+}
+
+void AudioCapture::setMarkerPosition(uint32_t markerPosition) {
+ std::lock_guard l(mMutex);
+ mMarkerPosition = markerPosition;
+}
+
+uint32_t AudioCapture::waitAndGetReceivedCbMarkerAtPosition() const {
+ std::unique_lock lock(mMutex);
+ android::base::ScopedLockAssertion lock_assertion(mMutex);
+ mMarkerCondition.wait_for(lock, std::chrono::seconds(3), [this]() {
+ android::base::ScopedLockAssertion lock_assertion(mMutex);
+ return mReceivedCbMarkerAtPosition.has_value();
+ });
+ return mReceivedCbMarkerAtPosition.value_or(~0);
+}
+
+uint32_t AudioCapture::waitAndGetReceivedCbMarkerCount() const {
+ std::unique_lock lock(mMutex);
+ android::base::ScopedLockAssertion lock_assertion(mMutex);
+ mMarkerCondition.wait_for(lock, std::chrono::seconds(3), [this]() {
+ android::base::ScopedLockAssertion lock_assertion(mMutex);
+ return mReceivedCbMarkerCount.has_value();
+ });
+ return mReceivedCbMarkerCount.value_or(0);
+}
+
status_t listAudioPorts(std::vector<audio_port_v7>& portsVec) {
int attempts = 5;
status_t status;
diff --git a/media/libaudioclient/tests/audio_test_utils.h b/media/libaudioclient/tests/audio_test_utils.h
index 40c3365..022ecf3 100644
--- a/media/libaudioclient/tests/audio_test_utils.h
+++ b/media/libaudioclient/tests/audio_test_utils.h
@@ -146,8 +146,8 @@
~AudioCapture();
size_t onMoreData(const AudioRecord::Buffer& buffer) override EXCLUDES(mMutex);
void onOverrun() override;
- void onMarker(uint32_t markerPosition) override;
- void onNewPos(uint32_t newPos) override;
+ void onMarker(uint32_t markerPosition) override EXCLUDES(mMutex);
+ void onNewPos(uint32_t newPos) override EXCLUDES(mMutex);
void onNewIAudioRecord() override;
status_t create();
status_t setRecordDuration(float durationInSec);
@@ -157,20 +157,19 @@
status_t start(AudioSystem::sync_event_t event = AudioSystem::SYNC_EVENT_NONE,
audio_session_t triggerSession = AUDIO_SESSION_NONE);
status_t obtainBufferCb(RawBuffer& buffer) EXCLUDES(mMutex);
- status_t obtainBuffer(RawBuffer& buffer);
- status_t audioProcess();
+ status_t obtainBuffer(RawBuffer& buffer) EXCLUDES(mMutex);
+ status_t audioProcess() EXCLUDES(mMutex);
status_t stop() EXCLUDES(mMutex);
+ uint32_t getMarkerPeriod() const EXCLUDES(mMutex);
+ uint32_t getMarkerPosition() const EXCLUDES(mMutex);
+ void setMarkerPeriod(uint32_t markerPeriod) EXCLUDES(mMutex);
+ void setMarkerPosition(uint32_t markerPosition) EXCLUDES(mMutex);
+ uint32_t waitAndGetReceivedCbMarkerAtPosition() const EXCLUDES(mMutex);
+ uint32_t waitAndGetReceivedCbMarkerCount() const EXCLUDES(mMutex);
- uint32_t mFrameCount;
- uint32_t mNotificationFrames;
- int64_t mNumFramesToRecord;
- int64_t mNumFramesReceived;
- int64_t mNumFramesLost;
- uint32_t mMarkerPosition;
- uint32_t mMarkerPeriod;
- uint32_t mReceivedCbMarkerAtPosition;
- uint32_t mReceivedCbMarkerCount;
- bool mBufferOverrun;
+ uint32_t mFrameCount = 0;
+ uint32_t mNotificationFrames = 0;
+ int64_t mNumFramesToRecord = 0;
enum State {
REC_NO_INIT,
@@ -191,14 +190,23 @@
size_t mMaxBytesPerCallback = 2048;
sp<AudioRecord> mRecord;
- State mState;
- bool mStopRecording GUARDED_BY(mMutex);
+ State mState = REC_NO_INIT;
+ bool mStopRecording GUARDED_BY(mMutex) = false;
std::string mFileName;
int mOutFileFd = -1;
mutable std::mutex mMutex;
std::condition_variable mCondition;
std::deque<RawBuffer> mBuffersReceived GUARDED_BY(mMutex);
+
+ mutable std::condition_variable mMarkerCondition;
+ uint32_t mMarkerPeriod GUARDED_BY(mMutex) = 0;
+ uint32_t mMarkerPosition GUARDED_BY(mMutex) = 0;
+ std::optional<uint32_t> mReceivedCbMarkerCount GUARDED_BY(mMutex);
+ std::optional<uint32_t> mReceivedCbMarkerAtPosition GUARDED_BY(mMutex);
+
+ int64_t mNumFramesReceived GUARDED_BY(mMutex) = 0;
+ int64_t mNumFramesLost GUARDED_BY(mMutex) = 0;
};
#endif // AUDIO_TEST_UTILS_H_
diff --git a/media/libaudioclient/tests/audiorecord_tests.cpp b/media/libaudioclient/tests/audiorecord_tests.cpp
index 9908f33..f2fee8b 100644
--- a/media/libaudioclient/tests/audiorecord_tests.cpp
+++ b/media/libaudioclient/tests/audiorecord_tests.cpp
@@ -102,7 +102,10 @@
}
void TearDown() override {
- if (mAC) ASSERT_EQ(OK, mAC->stop());
+ if (mAC) {
+ ASSERT_EQ(OK, mAC->stop());
+ mAC.clear();
+ }
}
};
@@ -168,33 +171,33 @@
}
TEST_F(AudioRecordTest, TestGetSetMarker) {
- mAC->mMarkerPosition = (mAC->mNotificationFrames << 3) + (mAC->mNotificationFrames >> 1);
- EXPECT_EQ(OK, mAC->getAudioRecordHandle()->setMarkerPosition(mAC->mMarkerPosition))
+ mAC->setMarkerPosition((mAC->mNotificationFrames << 3) + (mAC->mNotificationFrames >> 1));
+ EXPECT_EQ(OK, mAC->getAudioRecordHandle()->setMarkerPosition(mAC->getMarkerPosition()))
<< "setMarkerPosition() failed";
uint32_t marker;
EXPECT_EQ(OK, mAC->getAudioRecordHandle()->getMarkerPosition(&marker))
<< "getMarkerPosition() failed";
EXPECT_EQ(OK, mAC->start()) << "start recording failed";
EXPECT_EQ(OK, mAC->audioProcess()) << "audioProcess failed";
- // TODO(b/348658586): Properly synchronize callback updates with the test thread.
- EXPECT_EQ(marker, mAC->mMarkerPosition)
+ EXPECT_EQ(marker, mAC->getMarkerPosition())
<< "configured marker and received marker are different";
- EXPECT_EQ(mAC->mReceivedCbMarkerAtPosition, mAC->mMarkerPosition)
+ EXPECT_EQ(mAC->waitAndGetReceivedCbMarkerAtPosition(), mAC->getMarkerPosition())
<< "configured marker and received cb marker are different";
}
TEST_F(AudioRecordTest, TestGetSetMarkerPeriodical) {
- mAC->mMarkerPeriod = (mAC->mNotificationFrames << 3) + (mAC->mNotificationFrames >> 1);
- EXPECT_EQ(OK, mAC->getAudioRecordHandle()->setPositionUpdatePeriod(mAC->mMarkerPeriod))
+ mAC->setMarkerPeriod((mAC->mNotificationFrames << 3) + (mAC->mNotificationFrames >> 1));
+ EXPECT_EQ(OK, mAC->getAudioRecordHandle()->setPositionUpdatePeriod(mAC->getMarkerPeriod()))
<< "setPositionUpdatePeriod() failed";
uint32_t marker;
EXPECT_EQ(OK, mAC->getAudioRecordHandle()->getPositionUpdatePeriod(&marker))
<< "getPositionUpdatePeriod() failed";
EXPECT_EQ(OK, mAC->start()) << "start recording failed";
EXPECT_EQ(OK, mAC->audioProcess()) << "audioProcess failed";
- // TODO(b/348658586): Properly synchronize callback updates with the test thread.
- EXPECT_EQ(marker, mAC->mMarkerPeriod) << "configured marker and received marker are different";
- EXPECT_EQ(mAC->mReceivedCbMarkerCount, mAC->mNumFramesToRecord / mAC->mMarkerPeriod)
+ EXPECT_EQ(marker, mAC->getMarkerPeriod())
+ << "configured marker and received marker are different";
+ EXPECT_EQ(mAC->waitAndGetReceivedCbMarkerCount(),
+ mAC->mNumFramesToRecord / mAC->getMarkerPeriod())
<< "configured marker and received cb marker are different";
}
@@ -221,12 +224,12 @@
EXPECT_EQ(mSessionId, mAC->getAudioRecordHandle()->getSessionId());
if (mTransferType != AudioRecord::TRANSFER_CALLBACK) {
uint32_t marker;
- mAC->mMarkerPosition = (mAC->mNotificationFrames << 3) + (mAC->mNotificationFrames >> 1);
+ mAC->setMarkerPosition((mAC->mNotificationFrames << 3) + (mAC->mNotificationFrames >> 1));
EXPECT_EQ(INVALID_OPERATION,
- mAC->getAudioRecordHandle()->setMarkerPosition(mAC->mMarkerPosition));
+ mAC->getAudioRecordHandle()->setMarkerPosition(mAC->getMarkerPosition()));
EXPECT_EQ(OK, mAC->getAudioRecordHandle()->getMarkerPosition(&marker));
EXPECT_EQ(INVALID_OPERATION,
- mAC->getAudioRecordHandle()->setPositionUpdatePeriod(mAC->mMarkerPosition));
+ mAC->getAudioRecordHandle()->setPositionUpdatePeriod(mAC->getMarkerPosition()));
EXPECT_EQ(OK, mAC->getAudioRecordHandle()->getPositionUpdatePeriod(&marker));
}
EXPECT_EQ(OK, mAC->start()) << "start recording failed";
diff --git a/media/libaudiohal/impl/StreamHalAidl.cpp b/media/libaudiohal/impl/StreamHalAidl.cpp
index 123d4a2..38e1ea4 100644
--- a/media/libaudiohal/impl/StreamHalAidl.cpp
+++ b/media/libaudiohal/impl/StreamHalAidl.cpp
@@ -449,6 +449,11 @@
state == StreamDescriptor::State::TRANSFER_PAUSED ||
state == StreamDescriptor::State::DRAIN_PAUSED) {
return sendCommand(makeHalCommand<HalCommand::Tag::start>(), reply);
+ } else if (state == StreamDescriptor::State::ACTIVE ||
+ state == StreamDescriptor::State::TRANSFERRING ||
+ state == StreamDescriptor::State::DRAINING) {
+ ALOGD("%s: already in stream state: %s", __func__, toString(state).c_str());
+ return OK;
} else {
ALOGE("%s: unexpected stream state: %s (expected IDLE or one of *PAUSED states)",
__func__, toString(state).c_str());
diff --git a/media/libstagefright/CameraSource.cpp b/media/libstagefright/CameraSource.cpp
index e229844..26b8d0c 100644
--- a/media/libstagefright/CameraSource.cpp
+++ b/media/libstagefright/CameraSource.cpp
@@ -150,10 +150,15 @@
int32_t cameraId, const std::string& clientName, uid_t clientUid, pid_t clientPid) {
if (camera == 0) {
- mCamera = Camera::connect(cameraId, clientName, clientUid, clientPid,
- /*targetSdkVersion*/__ANDROID_API_FUTURE__,
+ AttributionSourceState clientAttribution;
+ clientAttribution.pid = clientPid;
+ clientAttribution.uid = clientUid;
+ clientAttribution.deviceId = kDefaultDeviceId;
+ clientAttribution.packageName = clientName;
+
+ mCamera = Camera::connect(cameraId, /*targetSdkVersion*/__ANDROID_API_FUTURE__,
/*rotationOverride*/hardware::ICameraService::ROTATION_OVERRIDE_NONE,
- /*forceSlowJpegMode*/false);
+ /*forceSlowJpegMode*/false, clientAttribution);
if (mCamera == 0) return -EBUSY;
mCameraFlags &= ~FLAGS_HOT_CAMERA;
} else {
diff --git a/media/libstagefright/MPEG4Writer.cpp b/media/libstagefright/MPEG4Writer.cpp
index e918b5e..15188b0 100644
--- a/media/libstagefright/MPEG4Writer.cpp
+++ b/media/libstagefright/MPEG4Writer.cpp
@@ -72,6 +72,9 @@
static const int64_t kMaxMetadataSize = 0x4000000LL; // 64MB max per-frame metadata size
static const int64_t kMaxCttsOffsetTimeUs = 30 * 60 * 1000000LL; // 30 minutes
static const size_t kESDSScratchBufferSize = 10; // kMaxAtomSize in Mpeg4Extractor 64MB
+// Allow up to 100 milliseconds, which is safely above the maximum delay observed in manual
+// testing between posting from setNextFd and handling it.
+static const int64_t kFdCondWaitTimeoutNs = 100000000;
static const char kMetaKey_Version[] = "com.android.version";
static const char kMetaKey_Manufacturer[] = "com.android.manufacturer";
@@ -1262,9 +1265,13 @@
return OK;
}
+ // Wait for the signal only if the new file is not available.
if (mNextFd == -1) {
- ALOGW("No FileDescriptor for next recording");
- return INVALID_OPERATION;
+ status_t res = mFdCond.waitRelative(mLock, kFdCondWaitTimeoutNs);
+ if (res != OK) {
+ ALOGW("No FileDescriptor for next recording");
+ return INVALID_OPERATION;
+ }
}
mSwitchPending = true;
@@ -2433,6 +2440,7 @@
return INVALID_OPERATION;
}
mNextFd = dup(fd);
+ mFdCond.signal();
return OK;
}
@@ -4886,8 +4894,15 @@
int32_t mediaTime = (mFirstSampleStartOffsetUs * mTimeScale + 5E5) / 1E6;
int32_t firstSampleOffsetTicks =
(mFirstSampleStartOffsetUs * mvhdTimeScale + 5E5) / 1E6;
- // samples before 0 don't count in for duration, hence subtract firstSampleOffsetTicks.
- addOneElstTableEntry(tkhdDurationTicks - firstSampleOffsetTicks, mediaTime, 1, 0);
+ if (tkhdDurationTicks >= firstSampleOffsetTicks) {
+ // Samples before 0 don't count toward the duration, hence subtract
+ // firstSampleOffsetTicks.
+ addOneElstTableEntry(tkhdDurationTicks - firstSampleOffsetTicks, mediaTime, 1, 0);
+ } else {
+ ALOGW("The track header duration %" PRId64
+ " is smaller than the first sample offset %" PRId64,
+ mTrackDurationUs, mFirstSampleStartOffsetUs);
+ }
} else {
// Track starting at zero.
ALOGV("No edit list entry required for this track");
diff --git a/media/libstagefright/include/media/stagefright/MPEG4Writer.h b/media/libstagefright/include/media/stagefright/MPEG4Writer.h
index 054a4b8..ee75129 100644
--- a/media/libstagefright/include/media/stagefright/MPEG4Writer.h
+++ b/media/libstagefright/include/media/stagefright/MPEG4Writer.h
@@ -144,6 +144,7 @@
std::mutex mFallocMutex;
bool mPreAllocFirstTime; // Pre-allocate space for file and track headers only once per file.
uint64_t mPrevAllTracksTotalMetaDataSizeEstimate;
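+ // Signaled by setNextFd() once the next output file descriptor becomes available.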
+ Condition mFdCond;
List<Track *> mTracks;
diff --git a/media/libstagefright/omx/SoftVideoDecoderOMXComponent.cpp b/media/libstagefright/omx/SoftVideoDecoderOMXComponent.cpp
index 4183023..4ab5d10 100644
--- a/media/libstagefright/omx/SoftVideoDecoderOMXComponent.cpp
+++ b/media/libstagefright/omx/SoftVideoDecoderOMXComponent.cpp
@@ -619,6 +619,13 @@
if (!isValidOMXParam(outParams)) {
return OMX_ErrorBadParameter;
}
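+ // Reject queries whose claimed nParamSize would extend past the end of the caller-supplied
+ // structure.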
+ if (offsetof(DescribeHDR10PlusInfoParams, nValue) + outParams->nParamSize >
+ outParams->nSize) {
+ ALOGE("b/329641908: too large param size; nParamSize=%u nSize=%u",
+ outParams->nParamSize, outParams->nSize);
+ android_errorWriteLog(0x534e4554, "329641908");
+ return OMX_ErrorBadParameter;
+ }
outParams->nParamSizeUsed = info->size();
diff --git a/media/ndk/include/media/NdkMediaDataSource.h b/media/ndk/include/media/NdkMediaDataSource.h
index 197e202..def142c 100644
--- a/media/ndk/include/media/NdkMediaDataSource.h
+++ b/media/ndk/include/media/NdkMediaDataSource.h
@@ -49,16 +49,16 @@
/*
* AMediaDataSource's callbacks will be invoked on an implementation-defined thread
* or thread pool. No guarantees are provided about which thread(s) will be used for
- * callbacks. For example, |close| can be invoked from a different thread than the
- * thread invoking |readAt|. As such, the Implementations of AMediaDataSource callbacks
+ * callbacks. For example, `close` can be invoked from a different thread than the
+ * thread invoking `readAt`. As such, implementations of AMediaDataSource callbacks
* must be threadsafe.
*/
/**
- * Called to request data from the given |offset|.
+ * Called to request data from the given `offset`.
*
- * Implementations should should write up to |size| bytes into
- * |buffer|, and return the number of bytes written.
+ * Implementations should write up to `size` bytes into
+ * `buffer`, and return the number of bytes written.
*
* Return 0 if size is zero (thus no bytes are read).
*
@@ -78,9 +78,9 @@
* Called to close the data source, unblock reads, and release associated
* resources.
*
- * The NDK media framework guarantees that after the first |close| is
+ * The NDK media framework guarantees that after the first `close` is
* called, no future callbacks will be invoked on the data source except
- * for |close| itself.
+ * for `close` itself.
*
* Closing a data source allows readAt calls that were blocked waiting
* for I/O data to return promptly.
@@ -101,7 +101,7 @@
/**
* Called to get an estimate of the number of bytes that can be read from this data source
- * starting at |offset| without blocking for I/O.
+ * starting at `offset` without blocking for I/O.
*
* Return -1 when such an estimate is not possible.
*/
@@ -111,10 +111,10 @@
* Create new media data source. Returns NULL if memory allocation
* for the new data source object fails.
*
- * Set the |uri| from which the data source will read,
+ * Set the `uri` from which the data source will read,
* plus additional http headers when initiating the request.
*
- * Headers will contain corresponding items from |key_values|
+ * Headers will contain corresponding items from `key_values`
* in the following fashion:
*
* key_values[0]:key_values[1]
diff --git a/services/audioflinger/Android.bp b/services/audioflinger/Android.bp
index b763f09..2abf682 100644
--- a/services/audioflinger/Android.bp
+++ b/services/audioflinger/Android.bp
@@ -213,6 +213,7 @@
],
static_libs: [
+ "audiopermissioncontroller",
"libcpustats",
"libpermission",
],
diff --git a/services/audioflinger/AudioFlinger.cpp b/services/audioflinger/AudioFlinger.cpp
index a17ac58..e76ece2 100644
--- a/services/audioflinger/AudioFlinger.cpp
+++ b/services/audioflinger/AudioFlinger.cpp
@@ -39,13 +39,17 @@
#include <binder/IServiceManager.h>
#include <binder/Parcel.h>
#include <cutils/properties.h>
+#include <com_android_media_audio.h>
#include <com_android_media_audioserver.h>
#include <media/AidlConversion.h>
#include <media/AudioParameter.h>
#include <media/AudioValidator.h>
#include <media/IMediaLogService.h>
+#include <media/IPermissionProvider.h>
#include <media/MediaMetricsItem.h>
+#include <media/NativePermissionController.h>
#include <media/TypeConverter.h>
+#include <media/ValidatedAttributionSourceState.h>
#include <mediautils/BatteryNotifier.h>
#include <mediautils/MemoryLeakTrackUtil.h>
#include <mediautils/MethodStatistics.h>
@@ -81,12 +85,17 @@
namespace android {
using ::android::base::StringPrintf;
+using aidl_utils::statusTFromBinderStatus;
using media::IEffectClient;
using media::audio::common::AudioMMapPolicyInfo;
using media::audio::common::AudioMMapPolicyType;
using media::audio::common::AudioMode;
using android::content::AttributionSourceState;
using android::detail::AudioHalVersionInfo;
+using com::android::media::permission::INativePermissionController;
+using com::android::media::permission::IPermissionProvider;
+using com::android::media::permission::NativePermissionController;
+using com::android::media::permission::ValidatedAttributionSourceState;
static const AudioHalVersionInfo kMaxAAudioPropertyDeviceHalVersion =
AudioHalVersionInfo(AudioHalVersionInfo::Type::HIDL, 7, 1);
@@ -118,6 +127,52 @@
}
}
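+// Validate the client-provided attribution. Trusted callers (audioserver, mediaserver,
+// system server, root) may delegate an identity, after filling in a missing package name
+// and checking the pid; untrusted callers have their attribution rebuilt from the binder
+// calling context.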
+static error::BinderResult<ValidatedAttributionSourceState>
+validateAttributionFromContextOrTrustedCaller(AttributionSourceState attr,
+ const IPermissionProvider& provider) {
+ const auto callingUid = IPCThreadState::self()->getCallingUid();
+ // We trust the following UIDs to provide appropriately validated identities from above us
+ if (isAudioServerOrMediaServerOrSystemServerOrRootUid(callingUid)) {
+ // Legacy paths may not properly populate package name, so we attempt to handle.
+ if (!attr.packageName.has_value() || attr.packageName.value() == "") {
+ ALOGW("Trusted client %d provided attr with missing package name" , callingUid);
+ attr.packageName = VALUE_OR_RETURN(provider.getPackagesForUid(callingUid))[0];
+ }
+ // Behavior change: In the case of delegation, if pid is invalid,
+ // filling it in with the callingPid will cause a mismatch between the
+ // pid and the uid in the attribution, which is error-prone.
+ // Instead, assert that the pid from a trusted source is valid
+ if (attr.pid == -1) {
+ if (callingUid != static_cast<uid_t>(attr.uid)) {
+ return error::unexpectedExceptionCode(binder::Status::EX_ILLEGAL_ARGUMENT,
+ "validateAttribution: Invalid pid from delegating trusted source");
+ } else {
+ // Legacy handling for trusted clients which may not fill pid correctly
+ attr.pid = IPCThreadState::self()->getCallingPid();
+ }
+ }
+ return ValidatedAttributionSourceState::createFromTrustedSource(std::move(attr));
+ } else {
+ // Behavior change: Populate pid with callingPid unconditionally. Previously, we
+ // allowed a caller-provided pid if the uid matched the calling context, but this is
+ // error-prone since it allows a mismatched uid/pid pair.
+ return ValidatedAttributionSourceState::createFromBinderContext(std::move(attr), provider);
+ }
+}
+
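+// Evaluates a BinderResult expression; on error, logs it and returns the corresponding
+// status_t from the enclosing function, otherwise yields the unwrapped value.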
+#define VALUE_OR_RETURN_CONVERTED(exp) \
+ ({ \
+ auto _tmp = (exp); \
+ if (!_tmp.ok()) { \
+ ALOGE("Function: %s Line: %d Failed result (%s)", __FUNCTION__, __LINE__, \
+ errorToString(_tmp.error()).c_str()); \
+ return statusTFromBinderStatus(_tmp.error()); \
+ } \
+ std::move(_tmp.value()); \
+ })
+
+
+
// Creates association between Binder code to name for IAudioFlinger.
#define IAUDIOFLINGER_BINDER_METHOD_MACRO_LIST \
BINDER_METHOD_ENTRY(createTrack) \
@@ -519,30 +574,42 @@
audio_attributes_t localAttr = *attr;
// TODO b/182392553: refactor or make clearer
- pid_t clientPid =
- VALUE_OR_RETURN_STATUS(aidl2legacy_int32_t_pid_t(client.attributionSource.pid));
- bool updatePid = (clientPid == (pid_t)-1);
- const uid_t callingUid = IPCThreadState::self()->getCallingUid();
+ AttributionSourceState adjAttributionSource;
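+    // With the audioserver_permissions flag enabled, attribution validation is delegated to
+    // validateAttributionFromContextOrTrustedCaller(); otherwise fall back to the legacy
+    // uid/pid spoof checks below.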
+ if (!com::android::media::audio::audioserver_permissions()) {
+ pid_t clientPid =
+ VALUE_OR_RETURN_STATUS(aidl2legacy_int32_t_pid_t(client.attributionSource.pid));
+ bool updatePid = (clientPid == (pid_t)-1);
+ const uid_t callingUid = IPCThreadState::self()->getCallingUid();
- AttributionSourceState adjAttributionSource = client.attributionSource;
- if (!isAudioServerOrMediaServerOrSystemServerOrRootUid(callingUid)) {
- uid_t clientUid =
- VALUE_OR_RETURN_STATUS(aidl2legacy_int32_t_uid_t(client.attributionSource.uid));
- ALOGW_IF(clientUid != callingUid,
- "%s uid %d tried to pass itself off as %d",
- __FUNCTION__, callingUid, clientUid);
- adjAttributionSource.uid = VALUE_OR_RETURN_STATUS(legacy2aidl_uid_t_int32_t(callingUid));
- updatePid = true;
- }
- if (updatePid) {
- const pid_t callingPid = IPCThreadState::self()->getCallingPid();
- ALOGW_IF(clientPid != (pid_t)-1 && clientPid != callingPid,
- "%s uid %d pid %d tried to pass itself off as pid %d",
- __func__, callingUid, callingPid, clientPid);
- adjAttributionSource.pid = VALUE_OR_RETURN_STATUS(legacy2aidl_pid_t_int32_t(callingPid));
- }
- adjAttributionSource = afutils::checkAttributionSourcePackage(
+ adjAttributionSource = client.attributionSource;
+ if (!isAudioServerOrMediaServerOrSystemServerOrRootUid(callingUid)) {
+ uid_t clientUid =
+ VALUE_OR_RETURN_STATUS(aidl2legacy_int32_t_uid_t(client.attributionSource.uid));
+ ALOGW_IF(clientUid != callingUid,
+ "%s uid %d tried to pass itself off as %d",
+ __FUNCTION__, callingUid, clientUid);
+ adjAttributionSource.uid = VALUE_OR_RETURN_STATUS(
+ legacy2aidl_uid_t_int32_t(callingUid));
+ updatePid = true;
+ }
+ if (updatePid) {
+ const pid_t callingPid = IPCThreadState::self()->getCallingPid();
+ ALOGW_IF(clientPid != (pid_t)-1 && clientPid != callingPid,
+ "%s uid %d pid %d tried to pass itself off as pid %d",
+ __func__, callingUid, callingPid, clientPid);
+ adjAttributionSource.pid = VALUE_OR_RETURN_STATUS(
+ legacy2aidl_pid_t_int32_t(callingPid));
+ }
+ adjAttributionSource = afutils::checkAttributionSourcePackage(
adjAttributionSource);
+ } else {
+ auto validatedAttrSource = VALUE_OR_RETURN_CONVERTED(
+ validateAttributionFromContextOrTrustedCaller(client.attributionSource,
+ getPermissionProvider()
+ ));
+ // TODO pass wrapped object around
+ adjAttributionSource = std::move(validatedAttrSource).unwrapInto();
+ }
if (direction == MmapStreamInterface::DIRECTION_OUTPUT) {
audio_config_t fullConfig = AUDIO_CONFIG_INITIALIZER;
@@ -997,36 +1064,50 @@
bool isSpatialized = false;
bool isBitPerfect = false;
- // TODO b/182392553: refactor or make clearer
- pid_t clientPid =
- VALUE_OR_RETURN_STATUS(aidl2legacy_int32_t_pid_t(input.clientInfo.attributionSource.pid));
- bool updatePid = (clientPid == (pid_t)-1);
- const uid_t callingUid = IPCThreadState::self()->getCallingUid();
- uid_t clientUid =
- VALUE_OR_RETURN_STATUS(aidl2legacy_int32_t_uid_t(input.clientInfo.attributionSource.uid));
audio_io_handle_t effectThreadId = AUDIO_IO_HANDLE_NONE;
std::vector<int> effectIds;
audio_attributes_t localAttr = input.attr;
- AttributionSourceState adjAttributionSource = input.clientInfo.attributionSource;
- if (!isAudioServerOrMediaServerOrSystemServerOrRootUid(callingUid)) {
- ALOGW_IF(clientUid != callingUid,
- "%s uid %d tried to pass itself off as %d",
- __FUNCTION__, callingUid, clientUid);
- adjAttributionSource.uid = VALUE_OR_RETURN_STATUS(legacy2aidl_uid_t_int32_t(callingUid));
- clientUid = callingUid;
- updatePid = true;
+ AttributionSourceState adjAttributionSource;
+ pid_t callingPid = IPCThreadState::self()->getCallingPid();
+ if (!com::android::media::audio::audioserver_permissions()) {
+ adjAttributionSource = input.clientInfo.attributionSource;
+ const uid_t callingUid = IPCThreadState::self()->getCallingUid();
+ uid_t clientUid = VALUE_OR_RETURN_STATUS(aidl2legacy_int32_t_uid_t(
+ input.clientInfo.attributionSource.uid));
+ pid_t clientPid =
+ VALUE_OR_RETURN_STATUS(aidl2legacy_int32_t_pid_t(
+ input.clientInfo.attributionSource.pid));
+ bool updatePid = (clientPid == (pid_t)-1);
+
+ if (!isAudioServerOrMediaServerOrSystemServerOrRootUid(callingUid)) {
+ ALOGW_IF(clientUid != callingUid,
+ "%s uid %d tried to pass itself off as %d",
+ __FUNCTION__, callingUid, clientUid);
+ adjAttributionSource.uid = VALUE_OR_RETURN_STATUS(
+ legacy2aidl_uid_t_int32_t(callingUid));
+ clientUid = callingUid;
+ updatePid = true;
+ }
+ if (updatePid) {
+ ALOGW_IF(clientPid != (pid_t)-1 && clientPid != callingPid,
+ "%s uid %d pid %d tried to pass itself off as pid %d",
+ __func__, callingUid, callingPid, clientPid);
+ clientPid = callingPid;
+ adjAttributionSource.pid = VALUE_OR_RETURN_STATUS(
+ legacy2aidl_pid_t_int32_t(callingPid));
+ }
+ adjAttributionSource = afutils::checkAttributionSourcePackage(
+ adjAttributionSource);
+
+ } else {
+ auto validatedAttrSource = VALUE_OR_RETURN_CONVERTED(
+ validateAttributionFromContextOrTrustedCaller(input.clientInfo.attributionSource,
+ getPermissionProvider()
+ ));
+ // TODO pass wrapped object around
+ adjAttributionSource = std::move(validatedAttrSource).unwrapInto();
}
- const pid_t callingPid = IPCThreadState::self()->getCallingPid();
- if (updatePid) {
- ALOGW_IF(clientPid != (pid_t)-1 && clientPid != callingPid,
- "%s uid %d pid %d tried to pass itself off as pid %d",
- __func__, callingUid, callingPid, clientPid);
- clientPid = callingPid;
- adjAttributionSource.pid = VALUE_OR_RETURN_STATUS(legacy2aidl_pid_t_int32_t(callingPid));
- }
- adjAttributionSource = afutils::checkAttributionSourcePackage(
- adjAttributionSource);
audio_session_t sessionId = input.sessionId;
if (sessionId == AUDIO_SESSION_ALLOCATE) {
@@ -1079,7 +1160,7 @@
goto Exit;
}
- client = registerPid(clientPid);
+ client = registerPid(adjAttributionSource.pid);
IAfPlaybackThread* effectThread = nullptr;
sp<IAfEffectChain> effectChain = nullptr;
@@ -2199,6 +2280,12 @@
}
}
+const IPermissionProvider& AudioFlinger::getPermissionProvider() {
+ // This is initialized as part of service construction, prior to binder registration,
+ // so it should always be non-null.
+ return mAudioPolicyServiceLocal.load()->getPermissionProvider();
+}
+
// removeClient_l() must be called with AudioFlinger::clientMutex() held
void AudioFlinger::removeClient_l(pid_t pid)
{
@@ -2308,30 +2395,43 @@
output.buffers.clear();
output.inputId = AUDIO_IO_HANDLE_NONE;
- // TODO b/182392553: refactor or clean up
- AttributionSourceState adjAttributionSource = input.clientInfo.attributionSource;
- bool updatePid = (adjAttributionSource.pid == -1);
- const uid_t callingUid = IPCThreadState::self()->getCallingUid();
- const uid_t currentUid = VALUE_OR_RETURN_STATUS(legacy2aidl_uid_t_int32_t(
- adjAttributionSource.uid));
- if (!isAudioServerOrMediaServerOrSystemServerOrRootUid(callingUid)) {
- ALOGW_IF(currentUid != callingUid,
- "%s uid %d tried to pass itself off as %d",
- __FUNCTION__, callingUid, currentUid);
- adjAttributionSource.uid = VALUE_OR_RETURN_STATUS(legacy2aidl_uid_t_int32_t(callingUid));
- updatePid = true;
+ AttributionSourceState adjAttributionSource;
+ pid_t callingPid = IPCThreadState::self()->getCallingPid();
+ if (!com::android::media::audio::audioserver_permissions()) {
+ adjAttributionSource = input.clientInfo.attributionSource;
+ bool updatePid = (adjAttributionSource.pid == -1);
+ const uid_t callingUid = IPCThreadState::self()->getCallingUid();
+ const uid_t currentUid = VALUE_OR_RETURN_STATUS(legacy2aidl_uid_t_int32_t(
+ adjAttributionSource.uid));
+ if (!isAudioServerOrMediaServerOrSystemServerOrRootUid(callingUid)) {
+ ALOGW_IF(currentUid != callingUid,
+ "%s uid %d tried to pass itself off as %d",
+ __FUNCTION__, callingUid, currentUid);
+ adjAttributionSource.uid = VALUE_OR_RETURN_STATUS(
+ legacy2aidl_uid_t_int32_t(callingUid));
+ updatePid = true;
+ }
+ const pid_t currentPid = VALUE_OR_RETURN_STATUS(aidl2legacy_int32_t_pid_t(
+ adjAttributionSource.pid));
+ if (updatePid) {
+ ALOGW_IF(currentPid != (pid_t)-1 && currentPid != callingPid,
+ "%s uid %d pid %d tried to pass itself off as pid %d",
+ __func__, callingUid, callingPid, currentPid);
+ adjAttributionSource.pid = VALUE_OR_RETURN_STATUS(
+ legacy2aidl_pid_t_int32_t(callingPid));
+ }
+ adjAttributionSource = afutils::checkAttributionSourcePackage(
+ adjAttributionSource);
+ } else {
+ auto validatedAttrSource = VALUE_OR_RETURN_CONVERTED(
+ validateAttributionFromContextOrTrustedCaller(
+ input.clientInfo.attributionSource,
+ getPermissionProvider()
+ ));
+ // TODO pass wrapped object around
+ adjAttributionSource = std::move(validatedAttrSource).unwrapInto();
}
- const pid_t callingPid = IPCThreadState::self()->getCallingPid();
- const pid_t currentPid = VALUE_OR_RETURN_STATUS(aidl2legacy_int32_t_pid_t(
- adjAttributionSource.pid));
- if (updatePid) {
- ALOGW_IF(currentPid != (pid_t)-1 && currentPid != callingPid,
- "%s uid %d pid %d tried to pass itself off as pid %d",
- __func__, callingUid, callingPid, currentPid);
- adjAttributionSource.pid = VALUE_OR_RETURN_STATUS(legacy2aidl_pid_t_int32_t(callingPid));
- }
- adjAttributionSource = afutils::checkAttributionSourcePackage(
- adjAttributionSource);
+
// further format checks are performed by createRecordTrack_l()
if (!audio_is_valid_format(input.config.format)) {
ALOGE("createRecord() invalid format %#x", input.config.format);
@@ -4121,20 +4221,31 @@
int idOut = -1;
status_t lStatus = NO_ERROR;
-
- // TODO b/182392553: refactor or make clearer
- const uid_t callingUid = IPCThreadState::self()->getCallingUid();
- adjAttributionSource.uid = VALUE_OR_RETURN_STATUS(legacy2aidl_uid_t_int32_t(callingUid));
- pid_t currentPid = VALUE_OR_RETURN_STATUS(aidl2legacy_int32_t_pid_t(adjAttributionSource.pid));
- if (currentPid == -1 || !isAudioServerOrMediaServerOrSystemServerOrRootUid(callingUid)) {
- const pid_t callingPid = IPCThreadState::self()->getCallingPid();
- ALOGW_IF(currentPid != -1 && currentPid != callingPid,
- "%s uid %d pid %d tried to pass itself off as pid %d",
- __func__, callingUid, callingPid, currentPid);
- adjAttributionSource.pid = VALUE_OR_RETURN_STATUS(legacy2aidl_pid_t_int32_t(callingPid));
- currentPid = callingPid;
+ uid_t callingUid = IPCThreadState::self()->getCallingUid();
+ pid_t currentPid;
+ if (!com::android::media::audio::audioserver_permissions()) {
+ adjAttributionSource.uid = VALUE_OR_RETURN_STATUS(legacy2aidl_uid_t_int32_t(callingUid));
+ currentPid = VALUE_OR_RETURN_STATUS(aidl2legacy_int32_t_pid_t(adjAttributionSource.pid));
+ if (currentPid == -1 || !isAudioServerOrMediaServerOrSystemServerOrRootUid(callingUid)) {
+ const pid_t callingPid = IPCThreadState::self()->getCallingPid();
+ ALOGW_IF(currentPid != -1 && currentPid != callingPid,
+ "%s uid %d pid %d tried to pass itself off as pid %d",
+ __func__, callingUid, callingPid, currentPid);
+ adjAttributionSource.pid = VALUE_OR_RETURN_STATUS(
+ legacy2aidl_pid_t_int32_t(callingPid));
+ currentPid = callingPid;
+ }
+ adjAttributionSource = afutils::checkAttributionSourcePackage(adjAttributionSource);
+ } else {
+ auto validatedAttrSource = VALUE_OR_RETURN_CONVERTED(
+ validateAttributionFromContextOrTrustedCaller(request.attributionSource,
+ getPermissionProvider()
+ ));
+ // TODO pass wrapped object around
+ adjAttributionSource = std::move(validatedAttrSource).unwrapInto();
+ currentPid = adjAttributionSource.pid;
}
- adjAttributionSource = afutils::checkAttributionSourcePackage(adjAttributionSource);
+
ALOGV("createEffect pid %d, effectClient %p, priority %d, sessionId %d, io %d, factory %p",
adjAttributionSource.pid, effectClient.get(), priority, sessionId, io,
diff --git a/services/audioflinger/AudioFlinger.h b/services/audioflinger/AudioFlinger.h
index 501aed1..46f4068 100644
--- a/services/audioflinger/AudioFlinger.h
+++ b/services/audioflinger/AudioFlinger.h
@@ -407,6 +407,8 @@
void onHardError(std::set<audio_port_handle_t>& trackPortIds) final
EXCLUDES_AudioFlinger_ClientMutex;
+ const ::com::android::media::permission::IPermissionProvider& getPermissionProvider() final;
+
// ---- end of IAfThreadCallback interface
/* List available audio ports and their attributes */
diff --git a/services/audioflinger/DeviceEffectManager.h b/services/audioflinger/DeviceEffectManager.h
index 3af51d5..287d838 100644
--- a/services/audioflinger/DeviceEffectManager.h
+++ b/services/audioflinger/DeviceEffectManager.h
@@ -108,7 +108,6 @@
}
audio_io_handle_t io() const final { return AUDIO_IO_HANDLE_NONE; }
- bool shouldDispatchAddRemoveToHal(bool isAdded __unused) const final { return true; }
bool isOutput() const final { return false; }
bool isOffload() const final { return false; }
bool isOffloadOrDirect() const final { return false; }
diff --git a/services/audioflinger/Effects.cpp b/services/audioflinger/Effects.cpp
index bceba4b..c73b946 100644
--- a/services/audioflinger/Effects.cpp
+++ b/services/audioflinger/Effects.cpp
@@ -617,10 +617,11 @@
}
+// return true if any effect started or stopped
bool EffectModule::updateState_l() {
audio_utils::lock_guard _l(mutex());
- bool started = false;
+ bool startedOrStopped = false;
switch (mState) {
case RESTART:
reset_l();
@@ -635,7 +636,7 @@
}
if (start_ll() == NO_ERROR) {
mState = ACTIVE;
- started = true;
+ startedOrStopped = true;
} else {
mState = IDLE;
}
@@ -655,6 +656,7 @@
// turn off sequence.
if (--mDisableWaitCnt == 0) {
reset_l();
+ startedOrStopped = true;
mState = IDLE;
}
break;
@@ -669,7 +671,7 @@
break;
}
- return started;
+ return startedOrStopped;
}
void EffectModule::process()
@@ -1040,11 +1042,12 @@
{
if ((mDescriptor.flags & EFFECT_FLAG_TYPE_MASK) == EFFECT_FLAG_TYPE_PRE_PROC ||
(mDescriptor.flags & EFFECT_FLAG_TYPE_MASK) == EFFECT_FLAG_TYPE_POST_PROC) {
- if (!getCallback()->shouldDispatchAddRemoveToHal(/* isAdded= */ true)) {
+ if (mCurrentHalStream == getCallback()->io()) {
return;
}
(void)getCallback()->addEffectToHal(mEffectInterface);
+ mCurrentHalStream = getCallback()->io();
}
}
@@ -1141,10 +1144,11 @@
{
if ((mDescriptor.flags & EFFECT_FLAG_TYPE_MASK) == EFFECT_FLAG_TYPE_PRE_PROC ||
(mDescriptor.flags & EFFECT_FLAG_TYPE_MASK) == EFFECT_FLAG_TYPE_POST_PROC) {
- if (!getCallback()->shouldDispatchAddRemoveToHal(/* isAdded= */ false)) {
- return (getCallback()->io() == AUDIO_IO_HANDLE_NONE) ? NO_ERROR : INVALID_OPERATION;
+ if (mCurrentHalStream != getCallback()->io()) {
+ return (mCurrentHalStream == AUDIO_IO_HANDLE_NONE) ? NO_ERROR : INVALID_OPERATION;
}
getCallback()->removeEffectFromHal(mEffectInterface);
+ mCurrentHalStream = AUDIO_IO_HANDLE_NONE;
}
return NO_ERROR;
}
@@ -2308,6 +2312,9 @@
}
bool doResetVolume = false;
for (size_t i = 0; i < size; i++) {
+ // Reset the volume when any effect has just started or stopped.
+ // resetVolume_l will check whether the volume controller effect in the chain needs an
+ // update and apply the correct volume.
doResetVolume = mEffects[i]->updateState_l() || doResetVolume;
}
if (doResetVolume) {
@@ -2661,6 +2668,9 @@
true /* effect chain volume controller */);
mNewLeftVolume = newLeft;
mNewRightVolume = newRight;
+ ALOGD("%s sessionId %d volume controller effect %s set (%d, %d), ret (%d, %d)", __func__,
+ mSessionId, mEffects[ctrlIdx]->desc().name, mLeftVolume, mRightVolume, newLeft,
+ newRight);
}
// then indicate volume to all other effects in chain.
// Pass altered volume to effects before volume controller
@@ -3120,16 +3130,12 @@
return result;
}
result = st->addEffect(effect);
- if (result == OK) {
- mCurrentHalStream = t->id();
- }
ALOGE_IF(result != OK, "Error when adding effect: %d", result);
return result;
}
status_t EffectChain::EffectCallback::removeEffectFromHal(
const sp<EffectHalInterface>& effect) {
- mCurrentHalStream = AUDIO_IO_HANDLE_NONE;
status_t result = NO_INIT;
const sp<IAfThreadBase> t = thread().promote();
if (t == nullptr) {
@@ -3144,11 +3150,6 @@
return result;
}
-bool EffectChain::EffectCallback::shouldDispatchAddRemoveToHal(bool isAdded) const {
- const bool currentHalStreamMatchesThreadId = (io() == mCurrentHalStream);
- return isAdded != currentHalStreamMatchesThreadId;
-}
-
audio_io_handle_t EffectChain::EffectCallback::io() const {
const sp<IAfThreadBase> t = thread().promote();
if (t == nullptr) {
@@ -3741,14 +3742,11 @@
if (proxy == nullptr) {
return NO_INIT;
}
- status_t ret = proxy->addEffectToHal(effect);
- mAddedToHal = (ret == OK);
- return ret;
+ return proxy->addEffectToHal(effect);
}
status_t DeviceEffectProxy::ProxyCallback::removeEffectFromHal(
const sp<EffectHalInterface>& effect) {
- mAddedToHal = false;
sp<DeviceEffectProxy> proxy = mProxy.promote();
if (proxy == nullptr) {
return NO_INIT;
diff --git a/services/audioflinger/Effects.h b/services/audioflinger/Effects.h
index 549cff2..d107543 100644
--- a/services/audioflinger/Effects.h
+++ b/services/audioflinger/Effects.h
@@ -279,6 +279,8 @@
// sending disable command.
uint32_t mDisableWaitCnt; // current process() calls count during disable period.
bool mOffloaded; // effect is currently offloaded to the audio DSP
+ // effect has been added to this HAL input stream
+ audio_io_handle_t mCurrentHalStream = AUDIO_IO_HANDLE_NONE;
bool mIsOutput; // direction of the AF thread
bool mSupportsFloat; // effect supports float processing
@@ -594,7 +596,6 @@
status_t allocateHalBuffer(size_t size, sp<EffectBufferHalInterface>* buffer) override;
bool updateOrphanEffectChains(const sp<IAfEffectBase>& effect) override;
- bool shouldDispatchAddRemoveToHal(bool isAdded) const override;
audio_io_handle_t io() const override;
bool isOutput() const override;
bool isOffload() const override;
@@ -652,8 +653,6 @@
mediautils::atomic_wp<IAfThreadBase> mThread;
sp<IAfThreadCallback> mAfThreadCallback;
IAfThreadBase::type_t mThreadType = IAfThreadBase::MIXER;
- // effect has been added to this HAL input stream
- audio_io_handle_t mCurrentHalStream = AUDIO_IO_HANDLE_NONE;
};
DISALLOW_COPY_AND_ASSIGN(EffectChain);
@@ -785,9 +784,6 @@
}
audio_io_handle_t io() const override { return AUDIO_IO_HANDLE_NONE; }
- bool shouldDispatchAddRemoveToHal(bool isAdded) const override {
- return isAdded != mAddedToHal;
- }
bool isOutput() const override;
bool isOffload() const override { return false; }
bool isOffloadOrDirect() const override { return false; }
@@ -828,7 +824,6 @@
private:
const wp<DeviceEffectProxy> mProxy;
const sp<DeviceEffectManagerCallback> mManagerCallback;
- bool mAddedToHal = false;
};
status_t checkPort(const IAfPatchPanel::Patch& patch,
diff --git a/services/audioflinger/IAfEffect.h b/services/audioflinger/IAfEffect.h
index 98a0fcb..bb82afb 100644
--- a/services/audioflinger/IAfEffect.h
+++ b/services/audioflinger/IAfEffect.h
@@ -46,7 +46,6 @@
public:
// Trivial methods usually implemented with help from ThreadBase
virtual audio_io_handle_t io() const = 0;
- virtual bool shouldDispatchAddRemoveToHal(bool isAdded) const = 0;
virtual bool isOutput() const = 0;
virtual bool isOffload() const = 0;
virtual bool isOffloadOrDirect() const = 0;
@@ -190,11 +189,13 @@
virtual status_t sendMetadata_ll(const std::vector<playback_track_metadata_v7_t>& metadata)
REQUIRES(audio_utils::ThreadBase_Mutex,
audio_utils::EffectChain_Mutex) EXCLUDES_EffectBase_Mutex = 0;
+ // Return true if there was a state change from STARTING to ACTIVE, or STOPPED to IDLE; the
+ // effect chain will do a volume reset in these two cases.
+ virtual bool updateState_l()
+ REQUIRES(audio_utils::EffectChain_Mutex) EXCLUDES_EffectBase_Mutex = 0;
private:
virtual void process() = 0;
- virtual bool updateState_l()
- REQUIRES(audio_utils::EffectChain_Mutex) EXCLUDES_EffectBase_Mutex = 0;
virtual void reset_l() REQUIRES(audio_utils::EffectChain_Mutex) = 0;
virtual status_t configure_l() REQUIRES(audio_utils::EffectChain_Mutex) = 0;
virtual status_t init_l()
diff --git a/services/audioflinger/IAfThread.h b/services/audioflinger/IAfThread.h
index a7da658..4d26aa0 100644
--- a/services/audioflinger/IAfThread.h
+++ b/services/audioflinger/IAfThread.h
@@ -37,6 +37,10 @@
#include <optional>
+namespace com::android::media::permission {
+ class IPermissionProvider;
+}
+
namespace android {
class IAfDirectOutputThread;
@@ -122,6 +126,9 @@
EXCLUDES_AudioFlinger_ClientMutex = 0;
virtual void onHardError(std::set<audio_port_handle_t>& trackPortIds) = 0;
+
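+    // Provider used to validate client attribution (e.g. package name lookups by uid).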
+ virtual const ::com::android::media::permission::IPermissionProvider&
+ getPermissionProvider() = 0;
};
class IAfThreadBase : public virtual RefBase {
diff --git a/services/audioflinger/Threads.cpp b/services/audioflinger/Threads.cpp
index e4e4017..2dcbbce 100644
--- a/services/audioflinger/Threads.cpp
+++ b/services/audioflinger/Threads.cpp
@@ -72,6 +72,7 @@
#include <media/nbaio/Pipe.h>
#include <media/nbaio/PipeReader.h>
#include <media/nbaio/SourceAudioBufferProvider.h>
+#include <media/ValidatedAttributionSourceState.h>
#include <mediautils/BatteryNotifier.h>
#include <mediautils/Process.h>
#include <mediautils/SchedulingPolicyService.h>
@@ -120,6 +121,8 @@
return a < b ? a : b;
}
+using com::android::media::permission::ValidatedAttributionSourceState;
+
namespace android {
using audioflinger::SyncEvent;
@@ -10302,8 +10305,23 @@
audio_port_handle_t portId = AUDIO_PORT_HANDLE_NONE;
audio_io_handle_t io = mId;
- const AttributionSourceState adjAttributionSource = afutils::checkAttributionSourcePackage(
- client.attributionSource);
+ AttributionSourceState adjAttributionSource;
+ if (!com::android::media::audio::audioserver_permissions()) {
+ adjAttributionSource = afutils::checkAttributionSourcePackage(
+ client.attributionSource);
+ } else {
+ // TODO(b/342475009) validate in oboeservice, and plumb downwards
+ auto validatedRes = ValidatedAttributionSourceState::createFromTrustedUidNoPackage(
+ client.attributionSource,
+ mAfThreadCallback->getPermissionProvider()
+ );
+ if (!validatedRes.has_value()) {
+ ALOGE("MMAP client package validation fail: %s",
+ validatedRes.error().toString8().c_str());
+ return aidl_utils::statusTFromBinderStatus(validatedRes.error());
+ }
+ adjAttributionSource = std::move(validatedRes.value()).unwrapInto();
+ }
const auto localSessionId = mSessionId;
auto localAttr = mAttr;
diff --git a/services/audiopolicy/common/managerdefinitions/include/AudioIODescriptorInterface.h b/services/audiopolicy/common/managerdefinitions/include/AudioIODescriptorInterface.h
index 3edd4de..e519766 100644
--- a/services/audiopolicy/common/managerdefinitions/include/AudioIODescriptorInterface.h
+++ b/services/audiopolicy/common/managerdefinitions/include/AudioIODescriptorInterface.h
@@ -53,7 +53,7 @@
// a preferred device in which case the first client drives the selection.
if (desc->isMmap()) {
auto matchingClients = desc->clientsList(
- false /*activeOnly*/, filter, true /*preferredDevice*/);
+ false /*activeOnly*/, filter, false /*preferredDevice*/);
if (matchingClients.empty()) {
return nullptr;
}
diff --git a/services/audiopolicy/common/managerdefinitions/include/DeviceDescriptor.h b/services/audiopolicy/common/managerdefinitions/include/DeviceDescriptor.h
index c502fc2..7002e63 100644
--- a/services/audiopolicy/common/managerdefinitions/include/DeviceDescriptor.h
+++ b/services/audiopolicy/common/managerdefinitions/include/DeviceDescriptor.h
@@ -282,6 +282,11 @@
const AudioProfileVector& getSupportedProfiles() { return mSupportedProfiles; }
+ /**
+ * @brief Checks whether all devices in the device vector are attached to the HwModule
+ * @return true if all devices in the vector are attached, false otherwise
+ */
+ bool areAllDevicesAttached() const;
// Return a string to describe the DeviceVector. The sensitive information will only be
// added to the string if `includeSensitiveInfo` is true.
std::string toString(bool includeSensitiveInfo = false) const;
diff --git a/services/audiopolicy/common/managerdefinitions/src/DeviceDescriptor.cpp b/services/audiopolicy/common/managerdefinitions/src/DeviceDescriptor.cpp
index 9f7b8fc..46a04de 100644
--- a/services/audiopolicy/common/managerdefinitions/src/DeviceDescriptor.cpp
+++ b/services/audiopolicy/common/managerdefinitions/src/DeviceDescriptor.cpp
@@ -541,4 +541,14 @@
return filteredDevices;
}
+bool DeviceVector::areAllDevicesAttached() const
+{
+ for (const auto &device : *this) {
+ if (!device->isAttached()) {
+ return false;
+ }
+ }
+ return true;
+}
+
} // namespace android
diff --git a/services/audiopolicy/managerdefault/AudioPolicyManager.cpp b/services/audiopolicy/managerdefault/AudioPolicyManager.cpp
index 7d2dfbc..e5fe2d2 100644
--- a/services/audiopolicy/managerdefault/AudioPolicyManager.cpp
+++ b/services/audiopolicy/managerdefault/AudioPolicyManager.cpp
@@ -375,6 +375,7 @@
checkLeBroadcastRoutes(wasLeUnicastActive, nullptr, 0);
mpClientInterface->onAudioPortListUpdate();
+ ALOGV("%s() completed for device: %s", __func__, device->toString().c_str());
return NO_ERROR;
} // end if is output device
@@ -390,6 +391,8 @@
return INVALID_OPERATION;
}
+ ALOGV("%s() connecting device %s", __func__, device->toString().c_str());
+
if (mAvailableInputDevices.add(device) < 0) {
return NO_MEMORY;
}
@@ -462,6 +465,7 @@
}
mpClientInterface->onAudioPortListUpdate();
+ ALOGV("%s() completed for device: %s", __func__, device->toString().c_str());
return NO_ERROR;
} // end if is input device
@@ -770,7 +774,7 @@
}
muteWaitMs = setOutputDevices(__func__, mPrimaryOutput, rxDevices, true, delayMs);
} else { // create RX path audio patch
- connectTelephonyRxAudioSource();
+ connectTelephonyRxAudioSource(delayMs);
// If the TX device is on the primary HW module but RX device is
// on other HW module, SinkMetaData of telephony input should handle it
// assuming the device uses audio HAL V5.0 and above
@@ -807,7 +811,7 @@
return false;
}
-void AudioPolicyManager::connectTelephonyRxAudioSource()
+void AudioPolicyManager::connectTelephonyRxAudioSource(uint32_t delayMs)
{
const auto aa = mEngine->getAttributesForStreamType(AUDIO_STREAM_VOICE_CALL);
@@ -835,7 +839,7 @@
audio_port_handle_t portId = AUDIO_PORT_HANDLE_NONE;
status_t status = startAudioSourceInternal(&source, &aa, &portId, 0 /*uid*/,
- true /*internal*/, true /*isCallRx*/);
+ true /*internal*/, true /*isCallRx*/, delayMs);
ALOGE_IF(status != OK, "%s: failed to start audio source (%d)", __func__, status);
mCallRxSourceClient = mAudioSources.valueFor(portId);
ALOGV("%s portdID %d between source %s and sink %s", __func__, portId,
@@ -1592,7 +1596,7 @@
(config->channel_mask == desc->getChannelMask()) &&
(session == desc->mDirectClientSession)) {
desc->mDirectOpenCount++;
- ALOGV("%s reusing direct output %d for session %d", __func__,
+ ALOGI("%s reusing direct output %d for session %d", __func__,
mOutputs.keyAt(i), session);
*output = mOutputs.keyAt(i);
return NO_ERROR;
@@ -1602,17 +1606,23 @@
if (!profile->canOpenNewIo()) {
if (!com::android::media::audioserver::direct_track_reprioritization()) {
+ ALOGW("%s profile %s can't open new output maxOpenCount reached", __func__,
+ profile->getName().c_str());
return NAME_NOT_FOUND;
} else if ((profile->getFlags() & AUDIO_OUTPUT_FLAG_MMAP_NOIRQ) != 0) {
// MMAP gracefully handles lack of an exclusive track resource by mixing
// above the audio framework. For AAudio to know that the limit is reached,
// return an error.
+ ALOGW("%s profile %s can't open new mmap output maxOpenCount reached", __func__,
+ profile->getName().c_str());
return NAME_NOT_FOUND;
} else {
// Close outputs on this profile, if available, to free resources for this request
for (int i = 0; i < mOutputs.size() && !profile->canOpenNewIo(); i++) {
const auto desc = mOutputs.valueAt(i);
if (desc->mProfile == profile) {
+ ALOGV("%s closeOutput %d to prioritize session %d on profile %s", __func__,
+ desc->mIoHandle, session, profile->getName().c_str());
closeOutput(desc->mIoHandle);
}
}
@@ -1621,6 +1631,8 @@
// Unable to close streams to find free resources for this request
if (!profile->canOpenNewIo()) {
+ ALOGW("%s profile %s can't open new output maxOpenCount reached", __func__,
+ profile->getName().c_str());
return NAME_NOT_FOUND;
}
@@ -3539,8 +3551,8 @@
ALOGW("%s: no group for stream %s, bailing out", __func__, toString(stream).c_str());
return NO_ERROR;
}
- ALOGV("%s: stream %s attributes=%s", __func__,
- toString(stream).c_str(), toString(attributes).c_str());
+ ALOGV("%s: stream %s attributes=%s, index %d , device 0x%X", __func__,
+ toString(stream).c_str(), toString(attributes).c_str(), index, device);
return setVolumeIndexForAttributes(attributes, index, device);
}
@@ -3718,8 +3730,8 @@
bool hasVoice = hasVoiceStream(volumeCurves.getStreamTypes());
if (((index < volumeCurves.getVolumeIndexMin()) && !(hasVoice && index == 0)) ||
(index > volumeCurves.getVolumeIndexMax())) {
- ALOGD("%s: wrong index %d min=%d max=%d", __FUNCTION__, index,
- volumeCurves.getVolumeIndexMin(), volumeCurves.getVolumeIndexMax());
+ ALOGE("%s: wrong index %d min=%d max=%d, device 0x%X", __FUNCTION__, index,
+ volumeCurves.getVolumeIndexMin(), volumeCurves.getVolumeIndexMax(), device);
return BAD_VALUE;
}
if (!audio_is_output_device(device)) {
@@ -5884,13 +5896,14 @@
audio_port_handle_t *portId,
uid_t uid) {
return startAudioSourceInternal(source, attributes, portId, uid,
- false /*internal*/, false /*isCallRx*/);
+ false /*internal*/, false /*isCallRx*/, 0 /*delayMs*/);
}
status_t AudioPolicyManager::startAudioSourceInternal(const struct audio_port_config *source,
const audio_attributes_t *attributes,
audio_port_handle_t *portId,
- uid_t uid, bool internal, bool isCallRx)
+ uid_t uid, bool internal, bool isCallRx,
+ uint32_t delayMs)
{
ALOGV("%s", __FUNCTION__);
*portId = AUDIO_PORT_HANDLE_NONE;
@@ -5925,14 +5938,15 @@
mEngine->getProductStrategyForAttributes(*attributes),
toVolumeSource(*attributes), internal, isCallRx, false);
- status_t status = connectAudioSource(sourceDesc);
+ status_t status = connectAudioSource(sourceDesc, delayMs);
if (status == NO_ERROR) {
mAudioSources.add(*portId, sourceDesc);
}
return status;
}
-status_t AudioPolicyManager::connectAudioSource(const sp<SourceClientDescriptor>& sourceDesc)
+status_t AudioPolicyManager::connectAudioSource(const sp<SourceClientDescriptor>& sourceDesc,
+ uint32_t delayMs)
{
ALOGV("%s handle %d", __FUNCTION__, sourceDesc->portId());
@@ -5958,7 +5972,7 @@
audio_patch_handle_t handle = AUDIO_PATCH_HANDLE_NONE;
return connectAudioSourceToSink(
- sourceDesc, sinkDevice, patchBuilder.patch(), handle, mUidCached, 0 /*delayMs*/);
+ sourceDesc, sinkDevice, patchBuilder.patch(), handle, mUidCached, delayMs);
}
status_t AudioPolicyManager::stopAudioSource(audio_port_handle_t portId)
@@ -6688,6 +6702,14 @@
if (!mConfig->getOutputDevices().contains(supportedDevice)) {
continue;
}
+
+ if (outProfile->isMmap() && !outProfile->hasDynamicAudioProfile()
+ && availProfileDevices.areAllDevicesAttached()) {
+ ALOGV("%s skip opening output for mmap profile %s", __func__,
+ outProfile->getTagName().c_str());
+ continue;
+ }
+
sp<SwAudioOutputDescriptor> outputDesc = new SwAudioOutputDescriptor(outProfile,
mpClientInterface);
audio_io_handle_t output = AUDIO_IO_HANDLE_NONE;
@@ -6747,6 +6769,14 @@
__func__, inProfile->getTagName().c_str());
continue;
}
+
+ if (inProfile->isMmap() && !inProfile->hasDynamicAudioProfile()
+ && availProfileDevices.areAllDevicesAttached()) {
+ ALOGV("%s skip opening input for mmap profile %s", __func__,
+ inProfile->getTagName().c_str());
+ continue;
+ }
+
sp<AudioInputDescriptor> inputDesc = new AudioInputDescriptor(
inProfile, mpClientInterface, false /*isPreemptor*/);
@@ -6754,12 +6784,12 @@
status_t status = inputDesc->open(nullptr,
availProfileDevices.itemAt(0),
AUDIO_SOURCE_MIC,
- AUDIO_INPUT_FLAG_NONE,
+ (audio_input_flags_t) inProfile->getFlags(),
&input);
if (status != NO_ERROR) {
- ALOGW("Cannot open input stream for device %s on hw module %s",
- availProfileDevices.toString().c_str(),
- hwModule->getName());
+ ALOGW("%s: Cannot open input stream for device %s for profile %s on hw module %s",
+ __func__, availProfileDevices.toString().c_str(),
+ inProfile->getTagName().c_str(), hwModule->getName());
continue;
}
for (const auto &device : availProfileDevices) {
@@ -6867,8 +6897,8 @@
sp<IOProfile> profile = hwModule->getOutputProfiles()[j];
if (profile->supportsDevice(device)) {
profiles.add(profile);
- ALOGV("checkOutputsForDevice(): adding profile %zu from module %s",
- j, hwModule->getName());
+ ALOGV("%s(): adding profile %s from module %s",
+ __func__, profile->getTagName().c_str(), hwModule->getName());
}
}
}
@@ -6901,7 +6931,11 @@
if (j != outputs.size()) {
continue;
}
-
+ if (profile->isMmap() && !profile->hasDynamicAudioProfile()) {
+ ALOGV("%s skip opening output for mmap profile %s",
+ __func__, profile->getTagName().c_str());
+ continue;
+ }
if (!profile->canOpenNewIo()) {
ALOGW("Max Output number %u already opened for this profile %s",
profile->maxOpenCount, profile->getTagName().c_str());
@@ -6962,9 +6996,8 @@
if (!profile->supportsDevice(device)) {
continue;
}
- ALOGV("checkOutputsForDevice(): "
- "clearing direct output profile %zu on module %s",
- j, hwModule->getName());
+ ALOGV("%s(): clearing direct output profile %s on module %s",
+ __func__, profile->getTagName().c_str(), hwModule->getName());
profile->clearAudioProfiles();
if (!profile->hasDynamicAudioProfile()) {
continue;
@@ -7019,8 +7052,8 @@
if (profile->supportsDevice(device)) {
profiles.add(profile);
- ALOGV("checkInputsForDevice(): adding profile %zu from module %s",
- profile_index, hwModule->getName());
+ ALOGV("%s : adding profile %s from module %s", __func__,
+ profile->getTagName().c_str(), hwModule->getName());
}
}
}
@@ -7052,15 +7085,22 @@
continue;
}
+ if (profile->isMmap() && !profile->hasDynamicAudioProfile()) {
+ ALOGV("%s skip opening input for mmap profile %s",
+ __func__, profile->getTagName().c_str());
+ continue;
+ }
if (!profile->canOpenNewIo()) {
- ALOGW("Max Input number %u already opened for this profile %s",
- profile->maxOpenCount, profile->getTagName().c_str());
+ ALOGW("%s Max Input number %u already opened for this profile %s",
+ __func__, profile->maxOpenCount, profile->getTagName().c_str());
continue;
}
desc = new AudioInputDescriptor(profile, mpClientInterface, false /*isPreemptor*/);
audio_io_handle_t input = AUDIO_IO_HANDLE_NONE;
- status = desc->open(nullptr, device, AUDIO_SOURCE_MIC, AUDIO_INPUT_FLAG_NONE, &input);
+ ALOGV("%s opening input for profile %s", __func__, profile->getTagName().c_str());
+ status = desc->open(nullptr, device, AUDIO_SOURCE_MIC,
+ (audio_input_flags_t) profile->getFlags(), &input);
if (status == NO_ERROR) {
const String8& address = String8(device->address().c_str());
@@ -7071,7 +7111,8 @@
}
updateAudioProfiles(device, input, profile);
if (!profile->hasValidAudioProfile()) {
- ALOGW("checkInputsForDevice() direct input missing param");
+ ALOGW("%s direct input missing param for profile %s", __func__,
+ profile->getTagName().c_str());
desc->close();
input = AUDIO_IO_HANDLE_NONE;
}
@@ -7082,18 +7123,20 @@
} // endif input != 0
if (input == AUDIO_IO_HANDLE_NONE) {
- ALOGW("%s could not open input for device %s", __func__,
- device->toString().c_str());
+ ALOGW("%s could not open input for device %s on profile %s", __func__,
+ device->toString().c_str(), profile->getTagName().c_str());
profiles.removeAt(profile_index);
profile_index--;
} else {
if (audio_device_is_digital(device->type())) {
device->importAudioPortAndPickAudioProfile(profile);
}
- ALOGV("checkInputsForDevice(): adding input %d", input);
+ ALOGV("%s: adding input %d for profile %s", __func__,
+ input, profile->getTagName().c_str());
if (checkCloseInput(desc)) {
- ALOGV("%s closing input %d", __func__, input);
+ ALOGV("%s: closing input %d for profile %s", __func__,
+ input, profile->getTagName().c_str());
closeInput(input);
}
}
@@ -7112,8 +7155,8 @@
profile_index++) {
sp<IOProfile> profile = hwModule->getInputProfiles()[profile_index];
if (profile->supportsDevice(device)) {
- ALOGV("checkInputsForDevice(): clearing direct input profile %zu on module %s",
- profile_index, hwModule->getName());
+ ALOGV("%s: clearing direct input profile %s on module %s", __func__,
+ profile->getTagName().c_str(), hwModule->getName());
profile->clearAudioProfiles();
}
}
@@ -7305,7 +7348,7 @@
if (sourceDesc != nullptr && followsSameRouting(attr, sourceDesc->attributes())
&& sourceDesc->getPatchHandle() == AUDIO_PATCH_HANDLE_NONE
&& !sourceDesc->isCallRx() && !sourceDesc->isInternal()) {
- connectAudioSource(sourceDesc);
+ connectAudioSource(sourceDesc, 0 /*delayMs*/);
}
}
}
@@ -7412,7 +7455,7 @@
}
sp<SourceClientDescriptor> source = getSourceForAttributesOnOutput(srcOut, attr);
if (source != nullptr && !source->isCallRx() && !source->isInternal()) {
- connectAudioSource(source);
+ connectAudioSource(source, 0 /*delayMs*/);
}
}
@@ -8171,7 +8214,7 @@
VolumeSource vsToDriveAbs = toVolumeSource(groupToDriveAbs);
if (vsToDriveAbs == volumeSource) {
// attenuation is applied by the abs volume controller
- return volumeDbMax;
+ return (index != 0) ? volumeDbMax : volumeDb;
} else {
IVolumeCurves &curvesAbs = getVolumeCurves(vsToDriveAbs);
int indexAbs = curvesAbs.getVolumeIndex({volumeDevice});
diff --git a/services/audiopolicy/managerdefault/AudioPolicyManager.h b/services/audiopolicy/managerdefault/AudioPolicyManager.h
index f899cd5..98853ce 100644
--- a/services/audiopolicy/managerdefault/AudioPolicyManager.h
+++ b/services/audiopolicy/managerdefault/AudioPolicyManager.h
@@ -707,7 +707,7 @@
void updateCallAndOutputRouting(bool forceVolumeReeval = true, uint32_t delayMs = 0,
bool skipDelays = false);
- void connectTelephonyRxAudioSource();
+ void connectTelephonyRxAudioSource(uint32_t delayMs);
void disconnectTelephonyAudioSource(sp<SourceClientDescriptor> &clientDesc);
@@ -932,7 +932,8 @@
status_t hasPrimaryOutput() const { return mPrimaryOutput != 0; }
- status_t connectAudioSource(const sp<SourceClientDescriptor>& sourceDesc);
+ status_t connectAudioSource(const sp<SourceClientDescriptor>& sourceDesc,
+ uint32_t delayMs);
status_t disconnectAudioSource(const sp<SourceClientDescriptor>& sourceDesc);
status_t connectAudioSourceToSink(const sp<SourceClientDescriptor>& sourceDesc,
@@ -975,7 +976,8 @@
audio_port_handle_t *portId,
uid_t uid,
bool internal,
- bool isCallRx);
+ bool isCallRx,
+ uint32_t delayMs);
const uid_t mUidCached; // AID_AUDIOSERVER
sp<const AudioPolicyConfig> mConfig;
EngineInstance mEngine; // Audio Policy Engine instance
diff --git a/services/camera/libcameraservice/CameraService.cpp b/services/camera/libcameraservice/CameraService.cpp
index b8f781c..157f084 100644
--- a/services/camera/libcameraservice/CameraService.cpp
+++ b/services/camera/libcameraservice/CameraService.cpp
@@ -784,12 +784,13 @@
return true;
}
-Status CameraService::getNumberOfCameras(int32_t type, int32_t deviceId, int32_t devicePolicy,
+Status CameraService::getNumberOfCameras(int32_t type,
+ const AttributionSourceState& clientAttribution, int32_t devicePolicy,
int32_t* numCameras) {
ATRACE_CALL();
- if (vd_flags::camera_device_awareness() && (deviceId != kDefaultDeviceId)
+ if (vd_flags::camera_device_awareness() && (clientAttribution.deviceId != kDefaultDeviceId)
&& (devicePolicy != IVirtualDeviceManagerNative::DEVICE_POLICY_DEFAULT)) {
- *numCameras = mVirtualDeviceCameraIdMapper.getNumberOfCameras(deviceId);
+ *numCameras = mVirtualDeviceCameraIdMapper.getNumberOfCameras(clientAttribution.deviceId);
return Status::ok();
}
@@ -822,7 +823,7 @@
}
Status CameraService::createDefaultRequest(const std::string& unresolvedCameraId, int templateId,
- int32_t deviceId, int32_t devicePolicy,
+ const AttributionSourceState& clientAttribution, int32_t devicePolicy,
/* out */
hardware::camera2::impl::CameraMetadataNative* request) {
ATRACE_CALL();
@@ -837,11 +838,11 @@
return STATUS_ERROR(ERROR_DISCONNECTED, "Camera subsystem is not available");
}
- std::optional<std::string> cameraIdOptional = resolveCameraId(unresolvedCameraId, deviceId,
- devicePolicy);
+ std::optional<std::string> cameraIdOptional =
+ resolveCameraId(unresolvedCameraId, clientAttribution.deviceId, devicePolicy);
if (!cameraIdOptional.has_value()) {
std::string msg = fmt::sprintf("Camera %s: Invalid camera id for device id %d",
- unresolvedCameraId.c_str(), deviceId);
+ unresolvedCameraId.c_str(), clientAttribution.deviceId);
ALOGE("%s: %s", __FUNCTION__, msg.c_str());
return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT, msg.c_str());
}
@@ -886,7 +887,7 @@
Status CameraService::isSessionConfigurationWithParametersSupported(
const std::string& unresolvedCameraId, int targetSdkVersion,
const SessionConfiguration& sessionConfiguration,
- int32_t deviceId, int32_t devicePolicy,
+ const AttributionSourceState& clientAttribution, int32_t devicePolicy,
/*out*/ bool* supported) {
ATRACE_CALL();
@@ -900,11 +901,11 @@
return STATUS_ERROR(ERROR_DISCONNECTED, "Camera subsystem is not available");
}
- std::optional<std::string> cameraIdOptional = resolveCameraId(unresolvedCameraId, deviceId,
- devicePolicy);
+ std::optional<std::string> cameraIdOptional =
+ resolveCameraId(unresolvedCameraId, clientAttribution.deviceId, devicePolicy);
if (!cameraIdOptional.has_value()) {
std::string msg = fmt::sprintf("Camera %s: Invalid camera id for device id %d",
- unresolvedCameraId.c_str(), deviceId);
+ unresolvedCameraId.c_str(), clientAttribution.deviceId);
ALOGE("%s: %s", __FUNCTION__, msg.c_str());
return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT, msg.c_str());
}
@@ -983,7 +984,8 @@
Status CameraService::getSessionCharacteristics(const std::string& unresolvedCameraId,
int targetSdkVersion, int rotationOverride,
- const SessionConfiguration& sessionConfiguration, int32_t deviceId, int32_t devicePolicy,
+ const SessionConfiguration& sessionConfiguration,
+ const AttributionSourceState& clientAttribution, int32_t devicePolicy,
/*out*/ CameraMetadata* outMetadata) {
ATRACE_CALL();
@@ -1000,11 +1002,11 @@
return STATUS_ERROR(ERROR_DISCONNECTED, "Camera subsystem is not available");
}
- std::optional<std::string> cameraIdOptional = resolveCameraId(unresolvedCameraId, deviceId,
- devicePolicy);
+ std::optional<std::string> cameraIdOptional =
+ resolveCameraId(unresolvedCameraId, clientAttribution.deviceId, devicePolicy);
if (!cameraIdOptional.has_value()) {
std::string msg = fmt::sprintf("Camera %s: Invalid camera id for device id %d",
- unresolvedCameraId.c_str(), deviceId);
+ unresolvedCameraId.c_str(), clientAttribution.deviceId);
ALOGE("%s: %s", __FUNCTION__, msg.c_str());
return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT, msg.c_str());
}
@@ -1206,14 +1208,16 @@
return mVirtualDeviceCameraIdMapper.getActualCameraId(deviceId, inputCameraId);
}
-Status CameraService::getCameraInfo(int cameraId, int rotationOverride, int32_t deviceId,
- int32_t devicePolicy, CameraInfo* cameraInfo) {
+Status CameraService::getCameraInfo(int cameraId, int rotationOverride,
+ const AttributionSourceState& clientAttribution, int32_t devicePolicy,
+ CameraInfo* cameraInfo) {
ATRACE_CALL();
Mutex::Autolock l(mServiceLock);
- std::string cameraIdStr = cameraIdIntToStrLocked(cameraId, deviceId, devicePolicy);
+ std::string cameraIdStr =
+ cameraIdIntToStrLocked(cameraId, clientAttribution.deviceId, devicePolicy);
if (cameraIdStr.empty()) {
std::string msg = fmt::sprintf("Camera %d: Invalid camera id for device id %d",
- cameraId, deviceId);
+ cameraId, clientAttribution.deviceId);
ALOGE("%s: %s", __FUNCTION__, msg.c_str());
return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT, msg.c_str());
}
@@ -1287,8 +1291,8 @@
}
Status CameraService::getCameraCharacteristics(const std::string& unresolvedCameraId,
- int targetSdkVersion, int rotationOverride, int32_t deviceId, int32_t devicePolicy,
- CameraMetadata* cameraInfo) {
+ int targetSdkVersion, int rotationOverride, const AttributionSourceState& clientAttribution,
+ int32_t devicePolicy, CameraMetadata* cameraInfo) {
ATRACE_CALL();
if (!cameraInfo) {
@@ -1303,11 +1307,11 @@
"Camera subsystem is not available");;
}
- std::optional<std::string> cameraIdOptional = resolveCameraId(unresolvedCameraId, deviceId,
- devicePolicy);
+ std::optional<std::string> cameraIdOptional =
+ resolveCameraId(unresolvedCameraId, clientAttribution.deviceId, devicePolicy);
if (!cameraIdOptional.has_value()) {
std::string msg = fmt::sprintf("Camera %s: Invalid camera id for device id %d",
- unresolvedCameraId.c_str(), deviceId);
+ unresolvedCameraId.c_str(), clientAttribution.deviceId);
ALOGE("%s: %s", __FUNCTION__, msg.c_str());
return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT, msg.c_str());
}
@@ -1340,16 +1344,17 @@
return filterSensitiveMetadataIfNeeded(cameraId, cameraInfo);
}
-Status CameraService::getTorchStrengthLevel(const std::string& unresolvedCameraId, int32_t deviceId,
+Status CameraService::getTorchStrengthLevel(const std::string& unresolvedCameraId,
+ const AttributionSourceState& clientAttribution,
int32_t devicePolicy, int32_t* torchStrength) {
ATRACE_CALL();
Mutex::Autolock l(mServiceLock);
- std::optional<std::string> cameraIdOptional = resolveCameraId(unresolvedCameraId, deviceId,
- devicePolicy);
+ std::optional<std::string> cameraIdOptional = resolveCameraId(unresolvedCameraId,
+ clientAttribution.deviceId, devicePolicy);
if (!cameraIdOptional.has_value()) {
std::string msg = fmt::sprintf("Camera %s: Invalid camera id for device id %d",
- unresolvedCameraId.c_str(), deviceId);
+ unresolvedCameraId.c_str(), clientAttribution.deviceId);
ALOGE("%s: %s", __FUNCTION__, msg.c_str());
return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT, msg.c_str());
}
@@ -2129,35 +2134,35 @@
Status CameraService::connect(
const sp<ICameraClient>& cameraClient,
int api1CameraId,
- const std::string& clientPackageName,
- int clientUid,
- int clientPid,
int targetSdkVersion,
int rotationOverride,
bool forceSlowJpegMode,
- int32_t deviceId,
+ const AttributionSourceState& clientAttribution,
int32_t devicePolicy,
/*out*/
sp<ICamera>* device) {
ATRACE_CALL();
Status ret = Status::ok();
- std::string cameraIdStr = cameraIdIntToStr(api1CameraId, deviceId, devicePolicy);
+ std::string cameraIdStr =
+ cameraIdIntToStr(api1CameraId, clientAttribution.deviceId, devicePolicy);
if (cameraIdStr.empty()) {
std::string msg = fmt::sprintf("Camera %d: Invalid camera id for device id %d",
- api1CameraId, deviceId);
+ api1CameraId, clientAttribution.deviceId);
ALOGE("%s: %s", __FUNCTION__, msg.c_str());
return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT, msg.c_str());
}
sp<Client> client = nullptr;
ret = connectHelper<ICameraClient,Client>(cameraClient, cameraIdStr, api1CameraId,
- clientPackageName, /*systemNativeClient*/ false, {}, clientUid, clientPid, API_1,
+ clientAttribution.packageName.value_or(""), /*systemNativeClient*/ false, {},
+ clientAttribution.uid, clientAttribution.pid, API_1,
/*shimUpdateOnly*/ false, /*oomScoreOffset*/ 0, targetSdkVersion,
rotationOverride, forceSlowJpegMode, cameraIdStr, /*out*/client);
if (!ret.isOk()) {
- logRejected(cameraIdStr, getCallingPid(), clientPackageName, toStdString(ret.toString8()));
+ logRejected(cameraIdStr, getCallingPid(), clientAttribution.packageName.value_or(""),
+ toStdString(ret.toString8()));
return ret;
}
@@ -2234,17 +2239,15 @@
Status CameraService::connectDevice(
const sp<hardware::camera2::ICameraDeviceCallbacks>& cameraCb,
const std::string& unresolvedCameraId,
- const std::string& clientPackageName,
- const std::optional<std::string>& clientFeatureId,
- int clientUid, int oomScoreOffset, int targetSdkVersion,
- int rotationOverride, int32_t deviceId, int32_t devicePolicy,
+ int oomScoreOffset, int targetSdkVersion,
+ int rotationOverride, const AttributionSourceState& clientAttribution, int32_t devicePolicy,
/*out*/
sp<hardware::camera2::ICameraDeviceUser>* device) {
ATRACE_CALL();
RunThreadWithRealtimePriority priorityBump;
Status ret = Status::ok();
sp<CameraDeviceClient> client = nullptr;
- std::string clientPackageNameAdj = clientPackageName;
+ std::string clientPackageNameAdj = clientAttribution.packageName.value_or("");
int callingPid = getCallingPid();
int callingUid = getCallingUid();
bool systemNativeClient = false;
@@ -2254,11 +2257,11 @@
systemNativeClient = true;
}
- std::optional<std::string> cameraIdOptional = resolveCameraId(unresolvedCameraId, deviceId,
- devicePolicy);
+ std::optional<std::string> cameraIdOptional = resolveCameraId(unresolvedCameraId,
+ clientAttribution.deviceId, devicePolicy);
if (!cameraIdOptional.has_value()) {
std::string msg = fmt::sprintf("Camera %s: Invalid camera id for device id %d",
- unresolvedCameraId.c_str(), deviceId);
+ unresolvedCameraId.c_str(), clientAttribution.deviceId);
ALOGE("%s: %s", __FUNCTION__, msg.c_str());
return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT, msg.c_str());
}
@@ -2273,8 +2276,8 @@
return STATUS_ERROR(ERROR_ILLEGAL_ARGUMENT, msg.c_str());
}
- userid_t clientUserId = multiuser_get_user_id(clientUid);
- if (clientUid == USE_CALLING_UID) {
+ userid_t clientUserId = multiuser_get_user_id(clientAttribution.uid);
+ if (clientAttribution.uid == USE_CALLING_UID) {
clientUserId = multiuser_get_user_id(callingUid);
}
@@ -2300,10 +2303,10 @@
}
ret = connectHelper<hardware::camera2::ICameraDeviceCallbacks,CameraDeviceClient>(cameraCb,
- cameraId, /*api1CameraId*/-1, clientPackageNameAdj, systemNativeClient, clientFeatureId,
- clientUid, USE_CALLING_PID, API_2, /*shimUpdateOnly*/ false, oomScoreOffset,
- targetSdkVersion, rotationOverride, /*forceSlowJpegMode*/false, unresolvedCameraId,
- /*out*/client);
+ cameraId, /*api1CameraId*/-1, clientPackageNameAdj, systemNativeClient,
+ clientAttribution.attributionTag, clientAttribution.uid, USE_CALLING_PID, API_2,
+ /*shimUpdateOnly*/ false, oomScoreOffset, targetSdkVersion, rotationOverride,
+ /*forceSlowJpegMode*/false, unresolvedCameraId, /*out*/client);
if (!ret.isOk()) {
logRejected(cameraId, callingPid, clientPackageNameAdj, toStdString(ret.toString8()));
@@ -2826,8 +2829,8 @@
}
Status CameraService::turnOnTorchWithStrengthLevel(const std::string& unresolvedCameraId,
- int32_t torchStrength, const sp<IBinder>& clientBinder, int32_t deviceId,
- int32_t devicePolicy) {
+ int32_t torchStrength, const sp<IBinder>& clientBinder,
+ const AttributionSourceState& clientAttribution, int32_t devicePolicy) {
Mutex::Autolock lock(mServiceLock);
ATRACE_CALL();
@@ -2838,11 +2841,11 @@
}
int uid = getCallingUid();
- std::optional<std::string> cameraIdOptional = resolveCameraId(unresolvedCameraId, deviceId,
- devicePolicy);
+ std::optional<std::string> cameraIdOptional = resolveCameraId(unresolvedCameraId,
+ clientAttribution.deviceId, devicePolicy);
if (!cameraIdOptional.has_value()) {
std::string msg = fmt::sprintf("Camera %s: Invalid camera id for device id %d",
- unresolvedCameraId.c_str(), deviceId);
+ unresolvedCameraId.c_str(), clientAttribution.deviceId);
ALOGE("%s: %s", __FUNCTION__, msg.c_str());
return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT, msg.c_str());
}
@@ -2964,7 +2967,8 @@
}
Status CameraService::setTorchMode(const std::string& unresolvedCameraId, bool enabled,
- const sp<IBinder>& clientBinder, int32_t deviceId, int32_t devicePolicy) {
+ const sp<IBinder>& clientBinder, const AttributionSourceState& clientAttribution,
+ int32_t devicePolicy) {
Mutex::Autolock lock(mServiceLock);
ATRACE_CALL();
@@ -2975,11 +2979,11 @@
}
int uid = getCallingUid();
- std::optional<std::string> cameraIdOptional = resolveCameraId(unresolvedCameraId, deviceId,
- devicePolicy);
+ std::optional<std::string> cameraIdOptional = resolveCameraId(unresolvedCameraId,
+ clientAttribution.deviceId, devicePolicy);
if (!cameraIdOptional.has_value()) {
std::string msg = fmt::sprintf("Camera %s: Invalid camera id for device id %d",
- unresolvedCameraId.c_str(), deviceId);
+ unresolvedCameraId.c_str(), clientAttribution.deviceId);
ALOGE("%s: %s", __FUNCTION__, msg.c_str());
return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT, msg.c_str());
}
@@ -3310,7 +3314,7 @@
Status CameraService::isConcurrentSessionConfigurationSupported(
const std::vector<CameraIdAndSessionConfiguration>& cameraIdsAndSessionConfigurations,
- int targetSdkVersion, int32_t deviceId, int32_t devicePolicy,
+ int targetSdkVersion, const AttributionSourceState& clientAttribution, int32_t devicePolicy,
/*out*/bool* isSupported) {
if (!isSupported) {
ALOGE("%s: isSupported is NULL", __FUNCTION__);
@@ -3325,10 +3329,11 @@
for (auto cameraIdAndSessionConfiguration : cameraIdsAndSessionConfigurations) {
std::optional<std::string> cameraIdOptional =
- resolveCameraId(cameraIdAndSessionConfiguration.mCameraId, deviceId, devicePolicy);
+ resolveCameraId(cameraIdAndSessionConfiguration.mCameraId,
+ clientAttribution.deviceId, devicePolicy);
if (!cameraIdOptional.has_value()) {
std::string msg = fmt::sprintf("Camera %s: Invalid camera id for device id %d",
- cameraIdAndSessionConfiguration.mCameraId.c_str(), deviceId);
+ cameraIdAndSessionConfiguration.mCameraId.c_str(), clientAttribution.deviceId);
ALOGE("%s: %s", __FUNCTION__, msg.c_str());
return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT, msg.c_str());
}
@@ -3341,7 +3346,7 @@
bool hasCameraPermission = ((callingPid == getpid()) ||
hasPermissionsForCamera(callingPid, callingUid,
devicePolicy == IVirtualDeviceManagerNative::DEVICE_POLICY_DEFAULT
- ? kDefaultDeviceId : deviceId));
+ ? kDefaultDeviceId : clientAttribution.deviceId));
if (!hasCameraPermission) {
return STATUS_ERROR(ERROR_PERMISSION_DENIED,
"android.permission.CAMERA needed to call"
diff --git a/services/camera/libcameraservice/CameraService.h b/services/camera/libcameraservice/CameraService.h
index 9998fb8..6b21e05 100644
--- a/services/camera/libcameraservice/CameraService.h
+++ b/services/camera/libcameraservice/CameraService.h
@@ -154,13 +154,16 @@
// resolveCameraId(logicalCameraId, deviceId, devicePolicy) to arrive at the correct
// cameraId to perform the operation on (in case of contexts
// associated with virtual devices).
- virtual binder::Status getNumberOfCameras(int32_t type, int32_t deviceId,
+ virtual binder::Status getNumberOfCameras(int32_t type,
+ const AttributionSourceState& clientAttribution,
int32_t devicePolicy, int32_t* numCameras);
virtual binder::Status getCameraInfo(int cameraId, int rotationOverride,
- int32_t deviceId, int32_t devicePolicy, hardware::CameraInfo* cameraInfo) override;
+ const AttributionSourceState& clientAttribution,
+ int32_t devicePolicy, hardware::CameraInfo* cameraInfo) override;
virtual binder::Status getCameraCharacteristics(const std::string& cameraId,
- int targetSdkVersion, int rotationOverride, int32_t deviceId,
+ int targetSdkVersion, int rotationOverride,
+ const AttributionSourceState& clientAttribution,
int32_t devicePolicy, CameraMetadata* cameraInfo) override;
virtual binder::Status getCameraVendorTagDescriptor(
/*out*/
@@ -170,17 +173,15 @@
hardware::camera2::params::VendorTagDescriptorCache* cache);
virtual binder::Status connect(const sp<hardware::ICameraClient>& cameraClient,
- int32_t cameraId, const std::string& clientPackageName,
- int32_t clientUid, int clientPid, int targetSdkVersion,
- int rotationOverride, bool forceSlowJpegMode, int32_t deviceId,
+ int32_t cameraId, int targetSdkVersion, int rotationOverride, bool forceSlowJpegMode,
+ const AttributionSourceState& clientAttribution,
int32_t devicePolicy, /*out*/ sp<hardware::ICamera>* device) override;
virtual binder::Status connectDevice(
const sp<hardware::camera2::ICameraDeviceCallbacks>& cameraCb,
- const std::string& cameraId,
- const std::string& clientPackageName, const std::optional<std::string>& clientFeatureId,
- int32_t clientUid, int scoreOffset, int targetSdkVersion, int rotationOverride,
- int32_t deviceId, int32_t devicePolicy,
+ const std::string& cameraId, int scoreOffset, int targetSdkVersion,
+ int rotationOverride, const AttributionSourceState& clientAttribution,
+ int32_t devicePolicy,
/*out*/
sp<hardware::camera2::ICameraDeviceUser>* device);
@@ -196,7 +197,7 @@
virtual binder::Status isConcurrentSessionConfigurationSupported(
const std::vector<hardware::camera2::utils::CameraIdAndSessionConfiguration>& sessions,
- int targetSdkVersion, int32_t deviceId, int32_t devicePolicy,
+ int targetSdkVersion, const AttributionSourceState& clientAttribution, int32_t devicePolicy,
/*out*/bool* supported);
virtual binder::Status getLegacyParameters(
@@ -205,13 +206,16 @@
std::string* parameters);
virtual binder::Status setTorchMode(const std::string& cameraId, bool enabled,
- const sp<IBinder>& clientBinder, int32_t deviceId, int32_t devicePolicy);
-
- virtual binder::Status turnOnTorchWithStrengthLevel(const std::string& cameraId,
- int32_t torchStrength, const sp<IBinder>& clientBinder, int32_t deviceId,
+ const sp<IBinder>& clientBinder, const AttributionSourceState& clientAttribution,
int32_t devicePolicy);
- virtual binder::Status getTorchStrengthLevel(const std::string& cameraId, int32_t deviceId,
+ virtual binder::Status turnOnTorchWithStrengthLevel(const std::string& cameraId,
+ int32_t torchStrength, const sp<IBinder>& clientBinder,
+ const AttributionSourceState& clientAttribution,
+ int32_t devicePolicy);
+
+ virtual binder::Status getTorchStrengthLevel(const std::string& cameraId,
+ const AttributionSourceState& clientAttribution,
int32_t devicePolicy, int32_t* torchStrength);
virtual binder::Status notifySystemEvent(int32_t eventId,
@@ -247,19 +251,20 @@
const hardware::camera2::impl::CameraMetadataNative& sessionParams);
virtual binder::Status createDefaultRequest(const std::string& cameraId, int templateId,
- int32_t deviceId, int32_t devicePolicy,
+ const AttributionSourceState& clientAttribution, int32_t devicePolicy,
/*out*/
hardware::camera2::impl::CameraMetadataNative* request);
virtual binder::Status isSessionConfigurationWithParametersSupported(
const std::string& cameraId, int targetSdkVersion,
const SessionConfiguration& sessionConfiguration,
- int32_t deviceId, int32_t devicePolicy,
+ const AttributionSourceState& clientAttribution, int32_t devicePolicy,
/*out*/ bool* supported);
virtual binder::Status getSessionCharacteristics(
const std::string& cameraId, int targetSdkVersion, int rotationOverride,
- const SessionConfiguration& sessionConfiguration, int32_t deviceId,
+ const SessionConfiguration& sessionConfiguration,
+ const AttributionSourceState& clientAttribution,
int32_t devicePolicy, /*out*/ CameraMetadata* outMetadata);
// Extra permissions checks
diff --git a/services/camera/libcameraservice/aidl/AidlCameraService.cpp b/services/camera/libcameraservice/aidl/AidlCameraService.cpp
index 2886942..7f674bd 100644
--- a/services/camera/libcameraservice/aidl/AidlCameraService.cpp
+++ b/services/camera/libcameraservice/aidl/AidlCameraService.cpp
@@ -28,6 +28,7 @@
#include <binder/Status.h>
#include <camera/CameraUtils.h>
#include <hidl/HidlTransportSupport.h>
+#include <utils/AttributionAndPermissionUtils.h>
#include <utils/Utils.h>
namespace android::frameworks::cameraservice::service::implementation {
@@ -89,10 +90,15 @@
if (_aidl_return == nullptr) { return fromSStatus(SStatus::ILLEGAL_ARGUMENT); }
::android::CameraMetadata cameraMetadata;
+ AttributionSourceState clientAttribution =
+ AttributionAndPermissionUtils::buildAttributionSource(
+ hardware::ICameraService::USE_CALLING_PID,
+ hardware::ICameraService::USE_CALLING_UID,
+ kDefaultDeviceId);
UStatus ret = mCameraService->getCameraCharacteristics(in_cameraId,
mVndkVersion,
ROTATION_OVERRIDE_NONE,
- kDefaultDeviceId,
+ clientAttribution,
/* devicePolicy= */ 0,
&cameraMetadata);
if (!ret.isOk()) {
@@ -143,16 +149,20 @@
return fromSStatus(SStatus::UNKNOWN_ERROR);
}
sp<hardware::camera2::ICameraDeviceCallbacks> callbacks = hybridCallbacks;
+ AttributionSourceState clientAttribution =
+ AttributionAndPermissionUtils::buildAttributionSource(
+ hardware::ICameraService::USE_CALLING_PID,
+ hardware::ICameraService::USE_CALLING_UID,
+ kDefaultDeviceId);
+ clientAttribution.packageName = "";
+ clientAttribution.attributionTag = std::nullopt;
binder::Status serviceRet = mCameraService->connectDevice(
callbacks,
in_cameraId,
- std::string(),
- /* clientFeatureId= */{},
- hardware::ICameraService::USE_CALLING_UID,
/* scoreOffset= */ 0,
/* targetSdkVersion= */ __ANDROID_API_FUTURE__,
ROTATION_OVERRIDE_NONE,
- kDefaultDeviceId,
+ clientAttribution,
/* devicePolicy= */ 0,
&unstableDevice);
if (!serviceRet.isOk()) {
diff --git a/services/camera/libcameraservice/api2/HeicCompositeStream.cpp b/services/camera/libcameraservice/api2/HeicCompositeStream.cpp
index cf6ff84..ea4f6fd 100644
--- a/services/camera/libcameraservice/api2/HeicCompositeStream.cpp
+++ b/services/camera/libcameraservice/api2/HeicCompositeStream.cpp
@@ -1589,7 +1589,7 @@
// The chroma plane could be either Cb first, or Cr first. Take the
// smaller address.
uint8_t *src = std::min(yuvBuffer.dataCb, yuvBuffer.dataCr);
- MediaImage2::PlaneIndex dstPlane = codecUvOffsetDiff > 0 ? MediaImage2::U : MediaImage2::V;
+ MediaImage2::PlaneIndex dstPlane = codecUPlaneFirst ? MediaImage2::U : MediaImage2::V;
for (auto row = top/2; row < (top+height)/2; row++) {
uint8_t *dst = codecBuffer->data() + imageInfo->mPlane[dstPlane].mOffset +
imageInfo->mPlane[dstPlane].mRowInc * (row - top/2);
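The one-line fix above switches the destination chroma plane from the source-derived predicate (codecUvOffsetDiff > 0) to the codec's own layout flag (codecUPlaneFirst), so the copy lands in whichever of U/V the codec places first; the source-side order is still detected by taking the smaller of dataCb/dataCr, as in the unchanged context line. A minimal sketch of how such a flag can be derived from a MediaImage2 plane description follows; the helper name and header path are assumptions for illustration, not taken from this patch.

    #include <media/hardware/VideoAPI.h>  // android::MediaImage2 (path assumed)

    // Hypothetical helper: the codec expects the U (Cb) plane first iff the
    // U-plane offset precedes the V-plane offset in the MediaImage2 layout it
    // advertises for its input buffer.
    static bool isCodecUPlaneFirst(const android::MediaImage2& imageInfo) {
        return imageInfo.mPlane[android::MediaImage2::U].mOffset <
               imageInfo.mPlane[android::MediaImage2::V].mOffset;
    }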
diff --git a/services/camera/libcameraservice/hidl/HidlCameraService.cpp b/services/camera/libcameraservice/hidl/HidlCameraService.cpp
index d3b2a51..59e892f 100644
--- a/services/camera/libcameraservice/hidl/HidlCameraService.cpp
+++ b/services/camera/libcameraservice/hidl/HidlCameraService.cpp
@@ -26,6 +26,7 @@
#include <hidl/HidlTransportSupport.h>
#include <camera/CameraUtils.h>
+#include <utils/AttributionAndPermissionUtils.h>
#include <utils/Utils.h>
namespace android {
@@ -68,10 +69,15 @@
getCameraCharacteristics_cb _hidl_cb) {
android::CameraMetadata cameraMetadata;
HStatus status = HStatus::NO_ERROR;
+ AttributionSourceState clientAttribution =
+ AttributionAndPermissionUtils::buildAttributionSource(
+ hardware::ICameraService::USE_CALLING_PID,
+ hardware::ICameraService::USE_CALLING_UID,
+ kDefaultDeviceId);
binder::Status serviceRet =
mAidlICameraService->getCameraCharacteristics(cameraId,
/*targetSdkVersion*/__ANDROID_API_FUTURE__, ROTATION_OVERRIDE_NONE,
- kDefaultDeviceId, 0, &cameraMetadata);
+ clientAttribution, 0, &cameraMetadata);
HCameraMetadata hidlMetadata;
if (!serviceRet.isOk()) {
switch(serviceRet.serviceSpecificErrorCode()) {
@@ -119,11 +125,17 @@
return Void();
}
sp<hardware::camera2::ICameraDeviceCallbacks> callbacks = hybridCallbacks;
+ AttributionSourceState clientAttribution =
+ AttributionAndPermissionUtils::buildAttributionSource(
+ hardware::ICameraService::USE_CALLING_PID,
+ hardware::ICameraService::USE_CALLING_UID,
+ kDefaultDeviceId);
+ clientAttribution.packageName = "";
+ clientAttribution.attributionTag = std::nullopt;
binder::Status serviceRet = mAidlICameraService->connectDevice(
- callbacks, cameraId, std::string(), {},
- hardware::ICameraService::USE_CALLING_UID, 0/*oomScoreOffset*/,
+ callbacks, cameraId, 0/*oomScoreOffset*/,
/*targetSdkVersion*/__ANDROID_API_FUTURE__, ROTATION_OVERRIDE_NONE,
- kDefaultDeviceId, /*devicePolicy*/0, /*out*/&deviceRemote);
+ clientAttribution, /*devicePolicy*/0, /*out*/&deviceRemote);
HStatus status = HStatus::NO_ERROR;
if (!serviceRet.isOk()) {
ALOGE("%s: Unable to connect to camera device", __FUNCTION__);
diff --git a/services/camera/libcameraservice/libcameraservice_fuzzer/Android.bp b/services/camera/libcameraservice/libcameraservice_fuzzer/Android.bp
index 78a1fc8..53234f0 100644
--- a/services/camera/libcameraservice/libcameraservice_fuzzer/Android.bp
+++ b/services/camera/libcameraservice/libcameraservice_fuzzer/Android.bp
@@ -35,6 +35,7 @@
"libmedia_headers",
],
shared_libs: [
+ "framework-permission-aidl-cpp",
"libbinder",
"libbase",
"libutils",
diff --git a/services/camera/libcameraservice/libcameraservice_fuzzer/camera_service_fuzzer.cpp b/services/camera/libcameraservice/libcameraservice_fuzzer/camera_service_fuzzer.cpp
index bce0faf..ac2fd64 100644
--- a/services/camera/libcameraservice/libcameraservice_fuzzer/camera_service_fuzzer.cpp
+++ b/services/camera/libcameraservice/libcameraservice_fuzzer/camera_service_fuzzer.cpp
@@ -23,6 +23,7 @@
#include <CameraService.h>
#include <device3/Camera3StreamInterface.h>
+#include <android/content/AttributionSourceState.h>
#include <android/hardware/BnCameraServiceListener.h>
#include <android/hardware/camera2/BnCameraDeviceCallbacks.h>
#include <android/hardware/ICameraServiceListener.h>
@@ -219,7 +220,9 @@
} else {
camType = kCamType[mFuzzedDataProvider->ConsumeBool()];
}
- mCameraService->getNumberOfCameras(camType, kDefaultDeviceId, /*devicePolicy*/0, &mNumCameras);
+ AttributionSourceState clientAttribution;
+ clientAttribution.deviceId = kDefaultDeviceId;
+ mCameraService->getNumberOfCameras(camType, clientAttribution, /*devicePolicy*/0, &mNumCameras);
}
void CameraFuzzer::getCameraInformation(int32_t cameraId) {
@@ -238,14 +241,17 @@
hardware::camera2::params::VendorTagDescriptorCache cache;
mCameraService->getCameraVendorTagCache(&cache);
+ AttributionSourceState clientAttribution;
+ clientAttribution.deviceId = kDefaultDeviceId;
+
CameraInfo cameraInfo;
- mCameraService->getCameraInfo(cameraId, ROTATION_OVERRIDE_NONE, kDefaultDeviceId,
+ mCameraService->getCameraInfo(cameraId, ROTATION_OVERRIDE_NONE, clientAttribution,
/*devicePolicy*/0, &cameraInfo);
CameraMetadata metadata;
mCameraService->getCameraCharacteristics(cameraIdStr,
/*targetSdkVersion*/__ANDROID_API_FUTURE__, ROTATION_OVERRIDE_NONE,
- kDefaultDeviceId, /*devicePolicy*/0, &metadata);
+ clientAttribution, /*devicePolicy*/0, &metadata);
}
void CameraFuzzer::invokeCameraSound() {
@@ -327,13 +333,15 @@
std::string cameraIdStr = std::to_string(cameraId);
sp<IBinder> binder = new BBinder;
- mCameraService->setTorchMode(cameraIdStr, true, binder, kDefaultDeviceId, /*devicePolicy*/0);
+ AttributionSourceState clientAttribution;
+ clientAttribution.deviceId = kDefaultDeviceId;
+ mCameraService->setTorchMode(cameraIdStr, true, binder, clientAttribution, /*devicePolicy*/0);
ALOGV("Turned torch on.");
int32_t torchStrength = rand() % 5 + 1;
ALOGV("Changing torch strength level to %d", torchStrength);
mCameraService->turnOnTorchWithStrengthLevel(cameraIdStr, torchStrength, binder,
- kDefaultDeviceId, /*devicePolicy*/0);
- mCameraService->setTorchMode(cameraIdStr, false, binder, kDefaultDeviceId, /*devicePolicy*/0);
+ clientAttribution, /*devicePolicy*/0);
+ mCameraService->setTorchMode(cameraIdStr, false, binder, clientAttribution, /*devicePolicy*/0);
ALOGV("Turned torch off.");
}
@@ -349,13 +357,15 @@
::android::binder::Status rc;
sp<ICamera> cameraDevice;
- rc = mCameraService->connect(this, cameraId, std::string(),
- android::CameraService::USE_CALLING_UID,
- android::CameraService::USE_CALLING_PID,
+ AttributionSourceState clientAttribution;
+ clientAttribution.deviceId = kDefaultDeviceId;
+ clientAttribution.uid = android::CameraService::USE_CALLING_UID;
+ clientAttribution.pid = android::CameraService::USE_CALLING_PID;
+ rc = mCameraService->connect(this, cameraId,
/*targetSdkVersion*/ __ANDROID_API_FUTURE__,
ROTATION_OVERRIDE_OVERRIDE_TO_PORTRAIT,
/*forceSlowJpegMode*/false,
- kDefaultDeviceId, /*devicePolicy*/0, &cameraDevice);
+ clientAttribution, /*devicePolicy*/0, &cameraDevice);
if (!rc.isOk()) {
// camera not connected
return;
@@ -590,11 +600,15 @@
for (auto s : statuses) {
sp<TestCameraDeviceCallbacks> callbacks(new TestCameraDeviceCallbacks());
sp<hardware::camera2::ICameraDeviceUser> device;
- mCameraService->connectDevice(callbacks, s.cameraId, std::string(), {},
- android::CameraService::USE_CALLING_UID, 0/*oomScoreDiff*/,
- /*targetSdkVersion*/__ANDROID_API_FUTURE__,
+
+ AttributionSourceState clientAttribution;
+ clientAttribution.deviceId = kDefaultDeviceId;
+ clientAttribution.uid = android::CameraService::USE_CALLING_UID;
+ clientAttribution.pid = android::CameraService::USE_CALLING_PID;
+ mCameraService->connectDevice(callbacks, s.cameraId,
+ 0/*oomScoreDiff*/, /*targetSdkVersion*/__ANDROID_API_FUTURE__,
ROTATION_OVERRIDE_OVERRIDE_TO_PORTRAIT,
- kDefaultDeviceId, /*devicePolicy*/0, &device);
+ clientAttribution, /*devicePolicy*/0, &device);
if (device == nullptr) {
continue;
}
diff --git a/services/camera/libcameraservice/tests/CameraPermissionsTest.cpp b/services/camera/libcameraservice/tests/CameraPermissionsTest.cpp
index feb5540..50aeaca 100644
--- a/services/camera/libcameraservice/tests/CameraPermissionsTest.cpp
+++ b/services/camera/libcameraservice/tests/CameraPermissionsTest.cpp
@@ -14,6 +14,7 @@
* limitations under the License.
*/
+#include <android/content/AttributionSourceState.h>
#include <android/hardware/BnCameraServiceListener.h>
#include <android/hardware/BnCameraServiceProxy.h>
#include <android/hardware/camera2/BnCameraDeviceCallbacks.h>
@@ -223,6 +224,11 @@
// Test that camera connections fail with ERROR_DISABLED when the camera is disabled via device
// policy, and succeed when it isn't.
TEST_F(CameraPermissionsTest, TestCameraDisabled) {
+ AttributionSourceState clientAttribution;
+ clientAttribution.deviceId = kDefaultDeviceId;
+ clientAttribution.uid = android::CameraService::USE_CALLING_UID;
+ clientAttribution.pid = android::CameraService::USE_CALLING_PID;
+
std::vector<hardware::CameraStatus> statuses;
sp<TestCameraServiceListener> serviceListener = new TestCameraServiceListener();
sCameraService->addListenerTest(serviceListener, &statuses);
@@ -233,11 +239,10 @@
sp<TestCameraDeviceCallbacks> callbacks = new TestCameraDeviceCallbacks();
sp<hardware::camera2::ICameraDeviceUser> device;
binder::Status status =
- sCameraService->connectDevice(callbacks, s.cameraId, std::string(), {},
- android::CameraService::USE_CALLING_UID, 0/*oomScoreDiff*/,
- /*targetSdkVersion*/__ANDROID_API_FUTURE__,
+ sCameraService->connectDevice(callbacks, s.cameraId,
+ 0/*oomScoreDiff*/, /*targetSdkVersion*/__ANDROID_API_FUTURE__,
hardware::ICameraService::ROTATION_OVERRIDE_NONE,
- kDefaultDeviceId, /*devicePolicy*/0, &device);
+ clientAttribution, /*devicePolicy*/0, &device);
AutoDisconnectDevice autoDisconnect(device);
ASSERT_TRUE(!status.isOk()) << "connectDevice returned OK status";
ASSERT_EQ(status.serviceSpecificErrorCode(), hardware::ICameraService::ERROR_DISABLED)
@@ -249,11 +254,10 @@
sp<TestCameraDeviceCallbacks> callbacks = new TestCameraDeviceCallbacks();
sp<hardware::camera2::ICameraDeviceUser> device;
binder::Status status =
- sCameraService->connectDevice(callbacks, s.cameraId, std::string(), {},
- android::CameraService::USE_CALLING_UID, 0/*oomScoreDiff*/,
- /*targetSdkVersion*/__ANDROID_API_FUTURE__,
+ sCameraService->connectDevice(callbacks, s.cameraId,
+ 0/*oomScoreDiff*/, /*targetSdkVersion*/__ANDROID_API_FUTURE__,
hardware::ICameraService::ROTATION_OVERRIDE_NONE,
- kDefaultDeviceId, /*devicePolicy*/0, &device);
+ clientAttribution, /*devicePolicy*/0, &device);
AutoDisconnectDevice autoDisconnect(device);
ASSERT_TRUE(status.isOk());
}
@@ -261,6 +265,10 @@
// Test that consecutive camera connections succeed.
TEST_F(CameraPermissionsTest, TestConsecutiveConnections) {
+ AttributionSourceState clientAttribution;
+ clientAttribution.deviceId = kDefaultDeviceId;
+ clientAttribution.uid = android::CameraService::USE_CALLING_UID;
+
std::vector<hardware::CameraStatus> statuses;
sp<TestCameraServiceListener> serviceListener = new TestCameraServiceListener();
sCameraService->addListenerTest(serviceListener, &statuses);
@@ -270,20 +278,18 @@
sp<TestCameraDeviceCallbacks> callbacks = new TestCameraDeviceCallbacks();
sp<hardware::camera2::ICameraDeviceUser> deviceA, deviceB;
binder::Status status =
- sCameraService->connectDevice(callbacks, s.cameraId, std::string(), {},
- android::CameraService::USE_CALLING_UID, 0/*oomScoreDiff*/,
- /*targetSdkVersion*/__ANDROID_API_FUTURE__,
+ sCameraService->connectDevice(callbacks, s.cameraId,
+ 0/*oomScoreDiff*/, /*targetSdkVersion*/__ANDROID_API_FUTURE__,
hardware::ICameraService::ROTATION_OVERRIDE_NONE,
- kDefaultDeviceId, /*devicePolicy*/0, &deviceA);
+ clientAttribution, /*devicePolicy*/0, &deviceA);
AutoDisconnectDevice autoDisconnectA(deviceA);
ASSERT_TRUE(status.isOk()) << "Exception code " << status.exceptionCode() <<
" service specific error code " << status.serviceSpecificErrorCode();
status =
- sCameraService->connectDevice(callbacks, s.cameraId, std::string(), {},
- android::CameraService::USE_CALLING_UID, 0/*oomScoreDiff*/,
- /*targetSdkVersion*/__ANDROID_API_FUTURE__,
+ sCameraService->connectDevice(callbacks, s.cameraId,
+ 0/*oomScoreDiff*/, /*targetSdkVersion*/__ANDROID_API_FUTURE__,
hardware::ICameraService::ROTATION_OVERRIDE_NONE,
- kDefaultDeviceId, /*devicePolicy*/0, &deviceB);
+ clientAttribution, /*devicePolicy*/0, &deviceB);
AutoDisconnectDevice autoDisconnectB(deviceB);
ASSERT_TRUE(status.isOk()) << "Exception code " << status.exceptionCode() <<
" service specific error code " << status.serviceSpecificErrorCode();
@@ -293,6 +299,10 @@
// Test that consecutive camera connections succeed even when a nonzero oomScoreOffset is provided
// in the second call.
TEST_F(CameraPermissionsTest, TestConflictingOomScoreOffset) {
+ AttributionSourceState clientAttribution;
+ clientAttribution.deviceId = kDefaultDeviceId;
+ clientAttribution.uid = android::CameraService::USE_CALLING_UID;
+
std::vector<hardware::CameraStatus> statuses;
sp<TestCameraServiceListener> serviceListener = new TestCameraServiceListener();
sCameraService->addListenerTest(serviceListener, &statuses);
@@ -302,20 +312,18 @@
sp<TestCameraDeviceCallbacks> callbacks = new TestCameraDeviceCallbacks();
sp<hardware::camera2::ICameraDeviceUser> deviceA, deviceB;
binder::Status status =
- sCameraService->connectDevice(callbacks, s.cameraId, std::string(), {},
- android::CameraService::USE_CALLING_UID, 0/*oomScoreDiff*/,
- /*targetSdkVersion*/__ANDROID_API_FUTURE__,
+ sCameraService->connectDevice(callbacks, s.cameraId,
+ 0/*oomScoreDiff*/, /*targetSdkVersion*/__ANDROID_API_FUTURE__,
hardware::ICameraService::ROTATION_OVERRIDE_NONE,
- kDefaultDeviceId, /*devicePolicy*/0, &deviceA);
+ clientAttribution, /*devicePolicy*/0, &deviceA);
AutoDisconnectDevice autoDisconnectA(deviceA);
ASSERT_TRUE(status.isOk()) << "Exception code " << status.exceptionCode() <<
" service specific error code " << status.serviceSpecificErrorCode();
status =
- sCameraService->connectDevice(callbacks, s.cameraId, std::string(), {},
- android::CameraService::USE_CALLING_UID, 1/*oomScoreDiff*/,
- /*targetSdkVersion*/__ANDROID_API_FUTURE__,
+ sCameraService->connectDevice(callbacks, s.cameraId,
+ 1/*oomScoreDiff*/, /*targetSdkVersion*/__ANDROID_API_FUTURE__,
hardware::ICameraService::ROTATION_OVERRIDE_NONE,
- kDefaultDeviceId, /*devicePolicy*/0, &deviceB);
+ clientAttribution, /*devicePolicy*/0, &deviceB);
AutoDisconnectDevice autoDisconnectB(deviceB);
ASSERT_TRUE(status.isOk()) << "Exception code " << status.exceptionCode() <<
" service specific error code " << status.serviceSpecificErrorCode();
diff --git a/services/camera/libcameraservice/utils/AttributionAndPermissionUtils.h b/services/camera/libcameraservice/utils/AttributionAndPermissionUtils.h
index 4f238ab..4daab0f 100644
--- a/services/camera/libcameraservice/utils/AttributionAndPermissionUtils.h
+++ b/services/camera/libcameraservice/utils/AttributionAndPermissionUtils.h
@@ -41,6 +41,20 @@
mCameraService = cameraService;
}
+ static AttributionSourceState buildAttributionSource(int callingPid, int callingUid) {
+ AttributionSourceState attributionSource{};
+ attributionSource.pid = callingPid;
+ attributionSource.uid = callingUid;
+ return attributionSource;
+ }
+
+ static AttributionSourceState buildAttributionSource(int callingPid, int callingUid,
+ int32_t deviceId) {
+ AttributionSourceState attributionSource = buildAttributionSource(callingPid, callingUid);
+ attributionSource.deviceId = deviceId;
+ return attributionSource;
+ }
+
// Utilities handling Binder calling identities (previously in CameraThreadState)
virtual int getCallingUid();
virtual int getCallingPid();
@@ -123,17 +137,13 @@
: mAttributionAndPermissionUtils(attributionAndPermissionUtils) { }
static AttributionSourceState buildAttributionSource(int callingPid, int callingUid) {
- AttributionSourceState attributionSource{};
- attributionSource.pid = callingPid;
- attributionSource.uid = callingUid;
- return attributionSource;
+ return AttributionAndPermissionUtils::buildAttributionSource(callingPid, callingUid);
}
static AttributionSourceState buildAttributionSource(int callingPid, int callingUid,
int32_t deviceId) {
- AttributionSourceState attributionSource = buildAttributionSource(callingPid, callingUid);
- attributionSource.deviceId = deviceId;
- return attributionSource;
+ return AttributionAndPermissionUtils::buildAttributionSource(
+ callingPid, callingUid, deviceId);
}
static AttributionSourceState buildAttributionSource(int callingPid, int callingUid,
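With buildAttributionSource() now available as static helpers on AttributionAndPermissionUtils, shim code that has no AttributionAndPermissionUtilsEncapsulator instance (such as the AIDL and HIDL frontends earlier in this patch) can construct the attribution source directly. A short usage sketch mirroring those call sites:

    // Build an attribution source for a calling-identity client on the default
    // device, then blank out the package fields as the shims do.
    AttributionSourceState clientAttribution =
            AttributionAndPermissionUtils::buildAttributionSource(
                    hardware::ICameraService::USE_CALLING_PID,
                    hardware::ICameraService::USE_CALLING_UID,
                    kDefaultDeviceId);
    clientAttribution.packageName = "";
    clientAttribution.attributionTag = std::nullopt;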
diff --git a/services/camera/virtualcamera/util/EglSurfaceTexture.cc b/services/camera/virtualcamera/util/EglSurfaceTexture.cc
index c81d36d..465531b 100644
--- a/services/camera/virtualcamera/util/EglSurfaceTexture.cc
+++ b/services/camera/virtualcamera/util/EglSurfaceTexture.cc
@@ -31,6 +31,12 @@
namespace android {
namespace companion {
namespace virtualcamera {
+namespace {
+
+// Maximum number of buffers the producer can dequeue without blocking.
+constexpr int kBufferProducerMaxDequeueBufferCount = 64;
+
+} // namespace
EglSurfaceTexture::EglSurfaceTexture(const uint32_t width, const uint32_t height)
: mWidth(width), mHeight(height) {
@@ -40,6 +46,10 @@
return;
}
BufferQueue::createBufferQueue(&mBufferProducer, &mBufferConsumer);
+ // Set the max dequeued buffer count for the producer to its maximum value to
+ // prevent blocking when dequeuing input buffers.
+ mBufferProducer->setMaxDequeuedBufferCount(
+ kBufferProducerMaxDequeueBufferCount);
mGlConsumer = sp<GLConsumer>::make(
mBufferConsumer, mTextureId, GLConsumer::TEXTURE_EXTERNAL, false, false);
mGlConsumer->setName(String8("VirtualCameraEglSurfaceTexture"));
@@ -75,7 +85,26 @@
}
GLuint EglSurfaceTexture::updateTexture() {
- mGlConsumer->updateTexImage();
+ int previousFrameId;
+ int framesAdvance = 0;
+ // Consume buffers one at a time.
+ // Contrary to the code comments in GLConsumer, the GLConsumer acquires the
+ // next queued buffer (not the most recently queued buffer).
+ while (true) {
+ previousFrameId = mGlConsumer->getFrameNumber();
+ mGlConsumer->updateTexImage();
+ int currentFrameId = mGlConsumer->getFrameNumber();
+ if (previousFrameId == currentFrameId) {
+ // The frame number didn't change after updating the texture, which means
+ // we're at the end of the queue and the currently attached buffer is the
+ // most recent one.
+ break;
+ }
+
+ framesAdvance++;
+ previousFrameId = currentFrameId;
+ }
+ ALOGV("%s: Advanced %d frames", __func__, framesAdvance);
return mTextureId;
}
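Taken together, the EglSurfaceTexture changes raise the producer's dequeue limit and make updateTexture() walk the consumer queue down to the newest frame, because GLConsumer::updateTexImage() only advances one queued buffer per call. A rough caller-side view, assuming a getSurface() accessor and a producer-side render helper that are not part of this patch:

    // Producer queues several frames without blocking (limit raised to 64 above);
    // one updateTexture() call then drains the queue and leaves the most recent
    // frame bound to the GL texture.
    EglSurfaceTexture surfaceTexture(/*width=*/640, /*height=*/480);
    sp<Surface> surface = surfaceTexture.getSurface();  // accessor assumed
    for (int i = 0; i < 5; ++i) {
        renderFrameInto(surface);  // hypothetical producer helper
    }
    GLuint textureId = surfaceTexture.updateTexture();  // latest frame wins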