audio policy: force initial audio patch after opening an output. am: 0ca0940321
Original change: https://googleplex-android-review.googlesource.com/c/platform/frameworks/av/+/27382241
Change-Id: If6a05513b52e0ea28a7267ae6ab0d0b4fedbe620
Signed-off-by: Automerger Merge Worker <android-build-automerger-merge-worker@system.gserviceaccount.com>
diff --git a/Android.bp b/Android.bp
index 72b8721..c11e38a 100644
--- a/Android.bp
+++ b/Android.bp
@@ -34,6 +34,24 @@
],
}
+aidl_interface_defaults {
+ name: "audio-aidl-defaults",
+ unstable: true,
+ host_supported: true,
+ backend: {
+ cpp: {
+ enabled: true,
+ },
+ java: {
+ enabled: true,
+ },
+ rust: {
+ enabled: true,
+ },
+ },
+
+}
+
aidl_interface {
name: "av-types-aidl",
unstable: true,
@@ -71,6 +89,18 @@
},
}
+aidl_interface {
+ name: "audio-permission-aidl",
+ // TODO remove
+ vendor_available: true,
+ double_loadable: true,
+ defaults: ["audio-aidl-defaults"],
+ local_include_dir: "aidl",
+ srcs: [
+ "aidl/com/android/media/permission/*",
+ ],
+}
+
cc_library_headers {
name: "av-headers",
export_include_dirs: ["include"],
@@ -133,3 +163,19 @@
frozen: true,
}
+
+latest_av_audio_types_aidl = "av-audio-types-aidl-V1"
+
+cc_defaults {
+ name: "latest_av_audio_types_aidl_ndk_shared",
+ shared_libs: [
+ latest_av_audio_types_aidl + "-ndk",
+ ],
+}
+
+cc_defaults {
+ name: "latest_av_audio_types_aidl_ndk_static",
+ static_libs: [
+ latest_av_audio_types_aidl + "-ndk",
+ ],
+}
diff --git a/PREUPLOAD.cfg b/PREUPLOAD.cfg
index 1cf63b0..e9b757b 100644
--- a/PREUPLOAD.cfg
+++ b/PREUPLOAD.cfg
@@ -4,9 +4,30 @@
hidden_api_txt_checksorted_hook = ${REPO_ROOT}/tools/platform-compat/hiddenapi/checksorted_sha.sh ${PREUPLOAD_COMMIT} ${REPO_ROOT}
[Builtin Hooks]
+bpfmt = true
clang_format = true
[Builtin Hooks Options]
+# Enable sort and limit subfolder checks
+bpfmt = -s
+ media/audio/
+ media/audioserver/
+ media/libaaudio/
+ media/libaudioclient/
+ media/libaudiofoundation/
+ media/libaudiohal/
+ media/libaudioprocessing/
+ media/libaudiousecasevalidation/
+ media/libeffects/
+ media/libmediametrics/
+ media/libnbaio/
+ media/libnblog/
+ services/audioflinger/
+ services/audioparameterparser/
+ services/audiopolicy/
+ services/medialog/
+ services/oboeservice/
+
# Only turn on clang-format check for the following subfolders.
clang_format = --commit ${PREUPLOAD_COMMIT} --style file --extensions c,h,cc,cpp
media/libaudioclient/tests/
diff --git a/aidl/com/android/media/permission/INativePermissionController.aidl b/aidl/com/android/media/permission/INativePermissionController.aidl
new file mode 100644
index 0000000..5766e33
--- /dev/null
+++ b/aidl/com/android/media/permission/INativePermissionController.aidl
@@ -0,0 +1,36 @@
+/*
+ * Copyright (C) 2024 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.android.media.permission;
+
+import com.android.media.permission.UidPackageState;
+
+/**
+ * This interface is used by system_server to communicate permission information
+ * downwards towards native services.
+ * {@hide}
+ */
+interface INativePermissionController {
+ /**
+ * Initialize app-ids and their corresponding packages, to be used for package validation.
+ */
+ void populatePackagesForUids(in List<UidPackageState> initialPackageStates);
+ /**
+ * Replace or populate the list of packages associated with a given uid.
+ * If the list is empty, the uid no longer has any associated packages.
+ */
+ void updatePackagesForUid(in UidPackageState newPackageState);
+}
diff --git a/services/camera/libcameraservice/utils/CameraThreadState.h b/aidl/com/android/media/permission/UidPackageState.aidl
similarity index 64%
rename from services/camera/libcameraservice/utils/CameraThreadState.h
rename to aidl/com/android/media/permission/UidPackageState.aidl
index e1a70de..41b8c65 100644
--- a/services/camera/libcameraservice/utils/CameraThreadState.h
+++ b/aidl/com/android/media/permission/UidPackageState.aidl
@@ -1,5 +1,5 @@
/*
- * Copyright (C) 2018 The Android Open Source Project
+ * Copyright (C) 2024 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
@@ -14,18 +14,13 @@
* limitations under the License.
*/
-#include <stdint.h>
+package com.android.media.permission;
-namespace android {
-class CameraThreadState {
-public:
- static int64_t clearCallingIdentity();
-
- static void restoreCallingIdentity(int64_t token);
-
- static int getCallingUid();
-
- static int getCallingPid();
-};
-
-} // android
+/**
+ * Entity representing the package names associated with a particular uid/app-id
+ * {@hide}
+ */
+parcelable UidPackageState {
+ int uid;
+ @utf8InCpp List<String> packageNames;
+}
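
The two files above define the permission channel from system_server down to native services: UidPackageState pairs a uid with its package names, and INativePermissionController receives the initial snapshot plus incremental updates. The sketch below shows one way a native service could cache that state. It is illustrative only; the generated header path, base class, and argument types are assumed from the standard AIDL cpp backend for the new audio-permission-aidl module and are not part of this change.

    // Illustrative sketch: cache uid -> package names as delivered by system_server.
    #include <map>
    #include <mutex>
    #include <string>
    #include <vector>

    #include <com/android/media/permission/BnNativePermissionController.h>  // assumed path

    using ::com::android::media::permission::BnNativePermissionController;
    using ::com::android::media::permission::UidPackageState;

    class PermissionStateCache : public BnNativePermissionController {
      public:
        ::android::binder::Status populatePackagesForUids(
                const std::vector<UidPackageState>& initialPackageStates) override {
            std::lock_guard<std::mutex> lock(mLock);
            mPackagesByUid.clear();
            for (const auto& state : initialPackageStates) {
                mPackagesByUid[state.uid] = state.packageNames;
            }
            return ::android::binder::Status::ok();
        }

        ::android::binder::Status updatePackagesForUid(
                const UidPackageState& newPackageState) override {
            std::lock_guard<std::mutex> lock(mLock);
            if (newPackageState.packageNames.empty()) {
                // An empty list means the uid no longer has any associated packages.
                mPackagesByUid.erase(newPackageState.uid);
            } else {
                mPackagesByUid[newPackageState.uid] = newPackageState.packageNames;
            }
            return ::android::binder::Status::ok();
        }

      private:
        std::mutex mLock;
        std::map<int32_t, std::vector<std::string>> mPackagesByUid;
    };
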
diff --git a/camera/Android.bp b/camera/Android.bp
index d0f8e7e..75c2999 100644
--- a/camera/Android.bp
+++ b/camera/Android.bp
@@ -169,7 +169,6 @@
srcs: [
"aidl/android/hardware/CameraExtensionSessionStats.aidl",
"aidl/android/hardware/ICameraService.aidl",
- "aidl/android/hardware/CameraIdRemapping.aidl",
"aidl/android/hardware/ICameraServiceListener.aidl",
"aidl/android/hardware/ICameraServiceProxy.aidl",
"aidl/android/hardware/camera2/ICameraDeviceCallbacks.aidl",
diff --git a/camera/Camera.cpp b/camera/Camera.cpp
index 6b040ab..8018390 100644
--- a/camera/Camera.cpp
+++ b/camera/Camera.cpp
@@ -70,11 +70,12 @@
}
sp<Camera> Camera::connect(int cameraId, const std::string& clientPackageName,
- int clientUid, int clientPid, int targetSdkVersion, bool overrideToPortrait,
- bool forceSlowJpegMode)
+ int clientUid, int clientPid, int targetSdkVersion, int rotationOverride,
+ bool forceSlowJpegMode, int32_t deviceId, int32_t devicePolicy)
{
return CameraBaseT::connect(cameraId, clientPackageName, clientUid,
- clientPid, targetSdkVersion, overrideToPortrait, forceSlowJpegMode);
+ clientPid, targetSdkVersion, rotationOverride, forceSlowJpegMode, deviceId,
+ devicePolicy);
}
status_t Camera::reconnect()
diff --git a/camera/CameraBase.cpp b/camera/CameraBase.cpp
index 6759f3b..d7415a3 100644
--- a/camera/CameraBase.cpp
+++ b/camera/CameraBase.cpp
@@ -73,6 +73,9 @@
if (res != OK) return res;
res = parcel->writeString16(toString16(clientPackage));
+ if (res != OK) return res;
+
+ res = parcel->writeInt32(deviceId);
return res;
}
@@ -97,6 +100,7 @@
if (res != OK) return res;
clientPackage = toStdString(tempClientPackage);
+ res = parcel->readInt32(&deviceId);
return res;
}
@@ -123,7 +127,7 @@
};
sp<DeathNotifier> gDeathNotifier;
-}; // namespace anonymous
+} // namespace anonymous
///////////////////////////////////////////////////////////
// CameraBase definition
@@ -159,7 +163,8 @@
sp<TCam> CameraBase<TCam, TCamTraits>::connect(int cameraId,
const std::string& clientPackageName,
int clientUid, int clientPid, int targetSdkVersion,
- bool overrideToPortrait, bool forceSlowJpegMode)
+ int rotationOverride, bool forceSlowJpegMode,
+ int32_t deviceId, int32_t devicePolicy)
{
ALOGV("%s: connect", __FUNCTION__);
sp<TCam> c = new TCam(cameraId);
@@ -169,11 +174,11 @@
binder::Status ret;
if (cs != nullptr) {
TCamConnectService fnConnectService = TCamTraits::fnConnectService;
- ALOGI("Connect camera (legacy API) - overrideToPortrait %d, forceSlowJpegMode %d",
- overrideToPortrait, forceSlowJpegMode);
+ ALOGI("Connect camera (legacy API) - rotationOverride %d, forceSlowJpegMode %d",
+ rotationOverride, forceSlowJpegMode);
ret = (cs.get()->*fnConnectService)(cl, cameraId, clientPackageName, clientUid,
- clientPid, targetSdkVersion, overrideToPortrait, forceSlowJpegMode,
- /*out*/ &c->mCamera);
+ clientPid, targetSdkVersion, rotationOverride, forceSlowJpegMode, deviceId,
+ devicePolicy, /*out*/ &c->mCamera);
}
if (ret.isOk() && c->mCamera != nullptr) {
IInterface::asBinder(c->mCamera)->linkToDeath(c);
@@ -252,7 +257,7 @@
}
template <typename TCam, typename TCamTraits>
-int CameraBase<TCam, TCamTraits>::getNumberOfCameras() {
+int CameraBase<TCam, TCamTraits>::getNumberOfCameras(int32_t deviceId, int32_t devicePolicy) {
const sp<::android::hardware::ICameraService> cs = getCameraService();
if (!cs.get()) {
@@ -261,8 +266,8 @@
}
int32_t count;
binder::Status res = cs->getNumberOfCameras(
- ::android::hardware::ICameraService::CAMERA_TYPE_BACKWARD_COMPATIBLE,
- &count);
+ ::android::hardware::ICameraService::CAMERA_TYPE_BACKWARD_COMPATIBLE, deviceId,
+ devicePolicy, &count);
if (!res.isOk()) {
ALOGE("Error reading number of cameras: %s",
res.toString8().c_str());
@@ -274,11 +279,12 @@
// this can be in BaseCamera but it should be an instance method
template <typename TCam, typename TCamTraits>
status_t CameraBase<TCam, TCamTraits>::getCameraInfo(int cameraId,
- bool overrideToPortrait,
+ int rotationOverride, int32_t deviceId, int32_t devicePolicy,
struct hardware::CameraInfo* cameraInfo) {
const sp<::android::hardware::ICameraService> cs = getCameraService();
if (cs == 0) return UNKNOWN_ERROR;
- binder::Status res = cs->getCameraInfo(cameraId, overrideToPortrait, cameraInfo);
+ binder::Status res = cs->getCameraInfo(cameraId, rotationOverride, deviceId, devicePolicy,
+ cameraInfo);
return res.isOk() ? OK : res.serviceSpecificErrorCode();
}
diff --git a/camera/CameraSessionStats.cpp b/camera/CameraSessionStats.cpp
index 057ec99..450bdd8 100644
--- a/camera/CameraSessionStats.cpp
+++ b/camera/CameraSessionStats.cpp
@@ -439,6 +439,16 @@
return err;
}
+ auto mostRequestedFpsRange = std::make_pair(0,0);
+ if ((err = parcel->readInt32(&mostRequestedFpsRange.first)) != OK) {
+ ALOGE("%s: Failed to read frame rate range min info!", __FUNCTION__);
+ return err;
+ }
+ if ((err = parcel->readInt32(&mostRequestedFpsRange.second)) != OK) {
+ ALOGE("%s: Failed to read frame rate range max info!", __FUNCTION__);
+ return err;
+ }
+
mCameraId = toStdString(id);
mFacing = facing;
mNewCameraState = newCameraState;
@@ -460,6 +470,7 @@
mUsedZoomOverride = usedZoomOverride;
mSessionIndex = sessionIdx;
mCameraExtensionSessionStats = extStats;
+ mMostRequestedFpsRange = mostRequestedFpsRange;
return OK;
}
@@ -577,6 +588,16 @@
return err;
}
+ if ((err = parcel->writeInt32(mMostRequestedFpsRange.first)) != OK) {
+ ALOGE("%s: Failed to write frame rate range min info!", __FUNCTION__);
+ return err;
+ }
+
+ if ((err = parcel->writeInt32(mMostRequestedFpsRange.second)) != OK) {
+ ALOGE("%s: Failed to write frame rate range max info!", __FUNCTION__);
+ return err;
+ }
+
return OK;
}
diff --git a/camera/aidl/android/hardware/CameraExtensionSessionStats.aidl b/camera/aidl/android/hardware/CameraExtensionSessionStats.aidl
index 1c81831..a3c0e69 100644
--- a/camera/aidl/android/hardware/CameraExtensionSessionStats.aidl
+++ b/camera/aidl/android/hardware/CameraExtensionSessionStats.aidl
@@ -66,4 +66,9 @@
* true if advanced extensions are being used, false otherwise
*/
boolean isAdvanced = false;
+
+ /**
+ * Format of image capture request
+ */
+ int captureFormat;
}
\ No newline at end of file
diff --git a/camera/aidl/android/hardware/CameraIdRemapping.aidl b/camera/aidl/android/hardware/CameraIdRemapping.aidl
deleted file mode 100644
index 453f696..0000000
--- a/camera/aidl/android/hardware/CameraIdRemapping.aidl
+++ /dev/null
@@ -1,50 +0,0 @@
-/*
- * Copyright (C) 2023 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package android.hardware;
-
-/**
- * Specifies a remapping of Camera Ids.
- *
- * Example: For a given package, a remapping of camera id0 to id1 specifies
- * that any operation to perform on id0 should instead be performed on id1.
- *
- * @hide
- */
-parcelable CameraIdRemapping {
- /**
- * Specifies remapping of Camera Ids per package.
- */
- parcelable PackageIdRemapping {
- /** Package Name (e.g. com.android.xyz). */
- @utf8InCpp String packageName;
- /**
- * Ordered list of Camera Ids to replace. Only Camera Ids present in this list will be
- * affected.
- */
- @utf8InCpp List<String> cameraIdsToReplace;
- /**
- * Ordered list of updated Camera Ids, where updatedCameraIds[i] corresponds to
- * the updated camera id for cameraIdsToReplace[i].
- */
- @utf8InCpp List<String> updatedCameraIds;
- }
-
- /**
- * List of Camera Id remappings to perform.
- */
- List<PackageIdRemapping> packageIdRemappings;
-}
diff --git a/camera/aidl/android/hardware/ICameraService.aidl b/camera/aidl/android/hardware/ICameraService.aidl
index 4bea896..d9a0934 100644
--- a/camera/aidl/android/hardware/ICameraService.aidl
+++ b/camera/aidl/android/hardware/ICameraService.aidl
@@ -30,7 +30,6 @@
import android.hardware.camera2.impl.CameraMetadataNative;
import android.hardware.ICameraServiceListener;
import android.hardware.CameraInfo;
-import android.hardware.CameraIdRemapping;
import android.hardware.CameraStatus;
import android.hardware.CameraExtensionSessionStats;
@@ -63,14 +62,50 @@
const int CAMERA_TYPE_ALL = 1;
/**
- * Return the number of camera devices available in the system
+ * Return the number of camera devices available in the system.
+ *
+ * @param type The type of the camera, can be either CAMERA_TYPE_BACKWARD_COMPATIBLE
+ * or CAMERA_TYPE_ALL.
+ * @param deviceId The device id of the context associated with the caller.
+ * @param devicePolicy The camera policy of the device of the associated context (default
+ * policy for default device context). Only virtual cameras are exposed for a
+ * custom policy, and only real cameras are exposed for the default policy.
*/
- int getNumberOfCameras(int type);
+ int getNumberOfCameras(int type, int deviceId, int devicePolicy);
/**
- * Fetch basic camera information for a camera device
+ * If changed, reflect in
+ * frameworks/base/core/java/android/hardware/camera2/CameraManager.java.
+ * We use an enum here since the decision to override to portrait mode / fetch the
+ * rotationOverride, as it exists in CameraManager right now, is based on a static system
+ * property and not on something that changes dynamically, say on fold state. As a result,
+ * a boolean alone cannot differentiate between the case where cameraserver should
+ * override to portrait (sensor orientation is 0 or 180) and the case where it should just
+ * rotate the sensor feed (sensor orientation is 90 or 270).
*/
- CameraInfo getCameraInfo(int cameraId, boolean overrideToPortrait);
+ const int ROTATION_OVERRIDE_NONE = 0;
+ const int ROTATION_OVERRIDE_OVERRIDE_TO_PORTRAIT = 1;
+ const int ROTATION_OVERRIDE_ROTATION_ONLY = 2;
+
+ /**
+ * Fetch basic camera information for a camera.
+ *
+ * @param cameraId The ID of the camera to fetch information for.
+ * @param rotationOverride Whether to override the sensor orientation information to
+ * correspond to portrait: {@link ICameraService#ROTATION_OVERRIDE_OVERRIDE_TO_PORTRAIT}
+ * will override the sensor orientation and rotate and crop, while {@link
+ * ICameraService#ROTATION_OVERRIDE_ROTATION_ONLY} will rotate and crop the camera feed
+ * without changing the sensor orientation.
+ * @param deviceId The device id of the context associated with the caller.
+ * @param devicePolicy The camera policy of the device of the associated context (default
+ * policy for default device context). Only virtual cameras are exposed for a
+ * custom policy, and only real cameras are exposed for the default policy.
+ * @return CameraInfo for the camera.
+ */
+ CameraInfo getCameraInfo(int cameraId, int rotationOverride, int deviceId,
+ int devicePolicy);
/**
* Default UID/PID values for non-privileged callers of
@@ -80,19 +115,53 @@
const int USE_CALLING_PID = -1;
/**
- * Open a camera device through the old camera API
+ * Open a camera device through the old camera API.
+ *
+ * @param cameraId The ID of the camera to open.
+ * @param opPackageName The package name to report for the app-ops.
+ * @param clientUid UID for the calling client.
+ * @param clientPid PID for the calling client.
+ * @param targetSdkVersion the target sdk level of the application calling this function.
+ * @param rotationOverride Whether to override the sensor orientation information to
+ * correspond to portrait: {@link ICameraService#ROTATION_OVERRIDE_OVERRIDE_TO_PORTRAIT}
+ * will override the sensor orientation and rotate and crop, while {@link
+ * ICameraService#ROTATION_OVERRIDE_ROTATION_ONLY} will rotate and crop the camera feed
+ * without changing the sensor orientation.
+ * @param forceSlowJpegMode Whether to force slow jpeg mode.
+ * @param deviceId The device id of the context associated with the caller.
+ * @param devicePolicy The camera policy of the device of the associated context (default
+ * policy for default device context). Only virtual cameras are exposed for a
+ * custom policy, and only real cameras are exposed for the default policy.
*/
ICamera connect(ICameraClient client,
int cameraId,
@utf8InCpp String opPackageName,
int clientUid, int clientPid,
int targetSdkVersion,
- boolean overrideToPortrait,
- boolean forceSlowJpegMode);
+ int rotationOverride,
+ boolean forceSlowJpegMode,
+ int deviceId,
+ int devicePolicy);
/**
- * Open a camera device through the new camera API
- * Only supported for device HAL versions >= 3.2
+ * Open a camera device through the new camera API.
+ * Only supported for device HAL versions >= 3.2.
+ *
+ * @param cameraId The ID of the camera to open.
+ * @param opPackageName The package name to report for the app-ops.
+ * @param clientUid UID for the calling client.
+ * @param targetSdkVersion the target sdk level of the application calling this function.
+ * @param rotationOverride Whether to override the sensor orientation information to
+ * correspond to portrait: {@link ICameraService#ROTATION_OVERRIDE_OVERRIDE_TO_PORTRAIT}
+ * will override the sensor orientation and rotate and crop, while {@link
+ * ICameraService#ROTATION_OVERRIDE_ROTATION_ONLY} will rotate and crop the camera feed
+ * without changing the sensor orientation.
+ * @param deviceId The device id of the context associated with the caller.
+ * @param devicePolicy The camera policy of the device of the associated context (default
+ * policy for default device context). Only virtual cameras are exposed for a
+ * custom policy, and only real cameras are exposed for the default policy.
*/
ICameraDeviceUser connectDevice(ICameraDeviceCallbacks callbacks,
@utf8InCpp String cameraId,
@@ -100,7 +169,9 @@
@nullable @utf8InCpp String featureId,
int clientUid, int oomScoreOffset,
int targetSdkVersion,
- boolean overrideToPortrait);
+ int rotationOverride,
+ int deviceId,
+ int devicePolicy);
/**
* Add listener for changes to camera device and flashlight state.
@@ -118,35 +189,24 @@
ConcurrentCameraIdCombination[] getConcurrentCameraIds();
/**
- * Check whether a particular set of session configurations are concurrently supported by the
- * corresponding camera ids.
- *
- * @param sessions the set of camera id and session configuration pairs to be queried.
- * @param targetSdkVersion the target sdk level of the application calling this function.
- * @return true - the set of concurrent camera id and stream combinations is supported.
- * false - the set of concurrent camera id and stream combinations is not supported
- * OR the method was called with a set of camera ids not returned by
- * getConcurrentCameraIds().
- */
+ * Check whether a particular set of session configurations are concurrently supported by the
+ * corresponding camera ids.
+ *
+ * @param sessions the set of camera id and session configuration pairs to be queried.
+ * @param targetSdkVersion the target sdk level of the application calling this function.
+ * @param deviceId The device id of the context associated with the caller.
+ * @param devicePolicy The camera policy of the device of the associated context (default
+ * policy for default device context). Only virtual cameras are exposed for a
+ * custom policy, and only real cameras are exposed for the default policy.
+ * @return true - the set of concurrent camera id and stream combinations is supported.
+ * false - the set of concurrent camera id and stream combinations is not supported
+ * OR the method was called with a set of camera ids not returned by
+ * getConcurrentCameraIds().
+ */
boolean isConcurrentSessionConfigurationSupported(
in CameraIdAndSessionConfiguration[] sessions,
- int targetSdkVersion);
-
- /**
- * Remap Camera Ids in the CameraService.
- *
- * Once this is in effect, all binder calls in the ICameraService that
- * use logicalCameraId should consult remapping state to arrive at the
- * correct cameraId to perform the operation on.
- *
- * Note: Before the new cameraIdRemapping state is applied, the previous
- * state is cleared.
- *
- * @param cameraIdRemapping the camera ids to remap. Sending an unpopulated
- * cameraIdRemapping object will result in clearing of any previous
- * cameraIdRemapping state in the camera service.
- */
- void remapCameraIds(in CameraIdRemapping cameraIdRemapping);
+ int targetSdkVersion, int deviceId, int devicePolicy);
/**
* Inject Session Params into an existing camera session.
@@ -168,9 +228,23 @@
/**
* Read the static camera metadata for a camera device.
* Only supported for device HAL versions >= 3.2
+ *
+ * @param cameraId The ID of the camera to fetch metadata for.
+ * @param targetSdkVersion the target sdk level of the application calling this function.
+ * @param rotationOverride Whether to override the sensor orientation information to
+ * correspond to portrait: {@link ICameraService#ROTATION_OVERRIDE_OVERRIDE_TO_PORTRAIT}
+ * will override the sensor orientation and rotate and crop, while {@link
+ * ICameraService#ROTATION_OVERRIDE_ROTATION_ONLY} will rotate and crop the camera feed
+ * without changing the sensor orientation.
+ * @param deviceId The device id of the context associated with the caller.
+ * @param devicePolicy The camera policy of the device of the associated context (default
+ * policy for default device context). Only virtual cameras are exposed for a
+ * custom policy, and only real cameras are exposed for the default policy.
+ * @return Characteristics for the given camera.
*/
CameraMetadataNative getCameraCharacteristics(@utf8InCpp String cameraId, int targetSdkVersion,
- boolean overrideToPortrait);
+ int rotationOverride, int deviceId, int devicePolicy);
/**
* Read in the vendor tag descriptors from the camera module HAL.
@@ -206,14 +280,46 @@
ICameraInjectionSession injectCamera(@utf8InCpp String packageName, @utf8InCpp String internalCamId,
@utf8InCpp String externalCamId, in ICameraInjectionCallback CameraInjectionCallback);
- void setTorchMode(@utf8InCpp String cameraId, boolean enabled, IBinder clientBinder);
+ /**
+ * Set the torch mode for a camera device.
+ *
+ * @param cameraId The ID of the camera to set torch mode for.
+ * @param deviceId The device id of the context associated with the caller.
+ * @param devicePolicy The camera policy of the device of the associated context (default
+ * policy for default device context). Only virtual cameras are exposed for a
+ * custom policy, and only real cameras are exposed for the default policy.
+ */
+ void setTorchMode(@utf8InCpp String cameraId, boolean enabled, IBinder clientBinder,
+ int deviceId, int devicePolicy);
- // Change the brightness level of the flash unit associated with cameraId to strengthLevel.
- // If the torch is in OFF state and strengthLevel > 0 then the torch will also be turned ON.
- void turnOnTorchWithStrengthLevel(@utf8InCpp String cameraId, int strengthLevel, IBinder clientBinder);
+ /**
+ * Change the brightness level of the flash unit associated with cameraId to strengthLevel.
+ * If the torch is in OFF state and strengthLevel > 0 then the torch will also be turned ON.
+ *
+ * @param cameraId The ID of the camera.
+ * @param strengthLevel The torch strength level to set for the camera.
+ * @param deviceId The device id of the context associated with the caller.
+ * @param devicePolicy The camera policy of the device of the associated context (default
+ * policy for default device context). Only virtual cameras are exposed for a
+ * custom policy, and only real cameras are exposed for the default policy.
+ */
+ void turnOnTorchWithStrengthLevel(@utf8InCpp String cameraId, int strengthLevel,
+ IBinder clientBinder, int deviceId, int devicePolicy);
- // Get the brightness level of the flash unit associated with cameraId.
- int getTorchStrengthLevel(@utf8InCpp String cameraId);
+ /**
+ * Get the brightness level of the flash unit associated with cameraId.
+ *
+ * @param cameraId The ID of the camera.
+ * @param deviceId The device id of the context associated with the caller.
+ * @param devicePolicy The camera policy of the device of the associated context (default
+ * policy for default device context). Only virtual cameras are exposed for a
+ * custom policy, and only real cameras are exposed for the default policy.
+ * @return Torch strength level for the camera.
+ */
+ int getTorchStrengthLevel(@utf8InCpp String cameraId, int deviceId, int devicePolicy);
/**
* Notify the camera service of a system event. Should only be called from system_server.
@@ -274,32 +380,64 @@
const int DEVICE_STATE_FOLDED = 4;
const int DEVICE_STATE_LAST_FRAMEWORK_BIT = 0x80000000; // 1 << 31;
- // Create a CaptureRequest metadata based on template id
- CameraMetadataNative createDefaultRequest(@utf8InCpp String cameraId, int templateId);
+ /**
+ * Create a CaptureRequest metadata based on template id
+ *
+ * @param cameraId The camera id to create the CaptureRequest for.
+ * @param templateId The template id to create the CaptureRequest for.
+ * @param deviceId The device id of the context associated with the caller.
+ * @param devicePolicy The camera policy of the device of the associated context (default
+ * policy for default device context). Only virtual cameras are exposed for a
+ * custom policy, and only real cameras are exposed for the default policy.
+ * @return Metadata representing the CaptureRequest.
+ */
+ CameraMetadataNative createDefaultRequest(@utf8InCpp String cameraId, int templateId,
+ int deviceId, int devicePolicy);
/**
* Check whether a particular session configuration with optional session parameters
* has camera device support.
*
* @param cameraId The camera id to query session configuration for
+ * @param targetSdkVersion the target sdk level of the application calling this function.
* @param sessionConfiguration Specific session configuration to be verified.
+ * @param deviceId The device id of the context associated with the caller.
+ * @param devicePolicy The camera policy of the device of the associated context (default
+ * policy for default device context). Only virtual cameras are exposed for a
+ * custom policy, and only real cameras are exposed for the default policy.
* @return true - in case the stream combination is supported.
* false - in case there is no device support.
*/
boolean isSessionConfigurationWithParametersSupported(@utf8InCpp String cameraId,
- in SessionConfiguration sessionConfiguration);
+ int targetSdkVersion, in SessionConfiguration sessionConfiguration,
+ int deviceId, int devicePolicy);
/**
* Get the camera characteristics for a particular session configuration for
* the given camera device.
*
* @param cameraId ID of the device for which the session characteristics must be fetched.
- * @param sessionConfiguration session configuration for which the characteristics
- * must be fetched.
- * @return - characteristics associated with the given session.
+ * @param targetSdkVersion the target sdk level of the application calling this function.
+ * @param rotationOverride Whether to override the sensor orientation information to
+ * correspond to portrait: {@link ICameraService#ROTATION_OVERRIDE_OVERRIDE_TO_PORTRAIT}
+ * will override the sensor orientation and rotate and crop, while {@link
+ * ICameraService#ROTATION_OVERRIDE_ROTATION_ONLY} will rotate and crop the camera feed
+ * without changing the sensor orientation.
+ * @param sessionConfiguration Session configuration for which the characteristics
+ * must be fetched.
+ * @param deviceId The device id of the context associated with the caller.
+ * @param devicePolicy The camera policy of the device of the associated context (default
+ * policy for default device context). Only virtual cameras are exposed for a
+ * custom policy, and only real cameras are exposed for the default policy.
+ * @return Characteristics associated with the given session.
*/
CameraMetadataNative getSessionCharacteristics(@utf8InCpp String cameraId,
- int targetSdkVersion,
- boolean overrideToPortrait,
- in SessionConfiguration sessionConfiguration);
+ int targetSdkVersion,
+ int rotationOverride,
+ in SessionConfiguration sessionConfiguration,
+ int deviceId,
+ int devicePolicy);
}
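
Every ICameraService entry point above now takes a deviceId and devicePolicy in addition to its previous arguments, and the former overrideToPortrait boolean has been replaced by the ROTATION_OVERRIDE_* constants. The sketch below shows the resulting call shape for a default-device context (deviceId = kDefaultDeviceId, devicePolicy = 0, the same defaults Camera.h adopts later in this change); it mirrors the updated calls in CameraBase.cpp, and `cs` is assumed to be a connected ICameraService proxy.

    // Illustrative sketch: querying the updated interface from a native client.
    using ::android::hardware::ICameraService;

    const int32_t deviceId = 0;      // kDefaultDeviceId: context of the real device
    const int32_t devicePolicy = 0;  // default policy: only real cameras are exposed

    int32_t count = 0;
    android::binder::Status res = cs->getNumberOfCameras(
            ICameraService::CAMERA_TYPE_BACKWARD_COMPATIBLE, deviceId, devicePolicy, &count);

    android::hardware::CameraInfo info;
    if (res.isOk() && count > 0) {
        // rotationOverride takes one of the new constants instead of a boolean.
        res = cs->getCameraInfo(/*cameraId*/ 0, ICameraService::ROTATION_OVERRIDE_NONE,
                                deviceId, devicePolicy, &info);
    }
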
diff --git a/camera/aidl/android/hardware/ICameraServiceListener.aidl b/camera/aidl/android/hardware/ICameraServiceListener.aidl
index 23a87d3..9c8c88a 100644
--- a/camera/aidl/android/hardware/ICameraServiceListener.aidl
+++ b/camera/aidl/android/hardware/ICameraServiceListener.aidl
@@ -51,14 +51,20 @@
// Use to initialize variables only
const int STATUS_UNKNOWN = -1;
- oneway void onStatusChanged(int status, @utf8InCpp String cameraId);
+ // We pass the deviceId associated with a camera in the callbacks: the id of the virtual
+ // device owning the camera (for virtual cameras), or kDefaultDeviceId (for real cameras).
+ // The deviceId is passed so that the API layer (CameraManagerGlobal) can filter out
+ // cameras that don't correspond to the context associated with the caller who registered
+ // the callback.
+
+ oneway void onStatusChanged(int status, @utf8InCpp String cameraId, int deviceId);
/**
* Notify registered client about status changes for a physical camera backing
* a logical camera.
*/
oneway void onPhysicalCameraStatusChanged(int status, @utf8InCpp String cameraId,
- @utf8InCpp String physicalCameraId);
+ @utf8InCpp String physicalCameraId, int deviceId);
/**
* The torch mode status of a camera.
@@ -82,9 +88,9 @@
// Use to initialize variables only
const int TORCH_STATUS_UNKNOWN = -1;
- oneway void onTorchStatusChanged(int status, @utf8InCpp String cameraId);
+ oneway void onTorchStatusChanged(int status, @utf8InCpp String cameraId, int deviceId);
- oneway void onTorchStrengthLevelChanged(@utf8InCpp String cameraId, int newTorchStrength);
+ oneway void onTorchStrengthLevelChanged(@utf8InCpp String cameraId, int newTorchStrength, int deviceId);
/**
* Notify registered clients about camera access priority changes.
@@ -98,6 +104,6 @@
* Only clients with android.permission.CAMERA_OPEN_CLOSE_LISTENER permission
* will receive such callbacks.
*/
- oneway void onCameraOpened(@utf8InCpp String cameraId, @utf8InCpp String clientPackageId);
- oneway void onCameraClosed(@utf8InCpp String cameraId);
+ oneway void onCameraOpened(@utf8InCpp String cameraId, @utf8InCpp String clientPackageId, int deviceId);
+ oneway void onCameraClosed(@utf8InCpp String cameraId, int deviceId);
}
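
Each listener callback above now carries the deviceId of the device that owns the camera, so the API layer (CameraManagerGlobal) can drop events that belong to a different device context than the caller's. The fragment below sketches that filtering for onStatusChanged only; the class name, the stored context id, and the omission of the remaining callbacks are illustrative assumptions rather than part of this change.

    // Illustrative fragment: ignore status events for cameras owned by another device.
    class FilteringListener : public android::hardware::BnCameraServiceListener {
      public:
        explicit FilteringListener(int32_t contextDeviceId) : mDeviceId(contextDeviceId) {}

        android::binder::Status onStatusChanged(int32_t status, const std::string& cameraId,
                                                int32_t deviceId) override {
            if (deviceId != mDeviceId) {
                return android::binder::Status::ok();  // different device context; skip
            }
            // ... record the new status for cameraId ...
            (void)status;
            (void)cameraId;
            return android::binder::Status::ok();
        }
        // Remaining ICameraServiceListener callbacks omitted for brevity.

      private:
        const int32_t mDeviceId;
    };
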
diff --git a/camera/camera2/ConcurrentCamera.cpp b/camera/camera2/ConcurrentCamera.cpp
index 67aa876..ac442ed 100644
--- a/camera/camera2/ConcurrentCamera.cpp
+++ b/camera/camera2/ConcurrentCamera.cpp
@@ -32,7 +32,8 @@
ConcurrentCameraIdCombination::ConcurrentCameraIdCombination() = default;
ConcurrentCameraIdCombination::ConcurrentCameraIdCombination(
- std::vector<std::string> &&combination) : mConcurrentCameraIds(std::move(combination)) { }
+ std::vector<std::pair<std::string, int32_t>> &&combination)
+ : mConcurrentCameraIdDeviceIdPairs(std::move(combination)) { }
ConcurrentCameraIdCombination::~ConcurrentCameraIdCombination() = default;
@@ -42,25 +43,29 @@
return BAD_VALUE;
}
status_t err = OK;
- mConcurrentCameraIds.clear();
- int32_t cameraIdCount = 0;
- if ((err = parcel->readInt32(&cameraIdCount)) != OK) {
- ALOGE("%s: Failed to read the camera id count from parcel: %d", __FUNCTION__, err);
+ mConcurrentCameraIdDeviceIdPairs.clear();
+ int32_t cameraCount = 0;
+ if ((err = parcel->readInt32(&cameraCount)) != OK) {
+ ALOGE("%s: Failed to read the camera count from parcel: %d", __FUNCTION__, err);
return err;
}
- for (int32_t i = 0; i < cameraIdCount; i++) {
- String16 id;
- if ((err = parcel->readString16(&id)) != OK) {
+ for (int32_t i = 0; i < cameraCount; i++) {
+ String16 cameraId;
+ if ((err = parcel->readString16(&cameraId)) != OK) {
ALOGE("%s: Failed to read camera id!", __FUNCTION__);
return err;
}
- mConcurrentCameraIds.push_back(toStdString(id));
+ int32_t deviceId;
+ if ((err = parcel->readInt32(&deviceId)) != OK) {
+ ALOGE("%s: Failed to read device id!", __FUNCTION__);
+ return err;
+ }
+ mConcurrentCameraIdDeviceIdPairs.push_back({toStdString(cameraId), deviceId});
}
return OK;
}
status_t ConcurrentCameraIdCombination::writeToParcel(android::Parcel* parcel) const {
-
if (parcel == nullptr) {
ALOGE("%s: Null parcel", __FUNCTION__);
return BAD_VALUE;
@@ -68,16 +73,20 @@
status_t err = OK;
- if ((err = parcel->writeInt32(mConcurrentCameraIds.size())) != OK) {
+ if ((err = parcel->writeInt32(mConcurrentCameraIdDeviceIdPairs.size())) != OK) {
ALOGE("%s: Failed to write the camera id count to parcel: %d", __FUNCTION__, err);
return err;
}
- for (const auto &it : mConcurrentCameraIds) {
- if ((err = parcel->writeString16(toString16(it))) != OK) {
+ for (const auto &it : mConcurrentCameraIdDeviceIdPairs) {
+ if ((err = parcel->writeString16(toString16(it.first))) != OK) {
ALOGE("%s: Failed to write the camera id string to parcel: %d", __FUNCTION__, err);
return err;
}
+ if ((err = parcel->writeInt32(it.second)) != OK) {
+ ALOGE("%s: Failed to write the device id integer to parcel: %d", __FUNCTION__, err);
+ return err;
+ }
}
return OK;
}
@@ -105,7 +114,6 @@
}
status_t CameraIdAndSessionConfiguration::writeToParcel(android::Parcel* parcel) const {
-
if (parcel == nullptr) {
ALOGE("%s: Null parcel", __FUNCTION__);
return BAD_VALUE;
diff --git a/camera/camera2/OutputConfiguration.cpp b/camera/camera2/OutputConfiguration.cpp
index 73b153c..2d1af32 100644
--- a/camera/camera2/OutputConfiguration.cpp
+++ b/camera/camera2/OutputConfiguration.cpp
@@ -25,6 +25,7 @@
#include <binder/Parcel.h>
#include <gui/view/Surface.h>
#include <system/camera_metadata.h>
+#include <system/graphics.h>
#include <utils/String8.h>
@@ -102,6 +103,25 @@
return mUseReadoutTimestamp;
}
+int OutputConfiguration::getFormat() const {
+ return mFormat;
+}
+
+int OutputConfiguration::getDataspace() const {
+ return mDataspace;
+}
+
+int64_t OutputConfiguration::getUsage() const {
+ return mUsage;
+}
+
+bool OutputConfiguration::isComplete() const {
+ return !((mSurfaceType == SURFACE_TYPE_MEDIA_RECORDER ||
+ mSurfaceType == SURFACE_TYPE_MEDIA_CODEC ||
+ mSurfaceType == SURFACE_TYPE_IMAGE_READER) &&
+ mGbps.empty());
+}
+
OutputConfiguration::OutputConfiguration() :
mRotation(INVALID_ROTATION),
mSurfaceSetID(INVALID_SET_ID),
@@ -116,7 +136,10 @@
mStreamUseCase(ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_DEFAULT),
mTimestampBase(TIMESTAMP_BASE_DEFAULT),
mMirrorMode(MIRROR_MODE_AUTO),
- mUseReadoutTimestamp(false) {
+ mUseReadoutTimestamp(false),
+ mFormat(HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED),
+ mDataspace(0),
+ mUsage(0) {
}
OutputConfiguration::OutputConfiguration(const android::Parcel& parcel) :
@@ -234,6 +257,24 @@
return err;
}
+ int format = HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED;
+ if ((err = parcel->readInt32(&format)) != OK) {
+ ALOGE("%s: Failed to read format from parcel", __FUNCTION__);
+ return err;
+ }
+
+ int dataspace = 0;
+ if ((err = parcel->readInt32(&dataspace)) != OK) {
+ ALOGE("%s: Failed to read dataspace from parcel", __FUNCTION__);
+ return err;
+ }
+
+ int64_t usage = 0;
+ if ((err = parcel->readInt64(&usage)) != OK) {
+ ALOGE("%s: Failed to read usage flag from parcel", __FUNCTION__);
+ return err;
+ }
+
mRotation = rotation;
mSurfaceSetID = setID;
mSurfaceType = surfaceType;
@@ -256,13 +297,17 @@
mSensorPixelModesUsed = std::move(sensorPixelModesUsed);
mDynamicRangeProfile = dynamicProfile;
mColorSpace = colorSpace;
+ mFormat = format;
+ mDataspace = dataspace;
+ mUsage = usage;
ALOGV("%s: OutputConfiguration: rotation = %d, setId = %d, surfaceType = %d,"
" physicalCameraId = %s, isMultiResolution = %d, streamUseCase = %" PRId64
- ", timestampBase = %d, mirrorMode = %d, useReadoutTimestamp = %d",
+ ", timestampBase = %d, mirrorMode = %d, useReadoutTimestamp = %d, format = %d, "
+ "dataspace = %d, usage = %" PRId64,
__FUNCTION__, mRotation, mSurfaceSetID, mSurfaceType,
mPhysicalCameraId.c_str(), mIsMultiResolution, mStreamUseCase, timestampBase,
- mMirrorMode, mUseReadoutTimestamp);
+ mMirrorMode, mUseReadoutTimestamp, mFormat, mDataspace, mUsage);
return err;
}
@@ -283,6 +328,9 @@
mTimestampBase = TIMESTAMP_BASE_DEFAULT;
mMirrorMode = MIRROR_MODE_AUTO;
mUseReadoutTimestamp = false;
+ mFormat = HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED;
+ mDataspace = 0;
+ mUsage = 0;
}
OutputConfiguration::OutputConfiguration(
@@ -296,7 +344,9 @@
mColorSpace(ANDROID_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP_UNSPECIFIED),
mStreamUseCase(ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_DEFAULT),
mTimestampBase(TIMESTAMP_BASE_DEFAULT),
- mMirrorMode(MIRROR_MODE_AUTO), mUseReadoutTimestamp(false) { }
+ mMirrorMode(MIRROR_MODE_AUTO), mUseReadoutTimestamp(false),
+ mFormat(HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED), mDataspace(0),
+ mUsage(0) { }
status_t OutputConfiguration::writeToParcel(android::Parcel* parcel) const {
@@ -362,6 +412,15 @@
err = parcel->writeInt32(mUseReadoutTimestamp ? 1 : 0);
if (err != OK) return err;
+ err = parcel->writeInt32(mFormat);
+ if (err != OK) return err;
+
+ err = parcel->writeInt32(mDataspace);
+ if (err != OK) return err;
+
+ err = parcel->writeInt64(mUsage);
+ if (err != OK) return err;
+
return OK;
}
diff --git a/camera/camera_platform.aconfig b/camera/camera_platform.aconfig
index f16cd0d..ab161d6 100644
--- a/camera/camera_platform.aconfig
+++ b/camera/camera_platform.aconfig
@@ -2,66 +2,70 @@
container: "system"
flag {
- namespace: "camera_platform"
- name: "camera_hsum_permission"
- description: "Camera access by headless system user"
- bug: "273539631"
+ namespace: "camera_platform"
+ name: "camera_hsum_permission"
+ is_exported: true
+ description: "Camera access by headless system user"
+ bug: "273539631"
}
flag {
- namespace: "camera_platform"
- name: "concert_mode"
- description: "Introduces a new concert mode camera extension type"
- bug: "297083874"
+ namespace: "camera_platform"
+ name: "concert_mode"
+ is_exported: true
+ description: "Introduces a new concert mode camera extension type"
+ bug: "297083874"
}
flag {
- namespace: "camera_platform"
- name: "feature_combination_query"
- description: "Query feature combination support and session specific characteristics"
- bug: "309627704"
+ namespace: "camera_platform"
+ name: "feature_combination_query"
+ is_exported: true
+ description: "Query feature combination support and session specific characteristics"
+ bug: "309627704"
}
flag {
- namespace: "camera_platform"
- name: "watch_foreground_changes"
- description: "Request AppOps to notify changes in the foreground status of the client"
- bug: "290086710"
+ namespace: "camera_platform"
+ name: "watch_foreground_changes"
+ description: "Request AppOps to notify changes in the foreground status of the client"
+ bug: "290086710"
}
flag {
- namespace: "camera_platform"
- name: "log_ultrawide_usage"
- description: "Enable measuring how much usage there is for ultrawide-angle cameras"
- bug: "300515796"
+ namespace: "camera_platform"
+ name: "log_ultrawide_usage"
+ description: "Enable measuring how much usage there is for ultrawide-angle cameras"
+ bug: "300515796"
}
flag {
- namespace: "camera_platform"
- name: "camera_manual_flash_strength_control"
- description: "Flash brightness level control in manual flash mode"
- bug: "238348881"
+ namespace: "camera_platform"
+ name: "camera_manual_flash_strength_control"
+ is_exported: true
+ description: "Flash brightness level control in manual flash mode"
+ bug: "238348881"
}
flag {
- namespace: "camera_platform"
- name: "lazy_aidl_wait_for_service"
- description: "Use waitForService instead of getService with lazy AIDL HALs"
- bug: "285546208"
+ namespace: "camera_platform"
+ name: "lazy_aidl_wait_for_service"
+ description: "Use waitForService instead of getService with lazy AIDL HALs"
+ bug: "285546208"
}
flag {
- namespace: "camera_platform"
- name: "log_zoom_override_usage"
- description: "Enable measuring how much usage there is for zoom settings overrde"
- bug: "307409002"
+ namespace: "camera_platform"
+ name: "log_zoom_override_usage"
+ description: "Enable measuring how much usage there is for zoom settings overrde"
+ bug: "307409002"
}
flag {
- namespace: "camera_platform"
- name: "session_hal_buf_manager"
- description: "Enable or disable HAL buffer manager as requested by the camera HAL"
- bug: "311263114"
+ namespace: "camera_platform"
+ name: "session_hal_buf_manager"
+ description: "Enable or disable HAL buffer manager as requested by the camera HAL"
+ bug: "311263114"
}
flag {
@@ -72,84 +76,164 @@
}
flag {
- namespace: "camera_platform"
- name: "camera_ae_mode_low_light_boost"
- description: "An AE mode that enables increased brightening in low light scenes"
- bug: "312803148"
+ namespace: "camera_platform"
+ name: "camera_ae_mode_low_light_boost"
+ is_exported: true
+ description: "An AE mode that enables increased brightening in low light scenes"
+ bug: "312803148"
}
flag {
- namespace: "camera_platform"
- name: "multiresolution_imagereader_usage_config"
- description: "Enable creating MultiResolutionImageReader with usage flag configuration"
- bug: "301588215"
+ namespace: "camera_platform"
+ name: "multiresolution_imagereader_usage_config"
+ description: "Enable creating MultiResolutionImageReader with usage flag configuration"
+ bug: "301588215"
}
flag {
- namespace: "camera_platform"
- name: "use_ro_board_api_level_for_vndk_version"
- description: "Enable using ro.board.api_level instead of ro.vndk.version to get VNDK version"
- bug: "312315580"
+ namespace: "camera_platform"
+ name: "use_ro_board_api_level_for_vndk_version"
+ description: "Enable using ro.board.api_level instead of ro.vndk.version to get VNDK version"
+ bug: "312315580"
}
flag {
- namespace: "camera_platform"
- name: "camera_extensions_characteristics_get"
- description: "Enable get extension specific camera characteristics API"
- bug: "280649914"
+ namespace: "camera_platform"
+ name: "camera_extensions_characteristics_get"
+ is_exported: true
+ description: "Enable get extension specific camera characteristics API"
+ bug: "280649914"
}
flag {
- namespace: "camera_platform"
- name: "delay_lazy_hal_instantiation"
- description: "Only trigger lazy HAL instantiation when the HAL is needed for an operation."
- bug: "319735068"
+ namespace: "camera_platform"
+ name: "delay_lazy_hal_instantiation"
+ description: "Only trigger lazy HAL instantiation when the HAL is needed for an operation."
+ bug: "319735068"
}
flag {
- namespace: "camera_platform"
- name: "return_buffers_outside_locks"
- description: "Enable returning graphics buffers to buffer queues without holding the in-flight mutex"
- bug: "315526878"
+ namespace: "camera_platform"
+ name: "return_buffers_outside_locks"
+ description: "Enable returning graphics buffers to buffer queues without holding the in-flight mutex"
+ bug: "315526878"
}
flag {
- namespace: "camera_platform"
- name: "camera_device_setup"
- description: "Create an intermediate Camera Device class for limited CameraDevice access."
- bug: "320741775"
+ namespace: "camera_platform"
+ name: "camera_device_setup"
+ is_exported: true
+ description: "Create an intermediate Camera Device class for limited CameraDevice access."
+ bug: "320741775"
}
flag {
- namespace: "camera_platform"
- name: "camera_privacy_allowlist"
- description: "Allowlisting to exempt safety-relevant cameras from privacy control for automotive devices"
- bug: "282814430"
+ namespace: "camera_platform"
+ name: "camera_privacy_allowlist"
+ is_exported: true
+ description: "Allowlisting to exempt safety-relevant cameras from privacy control for automotive devices"
+ bug: "282814430"
}
flag {
- namespace: "camera_platform"
- name: "surface_ipc"
- description: "Optimize Surface binder IPC"
- bug: "323292530"
- metadata {
- purpose: PURPOSE_BUGFIX
- }
+ namespace: "camera_platform"
+ name: "surface_ipc"
+ description: "Optimize Surface binder IPC"
+ bug: "323292530"
+ metadata {
+ purpose: PURPOSE_BUGFIX
+ }
}
flag {
- namespace: "camera_platform"
- name: "extension_10_bit"
- description: "Enables 10-bit support in the camera extensions."
- bug: "316375635"
+ namespace: "camera_platform"
+ name: "extension_10_bit"
+ is_exported: true
+ description: "Enables 10-bit support in the camera extensions."
+ bug: "316375635"
}
flag {
- namespace: "camera_platform"
- name: "realtime_priority_bump"
- description: "Bump the scheduling priority of performance critical code paths"
- bug: "336628522"
- metadata {
- purpose: PURPOSE_BUGFIX
- }
+ namespace: "camera_platform"
+ name: "single_thread_executor"
+ description: "Ensure device logic is run within one thread."
+ bug: "305857746"
+ metadata {
+ purpose: PURPOSE_BUGFIX
+ }
+}
+
+flag {
+ namespace: "camera_platform"
+ name: "surface_leak_fix"
+ description: "Address Surface release leaks in CaptureRequest"
+ bug: "324071855"
+ metadata {
+ purpose: PURPOSE_BUGFIX
+ }
+}
+
+flag {
+ namespace: "camera_platform"
+ name: "concert_mode_api"
+ description: "Covers the eyes free videography public facing API"
+ bug: "297083874"
+}
+
+
+flag {
+ namespace: "camera_platform"
+ name: "cache_permission_services"
+ description: "Cache IPermissionController and IPermissionChecker in CameraService to reduce query latency."
+ bug: "326139956"
+ metadata {
+ purpose: PURPOSE_BUGFIX
+ }
+}
+
+flag {
+ namespace: "camera_platform"
+ name: "check_session_support_before_session_char"
+ description: "Validate that a SessionConfiguration is supported before fetching SessionCharacteristics."
+ bug: "327008530"
+ metadata {
+ purpose: PURPOSE_BUGFIX
+ }
+}
+
+flag {
+ namespace: "camera_platform"
+ name: "calculate_perf_override_during_session_support"
+ description: "Dynamically calulate whether perf class override should be set in isSessionConfigurationWithParametersSupported."
+ bug: "332975108"
+ metadata {
+ purpose: PURPOSE_BUGFIX
+ }
+}
+
+flag {
+ namespace: "camera_platform"
+ name: "analytics_24q3"
+ description: "Miscellaneous camera platform metrics for 24Q3"
+ bug: "332557570"
+}
+
+flag {
+ namespace: "camera_platform"
+ name: "realtime_priority_bump"
+ description: "Bump the scheduling priority of performance critical code paths"
+ bug: "336628522"
+ metadata {
+ purpose: PURPOSE_BUGFIX
+ }
+}
+
+flag {
+ namespace: "camera_platform"
+ name: "use_system_api_for_vndk_version"
+ description: "ro.board.api_level isn't reliable. Use system api to replace ro.vndk.version"
+ bug: "312315580"
+ metadata {
+ purpose: PURPOSE_BUGFIX
+ }
}
diff --git a/camera/include/camera/Camera.h b/camera/include/camera/Camera.h
index 6655f82..3ecd10d 100644
--- a/camera/include/camera/Camera.h
+++ b/camera/include/camera/Camera.h
@@ -27,6 +27,7 @@
#include <camera/android/hardware/ICamera.h>
#include <camera/android/hardware/ICameraClient.h>
#include <camera/CameraBase.h>
+#include <camera/CameraUtils.h>
namespace android {
@@ -58,7 +59,7 @@
typedef ::android::hardware::ICameraClient TCamCallbacks;
typedef ::android::binder::Status (::android::hardware::ICameraService::*TCamConnectService)
(const sp<::android::hardware::ICameraClient>&,
- int, const std::string&, int, int, int, bool, bool,
+ int, const std::string&, int, int, int, int, bool, int32_t, int32_t,
/*out*/
sp<::android::hardware::ICamera>*);
static TCamConnectService fnConnectService;
@@ -82,7 +83,8 @@
static sp<Camera> connect(int cameraId,
const std::string& clientPackageName,
int clientUid, int clientPid, int targetSdkVersion,
- bool overrideToPortrait, bool forceSlowJpegMode);
+ int rotationOverride, bool forceSlowJpegMode,
+ int32_t deviceId = kDefaultDeviceId, int32_t devicePolicy = 0);
virtual ~Camera();
@@ -197,6 +199,6 @@
friend class CameraBase;
};
-}; // namespace android
+} // namespace android
#endif
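
With the declaration above, legacy clients pass one of the ICameraService rotation-override constants instead of a boolean, while deviceId and devicePolicy fall back to the real-device defaults. A small illustrative call follows; the package name, uid/pid choices, and target SDK value are placeholders.

    // Illustrative sketch: opening a camera through the legacy API with the new signature.
    android::sp<android::Camera> camera = android::Camera::connect(
            /*cameraId*/ 0,
            /*clientPackageName*/ "com.example.camera",
            android::hardware::ICameraService::USE_CALLING_UID,
            android::hardware::ICameraService::USE_CALLING_PID,
            /*targetSdkVersion*/ 34,
            android::hardware::ICameraService::ROTATION_OVERRIDE_NONE,
            /*forceSlowJpegMode*/ false);
    // deviceId defaults to kDefaultDeviceId and devicePolicy to 0 (real cameras only).
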
diff --git a/camera/include/camera/CameraBase.h b/camera/include/camera/CameraBase.h
index 6af7f2a..3370b3d 100644
--- a/camera/include/camera/CameraBase.h
+++ b/camera/include/camera/CameraBase.h
@@ -62,16 +62,15 @@
virtual status_t writeToParcel(android::Parcel* parcel) const;
virtual status_t readFromParcel(const android::Parcel* parcel);
-
};
/**
- * Basic status information about a camera device - its name and its current
+ * Basic status information about a camera device - its id and its current
* state.
*/
struct CameraStatus : public android::Parcelable {
/**
- * The name of the camera device
+ * The app-visible id of the camera device
*/
std::string cameraId;
@@ -90,12 +89,19 @@
*/
std::string clientPackage;
+ /**
+ * The id of the device owning the camera. For virtual cameras, this is the id of the virtual
+ * device owning the camera. For real cameras, this is the default device id, i.e.,
+ * kDefaultDeviceId.
+ */
+ int32_t deviceId;
+
virtual status_t writeToParcel(android::Parcel* parcel) const;
virtual status_t readFromParcel(const android::Parcel* parcel);
CameraStatus(std::string id, int32_t s, const std::vector<std::string>& unavailSubIds,
- const std::string& clientPkg) : cameraId(id), status(s),
- unavailablePhysicalIds(unavailSubIds), clientPackage(clientPkg) {}
+ const std::string& clientPkg, int32_t devId) : cameraId(id), status(s),
+ unavailablePhysicalIds(unavailSubIds), clientPackage(clientPkg), deviceId(devId) {}
CameraStatus() : status(ICameraServiceListener::STATUS_PRESENT) {}
};
@@ -103,7 +109,6 @@
using hardware::CameraInfo;
-
template <typename TCam>
struct CameraTraits {
};
@@ -120,15 +125,18 @@
static sp<TCam> connect(int cameraId,
const std::string& clientPackageName,
int clientUid, int clientPid, int targetSdkVersion,
- bool overrideToPortrait, bool forceSlowJpegMode);
+ int rotationOverride, bool forceSlowJpegMode,
+ int32_t deviceId, int32_t devicePolicy);
virtual void disconnect();
void setListener(const sp<TCamListener>& listener);
- static int getNumberOfCameras();
+ static int getNumberOfCameras(int32_t deviceId, int32_t devicePolicy);
static status_t getCameraInfo(int cameraId,
- bool overrideToPortrait,
+ int rotationOverride,
+ int32_t deviceId,
+ int32_t devicePolicy,
/*out*/
struct hardware::CameraInfo* cameraInfo);
@@ -167,6 +175,6 @@
typedef CameraBase<TCam> CameraBaseT;
};
-}; // namespace android
+} // namespace android
#endif
diff --git a/camera/include/camera/CameraSessionStats.h b/camera/include/camera/CameraSessionStats.h
index 06c154d..34ee882 100644
--- a/camera/include/camera/CameraSessionStats.h
+++ b/camera/include/camera/CameraSessionStats.h
@@ -167,6 +167,8 @@
CameraExtensionSessionStats mCameraExtensionSessionStats;
+ std::pair<int32_t, int32_t> mMostRequestedFpsRange;
+
// Constructors
CameraSessionStats();
CameraSessionStats(const std::string& cameraId, int facing, int newCameraState,
diff --git a/camera/include/camera/CameraUtils.h b/camera/include/camera/CameraUtils.h
index 31d25e7..d358407 100644
--- a/camera/include/camera/CameraUtils.h
+++ b/camera/include/camera/CameraUtils.h
@@ -26,6 +26,9 @@
namespace android {
+// Device id of a context associated with the default device.
+constexpr int32_t kDefaultDeviceId = 0;
+
/**
* CameraUtils contains utility methods that are shared between the native
* camera client, and the camera service.
diff --git a/camera/include/camera/camera2/ConcurrentCamera.h b/camera/include/camera/camera2/ConcurrentCamera.h
index ac99fd5..2a65da8 100644
--- a/camera/include/camera/camera2/ConcurrentCamera.h
+++ b/camera/include/camera/camera2/ConcurrentCamera.h
@@ -28,9 +28,9 @@
namespace utils {
struct ConcurrentCameraIdCombination : public Parcelable {
- std::vector<std::string> mConcurrentCameraIds;
+ std::vector<std::pair<std::string, int32_t>> mConcurrentCameraIdDeviceIdPairs;
ConcurrentCameraIdCombination();
- ConcurrentCameraIdCombination(std::vector<std::string> &&combination);
+ ConcurrentCameraIdCombination(std::vector<std::pair<std::string, int32_t>> &&combination);
virtual ~ConcurrentCameraIdCombination();
virtual status_t writeToParcel(android::Parcel *parcel) const override;
diff --git a/camera/include/camera/camera2/OutputConfiguration.h b/camera/include/camera/camera2/OutputConfiguration.h
index 3f74b4a..83ce39d 100644
--- a/camera/include/camera/camera2/OutputConfiguration.h
+++ b/camera/include/camera/camera2/OutputConfiguration.h
@@ -35,10 +35,13 @@
static const int INVALID_ROTATION;
static const int INVALID_SET_ID;
- enum SurfaceType{
+ enum SurfaceType {
SURFACE_TYPE_UNKNOWN = -1,
SURFACE_TYPE_SURFACE_VIEW = 0,
- SURFACE_TYPE_SURFACE_TEXTURE = 1
+ SURFACE_TYPE_SURFACE_TEXTURE = 1,
+ SURFACE_TYPE_MEDIA_RECORDER = 2,
+ SURFACE_TYPE_MEDIA_CODEC = 3,
+ SURFACE_TYPE_IMAGE_READER = 4
};
enum TimestampBaseType {
TIMESTAMP_BASE_DEFAULT = 0,
@@ -71,6 +74,10 @@
int getTimestampBase() const;
int getMirrorMode() const;
bool useReadoutTimestamp() const;
+ int getFormat() const;
+ int getDataspace() const;
+ int64_t getUsage() const;
+ bool isComplete() const;
// set of sensor pixel mode resolutions allowed {MAX_RESOLUTION, DEFAULT_MODE};
const std::vector<int32_t>& getSensorPixelModesUsed() const;
@@ -98,7 +105,7 @@
OutputConfiguration(const std::vector<sp<IGraphicBufferProducer>>& gbps,
int rotation, const std::string& physicalCameraId,
int surfaceSetID = INVALID_SET_ID,
- int surfaceType = OutputConfiguration::SURFACE_TYPE_UNKNOWN, int width = 0,
+ int surfaceType = SURFACE_TYPE_UNKNOWN, int width = 0,
int height = 0, bool isShared = false);
bool operator == (const OutputConfiguration& other) const {
@@ -118,7 +125,10 @@
mStreamUseCase == other.mStreamUseCase &&
mTimestampBase == other.mTimestampBase &&
mMirrorMode == other.mMirrorMode &&
- mUseReadoutTimestamp == other.mUseReadoutTimestamp);
+ mUseReadoutTimestamp == other.mUseReadoutTimestamp &&
+ mFormat == other.mFormat &&
+ mDataspace == other.mDataspace &&
+ mUsage == other.mUsage);
}
bool operator != (const OutputConfiguration& other) const {
return !(*this == other);
@@ -173,6 +183,15 @@
if (mUseReadoutTimestamp != other.mUseReadoutTimestamp) {
return mUseReadoutTimestamp < other.mUseReadoutTimestamp;
}
+ if (mFormat != other.mFormat) {
+ return mFormat < other.mFormat;
+ }
+ if (mDataspace != other.mDataspace) {
+ return mDataspace < other.mDataspace;
+ }
+ if (mUsage != other.mUsage) {
+ return mUsage < other.mUsage;
+ }
return gbpsLessThan(other);
}
@@ -203,6 +222,9 @@
int mTimestampBase;
int mMirrorMode;
bool mUseReadoutTimestamp;
+ int mFormat;
+ int mDataspace;
+ int64_t mUsage;
};
} // namespace params
} // namespace camera2
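
The new surface types and isComplete() above let an OutputConfiguration be created for a MediaRecorder, MediaCodec, or ImageReader output before any buffer producer is attached; isComplete() returns false only for those deferred cases. A tiny illustrative check, with the surrounding helper invented for the example:

    // Illustrative sketch: count configurations that already have their producers.
    #include <vector>
    #include <camera/camera2/OutputConfiguration.h>

    using ::android::hardware::camera2::params::OutputConfiguration;

    size_t countReadyOutputs(const std::vector<OutputConfiguration>& outputs) {
        size_t ready = 0;
        for (const auto& config : outputs) {
            if (config.isComplete()) {
                ++ready;  // has producers, or is not a deferred surface type
            }
        }
        return ready;
    }
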
diff --git a/camera/ndk/Android.bp b/camera/ndk/Android.bp
index 421469a..5577775 100644
--- a/camera/ndk/Android.bp
+++ b/camera/ndk/Android.bp
@@ -77,6 +77,8 @@
"impl/ACameraCaptureSession.cpp",
],
shared_libs: [
+ "android.companion.virtual.virtualdevice_aidl-cpp",
+ "android.companion.virtualdevice.flags-aconfig-cc",
"libbinder",
"liblog",
"libgui",
diff --git a/camera/ndk/NdkCameraManager.cpp b/camera/ndk/NdkCameraManager.cpp
index 2de4a50..1b3343e 100644
--- a/camera/ndk/NdkCameraManager.cpp
+++ b/camera/ndk/NdkCameraManager.cpp
@@ -68,7 +68,7 @@
EXPORT
camera_status_t ACameraManager_registerAvailabilityCallback(
- ACameraManager*, const ACameraManager_AvailabilityCallbacks *callback) {
+ ACameraManager* manager, const ACameraManager_AvailabilityCallbacks* callback) {
ATRACE_CALL();
if (callback == nullptr) {
ALOGE("%s: invalid argument! callback is null!", __FUNCTION__);
@@ -81,13 +81,13 @@
callback->onCameraAvailable, callback->onCameraUnavailable);
return ACAMERA_ERROR_INVALID_PARAMETER;
}
- CameraManagerGlobal::getInstance()->registerAvailabilityCallback(callback);
+ manager->registerAvailabilityCallback(callback);
return ACAMERA_OK;
}
EXPORT
camera_status_t ACameraManager_unregisterAvailabilityCallback(
- ACameraManager*, const ACameraManager_AvailabilityCallbacks *callback) {
+ ACameraManager* manager, const ACameraManager_AvailabilityCallbacks* callback) {
ATRACE_CALL();
if (callback == nullptr) {
ALOGE("%s: invalid argument! callback is null!", __FUNCTION__);
@@ -100,13 +100,13 @@
callback->onCameraAvailable, callback->onCameraUnavailable);
return ACAMERA_ERROR_INVALID_PARAMETER;
}
- CameraManagerGlobal::getInstance()->unregisterAvailabilityCallback(callback);
+ manager->unregisterAvailabilityCallback(callback);
return ACAMERA_OK;
}
EXPORT
camera_status_t ACameraManager_registerExtendedAvailabilityCallback(
- ACameraManager* /*manager*/, const ACameraManager_ExtendedAvailabilityCallbacks *callback) {
+ ACameraManager* manager, const ACameraManager_ExtendedAvailabilityCallbacks* callback) {
ATRACE_CALL();
if (callback == nullptr) {
ALOGE("%s: invalid argument! callback is null!", __FUNCTION__);
@@ -131,13 +131,13 @@
return ACAMERA_ERROR_INVALID_PARAMETER;
}
}
- CameraManagerGlobal::getInstance()->registerExtendedAvailabilityCallback(callback);
+ manager->registerExtendedAvailabilityCallback(callback);
return ACAMERA_OK;
}
EXPORT
camera_status_t ACameraManager_unregisterExtendedAvailabilityCallback(
- ACameraManager* /*manager*/, const ACameraManager_ExtendedAvailabilityCallbacks *callback) {
+ ACameraManager* manager, const ACameraManager_ExtendedAvailabilityCallbacks* callback) {
ATRACE_CALL();
if (callback == nullptr) {
ALOGE("%s: invalid argument! callback is null!", __FUNCTION__);
@@ -154,7 +154,7 @@
callback->onCameraAccessPrioritiesChanged);
return ACAMERA_ERROR_INVALID_PARAMETER;
}
- CameraManagerGlobal::getInstance()->unregisterExtendedAvailabilityCallback(callback);
+ manager->unregisterExtendedAvailabilityCallback(callback);
return ACAMERA_OK;
}
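For context, a minimal NDK usage sketch of the registration path these entry points now delegate to the ACameraManager instance; the callback bodies and the null context are illustrative only:

    // Sketch: register/unregister availability callbacks via the NDK API.
    #include <camera/NdkCameraManager.h>

    static void onAvailable(void* /*context*/, const char* id) { /* camera `id` became available */ }
    static void onUnavailable(void* /*context*/, const char* id) { /* camera `id` became unavailable */ }

    void watchCameras() {
        ACameraManager* manager = ACameraManager_create();
        ACameraManager_AvailabilityCallbacks callbacks = {
            .context = nullptr,
            .onCameraAvailable = onAvailable,
            .onCameraUnavailable = onUnavailable,
        };
        ACameraManager_registerAvailabilityCallback(manager, &callbacks);
        // ... later, before tearing down ...
        ACameraManager_unregisterAvailabilityCallback(manager, &callbacks);
        ACameraManager_delete(manager);
    }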
diff --git a/camera/ndk/NdkCameraMetadata.cpp b/camera/ndk/NdkCameraMetadata.cpp
index 7d3a53e..a2dfaee 100644
--- a/camera/ndk/NdkCameraMetadata.cpp
+++ b/camera/ndk/NdkCameraMetadata.cpp
@@ -121,6 +121,18 @@
}
EXPORT
+camera_status_t ACameraMetadata_getTagFromName(
+ const ACameraMetadata* acm, const char* name, uint32_t* tag) {
+ ATRACE_CALL();
+ if (acm == nullptr || name == nullptr || tag == nullptr) {
+ ALOGE("%s: invalid argument! metadata %p, name %p, tag %p",
+ __FUNCTION__, acm, name, tag);
+ return ACAMERA_ERROR_INVALID_PARAMETER;
+ }
+ return acm->getTagFromName(name, tag);
+}
+
+EXPORT
ACameraMetadata* ACameraMetadata_copy(const ACameraMetadata* src) {
ATRACE_CALL();
if (src == nullptr) {
diff --git a/camera/ndk/impl/ACameraManager.cpp b/camera/ndk/impl/ACameraManager.cpp
index 8c3424f..f36a743 100644
--- a/camera/ndk/impl/ACameraManager.cpp
+++ b/camera/ndk/impl/ACameraManager.cpp
@@ -17,21 +17,109 @@
//#define LOG_NDEBUG 0
#define LOG_TAG "ACameraManager"
-#include <memory>
#include "ACameraManager.h"
-#include "ACameraMetadata.h"
-#include "ACameraDevice.h"
-#include <utils/Vector.h>
-#include <cutils/properties.h>
-#include <stdlib.h>
+#include <android_companion_virtualdevice_flags.h>
#include <camera/CameraUtils.h>
#include <camera/StringUtils.h>
#include <camera/VendorTagDescriptor.h>
+#include <cutils/properties.h>
+#include <stdlib.h>
+#include <utils/Vector.h>
+#include <memory>
+#include "ACameraDevice.h"
+#include "ACameraMetadata.h"
using namespace android::acam;
+namespace vd_flags = android::companion::virtualdevice::flags;
namespace android {
namespace acam {
+namespace {
+
+using ::android::binder::Status;
+using ::android::companion::virtualnative::IVirtualDeviceManagerNative;
+
+// Return binder connection to VirtualDeviceManager.
+//
+// Subsequent calls return the same cached instance.
+sp<IVirtualDeviceManagerNative> getVirtualDeviceManager() {
+ auto connectToVirtualDeviceManagerNative = []() {
+ sp<IBinder> binder =
+ defaultServiceManager()->checkService(String16("virtualdevice_native"));
+ if (binder == nullptr) {
+ ALOGW("%s: Cannot get virtualdevice_native service", __func__);
+ return interface_cast<IVirtualDeviceManagerNative>(nullptr);
+ }
+ return interface_cast<IVirtualDeviceManagerNative>(binder);
+ };
+
+ static sp<IVirtualDeviceManagerNative> vdm = connectToVirtualDeviceManagerNative();
+ return vdm;
+}
+
+// Returns the device id the calling process is running on.
+// If the process cannot be attributed to a single virtual device id, returns the default device id.
+int getCurrentDeviceId() {
+ if (!vd_flags::camera_device_awareness()) {
+ return kDefaultDeviceId;
+ }
+
+ auto vdm = getVirtualDeviceManager();
+ if (vdm == nullptr) {
+ return kDefaultDeviceId;
+ }
+
+ const uid_t myUid = getuid();
+ std::vector<int> deviceIds;
+ Status status = vdm->getDeviceIdsForUid(myUid, &deviceIds);
+ if (!status.isOk() || deviceIds.empty()) {
+ ALOGE("%s: Failed to call getDeviceIdsForUid to determine device id for uid %d: %s",
+ __func__, myUid, status.toString8().c_str());
+ return kDefaultDeviceId;
+ }
+
+ // If the UID is associated with multiple virtual devices, use the default device's
+ // camera as we cannot disambiguate here. This effectively means that the app has
+ // activities on different devices at the same time.
+ if (deviceIds.size() != 1) {
+ return kDefaultDeviceId;
+ }
+ return deviceIds[0];
+}
+
+// Returns device policy for POLICY_TYPE_CAMERA corresponding to deviceId.
+DevicePolicy getDevicePolicyForDeviceId(const int deviceId) {
+ if (!vd_flags::camera_device_awareness() || deviceId == kDefaultDeviceId) {
+ return DevicePolicy::DEVICE_POLICY_DEFAULT;
+ }
+
+ auto vdm = getVirtualDeviceManager();
+ if (vdm == nullptr) {
+ return DevicePolicy::DEVICE_POLICY_DEFAULT;
+ }
+
+ int policy = IVirtualDeviceManagerNative::DEVICE_POLICY_DEFAULT;
+ Status status = vdm->getDevicePolicy(deviceId, IVirtualDeviceManagerNative::POLICY_TYPE_CAMERA,
+ &policy);
+ if (!status.isOk()) {
+ ALOGE("%s: Failed to call getDevicePolicy to determine camera policy for device id %d: %s",
+ __func__, deviceId, status.toString8().c_str());
+ return DevicePolicy::DEVICE_POLICY_DEFAULT;
+ }
+ return static_cast<DevicePolicy>(policy);
+}
+
+// Returns true if camera owned by device cameraDeviceId can be accessed within deviceContext.
+bool isCameraAccessible(const DeviceContext deviceContext, const int cameraDeviceId) {
+ if (!vd_flags::camera_device_awareness() ||
+ deviceContext.policy == DevicePolicy::DEVICE_POLICY_DEFAULT) {
+ return cameraDeviceId == kDefaultDeviceId;
+ }
+ return deviceContext.deviceId == cameraDeviceId;
+}
+
+} // namespace
+
// Static member definitions
const char* CameraManagerGlobal::kCameraIdKey = "CameraId";
const char* CameraManagerGlobal::kPhysicalCameraIdKey = "PhysicalCameraId";
@@ -41,6 +129,11 @@
Mutex CameraManagerGlobal::sLock;
wp<CameraManagerGlobal> CameraManagerGlobal::sInstance = nullptr;
+DeviceContext::DeviceContext() {
+ deviceId = getCurrentDeviceId();
+ policy = getDevicePolicyForDeviceId(deviceId);
+}
+
sp<CameraManagerGlobal> CameraManagerGlobal::getInstance() {
Mutex::Autolock _l(sLock);
sp<CameraManagerGlobal> instance = sInstance.promote();
@@ -125,11 +218,11 @@
std::vector<hardware::CameraStatus> cameraStatuses{};
mCameraService->addListener(mCameraServiceListener, &cameraStatuses);
for (auto& c : cameraStatuses) {
- onStatusChangedLocked(c.status, c.cameraId);
+ onStatusChangedLocked(c.status, c.deviceId, c.cameraId);
for (auto& unavailablePhysicalId : c.unavailablePhysicalIds) {
onStatusChangedLocked(hardware::ICameraServiceListener::STATUS_NOT_PRESENT,
- c.cameraId, unavailablePhysicalId);
+ c.deviceId, c.cameraId, unavailablePhysicalId);
}
}
@@ -189,14 +282,15 @@
sp<CameraManagerGlobal> cm = mCameraManager.promote();
if (cm != nullptr) {
AutoMutex lock(cm->mLock);
- std::vector<std::string> cameraIdList;
+ std::vector<DeviceStatusMapKey> keysToRemove;
+ keysToRemove.reserve(cm->mDeviceStatusMap.size());
for (auto& pair : cm->mDeviceStatusMap) {
- cameraIdList.push_back(pair.first);
+ keysToRemove.push_back(pair.first);
}
- for (const std::string& cameraId : cameraIdList) {
- cm->onStatusChangedLocked(
- CameraServiceListener::STATUS_NOT_PRESENT, cameraId);
+ for (const DeviceStatusMapKey& key : keysToRemove) {
+ cm->onStatusChangedLocked(CameraServiceListener::STATUS_NOT_PRESENT, key.deviceId,
+ key.cameraId);
}
cm->mCameraService.clear();
// TODO: consider adding re-connect call here?
@@ -204,32 +298,35 @@
}
void CameraManagerGlobal::registerExtendedAvailabilityCallback(
- const ACameraManager_ExtendedAvailabilityCallbacks *callback) {
- return registerAvailCallback<ACameraManager_ExtendedAvailabilityCallbacks>(callback);
+ const DeviceContext& deviceContext,
+ const ACameraManager_ExtendedAvailabilityCallbacks* callback) {
+ return registerAvailCallback<ACameraManager_ExtendedAvailabilityCallbacks>(deviceContext,
+ callback);
}
void CameraManagerGlobal::unregisterExtendedAvailabilityCallback(
- const ACameraManager_ExtendedAvailabilityCallbacks *callback) {
+ const DeviceContext& deviceContext,
+ const ACameraManager_ExtendedAvailabilityCallbacks* callback) {
Mutex::Autolock _l(mLock);
drainPendingCallbacksLocked();
- Callback cb(callback);
+ Callback cb(deviceContext, callback);
mCallbacks.erase(cb);
}
void CameraManagerGlobal::registerAvailabilityCallback(
- const ACameraManager_AvailabilityCallbacks *callback) {
- return registerAvailCallback<ACameraManager_AvailabilityCallbacks>(callback);
+ const DeviceContext& deviceContext, const ACameraManager_AvailabilityCallbacks* callback) {
+ return registerAvailCallback<ACameraManager_AvailabilityCallbacks>(deviceContext, callback);
}
void CameraManagerGlobal::unregisterAvailabilityCallback(
- const ACameraManager_AvailabilityCallbacks *callback) {
+ const DeviceContext& deviceContext, const ACameraManager_AvailabilityCallbacks* callback) {
Mutex::Autolock _l(mLock);
drainPendingCallbacksLocked();
- Callback cb(callback);
+ Callback cb(deviceContext, callback);
mCallbacks.erase(cb);
}
@@ -252,20 +349,24 @@
}
}
-template<class T>
-void CameraManagerGlobal::registerAvailCallback(const T *callback) {
+template <class T>
+void CameraManagerGlobal::registerAvailCallback(const DeviceContext& deviceContext,
+ const T* callback) {
Mutex::Autolock _l(mLock);
getCameraServiceLocked();
- Callback cb(callback);
- auto pair = mCallbacks.insert(cb);
+ Callback cb(deviceContext, callback);
+ const auto& [_, newlyRegistered] = mCallbacks.insert(cb);
// Send initial callbacks if callback is newly registered
- if (pair.second) {
- for (auto& pair : mDeviceStatusMap) {
- const std::string& cameraId = pair.first;
- int32_t status = pair.second.getStatus();
+ if (newlyRegistered) {
+ for (auto& [key, statusAndHAL3Support] : mDeviceStatusMap) {
+ if (!isCameraAccessible(deviceContext, key.deviceId)) {
+ continue;
+ }
+ const std::string& cameraId = key.cameraId;
+ int32_t status = statusAndHAL3Support.getStatus();
// Don't send initial callbacks for camera ids which don't support
// camera2
- if (!pair.second.supportsHAL3) {
+ if (!statusAndHAL3Support.supportsHAL3) {
continue;
}
@@ -281,7 +382,7 @@
// Physical camera unavailable callback
std::set<std::string> unavailablePhysicalCameras =
- pair.second.getUnavailablePhysicalIds();
+ statusAndHAL3Support.getUnavailablePhysicalIds();
for (const auto& physicalCameraId : unavailablePhysicalCameras) {
sp<AMessage> msg = new AMessage(kWhatSendSinglePhysicalCameraCallback, mHandler);
ACameraManager_PhysicalCameraAvailabilityCallback cbFunc =
@@ -311,21 +412,26 @@
return camera2Support;
}
-void CameraManagerGlobal::getCameraIdList(std::vector<std::string>* cameraIds) {
+void CameraManagerGlobal::getCameraIdList(const DeviceContext& context,
+ std::vector<std::string>* cameraIds) {
// Ensure that we have initialized/refreshed the list of available devices
Mutex::Autolock _l(mLock);
// Needed to make sure we're connected to cameraservice
getCameraServiceLocked();
- for(auto& deviceStatus : mDeviceStatusMap) {
- int32_t status = deviceStatus.second.getStatus();
+ for (auto& [key, statusAndHAL3Support] : mDeviceStatusMap) {
+ if (!isCameraAccessible(context, key.deviceId)) {
+ continue;
+ }
+
+ int32_t status = statusAndHAL3Support.getStatus();
if (status == hardware::ICameraServiceListener::STATUS_NOT_PRESENT ||
status == hardware::ICameraServiceListener::STATUS_ENUMERATING) {
continue;
}
- if (!deviceStatus.second.supportsHAL3) {
+ if (!statusAndHAL3Support.supportsHAL3) {
continue;
}
- cameraIds->push_back(deviceStatus.first);
+ cameraIds->push_back(key.cameraId);
}
}
@@ -461,24 +567,25 @@
}
binder::Status CameraManagerGlobal::CameraServiceListener::onStatusChanged(
- int32_t status, const std::string& cameraId) {
+ int32_t status, const std::string& cameraId, int deviceId) {
sp<CameraManagerGlobal> cm = mCameraManager.promote();
if (cm != nullptr) {
- cm->onStatusChanged(status, cameraId);
- } else {
- ALOGE("Cannot deliver status change. Global camera manager died");
+ cm->onStatusChanged(status, deviceId, cameraId);
}
+ ALOGE_IF(cm == nullptr,
+ "Cannot deliver camera status change. Global camera manager died");
return binder::Status::ok();
}
binder::Status CameraManagerGlobal::CameraServiceListener::onPhysicalCameraStatusChanged(
- int32_t status, const std::string& cameraId, const std::string& physicalCameraId) {
+ int32_t status, const std::string& cameraId, const std::string& physicalCameraId,
+ int deviceId) {
sp<CameraManagerGlobal> cm = mCameraManager.promote();
if (cm != nullptr) {
- cm->onStatusChanged(status, cameraId, physicalCameraId);
- } else {
- ALOGE("Cannot deliver physical camera status change. Global camera manager died");
+ cm->onStatusChanged(status, deviceId, cameraId, physicalCameraId);
}
+ ALOGE_IF(cm == nullptr,
+ "Cannot deliver physical camera status change. Global camera manager died");
return binder::Status::ok();
}
@@ -496,23 +603,24 @@
}
}
-void CameraManagerGlobal::onStatusChanged(
- int32_t status, const std::string& cameraId) {
+void CameraManagerGlobal::onStatusChanged(int32_t status, const int deviceId,
+ const std::string& cameraId) {
Mutex::Autolock _l(mLock);
- onStatusChangedLocked(status, cameraId);
+ onStatusChangedLocked(status, deviceId, cameraId);
}
-void CameraManagerGlobal::onStatusChangedLocked(
- int32_t status, const std::string& cameraId) {
+void CameraManagerGlobal::onStatusChangedLocked(int32_t status, const int deviceId,
+ const std::string& cameraId) {
if (!validStatus(status)) {
ALOGE("%s: Invalid status %d", __FUNCTION__, status);
return;
}
- bool firstStatus = (mDeviceStatusMap.count(cameraId) == 0);
- int32_t oldStatus = firstStatus ?
- status : // first status
- mDeviceStatusMap[cameraId].getStatus();
+ DeviceStatusMapKey key{.deviceId = deviceId, .cameraId = cameraId};
+
+ bool firstStatus = (mDeviceStatusMap.count(key) == 0);
+ int32_t oldStatus = firstStatus ? status : // first status
+ mDeviceStatusMap[key].getStatus();
if (!firstStatus &&
isStatusAvailable(status) == isStatusAvailable(oldStatus)) {
@@ -522,15 +630,17 @@
bool supportsHAL3 = supportsCamera2ApiLocked(cameraId);
if (firstStatus) {
- mDeviceStatusMap.emplace(std::piecewise_construct,
- std::forward_as_tuple(cameraId),
- std::forward_as_tuple(status, supportsHAL3));
+ mDeviceStatusMap.emplace(std::piecewise_construct, std::forward_as_tuple(key),
+ std::forward_as_tuple(status, supportsHAL3));
} else {
- mDeviceStatusMap[cameraId].updateStatus(status);
+ mDeviceStatusMap[key].updateStatus(status);
}
// Iterate through all registered callbacks
if (supportsHAL3) {
for (auto cb : mCallbacks) {
+ if (!isCameraAccessible(cb.mDeviceContext, deviceId)) {
+ continue;
+ }
sp<AMessage> msg = new AMessage(kWhatSendSingleCallback, mHandler);
ACameraManager_AvailabilityCallback cbFp = isStatusAvailable(status) ?
cb.mAvailable : cb.mUnavailable;
@@ -542,30 +652,31 @@
}
}
if (status == hardware::ICameraServiceListener::STATUS_NOT_PRESENT) {
- mDeviceStatusMap.erase(cameraId);
+ mDeviceStatusMap.erase(key);
}
}
-void CameraManagerGlobal::onStatusChanged(
- int32_t status, const std::string& cameraId, const std::string& physicalCameraId) {
+void CameraManagerGlobal::onStatusChanged(int32_t status, const int deviceId,
+ const std::string& cameraId, const std::string& physicalCameraId) {
Mutex::Autolock _l(mLock);
- onStatusChangedLocked(status, cameraId, physicalCameraId);
+ onStatusChangedLocked(status, deviceId, cameraId, physicalCameraId);
}
-void CameraManagerGlobal::onStatusChangedLocked(
- int32_t status, const std::string& cameraId, const std::string& physicalCameraId) {
+void CameraManagerGlobal::onStatusChangedLocked(int32_t status, const int deviceId,
+ const std::string& cameraId, const std::string& physicalCameraId) {
if (!validStatus(status)) {
ALOGE("%s: Invalid status %d", __FUNCTION__, status);
return;
}
- auto logicalStatus = mDeviceStatusMap.find(cameraId);
+ DeviceStatusMapKey key{.deviceId = deviceId, .cameraId = cameraId};
+ auto logicalStatus = mDeviceStatusMap.find(key);
if (logicalStatus == mDeviceStatusMap.end()) {
ALOGE("%s: Physical camera id %s status change on a non-present id %s",
__FUNCTION__, physicalCameraId.c_str(), cameraId.c_str());
return;
}
- int32_t logicalCamStatus = mDeviceStatusMap[cameraId].getStatus();
+ int32_t logicalCamStatus = mDeviceStatusMap[key].getStatus();
if (logicalCamStatus != hardware::ICameraServiceListener::STATUS_PRESENT &&
logicalCamStatus != hardware::ICameraServiceListener::STATUS_NOT_AVAILABLE) {
ALOGE("%s: Physical camera id %s status %d change for an invalid logical camera state %d",
@@ -577,14 +688,17 @@
bool updated = false;
if (status == hardware::ICameraServiceListener::STATUS_PRESENT) {
- updated = mDeviceStatusMap[cameraId].removeUnavailablePhysicalId(physicalCameraId);
+ updated = mDeviceStatusMap[key].removeUnavailablePhysicalId(physicalCameraId);
} else {
- updated = mDeviceStatusMap[cameraId].addUnavailablePhysicalId(physicalCameraId);
+ updated = mDeviceStatusMap[key].addUnavailablePhysicalId(physicalCameraId);
}
// Iterate through all registered callbacks
if (supportsHAL3 && updated) {
for (auto cb : mCallbacks) {
+ if (!isCameraAccessible(cb.mDeviceContext, deviceId)) {
+ continue;
+ }
sp<AMessage> msg = new AMessage(kWhatSendSinglePhysicalCameraCallback, mHandler);
ACameraManager_PhysicalCameraAvailabilityCallback cbFp = isStatusAvailable(status) ?
cb.mPhysicalCamAvailable : cb.mPhysicalCamUnavailable;
@@ -638,7 +752,7 @@
Mutex::Autolock _l(mLock);
std::vector<std::string> idList;
- CameraManagerGlobal::getInstance()->getCameraIdList(&idList);
+ mGlobalManager->getCameraIdList(mDeviceContext, &idList);
int numCameras = idList.size();
ACameraIdList *out = new ACameraIdList;
@@ -688,7 +802,7 @@
const char* cameraIdStr, sp<ACameraMetadata>* characteristics) {
Mutex::Autolock _l(mLock);
- sp<hardware::ICameraService> cs = CameraManagerGlobal::getInstance()->getCameraService();
+ sp<hardware::ICameraService> cs = mGlobalManager->getCameraService();
if (cs == nullptr) {
ALOGE("%s: Cannot reach camera service!", __FUNCTION__);
return ACAMERA_ERROR_CAMERA_DISCONNECTED;
@@ -697,7 +811,9 @@
CameraMetadata rawMetadata;
int targetSdkVersion = android_get_application_target_sdk_version();
binder::Status serviceRet = cs->getCameraCharacteristics(cameraIdStr,
- targetSdkVersion, /*overrideToPortrait*/false, &rawMetadata);
+ targetSdkVersion, /*rotationOverride*/hardware::ICameraService::ROTATION_OVERRIDE_NONE,
+ mDeviceContext.deviceId, static_cast<int32_t>(mDeviceContext.policy),
+ &rawMetadata);
if (!serviceRet.isOk()) {
switch(serviceRet.serviceSpecificErrorCode()) {
case hardware::ICameraService::ERROR_DISCONNECTED:
@@ -734,7 +850,7 @@
ACameraDevice* device = new ACameraDevice(cameraId, callback, chars);
- sp<hardware::ICameraService> cs = CameraManagerGlobal::getInstance()->getCameraService();
+ sp<hardware::ICameraService> cs = mGlobalManager->getCameraService();
if (cs == nullptr) {
ALOGE("%s: Cannot reach camera service!", __FUNCTION__);
delete device;
@@ -749,7 +865,9 @@
binder::Status serviceRet = cs->connectDevice(
callbacks, cameraId, "", {},
hardware::ICameraService::USE_CALLING_UID, /*oomScoreOffset*/0,
- targetSdkVersion, /*overrideToPortrait*/false, /*out*/&deviceRemote);
+ targetSdkVersion, /*rotationOverride*/hardware::ICameraService::ROTATION_OVERRIDE_NONE,
+ mDeviceContext.deviceId, static_cast<int32_t>(mDeviceContext.policy),
+ /*out*/&deviceRemote);
if (!serviceRet.isOk()) {
ALOGE("%s: connect camera device failed: %s", __FUNCTION__, serviceRet.toString8().c_str());
@@ -796,6 +914,22 @@
return ACAMERA_OK;
}
-ACameraManager::~ACameraManager() {
+void ACameraManager::registerAvailabilityCallback(
+ const ACameraManager_AvailabilityCallbacks* callback) {
+ mGlobalManager->registerAvailabilityCallback(mDeviceContext, callback);
+}
+void ACameraManager::unregisterAvailabilityCallback(
+ const ACameraManager_AvailabilityCallbacks* callback) {
+ mGlobalManager->unregisterAvailabilityCallback(mDeviceContext, callback);
+}
+
+void ACameraManager::registerExtendedAvailabilityCallback(
+ const ACameraManager_ExtendedAvailabilityCallbacks* callback) {
+ mGlobalManager->registerExtendedAvailabilityCallback(mDeviceContext, callback);
+}
+
+void ACameraManager::unregisterExtendedAvailabilityCallback(
+ const ACameraManager_ExtendedAvailabilityCallbacks* callback) {
+ mGlobalManager->unregisterExtendedAvailabilityCallback(mDeviceContext, callback);
}
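A condensed, standalone restatement of the accessibility rule introduced above, with the aconfig flag check omitted; the types and names here are simplified, not the actual DeviceContext:

    // Illustration only: with the default camera policy a context sees only
    // default-device cameras; with a custom policy it sees only cameras owned
    // by its own (virtual) device id.
    enum class Policy { DEFAULT, CUSTOM };
    constexpr int kDefaultDevice = 0;

    struct Context { int deviceId; Policy policy; };

    bool accessible(const Context& ctx, int cameraOwnerDeviceId) {
        if (ctx.policy == Policy::DEFAULT) {
            return cameraOwnerDeviceId == kDefaultDevice;
        }
        return ctx.deviceId == cameraOwnerDeviceId;
    }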
diff --git a/camera/ndk/impl/ACameraManager.h b/camera/ndk/impl/ACameraManager.h
index c135d0f..d8bf6b1 100644
--- a/camera/ndk/impl/ACameraManager.h
+++ b/camera/ndk/impl/ACameraManager.h
@@ -20,6 +20,7 @@
#include <camera/NdkCameraManager.h>
#include <android-base/parseint.h>
+#include <android/companion/virtualnative/IVirtualDeviceManagerNative.h>
#include <android/hardware/ICameraService.h>
#include <android/hardware/BnCameraServiceListener.h>
#include <camera/CameraMetadata.h>
@@ -37,6 +38,36 @@
namespace android {
namespace acam {
+enum class DevicePolicy {
+ DEVICE_POLICY_DEFAULT =
+ ::android::companion::virtualnative::IVirtualDeviceManagerNative::DEVICE_POLICY_DEFAULT,
+ DEVICE_POLICY_CUSTOM =
+ ::android::companion::virtualnative::IVirtualDeviceManagerNative::DEVICE_POLICY_CUSTOM
+};
+
+/**
+ * Device context within which cameras are accessed.
+ *
+ * When constructed, the device id is set to the id of the virtual device corresponding to the
+ * caller's UID (or to the default device id if the current app process is not running on a
+ * virtual device).
+ *
+ * See getDeviceId() in Context.java for more context (no pun intended).
+ */
+struct DeviceContext {
+ DeviceContext();
+
+ // Id of the virtual device associated with this context (or DEFAULT_DEVICE_ID = 0 in case
+ // the caller UID is not running on a virtual device).
+ int deviceId;
+ // Device policy corresponding to VirtualDeviceParams.POLICY_TYPE_CAMERA:
+ //
+ // Can be either:
+ // * (0) DEVICE_POLICY_DEFAULT - virtual devices have access to default device cameras.
+ // * (1) DEVICE_POLICY_CUSTOM - virtual devices do not have access to default device cameras
+ // and can only access virtual cameras owned by the same device.
+ DevicePolicy policy;
+};
+
/**
* Per-process singleton instance of CameraManger. Shared by all ACameraManager
* instances. Created when first ACameraManager is created and destroyed when
@@ -49,20 +80,22 @@
static sp<CameraManagerGlobal> getInstance();
sp<hardware::ICameraService> getCameraService();
- void registerAvailabilityCallback(
- const ACameraManager_AvailabilityCallbacks *callback);
- void unregisterAvailabilityCallback(
- const ACameraManager_AvailabilityCallbacks *callback);
+ void registerAvailabilityCallback(const DeviceContext& context,
+ const ACameraManager_AvailabilityCallbacks* callback);
+ void unregisterAvailabilityCallback(const DeviceContext& context,
+ const ACameraManager_AvailabilityCallbacks* callback);
void registerExtendedAvailabilityCallback(
+ const DeviceContext& context,
const ACameraManager_ExtendedAvailabilityCallbacks* callback);
void unregisterExtendedAvailabilityCallback(
+ const DeviceContext& context,
const ACameraManager_ExtendedAvailabilityCallbacks* callback);
/**
* Return camera IDs that support camera2
*/
- void getCameraIdList(std::vector<std::string> *cameraIds);
+ void getCameraIdList(const DeviceContext& deviceContext, std::vector<std::string>* cameraIds);
private:
sp<hardware::ICameraService> mCameraService;
@@ -70,8 +103,8 @@
const char* kCameraServiceName = "media.camera";
Mutex mLock;
- template<class T>
- void registerAvailCallback(const T *callback);
+ template <class T>
+ void registerAvailCallback(const DeviceContext& deviceContext, const T* callback);
class DeathNotifier : public IBinder::DeathRecipient {
public:
@@ -87,23 +120,24 @@
class CameraServiceListener final : public hardware::BnCameraServiceListener {
public:
explicit CameraServiceListener(CameraManagerGlobal* cm) : mCameraManager(cm) {}
- virtual binder::Status onStatusChanged(int32_t status, const std::string& cameraId);
+ virtual binder::Status onStatusChanged(int32_t status, const std::string& cameraId,
+ int32_t deviceId);
virtual binder::Status onPhysicalCameraStatusChanged(int32_t status,
- const std::string& cameraId, const std::string& physicalCameraId);
+ const std::string& cameraId, const std::string& physicalCameraId, int32_t deviceId);
// Torch API not implemented yet
- virtual binder::Status onTorchStatusChanged(int32_t, const std::string&) {
+ virtual binder::Status onTorchStatusChanged(int32_t, const std::string&, int32_t) {
return binder::Status::ok();
}
- virtual binder::Status onTorchStrengthLevelChanged(const std::string&, int32_t) {
+ virtual binder::Status onTorchStrengthLevelChanged(const std::string&, int32_t, int32_t) {
return binder::Status::ok();
}
virtual binder::Status onCameraAccessPrioritiesChanged();
- virtual binder::Status onCameraOpened(const std::string&, const std::string&) {
+ virtual binder::Status onCameraOpened(const std::string&, const std::string&, int32_t) {
return binder::Status::ok();
}
- virtual binder::Status onCameraClosed(const std::string&) {
+ virtual binder::Status onCameraClosed(const std::string&, int32_t) {
return binder::Status::ok();
}
@@ -114,29 +148,34 @@
// Wrapper of ACameraManager_AvailabilityCallbacks so we can store it in std::set
struct Callback {
- explicit Callback(const ACameraManager_AvailabilityCallbacks *callback) :
- mAvailable(callback->onCameraAvailable),
- mUnavailable(callback->onCameraUnavailable),
- mAccessPriorityChanged(nullptr),
- mPhysicalCamAvailable(nullptr),
- mPhysicalCamUnavailable(nullptr),
- mContext(callback->context) {}
+ explicit Callback(const DeviceContext& deviceContext,
+ const ACameraManager_AvailabilityCallbacks* callback)
+ : mDeviceContext(deviceContext),
+ mAvailable(callback->onCameraAvailable),
+ mUnavailable(callback->onCameraUnavailable),
+ mAccessPriorityChanged(nullptr),
+ mPhysicalCamAvailable(nullptr),
+ mPhysicalCamUnavailable(nullptr),
+ mContext(callback->context) {}
- explicit Callback(const ACameraManager_ExtendedAvailabilityCallbacks *callback) :
- mAvailable(callback->availabilityCallbacks.onCameraAvailable),
- mUnavailable(callback->availabilityCallbacks.onCameraUnavailable),
- mAccessPriorityChanged(callback->onCameraAccessPrioritiesChanged),
- mPhysicalCamAvailable(callback->onPhysicalCameraAvailable),
- mPhysicalCamUnavailable(callback->onPhysicalCameraUnavailable),
- mContext(callback->availabilityCallbacks.context) {}
+ explicit Callback(const DeviceContext& deviceContext,
+ const ACameraManager_ExtendedAvailabilityCallbacks* callback)
+ : mDeviceContext(deviceContext),
+ mAvailable(callback->availabilityCallbacks.onCameraAvailable),
+ mUnavailable(callback->availabilityCallbacks.onCameraUnavailable),
+ mAccessPriorityChanged(callback->onCameraAccessPrioritiesChanged),
+ mPhysicalCamAvailable(callback->onPhysicalCameraAvailable),
+ mPhysicalCamUnavailable(callback->onPhysicalCameraUnavailable),
+ mContext(callback->availabilityCallbacks.context) {}
bool operator == (const Callback& other) const {
- return (mAvailable == other.mAvailable &&
- mUnavailable == other.mUnavailable &&
+ return (mAvailable == other.mAvailable && mUnavailable == other.mUnavailable &&
mAccessPriorityChanged == other.mAccessPriorityChanged &&
mPhysicalCamAvailable == other.mPhysicalCamAvailable &&
mPhysicalCamUnavailable == other.mPhysicalCamUnavailable &&
- mContext == other.mContext);
+ mContext == other.mContext &&
+ mDeviceContext.deviceId == other.mDeviceContext.deviceId &&
+ mDeviceContext.policy == other.mDeviceContext.policy);
}
bool operator != (const Callback& other) const {
return !(*this == other);
@@ -145,6 +184,9 @@
#pragma GCC diagnostic push
#pragma GCC diagnostic ignored "-Wordered-compare-function-pointers"
if (*this == other) return false;
+ if (mDeviceContext.deviceId != other.mDeviceContext.deviceId) {
+ return mDeviceContext.deviceId < other.mDeviceContext.deviceId;
+ }
if (mContext != other.mContext) return mContext < other.mContext;
if (mPhysicalCamAvailable != other.mPhysicalCamAvailable) {
return mPhysicalCamAvailable < other.mPhysicalCamAvailable;
@@ -162,6 +204,7 @@
bool operator > (const Callback& other) const {
return (*this != other && !(*this < other));
}
+ DeviceContext mDeviceContext;
ACameraManager_AvailabilityCallback mAvailable;
ACameraManager_AvailabilityCallback mUnavailable;
ACameraManager_AccessPrioritiesChangedCallback mAccessPriorityChanged;
@@ -203,37 +246,17 @@
sp<hardware::ICameraService> getCameraServiceLocked();
void onCameraAccessPrioritiesChanged();
- void onStatusChanged(int32_t status, const std::string& cameraId);
- void onStatusChangedLocked(int32_t status, const std::string& cameraId);
- void onStatusChanged(int32_t status, const std::string& cameraId, const std::string& physicalCameraId);
- void onStatusChangedLocked(int32_t status, const std::string& cameraId,
- const std::string& physicalCameraId);
+ void onStatusChanged(int32_t status, int deviceId, const std::string& cameraId);
+ void onStatusChangedLocked(int32_t status, int deviceId, const std::string& cameraId);
+ void onStatusChanged(int32_t status, int deviceId, const std::string& cameraId,
+ const std::string& physicalCameraId);
+ void onStatusChangedLocked(int32_t status, int deviceId, const std::string& cameraId,
+ const std::string& physicalCameraId);
// Utils for status
static bool validStatus(int32_t status);
static bool isStatusAvailable(int32_t status);
bool supportsCamera2ApiLocked(const std::string &cameraId);
- // The sort logic must match the logic in
- // libcameraservice/common/CameraProviderManager.cpp::getAPI1CompatibleCameraDeviceIds
- struct CameraIdComparator {
- bool operator()(const std::string& a, const std::string& b) const {
- uint32_t aUint = 0, bUint = 0;
- bool aIsUint = base::ParseUint(a.c_str(), &aUint);
- bool bIsUint = base::ParseUint(b.c_str(), &bUint);
-
- // Uint device IDs first
- if (aIsUint && bIsUint) {
- return aUint < bUint;
- } else if (aIsUint) {
- return true;
- } else if (bIsUint) {
- return false;
- }
- // Simple string compare if both id are not uint
- return a < b;
- }
- };
-
struct StatusAndHAL3Support {
private:
int32_t status = hardware::ICameraServiceListener::STATUS_NOT_PRESENT;
@@ -252,13 +275,40 @@
std::set<std::string> getUnavailablePhysicalIds();
};
- // Map camera_id -> status
- std::map<std::string, StatusAndHAL3Support, CameraIdComparator> mDeviceStatusMap;
+ struct DeviceStatusMapKey {
+ int deviceId;
+ std::string cameraId;
+
+ bool operator<(const DeviceStatusMapKey& other) const {
+ if (deviceId != other.deviceId) {
+ return deviceId < other.deviceId;
+ }
+
+ // The sort logic must match the logic in
+ // libcameraservice/common/CameraProviderManager.cpp::getAPI1CompatibleCameraDeviceIds
+ uint32_t cameraIdUint = 0, otherCameraIdUint = 0;
+ bool cameraIdIsUint = base::ParseUint(cameraId.c_str(), &cameraIdUint);
+ bool otherCameraIdIsUint = base::ParseUint(other.cameraId.c_str(), &otherCameraIdUint);
+
+ // Uint device IDs first
+ if (cameraIdIsUint && otherCameraIdIsUint) {
+ return cameraIdUint < otherCameraIdUint;
+ } else if (cameraIdIsUint) {
+ return true;
+ } else if (otherCameraIdIsUint) {
+ return false;
+ }
+ // Simple string compare if both ids are not uints
+ return cameraId < other.cameraId;
+ }
+ };
+
+ std::map<DeviceStatusMapKey, StatusAndHAL3Support> mDeviceStatusMap;
// For the singleton instance
static Mutex sLock;
static wp<CameraManagerGlobal> sInstance;
- CameraManagerGlobal() {};
+ CameraManagerGlobal() {}
~CameraManagerGlobal();
};
@@ -270,9 +320,7 @@
* Leave outside of android namespace because it's NDK struct
*/
struct ACameraManager {
- ACameraManager() :
- mGlobalManager(android::acam::CameraManagerGlobal::getInstance()) {}
- ~ACameraManager();
+ ACameraManager() : mGlobalManager(android::acam::CameraManagerGlobal::getInstance()) {}
camera_status_t getCameraIdList(ACameraIdList** cameraIdList);
static void deleteCameraIdList(ACameraIdList* cameraIdList);
@@ -281,6 +329,12 @@
camera_status_t openCamera(const char* cameraId,
ACameraDevice_StateCallbacks* callback,
/*out*/ACameraDevice** device);
+ void registerAvailabilityCallback(const ACameraManager_AvailabilityCallbacks* callback);
+ void unregisterAvailabilityCallback(const ACameraManager_AvailabilityCallbacks* callback);
+ void registerExtendedAvailabilityCallback(
+ const ACameraManager_ExtendedAvailabilityCallbacks* callback);
+ void unregisterExtendedAvailabilityCallback(
+ const ACameraManager_ExtendedAvailabilityCallbacks* callback);
private:
enum {
@@ -288,6 +342,7 @@
};
android::Mutex mLock;
android::sp<android::acam::CameraManagerGlobal> mGlobalManager;
+ const android::acam::DeviceContext mDeviceContext;
};
#endif //_ACAMERA_MANAGER_H
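A small standalone sketch of the (deviceId, cameraId) key ordering used by mDeviceStatusMap above, assuming the non-numeric fallback compares the camera id strings themselves; numeric camera ids sort numerically ahead of non-numeric ids within each device id:

    // Illustration only: keys group by deviceId first, then numeric camera ids
    // in numeric order, then remaining ids lexicographically.
    #include <cstdlib>
    #include <string>

    struct Key {
        int deviceId;
        std::string cameraId;

        bool operator<(const Key& o) const {
            if (deviceId != o.deviceId) return deviceId < o.deviceId;
            char* endA = nullptr;
            char* endB = nullptr;
            unsigned long a = std::strtoul(cameraId.c_str(), &endA, 10);
            unsigned long b = std::strtoul(o.cameraId.c_str(), &endB, 10);
            bool aNum = !cameraId.empty() && *endA == '\0';
            bool bNum = !o.cameraId.empty() && *endB == '\0';
            if (aNum && bNum) return a < b;
            if (aNum != bNum) return aNum;   // numeric ids sort first
            return cameraId < o.cameraId;    // plain string compare otherwise
        }
    };
    // e.g. {0,"0"} < {0,"1"} < {0,"back-wide"} < {1,"0"}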
diff --git a/camera/ndk/impl/ACameraMetadata.cpp b/camera/ndk/impl/ACameraMetadata.cpp
index b6b8012..69b30f7 100644
--- a/camera/ndk/impl/ACameraMetadata.cpp
+++ b/camera/ndk/impl/ACameraMetadata.cpp
@@ -18,6 +18,8 @@
#define LOG_TAG "ACameraMetadata"
#include "ACameraMetadata.h"
+
+#include <camera_metadata_hidden.h>
#include <utils/Vector.h>
#include <system/graphics.h>
#include <media/NdkImage.h>
@@ -85,6 +87,19 @@
filterDurations(ANDROID_DEPTH_AVAILABLE_DYNAMIC_DEPTH_STALL_DURATIONS);
}
// TODO: filter request/result keys
+ const CameraMetadata& metadata = *mData;
+ const camera_metadata_t *rawMetadata = metadata.getAndLock();
+ metadata_vendor_id_t vendorTagId = get_camera_metadata_vendor_id(rawMetadata);
+ metadata.unlock(rawMetadata);
+ sp<VendorTagDescriptorCache> vtCache = VendorTagDescriptorCache::getGlobalVendorTagCache();
+ if (vtCache == nullptr) {
+ ALOGE("%s: error vendor tag descriptor cache is not initialized", __FUNCTION__);
+ return;
+ }
+ vtCache->getVendorTagDescriptor(vendorTagId, &mVTags);
+ if (mVTags == nullptr) {
+ ALOGE("%s: error retrieving vendor tag descriptor", __FUNCTION__);
+ }
}
bool
@@ -473,6 +488,13 @@
return (*mData);
}
+camera_status_t
+ACameraMetadata::getTagFromName(const char *name, uint32_t *tag) const {
+ Mutex::Autolock _l(mLock);
+ status_t status = CameraMetadata::getTagFromName(name, mVTags.get(), tag);
+ return status == OK ? ACAMERA_OK : ACAMERA_ERROR_METADATA_NOT_FOUND;
+}
+
bool
ACameraMetadata::isLogicalMultiCamera(size_t* count, const char*const** physicalCameraIds) const {
if (mType != ACM_CHARACTERISTICS) {
diff --git a/camera/ndk/impl/ACameraMetadata.h b/camera/ndk/impl/ACameraMetadata.h
index 084a60b..e89e620 100644
--- a/camera/ndk/impl/ACameraMetadata.h
+++ b/camera/ndk/impl/ACameraMetadata.h
@@ -27,9 +27,17 @@
#ifdef __ANDROID_VNDK__
#include <CameraMetadata.h>
+#include <aidl/android/frameworks/cameraservice/common/VendorTag.h>
+#include <aidl/android/frameworks/cameraservice/common/VendorTagSection.h>
+#include <aidl/android/frameworks/cameraservice/common/ProviderIdAndVendorTagSections.h>
+#include <VendorTagDescriptor.h>
using CameraMetadata = android::hardware::camera::common::V1_0::helper::CameraMetadata;
+using ::aidl::android::frameworks::cameraservice::common::ProviderIdAndVendorTagSections;
+using ::android::hardware::camera::common::V1_0::helper::VendorTagDescriptor;
+using ::android::hardware::camera::common::V1_0::helper::VendorTagDescriptorCache;
#else
#include <camera/CameraMetadata.h>
+#include <camera/VendorTagDescriptor.h>
#endif
#include <camera/NdkCameraMetadata.h>
@@ -73,6 +81,8 @@
camera_status_t getTags(/*out*/int32_t* numTags,
/*out*/const uint32_t** tags) const;
+ camera_status_t
+ getTagFromName(const char *name, uint32_t *tag) const;
const CameraMetadata& getInternalData() const;
bool isLogicalMultiCamera(size_t* count, const char* const** physicalCameraIds) const;
@@ -134,6 +144,7 @@
std::vector<const char*> mStaticPhysicalCameraIds;
std::vector<String8> mStaticPhysicalCameraIdValues;
+ sp<VendorTagDescriptor> mVTags = nullptr;
};
#endif // _ACAMERA_METADATA_H
diff --git a/camera/ndk/include/camera/NdkCameraMetadata.h b/camera/ndk/include/camera/NdkCameraMetadata.h
index cf29736..237d07b 100644
--- a/camera/ndk/include/camera/NdkCameraMetadata.h
+++ b/camera/ndk/include/camera/NdkCameraMetadata.h
@@ -221,6 +221,24 @@
/*out*/int32_t* numEntries, /*out*/const uint32_t** tags) __INTRODUCED_IN(24);
/**
+ * Look up tag ID value for device-specific custom tags that are usable only
+ * for the particular device, by name. The name and type of the tag need to be
+ * discovered from some other source, such as the manufacturer. The ID value is
+ * stable during the lifetime of an application, but should be queried again after
+ * process restarts. This method can also be used to look up public tags that exist in the
+ * Java API by name; however, it is simpler and faster to use the tag values that already
+ * exist in the NDK headers.
+ * @param metadata The {@link ACameraMetadata} to query the tag value from.
+ * @param name The name of the tag being queried.
+ * @param tag The output tag assigned by this method.
+ *
+ * @return ACAMERA_OK only if the function call was successful.
+ */
+
+camera_status_t
+ACameraMetadata_getTagFromName(const ACameraMetadata* metadata, const char *name, uint32_t *tag) __INTRODUCED_IN(35);
+
+/**
* Create a copy of input {@link ACameraMetadata}.
*
* <p>The returned ACameraMetadata must be freed by the application by {@link ACameraMetadata_free}
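A minimal sketch of the lookup-then-read pattern this new entry point enables; the vendor tag name "com.example.vendor.someTag" is a made-up placeholder:

    // Sketch: resolve a vendor-specific tag by name, then read it from the
    // same characteristics object.
    #include <camera/NdkCameraManager.h>
    #include <camera/NdkCameraMetadata.h>

    camera_status_t readVendorTag(ACameraManager* manager, const char* cameraId) {
        ACameraMetadata* chars = nullptr;
        camera_status_t status =
                ACameraManager_getCameraCharacteristics(manager, cameraId, &chars);
        if (status != ACAMERA_OK) return status;

        uint32_t tag = 0;
        status = ACameraMetadata_getTagFromName(chars, "com.example.vendor.someTag", &tag);
        if (status == ACAMERA_OK) {
            ACameraMetadata_const_entry entry;
            status = ACameraMetadata_getConstEntry(chars, tag, &entry);
            // entry.type / entry.count / entry.data.* describe the tag's value.
        }
        ACameraMetadata_free(chars);
        return status;
    }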
diff --git a/camera/ndk/include/camera/NdkCameraMetadataTags.h b/camera/ndk/include/camera/NdkCameraMetadataTags.h
index 1ed17a3..cba26da 100644
--- a/camera/ndk/include/camera/NdkCameraMetadataTags.h
+++ b/camera/ndk/include/camera/NdkCameraMetadataTags.h
@@ -40,6 +40,21 @@
__BEGIN_DECLS
+/*
+ * Note: The following enum values were incorrect and have been updated:
+ * enum old value updated value
+ * ACAMERA_CONTROL_SETTINGS_OVERRIDE ACAMERA_CONTROL_START + 49 ACAMERA_CONTROL_START + 52;
+ * ACAMERA_CONTROL_AVAILABLE_SETTINGS_OVERRIDES ACAMERA_CONTROL_START + 50 ACAMERA_CONTROL_START + 53;
+ * ACAMERA_CONTROL_AUTOFRAMING ACAMERA_CONTROL_START + 52 ACAMERA_CONTROL_START + 55;
+ * ACAMERA_CONTROL_AUTOFRAMING_AVAILABLE ACAMERA_CONTROL_START + 53 ACAMERA_CONTROL_START + 56;
+ * ACAMERA_CONTROL_AUTOFRAMING_STATE ACAMERA_CONTROL_START + 54 ACAMERA_CONTROL_START + 57;
+ * ACAMERA_CONTROL_LOW_LIGHT_BOOST_INFO_LUMINANCE_RANGE ACAMERA_CONTROL_START + 55 ACAMERA_CONTROL_START + 58;
+ * ACAMERA_CONTROL_LOW_LIGHT_BOOST_STATE ACAMERA_CONTROL_START + 56 ACAMERA_CONTROL_START + 59;
+ *
+ * ACAMERA_SCALER_AVAILABLE_STREAM_USE_CASES ACAMERA_SCALER_START + 25 ACAMERA_SCALER_START + 26;
+ * ACAMERA_SCALER_CROP_REGION ACAMERA_SCALER_START + 26 ACAMERA_SCALER_START + 27;
+ */
+
typedef enum acamera_metadata_section {
ACAMERA_COLOR_CORRECTION,
@@ -588,7 +603,7 @@
* ACAMERA_SENSOR_FRAME_DURATION.</p>
* <p>Note that the actual achievable max framerate also depends on the minimum frame
* duration of the output streams. The max frame rate will be
- * <code>min(aeTargetFpsRange.maxFps, 1 / max(individual stream min durations)</code>. For example,
+ * <code>min(aeTargetFpsRange.maxFps, 1 / max(individual stream min durations))</code>. For example,
* if the application sets this key to <code>{60, 60}</code>, but the maximum minFrameDuration among
* all configured streams is 33ms, the maximum framerate won't be 60fps, but will be
* 30fps.</p>
@@ -2149,7 +2164,7 @@
* </ul>
*/
ACAMERA_CONTROL_SETTINGS_OVERRIDE = // int32 (acamera_metadata_enum_android_control_settings_override_t)
- ACAMERA_CONTROL_START + 49,
+ ACAMERA_CONTROL_START + 52,
/**
* <p>List of available settings overrides supported by the camera device that can
* be used to speed up certain controls.</p>
@@ -2175,7 +2190,7 @@
* @see ACAMERA_CONTROL_SETTINGS_OVERRIDE
*/
ACAMERA_CONTROL_AVAILABLE_SETTINGS_OVERRIDES = // int32[n]
- ACAMERA_CONTROL_START + 50,
+ ACAMERA_CONTROL_START + 53,
/**
* <p>Automatic crop, pan and zoom to keep objects in the center of the frame.</p>
*
@@ -2202,7 +2217,7 @@
* @see ACAMERA_SCALER_CROP_REGION
*/
ACAMERA_CONTROL_AUTOFRAMING = // byte (acamera_metadata_enum_android_control_autoframing_t)
- ACAMERA_CONTROL_START + 52,
+ ACAMERA_CONTROL_START + 55,
/**
* <p>Whether the camera device supports ACAMERA_CONTROL_AUTOFRAMING.</p>
*
@@ -2218,7 +2233,7 @@
* <p>Will be <code>false</code> if auto-framing is not available.</p>
*/
ACAMERA_CONTROL_AUTOFRAMING_AVAILABLE = // byte (acamera_metadata_enum_android_control_autoframing_available_t)
- ACAMERA_CONTROL_START + 53,
+ ACAMERA_CONTROL_START + 56,
/**
* <p>Current state of auto-framing.</p>
*
@@ -2245,7 +2260,7 @@
* @see ACAMERA_CONTROL_AUTOFRAMING_AVAILABLE
*/
ACAMERA_CONTROL_AUTOFRAMING_STATE = // byte (acamera_metadata_enum_android_control_autoframing_state_t)
- ACAMERA_CONTROL_START + 54,
+ ACAMERA_CONTROL_START + 57,
/**
* <p>The operating luminance range of low light boost measured in lux (lx).</p>
*
@@ -2258,7 +2273,7 @@
*
*/
ACAMERA_CONTROL_LOW_LIGHT_BOOST_INFO_LUMINANCE_RANGE = // float[2]
- ACAMERA_CONTROL_START + 55,
+ ACAMERA_CONTROL_START + 58,
/**
* <p>Current state of the low light boost AE mode.</p>
*
@@ -2272,13 +2287,15 @@
* <p>When low light boost is enabled by setting the AE mode to
* 'ON_LOW_LIGHT_BOOST_BRIGHTNESS_PRIORITY', it can dynamically apply a low light
* boost when the light level threshold is exceeded.</p>
+ * <p>This field is present in the CaptureResult when the AE mode is set to
+ * 'ON_LOW_LIGHT_BOOST_BRIGHTNESS_PRIORITY'. Otherwise, the field is not present.</p>
* <p>This state indicates when low light boost is 'ACTIVE' and applied. Similarly, it can
* indicate when it is not being applied by returning 'INACTIVE'.</p>
* <p>This key will be absent from the CaptureResult if AE mode is not set to
* 'ON_LOW_LIGHT_BOOST_BRIGHTNESS_PRIORITY.</p>
*/
ACAMERA_CONTROL_LOW_LIGHT_BOOST_STATE = // byte (acamera_metadata_enum_android_control_low_light_boost_state_t)
- ACAMERA_CONTROL_START + 56,
+ ACAMERA_CONTROL_START + 59,
ACAMERA_CONTROL_END,
/**
@@ -2688,7 +2705,9 @@
* upright.</p>
* <p>Camera devices may either encode this value into the JPEG EXIF header, or
* rotate the image data to match this orientation. When the image data is rotated,
- * the thumbnail data will also be rotated.</p>
+ * the thumbnail data will also be rotated. Additionally, in the case where the image data
+ * is rotated, <a href="https://developer.android.com/reference/android/media/Image.html#getWidth">Image#getWidth</a> and <a href="https://developer.android.com/reference/android/media/Image.html#getHeight">Image#getHeight</a>
+ * will not be updated to reflect the height and width of the rotated image.</p>
* <p>Note that this orientation is relative to the orientation of the camera sensor, given
* by ACAMERA_SENSOR_ORIENTATION.</p>
* <p>To translate from the device orientation given by the Android sensor APIs for camera
@@ -4671,7 +4690,7 @@
* application should leave stream use cases within the session as DEFAULT.</p>
*/
ACAMERA_SCALER_AVAILABLE_STREAM_USE_CASES = // int64[n] (acamera_metadata_enum_android_scaler_available_stream_use_cases_t)
- ACAMERA_SCALER_START + 25,
+ ACAMERA_SCALER_START + 26,
/**
* <p>The region of the sensor that corresponds to the RAW read out for this
* capture when the stream use case of a RAW stream is set to CROPPED_RAW.</p>
@@ -4727,7 +4746,7 @@
* @see ACAMERA_STATISTICS_HOT_PIXEL_MAP
*/
ACAMERA_SCALER_RAW_CROP_REGION = // int32[4]
- ACAMERA_SCALER_START + 26,
+ ACAMERA_SCALER_START + 27,
ACAMERA_SCALER_END,
/**
@@ -8275,7 +8294,10 @@
* FPS.</p>
* <p>If the session configuration is not supported, the AE mode reported in the
* CaptureResult will be 'ON' instead of 'ON_LOW_LIGHT_BOOST_BRIGHTNESS_PRIORITY'.</p>
- * <p>The application can observe the CapturerResult field
+ * <p>When this AE mode is enabled, the CaptureResult field
+ * ACAMERA_CONTROL_LOW_LIGHT_BOOST_STATE will be present and not null. Otherwise, the
+ * ACAMERA_CONTROL_LOW_LIGHT_BOOST_STATE field will not be present in the CaptureResult.</p>
+ * <p>The application can observe the CaptureResult field
* ACAMERA_CONTROL_LOW_LIGHT_BOOST_STATE to determine when low light boost is 'ACTIVE' or
* 'INACTIVE'.</p>
* <p>The low light boost is 'ACTIVE' once the scene lighting condition is less than the
diff --git a/camera/ndk/libcamera2ndk.map.txt b/camera/ndk/libcamera2ndk.map.txt
index 4c54658..7d7868b 100644
--- a/camera/ndk/libcamera2ndk.map.txt
+++ b/camera/ndk/libcamera2ndk.map.txt
@@ -35,6 +35,7 @@
ACameraMetadata_copy;
ACameraMetadata_free;
ACameraMetadata_getAllTags;
+ ACameraMetadata_getTagFromName; #introduced=35
ACameraMetadata_getConstEntry;
ACameraMetadata_isLogicalMultiCamera; # introduced=29
ACameraMetadata_fromCameraMetadata; # introduced=30
diff --git a/camera/ndk/ndk_vendor/impl/ACameraManager.cpp b/camera/ndk/ndk_vendor/impl/ACameraManager.cpp
index 099786b..cdba8ff 100644
--- a/camera/ndk/ndk_vendor/impl/ACameraManager.cpp
+++ b/camera/ndk/ndk_vendor/impl/ACameraManager.cpp
@@ -574,7 +574,7 @@
void CameraManagerGlobal::onStatusChangedLocked(
const CameraDeviceStatus &status, const std::string &cameraId) {
if (!validStatus(status)) {
- ALOGE("%s: Invalid status %d", __FUNCTION__, status);
+ ALOGE("%s: Invalid status %d", __FUNCTION__, static_cast<int>(status));
return;
}
@@ -629,7 +629,7 @@
const CameraDeviceStatus &status, const std::string& cameraId,
const std::string& physicalCameraId) {
if (!validStatus(status)) {
- ALOGE("%s: Invalid status %d", __FUNCTION__, status);
+ ALOGE("%s: Invalid status %d", __FUNCTION__, static_cast<int>(status));
return;
}
@@ -643,7 +643,8 @@
if (logicalCamStatus != CameraDeviceStatus::STATUS_PRESENT &&
logicalCamStatus != CameraDeviceStatus::STATUS_NOT_AVAILABLE) {
ALOGE("%s: Physical camera id %s status %d change for an invalid logical camera state %d",
- __FUNCTION__, physicalCameraId.c_str(), status, logicalCamStatus);
+ __FUNCTION__, physicalCameraId.c_str(), static_cast<int>(status),
+ static_cast<int>(logicalCamStatus));
return;
}
@@ -866,6 +867,25 @@
return status == OK ? ACAMERA_OK : ACAMERA_ERROR_METADATA_NOT_FOUND;
}
-ACameraManager::~ACameraManager() {
+void ACameraManager::registerAvailabilityCallback(
+ const ACameraManager_AvailabilityCallbacks* callback) {
+ mGlobalManager->registerAvailabilityCallback(callback);
+}
+void ACameraManager::unregisterAvailabilityCallback(
+ const ACameraManager_AvailabilityCallbacks* callback) {
+ mGlobalManager->unregisterAvailabilityCallback(callback);
+}
+
+void ACameraManager::registerExtendedAvailabilityCallback(
+ const ACameraManager_ExtendedAvailabilityCallbacks* callback) {
+ mGlobalManager->registerExtendedAvailabilityCallback(callback);
+}
+
+void ACameraManager::unregisterExtendedAvailabilityCallback(
+ const ACameraManager_ExtendedAvailabilityCallbacks* callback) {
+ mGlobalManager->unregisterExtendedAvailabilityCallback(callback);
+}
+
+ACameraManager::~ACameraManager() {
}
diff --git a/camera/ndk/ndk_vendor/impl/ACameraManager.h b/camera/ndk/ndk_vendor/impl/ACameraManager.h
index 85acee7..2d8eefa 100644
--- a/camera/ndk/ndk_vendor/impl/ACameraManager.h
+++ b/camera/ndk/ndk_vendor/impl/ACameraManager.h
@@ -265,6 +265,12 @@
ACameraDevice_StateCallbacks* callback,
/*out*/ACameraDevice** device);
camera_status_t getTagFromName(const char *cameraId, const char *name, uint32_t *tag);
+ void registerAvailabilityCallback(const ACameraManager_AvailabilityCallbacks* callback);
+ void unregisterAvailabilityCallback(const ACameraManager_AvailabilityCallbacks* callback);
+ void registerExtendedAvailabilityCallback(
+ const ACameraManager_ExtendedAvailabilityCallbacks* callback);
+ void unregisterExtendedAvailabilityCallback(
+ const ACameraManager_ExtendedAvailabilityCallbacks* callback);
private:
enum {
diff --git a/camera/tests/CameraBinderTests.cpp b/camera/tests/CameraBinderTests.cpp
index bb963ab..e5f99be 100644
--- a/camera/tests/CameraBinderTests.cpp
+++ b/camera/tests/CameraBinderTests.cpp
@@ -43,6 +43,7 @@
#include <camera/camera2/OutputConfiguration.h>
#include <camera/camera2/SessionConfiguration.h>
#include <camera/camera2/SubmitInfo.h>
+#include <camera/CameraUtils.h>
#include <camera/StringUtils.h>
#include <gui/BufferItemConsumer.h>
@@ -77,29 +78,34 @@
public:
virtual ~TestCameraServiceListener() {};
- virtual binder::Status onStatusChanged(int32_t status, const std::string& cameraId) override {
+ virtual binder::Status onStatusChanged(int32_t status, const std::string& cameraId,
+ [[maybe_unused]] int32_t /*deviceId*/) override {
Mutex::Autolock l(mLock);
mCameraStatuses[cameraId] = status;
mCondition.broadcast();
return binder::Status::ok();
- };
+ }
- virtual binder::Status onPhysicalCameraStatusChanged(int32_t /*status*/,
- const std::string& /*cameraId*/, const std::string& /*physicalCameraId*/) override {
+ virtual binder::Status onPhysicalCameraStatusChanged([[maybe_unused]] int32_t /*status*/,
+ [[maybe_unused]] const std::string& /*cameraId*/,
+ [[maybe_unused]] const std::string& /*physicalCameraId*/,
+ [[maybe_unused]] int32_t /*deviceId*/) override {
// No op
return binder::Status::ok();
- };
+ }
virtual binder::Status onTorchStatusChanged(int32_t status,
- const std::string& cameraId) override {
+ const std::string& cameraId, [[maybe_unused]] int32_t /*deviceId*/) override {
Mutex::Autolock l(mLock);
mCameraTorchStatuses[cameraId] = status;
mTorchCondition.broadcast();
return binder::Status::ok();
- };
+ }
- virtual binder::Status onTorchStrengthLevelChanged(const std::string& /*cameraId*/,
- int32_t /*torchStrength*/) override {
+ virtual binder::Status onTorchStrengthLevelChanged(
+ [[maybe_unused]] const std::string& /*cameraId*/,
+ [[maybe_unused]] int32_t /*torchStrength*/,
+ [[maybe_unused]] int32_t /*deviceId*/) override {
// No op
return binder::Status::ok();
}
@@ -109,13 +115,15 @@
return binder::Status::ok();
}
- virtual binder::Status onCameraOpened(const std::string& /*cameraId*/,
- const std::string& /*clientPackageName*/) {
+ virtual binder::Status onCameraOpened([[maybe_unused]] const std::string& /*cameraId*/,
+ [[maybe_unused]] const std::string& /*clientPackageName*/,
+ [[maybe_unused]] int32_t /*deviceId*/) {
// No op
return binder::Status::ok();
}
- virtual binder::Status onCameraClosed(const std::string& /*cameraId*/) override {
+ virtual binder::Status onCameraClosed([[maybe_unused]] const std::string& /*cameraId*/,
+ [[maybe_unused]] int32_t /*deviceId*/) override {
// No op
return binder::Status::ok();
}
@@ -133,7 +141,7 @@
}
}
return true;
- };
+ }
bool waitForTorchState(int32_t status, int32_t cameraId) const {
Mutex::Autolock l(mLock);
@@ -153,7 +161,7 @@
foundStatus = (iter != mCameraTorchStatuses.end() && iter->second == status);
}
return true;
- };
+ }
int32_t getTorchStatus(int32_t cameraId) const {
Mutex::Autolock l(mLock);
@@ -162,7 +170,7 @@
return hardware::ICameraServiceListener::TORCH_STATUS_UNKNOWN;
}
return iter->second;
- };
+ }
int32_t getStatus(const std::string& cameraId) const {
Mutex::Autolock l(mLock);
@@ -171,7 +179,7 @@
return hardware::ICameraServiceListener::STATUS_UNKNOWN;
}
return iter->second;
- };
+ }
};
// Callback implementation
@@ -230,7 +238,6 @@
return binder::Status::ok();
}
-
virtual binder::Status onResultReceived(const CameraMetadata& metadata,
const CaptureResultExtras& resultExtras,
const std::vector<PhysicalCaptureResultInfo>& physicalResultInfos) {
@@ -296,7 +303,6 @@
mStatusesHit.clear();
return true;
-
}
void clearStatus() const {
@@ -307,7 +313,6 @@
bool waitForIdle() const {
return waitForStatus(IDLE);
}
-
};
namespace {
@@ -324,7 +329,7 @@
}
};
sp<DeathNotifier> gDeathNotifier;
-}; // anonymous namespace
+} // anonymous namespace
// Exercise basic binder calls for the camera service
TEST(CameraServiceBinderTest, CheckBinderCameraService) {
@@ -342,7 +347,8 @@
binder::Status res;
int32_t numCameras = 0;
- res = service->getNumberOfCameras(hardware::ICameraService::CAMERA_TYPE_ALL, &numCameras);
+ res = service->getNumberOfCameras(hardware::ICameraService::CAMERA_TYPE_ALL, kDefaultDeviceId,
+ /*devicePolicy*/0, &numCameras);
EXPECT_TRUE(res.isOk()) << res;
EXPECT_LE(0, numCameras);
@@ -354,7 +360,7 @@
EXPECT_EQ(numCameras, static_cast<const int>(statuses.size()));
for (const auto &it : statuses) {
- listener->onStatusChanged(it.status, it.cameraId);
+ listener->onStatusChanged(it.status, it.cameraId, kDefaultDeviceId);
}
for (int32_t i = 0; i < numCameras; i++) {
@@ -372,7 +378,8 @@
// Check metadata binder call
CameraMetadata metadata;
res = service->getCameraCharacteristics(cameraId,
- /*targetSdkVersion*/__ANDROID_API_FUTURE__, /*overrideToPortrait*/false, &metadata);
+ /*targetSdkVersion*/__ANDROID_API_FUTURE__, /*overrideToPortrait*/false,
+ kDefaultDeviceId, /*devicePolicy*/0, &metadata);
EXPECT_TRUE(res.isOk()) << res;
EXPECT_FALSE(metadata.isEmpty());
@@ -389,7 +396,7 @@
res = service->connectDevice(callbacks, cameraId, "meeeeeeeee!",
{}, hardware::ICameraService::USE_CALLING_UID, /*oomScoreOffset*/ 0,
/*targetSdkVersion*/__ANDROID_API_FUTURE__,
- /*overrideToPortrait*/false, /*out*/&device);
+ /*overrideToPortrait*/false, kDefaultDeviceId, /*devicePolicy*/0, /*out*/&device);
EXPECT_TRUE(res.isOk()) << res;
ASSERT_NE(nullptr, device.get());
device->disconnect();
@@ -399,12 +406,12 @@
if (torchStatus == hardware::ICameraServiceListener::TORCH_STATUS_AVAILABLE_OFF) {
// Check torch calls
res = service->setTorchMode(cameraId,
- /*enabled*/true, callbacks);
+ /*enabled*/true, callbacks, kDefaultDeviceId, /*devicePolicy*/0);
EXPECT_TRUE(res.isOk()) << res;
EXPECT_TRUE(listener->waitForTorchState(
hardware::ICameraServiceListener::TORCH_STATUS_AVAILABLE_ON, i));
res = service->setTorchMode(cameraId,
- /*enabled*/false, callbacks);
+ /*enabled*/false, callbacks, kDefaultDeviceId, /*devicePolicy*/0);
EXPECT_TRUE(res.isOk()) << res;
EXPECT_TRUE(listener->waitForTorchState(
hardware::ICameraServiceListener::TORCH_STATUS_AVAILABLE_OFF, i));
@@ -433,7 +440,8 @@
binder::Status res = service->connectDevice(callbacks, deviceId, "meeeeeeeee!",
{}, hardware::ICameraService::USE_CALLING_UID, /*oomScoreOffset*/ 0,
/*targetSdkVersion*/__ANDROID_API_FUTURE__,
- /*overrideToPortrait*/false, /*out*/&device);
+ /*overrideToPortrait*/false, kDefaultDeviceId, /*devicePolicy*/0,
+ /*out*/&device);
EXPECT_TRUE(res.isOk()) << res;
}
auto p = std::make_pair(callbacks, device);
@@ -466,10 +474,10 @@
std::vector<hardware::CameraStatus> statuses;
service->addListener(serviceListener, &statuses);
for (const auto &it : statuses) {
- serviceListener->onStatusChanged(it.status, it.cameraId);
+ serviceListener->onStatusChanged(it.status, it.cameraId, kDefaultDeviceId);
}
service->getNumberOfCameras(hardware::ICameraService::CAMERA_TYPE_BACKWARD_COMPATIBLE,
- &numCameras);
+ kDefaultDeviceId, /*devicePolicy*/0, &numCameras);
}
virtual void TearDown() {
@@ -479,7 +487,6 @@
closeDevice(p);
}
}
-
};
TEST_F(CameraClientBinderTest, CheckBinderCameraDeviceUser) {
@@ -647,8 +654,7 @@
closeDevice(p);
}
-
-};
+}
TEST_F(CameraClientBinderTest, CheckBinderCaptureRequest) {
sp<CaptureRequest> requestOriginal, requestParceled;
@@ -707,4 +713,4 @@
EXPECT_TRUE(it->settings.exists(ANDROID_CONTROL_CAPTURE_INTENT));
entry = it->settings.find(ANDROID_CONTROL_CAPTURE_INTENT);
EXPECT_EQ(entry.data.u8[0], intent2);
-};
+}
diff --git a/camera/tests/CameraCharacteristicsPermission.cpp b/camera/tests/CameraCharacteristicsPermission.cpp
index 1de7cb4..10f7f22 100644
--- a/camera/tests/CameraCharacteristicsPermission.cpp
+++ b/camera/tests/CameraCharacteristicsPermission.cpp
@@ -24,6 +24,7 @@
#include <utils/Log.h>
#include <camera/CameraMetadata.h>
#include <camera/Camera.h>
+#include <camera/CameraUtils.h>
#include <android/hardware/ICameraService.h>
using namespace android;
@@ -31,7 +32,6 @@
class CameraCharacteristicsPermission : public ::testing::Test {
protected:
-
CameraCharacteristicsPermission() : numCameras(0){}
//Gtest interface
void SetUp() override;
@@ -48,7 +48,8 @@
sp<IBinder> binder = sm->getService(String16("media.camera"));
mCameraService = interface_cast<ICameraService>(binder);
rc = mCameraService->getNumberOfCameras(
- hardware::ICameraService::CAMERA_TYPE_ALL, &numCameras);
+ hardware::ICameraService::CAMERA_TYPE_ALL, kDefaultDeviceId, /*devicePolicy*/0,
+ &numCameras);
EXPECT_TRUE(rc.isOk());
}
@@ -61,7 +62,6 @@
// a camera permission.
TEST_F(CameraCharacteristicsPermission, TestCameraPermission) {
for (int32_t cameraId = 0; cameraId < numCameras; cameraId++) {
-
std::string cameraIdStr = std::to_string(cameraId);
bool isSupported = false;
auto rc = mCameraService->supportsCameraApi(cameraIdStr,
@@ -75,7 +75,7 @@
std::vector<int32_t> tagsNeedingPermission;
rc = mCameraService->getCameraCharacteristics(cameraIdStr,
/*targetSdkVersion*/__ANDROID_API_FUTURE__,
- /*overrideToPortrait*/false, &metadata);
+ /*overrideToPortrait*/false, kDefaultDeviceId, /*devicePolicy*/0, &metadata);
ASSERT_TRUE(rc.isOk());
EXPECT_FALSE(metadata.isEmpty());
EXPECT_EQ(metadata.removePermissionEntries(CAMERA_METADATA_INVALID_VENDOR_ID,
diff --git a/camera/tests/CameraZSLTests.cpp b/camera/tests/CameraZSLTests.cpp
index 3ae7659..56fcfa4 100644
--- a/camera/tests/CameraZSLTests.cpp
+++ b/camera/tests/CameraZSLTests.cpp
@@ -27,6 +27,7 @@
#include <camera/CameraParameters.h>
#include <camera/CameraMetadata.h>
#include <camera/Camera.h>
+#include <camera/CameraUtils.h>
#include <camera/StringUtils.h>
#include <android/hardware/ICameraService.h>
@@ -84,7 +85,8 @@
sp<IBinder> binder = sm->getService(String16("media.camera"));
mCameraService = interface_cast<ICameraService>(binder);
rc = mCameraService->getNumberOfCameras(
- hardware::ICameraService::CAMERA_TYPE_ALL, &numCameras);
+ hardware::ICameraService::CAMERA_TYPE_ALL, kDefaultDeviceId, /*devicePolicy*/0,
+ &numCameras);
EXPECT_TRUE(rc.isOk());
mComposerClient = new SurfaceComposerClient;
@@ -109,7 +111,6 @@
void CameraZSLTests::dataCallback(int32_t msgType, const sp<IMemory>& /*data*/,
camera_frame_metadata_t *) {
-
switch (msgType) {
case CAMERA_MSG_PREVIEW_FRAME: {
Mutex::Autolock l(mPreviewLock);
@@ -127,7 +128,7 @@
default:
ALOGV("%s: msgType: %d", __FUNCTION__, msgType);
}
-};
+}
status_t CameraZSLTests::waitForPreviewStart() {
status_t rc = NO_ERROR;
@@ -184,7 +185,7 @@
CameraMetadata metadata;
rc = mCameraService->getCameraCharacteristics(cameraIdStr,
/*targetSdkVersion*/__ANDROID_API_FUTURE__, /*overrideToPortrait*/false,
- &metadata);
+ kDefaultDeviceId, /*devicePolicy*/0, &metadata);
if (!rc.isOk()) {
// The test is relevant only for cameras with Hal 3.x
// support.
@@ -212,7 +213,8 @@
"ZSLTest", hardware::ICameraService::USE_CALLING_UID,
hardware::ICameraService::USE_CALLING_PID,
/*targetSdkVersion*/__ANDROID_API_FUTURE__,
- /*overrideToPortrait*/false, /*forceSlowJpegMode*/false, &cameraDevice);
+ /*overrideToPortrait*/false, /*forceSlowJpegMode*/false, kDefaultDeviceId,
+ /*devicePolicy*/0, &cameraDevice);
EXPECT_TRUE(rc.isOk());
CameraParameters params(cameraDevice->getParameters());
diff --git a/camera/tests/fuzzer/camera_c2ConcurrentCamera_fuzzer.cpp b/camera/tests/fuzzer/camera_c2ConcurrentCamera_fuzzer.cpp
index 12b5bc3..c00f2ba 100644
--- a/camera/tests/fuzzer/camera_c2ConcurrentCamera_fuzzer.cpp
+++ b/camera/tests/fuzzer/camera_c2ConcurrentCamera_fuzzer.cpp
@@ -15,6 +15,7 @@
*/
#include <camera2/ConcurrentCamera.h>
+#include <CameraUtils.h>
#include <fuzzer/FuzzedDataProvider.h>
#include "camera2common.h"
@@ -33,7 +34,8 @@
size_t concurrentCameraIdSize = fdp.ConsumeIntegralInRange<size_t>(kRangeMin, kRangeMax);
for (size_t idx = 0; idx < concurrentCameraIdSize; ++idx) {
string concurrentCameraId = fdp.ConsumeRandomLengthString();
- camIdCombination.mConcurrentCameraIds.push_back(concurrentCameraId);
+ camIdCombination.mConcurrentCameraIdDeviceIdPairs.push_back(
+ {concurrentCameraId, kDefaultDeviceId});
}
}
diff --git a/camera/tests/fuzzer/camera_fuzzer.cpp b/camera/tests/fuzzer/camera_fuzzer.cpp
index c2a7549..0812096 100644
--- a/camera/tests/fuzzer/camera_fuzzer.cpp
+++ b/camera/tests/fuzzer/camera_fuzzer.cpp
@@ -16,6 +16,7 @@
#include <Camera.h>
#include <CameraParameters.h>
+#include <CameraUtils.h>
#include <binder/MemoryDealer.h>
#include <fuzzer/FuzzedDataProvider.h>
#include <gui/Surface.h>
@@ -115,7 +116,7 @@
hardware::ICameraService::USE_CALLING_PID,
/*targetSdkVersion*/ __ANDROID_API_FUTURE__,
/*overrideToPortrait*/ false, /*forceSlowJpegMode*/ false,
- &cameraDevice);
+ kDefaultDeviceId, /*devicePolicy*/0, &cameraDevice);
} else {
cameraService->connect(this, mFDP->ConsumeIntegral<int32_t>() /* cameraId */,
mFDP->ConsumeRandomLengthString(kMaxBytes).c_str(),
@@ -123,7 +124,8 @@
mFDP->ConsumeIntegral<int8_t>() /* clientPid */,
/*targetSdkVersion*/ mFDP->ConsumeIntegral<int32_t>(),
/*overrideToPortrait*/ mFDP->ConsumeBool(),
- /*forceSlowJpegMode*/ mFDP->ConsumeBool(), &cameraDevice);
+ /*forceSlowJpegMode*/ mFDP->ConsumeBool(), kDefaultDeviceId,
+ /*devicePolicy*/0, &cameraDevice);
}
mCamera = Camera::create(cameraDevice);
@@ -150,13 +152,14 @@
}
int32_t cameraId = mFDP->ConsumeIntegral<int32_t>();
- Camera::getNumberOfCameras();
+ Camera::getNumberOfCameras(kDefaultDeviceId, /*devicePolicy*/0);
CameraInfo cameraInfo;
cameraInfo.facing = mFDP->ConsumeBool() ? mFDP->PickValueInArray(kValidFacing)
: mFDP->ConsumeIntegral<int32_t>();
cameraInfo.orientation = mFDP->ConsumeBool() ? mFDP->PickValueInArray(kValidOrientation)
: mFDP->ConsumeIntegral<int32_t>();
- Camera::getCameraInfo(cameraId, /*overrideToPortrait*/false, &cameraInfo);
+ Camera::getCameraInfo(cameraId, /*overrideToPortrait*/false, kDefaultDeviceId,
+ /*devicePolicy*/0, &cameraInfo);
mCamera->reconnect();
sp<SurfaceComposerClient> composerClient = new SurfaceComposerClient;
diff --git a/camera/tests/fuzzer/camera_utils_fuzzer.cpp b/camera/tests/fuzzer/camera_utils_fuzzer.cpp
index 365305e..c816f82 100644
--- a/camera/tests/fuzzer/camera_utils_fuzzer.cpp
+++ b/camera/tests/fuzzer/camera_utils_fuzzer.cpp
@@ -112,7 +112,8 @@
}
string clientPackage = mFDP->ConsumeRandomLengthString(kMaxBytes);
- cameraStatus = new CameraStatus(id, status, unavailSubIds, clientPackage);
+ cameraStatus = new CameraStatus(id, status, unavailSubIds, clientPackage,
+ kDefaultDeviceId);
}
if (mFDP->ConsumeBool()) {
diff --git a/cmds/screenrecord/screenrecord.cpp b/cmds/screenrecord/screenrecord.cpp
index a6b20cf..28670b1 100644
--- a/cmds/screenrecord/screenrecord.cpp
+++ b/cmds/screenrecord/screenrecord.cpp
@@ -117,6 +117,7 @@
static bool gSizeSpecified = false; // was size explicitly requested?
static bool gWantInfoScreen = false; // do we want initial info screen?
static bool gWantFrameTime = false; // do we want times on each frame?
+static bool gSecureDisplay = false; // should we create a secure virtual display?
static uint32_t gVideoWidth = 0; // default width+height
static uint32_t gVideoHeight = 0;
static uint32_t gBitRate = 20000000; // 20Mbps
@@ -362,7 +363,7 @@
const sp<IGraphicBufferProducer>& bufferProducer,
sp<IBinder>* pDisplayHandle, sp<SurfaceControl>* mirrorRoot) {
sp<IBinder> dpy = SurfaceComposerClient::createDisplay(
- String8("ScreenRecorder"), false /*secure*/);
+ String8("ScreenRecorder"), gSecureDisplay);
SurfaceComposerClient::Transaction t;
t.setDisplaySurface(dpy, bufferProducer);
setDisplayProjection(t, dpy, displayState);
@@ -1253,6 +1254,7 @@
{ "persistent-surface", no_argument, NULL, 'p' },
{ "bframes", required_argument, NULL, 'B' },
{ "display-id", required_argument, NULL, 'd' },
+ { "capture-secure", no_argument, NULL, 'S' },
{ NULL, 0, NULL, 0 }
};
@@ -1372,6 +1374,9 @@
fprintf(stderr, "Invalid physical display ID\n");
return 2;
+ case 'S':
+ gSecureDisplay = true;
+ break;
default:
if (ic != '?') {
fprintf(stderr, "getopt_long returned unexpected value 0x%x\n", ic);
diff --git a/drm/libmediadrm/DrmHalAidl.cpp b/drm/libmediadrm/DrmHalAidl.cpp
index 7106d66..650a589 100644
--- a/drm/libmediadrm/DrmHalAidl.cpp
+++ b/drm/libmediadrm/DrmHalAidl.cpp
@@ -466,6 +466,12 @@
mMetrics->SetAppPackageName(appPackageName);
mMetrics->SetAppUid(AIBinder_getCallingUid());
for (ssize_t i = mFactories.size() - 1; i >= 0; i--) {
+ CryptoSchemes schemes{};
+ auto err = mFactories[i]->getSupportedCryptoSchemes(&schemes);
+ if (!err.isOk() || !std::count(schemes.uuids.begin(), schemes.uuids.end(), uuidAidl)) {
+ continue;
+ }
+
::ndk::ScopedAStatus status =
mFactories[i]->createDrmPlugin(uuidAidl, appPackageNameAidl, &pluginAidl);
if (status.isOk()) {
diff --git a/drm/libmediadrmrkp/Android.bp b/drm/libmediadrmrkp/Android.bp
index f13eb62..b1a01e4 100644
--- a/drm/libmediadrmrkp/Android.bp
+++ b/drm/libmediadrmrkp/Android.bp
@@ -5,7 +5,7 @@
"src/**/*.cpp",
],
export_include_dirs: [
- "include"
+ "include",
],
shared_libs: [
"libbinder_ndk",
@@ -17,7 +17,7 @@
"android.hardware.drm-V1-ndk",
"android.hardware.security.rkp-V3-ndk",
"libbase",
- "libcppbor_external",
+ "libcppbor",
],
defaults: [
"keymint_use_latest_hal_aidl_ndk_shared",
@@ -42,7 +42,7 @@
"android.hardware.drm-V1-ndk",
"android.hardware.security.rkp-V3-ndk",
"libbase",
- "libcppbor_external",
+ "libcppbor",
"libmediadrmrkp",
],
vendor: true,
@@ -50,4 +50,4 @@
"-Wall",
"-Werror",
],
-}
\ No newline at end of file
+}
diff --git a/drm/mediadrm/plugins/clearkey/aidl/Android.bp b/drm/mediadrm/plugins/clearkey/aidl/Android.bp
index 0b0d46a..9a06bd2 100644
--- a/drm/mediadrm/plugins/clearkey/aidl/Android.bp
+++ b/drm/mediadrm/plugins/clearkey/aidl/Android.bp
@@ -107,6 +107,17 @@
installable: false, // installed in APEX
}
+cc_binary {
+ name: "android.hardware.drm-service-lazy.clearkey.apex",
+ stem: "android.hardware.drm-service-lazy.clearkey",
+ defaults: [
+ "aidl_clearkey_service_defaults",
+ "aidl_clearkey_service_defaults-use-static-deps",
+ ],
+ srcs: ["ServiceLazy.cpp"],
+ installable: false, // installed in APEX
+}
+
phony {
name: "android.hardware.drm@latest-service.clearkey",
required: [
@@ -183,17 +194,63 @@
"android.hardware.drm-service.clearkey.apex.rc",
"android.hardware.drm-service.clearkey.xml"
],
+ overrides: [
+ "android.hardware.drm-service.clearkey",
+ ],
}
prebuilt_etc {
name: "android.hardware.drm-service.clearkey.apex.rc",
- src: "android.hardware.drm-service.clearkey.apex.rc",
+ src: ":gen-android.hardware.drm-service.clearkey.apex.rc",
installable: false,
}
+genrule {
+ name: "gen-android.hardware.drm-service.clearkey.apex.rc",
+ srcs: ["android.hardware.drm-service.clearkey.rc"],
+ out: ["android.hardware.drm-service.clearkey.apex.rc"],
+ cmd: "sed -E 's%/vendor/bin/%/apex/com.android.hardware.drm.clearkey/bin/%' $(in) > $(out)",
+}
+
prebuilt_etc {
name: "android.hardware.drm-service.clearkey.xml",
src: "android.hardware.drm-service.clearkey.xml",
sub_dir: "vintf",
installable: false,
}
+
+apex {
+ name: "com.android.hardware.drm.clearkey.lazy",
+ manifest: "manifest.json",
+ file_contexts: "file_contexts",
+ key: "com.android.hardware.key",
+ certificate: ":com.android.hardware.certificate",
+ vendor: true,
+ updatable: false,
+
+ binaries: [
+ "android.hardware.drm-service-lazy.clearkey.apex",
+ ],
+ prebuilts: [
+ "android.hardware.drm-service-lazy.clearkey.apex.rc",
+ "android.hardware.drm-service.clearkey.xml"
+ ],
+ overrides: [
+ "android.hardware.drm-service.clearkey",
+ "android.hardware.drm-service-lazy.clearkey",
+ "com.android.hardware.drm.clearkey",
+ ],
+}
+
+prebuilt_etc {
+ name: "android.hardware.drm-service-lazy.clearkey.apex.rc",
+ src: ":gen-android.hardware.drm-service-lazy.clearkey.apex.rc",
+ installable: false,
+}
+
+genrule {
+ name: "gen-android.hardware.drm-service-lazy.clearkey.apex.rc",
+ srcs: ["android.hardware.drm-service-lazy.clearkey.rc"],
+ out: ["android.hardware.drm-service-lazy.clearkey.apex.rc"],
+ cmd: "sed -E 's%/vendor/bin/%/apex/com.android.hardware.drm.clearkey/bin/%' $(in) > $(out)",
+}
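The genrules above derive the APEX init .rc files from the vendor ones by rewriting the service binary path with sed; the hand-written apex.rc deleted below shows the expected result. Assuming the vendor .rc launches the service from /vendor/bin/hw/, the rewrite is:
    /vendor/bin/hw/android.hardware.drm-service.clearkey
    -> /apex/com.android.hardware.drm.clearkey/bin/hw/android.hardware.drm-service.clearkey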
diff --git a/drm/mediadrm/plugins/clearkey/aidl/android.hardware.drm-service.clearkey.apex.rc b/drm/mediadrm/plugins/clearkey/aidl/android.hardware.drm-service.clearkey.apex.rc
deleted file mode 100644
index f4645b3..0000000
--- a/drm/mediadrm/plugins/clearkey/aidl/android.hardware.drm-service.clearkey.apex.rc
+++ /dev/null
@@ -1,7 +0,0 @@
-service vendor.drm-clearkey-service /apex/com.android.hardware.drm.clearkey/bin/hw/android.hardware.drm-service.clearkey
- class hal
- user media
- group mediadrm drmrpc
- ioprio rt 4
- task_profiles ProcessCapacityHigh
- interface aidl android.hardware.drm.IDrmFactory/clearkey
diff --git a/media/OWNERS b/media/OWNERS
index 976fb9e..b926075 100644
--- a/media/OWNERS
+++ b/media/OWNERS
@@ -14,5 +14,8 @@
taklee@google.com
wonsik@google.com
+# For TEST_MAPPING tv-presubmit and tv-postsubmit configurations:
+per-file TEST_MAPPING = blindahl@google.com
+
# go/android-fwk-media-solutions for info on areas of ownership.
include platform/frameworks/av:/media/janitors/media_solutions_OWNERS
diff --git a/media/TEST_MAPPING b/media/TEST_MAPPING
index cd5d354..1a637ac 100644
--- a/media/TEST_MAPPING
+++ b/media/TEST_MAPPING
@@ -44,5 +44,16 @@
],
"file_patterns": ["(?i)drm|crypto"]
}
+ ],
+ // Postsubmit tests for TV devices
+ "tv-postsubmit": [
+ {
+ "name": "CtsMediaDecoderTestCases",
+ "options": [
+ {
+ "include-filter": "android.media.decoder.cts.DecoderRenderTest"
+ }
+ ]
+ }
]
}
diff --git a/media/aconfig/codec_fwk.aconfig b/media/aconfig/codec_fwk.aconfig
index 3092091..9f64a28 100644
--- a/media/aconfig/codec_fwk.aconfig
+++ b/media/aconfig/codec_fwk.aconfig
@@ -14,6 +14,7 @@
flag {
name: "dynamic_color_aspects"
+ is_exported: true
namespace: "codec_fwk"
description: "Feature flag for dynamic color aspect support"
bug: "297914560"
@@ -21,6 +22,7 @@
flag {
name: "hlg_editing"
+ is_exported: true
namespace: "codec_fwk"
description: "Feature flag for HLG editing support"
bug: "316397061"
@@ -28,6 +30,7 @@
flag {
name: "in_process_sw_audio_codec"
+ is_exported: true
namespace: "codec_fwk"
description: "Feature flag for in-process software audio codec API"
bug: "297922713"
@@ -48,7 +51,15 @@
}
flag {
+ name: "native_capabilites"
+ namespace: "codec_fwk"
+ description: "Feature flag for native codec capabilities"
+ bug: "306023029"
+}
+
+flag {
name: "null_output_surface"
+ is_exported: true
namespace: "codec_fwk"
description: "Feature flag for null output Surface API"
bug: "297920102"
@@ -63,6 +74,7 @@
flag {
name: "region_of_interest"
+ is_exported: true
namespace: "codec_fwk"
description: "Feature flag for region of interest API"
bug: "299191092"
@@ -74,3 +86,40 @@
description: "Feature flag for region of interest support"
bug: "325549730"
}
+
+flag {
+ name: "set_callback_stall"
+ namespace: "codec_fwk"
+ description: "Bugfix flag for setCallback stall"
+ bug: "326010604"
+ metadata {
+ purpose: PURPOSE_BUGFIX
+ }
+}
+
+flag {
+ name: "set_state_early"
+ namespace: "codec_fwk"
+ description: "Bugfix flag for setting state early to avoid a race condition"
+ bug: "298613712"
+ metadata {
+ purpose: PURPOSE_BUGFIX
+ }
+}
+
+flag {
+ name: "stop_hal_before_surface"
+ namespace: "codec_fwk"
+ description: "Bugfix flag for setting state early to avoid a race condition"
+ bug: "339247977"
+ metadata {
+ purpose: PURPOSE_BUGFIX
+ }
+}
+
+flag {
+ name: "teamfood"
+ namespace: "codec_fwk"
+ description: "Feature flag to track teamfood population"
+ bug: "328770262"
+}
diff --git a/media/aconfig/mediacodec_flags.aconfig b/media/aconfig/mediacodec_flags.aconfig
index 4d1e5ca..3cc9a1a 100644
--- a/media/aconfig/mediacodec_flags.aconfig
+++ b/media/aconfig/mediacodec_flags.aconfig
@@ -7,22 +7,23 @@
# ******************************************************************
flag {
- name: "large_audio_frame"
+ name: "aidl_hal"
namespace: "codec_fwk"
- description: "Feature flags for large audio frame support"
- bug: "297219557"
+ description: "Feature flags for enabling AIDL HAL handling"
+ bug: "251850069"
}
flag {
name: "codec_importance"
+ is_exported: true
namespace: "codec_fwk"
description: "Feature flags for media codec importance"
bug: "297929011"
}
flag {
- name: "aidl_hal"
+ name: "large_audio_frame"
namespace: "codec_fwk"
- description: "Feature flags for enabling AIDL HAL handling"
- bug: "251850069"
+ description: "Feature flags for large audio frame support"
+ bug: "297219557"
}
diff --git a/media/audio/aconfig/Android.bp b/media/audio/aconfig/Android.bp
index 6d21e97..ec45e2f 100644
--- a/media/audio/aconfig/Android.bp
+++ b/media/audio/aconfig/Android.bp
@@ -6,6 +6,13 @@
}
aconfig_declarations {
+ name: "com.android.media.audioclient-aconfig",
+ package: "com.android.media.audioclient",
+ container: "system",
+ srcs: ["audioclient.aconfig"],
+}
+
+aconfig_declarations {
name: "com.android.media.audioserver-aconfig",
package: "com.android.media.audioserver",
container: "system",
@@ -53,9 +60,9 @@
// TODO(b/316909431) native_bridge_supported: true,
apex_available: [
"//apex_available:platform",
+ "com.android.btservices",
"com.android.media",
"com.android.media.swcodec",
- "com.android.btservices",
],
min_sdk_version: "29",
}
@@ -66,6 +73,12 @@
defaults: ["audio-aconfig-cc-defaults"],
}
+cc_aconfig_library {
+ name: "com.android.media.audioclient-aconfig-cc",
+ aconfig_declarations: "com.android.media.audioclient-aconfig",
+ defaults: ["audio-aconfig-cc-defaults"],
+}
+
java_aconfig_library {
name: "com.android.media.audio-aconfig-java",
aconfig_declarations: "com.android.media.audio-aconfig",
diff --git a/media/audio/aconfig/audio.aconfig b/media/audio/aconfig/audio.aconfig
index cdbadc2..c642a94 100644
--- a/media/audio/aconfig/audio.aconfig
+++ b/media/audio/aconfig/audio.aconfig
@@ -6,6 +6,14 @@
container: "system"
flag {
+ name: "abs_volume_index_fix"
+ namespace: "media_audio"
+ description:
+ "Fix double attenuation and index jumps in absolute volume mode"
+ bug: "340693050"
+}
+
+flag {
name: "alarm_min_volume_zero"
namespace: "media_audio"
description: "Support configuring alarm min vol to zero"
@@ -20,6 +28,13 @@
}
flag {
+ name: "audioserver_permissions"
+ namespace: "media_audio"
+ description: "Refactoring permission management in audioserver"
+ bug: "338089555"
+}
+
+flag {
name: "bluetooth_mac_address_anonymization"
namespace: "media_audio"
description:
@@ -45,6 +60,14 @@
}
flag {
+ name: "port_to_piid_simplification"
+ namespace: "media_audio"
+ description: "PAM only needs for each piid the last portId mapping"
+ bug: "335747248"
+
+}
+
+flag {
name: "ringer_mode_affects_alarm"
namespace: "media_audio"
description:
@@ -53,6 +76,15 @@
}
flag {
+ name: "set_stream_volume_order"
+ namespace: "media_audio"
+ description:
+ "Fix race condition by adjusting the order when"
+ "setStreamVolume is calling into the BT stack"
+ bug: "329202581"
+}
+
+flag {
name: "spatializer_offload"
namespace: "media_audio"
description: "Enable spatializer offload"
@@ -60,6 +92,13 @@
}
flag {
+ name: "spatializer_upmix"
+ namespace: "media_audio"
+ description: "Enable spatializer upmix"
+ bug: "323985367"
+}
+
+flag {
name: "stereo_spatialization"
namespace: "media_audio"
description: "Enable stereo channel mask for spatialization."
@@ -67,6 +106,15 @@
}
flag {
+ name: "vgs_vss_sync_mute_order"
+ namespace: "media_audio"
+ description:
+ "When syncing the VGS to VSS we need to first adjust the"
+ "mute state before the index."
+ bug: "331849188"
+}
+
+flag {
name: "volume_refactoring"
namespace: "media_audio"
description: "Refactor the audio volume internal architecture logic"
diff --git a/media/audio/aconfig/audio_framework.aconfig b/media/audio/aconfig/audio_framework.aconfig
index cfdf1ab..0209e28 100644
--- a/media/audio/aconfig/audio_framework.aconfig
+++ b/media/audio/aconfig/audio_framework.aconfig
@@ -4,6 +4,7 @@
# Please add flags in alphabetical order.
package: "android.media.audio"
+container: "system"
flag {
name: "auto_public_volume_api_hardening"
@@ -23,6 +24,7 @@
flag {
name: "feature_spatial_audio_headtracking_low_latency"
+ is_exported: true
namespace: "media_audio"
description: "Define feature for low latency headtracking for SA"
bug: "324291076"
@@ -30,6 +32,7 @@
flag {
name: "focus_exclusive_with_recording"
+ is_exported: true
namespace: "media_audio"
description:
"Audio focus GAIN_TRANSIENT_EXCLUSIVE only mutes"
@@ -37,18 +40,22 @@
bug: "316414750"
}
+# TODO remove
flag {
name: "foreground_audio_control"
+ is_exported: true
namespace: "media_audio"
description:
"Audio focus gain requires FGS or delegation to "
- "take effect"
+ "take effect"
bug: "296232417"
+ is_fixed_read_only: true
}
# TODO remove
flag {
name: "focus_freeze_test_api"
+ is_exported: true
namespace: "media_audio"
description: "\
AudioManager audio focus test APIs:\
@@ -62,6 +69,7 @@
flag {
name: "loudness_configurator_api"
+ is_exported: true
namespace: "media_audio"
description: "\
Enable the API for providing loudness metadata and CTA-2075 \
@@ -79,6 +87,7 @@
flag {
name: "sco_managed_by_audio"
+ is_exported: true
namespace: "media_audio"
description: "\
Enable new implementation of headset profile device connection and\
@@ -88,14 +97,37 @@
flag {
name: "supported_device_types_api"
+ is_exported: true
namespace: "media_audio"
description: "Surface new API method AudioManager.getSupportedDeviceTypes()"
bug: "307537538"
}
flag {
+ name: "ro_foreground_audio_control"
+ is_exported: true
+ namespace: "media_audio"
+ description:
+ "Audio focus gain requires FGS or delegation to "
+ "take effect"
+ bug: "296232417"
+ is_fixed_read_only: true
+}
+
+flag {
+ name: "ro_volume_ringer_api_hardening"
+ namespace: "media_audio"
+ description: "Limit access to volume and ringer SDK APIs in AudioManager"
+ bug: "296232417"
+ is_fixed_read_only: true
+}
+
+
+# TODO remove
+flag {
name: "volume_ringer_api_hardening"
namespace: "media_audio"
description: "Limit access to volume and ringer SDK APIs in AudioManager"
bug: "296232417"
+ is_fixed_read_only: true
}
diff --git a/media/audio/aconfig/audioclient.aconfig b/media/audio/aconfig/audioclient.aconfig
new file mode 100644
index 0000000..a804834
--- /dev/null
+++ b/media/audio/aconfig/audioclient.aconfig
@@ -0,0 +1,16 @@
+# Flags for libaudioclient, and other native client libraries.
+#
+# Please add flags in alphabetical order.
+
+package: "com.android.media.audioclient"
+container: "system"
+
+flag {
+ name: "audiosystem_service_acquisition"
+ namespace: "media_audio"
+ description: "Clean up audiosystem service acquisition."
+ bug: "330358287"
+ metadata {
+ purpose: PURPOSE_BUGFIX
+ }
+}
diff --git a/media/audio/aconfig/audiopolicy_framework.aconfig b/media/audio/aconfig/audiopolicy_framework.aconfig
index 72a1e6c..28b6c7f 100644
--- a/media/audio/aconfig/audiopolicy_framework.aconfig
+++ b/media/audio/aconfig/audiopolicy_framework.aconfig
@@ -24,6 +24,7 @@
flag {
name: "audio_mix_test_api"
+ is_exported: true
namespace: "media_audio"
description: "Enable new Test APIs that provide access to registered AudioMixes on system server and native side."
bug: "309080867"
@@ -32,6 +33,7 @@
flag {
name: "audio_policy_update_mixing_rules_api"
+ is_exported: true
namespace: "media_audio"
description: "Enable AudioPolicy.updateMixingRules API for hot-swapping audio mixing rules."
bug: "293874525"
@@ -39,6 +41,7 @@
flag {
name: "enable_fade_manager_configuration"
+ is_exported: true
namespace: "media_audio"
description: "Enable Fade Manager Configuration support to determine fade properties"
bug: "307354764"
diff --git a/media/audio/aconfig/audioserver.aconfig b/media/audio/aconfig/audioserver.aconfig
index 5c6504f..96e2b75 100644
--- a/media/audio/aconfig/audioserver.aconfig
+++ b/media/audio/aconfig/audioserver.aconfig
@@ -22,6 +22,15 @@
}
flag {
+ name: "fix_concurrent_playback_behavior_with_bit_perfect_client"
+ namespace: "media_audio"
+ description:
+ "Treat playback use cases differently when bit-perfect client is active to improve the "
+ "user experience with bit-perfect playback."
+ bug: "339515899"
+}
+
+flag {
name: "mutex_priority_inheritance"
namespace: "media_audio"
description:
diff --git a/media/audio/aconfig/midi_flags.aconfig b/media/audio/aconfig/midi_flags.aconfig
index efb643f..1620e1b 100644
--- a/media/audio/aconfig/midi_flags.aconfig
+++ b/media/audio/aconfig/midi_flags.aconfig
@@ -8,6 +8,7 @@
flag {
name: "virtual_ump"
+ is_exported: true
namespace: "media_audio"
description: "Enable virtual UMP MIDI."
bug: "291115176"
diff --git a/media/audioaidlconversion/Android.bp b/media/audioaidlconversion/Android.bp
index 07c59c7..2e1eb8c 100644
--- a/media/audioaidlconversion/Android.bp
+++ b/media/audioaidlconversion/Android.bp
@@ -58,10 +58,10 @@
cc_defaults {
name: "audio_aidl_conversion_common_default_cpp",
shared_libs: [
+ "framework-permission-aidl-cpp",
"libbinder",
"libshmemcompat",
"shared-file-region-aidl-cpp",
- "framework-permission-aidl-cpp",
],
export_shared_lib_headers: [
"shared-file-region-aidl-cpp",
@@ -94,8 +94,8 @@
],
sanitize: {
misc_undefined: [
- "unsigned-integer-overflow",
"signed-integer-overflow",
+ "unsigned-integer-overflow",
],
},
target: {
@@ -148,8 +148,8 @@
"latest_android_media_audio_common_types_ndk_shared",
],
shared_libs: [
- "libbinder_ndk",
"libbase",
+ "libbinder_ndk",
],
static_libs: [
"libaudioaidlcommon",
@@ -182,8 +182,8 @@
],
shared_libs: [
"libaudio_aidl_conversion_common_ndk",
- "libbinder_ndk",
"libbase",
+ "libbinder_ndk",
],
cflags: [
"-DBACKEND_NDK",
@@ -213,8 +213,8 @@
],
shared_libs: [
"libaudio_aidl_conversion_common_ndk",
- "libbinder_ndk",
"libbase",
+ "libbinder_ndk",
],
cflags: [
"-DBACKEND_NDK",
@@ -238,8 +238,8 @@
"latest_android_media_audio_common_types_ndk_shared",
],
shared_libs: [
- "libbinder_ndk",
"libbase",
+ "libbinder_ndk",
],
cflags: [
"-DBACKEND_CPP_NDK",
diff --git a/media/audioaidlconversion/tests/Android.bp b/media/audioaidlconversion/tests/Android.bp
index 88b2cc9..bca4dd0 100644
--- a/media/audioaidlconversion/tests/Android.bp
+++ b/media/audioaidlconversion/tests/Android.bp
@@ -16,8 +16,8 @@
],
sanitize: {
misc_undefined: [
- "unsigned-integer-overflow",
"signed-integer-overflow",
+ "unsigned-integer-overflow",
],
},
}
@@ -26,8 +26,8 @@
name: "audio_aidl_ndk_conversion_tests",
defaults: [
- "latest_android_media_audio_common_types_ndk_static",
"latest_android_hardware_audio_common_ndk_static",
+ "latest_android_media_audio_common_types_ndk_static",
"libaudio_aidl_conversion_tests_defaults",
],
srcs: ["audio_aidl_ndk_conversion_tests.cpp"],
diff --git a/media/audioserver/Android.bp b/media/audioserver/Android.bp
index 479e13a..e74fb91 100644
--- a/media/audioserver/Android.bp
+++ b/media/audioserver/Android.bp
@@ -27,11 +27,11 @@
],
defaults: [
+ "latest_android_hardware_audio_core_sounddose_ndk_shared",
+ "latest_android_media_audio_common_types_cpp_shared",
"libaaudioservice_dependencies",
"libaudioflinger_dependencies",
"libaudiopolicyservice_dependencies",
- "latest_android_media_audio_common_types_cpp_shared",
- "latest_android_hardware_audio_core_sounddose_ndk_shared",
],
static_libs: [
diff --git a/media/codec2/components/aom/C2SoftAomEnc.cpp b/media/codec2/components/aom/C2SoftAomEnc.cpp
index 7c9d3e8..722b13a 100644
--- a/media/codec2/components/aom/C2SoftAomEnc.cpp
+++ b/media/codec2/components/aom/C2SoftAomEnc.cpp
@@ -29,6 +29,12 @@
#include "C2SoftAomEnc.h"
+/* Quantization param values defined by the spec */
+#define AOM_QP_MIN 0
+#define AOM_QP_MAX 63
+#define AOM_QP_DEFAULT_MIN AOM_QP_MIN
+#define AOM_QP_DEFAULT_MAX AOM_QP_MAX
+
namespace android {
constexpr char COMPONENT_NAME[] = "c2.android.av1.encoder";
@@ -50,11 +56,13 @@
0u, (uint64_t)C2MemoryUsage::CPU_READ))
.build());
+ // Odd dimension support in encoders requires Android V and above
+ size_t stepSize = isAtLeastV() ? 1 : 2;
addParameter(DefineParam(mSize, C2_PARAMKEY_PICTURE_SIZE)
.withDefault(new C2StreamPictureSizeInfo::input(0u, 320, 240))
.withFields({
- C2F(mSize, width).inRange(2, 2048, 2),
- C2F(mSize, height).inRange(2, 2048, 2),
+ C2F(mSize, width).inRange(2, 2048, stepSize),
+ C2F(mSize, height).inRange(2, 2048, stepSize),
})
.withSetter(SizeSetter)
.build());
@@ -173,6 +181,19 @@
.inRange(C2Color::MATRIX_UNSPECIFIED, C2Color::MATRIX_OTHER)})
.withSetter(CodedColorAspectsSetter, mColorAspects)
.build());
+
+ addParameter(
+ DefineParam(mPictureQuantization, C2_PARAMKEY_PICTURE_QUANTIZATION)
+ .withDefault(C2StreamPictureQuantizationTuning::output::AllocShared(
+ 0 /* flexCount */, 0u /* stream */))
+ .withFields({C2F(mPictureQuantization, m.values[0].type_).oneOf(
+ {C2Config::I_FRAME, C2Config::P_FRAME}),
+ C2F(mPictureQuantization, m.values[0].min).inRange(
+ AOM_QP_DEFAULT_MIN, AOM_QP_DEFAULT_MAX),
+ C2F(mPictureQuantization, m.values[0].max).inRange(
+ AOM_QP_DEFAULT_MIN, AOM_QP_DEFAULT_MAX)})
+ .withSetter(PictureQuantizationSetter)
+ .build());
}
C2R C2SoftAomEnc::IntfImpl::BitrateSetter(bool mayBlock, C2P<C2StreamBitrateInfo::output>& me) {
@@ -305,6 +326,54 @@
return C2R::Ok();
}
+C2R C2SoftAomEnc::IntfImpl::PictureQuantizationSetter(
+ bool mayBlock, C2P<C2StreamPictureQuantizationTuning::output>& me) {
+ (void)mayBlock;
+ int32_t iMin = AOM_QP_DEFAULT_MIN, pMin = AOM_QP_DEFAULT_MIN;
+ int32_t iMax = AOM_QP_DEFAULT_MAX, pMax = AOM_QP_DEFAULT_MAX;
+ for (size_t i = 0; i < me.v.flexCount(); ++i) {
+ const C2PictureQuantizationStruct &layer = me.v.m.values[i];
+ // layerMin is clamped to [AOM_QP_MIN, layerMax] to avoid error
+ // cases where layer.min > layer.max
+ int32_t layerMax = std::clamp(layer.max, AOM_QP_MIN, AOM_QP_MAX);
+ int32_t layerMin = std::clamp(layer.min, AOM_QP_MIN, layerMax);
+ if (layer.type_ == C2Config::picture_type_t(I_FRAME)) {
+ iMax = layerMax;
+ iMin = layerMin;
+ ALOGV("iMin %d iMax %d", iMin, iMax);
+ } else if (layer.type_ == C2Config::picture_type_t(P_FRAME)) {
+ pMax = layerMax;
+ pMin = layerMin;
+ ALOGV("pMin %d pMax %d", pMin, pMax);
+ }
+ }
+ ALOGV("PictureQuantizationSetter(entry): i %d-%d p %d-%d",
+ iMin, iMax, pMin, pMax);
+
+ // the aom library uses the same range for I and P picture types
+ int32_t maxFrameQP = std::min(iMax, pMax);
+ int32_t minFrameQP = std::max(iMin, pMin);
+ if (minFrameQP > maxFrameQP) {
+ minFrameQP = maxFrameQP;
+ }
+ // put them back into the structure
+ for (size_t i = 0; i < me.v.flexCount(); ++i) {
+ const C2PictureQuantizationStruct &layer = me.v.m.values[i];
+
+ if (layer.type_ == C2Config::picture_type_t(I_FRAME)) {
+ me.set().m.values[i].max = maxFrameQP;
+ me.set().m.values[i].min = minFrameQP;
+ } else if (layer.type_ == C2Config::picture_type_t(P_FRAME)) {
+ me.set().m.values[i].max = maxFrameQP;
+ me.set().m.values[i].min = minFrameQP;
+ }
+ }
+ ALOGV("PictureQuantizationSetter(exit): minFrameQP = %d maxFrameQP = %d",
+ minFrameQP, maxFrameQP);
+ return C2R::Ok();
+}
+
uint32_t C2SoftAomEnc::IntfImpl::getLevel_l() const {
return mProfileLevel->level - LEVEL_AV1_2;
}
@@ -556,6 +625,7 @@
mQuality = mIntf->getQuality_l();
mComplexity = mIntf->getComplexity_l();
mAV1EncLevel = mIntf->getLevel_l();
+ mQpBounds = mIntf->getPictureQuantization_l();
}
@@ -573,6 +643,18 @@
break;
}
+ if (mQpBounds->flexCount() > 0) {
+ // read min max qp for sequence
+ for (size_t i = 0; i < mQpBounds->flexCount(); ++i) {
+ const C2PictureQuantizationStruct &layer = mQpBounds->m.values[i];
+ if (layer.type_ == C2Config::picture_type_t(I_FRAME)) {
+ mMaxQuantizer = layer.max;
+ mMinQuantizer = layer.min;
+ break;
+ }
+ }
+ }
+
mCodecInterface = aom_codec_av1_cx();
if (!mCodecInterface) goto CleanUp;
@@ -605,7 +687,7 @@
mCodecConfiguration->g_timebase.den = 1000000;
// rc_target_bitrate is in kbps, mBitrate in bps
mCodecConfiguration->rc_target_bitrate = (mBitrate->value + 500) / 1000;
- mCodecConfiguration->rc_end_usage = mBitrateControlMode == AOM_Q ? AOM_Q : AOM_CBR;
+ mCodecConfiguration->rc_end_usage = mBitrateControlMode;
// Disable frame drop - not allowed in MediaCodec now.
mCodecConfiguration->rc_dropframe_thresh = 0;
// Disable lagged encoding.
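As a worked example of the setter above: with a configured I-frame range of [10, 45] and a P-frame range of [20, 63], the merged bounds are maxFrameQP = min(45, 63) = 45 and minFrameQP = max(10, 20) = 20, so both picture types are rewritten to [20, 45]; the encoder init path then picks these up from the I_FRAME layer as mMinQuantizer/mMaxQuantizer.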
diff --git a/media/codec2/components/aom/C2SoftAomEnc.h b/media/codec2/components/aom/C2SoftAomEnc.h
index 7e5ea63..067b04f 100644
--- a/media/codec2/components/aom/C2SoftAomEnc.h
+++ b/media/codec2/components/aom/C2SoftAomEnc.h
@@ -109,6 +109,7 @@
std::shared_ptr<C2StreamBitrateModeTuning::output> mBitrateMode;
std::shared_ptr<C2StreamRequestSyncFrameTuning::output> mRequestSync;
std::shared_ptr<C2StreamColorAspectsInfo::output> mColorAspects;
+ std::shared_ptr<C2StreamPictureQuantizationTuning::output> mQpBounds;
aom_codec_err_t setupCodecParameters();
};
@@ -126,6 +127,8 @@
const C2P<C2StreamPictureSizeInfo::input>& size,
const C2P<C2StreamFrameRateInfo::output>& frameRate,
const C2P<C2StreamBitrateInfo::output>& bitrate);
+ static C2R PictureQuantizationSetter(bool mayBlock,
+ C2P<C2StreamPictureQuantizationTuning::output> &me);
// unsafe getters
std::shared_ptr<C2StreamPictureSizeInfo::input> getSize_l() const { return mSize; }
@@ -150,6 +153,9 @@
std::shared_ptr<C2StreamPixelFormatInfo::input> getPixelFormat_l() const {
return mPixelFormat;
}
+ std::shared_ptr<C2StreamPictureQuantizationTuning::output> getPictureQuantization_l() const {
+ return mPictureQuantization;
+ }
uint32_t getSyncFramePeriod() const;
static C2R ColorAspectsSetter(bool mayBlock, C2P<C2StreamColorAspectsInfo::input>& me);
static C2R CodedColorAspectsSetter(bool mayBlock, C2P<C2StreamColorAspectsInfo::output>& me,
@@ -171,6 +177,7 @@
std::shared_ptr<C2StreamColorAspectsInfo::input> mColorAspects;
std::shared_ptr<C2StreamColorAspectsInfo::output> mCodedColorAspects;
std::shared_ptr<C2StreamPixelFormatInfo::input> mPixelFormat;
+ std::shared_ptr<C2StreamPictureQuantizationTuning::output> mPictureQuantization;
};
diff --git a/media/codec2/components/base/Android.bp b/media/codec2/components/base/Android.bp
index 4b189b4..2b59ee3 100644
--- a/media/codec2/components/base/Android.bp
+++ b/media/codec2/components/base/Android.bp
@@ -43,7 +43,7 @@
],
static_libs: [
- "libyuv_static", // for conversion routines
+ "libyuv", // for conversion routines
],
shared_libs: [
diff --git a/media/codec2/components/dav1d/C2SoftDav1dDec.cpp b/media/codec2/components/dav1d/C2SoftDav1dDec.cpp
index 76680a3..4ec26d6 100644
--- a/media/codec2/components/dav1d/C2SoftDav1dDec.cpp
+++ b/media/codec2/components/dav1d/C2SoftDav1dDec.cpp
@@ -243,10 +243,17 @@
.build());
addParameter(
+ DefineParam(mLowLatencyMode, C2_PARAMKEY_LOW_LATENCY_MODE)
+ .withDefault(new C2GlobalLowLatencyModeTuning(0))
+ .withFields({C2F(mLowLatencyMode, value).oneOf({0,1})})
+ .withSetter(Setter<decltype(*mLowLatencyMode)>::StrictValueWithNoDeps)
+ .build());
+
+ addParameter(
DefineParam(mActualOutputDelay, C2_PARAMKEY_OUTPUT_DELAY)
.withDefault(new C2PortActualDelayTuning::output(kOutputDelay))
.withFields({C2F(mActualOutputDelay, value).inRange(0, kOutputDelay)})
- .withSetter(Setter<decltype(*mActualOutputDelay)>::StrictValueWithNoDeps)
+ .withSetter(ActualOutputDelaySetter, mLowLatencyMode)
.build());
}
@@ -365,6 +372,10 @@
return mPixelFormat;
}
+ std::shared_ptr<C2PortActualDelayTuning::output> getActualOutputDelay_l() const {
+ return mActualOutputDelay;
+ }
+
static C2R HdrStaticInfoSetter(bool mayBlock, C2P<C2StreamHdrStaticInfo::output>& me) {
(void)mayBlock;
if (me.v.mastering.red.x > 1) {
@@ -406,6 +417,13 @@
return C2R::Ok();
}
+ static C2R ActualOutputDelaySetter(bool mayBlock, C2P<C2PortActualDelayTuning::output>& me,
+ const C2P<C2GlobalLowLatencyModeTuning>& lowLatencyMode) {
+ (void)mayBlock;
+ me.set().value = lowLatencyMode.v.value ? 1 : kOutputDelay;
+ return C2R::Ok();
+ }
+
private:
std::shared_ptr<C2StreamProfileLevelInfo::input> mProfileLevel;
std::shared_ptr<C2StreamPictureSizeInfo::output> mSize;
@@ -419,6 +437,7 @@
std::shared_ptr<C2StreamHdr10PlusInfo::input> mHdr10PlusInfoInput;
std::shared_ptr<C2StreamHdr10PlusInfo::output> mHdr10PlusInfoOutput;
std::shared_ptr<C2StreamHdrStaticInfo::output> mHdrStaticInfo;
+ std::shared_ptr<C2GlobalLowLatencyModeTuning> mLowLatencyMode;
};
C2SoftDav1dDec::C2SoftDav1dDec(const char* name, c2_node_id_t id,
@@ -516,6 +535,7 @@
{
IntfImpl::Lock lock = mIntf->lock();
mPixelFormatInfo = mIntf->getPixelFormat_l();
+ mActualOutputDelayInfo = mIntf->getActualOutputDelay_l();
}
const char* version = dav1d_version();
@@ -529,7 +549,7 @@
android::base::GetIntProperty(NUM_THREADS_DAV1D_PROPERTY, NUM_THREADS_DAV1D_DEFAULT);
if (numThreads > 0) lib_settings.n_threads = numThreads;
- lib_settings.max_frame_delay = kOutputDelay;
+ lib_settings.max_frame_delay = mActualOutputDelayInfo->value;
int res = 0;
if ((res = dav1d_open(&mDav1dCtx, &lib_settings))) {
diff --git a/media/codec2/components/dav1d/C2SoftDav1dDec.h b/media/codec2/components/dav1d/C2SoftDav1dDec.h
index 5d2a725..6008325 100644
--- a/media/codec2/components/dav1d/C2SoftDav1dDec.h
+++ b/media/codec2/components/dav1d/C2SoftDav1dDec.h
@@ -62,6 +62,7 @@
// configurations used by component in process
// (TODO: keep this in intf but make them internal only)
std::shared_ptr<C2StreamPixelFormatInfo::output> mPixelFormatInfo;
+ std::shared_ptr<C2PortActualDelayTuning::output> mActualOutputDelayInfo;
uint32_t mHalPixelFormat;
uint32_t mWidth;
diff --git a/media/codec2/components/flac/C2SoftFlacEnc.cpp b/media/codec2/components/flac/C2SoftFlacEnc.cpp
index 591d56d..7b63e75 100644
--- a/media/codec2/components/flac/C2SoftFlacEnc.cpp
+++ b/media/codec2/components/flac/C2SoftFlacEnc.cpp
@@ -21,6 +21,7 @@
#include <audio_utils/primitives.h>
#include <media/stagefright/foundation/MediaDefs.h>
+#include <C2Debug.h>
#include <C2PlatformSupport.h>
#include <SimpleC2Interface.h>
@@ -81,10 +82,6 @@
FLAC_COMPRESSION_LEVEL_MIN, FLAC_COMPRESSION_LEVEL_MAX)})
.withSetter(Setter<decltype(*mComplexity)>::NonStrictValueWithNoDeps)
.build());
- addParameter(
- DefineParam(mInputMaxBufSize, C2_PARAMKEY_INPUT_MAX_BUFFER_SIZE)
- .withConstValue(new C2StreamMaxBufferSizeInfo::input(0u, 4608))
- .build());
addParameter(
DefineParam(mPcmEncodingInfo, C2_PARAMKEY_PCM_ENCODING)
@@ -96,6 +93,26 @@
})
.withSetter((Setter<decltype(*mPcmEncodingInfo)>::StrictValueWithNoDeps))
.build());
+
+ addParameter(
+ DefineParam(mInputMaxBufSize, C2_PARAMKEY_INPUT_MAX_BUFFER_SIZE)
+ .withDefault(new C2StreamMaxBufferSizeInfo::input(0u, kMaxBlockSize))
+ .withFields({
+ C2F(mInputMaxBufSize, value).any(),
+ })
+ .withSetter(MaxInputSizeSetter, mChannelCount, mPcmEncodingInfo)
+ .build());
+ }
+
+ static C2R MaxInputSizeSetter(bool mayBlock,
+ C2P<C2StreamMaxBufferSizeInfo::input> &me,
+ const C2P<C2StreamChannelCountInfo::input> &channelCount,
+ const C2P<C2StreamPcmEncodingInfo::input> &pcmEncoding) {
+ (void)mayBlock;
+ C2R res = C2R::Ok();
+ int bytesPerSample = pcmEncoding.v.value == C2Config::PCM_FLOAT ? 4 : 2;
+ me.set().value = kMaxBlockSize * bytesPerSample * channelCount.v.value;
+ return res;
}
uint32_t getSampleRate() const { return mSampleRate->value; }
@@ -446,6 +463,9 @@
mBlockSize = FLAC__stream_encoder_get_blocksize(mFlacStreamEncoder);
+ // Ensure the encoder's configured block size stays within kMaxBlockSize
+ CHECK(mBlockSize <= kMaxBlockSize);
+
ALOGV("encoder successfully configured");
return OK;
}
diff --git a/media/codec2/components/flac/C2SoftFlacEnc.h b/media/codec2/components/flac/C2SoftFlacEnc.h
index a971ab5..1f3be3c 100644
--- a/media/codec2/components/flac/C2SoftFlacEnc.h
+++ b/media/codec2/components/flac/C2SoftFlacEnc.h
@@ -63,7 +63,8 @@
std::shared_ptr<IntfImpl> mIntf;
const unsigned int kInBlockSize = 1152;
- const unsigned int kMaxNumChannels = 2;
+ static constexpr unsigned int kMaxNumChannels = 2;
+ static constexpr unsigned int kMaxBlockSize = 4608;
FLAC__StreamEncoder* mFlacStreamEncoder;
FLAC__int32* mInputBufferPcm32;
std::shared_ptr<C2LinearBlock> mOutputBlock;
diff --git a/media/codec2/components/gav1/Android.bp b/media/codec2/components/gav1/Android.bp
index 9781b6d..f22490d 100644
--- a/media/codec2/components/gav1/Android.bp
+++ b/media/codec2/components/gav1/Android.bp
@@ -23,7 +23,7 @@
srcs: ["C2SoftGav1Dec.cpp"],
static_libs: [
"libgav1",
- "libyuv_static",
+ "libyuv",
],
apex_available: [
diff --git a/media/codec2/components/mpeg4_h263/C2SoftMpeg4Dec.cpp b/media/codec2/components/mpeg4_h263/C2SoftMpeg4Dec.cpp
index 2137964..fd9488b 100644
--- a/media/codec2/components/mpeg4_h263/C2SoftMpeg4Dec.cpp
+++ b/media/codec2/components/mpeg4_h263/C2SoftMpeg4Dec.cpp
@@ -469,11 +469,12 @@
mInitialized = false;
}
+ bool codecConfig = (work->input.flags & C2FrameData::FLAG_CODEC_CONFIG) != 0;
+
if (!mInitialized) {
uint8_t *vol_data[1]{};
int32_t vol_size = 0;
- bool codecConfig = (work->input.flags & C2FrameData::FLAG_CODEC_CONFIG) != 0;
if (codecConfig || volHeader) {
vol_data[0] = bitstream;
vol_size = inSize;
@@ -512,10 +513,11 @@
return;
}
}
- if (codecConfig) {
- fillEmptyWork(work);
- return;
- }
+ }
+
+ if (codecConfig) {
+ fillEmptyWork(work);
+ return;
}
size_t inPos = 0;
diff --git a/media/codec2/components/opus/C2SoftOpusEnc.cpp b/media/codec2/components/opus/C2SoftOpusEnc.cpp
index cdc3be0..40bb26e 100644
--- a/media/codec2/components/opus/C2SoftOpusEnc.cpp
+++ b/media/codec2/components/opus/C2SoftOpusEnc.cpp
@@ -29,7 +29,6 @@
#include <opus_multistream.h>
}
-#define DEFAULT_FRAME_DURATION_MS 20
namespace android {
namespace {
@@ -38,7 +37,6 @@
} // namespace
-static const int kMaxNumChannelsSupported = 2;
class C2SoftOpusEnc::IntfImpl : public SimpleInterface<void>::BaseParams {
public:
@@ -248,10 +246,11 @@
mAnchorTimeStamp = 0;
mProcessedSamples = 0;
mFilledLen = 0;
- mFrameDurationMs = DEFAULT_FRAME_DURATION_MS;
+ mFrameDurationMs = kDefaultFrameDurationMs;
if (!mInputBufferPcm16) {
+ size_t frameSize = (mFrameDurationMs * kMaxSampleRateSupported) / 1000;
mInputBufferPcm16 =
- (int16_t*)malloc(kFrameSize * kMaxNumChannels * sizeof(int16_t));
+ (int16_t*)malloc(frameSize * kMaxNumChannelsSupported * sizeof(int16_t));
}
if (!mInputBufferPcm16) return C2_NO_MEMORY;
@@ -368,7 +367,9 @@
}
C2MemoryUsage usage = {C2MemoryUsage::CPU_READ, C2MemoryUsage::CPU_WRITE};
- err = pool->fetchLinearBlock(kMaxPayload, usage, &mOutputBlock);
+ int outCapacity =
+ kMaxPayload * ((inSize + mNumPcmBytesPerInputFrame) / mNumPcmBytesPerInputFrame);
+ err = pool->fetchLinearBlock(outCapacity, usage, &mOutputBlock);
if (err != C2_OK) {
ALOGE("fetchLinearBlock for Output failed with status %d", err);
work->result = C2_NO_MEMORY;
@@ -497,11 +498,11 @@
uint8_t* outPtr = wView.data() + mBytesEncoded;
int encodedBytes =
opus_multistream_encode(mEncoder, mInputBufferPcm16,
- mNumSamplesPerFrame, outPtr, kMaxPayload - mBytesEncoded);
+ mNumSamplesPerFrame, outPtr, outCapacity - mBytesEncoded);
ALOGV("encoded %i Opus bytes from %zu PCM bytes", encodedBytes,
processSize);
- if (encodedBytes < 0 || encodedBytes > (kMaxPayload - mBytesEncoded)) {
+ if (encodedBytes < 0 || encodedBytes > (outCapacity - mBytesEncoded)) {
ALOGE("opus_encode failed, encodedBytes : %d", encodedBytes);
mSignalledError = true;
work->result = C2_CORRUPTED;
diff --git a/media/codec2/components/opus/C2SoftOpusEnc.h b/media/codec2/components/opus/C2SoftOpusEnc.h
index 733a6bc..2c9f5e5 100644
--- a/media/codec2/components/opus/C2SoftOpusEnc.h
+++ b/media/codec2/components/opus/C2SoftOpusEnc.h
@@ -45,12 +45,13 @@
uint32_t drainMode,
const std::shared_ptr<C2BlockPool> &pool) override;
private:
- /* OPUS_FRAMESIZE_20_MS */
- const int kFrameSize = 960;
- const int kMaxSampleRate = 48000;
- const int kMinSampleRate = 8000;
- const int kMaxPayload = (4000 * kMaxSampleRate) / kMinSampleRate;
- const int kMaxNumChannels = 8;
+ static const int kMaxNumChannelsSupported = 2;
+ static const int kMaxSampleRateSupported = 48000;
+ static const int kDefaultFrameDurationMs = 20;
+ // For a frame duration of 20 ms, the recommended payload size is 1276 as per
+ // https://www.opus-codec.org/docs/html_api/group__opusencoder.html.
+ // For 40 ms, 60 ms, ... the payload size scales proportionately: 1276 x 2, 1276 x 3, ...
+ static const int kMaxPayload = 1500; // from tests/test_opus_encode.c
std::shared_ptr<IntfImpl> mIntf;
std::shared_ptr<C2LinearBlock> mOutputBlock;
diff --git a/media/codec2/components/vpx/C2SoftVpxDec.cpp b/media/codec2/components/vpx/C2SoftVpxDec.cpp
index dab7b89..318f093 100644
--- a/media/codec2/components/vpx/C2SoftVpxDec.cpp
+++ b/media/codec2/components/vpx/C2SoftVpxDec.cpp
@@ -446,6 +446,7 @@
{
IntfImpl::Lock lock = mIntf->lock();
mPixelFormatInfo = mIntf->getPixelFormat_l();
+ mColorAspects = mIntf->getDefaultColorAspects_l();
}
mWidth = 320;
@@ -591,6 +592,41 @@
return;
}
+ // handle dynamic config parameters
+ {
+ IntfImpl::Lock lock = mIntf->lock();
+ std::shared_ptr<C2StreamColorAspectsTuning::output> defaultColorAspects =
+ mIntf->getDefaultColorAspects_l();
+ lock.unlock();
+
+ if (mColorAspects->range != defaultColorAspects->range ||
+ mColorAspects->primaries != defaultColorAspects->primaries ||
+ mColorAspects->matrix != defaultColorAspects->matrix ||
+ mColorAspects->transfer != defaultColorAspects->transfer) {
+
+ mColorAspects->range = defaultColorAspects->range;
+ mColorAspects->primaries = defaultColorAspects->primaries;
+ mColorAspects->matrix = defaultColorAspects->matrix;
+ mColorAspects->transfer = defaultColorAspects->transfer;
+
+ C2StreamColorAspectsTuning::output colorAspect(0u, defaultColorAspects->range,
+ defaultColorAspects->primaries, defaultColorAspects->transfer,
+ defaultColorAspects->matrix);
+ std::vector<std::unique_ptr<C2SettingResult>> failures;
+ c2_status_t err = mIntf->config({&colorAspect}, C2_MAY_BLOCK, &failures);
+ if (err == C2_OK) {
+ work->worklets.front()->output.configUpdate.push_back(
+ C2Param::Copy(colorAspect));
+ } else {
+ ALOGE("Config update colorAspect failed");
+ mSignalledError = true;
+ work->workletsProcessed = 1u;
+ work->result = C2_CORRUPTED;
+ return;
+ }
+ }
+ }
+
size_t inOffset = 0u;
size_t inSize = 0u;
C2ReadView rView = mDummyReadView;
diff --git a/media/codec2/components/vpx/C2SoftVpxDec.h b/media/codec2/components/vpx/C2SoftVpxDec.h
index e9d6dc9..93cc213 100644
--- a/media/codec2/components/vpx/C2SoftVpxDec.h
+++ b/media/codec2/components/vpx/C2SoftVpxDec.h
@@ -66,6 +66,7 @@
// configurations used by component in process
// (TODO: keep this in intf but make them internal only)
std::shared_ptr<C2StreamPixelFormatInfo::output> mPixelFormatInfo;
+ std::shared_ptr<C2StreamColorAspectsTuning::output> mColorAspects;
std::shared_ptr<IntfImpl> mIntf;
vpx_codec_ctx_t *mCodecCtx;
diff --git a/media/codec2/components/vpx/C2SoftVpxEnc.cpp b/media/codec2/components/vpx/C2SoftVpxEnc.cpp
index 0384e2e..08e2fa6 100644
--- a/media/codec2/components/vpx/C2SoftVpxEnc.cpp
+++ b/media/codec2/components/vpx/C2SoftVpxEnc.cpp
@@ -22,6 +22,7 @@
#include <media/hardware/VideoAPI.h>
#include <Codec2BufferUtils.h>
+#include <Codec2CommonUtils.h>
#include <C2Debug.h>
#include "C2SoftVpxEnc.h"
@@ -63,12 +64,14 @@
0u, (uint64_t)C2MemoryUsage::CPU_READ))
.build());
+ // Odd dimension support in encoders requires Android V and above
+ size_t stepSize = isAtLeastV() ? 1 : 2;
addParameter(
DefineParam(mSize, C2_PARAMKEY_PICTURE_SIZE)
.withDefault(new C2StreamPictureSizeInfo::input(0u, 64, 64))
.withFields({
- C2F(mSize, width).inRange(2, 2048, 2),
- C2F(mSize, height).inRange(2, 2048, 2),
+ C2F(mSize, width).inRange(2, 2048, stepSize),
+ C2F(mSize, height).inRange(2, 2048, stepSize),
})
.withSetter(SizeSetter)
.build());
@@ -351,12 +354,9 @@
return (uint32_t)c2_max(c2_min(period + 0.5, double(UINT32_MAX)), 1.);
}
-C2R C2SoftVpxEnc::IntfImpl::PictureQuantizationSetter(bool mayBlock,
- C2P<C2StreamPictureQuantizationTuning::output>
- &me) {
+C2R C2SoftVpxEnc::IntfImpl::PictureQuantizationSetter(
+ bool mayBlock, C2P<C2StreamPictureQuantizationTuning::output>& me) {
(void)mayBlock;
- // these are the ones we're going to set, so want them to default
- // to the DEFAULT values for the codec
int32_t iMin = VPX_QP_DEFAULT_MIN, pMin = VPX_QP_DEFAULT_MIN;
int32_t iMax = VPX_QP_DEFAULT_MAX, pMax = VPX_QP_DEFAULT_MAX;
for (size_t i = 0; i < me.v.flexCount(); ++i) {
@@ -379,8 +379,8 @@
iMin, iMax, pMin, pMax);
// vpx library takes same range for I/P picture type
- int32_t maxFrameQP = std::min({iMax, pMax});
- int32_t minFrameQP = std::max({iMin, pMin});
+ int32_t maxFrameQP = std::min(iMax, pMax);
+ int32_t minFrameQP = std::max(iMin, pMin);
if (minFrameQP > maxFrameQP) {
minFrameQP = maxFrameQP;
}
diff --git a/media/codec2/core/include/C2Config.h b/media/codec2/core/include/C2Config.h
index 785cdf2..e6782a9 100644
--- a/media/codec2/core/include/C2Config.h
+++ b/media/codec2/core/include/C2Config.h
@@ -164,6 +164,9 @@
kParamIndexLargeFrame,
kParamIndexAccessUnitInfos, // struct
+ /* Region of Interest Encoding parameters */
+ kParamIndexQpOffsetMapBuffer, // info-buffer, used to signal qp-offset map for a frame
+
// deprecated
kParamIndexDelayRequest = kParamIndexDelay | C2Param::CoreIndex::IS_REQUEST_FLAG,
@@ -201,6 +204,8 @@
kParamIndexPictureQuantization,
kParamIndexHdrDynamicMetadata,
kParamIndexHdrFormat,
+ kParamIndexQpOffsetRect,
+ kParamIndexQpOffsetRects,
/* ------------------------------------ video components ------------------------------------ */
@@ -1394,6 +1399,47 @@
constexpr char C2_PARAMKEY_VUI_ROTATION[] = "coded.vui.rotation";
/**
+ * Region of Interest of an image/video frame communicated as an array of C2QpOffsetRectStruct
+ *
+ * Fields width, height, left and top of C2QpOffsetRectStruct form a bounding box contouring the RoI.
+ * Field qpOffset of C2QpOffsetRectStruct indicates the qp bias to be used for quantizing the
+ * coding units of the bounding box.
+ *
+ * If an RoI rect is not valid, that is, its bounding box width or height is < 0, components may
+ * ignore the configuration silently. If an RoI rect extends outside the frame boundaries, the
+ * rect shall be clamped to the frame boundaries.
+ *
+ * The scope of this key is throughout the encoding session until it is reconfigured with a
+ * different value.
+ *
+ * The number of elements in the C2StreamQpOffsetRects array is not limited by the C2
+ * specification, but components may mandate a limit and may drop rectangles beyond the supported
+ * limits. Hence it is preferable to place the rects in descending order of importance: if
+ * bounding boxes overlap, the qp offset of the most preferred (earlier) rectangle is used to
+ * quantize the block.
+ */
+struct C2QpOffsetRectStruct : C2Rect {
+ C2QpOffsetRectStruct() = default;
+ C2QpOffsetRectStruct(const C2Rect &rect, int32_t offset) : C2Rect(rect), qpOffset(offset) {}
+
+ bool operator==(const C2QpOffsetRectStruct &) = delete;
+ bool operator!=(const C2QpOffsetRectStruct &) = delete;
+
+ int32_t qpOffset;
+
+ DEFINE_AND_DESCRIBE_C2STRUCT(QpOffsetRect)
+ C2FIELD(width, "width")
+ C2FIELD(height, "height")
+ C2FIELD(left, "left")
+ C2FIELD(top, "top")
+ C2FIELD(qpOffset, "qp-offset")
+};
+
+typedef C2StreamParam<C2Info, C2SimpleArrayStruct<C2QpOffsetRectStruct>, kParamIndexQpOffsetRects>
+ C2StreamQpOffsetRects;
+constexpr char C2_PARAMKEY_QP_OFFSET_RECTS[] = "coding.qp-offset-rects";
+
+/**
* Pixel (sample) aspect ratio.
*/
typedef C2StreamParam<C2Info, C2PictureSizeStruct, kParamIndexPixelAspectRatio>
diff --git a/media/codec2/fuzzer/Android.bp b/media/codec2/fuzzer/Android.bp
index b387b2c..ec77427 100644
--- a/media/codec2/fuzzer/Android.bp
+++ b/media/codec2/fuzzer/Android.bp
@@ -163,7 +163,7 @@
static_libs: [
"libgav1",
- "libyuv_static",
+ "libyuv",
"libcodec2_soft_av1dec_gav1",
],
}
diff --git a/media/codec2/hal/aidl/Android.bp b/media/codec2/hal/aidl/Android.bp
index 48b6e21..e16e2b1 100644
--- a/media/codec2/hal/aidl/Android.bp
+++ b/media/codec2/hal/aidl/Android.bp
@@ -8,6 +8,7 @@
name: "libcodec2_aidl_client",
defaults: [
+ "aconfig_lib_cc_static_link.defaults",
"libcodec2_hal_selection",
],
@@ -65,6 +66,7 @@
],
defaults: [
+ "aconfig_lib_cc_static_link.defaults",
"libcodec2_hal_selection",
],
diff --git a/media/codec2/hal/aidl/Component.cpp b/media/codec2/hal/aidl/Component.cpp
index eb64a4a..87c9d87 100644
--- a/media/codec2/hal/aidl/Component.cpp
+++ b/media/codec2/hal/aidl/Component.cpp
@@ -487,7 +487,19 @@
if (__builtin_available(android __ANDROID_API_T__, *)) {
std::shared_ptr<C2Component::Listener> c2listener;
if (mMultiAccessUnitIntf) {
- mMultiAccessUnitHelper = std::make_shared<MultiAccessUnitHelper>(mMultiAccessUnitIntf);
+ std::shared_ptr<C2Allocator> allocator;
+ std::shared_ptr<C2BlockPool> linearPool;
+ std::shared_ptr<C2AllocatorStore> store = ::android::GetCodec2PlatformAllocatorStore();
+ if(store->fetchAllocator(C2AllocatorStore::DEFAULT_LINEAR, &allocator) == C2_OK) {
+ ::android::C2PlatformAllocatorDesc desc;
+ desc.allocatorId = allocator->getId();
+ if (C2_OK == CreateCodec2BlockPool(desc, mComponent, &linearPool)) {
+ if (linearPool) {
+ mMultiAccessUnitHelper = std::make_shared<MultiAccessUnitHelper>(
+ mMultiAccessUnitIntf, linearPool);
+ }
+ }
+ }
}
c2listener = mMultiAccessUnitHelper ?
std::make_shared<MultiAccessUnitListener>(self, mMultiAccessUnitHelper) :
diff --git a/media/codec2/hal/aidl/ComponentInterface.cpp b/media/codec2/hal/aidl/ComponentInterface.cpp
index 8ae9fa8..8c7a986 100644
--- a/media/codec2/hal/aidl/ComponentInterface.cpp
+++ b/media/codec2/hal/aidl/ComponentInterface.cpp
@@ -79,6 +79,26 @@
}
c2_status_t err2 = C2_OK;
if (paramsToLargeFrameIntf.size() > 0) {
+ C2ComponentKindSetting kind;
+ C2StreamMaxBufferSizeInfo::input maxInputSize(0);
+ c2_status_t err = mIntf->query_vb(
+ {&kind, &maxInputSize}, {}, C2_MAY_BLOCK, nullptr);
+ if ((err == C2_OK) && (kind.value == C2Component::KIND_ENCODER)) {
+ for (int i = 0 ; i < paramsToLargeFrameIntf.size(); i++) {
+ if (paramsToLargeFrameIntf[i]->index() ==
+ C2LargeFrame::output::PARAM_TYPE) {
+ C2LargeFrame::output *lfp = C2LargeFrame::output::From(
+ paramsToLargeFrameIntf[i]);
+ // This assumes a worst-case compression ratio of 1:1; the encoder
+ // should never produce more output than the input provided to it
+ // in a single call.
+ if (lfp && (lfp->maxSize < maxInputSize.value)) {
+ lfp->maxSize = maxInputSize.value;
+ }
+ break;
+ }
+ }
+ }
err2 = mMultiAccessUnitIntf->config(
paramsToLargeFrameIntf, mayBlock, failures);
}
diff --git a/media/codec2/hal/client/GraphicsTracker.cpp b/media/codec2/hal/client/GraphicsTracker.cpp
index 01b0678..dbbabfe 100644
--- a/media/codec2/hal/client/GraphicsTracker.cpp
+++ b/media/codec2/hal/client/GraphicsTracker.cpp
@@ -173,7 +173,7 @@
}
GraphicsTracker::GraphicsTracker(int maxDequeueCount)
- : mBufferCache(new BufferCache()), mMaxDequeue{maxDequeueCount},
+ : mBufferCache(new BufferCache()), mNumDequeueing{0}, mMaxDequeue{maxDequeueCount},
mMaxDequeueCommitted{maxDequeueCount},
mDequeueable{maxDequeueCount},
mTotalDequeued{0}, mTotalCancelled{0}, mTotalDropped{0}, mTotalReleased{0},
@@ -235,6 +235,7 @@
const sp<IGraphicBufferProducer>& igbp, uint32_t generation) {
// TODO: wait until operations to previous IGBP is completed.
std::shared_ptr<BufferCache> prevCache;
+ int prevDequeueRequested = 0;
int prevDequeueCommitted;
std::unique_lock<std::mutex> cl(mConfigLock);
@@ -243,6 +244,9 @@
mInConfig = true;
prevCache = mBufferCache;
prevDequeueCommitted = mMaxDequeueCommitted;
+ if (mMaxDequeueRequested.has_value()) {
+ prevDequeueRequested = mMaxDequeueRequested.value();
+ }
}
// NOTE: Switching to the same surface is blocked from MediaCodec.
// Switching to the same surface might not work if tried, since disconnect()
@@ -263,6 +267,11 @@
mInConfig = false;
return C2_BAD_VALUE;
}
+ ALOGD("new surface in configuration: maxDequeueRequested(%d), maxDequeueCommitted(%d)",
+ prevDequeueRequested, prevDequeueCommitted);
+ if (prevDequeueRequested > 0 && prevDequeueRequested > prevDequeueCommitted) {
+ prevDequeueCommitted = prevDequeueRequested;
+ }
if (igbp) {
ret = igbp->setMaxDequeuedBufferCount(prevDequeueCommitted);
if (ret != ::android::OK) {
@@ -280,6 +289,34 @@
std::unique_lock<std::mutex> l(mLock);
mInConfig = false;
mBufferCache = newCache;
+ // {@code dequeued} is the number of currently dequeued buffers.
+ // {@code prevDequeueCommitted} is the maximum number of dequeued buffers
+ // allowed at any moment by the new surface.
+ // {@code newDequeueable} is hence the current number of dequeueable buffers
+ // if no change occurs.
+ int dequeued = mDequeued.size() + mNumDequeueing;
+ int newDequeueable = prevDequeueCommitted - dequeued;
+ if (newDequeueable < 0) {
+ // This should not happen, but if it does, respect the value
+ // and try to continue.
+ ALOGE("calculated new dequeueable is negative: %d max(%d),dequeued(%d)",
+ newDequeueable, prevDequeueCommitted, dequeued);
+ }
+
+ if (mMaxDequeueRequested.has_value() && mMaxDequeueRequested == prevDequeueCommitted) {
+ mMaxDequeueRequested.reset();
+ }
+ mMaxDequeue = mMaxDequeueCommitted = prevDequeueCommitted;
+
+ int delta = newDequeueable - mDequeueable;
+ if (delta > 0) {
+ writeIncDequeueableLocked(delta);
+ } else if (delta < 0) {
+ drainDequeueableLocked(-delta);
+ }
+ ALOGV("new surfcace dequeueable %d(delta %d), maxDequeue %d",
+ newDequeueable, delta, mMaxDequeue);
+ mDequeueable = newDequeueable;
}
return C2_OK;
}
@@ -529,6 +566,7 @@
ALOGE("writing end for the waitable object seems to be closed");
return C2_BAD_STATE;
}
+ mNumDequeueing++;
mDequeueable--;
*cache = mBufferCache;
return C2_OK;
@@ -543,6 +581,7 @@
bool cached, int slot, const sp<Fence> &fence,
std::shared_ptr<BufferItem> *pBuffer, bool *updateDequeue) {
std::unique_lock<std::mutex> l(mLock);
+ mNumDequeueing--;
if (res == C2_OK) {
if (cached) {
auto it = cache->mBuffers.find(slot);
@@ -563,6 +602,8 @@
auto mapRet = mDequeued.emplace(bid, *pBuffer);
CHECK(mapRet.second);
} else {
+ ALOGD("allocate error(%d): Dequeued(%zu), Dequeuable(%d)",
+ (int)res, mDequeued.size(), mDequeueable + 1);
if (adjustDequeueConfLocked(updateDequeue)) {
return;
}
@@ -629,7 +670,11 @@
::android::status_t status = igbp->dequeueBuffer(
&slotId, &fence, width, height, format, usage, &outBufferAge, &outTimestamps);
if (status < ::android::OK) {
- ALOGE("dequeueBuffer() error %d", (int)status);
+ if (status == ::android::TIMED_OUT || status == ::android::WOULD_BLOCK) {
+ ALOGW("BQ might not be ready for dequeueBuffer()");
+ return C2_BLOCKING;
+ }
+ ALOGE("BQ is in an inconsistent state. dequeueBuffer() error %d", (int)status);
return C2_CORRUPTED;
}
cache->waitOnSlot(slotId);
@@ -649,7 +694,8 @@
ALOGE("allocate by dequeueBuffer() successful, but requestBuffer() failed %d",
status);
igbp->cancelBuffer(slotId, fence);
- return C2_CORRUPTED;
+ // This might be due to life-cycle end and/or surface switching.
+ return C2_BLOCKING;
}
*buffer = std::make_shared<BufferItem>(generation, slotId, realloced, fence);
if (!*buffer) {
diff --git a/media/codec2/hal/client/client.cpp b/media/codec2/hal/client/client.cpp
index b3ae514..1d2794e 100644
--- a/media/codec2/hal/client/client.cpp
+++ b/media/codec2/hal/client/client.cpp
@@ -1868,6 +1868,10 @@
return nullptr;
}
+bool Codec2Client::IsAidlSelected() {
+ return c2_aidl::utils::IsSelected();
+}
+
// Codec2Client::Interface
Codec2Client::Interface::Interface(const sp<HidlBase>& base)
: Configurable{
diff --git a/media/codec2/hal/client/include/codec2/aidl/GraphicsTracker.h b/media/codec2/hal/client/include/codec2/aidl/GraphicsTracker.h
index dd6c869..762030b 100644
--- a/media/codec2/hal/client/include/codec2/aidl/GraphicsTracker.h
+++ b/media/codec2/hal/client/include/codec2/aidl/GraphicsTracker.h
@@ -234,6 +234,7 @@
// Maps bufferId to buffer
std::map<uint64_t, std::shared_ptr<BufferItem>> mDequeued;
std::set<uint64_t> mDeallocating;
+ int mNumDequeueing;
// These member variables are read and modified accessed as follows.
// 1. mConfigLock being held
diff --git a/media/codec2/hal/client/include/codec2/hidl/client.h b/media/codec2/hal/client/include/codec2/hidl/client.h
index 3b7f7a6..5c75a47 100644
--- a/media/codec2/hal/client/include/codec2/hidl/client.h
+++ b/media/codec2/hal/client/include/codec2/hidl/client.h
@@ -270,6 +270,9 @@
static std::shared_ptr<InputSurface> CreateInputSurface(
char const* serviceName = nullptr);
+ // Whether AIDL is selected.
+ static bool IsAidlSelected();
+
// base and/or configurable cannot be null.
Codec2Client(
sp<HidlBase> const& base,
diff --git a/media/codec2/hal/common/Android.bp b/media/codec2/hal/common/Android.bp
index 7d7b285..4c9da33 100644
--- a/media/codec2/hal/common/Android.bp
+++ b/media/codec2/hal/common/Android.bp
@@ -31,6 +31,10 @@
],
static_libs: ["aconfig_mediacodec_flags_c_lib"],
+
+ defaults: [
+ "aconfig_lib_cc_static_link.defaults",
+ ],
}
cc_library_static {
diff --git a/media/codec2/hal/common/MultiAccessUnitHelper.cpp b/media/codec2/hal/common/MultiAccessUnitHelper.cpp
index 3a71520..b1fa82f 100644
--- a/media/codec2/hal/common/MultiAccessUnitHelper.cpp
+++ b/media/codec2/hal/common/MultiAccessUnitHelper.cpp
@@ -27,6 +27,7 @@
#include <C2Debug.h>
#include <C2PlatformSupport.h>
+static inline constexpr uint32_t MAX_SUPPORTED_SIZE = (10 * 512000 * 8 * 2u);
namespace android {
static C2R MultiAccessUnitParamsSetter(
@@ -39,8 +40,6 @@
res = res.plus(C2SettingResultBuilder::BadValue(me.F(me.v.thresholdSize)));
} else if (me.v.maxSize < me.v.thresholdSize) {
me.set().maxSize = me.v.thresholdSize;
- } else if (me.v.thresholdSize == 0 && me.v.maxSize > 0) {
- me.set().thresholdSize = me.v.maxSize;
}
std::vector<std::unique_ptr<C2SettingResult>> failures;
res.retrieveFailures(&failures);
@@ -61,9 +60,9 @@
.withDefault(new C2LargeFrame::output(0u, 0, 0))
.withFields({
C2F(mLargeFrameParams, maxSize).inRange(
- 0, c2_min(UINT_MAX, 10 * 512000 * 8 * 2u)),
+ 0, c2_min(UINT_MAX, MAX_SUPPORTED_SIZE)),
C2F(mLargeFrameParams, thresholdSize).inRange(
- 0, c2_min(UINT_MAX, 10 * 512000 * 8 * 2u))
+ 0, c2_min(UINT_MAX, MAX_SUPPORTED_SIZE))
})
.withSetter(MultiAccessUnitParamsSetter)
.build());
@@ -96,18 +95,35 @@
return (C2Component::kind_t)(mKind.value);
}
-void MultiAccessUnitInterface::getDecoderSampleRateAndChannelCount(
- uint32_t &sampleRate_, uint32_t &channelCount_) const {
+bool MultiAccessUnitInterface::getDecoderSampleRateAndChannelCount(
+ uint32_t * const sampleRate_, uint32_t * const channelCount_) const {
+ if (sampleRate_ == nullptr || channelCount_ == nullptr) {
+ return false;
+ }
if (mC2ComponentIntf) {
C2StreamSampleRateInfo::output sampleRate;
C2StreamChannelCountInfo::output channelCount;
c2_status_t res = mC2ComponentIntf->query_vb(
{&sampleRate, &channelCount}, {}, C2_MAY_BLOCK, nullptr);
- if (res == C2_OK) {
- sampleRate_ = sampleRate.value;
- channelCount_ = channelCount.value;
+ if (res == C2_OK && sampleRate.value > 0 && channelCount.value > 0) {
+ *sampleRate_ = sampleRate.value;
+ *channelCount_ = channelCount.value;
+ return true;
}
}
+ return false;
+}
+
+bool MultiAccessUnitInterface::getMaxInputSize(
+ C2StreamMaxBufferSizeInfo::input* const maxInputSize) const {
+ if (maxInputSize == nullptr || mC2ComponentIntf == nullptr) {
+ return false;
+ }
+ c2_status_t err = mC2ComponentIntf->query_vb({maxInputSize}, {}, C2_MAY_BLOCK, nullptr);
+ if (err != OK) {
+ return false;
+ }
+ return true;
}
//C2MultiAccessUnitBuffer
@@ -121,12 +137,13 @@
//MultiAccessUnitHelper
MultiAccessUnitHelper::MultiAccessUnitHelper(
- const std::shared_ptr<MultiAccessUnitInterface>& intf):
+ const std::shared_ptr<MultiAccessUnitInterface>& intf,
+ std::shared_ptr<C2BlockPool>& linearPool):
+ mMultiAccessOnOffAllowed(true),
mInit(false),
- mInterface(intf) {
- std::shared_ptr<C2AllocatorStore> store = GetCodec2PlatformAllocatorStore();
- if(store->fetchAllocator(C2AllocatorStore::DEFAULT_LINEAR, &mLinearAllocator) == C2_OK) {
- mLinearPool = std::make_shared<C2PooledBlockPool>(mLinearAllocator, ++mBlockPoolId);
+ mInterface(intf),
+ mLinearPool(linearPool) {
+ if (mLinearPool) {
mInit = true;
}
}
@@ -147,6 +164,63 @@
return result;
}
+bool MultiAccessUnitHelper::tryReconfigure(const std::unique_ptr<C2Param> &param) {
+ C2LargeFrame::output *lfp = C2LargeFrame::output::From(param.get());
+ if (lfp == nullptr) {
+ return false;
+ }
+ bool isDecoder = (mInterface->kind() == C2Component::KIND_DECODER);
+ if (!isDecoder) {
+ C2StreamMaxBufferSizeInfo::input maxInputSize(0);
+ if (!mInterface->getMaxInputSize(&maxInputSize)) {
+ LOG(ERROR) << "Error in reconfigure: "
+ << "Encoder failed to respond with a valid max input size";
+ return false;
+ }
+ // This assumes a worst-case compression ratio of 1:1.
+ // The encoder should never produce more output than the input
+ // provided to it in a single call.
+ if (lfp->maxSize < maxInputSize.value) {
+ lfp->maxSize = maxInputSize.value;
+ }
+ }
+ lfp->maxSize =
+ (lfp->maxSize > MAX_SUPPORTED_SIZE) ? MAX_SUPPORTED_SIZE :
+ (lfp->maxSize < 0) ? 0 : lfp->maxSize;
+ lfp->thresholdSize =
+ (lfp->thresholdSize > MAX_SUPPORTED_SIZE) ? MAX_SUPPORTED_SIZE :
+ (lfp->thresholdSize < 0) ? 0 : lfp->thresholdSize;
+ C2LargeFrame::output currentConfig = mInterface->getLargeFrameParam();
+ if ((currentConfig.maxSize == lfp->maxSize)
+ && (currentConfig.thresholdSize == lfp->thresholdSize)) {
+ // no need to update
+ return false;
+ }
+ if (isDecoder) {
+ bool isOnOffTransition =
+ (currentConfig.maxSize == 0 && lfp->maxSize != 0)
+ || (currentConfig.maxSize != 0 && lfp->maxSize == 0);
+ if (isOnOffTransition && !mMultiAccessOnOffAllowed) {
+ LOG(ERROR) << "Setting new configs not allowed"
+ << " MaxSize: " << lfp->maxSize
+ << " ThresholdSize: " << lfp->thresholdSize;
+ return false;
+ }
+ }
+ std::vector<C2Param*> config{lfp};
+ std::vector<std::unique_ptr<C2SettingResult>> failures;
+ if (C2_OK != mInterface->config(config, C2_MAY_BLOCK, &failures)) {
+ LOG(ERROR) << "Dynamic config not applied for"
+ << " MaxSize: " << lfp->maxSize
+ << " ThresholdSize: " << lfp->thresholdSize;
+ return false;
+ }
+ LOG(DEBUG) << "Updated from param maxSize "
+ << lfp->maxSize
+ << " ThresholdSize " << lfp->thresholdSize;
+ return true;
+}
+
std::shared_ptr<MultiAccessUnitInterface> MultiAccessUnitHelper::getInterface() {
return mInterface;
}
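
A compact sketch of the bounds applied by tryReconfigure() before the new C2LargeFrame values are pushed to the interface, written as a hypothetical standalone helper (clampLargeFrame and kMaxSupportedSize are illustrative names, not part of this patch):

    #include <algorithm>
    #include <cstdint>

    constexpr uint32_t kMaxSupportedSize = 10 * 512000 * 8 * 2u;

    // For an encoder, maxSize is first raised to the component's max input
    // size (worst-case 1:1 compression), then both fields are clamped to
    // [0, kMaxSupportedSize].
    void clampLargeFrame(bool isEncoder, uint32_t maxInputSize,
                         uint32_t &maxSize, uint32_t &thresholdSize) {
        if (isEncoder && maxSize < maxInputSize) {
            maxSize = maxInputSize;
        }
        maxSize = std::min(maxSize, kMaxSupportedSize);
        thresholdSize = std::min(thresholdSize, kMaxSupportedSize);
    }
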
@@ -158,12 +232,14 @@
void MultiAccessUnitHelper::reset() {
std::lock_guard<std::mutex> l(mLock);
mFrameHolder.clear();
+ mMultiAccessOnOffAllowed = true;
}
c2_status_t MultiAccessUnitHelper::error(
std::list<std::unique_ptr<C2Work>> * const worklist) {
if (worklist == nullptr) {
LOG(ERROR) << "Provided null worklist for error()";
+ mFrameHolder.clear();
return C2_OK;
}
std::unique_lock<std::mutex> l(mLock);
@@ -175,6 +251,7 @@
}
}
mFrameHolder.clear();
+ mMultiAccessOnOffAllowed = true;
return C2_OK;
}
@@ -226,16 +303,23 @@
uint64_t newFrameIdx = mFrameIndex++;
// TODO: Do not split buffers if the component inherently supports multiple frames.
// If that's the case, only replace the frame index.
- auto cloneInputWork = [&newFrameIdx](std::unique_ptr<C2Work>& inWork, uint32_t flags) {
+ auto cloneInputWork = [&frameInfo, &newFrameIdx, this]
+ (std::unique_ptr<C2Work>& inWork, uint32_t flags) -> std::unique_ptr<C2Work> {
std::unique_ptr<C2Work> newWork(new C2Work);
newWork->input.flags = (C2FrameData::flags_t)flags;
newWork->input.ordinal = inWork->input.ordinal;
newWork->input.ordinal.frameIndex = newFrameIdx;
if (!inWork->input.configUpdate.empty()) {
for (std::unique_ptr<C2Param>& param : inWork->input.configUpdate) {
- newWork->input.configUpdate.push_back(
- std::move(C2Param::Copy(*(param.get()))));
+ if (param->index() == C2LargeFrame::output::PARAM_TYPE) {
+ if (tryReconfigure(param)) {
+ frameInfo.mConfigUpdate.push_back(std::move(param));
+ }
+ } else {
+ newWork->input.configUpdate.push_back(std::move(param));
+ }
}
+ inWork->input.configUpdate.clear();
}
newWork->input.infoBuffers = (inWork->input.infoBuffers);
if (!inWork->worklets.empty() && inWork->worklets.front() != nullptr) {
@@ -272,6 +356,7 @@
LOG(ERROR) << "ERROR: Work has Large frame info but has no linear blocks.";
return C2_CORRUPTED;
}
+ frameInfo.mInputC2Ref = inBuffers;
const std::vector<C2ConstLinearBlock>& multiAU =
inBuffers.front()->data().linearBlocks();
std::shared_ptr<const C2AccessUnitInfos::input> auInfo =
@@ -320,26 +405,11 @@
}
}
if (!processedWork->empty()) {
- {
- C2LargeFrame::output multiAccessParams = mInterface->getLargeFrameParam();
- if (mInterface->kind() == C2Component::KIND_DECODER) {
- uint32_t sampleRate = 0;
- uint32_t channelCount = 0;
- uint32_t frameSize = 0;
- mInterface->getDecoderSampleRateAndChannelCount(
- sampleRate, channelCount);
- if (sampleRate > 0 && channelCount > 0) {
- frameSize = channelCount * 2;
- multiAccessParams.maxSize =
- (multiAccessParams.maxSize / frameSize) * frameSize;
- multiAccessParams.thresholdSize =
- (multiAccessParams.thresholdSize / frameSize) * frameSize;
- }
- }
- frameInfo.mLargeFrameTuning = multiAccessParams;
- std::lock_guard<std::mutex> l(mLock);
- mFrameHolder.push_back(std::move(frameInfo));
- }
+ C2LargeFrame::output multiAccessParams = mInterface->getLargeFrameParam();
+ frameInfo.mLargeFrameTuning = multiAccessParams;
+ std::lock_guard<std::mutex> l(mLock);
+ mFrameHolder.push_back(std::move(frameInfo));
+ mMultiAccessOnOffAllowed = false;
}
}
return C2_OK;
@@ -369,6 +439,7 @@
std::list<MultiAccessUnitInfo>::iterator frame =
mFrameHolder.begin();
while (!foundFrame && frame != mFrameHolder.end()) {
+ c2_status_t res = C2_OK;
auto it = frame->mComponentFrameIds.find(thisFrameIndex);
if (it != frame->mComponentFrameIds.end()) {
foundFrame = true;
@@ -378,8 +449,7 @@
if (work->result != C2_OK
|| work->worklets.empty()
|| !work->worklets.front()
- || (frame->mLargeFrameTuning.thresholdSize == 0
- || frame->mLargeFrameTuning.maxSize == 0)) {
+ || frame->mLargeFrameTuning.maxSize == 0) {
if (removeEntry) {
frame->mComponentFrameIds.erase(it);
removeEntry = false;
@@ -397,10 +467,27 @@
addOutWork(frame->mLargeWork);
frame->reset();
if (workResult != C2_OK) {
- frame->mAccessUnitInfos.clear();
+ frame->mComponentFrameIds.clear();
+ removeEntry = false;
}
- } else if (C2_OK != processWorklets(*frame, work, addOutWork)) {
- LOG(DEBUG) << "Error while processing work";
+ } else if (C2_OK != (res = processWorklets(*frame, work, addOutWork))) {
+ // Upon an error in processing worklets, we return the work with its
+ // result set to the error. This indicates the error to the framework
+ // so that it can take whatever action is necessary to handle it.
+ LOG(DEBUG) << "Error while processing worklets";
+ if (frame->mLargeWork == nullptr) {
+ frame->mLargeWork.reset(new C2Work);
+ frame->mLargeWork->input.ordinal = frame->inOrdinal;
+ frame->mLargeWork->input.ordinal.frameIndex =
+ frame->inOrdinal.frameIndex;
+ }
+ frame->mLargeWork->result = res;
+ finalizeWork(*frame);
+ addOutWork(frame->mLargeWork);
+ frame->reset();
+ frame->mComponentFrameIds.clear();
+ removeEntry = false;
}
if (removeEntry) {
LOG(DEBUG) << "Removing entry: " << thisFrameIndex
@@ -506,6 +593,20 @@
frame.reset();
return C2_OK;
}
+ int64_t sampleTimeUs = 0;
+ uint32_t frameSize = 0;
+ uint32_t sampleRate = 0;
+ uint32_t channelCount = 0;
+ if (mInterface->getDecoderSampleRateAndChannelCount(&sampleRate, &channelCount)) {
+ sampleTimeUs = (1000000u) / (sampleRate * channelCount * 2);
+ frameSize = channelCount * 2;
+ if (mInterface->kind() == C2Component::KIND_DECODER) {
+ frame.mLargeFrameTuning.maxSize =
+ (frame.mLargeFrameTuning.maxSize / frameSize) * frameSize;
+ frame.mLargeFrameTuning.thresholdSize =
+ (frame.mLargeFrameTuning.thresholdSize / frameSize) * frameSize;
+ }
+ }
c2_status_t c2ret = allocateWork(frame, true);
if (c2ret != C2_OK) {
return c2ret;
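
An illustrative calculation for the decoder alignment above, with assumed values of 48 kHz stereo 16-bit PCM (not taken from this patch):

    uint32_t sampleRate = 48000, channelCount = 2;     // assumed values
    uint32_t frameSize = channelCount * 2;             // 4 bytes per PCM frame
    int64_t sampleTimeUs =
            1000000u / (sampleRate * channelCount * 2);  // 5 us per byte (integer division)
    // maxSize and thresholdSize are rounded down to whole frames, e.g.
    uint32_t maxSize = 30002;
    maxSize = (maxSize / frameSize) * frameSize;       // 30000
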
@@ -520,20 +621,9 @@
outputFramedata.infoBuffers.insert(outputFramedata.infoBuffers.begin(),
(*worklet)->output.infoBuffers.begin(),
(*worklet)->output.infoBuffers.end());
- int64_t sampleTimeUs = 0;
- uint32_t frameSize = 0;
- uint32_t sampleRate = 0;
- uint32_t channelCount = 0;
- mInterface->getDecoderSampleRateAndChannelCount(sampleRate, channelCount);
- if (sampleRate > 0 && channelCount > 0) {
- sampleTimeUs = (1000000u) / (sampleRate * channelCount * 2);
- frameSize = channelCount * 2;
- }
+
LOG(DEBUG) << "maxOutSize " << frame.mLargeFrameTuning.maxSize
<< " threshold " << frame.mLargeFrameTuning.thresholdSize;
- if ((*worklet)->output.buffers.size() > 0) {
- allocateWork(frame, true, true);
- }
LOG(DEBUG) << "This worklet has " << (*worklet)->output.buffers.size() << " buffers"
<< " ts: " << (*worklet)->output.ordinal.timestamp.peekull();
int64_t workletTimestamp = (*worklet)->output.ordinal.timestamp.peekull();
@@ -555,43 +645,39 @@
inputSize -= (inputSize % frameSize);
}
while (inputOffset < inputSize) {
- if (frame.mWview->offset() >= frame.mLargeFrameTuning.thresholdSize) {
+ if ((frame.mWview != nullptr)
+ && (frame.mWview->offset() >= frame.mLargeFrameTuning.thresholdSize)) {
frame.mLargeWork->result = C2_OK;
finalizeWork(frame, flagsForCopy);
addWork(frame.mLargeWork);
frame.reset();
- allocateWork(frame, true, true);
}
if (mInterface->kind() == C2Component::KIND_ENCODER) {
if (inputSize > frame.mLargeFrameTuning.maxSize) {
- LOG(ERROR) << "Enc: Output buffer too small for AU, configured with "
- << frame.mLargeFrameTuning.maxSize
- << " block size: " << blocks.front().size()
- << "alloc size " << frame.mWview->size();
- if (frame.mLargeWork
- && frame.mWview && frame.mWview->offset() > 0) {
+ LOG(WARNING) << "WARNING Encoder:"
+ << " Output buffer too small for configuration"
+ << " configured max size " << frame.mLargeFrameTuning.maxSize
+ << " access unit size " << inputSize;
+ if (frame.mLargeWork && (frame.mWview && frame.mWview->offset() > 0)) {
+ frame.mLargeWork->result = C2_OK;
finalizeWork(frame, flagsForCopy);
addWork(frame.mLargeWork);
frame.reset();
- allocateWork(frame, true, false);
}
- frame.mLargeWork->result = C2_NO_MEMORY;
- finalizeWork(frame, 0, true);
- addWork(frame.mLargeWork);
- frame.reset();
- return C2_NO_MEMORY;
- } else if (inputSize > frame.mWview->size()) {
+ frame.mLargeFrameTuning.maxSize = inputSize;
+ } else if ((frame.mWview != nullptr)
+ && (inputSize > frame.mWview->size())) {
LOG(DEBUG) << "Enc: Large frame hitting buffer limit, current size "
<< frame.mWview->offset();
- if (frame.mLargeWork
- && frame.mWview && frame.mWview->offset() > 0) {
+ if (frame.mWview->offset() > 0) {
+ frame.mLargeWork->result = C2_OK;
finalizeWork(frame, flagsForCopy);
addWork(frame.mLargeWork);
frame.reset();
- allocateWork(frame, true, true);
}
}
}
+ allocateWork(frame, true, true);
C2ReadView rView = blocks.front().map().get();
if (rView.error()) {
LOG(ERROR) << "Buffer read view error";
@@ -686,26 +772,39 @@
frame.mWview->setOffset(0);
std::shared_ptr<C2Buffer> c2Buffer = C2Buffer::CreateLinearBuffer(
frame.mBlock->share(0, size, ::C2Fence()));
- if (frame.mAccessUnitInfos.size() > 0) {
- if (finalFlags & C2FrameData::FLAG_END_OF_STREAM) {
- frame.mAccessUnitInfos.back().flags |=
- C2FrameData::FLAG_END_OF_STREAM;
- }
- std::shared_ptr<C2AccessUnitInfos::output> largeFrame =
- C2AccessUnitInfos::output::AllocShared(
- frame.mAccessUnitInfos.size(), 0u, frame.mAccessUnitInfos);
- frame.mInfos.push_back(largeFrame);
- frame.mAccessUnitInfos.clear();
- }
- for (auto &info : frame.mInfos) {
- c2Buffer->setInfo(std::const_pointer_cast<C2Info>(info));
- }
frame.mLargeWork->worklets.front()->output.buffers.push_back(std::move(c2Buffer));
- frame.mInfos.clear();
- frame.mBlock.reset();
- frame.mWview.reset();
+ }
+ if (frame.mLargeWork->worklets.front()->output.buffers.size() > 0) {
+ std::shared_ptr<C2Buffer>& c2Buffer =
+ frame.mLargeWork->worklets.front()->output.buffers.front();
+ if (c2Buffer != nullptr) {
+ if (frame.mAccessUnitInfos.size() > 0) {
+ if (finalFlags & C2FrameData::FLAG_END_OF_STREAM) {
+ frame.mAccessUnitInfos.back().flags |= C2FrameData::FLAG_END_OF_STREAM;
+ }
+ std::shared_ptr<C2AccessUnitInfos::output> largeFrame =
+ C2AccessUnitInfos::output::AllocShared(
+ frame.mAccessUnitInfos.size(), 0u, frame.mAccessUnitInfos);
+ frame.mInfos.push_back(largeFrame);
+ frame.mAccessUnitInfos.clear();
+ }
+ for (auto &info : frame.mInfos) {
+ c2Buffer->setInfo(std::const_pointer_cast<C2Info>(info));
+ }
+ }
+ }
+ if (frame.mConfigUpdate.size() > 0) {
+ outFrameData.configUpdate.insert(
+ outFrameData.configUpdate.end(),
+ make_move_iterator(frame.mConfigUpdate.begin()),
+ make_move_iterator(frame.mConfigUpdate.end()));
}
}
+ frame.mConfigUpdate.clear();
+ frame.mInfos.clear();
+ frame.mBlock.reset();
+ frame.mWview.reset();
+
LOG(DEBUG) << "Multi access-unit flag setting as " << finalFlags;
return C2_OK;
}
@@ -738,6 +837,7 @@
mBlock.reset();
mWview.reset();
mInfos.clear();
+ mConfigUpdate.clear();
mAccessUnitInfos.clear();
mLargeWork.reset();
}
diff --git a/media/codec2/hal/common/include/codec2/common/MultiAccessUnitHelper.h b/media/codec2/hal/common/include/codec2/common/MultiAccessUnitHelper.h
index a90ae56..070a1f5 100644
--- a/media/codec2/hal/common/include/codec2/common/MultiAccessUnitHelper.h
+++ b/media/codec2/hal/common/include/codec2/common/MultiAccessUnitHelper.h
@@ -44,8 +44,9 @@
bool isValidField(const C2ParamField &field) const;
protected:
- void getDecoderSampleRateAndChannelCount(
- uint32_t &sampleRate_, uint32_t &channelCount_) const;
+ bool getDecoderSampleRateAndChannelCount(
+ uint32_t * const sampleRate_, uint32_t * const channelCount_) const;
+ bool getMaxInputSize(C2StreamMaxBufferSizeInfo::input* const maxInputSize) const;
const std::shared_ptr<C2ComponentInterface> mC2ComponentIntf;
std::shared_ptr<C2LargeFrame::output> mLargeFrameParams;
C2ComponentKindSetting mKind;
@@ -58,7 +59,8 @@
struct MultiAccessUnitHelper {
public:
MultiAccessUnitHelper(
- const std::shared_ptr<MultiAccessUnitInterface>& intf);
+ const std::shared_ptr<MultiAccessUnitInterface>& intf,
+ std::shared_ptr<C2BlockPool> &linearPool);
virtual ~MultiAccessUnitHelper();
@@ -139,6 +141,11 @@
std::vector<std::shared_ptr<const C2Info>> mInfos;
/*
+ * Vector for holding config updates from the wrapper
+ */
+ std::vector<std::unique_ptr<C2Param>> mConfigUpdate;
+
+ /*
* C2AccessUnitInfos for the current buffer
*/
std::vector<C2AccessUnitInfosStruct> mAccessUnitInfos;
@@ -153,6 +160,11 @@
*/
std::unique_ptr<C2Work> mLargeWork;
+ /*
+ * For holding a reference to the incoming buffer
+ */
+ std::vector<std::shared_ptr<C2Buffer>> mInputC2Ref;
+
MultiAccessUnitInfo(C2WorkOrdinalStruct ordinal):inOrdinal(ordinal) {
}
@@ -164,6 +176,11 @@
};
/*
+ * Reconfigure helper
+ */
+ bool tryReconfigure(const std::unique_ptr<C2Param> &p);
+
+ /*
* Creates a linear block to be used with work
*/
c2_status_t createLinearBlock(MultiAccessUnitInfo &frame);
@@ -189,6 +206,14 @@
uint32_t size,
int64_t timestamp);
+ // Flag that allows dynamic on/off settings on this helper.
+ // Once enabled and buffers are in transit, it is not possible
+ // to turn this module off by setting the max output value to 0,
+ // because the skip-cut buffer expects the metadata to always be
+ // present along with a valid buffer.
+ // This flag is used to track that state of this module.
+ bool mMultiAccessOnOffAllowed;
+
bool mInit;
// Interface of this module
@@ -197,8 +222,6 @@
C2BlockPool::local_id_t mBlockPoolId;
// C2Blockpool for output buffer allocation
std::shared_ptr<C2BlockPool> mLinearPool;
- // Allocator for output buffer allocation
- std::shared_ptr<C2Allocator> mLinearAllocator;
// FrameIndex for the current outgoing work
std::atomic_uint64_t mFrameIndex;
// Mutex to protect mFrameHolder
diff --git a/media/codec2/hal/hidl/1.0/utils/Component.cpp b/media/codec2/hal/hidl/1.0/utils/Component.cpp
index e32e6ae..0259d90 100644
--- a/media/codec2/hal/hidl/1.0/utils/Component.cpp
+++ b/media/codec2/hal/hidl/1.0/utils/Component.cpp
@@ -570,7 +570,19 @@
void Component::initListener(const sp<Component>& self) {
std::shared_ptr<C2Component::Listener> c2listener;
if (mMultiAccessUnitIntf) {
- mMultiAccessUnitHelper = std::make_shared<MultiAccessUnitHelper>(mMultiAccessUnitIntf);
+ std::shared_ptr<C2Allocator> allocator;
+ std::shared_ptr<C2BlockPool> linearPool;
+ std::shared_ptr<C2AllocatorStore> store = ::android::GetCodec2PlatformAllocatorStore();
+ if(store->fetchAllocator(C2AllocatorStore::DEFAULT_LINEAR, &allocator) == C2_OK) {
+ ::android::C2PlatformAllocatorDesc desc;
+ desc.allocatorId = allocator->getId();
+ if (C2_OK == CreateCodec2BlockPool(desc, mComponent, &linearPool)) {
+ if (linearPool) {
+ mMultiAccessUnitHelper = std::make_shared<MultiAccessUnitHelper>(
+ mMultiAccessUnitIntf, linearPool);
+ }
+ }
+ }
}
c2listener = mMultiAccessUnitHelper ?
std::make_shared<MultiAccessUnitListener>(self, mMultiAccessUnitHelper) :
diff --git a/media/codec2/hal/hidl/1.0/utils/ComponentInterface.cpp b/media/codec2/hal/hidl/1.0/utils/ComponentInterface.cpp
index 41a8904..08f1ae2 100644
--- a/media/codec2/hal/hidl/1.0/utils/ComponentInterface.cpp
+++ b/media/codec2/hal/hidl/1.0/utils/ComponentInterface.cpp
@@ -78,6 +78,26 @@
}
c2_status_t err2 = C2_OK;
if (paramsToLargeFrameIntf.size() > 0) {
+ C2ComponentKindSetting kind;
+ C2StreamMaxBufferSizeInfo::input maxInputSize(0);
+ c2_status_t err = mIntf->query_vb(
+ {&kind, &maxInputSize}, {}, C2_MAY_BLOCK, nullptr);
+ if ((err == C2_OK) && (kind.value == C2Component::KIND_ENCODER)) {
+ for (int i = 0 ; i < paramsToLargeFrameIntf.size(); i++) {
+ if (paramsToLargeFrameIntf[i]->index() ==
+ C2LargeFrame::output::PARAM_TYPE) {
+ C2LargeFrame::output *lfp = C2LargeFrame::output::From(
+ paramsToLargeFrameIntf[i]);
+ // This assumes a worst-case compression ratio of 1:1.
+ // The encoder should never produce more output than the input
+ // provided to it in a single call.
+ if (lfp && (lfp->maxSize < maxInputSize.value)) {
+ lfp->maxSize = maxInputSize.value;
+ }
+ break;
+ }
+ }
+ }
err2 = mMultiAccessUnitIntf->config(
paramsToLargeFrameIntf, mayBlock, failures);
}
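
The same worst-case 1:1 assumption, as a short sketch with assumed numbers (the component reports maxInputSize via query_vb(); the values here are illustrative only):

    uint32_t maxInputSize = 65536;     // assumed value reported by the encoder
    uint32_t requestedMaxSize = 32768; // large-frame maxSize requested by the client
    if (requestedMaxSize < maxInputSize) {
        // Raise maxSize so the output of a single access unit always fits.
        requestedMaxSize = maxInputSize;
    }
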
diff --git a/media/codec2/hal/hidl/1.0/vts/functional/component/VtsHalMediaC2V1_0TargetComponentTest.cpp b/media/codec2/hal/hidl/1.0/vts/functional/component/VtsHalMediaC2V1_0TargetComponentTest.cpp
index ab47b7c..36907e1 100644
--- a/media/codec2/hal/hidl/1.0/vts/functional/component/VtsHalMediaC2V1_0TargetComponentTest.cpp
+++ b/media/codec2/hal/hidl/1.0/vts/functional/component/VtsHalMediaC2V1_0TargetComponentTest.cpp
@@ -263,9 +263,6 @@
ALOGV("mComponent->reset() timeConsumed=%" PRId64 " us", timeConsumed);
ASSERT_EQ(err, C2_OK);
- err = mComponent->start();
- ASSERT_EQ(err, C2_OK);
-
// Query supported params by the component
std::vector<std::shared_ptr<C2ParamDescriptor>> params;
startTime = getNowUs();
@@ -298,6 +295,9 @@
timeConsumed);
}
+ err = mComponent->start();
+ ASSERT_EQ(err, C2_OK);
+
std::list<std::unique_ptr<C2Work>> workList;
startTime = getNowUs();
err = mComponent->queue(&workList);
diff --git a/media/codec2/hal/hidl/1.0/vts/functional/master/VtsHalMediaC2V1_0TargetMasterTest.cpp b/media/codec2/hal/hidl/1.0/vts/functional/master/VtsHalMediaC2V1_0TargetMasterTest.cpp
index d1f0fb5..4c2ef9c 100644
--- a/media/codec2/hal/hidl/1.0/vts/functional/master/VtsHalMediaC2V1_0TargetMasterTest.cpp
+++ b/media/codec2/hal/hidl/1.0/vts/functional/master/VtsHalMediaC2V1_0TargetMasterTest.cpp
@@ -83,6 +83,7 @@
}
}
+// @VsrTest = 3.2-001.003
TEST_P(Codec2MasterHalTest, MustUseAidlBeyond202404) {
static int sVendorApiLevel = android::base::GetIntProperty("ro.vendor.api_level", 0);
if (sVendorApiLevel < 202404) {
diff --git a/media/codec2/hal/hidl/1.1/utils/Component.cpp b/media/codec2/hal/hidl/1.1/utils/Component.cpp
index 09e5709..d34d84e 100644
--- a/media/codec2/hal/hidl/1.1/utils/Component.cpp
+++ b/media/codec2/hal/hidl/1.1/utils/Component.cpp
@@ -583,7 +583,19 @@
void Component::initListener(const sp<Component>& self) {
std::shared_ptr<C2Component::Listener> c2listener;
if (mMultiAccessUnitIntf) {
- mMultiAccessUnitHelper = std::make_shared<MultiAccessUnitHelper>(mMultiAccessUnitIntf);
+ std::shared_ptr<C2Allocator> allocator;
+ std::shared_ptr<C2BlockPool> linearPool;
+ std::shared_ptr<C2AllocatorStore> store = ::android::GetCodec2PlatformAllocatorStore();
+ if(store->fetchAllocator(C2AllocatorStore::DEFAULT_LINEAR, &allocator) == C2_OK) {
+ ::android::C2PlatformAllocatorDesc desc;
+ desc.allocatorId = allocator->getId();
+ if (C2_OK == CreateCodec2BlockPool(desc, mComponent, &linearPool)) {
+ if (linearPool) {
+ mMultiAccessUnitHelper = std::make_shared<MultiAccessUnitHelper>(
+ mMultiAccessUnitIntf, linearPool);
+ }
+ }
+ }
}
c2listener = mMultiAccessUnitHelper ?
std::make_shared<MultiAccessUnitListener>(self, mMultiAccessUnitHelper) :
diff --git a/media/codec2/hal/hidl/1.2/utils/Component.cpp b/media/codec2/hal/hidl/1.2/utils/Component.cpp
index 0fe16e3..f78e827 100644
--- a/media/codec2/hal/hidl/1.2/utils/Component.cpp
+++ b/media/codec2/hal/hidl/1.2/utils/Component.cpp
@@ -610,7 +610,19 @@
void Component::initListener(const sp<Component>& self) {
std::shared_ptr<C2Component::Listener> c2listener;
if (mMultiAccessUnitIntf) {
- mMultiAccessUnitHelper = std::make_shared<MultiAccessUnitHelper>(mMultiAccessUnitIntf);
+ std::shared_ptr<C2Allocator> allocator;
+ std::shared_ptr<C2BlockPool> linearPool;
+ std::shared_ptr<C2AllocatorStore> store = ::android::GetCodec2PlatformAllocatorStore();
+ if(store->fetchAllocator(C2AllocatorStore::DEFAULT_LINEAR, &allocator) == C2_OK) {
+ ::android::C2PlatformAllocatorDesc desc;
+ desc.allocatorId = allocator->getId();
+ if (C2_OK == CreateCodec2BlockPool(desc, mComponent, &linearPool)) {
+ if (linearPool) {
+ mMultiAccessUnitHelper = std::make_shared<MultiAccessUnitHelper>(
+ mMultiAccessUnitIntf, linearPool);
+ }
+ }
+ }
}
c2listener = mMultiAccessUnitHelper ?
std::make_shared<MultiAccessUnitListener>(self, mMultiAccessUnitHelper) :
diff --git a/media/codec2/sfplugin/Android.bp b/media/codec2/sfplugin/Android.bp
index 18c2468..7076bac 100644
--- a/media/codec2/sfplugin/Android.bp
+++ b/media/codec2/sfplugin/Android.bp
@@ -19,7 +19,9 @@
export_include_dirs: ["include"],
srcs: [
+ "C2AidlNode.cpp",
"C2OMXNode.cpp",
+ "C2NodeImpl.cpp",
"CCodec.cpp",
"CCodecBufferChannel.cpp",
"CCodecBuffers.cpp",
@@ -45,6 +47,7 @@
static_libs: [
"libSurfaceFlingerProperties",
+ "aconfig_mediacodec_flags_c_lib",
"android.media.codec-aconfig-cc",
],
@@ -53,8 +56,11 @@
"android.hardware.drm@1.0",
"android.hardware.media.c2@1.0",
"android.hardware.media.omx@1.0",
+ "android.hardware.graphics.common-V5-ndk",
+ "graphicbuffersource-aidl-ndk",
"libbase",
"libbinder",
+ "libbinder_ndk",
"libcodec2",
"libcodec2_client",
"libcodec2_vndk",
@@ -66,9 +72,11 @@
"liblog",
"libmedia_codeclist",
"libmedia_omx",
+ "libnativewindow",
"libsfplugin_ccodec_utils",
"libstagefright_bufferqueue_helper",
"libstagefright_codecbase",
+ "libstagefright_graphicbuffersource_aidl",
"libstagefright_foundation",
"libstagefright_omx",
"libstagefright_surface_utils",
@@ -83,6 +91,10 @@
"libcodec2_client",
],
+ defaults: [
+ "aconfig_lib_cc_static_link.defaults",
+ ],
+
sanitize: {
cfi: true,
misc_undefined: [
diff --git a/media/codec2/sfplugin/C2AidlNode.cpp b/media/codec2/sfplugin/C2AidlNode.cpp
new file mode 100644
index 0000000..93c9d8b
--- /dev/null
+++ b/media/codec2/sfplugin/C2AidlNode.cpp
@@ -0,0 +1,120 @@
+/*
+ * Copyright 2024, The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+//#define LOG_NDEBUG 0
+#define LOG_TAG "C2AidlNode"
+#include <log/log.h>
+#include <private/android/AHardwareBufferHelpers.h>
+
+#include <media/stagefright/MediaErrors.h>
+#include <media/stagefright/aidlpersistentsurface/wrapper/Conversion.h>
+
+#include "C2NodeImpl.h"
+#include "C2AidlNode.h"
+
+namespace android {
+
+using ::aidl::android::media::IAidlBufferSource;
+using ::aidl::android::media::IAidlNode;
+
+// Conversion
+using ::android::media::aidl_conversion::toAidlStatus;
+
+C2AidlNode::C2AidlNode(const std::shared_ptr<Codec2Client::Component> &comp)
+ : mImpl(new C2NodeImpl(comp, true)) {}
+
+// aidl ndk interfaces
+::ndk::ScopedAStatus C2AidlNode::freeNode() {
+ return toAidlStatus(mImpl->freeNode());
+}
+
+::ndk::ScopedAStatus C2AidlNode::getConsumerUsage(int64_t* _aidl_return) {
+ uint64_t usage;
+ mImpl->getConsumerUsageBits(&usage);
+ *_aidl_return = usage;
+ return ::ndk::ScopedAStatus::ok();
+}
+
+::ndk::ScopedAStatus C2AidlNode::getInputBufferParams(IAidlNode::InputBufferParams* _aidl_return) {
+ mImpl->getInputBufferParams(_aidl_return);
+ return ::ndk::ScopedAStatus::ok();
+}
+
+::ndk::ScopedAStatus C2AidlNode::setConsumerUsage(int64_t usage) {
+ mImpl->setConsumerUsageBits(static_cast<uint64_t>(usage));
+ return ::ndk::ScopedAStatus::ok();
+}
+
+::ndk::ScopedAStatus C2AidlNode::setAdjustTimestampGapUs(int32_t gapUs) {
+ mImpl->setAdjustTimestampGapUs(gapUs);
+ return ::ndk::ScopedAStatus::ok();
+}
+
+::ndk::ScopedAStatus C2AidlNode::setInputSurface(
+ const std::shared_ptr<IAidlBufferSource>& bufferSource) {
+ return toAidlStatus(mImpl->setAidlInputSurface(bufferSource));
+}
+
+::ndk::ScopedAStatus C2AidlNode::submitBuffer(
+ int32_t buffer, const ::aidl::android::hardware::HardwareBuffer& hBuffer,
+ int32_t flags, int64_t timestamp, const ::ndk::ScopedFileDescriptor& fence) {
+ sp<GraphicBuffer> gBuf;
+ AHardwareBuffer *ahwb = hBuffer.get();
+ if (ahwb) {
+ gBuf = AHardwareBuffer_to_GraphicBuffer(ahwb);
+ }
+ return toAidlStatus(mImpl->submitBuffer(
+ buffer, gBuf, flags, timestamp, ::dup(fence.get())));
+}
+
+::ndk::ScopedAStatus C2AidlNode::onDataSpaceChanged(
+ int32_t dataSpace,
+ int32_t aspects,
+ int32_t pixelFormat) {
+ // NOTE: legacy code passed aspects, but they were not used.
+ (void)aspects;
+
+ return toAidlStatus(mImpl->onDataspaceChanged(
+ static_cast<uint32_t>(dataSpace),
+ static_cast<uint32_t>(pixelFormat)));
+}
+
+// cpp interface
+
+std::shared_ptr<IAidlBufferSource> C2AidlNode::getSource() {
+ return mImpl->getAidlSource();
+}
+
+void C2AidlNode::setFrameSize(uint32_t width, uint32_t height) {
+ return mImpl->setFrameSize(width, height);
+}
+
+void C2AidlNode::onInputBufferDone(c2_cntr64_t index) {
+ return mImpl->onInputBufferDone(index);
+}
+
+android_dataspace C2AidlNode::getDataspace() {
+ return mImpl->getDataspace();
+}
+
+uint32_t C2AidlNode::getPixelFormat() {
+ return mImpl->getPixelFormat();
+}
+
+void C2AidlNode::setPriority(int priority) {
+ return mImpl->setPriority(priority);
+}
+
+} // namespace android
diff --git a/media/codec2/sfplugin/C2AidlNode.h b/media/codec2/sfplugin/C2AidlNode.h
new file mode 100644
index 0000000..365a41d
--- /dev/null
+++ b/media/codec2/sfplugin/C2AidlNode.h
@@ -0,0 +1,97 @@
+/*
+ * Copyright 2024, The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#pragma once
+
+#include <aidl/android/media/BnAidlNode.h>
+#include <codec2/hidl/client.h>
+
+namespace android {
+
+struct C2NodeImpl;
+
+/**
+ * Thin Codec2 AIDL encoder HAL wrapper for InputSurface
+ */
+class C2AidlNode : public ::aidl::android::media::BnAidlNode {
+public:
+ explicit C2AidlNode(const std::shared_ptr<Codec2Client::Component> &comp);
+ ~C2AidlNode() override = default;
+
+ // IAidlNode
+ ::ndk::ScopedAStatus freeNode() override;
+
+ ::ndk::ScopedAStatus getConsumerUsage(int64_t *_aidl_return) override;
+
+ ::ndk::ScopedAStatus getInputBufferParams(
+ ::aidl::android::media::IAidlNode::InputBufferParams *_aidl_return) override;
+
+ ::ndk::ScopedAStatus setConsumerUsage(int64_t usage) override;
+
+ ::ndk::ScopedAStatus setAdjustTimestampGapUs(int32_t gapUs) override;
+
+ ::ndk::ScopedAStatus setInputSurface(
+ const std::shared_ptr<::aidl::android::media::IAidlBufferSource>&
+ bufferSource) override;
+
+ ::ndk::ScopedAStatus submitBuffer(
+ int32_t buffer,
+ const ::aidl::android::hardware::HardwareBuffer& hBuffer,
+ int32_t flags,
+ int64_t timestampUs,
+ const ::ndk::ScopedFileDescriptor& fence) override;
+
+ ::ndk::ScopedAStatus onDataSpaceChanged(
+ int dataSpace, int aspects, int pixelFormat) override;
+
+ /**
+ * Returns underlying IAidlBufferSource object.
+ */
+ std::shared_ptr<::aidl::android::media::IAidlBufferSource> getSource();
+
+ /**
+ * Configure the frame size.
+ */
+ void setFrameSize(uint32_t width, uint32_t height);
+
+ /**
+ * Clean up work item reference.
+ *
+ * \param index input work index
+ */
+ void onInputBufferDone(c2_cntr64_t index);
+
+ /**
+ * Returns dataspace information from GraphicBufferSource.
+ */
+ android_dataspace getDataspace();
+
+ /**
+ * Returns pixel format information from GraphicBufferSource.
+ */
+ uint32_t getPixelFormat();
+
+ /**
+ * Sets priority of the queue thread.
+ */
+ void setPriority(int priority);
+
+private:
+ std::shared_ptr<C2NodeImpl> mImpl;
+};
+
+} // namespace android
+
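
A minimal usage sketch of the thin-wrapper pattern above: C2AidlNode forwards every IAidlNode call to the shared C2NodeImpl. The component pointer and usage value are placeholders, not call sites from this patch:

    std::shared_ptr<Codec2Client::Component> comp = /* obtained from Codec2Client */ nullptr;
    std::shared_ptr<C2AidlNode> node = ::ndk::SharedRefBase::make<C2AidlNode>(comp);
    node->setConsumerUsage(GRALLOC_USAGE_HW_VIDEO_ENCODER);  // forwarded to C2NodeImpl
    node->setFrameSize(1920, 1080);
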
diff --git a/media/codec2/sfplugin/C2NodeImpl.cpp b/media/codec2/sfplugin/C2NodeImpl.cpp
new file mode 100644
index 0000000..6f53e0f
--- /dev/null
+++ b/media/codec2/sfplugin/C2NodeImpl.cpp
@@ -0,0 +1,451 @@
+/*
+ * Copyright 2024, The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "C2NodeImpl"
+#include <log/log.h>
+
+#include <C2AllocatorGralloc.h>
+#include <C2BlockInternal.h>
+#include <C2Component.h>
+#include <C2Config.h>
+#include <C2Debug.h>
+#include <C2PlatformSupport.h>
+
+#include <android/fdsan.h>
+#include <media/stagefright/foundation/ColorUtils.h>
+#include <ui/Fence.h>
+#include <ui/GraphicBuffer.h>
+#include <utils/Errors.h>
+#include <utils/Thread.h>
+
+#include "utils/Codec2Mapper.h"
+#include "C2NodeImpl.h"
+#include "Codec2Buffer.h"
+
+namespace android {
+
+using ::aidl::android::media::IAidlBufferSource;
+using ::aidl::android::media::IAidlNode;
+
+using ::android::media::BUFFERFLAG_EOS;
+
+namespace {
+
+class Buffer2D : public C2Buffer {
+public:
+ explicit Buffer2D(C2ConstGraphicBlock block) : C2Buffer({ block }) {}
+};
+
+} // namespace
+
+class C2NodeImpl::QueueThread : public Thread {
+public:
+ QueueThread() : Thread(false) {}
+ ~QueueThread() override = default;
+ void queue(
+ const std::shared_ptr<Codec2Client::Component> &comp,
+ int fenceFd,
+ std::unique_ptr<C2Work> &&work,
+ android::base::unique_fd &&fd0,
+ android::base::unique_fd &&fd1) {
+ Mutexed<Jobs>::Locked jobs(mJobs);
+ auto it = jobs->queues.try_emplace(comp, comp).first;
+ it->second.workList.emplace_back(
+ std::move(work), fenceFd, std::move(fd0), std::move(fd1));
+ jobs->cond.broadcast();
+ }
+
+ void setDataspace(android_dataspace dataspace) {
+ Mutexed<Jobs>::Locked jobs(mJobs);
+ ColorUtils::convertDataSpaceToV0(dataspace);
+ jobs->configUpdate.emplace_back(new C2StreamDataSpaceInfo::input(0u, dataspace));
+ int32_t standard;
+ int32_t transfer;
+ int32_t range;
+ ColorUtils::getColorConfigFromDataSpace(dataspace, &range, &standard, &transfer);
+ std::unique_ptr<C2StreamColorAspectsInfo::input> colorAspects =
+ std::make_unique<C2StreamColorAspectsInfo::input>(0u);
+ if (C2Mapper::map(standard, &colorAspects->primaries, &colorAspects->matrix)
+ && C2Mapper::map(transfer, &colorAspects->transfer)
+ && C2Mapper::map(range, &colorAspects->range)) {
+ jobs->configUpdate.push_back(std::move(colorAspects));
+ }
+ }
+
+ void setPriority(int priority) {
+ androidSetThreadPriority(getTid(), priority);
+ }
+
+protected:
+ bool threadLoop() override {
+ constexpr nsecs_t kIntervalNs = nsecs_t(10) * 1000 * 1000; // 10ms
+ constexpr nsecs_t kWaitNs = kIntervalNs * 2;
+ for (int i = 0; i < 2; ++i) {
+ Mutexed<Jobs>::Locked jobs(mJobs);
+ nsecs_t nowNs = systemTime();
+ bool queued = false;
+ for (auto it = jobs->queues.begin(); it != jobs->queues.end(); ) {
+ Queue &queue = it->second;
+ if (queue.workList.empty()
+ || (queue.lastQueuedTimestampNs != 0 &&
+ nowNs - queue.lastQueuedTimestampNs < kIntervalNs)) {
+ ++it;
+ continue;
+ }
+ std::shared_ptr<Codec2Client::Component> comp = queue.component.lock();
+ if (!comp) {
+ it = jobs->queues.erase(it);
+ continue;
+ }
+ std::list<std::unique_ptr<C2Work>> items;
+ std::vector<int> fenceFds;
+ std::vector<android::base::unique_fd> uniqueFds;
+ while (!queue.workList.empty()) {
+ items.push_back(std::move(queue.workList.front().work));
+ fenceFds.push_back(queue.workList.front().fenceFd);
+ uniqueFds.push_back(std::move(queue.workList.front().fd0));
+ uniqueFds.push_back(std::move(queue.workList.front().fd1));
+ queue.workList.pop_front();
+ }
+ for (const std::unique_ptr<C2Param> &param : jobs->configUpdate) {
+ items.front()->input.configUpdate.emplace_back(C2Param::Copy(*param));
+ }
+
+ jobs.unlock();
+ for (int fenceFd : fenceFds) {
+ sp<Fence> fence(new Fence(fenceFd));
+ fence->waitForever(LOG_TAG);
+ }
+ queue.lastQueuedTimestampNs = nowNs;
+ comp->queue(&items);
+ for (android::base::unique_fd &ufd : uniqueFds) {
+ (void)ufd.release();
+ }
+ jobs.lock();
+
+ it = jobs->queues.upper_bound(comp);
+ queued = true;
+ }
+ if (queued) {
+ jobs->configUpdate.clear();
+ return true;
+ }
+ if (i == 0) {
+ jobs.waitForConditionRelative(jobs->cond, kWaitNs);
+ }
+ }
+ return true;
+ }
+
+private:
+ struct WorkFence {
+ WorkFence(std::unique_ptr<C2Work> &&w, int fd) : work(std::move(w)), fenceFd(fd) {}
+
+ WorkFence(
+ std::unique_ptr<C2Work> &&w,
+ int fd,
+ android::base::unique_fd &&uniqueFd0,
+ android::base::unique_fd &&uniqueFd1)
+ : work(std::move(w)),
+ fenceFd(fd),
+ fd0(std::move(uniqueFd0)),
+ fd1(std::move(uniqueFd1)) {}
+
+ std::unique_ptr<C2Work> work;
+ int fenceFd;
+ android::base::unique_fd fd0;
+ android::base::unique_fd fd1;
+ };
+ struct Queue {
+ Queue(const std::shared_ptr<Codec2Client::Component> &comp)
+ : component(comp), lastQueuedTimestampNs(0) {}
+ Queue(const Queue &) = delete;
+ Queue &operator =(const Queue &) = delete;
+
+ std::weak_ptr<Codec2Client::Component> component;
+ std::list<WorkFence> workList;
+ nsecs_t lastQueuedTimestampNs;
+ };
+ struct Jobs {
+ std::map<std::weak_ptr<Codec2Client::Component>,
+ Queue,
+ std::owner_less<std::weak_ptr<Codec2Client::Component>>> queues;
+ std::vector<std::unique_ptr<C2Param>> configUpdate;
+ Condition cond;
+ };
+ Mutexed<Jobs> mJobs;
+};
+
+C2NodeImpl::C2NodeImpl(const std::shared_ptr<Codec2Client::Component> &comp, bool aidl)
+ : mComp(comp), mFrameIndex(0), mWidth(0), mHeight(0), mUsage(0),
+ mAdjustTimestampGapUs(0), mFirstInputFrame(true),
+ mQueueThread(new QueueThread), mAidlHal(aidl) {
+ android_fdsan_set_error_level(ANDROID_FDSAN_ERROR_LEVEL_WARN_ALWAYS);
+ mQueueThread->run("C2NodeImpl", PRIORITY_AUDIO);
+
+ android_dataspace ds = HAL_DATASPACE_UNKNOWN;
+ mDataspace.lock().set(ds);
+ uint32_t pf = PIXEL_FORMAT_UNKNOWN;
+ mPixelFormat.lock().set(pf);
+}
+
+C2NodeImpl::~C2NodeImpl() {
+}
+
+status_t C2NodeImpl::freeNode() {
+ mComp.reset();
+ android_fdsan_set_error_level(ANDROID_FDSAN_ERROR_LEVEL_WARN_ONCE);
+ return mQueueThread->requestExitAndWait();
+}
+
+void C2NodeImpl::onFirstInputFrame() {
+ mFirstInputFrame = true;
+}
+
+void C2NodeImpl::getConsumerUsageBits(uint64_t *usage) {
+ *usage = mUsage;
+}
+
+void C2NodeImpl::getInputBufferParams(IAidlNode::InputBufferParams *params) {
+ params->bufferCountActual = 16;
+
+ // WORKAROUND: having more slots improves performance while consuming
+ // more memory. This is a temporary workaround to reduce memory for
+ // the larger-than-4K scenario.
+ if (mWidth * mHeight > 4096 * 2340) {
+ std::shared_ptr<Codec2Client::Component> comp = mComp.lock();
+ C2PortActualDelayTuning::input inputDelay(0);
+ C2ActualPipelineDelayTuning pipelineDelay(0);
+ c2_status_t c2err = C2_NOT_FOUND;
+ if (comp) {
+ c2err = comp->query(
+ {&inputDelay, &pipelineDelay}, {}, C2_DONT_BLOCK, nullptr);
+ }
+ if (c2err == C2_OK || c2err == C2_BAD_INDEX) {
+ params->bufferCountActual = 4;
+ params->bufferCountActual += (inputDelay ? inputDelay.value : 0u);
+ params->bufferCountActual += (pipelineDelay ? pipelineDelay.value : 0u);
+ }
+ }
+
+ params->frameWidth = mWidth;
+ params->frameHeight = mHeight;
+}
+
+void C2NodeImpl::setConsumerUsageBits(uint64_t usage) {
+ mUsage = usage;
+}
+
+void C2NodeImpl::setAdjustTimestampGapUs(int32_t gapUs) {
+ mAdjustTimestampGapUs = gapUs;
+}
+
+status_t C2NodeImpl::setInputSurface(const sp<IOMXBufferSource> &bufferSource) {
+ c2_status_t err = GetCodec2PlatformAllocatorStore()->fetchAllocator(
+ C2PlatformAllocatorStore::GRALLOC,
+ &mAllocator);
+ if (err != OK) {
+ return UNKNOWN_ERROR;
+ }
+ CHECK(!mAidlHal);
+ mBufferSource = bufferSource;
+ return OK;
+}
+
+status_t C2NodeImpl::setAidlInputSurface(
+ const std::shared_ptr<IAidlBufferSource> &aidlBufferSource) {
+ c2_status_t err = GetCodec2PlatformAllocatorStore()->fetchAllocator(
+ C2PlatformAllocatorStore::GRALLOC,
+ &mAllocator);
+ if (err != OK) {
+ return UNKNOWN_ERROR;
+ }
+ CHECK(mAidlHal);
+ mAidlBufferSource = aidlBufferSource;
+ return OK;
+}
+
+status_t C2NodeImpl::submitBuffer(
+ uint32_t buffer, const sp<GraphicBuffer> &graphicBuffer,
+ uint32_t flags, int64_t timestamp, int fenceFd) {
+ std::shared_ptr<Codec2Client::Component> comp = mComp.lock();
+ if (!comp) {
+ return NO_INIT;
+ }
+
+ uint32_t c2Flags = (flags & BUFFERFLAG_EOS)
+ ? C2FrameData::FLAG_END_OF_STREAM : 0;
+ std::shared_ptr<C2GraphicBlock> block;
+
+ android::base::unique_fd fd0, fd1;
+ C2Handle *handle = nullptr;
+ if (graphicBuffer) {
+ std::shared_ptr<C2GraphicAllocation> alloc;
+ handle = WrapNativeCodec2GrallocHandle(
+ graphicBuffer->handle,
+ graphicBuffer->width,
+ graphicBuffer->height,
+ graphicBuffer->format,
+ graphicBuffer->usage,
+ graphicBuffer->stride);
+ if (handle != nullptr) {
+ // unique_fd takes ownership of the fds; we'll get a warning if these
+ // fds get closed by somebody else. Ownership will be released before
+ // we return, so that the fds get closed as usual when this function
+ // goes out of scope (when both items and block are gone).
+ native_handle_t *nativeHandle = reinterpret_cast<native_handle_t*>(handle);
+ fd0.reset(nativeHandle->numFds > 0 ? nativeHandle->data[0] : -1);
+ fd1.reset(nativeHandle->numFds > 1 ? nativeHandle->data[1] : -1);
+ }
+ c2_status_t err = mAllocator->priorGraphicAllocation(handle, &alloc);
+ if (err != OK) {
+ (void)fd0.release();
+ (void)fd1.release();
+ native_handle_close(handle);
+ native_handle_delete(handle);
+ return UNKNOWN_ERROR;
+ }
+ block = _C2BlockFactory::CreateGraphicBlock(alloc);
+ } else if (!(flags & BUFFERFLAG_EOS)) {
+ return BAD_VALUE;
+ }
+
+ std::unique_ptr<C2Work> work(new C2Work);
+ work->input.flags = (C2FrameData::flags_t)c2Flags;
+ work->input.ordinal.timestamp = timestamp;
+
+ // WORKAROUND: adjust timestamp based on gapUs
+ {
+ work->input.ordinal.customOrdinal = timestamp; // save input timestamp
+ if (mFirstInputFrame) {
+ // grab timestamps on first frame
+ mPrevInputTimestamp = timestamp;
+ mPrevCodecTimestamp = timestamp;
+ mFirstInputFrame = false;
+ } else if (mAdjustTimestampGapUs > 0) {
+ work->input.ordinal.timestamp =
+ mPrevCodecTimestamp
+ + c2_min((timestamp - mPrevInputTimestamp).peek(), mAdjustTimestampGapUs);
+ } else if (mAdjustTimestampGapUs < 0) {
+ work->input.ordinal.timestamp = mPrevCodecTimestamp - mAdjustTimestampGapUs;
+ }
+ mPrevInputTimestamp = work->input.ordinal.customOrdinal;
+ mPrevCodecTimestamp = work->input.ordinal.timestamp;
+ ALOGV("adjusting %lld to %lld (gap=%lld)",
+ work->input.ordinal.customOrdinal.peekll(),
+ work->input.ordinal.timestamp.peekll(),
+ (long long)mAdjustTimestampGapUs);
+ }
+
+ work->input.ordinal.frameIndex = mFrameIndex++;
+ work->input.buffers.clear();
+ if (block) {
+ std::shared_ptr<C2Buffer> c2Buffer(
+ new Buffer2D(block->share(
+ C2Rect(block->width(), block->height()), ::C2Fence())));
+ work->input.buffers.push_back(c2Buffer);
+ std::shared_ptr<C2StreamHdrStaticInfo::input> staticInfo;
+ std::shared_ptr<C2StreamHdrDynamicMetadataInfo::input> dynamicInfo;
+ GetHdrMetadataFromGralloc4Handle(
+ block->handle(),
+ &staticInfo,
+ &dynamicInfo);
+ if (staticInfo && *staticInfo) {
+ c2Buffer->setInfo(staticInfo);
+ }
+ if (dynamicInfo && *dynamicInfo) {
+ c2Buffer->setInfo(dynamicInfo);
+ }
+ }
+ work->worklets.clear();
+ work->worklets.emplace_back(new C2Worklet);
+ mBufferIdsInUse.lock()->emplace(work->input.ordinal.frameIndex.peeku(), buffer);
+ mQueueThread->queue(comp, fenceFd, std::move(work), std::move(fd0), std::move(fd1));
+
+ return OK;
+}
+
+status_t C2NodeImpl::onDataspaceChanged(uint32_t dataSpace, uint32_t pixelFormat) {
+ ALOGD("dataspace changed to %#x pixel format: %#x", dataSpace, pixelFormat);
+ android_dataspace d = (android_dataspace)dataSpace;
+ mQueueThread->setDataspace(d);
+
+ mDataspace.lock().set(d);
+ mPixelFormat.lock().set(pixelFormat);
+ return OK;
+}
+
+sp<IOMXBufferSource> C2NodeImpl::getSource() {
+ CHECK(!mAidlHal);
+ return mBufferSource;
+}
+
+std::shared_ptr<IAidlBufferSource> C2NodeImpl::getAidlSource() {
+ CHECK(mAidlHal);
+ return mAidlBufferSource;
+}
+
+void C2NodeImpl::setFrameSize(uint32_t width, uint32_t height) {
+ mWidth = width;
+ mHeight = height;
+}
+
+void C2NodeImpl::onInputBufferDone(c2_cntr64_t index) {
+ if (mAidlHal) {
+ if (!mAidlBufferSource) {
+ ALOGD("Buffer source not set (index=%llu)", index.peekull());
+ return;
+ }
+ } else {
+ if (!mBufferSource) {
+ ALOGD("Buffer source not set (index=%llu)", index.peekull());
+ return;
+ }
+ }
+
+ int32_t bufferId = 0;
+ {
+ decltype(mBufferIdsInUse)::Locked bufferIds(mBufferIdsInUse);
+ auto it = bufferIds->find(index.peeku());
+ if (it == bufferIds->end()) {
+ ALOGV("Untracked input index %llu (maybe already removed)", index.peekull());
+ return;
+ }
+ bufferId = it->second;
+ (void)bufferIds->erase(it);
+ }
+ if (mAidlHal) {
+ ::ndk::ScopedFileDescriptor nullFence;
+ (void)mAidlBufferSource->onInputBufferEmptied(bufferId, nullFence);
+ } else {
+ (void)mBufferSource->onInputBufferEmptied(bufferId, -1);
+ }
+}
+
+android_dataspace C2NodeImpl::getDataspace() {
+ return *mDataspace.lock();
+}
+
+uint32_t C2NodeImpl::getPixelFormat() {
+ return *mPixelFormat.lock();
+}
+
+void C2NodeImpl::setPriority(int priority) {
+ mQueueThread->setPriority(priority);
+}
+
+} // namespace android
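
A worked example for the timestamp-gap workaround in submitBuffer(), with assumed values (mAdjustTimestampGapUs = 33333, roughly one 30 fps frame; the timestamps are illustrative only):

    // gap > 0 caps the codec timestamp gap at 33333 us:
    //   prevInput = 1'000'000, prevCodec = 1'000'000, input = 2'000'000
    //   adjusted  = prevCodec + min(input - prevInput, 33333) = 1'033'333
    // gap < 0 (e.g. -33333) forces a fixed gap instead:
    //   adjusted  = prevCodec + 33333, regardless of the input gap
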
diff --git a/media/codec2/sfplugin/C2NodeImpl.h b/media/codec2/sfplugin/C2NodeImpl.h
new file mode 100644
index 0000000..e060fd8
--- /dev/null
+++ b/media/codec2/sfplugin/C2NodeImpl.h
@@ -0,0 +1,129 @@
+/*
+ * Copyright 2024, The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#pragma once
+
+#include <atomic>
+
+#include <android/IOMXBufferSource.h>
+#include <aidl/android/media/IAidlBufferSource.h>
+#include <aidl/android/media/IAidlNode.h>
+#include <codec2/hidl/client.h>
+#include <media/stagefright/foundation/Mutexed.h>
+#include <media/stagefright/aidlpersistentsurface/C2NodeDef.h>
+
+namespace android {
+
+/**
+ * IOmxNode implementation around a Codec 2.0 component, only to be used in
+ * IGraphicBufferSource::configure. Only a subset of the IOmxNode API is implemented.
+ * As a result, this IOmxNode cannot be expected to work in any usage other than
+ * IGraphicBufferSource (or IAidlGraphicBufferSource when the AIDL HAL is used).
+ */
+struct C2NodeImpl {
+ explicit C2NodeImpl(const std::shared_ptr<Codec2Client::Component> &comp, bool aidl);
+ ~C2NodeImpl();
+
+ // IOMXNode and/or IAidlNode
+ status_t freeNode();
+
+ void onFirstInputFrame();
+ void getConsumerUsageBits(uint64_t *usage /* nonnull */);
+ void getInputBufferParams(
+ ::aidl::android::media::IAidlNode::InputBufferParams *params /* nonnull */);
+ void setConsumerUsageBits(uint64_t usage);
+ void setAdjustTimestampGapUs(int32_t gapUs);
+
+ status_t setInputSurface(
+ const sp<IOMXBufferSource> &bufferSource);
+ status_t setAidlInputSurface(
+ const std::shared_ptr<::aidl::android::media::IAidlBufferSource> &aidlBufferSource);
+
+ status_t submitBuffer(
+ uint32_t buffer, const sp<GraphicBuffer> &graphicBuffer,
+ uint32_t flags, int64_t timestamp, int fenceFd);
+ status_t onDataspaceChanged(uint32_t dataSpace, uint32_t pixelFormat);
+
+ /**
+ * Returns underlying IOMXBufferSource object.
+ */
+ sp<IOMXBufferSource> getSource();
+
+ /**
+ * Returns underlying IAidlBufferSource object.
+ */
+ std::shared_ptr<::aidl::android::media::IAidlBufferSource> getAidlSource();
+
+ /**
+ * Configure the frame size.
+ */
+ void setFrameSize(uint32_t width, uint32_t height);
+
+ /**
+ * Clean up work item reference.
+ *
+ * \param index input work index
+ */
+ void onInputBufferDone(c2_cntr64_t index);
+
+ /**
+ * Returns dataspace information from GraphicBufferSource.
+ */
+ android_dataspace getDataspace();
+
+ /**
+ * Returns pixel format information from GraphicBufferSource.
+ */
+ uint32_t getPixelFormat();
+
+ /**
+ * Sets priority of the queue thread.
+ */
+ void setPriority(int priority);
+
+private:
+ std::weak_ptr<Codec2Client::Component> mComp;
+
+ sp<IOMXBufferSource> mBufferSource;
+ std::shared_ptr<::aidl::android::media::IAidlBufferSource> mAidlBufferSource;
+
+ std::shared_ptr<C2Allocator> mAllocator;
+ std::atomic_uint64_t mFrameIndex;
+ uint32_t mWidth;
+ uint32_t mHeight;
+ uint64_t mUsage;
+ Mutexed<android_dataspace> mDataspace;
+ Mutexed<uint32_t> mPixelFormat;
+
+ // WORKAROUND: timestamp adjustment
+
+ // if >0: this is the max timestamp gap, if <0: this is -1 times the fixed timestamp gap
+ // if 0: no timestamp adjustment is made
+ // note that C2OMXNode can be recycled between encoding sessions.
+ int32_t mAdjustTimestampGapUs;
+ bool mFirstInputFrame; // true for first input
+ c2_cntr64_t mPrevInputTimestamp; // input timestamp for previous frame
+ c2_cntr64_t mPrevCodecTimestamp; // adjusted (codec) timestamp for previous frame
+
+ Mutexed<std::map<uint64_t, uint32_t>> mBufferIdsInUse;
+
+ class QueueThread;
+ sp<QueueThread> mQueueThread;
+
+ bool mAidlHal;
+};
+
+} // namespace android
diff --git a/media/codec2/sfplugin/C2OMXNode.cpp b/media/codec2/sfplugin/C2OMXNode.cpp
index bba022b..ce02c88 100644
--- a/media/codec2/sfplugin/C2OMXNode.cpp
+++ b/media/codec2/sfplugin/C2OMXNode.cpp
@@ -1,5 +1,5 @@
/*
- * Copyright 2018, The Android Open Source Project
+ * Copyright 2024, The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
@@ -19,30 +19,17 @@
#endif
//#define LOG_NDEBUG 0
-#define LOG_TAG "C2OMXNode"
+#define LOG_TAG "C2OMXNODE"
#include <log/log.h>
-#include <C2AllocatorGralloc.h>
-#include <C2BlockInternal.h>
-#include <C2Component.h>
-#include <C2Config.h>
-#include <C2PlatformSupport.h>
-
#include <OMX_Component.h>
#include <OMX_Index.h>
#include <OMX_IndexExt.h>
-#include <android/fdsan.h>
-#include <media/stagefright/foundation/ColorUtils.h>
-#include <media/stagefright/omx/OMXUtils.h>
#include <media/stagefright/MediaErrors.h>
-#include <ui/Fence.h>
-#include <ui/GraphicBuffer.h>
-#include <utils/Thread.h>
-#include "utils/Codec2Mapper.h"
#include "C2OMXNode.h"
-#include "Codec2Buffer.h"
+#include "C2NodeImpl.h"
namespace android {
@@ -50,175 +37,25 @@
constexpr OMX_U32 kPortIndexInput = 0;
-class Buffer2D : public C2Buffer {
-public:
- explicit Buffer2D(C2ConstGraphicBlock block) : C2Buffer({ block }) {}
-};
+} // anonymous namespace
-} // namespace
+using ::android::media::BUFFERFLAG_ENDOFFRAME;
+using ::android::media::BUFFERFLAG_EOS;
-class C2OMXNode::QueueThread : public Thread {
-public:
- QueueThread() : Thread(false) {}
- ~QueueThread() override = default;
- void queue(
- const std::shared_ptr<Codec2Client::Component> &comp,
- int fenceFd,
- std::unique_ptr<C2Work> &&work,
- android::base::unique_fd &&fd0,
- android::base::unique_fd &&fd1) {
- Mutexed<Jobs>::Locked jobs(mJobs);
- auto it = jobs->queues.try_emplace(comp, comp).first;
- it->second.workList.emplace_back(
- std::move(work), fenceFd, std::move(fd0), std::move(fd1));
- jobs->cond.broadcast();
- }
-
- void setDataspace(android_dataspace dataspace) {
- Mutexed<Jobs>::Locked jobs(mJobs);
- ColorUtils::convertDataSpaceToV0(dataspace);
- jobs->configUpdate.emplace_back(new C2StreamDataSpaceInfo::input(0u, dataspace));
- int32_t standard;
- int32_t transfer;
- int32_t range;
- ColorUtils::getColorConfigFromDataSpace(dataspace, &range, &standard, &transfer);
- std::unique_ptr<C2StreamColorAspectsInfo::input> colorAspects =
- std::make_unique<C2StreamColorAspectsInfo::input>(0u);
- if (C2Mapper::map(standard, &colorAspects->primaries, &colorAspects->matrix)
- && C2Mapper::map(transfer, &colorAspects->transfer)
- && C2Mapper::map(range, &colorAspects->range)) {
- jobs->configUpdate.push_back(std::move(colorAspects));
- }
- }
-
- void setPriority(int priority) {
- androidSetThreadPriority(getTid(), priority);
- }
-
-protected:
- bool threadLoop() override {
- constexpr nsecs_t kIntervalNs = nsecs_t(10) * 1000 * 1000; // 10ms
- constexpr nsecs_t kWaitNs = kIntervalNs * 2;
- for (int i = 0; i < 2; ++i) {
- Mutexed<Jobs>::Locked jobs(mJobs);
- nsecs_t nowNs = systemTime();
- bool queued = false;
- for (auto it = jobs->queues.begin(); it != jobs->queues.end(); ) {
- Queue &queue = it->second;
- if (queue.workList.empty()
- || (queue.lastQueuedTimestampNs != 0 &&
- nowNs - queue.lastQueuedTimestampNs < kIntervalNs)) {
- ++it;
- continue;
- }
- std::shared_ptr<Codec2Client::Component> comp = queue.component.lock();
- if (!comp) {
- it = jobs->queues.erase(it);
- continue;
- }
- std::list<std::unique_ptr<C2Work>> items;
- std::vector<int> fenceFds;
- std::vector<android::base::unique_fd> uniqueFds;
- while (!queue.workList.empty()) {
- items.push_back(std::move(queue.workList.front().work));
- fenceFds.push_back(queue.workList.front().fenceFd);
- uniqueFds.push_back(std::move(queue.workList.front().fd0));
- uniqueFds.push_back(std::move(queue.workList.front().fd1));
- queue.workList.pop_front();
- }
- for (const std::unique_ptr<C2Param> ¶m : jobs->configUpdate) {
- items.front()->input.configUpdate.emplace_back(C2Param::Copy(*param));
- }
-
- jobs.unlock();
- for (int fenceFd : fenceFds) {
- sp<Fence> fence(new Fence(fenceFd));
- fence->waitForever(LOG_TAG);
- }
- queue.lastQueuedTimestampNs = nowNs;
- comp->queue(&items);
- for (android::base::unique_fd &ufd : uniqueFds) {
- (void)ufd.release();
- }
- jobs.lock();
-
- it = jobs->queues.upper_bound(comp);
- queued = true;
- }
- if (queued) {
- jobs->configUpdate.clear();
- return true;
- }
- if (i == 0) {
- jobs.waitForConditionRelative(jobs->cond, kWaitNs);
- }
- }
- return true;
- }
-
-private:
- struct WorkFence {
- WorkFence(std::unique_ptr<C2Work> &&w, int fd) : work(std::move(w)), fenceFd(fd) {}
-
- WorkFence(
- std::unique_ptr<C2Work> &&w,
- int fd,
- android::base::unique_fd &&uniqueFd0,
- android::base::unique_fd &&uniqueFd1)
- : work(std::move(w)),
- fenceFd(fd),
- fd0(std::move(uniqueFd0)),
- fd1(std::move(uniqueFd1)) {}
-
- std::unique_ptr<C2Work> work;
- int fenceFd;
- android::base::unique_fd fd0;
- android::base::unique_fd fd1;
- };
- struct Queue {
- Queue(const std::shared_ptr<Codec2Client::Component> &comp)
- : component(comp), lastQueuedTimestampNs(0) {}
- Queue(const Queue &) = delete;
- Queue &operator =(const Queue &) = delete;
-
- std::weak_ptr<Codec2Client::Component> component;
- std::list<WorkFence> workList;
- nsecs_t lastQueuedTimestampNs;
- };
- struct Jobs {
- std::map<std::weak_ptr<Codec2Client::Component>,
- Queue,
- std::owner_less<std::weak_ptr<Codec2Client::Component>>> queues;
- std::vector<std::unique_ptr<C2Param>> configUpdate;
- Condition cond;
- };
- Mutexed<Jobs> mJobs;
-};
+using ::aidl::android::media::IAidlNode;
C2OMXNode::C2OMXNode(const std::shared_ptr<Codec2Client::Component> &comp)
- : mComp(comp), mFrameIndex(0), mWidth(0), mHeight(0), mUsage(0),
- mAdjustTimestampGapUs(0), mFirstInputFrame(true),
- mQueueThread(new QueueThread) {
- android_fdsan_set_error_level(ANDROID_FDSAN_ERROR_LEVEL_WARN_ALWAYS);
- mQueueThread->run("C2OMXNode", PRIORITY_AUDIO);
-
- android_dataspace ds = HAL_DATASPACE_UNKNOWN;
- mDataspace.lock().set(ds);
- uint32_t pf = PIXEL_FORMAT_UNKNOWN;
- mPixelFormat.lock().set(pf);
-}
+ : mImpl(new C2NodeImpl(comp, false)) {}
status_t C2OMXNode::freeNode() {
- mComp.reset();
- android_fdsan_set_error_level(ANDROID_FDSAN_ERROR_LEVEL_WARN_ONCE);
- return mQueueThread->requestExitAndWait();
+ return mImpl->freeNode();
}
status_t C2OMXNode::sendCommand(OMX_COMMANDTYPE cmd, OMX_S32 param) {
if (cmd == OMX_CommandStateSet && param == OMX_StateLoaded) {
// Reset first input frame so if C2OMXNode is recycled, the timestamp does not become
// negative. This is a workaround for HW codecs that do not handle timestamp rollover.
- mFirstInputFrame = true;
+ mImpl->onFirstInputFrame();
}
return ERROR_UNSUPPORTED;
}
@@ -228,13 +65,19 @@
switch ((uint32_t)index) {
case OMX_IndexParamConsumerUsageBits: {
OMX_U32 *usage = (OMX_U32 *)params;
- *usage = mUsage;
+ uint64_t val;
+ mImpl->getConsumerUsageBits(&val);
+ *usage = static_cast<uint32_t>(val & 0xFFFFFFFF);
+ ALOGW("retrieving usage bits in 32 bits %llu -> %u",
+ (unsigned long long)val, (unsigned int)*usage);
err = OK;
break;
}
case OMX_IndexParamConsumerUsageBits64: {
OMX_U64 *usage = (OMX_U64 *)params;
- *usage = mUsage;
+ uint64_t val;
+ mImpl->getConsumerUsageBits(&val);
+ *usage = val;
err = OK;
break;
}
@@ -246,31 +89,12 @@
if (pDef->nPortIndex != kPortIndexInput) {
break;
}
-
- pDef->nBufferCountActual = 16;
-
- // WORKAROUND: having more slots improve performance while consuming
- // more memory. This is a temporary workaround to reduce memory for
- // larger-than-4K scenario.
- if (mWidth * mHeight > 4096 * 2340) {
- std::shared_ptr<Codec2Client::Component> comp = mComp.lock();
- C2PortActualDelayTuning::input inputDelay(0);
- C2ActualPipelineDelayTuning pipelineDelay(0);
- c2_status_t c2err = C2_NOT_FOUND;
- if (comp) {
- c2err = comp->query(
- {&inputDelay, &pipelineDelay}, {}, C2_DONT_BLOCK, nullptr);
- }
- if (c2err == C2_OK || c2err == C2_BAD_INDEX) {
- pDef->nBufferCountActual = 4;
- pDef->nBufferCountActual += (inputDelay ? inputDelay.value : 0u);
- pDef->nBufferCountActual += (pipelineDelay ? pipelineDelay.value : 0u);
- }
- }
-
+ IAidlNode::InputBufferParams bufferParams;
+ mImpl->getInputBufferParams(&bufferParams);
+ pDef->nBufferCountActual = bufferParams.bufferCountActual;
pDef->eDomain = OMX_PortDomainVideo;
- pDef->format.video.nFrameWidth = mWidth;
- pDef->format.video.nFrameHeight = mHeight;
+ pDef->format.video.nFrameWidth = bufferParams.frameWidth;
+ pDef->format.video.nFrameHeight = bufferParams.frameHeight;
pDef->format.video.eColorFormat = OMX_COLOR_FormatAndroidOpaque;
err = OK;
break;
@@ -286,28 +110,34 @@
return BAD_VALUE;
}
switch ((uint32_t)index) {
- case OMX_IndexParamMaxFrameDurationForBitrateControl:
+ case OMX_IndexParamMaxFrameDurationForBitrateControl: {
// handle max/fixed frame duration control
if (size != sizeof(OMX_PARAM_U32TYPE)) {
return BAD_VALUE;
}
// The incoming number is an int32_t contained in OMX_U32.
- mAdjustTimestampGapUs = (int32_t)((OMX_PARAM_U32TYPE*)params)->nU32;
+ int32_t gapUs = (int32_t)((OMX_PARAM_U32TYPE*)params)->nU32;
+ mImpl->setAdjustTimestampGapUs(gapUs);
return OK;
-
- case OMX_IndexParamConsumerUsageBits:
+ }
+ case OMX_IndexParamConsumerUsageBits: {
if (size != sizeof(OMX_U32)) {
return BAD_VALUE;
}
- mUsage = *((OMX_U32 *)params);
+ uint32_t usage = *((OMX_U32 *)params);
+ mImpl->setConsumerUsageBits(static_cast<uint64_t>(usage));
return OK;
-
- case OMX_IndexParamConsumerUsageBits64:
+ }
+ case OMX_IndexParamConsumerUsageBits64: {
if (size != sizeof(OMX_U64)) {
return BAD_VALUE;
}
- mUsage = *((OMX_U64 *)params);
+ uint64_t usagell = *((OMX_U64 *)params);
+ mImpl->setConsumerUsageBits(usagell);
return OK;
+ }
+ default:
+ break;
}
return ERROR_UNSUPPORTED;
}
@@ -359,14 +189,7 @@
}
status_t C2OMXNode::setInputSurface(const sp<IOMXBufferSource> &bufferSource) {
- c2_status_t err = GetCodec2PlatformAllocatorStore()->fetchAllocator(
- C2PlatformAllocatorStore::GRALLOC,
- &mAllocator);
- if (err != OK) {
- return UNKNOWN_ERROR;
- }
- mBufferSource = bufferSource;
- return OK;
+ return mImpl->setInputSurface(bufferSource);
}
status_t C2OMXNode::allocateSecureBuffer(
@@ -402,105 +225,39 @@
return ERROR_UNSUPPORTED;
}
+namespace {
+ uint32_t toNodeFlags(OMX_U32 flags) {
+ uint32_t retFlags = 0;
+ if (flags & OMX_BUFFERFLAG_ENDOFFRAME) {
+ retFlags |= BUFFERFLAG_ENDOFFRAME;
+ }
+ if (flags & OMX_BUFFERFLAG_EOS) {
+ retFlags |= BUFFERFLAG_EOS;
+ }
+ return retFlags;
+ }
+ int64_t toNodeTimestamp(OMX_TICKS ticks) {
+ int64_t timestamp = 0;
+#ifndef OMX_SKIP64BIT
+ timestamp = ticks;
+#else
+ timestamp = ((ticks.nHighPart << 32) | ticks.nLowPart);
+#endif
+ return timestamp;
+ }
+} // anonymous namespace
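+// These helpers map OMX buffer flags and OMX_TICKS timestamps to the flag and
+// timestamp representation expected by C2NodeImpl::submitBuffer() below.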
+
status_t C2OMXNode::emptyBuffer(
buffer_id buffer, const OMXBuffer &omxBuf,
OMX_U32 flags, OMX_TICKS timestamp, int fenceFd) {
- std::shared_ptr<Codec2Client::Component> comp = mComp.lock();
- if (!comp) {
- return NO_INIT;
- }
-
- uint32_t c2Flags = (flags & OMX_BUFFERFLAG_EOS)
- ? C2FrameData::FLAG_END_OF_STREAM : 0;
- std::shared_ptr<C2GraphicBlock> block;
-
- android::base::unique_fd fd0, fd1;
- C2Handle *handle = nullptr;
if (omxBuf.mBufferType == OMXBuffer::kBufferTypeANWBuffer
&& omxBuf.mGraphicBuffer != nullptr) {
- std::shared_ptr<C2GraphicAllocation> alloc;
- handle = WrapNativeCodec2GrallocHandle(
- omxBuf.mGraphicBuffer->handle,
- omxBuf.mGraphicBuffer->width,
- omxBuf.mGraphicBuffer->height,
- omxBuf.mGraphicBuffer->format,
- omxBuf.mGraphicBuffer->usage,
- omxBuf.mGraphicBuffer->stride);
- if (handle != nullptr) {
- // unique_fd takes ownership of the fds, we'll get warning if these
- // fds get closed by somebody else. Onwership will be released before
- // we return, so that the fds get closed as usually when this function
- // goes out of scope (when both items and block are gone).
- native_handle_t *nativeHandle = reinterpret_cast<native_handle_t*>(handle);
- fd0.reset(nativeHandle->numFds > 0 ? nativeHandle->data[0] : -1);
- fd1.reset(nativeHandle->numFds > 1 ? nativeHandle->data[1] : -1);
- }
- c2_status_t err = mAllocator->priorGraphicAllocation(handle, &alloc);
- if (err != OK) {
- (void)fd0.release();
- (void)fd1.release();
- native_handle_close(handle);
- native_handle_delete(handle);
- return UNKNOWN_ERROR;
- }
- block = _C2BlockFactory::CreateGraphicBlock(alloc);
- } else if (!(flags & OMX_BUFFERFLAG_EOS)) {
- return BAD_VALUE;
+ return mImpl->submitBuffer(buffer, omxBuf.mGraphicBuffer, toNodeFlags(flags),
+ toNodeTimestamp(timestamp), fenceFd);
}
-
- std::unique_ptr<C2Work> work(new C2Work);
- work->input.flags = (C2FrameData::flags_t)c2Flags;
- work->input.ordinal.timestamp = timestamp;
-
- // WORKAROUND: adjust timestamp based on gapUs
- {
- work->input.ordinal.customOrdinal = timestamp; // save input timestamp
- if (mFirstInputFrame) {
- // grab timestamps on first frame
- mPrevInputTimestamp = timestamp;
- mPrevCodecTimestamp = timestamp;
- mFirstInputFrame = false;
- } else if (mAdjustTimestampGapUs > 0) {
- work->input.ordinal.timestamp =
- mPrevCodecTimestamp
- + c2_min((timestamp - mPrevInputTimestamp).peek(), mAdjustTimestampGapUs);
- } else if (mAdjustTimestampGapUs < 0) {
- work->input.ordinal.timestamp = mPrevCodecTimestamp - mAdjustTimestampGapUs;
- }
- mPrevInputTimestamp = work->input.ordinal.customOrdinal;
- mPrevCodecTimestamp = work->input.ordinal.timestamp;
- ALOGV("adjusting %lld to %lld (gap=%lld)",
- work->input.ordinal.customOrdinal.peekll(),
- work->input.ordinal.timestamp.peekll(),
- (long long)mAdjustTimestampGapUs);
- }
-
- work->input.ordinal.frameIndex = mFrameIndex++;
- work->input.buffers.clear();
- if (block) {
- std::shared_ptr<C2Buffer> c2Buffer(
- new Buffer2D(block->share(
- C2Rect(block->width(), block->height()), ::C2Fence())));
- work->input.buffers.push_back(c2Buffer);
- std::shared_ptr<C2StreamHdrStaticInfo::input> staticInfo;
- std::shared_ptr<C2StreamHdrDynamicMetadataInfo::input> dynamicInfo;
- GetHdrMetadataFromGralloc4Handle(
- block->handle(),
- &staticInfo,
- &dynamicInfo);
- if (staticInfo && *staticInfo) {
- c2Buffer->setInfo(staticInfo);
- }
- if (dynamicInfo && *dynamicInfo) {
- c2Buffer->setInfo(dynamicInfo);
- }
- }
- work->worklets.clear();
- work->worklets.emplace_back(new C2Worklet);
- mBufferIdsInUse.lock()->emplace(work->input.ordinal.frameIndex.peeku(), buffer);
- mQueueThread->queue(comp, fenceFd, std::move(work), std::move(fd0), std::move(fd1));
-
- return OK;
+ sp<GraphicBuffer> gBuf;
+ return mImpl->submitBuffer(buffer, gBuf, toNodeFlags(flags),
+ toNodeTimestamp(timestamp), fenceFd);
}
status_t C2OMXNode::getExtensionIndex(
@@ -517,56 +274,33 @@
if (msg.u.event_data.event != OMX_EventDataSpaceChanged) {
return ERROR_UNSUPPORTED;
}
- android_dataspace dataSpace = (android_dataspace)msg.u.event_data.data1;
- uint32_t pixelFormat = msg.u.event_data.data3;
-
- ALOGD("dataspace changed to %#x pixel format: %#x", dataSpace, pixelFormat);
- mQueueThread->setDataspace(dataSpace);
-
- mDataspace.lock().set(dataSpace);
- mPixelFormat.lock().set(pixelFormat);
- return OK;
+ return mImpl->onDataspaceChanged(
+ msg.u.event_data.data1,
+ msg.u.event_data.data3);
}
sp<IOMXBufferSource> C2OMXNode::getSource() {
- return mBufferSource;
+ return mImpl->getSource();
}
void C2OMXNode::setFrameSize(uint32_t width, uint32_t height) {
- mWidth = width;
- mHeight = height;
+ return mImpl->setFrameSize(width, height);
}
void C2OMXNode::onInputBufferDone(c2_cntr64_t index) {
- if (!mBufferSource) {
- ALOGD("Buffer source not set (index=%llu)", index.peekull());
- return;
- }
-
- int32_t bufferId = 0;
- {
- decltype(mBufferIdsInUse)::Locked bufferIds(mBufferIdsInUse);
- auto it = bufferIds->find(index.peeku());
- if (it == bufferIds->end()) {
- ALOGV("Untracked input index %llu (maybe already removed)", index.peekull());
- return;
- }
- bufferId = it->second;
- (void)bufferIds->erase(it);
- }
- (void)mBufferSource->onInputBufferEmptied(bufferId, -1);
+ return mImpl->onInputBufferDone(index);
}
android_dataspace C2OMXNode::getDataspace() {
- return *mDataspace.lock();
+ return mImpl->getDataspace();
}
uint32_t C2OMXNode::getPixelFormat() {
- return *mPixelFormat.lock();
+ return mImpl->getPixelFormat();
}
void C2OMXNode::setPriority(int priority) {
- mQueueThread->setPriority(priority);
+ return mImpl->setPriority(priority);
}
} // namespace android
diff --git a/media/codec2/sfplugin/C2OMXNode.h b/media/codec2/sfplugin/C2OMXNode.h
index c8ce336..d077202 100644
--- a/media/codec2/sfplugin/C2OMXNode.h
+++ b/media/codec2/sfplugin/C2OMXNode.h
@@ -1,5 +1,5 @@
/*
- * Copyright 2018, The Android Open Source Project
+ * Copyright 2024, The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
@@ -17,16 +17,15 @@
#ifndef C2_OMX_NODE_H_
#define C2_OMX_NODE_H_
-#include <atomic>
-
#include <android/IOMXBufferSource.h>
#include <codec2/hidl/client.h>
-#include <media/stagefright/foundation/Mutexed.h>
#include <media/IOMX.h>
#include <media/OMXBuffer.h>
namespace android {
+struct C2NodeImpl;
+
/**
* IOmxNode implementation around codec 2.0 component, only to be used in
* IGraphicBufferSource::configure. Only subset of IOmxNode API is implemented
@@ -109,30 +108,7 @@
void setPriority(int priority);
private:
- std::weak_ptr<Codec2Client::Component> mComp;
- sp<IOMXBufferSource> mBufferSource;
- std::shared_ptr<C2Allocator> mAllocator;
- std::atomic_uint64_t mFrameIndex;
- uint32_t mWidth;
- uint32_t mHeight;
- uint64_t mUsage;
- Mutexed<android_dataspace> mDataspace;
- Mutexed<uint32_t> mPixelFormat;
-
- // WORKAROUND: timestamp adjustment
-
- // if >0: this is the max timestamp gap, if <0: this is -1 times the fixed timestamp gap
- // if 0: no timestamp adjustment is made
- // note that C2OMXNode can be recycled between encoding sessions.
- int32_t mAdjustTimestampGapUs;
- bool mFirstInputFrame; // true for first input
- c2_cntr64_t mPrevInputTimestamp; // input timestamp for previous frame
- c2_cntr64_t mPrevCodecTimestamp; // adjusted (codec) timestamp for previous frame
-
- Mutexed<std::map<uint64_t, buffer_id>> mBufferIdsInUse;
-
- class QueueThread;
- sp<QueueThread> mQueueThread;
+ std::shared_ptr<C2NodeImpl> mImpl;
};
} // namespace android
diff --git a/media/codec2/sfplugin/CCodec.cpp b/media/codec2/sfplugin/CCodec.cpp
index 9c264af..20b6d7f 100644
--- a/media/codec2/sfplugin/CCodec.cpp
+++ b/media/codec2/sfplugin/CCodec.cpp
@@ -21,11 +21,16 @@
#include <sstream>
#include <thread>
+#include <android_media_codec.h>
+
#include <C2Config.h>
#include <C2Debug.h>
#include <C2ParamInternal.h>
#include <C2PlatformSupport.h>
+#include <aidl/android/hardware/graphics/common/Dataspace.h>
+#include <aidl/android/media/IAidlGraphicBufferSource.h>
+#include <aidl/android/media/IAidlBufferSource.h>
#include <android/IOMXBufferSource.h>
#include <android/hardware/media/c2/1.0/IInputSurface.h>
#include <android/hardware/media/omx/1.0/IGraphicBufferSource.h>
@@ -40,6 +45,11 @@
#include <media/openmax/OMX_Core.h>
#include <media/openmax/OMX_IndexExt.h>
#include <media/stagefright/foundation/avc_utils.h>
+#include <media/stagefright/foundation/AUtils.h>
+#include <media/stagefright/aidlpersistentsurface/AidlGraphicBufferSource.h>
+#include <media/stagefright/aidlpersistentsurface/C2NodeDef.h>
+#include <media/stagefright/aidlpersistentsurface/wrapper/Conversion.h>
+#include <media/stagefright/aidlpersistentsurface/wrapper/WAidlGraphicBufferSource.h>
#include <media/stagefright/omx/1.0/WGraphicBufferSource.h>
#include <media/stagefright/omx/OmxGraphicBufferSource.h>
#include <media/stagefright/CCodec.h>
@@ -50,6 +60,7 @@
#include <media/stagefright/RenderedFrameInfo.h>
#include <utils/NativeHandle.h>
+#include "C2AidlNode.h"
#include "C2OMXNode.h"
#include "CCodecBufferChannel.h"
#include "CCodecConfig.h"
@@ -64,8 +75,14 @@
using ::android::hardware::graphics::bufferqueue::V1_0::utils::H2BGraphicBufferProducer;
using android::base::StringPrintf;
using ::android::hardware::media::c2::V1_0::IInputSurface;
+using ::aidl::android::media::IAidlBufferSource;
+using ::aidl::android::media::IAidlNode;
+using ::android::media::AidlGraphicBufferSource;
+using ::android::media::WAidlGraphicBufferSource;
+using ::android::media::aidl_conversion::fromAidlStatus;
typedef hardware::media::omx::V1_0::IGraphicBufferSource HGraphicBufferSource;
+typedef aidl::android::media::IAidlGraphicBufferSource AGraphicBufferSource;
typedef CCodecConfig Config;
namespace {
@@ -189,11 +206,11 @@
std::shared_ptr<Codec2Client::InputSurfaceConnection> mConnection;
};
-class GraphicBufferSourceWrapper : public InputSurfaceWrapper {
+class HGraphicBufferSourceWrapper : public InputSurfaceWrapper {
public:
typedef hardware::media::omx::V1_0::Status OmxStatus;
- GraphicBufferSourceWrapper(
+ HGraphicBufferSourceWrapper(
const sp<HGraphicBufferSource> &source,
uint32_t width,
uint32_t height,
@@ -202,7 +219,7 @@
mDataSpace = HAL_DATASPACE_BT709;
mConfig.mUsage = usage;
}
- ~GraphicBufferSourceWrapper() override = default;
+ ~HGraphicBufferSourceWrapper() override = default;
status_t connect(const std::shared_ptr<Codec2Client::Component> &comp) override {
mNode = new C2OMXNode(comp);
@@ -444,6 +461,224 @@
Config mConfig;
};
+class AGraphicBufferSourceWrapper : public InputSurfaceWrapper {
+public:
+ AGraphicBufferSourceWrapper(
+ const std::shared_ptr<AGraphicBufferSource> &source,
+ uint32_t width,
+ uint32_t height,
+ uint64_t usage)
+ : mSource(source), mWidth(width), mHeight(height) {
+ mDataSpace = HAL_DATASPACE_BT709;
+ mConfig.mUsage = usage;
+ }
+ ~AGraphicBufferSourceWrapper() override = default;
+
+ status_t connect(const std::shared_ptr<Codec2Client::Component> &comp) override {
+ mNode = ::ndk::SharedRefBase::make<C2AidlNode>(comp);
+ mNode->setFrameSize(mWidth, mHeight);
+ // Usage is queried during configure(), so set it beforehand.
+ uint64_t usage = mConfig.mUsage;
+ (void)mNode->setConsumerUsage((int64_t)usage);
+
+ return fromAidlStatus(mSource->configure(
+ mNode, static_cast<::aidl::android::hardware::graphics::common::Dataspace>(
+ mDataSpace)));
+ }
+
+ void disconnect() override {
+ if (mNode == nullptr) {
+ return;
+ }
+ std::shared_ptr<IAidlBufferSource> source = mNode->getSource();
+ if (source == nullptr) {
+ ALOGD("GBSWrapper::disconnect: node is not configured with OMXBufferSource.");
+ return;
+ }
+ (void)source->onStop();
+ (void)source->onRelease();
+ mNode.reset();
+ }
+
+ status_t start() override {
+ std::shared_ptr<IAidlBufferSource> source = mNode->getSource();
+ if (source == nullptr) {
+ return NO_INIT;
+ }
+
+ size_t numSlots = 16;
+
+ IAidlNode::InputBufferParams param;
+ status_t err = fromAidlStatus(mNode->getInputBufferParams(¶m));
+ if (err == OK) {
+ numSlots = param.bufferCountActual;
+ }
+
+ for (size_t i = 0; i < numSlots; ++i) {
+ (void)source->onInputBufferAdded(i);
+ }
+
+ (void)source->onStart();
+ return OK;
+ }
+
+ status_t signalEndOfInputStream() override {
+ return fromAidlStatus(mSource->signalEndOfInputStream());
+ }
+
+ status_t configure(Config &config) {
+ std::stringstream status;
+ status_t err = OK;
+
+ // handle each configuration granularly, in case we need to handle part of the configuration
+ // elsewhere
+
+ // TRICKY: we do not unset frame delay repeating
+ if (config.mMinFps > 0 && config.mMinFps != mConfig.mMinFps) {
+ int64_t us = 1e6 / config.mMinFps + 0.5;
+ status_t res = fromAidlStatus(mSource->setRepeatPreviousFrameDelayUs(us));
+ status << " minFps=" << config.mMinFps << " => repeatDelayUs=" << us;
+ if (res != OK) {
+ status << " (=> " << asString(res) << ")";
+ err = res;
+ }
+ mConfig.mMinFps = config.mMinFps;
+ }
+
+ // pts gap
+ if (config.mMinAdjustedFps > 0 || config.mFixedAdjustedFps > 0) {
+ if (mNode != nullptr) {
+ float gap = (config.mMinAdjustedFps > 0)
+ ? c2_min(INT32_MAX + 0., 1e6 / config.mMinAdjustedFps + 0.5)
+ : c2_max(0. - INT32_MAX, -1e6 / config.mFixedAdjustedFps - 0.5);
+ // float -> uint32_t is undefined if the value is negative.
+ // First convert to int32_t to ensure the expected behavior.
+ int32_t gapUs = int32_t(gap);
+ (void)mNode->setAdjustTimestampGapUs(gapUs);
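+ // Illustrative values: minAdjustedFps=30 yields gapUs ~= 33333 (a maximum gap),
+ // while fixedAdjustedFps=30 yields gapUs ~= -33333 (a fixed gap), matching the
+ // sign convention of setAdjustTimestampGapUs().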
+ }
+ }
+
+ // max fps
+ // TRICKY: we do not unset max fps to 0 unless using fixed fps
+ if ((config.mMaxFps > 0 || (config.mFixedAdjustedFps > 0 && config.mMaxFps == -1))
+ && config.mMaxFps != mConfig.mMaxFps) {
+ status_t res = fromAidlStatus(mSource->setMaxFps(config.mMaxFps));
+ status << " maxFps=" << config.mMaxFps;
+ if (res != OK) {
+ status << " (=> " << asString(res) << ")";
+ err = res;
+ }
+ mConfig.mMaxFps = config.mMaxFps;
+ }
+
+ if (config.mTimeOffsetUs != mConfig.mTimeOffsetUs) {
+ status_t res = fromAidlStatus(mSource->setTimeOffsetUs(config.mTimeOffsetUs));
+ status << " timeOffset " << config.mTimeOffsetUs << "us";
+ if (res != OK) {
+ status << " (=> " << asString(res) << ")";
+ err = res;
+ }
+ mConfig.mTimeOffsetUs = config.mTimeOffsetUs;
+ }
+
+ if (config.mCaptureFps != mConfig.mCaptureFps || config.mCodedFps != mConfig.mCodedFps) {
+ status_t res =
+ fromAidlStatus(mSource->setTimeLapseConfig(config.mCodedFps, config.mCaptureFps));
+ status << " timeLapse " << config.mCaptureFps << "fps as " << config.mCodedFps << "fps";
+ if (res != OK) {
+ status << " (=> " << asString(res) << ")";
+ err = res;
+ }
+ mConfig.mCaptureFps = config.mCaptureFps;
+ mConfig.mCodedFps = config.mCodedFps;
+ }
+
+ if (config.mStartAtUs != mConfig.mStartAtUs
+ || (config.mStopped != mConfig.mStopped && !config.mStopped)) {
+ status_t res = fromAidlStatus(mSource->setStartTimeUs(config.mStartAtUs));
+ status << " start at " << config.mStartAtUs << "us";
+ if (res != OK) {
+ status << " (=> " << asString(res) << ")";
+ err = res;
+ }
+ mConfig.mStartAtUs = config.mStartAtUs;
+ mConfig.mStopped = config.mStopped;
+ }
+
+ // suspend-resume
+ if (config.mSuspended != mConfig.mSuspended) {
+ status_t res = fromAidlStatus(mSource->setSuspend(
+ config.mSuspended, config.mSuspendAtUs));
+ status << " " << (config.mSuspended ? "suspend" : "resume")
+ << " at " << config.mSuspendAtUs << "us";
+ if (res != OK) {
+ status << " (=> " << asString(res) << ")";
+ err = res;
+ }
+ mConfig.mSuspended = config.mSuspended;
+ mConfig.mSuspendAtUs = config.mSuspendAtUs;
+ }
+
+ if (config.mStopped != mConfig.mStopped && config.mStopped) {
+ status_t res = fromAidlStatus(mSource->setStopTimeUs(config.mStopAtUs));
+ status << " stop at " << config.mStopAtUs << "us";
+ if (res != OK) {
+ status << " (=> " << asString(res) << ")";
+ err = res;
+ } else {
+ status << " delayUs";
+ res = fromAidlStatus(mSource->getStopTimeOffsetUs(&config.mInputDelayUs));
+ if (res != OK) {
+ status << " (=> " << asString(res) << ")";
+ } else {
+ status << "=" << config.mInputDelayUs << "us";
+ }
+ mConfig.mInputDelayUs = config.mInputDelayUs;
+ }
+ mConfig.mStopAtUs = config.mStopAtUs;
+ mConfig.mStopped = config.mStopped;
+ }
+
+ // color aspects (android._color-aspects)
+
+ // consumer usage is queried earlier.
+
+ // priority
+ if (mConfig.mPriority != config.mPriority) {
+ if (config.mPriority != INT_MAX) {
+ mNode->setPriority(config.mPriority);
+ }
+ mConfig.mPriority = config.mPriority;
+ }
+
+ if (status.str().empty()) {
+ ALOGD("ISConfig not changed");
+ } else {
+ ALOGD("ISConfig%s", status.str().c_str());
+ }
+ return err;
+ }
+
+ void onInputBufferDone(c2_cntr64_t index) override {
+ mNode->onInputBufferDone(index);
+ }
+
+ android_dataspace getDataspace() override {
+ return mNode->getDataspace();
+ }
+
+ uint32_t getPixelFormat() override {
+ return mNode->getPixelFormat();
+ }
+
+private:
+ std::shared_ptr<AGraphicBufferSource> mSource;
+ std::shared_ptr<C2AidlNode> mNode;
+ uint32_t mWidth;
+ uint32_t mHeight;
+ Config mConfig;
+};
+
class Codec2ClientInterfaceWrapper : public C2ComponentStore {
std::shared_ptr<Codec2Client> mClient;
@@ -1178,6 +1413,23 @@
}
}
+ /*
+ * configure mock region of interest if Feature_Roi is enabled
+ */
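+ // Each rect is encoded as "top,left-bottom,right=qpOffset;" (parsed via sscanf in
+ // CCodecConfig), so the mock entry below covers the whole frame with a zero QP offset.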
+ if (android::media::codec::provider_->region_of_interest()
+ && android::media::codec::provider_->region_of_interest_support()) {
+ if ((config->mDomain & Config::IS_ENCODER) && (config->mDomain & Config::IS_VIDEO)) {
+ int32_t enableRoi;
+ if (msg->findInt32("feature-region-of-interest", &enableRoi) && enableRoi != 0) {
+ if (!msg->contains(PARAMETER_KEY_QP_OFFSET_MAP) &&
+ !msg->contains(PARAMETER_KEY_QP_OFFSET_RECTS)) {
+ msg->setString(PARAMETER_KEY_QP_OFFSET_RECTS,
+ AStringPrintf("%d,%d-%d,%d=%d;", 0, 0, height, width, 0));
+ }
+ }
+ }
+ }
+
std::vector<std::unique_ptr<C2Param>> configUpdate;
// NOTE: We used to ignore "video-bitrate" at configure; replicate
// the behavior here.
@@ -1458,7 +1710,8 @@
int64_t blockUsage =
usage.value | C2MemoryUsage::CPU_READ | C2MemoryUsage::CPU_WRITE;
std::shared_ptr<C2GraphicBlock> block = FetchGraphicBlock(
- width, height, componentColorFormat, blockUsage, {comp->getName()});
+ align(width, 2), align(height, 2), componentColorFormat, blockUsage,
+ {comp->getName()});
sp<GraphicBlockBuffer> buffer;
if (block) {
buffer = GraphicBlockBuffer::Allocate(
@@ -1627,28 +1880,46 @@
}
sp<PersistentSurface> persistentSurface = CreateCompatibleInputSurface();
- sp<hidl::base::V1_0::IBase> hidlTarget = persistentSurface->getHidlTarget();
- sp<IInputSurface> hidlInputSurface = IInputSurface::castFrom(hidlTarget);
- sp<HGraphicBufferSource> gbs = HGraphicBufferSource::castFrom(hidlTarget);
-
- if (hidlInputSurface) {
- std::shared_ptr<Codec2Client::InputSurface> inputSurface =
- std::make_shared<Codec2Client::InputSurface>(hidlInputSurface);
- err = setupInputSurface(std::make_shared<C2InputSurfaceWrapper>(
- inputSurface));
- bufferProducer = inputSurface->getGraphicBufferProducer();
- } else if (gbs) {
- int32_t width = 0;
- (void)outputFormat->findInt32("width", &width);
- int32_t height = 0;
- (void)outputFormat->findInt32("height", &height);
- err = setupInputSurface(std::make_shared<GraphicBufferSourceWrapper>(
- gbs, width, height, usage));
- bufferProducer = persistentSurface->getBufferProducer();
+ if (persistentSurface->isTargetAidl()) {
+ ::ndk::SpAIBinder aidlTarget = persistentSurface->getAidlTarget();
+ std::shared_ptr<AGraphicBufferSource> gbs = AGraphicBufferSource::fromBinder(aidlTarget);
+ if (gbs) {
+ int32_t width = 0;
+ (void)outputFormat->findInt32("width", &width);
+ int32_t height = 0;
+ (void)outputFormat->findInt32("height", &height);
+ err = setupInputSurface(std::make_shared<AGraphicBufferSourceWrapper>(
+ gbs, width, height, usage));
+ bufferProducer = persistentSurface->getBufferProducer();
+ } else {
+ ALOGE("Corrupted input surface(aidl)");
+ mCallback->onInputSurfaceCreationFailed(UNKNOWN_ERROR);
+ return;
+ }
} else {
- ALOGE("Corrupted input surface");
- mCallback->onInputSurfaceCreationFailed(UNKNOWN_ERROR);
- return;
+ sp<hidl::base::V1_0::IBase> hidlTarget = persistentSurface->getHidlTarget();
+ sp<IInputSurface> hidlInputSurface = IInputSurface::castFrom(hidlTarget);
+ sp<HGraphicBufferSource> gbs = HGraphicBufferSource::castFrom(hidlTarget);
+
+ if (hidlInputSurface) {
+ std::shared_ptr<Codec2Client::InputSurface> inputSurface =
+ std::make_shared<Codec2Client::InputSurface>(hidlInputSurface);
+ err = setupInputSurface(std::make_shared<C2InputSurfaceWrapper>(
+ inputSurface));
+ bufferProducer = inputSurface->getGraphicBufferProducer();
+ } else if (gbs) {
+ int32_t width = 0;
+ (void)outputFormat->findInt32("width", &width);
+ int32_t height = 0;
+ (void)outputFormat->findInt32("height", &height);
+ err = setupInputSurface(std::make_shared<HGraphicBufferSourceWrapper>(
+ gbs, width, height, usage));
+ bufferProducer = persistentSurface->getBufferProducer();
+ } else {
+ ALOGE("Corrupted input surface");
+ mCallback->onInputSurfaceCreationFailed(UNKNOWN_ERROR);
+ return;
+ }
}
if (err != OK) {
@@ -1743,33 +2014,56 @@
outputFormat = config->mOutputFormat;
usage = config->mISConfig ? config->mISConfig->mUsage : 0;
}
- sp<hidl::base::V1_0::IBase> hidlTarget = surface->getHidlTarget();
- sp<IInputSurface> inputSurface = IInputSurface::castFrom(hidlTarget);
- sp<HGraphicBufferSource> gbs = HGraphicBufferSource::castFrom(hidlTarget);
- if (inputSurface) {
- status_t err = setupInputSurface(std::make_shared<C2InputSurfaceWrapper>(
- std::make_shared<Codec2Client::InputSurface>(inputSurface)));
- if (err != OK) {
- ALOGE("Failed to set up input surface: %d", err);
- mCallback->onInputSurfaceDeclined(err);
- return;
- }
- } else if (gbs) {
- int32_t width = 0;
- (void)outputFormat->findInt32("width", &width);
- int32_t height = 0;
- (void)outputFormat->findInt32("height", &height);
- status_t err = setupInputSurface(std::make_shared<GraphicBufferSourceWrapper>(
- gbs, width, height, usage));
- if (err != OK) {
- ALOGE("Failed to set up input surface: %d", err);
- mCallback->onInputSurfaceDeclined(err);
+ if (surface->isTargetAidl()) {
+ ::ndk::SpAIBinder aidlTarget = surface->getAidlTarget();
+ std::shared_ptr<AGraphicBufferSource> gbs = AGraphicBufferSource::fromBinder(aidlTarget);
+ if (gbs) {
+ int32_t width = 0;
+ (void)outputFormat->findInt32("width", &width);
+ int32_t height = 0;
+ (void)outputFormat->findInt32("height", &height);
+
+ status_t err = setupInputSurface(std::make_shared<AGraphicBufferSourceWrapper>(
+ gbs, width, height, usage));
+ if (err != OK) {
+ ALOGE("Failed to set up input surface(aidl): %d", err);
+ mCallback->onInputSurfaceDeclined(err);
+ return;
+ }
+ } else {
+ ALOGE("Failed to set input surface(aidl): Corrupted surface.");
+ mCallback->onInputSurfaceDeclined(UNKNOWN_ERROR);
return;
}
} else {
- ALOGE("Failed to set input surface: Corrupted surface.");
- mCallback->onInputSurfaceDeclined(UNKNOWN_ERROR);
- return;
+ sp<hidl::base::V1_0::IBase> hidlTarget = surface->getHidlTarget();
+ sp<IInputSurface> inputSurface = IInputSurface::castFrom(hidlTarget);
+ sp<HGraphicBufferSource> gbs = HGraphicBufferSource::castFrom(hidlTarget);
+ if (inputSurface) {
+ status_t err = setupInputSurface(std::make_shared<C2InputSurfaceWrapper>(
+ std::make_shared<Codec2Client::InputSurface>(inputSurface)));
+ if (err != OK) {
+ ALOGE("Failed to set up input surface: %d", err);
+ mCallback->onInputSurfaceDeclined(err);
+ return;
+ }
+ } else if (gbs) {
+ int32_t width = 0;
+ (void)outputFormat->findInt32("width", &width);
+ int32_t height = 0;
+ (void)outputFormat->findInt32("height", &height);
+ status_t err = setupInputSurface(std::make_shared<HGraphicBufferSourceWrapper>(
+ gbs, width, height, usage));
+ if (err != OK) {
+ ALOGE("Failed to set up input surface: %d", err);
+ mCallback->onInputSurfaceDeclined(err);
+ return;
+ }
+ } else {
+ ALOGE("Failed to set input surface: Corrupted surface.");
+ mCallback->onInputSurfaceDeclined(UNKNOWN_ERROR);
+ return;
+ }
}
// Formats can change after setupInputSurface
sp<AMessage> inputFormat;
@@ -1933,8 +2227,17 @@
// So we reverse their order for stopUseOutputSurface() to notify C2Fence waiters
// prior to comp->stop().
// See also b/300350761.
- mChannel->stopUseOutputSurface(pushBlankBuffer);
- status_t err = comp->stop();
+ //
+ // The workaround is no longer needed with the fetchGraphicBlock & C2Fence changes,
+ // so we revert to the logical sequence of operations.
+ status_t err = C2_OK;
+ if (android::media::codec::provider_->stop_hal_before_surface()) {
+ err = comp->stop();
+ mChannel->stopUseOutputSurface(pushBlankBuffer);
+ } else {
+ mChannel->stopUseOutputSurface(pushBlankBuffer);
+ err = comp->stop();
+ }
if (err != C2_OK) {
// TODO: convert err into status_t
mCallback->onError(UNKNOWN_ERROR, ACTION_CODE_FATAL);
@@ -2029,8 +2332,16 @@
// So we reverse their order for stopUseOutputSurface() to notify C2Fence waiters
// prior to comp->release().
// See also b/300350761.
- mChannel->stopUseOutputSurface(pushBlankBuffer);
- comp->release();
+ //
+ // The workaround is no longer needed with the fetchGraphicBlock & C2Fence changes,
+ // so we revert to the logical sequence of operations.
+ if (android::media::codec::provider_->stop_hal_before_surface()) {
+ comp->release();
+ mChannel->stopUseOutputSurface(pushBlankBuffer);
+ } else {
+ mChannel->stopUseOutputSurface(pushBlankBuffer);
+ comp->release();
+ }
{
Mutexed<State>::Locked state(mState);
@@ -2259,6 +2570,40 @@
}
}
+ /**
+ * Handle ROI QP map configuration. Recover the QP map configuration from AMessage as an
+ * ABuffer and configure to CCodecBufferChannel as a C2InfoBuffer
+ */
+ if (android::media::codec::provider_->region_of_interest()
+ && android::media::codec::provider_->region_of_interest_support()) {
+ sp<ABuffer> qpOffsetMap;
+ if ((config->mDomain & (Config::IS_VIDEO | Config::IS_IMAGE))
+ && (config->mDomain & Config::IS_ENCODER)
+ && params->findBuffer(PARAMETER_KEY_QP_OFFSET_MAP, &qpOffsetMap)) {
+ std::shared_ptr<C2BlockPool> pool;
+ // TODO(b/331443865) Use pooled block pool to improve efficiency
+ c2_status_t status = GetCodec2BlockPool(C2BlockPool::BASIC_LINEAR, nullptr, &pool);
+
+ if (status == C2_OK) {
+ size_t mapSize = qpOffsetMap->size();
+ std::shared_ptr<C2LinearBlock> block;
+ status = pool->fetchLinearBlock(mapSize,
+ C2MemoryUsage{C2MemoryUsage::CPU_READ, C2MemoryUsage::CPU_WRITE}, &block);
+ if (status == C2_OK && !block->map().get().error()) {
+ C2WriteView wView = block->map().get();
+ uint8_t* outData = wView.data();
+ memcpy(outData, qpOffsetMap->data(), mapSize);
+ C2InfoBuffer info = C2InfoBuffer::CreateLinearBuffer(
+ kParamIndexQpOffsetMapBuffer,
+ block->share(0, mapSize, C2Fence()));
+ mChannel->setInfoBuffer(std::make_shared<C2InfoBuffer>(info));
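+ // The info buffer is held by CCodecBufferChannel and attached to the next
+ // queued input work (see the infoBuffers handling in CCodecBufferChannel.cpp).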
+ }
+ }
+ params->removeEntryByName(PARAMETER_KEY_QP_OFFSET_MAP);
+ }
+ }
+
+
std::vector<std::unique_ptr<C2Param>> configUpdate;
(void)config->getConfigUpdateFromSdkParams(
comp, params, Config::IS_PARAM, C2_MAY_BLOCK, &configUpdate);
@@ -2661,15 +3006,32 @@
Codec2Client::CreateInputSurface();
if (!inputSurface) {
if (property_get_int32("debug.stagefright.c2inputsurface", 0) == -1) {
- sp<IGraphicBufferProducer> gbp;
- sp<OmxGraphicBufferSource> gbs = new OmxGraphicBufferSource();
- status_t err = gbs->initCheck();
- if (err != OK) {
- ALOGE("Failed to create persistent input surface: error %d", err);
- return nullptr;
+ if (Codec2Client::IsAidlSelected()) {
+ sp<IGraphicBufferProducer> gbp;
+ sp<AidlGraphicBufferSource> gbs = new AidlGraphicBufferSource();
+ status_t err = gbs->initCheck();
+ if (err != OK) {
+ ALOGE("Failed to create persistent input surface: error %d", err);
+ return nullptr;
+ }
+ ALOGD("aidl based PersistentSurface created");
+ std::shared_ptr<WAidlGraphicBufferSource> wrapper =
+ ::ndk::SharedRefBase::make<WAidlGraphicBufferSource>(gbs);
+
+ return new PersistentSurface(
+ gbs->getIGraphicBufferProducer(), wrapper->asBinder());
+ } else {
+ sp<IGraphicBufferProducer> gbp;
+ sp<OmxGraphicBufferSource> gbs = new OmxGraphicBufferSource();
+ status_t err = gbs->initCheck();
+ if (err != OK) {
+ ALOGE("Failed to create persistent input surface: error %d", err);
+ return nullptr;
+ }
+ ALOGD("hidl based PersistentSurface created");
+ return new PersistentSurface(
+ gbs->getIGraphicBufferProducer(), new TWGraphicBufferSource(gbs));
}
- return new PersistentSurface(
- gbs->getIGraphicBufferProducer(), new TWGraphicBufferSource(gbs));
} else {
return nullptr;
}
diff --git a/media/codec2/sfplugin/CCodecBufferChannel.cpp b/media/codec2/sfplugin/CCodecBufferChannel.cpp
index 40656ff..3984b83 100644
--- a/media/codec2/sfplugin/CCodecBufferChannel.cpp
+++ b/media/codec2/sfplugin/CCodecBufferChannel.cpp
@@ -28,6 +28,8 @@
#include <thread>
#include <chrono>
+#include <android_media_codec.h>
+
#include <C2AllocatorGralloc.h>
#include <C2PlatformSupport.h>
#include <C2BlockInternal.h>
@@ -370,7 +372,17 @@
}
} else {
work->input.flags = (C2FrameData::flags_t)flags;
+
// TODO: fill info's
+ if (android::media::codec::provider_->region_of_interest()
+ && android::media::codec::provider_->region_of_interest_support()) {
+ if (mInfoBuffers.size()) {
+ for (auto infoBuffer : mInfoBuffers) {
+ work->input.infoBuffers.emplace_back(*infoBuffer);
+ }
+ mInfoBuffers.clear();
+ }
+ }
work->input.configUpdate = std::move(mParamsToBeSet);
if (tunnelFirstFrame) {
@@ -552,8 +564,7 @@
}
ssize_t result = -1;
- ssize_t codecDataOffset = 0;
- size_t inBufferOffset = 0;
+ size_t srcOffset = offset;
size_t outBufferSize = 0;
uint32_t cryptoInfoIdx = 0;
int32_t heapSeqNum = getHeapSeqNum(memory);
@@ -565,18 +576,20 @@
for (int i = 0; i < bufferInfos->value.size(); i++) {
if (bufferInfos->value[i].mSize > 0) {
std::unique_ptr<CodecCryptoInfo> info = std::move(cryptoInfos->value[cryptoInfoIdx++]);
+ src.offset = srcOffset;
+ src.size = bufferInfos->value[i].mSize;
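+ // The shared source window now advances per access unit via srcOffset,
+ // so the in-buffer offset passed to decrypt() below is 0.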
result = mCrypto->decrypt(
(uint8_t*)info->mKey,
(uint8_t*)info->mIv,
info->mMode,
info->mPattern,
src,
- inBufferOffset,
+ 0,
info->mSubSamples,
info->mNumSubSamples,
dst,
errorDetailMsg);
- inBufferOffset += bufferInfos->value[i].mSize;
+ srcOffset += bufferInfos->value[i].mSize;
if (result < 0) {
ALOGI("[%s] attachEncryptedBuffers: decrypt failed: result = %zd",
mName, result);
@@ -599,7 +612,7 @@
wView.setOffset(0);
}
std::shared_ptr<C2Buffer> c2Buffer{C2Buffer::CreateLinearBuffer(
- block->share(codecDataOffset, outBufferSize - codecDataOffset, C2Fence{}))};
+ block->share(0, outBufferSize, C2Fence{}))};
if (!buffer->copy(c2Buffer)) {
ALOGI("[%s] attachEncryptedBuffers: buffer copy failed", mName);
return -ENOSYS;
@@ -980,8 +993,7 @@
}
// size of cryptoInfo and accessUnitInfo should be the same?
ssize_t result = -1;
- ssize_t codecDataOffset = 0;
- size_t inBufferOffset = 0;
+ size_t srcOffset = 0;
size_t outBufferSize = 0;
uint32_t cryptoInfoIdx = 0;
{
@@ -994,6 +1006,7 @@
encryptedBuffer->getMappedBlock(&mappedBlock);
hardware::drm::V1_0::SharedBuffer source;
encryptedBuffer->fillSourceBuffer(&source);
+ srcOffset = source.offset;
for (int i = 0 ; i < bufferInfos->value.size(); i++) {
if (bufferInfos->value[i].mSize > 0) {
std::unique_ptr<CodecCryptoInfo> info =
@@ -1004,18 +1017,20 @@
// no data so we only populate the bufferInfo
result = 0;
} else {
+ source.offset = srcOffset;
+ source.size = bufferInfos->value[i].mSize;
result = mCrypto->decrypt(
(uint8_t*)info->mKey,
(uint8_t*)info->mIv,
info->mMode,
info->mPattern,
source,
- inBufferOffset,
+ buffer->offset(),
info->mSubSamples,
info->mNumSubSamples,
destination,
errorDetailMsg);
- inBufferOffset += bufferInfos->value[i].mSize;
+ srcOffset += bufferInfos->value[i].mSize;
if (result < 0) {
ALOGI("[%s] decrypt failed: result=%zd", mName, result);
return result;
@@ -1028,7 +1043,7 @@
}
}
}
- buffer->setRange(codecDataOffset, outBufferSize - codecDataOffset);
+ buffer->setRange(0, outBufferSize);
}
return queueInputBufferInternal(buffer, block, bufferSize);
}
@@ -2055,6 +2070,7 @@
void CCodecBufferChannel::stop() {
mSync.stop();
mFirstValidFrameIndex = mFrameIndex.load(std::memory_order_relaxed);
+ mInfoBuffers.clear();
}
void CCodecBufferChannel::stopUseOutputSurface(bool pushBlankBuffer) {
@@ -2096,6 +2112,7 @@
}
void CCodecBufferChannel::release() {
+ mInfoBuffers.clear();
mComponent.reset();
mInputAllocator.reset();
mOutputSurface.lock()->surface.clear();
@@ -2161,6 +2178,7 @@
output->buffers->flushStash();
}
}
+ mInfoBuffers.clear();
}
void CCodecBufferChannel::onWorkDone(
@@ -2765,6 +2783,10 @@
}
}
+void CCodecBufferChannel::setInfoBuffer(const std::shared_ptr<C2InfoBuffer> &buffer) {
+ mInfoBuffers.push_back(buffer);
+}
+
status_t toStatusT(c2_status_t c2s, c2_operation_t c2op) {
// C2_OK is always translated to OK.
if (c2s == C2_OK) {
diff --git a/media/codec2/sfplugin/CCodecBufferChannel.h b/media/codec2/sfplugin/CCodecBufferChannel.h
index b470655..94a5998 100644
--- a/media/codec2/sfplugin/CCodecBufferChannel.h
+++ b/media/codec2/sfplugin/CCodecBufferChannel.h
@@ -227,6 +227,14 @@
void resetBuffersPixelFormat(bool isEncoder);
+ /**
+ * Queue a C2 info buffer that will be sent to codec in the subsequent
+ * queueInputBuffer
+ *
+ * @param buffer C2 info buffer
+ */
+ void setInfoBuffer(const std::shared_ptr<C2InfoBuffer> &buffer);
+
private:
uint32_t getInputBuffersPixelFormat();
@@ -400,6 +408,8 @@
std::atomic_bool mSendEncryptedInfoBuffer;
std::atomic_bool mTunneled;
+
+ std::vector<std::shared_ptr<C2InfoBuffer>> mInfoBuffers;
};
// Conversion of a c2_status_t value to a status_t value may depend on the
diff --git a/media/codec2/sfplugin/CCodecBuffers.cpp b/media/codec2/sfplugin/CCodecBuffers.cpp
index d313f33..3eec0f3 100644
--- a/media/codec2/sfplugin/CCodecBuffers.cpp
+++ b/media/codec2/sfplugin/CCodecBuffers.cpp
@@ -24,6 +24,7 @@
#include <C2PlatformSupport.h>
#include <media/stagefright/foundation/ADebug.h>
+#include <media/stagefright/foundation/AUtils.h>
#include <media/stagefright/foundation/MediaDefs.h>
#include <media/stagefright/CodecBase.h>
#include <media/stagefright/MediaCodecConstants.h>
@@ -57,7 +58,7 @@
std::shared_ptr<C2GraphicBlock> block;
c2_status_t err = pool->fetchGraphicBlock(
- width, height, pixelFormat, fullUsage, &block);
+ align(width, 2), align(height, 2), pixelFormat, fullUsage, &block);
if (err != C2_OK) {
ALOGD("fetch graphic block failed: %d", err);
return nullptr;
@@ -1549,19 +1550,23 @@
sp<Codec2Buffer> LinearOutputBuffers::wrap(const std::shared_ptr<C2Buffer> &buffer) {
if (buffer == nullptr) {
- ALOGV("[%s] using a dummy buffer", mName);
+ ALOGD("[%s] received null buffer", mName);
return new LocalLinearBuffer(mFormat, new ABuffer(0));
}
if (buffer->data().type() != C2BufferData::LINEAR) {
- ALOGV("[%s] non-linear buffer %d", mName, buffer->data().type());
+ ALOGW("[%s] non-linear buffer %d", mName, buffer->data().type());
// We expect linear output buffers from the component.
return nullptr;
}
if (buffer->data().linearBlocks().size() != 1u) {
- ALOGV("[%s] no linear buffers", mName);
+ ALOGW("[%s] no linear buffers", mName);
// We expect one and only one linear block from the component.
return nullptr;
}
+ if (buffer->data().linearBlocks().front().size() == 0) {
+ ALOGD("[%s] received 0-sized buffer", mName);
+ return new LocalLinearBuffer(mFormat, new ABuffer(0));
+ }
sp<Codec2Buffer> clientBuffer = ConstLinearBlockBuffer::Allocate(mFormat, buffer);
if (clientBuffer == nullptr) {
ALOGD("[%s] ConstLinearBlockBuffer::Allocate failed", mName);
diff --git a/media/codec2/sfplugin/CCodecConfig.cpp b/media/codec2/sfplugin/CCodecConfig.cpp
index c22deca..db59227 100644
--- a/media/codec2/sfplugin/CCodecConfig.cpp
+++ b/media/codec2/sfplugin/CCodecConfig.cpp
@@ -19,6 +19,8 @@
#include <initializer_list>
+#include <android_media_codec.h>
+
#include <cutils/properties.h>
#include <log/log.h>
#include <utils/NativeHandle.h>
@@ -591,6 +593,13 @@
}
return C2Value();
}));
+
+ if (android::media::codec::provider_->region_of_interest()
+ && android::media::codec::provider_->region_of_interest_support()) {
+ add(ConfigMapper(C2_PARAMKEY_QP_OFFSET_RECTS, C2_PARAMKEY_QP_OFFSET_RECTS, "")
+ .limitTo(D::VIDEO & (D::CONFIG | D::PARAM) & D::ENCODER & D::INPUT));
+ }
+
deprecated(ConfigMapper(PARAMETER_KEY_REQUEST_SYNC_FRAME,
"coding.request-sync", "value")
.limitTo(D::PARAM & D::ENCODER)
@@ -1121,6 +1130,11 @@
mParamUpdater->clear();
mParamUpdater->supportWholeParam(
C2_PARAMKEY_TEMPORAL_LAYERING, C2StreamTemporalLayeringTuning::CORE_INDEX);
+ if (android::media::codec::provider_->region_of_interest()
+ && android::media::codec::provider_->region_of_interest_support()) {
+ mParamUpdater->supportWholeParam(
+ C2_PARAMKEY_QP_OFFSET_RECTS, C2StreamQpOffsetRects::CORE_INDEX);
+ }
mParamUpdater->addParamDesc(mReflector, mParamDescs);
// TEMP: add some standard fields even if not reflected
@@ -1871,6 +1885,39 @@
}
}
+ if (android::media::codec::provider_->region_of_interest()
+ && android::media::codec::provider_->region_of_interest_support()) {
+ if (mDomain == (IS_VIDEO | IS_ENCODER)) {
+ AString qpOffsetRects;
+ if (params->findString(PARAMETER_KEY_QP_OFFSET_RECTS, &qpOffsetRects)) {
+ std::vector<C2QpOffsetRectStruct> c2QpOffsetRects;
+ char mutableStrQpOffsetRects[strlen(qpOffsetRects.c_str()) + 1];
+ strcpy(mutableStrQpOffsetRects, qpOffsetRects.c_str());
+ char* box = strtok(mutableStrQpOffsetRects, ";");
+ while (box != nullptr) {
+ int top, left, bottom, right, offset;
+ if (sscanf(box, "%d,%d-%d,%d=%d", &top, &left, &bottom, &right, &offset) == 5) {
+ left = c2_max(0, left);
+ top = c2_max(0, top);
+ if (right > left && bottom > top) {
+ C2Rect rect(right - left, bottom - top);
+ rect.at(left, top);
+ c2QpOffsetRects.push_back(C2QpOffsetRectStruct(rect, offset));
+ }
+ }
+ box = strtok(nullptr, ";");
+ }
+ if (c2QpOffsetRects.size() != 0) {
+ const std::unique_ptr<C2StreamQpOffsetRects::output> regions =
+ C2StreamQpOffsetRects::output::AllocUnique(
+ c2QpOffsetRects.size(), 0u, c2QpOffsetRects);
+ params->setBuffer(C2_PARAMKEY_QP_OFFSET_RECTS,
+ ABuffer::CreateAsCopy(regions.get(), regions->size()));
+ }
+ }
+ }
+ }
+
// this is to verify that we set proper signedness for standard parameters
bool beVeryStrict = property_get_bool("debug.stagefright.ccodec_strict_type", false);
// this is to allow vendors to use the wrong signedness for standard parameters
diff --git a/media/codec2/sfplugin/Codec2Buffer.cpp b/media/codec2/sfplugin/Codec2Buffer.cpp
index 9c514f2..2550dcf 100644
--- a/media/codec2/sfplugin/Codec2Buffer.cpp
+++ b/media/codec2/sfplugin/Codec2Buffer.cpp
@@ -179,10 +179,17 @@
if (!buffer
|| buffer->data().type() != C2BufferData::LINEAR
|| buffer->data().linearBlocks().size() != 1u) {
+ if (!buffer) {
+ ALOGD("ConstLinearBlockBuffer::Allocate: null buffer");
+ } else {
+ ALOGW("ConstLinearBlockBuffer::Allocate: type=%d # linear blocks=%zu",
+ buffer->data().type(), buffer->data().linearBlocks().size());
+ }
return nullptr;
}
C2ReadView readView(buffer->data().linearBlocks()[0].map().get());
if (readView.error() != C2_OK) {
+ ALOGW("ConstLinearBlockBuffer::Allocate: readView.error()=%d", readView.error());
return nullptr;
}
return new ConstLinearBlockBuffer(format, std::move(readView), buffer);
@@ -1137,7 +1144,7 @@
std::optional<Smpte2086> smpte2086;
status_t status = mapper.getSmpte2086(buffer.get(), &smpte2086);
- if (status != OK) {
+ if (status != OK || !smpte2086) {
err = C2_CORRUPTED;
} else {
if (smpte2086) {
@@ -1157,7 +1164,7 @@
std::optional<Cta861_3> cta861_3;
status = mapper.getCta861_3(buffer.get(), &cta861_3);
- if (status != OK) {
+ if (status != OK || !cta861_3) {
err = C2_CORRUPTED;
} else {
if (cta861_3) {
@@ -1176,7 +1183,7 @@
dynamicInfo->reset();
std::optional<std::vector<uint8_t>> vec;
status_t status = mapper.getSmpte2094_40(buffer.get(), &vec);
- if (status != OK) {
+ if (status != OK || !vec) {
dynamicInfo->reset();
err = C2_CORRUPTED;
} else {
diff --git a/media/codec2/sfplugin/Codec2InfoBuilder.cpp b/media/codec2/sfplugin/Codec2InfoBuilder.cpp
index 8dce789..692f700 100644
--- a/media/codec2/sfplugin/Codec2InfoBuilder.cpp
+++ b/media/codec2/sfplugin/Codec2InfoBuilder.cpp
@@ -20,6 +20,7 @@
#include <strings.h>
+#include <com_android_media_codec_flags.h>
#include <android_media_codec.h>
#include <C2Component.h>
@@ -686,6 +687,11 @@
const MediaCodecsXmlParser::AttributeMap &attrMap = typeIt->second;
std::unique_ptr<MediaCodecInfo::CapabilitiesWriter> caps =
codecInfo->addMediaType(mediaType.c_str());
+
+ // we could detect tunneled playback via the playback interface, but we never did
+ // that for the advertised feature, so for now use only the advertised feature.
+ bool canDoTunneledPlayback = false;
+
for (const auto &v : attrMap) {
std::string key = v.first;
std::string value = v.second;
@@ -706,6 +712,11 @@
// Ignore trailing bad characters and default to 0.
(void)sscanf(value.c_str(), "%d", &intValue);
caps->addDetail(key.c_str(), intValue);
+
+ if (key.compare(
+ MediaCodecInfo::Capabilities::FEATURE_TUNNELED_PLAYBACK) == 0) {
+ canDoTunneledPlayback = true;
+ }
} else {
caps->addDetail(key.c_str(), value.c_str());
}
@@ -755,7 +766,8 @@
addSupportedColorFormats(
intf, caps.get(), trait, mediaType, it->second);
- if (android::media::codec::provider_->large_audio_frame_finish()) {
+ if (com::android::media::codec::flags::provider_->large_audio_frame()
+ && android::media::codec::provider_->large_audio_frame_finish()) {
// Adding feature-multiple-frames when C2LargeFrame param is present
if (trait.domain == C2Component::DOMAIN_AUDIO) {
std::vector<std::shared_ptr<C2ParamDescriptor>> params;
@@ -772,6 +784,17 @@
}
}
}
+
+ if (android::media::codec::provider_->null_output_surface_support() &&
+ android::media::codec::provider_->null_output_surface()) {
+ // all non-tunneled video decoders support detached surface mode
+ if (trait.kind == C2Component::KIND_DECODER &&
+ trait.domain == C2Component::DOMAIN_VIDEO &&
+ !canDoTunneledPlayback) {
+ caps->addDetail(
+ MediaCodecInfo::Capabilities::FEATURE_DETACHED_SURFACE, 0);
+ }
+ }
}
}
}
diff --git a/media/codec2/sfplugin/tests/Android.bp b/media/codec2/sfplugin/tests/Android.bp
index 246e563..2739f44 100644
--- a/media/codec2/sfplugin/tests/Android.bp
+++ b/media/codec2/sfplugin/tests/Android.bp
@@ -42,6 +42,7 @@
],
static_libs: [
+ "android.media.codec-aconfig-cc",
"libcodec2_hidl@1.0",
"libstagefright_bufferpool@2.0",
],
diff --git a/media/codec2/sfplugin/tests/CCodecConfig_test.cpp b/media/codec2/sfplugin/tests/CCodecConfig_test.cpp
index 3615289..508bec2 100644
--- a/media/codec2/sfplugin/tests/CCodecConfig_test.cpp
+++ b/media/codec2/sfplugin/tests/CCodecConfig_test.cpp
@@ -20,6 +20,8 @@
#include <gtest/gtest.h>
+#include <android_media_codec.h>
+
#include <codec2/hidl/1.0/Configurable.h>
#include <codec2/hidl/client.h>
#include <util/C2InterfaceHelper.h>
@@ -235,6 +237,22 @@
})
.withSetter(Setter<C2StreamProfileLevelInfo::output>)
.build());
+
+ std::vector<C2QpOffsetRectStruct> c2QpOffsetRectsInfo;
+ addParameter(
+ DefineParam(mInputQpOffsetRects, C2_PARAMKEY_QP_OFFSET_RECTS)
+ .withDefault(C2StreamQpOffsetRects::output::AllocShared(
+ c2QpOffsetRectsInfo.size(), 0, c2QpOffsetRectsInfo))
+ .withFields({
+ C2F(mInputQpOffsetRects, m.values[0].qpOffset)
+ .inRange(-128, 127),
+ C2F(mInputQpOffsetRects, m.values[0].left).any(),
+ C2F(mInputQpOffsetRects, m.values[0].top).any(),
+ C2F(mInputQpOffsetRects, m.values[0].width).any(),
+ C2F(mInputQpOffsetRects, m.values[0].height).any(),
+ })
+ .withSetter(Setter<C2StreamQpOffsetRects::output>)
+ .build());
}
// TODO: more SDK params
@@ -254,6 +272,7 @@
std::shared_ptr<C2StreamBitrateInfo::output> mOutputBitrate;
std::shared_ptr<C2StreamProfileLevelInfo::input> mInputProfileLevel;
std::shared_ptr<C2StreamProfileLevelInfo::output> mOutputProfileLevel;
+ std::shared_ptr<C2StreamQpOffsetRects::output> mInputQpOffsetRects;
template<typename T>
static C2R Setter(bool, C2P<T> &) {
@@ -636,4 +655,56 @@
HdrProfilesTest,
::testing::ValuesIn(kHdrProfilesParams));
+TEST_F(CCodecConfigTest, SetRegionOfInterestParams) {
+ if (!android::media::codec::provider_->region_of_interest()
+ || !android::media::codec::provider_->region_of_interest_support()) {
+ GTEST_SKIP() << "Skipping the test as region_of_interest flags are not enabled.\n";
+ }
+
+ init(C2Component::DOMAIN_VIDEO, C2Component::KIND_ENCODER, MIMETYPE_VIDEO_VP9);
+
+ ASSERT_EQ(OK, mConfig.initialize(mReflector, mConfigurable));
+
+ const int kWidth = 32;
+ const int kHeight = 32;
+ const int kNumBlocks = ((kWidth + 15) / 16) * ((kHeight + 15) / 16);
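+ // For a 32x32 frame with 16x16 blocks this evaluates to 4, matching the four
+ // rects described by the arrays below.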
+ int8_t mapInfo[kNumBlocks] = {-1, 0, 1, 1};
+ int top[kNumBlocks] = {0, 0, 16, 16};
+ int left[kNumBlocks] = {0, 16, 0, 16};
+ int bottom[kNumBlocks] = {16, 16, 32, 32};
+ int right[kNumBlocks] = {16, 32, 16, 32};
+ sp<AMessage> format{new AMessage};
+ format->setInt32(KEY_WIDTH, kWidth);
+ format->setInt32(KEY_HEIGHT, kHeight);
+ AString val;
+ for (int i = 0; i < kNumBlocks; i++) {
+ val.append(AStringPrintf("%d,%d-%d,%d=%d;", top[i], left[i], bottom[i],
+ right[i], mapInfo[i]));
+ }
+ format->setString(PARAMETER_KEY_QP_OFFSET_RECTS, val);
+
+ std::vector<std::unique_ptr<C2Param>> configUpdate;
+ ASSERT_EQ(OK, mConfig.getConfigUpdateFromSdkParams(mConfigurable, format, D::CONFIG,
+ C2_MAY_BLOCK, &configUpdate));
+
+ EXPECT_EQ(1u, configUpdate.size());
+
+ C2StreamQpOffsetRects::output* qpRectParam =
+ FindParam<std::remove_pointer<decltype(qpRectParam)>::type>(configUpdate);
+ ASSERT_NE(nullptr, qpRectParam);
+ ASSERT_EQ(kNumBlocks, qpRectParam->flexCount());
+ for (auto i = 0; i < kNumBlocks; i++) {
+ EXPECT_EQ(mapInfo[i], (int8_t)qpRectParam->m.values[i].qpOffset)
+ << "qp offset for index " << i << " is not as expected ";
+ EXPECT_EQ(left[i], qpRectParam->m.values[i].left)
+ << "left for index " << i << " is not as expected ";
+ EXPECT_EQ(top[i], qpRectParam->m.values[i].top)
+ << "top for index " << i << " is not as expected ";
+ EXPECT_EQ(right[i] - left[i], qpRectParam->m.values[i].width)
+ << "width for index " << i << " is not as expected ";
+ EXPECT_EQ(bottom[i] - top[i], qpRectParam->m.values[i].height)
+ << "height for index " << i << " is not as expected ";
+ }
+}
+
} // namespace android
diff --git a/media/codec2/sfplugin/utils/Android.bp b/media/codec2/sfplugin/utils/Android.bp
index 54a6fb1..bed594c 100644
--- a/media/codec2/sfplugin/utils/Android.bp
+++ b/media/codec2/sfplugin/utils/Android.bp
@@ -54,7 +54,7 @@
static_libs: [
"libarect",
- "libyuv_static",
+ "libyuv",
],
sanitize: {
diff --git a/media/codec2/sfplugin/utils/Codec2BufferUtils.cpp b/media/codec2/sfplugin/utils/Codec2BufferUtils.cpp
index 261fd05..75e9bbc 100644
--- a/media/codec2/sfplugin/utils/Codec2BufferUtils.cpp
+++ b/media/codec2/sfplugin/utils/Codec2BufferUtils.cpp
@@ -593,8 +593,6 @@
uint8_t *dstY, size_t dstStride, size_t dstVStride, size_t bufferSize,
const C2GraphicView &src, C2Color::matrix_t colorMatrix, C2Color::range_t colorRange) {
CHECK(dstY != nullptr);
- CHECK((src.width() & 1) == 0);
- CHECK((src.height() & 1) == 0);
if (dstStride * dstVStride * 3 / 2 > bufferSize) {
ALOGD("conversion buffer is too small for converting from RGB to YUV");
diff --git a/media/codec2/sfplugin/utils/Codec2CommonUtils.cpp b/media/codec2/sfplugin/utils/Codec2CommonUtils.cpp
index ff72b1f..7a33af4 100644
--- a/media/codec2/sfplugin/utils/Codec2CommonUtils.cpp
+++ b/media/codec2/sfplugin/utils/Codec2CommonUtils.cpp
@@ -32,10 +32,15 @@
namespace android {
-static bool isAtLeast(int version, const char *codeName) {
- char deviceCodeName[PROP_VALUE_MAX];
- __system_property_get("ro.build.version.codename", deviceCodeName);
- return android_get_device_api_level() >= version || !strcmp(deviceCodeName, codeName);
+static bool isAtLeast(int version, const std::string codeName) {
+ static std::once_flag sCheckOnce;
+ static std::string sDeviceCodeName;
+ static int sDeviceApiLevel;
+ std::call_once(sCheckOnce, [&](){
+ sDeviceCodeName = base::GetProperty("ro.build.version.codename", "");
+ sDeviceApiLevel = android_get_device_api_level();
+ });
+ return sDeviceApiLevel >= version || sDeviceCodeName == codeName;
}
bool isAtLeastT() {
@@ -46,6 +51,10 @@
return isAtLeast(__ANDROID_API_U__, "UpsideDownCake");
}
+bool isAtLeastV() {
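+ // Illustrative: on a pre-release build whose codename is "VanillaIceCream", this
+ // returns true even before the device API level reaches __ANDROID_API_V__.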
+ return isAtLeast(__ANDROID_API_V__, "VanillaIceCream");
+}
+
static bool isP010Allowed() {
// The Vendor API level which is min(ro.product.first_api_level, ro.board.[first_]api_level).
// This is the api level to which VSR requirement the device conform.
diff --git a/media/codec2/sfplugin/utils/Codec2CommonUtils.h b/media/codec2/sfplugin/utils/Codec2CommonUtils.h
index 9bb52bd..693b3db 100644
--- a/media/codec2/sfplugin/utils/Codec2CommonUtils.h
+++ b/media/codec2/sfplugin/utils/Codec2CommonUtils.h
@@ -25,6 +25,8 @@
bool isAtLeastU();
+bool isAtLeastV();
+
bool isVendorApiOrFirstApiAtLeastT();
/**
diff --git a/media/codec2/vndk/Android.bp b/media/codec2/vndk/Android.bp
index 9f57bfd..dc06ee6 100644
--- a/media/codec2/vndk/Android.bp
+++ b/media/codec2/vndk/Android.bp
@@ -53,6 +53,7 @@
],
defaults: [
+ "aconfig_lib_cc_static_link.defaults",
"libcodec2_hal_selection",
],
diff --git a/media/libaaudio/examples/loopback/src/loopback.cpp b/media/libaaudio/examples/loopback/src/loopback.cpp
index 4affaed..6a35ced 100644
--- a/media/libaaudio/examples/loopback/src/loopback.cpp
+++ b/media/libaaudio/examples/loopback/src/loopback.cpp
@@ -323,7 +323,6 @@
printf(" -C{channels} number of input channels\n");
printf(" -D{deviceId} input device ID\n");
printf(" -F{0,1,2} input format, 1=I16, 2=FLOAT\n");
- printf(" -g{gain} recirculating loopback gain\n");
printf(" -h{hangMillis} occasionally hang in the callback\n");
printf(" -P{inPerf} set input AAUDIO_PERFORMANCE_MODE*\n");
printf(" n for _NONE\n");
@@ -436,7 +435,6 @@
int written = 0;
int testMode = TEST_LATENCY;
- double gain = 1.0;
int hangTimeMillis = 0;
std::string report;
@@ -468,9 +466,6 @@
case 'F':
requestedInputFormat = atoi(&arg[2]);
break;
- case 'g':
- gain = atof(&arg[2]);
- break;
case 'h':
// Was there a number after the "-h"?
if (arg[2]) {
diff --git a/media/libaaudio/fuzzer/Android.bp b/media/libaaudio/fuzzer/Android.bp
index 6d94f38..fc8ad77 100644
--- a/media/libaaudio/fuzzer/Android.bp
+++ b/media/libaaudio/fuzzer/Android.bp
@@ -36,37 +36,37 @@
"libaaudio_headers",
],
shared_libs: [
- "libbinder",
+ "com.android.media.aaudio-aconfig-cc",
+ "libaudio_aidl_conversion_common_cpp",
+ "libaudioclient_aidl_conversion",
"libaudiomanager",
"libaudiopolicy",
- "libaudioclient_aidl_conversion",
- "libaudio_aidl_conversion_common_cpp",
+ "libbinder",
"libutils",
- "com.android.media.aaudio-aconfig-cc",
],
static_libs: [
- "liblog",
- "libcutils",
+ "aaudio-aidl-cpp",
+ "audioclient-types-aidl-cpp",
+ "audioflinger-aidl-cpp",
+ "audiopolicy-aidl-cpp",
+ "audiopolicy-types-aidl-cpp",
+ "av-types-aidl-cpp",
+ "framework-permission-aidl-cpp",
"libaaudio",
- "libjsoncpp",
+ "libaaudio_internal",
+ "libaudioclient",
+ "libaudioutils",
"libbase_ndk",
"libcgrouprc",
- "libaudioutils",
- "libaudioclient",
- "aaudio-aidl-cpp",
+ "libcgrouprc_format",
+ "libcutils",
+ "libjsoncpp",
+ "liblog",
"libmedia_helper",
"libmediametrics",
"libprocessgroup",
- "av-types-aidl-cpp",
- "libaaudio_internal",
- "libcgrouprc_format",
- "audiopolicy-aidl-cpp",
- "audioflinger-aidl-cpp",
- "audiopolicy-types-aidl-cpp",
- "audioclient-types-aidl-cpp",
- "shared-file-region-aidl-cpp",
- "framework-permission-aidl-cpp",
"mediametricsservice-aidl-cpp",
+ "shared-file-region-aidl-cpp",
],
fuzz_config: {
cc: [
diff --git a/media/libaaudio/src/Android.bp b/media/libaaudio/src/Android.bp
index d2cb265..ebb7637 100644
--- a/media/libaaudio/src/Android.bp
+++ b/media/libaaudio/src/Android.bp
@@ -57,10 +57,10 @@
"-bugprone-macro-parentheses", // found in SharedMemoryParcelable.h
"-bugprone-narrowing-conversions", // found in several interface from size_t to int32_t
- "-google-readability-casting", // C++ casts not always necessary and may be verbose
- "-google-readability-todo", // do not require TODO(info)
"-google-build-using-namespace", // Reenable and fix later.
"-google-global-names-in-headers", // found in several files
+ "-google-readability-casting", // C++ casts not always necessary and may be verbose
+ "-google-readability-todo", // do not require TODO(info)
"-misc-non-private-member-variables-in-classes", // found in aidl generated files
@@ -90,28 +90,27 @@
],
cflags: [
- "-Wthread-safety",
- "-Wno-unused-parameter",
"-Wall",
"-Werror",
- // By default, all symbols are hidden.
- // "-fvisibility=hidden",
+ "-Wno-unused-parameter",
+ "-Wthread-safety",
+
// AAUDIO_API is used to explicitly export a function or a variable as a visible symbol.
"-DAAUDIO_API=__attribute__((visibility(\"default\")))",
],
shared_libs: [
+ "framework-permission-aidl-cpp",
"libaaudio_internal",
"libaudioclient",
"libaudioutils",
+ "libbinder",
+ "libcutils",
+ "liblog",
"libmedia_helper",
"libmediametrics",
"libmediautils",
- "liblog",
- "libcutils",
"libutils",
- "libbinder",
- "framework-permission-aidl-cpp",
],
sanitize: {
@@ -161,56 +160,49 @@
],
shared_libs: [
+ "aaudio-aidl-cpp",
+ "audioclient-types-aidl-cpp",
+ "com.android.media.aaudio-aconfig-cc",
+ "framework-permission-aidl-cpp",
"libaudioclient",
+ "libaudioclient_aidl_conversion",
"libaudioutils",
+ "libbinder",
+ "libcutils",
+ "liblog",
"libmedia_helper",
"libmediametrics",
"libmediautils",
- "liblog",
- "libcutils",
"libutils",
- "libbinder",
- "framework-permission-aidl-cpp",
- "aaudio-aidl-cpp",
- "audioclient-types-aidl-cpp",
- "libaudioclient_aidl_conversion",
- "com.android.media.aaudio-aconfig-cc",
],
cflags: [
- "-Wno-unused-parameter",
"-Wall",
"-Werror",
+ "-Wno-unused-parameter",
],
srcs: [
- "core/AudioGlobal.cpp",
- "core/AudioStream.cpp",
- "core/AudioStreamBuilder.cpp",
- "core/AAudioStreamParameters.cpp",
- "legacy/AudioStreamLegacy.cpp",
- "legacy/AudioStreamRecord.cpp",
- "legacy/AudioStreamTrack.cpp",
- "utility/AAudioUtilities.cpp",
- "utility/FixedBlockAdapter.cpp",
- "utility/FixedBlockReader.cpp",
- "utility/FixedBlockWriter.cpp",
- "fifo/FifoBuffer.cpp",
- "fifo/FifoControllerBase.cpp",
+ "binding/AAudioBinderAdapter.cpp",
+ "binding/AAudioBinderClient.cpp",
+ "binding/AAudioStreamConfiguration.cpp",
+ "binding/AAudioStreamRequest.cpp",
+ "binding/AudioEndpointParcelable.cpp",
+ "binding/RingBufferParcelable.cpp",
+ "binding/SharedMemoryParcelable.cpp",
+ "binding/SharedRegionParcelable.cpp",
"client/AAudioFlowGraph.cpp",
"client/AudioEndpoint.cpp",
"client/AudioStreamInternal.cpp",
"client/AudioStreamInternalCapture.cpp",
"client/AudioStreamInternalPlay.cpp",
"client/IsochronousClockModel.cpp",
- "binding/AudioEndpointParcelable.cpp",
- "binding/AAudioBinderAdapter.cpp",
- "binding/AAudioBinderClient.cpp",
- "binding/AAudioStreamRequest.cpp",
- "binding/AAudioStreamConfiguration.cpp",
- "binding/RingBufferParcelable.cpp",
- "binding/SharedMemoryParcelable.cpp",
- "binding/SharedRegionParcelable.cpp",
+ "core/AAudioStreamParameters.cpp",
+ "core/AudioGlobal.cpp",
+ "core/AudioStream.cpp",
+ "core/AudioStreamBuilder.cpp",
+ "fifo/FifoBuffer.cpp",
+ "fifo/FifoControllerBase.cpp",
"flowgraph/ChannelCountConverter.cpp",
"flowgraph/ClipToRange.cpp",
"flowgraph/FlowGraphNode.cpp",
@@ -218,20 +210,20 @@
"flowgraph/ManyToMultiConverter.cpp",
"flowgraph/MonoBlend.cpp",
"flowgraph/MonoToMultiConverter.cpp",
- "flowgraph/MultiToMonoConverter.cpp",
"flowgraph/MultiToManyConverter.cpp",
+ "flowgraph/MultiToMonoConverter.cpp",
"flowgraph/RampLinear.cpp",
"flowgraph/SampleRateConverter.cpp",
"flowgraph/SinkFloat.cpp",
+ "flowgraph/SinkI8_24.cpp",
"flowgraph/SinkI16.cpp",
"flowgraph/SinkI24.cpp",
"flowgraph/SinkI32.cpp",
- "flowgraph/SinkI8_24.cpp",
"flowgraph/SourceFloat.cpp",
+ "flowgraph/SourceI8_24.cpp",
"flowgraph/SourceI16.cpp",
"flowgraph/SourceI24.cpp",
"flowgraph/SourceI32.cpp",
- "flowgraph/SourceI8_24.cpp",
"flowgraph/resampler/IntegerRatio.cpp",
"flowgraph/resampler/LinearResampler.cpp",
"flowgraph/resampler/MultiChannelResampler.cpp",
@@ -240,6 +232,13 @@
"flowgraph/resampler/PolyphaseResamplerStereo.cpp",
"flowgraph/resampler/SincResampler.cpp",
"flowgraph/resampler/SincResamplerStereo.cpp",
+ "legacy/AudioStreamLegacy.cpp",
+ "legacy/AudioStreamRecord.cpp",
+ "legacy/AudioStreamTrack.cpp",
+ "utility/AAudioUtilities.cpp",
+ "utility/FixedBlockAdapter.cpp",
+ "utility/FixedBlockReader.cpp",
+ "utility/FixedBlockWriter.cpp",
],
sanitize: {
integer_overflow: true,
@@ -263,17 +262,17 @@
],
srcs: [
"binding/aidl/aaudio/Endpoint.aidl",
+ "binding/aidl/aaudio/IAAudioClient.aidl",
+ "binding/aidl/aaudio/IAAudioService.aidl",
"binding/aidl/aaudio/RingBuffer.aidl",
"binding/aidl/aaudio/SharedRegion.aidl",
"binding/aidl/aaudio/StreamParameters.aidl",
"binding/aidl/aaudio/StreamRequest.aidl",
- "binding/aidl/aaudio/IAAudioClient.aidl",
- "binding/aidl/aaudio/IAAudioService.aidl",
],
imports: [
"audioclient-types-aidl",
- "shared-file-region-aidl",
"framework-permission-aidl",
+ "shared-file-region-aidl",
],
backend: {
java: {
diff --git a/media/libaaudio/src/binding/AAudioBinderClient.cpp b/media/libaaudio/src/binding/AAudioBinderClient.cpp
index 5f34a75..439d5af 100644
--- a/media/libaaudio/src/binding/AAudioBinderClient.cpp
+++ b/media/libaaudio/src/binding/AAudioBinderClient.cpp
@@ -71,18 +71,10 @@
{
Mutex::Autolock _l(mServiceLock);
if (mAdapter == nullptr) {
- sp<IBinder> binder;
sp<IServiceManager> sm = defaultServiceManager();
- // Try several times to get the service.
- int retries = 4;
- do {
- binder = sm->getService(String16(AAUDIO_SERVICE_NAME)); // This will wait a while.
- if (binder.get() != nullptr) {
- break;
- }
- } while (retries-- > 0);
+ sp<IBinder> binder = sm->waitForService(String16(AAUDIO_SERVICE_NAME));
- if (binder.get() != nullptr) {
+ if (binder != nullptr) {
// Ask for notification if the service dies.
status_t status = binder->linkToDeath(mAAudioClient);
// TODO review what we should do if this fails
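The hunk above replaces the bounded retry loop with the service manager's blocking lookup. A minimal sketch of the resulting acquisition path, with error handling trimmed and "media.aaudio" standing in for AAUDIO_SERVICE_NAME:

    #include <binder/IBinder.h>
    #include <binder/IServiceManager.h>
    #include <utils/String16.h>
    #include <utils/StrongPointer.h>

    using namespace android;

    // Blocks until the AAudio service is registered instead of polling a
    // fixed number of times, then registers for death notifications.
    sp<IBinder> acquireAAudioServiceBinder(const sp<IBinder::DeathRecipient>& recipient) {
        sp<IServiceManager> sm = defaultServiceManager();
        sp<IBinder> binder = sm->waitForService(String16("media.aaudio"));
        if (binder != nullptr && recipient != nullptr) {
            // Ask to be told if the service process dies so the client can rebuild.
            binder->linkToDeath(recipient);
        }
        return binder;  // callers must still handle nullptr
    }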
diff --git a/media/libaaudio/src/client/AudioEndpoint.cpp b/media/libaaudio/src/client/AudioEndpoint.cpp
index e780f4f..cd7679c 100644
--- a/media/libaaudio/src/client/AudioEndpoint.cpp
+++ b/media/libaaudio/src/client/AudioEndpoint.cpp
@@ -278,3 +278,9 @@
mDataQueue->eraseMemory();
}
}
+
+void AudioEndpoint::eraseEmptyDataMemory(int32_t numFrames) {
+ if (mDataQueue != nullptr) {
+ mDataQueue->eraseEmptyMemory(numFrames);
+ }
+}
diff --git a/media/libaaudio/src/client/AudioEndpoint.h b/media/libaaudio/src/client/AudioEndpoint.h
index b117572..7e97c6a 100644
--- a/media/libaaudio/src/client/AudioEndpoint.h
+++ b/media/libaaudio/src/client/AudioEndpoint.h
@@ -107,6 +107,8 @@
*/
void eraseDataMemory();
+ void eraseEmptyDataMemory(int32_t numFrames);
+
void freeDataQueue() { mDataQueue.reset(); }
void dump() const;
diff --git a/media/libaaudio/src/client/AudioStreamInternal.cpp b/media/libaaudio/src/client/AudioStreamInternal.cpp
index 7648e25..b2e93f0 100644
--- a/media/libaaudio/src/client/AudioStreamInternal.cpp
+++ b/media/libaaudio/src/client/AudioStreamInternal.cpp
@@ -575,10 +575,20 @@
return AAUDIO_ERROR_INVALID_STATE;
}
+ // For playback, sleep until all the audio data has played.
+ // Then clear the buffer to prevent noise.
+ prepareBuffersForStop();
+
mClockModel.stop(AudioClock::getNanoseconds());
setState(AAUDIO_STREAM_STATE_STOPPING);
mAtomicInternalTimestamp.clear();
+#if 0
+ // Simulate very slow CPU, force race condition where the
+ // DSP keeps playing after we stop writing.
+ AudioClock::sleepForNanos(800 * AAUDIO_NANOS_PER_MILLISECOND);
+#endif
+
result = mServiceInterface.stopStream(mServiceStreamHandleInfo);
if (result == AAUDIO_ERROR_INVALID_HANDLE) {
ALOGD("%s() INVALID_HANDLE, stream was probably stolen", __func__);
diff --git a/media/libaaudio/src/client/AudioStreamInternal.h b/media/libaaudio/src/client/AudioStreamInternal.h
index a5981b1..20d55f9 100644
--- a/media/libaaudio/src/client/AudioStreamInternal.h
+++ b/media/libaaudio/src/client/AudioStreamInternal.h
@@ -123,6 +123,8 @@
virtual void prepareBuffersForStart() {}
+ virtual void prepareBuffersForStop() {}
+
virtual void advanceClientToMatchServerPosition(int32_t serverMargin) = 0;
virtual void onFlushFromServer() {}
diff --git a/media/libaaudio/src/client/AudioStreamInternalPlay.cpp b/media/libaaudio/src/client/AudioStreamInternalPlay.cpp
index 5d4c3d4..0427777 100644
--- a/media/libaaudio/src/client/AudioStreamInternalPlay.cpp
+++ b/media/libaaudio/src/client/AudioStreamInternalPlay.cpp
@@ -19,6 +19,8 @@
#define ATRACE_TAG ATRACE_TAG_AUDIO
+#include <algorithm>
+
#include <media/MediaMetricsItem.h>
#include <utils/Trace.h>
@@ -108,6 +110,61 @@
mAudioEndpoint->eraseDataMemory();
}
+void AudioStreamInternalPlay::prepareBuffersForStop() {
+ // If this is a shared stream and the FIFO is being read by the mixer then
+ // we don't have to worry about the DSP reading past the valid data. We can skip all this.
+ if (!mAudioEndpoint->isFreeRunning()) {
+ return;
+ }
+ // Sleep until the DSP has read all of the data written.
+ int64_t validFramesInBuffer = getFramesWritten() - getFramesRead();
+ if (validFramesInBuffer >= 0) {
+ int64_t emptyFramesInBuffer = ((int64_t) getBufferCapacity()) - validFramesInBuffer;
+
+ // Prevent stale data from being played if the DSP is still running.
+ // Erase some of the FIFO memory in front of the DSP read cursor.
+ // Subtract one burst so we do not accidentally erase data that the DSP might be using.
+ int64_t framesToErase = std::max((int64_t) 0,
+ emptyFramesInBuffer - getFramesPerBurst());
+ mAudioEndpoint->eraseEmptyDataMemory(framesToErase);
+
+ // Sleep until we are confident the DSP has consumed all of the valid data.
+ // Sleep for one extra burst as a safety margin because the IsochronousClockModel
+ // is not perfectly accurate.
+ int64_t positionInEmptyMemory = getFramesWritten() + getFramesPerBurst();
+ int64_t timeAllConsumed = mClockModel.convertPositionToTime(positionInEmptyMemory);
+ int64_t durationAllConsumed = timeAllConsumed - AudioClock::getNanoseconds();
+ // Prevent sleeping for too long.
+ durationAllConsumed = std::min(200 * AAUDIO_NANOS_PER_MILLISECOND, durationAllConsumed);
+ AudioClock::sleepForNanos(durationAllConsumed);
+ }
+
+ // Erase all of the memory in case the DSP keeps going and wraps around.
+ mAudioEndpoint->eraseDataMemory();
+
+ // Wait for the last buffer to reach the DAC.
+ // This is because the expected behavior of stop() is that all data written to the stream
+ // should be played before the hardware actually shuts down.
+ // This is different than pause(), where we just end as soon as possible.
+ // This can be important when, for example, playing a car navigation prompt,
+ // where the user should hear the complete instruction.
+ if (mAtomicInternalTimestamp.isValid()) {
+ // Use timestamps to calculate the latency between the DSP reading
+ // a frame and when it reaches the DAC.
+ // This code assumes that timestamps are accurate.
+ Timestamp timestamp = mAtomicInternalTimestamp.read();
+ int64_t dacPosition = timestamp.getPosition();
+ int64_t hardwareReadTime = mClockModel.convertPositionToTime(dacPosition);
+ int64_t hardwareLatencyNanos = timestamp.getNanoseconds() - hardwareReadTime;
+ ALOGD("%s() hardwareLatencyNanos = %lld", __func__,
+ (long long) hardwareLatencyNanos);
+ // Prevent sleeping for too long.
+ hardwareLatencyNanos = std::min(30 * AAUDIO_NANOS_PER_MILLISECOND,
+ hardwareLatencyNanos);
+ AudioClock::sleepForNanos(hardwareLatencyNanos);
+ }
+}
+
void AudioStreamInternalPlay::advanceClientToMatchServerPosition(int32_t serverMargin) {
int64_t readCounter = mAudioEndpoint->getDataReadCounter() + serverMargin;
int64_t writeCounter = mAudioEndpoint->getDataWriteCounter();
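To make the timing arithmetic in prepareBuffersForStop() concrete, here is a small self-contained calculation under assumed stream parameters (48 kHz, 480-frame bursts, a 1920-frame buffer, 960 valid frames still in the FIFO). The numbers are illustrative only and assume the clock model tracks the DSP read position at the current read counter:

    #include <algorithm>
    #include <cstdint>
    #include <cstdio>

    int main() {
        // Assumed stream parameters (illustrative, not taken from the change).
        const int64_t kSampleRate = 48000;      // frames per second
        const int64_t kFramesPerBurst = 480;    // 10 ms bursts
        const int64_t kBufferCapacity = 1920;   // frames
        const int64_t kFramesWritten = 10960;
        const int64_t kFramesRead = 10000;      // DSP is 960 frames behind
        const int64_t kNanosPerSecond = 1000000000;

        const int64_t validFrames = kFramesWritten - kFramesRead;       // 960
        const int64_t emptyFrames = kBufferCapacity - validFrames;      // 960
        // Leave one burst untouched just ahead of the DSP read cursor.
        const int64_t framesToErase =
                std::max<int64_t>(0, emptyFrames - kFramesPerBurst);    // 480

        // Sleep until one burst past the current write position is consumed;
        // the real code additionally caps this at 200 ms.
        const int64_t framesUntilConsumed = validFrames + kFramesPerBurst;            // 1440
        const int64_t sleepNanos = framesUntilConsumed * kNanosPerSecond / kSampleRate; // 30 ms

        printf("erase %lld frames, sleep %lld ms\n",
               (long long) framesToErase, (long long) (sleepNanos / 1000000));
        return 0;
    }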
@@ -353,20 +410,26 @@
// Call application using the AAudio callback interface.
callbackResult = maybeCallDataCallback(mCallbackBuffer.get(), mCallbackFrames);
- if (callbackResult == AAUDIO_CALLBACK_RESULT_CONTINUE) {
- // Write audio data to stream. This is a BLOCKING WRITE!
- result = write(mCallbackBuffer.get(), mCallbackFrames, timeoutNanos);
- if ((result != mCallbackFrames)) {
- if (result >= 0) {
- // Only wrote some of the frames requested. The stream can be disconnected
- // or timed out.
- processCommands();
- result = isDisconnected() ? AAUDIO_ERROR_DISCONNECTED : AAUDIO_ERROR_TIMEOUT;
- }
- maybeCallErrorCallback(result);
- break;
+ // Write audio data to stream. This is a BLOCKING WRITE!
+ // Write data regardless of the callbackResult because we assume the data
+ // is valid even when the callback returns AAUDIO_CALLBACK_RESULT_STOP.
+ // Imagine a callback that is playing a large sound in memory.
+ // When it gets to the end of the sound it can partially fill
+ // the last buffer with the end of the sound, then zero pad the buffer, then return STOP.
+ // If the callback has no valid data then it should zero-fill the entire buffer.
+ result = write(mCallbackBuffer.get(), mCallbackFrames, timeoutNanos);
+ if ((result != mCallbackFrames)) {
+ if (result >= 0) {
+ // Only wrote some of the frames requested. The stream can be disconnected
+ // or timed out.
+ processCommands();
+ result = isDisconnected() ? AAUDIO_ERROR_DISCONNECTED : AAUDIO_ERROR_TIMEOUT;
}
- } else if (callbackResult == AAUDIO_CALLBACK_RESULT_STOP) {
+ maybeCallErrorCallback(result);
+ break;
+ }
+
+ if (callbackResult == AAUDIO_CALLBACK_RESULT_STOP) {
ALOGD("%s(): callback returned AAUDIO_CALLBACK_RESULT_STOP", __func__);
result = systemStopInternal();
break;
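As context for the comment above, a minimal sketch of an application data callback that drains the tail of a preloaded sound, zero-pads the remainder of the buffer, and only then returns STOP (hypothetical app code against the public AAudio NDK API):

    #include <aaudio/AAudio.h>
    #include <algorithm>
    #include <cstdint>
    #include <cstring>

    struct SoundSource {
        const float* samples;    // interleaved float frames
        int32_t totalFrames;
        int32_t nextFrame;
        int32_t channelCount;
    };

    // Returns CONTINUE while sound remains, and STOP on the final (padded) buffer.
    aaudio_data_callback_result_t playTailCallback(AAudioStream* /*stream*/,
                                                   void* userData,
                                                   void* audioData,
                                                   int32_t numFrames) {
        auto* source = static_cast<SoundSource*>(userData);
        auto* out = static_cast<float*>(audioData);
        const int32_t remaining = source->totalFrames - source->nextFrame;
        const int32_t toCopy = std::min(numFrames, remaining);
        const size_t samplesPerFrame = static_cast<size_t>(source->channelCount);

        // Copy whatever real audio is left.
        std::memcpy(out, source->samples + source->nextFrame * samplesPerFrame,
                    toCopy * samplesPerFrame * sizeof(float));
        // Zero-pad the rest of the buffer so the padded tail is still valid data.
        std::memset(out + toCopy * samplesPerFrame, 0,
                    (numFrames - toCopy) * samplesPerFrame * sizeof(float));
        source->nextFrame += toCopy;

        // With the change above, this last buffer is still written to the stream
        // before stop handling begins, so the end of the sound is not dropped.
        return (toCopy < numFrames) ? AAUDIO_CALLBACK_RESULT_STOP
                                    : AAUDIO_CALLBACK_RESULT_CONTINUE;
    }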
diff --git a/media/libaaudio/src/client/AudioStreamInternalPlay.h b/media/libaaudio/src/client/AudioStreamInternalPlay.h
index b51b5d0..4e14f18 100644
--- a/media/libaaudio/src/client/AudioStreamInternalPlay.h
+++ b/media/libaaudio/src/client/AudioStreamInternalPlay.h
@@ -66,6 +66,8 @@
void prepareBuffersForStart() override;
+ void prepareBuffersForStop() override;
+
void advanceClientToMatchServerPosition(int32_t serverMargin) override;
void onFlushFromServer() override;
diff --git a/media/libaaudio/src/fifo/FifoBuffer.cpp b/media/libaaudio/src/fifo/FifoBuffer.cpp
index 5c11882..f3e3bbd 100644
--- a/media/libaaudio/src/fifo/FifoBuffer.cpp
+++ b/media/libaaudio/src/fifo/FifoBuffer.cpp
@@ -150,7 +150,7 @@
getEmptyRoomAvailable(&wrappingBuffer);
- // Read data in one or two parts.
+ // Write data in one or two parts.
int partIndex = 0;
while (framesLeft > 0 && partIndex < WrappingBuffer::SIZE) {
fifo_frames_t framesToWrite = framesLeft;
@@ -192,3 +192,29 @@
memset(getStorage(), 0, (size_t) numBytes);
}
}
+
+fifo_frames_t FifoBuffer::eraseEmptyMemory(fifo_frames_t numFrames) {
+ WrappingBuffer wrappingBuffer;
+ fifo_frames_t framesLeft = numFrames;
+
+ getEmptyRoomAvailable(&wrappingBuffer);
+
+ // Erase data in one or two parts.
+ int partIndex = 0;
+ while (framesLeft > 0 && partIndex < WrappingBuffer::SIZE) {
+ fifo_frames_t framesToWrite = framesLeft;
+ fifo_frames_t framesAvailable = wrappingBuffer.numFrames[partIndex];
+ if (framesAvailable > 0) {
+ if (framesToWrite > framesAvailable) {
+ framesToWrite = framesAvailable;
+ }
+ int32_t numBytes = convertFramesToBytes(framesToWrite);
+ memset(wrappingBuffer.data[partIndex], 0, numBytes);
+ framesLeft -= framesToWrite;
+ } else {
+ break;
+ }
+ partIndex++;
+ }
+ return numFrames - framesLeft; // number erased
+}
diff --git a/media/libaaudio/src/fifo/FifoBuffer.h b/media/libaaudio/src/fifo/FifoBuffer.h
index 7b0aca1..860ccad 100644
--- a/media/libaaudio/src/fifo/FifoBuffer.h
+++ b/media/libaaudio/src/fifo/FifoBuffer.h
@@ -115,6 +115,13 @@
*/
void eraseMemory();
+ /**
+ * Clear some memory after the write pointer.
+ * This can be used to prevent the reader from accidentally reading stale data
+ * in case it is reading asynchronously.
+ */
+ fifo_frames_t eraseEmptyMemory(fifo_frames_t numFrames);
+
protected:
virtual uint8_t *getStorage() const = 0;
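The intent of eraseEmptyMemory() is easier to see on a plain circular buffer. A stripped-down sketch, not the FifoBuffer implementation, that zeroes up to numFrames of the empty region ahead of an asynchronous reader and splits the work at the wrap point:

    #include <algorithm>
    #include <cstdint>
    #include <cstring>
    #include <vector>

    // Toy ring buffer used only to illustrate the idea; frameSize is bytes per frame.
    struct ToyRing {
        std::vector<uint8_t> storage;   // capacityFrames * frameSize bytes
        int32_t capacityFrames = 0;
        int32_t frameSize = 0;
        int64_t writeCounter = 0;       // total frames ever written
        int64_t readCounter = 0;        // total frames ever consumed by the reader
    };

    // Zero up to numFrames of the empty region that starts at the write index,
    // using two memsets when the region wraps past the end of the storage.
    int32_t eraseEmptyFrames(ToyRing& ring, int32_t numFrames) {
        const int32_t emptyFrames = ring.capacityFrames -
                static_cast<int32_t>(ring.writeCounter - ring.readCounter);
        int32_t left = std::max<int32_t>(0, std::min(numFrames, emptyFrames));
        const int32_t cleared = left;
        const int32_t start =
                static_cast<int32_t>(ring.writeCounter % ring.capacityFrames);

        // Part 1: from the write index up to the end of the storage.
        const int32_t firstPart = std::min(left, ring.capacityFrames - start);
        std::memset(ring.storage.data() + start * ring.frameSize, 0,
                    static_cast<size_t>(firstPart) * ring.frameSize);
        left -= firstPart;

        // Part 2: wrap around to the beginning of the storage if needed.
        if (left > 0) {
            std::memset(ring.storage.data(), 0,
                        static_cast<size_t>(left) * ring.frameSize);
            left = 0;
        }
        return cleared;  // number of frames actually erased
    }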
diff --git a/media/libaaudio/src/flowgraph/SampleRateConverter.cpp b/media/libaaudio/src/flowgraph/SampleRateConverter.cpp
index a15fcb8..890057d 100644
--- a/media/libaaudio/src/flowgraph/SampleRateConverter.cpp
+++ b/media/libaaudio/src/flowgraph/SampleRateConverter.cpp
@@ -28,7 +28,8 @@
void SampleRateConverter::reset() {
FlowGraphNode::reset();
- mInputCursor = kInitialCallCount;
+ mInputCallCount = kInitialCallCount;
+ mInputCursor = 0;
}
// Return true if there is a sample available.
diff --git a/media/libaaudio/src/flowgraph/SampleRateConverter.h b/media/libaaudio/src/flowgraph/SampleRateConverter.h
index f883e6c..a4318f0 100644
--- a/media/libaaudio/src/flowgraph/SampleRateConverter.h
+++ b/media/libaaudio/src/flowgraph/SampleRateConverter.h
@@ -54,7 +54,7 @@
int32_t mNumValidInputFrames = 0; // number of valid frames currently in the input port buffer
// We need our own callCount for upstream calls because calls occur at a different rate.
// This means we cannot have cyclic graphs or merges that contain an SRC.
- int64_t mInputCallCount = 0;
+ int64_t mInputCallCount = kInitialCallCount;
};
diff --git a/media/libaaudio/tests/Android.bp b/media/libaaudio/tests/Android.bp
index 5ec8276..6aa04a8 100644
--- a/media/libaaudio/tests/Android.bp
+++ b/media/libaaudio/tests/Android.bp
@@ -206,9 +206,9 @@
srcs: ["test_steal_exclusive.cpp"],
shared_libs: [
"libaaudio",
- "liblog",
"libbinder",
"libcutils",
+ "liblog",
"libutils",
],
}
diff --git a/media/libaudioclient/Android.bp b/media/libaudioclient/Android.bp
index 90910a1..04a8a45 100644
--- a/media/libaudioclient/Android.bp
+++ b/media/libaudioclient/Android.bp
@@ -30,14 +30,14 @@
static_libs: [
"audioflinger-aidl-cpp",
"audiopolicy-aidl-cpp",
- "spatializer-aidl-cpp",
"av-types-aidl-cpp",
+ "spatializer-aidl-cpp",
],
export_static_lib_headers: [
"audioflinger-aidl-cpp",
"audiopolicy-aidl-cpp",
- "spatializer-aidl-cpp",
"av-types-aidl-cpp",
+ "spatializer-aidl-cpp",
],
target: {
darwin: {
@@ -49,11 +49,11 @@
cc_library {
name: "libaudiopolicy",
srcs: [
- "VolumeGroupAttributes.cpp",
"AudioPolicy.cpp",
"AudioProductStrategy.cpp",
"AudioVolumeGroup.cpp",
"PolicyAidlConversion.cpp",
+ "VolumeGroupAttributes.cpp",
],
defaults: [
"latest_android_media_audio_common_types_cpp_export_shared",
@@ -64,8 +64,9 @@
"audiopolicy-aidl-cpp",
"audiopolicy-types-aidl-cpp",
"capture_state_listener-aidl-cpp",
- "libaudiofoundation",
+ "framework-permission-aidl-cpp",
"libaudioclient_aidl_conversion",
+ "libaudiofoundation",
"libaudioutils",
"libbinder",
"libcutils",
@@ -73,8 +74,8 @@
"libutils",
],
cflags: [
- "-Werror",
"-Wall",
+ "-Werror",
],
include_dirs: ["system/media/audio_utils/include"],
export_include_dirs: ["include"],
@@ -84,8 +85,8 @@
"audiopolicy-aidl-cpp",
"audiopolicy-types-aidl-cpp",
"capture_state_listener-aidl-cpp",
- "libaudiofoundation",
"libaudioclient_aidl_conversion",
+ "libaudiofoundation",
],
header_libs: ["libaudioclient_headers"],
}
@@ -113,9 +114,9 @@
"AudioTrack.cpp",
"AudioTrackShared.cpp",
"IAudioFlinger.cpp",
- "ToneGenerator.cpp",
"PlayerBase.cpp",
"RecordingActivityTracker.cpp",
+ "ToneGenerator.cpp",
"TrackPlayerBase.cpp",
],
defaults: [
@@ -126,16 +127,17 @@
"audioclient-types-aidl-cpp",
"audioflinger-aidl-cpp",
"audiopolicy-aidl-cpp",
- "spatializer-aidl-cpp",
"audiopolicy-types-aidl-cpp",
"av-types-aidl-cpp",
"capture_state_listener-aidl-cpp",
+ "com.android.media.audioclient-aconfig-cc",
+ "framework-permission-aidl-cpp",
"libaudio_aidl_conversion_common_cpp",
"libaudioclient_aidl_conversion",
"libaudiofoundation",
- "libaudioutils",
- "libaudiopolicy",
"libaudiomanager",
+ "libaudiopolicy",
+ "libaudioutils",
"libbinder",
"libcutils",
"libdl",
@@ -147,24 +149,24 @@
"libprocessgroup",
"libshmemcompat",
"libutils",
- "framework-permission-aidl-cpp",
"packagemanager_aidl-cpp",
+ "spatializer-aidl-cpp",
],
export_shared_lib_headers: [
"audioflinger-aidl-cpp",
"audiopolicy-aidl-cpp",
- "spatializer-aidl-cpp",
"framework-permission-aidl-cpp",
"libbinder",
"libmediametrics",
+ "spatializer-aidl-cpp",
],
include_dirs: [
"frameworks/av/media/libnbaio/include_mono/",
],
local_include_dirs: [
- "include/media",
"aidl",
+ "include/media",
],
header_libs: [
"libaudioclient_headers",
@@ -191,8 +193,8 @@
],
sanitize: {
misc_undefined: [
- "unsigned-integer-overflow",
"signed-integer-overflow",
+ "unsigned-integer-overflow",
],
},
}
@@ -229,8 +231,8 @@
filegroup {
name: "libaudioclient_aidl",
srcs: [
- "aidl/android/media/IPlayer.aidl",
"aidl/android/media/AudioHalVersion.aidl",
+ "aidl/android/media/IPlayer.aidl",
],
path: "aidl",
}
@@ -296,14 +298,14 @@
"aidl/android/media/AudioIoDescriptor.aidl",
"aidl/android/media/AudioPatchFw.aidl",
"aidl/android/media/AudioPolicyConfig.aidl",
- "aidl/android/media/AudioPortFw.aidl",
- "aidl/android/media/AudioPortSys.aidl",
"aidl/android/media/AudioPortConfigFw.aidl",
"aidl/android/media/AudioPortConfigSys.aidl",
"aidl/android/media/AudioPortDeviceExtSys.aidl",
"aidl/android/media/AudioPortExtSys.aidl",
+ "aidl/android/media/AudioPortFw.aidl",
"aidl/android/media/AudioPortMixExtSys.aidl",
"aidl/android/media/AudioPortRole.aidl",
+ "aidl/android/media/AudioPortSys.aidl",
"aidl/android/media/AudioPortType.aidl",
"aidl/android/media/AudioProfileSys.aidl",
"aidl/android/media/AudioRoute.aidl",
@@ -312,8 +314,8 @@
"aidl/android/media/AudioVibratorInfo.aidl",
"aidl/android/media/DeviceConnectedState.aidl",
"aidl/android/media/EffectDescriptor.aidl",
- "aidl/android/media/TrackSecondaryOutputInfo.aidl",
"aidl/android/media/SurroundSoundConfig.aidl",
+ "aidl/android/media/TrackSecondaryOutputInfo.aidl",
],
defaults: [
"latest_android_media_audio_common_types_import_interface",
@@ -345,14 +347,14 @@
srcs: [
"aidl/android/media/AudioAttributesEx.aidl",
"aidl/android/media/AudioMix.aidl",
- "aidl/android/media/AudioMixUpdate.aidl",
- "aidl/android/media/AudioMixerAttributesInternal.aidl",
- "aidl/android/media/AudioMixerBehavior.aidl",
"aidl/android/media/AudioMixCallbackFlag.aidl",
"aidl/android/media/AudioMixMatchCriterion.aidl",
"aidl/android/media/AudioMixMatchCriterionValue.aidl",
"aidl/android/media/AudioMixRouteFlag.aidl",
"aidl/android/media/AudioMixType.aidl",
+ "aidl/android/media/AudioMixUpdate.aidl",
+ "aidl/android/media/AudioMixerAttributesInternal.aidl",
+ "aidl/android/media/AudioMixerBehavior.aidl",
"aidl/android/media/AudioOffloadMode.aidl",
"aidl/android/media/AudioPolicyDeviceState.aidl",
"aidl/android/media/AudioPolicyForceUse.aidl",
@@ -367,6 +369,7 @@
],
imports: [
"audioclient-types-aidl",
+ "framework-permission-aidl",
],
backend: {
cpp: {
@@ -401,8 +404,8 @@
"aidl/android/media/OpenOutputResponse.aidl",
"aidl/android/media/RenderPosition.aidl",
- "aidl/android/media/IAudioFlingerService.aidl",
"aidl/android/media/IAudioFlingerClient.aidl",
+ "aidl/android/media/IAudioFlingerService.aidl",
"aidl/android/media/IAudioRecord.aidl",
"aidl/android/media/IAudioTrack.aidl",
"aidl/android/media/IAudioTrackCallback.aidl",
@@ -418,8 +421,8 @@
"audioclient-types-aidl",
"av-types-aidl",
"effect-aidl",
- "shared-file-region-aidl",
"framework-permission-aidl",
+ "shared-file-region-aidl",
],
double_loadable: true,
backend: {
@@ -446,9 +449,9 @@
"aidl/android/media/GetInputForAttrResponse.aidl",
"aidl/android/media/GetOutputForAttrResponse.aidl",
"aidl/android/media/GetSpatializerResponse.aidl",
- "aidl/android/media/RecordClientInfo.aidl",
"aidl/android/media/IAudioPolicyService.aidl",
"aidl/android/media/IAudioPolicyServiceClient.aidl",
+ "aidl/android/media/RecordClientInfo.aidl",
],
defaults: [
"latest_android_media_audio_common_types_import_interface",
diff --git a/media/libaudioclient/AudioRecord.cpp b/media/libaudioclient/AudioRecord.cpp
index 91bc700..f729e1b 100644
--- a/media/libaudioclient/AudioRecord.cpp
+++ b/media/libaudioclient/AudioRecord.cpp
@@ -130,11 +130,7 @@
}
AudioRecord::AudioRecord(const AttributionSourceState &client)
- : mActive(false), mStatus(NO_INIT), mClientAttributionSource(client),
- mSessionId(AUDIO_SESSION_ALLOCATE), mPreviousPriority(ANDROID_PRIORITY_NORMAL),
- mPreviousSchedulingGroup(SP_DEFAULT), mSelectedDeviceId(AUDIO_PORT_HANDLE_NONE),
- mRoutedDeviceId(AUDIO_PORT_HANDLE_NONE), mSelectedMicDirection(MIC_DIRECTION_UNSPECIFIED),
- mSelectedMicFieldDimension(MIC_FIELD_DIMENSION_DEFAULT)
+ : mClientAttributionSource(client)
{
}
@@ -154,13 +150,7 @@
audio_port_handle_t selectedDeviceId,
audio_microphone_direction_t selectedMicDirection,
float microphoneFieldDimension)
- : mActive(false),
- mStatus(NO_INIT),
- mClientAttributionSource(client),
- mSessionId(AUDIO_SESSION_ALLOCATE),
- mPreviousPriority(ANDROID_PRIORITY_NORMAL),
- mPreviousSchedulingGroup(SP_DEFAULT),
- mProxy(nullptr)
+ : mClientAttributionSource(client)
{
uid_t uid = VALUE_OR_FATAL(aidl2legacy_int32_t_uid_t(mClientAttributionSource.uid));
pid_t pid = VALUE_OR_FATAL(aidl2legacy_int32_t_pid_t(mClientAttributionSource.pid));
@@ -199,9 +189,6 @@
}
void AudioRecord::stopAndJoinCallbacks() {
- // Prevent nullptr crash if it did not open properly.
- if (mStatus != NO_ERROR) return;
-
// Make sure that callback function exits in the case where
// it is looping on buffer empty condition in obtainBuffer().
// Otherwise the callback thread will never exit.
@@ -693,16 +680,27 @@
AutoMutex lock(mLock);
ALOGV("%s(%d): deviceId=%d mSelectedDeviceId=%d",
__func__, mPortId, deviceId, mSelectedDeviceId);
+
if (mSelectedDeviceId != deviceId) {
mSelectedDeviceId = deviceId;
if (mStatus == NO_ERROR) {
- // stop capture so that audio policy manager does not reject the new instance start request
- // as only one capture can be active at a time.
- if (mAudioRecord != 0 && mActive) {
- mAudioRecord->stop();
+ if (mActive) {
+ if (mSelectedDeviceId != mRoutedDeviceId) {
+ // stop capture so that audio policy manager does not reject the new instance
+ // start request as only one capture can be active at a time.
+ if (mAudioRecord != 0) {
+ mAudioRecord->stop();
+ }
+ android_atomic_or(CBLK_INVALID, &mCblk->mFlags);
+ mProxy->interrupt();
+ }
+ } else {
+ // if the track is idle, try to restore now and
+ // defer to next start if not possible
+ if (restoreRecord_l("setInputDevice") != OK) {
+ android_atomic_or(CBLK_INVALID, &mCblk->mFlags);
+ }
}
- android_atomic_or(CBLK_INVALID, &mCblk->mFlags);
- mProxy->interrupt();
}
}
return NO_ERROR;
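A condensed restatement of the new setInputDevice() routing decision, as hypothetical pseudocode (helper names are invented for readability and are not real AudioRecord methods):

    // mSelectedDeviceId has just changed and the record was created successfully.
    if (isRecording) {
        if (selectedDeviceId != routedDeviceId) {
            // A re-route is actually needed: stop the capture (only one capture
            // may be active at a time), mark the control block invalid and wake
            // the proxy so the client restores the track on its next operation.
            stopCaptureAndInvalidate();
        }
        // Already routed to the requested device: nothing to do.
    } else {
        // Idle track: try to re-create it on the new device right away, and
        // fall back to deferring the restore to the next start() on failure.
        if (restoreNow() != OK) {
            markInvalidForNextStart();
        }
    }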
@@ -1521,7 +1519,7 @@
.set(AMEDIAMETRICS_PROP_WHERE, from)
.record(); });
- ALOGW("%s(%d): dead IAudioRecord, creating a new one from %s()", __func__, mPortId, from);
+ ALOGW("%s(%d) called from %s()", __func__, mPortId, from);
++mSequence;
const int INITIAL_RETRIES = 3;
diff --git a/media/libaudioclient/AudioSystem.cpp b/media/libaudioclient/AudioSystem.cpp
index d1b1849..aa51652 100644
--- a/media/libaudioclient/AudioSystem.cpp
+++ b/media/libaudioclient/AudioSystem.cpp
@@ -86,7 +86,7 @@
typename ServiceTraits>
class ServiceHandler {
public:
- sp<ServiceInterface> getService(bool canStartThreadPool = true)
+ sp<ServiceInterface> getService()
EXCLUDES(mMutex) NO_THREAD_SAFETY_ANALYSIS { // std::unique_ptr
sp<ServiceInterface> service;
sp<Client> client;
@@ -143,7 +143,7 @@
client = mClient;
service = mService;
// Make sure callbacks can be received by the client
- if (canStartThreadPool) {
+ if (mCanStartThreadPool) {
ProcessState::self()->startThreadPool();
}
ul.unlock();
@@ -186,6 +186,10 @@
if (mClient) ServiceTraits::onClearService(mClient);
}
+ void disableThreadPool() {
+ mCanStartThreadPool = false;
+ }
+
private:
std::mutex mSingleGetter;
std::mutex mMutex;
@@ -194,6 +198,7 @@
sp<ServiceInterface> mLocalService GUARDED_BY(mMutex);
sp<ServiceInterface> mService GUARDED_BY(mMutex);
sp<Client> mClient GUARDED_BY(mMutex);
+ std::atomic<bool> mCanStartThreadPool = true;
};
struct AudioFlingerTraits {
@@ -224,10 +229,6 @@
return gAudioFlingerServiceHandler.getService();
}
-sp<IAudioFlinger> AudioSystem::get_audio_flinger_for_fuzzer() {
- return gAudioFlingerServiceHandler.getService(false /* canStartThreadPool */);
-}
-
sp<AudioSystem::AudioFlingerClient> AudioSystem::getAudioFlingerClient() {
return gAudioFlingerServiceHandler.getClient();
}
@@ -957,6 +958,11 @@
gAudioPolicyServiceHandler.clearService();
}
+void AudioSystem::disableThreadPool() {
+ gAudioFlingerServiceHandler.disableThreadPool();
+ gAudioPolicyServiceHandler.disableThreadPool();
+}
+
// ---------------------------------------------------------------------------
void AudioSystem::onNewAudioModulesAvailable() {
diff --git a/media/libaudioclient/AudioTrack.cpp b/media/libaudioclient/AudioTrack.cpp
index 98a1fde..6772201 100644
--- a/media/libaudioclient/AudioTrack.cpp
+++ b/media/libaudioclient/AudioTrack.cpp
@@ -233,25 +233,9 @@
return NO_ERROR;
}
-AudioTrack::AudioTrack() : AudioTrack(AttributionSourceState())
-{
-}
-
AudioTrack::AudioTrack(const AttributionSourceState& attributionSource)
- : mStatus(NO_INIT),
- mState(STATE_STOPPED),
- mPreviousPriority(ANDROID_PRIORITY_NORMAL),
- mPreviousSchedulingGroup(SP_DEFAULT),
- mPausedPosition(0),
- mSelectedDeviceId(AUDIO_PORT_HANDLE_NONE),
- mRoutedDeviceId(AUDIO_PORT_HANDLE_NONE),
- mClientAttributionSource(attributionSource),
- mAudioTrackCallback(new AudioTrackCallback())
+ : mClientAttributionSource(attributionSource)
{
- mAttributes.content_type = AUDIO_CONTENT_TYPE_UNKNOWN;
- mAttributes.usage = AUDIO_USAGE_UNKNOWN;
- mAttributes.flags = AUDIO_FLAG_NONE;
- strcpy(mAttributes.tags, "");
}
AudioTrack::AudioTrack(
@@ -271,21 +255,12 @@
bool doNotReconnect,
float maxRequiredSpeed,
audio_port_handle_t selectedDeviceId)
- : mStatus(NO_INIT),
- mState(STATE_STOPPED),
- mPreviousPriority(ANDROID_PRIORITY_NORMAL),
- mPreviousSchedulingGroup(SP_DEFAULT),
- mPausedPosition(0),
- mAudioTrackCallback(new AudioTrackCallback())
{
- mAttributes = AUDIO_ATTRIBUTES_INITIALIZER;
-
- // make_unique does not aggregate init until c++20
- mSetParams = std::unique_ptr<SetParams>{
- new SetParams{streamType, sampleRate, format, channelMask, frameCount, flags, callback,
- notificationFrames, 0 /*sharedBuffer*/, false /*threadCanCallJava*/,
- sessionId, transferType, offloadInfo, attributionSource, pAttributes,
- doNotReconnect, maxRequiredSpeed, selectedDeviceId}};
+ mSetParams = std::make_unique<SetParams>(
+ streamType, sampleRate, format, channelMask, frameCount, flags, callback,
+ notificationFrames, nullptr /*sharedBuffer*/, false /*threadCanCallJava*/,
+ sessionId, transferType, offloadInfo, attributionSource, pAttributes,
+ doNotReconnect, maxRequiredSpeed, selectedDeviceId);
}
namespace {
@@ -400,9 +375,6 @@
}
void AudioTrack::stopAndJoinCallbacks() {
- // Prevent nullptr crash if it did not open properly.
- if (mStatus != NO_ERROR) return;
-
// Make sure that callback function exits in the case where
// it is looping on buffer full condition in obtainBuffer().
// Otherwise the callback thread will never exit.
@@ -919,6 +891,7 @@
const int64_t beginNs = systemTime();
AutoMutex lock(mLock);
+ if (mProxy == nullptr) return; // not successfully initialized.
mediametrics::Defer defer([&]() {
mediametrics::LogItem(mMetricsId)
.set(AMEDIAMETRICS_PROP_EVENT, AMEDIAMETRICS_PROP_EVENT_VALUE_STOP)
@@ -1707,14 +1680,14 @@
mSelectedDeviceId = deviceId;
if (mStatus == NO_ERROR) {
if (isOffloadedOrDirect_l()) {
- if (mState == STATE_STOPPED || mState == STATE_FLUSHED) {
- ALOGD("%s(%d): creating a new AudioTrack", __func__, mPortId);
- result = restoreTrack_l("setOutputDevice", true /* forceRestore */);
- } else {
+ if (isPlaying_l()) {
ALOGW("%s(%d). Offloaded or Direct track is not STOPPED or FLUSHED. "
"State: %s.",
__func__, mPortId, stateToString(mState));
result = INVALID_OPERATION;
+ } else {
+ ALOGD("%s(%d): creating a new AudioTrack", __func__, mPortId);
+ result = restoreTrack_l("setOutputDevice", true /* forceRestore */);
}
} else {
// allow track invalidation when track is not playing to propagate
diff --git a/media/libaudioclient/PolicyAidlConversion.cpp b/media/libaudioclient/PolicyAidlConversion.cpp
index a71bb18..441e329 100644
--- a/media/libaudioclient/PolicyAidlConversion.cpp
+++ b/media/libaudioclient/PolicyAidlConversion.cpp
@@ -243,6 +243,7 @@
legacy.mAllowPrivilegedMediaPlaybackCapture = aidl.allowPrivilegedMediaPlaybackCapture;
legacy.mVoiceCommunicationCaptureAllowed = aidl.voiceCommunicationCaptureAllowed;
legacy.mToken = aidl.mToken;
+ legacy.mVirtualDeviceId = aidl.mVirtualDeviceId;
return legacy;
}
@@ -267,6 +268,7 @@
aidl.allowPrivilegedMediaPlaybackCapture = legacy.mAllowPrivilegedMediaPlaybackCapture;
aidl.voiceCommunicationCaptureAllowed = legacy.mVoiceCommunicationCaptureAllowed;
aidl.mToken = legacy.mToken;
+ aidl.mVirtualDeviceId = legacy.mVirtualDeviceId;
return aidl;
}
diff --git a/media/libaudioclient/aidl/android/media/AudioMix.aidl b/media/libaudioclient/aidl/android/media/AudioMix.aidl
index f0c561c..bb8537d 100644
--- a/media/libaudioclient/aidl/android/media/AudioMix.aidl
+++ b/media/libaudioclient/aidl/android/media/AudioMix.aidl
@@ -41,4 +41,6 @@
boolean voiceCommunicationCaptureAllowed;
/** Identifies the owner of the AudioPolicy that this AudioMix belongs to */
IBinder mToken;
+ /** Indicates the Id of the VirtualDevice this AudioMix was registered for */
+ int mVirtualDeviceId;
}
diff --git a/media/libaudioclient/aidl/fuzzer/Android.bp b/media/libaudioclient/aidl/fuzzer/Android.bp
index 02c5a3f..a0c535d 100644
--- a/media/libaudioclient/aidl/fuzzer/Android.bp
+++ b/media/libaudioclient/aidl/fuzzer/Android.bp
@@ -22,44 +22,45 @@
name: "libaudioclient_aidl_fuzzer_defaults",
static_libs: [
"android.hardware.audio.common@7.0-enums",
- "effect-aidl-cpp",
+ "libaudiomockhal",
"libcgrouprc",
"libcgrouprc_format",
"libfakeservicemanager",
"libjsoncpp",
"liblog",
- "libmediametricsservice",
"libmedia_helper",
+ "libmediametricsservice",
"libprocessgroup",
"shared-file-region-aidl-cpp",
],
shared_libs: [
"android.hardware.audio.common-util",
"audioclient-types-aidl-cpp",
+ "audioflinger-aidl-cpp",
"audiopolicy-aidl-cpp",
"audiopolicy-types-aidl-cpp",
"av-types-aidl-cpp",
"capture_state_listener-aidl-cpp",
+ "effect-aidl-cpp",
"framework-permission-aidl-cpp",
+ "libactivitymanager_aidl",
"libaudioclient",
- "audioflinger-aidl-cpp",
- "libaudioflinger",
"libaudioclient_aidl_conversion",
+ "libaudioflinger",
"libaudiofoundation",
+ "libaudiohal",
"libaudiomanager",
"libaudiopolicy",
- "libaudioutils",
- "libaudiopolicyservice",
"libaudiopolicymanagerdefault",
- "libaudiohal",
+ "libaudiopolicyservice",
"libaudioprocessing",
- "libactivitymanager_aidl",
+ "libaudioutils",
"libdl",
"libheadtracking",
- "libmediautils",
"libmediametrics",
- "libnblog",
+ "libmediautils",
"libnbaio",
+ "libnblog",
"libpowermanager",
"libvibrator",
"libvndksupport",
@@ -68,16 +69,16 @@
"packagemanager_aidl-cpp",
],
header_libs: [
- "libaudiopolicymanager_interface_headers",
+ "libaudioflinger_headers",
"libaudiofoundation_headers",
"libaudiohal_headers",
- "libaudioflinger_headers",
+ "libaudiopolicymanager_interface_headers",
"libbinder_headers",
"libmedia_headers",
],
fuzz_config: {
cc: [
- "android-media-fuzzing-reports@google.com",
+ "android-audio-fuzzing-reports@google.com",
],
componentid: 155276,
hotlists: ["4593311"],
@@ -93,6 +94,9 @@
name: "audioflinger_aidl_fuzzer",
srcs: ["audioflinger_aidl_fuzzer.cpp"],
defaults: [
+ "latest_android_hardware_audio_core_ndk_shared",
+ "latest_android_hardware_audio_core_sounddose_ndk_shared",
+ "latest_android_hardware_audio_effect_ndk_shared",
"libaudioclient_aidl_fuzzer_defaults",
"service_fuzzer_defaults",
],
diff --git a/media/libaudioclient/aidl/fuzzer/audioflinger_aidl_fuzzer.cpp b/media/libaudioclient/aidl/fuzzer/audioflinger_aidl_fuzzer.cpp
index f99cc3b..c7a04da 100644
--- a/media/libaudioclient/aidl/fuzzer/audioflinger_aidl_fuzzer.cpp
+++ b/media/libaudioclient/aidl/fuzzer/audioflinger_aidl_fuzzer.cpp
@@ -17,8 +17,12 @@
#include <AudioFlinger.h>
#include <android-base/logging.h>
#include <android/binder_interface_utils.h>
+#include <android/binder_manager.h>
#include <android/binder_process.h>
#include <android/media/IAudioPolicyService.h>
+#include <core-mock/ConfigMock.h>
+#include <core-mock/ModuleMock.h>
+#include <effect-mock/FactoryMock.h>
#include <fakeservicemanager/FakeServiceManager.h>
#include <fuzzbinder/libbinder_driver.h>
#include <fuzzbinder/random_binder.h>
@@ -32,6 +36,7 @@
[[clang::no_destroy]] static std::once_flag gSmOnce;
sp<FakeServiceManager> gFakeServiceManager;
+sp<AudioFlingerServerAdapter> gAudioFlingerServerAdapter;
bool addService(const String16& serviceName, const sp<FakeServiceManager>& fakeServiceManager,
FuzzedDataProvider& fdp) {
@@ -43,46 +48,58 @@
return true;
}
+extern "C" int LLVMFuzzerInitialize(int* /*argc*/, char*** /*argv*/) {
+ /* Create a FakeServiceManager instance and add required services */
+ gFakeServiceManager = sp<FakeServiceManager>::make();
+ setDefaultServiceManager(gFakeServiceManager);
+
+ auto configService = ndk::SharedRefBase::make<ConfigMock>();
+ CHECK_EQ(NO_ERROR, AServiceManager_addService(configService.get()->asBinder().get(),
+ "android.hardware.audio.core.IConfig/default"));
+
+ auto factoryService = ndk::SharedRefBase::make<FactoryMock>();
+ CHECK_EQ(NO_ERROR,
+ AServiceManager_addService(factoryService.get()->asBinder().get(),
+ "android.hardware.audio.effect.IFactory/default"));
+
+ auto moduleService = ndk::SharedRefBase::make<ModuleMock>();
+ CHECK_EQ(NO_ERROR, AServiceManager_addService(moduleService.get()->asBinder().get(),
+ "android.hardware.audio.core.IModule/default"));
+
+ // Disable creating thread pool for fuzzer instance of audio flinger and audio policy services
+ AudioSystem::disableThreadPool();
+
+ return 0;
+}
+
extern "C" int LLVMFuzzerTestOneInput(const uint8_t* data, size_t size) {
FuzzedDataProvider fdp(data, size);
- std::call_once(gSmOnce, [&] {
- /* Create a FakeServiceManager instance and add required services */
- gFakeServiceManager = sp<FakeServiceManager>::make();
- setDefaultServiceManager(gFakeServiceManager);
- });
- gFakeServiceManager->clear();
-
- for (const char* service :
- {"activity", "sensor_privacy", "permission", "scheduling_policy",
- "android.hardware.audio.core.IConfig", "batterystats", "media.metrics"}) {
+ for (const char* service : {"activity", "sensor_privacy", "permission", "scheduling_policy",
+ "batterystats", "media.metrics"}) {
if (!addService(String16(service), gFakeServiceManager, fdp)) {
return 0;
}
}
- const auto audioFlinger = sp<AudioFlinger>::make();
- const auto afAdapter = sp<AudioFlingerServerAdapter>::make(audioFlinger);
+ // TODO(330882064) : Initialise Audio Flinger and Audio Policy services every time
+ std::call_once(gSmOnce, [&] {
+ const auto audioFlinger = sp<AudioFlinger>::make();
+ gAudioFlingerServerAdapter = sp<AudioFlingerServerAdapter>::make(audioFlinger);
+ CHECK_EQ(NO_ERROR,
+ gFakeServiceManager->addService(String16(IAudioFlinger::DEFAULT_SERVICE_NAME),
+ IInterface::asBinder(gAudioFlingerServerAdapter),
+ false /* allowIsolated */,
+ IServiceManager::DUMP_FLAG_PRIORITY_DEFAULT));
- CHECK_EQ(NO_ERROR,
- gFakeServiceManager->addService(
- String16(IAudioFlinger::DEFAULT_SERVICE_NAME), IInterface::asBinder(afAdapter),
- false /* allowIsolated */, IServiceManager::DUMP_FLAG_PRIORITY_DEFAULT));
+ const auto audioPolicyService = sp<AudioPolicyService>::make();
+ CHECK_EQ(NO_ERROR,
+ gFakeServiceManager->addService(String16("media.audio_policy"), audioPolicyService,
+ false /* allowIsolated */,
+ IServiceManager::DUMP_FLAG_PRIORITY_DEFAULT));
+ });
- AudioSystem::get_audio_flinger_for_fuzzer();
- const auto audioPolicyService = sp<AudioPolicyService>::make();
-
- CHECK_EQ(NO_ERROR,
- gFakeServiceManager->addService(String16("media.audio_policy"), audioPolicyService,
- false /* allowIsolated */,
- IServiceManager::DUMP_FLAG_PRIORITY_DEFAULT));
-
- sp<IBinder> audioFlingerServiceBinder =
- gFakeServiceManager->getService(String16(IAudioFlinger::DEFAULT_SERVICE_NAME));
- sp<media::IAudioFlingerService> audioFlingerService =
- interface_cast<media::IAudioFlingerService>(audioFlingerServiceBinder);
-
- fuzzService(media::IAudioFlingerService::asBinder(audioFlingerService), std::move(fdp));
+ fuzzService(media::IAudioFlingerService::asBinder(gAudioFlingerServerAdapter), std::move(fdp));
return 0;
}
diff --git a/media/libaudioclient/aidl/fuzzer/libaudiomockhal/Android.bp b/media/libaudioclient/aidl/fuzzer/libaudiomockhal/Android.bp
new file mode 100644
index 0000000..c4afffb
--- /dev/null
+++ b/media/libaudioclient/aidl/fuzzer/libaudiomockhal/Android.bp
@@ -0,0 +1,48 @@
+/*
+ * Copyright (C) 2024 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+cc_library {
+ name: "libaudiomockhal",
+
+ defaults: [
+ "latest_android_hardware_audio_core_ndk_shared",
+ "latest_android_hardware_audio_core_sounddose_ndk_shared",
+ "latest_android_hardware_audio_effect_ndk_shared",
+ ],
+ header_libs: [
+ "libbinder_headers",
+ ],
+ static_libs: [
+ "libbinder_random_parcel",
+ ],
+ shared_libs: [
+ "libbinder_ndk",
+ ],
+
+ host_supported: true,
+ srcs: [
+ "FactoryMock.cpp",
+ "ModuleMock.cpp",
+ "StreamInMock.cpp",
+ "StreamOutMock.cpp",
+ ],
+ target: {
+ darwin: {
+ enabled: false,
+ },
+ },
+ export_include_dirs: ["include"],
+}
diff --git a/media/libaudioclient/aidl/fuzzer/libaudiomockhal/FactoryMock.cpp b/media/libaudioclient/aidl/fuzzer/libaudiomockhal/FactoryMock.cpp
new file mode 100644
index 0000000..ea07afc
--- /dev/null
+++ b/media/libaudioclient/aidl/fuzzer/libaudiomockhal/FactoryMock.cpp
@@ -0,0 +1,28 @@
+/*
+ * Copyright (C) 2024 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at:
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ */
+#include "effect-mock/FactoryMock.h"
+#include "effect-mock/EffectMock.h"
+
+namespace aidl::android::hardware::audio::effect {
+
+ndk::ScopedAStatus FactoryMock::createEffect(const AudioUuid&,
+ std::shared_ptr<IEffect>* _aidl_return) {
+ *_aidl_return = ndk::SharedRefBase::make<EffectMock>();
+ return ndk::ScopedAStatus::ok();
+}
+
+} // namespace aidl::android::hardware::audio::effect
diff --git a/media/libaudioclient/aidl/fuzzer/libaudiomockhal/ModuleMock.cpp b/media/libaudioclient/aidl/fuzzer/libaudiomockhal/ModuleMock.cpp
new file mode 100644
index 0000000..711924f
--- /dev/null
+++ b/media/libaudioclient/aidl/fuzzer/libaudiomockhal/ModuleMock.cpp
@@ -0,0 +1,144 @@
+/*
+ * Copyright (C) 2024 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at:
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ */
+#include "core-mock/ModuleMock.h"
+#include "core-mock/BluetoothA2dpMock.h"
+#include "core-mock/BluetoothLeMock.h"
+#include "core-mock/BluetoothMock.h"
+#include "core-mock/StreamInMock.h"
+#include "core-mock/StreamOutMock.h"
+#include "core-mock/TelephonyMock.h"
+#include "sounddose-mock/SoundDoseMock.h"
+
+namespace aidl::android::hardware::audio::core {
+
+ModuleMock::ModuleMock() {
+ // Device ports
+ auto outDevice = createPort(/* PortId */ 0, /* Name */ "Default",
+ /* Flags */ 1 << AudioPortDeviceExt::FLAG_INDEX_DEFAULT_DEVICE,
+ /* isInput */ false,
+ createDeviceExt(
+ /* DeviceType */ AudioDeviceType::OUT_DEFAULT,
+ /* Flags */ AudioPortDeviceExt::FLAG_INDEX_DEFAULT_DEVICE));
+ mPorts.push_back(outDevice);
+ auto inDevice = createPort(/* PortId */ 1, /* Name */ "Default",
+ /* Flags */ 1 << AudioPortDeviceExt::FLAG_INDEX_DEFAULT_DEVICE,
+ /* isInput */ true,
+ createDeviceExt(
+ /* DeviceType */ AudioDeviceType::IN_DEFAULT,
+ /* Flags */ 0));
+ mPorts.push_back(inDevice);
+}
+
+ndk::ScopedAStatus ModuleMock::getTelephony(std::shared_ptr<ITelephony>* _aidl_return) {
+ *_aidl_return = ndk::SharedRefBase::make<TelephonyMock>();
+ return ndk::ScopedAStatus::ok();
+}
+
+ndk::ScopedAStatus ModuleMock::getBluetooth(std::shared_ptr<IBluetooth>* _aidl_return) {
+ *_aidl_return = ndk::SharedRefBase::make<BluetoothMock>();
+ return ndk::ScopedAStatus::ok();
+}
+
+ndk::ScopedAStatus ModuleMock::getBluetoothA2dp(std::shared_ptr<IBluetoothA2dp>* _aidl_return) {
+ *_aidl_return = ndk::SharedRefBase::make<BluetoothA2dpMock>();
+ return ndk::ScopedAStatus::ok();
+}
+
+ndk::ScopedAStatus ModuleMock::getBluetoothLe(std::shared_ptr<IBluetoothLe>* _aidl_return) {
+ *_aidl_return = ndk::SharedRefBase::make<BluetoothLeMock>();
+ return ndk::ScopedAStatus::ok();
+}
+
+ndk::ScopedAStatus ModuleMock::openInputStream(const OpenInputStreamArguments&,
+ OpenInputStreamReturn* _aidl_return) {
+ _aidl_return->stream = ndk::SharedRefBase::make<StreamInMock>();
+ return ndk::ScopedAStatus::ok();
+}
+
+ndk::ScopedAStatus ModuleMock::openOutputStream(const OpenOutputStreamArguments&,
+ OpenOutputStreamReturn* _aidl_return) {
+ _aidl_return->stream = ndk::SharedRefBase::make<StreamOutMock>();
+ return ndk::ScopedAStatus::ok();
+}
+
+ndk::ScopedAStatus ModuleMock::getMasterMute(bool* _aidl_return) {
+ *_aidl_return = mMasterMute;
+ return ndk::ScopedAStatus::ok();
+}
+
+ndk::ScopedAStatus ModuleMock::setMasterMute(bool masterMute) {
+ mMasterMute = masterMute;
+ return ndk::ScopedAStatus::ok();
+}
+
+ndk::ScopedAStatus ModuleMock::getMasterVolume(float* _aidl_return) {
+ *_aidl_return = mMasterVolume;
+ return ndk::ScopedAStatus::ok();
+}
+
+ndk::ScopedAStatus ModuleMock::setMasterVolume(float masterVolume) {
+ mMasterVolume = masterVolume;
+ return ndk::ScopedAStatus::ok();
+}
+
+ndk::ScopedAStatus ModuleMock::getMicMute(bool* _aidl_return) {
+ *_aidl_return = mMicMute;
+ return ndk::ScopedAStatus::ok();
+}
+
+ndk::ScopedAStatus ModuleMock::setMicMute(bool micMute) {
+ mMicMute = micMute;
+ return ndk::ScopedAStatus::ok();
+}
+
+ndk::ScopedAStatus ModuleMock::getSoundDose(std::shared_ptr<ISoundDose>* _aidl_return) {
+ *_aidl_return = ndk::SharedRefBase::make<SoundDoseMock>();
+ return ndk::ScopedAStatus::ok();
+}
+
+ndk::ScopedAStatus ModuleMock::getMmapPolicyInfos(AudioMMapPolicyType,
+ std::vector<AudioMMapPolicyInfo>* _aidl_return) {
+ AudioMMapPolicyInfo never;
+ never.mmapPolicy = AudioMMapPolicy::NEVER;
+ _aidl_return->push_back(never);
+ return ndk::ScopedAStatus::ok();
+}
+
+ndk::ScopedAStatus ModuleMock::supportsVariableLatency(bool* _aidl_return) {
+ *_aidl_return = false;
+ return ndk::ScopedAStatus::ok();
+}
+
+AudioPortExt ModuleMock::createDeviceExt(AudioDeviceType devType, int32_t flags) {
+ AudioPortDeviceExt deviceExt;
+ deviceExt.device.type.type = devType;
+ deviceExt.flags = flags;
+ return AudioPortExt::make<AudioPortExt::Tag::device>(deviceExt);
+}
+
+AudioPort ModuleMock::createPort(int32_t id, const std::string& name, int32_t flags, bool isInput,
+ const AudioPortExt& ext) {
+ AudioPort port;
+ port.id = id;
+ port.name = name;
+ port.flags = isInput ? AudioIoFlags::make<AudioIoFlags::Tag::input>(flags)
+ : AudioIoFlags::make<AudioIoFlags::Tag::output>(flags);
+ port.ext = ext;
+ return port;
+}
+
+} // namespace aidl::android::hardware::audio::core
diff --git a/media/libaudioclient/aidl/fuzzer/libaudiomockhal/StreamInMock.cpp b/media/libaudioclient/aidl/fuzzer/libaudiomockhal/StreamInMock.cpp
new file mode 100644
index 0000000..093a979
--- /dev/null
+++ b/media/libaudioclient/aidl/fuzzer/libaudiomockhal/StreamInMock.cpp
@@ -0,0 +1,62 @@
+/*
+ * Copyright (C) 2024 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at:
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ */
+#include "core-mock/StreamInMock.h"
+#include "core-mock/StreamCommonMock.h"
+
+namespace aidl::android::hardware::audio::core {
+
+ndk::ScopedAStatus StreamInMock::getStreamCommon(std::shared_ptr<IStreamCommon>* _aidl_return) {
+ if (!mStreamCommon) {
+ mStreamCommon = ndk::SharedRefBase::make<StreamCommonMock>();
+ }
+ *_aidl_return = mStreamCommon;
+ return ndk::ScopedAStatus::ok();
+}
+
+ndk::ScopedAStatus StreamInMock::getMicrophoneDirection(
+ IStreamIn::MicrophoneDirection* _aidl_return) {
+ *_aidl_return = mMicrophoneDirection;
+ return ndk::ScopedAStatus::ok();
+}
+
+ndk::ScopedAStatus StreamInMock::setMicrophoneDirection(
+ IStreamIn::MicrophoneDirection in_direction) {
+ mMicrophoneDirection = in_direction;
+ return ndk::ScopedAStatus::ok();
+}
+
+ndk::ScopedAStatus StreamInMock::getMicrophoneFieldDimension(float* _aidl_return) {
+ *_aidl_return = mMicrophoneFieldDimension;
+ return ndk::ScopedAStatus::ok();
+}
+
+ndk::ScopedAStatus StreamInMock::setMicrophoneFieldDimension(float in_zoom) {
+ mMicrophoneFieldDimension = in_zoom;
+ return ndk::ScopedAStatus::ok();
+}
+
+ndk::ScopedAStatus StreamInMock::getHwGain(std::vector<float>* _aidl_return) {
+ *_aidl_return = mHwGains;
+ return ndk::ScopedAStatus::ok();
+}
+
+ndk::ScopedAStatus StreamInMock::setHwGain(const std::vector<float>& in_channelGains) {
+ mHwGains = in_channelGains;
+ return ndk::ScopedAStatus::ok();
+}
+
+} // namespace aidl::android::hardware::audio::core
diff --git a/media/libaudioclient/aidl/fuzzer/libaudiomockhal/StreamOutMock.cpp b/media/libaudioclient/aidl/fuzzer/libaudiomockhal/StreamOutMock.cpp
new file mode 100644
index 0000000..a71f954
--- /dev/null
+++ b/media/libaudioclient/aidl/fuzzer/libaudiomockhal/StreamOutMock.cpp
@@ -0,0 +1,64 @@
+/*
+ * Copyright (C) 2024 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at:
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ */
+#include "core-mock/StreamOutMock.h"
+#include "core-mock/StreamCommonMock.h"
+
+namespace aidl::android::hardware::audio::core {
+
+ndk::ScopedAStatus StreamOutMock::getStreamCommon(std::shared_ptr<IStreamCommon>* _aidl_return) {
+ if (!mStreamCommon) {
+ mStreamCommon = ndk::SharedRefBase::make<StreamCommonMock>();
+ }
+ *_aidl_return = mStreamCommon;
+ return ndk::ScopedAStatus::ok();
+}
+
+ndk::ScopedAStatus StreamOutMock::getHwVolume(std::vector<float>* _aidl_return) {
+ *_aidl_return = mHwVolume;
+ return ndk::ScopedAStatus::ok();
+}
+ndk::ScopedAStatus StreamOutMock::setHwVolume(const std::vector<float>& in_channelVolumes) {
+ mHwVolume = in_channelVolumes;
+ return ndk::ScopedAStatus::ok();
+}
+ndk::ScopedAStatus StreamOutMock::getAudioDescriptionMixLevel(float* _aidl_return) {
+ *_aidl_return = mAudioDescriptionMixLeveldB;
+ return ndk::ScopedAStatus::ok();
+}
+ndk::ScopedAStatus StreamOutMock::setAudioDescriptionMixLevel(float in_leveldB) {
+ mAudioDescriptionMixLeveldB = in_leveldB;
+ return ndk::ScopedAStatus::ok();
+}
+ndk::ScopedAStatus StreamOutMock::getDualMonoMode(AudioDualMonoMode* _aidl_return) {
+ *_aidl_return = mDualMonoMode;
+ return ndk::ScopedAStatus::ok();
+}
+ndk::ScopedAStatus StreamOutMock::setDualMonoMode(AudioDualMonoMode in_mode) {
+ mDualMonoMode = in_mode;
+ return ndk::ScopedAStatus::ok();
+}
+ndk::ScopedAStatus StreamOutMock::getPlaybackRateParameters(AudioPlaybackRate* _aidl_return) {
+ *_aidl_return = mPlaybackRateParameters;
+ return ndk::ScopedAStatus::ok();
+}
+ndk::ScopedAStatus StreamOutMock::setPlaybackRateParameters(
+ const AudioPlaybackRate& in_playbackRate) {
+ mPlaybackRateParameters = in_playbackRate;
+ return ndk::ScopedAStatus::ok();
+}
+
+} // namespace aidl::android::hardware::audio::core
diff --git a/media/libaudioclient/aidl/fuzzer/libaudiomockhal/include/core-mock/BluetoothA2dpMock.h b/media/libaudioclient/aidl/fuzzer/libaudiomockhal/include/core-mock/BluetoothA2dpMock.h
new file mode 100644
index 0000000..c4dd0d9
--- /dev/null
+++ b/media/libaudioclient/aidl/fuzzer/libaudiomockhal/include/core-mock/BluetoothA2dpMock.h
@@ -0,0 +1,46 @@
+/*
+ * Copyright (C) 2024 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at:
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ */
+#include <aidl/android/hardware/audio/core/BnBluetoothA2dp.h>
+
+using namespace aidl::android::hardware::audio::core;
+
+namespace aidl::android::hardware::audio::core {
+
+class BluetoothA2dpMock : public BnBluetoothA2dp {
+ public:
+ ndk::ScopedAStatus isEnabled(bool* _aidl_return) override {
+ *_aidl_return = mEnabled;
+ return ndk::ScopedAStatus::ok();
+ }
+ ndk::ScopedAStatus setEnabled(bool enabled) override {
+ mEnabled = enabled;
+ return ndk::ScopedAStatus::ok();
+ }
+ ndk::ScopedAStatus supportsOffloadReconfiguration(bool* _aidl_return) override {
+ *_aidl_return = kSupportsOffloadReconfiguration;
+ return ndk::ScopedAStatus::ok();
+ }
+ ndk::ScopedAStatus reconfigureOffload(const std::vector<VendorParameter>&) override {
+ return ndk::ScopedAStatus::ok();
+ }
+
+ private:
+ static constexpr bool kSupportsOffloadReconfiguration = true;
+ bool mEnabled = false;
+};
+
+} // namespace aidl::android::hardware::audio::core
diff --git a/media/libaudioclient/aidl/fuzzer/libaudiomockhal/include/core-mock/BluetoothLeMock.h b/media/libaudioclient/aidl/fuzzer/libaudiomockhal/include/core-mock/BluetoothLeMock.h
new file mode 100644
index 0000000..d58695a
--- /dev/null
+++ b/media/libaudioclient/aidl/fuzzer/libaudiomockhal/include/core-mock/BluetoothLeMock.h
@@ -0,0 +1,46 @@
+/*
+ * Copyright (C) 2024 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at:
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ */
+#include <aidl/android/hardware/audio/core/BnBluetoothLe.h>
+
+using namespace aidl::android::hardware::audio::core;
+
+namespace aidl::android::hardware::audio::core {
+
+class BluetoothLeMock : public BnBluetoothLe {
+ public:
+ ndk::ScopedAStatus isEnabled(bool* _aidl_return) override {
+ *_aidl_return = mEnabled;
+ return ndk::ScopedAStatus::ok();
+ }
+ ndk::ScopedAStatus setEnabled(bool enabled) override {
+ mEnabled = enabled;
+ return ndk::ScopedAStatus::ok();
+ }
+ ndk::ScopedAStatus supportsOffloadReconfiguration(bool* _aidl_return) override {
+ *_aidl_return = kSupportsOffloadReconfiguration;
+ return ndk::ScopedAStatus::ok();
+ }
+ ndk::ScopedAStatus reconfigureOffload(const std::vector<VendorParameter>&) override {
+ return ndk::ScopedAStatus::ok();
+ }
+
+ private:
+ static constexpr bool kSupportsOffloadReconfiguration = true;
+ bool mEnabled = false;
+};
+
+} // namespace aidl::android::hardware::audio::core
diff --git a/media/libaudioclient/aidl/fuzzer/libaudiomockhal/include/core-mock/BluetoothMock.h b/media/libaudioclient/aidl/fuzzer/libaudiomockhal/include/core-mock/BluetoothMock.h
new file mode 100644
index 0000000..e805840
--- /dev/null
+++ b/media/libaudioclient/aidl/fuzzer/libaudiomockhal/include/core-mock/BluetoothMock.h
@@ -0,0 +1,33 @@
+/*
+ * Copyright (C) 2024 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at:
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ */
+#include <aidl/android/hardware/audio/core/BnBluetooth.h>
+
+using namespace aidl::android::hardware::audio::core;
+
+namespace aidl::android::hardware::audio::core {
+
+class BluetoothMock : public BnBluetooth {
+ public:
+ ndk::ScopedAStatus setScoConfig(const IBluetooth::ScoConfig&, IBluetooth::ScoConfig*) override {
+ return ndk::ScopedAStatus::ok();
+ }
+ ndk::ScopedAStatus setHfpConfig(const IBluetooth::HfpConfig&, IBluetooth::HfpConfig*) override {
+ return ndk::ScopedAStatus::ok();
+ }
+};
+
+} // namespace aidl::android::hardware::audio::core
diff --git a/media/libaudioclient/aidl/fuzzer/libaudiomockhal/include/core-mock/ConfigMock.h b/media/libaudioclient/aidl/fuzzer/libaudiomockhal/include/core-mock/ConfigMock.h
new file mode 100644
index 0000000..f4031b5
--- /dev/null
+++ b/media/libaudioclient/aidl/fuzzer/libaudiomockhal/include/core-mock/ConfigMock.h
@@ -0,0 +1,34 @@
+/*
+ * Copyright (C) 2024 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at:
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ */
+#include <aidl/android/hardware/audio/core/BnConfig.h>
+
+using namespace aidl::android::media::audio::common;
+using namespace aidl::android::hardware::audio::core;
+
+namespace aidl::android::hardware::audio::core {
+
+class ConfigMock : public BnConfig {
+ private:
+ ndk::ScopedAStatus getSurroundSoundConfig(SurroundSoundConfig*) override {
+ return ndk::ScopedAStatus::ok();
+ }
+ ndk::ScopedAStatus getEngineConfig(AudioHalEngineConfig*) override {
+ return ndk::ScopedAStatus::ok();
+ }
+};
+
+} // namespace aidl::android::hardware::audio::core
diff --git a/media/libaudioclient/aidl/fuzzer/libaudiomockhal/include/core-mock/ModuleMock.h b/media/libaudioclient/aidl/fuzzer/libaudiomockhal/include/core-mock/ModuleMock.h
new file mode 100644
index 0000000..d49203d
--- /dev/null
+++ b/media/libaudioclient/aidl/fuzzer/libaudiomockhal/include/core-mock/ModuleMock.h
@@ -0,0 +1,133 @@
+/*
+ * Copyright (C) 2024 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at:
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ */
+#include <aidl/android/hardware/audio/core/BnModule.h>
+
+using namespace aidl::android::media::audio::common;
+using namespace aidl::android::hardware::audio::core;
+using namespace aidl::android::hardware::audio::core::sounddose;
+using namespace aidl::android::hardware::audio::effect;
+
+namespace aidl::android::hardware::audio::core {
+
+class ModuleMock : public BnModule {
+ public:
+ ModuleMock();
+
+ private:
+ ndk::ScopedAStatus getTelephony(std::shared_ptr<ITelephony>*) override;
+ ndk::ScopedAStatus getBluetooth(std::shared_ptr<IBluetooth>*) override;
+ ndk::ScopedAStatus getBluetoothA2dp(std::shared_ptr<IBluetoothA2dp>*) override;
+ ndk::ScopedAStatus getBluetoothLe(std::shared_ptr<IBluetoothLe>*) override;
+ ndk::ScopedAStatus openInputStream(const OpenInputStreamArguments&,
+ OpenInputStreamReturn*) override;
+ ndk::ScopedAStatus openOutputStream(const OpenOutputStreamArguments&,
+ OpenOutputStreamReturn*) override;
+ ndk::ScopedAStatus getMasterMute(bool*) override;
+ ndk::ScopedAStatus setMasterMute(bool) override;
+ ndk::ScopedAStatus getMasterVolume(float*) override;
+ ndk::ScopedAStatus setMasterVolume(float) override;
+ ndk::ScopedAStatus getMicMute(bool*) override;
+ ndk::ScopedAStatus setMicMute(bool) override;
+ ndk::ScopedAStatus getSoundDose(std::shared_ptr<ISoundDose>*) override;
+ ndk::ScopedAStatus getMmapPolicyInfos(AudioMMapPolicyType,
+ std::vector<AudioMMapPolicyInfo>*) override;
+ ndk::ScopedAStatus supportsVariableLatency(bool*) override;
+
+ ndk::ScopedAStatus setModuleDebug(const ModuleDebug&) override {
+ return ndk::ScopedAStatus::ok();
+ }
+ ndk::ScopedAStatus connectExternalDevice(const AudioPort&, AudioPort*) override {
+ return ndk::ScopedAStatus::ok();
+ }
+ ndk::ScopedAStatus disconnectExternalDevice(int32_t) override {
+ return ndk::ScopedAStatus::ok();
+ }
+ ndk::ScopedAStatus getAudioPatches(std::vector<AudioPatch>*) override {
+ return ndk::ScopedAStatus::ok();
+ }
+ ndk::ScopedAStatus getAudioPort(int32_t, AudioPort*) override {
+ return ndk::ScopedAStatus::ok();
+ }
+ ndk::ScopedAStatus getAudioPortConfigs(std::vector<AudioPortConfig>*) override {
+ return ndk::ScopedAStatus::ok();
+ }
+ ndk::ScopedAStatus getAudioPorts(std::vector<AudioPort>*) override {
+ return ndk::ScopedAStatus::ok();
+ }
+ ndk::ScopedAStatus getAudioRoutes(std::vector<AudioRoute>*) override {
+ return ndk::ScopedAStatus::ok();
+ }
+ ndk::ScopedAStatus getAudioRoutesForAudioPort(int32_t, std::vector<AudioRoute>*) override {
+ return ndk::ScopedAStatus::ok();
+ }
+ ndk::ScopedAStatus getSupportedPlaybackRateFactors(SupportedPlaybackRateFactors*) override {
+ return ndk::ScopedAStatus::ok();
+ }
+ ndk::ScopedAStatus setAudioPatch(const AudioPatch&, AudioPatch*) override {
+ return ndk::ScopedAStatus::ok();
+ }
+ ndk::ScopedAStatus setAudioPortConfig(const AudioPortConfig&, AudioPortConfig*,
+ bool*) override {
+ return ndk::ScopedAStatus::ok();
+ }
+ ndk::ScopedAStatus resetAudioPatch(int32_t) override { return ndk::ScopedAStatus::ok(); }
+ ndk::ScopedAStatus resetAudioPortConfig(int32_t) override { return ndk::ScopedAStatus::ok(); }
+ ndk::ScopedAStatus getMicrophones(std::vector<MicrophoneInfo>*) override {
+ return ndk::ScopedAStatus::ok();
+ }
+ ndk::ScopedAStatus updateAudioMode(AudioMode) override { return ndk::ScopedAStatus::ok(); }
+ ndk::ScopedAStatus updateScreenRotation(ScreenRotation) override {
+ return ndk::ScopedAStatus::ok();
+ }
+ ndk::ScopedAStatus updateScreenState(bool) override { return ndk::ScopedAStatus::ok(); }
+ ndk::ScopedAStatus generateHwAvSyncId(int32_t*) override {
+ return ndk::ScopedAStatus::fromExceptionCode(EX_UNSUPPORTED_OPERATION);
+ }
+ ndk::ScopedAStatus getVendorParameters(const std::vector<std::string>&,
+ std::vector<VendorParameter>*) override {
+ return ndk::ScopedAStatus::fromExceptionCode(EX_UNSUPPORTED_OPERATION);
+ }
+ ndk::ScopedAStatus setVendorParameters(const std::vector<VendorParameter>&, bool) override {
+ return ndk::ScopedAStatus::fromExceptionCode(EX_UNSUPPORTED_OPERATION);
+ }
+ ndk::ScopedAStatus addDeviceEffect(int32_t, const std::shared_ptr<IEffect>&) override {
+ return ndk::ScopedAStatus::ok();
+ }
+ ndk::ScopedAStatus removeDeviceEffect(int32_t, const std::shared_ptr<IEffect>&) override {
+ return ndk::ScopedAStatus::ok();
+ }
+ ndk::ScopedAStatus getAAudioMixerBurstCount(int32_t*) override {
+ return ndk::ScopedAStatus::fromExceptionCode(EX_UNSUPPORTED_OPERATION);
+ }
+ ndk::ScopedAStatus getAAudioHardwareBurstMinUsec(int32_t*) override {
+ return ndk::ScopedAStatus::fromExceptionCode(EX_UNSUPPORTED_OPERATION);
+ }
+ ndk::ScopedAStatus prepareToDisconnectExternalDevice(int32_t) override {
+ return ndk::ScopedAStatus::ok();
+ }
+
+ AudioPortExt createDeviceExt(AudioDeviceType devType, int32_t flags);
+ AudioPort createPort(int32_t id, const std::string& name, int32_t flags, bool isInput,
+ const AudioPortExt& ext);
+
+ bool mMasterMute;
+ float mMasterVolume;
+ bool mMicMute;
+ std::vector<AudioPort> mPorts;
+};
+
+} // namespace aidl::android::hardware::audio::core
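
ModuleMock is the entry point of the mock HAL: the getTelephony/getBluetooth*/getSoundDose
overrides declared above are expected to hand out the sibling mocks in this directory (their
definitions live in ModuleMock.cpp, elsewhere in this change). A hypothetical fuzzer-side
sketch, assuming those definitions return the corresponding mocks:

    std::shared_ptr<IModule> module = ndk::SharedRefBase::make<ModuleMock>();
    std::shared_ptr<IBluetoothA2dp> a2dp;
    module->getBluetoothA2dp(&a2dp);     // presumably a BluetoothA2dpMock
    bool enabled = true;
    a2dp->setEnabled(false);
    a2dp->isEnabled(&enabled);           // enabled is now false
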
diff --git a/media/libaudioclient/aidl/fuzzer/libaudiomockhal/include/core-mock/StreamCommonMock.h b/media/libaudioclient/aidl/fuzzer/libaudiomockhal/include/core-mock/StreamCommonMock.h
new file mode 100644
index 0000000..25d53f8
--- /dev/null
+++ b/media/libaudioclient/aidl/fuzzer/libaudiomockhal/include/core-mock/StreamCommonMock.h
@@ -0,0 +1,43 @@
+/*
+ * Copyright (C) 2024 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at:
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ */
+#include <aidl/android/hardware/audio/core/BnStreamCommon.h>
+
+using namespace aidl::android::hardware::audio::core;
+using namespace aidl::android::hardware::audio::effect;
+
+namespace aidl::android::hardware::audio::core {
+
+class StreamCommonMock : public BnStreamCommon {
+ ndk::ScopedAStatus close() override { return ndk::ScopedAStatus::ok(); }
+ ndk::ScopedAStatus prepareToClose() override { return ndk::ScopedAStatus::ok(); }
+ ndk::ScopedAStatus updateHwAvSyncId(int32_t) override { return ndk::ScopedAStatus::ok(); }
+ ndk::ScopedAStatus getVendorParameters(const std::vector<std::string>&,
+ std::vector<VendorParameter>*) override {
+ return ndk::ScopedAStatus::fromExceptionCode(EX_UNSUPPORTED_OPERATION);
+ }
+ ndk::ScopedAStatus setVendorParameters(const std::vector<VendorParameter>&, bool) override {
+ return ndk::ScopedAStatus::fromExceptionCode(EX_UNSUPPORTED_OPERATION);
+ }
+ ndk::ScopedAStatus addEffect(const std::shared_ptr<IEffect>&) override {
+ return ndk::ScopedAStatus::ok();
+ }
+ ndk::ScopedAStatus removeEffect(const std::shared_ptr<IEffect>&) override {
+ return ndk::ScopedAStatus::ok();
+ }
+};
+
+} // namespace aidl::android::hardware::audio::core
diff --git a/media/libaudioclient/aidl/fuzzer/libaudiomockhal/include/core-mock/StreamInMock.h b/media/libaudioclient/aidl/fuzzer/libaudiomockhal/include/core-mock/StreamInMock.h
new file mode 100644
index 0000000..5deab5b
--- /dev/null
+++ b/media/libaudioclient/aidl/fuzzer/libaudiomockhal/include/core-mock/StreamInMock.h
@@ -0,0 +1,49 @@
+/*
+ * Copyright (C) 2024 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at:
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ */
+#include <aidl/android/hardware/audio/core/BnStreamIn.h>
+
+using namespace aidl::android::hardware::audio::common;
+using namespace aidl::android::hardware::audio::core;
+using namespace aidl::android::media::audio::common;
+
+namespace aidl::android::hardware::audio::core {
+
+class StreamInMock : public BnStreamIn {
+ ndk::ScopedAStatus getStreamCommon(std::shared_ptr<IStreamCommon>* _aidl_return) override;
+ ndk::ScopedAStatus getMicrophoneDirection(
+ IStreamIn::MicrophoneDirection* _aidl_return) override;
+ ndk::ScopedAStatus setMicrophoneDirection(IStreamIn::MicrophoneDirection in_direction) override;
+ ndk::ScopedAStatus getMicrophoneFieldDimension(float* _aidl_return) override;
+ ndk::ScopedAStatus setMicrophoneFieldDimension(float in_zoom) override;
+ ndk::ScopedAStatus getHwGain(std::vector<float>* _aidl_return) override;
+ ndk::ScopedAStatus setHwGain(const std::vector<float>& in_channelGains) override;
+
+ ndk::ScopedAStatus getActiveMicrophones(std::vector<MicrophoneDynamicInfo>*) override {
+ return ndk::ScopedAStatus::ok();
+ }
+ ndk::ScopedAStatus updateMetadata(const SinkMetadata&) override {
+ return ndk::ScopedAStatus::ok();
+ }
+
+ private:
+    IStreamIn::MicrophoneDirection mMicrophoneDirection{};
+    float mMicrophoneFieldDimension = 0.0f;
+ std::vector<float> mHwGains;
+ std::shared_ptr<IStreamCommon> mStreamCommon;
+};
+
+} // namespace aidl::android::hardware::audio::core
diff --git a/media/libaudioclient/aidl/fuzzer/libaudiomockhal/include/core-mock/StreamOutMock.h b/media/libaudioclient/aidl/fuzzer/libaudiomockhal/include/core-mock/StreamOutMock.h
new file mode 100644
index 0000000..4d12815
--- /dev/null
+++ b/media/libaudioclient/aidl/fuzzer/libaudiomockhal/include/core-mock/StreamOutMock.h
@@ -0,0 +1,60 @@
+/*
+ * Copyright (C) 2024 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at:
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ */
+#include <aidl/android/hardware/audio/core/BnStreamOut.h>
+
+using namespace aidl::android::hardware::audio::common;
+using namespace aidl::android::hardware::audio::core;
+using namespace aidl::android::media::audio::common;
+
+namespace aidl::android::hardware::audio::core {
+
+class StreamOutMock : public BnStreamOut {
+ ndk::ScopedAStatus getStreamCommon(std::shared_ptr<IStreamCommon>* _aidl_return) override;
+ ndk::ScopedAStatus getHwVolume(std::vector<float>* _aidl_return) override;
+ ndk::ScopedAStatus setHwVolume(const std::vector<float>& in_channelVolumes) override;
+ ndk::ScopedAStatus getAudioDescriptionMixLevel(float* _aidl_return) override;
+ ndk::ScopedAStatus setAudioDescriptionMixLevel(float in_leveldB) override;
+ ndk::ScopedAStatus getDualMonoMode(AudioDualMonoMode* _aidl_return) override;
+ ndk::ScopedAStatus setDualMonoMode(AudioDualMonoMode in_mode) override;
+ ndk::ScopedAStatus getPlaybackRateParameters(AudioPlaybackRate* _aidl_return) override;
+ ndk::ScopedAStatus setPlaybackRateParameters(const AudioPlaybackRate& in_playbackRate) override;
+
+ ndk::ScopedAStatus updateMetadata(const SourceMetadata&) override {
+ return ndk::ScopedAStatus::ok();
+ }
+ ndk::ScopedAStatus updateOffloadMetadata(const AudioOffloadMetadata&) override {
+ return ndk::ScopedAStatus::ok();
+ }
+ ndk::ScopedAStatus getRecommendedLatencyModes(std::vector<AudioLatencyMode>*) override {
+ return ndk::ScopedAStatus::fromExceptionCode(EX_UNSUPPORTED_OPERATION);
+ }
+ ndk::ScopedAStatus setLatencyMode(AudioLatencyMode) override {
+ return ndk::ScopedAStatus::fromExceptionCode(EX_UNSUPPORTED_OPERATION);
+ }
+ ndk::ScopedAStatus selectPresentation(int32_t, int32_t) override {
+ return ndk::ScopedAStatus::fromExceptionCode(EX_UNSUPPORTED_OPERATION);
+ }
+
+ private:
+ AudioPlaybackRate mPlaybackRateParameters;
+    AudioDualMonoMode mDualMonoMode{};
+    float mAudioDescriptionMixLeveldB = 0.0f;
+ std::vector<float> mHwVolume;
+ std::shared_ptr<IStreamCommon> mStreamCommon;
+};
+
+} // namespace aidl::android::hardware::audio::core
diff --git a/media/libaudioclient/aidl/fuzzer/libaudiomockhal/include/core-mock/TelephonyMock.h b/media/libaudioclient/aidl/fuzzer/libaudiomockhal/include/core-mock/TelephonyMock.h
new file mode 100644
index 0000000..d56dee6
--- /dev/null
+++ b/media/libaudioclient/aidl/fuzzer/libaudiomockhal/include/core-mock/TelephonyMock.h
@@ -0,0 +1,36 @@
+/*
+ * Copyright (C) 2024 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at:
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ */
+#include <aidl/android/hardware/audio/core/BnTelephony.h>
+
+using namespace aidl::android::hardware::audio::core;
+using namespace aidl::android::media::audio::common;
+
+namespace aidl::android::hardware::audio::core {
+
+class TelephonyMock : public BnTelephony {
+ public:
+ ndk::ScopedAStatus getSupportedAudioModes(std::vector<AudioMode>*) override {
+ return ndk::ScopedAStatus::ok();
+ }
+ ndk::ScopedAStatus switchAudioMode(AudioMode) override { return ndk::ScopedAStatus::ok(); }
+ ndk::ScopedAStatus setTelecomConfig(const ITelephony::TelecomConfig&,
+ ITelephony::TelecomConfig*) override {
+ return ndk::ScopedAStatus::ok();
+ }
+};
+
+} // namespace aidl::android::hardware::audio::core
diff --git a/media/libaudioclient/aidl/fuzzer/libaudiomockhal/include/effect-mock/EffectMock.h b/media/libaudioclient/aidl/fuzzer/libaudiomockhal/include/effect-mock/EffectMock.h
new file mode 100644
index 0000000..db20cd8
--- /dev/null
+++ b/media/libaudioclient/aidl/fuzzer/libaudiomockhal/include/effect-mock/EffectMock.h
@@ -0,0 +1,42 @@
+/*
+ * Copyright (C) 2024 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at:
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ */
+#include <aidl/android/hardware/audio/effect/BnEffect.h>
+
+using namespace aidl::android::hardware::audio::effect;
+
+namespace aidl::android::hardware::audio::effect {
+
+class EffectMock : public BnEffect {
+ public:
+ ndk::ScopedAStatus open(const Parameter::Common&, const std::optional<Parameter::Specific>&,
+ IEffect::OpenEffectReturn*) override {
+ return ndk::ScopedAStatus::ok();
+ }
+ ndk::ScopedAStatus close() override { return ndk::ScopedAStatus::ok(); }
+ ndk::ScopedAStatus command(CommandId) override { return ndk::ScopedAStatus::ok(); }
+ ndk::ScopedAStatus getState(State*) override { return ndk::ScopedAStatus::ok(); }
+ ndk::ScopedAStatus getDescriptor(Descriptor*) override { return ndk::ScopedAStatus::ok(); }
+ ndk::ScopedAStatus reopen(IEffect::OpenEffectReturn*) override {
+ return ndk::ScopedAStatus::ok();
+ }
+ ndk::ScopedAStatus setParameter(const Parameter&) override { return ndk::ScopedAStatus::ok(); }
+ ndk::ScopedAStatus getParameter(const Parameter::Id&, Parameter*) override {
+ return ndk::ScopedAStatus::ok();
+ }
+};
+
+} // namespace aidl::android::hardware::audio::effect
diff --git a/media/libaudioclient/aidl/fuzzer/libaudiomockhal/include/effect-mock/FactoryMock.h b/media/libaudioclient/aidl/fuzzer/libaudiomockhal/include/effect-mock/FactoryMock.h
new file mode 100644
index 0000000..57d58d5
--- /dev/null
+++ b/media/libaudioclient/aidl/fuzzer/libaudiomockhal/include/effect-mock/FactoryMock.h
@@ -0,0 +1,42 @@
+/*
+ * Copyright (C) 2024 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at:
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ */
+#include <aidl/android/hardware/audio/effect/BnFactory.h>
+
+using namespace aidl::android::media::audio::common;
+using namespace aidl::android::hardware::audio::effect;
+
+namespace aidl::android::hardware::audio::effect {
+
+class FactoryMock : public BnFactory {
+ ndk::ScopedAStatus queryEffects(const std::optional<AudioUuid>&,
+ const std::optional<AudioUuid>&,
+ const std::optional<AudioUuid>&,
+ std::vector<Descriptor>*) override {
+ return ndk::ScopedAStatus::ok();
+ }
+ ndk::ScopedAStatus queryProcessing(const std::optional<Processing::Type>&,
+ std::vector<Processing>*) override {
+ return ndk::ScopedAStatus::ok();
+ }
+ ndk::ScopedAStatus destroyEffect(const std::shared_ptr<IEffect>&) override {
+ return ndk::ScopedAStatus::ok();
+ }
+
+ ndk::ScopedAStatus createEffect(const AudioUuid&, std::shared_ptr<IEffect>*) override;
+};
+
+} // namespace aidl::android::hardware::audio::effect
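
FactoryMock defines every method inline except createEffect, whose definition lives in
FactoryMock.cpp (not shown in this section). A presumed shape of that definition, given the
EffectMock class above; this is a guess for illustration, not the patch's actual code:

    ndk::ScopedAStatus FactoryMock::createEffect(const AudioUuid&,
                                                 std::shared_ptr<IEffect>* _aidl_return) {
        *_aidl_return = ndk::SharedRefBase::make<EffectMock>();
        return ndk::ScopedAStatus::ok();
    }
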
diff --git a/media/libaudioclient/aidl/fuzzer/libaudiomockhal/include/sounddose-mock/SoundDoseMock.h b/media/libaudioclient/aidl/fuzzer/libaudiomockhal/include/sounddose-mock/SoundDoseMock.h
new file mode 100644
index 0000000..5557b10
--- /dev/null
+++ b/media/libaudioclient/aidl/fuzzer/libaudiomockhal/include/sounddose-mock/SoundDoseMock.h
@@ -0,0 +1,41 @@
+/*
+ * Copyright (C) 2024 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at:
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ */
+#include <aidl/android/hardware/audio/core/sounddose/BnSoundDose.h>
+
+using namespace aidl::android::hardware::audio::core::sounddose;
+
+namespace aidl::android::hardware::audio::core::sounddose {
+
+class SoundDoseMock : public BnSoundDose {
+ ndk::ScopedAStatus setOutputRs2UpperBound(float in_rs2ValueDbA) override {
+ mOutputRs2UpperBound = in_rs2ValueDbA;
+ return ndk::ScopedAStatus::ok();
+ }
+ ndk::ScopedAStatus getOutputRs2UpperBound(float* _aidl_return) override {
+ *_aidl_return = mOutputRs2UpperBound;
+ return ndk::ScopedAStatus::ok();
+ }
+ ndk::ScopedAStatus registerSoundDoseCallback(
+ const std::shared_ptr<ISoundDose::IHalSoundDoseCallback>&) override {
+ return ndk::ScopedAStatus::ok();
+ }
+
+ private:
+    float mOutputRs2UpperBound = 0.0f;
+};
+
+} // namespace aidl::android::hardware::audio::core::sounddose
diff --git a/media/libaudioclient/fuzzer/Android.bp b/media/libaudioclient/fuzzer/Android.bp
index f2ad91c..a95c700 100644
--- a/media/libaudioclient/fuzzer/Android.bp
+++ b/media/libaudioclient/fuzzer/Android.bp
@@ -42,9 +42,9 @@
"libcutils",
"libjsoncpp",
"liblog",
+ "libmedia_helper",
"libmediametrics",
"libmediametricsservice",
- "libmedia_helper",
"libprocessgroup",
"shared-file-region-aidl-cpp",
],
@@ -56,8 +56,9 @@
"audiopolicy-types-aidl-cpp",
"av-types-aidl-cpp",
"capture_state_listener-aidl-cpp",
- "libaudioclient_aidl_conversion",
+ "framework-permission-aidl-cpp",
"libaudio_aidl_conversion_common_cpp",
+ "libaudioclient_aidl_conversion",
"libaudioflinger",
"libaudiofoundation",
"libaudiomanager",
@@ -70,7 +71,6 @@
"libutils",
"libxml2",
"mediametricsservice-aidl-cpp",
- "framework-permission-aidl-cpp",
],
header_libs: [
"libaudiofoundation_headers",
diff --git a/media/libaudioclient/include/media/AudioPolicy.h b/media/libaudioclient/include/media/AudioPolicy.h
index 9e4ae54..b190fba 100644
--- a/media/libaudioclient/include/media/AudioPolicy.h
+++ b/media/libaudioclient/include/media/AudioPolicy.h
@@ -129,6 +129,7 @@
String8 mDeviceAddress;
uint32_t mCbFlags; // flags indicating which callbacks to use, see kCbFlag*
sp<IBinder> mToken;
+ uint32_t mVirtualDeviceId;
/** Ignore the AUDIO_FLAG_NO_MEDIA_PROJECTION */
bool mAllowPrivilegedMediaPlaybackCapture = false;
/** Indicates if the caller can capture voice communication output */
diff --git a/media/libaudioclient/include/media/AudioRecord.h b/media/libaudioclient/include/media/AudioRecord.h
index 00f2c7a..d4479ef 100644
--- a/media/libaudioclient/include/media/AudioRecord.h
+++ b/media/libaudioclient/include/media/AudioRecord.h
@@ -681,7 +681,7 @@
// Current client state: false = stopped, true = active. Protected by mLock. If more states
// are added, consider changing this to enum State { ... } mState as in AudioTrack.
- bool mActive;
+ bool mActive = false;
// for client callback handler
@@ -708,7 +708,7 @@
Modulo<uint32_t> mNewPosition; // in frames
uint32_t mUpdatePeriod; // in frames, zero means no EVENT_NEW_POS
- status_t mStatus;
+ status_t mStatus = NO_INIT;
android::content::AttributionSourceState mClientAttributionSource; // Owner's attribution source
@@ -736,8 +736,8 @@
// held to read or write those bits reliably.
audio_input_flags_t mOrigFlags; // as specified in constructor or set(), const
- audio_session_t mSessionId;
- audio_port_handle_t mPortId; // Id from Audio Policy Manager
+ audio_session_t mSessionId = AUDIO_SESSION_ALLOCATE;
+ audio_port_handle_t mPortId = AUDIO_PORT_HANDLE_NONE;
/**
* mLogSessionId is a string identifying this AudioRecord for the metrics service.
@@ -756,9 +756,9 @@
sp<IMemory> mBufferMemory;
audio_io_handle_t mInput = AUDIO_IO_HANDLE_NONE; // from AudioSystem::getInputforAttr()
- int mPreviousPriority; // before start()
- SchedPolicy mPreviousSchedulingGroup;
- bool mAwaitBoost; // thread should wait for priority boost before running
+ int mPreviousPriority = ANDROID_PRIORITY_NORMAL; // before start()
+ SchedPolicy mPreviousSchedulingGroup = SP_DEFAULT;
+ bool mAwaitBoost = false; // thread should wait for priority boost before running
// The proxy should only be referenced while a lock is held because the proxy isn't
// multi-thread safe.
@@ -799,14 +799,17 @@
// For Device Selection API
// a value of AUDIO_PORT_HANDLE_NONE indicated default (AudioPolicyManager) routing.
- audio_port_handle_t mSelectedDeviceId; // Device requested by the application.
- audio_port_handle_t mRoutedDeviceId; // Device actually selected by audio policy manager:
- // May not match the app selection depending on other
- // activity and connected devices
+
+ // Device requested by the application.
+ audio_port_handle_t mSelectedDeviceId = AUDIO_PORT_HANDLE_NONE;
+    // Device actually selected by AudioPolicyManager; this may not match the app
+    // selection, depending on other activity and connected devices.
+ audio_port_handle_t mRoutedDeviceId = AUDIO_PORT_HANDLE_NONE;
+
wp<AudioSystem::AudioDeviceCallback> mDeviceCallback;
- audio_microphone_direction_t mSelectedMicDirection;
- float mSelectedMicFieldDimension;
+ audio_microphone_direction_t mSelectedMicDirection = MIC_DIRECTION_UNSPECIFIED;
+ float mSelectedMicFieldDimension = MIC_FIELD_DIMENSION_DEFAULT;
int32_t mMaxSharedAudioHistoryMs = 0;
std::string mSharedAudioPackageName = {};
diff --git a/media/libaudioclient/include/media/AudioSystem.h b/media/libaudioclient/include/media/AudioSystem.h
index 338534d..5c9a7c6 100644
--- a/media/libaudioclient/include/media/AudioSystem.h
+++ b/media/libaudioclient/include/media/AudioSystem.h
@@ -187,7 +187,10 @@
// helper function to obtain AudioFlinger service handle
static sp<IAudioFlinger> get_audio_flinger();
- static sp<IAudioFlinger> get_audio_flinger_for_fuzzer();
+
+    // Function to disable creation of the thread pool (used for testing).
+ // This should be called before get_audio_flinger() or get_audio_policy_service().
+ static void disableThreadPool();
static float linearToLog(int volume);
static int logToLinear(float volume);
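
A hypothetical test or fuzzer setup following the comment above; only the ordering matters,
since the thread-pool decision is taken on the first service lookup:

    AudioSystem::disableThreadPool();                        // must precede the first lookup
    sp<IAudioFlinger> af = AudioSystem::get_audio_flinger(); // no thread pool is spawned
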
diff --git a/media/libaudioclient/include/media/AudioTrack.h b/media/libaudioclient/include/media/AudioTrack.h
index 19780ae..3a001a4 100644
--- a/media/libaudioclient/include/media/AudioTrack.h
+++ b/media/libaudioclient/include/media/AudioTrack.h
@@ -257,9 +257,7 @@
/* Constructs an uninitialized AudioTrack. No connection with
* AudioFlinger takes place. Use set() after this.
*/
- AudioTrack();
-
- AudioTrack(const AttributionSourceState& attributionSourceState);
+ explicit AudioTrack(const AttributionSourceState& attributionSourceState = {});
/* Creates an AudioTrack object and registers it with AudioFlinger.
* Once created, the track needs to be started before it can be used.
@@ -1312,11 +1310,11 @@
sp<IMemory> mSharedBuffer;
transfer_type mTransfer;
audio_offload_info_t mOffloadInfoCopy;
- audio_attributes_t mAttributes;
+ audio_attributes_t mAttributes = AUDIO_ATTRIBUTES_INITIALIZER;
size_t mFrameSize; // frame size in bytes
- status_t mStatus;
+ status_t mStatus = NO_INIT;
// can change dynamically when IAudioTrack invalidated
uint32_t mLatency; // in ms
@@ -1329,7 +1327,7 @@
STATE_PAUSED_STOPPING,
STATE_FLUSHED,
STATE_STOPPING,
- } mState;
+ } mState = STATE_STOPPED;
static constexpr const char *stateToString(State state)
{
@@ -1459,8 +1457,8 @@
mutable Mutex mLock;
- int mPreviousPriority; // before start()
- SchedPolicy mPreviousSchedulingGroup;
+ int mPreviousPriority = ANDROID_PRIORITY_NORMAL; // before start()
+ SchedPolicy mPreviousSchedulingGroup = SP_DEFAULT;
bool mAwaitBoost; // thread should wait for priority boost before running
// The proxy should only be referenced while a lock is held because the proxy isn't
@@ -1472,14 +1470,17 @@
sp<AudioTrackClientProxy> mProxy; // primary owner of the memory
bool mInUnderrun; // whether track is currently in underrun state
- uint32_t mPausedPosition;
+ uint32_t mPausedPosition = 0;
// For Device Selection API
// a value of AUDIO_PORT_HANDLE_NONE indicated default (AudioPolicyManager) routing.
- audio_port_handle_t mSelectedDeviceId; // Device requested by the application.
- audio_port_handle_t mRoutedDeviceId; // Device actually selected by audio policy manager:
- // May not match the app selection depending on other
- // activity and connected devices.
+
+ // Device requested by the application.
+ audio_port_handle_t mSelectedDeviceId = AUDIO_PORT_HANDLE_NONE;
+
+    // Device actually selected by AudioPolicyManager; this may not match the app
+    // selection, depending on other activity and connected devices.
+ audio_port_handle_t mRoutedDeviceId = AUDIO_PORT_HANDLE_NONE;
sp<media::VolumeHandler> mVolumeHandler;
@@ -1537,7 +1538,7 @@
Mutex mAudioTrackCbLock;
wp<media::IAudioTrackCallback> mCallback;
};
- sp<AudioTrackCallback> mAudioTrackCallback;
+ sp<AudioTrackCallback> mAudioTrackCallback = sp<AudioTrackCallback>::make();
};
}; // namespace android
diff --git a/media/libaudioclient/include/media/EffectClientAsyncProxy.h b/media/libaudioclient/include/media/EffectClientAsyncProxy.h
new file mode 100644
index 0000000..e7d6d80
--- /dev/null
+++ b/media/libaudioclient/include/media/EffectClientAsyncProxy.h
@@ -0,0 +1,111 @@
+/*
+ * Copyright (C) 2024 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#pragma once
+
+#include <android/media/BnEffectClient.h>
+#include <audio_utils/CommandThread.h>
+
+namespace android::media {
+
+class EffectClientAsyncProxy : public IEffectClient {
+public:
+
+ /**
+ * Call this factory method to interpose a worker thread when a binder
+ * callback interface is invoked in-proc.
+ */
+ static sp<IEffectClient> makeIfNeeded(const sp<IEffectClient>& effectClient) {
+ if (isLocalBinder(effectClient)) {
+ return sp<EffectClientAsyncProxy>::make(effectClient);
+ }
+ return effectClient;
+ }
+
+ explicit EffectClientAsyncProxy(const sp<IEffectClient>& effectClient)
+ : mEffectClient(effectClient) {}
+
+ ::android::IBinder* onAsBinder() override {
+ return nullptr;
+ }
+
+ ::android::binder::Status controlStatusChanged(bool controlGranted) override {
+ getThread().add(__func__, [=, effectClient = mEffectClient]() {
+ effectClient->controlStatusChanged(controlGranted);
+ });
+ return ::android::binder::Status::fromStatusT(::android::NO_ERROR);
+ }
+
+ ::android::binder::Status enableStatusChanged(bool enabled) override {
+ getThread().add(__func__, [=, effectClient = mEffectClient]() {
+ effectClient->enableStatusChanged(enabled);
+ });
+ return ::android::binder::Status::fromStatusT(::android::NO_ERROR);
+ }
+
+ ::android::binder::Status commandExecuted(
+ int32_t cmdCode, const ::std::vector<uint8_t>& cmdData,
+ const ::std::vector<uint8_t>& replyData) override {
+ getThread().add(__func__, [=, effectClient = mEffectClient]() {
+ effectClient->commandExecuted(cmdCode, cmdData, replyData);
+ });
+ return ::android::binder::Status::fromStatusT(::android::NO_ERROR);
+ }
+
+ ::android::binder::Status framesProcessed(int32_t frames) override {
+ getThread().add(__func__, [=, effectClient = mEffectClient]() {
+ effectClient->framesProcessed(frames);
+ });
+ return ::android::binder::Status::fromStatusT(::android::NO_ERROR);
+ }
+
+ /**
+ * Returns true if the binder interface is local (in-proc).
+ *
+ * Move to a binder helper class?
+ */
+ static bool isLocalBinder(const sp<IInterface>& interface) {
+ const auto b = IInterface::asBinder(interface);
+ return b && b->localBinder();
+ }
+
+private:
+ const sp<IEffectClient> mEffectClient;
+
+ /**
+ * Returns the per-interface-descriptor CommandThread for in-proc binder transactions.
+ *
+ * Note: Remote RPC transactions to a given binder (kernel) node enter that node's
+ * async_todo list, which serializes all async operations to that binder node.
+ * Each transaction on the async_todo list must complete before the next one
+ * starts, even though there may be available threads in the process threadpool.
+ *
+ * For local transactions, we order all async requests entering
+ * the CommandThread. We do not maintain a threadpool, though a future implementation
+ * could use a shared ThreadPool.
+ *
+ * By using a static here, all in-proc binder interfaces made async with
+ * EffectClientAsyncProxy will get the same CommandThread.
+ *
+ * @return CommandThread to use.
+ */
+ static audio_utils::CommandThread& getThread() {
+ [[clang::no_destroy]] static audio_utils::CommandThread commandThread;
+ return commandThread;
+ }
+}; // class EffectClientAsyncProxy
+
+} // namespace android::media
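
A hypothetical caller-side sketch (MyEffectClient is an assumed in-process implementation,
not part of the patch): wrapping a local IEffectClient so that its notifications are queued on
the shared CommandThread instead of running synchronously on the notifier's thread; remote
clients pass through makeIfNeeded() unchanged.

    sp<media::IEffectClient> local = sp<MyEffectClient>::make();
    sp<media::IEffectClient> cb = media::EffectClientAsyncProxy::makeIfNeeded(local);
    cb->framesProcessed(256); // returns immediately; the callback runs on the CommandThread
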
diff --git a/media/libaudioclient/tests/Android.bp b/media/libaudioclient/tests/Android.bp
index b667c8d..055da5b 100644
--- a/media/libaudioclient/tests/Android.bp
+++ b/media/libaudioclient/tests/Android.bp
@@ -23,8 +23,8 @@
],
sanitize: {
misc_undefined: [
- "unsigned-integer-overflow",
"signed-integer-overflow",
+ "unsigned-integer-overflow",
],
},
}
@@ -32,8 +32,8 @@
cc_defaults {
name: "audio_aidl_conversion_test_defaults",
defaults: [
- "libaudioclient_tests_defaults",
"latest_android_media_audio_common_types_cpp_static",
+ "libaudioclient_tests_defaults",
],
static_libs: [
"audioclient-types-aidl-cpp",
@@ -110,9 +110,9 @@
"libcgrouprc",
"libdl",
"libmedia",
+ "libmedia_helper",
"libmediametrics",
"libmediautils",
- "libmedia_helper",
"libnblog",
"libprocessgroup",
"libshmemcompat",
diff --git a/media/libaudioclient/tests/audiorecord_tests.cpp b/media/libaudioclient/tests/audiorecord_tests.cpp
index 0bf2e82..be6c581 100644
--- a/media/libaudioclient/tests/audiorecord_tests.cpp
+++ b/media/libaudioclient/tests/audiorecord_tests.cpp
@@ -28,6 +28,25 @@
using namespace android;
+// Test that the basic constructor returns an object that doesn't crash
+// on stop() or destruction.
+
+TEST(AudioRecordTestBasic, EmptyAudioRecord) {
+ AttributionSourceState attributionSource;
+ attributionSource.packageName = "AudioRecordTest";
+ attributionSource.uid = VALUE_OR_FATAL(legacy2aidl_uid_t_int32_t(getuid()));
+ attributionSource.pid = VALUE_OR_FATAL(legacy2aidl_pid_t_int32_t(getpid()));
+ attributionSource.token = sp<BBinder>::make();
+ const auto ar = sp<AudioRecord>::make(attributionSource);
+
+ // test key commands on an unset AudioRecord.
+ EXPECT_EQ(NO_INIT, ar->initCheck());
+ EXPECT_EQ(true, ar->stopped());
+
+ // just don't crash.
+ ar->stop();
+}
+
class AudioRecordTest : public ::testing::Test {
public:
void SetUp() override {
diff --git a/media/libaudioclient/tests/audiotrack_tests.cpp b/media/libaudioclient/tests/audiotrack_tests.cpp
index 0282bd7..cb667a0 100644
--- a/media/libaudioclient/tests/audiotrack_tests.cpp
+++ b/media/libaudioclient/tests/audiotrack_tests.cpp
@@ -25,6 +25,24 @@
using namespace android;
+// Test that the basic constructor returns an object that doesn't crash
+// on stop() or destruction.
+
+TEST(AudioTrackTestBasic, EmptyAudioTrack) {
+ AttributionSourceState attributionSource;
+ attributionSource.packageName = "AudioTrackTest";
+ attributionSource.uid = VALUE_OR_FATAL(legacy2aidl_uid_t_int32_t(getuid()));
+ attributionSource.pid = VALUE_OR_FATAL(legacy2aidl_pid_t_int32_t(getpid()));
+ attributionSource.token = sp<BBinder>::make();
+ const auto at = sp<AudioTrack>::make(attributionSource);
+
+ EXPECT_EQ(NO_INIT, at->initCheck());
+ EXPECT_EQ(true, at->stopped());
+
+ // ensure we do not crash.
+ at->stop();
+}
+
TEST(AudioTrackTest, TestPlayTrack) {
const auto ap = sp<AudioPlayback>::make(44100 /* sampleRate */, AUDIO_FORMAT_PCM_16_BIT,
AUDIO_CHANNEL_OUT_STEREO, AUDIO_OUTPUT_FLAG_NONE,
diff --git a/media/libaudiofoundation/Android.bp b/media/libaudiofoundation/Android.bp
index c758fcd..576406d 100644
--- a/media/libaudiofoundation/Android.bp
+++ b/media/libaudiofoundation/Android.bp
@@ -87,7 +87,7 @@
],
cflags: [
- "-Werror",
"-Wall",
+ "-Werror",
],
}
diff --git a/media/libaudiofoundation/tests/Android.bp b/media/libaudiofoundation/tests/Android.bp
index 82c7db7..0ca50ab 100644
--- a/media/libaudiofoundation/tests/Android.bp
+++ b/media/libaudiofoundation/tests/Android.bp
@@ -22,8 +22,8 @@
static_libs: [
"audioclient-types-aidl-cpp",
- "libaudioclient_aidl_conversion",
"libaudio_aidl_conversion_common_cpp",
+ "libaudioclient_aidl_conversion",
"libaudiofoundation",
"libstagefright_foundation",
],
@@ -37,8 +37,8 @@
],
cflags: [
- "-Werror",
"-Wall",
+ "-Werror",
],
test_suites: ["device-tests"],
@@ -64,8 +64,8 @@
],
cflags: [
- "-Werror",
"-Wall",
+ "-Werror",
],
test_suites: ["device-tests"],
diff --git a/media/libaudiohal/Android.bp b/media/libaudiohal/Android.bp
index b8d0998..639c7aa 100644
--- a/media/libaudiohal/Android.bp
+++ b/media/libaudiohal/Android.bp
@@ -18,8 +18,8 @@
cflags: [
"-Wall",
- "-Wextra",
"-Werror",
+ "-Wextra",
],
required: [
@@ -44,7 +44,7 @@
"libbase_headers",
"liberror_headers",
"libmediautils_headers",
- ]
+ ],
}
cc_library_shared {
@@ -61,12 +61,12 @@
shared_libs: [
"libhidlbase",
- "libutils",
"liblog",
+ "libutils",
],
header_libs: [
- "libaudiohal_headers"
+ "libaudiohal_headers",
],
}
diff --git a/media/libaudiohal/impl/Android.bp b/media/libaudiohal/impl/Android.bp
index 876bc4b..1a6b949 100644
--- a/media/libaudiohal/impl/Android.bp
+++ b/media/libaudiohal/impl/Android.bp
@@ -49,8 +49,8 @@
cflags: [
"-Wall",
- "-Wextra",
"-Werror",
+ "-Wextra",
"-fvisibility=hidden",
],
shared_libs: [
@@ -211,10 +211,10 @@
"libbinder_ndk",
],
cflags: [
- "-DMAJOR_VERSION=7",
- "-DMINOR_VERSION=1",
"-DCOMMON_TYPES_MINOR_VERSION=0",
"-DCORE_TYPES_MINOR_VERSION=0",
+ "-DMAJOR_VERSION=7",
+ "-DMINOR_VERSION=1",
"-include common/all-versions/VersionMacro.h",
],
}
@@ -227,11 +227,11 @@
"latest_android_hardware_audio_core_sounddose_ndk_shared",
"latest_android_hardware_audio_effect_ndk_shared",
"latest_android_media_audio_common_types_ndk_shared",
+ "latest_av_audio_types_aidl_ndk_shared",
],
shared_libs: [
"android.hardware.common-V2-ndk",
"android.hardware.common.fmq-V1-ndk",
- "av-audio-types-aidl-V1-ndk",
"libaudio_aidl_conversion_common_cpp",
"libaudio_aidl_conversion_common_ndk",
"libaudio_aidl_conversion_common_ndk_cpp",
@@ -245,26 +245,26 @@
"libeffectsconfig_headers",
],
cflags: [
- "-Wall",
- "-Wextra",
- "-Werror",
- "-Wthread-safety",
"-DBACKEND_CPP_NDK",
+ "-Wall",
+ "-Werror",
+ "-Wextra",
+ "-Wthread-safety",
],
}
cc_library_shared {
name: "libaudiohal@aidl",
defaults: [
- "libaudiohal_default",
"libaudiohal_aidl_default",
+ "libaudiohal_default",
],
srcs: [
+ ":audio_effect_hal_aidl_src_files",
+ ":core_audio_hal_aidl_src_files",
"AidlUtils.cpp",
"DevicesFactoryHalEntry.cpp",
"EffectsFactoryHalEntry.cpp",
- ":audio_effect_hal_aidl_src_files",
- ":core_audio_hal_aidl_src_files",
],
}
@@ -282,8 +282,9 @@
filegroup {
name: "audio_effect_hal_aidl_src_files",
srcs: [
- "EffectConversionHelperAidl.cpp",
+ ":audio_effectproxy_src_files",
"EffectBufferHalAidl.cpp",
+ "EffectConversionHelperAidl.cpp",
"EffectHalAidl.cpp",
"EffectsFactoryHalAidl.cpp",
"effectsAidlConversion/AidlConversionAec.cpp",
@@ -302,7 +303,6 @@
"effectsAidlConversion/AidlConversionVendorExtension.cpp",
"effectsAidlConversion/AidlConversionVirtualizer.cpp",
"effectsAidlConversion/AidlConversionVisualizer.cpp",
- ":audio_effectproxy_src_files",
],
}
diff --git a/media/libaudiohal/impl/DeviceHalHidl.cpp b/media/libaudiohal/impl/DeviceHalHidl.cpp
index 478e0f0..ea4258c 100644
--- a/media/libaudiohal/impl/DeviceHalHidl.cpp
+++ b/media/libaudiohal/impl/DeviceHalHidl.cpp
@@ -752,10 +752,14 @@
// the attributes reported by `getParameters` API.
struct audio_port_v7 temp = *devicePort;
AudioProfileAttributesMultimap attrsFromDevice;
- status_t status = getAudioPort(&temp);
- if (status == NO_ERROR) {
- attrsFromDevice = createAudioProfilesAttrMap(temp.audio_profiles, 0 /*first*/,
- temp.num_audio_profiles);
+ bool supportsPatches;
+ if (supportsAudioPatches(&supportsPatches) == OK && supportsPatches) {
+ // The audio patches are supported since HAL 3.0, which is the same HAL version
+ // requirement for 'getAudioPort' API.
+ if (getAudioPort(&temp) == NO_ERROR) {
+ attrsFromDevice = createAudioProfilesAttrMap(temp.audio_profiles, 0 /*first*/,
+ temp.num_audio_profiles);
+ }
}
auto streamIt = mStreams.find(mixPort->ext.mix.handle);
if (streamIt == mStreams.end()) {
@@ -767,7 +771,7 @@
}
String8 formatsStr;
- status = getParametersFromStream(
+ status_t status = getParametersFromStream(
stream, AudioParameter::keyStreamSupportedFormats, nullptr /*extraParameters*/,
&formatsStr);
if (status != NO_ERROR) {
diff --git a/media/libaudiohal/impl/Hal2AidlMapper.cpp b/media/libaudiohal/impl/Hal2AidlMapper.cpp
index 729d4e1..453f9e2 100644
--- a/media/libaudiohal/impl/Hal2AidlMapper.cpp
+++ b/media/libaudiohal/impl/Hal2AidlMapper.cpp
@@ -458,7 +458,7 @@
*portConfig = it->second;
return OK;
}
- ALOGE("%s: could not find a configured device port for device %s",
+ ALOGE("%s: could not find a device port config for device %s",
__func__, device.toString().c_str());
return BAD_VALUE;
}
@@ -1037,6 +1037,13 @@
}
}
resetUnusedPortConfigs();
+ // Patches created by Hal2AidlMapper during stream creation and not "claimed"
+ // by the framework must not be surfaced to it.
+ for (auto& s : mStreams) {
+ if (auto it = releasedPatches.find(s.second.second); it != releasedPatches.end()) {
+ releasedPatches.erase(it);
+ }
+ }
mFwkPatches.merge(releasedPatches);
LOG_ALWAYS_FATAL_IF(!releasedPatches.empty(),
"mFwkPatches already contains some of released patches");
diff --git a/media/libaudiohal/impl/StreamHalAidl.cpp b/media/libaudiohal/impl/StreamHalAidl.cpp
index b110978..5aff5f2 100644
--- a/media/libaudiohal/impl/StreamHalAidl.cpp
+++ b/media/libaudiohal/impl/StreamHalAidl.cpp
@@ -332,8 +332,11 @@
return INVALID_OPERATION;
}
}
+ StreamContextAidl::DataMQ::Error fmqError = StreamContextAidl::DataMQ::Error::NONE;
+ std::string fmqErrorMsg;
if (!mIsInput) {
- bytes = std::min(bytes, mContext.getDataMQ()->availableToWrite());
+ bytes = std::min(bytes,
+ mContext.getDataMQ()->availableToWrite(&fmqError, &fmqErrorMsg));
}
StreamDescriptor::Command burst =
StreamDescriptor::Command::make<StreamDescriptor::Command::Tag::burst>(bytes);
@@ -350,12 +353,14 @@
LOG_ALWAYS_FATAL_IF(*transferred > bytes,
"%s: HAL module read %zu bytes, which exceeds requested count %zu",
__func__, *transferred, bytes);
- if (auto toRead = mContext.getDataMQ()->availableToRead();
+ if (auto toRead = mContext.getDataMQ()->availableToRead(&fmqError, &fmqErrorMsg);
toRead != 0 && !mContext.getDataMQ()->read(static_cast<int8_t*>(buffer), toRead)) {
ALOGE("%s: failed to read %zu bytes to data MQ", __func__, toRead);
return NOT_ENOUGH_DATA;
}
}
+ LOG_ALWAYS_FATAL_IF(fmqError != StreamContextAidl::DataMQ::Error::NONE,
+ "%s", fmqErrorMsg.c_str());
mStreamPowerLog.log(buffer, *transferred);
return OK;
}
@@ -375,24 +380,28 @@
if (mIsInput) {
return sendCommand(makeHalCommand<HalCommand::Tag::burst>(0), reply);
} else {
- if (mContext.isAsynchronous()) {
+ if (const auto state = getState(); state == StreamDescriptor::State::IDLE) {
// Handle pause-flush-resume sequence. 'flush' from PAUSED goes to
// IDLE. We move here from IDLE to ACTIVE (same as 'start' from PAUSED).
- const auto state = getState();
- if (state == StreamDescriptor::State::IDLE) {
- StreamDescriptor::Reply localReply{};
- StreamDescriptor::Reply* innerReply = reply ?: &localReply;
- RETURN_STATUS_IF_ERROR(
- sendCommand(makeHalCommand<HalCommand::Tag::burst>(0), innerReply));
- if (innerReply->state != StreamDescriptor::State::ACTIVE) {
- ALOGE("%s: unexpected stream state: %s (expected ACTIVE)",
- __func__, toString(innerReply->state).c_str());
- return INVALID_OPERATION;
- }
- return OK;
+ StreamDescriptor::Reply localReply{};
+ StreamDescriptor::Reply* innerReply = reply ?: &localReply;
+ RETURN_STATUS_IF_ERROR(
+ sendCommand(makeHalCommand<HalCommand::Tag::burst>(0), innerReply));
+ if (innerReply->state != StreamDescriptor::State::ACTIVE) {
+ ALOGE("%s: unexpected stream state: %s (expected ACTIVE)",
+ __func__, toString(innerReply->state).c_str());
+ return INVALID_OPERATION;
}
+ return OK;
+ } else if (state == StreamDescriptor::State::PAUSED ||
+ state == StreamDescriptor::State::TRANSFER_PAUSED ||
+ state == StreamDescriptor::State::DRAIN_PAUSED) {
+ return sendCommand(makeHalCommand<HalCommand::Tag::start>(), reply);
+ } else {
+ ALOGE("%s: unexpected stream state: %s (expected IDLE or one of *PAUSED states)",
+ __func__, toString(state).c_str());
+ return INVALID_OPERATION;
}
- return sendCommand(makeHalCommand<HalCommand::Tag::start>(), reply);
}
}
@@ -424,8 +433,10 @@
void StreamHalAidl::onAsyncTransferReady() {
if (auto state = getState(); state == StreamDescriptor::State::TRANSFERRING) {
- // Retrieve the current state together with position counters.
- updateCountersIfNeeded();
+ // Retrieve the current state together with position counters unconditionally
+ // to ensure that the state on our side gets updated.
+ sendCommand(makeHalCommand<HalCommand::Tag::getStatus>(),
+                nullptr, true /*safeFromNonWorkerThread*/);
} else {
ALOGW("%s: unexpected onTransferReady in the state %s", __func__, toString(state).c_str());
}
@@ -433,8 +444,10 @@
void StreamHalAidl::onAsyncDrainReady() {
if (auto state = getState(); state == StreamDescriptor::State::DRAINING) {
- // Retrieve the current state together with position counters.
- updateCountersIfNeeded();
+ // Retrieve the current state together with position counters unconditionally
+ // to ensure that the state on our side gets updated.
+ sendCommand(makeHalCommand<HalCommand::Tag::getStatus>(),
+                nullptr, true /*safeFromNonWorkerThread*/);
} else {
ALOGW("%s: unexpected onDrainReady in the state %s", __func__, toString(state).c_str());
}
@@ -650,21 +663,16 @@
return transfer(const_cast<void*>(buffer), bytes, written);
}
-status_t StreamOutHalAidl::getRenderPosition(uint32_t *dspFrames) {
+status_t StreamOutHalAidl::getRenderPosition(uint64_t *dspFrames) {
if (dspFrames == nullptr) {
return BAD_VALUE;
}
int64_t aidlFrames = 0, aidlTimestamp = 0;
RETURN_STATUS_IF_ERROR(getObservablePosition(&aidlFrames, &aidlTimestamp));
- *dspFrames = static_cast<uint32_t>(aidlFrames);
+ *dspFrames = aidlFrames;
return OK;
}
-status_t StreamOutHalAidl::getNextWriteTimestamp(int64_t *timestamp __unused) {
- // Obsolete, use getPresentationPosition.
- return INVALID_OPERATION;
-}
-
status_t StreamOutHalAidl::setCallback(wp<StreamOutHalInterfaceCallback> callback) {
ALOGD("%p %s", this, __func__);
TIME_CHECK();
@@ -725,6 +733,11 @@
return OK;
}
+status_t StreamOutHalAidl::presentationComplete() {
+ ALOGD("%p %s::%s", this, getClassName().c_str(), __func__);
+ return OK;
+}
+
status_t StreamOutHalAidl::updateSourceMetadata(
const StreamOutHalInterface::SourceMetadata& sourceMetadata) {
TIME_CHECK();
diff --git a/media/libaudiohal/impl/StreamHalAidl.h b/media/libaudiohal/impl/StreamHalAidl.h
index b20eb00..8a398d8 100644
--- a/media/libaudiohal/impl/StreamHalAidl.h
+++ b/media/libaudiohal/impl/StreamHalAidl.h
@@ -308,10 +308,7 @@
// Return the number of audio frames written by the audio dsp to DAC since
// the output has exited standby.
- status_t getRenderPosition(uint32_t *dspFrames) override;
-
- // Get the local time at which the next write to the audio driver will be presented.
- status_t getNextWriteTimestamp(int64_t *timestamp) override;
+ status_t getRenderPosition(uint64_t *dspFrames) override;
// Set the callback for notifying completion of non-blocking write and drain.
status_t setCallback(wp<StreamOutHalInterfaceCallback> callback) override;
@@ -337,6 +334,9 @@
// Return a recent count of the number of audio frames presented to an external observer.
status_t getPresentationPosition(uint64_t *frames, struct timespec *timestamp) override;
+ // Notifies the HAL layer that the framework considers the current playback as completed.
+ status_t presentationComplete() override;
+
// Called when the metadata of the stream's source has been changed.
status_t updateSourceMetadata(const SourceMetadata& sourceMetadata) override;
diff --git a/media/libaudiohal/impl/StreamHalHidl.cpp b/media/libaudiohal/impl/StreamHalHidl.cpp
index 77c75db..9e22700 100644
--- a/media/libaudiohal/impl/StreamHalHidl.cpp
+++ b/media/libaudiohal/impl/StreamHalHidl.cpp
@@ -17,6 +17,8 @@
#define LOG_TAG "StreamHalHidl"
//#define LOG_NDEBUG 0
+#include <cinttypes>
+
#include <android/hidl/manager/1.0/IServiceManager.h>
#include <hwbinder/IPCThreadState.h>
#include <media/AudioParameter.h>
@@ -589,32 +591,39 @@
return OK;
}
-status_t StreamOutHalHidl::getRenderPosition(uint32_t *dspFrames) {
+status_t StreamOutHalHidl::getRenderPosition(uint64_t *dspFrames) {
// TIME_CHECK(); // TODO(b/243839867) reenable only when optimized.
if (mStream == 0) return NO_INIT;
Result retval;
+ uint32_t halPosition = 0;
Return<void> ret = mStream->getRenderPosition(
[&](Result r, uint32_t d) {
retval = r;
if (retval == Result::OK) {
- *dspFrames = d;
+ halPosition = d;
}
});
- return processReturn("getRenderPosition", ret, retval);
-}
+ status_t status = processReturn("getRenderPosition", ret, retval);
+ if (status != OK) {
+ return status;
+ }
+ // Maintain a 64-bit render position using the 32-bit result from the HAL.
+ // This delta calculation relies on the arithmetic overflow behavior
+ // of integers. For example (100 - 0xFFFFFFF0) = 116.
+ std::lock_guard l(mPositionMutex);
+ const auto truncatedPosition = (uint32_t)mRenderPosition;
+    int32_t deltaHalPosition; // initialization not needed, overwritten by __builtin_sub_overflow()
+ (void) __builtin_sub_overflow(halPosition, truncatedPosition, &deltaHalPosition);
-status_t StreamOutHalHidl::getNextWriteTimestamp(int64_t *timestamp) {
- TIME_CHECK();
- if (mStream == 0) return NO_INIT;
- Result retval;
- Return<void> ret = mStream->getNextWriteTimestamp(
- [&](Result r, int64_t t) {
- retval = r;
- if (retval == Result::OK) {
- *timestamp = t;
- }
- });
- return processReturn("getRenderPosition", ret, retval);
+ if (deltaHalPosition >= 0) {
+ mRenderPosition += deltaHalPosition;
+ } else if (mExpectRetrograde) {
+ mExpectRetrograde = false;
+ mRenderPosition -= static_cast<uint64_t>(-deltaHalPosition);
+ ALOGW("Retrograde motion of %" PRId32 " frames", -deltaHalPosition);
+ }
+ *dspFrames = mRenderPosition;
+ return OK;
}
status_t StreamOutHalHidl::setCallback(wp<StreamOutHalInterfaceCallback> callback) {
@@ -667,9 +676,23 @@
status_t StreamOutHalHidl::flush() {
TIME_CHECK();
if (mStream == 0) return NO_INIT;
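+ // The HAL resets its frame counter on flush, so reset the expanded 64-bit render position too.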
+ {
+ std::lock_guard l(mPositionMutex);
+ mRenderPosition = 0;
+ mExpectRetrograde = false;
+ }
return processReturn("pause", mStream->flush());
}
+status_t StreamOutHalHidl::standby() {
+ {
+ std::lock_guard l(mPositionMutex);
+ mRenderPosition = 0;
+ mExpectRetrograde = false;
+ }
+ return StreamHalHidl::standby();
+}
+
status_t StreamOutHalHidl::getPresentationPosition(uint64_t *frames, struct timespec *timestamp) {
// TIME_CHECK(); // TODO(b/243839867) reenable only when optimized.
if (mStream == 0) return NO_INIT;
@@ -696,6 +719,16 @@
}
}
+status_t StreamOutHalHidl::presentationComplete() {
+ // Avoid suppressing retrograde motion in mRenderPosition for gapless offload/direct when
+ // transitioning between tracks.
+ // The HAL resets the frame position without flush/stop being called, but calls back prior to
+ // this event. So, on the next occurrence of retrograde motion, we permit backwards movement of
+ // mRenderPosition.
+ mExpectRetrograde = true;
+ return OK;
+}
+
#if MAJOR_VERSION == 2
status_t StreamOutHalHidl::updateSourceMetadata(
const StreamOutHalInterface::SourceMetadata& /* sourceMetadata */) {
diff --git a/media/libaudiohal/impl/StreamHalHidl.h b/media/libaudiohal/impl/StreamHalHidl.h
index 48da633..80379d0 100644
--- a/media/libaudiohal/impl/StreamHalHidl.h
+++ b/media/libaudiohal/impl/StreamHalHidl.h
@@ -18,10 +18,12 @@
#define ANDROID_HARDWARE_STREAM_HAL_HIDL_H
#include <atomic>
+#include <mutex>
#include PATH(android/hardware/audio/CORE_TYPES_FILE_VERSION/IStream.h)
#include PATH(android/hardware/audio/CORE_TYPES_FILE_VERSION/IStreamIn.h)
#include PATH(android/hardware/audio/FILE_VERSION/IStreamOut.h)
+#include <android-base/thread_annotations.h>
#include <fmq/EventFlag.h>
#include <fmq/MessageQueue.h>
#include <media/audiohal/EffectHalInterface.h>
@@ -119,6 +121,9 @@
class StreamOutHalHidl : public StreamOutHalInterface, public StreamHalHidl {
public:
+ // Put the audio hardware input/output into standby mode (from StreamHalInterface).
+ status_t standby() override;
+
// Return the frame size (number of bytes per sample) of a stream.
virtual status_t getFrameSize(size_t *size);
@@ -136,10 +141,7 @@
// Return the number of audio frames written by the audio dsp to DAC since
// the output has exited standby.
- virtual status_t getRenderPosition(uint32_t *dspFrames);
-
- // Get the local time at which the next write to the audio driver will be presented.
- virtual status_t getNextWriteTimestamp(int64_t *timestamp);
+ virtual status_t getRenderPosition(uint64_t *dspFrames);
// Set the callback for notifying completion of non-blocking write and drain.
virtual status_t setCallback(wp<StreamOutHalInterfaceCallback> callback);
@@ -165,6 +167,9 @@
// Return a recent count of the number of audio frames presented to an external observer.
virtual status_t getPresentationPosition(uint64_t *frames, struct timespec *timestamp);
+ // Notifies the HAL layer that the framework considers the current playback completed.
+ status_t presentationComplete() override;
+
// Called when the metadata of the stream's source has been changed.
status_t updateSourceMetadata(const SourceMetadata& sourceMetadata) override;
@@ -221,6 +226,10 @@
std::unique_ptr<StatusMQ> mStatusMQ;
std::atomic<pid_t> mWriterClient;
EventFlag* mEfGroup;
+ std::mutex mPositionMutex;
+ // Used to correctly expand the 32-bit position reported by the HAL into 64 bits.
+ uint64_t mRenderPosition GUARDED_BY(mPositionMutex) = 0;
+ bool mExpectRetrograde GUARDED_BY(mPositionMutex) = false; // See 'presentationComplete'.
// Can not be constructed directly by clients.
StreamOutHalHidl(const sp<::android::hardware::audio::CPP_VERSION::IStreamOut>& stream);
diff --git a/media/libaudiohal/impl/effectsAidlConversion/AidlConversionSpatializer.cpp b/media/libaudiohal/impl/effectsAidlConversion/AidlConversionSpatializer.cpp
index a88ef83..c2aa278 100644
--- a/media/libaudiohal/impl/effectsAidlConversion/AidlConversionSpatializer.cpp
+++ b/media/libaudiohal/impl/effectsAidlConversion/AidlConversionSpatializer.cpp
@@ -179,17 +179,19 @@
if (!range) {
return BAD_VALUE;
}
+ std::vector<Spatialization::Level> levels;
for (const auto level : ::ndk::enum_range<Spatialization::Level>()) {
const auto spatializer =
Spatializer::make<Spatializer::spatializationLevel>(level);
if (spatializer >= range->min && spatializer <= range->max) {
- if (status_t status = param.writeToValue(&level); status != OK) {
- ALOGW("%s %d: write level %s to value failed %d", __func__, __LINE__,
- toString(level).c_str(), status);
- return status;
- }
+ levels.emplace_back(level);
}
}
+ const uint8_t num = levels.size();
+ RETURN_STATUS_IF_ERROR(param.writeToValue(&num));
+ for (const auto level : levels) {
+ RETURN_STATUS_IF_ERROR(param.writeToValue(&level));
+ }
return OK;
}
case SPATIALIZER_PARAM_LEVEL: {
@@ -238,15 +240,14 @@
const auto& supportedLayouts = VALUE_OR_RETURN_STATUS(GET_PARAMETER_SPECIFIC_FIELD(
aidlParam, Spatializer, spatializer, Spatializer::supportedChannelLayout,
std::vector<AudioChannelLayout>));
+ // audio_channel_mask_t is uint32_t enum, write number in 32bit
+ const uint32_t num = supportedLayouts.size();
+ RETURN_STATUS_IF_ERROR(param.writeToValue(&num));
for (const auto& layout : supportedLayouts) {
audio_channel_mask_t mask = VALUE_OR_RETURN_STATUS(
::aidl::android::aidl2legacy_AudioChannelLayout_audio_channel_mask_t(
layout, false /* isInput */));
- if (status_t status = param.writeToValue(&mask); status != OK) {
- ALOGW("%s %d: write mask %s to value failed %d", __func__, __LINE__,
- layout.toString().c_str(), status);
- return status;
- }
+ RETURN_STATUS_IF_ERROR(param.writeToValue(&mask));
}
return OK;
}
@@ -256,17 +257,19 @@
if (!range) {
return BAD_VALUE;
}
+ std::vector<Spatialization::Mode> modes;
for (const auto mode : ::ndk::enum_range<Spatialization::Mode>()) {
if (const auto spatializer =
Spatializer::make<Spatializer::spatializationMode>(mode);
spatializer >= range->min && spatializer <= range->max) {
- if (status_t status = param.writeToValue(&mode); status != OK) {
- ALOGW("%s %d: write mode %s to value failed %d", __func__, __LINE__,
- toString(mode).c_str(), status);
- return status;
- }
+ modes.emplace_back(mode);
}
}
+ const uint8_t num = modes.size();
+ RETURN_STATUS_IF_ERROR(param.writeToValue(&num));
+ for (const auto mode : modes) {
+ RETURN_STATUS_IF_ERROR(param.writeToValue(&mode));
+ }
return OK;
}
case SPATIALIZER_PARAM_SUPPORTED_HEADTRACKING_CONNECTION: {
@@ -275,17 +278,18 @@
if (!range) {
return BAD_VALUE;
}
+ std::vector<HeadTracking::ConnectionMode> modes;
for (const auto mode : ::ndk::enum_range<HeadTracking::ConnectionMode>()) {
if (const auto spatializer =
Spatializer::make<Spatializer::headTrackingConnectionMode>(mode);
spatializer < range->min || spatializer > range->max) {
 continue;
 }
- if (status_t status = param.writeToValue(&mode); status != OK) {
- ALOGW("%s %d: write mode %s to value failed %d", __func__, __LINE__,
- toString(mode).c_str(), status);
- return status;
- }
+ modes.emplace_back(mode);
+ }
+ const uint8_t num = modes.size();
+ RETURN_STATUS_IF_ERROR(param.writeToValue(&num));
+ for (const auto mode : modes) {
+ RETURN_STATUS_IF_ERROR(param.writeToValue(&mode));
}
return OK;
}
diff --git a/media/libaudiohal/include/media/audiohal/StreamHalInterface.h b/media/libaudiohal/include/media/audiohal/StreamHalInterface.h
index 37615af..eb14f6b 100644
--- a/media/libaudiohal/include/media/audiohal/StreamHalInterface.h
+++ b/media/libaudiohal/include/media/audiohal/StreamHalInterface.h
@@ -151,10 +151,7 @@
// Return the number of audio frames written by the audio dsp to DAC since
// the output has exited standby.
- virtual status_t getRenderPosition(uint32_t *dspFrames) = 0;
-
- // Get the local time at which the next write to the audio driver will be presented.
- virtual status_t getNextWriteTimestamp(int64_t *timestamp) = 0;
+ virtual status_t getRenderPosition(uint64_t *dspFrames) = 0;
// Set the callback for notifying completion of non-blocking write and drain.
// The callback must be owned by someone else. The output stream does not own it
@@ -182,6 +179,9 @@
// Return a recent count of the number of audio frames presented to an external observer.
virtual status_t getPresentationPosition(uint64_t *frames, struct timespec *timestamp) = 0;
+ // Notifies the HAL layer that the framework considers the current playback completed.
+ virtual status_t presentationComplete() = 0;
+
struct SourceMetadata {
std::vector<playback_track_metadata_v7_t> tracks;
};
diff --git a/media/libaudiohal/tests/Android.bp b/media/libaudiohal/tests/Android.bp
index b9af0bf..f6a7eea 100644
--- a/media/libaudiohal/tests/Android.bp
+++ b/media/libaudiohal/tests/Android.bp
@@ -25,8 +25,8 @@
name: "libaudiohal_aidl_test_default",
test_suites: ["device-tests"],
defaults: [
- "libaudiohal_default",
"libaudiohal_aidl_default",
+ "libaudiohal_default",
],
shared_libs: [
"libaudiohal",
@@ -36,8 +36,8 @@
cc_test {
name: "CoreAudioHalAidlTest",
srcs: [
- "CoreAudioHalAidl_test.cpp",
":core_audio_hal_aidl_src_files",
+ "CoreAudioHalAidl_test.cpp",
],
defaults: ["libaudiohal_aidl_test_default"],
header_libs: ["libaudiohalimpl_headers"],
@@ -56,8 +56,8 @@
cc_test {
name: "EffectProxyTest",
srcs: [
- "EffectProxy_test.cpp",
":audio_effectproxy_src_files",
+ "EffectProxy_test.cpp",
],
defaults: [
"libaudiohal_aidl_test_default",
@@ -69,8 +69,8 @@
cc_test {
name: "EffectHalVersionCompatibilityTest",
srcs: [
- "EffectHalVersionCompatibility_test.cpp",
":audio_effect_hal_aidl_src_files",
+ "EffectHalVersionCompatibility_test.cpp",
],
defaults: ["libaudiohal_aidl_test_default"],
header_libs: ["libaudiohalimpl_headers"],
diff --git a/media/libaudiohal/tests/CoreAudioHalAidl_test.cpp b/media/libaudiohal/tests/CoreAudioHalAidl_test.cpp
index 3541078..5106874 100644
--- a/media/libaudiohal/tests/CoreAudioHalAidl_test.cpp
+++ b/media/libaudiohal/tests/CoreAudioHalAidl_test.cpp
@@ -14,7 +14,9 @@
* limitations under the License.
*/
+#include <algorithm>
#include <memory>
+#include <mutex>
#include <string>
#include <vector>
@@ -22,6 +24,7 @@
#include <gtest/gtest.h>
#include <DeviceHalAidl.h>
+#include <Hal2AidlMapper.h>
#include <StreamHalAidl.h>
#include <aidl/android/hardware/audio/core/BnModule.h>
#include <aidl/android/hardware/audio/core/BnStreamCommon.h>
@@ -31,7 +34,24 @@
namespace {
+using ::aidl::android::hardware::audio::core::AudioPatch;
+using ::aidl::android::hardware::audio::core::AudioRoute;
using ::aidl::android::hardware::audio::core::VendorParameter;
+using ::aidl::android::media::audio::common::AudioChannelLayout;
+using ::aidl::android::media::audio::common::AudioConfig;
+using ::aidl::android::media::audio::common::AudioDeviceDescription;
+using ::aidl::android::media::audio::common::AudioDeviceType;
+using ::aidl::android::media::audio::common::AudioFormatDescription;
+using ::aidl::android::media::audio::common::AudioFormatType;
+using ::aidl::android::media::audio::common::AudioIoFlags;
+using ::aidl::android::media::audio::common::AudioPort;
+using ::aidl::android::media::audio::common::AudioPortConfig;
+using ::aidl::android::media::audio::common::AudioPortDeviceExt;
+using ::aidl::android::media::audio::common::AudioPortExt;
+using ::aidl::android::media::audio::common::AudioPortMixExt;
+using ::aidl::android::media::audio::common::AudioProfile;
+using ::aidl::android::media::audio::common::AudioSource;
+using ::aidl::android::media::audio::common::PcmType;
class VendorParameterMock {
public:
@@ -63,9 +83,105 @@
std::vector<VendorParameter> mSyncParameters;
};
+struct Configuration {
+ std::vector<AudioPort> ports;
+ std::vector<AudioPortConfig> portConfigs;
+ std::vector<AudioRoute> routes;
+ std::vector<AudioPatch> patches;
+ int32_t nextPortId = 1;
+ int32_t nextPatchId = 1;
+};
+
+void fillProfile(AudioProfile* profile, const std::vector<int32_t>& channelLayouts,
+ const std::vector<int32_t>& sampleRates) {
+ for (auto layout : channelLayouts) {
+ profile->channelMasks.push_back(
+ AudioChannelLayout::make<AudioChannelLayout::layoutMask>(layout));
+ }
+ profile->sampleRates.insert(profile->sampleRates.end(), sampleRates.begin(), sampleRates.end());
+}
+
+AudioProfile createProfile(PcmType pcmType, const std::vector<int32_t>& channelLayouts,
+ const std::vector<int32_t>& sampleRates) {
+ AudioProfile profile;
+ profile.format.type = AudioFormatType::PCM;
+ profile.format.pcm = pcmType;
+ fillProfile(&profile, channelLayouts, sampleRates);
+ return profile;
+}
+
+AudioPortExt createPortDeviceExt(AudioDeviceType devType, int32_t flags,
+ std::string connection = "") {
+ AudioPortDeviceExt deviceExt;
+ deviceExt.device.type.type = devType;
+ if (devType == AudioDeviceType::IN_MICROPHONE && connection.empty()) {
+ deviceExt.device.address = "bottom";
+ } else if (devType == AudioDeviceType::IN_MICROPHONE_BACK && connection.empty()) {
+ deviceExt.device.address = "back";
+ }
+ deviceExt.device.type.connection = std::move(connection);
+ deviceExt.flags = flags;
+ return AudioPortExt::make<AudioPortExt::device>(deviceExt);
+}
+
+AudioPortExt createPortMixExt(int32_t maxOpenStreamCount, int32_t maxActiveStreamCount) {
+ AudioPortMixExt mixExt;
+ mixExt.maxOpenStreamCount = maxOpenStreamCount;
+ mixExt.maxActiveStreamCount = maxActiveStreamCount;
+ return AudioPortExt::make<AudioPortExt::mix>(mixExt);
+}
+
+AudioPort createPort(int32_t id, const std::string& name, int32_t flags, bool isInput,
+ const AudioPortExt& ext) {
+ AudioPort port;
+ port.id = id;
+ port.name = name;
+ port.flags = isInput ? AudioIoFlags::make<AudioIoFlags::input>(flags)
+ : AudioIoFlags::make<AudioIoFlags::output>(flags);
+ port.ext = ext;
+ return port;
+}
+
+AudioRoute createRoute(const std::vector<AudioPort>& sources, const AudioPort& sink) {
+ AudioRoute route;
+ route.sinkPortId = sink.id;
+ std::transform(sources.begin(), sources.end(), std::back_inserter(route.sourcePortIds),
+ [](const auto& port) { return port.id; });
+ return route;
+}
+
+template <typename T>
+auto findById(std::vector<T>& v, int32_t id) {
+ return std::find_if(v.begin(), v.end(), [&](const auto& e) { return e.id == id; });
+}
+
+Configuration getTestConfiguration() {
+ const std::vector<AudioProfile> standardPcmAudioProfiles = {
+ createProfile(PcmType::INT_16_BIT, {AudioChannelLayout::LAYOUT_STEREO}, {48000})};
+ Configuration c;
+
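+ // Minimal test topology: one BT A2DP output device port fed by a single mix port via one route.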
+ AudioPort btOutDevice =
+ createPort(c.nextPortId++, "BT A2DP Out", 0, false,
+ createPortDeviceExt(AudioDeviceType::OUT_DEVICE, 0,
+ AudioDeviceDescription::CONNECTION_BT_A2DP));
+ btOutDevice.profiles = standardPcmAudioProfiles;
+ c.ports.push_back(btOutDevice);
+
+ AudioPort btOutMix =
+ createPort(c.nextPortId++, "a2dp output", 0, false, createPortMixExt(1, 1));
+ btOutMix.profiles = standardPcmAudioProfiles;
+ c.ports.push_back(btOutMix);
+
+ c.routes.push_back(createRoute({btOutMix}, btOutDevice));
+
+ return c;
+}
+
class ModuleMock : public ::aidl::android::hardware::audio::core::BnModule,
public VendorParameterMock {
public:
+ ModuleMock() = default;
+ explicit ModuleMock(const Configuration& config) : mConfig(config) {}
bool isScreenTurnedOn() const { return mIsScreenTurnedOn; }
ScreenRotation getScreenRotation() const { return mScreenRotation; }
@@ -91,35 +207,91 @@
return ndk::ScopedAStatus::ok();
}
ndk::ScopedAStatus connectExternalDevice(
- const ::aidl::android::media::audio::common::AudioPort&,
- ::aidl::android::media::audio::common::AudioPort*) override {
+ const ::aidl::android::media::audio::common::AudioPort& portIdAndData,
+ ::aidl::android::media::audio::common::AudioPort* port) override {
+ auto src = portIdAndData; // Make a copy to mimic RPC behavior.
+ auto iter = findById<AudioPort>(mConfig.ports, src.id);
+ if (iter == mConfig.ports.end()) {
+ return ndk::ScopedAStatus::fromExceptionCode(EX_ILLEGAL_ARGUMENT);
+ }
+ *port = *iter;
+ port->ext = src.ext;
+ port->id = mConfig.nextPortId++;
+ ALOGD("%s: returning %s", __func__, port->toString().c_str());
+ mConfig.ports.push_back(*port);
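+ // Make the connected port routable: clone routes where the template port is the sink,
+ // and add the new port as a source where the template port is a source.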
+ std::vector<AudioRoute> newRoutes;
+ for (auto& r : mConfig.routes) {
+ if (r.sinkPortId == src.id) {
+ newRoutes.push_back(AudioRoute{.sourcePortIds = r.sourcePortIds,
+ .sinkPortId = port->id,
+ .isExclusive = r.isExclusive});
+ } else if (std::find(r.sourcePortIds.begin(), r.sourcePortIds.end(), src.id) !=
+ r.sourcePortIds.end()) {
+ r.sourcePortIds.push_back(port->id);
+ }
+ }
+ mConfig.routes.insert(mConfig.routes.end(), newRoutes.begin(), newRoutes.end());
return ndk::ScopedAStatus::ok();
}
- ndk::ScopedAStatus disconnectExternalDevice(int32_t) override {
+ ndk::ScopedAStatus disconnectExternalDevice(int32_t portId) override {
+ auto iter = findById<AudioPort>(mConfig.ports, portId);
+ if (iter == mConfig.ports.end()) {
+ return ndk::ScopedAStatus::fromExceptionCode(EX_ILLEGAL_ARGUMENT);
+ }
+ mConfig.ports.erase(iter);
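+ // Remove routes sinking into the disconnected port and drop it from the source lists of the rest.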
+ for (auto it = mConfig.routes.begin(); it != mConfig.routes.end();) {
+ if (it->sinkPortId == portId) {
+ it = mConfig.routes.erase(it);
+ } else {
+ if (auto srcIt =
+ std::find(it->sourcePortIds.begin(), it->sourcePortIds.end(), portId);
+ srcIt != it->sourcePortIds.end()) {
+ it->sourcePortIds.erase(srcIt);
+ }
+ ++it;
+ }
+ }
return ndk::ScopedAStatus::ok();
}
ndk::ScopedAStatus getAudioPatches(
- std::vector<::aidl::android::hardware::audio::core::AudioPatch>*) override {
+ std::vector<::aidl::android::hardware::audio::core::AudioPatch>* patches) override {
+ *patches = mConfig.patches;
return ndk::ScopedAStatus::ok();
}
- ndk::ScopedAStatus getAudioPort(int32_t,
- ::aidl::android::media::audio::common::AudioPort*) override {
+ ndk::ScopedAStatus getAudioPort(
+ int32_t portId, ::aidl::android::media::audio::common::AudioPort* port) override {
+ auto iter = findById<AudioPort>(mConfig.ports, portId);
+ if (iter == mConfig.ports.end()) {
+ return ndk::ScopedAStatus::fromExceptionCode(EX_ILLEGAL_ARGUMENT);
+ }
+ *port = *iter;
return ndk::ScopedAStatus::ok();
}
ndk::ScopedAStatus getAudioPortConfigs(
- std::vector<::aidl::android::media::audio::common::AudioPortConfig>*) override {
+ std::vector<::aidl::android::media::audio::common::AudioPortConfig>* configs) override {
+ *configs = mConfig.portConfigs;
return ndk::ScopedAStatus::ok();
}
ndk::ScopedAStatus getAudioPorts(
- std::vector<::aidl::android::media::audio::common::AudioPort>*) override {
+ std::vector<::aidl::android::media::audio::common::AudioPort>* ports) override {
+ *ports = mConfig.ports;
return ndk::ScopedAStatus::ok();
}
ndk::ScopedAStatus getAudioRoutes(
- std::vector<::aidl::android::hardware::audio::core::AudioRoute>*) override {
+ std::vector<::aidl::android::hardware::audio::core::AudioRoute>* routes) override {
+ *routes = mConfig.routes;
return ndk::ScopedAStatus::ok();
}
ndk::ScopedAStatus getAudioRoutesForAudioPort(
- int32_t, std::vector<::aidl::android::hardware::audio::core::AudioRoute>*) override {
+ int32_t portId,
+ std::vector<::aidl::android::hardware::audio::core::AudioRoute>* routes) override {
+ for (auto& r : mConfig.routes) {
+ const auto& srcs = r.sourcePortIds;
+ if (r.sinkPortId == portId ||
+ std::find(srcs.begin(), srcs.end(), portId) != srcs.end()) {
+ routes->push_back(r);
+ }
+ }
return ndk::ScopedAStatus::ok();
}
ndk::ScopedAStatus openInputStream(const OpenInputStreamArguments&,
@@ -133,17 +305,69 @@
ndk::ScopedAStatus getSupportedPlaybackRateFactors(SupportedPlaybackRateFactors*) override {
return ndk::ScopedAStatus::ok();
}
- ndk::ScopedAStatus setAudioPatch(const ::aidl::android::hardware::audio::core::AudioPatch&,
- ::aidl::android::hardware::audio::core::AudioPatch*) override {
+ ndk::ScopedAStatus setAudioPatch(
+ const ::aidl::android::hardware::audio::core::AudioPatch& requested,
+ ::aidl::android::hardware::audio::core::AudioPatch* patch) override {
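+ // A requested id of 0 means "create a new patch"; a non-zero id updates an existing one.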
+ if (requested.id == 0) {
+ *patch = requested;
+ patch->id = mConfig.nextPatchId++;
+ mConfig.patches.push_back(*patch);
+ ALOGD("%s: returning %s", __func__, patch->toString().c_str());
+ } else {
+ auto iter = findById<AudioPatch>(mConfig.patches, requested.id);
+ if (iter == mConfig.patches.end()) {
+ return ndk::ScopedAStatus::fromExceptionCode(EX_ILLEGAL_ARGUMENT);
+ }
+ *iter = *patch = requested;
+ ALOGD("%s: updated %s", __func__, patch->toString().c_str());
+ }
return ndk::ScopedAStatus::ok();
}
ndk::ScopedAStatus setAudioPortConfig(
- const ::aidl::android::media::audio::common::AudioPortConfig&,
- ::aidl::android::media::audio::common::AudioPortConfig*, bool*) override {
+ const ::aidl::android::media::audio::common::AudioPortConfig& requested,
+ ::aidl::android::media::audio::common::AudioPortConfig* config,
+ bool* applied) override {
+ *applied = false;
+ auto src = requested; // Make a copy to mimic RPC behavior.
+ if (src.id == 0) {
+ *config = src;
+ if (config->ext.getTag() == AudioPortExt::unspecified) {
+ auto iter = findById<AudioPort>(mConfig.ports, src.portId);
+ if (iter == mConfig.ports.end()) {
+ return ndk::ScopedAStatus::fromExceptionCode(EX_ILLEGAL_ARGUMENT);
+ }
+ config->ext = iter->ext;
+ }
+ config->id = mConfig.nextPortId++;
+ mConfig.portConfigs.push_back(*config);
+ ALOGD("%s: returning %s", __func__, config->toString().c_str());
+ } else {
+ auto iter = findById<AudioPortConfig>(mConfig.portConfigs, src.id);
+ if (iter == mConfig.portConfigs.end()) {
+ return ndk::ScopedAStatus::fromExceptionCode(EX_ILLEGAL_ARGUMENT);
+ }
+ *iter = *config = src;
+ ALOGD("%s: updated %s", __func__, config->toString().c_str());
+ }
+ *applied = true;
return ndk::ScopedAStatus::ok();
}
- ndk::ScopedAStatus resetAudioPatch(int32_t) override { return ndk::ScopedAStatus::ok(); }
- ndk::ScopedAStatus resetAudioPortConfig(int32_t) override { return ndk::ScopedAStatus::ok(); }
+ ndk::ScopedAStatus resetAudioPatch(int32_t patchId) override {
+ auto iter = findById<AudioPatch>(mConfig.patches, patchId);
+ if (iter == mConfig.patches.end()) {
+ return ndk::ScopedAStatus::fromExceptionCode(EX_ILLEGAL_ARGUMENT);
+ }
+ mConfig.patches.erase(iter);
+ return ndk::ScopedAStatus::ok();
+ }
+ ndk::ScopedAStatus resetAudioPortConfig(int32_t portConfigId) override {
+ auto iter = findById<AudioPortConfig>(mConfig.portConfigs, portConfigId);
+ if (iter == mConfig.portConfigs.end()) {
+ return ndk::ScopedAStatus::fromExceptionCode(EX_ILLEGAL_ARGUMENT);
+ }
+ mConfig.portConfigs.erase(iter);
+ return ndk::ScopedAStatus::ok();
+ }
ndk::ScopedAStatus getMasterMute(bool*) override { return ndk::ScopedAStatus::ok(); }
ndk::ScopedAStatus setMasterMute(bool) override { return ndk::ScopedAStatus::ok(); }
ndk::ScopedAStatus getMasterVolume(float*) override { return ndk::ScopedAStatus::ok(); }
@@ -205,6 +429,7 @@
return ndk::ScopedAStatus::ok();
}
+ Configuration mConfig;
bool mIsScreenTurnedOn = false;
ScreenRotation mScreenRotation = ScreenRotation::DEG_0;
};
@@ -398,6 +623,35 @@
using namespace android;
+namespace {
+
+class StreamHalMock : public virtual StreamHalInterface {
+ public:
+ StreamHalMock() = default;
+ ~StreamHalMock() override = default;
+ status_t getBufferSize(size_t*) override { return OK; }
+ status_t getAudioProperties(audio_config_base_t*) override { return OK; }
+ status_t setParameters(const String8&) override { return OK; }
+ status_t getParameters(const String8&, String8*) override { return OK; }
+ status_t getFrameSize(size_t*) override { return OK; }
+ status_t addEffect(sp<EffectHalInterface>) override { return OK; }
+ status_t removeEffect(sp<EffectHalInterface>) override { return OK; }
+ status_t standby() override { return OK; }
+ status_t dump(int, const Vector<String16>&) override { return OK; }
+ status_t start() override { return OK; }
+ status_t stop() override { return OK; }
+ status_t createMmapBuffer(int32_t, struct audio_mmap_buffer_info*) override { return OK; }
+ status_t getMmapPosition(struct audio_mmap_position*) override { return OK; }
+ status_t setHalThreadPriority(int) override { return OK; }
+ status_t legacyCreateAudioPatch(const struct audio_port_config&, std::optional<audio_source_t>,
+ audio_devices_t) override {
+ return OK;
+ }
+ status_t legacyReleaseAudioPatch() override { return OK; }
+};
+
+} // namespace
+
class DeviceHalAidlTest : public testing::Test {
public:
void SetUp() override {
@@ -593,3 +847,297 @@
EXPECT_EQ(0UL, mStreamCommon->getAsyncParameters().size());
EXPECT_EQ(0UL, mStreamCommon->getSyncParameters().size());
}
+
+class Hal2AidlMapperTest : public testing::Test {
+ public:
+ void SetUp() override {
+ mModule = ndk::SharedRefBase::make<ModuleMock>(getTestConfiguration());
+ mMapper = std::make_unique<Hal2AidlMapper>("test", mModule);
+ ASSERT_EQ(OK, mMapper->initialize());
+
+ mConnectedPort.ext = createPortDeviceExt(AudioDeviceType::OUT_DEVICE, 0,
+ AudioDeviceDescription::CONNECTION_BT_A2DP);
+ mConnectedPort.ext.get<AudioPortExt::device>().device.address = "00:11:22:33:44:55";
+ ASSERT_EQ(OK, mMapper->setDevicePortConnectedState(mConnectedPort, true /*connected*/));
+
+ std::mutex mutex; // Only needed for cleanups.
+ auto mapperAccessor = std::make_unique<LockedAccessor<Hal2AidlMapper>>(*mMapper, mutex);
+ Hal2AidlMapper::Cleanups cleanups(*mapperAccessor);
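+ // Prepare a 48 kHz stereo PCM output on the connected BT device; this creates
+ // the mix port config and the initial patch used by the tests below.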
+ AudioConfig config;
+ config.base.channelMask = AudioChannelLayout::make<AudioChannelLayout::layoutMask>(
+ AudioChannelLayout::LAYOUT_STEREO);
+ config.base.format =
+ AudioFormatDescription{.type = AudioFormatType::PCM, .pcm = PcmType::INT_16_BIT};
+ config.base.sampleRate = 48000;
+ ASSERT_EQ(OK,
+ mMapper->prepareToOpenStream(
+ 42 /*ioHandle*/, mConnectedPort.ext.get<AudioPortExt::device>().device,
+ AudioIoFlags::make<AudioIoFlags::output>(0), AudioSource::DEFAULT,
+ &cleanups, &config, &mMixPortConfig, &mPatch));
+ cleanups.disarmAll();
+ ASSERT_NE(0, mPatch.id);
+ ASSERT_NE(0, mMixPortConfig.id);
+ mStream = sp<StreamHalMock>::make();
+ mMapper->addStream(mStream, mMixPortConfig.id, mPatch.id);
+
+ ASSERT_EQ(OK, mMapper->findPortConfig(mConnectedPort.ext.get<AudioPortExt::device>().device,
+ &mDevicePortConfig));
+ ASSERT_EQ(1UL, mPatch.sourcePortConfigIds.size());
+ ASSERT_EQ(mMixPortConfig.id, mPatch.sourcePortConfigIds[0]);
+ ASSERT_EQ(1UL, mPatch.sinkPortConfigIds.size());
+ ASSERT_EQ(mDevicePortConfig.id, mPatch.sinkPortConfigIds[0]);
+ }
+
+ void TearDown() override {
+ mStream.clear();
+ mMapper.reset();
+ mModule.reset();
+ }
+
+ protected:
+ void CloseDisconnectImpl() {
+ mStream.clear();
+ ASSERT_NO_FATAL_FAILURE(DisconnectDevice());
+ }
+
+ void ConnectAnotherDevice() {
+ mConnectedPort.ext.get<AudioPortExt::device>().device.address = "00:11:22:33:44:66";
+ ASSERT_EQ(OK, mMapper->setDevicePortConnectedState(mConnectedPort, true /*connected*/));
+ }
+
+ void CreateFwkPatch(int32_t* patchId) {
+ std::mutex mutex; // Only needed for cleanups.
+ auto mapperAccessor = std::make_unique<LockedAccessor<Hal2AidlMapper>>(*mMapper, mutex);
+ Hal2AidlMapper::Cleanups cleanups(*mapperAccessor);
+ ASSERT_EQ(OK, mMapper->createOrUpdatePatch({mMixPortConfig}, {mDevicePortConfig}, patchId,
+ &cleanups));
+ cleanups.disarmAll();
+ }
+
+ void DisconnectDevice() {
+ ASSERT_EQ(OK, mMapper->prepareToDisconnectExternalDevice(mConnectedPort));
+ ASSERT_EQ(OK, mMapper->setDevicePortConnectedState(mConnectedPort, false /*connected*/));
+ }
+
+ void ReleaseFwkOnlyPatch(int32_t patchId) {
+ // The patch only exists for the framework.
+ EXPECT_EQ(patchId, mMapper->findFwkPatch(patchId));
+ ASSERT_EQ(BAD_VALUE, mMapper->releaseAudioPatch(patchId));
+ mMapper->eraseFwkPatch(patchId);
+ // The patch is now erased.
+ EXPECT_EQ(0, mMapper->findFwkPatch(patchId));
+ }
+
+ std::shared_ptr<ModuleMock> mModule;
+ std::unique_ptr<Hal2AidlMapper> mMapper;
+ AudioPort mConnectedPort;
+ AudioPortConfig mMixPortConfig;
+ AudioPortConfig mDevicePortConfig;
+ AudioPatch mPatch;
+ sp<StreamHalInterface> mStream;
+};
+
+/**
+ * External device connections and patches tests diagram.
+ *
+ * [Connect device] -> [Create Stream]
+ * |-> [ (1) Close Stream] -> [Disconnect Device]
+ * |-> [ (2) Disconnect Device]
+ * | |-> [ (3) Close Stream]
+ * | \-> [ (4) Connect Another Device]
+ * | |-> (1)
+ * | |-> (2) -> (3)
+ * | \-> (5) -> (7)
+ * \-> [ (5) Create/Update Fwk Patch]
+ * |-> [(6) Release Fwk Patch]
+ * | |-> (1)
+ * | \-> (2) (including reconnection)
+ * \-> [(7) Disconnect Device]
+ * |-> [Release Fwk Patch] -> [Close Stream]
+ * \-> (4) -> (5) -> (6) -> (1)
+ *
+ * Note that the test (acting on behalf of DeviceHalAidl) is responsible
+ * for calling `eraseFwkPatch` and `updateFwkPatch` when needed.
+ */
+
+// (1)
+TEST_F(Hal2AidlMapperTest, CloseDisconnect) {
+ ASSERT_NO_FATAL_FAILURE(CloseDisconnectImpl());
+ // The patch is owned by HAL, must not be listed under fwkPatches after disconnection.
+ EXPECT_EQ(0, mMapper->findFwkPatch(mPatch.id));
+}
+
+// (2) -> (3)
+TEST_F(Hal2AidlMapperTest, DisconnectClose) {
+ ASSERT_NO_FATAL_FAILURE(DisconnectDevice());
+ // The patch is owned by HAL, must not be listed under fwkPatches after disconnection.
+ EXPECT_EQ(0, mMapper->findFwkPatch(mPatch.id));
+ mStream.clear();
+}
+
+// (2) -> (4) -> (1)
+TEST_F(Hal2AidlMapperTest, DisconnectConnectCloseDisconnect) {
+ ASSERT_NO_FATAL_FAILURE(DisconnectDevice());
+ // The patch is owned by HAL, must not be listed under fwkPatches after disconnection.
+ EXPECT_EQ(0, mMapper->findFwkPatch(mPatch.id));
+ ASSERT_NO_FATAL_FAILURE(ConnectAnotherDevice());
+ ASSERT_NO_FATAL_FAILURE(CloseDisconnectImpl());
+ // The patch is owned by HAL, must not be listed under fwkPatches after disconnection.
+ EXPECT_EQ(0, mMapper->findFwkPatch(mPatch.id));
+}
+
+// (2) -> (4) -> (2) -> (3)
+TEST_F(Hal2AidlMapperTest, DisconnectConnectDisconnectClose) {
+ ASSERT_NO_FATAL_FAILURE(DisconnectDevice());
+ // The patch is owned by HAL, must not be listed under fwkPatches after disconnection.
+ EXPECT_EQ(0, mMapper->findFwkPatch(mPatch.id));
+ ASSERT_NO_FATAL_FAILURE(ConnectAnotherDevice());
+ ASSERT_NO_FATAL_FAILURE(DisconnectDevice());
+ // The patch is owned by HAL, must not be listed under fwkPatches after disconnection.
+ EXPECT_EQ(0, mMapper->findFwkPatch(mPatch.id));
+ mStream.clear();
+}
+
+// (5) -> (6) -> (1)
+TEST_F(Hal2AidlMapperTest, CreateFwkPatchReleaseCloseDisconnect) {
+ int32_t patchId;
+ ASSERT_NO_FATAL_FAILURE(CreateFwkPatch(&patchId));
+ // Must be the patch created during stream opening.
+ ASSERT_EQ(mPatch.id, patchId);
+ // The patch was not reset by HAL, must not be listed under fwkPatches.
+ EXPECT_EQ(0, mMapper->findFwkPatch(mPatch.id));
+
+ ASSERT_EQ(OK, mMapper->releaseAudioPatch(patchId));
+ // The patch exists for neither the fwk nor the HAL, so it must not be listed under fwkPatches.
+ EXPECT_EQ(0, mMapper->findFwkPatch(patchId));
+ ASSERT_NO_FATAL_FAILURE(CloseDisconnectImpl());
+ // The patch exists for neither the fwk nor the HAL, so it must not be listed under fwkPatches.
+ EXPECT_EQ(0, mMapper->findFwkPatch(patchId));
+}
+
+// (5) -> (6) -> (2) -> (3)
+TEST_F(Hal2AidlMapperTest, CreateFwkPatchReleaseDisconnectClose) {
+ int32_t patchId;
+ ASSERT_NO_FATAL_FAILURE(CreateFwkPatch(&patchId));
+ // Must be the patch created during stream opening.
+ ASSERT_EQ(mPatch.id, patchId);
+ // The patch was not reset by HAL, must not be listed under fwkPatches.
+ EXPECT_EQ(0, mMapper->findFwkPatch(mPatch.id));
+
+ ASSERT_EQ(OK, mMapper->releaseAudioPatch(patchId));
+ // The patch exists for neither the fwk nor the HAL, so it must not be listed under fwkPatches.
+ EXPECT_EQ(0, mMapper->findFwkPatch(patchId));
+ ASSERT_NO_FATAL_FAILURE(DisconnectDevice());
+ // The patch exists for neither the fwk nor the HAL, so it must not be listed under fwkPatches.
+ EXPECT_EQ(0, mMapper->findFwkPatch(mPatch.id));
+ mStream.clear();
+}
+
+// (5) -> (6) -> (2) -> (4) -> (2) -> (3)
+TEST_F(Hal2AidlMapperTest, CreateFwkPatchReleaseDisconnectConnectDisconnectClose) {
+ int32_t patchId;
+ ASSERT_NO_FATAL_FAILURE(CreateFwkPatch(&patchId));
+ // Must be the patch created during stream opening.
+ ASSERT_EQ(mPatch.id, patchId);
+ // The patch was not reset by HAL, must not be listed under fwkPatches.
+ EXPECT_EQ(0, mMapper->findFwkPatch(mPatch.id));
+
+ ASSERT_EQ(OK, mMapper->releaseAudioPatch(patchId));
+ // The patch exists for neither the fwk nor the HAL, so it must not be listed under fwkPatches.
+ EXPECT_EQ(0, mMapper->findFwkPatch(patchId));
+ ASSERT_NO_FATAL_FAILURE(DisconnectDevice());
+ // The patch exists for neither the fwk nor the HAL, so it must not be listed under fwkPatches.
+ EXPECT_EQ(0, mMapper->findFwkPatch(mPatch.id));
+
+ ASSERT_NO_FATAL_FAILURE(ConnectAnotherDevice());
+ ASSERT_NO_FATAL_FAILURE(DisconnectDevice());
+ // The patch exists for neither the fwk nor the HAL, so it must not be listed under fwkPatches.
+ EXPECT_EQ(0, mMapper->findFwkPatch(mPatch.id));
+ mStream.clear();
+}
+
+// (5) -> (7) -> Release -> Close
+TEST_F(Hal2AidlMapperTest, CreateFwkPatchDisconnectReleaseClose) {
+ int32_t patchId;
+ ASSERT_NO_FATAL_FAILURE(CreateFwkPatch(&patchId));
+ // Must be the patch created during stream opening.
+ ASSERT_EQ(mPatch.id, patchId);
+ // The patch was not reset by HAL, must not be listed under fwkPatches.
+ EXPECT_EQ(0, mMapper->findFwkPatch(mPatch.id));
+
+ ASSERT_NO_FATAL_FAILURE(DisconnectDevice());
+ ASSERT_NO_FATAL_FAILURE(ReleaseFwkOnlyPatch(patchId));
+
+ mStream.clear();
+ EXPECT_EQ(0, mMapper->findFwkPatch(patchId));
+}
+
+// (5) -> (7) -> (4) -> (5) -> (6) -> (1)
+TEST_F(Hal2AidlMapperTest, CreateFwkPatchDisconnectConnectUpdateReleaseCloseDisconnect) {
+ int32_t patchId;
+ ASSERT_NO_FATAL_FAILURE(CreateFwkPatch(&patchId));
+ // Must be the patch created during stream opening.
+ ASSERT_EQ(mPatch.id, patchId);
+ // The patch was not reset by HAL, must not be listed under fwkPatches.
+ EXPECT_EQ(0, mMapper->findFwkPatch(mPatch.id));
+
+ ASSERT_NO_FATAL_FAILURE(DisconnectDevice());
+ // The patch now only exists for the framework.
+ EXPECT_EQ(mPatch.id, mMapper->findFwkPatch(mPatch.id));
+
+ ASSERT_NO_FATAL_FAILURE(ConnectAnotherDevice());
+ // Change the device address locally, for patch update.
+ mDevicePortConfig.ext.get<AudioPortExt::device>().device.address =
+ mConnectedPort.ext.get<AudioPortExt::device>().device.address;
+ int32_t newPatchId = patchId;
+ ASSERT_NO_FATAL_FAILURE(CreateFwkPatch(&newPatchId));
+ EXPECT_NE(patchId, newPatchId);
+ mMapper->updateFwkPatch(patchId, newPatchId);
+ EXPECT_EQ(newPatchId, mMapper->findFwkPatch(patchId));
+ // Just in case, check that HAL patch ID is not listed as a fwk patch.
+ EXPECT_EQ(0, mMapper->findFwkPatch(newPatchId));
+ // Verify that device port config was updated.
+ ASSERT_EQ(OK, mMapper->findPortConfig(mConnectedPort.ext.get<AudioPortExt::device>().device,
+ &mDevicePortConfig));
+
+ ASSERT_EQ(OK, mMapper->releaseAudioPatch(newPatchId));
+ // The patch exists for neither the fwk nor the HAL, so it must not be listed under fwkPatches.
+ EXPECT_EQ(0, mMapper->findFwkPatch(patchId));
+ // Just in case, check that HAL patch ID is not listed.
+ EXPECT_EQ(0, mMapper->findFwkPatch(newPatchId));
+
+ ASSERT_NO_FATAL_FAILURE(CloseDisconnectImpl());
+ EXPECT_EQ(0, mMapper->findFwkPatch(mPatch.id));
+ EXPECT_EQ(0, mMapper->findFwkPatch(patchId));
+ EXPECT_EQ(0, mMapper->findFwkPatch(newPatchId));
+}
+
+// (2) -> (4) -> (5) -> (7) -> Release -> Close
+TEST_F(Hal2AidlMapperTest, DisconnectConnectCreateFwkPatchDisconnectReleaseClose) {
+ const int32_t patchId = mPatch.id;
+ ASSERT_NO_FATAL_FAILURE(DisconnectDevice());
+ // The patch is owned by HAL, must not be listed under fwkPatches after disconnection.
+ EXPECT_EQ(0, mMapper->findFwkPatch(mPatch.id));
+
+ ASSERT_NO_FATAL_FAILURE(ConnectAnotherDevice());
+ // Change the device address locally, for patch update.
+ mDevicePortConfig.ext.get<AudioPortExt::device>().device.address =
+ mConnectedPort.ext.get<AudioPortExt::device>().device.address;
+ int32_t newPatchId = 0; // Use 0 since the fwk does not know about the HAL patch.
+ EXPECT_EQ(0, mMapper->findFwkPatch(newPatchId));
+ ASSERT_NO_FATAL_FAILURE(CreateFwkPatch(&newPatchId));
+ EXPECT_NE(0, newPatchId);
+ EXPECT_NE(patchId, newPatchId);
+ // Just in case, check that HAL patch ID is not listed as a fwk patch.
+ EXPECT_EQ(0, mMapper->findFwkPatch(newPatchId));
+ // Verify that device port config was updated.
+ ASSERT_EQ(OK, mMapper->findPortConfig(mConnectedPort.ext.get<AudioPortExt::device>().device,
+ &mDevicePortConfig));
+
+ ASSERT_NO_FATAL_FAILURE(DisconnectDevice());
+ ASSERT_NO_FATAL_FAILURE(ReleaseFwkOnlyPatch(newPatchId));
+
+ mStream.clear();
+ EXPECT_EQ(0, mMapper->findFwkPatch(mPatch.id));
+ EXPECT_EQ(0, mMapper->findFwkPatch(newPatchId));
+}
diff --git a/media/libaudioprocessing/Android.bp b/media/libaudioprocessing/Android.bp
index 6160d7d..c84796e 100644
--- a/media/libaudioprocessing/Android.bp
+++ b/media/libaudioprocessing/Android.bp
@@ -22,10 +22,11 @@
],
cflags: [
- "-Werror",
"-Wall",
// uncomment to disable NEON on architectures that actually do support NEON, for benchmarking
+
+ "-Werror",
// "-DUSE_NEON=false",
],
@@ -62,7 +63,7 @@
header_libs: [
"libaudiohal_headers",
"libbase_headers",
- "libmedia_headers"
+ "libmedia_headers",
],
shared_libs: [
@@ -87,8 +88,8 @@
"AudioMixerBase.cpp",
"AudioResampler.cpp",
"AudioResamplerCubic.cpp",
- "AudioResamplerSinc.cpp",
"AudioResamplerDyn.cpp",
+ "AudioResamplerSinc.cpp",
],
arch: {
diff --git a/media/libaudioprocessing/AudioMixerBase.cpp b/media/libaudioprocessing/AudioMixerBase.cpp
index 3d11d92..7e362f7 100644
--- a/media/libaudioprocessing/AudioMixerBase.cpp
+++ b/media/libaudioprocessing/AudioMixerBase.cpp
@@ -1122,7 +1122,7 @@
aux = t->auxBuffer + numFrames;
}
for (int outFrames = frameCount; outFrames > 0; ) {
- // t->in == nullptr can happen if the track was flushed just after having
+ // t->mIn == nullptr can happen if the track was flushed just after having
// been enabled for mixing.
if (t->mIn == nullptr) {
break;
diff --git a/media/libaudioprocessing/audio-resampler/Android.bp b/media/libaudioprocessing/audio-resampler/Android.bp
index 4ea75e7..791ae37 100644
--- a/media/libaudioprocessing/audio-resampler/Android.bp
+++ b/media/libaudioprocessing/audio-resampler/Android.bp
@@ -13,12 +13,12 @@
srcs: ["AudioResamplerCoefficients.cpp"],
shared_libs: [
- "libutils",
"liblog",
+ "libutils",
],
cflags: [
- "-Werror",
"-Wall",
+ "-Werror",
],
}
diff --git a/media/libaudioprocessing/tests/Android.bp b/media/libaudioprocessing/tests/Android.bp
index a33bf55..ba9b165 100644
--- a/media/libaudioprocessing/tests/Android.bp
+++ b/media/libaudioprocessing/tests/Android.bp
@@ -29,8 +29,8 @@
],
cflags: [
- "-Werror",
"-Wall",
+ "-Werror",
],
}
diff --git a/media/libcpustats/Android.bp b/media/libcpustats/Android.bp
index 1ab1de0..2b134a7 100644
--- a/media/libcpustats/Android.bp
+++ b/media/libcpustats/Android.bp
@@ -24,8 +24,8 @@
],
cflags: [
- "-Werror",
"-Wall",
+ "-Werror",
],
host_supported: true,
diff --git a/media/libeffects/config/Android.bp b/media/libeffects/config/Android.bp
index 293a9c2..1672797 100644
--- a/media/libeffects/config/Android.bp
+++ b/media/libeffects/config/Android.bp
@@ -20,11 +20,11 @@
],
shared_libs: [
+ "libcutils",
"liblog",
+ "libmedia_helper",
"libtinyxml2",
"libutils",
- "libmedia_helper",
- "libcutils",
],
header_libs: [
diff --git a/media/libeffects/data/Android.bp b/media/libeffects/data/Android.bp
new file mode 100644
index 0000000..2acf229
--- /dev/null
+++ b/media/libeffects/data/Android.bp
@@ -0,0 +1,19 @@
+// Copyright (C) 2024 The Android Open Source Project
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+prebuilt_etc {
+ name: "framework-audio_effects.xml",
+ src: "audio_effects.xml",
+ filename: "audio_effects.xml",
+}
diff --git a/media/libeffects/downmix/Android.bp b/media/libeffects/downmix/Android.bp
index 0b25327..19b8082 100644
--- a/media/libeffects/downmix/Android.bp
+++ b/media/libeffects/downmix/Android.bp
@@ -38,9 +38,9 @@
relative_install_path: "soundfx",
cflags: [
- "-fvisibility=hidden",
"-Wall",
"-Werror",
+ "-fvisibility=hidden",
],
header_libs: [
@@ -52,9 +52,9 @@
cc_library_shared {
name: "libdownmixaidl",
srcs: [
- "aidl/EffectDownmix.cpp",
- "aidl/DownmixContext.cpp",
":effectCommonFile",
+ "aidl/DownmixContext.cpp",
+ "aidl/EffectDownmix.cpp",
],
defaults: [
"aidlaudioeffectservice_defaults",
diff --git a/media/libeffects/dynamicsproc/Android.bp b/media/libeffects/dynamicsproc/Android.bp
index e93a4e6..12477a4 100644
--- a/media/libeffects/dynamicsproc/Android.bp
+++ b/media/libeffects/dynamicsproc/Android.bp
@@ -33,7 +33,7 @@
}
cc_defaults {
- name : "dynamicsprocessingdefaults",
+ name: "dynamicsprocessingdefaults",
srcs: [
"dsp/DPBase.cpp",
"dsp/DPFrequency.cpp",
@@ -50,9 +50,9 @@
"libeigen",
],
cflags: [
- "-Wthread-safety",
"-Wall",
"-Werror",
+ "-Wthread-safety",
],
relative_install_path: "soundfx",
}
@@ -80,9 +80,9 @@
name: "libdynamicsprocessingaidl",
srcs: [
+ ":effectCommonFile",
"aidl/DynamicsProcessing.cpp",
"aidl/DynamicsProcessingContext.cpp",
- ":effectCommonFile",
],
defaults: [
diff --git a/media/libeffects/factory/Android.bp b/media/libeffects/factory/Android.bp
index ad5188f..9be45a5 100644
--- a/media/libeffects/factory/Android.bp
+++ b/media/libeffects/factory/Android.bp
@@ -21,17 +21,17 @@
name: "libeffects",
vendor: true,
srcs: [
- "EffectsFactory.c",
"EffectsConfigLoader.c",
+ "EffectsFactory.c",
"EffectsFactoryState.c",
"EffectsXmlConfigLoader.cpp",
],
shared_libs: [
"libcutils",
- "liblog",
"libdl",
"libeffectsconfig",
+ "liblog",
],
cflags: ["-fvisibility=hidden"],
@@ -54,13 +54,13 @@
cflags: [
"-Wall",
- "-Wextra",
"-Werror",
+ "-Wextra",
],
shared_libs: [
- "libeffectsconfig",
"libeffects",
+ "libeffectsconfig",
],
local_include_dirs: [
".",
diff --git a/media/libeffects/hapticgenerator/Android.bp b/media/libeffects/hapticgenerator/Android.bp
index 7d96b53..e4ac38e 100644
--- a/media/libeffects/hapticgenerator/Android.bp
+++ b/media/libeffects/hapticgenerator/Android.bp
@@ -23,7 +23,7 @@
}
cc_defaults {
- name : "hapticgeneratordefaults",
+ name: "hapticgeneratordefaults",
srcs: [
"Processors.cpp",
],
@@ -37,6 +37,14 @@
header_libs: [
"libaudioeffects",
],
+ cflags: [
+ // This is needed for the non-zero coefficients optimization for
+ // BiquadFilter. Try the biquad_filter_benchmark test in audio_utils
+ // with/without `-ffast-math` for more context.
+ "-ffast-math",
+ "-fhonor-infinities",
+ "-fhonor-nans",
+ ],
relative_install_path: "soundfx",
}
@@ -54,13 +62,11 @@
],
cflags: [
- "-O2", // Turning on the optimization in order to reduce effect processing time.
- // The latency is around 1/5 less than without the optimization.
+ // Turning on the optimization in order to reduce effect processing time.
+ // The latency is around 1/5 less than without the optimization.
+ "-O2",
"-Wall",
"-Werror",
- "-ffast-math", // This is needed for the non-zero coefficients optimization for
- // BiquadFilter. Try the biquad_filter_benchmark test in audio_utils
- // with/without `-ffast-math` for more context.
"-fvisibility=hidden",
],
}
@@ -69,9 +75,9 @@
name: "libhapticgeneratoraidl",
srcs: [
+ ":effectCommonFile",
"aidl/EffectHapticGenerator.cpp",
"aidl/HapticGeneratorContext.cpp",
- ":effectCommonFile",
],
defaults: [
diff --git a/media/libeffects/hapticgenerator/EffectHapticGenerator.cpp b/media/libeffects/hapticgenerator/EffectHapticGenerator.cpp
index 5d9886c..f60d616 100644
--- a/media/libeffects/hapticgenerator/EffectHapticGenerator.cpp
+++ b/media/libeffects/hapticgenerator/EffectHapticGenerator.cpp
@@ -114,10 +114,11 @@
std::stringstream ss;
ss << "\t\tHaptic setting:\n";
ss << "\t\t- tracks intensity map:\n";
- for (const auto&[id, intensity] : param.id2Intensity) {
- ss << "\t\t\t- id=" << id << ", intensity=" << (int) intensity;
+ for (const auto&[id, hapticScale] : param.id2HapticScale) {
+ ss << "\t\t\t- id=" << id << ", hapticLevel=" << (int) hapticScale.getLevel()
+ << ", adaptiveScaleFactor=" << hapticScale.getAdaptiveScaleFactor();
}
- ss << "\t\t- max intensity: " << (int) param.maxHapticIntensity << '\n';
+ ss << "\t\t- max scale level: " << (int) param.maxHapticScale.getLevel() << '\n';
ss << "\t\t- max haptic amplitude: " << param.maxHapticAmplitude << '\n';
return ss.str();
}
@@ -145,7 +146,7 @@
memset(context->param.hapticChannelSource, 0, sizeof(context->param.hapticChannelSource));
context->param.hapticChannelCount = 0;
context->param.audioChannelCount = 0;
- context->param.maxHapticIntensity = os::HapticLevel::MUTE;
+ context->param.maxHapticScale = os::HapticScale::mute();
context->param.resonantFrequency = DEFAULT_RESONANT_FREQUENCY;
context->param.bpfQ = 1.0f;
@@ -312,22 +313,25 @@
void *value) {
switch (param) {
case HG_PARAM_HAPTIC_INTENSITY: {
- if (value == nullptr || size != (uint32_t) (2 * sizeof(int))) {
+ if (value == nullptr || size != (uint32_t) (2 * sizeof(int) + sizeof(float))) {
return -EINVAL;
}
- int id = *(int *) value;
- os::HapticLevel hapticIntensity =
- static_cast<os::HapticLevel>(*((int *) value + 1));
- ALOGD("Setting haptic intensity as %d", static_cast<int>(hapticIntensity));
- if (hapticIntensity == os::HapticLevel::MUTE) {
- context->param.id2Intensity.erase(id);
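+ // Parameter value layout: [int32 trackId][int32 hapticLevel][float adaptiveScaleFactor].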
+ const int id = *(int *) value;
+ const os::HapticLevel hapticLevel = static_cast<os::HapticLevel>(*((int *) value + 1));
+ const float adaptiveScaleFactor = (*((float *) value + 2));
+ const os::HapticScale hapticScale = {hapticLevel, adaptiveScaleFactor};
+ ALOGD("Updating haptic scale, hapticLevel=%d, adaptiveScaleFactor=%f",
+ static_cast<int>(hapticLevel), adaptiveScaleFactor);
+ if (hapticScale.isScaleMute()) {
+ context->param.id2HapticScale.erase(id);
} else {
- context->param.id2Intensity.emplace(id, hapticIntensity);
+ context->param.id2HapticScale.emplace(id, hapticScale);
}
- context->param.maxHapticIntensity = hapticIntensity;
- for (const auto&[id, intensity] : context->param.id2Intensity) {
- context->param.maxHapticIntensity = std::max(
- context->param.maxHapticIntensity, intensity);
+ context->param.maxHapticScale = hapticScale;
+ for (const auto&[id, scale] : context->param.id2HapticScale) {
+ if (scale.getLevel() > context->param.maxHapticScale.getLevel()) {
+ context->param.maxHapticScale = scale;
+ }
}
break;
}
@@ -479,7 +483,7 @@
return -ENODATA;
}
- if (context->param.maxHapticIntensity == os::HapticLevel::MUTE) {
+ if (context->param.maxHapticScale.isScaleMute()) {
 // Haptic channels are muted, no need to generate haptic data.
return 0;
}
@@ -506,7 +510,7 @@
context->processingChain, context->inputBuffer.data(),
context->outputBuffer.data(), inBuffer->frameCount);
os::scaleHapticData(hapticOutBuffer, hapticSampleCount,
- { /*level=*/context->param.maxHapticIntensity},
+ context->param.maxHapticScale,
context->param.maxHapticAmplitude);
// For haptic data, the haptic playback thread will copy the data from effect input buffer,
diff --git a/media/libeffects/hapticgenerator/EffectHapticGenerator.h b/media/libeffects/hapticgenerator/EffectHapticGenerator.h
index f122c0a..dbfc5ea 100644
--- a/media/libeffects/hapticgenerator/EffectHapticGenerator.h
+++ b/media/libeffects/hapticgenerator/EffectHapticGenerator.h
@@ -48,9 +48,9 @@
uint32_t audioChannelCount;
uint32_t hapticChannelCount;
- // A map from track id to haptic intensity.
- std::map<int, os::HapticLevel> id2Intensity;
- os::HapticLevel maxHapticIntensity; // max intensity will be used to scale haptic data.
+ // A map from track id to haptic scale.
+ std::map<int, os::HapticScale> id2HapticScale;
+ os::HapticScale maxHapticScale; // max haptic scale will be used to scale haptic data.
float maxHapticAmplitude; // max amplitude will be used to limit haptic data absolute values.
float resonantFrequency;
diff --git a/media/libeffects/hapticgenerator/aidl/HapticGeneratorContext.cpp b/media/libeffects/hapticgenerator/aidl/HapticGeneratorContext.cpp
index 5634b8b..9b2f443 100644
--- a/media/libeffects/hapticgenerator/aidl/HapticGeneratorContext.cpp
+++ b/media/libeffects/hapticgenerator/aidl/HapticGeneratorContext.cpp
@@ -14,16 +14,17 @@
* limitations under the License.
*/
-#include <cstddef>
#define LOG_TAG "AHAL_HapticGeneratorContext"
-#include <Utils.h>
+#include "HapticGeneratorContext.h"
#include <android-base/logging.h>
#include <android-base/parsedouble.h>
#include <android-base/properties.h>
#include <audio_utils/primitives.h>
+#include <audio_utils/safe_math.h>
+#include <Utils.h>
-#include "HapticGeneratorContext.h"
+#include <cstddef>
using aidl::android::hardware::audio::common::getChannelCount;
using aidl::android::hardware::audio::common::getPcmSampleSizeInBytes;
@@ -110,6 +111,15 @@
RetCode HapticGeneratorContext::setHgVibratorInformation(
const HapticGenerator::VibratorInformation& vibratorInfo) {
mParams.mVibratorInfo = vibratorInfo;
+ if (::android::audio_utils::safe_isnan(mParams.mVibratorInfo.resonantFrequencyHz)) {
+ LOG(WARNING) << __func__ << " resonantFrequencyHz reset from nan to "
+ << DEFAULT_RESONANT_FREQUENCY;
+ mParams.mVibratorInfo.resonantFrequencyHz = DEFAULT_RESONANT_FREQUENCY;
+ }
+ if (::android::audio_utils::safe_isnan(mParams.mVibratorInfo.qFactor)) {
+ LOG(WARNING) << __func__ << " qFactor reset from nan to " << DEFAULT_BSF_ZERO_Q;
+ mParams.mVibratorInfo.qFactor = DEFAULT_BSF_ZERO_Q;
+ }
if (mProcessorsRecord.bpf != nullptr) {
mProcessorsRecord.bpf->setCoefficients(::android::audio_effect::haptic_generator::bpfCoefs(
diff --git a/media/libeffects/hapticgenerator/aidl/HapticGeneratorContext.h b/media/libeffects/hapticgenerator/aidl/HapticGeneratorContext.h
index d413b96..8a736e3 100644
--- a/media/libeffects/hapticgenerator/aidl/HapticGeneratorContext.h
+++ b/media/libeffects/hapticgenerator/aidl/HapticGeneratorContext.h
@@ -16,11 +16,13 @@
#pragma once
-#include <vibrator/ExternalVibrationUtils.h>
-#include <map>
-
-#include "Processors.h"
#include "effect-impl/EffectContext.h"
+#include "Processors.h"
+
+#include <vibrator/ExternalVibrationUtils.h>
+
+#include <cstddef>
+#include <map>
namespace aidl::android::hardware::audio::effect {
diff --git a/media/libeffects/loudness/Android.bp b/media/libeffects/loudness/Android.bp
index 46e4669..4f04ffb 100644
--- a/media/libeffects/loudness/Android.bp
+++ b/media/libeffects/loudness/Android.bp
@@ -48,10 +48,10 @@
cc_library_shared {
name: "libloudnessenhanceraidl",
srcs: [
+ ":effectCommonFile",
"aidl/EffectLoudnessEnhancer.cpp",
"aidl/LoudnessEnhancerContext.cpp",
"dsp/core/dynamic_range_compression.cpp",
- ":effectCommonFile",
],
defaults: [
"aidlaudioeffectservice_defaults",
diff --git a/media/libeffects/lvm/lib/Android.bp b/media/libeffects/lvm/lib/Android.bp
index c1a77f0..02b918b 100644
--- a/media/libeffects/lvm/lib/Android.bp
+++ b/media/libeffects/lvm/lib/Android.bp
@@ -31,6 +31,60 @@
vendor: true,
host_supported: true,
srcs: [
+ "Bass/src/LVDBE_Control.cpp",
+ "Bass/src/LVDBE_Init.cpp",
+ "Bass/src/LVDBE_Process.cpp",
+ "Bass/src/LVDBE_Tables.cpp",
+ "Bundle/src/LVM_API_Specials.cpp",
+ "Bundle/src/LVM_Buffers.cpp",
+ "Bundle/src/LVM_Control.cpp",
+ "Bundle/src/LVM_Init.cpp",
+ "Bundle/src/LVM_Process.cpp",
+ "Bundle/src/LVM_Tables.cpp",
+ "Common/src/AGC_MIX_VOL_2St1Mon_D32_WRA.cpp",
+ "Common/src/Add2_Sat_32x32.cpp",
+ "Common/src/Copy_16.cpp",
+ "Common/src/DC_2I_D16_TRC_WRA_01.cpp",
+ "Common/src/DC_2I_D16_TRC_WRA_01_Init.cpp",
+ "Common/src/DelayMix_16x16.cpp",
+ "Common/src/From2iToMS_16x16.cpp",
+ "Common/src/From2iToMono_32.cpp",
+ "Common/src/LVC_Core_MixHard_1St_2i_D16C31_SAT.cpp",
+ "Common/src/LVC_Core_MixHard_2St_D16C31_SAT.cpp",
+ "Common/src/LVC_Core_MixInSoft_D16C31_SAT.cpp",
+ "Common/src/LVC_Core_MixSoft_1St_2i_D16C31_WRA.cpp",
+ "Common/src/LVC_Core_MixSoft_1St_D16C31_WRA.cpp",
+ "Common/src/LVC_MixInSoft_D16C31_SAT.cpp",
+ "Common/src/LVC_MixSoft_1St_2i_D16C31_SAT.cpp",
+ "Common/src/LVC_MixSoft_1St_D16C31_SAT.cpp",
+ "Common/src/LVC_MixSoft_2St_D16C31_SAT.cpp",
+ "Common/src/LVC_Mixer_GetCurrent.cpp",
+ "Common/src/LVC_Mixer_GetTarget.cpp",
+ "Common/src/LVC_Mixer_Init.cpp",
+ "Common/src/LVC_Mixer_SetTarget.cpp",
+ "Common/src/LVC_Mixer_SetTimeConstant.cpp",
+ "Common/src/LVC_Mixer_VarSlope_SetTimeConstant.cpp",
+ "Common/src/LVM_Timer.cpp",
+ "Common/src/LVM_Timer_Init.cpp",
+ "Common/src/MSTo2i_Sat_16x16.cpp",
+ "Common/src/Mac3s_Sat_32x16.cpp",
+ "Common/src/MonoTo2I_32.cpp",
+ "Common/src/Mult3s_32x16.cpp",
+ "Common/src/NonLinComp_D16.cpp",
+ "Common/src/Shift_Sat_v16xv16.cpp",
+ "Common/src/Shift_Sat_v32xv32.cpp",
+ "Common/src/dB_to_Lin32.cpp",
+ "Eq/src/LVEQNB_CalcCoef.cpp",
+ "Eq/src/LVEQNB_Control.cpp",
+ "Eq/src/LVEQNB_Init.cpp",
+ "Eq/src/LVEQNB_Process.cpp",
+ "Eq/src/LVEQNB_Tables.cpp",
+ "SpectrumAnalyzer/src/LVPSA_Control.cpp",
+ "SpectrumAnalyzer/src/LVPSA_Init.cpp",
+ "SpectrumAnalyzer/src/LVPSA_Process.cpp",
+ "SpectrumAnalyzer/src/LVPSA_QPD_Init.cpp",
+ "SpectrumAnalyzer/src/LVPSA_QPD_Process.cpp",
+ "SpectrumAnalyzer/src/LVPSA_Tables.cpp",
"StereoWidening/src/LVCS_BypassMix.cpp",
"StereoWidening/src/LVCS_Control.cpp",
"StereoWidening/src/LVCS_Equaliser.cpp",
@@ -39,77 +93,23 @@
"StereoWidening/src/LVCS_ReverbGenerator.cpp",
"StereoWidening/src/LVCS_StereoEnhancer.cpp",
"StereoWidening/src/LVCS_Tables.cpp",
- "Bass/src/LVDBE_Control.cpp",
- "Bass/src/LVDBE_Init.cpp",
- "Bass/src/LVDBE_Process.cpp",
- "Bass/src/LVDBE_Tables.cpp",
- "Bundle/src/LVM_API_Specials.cpp",
- "Bundle/src/LVM_Buffers.cpp",
- "Bundle/src/LVM_Init.cpp",
- "Bundle/src/LVM_Process.cpp",
- "Bundle/src/LVM_Tables.cpp",
- "Bundle/src/LVM_Control.cpp",
- "SpectrumAnalyzer/src/LVPSA_Control.cpp",
- "SpectrumAnalyzer/src/LVPSA_Init.cpp",
- "SpectrumAnalyzer/src/LVPSA_Process.cpp",
- "SpectrumAnalyzer/src/LVPSA_QPD_Init.cpp",
- "SpectrumAnalyzer/src/LVPSA_QPD_Process.cpp",
- "SpectrumAnalyzer/src/LVPSA_Tables.cpp",
- "Eq/src/LVEQNB_CalcCoef.cpp",
- "Eq/src/LVEQNB_Control.cpp",
- "Eq/src/LVEQNB_Init.cpp",
- "Eq/src/LVEQNB_Process.cpp",
- "Eq/src/LVEQNB_Tables.cpp",
- "Common/src/DC_2I_D16_TRC_WRA_01.cpp",
- "Common/src/DC_2I_D16_TRC_WRA_01_Init.cpp",
- "Common/src/Copy_16.cpp",
- "Common/src/MonoTo2I_32.cpp",
- "Common/src/dB_to_Lin32.cpp",
- "Common/src/Shift_Sat_v16xv16.cpp",
- "Common/src/Shift_Sat_v32xv32.cpp",
- "Common/src/From2iToMono_32.cpp",
- "Common/src/Mult3s_32x16.cpp",
- "Common/src/NonLinComp_D16.cpp",
- "Common/src/DelayMix_16x16.cpp",
- "Common/src/MSTo2i_Sat_16x16.cpp",
- "Common/src/From2iToMS_16x16.cpp",
- "Common/src/Mac3s_Sat_32x16.cpp",
- "Common/src/Add2_Sat_32x32.cpp",
- "Common/src/LVC_MixSoft_1St_2i_D16C31_SAT.cpp",
- "Common/src/LVC_MixSoft_1St_D16C31_SAT.cpp",
- "Common/src/LVC_Mixer_VarSlope_SetTimeConstant.cpp",
- "Common/src/LVC_Mixer_SetTimeConstant.cpp",
- "Common/src/LVC_Mixer_SetTarget.cpp",
- "Common/src/LVC_Mixer_GetTarget.cpp",
- "Common/src/LVC_Mixer_Init.cpp",
- "Common/src/LVC_Core_MixHard_1St_2i_D16C31_SAT.cpp",
- "Common/src/LVC_Core_MixSoft_1St_2i_D16C31_WRA.cpp",
- "Common/src/LVC_Core_MixInSoft_D16C31_SAT.cpp",
- "Common/src/LVC_Mixer_GetCurrent.cpp",
- "Common/src/LVC_MixSoft_2St_D16C31_SAT.cpp",
- "Common/src/LVC_Core_MixSoft_1St_D16C31_WRA.cpp",
- "Common/src/LVC_Core_MixHard_2St_D16C31_SAT.cpp",
- "Common/src/LVC_MixInSoft_D16C31_SAT.cpp",
- "Common/src/AGC_MIX_VOL_2St1Mon_D32_WRA.cpp",
- "Common/src/LVM_Timer.cpp",
- "Common/src/LVM_Timer_Init.cpp",
],
local_include_dirs: [
- "Eq/lib",
- "Eq/src",
"Bass/lib",
"Bass/src",
- "Common/src",
"Bundle/src",
+ "Common/src",
+ "Eq/lib",
+ "Eq/src",
"SpectrumAnalyzer/lib",
"SpectrumAnalyzer/src",
- "StereoWidening/src",
"StereoWidening/lib",
+ "StereoWidening/src",
],
export_include_dirs: [
- "Common/lib",
"Bundle/lib",
+ "Common/lib",
],
shared_libs: [
"liblog",
@@ -121,9 +121,9 @@
"libhardware_headers",
],
cppflags: [
- "-fvisibility=hidden",
"-Wall",
"-Werror",
+ "-fvisibility=hidden",
],
}
@@ -141,6 +141,26 @@
vendor: true,
host_supported: true,
srcs: [
+ "Common/src/Add2_Sat_32x32.cpp",
+ "Common/src/Copy_16.cpp",
+ "Common/src/Core_MixHard_2St_D32C31_SAT.cpp",
+ "Common/src/Core_MixInSoft_D32C31_SAT.cpp",
+ "Common/src/Core_MixSoft_1St_D32C31_WRA.cpp",
+ "Common/src/From2iToMono_32.cpp",
+ "Common/src/JoinTo2i_32x32.cpp",
+ "Common/src/LVM_FO_HPF.cpp",
+ "Common/src/LVM_FO_LPF.cpp",
+ "Common/src/LVM_GetOmega.cpp",
+ "Common/src/LVM_Mixer_TimeConstant.cpp",
+ "Common/src/LVM_Polynomial.cpp",
+ "Common/src/LVM_Power10.cpp",
+ "Common/src/Mac3s_Sat_32x16.cpp",
+ "Common/src/MixInSoft_D32C31_SAT.cpp",
+ "Common/src/MixSoft_1St_D32C31_WRA.cpp",
+ "Common/src/MixSoft_2St_D32C31_SAT.cpp",
+ "Common/src/MonoTo2I_32.cpp",
+ "Common/src/Mult3s_32x16.cpp",
+ "Common/src/Shift_Sat_v32xv32.cpp",
"Reverb/src/LVREV_ApplyNewSettings.cpp",
"Reverb/src/LVREV_ClearAudioBuffers.cpp",
"Reverb/src/LVREV_GetControlParameters.cpp",
@@ -148,42 +168,22 @@
"Reverb/src/LVREV_Process.cpp",
"Reverb/src/LVREV_SetControlParameters.cpp",
"Reverb/src/LVREV_Tables.cpp",
- "Common/src/From2iToMono_32.cpp",
- "Common/src/Mult3s_32x16.cpp",
- "Common/src/Copy_16.cpp",
- "Common/src/Mac3s_Sat_32x16.cpp",
- "Common/src/Shift_Sat_v32xv32.cpp",
- "Common/src/Add2_Sat_32x32.cpp",
- "Common/src/JoinTo2i_32x32.cpp",
- "Common/src/MonoTo2I_32.cpp",
- "Common/src/LVM_FO_HPF.cpp",
- "Common/src/LVM_FO_LPF.cpp",
- "Common/src/LVM_Polynomial.cpp",
- "Common/src/LVM_Power10.cpp",
- "Common/src/LVM_GetOmega.cpp",
- "Common/src/MixSoft_2St_D32C31_SAT.cpp",
- "Common/src/MixSoft_1St_D32C31_WRA.cpp",
- "Common/src/MixInSoft_D32C31_SAT.cpp",
- "Common/src/LVM_Mixer_TimeConstant.cpp",
- "Common/src/Core_MixHard_2St_D32C31_SAT.cpp",
- "Common/src/Core_MixSoft_1St_D32C31_WRA.cpp",
- "Common/src/Core_MixInSoft_D32C31_SAT.cpp",
],
local_include_dirs: [
- "Reverb/src",
"Common/src",
+ "Reverb/src",
],
export_include_dirs: [
- "Reverb/lib",
"Common/lib",
+ "Reverb/lib",
],
static_libs: [
"libaudioutils",
],
cppflags: [
- "-fvisibility=hidden",
"-Wall",
"-Werror",
+ "-fvisibility=hidden",
],
}
diff --git a/media/libeffects/lvm/wrapper/Android.bp b/media/libeffects/lvm/wrapper/Android.bp
index 781aad6..5b48045 100644
--- a/media/libeffects/lvm/wrapper/Android.bp
+++ b/media/libeffects/lvm/wrapper/Android.bp
@@ -52,8 +52,8 @@
local_include_dirs: ["Bundle"],
header_libs: [
- "libhardware_headers",
"libaudioeffects",
+ "libhardware_headers",
],
}
@@ -93,8 +93,8 @@
export_include_dirs: ["Reverb"],
header_libs: [
- "libhardware_headers",
"libaudioeffects",
+ "libhardware_headers",
],
sanitize: {
@@ -105,9 +105,9 @@
cc_library_shared {
name: "libbundleaidl",
srcs: [
+ ":effectCommonFile",
"Aidl/BundleContext.cpp",
"Aidl/EffectBundleAidl.cpp",
- ":effectCommonFile",
],
static_libs: ["libmusicbundle"],
defaults: [
@@ -125,8 +125,8 @@
"libstagefright_foundation",
],
cflags: [
- "-Wthread-safety",
"-DBACKEND_NDK",
+ "-Wthread-safety",
],
relative_install_path: "soundfx",
visibility: [
@@ -137,9 +137,9 @@
cc_library_shared {
name: "libreverbaidl",
srcs: [
- "Reverb/aidl/ReverbContext.cpp",
- "Reverb/aidl/EffectReverb.cpp",
":effectCommonFile",
+ "Reverb/aidl/EffectReverb.cpp",
+ "Reverb/aidl/ReverbContext.cpp",
],
static_libs: ["libreverb"],
defaults: [
@@ -151,8 +151,8 @@
"libhardware_headers",
],
shared_libs: [
- "libbase",
"libaudioutils",
+ "libbase",
"libcutils",
"liblog",
],
diff --git a/media/libeffects/lvm/wrapper/Reverb/aidl/ReverbContext.cpp b/media/libeffects/lvm/wrapper/Reverb/aidl/ReverbContext.cpp
index 67518af..44ea2a4 100644
--- a/media/libeffects/lvm/wrapper/Reverb/aidl/ReverbContext.cpp
+++ b/media/libeffects/lvm/wrapper/Reverb/aidl/ReverbContext.cpp
@@ -353,45 +353,62 @@
return status;
}
- std::vector<float> inFrames(samples);
- std::vector<float> outFrames(frameCount * FCC_2);
+ std::vector<float> inputSamples;
+ std::vector<float> outputSamples(frameCount * FCC_2);
if (isPreset() && mNextPreset != mPreset) {
loadPreset();
}
if (isAuxiliary()) {
- inFrames.assign(in, in + samples);
+ inputSamples.resize(samples);
+ inputSamples.assign(in, in + samples);
} else {
- // mono input is duplicated
+ // Resizing to stereo is required to duplicate mono input
+ inputSamples.resize(frameCount * FCC_2);
if (channels >= FCC_2) {
for (int i = 0; i < frameCount; i++) {
- inFrames[FCC_2 * i] = in[channels * i] * kSendLevel;
- inFrames[FCC_2 * i + 1] = in[channels * i + 1] * kSendLevel;
+ inputSamples[FCC_2 * i] = in[channels * i] * kSendLevel;
+ inputSamples[FCC_2 * i + 1] = in[channels * i + 1] * kSendLevel;
}
} else {
for (int i = 0; i < frameCount; i++) {
- inFrames[FCC_2 * i] = inFrames[FCC_2 * i + 1] = in[i] * kSendLevel;
+ inputSamples[FCC_2 * i] = inputSamples[FCC_2 * i + 1] = in[i] * kSendLevel;
}
}
}
if (isPreset() && mPreset == PresetReverb::Presets::NONE) {
- std::fill(outFrames.begin(), outFrames.end(), 0); // always stereo here
+ std::fill(outputSamples.begin(), outputSamples.end(), 0); // always stereo here
} else {
if (!mEnabled && mSamplesToExitCount > 0) {
- std::fill(outFrames.begin(), outFrames.end(), 0);
+ std::fill(outputSamples.begin(), outputSamples.end(), 0);
}
+ int inputBufferIndex = 0;
+ int outputBufferIndex = 0;
+
+ // The LVREV library accepts at most INT16_MAX frames per call
+ constexpr int kMaxBlockFrames = std::numeric_limits<int16_t>::max();
+ const auto inputFrameSize = getInputFrameSize();
+ const auto outputFrameSize = getOutputFrameSize();
/* Process the samples, producing a stereo output */
- LVREV_ReturnStatus_en lvrevStatus =
- LVREV_Process(mInstance, /* Instance handle */
- inFrames.data(), /* Input buffer */
- outFrames.data(), /* Output buffer */
- frameCount); /* Number of samples to read */
- if (lvrevStatus != LVREV_SUCCESS) {
- LOG(ERROR) << __func__ << " LVREV_Process error: " << lvrevStatus;
- return {EX_UNSUPPORTED_OPERATION, 0, 0};
+ for (int fc = frameCount; fc > 0;) {
+ int processFrames = std::min(fc, kMaxBlockFrames);
+ LVREV_ReturnStatus_en lvrevStatus =
+ LVREV_Process(mInstance, /* Instance handle */
+ inputSamples.data() + inputBufferIndex, /* Input buffer */
+ outputSamples.data() + outputBufferIndex, /* Output buffer */
+ processFrames); /* Number of samples to process */
+ if (lvrevStatus != LVREV_SUCCESS) {
+ LOG(ERROR) << __func__ << " LVREV_Process error: " << lvrevStatus;
+ return {EX_UNSUPPORTED_OPERATION, 0, 0};
+ }
+
+ fc -= processFrames;
+
+ inputBufferIndex += processFrames * inputFrameSize / sizeof(float);
+ outputBufferIndex += processFrames * outputFrameSize / sizeof(float);
}
}
// Convert to 16 bits
@@ -401,14 +418,14 @@
if (channels >= FCC_2) {
for (int i = 0; i < frameCount; i++) {
// Mix with dry input
- outFrames[FCC_2 * i] += in[channels * i];
- outFrames[FCC_2 * i + 1] += in[channels * i + 1];
+ outputSamples[FCC_2 * i] += in[channels * i];
+ outputSamples[FCC_2 * i + 1] += in[channels * i + 1];
}
} else {
for (int i = 0; i < frameCount; i++) {
// Mix with dry input
- outFrames[FCC_2 * i] += in[i];
- outFrames[FCC_2 * i + 1] += in[i];
+ outputSamples[FCC_2 * i] += in[i];
+ outputSamples[FCC_2 * i + 1] += in[i];
}
}
@@ -420,8 +437,8 @@
float incr = (mVolume.right - vr) / frameCount;
for (int i = 0; i < frameCount; i++) {
- outFrames[FCC_2 * i] *= vl;
- outFrames[FCC_2 * i + 1] *= vr;
+ outputSamples[FCC_2 * i] *= vl;
+ outputSamples[FCC_2 * i + 1] *= vr;
vl += incl;
vr += incr;
@@ -430,8 +447,8 @@
} else if (volumeMode != VOLUME_OFF) {
if (mVolume.left != kUnitVolume || mVolume.right != kUnitVolume) {
for (int i = 0; i < frameCount; i++) {
- outFrames[FCC_2 * i] *= mVolume.left;
- outFrames[FCC_2 * i + 1] *= mVolume.right;
+ outputSamples[FCC_2 * i] *= mVolume.left;
+ outputSamples[FCC_2 * i + 1] *= mVolume.right;
}
}
mPrevVolume = mVolume;
@@ -441,8 +458,8 @@
if (outChannels > 2) {
for (int i = 0; i < frameCount; i++) {
- out[outChannels * i] = outFrames[FCC_2 * i];
- out[outChannels * i + 1] = outFrames[FCC_2 * i + 1];
+ out[outChannels * i] = outputSamples[FCC_2 * i];
+ out[outChannels * i + 1] = outputSamples[FCC_2 * i + 1];
}
if (!isAuxiliary()) {
for (int i = 0; i < frameCount; i++) {
@@ -454,10 +471,10 @@
}
} else {
if (outChannels == FCC_1) {
- From2iToMono_Float(outFrames.data(), out, frameCount);
+ From2iToMono_Float(outputSamples.data(), out, frameCount);
} else {
for (int i = 0; i < frameCount * FCC_2; i++) {
- out[i] = outFrames[i];
+ out[i] = outputSamples[i];
}
}
}
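The loop introduced above exists because LVREV_Process accepts at most INT16_MAX frames per call, so a large buffer has to be fed to the library in blocks while the input and output cursors advance by the frames just consumed. Below is a minimal, self-contained sketch of that chunking pattern; processBlock(), kFloatsPerFrame and the buffers are illustrative stand-ins, not the LVREV API.

    #include <algorithm>
    #include <cstdint>
    #include <cstdio>
    #include <limits>
    #include <vector>

    namespace {
    constexpr int kFloatsPerFrame = 2;  // stereo, one float per channel

    // Stand-in for a library call that is limited to int16_t frame counts.
    void processBlock(const float* in, float* out, int frames) {
        std::copy(in, in + frames * kFloatsPerFrame, out);  // identity "effect"
    }

    void processAllFrames(const std::vector<float>& in, std::vector<float>& out, int frameCount) {
        constexpr int kMaxBlockFrames = std::numeric_limits<int16_t>::max();
        int inIndex = 0;
        int outIndex = 0;
        for (int remaining = frameCount; remaining > 0;) {
            const int blockFrames = std::min(remaining, kMaxBlockFrames);
            processBlock(in.data() + inIndex, out.data() + outIndex, blockFrames);
            remaining -= blockFrames;
            inIndex += blockFrames * kFloatsPerFrame;
            outIndex += blockFrames * kFloatsPerFrame;
        }
    }
    }  // namespace

    int main() {
        const int frameCount = 100000;  // larger than one 32767-frame block
        std::vector<float> in(frameCount * kFloatsPerFrame, 0.5f);
        std::vector<float> out(frameCount * kFloatsPerFrame, 0.0f);
        processAllFrames(in, out, frameCount);
        std::printf("out[0] = %f\n", out[0]);
        return 0;
    }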
diff --git a/media/libeffects/preprocessing/Android.bp b/media/libeffects/preprocessing/Android.bp
index d658536..44b7d97 100644
--- a/media/libeffects/preprocessing/Android.bp
+++ b/media/libeffects/preprocessing/Android.bp
@@ -63,30 +63,30 @@
cc_library_shared {
name: "libpreprocessingaidl",
srcs: [
- "aidl/PreProcessingContext.cpp",
- "aidl/EffectPreProcessing.cpp",
":effectCommonFile",
+ "aidl/EffectPreProcessing.cpp",
+ "aidl/PreProcessingContext.cpp",
],
defaults: [
"aidlaudioeffectservice_defaults",
],
local_include_dirs: ["aidl"],
shared_libs: [
+ "libaudioutils",
"liblog",
"libutils",
- "libaudioutils",
],
static_libs: [
"webrtc_audio_processing",
],
header_libs: [
- "libwebrtc_absl_headers",
"libaudioeffects",
"libhardware_headers",
+ "libwebrtc_absl_headers",
],
cflags: [
- "-Wthread-safety",
"-Wno-unused-parameter",
+ "-Wthread-safety",
],
relative_install_path: "soundfx",
visibility: [
diff --git a/media/libeffects/proxy/Android.bp b/media/libeffects/proxy/Android.bp
index 6256eda..95da4de 100644
--- a/media/libeffects/proxy/Android.bp
+++ b/media/libeffects/proxy/Android.bp
@@ -29,19 +29,19 @@
srcs: ["EffectProxy.cpp"],
cflags: [
- "-fvisibility=hidden",
"-Wall",
"-Werror",
+ "-fvisibility=hidden",
],
include_dirs: ["frameworks/av/media/libeffects/factory"],
header_libs: ["libaudioeffects"],
shared_libs: [
- "liblog",
"libcutils",
- "libutils",
"libdl",
"libeffects",
+ "liblog",
+ "libutils",
],
}
diff --git a/media/libeffects/testlibs/Android.bp b/media/libeffects/testlibs/Android.bp
index 5ba56bb..f5aad92 100644
--- a/media/libeffects/testlibs/Android.bp
+++ b/media/libeffects/testlibs/Android.bp
@@ -33,10 +33,10 @@
relative_install_path: "soundfx",
cflags: [
- "-fvisibility=hidden",
"-Wall",
"-Werror",
"-Wno-address-of-packed-member",
+ "-fvisibility=hidden",
],
header_libs: [
@@ -66,9 +66,9 @@
relative_install_path: "soundfx",
cflags: [
- "-fvisibility=hidden",
"-Wall",
"-Werror",
+ "-fvisibility=hidden",
],
header_libs: [
diff --git a/media/libeffects/visualizer/Android.bp b/media/libeffects/visualizer/Android.bp
index 66ceadf..8f1d8da 100644
--- a/media/libeffects/visualizer/Android.bp
+++ b/media/libeffects/visualizer/Android.bp
@@ -54,9 +54,9 @@
cc_library_shared {
name: "libvisualizeraidl",
srcs: [
+ ":effectCommonFile",
"aidl/Visualizer.cpp",
"aidl/VisualizerContext.cpp",
- ":effectCommonFile",
],
defaults: [
"aidlaudioeffectservice_defaults",
diff --git a/media/libheif/OWNERS b/media/libheif/OWNERS
new file mode 100644
index 0000000..a61ad21
--- /dev/null
+++ b/media/libheif/OWNERS
@@ -0,0 +1,2 @@
+include platform/frameworks/av:/media/janitors/avic_OWNERS
+include platform/frameworks/av:/media/janitors/codec_OWNERS
\ No newline at end of file
diff --git a/media/libmedia/Android.bp b/media/libmedia/Android.bp
index 590a7b7..840897f 100644
--- a/media/libmedia/Android.bp
+++ b/media/libmedia/Android.bp
@@ -223,7 +223,6 @@
"com.android.media",
],
-
srcs: ["MidiIoWrapper.cpp"],
static_libs: [
@@ -278,6 +277,10 @@
"libutils",
],
+ static_libs: [
+ "android.media.codec-aconfig-cc",
+ ],
+
include_dirs: [
"system/libhidl/transport/token/1.0/utils/include",
],
diff --git a/media/libmedia/MediaCodecInfo.cpp b/media/libmedia/MediaCodecInfo.cpp
index 86ad997..c45c5c3 100644
--- a/media/libmedia/MediaCodecInfo.cpp
+++ b/media/libmedia/MediaCodecInfo.cpp
@@ -36,6 +36,7 @@
constexpr char MediaCodecInfo::Capabilities::FEATURE_MULTIPLE_FRAMES[];
constexpr char MediaCodecInfo::Capabilities::FEATURE_SECURE_PLAYBACK[];
constexpr char MediaCodecInfo::Capabilities::FEATURE_TUNNELED_PLAYBACK[];
+constexpr char MediaCodecInfo::Capabilities::FEATURE_DETACHED_SURFACE[];
void MediaCodecInfo::Capabilities::getSupportedProfileLevels(
Vector<ProfileLevel> *profileLevels) const {
diff --git a/media/libmedia/include/media/MediaCodecInfo.h b/media/libmedia/include/media/MediaCodecInfo.h
index 54f565a..88a2dc4 100644
--- a/media/libmedia/include/media/MediaCodecInfo.h
+++ b/media/libmedia/include/media/MediaCodecInfo.h
@@ -69,6 +69,7 @@
constexpr static char FEATURE_MULTIPLE_FRAMES[] = "feature-multiple-frames";
constexpr static char FEATURE_SECURE_PLAYBACK[] = "feature-secure-playback";
constexpr static char FEATURE_TUNNELED_PLAYBACK[] = "feature-tunneled-playback";
+ constexpr static char FEATURE_DETACHED_SURFACE[] = "feature-detached-surface";
/**
* Returns the supported levels for each supported profile in a target array.
diff --git a/media/libmediametrics/Android.bp b/media/libmediametrics/Android.bp
index 8a38dd7..5214dfe 100644
--- a/media/libmediametrics/Android.bp
+++ b/media/libmediametrics/Android.bp
@@ -16,8 +16,8 @@
name: "libmediametrics",
srcs: [
- "MediaMetricsItem.cpp",
"MediaMetrics.cpp",
+ "MediaMetricsItem.cpp",
],
shared_libs: [
@@ -40,8 +40,8 @@
sanitize: {
misc_undefined: [
- "unsigned-integer-overflow",
"signed-integer-overflow",
+ "unsigned-integer-overflow",
],
cfi: true,
},
@@ -50,8 +50,8 @@
stubs: {
symbol_file: "libmediametrics.map.txt",
versions: [
- "1" ,
- ]
+ "1",
+ ],
},
header_abi_checker: {
@@ -65,7 +65,7 @@
"//frameworks/base/apex/media/framework",
"//frameworks/base/core/jni",
"//frameworks/base/media/jni",
- "//packages/modules/Media/apex/framework",
+ "//packages/modules/Media/apex/framework",
],
}
diff --git a/media/libmediaplayerservice/DeathNotifier.cpp b/media/libmediaplayerservice/DeathNotifier.cpp
index ab22f67..241c52d 100644
--- a/media/libmediaplayerservice/DeathNotifier.cpp
+++ b/media/libmediaplayerservice/DeathNotifier.cpp
@@ -17,11 +17,18 @@
//#define LOG_NDEBUG 0
#define LOG_TAG "MediaPlayerService-DeathNotifier"
#include <android-base/logging.h>
+#include <map>
#include "DeathNotifier.h"
namespace android {
+// Only dereference the cookie if it is valid (i.e. if it is still in this map).
+// Only used with the NDK backend.
+static uintptr_t sCookieKeyCounter = 0;
+static std::map<uintptr_t, wp<DeathNotifier::DeathRecipient>> sCookies;
+static std::mutex sCookiesMutex;
+
class DeathNotifier::DeathRecipient :
public IBinder::DeathRecipient,
public hardware::hidl_death_recipient {
@@ -44,13 +51,32 @@
}
static void OnBinderDied(void *cookie) {
- DeathRecipient *thiz = (DeathRecipient *)cookie;
- thiz->mNotify();
+ std::unique_lock<std::mutex> guard(sCookiesMutex);
+ if (auto it = sCookies.find(reinterpret_cast<uintptr_t>(cookie)); it != sCookies.end()) {
+ sp<DeathRecipient> recipient = it->second.promote();
+ sCookies.erase(it);
+ guard.unlock();
+
+ if (recipient) {
+ LOG(INFO) << "Notifying DeathRecipient from OnBinderDied.";
+ recipient->mNotify();
+ } else {
+ LOG(INFO) <<
+ "Tried to notify DeathRecipient from OnBinderDied but could not promote.";
+ }
+ }
}
AIBinder_DeathRecipient *getNdkRecipient() {
return mNdkRecipient.get();;
}
+ ~DeathRecipient() {
+ // The lock must be held so the object is not used concurrently in OnBinderDied.
+ std::lock_guard<std::mutex> guard(sCookiesMutex);
+ sCookies.erase(mCookieKey);
+ }
+
+ uintptr_t mCookieKey;
private:
Notify mNotify;
@@ -73,8 +99,15 @@
: mService{std::in_place_index<3>, service},
mDeathRecipient{new DeathRecipient(notify)} {
mDeathRecipient->initNdk();
+ {
+ std::lock_guard<std::mutex> guard(sCookiesMutex);
+ mDeathRecipient->mCookieKey = sCookieKeyCounter++;
+ sCookies[mDeathRecipient->mCookieKey] = mDeathRecipient;
+ }
AIBinder_linkToDeath(
- service.get(), mDeathRecipient->getNdkRecipient(), mDeathRecipient.get());
+ service.get(),
+ mDeathRecipient->getNdkRecipient(),
+ reinterpret_cast<void*>(mDeathRecipient->mCookieKey));
}
DeathNotifier::DeathNotifier(DeathNotifier&& other)
@@ -94,10 +127,11 @@
std::get<2>(mService)->unlinkToDeath(mDeathRecipient);
break;
case 3:
+
AIBinder_unlinkToDeath(
std::get<3>(mService).get(),
mDeathRecipient->getNdkRecipient(),
- mDeathRecipient.get());
+ reinterpret_cast<void*>(mDeathRecipient->mCookieKey));
break;
default:
CHECK(false) << "Corrupted service type during destruction.";
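The DeathNotifier change replaces the raw object pointer that used to be passed as the NDK death-recipient cookie with an integer key held in a mutex-guarded global map, so a late OnBinderDied callback can no longer dereference a destroyed recipient. A minimal, self-contained sketch of that registry pattern follows; the function names are illustrative and no binder API is used.

    #include <cstdint>
    #include <cstdio>
    #include <functional>
    #include <map>
    #include <mutex>

    namespace {
    std::mutex gCookiesMutex;
    uintptr_t gCookieKeyCounter = 0;
    std::map<uintptr_t, std::function<void()>> gCookies;

    uintptr_t registerRecipient(std::function<void()> onDied) {
        std::lock_guard<std::mutex> guard(gCookiesMutex);
        const uintptr_t key = gCookieKeyCounter++;
        gCookies[key] = std::move(onDied);
        return key;  // reinterpret_cast<void*>(key) is what gets passed as the cookie
    }

    void unregisterRecipient(uintptr_t key) {
        std::lock_guard<std::mutex> guard(gCookiesMutex);
        gCookies.erase(key);  // done from the recipient's destructor in the patch
    }

    void onBinderDied(void* cookie) {
        std::function<void()> notify;
        {
            std::lock_guard<std::mutex> guard(gCookiesMutex);
            auto it = gCookies.find(reinterpret_cast<uintptr_t>(cookie));
            if (it == gCookies.end()) return;  // stale cookie: recipient already gone
            notify = it->second;
            gCookies.erase(it);
        }
        notify();  // invoke outside the lock, as the patch does
    }
    }  // namespace

    int main() {
        uintptr_t staleKey = registerRecipient([] { std::printf("should not fire\n"); });
        unregisterRecipient(staleKey);                    // recipient destroyed first
        onBinderDied(reinterpret_cast<void*>(staleKey));  // safely ignored

        uintptr_t liveKey = registerRecipient([] { std::printf("death notification\n"); });
        onBinderDied(reinterpret_cast<void*>(liveKey));   // prints once
        return 0;
    }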
diff --git a/media/libmediaplayerservice/DeathNotifier.h b/media/libmediaplayerservice/DeathNotifier.h
index 24e45a3..0fd7c65 100644
--- a/media/libmediaplayerservice/DeathNotifier.h
+++ b/media/libmediaplayerservice/DeathNotifier.h
@@ -37,10 +37,11 @@
DeathNotifier(DeathNotifier&& other);
~DeathNotifier();
+ class DeathRecipient;
+
private:
std::variant<std::monostate, sp<IBinder>, sp<HBase>, ::ndk::SpAIBinder> mService;
- class DeathRecipient;
sp<DeathRecipient> mDeathRecipient;
};
diff --git a/media/libmediaplayerservice/fuzzer/Android.bp b/media/libmediaplayerservice/fuzzer/Android.bp
index 74b0a85..7a1411d 100644
--- a/media/libmediaplayerservice/fuzzer/Android.bp
+++ b/media/libmediaplayerservice/fuzzer/Android.bp
@@ -159,10 +159,13 @@
"libmediaplayerserviceFuzzer_defaults",
],
static_libs: [
+ "libgmock",
+ "libgtest_ndk_c++",
"libplayerservice_datasource",
"libstagefright_nuplayer",
"libstagefright_rtsp",
"libstagefright_timedtext",
+ "libbinder_random_parcel",
],
shared_libs: [
"android.hardware.media.c2@1.0",
@@ -191,7 +194,10 @@
"libpowermanager",
"libstagefright_httplive",
"libaudiohal@7.0",
+ "libmediaextractorservice",
],
+ corpus: ["corpus/*"],
+ include_dirs: ["frameworks/av/services/mediaextractor"],
}
cc_fuzz {
diff --git a/media/libmediaplayerservice/fuzzer/corpus/08873afe0bb32c29d6aed741d06c3ebfcfcf6204 b/media/libmediaplayerservice/fuzzer/corpus/08873afe0bb32c29d6aed741d06c3ebfcfcf6204
new file mode 100755
index 0000000..13e4732
--- /dev/null
+++ b/media/libmediaplayerservice/fuzzer/corpus/08873afe0bb32c29d6aed741d06c3ebfcfcf6204
Binary files differ
diff --git a/media/libmediaplayerservice/fuzzer/corpus/2c1e844c2b86f22075a69121efd280af3104e31f b/media/libmediaplayerservice/fuzzer/corpus/2c1e844c2b86f22075a69121efd280af3104e31f
new file mode 100755
index 0000000..591816e
--- /dev/null
+++ b/media/libmediaplayerservice/fuzzer/corpus/2c1e844c2b86f22075a69121efd280af3104e31f
Binary files differ
diff --git a/media/libmediaplayerservice/fuzzer/corpus/2e5297399ed949e919852cb0471cab25bcca82f4 b/media/libmediaplayerservice/fuzzer/corpus/2e5297399ed949e919852cb0471cab25bcca82f4
new file mode 100755
index 0000000..2acf349
--- /dev/null
+++ b/media/libmediaplayerservice/fuzzer/corpus/2e5297399ed949e919852cb0471cab25bcca82f4
Binary files differ
diff --git a/media/libmediaplayerservice/fuzzer/corpus/2f9bd1fce3b3422e31f2f5bb38db695e2ace7bb8 b/media/libmediaplayerservice/fuzzer/corpus/2f9bd1fce3b3422e31f2f5bb38db695e2ace7bb8
new file mode 100755
index 0000000..941885f
--- /dev/null
+++ b/media/libmediaplayerservice/fuzzer/corpus/2f9bd1fce3b3422e31f2f5bb38db695e2ace7bb8
Binary files differ
diff --git a/media/libmediaplayerservice/fuzzer/corpus/3f967d82b89c0b39e04727213ba71910801b85fa b/media/libmediaplayerservice/fuzzer/corpus/3f967d82b89c0b39e04727213ba71910801b85fa
new file mode 100755
index 0000000..a6920fa
--- /dev/null
+++ b/media/libmediaplayerservice/fuzzer/corpus/3f967d82b89c0b39e04727213ba71910801b85fa
Binary files differ
diff --git a/media/libmediaplayerservice/fuzzer/corpus/40a46cd4f323d9d5d8508188f21f94ddae5bfae6 b/media/libmediaplayerservice/fuzzer/corpus/40a46cd4f323d9d5d8508188f21f94ddae5bfae6
new file mode 100755
index 0000000..6b70ddd
--- /dev/null
+++ b/media/libmediaplayerservice/fuzzer/corpus/40a46cd4f323d9d5d8508188f21f94ddae5bfae6
Binary files differ
diff --git a/media/libmediaplayerservice/fuzzer/corpus/543adcc7136463c859c38fce066f87b1141bd622 b/media/libmediaplayerservice/fuzzer/corpus/543adcc7136463c859c38fce066f87b1141bd622
new file mode 100755
index 0000000..a919290
--- /dev/null
+++ b/media/libmediaplayerservice/fuzzer/corpus/543adcc7136463c859c38fce066f87b1141bd622
Binary files differ
diff --git a/media/libmediaplayerservice/fuzzer/corpus/874152a3db53b5e4c153655e8ee9443e6ec4c0b1 b/media/libmediaplayerservice/fuzzer/corpus/874152a3db53b5e4c153655e8ee9443e6ec4c0b1
new file mode 100755
index 0000000..1062677
--- /dev/null
+++ b/media/libmediaplayerservice/fuzzer/corpus/874152a3db53b5e4c153655e8ee9443e6ec4c0b1
Binary files differ
diff --git a/media/libmediaplayerservice/fuzzer/corpus/bf37374209d5417fa2a6a1cddcae6df44397b075 b/media/libmediaplayerservice/fuzzer/corpus/bf37374209d5417fa2a6a1cddcae6df44397b075
new file mode 100755
index 0000000..ed11aff
--- /dev/null
+++ b/media/libmediaplayerservice/fuzzer/corpus/bf37374209d5417fa2a6a1cddcae6df44397b075
Binary files differ
diff --git a/media/libmediaplayerservice/fuzzer/corpus/d537f5df0726fa840146a4c788855f8658b7aae0 b/media/libmediaplayerservice/fuzzer/corpus/d537f5df0726fa840146a4c788855f8658b7aae0
new file mode 100755
index 0000000..d82f45d
--- /dev/null
+++ b/media/libmediaplayerservice/fuzzer/corpus/d537f5df0726fa840146a4c788855f8658b7aae0
Binary files differ
diff --git a/media/libmediaplayerservice/fuzzer/corpus/e66a979713ba719fc30005b770b627c187a64607 b/media/libmediaplayerservice/fuzzer/corpus/e66a979713ba719fc30005b770b627c187a64607
new file mode 100755
index 0000000..32af6ee
--- /dev/null
+++ b/media/libmediaplayerservice/fuzzer/corpus/e66a979713ba719fc30005b770b627c187a64607
Binary files differ
diff --git a/media/libmediaplayerservice/fuzzer/corpus/e712688b60610ff05d48b7b74695993a05338112 b/media/libmediaplayerservice/fuzzer/corpus/e712688b60610ff05d48b7b74695993a05338112
new file mode 100755
index 0000000..abfba79
--- /dev/null
+++ b/media/libmediaplayerservice/fuzzer/corpus/e712688b60610ff05d48b7b74695993a05338112
Binary files differ
diff --git a/media/libmediaplayerservice/fuzzer/corpus/f5c60a210c8fbd5a9a3c131f6097d5d07fc45324 b/media/libmediaplayerservice/fuzzer/corpus/f5c60a210c8fbd5a9a3c131f6097d5d07fc45324
new file mode 100755
index 0000000..7fb1bca
--- /dev/null
+++ b/media/libmediaplayerservice/fuzzer/corpus/f5c60a210c8fbd5a9a3c131f6097d5d07fc45324
Binary files differ
diff --git a/media/libmediaplayerservice/fuzzer/mediaplayer_fuzzer.cpp b/media/libmediaplayerservice/fuzzer/mediaplayer_fuzzer.cpp
index a189d04..652b1ee 100644
--- a/media/libmediaplayerservice/fuzzer/mediaplayer_fuzzer.cpp
+++ b/media/libmediaplayerservice/fuzzer/mediaplayer_fuzzer.cpp
@@ -15,9 +15,13 @@
*
*/
+#include <MediaExtractorService.h>
#include <MediaPlayerService.h>
+#include <android/gui/BnSurfaceComposerClient.h>
#include <camera/Camera.h>
#include <datasource/FileSource.h>
+#include <fuzzbinder/random_binder.h>
+#include <gmock/gmock.h>
#include <gui/Surface.h>
#include <gui/SurfaceComposerClient.h>
#include <media/IMediaCodecList.h>
@@ -31,40 +35,37 @@
#include <media/stagefright/RemoteDataSource.h>
#include <media/stagefright/foundation/base64.h>
#include <thread>
+#include "android-base/stringprintf.h"
#include "fuzzer/FuzzedDataProvider.h"
-
-constexpr int32_t kUuidSize = 16;
-constexpr int32_t kMaxSleepTimeInMs = 100;
-constexpr int32_t kMinSleepTimeInMs = 0;
-constexpr int32_t kPlayCountMin = 1;
-constexpr int32_t kPlayCountMax = 10;
-constexpr int32_t kMaxDimension = 8192;
-constexpr int32_t kMinDimension = 0;
-
using namespace std;
using namespace android;
-constexpr audio_session_t kSupportedAudioSessions[] = {
- AUDIO_SESSION_DEVICE, AUDIO_SESSION_OUTPUT_STAGE, AUDIO_SESSION_OUTPUT_MIX};
+constexpr int32_t kUuidSize = 16;
+constexpr int32_t kMinSize = 0;
+constexpr int32_t kMaxSize = 100;
+constexpr int32_t kFourCCVal = android::FOURCC('m', 't', 'r', 'X');
+constexpr int32_t kFlagVal =
+ ISurfaceComposerClient::eCursorWindow | ISurfaceComposerClient::eOpaque;
-constexpr audio_timestretch_stretch_mode_t kAudioStretchModes[] = {
- AUDIO_TIMESTRETCH_STRETCH_DEFAULT, AUDIO_TIMESTRETCH_STRETCH_VOICE};
+const char dumpFile[] = "OutputDumpFile";
-constexpr audio_timestretch_fallback_mode_t kAudioFallbackModes[] = {
- AUDIO_TIMESTRETCH_FALLBACK_CUT_REPEAT, AUDIO_TIMESTRETCH_FALLBACK_DEFAULT,
- AUDIO_TIMESTRETCH_FALLBACK_MUTE, AUDIO_TIMESTRETCH_FALLBACK_FAIL};
+enum DataSourceType { HTTP, FD, STREAM, FILETYPE, SOCKET, kMaxValue = SOCKET };
+
+constexpr PixelFormat kPixelFormat[] = {
+ PIXEL_FORMAT_UNKNOWN, PIXEL_FORMAT_NONE, PIXEL_FORMAT_CUSTOM,
+ PIXEL_FORMAT_TRANSLUCENT, PIXEL_FORMAT_TRANSPARENT, PIXEL_FORMAT_OPAQUE,
+ PIXEL_FORMAT_RGBA_8888, PIXEL_FORMAT_RGBX_8888, PIXEL_FORMAT_RGB_888,
+ PIXEL_FORMAT_RGB_565, PIXEL_FORMAT_BGRA_8888, PIXEL_FORMAT_RGBA_5551,
+ PIXEL_FORMAT_RGBA_4444, PIXEL_FORMAT_RGBA_FP16, PIXEL_FORMAT_RGBA_1010102,
+ PIXEL_FORMAT_R_8, PIXEL_FORMAT_R_16_UINT, PIXEL_FORMAT_RG_1616_UINT,
+ PIXEL_FORMAT_RGBA_10101010,
+};
constexpr media_parameter_keys kMediaParamKeys[] = {
KEY_PARAMETER_CACHE_STAT_COLLECT_FREQ_MS, KEY_PARAMETER_AUDIO_CHANNEL_COUNT,
KEY_PARAMETER_PLAYBACK_RATE_PERMILLE, KEY_PARAMETER_AUDIO_ATTRIBUTES,
KEY_PARAMETER_RTP_ATTRIBUTES};
-constexpr audio_stream_type_t kAudioStreamTypes[] = {
- AUDIO_STREAM_DEFAULT, AUDIO_STREAM_VOICE_CALL, AUDIO_STREAM_SYSTEM,
- AUDIO_STREAM_RING, AUDIO_STREAM_MUSIC, AUDIO_STREAM_ALARM,
- AUDIO_STREAM_NOTIFICATION, AUDIO_STREAM_BLUETOOTH_SCO, AUDIO_STREAM_ENFORCED_AUDIBLE,
- AUDIO_STREAM_DTMF, AUDIO_STREAM_TTS, AUDIO_STREAM_ASSISTANT};
-
constexpr media_event_type kMediaEventTypes[] = {MEDIA_NOP,
MEDIA_PREPARED,
MEDIA_PLAYBACK_COMPLETE,
@@ -140,9 +141,26 @@
DISALLOW_EVIL_CONSTRUCTORS(TestMediaHTTPService);
};
-class BinderDeathNotifier : public IBinder::DeathRecipient {
- public:
- void binderDied(const wp<IBinder> &) { abort(); }
+class FakeBnSurfaceComposerClient : public gui::BnSurfaceComposerClient {
+ public:
+ MOCK_METHOD(binder::Status, createSurface,
+ (const std::string& name, int32_t flags, const sp<IBinder>& parent,
+ const gui::LayerMetadata& metadata, gui::CreateSurfaceResult* outResult),
+ (override));
+
+ MOCK_METHOD(binder::Status, clearLayerFrameStats, (const sp<IBinder>& handle), (override));
+
+ MOCK_METHOD(binder::Status, getLayerFrameStats,
+ (const sp<IBinder>& handle, gui::FrameStats* outStats), (override));
+
+ MOCK_METHOD(binder::Status, mirrorSurface,
+ (const sp<IBinder>& mirrorFromHandle, gui::CreateSurfaceResult* outResult),
+ (override));
+
+ MOCK_METHOD(binder::Status, mirrorDisplay,
+ (int64_t displayId, gui::CreateSurfaceResult* outResult), (override));
+
+ MOCK_METHOD(binder::Status, getSchedulingPolicy, (gui::SchedulingPolicy*), (override));
};
class MediaPlayerServiceFuzzer {
@@ -153,24 +171,40 @@
void process(const uint8_t *data, size_t size);
private:
- bool setDataSource(const uint8_t *data, size_t size);
- void invokeMediaPlayer();
- FuzzedDataProvider mFdp;
- sp<IMediaPlayer> mMediaPlayer = nullptr;
- sp<IMediaPlayerClient> mMediaPlayerClient = nullptr;
- const int32_t mDataSourceFd;
+ FuzzedDataProvider mFdp;
+ const int32_t mDataSourceFd;
+ sp<IMediaPlayer> mMediaPlayer = nullptr;
+ sp<IMediaPlayerClient> mMediaPlayerClient = nullptr;
+ void invokeMediaPlayer();
+ sp<SurfaceControl> makeSurfaceControl();
+ bool setDataSource(const uint8_t* data, size_t size);
};
-bool MediaPlayerServiceFuzzer::setDataSource(const uint8_t *data, size_t size) {
- status_t status = -1;
- enum DataSourceType {http, fd, stream, file, socket, kMaxValue = socket};
- switch (mFdp.ConsumeEnum<DataSourceType>()) {
- case http: {
+sp<SurfaceControl> MediaPlayerServiceFuzzer::makeSurfaceControl() {
+ sp<IBinder> handle = getRandomBinder(&mFdp);
+ const sp<FakeBnSurfaceComposerClient> testClient(new FakeBnSurfaceComposerClient());
+ sp<SurfaceComposerClient> client = new SurfaceComposerClient(testClient);
+ uint32_t width = mFdp.ConsumeIntegral<uint32_t>();
+ uint32_t height = mFdp.ConsumeIntegral<uint32_t>();
+ uint32_t transformHint = mFdp.ConsumeIntegral<uint32_t>();
+ uint32_t flags = mFdp.ConsumeBool() ? kFlagVal : mFdp.ConsumeIntegral<uint32_t>();
+ int32_t format = mFdp.ConsumeBool() ? mFdp.ConsumeIntegral<uint32_t>()
+ : mFdp.PickValueInArray(kPixelFormat);
+ int32_t layerId = mFdp.ConsumeIntegral<int32_t>();
+ std::string layerName = android::base::StringPrintf("#%d", layerId);
+ return new SurfaceControl(client, handle, layerId, layerName, width, height, format,
+ transformHint, flags);
+}
+
+bool MediaPlayerServiceFuzzer::setDataSource(const uint8_t* data, size_t size) {
+ status_t status = UNKNOWN_ERROR;
+ switch (mFdp.ConsumeEnum<DataSourceType>()) {
+ case HTTP: {
KeyedVector<String8, String8> headers;
headers.add(String8(mFdp.ConsumeRandomLengthString().c_str()),
String8(mFdp.ConsumeRandomLengthString().c_str()));
- uint32_t dataBlobSize = mFdp.ConsumeIntegralInRange<uint16_t>(0, size);
+ uint32_t dataBlobSize = mFdp.ConsumeIntegralInRange<uint16_t>(kMinSize, size);
vector<uint8_t> uriSuffix = mFdp.ConsumeBytes<uint8_t>(dataBlobSize);
string uri(mFdp.PickValueInArray(kUrlPrefix));
@@ -183,18 +217,17 @@
mMediaPlayer->setDataSource(testService /*httpService*/, uri.c_str(), &headers);
break;
}
- case fd: {
+ case FD: {
write(mDataSourceFd, data, size);
-
status = mMediaPlayer->setDataSource(mDataSourceFd, 0, size);
break;
}
- case stream: {
+ case STREAM: {
sp<IStreamSource> streamSource = sp<TestStreamSource>::make();
status = mMediaPlayer->setDataSource(streamSource);
break;
}
- case file: {
+ case FILETYPE: {
write(mDataSourceFd, data, size);
sp<DataSource> dataSource = new FileSource(dup(mDataSourceFd), 0, size);
@@ -205,7 +238,7 @@
status = mMediaPlayer->setDataSource(iDataSource);
break;
}
- case socket: {
+ case SOCKET: {
String8 rtpParams = String8(mFdp.ConsumeRandomLengthString().c_str());
struct sockaddr_in endpoint;
endpoint.sin_family = mFdp.ConsumeIntegral<unsigned short>();
@@ -214,190 +247,239 @@
status = mMediaPlayer->setDataSource(rtpParams);
break;
}
- }
-
- if (status != 0) {
+ }
+ if (status != OK) {
return false;
- }
- return true;
+ }
+ return true;
}
void MediaPlayerServiceFuzzer::invokeMediaPlayer() {
- sp<SurfaceComposerClient> composerClient = new SurfaceComposerClient;
- String8 name = String8(mFdp.ConsumeRandomLengthString().c_str());
- uint32_t width = mFdp.ConsumeIntegralInRange<uint32_t>(kMinDimension, kMaxDimension);
- uint32_t height = mFdp.ConsumeIntegralInRange<uint32_t>(kMinDimension, kMaxDimension);
- uint32_t pixelFormat = mFdp.ConsumeIntegral<int32_t>();
- uint32_t flags = mFdp.ConsumeIntegral<int32_t>();
- sp<SurfaceControl> surfaceControl =
- composerClient->createSurface(name, width, height, pixelFormat, flags);
- if (surfaceControl) {
- sp<Surface> surface = surfaceControl->getSurface();
- mMediaPlayer->setVideoSurfaceTexture(surface->getIGraphicBufferProducer());
- }
-
- BufferingSettings buffering;
- buffering.mInitialMarkMs = mFdp.ConsumeIntegral<int32_t>();
- buffering.mResumePlaybackMarkMs = mFdp.ConsumeIntegral<int32_t>();
- mMediaPlayer->setBufferingSettings(buffering);
- mMediaPlayer->getBufferingSettings(&buffering);
-
- mMediaPlayer->prepareAsync();
- size_t playCount = mFdp.ConsumeIntegralInRange<size_t>(kPlayCountMin, kPlayCountMax);
- for (size_t Idx = 0; Idx < playCount; ++Idx) {
- mMediaPlayer->start();
- this_thread::sleep_for(chrono::milliseconds(
- mFdp.ConsumeIntegralInRange<int32_t>(kMinSleepTimeInMs, kMaxSleepTimeInMs)));
- mMediaPlayer->pause();
- this_thread::sleep_for(chrono::milliseconds(
- mFdp.ConsumeIntegralInRange<int32_t>(kMinSleepTimeInMs, kMaxSleepTimeInMs)));
- mMediaPlayer->stop();
- }
- bool state;
- mMediaPlayer->isPlaying(&state);
-
- AudioPlaybackRate rate;
- rate.mSpeed = mFdp.ConsumeFloatingPoint<float>();
- rate.mPitch = mFdp.ConsumeFloatingPoint<float>();
- rate.mStretchMode = mFdp.PickValueInArray(kAudioStretchModes);
- rate.mFallbackMode = mFdp.PickValueInArray(kAudioFallbackModes);
- mMediaPlayer->setPlaybackSettings(rate);
- mMediaPlayer->getPlaybackSettings(&rate);
-
- AVSyncSettings *avSyncSettings = new AVSyncSettings();
- float videoFpsHint = mFdp.ConsumeFloatingPoint<float>();
- mMediaPlayer->setSyncSettings(*avSyncSettings, videoFpsHint);
- mMediaPlayer->getSyncSettings(avSyncSettings, &videoFpsHint);
- delete avSyncSettings;
-
- mMediaPlayer->seekTo(mFdp.ConsumeIntegral<int32_t>());
-
- int32_t msec;
- mMediaPlayer->getCurrentPosition(&msec);
- mMediaPlayer->getDuration(&msec);
- mMediaPlayer->reset();
-
- mMediaPlayer->notifyAt(mFdp.ConsumeIntegral<int64_t>());
-
- mMediaPlayer->setAudioStreamType(mFdp.PickValueInArray(kAudioStreamTypes));
- mMediaPlayer->setLooping(mFdp.ConsumeIntegral<int32_t>());
- float left = mFdp.ConsumeFloatingPoint<float>();
- float right = mFdp.ConsumeFloatingPoint<float>();
- mMediaPlayer->setVolume(left, right);
-
- Parcel request, reply;
- request.writeInt32(mFdp.ConsumeIntegral<int32_t>());
- request.setDataPosition(0);
- mMediaPlayer->invoke(request, &reply);
-
- Parcel filter;
- filter.writeInt32(mFdp.ConsumeIntegral<int32_t>());
- filter.setDataPosition(0);
- mMediaPlayer->setMetadataFilter(filter);
-
- bool updateOnly = mFdp.ConsumeBool();
- bool applyFilter = mFdp.ConsumeBool();
- mMediaPlayer->getMetadata(updateOnly, applyFilter, &reply);
- mMediaPlayer->setAuxEffectSendLevel(mFdp.ConsumeFloatingPoint<float>());
- mMediaPlayer->attachAuxEffect(mFdp.ConsumeIntegral<int32_t>());
-
- int32_t key = mFdp.PickValueInArray(kMediaParamKeys);
- request.writeInt32(mFdp.ConsumeIntegral<int32_t>());
- request.setDataPosition(0);
- mMediaPlayer->setParameter(key, request);
- key = mFdp.PickValueInArray(kMediaParamKeys);
- mMediaPlayer->getParameter(key, &reply);
-
- struct sockaddr_in endpoint;
- mMediaPlayer->getRetransmitEndpoint(&endpoint);
-
- AttributionSourceState attributionSource;
- attributionSource.packageName = mFdp.ConsumeRandomLengthString().c_str();
- attributionSource.token = sp<BBinder>::make();
- const sp<IMediaPlayerService> mpService(IMediaDeathNotifier::getMediaPlayerService());
- sp<IMediaPlayer> mNextMediaPlayer = mpService->create(
- mMediaPlayerClient, mFdp.PickValueInArray(kSupportedAudioSessions), attributionSource);
- mMediaPlayer->setNextPlayer(mNextMediaPlayer);
-
- const sp<media::VolumeShaper::Configuration> configuration =
- sp<media::VolumeShaper::Configuration>::make();
- const sp<media::VolumeShaper::Operation> operation = sp<media::VolumeShaper::Operation>::make();
- mMediaPlayer->applyVolumeShaper(configuration, operation);
-
- mMediaPlayer->getVolumeShaperState(mFdp.ConsumeIntegral<int32_t>());
- uint8_t uuid[kUuidSize];
- for (int32_t index = 0; index < kUuidSize; ++index) {
- uuid[index] = mFdp.ConsumeIntegral<uint8_t>();
- }
- Vector<uint8_t> drmSessionId;
- drmSessionId.push_back(mFdp.ConsumeIntegral<uint8_t>());
- mMediaPlayer->prepareDrm(uuid, drmSessionId);
- mMediaPlayer->releaseDrm();
-
- audio_port_handle_t deviceId = mFdp.ConsumeIntegral<int32_t>();
- mMediaPlayer->setOutputDevice(deviceId);
- mMediaPlayer->getRoutedDeviceId(&deviceId);
-
- mMediaPlayer->enableAudioDeviceCallback(mFdp.ConsumeBool());
-
- sp<MediaPlayer> mediaPlayer = (MediaPlayer *)mMediaPlayer.get();
-
- int32_t msg = mFdp.PickValueInArray(kMediaEventTypes);
- int32_t ext1 = mFdp.PickValueInArray(kMediaInfoTypes);
- int32_t ext2 = mFdp.ConsumeIntegral<int32_t>();
- Parcel obj;
- obj.writeInt32(mFdp.ConsumeIntegral<int32_t>());
- obj.setDataPosition(0);
- mediaPlayer->notify(msg, ext1, ext2, &obj);
-
- int32_t mediaPlayerDumpFd = memfd_create("OutputDumpFile", MFD_ALLOW_SEALING);
- Vector<String16> args;
- args.push_back(String16(mFdp.ConsumeRandomLengthString().c_str()));
- mediaPlayer->dump(mediaPlayerDumpFd, args);
- close(mediaPlayerDumpFd);
-
- mMediaPlayer->disconnect();
+ Parcel request, reply;
+ while (mFdp.remaining_bytes()) {
+ auto invokeMediaPlayerApi = mFdp.PickValueInArray<const std::function<void()>>({
+ [&]() {
+ sp<SurfaceControl> surfaceControl = makeSurfaceControl();
+ if (surfaceControl) {
+ sp<Surface> surface = surfaceControl->getSurface();
+ mMediaPlayer->setVideoSurfaceTexture(surface->getIGraphicBufferProducer());
+ }
+ },
+ [&]() {
+ BufferingSettings buffering;
+ buffering.mInitialMarkMs = mFdp.ConsumeIntegral<int32_t>();
+ buffering.mResumePlaybackMarkMs = mFdp.ConsumeIntegral<int32_t>();
+ mMediaPlayer->setBufferingSettings(buffering);
+ },
+ [&]() {
+ BufferingSettings buffering;
+ mMediaPlayer->getBufferingSettings(&buffering);
+ },
+ [&]() {
+ mMediaPlayer->prepareAsync();
+ this_thread::sleep_for(chrono::milliseconds(100)); // Time to post message
+ },
+ [&]() {
+ mMediaPlayer->start();
+ this_thread::sleep_for(chrono::milliseconds(100)); // Time to post message
+ },
+ [&]() {
+ mMediaPlayer->pause();
+ this_thread::sleep_for(chrono::milliseconds(100)); // Time to post message
+ },
+ [&]() { mMediaPlayer->stop(); },
+ [&]() {
+ bool state;
+ mMediaPlayer->isPlaying(&state);
+ },
+ [&]() {
+ AudioPlaybackRate rate;
+ rate.mSpeed = mFdp.ConsumeFloatingPoint<float>();
+ rate.mPitch = mFdp.ConsumeFloatingPoint<float>();
+ rate.mStretchMode = mFdp.ConsumeBool() ? AUDIO_TIMESTRETCH_STRETCH_DEFAULT
+ : AUDIO_TIMESTRETCH_STRETCH_VOICE;
+ rate.mFallbackMode =
+ (audio_timestretch_fallback_mode_t)mFdp.ConsumeIntegralInRange<int32_t>(
+ AUDIO_TIMESTRETCH_FALLBACK_CUT_REPEAT,
+ AUDIO_TIMESTRETCH_FALLBACK_FAIL);
+ mMediaPlayer->setPlaybackSettings(rate);
+ mMediaPlayer->getPlaybackSettings(&rate);
+ },
+ [&]() {
+ AVSyncSettings* avSyncSettings = new AVSyncSettings();
+ float videoFpsHint = mFdp.ConsumeFloatingPoint<float>();
+ mMediaPlayer->setSyncSettings(*avSyncSettings, videoFpsHint);
+ delete avSyncSettings;
+ },
+ [&]() {
+ AVSyncSettings* avSyncSettings = new AVSyncSettings();
+ float videoFpsHint = 0;
+ mMediaPlayer->getSyncSettings(avSyncSettings, &videoFpsHint);
+ delete avSyncSettings;
+ },
+ [&]() { mMediaPlayer->seekTo(mFdp.ConsumeIntegral<int32_t>()); },
+ [&]() {
+ int32_t msec;
+ mMediaPlayer->getCurrentPosition(&msec);
+ mMediaPlayer->getDuration(&msec);
+ },
+ [&]() { mMediaPlayer->reset(); },
+ [&]() { mMediaPlayer->notifyAt(mFdp.ConsumeIntegral<uint64_t>()); },
+ [&]() {
+ mMediaPlayer->setAudioStreamType(
+ (audio_stream_type_t)mFdp.ConsumeIntegralInRange<int32_t>(
+ AUDIO_STREAM_VOICE_CALL, AUDIO_STREAM_CALL_ASSISTANT));
+ },
+ [&]() { mMediaPlayer->setLooping(mFdp.ConsumeIntegral<int32_t>()); },
+ [&]() {
+ mMediaPlayer->setVolume(mFdp.ConsumeFloatingPoint<float>() /* left */,
+ mFdp.ConsumeFloatingPoint<float>() /* right */);
+ },
+ [&]() {
+ request.writeInt32(mFdp.ConsumeIntegral<int32_t>());
+ request.setDataPosition(0);
+ mMediaPlayer->invoke(request, &reply);
+ },
+ [&]() {
+ Parcel filter;
+ filter.writeInt32(mFdp.ConsumeIntegral<int32_t>());
+ filter.setDataPosition(0);
+ mMediaPlayer->setMetadataFilter(filter);
+ },
+ [&]() {
+ mMediaPlayer->getMetadata(mFdp.ConsumeBool() /* updateOnly */,
+ mFdp.ConsumeBool() /* applyFilter */, &reply);
+ },
+ [&]() { mMediaPlayer->setAuxEffectSendLevel(mFdp.ConsumeFloatingPoint<float>()); },
+ [&]() { mMediaPlayer->attachAuxEffect(mFdp.ConsumeIntegral<int32_t>()); },
+ [&]() {
+ int32_t key = mFdp.PickValueInArray(kMediaParamKeys);
+ request.writeInt32(mFdp.ConsumeIntegral<int32_t>());
+ request.setDataPosition(0);
+ mMediaPlayer->setParameter(key, request);
+ key = mFdp.PickValueInArray(kMediaParamKeys);
+ mMediaPlayer->getParameter(key, &reply);
+ },
+ [&]() {
+ int32_t key =
+ mFdp.ConsumeBool() ? kFourCCVal : mFdp.ConsumeIntegral<uint32_t>();
+ mMediaPlayer->getParameter(key, &reply);
+ },
+ [&]() {
+ struct sockaddr_in endpoint;
+ mMediaPlayer->getRetransmitEndpoint(&endpoint);
+ },
+ [&]() {
+ AttributionSourceState attributionSource;
+ attributionSource.packageName = mFdp.ConsumeRandomLengthString().c_str();
+ attributionSource.token = sp<BBinder>::make();
+ const sp<IMediaPlayerService> mpService(
+ IMediaDeathNotifier::getMediaPlayerService());
+ audio_session_t audioSessionId =
+ (audio_session_t)mFdp.ConsumeIntegralInRange<int32_t>(
+ AUDIO_SESSION_DEVICE, AUDIO_SESSION_OUTPUT_MIX);
+ sp<IMediaPlayer> mNextMediaPlayer = mpService->create(
+ mMediaPlayerClient, audioSessionId, attributionSource);
+ mMediaPlayer->setNextPlayer(mNextMediaPlayer);
+ },
+ [&]() {
+ const sp<media::VolumeShaper::Configuration> configuration =
+ sp<media::VolumeShaper::Configuration>::make();
+ const sp<media::VolumeShaper::Operation> operation =
+ sp<media::VolumeShaper::Operation>::make();
+ mMediaPlayer->applyVolumeShaper(configuration, operation);
+ },
+ [&]() { mMediaPlayer->getVolumeShaperState(mFdp.ConsumeIntegral<int32_t>()); },
+ [&]() {
+ uint8_t uuid[kUuidSize];
+ for (int32_t index = 0; index < kUuidSize; ++index) {
+ uuid[index] = mFdp.ConsumeIntegral<uint8_t>();
+ }
+ Vector<uint8_t> drmSessionId;
+ int32_t length = mFdp.ConsumeIntegralInRange<uint32_t>(kMinSize, kMaxSize);
+ while (length--) {
+ drmSessionId.push_back(mFdp.ConsumeIntegral<uint8_t>());
+ }
+ mMediaPlayer->prepareDrm(uuid, drmSessionId);
+ },
+ [&]() { mMediaPlayer->releaseDrm(); },
+ [&]() {
+ audio_port_handle_t deviceId = mFdp.ConsumeIntegral<int32_t>();
+ mMediaPlayer->setOutputDevice(deviceId);
+ },
+ [&]() {
+ audio_port_handle_t deviceId;
+ mMediaPlayer->getRoutedDeviceId(&deviceId);
+ },
+ [&]() { mMediaPlayer->enableAudioDeviceCallback(mFdp.ConsumeBool()); },
+ [&]() {
+ sp<MediaPlayer> mediaPlayer = (MediaPlayer*)mMediaPlayer.get();
+ Parcel obj;
+ obj.writeInt32(mFdp.ConsumeIntegral<int32_t>());
+ obj.setDataPosition(0);
+ mediaPlayer->notify(mFdp.PickValueInArray(kMediaEventTypes) /* msg */,
+ mFdp.PickValueInArray(kMediaInfoTypes) /* ext1 */,
+ mFdp.ConsumeIntegral<int32_t>() /* ext2 */, &obj);
+ },
+ [&]() {
+ sp<MediaPlayer> mediaPlayer = (MediaPlayer*)mMediaPlayer.get();
+ int32_t mediaPlayerDumpFd = memfd_create(dumpFile, MFD_ALLOW_SEALING);
+ Vector<String16> args;
+ args.push_back(String16(mFdp.ConsumeRandomLengthString().c_str()));
+ mediaPlayer->dump(mediaPlayerDumpFd, args);
+ close(mediaPlayerDumpFd);
+ },
+ [&]() { mMediaPlayer->disconnect(); },
+ });
+ invokeMediaPlayerApi();
+ }
}
-void MediaPlayerServiceFuzzer::process(const uint8_t *data, size_t size) {
- MediaPlayerService::instantiate();
-
- const sp<IMediaPlayerService> mpService(IMediaDeathNotifier::getMediaPlayerService());
- if (!mpService) {
+void MediaPlayerServiceFuzzer::process(const uint8_t* data, size_t size) {
+ const sp<IMediaPlayerService> mpService(IMediaDeathNotifier::getMediaPlayerService());
+ if (!mpService) {
return;
- }
+ }
- sp<IMediaCodecList> mediaCodecList = mpService->getCodecList();
+ sp<IMediaCodecList> mediaCodecList = mpService->getCodecList();
- sp<IRemoteDisplayClient> remoteDisplayClient;
- sp<IRemoteDisplay> remoteDisplay = mpService->listenForRemoteDisplay(
- String16(mFdp.ConsumeRandomLengthString().c_str()) /*opPackageName*/, remoteDisplayClient,
- String8(mFdp.ConsumeRandomLengthString().c_str()) /*iface*/);
+ sp<IRemoteDisplayClient> remoteDisplayClient;
+ sp<IRemoteDisplay> remoteDisplay = mpService->listenForRemoteDisplay(
+ String16(mFdp.ConsumeRandomLengthString().c_str()) /*opPackageName*/,
+ remoteDisplayClient, String8(mFdp.ConsumeRandomLengthString().c_str()) /*iface*/);
- mpService->addBatteryData(mFdp.ConsumeIntegral<uint32_t>());
- Parcel reply;
- mpService->pullBatteryData(&reply);
+ mpService->addBatteryData(mFdp.ConsumeIntegral<uint32_t>());
+ Parcel reply;
+ mpService->pullBatteryData(&reply);
- sp<MediaPlayerService> mediaPlayerService = (MediaPlayerService *)mpService.get();
- AttributionSourceState attributionSource;
- attributionSource.packageName = mFdp.ConsumeRandomLengthString().c_str();
- attributionSource.token = sp<BBinder>::make();
- mMediaPlayer = mediaPlayerService->create(
- mMediaPlayerClient, mFdp.PickValueInArray(kSupportedAudioSessions), attributionSource);
+ sp<MediaPlayerService> mediaPlayerService = (MediaPlayerService*)mpService.get();
+ AttributionSourceState attributionSource;
+ attributionSource.packageName = mFdp.ConsumeRandomLengthString().c_str();
+ attributionSource.token = sp<BBinder>::make();
+ mMediaPlayer =
+ mediaPlayerService->create(mMediaPlayerClient,
+ (audio_session_t)mFdp.ConsumeIntegralInRange<int32_t>(
+ AUDIO_SESSION_DEVICE, AUDIO_SESSION_OUTPUT_MIX),
+ attributionSource);
- int32_t mediaPlayerServiceDumpFd = memfd_create("OutputDumpFile", MFD_ALLOW_SEALING);
- Vector<String16> args;
- args.push_back(String16(mFdp.ConsumeRandomLengthString().c_str()));
- mediaPlayerService->dump(mediaPlayerServiceDumpFd, args);
- close(mediaPlayerServiceDumpFd);
+ int32_t mediaPlayerServiceDumpFd = memfd_create(dumpFile, MFD_ALLOW_SEALING);
+ Vector<String16> args;
+ args.push_back(String16(mFdp.ConsumeRandomLengthString().c_str()));
+ mediaPlayerService->dump(mediaPlayerServiceDumpFd, args);
+ close(mediaPlayerServiceDumpFd);
- if (!mMediaPlayer) {
+ if (!mMediaPlayer) {
return;
- }
-
- if (setDataSource(data, size)) {
+ }
+ if (setDataSource(data, size)) {
invokeMediaPlayer();
- }
+ }
+}
+
+extern "C" int LLVMFuzzerInitialize(int* /* argc */, char*** /* argv */) {
+ MediaPlayerService::instantiate();
+ MediaExtractorService::instantiate();
+ return 0;
}
extern "C" int LLVMFuzzerTestOneInput(const uint8_t *data, size_t size) {
diff --git a/media/libmediaplayerservice/nuplayer/NuPlayer.cpp b/media/libmediaplayerservice/nuplayer/NuPlayer.cpp
index bb49b5a..bd43fe2 100644
--- a/media/libmediaplayerservice/nuplayer/NuPlayer.cpp
+++ b/media/libmediaplayerservice/nuplayer/NuPlayer.cpp
@@ -2098,9 +2098,12 @@
displayHeight,
cropLeft, cropTop);
} else {
- CHECK(inputFormat->findInt32("width", &displayWidth));
- CHECK(inputFormat->findInt32("height", &displayHeight));
-
+ if (!inputFormat->findInt32("width", &displayWidth)
+ || !inputFormat->findInt32("height", &displayHeight)) {
+ ALOGW("Either video width or video height missing, reporting 0x0!");
+ notifyListener(MEDIA_SET_VIDEO_SIZE, 0, 0);
+ return;
+ }
ALOGV("Video input format %d x %d", displayWidth, displayHeight);
}
diff --git a/media/libnbaio/Android.bp b/media/libnbaio/Android.bp
index 434ae00..158900a 100644
--- a/media/libnbaio/Android.bp
+++ b/media/libnbaio/Android.bp
@@ -15,8 +15,8 @@
"NBAIO.cpp",
],
header_libs: [
- "libaudioclient_headers",
"libaudio_system_headers",
+ "libaudioclient_headers",
],
export_header_lib_headers: [
"libaudioclient_headers",
@@ -35,8 +35,8 @@
export_include_dirs: ["include_mono"],
cflags: [
- "-Werror",
"-Wall",
+ "-Werror",
],
}
diff --git a/media/libnblog/Android.bp b/media/libnblog/Android.bp
index 8cfece6..b4d48b0 100644
--- a/media/libnblog/Android.bp
+++ b/media/libnblog/Android.bp
@@ -35,8 +35,8 @@
],
cflags: [
- "-Werror",
"-Wall",
+ "-Werror",
],
include_dirs: ["system/media/audio_utils/include"],
diff --git a/media/libnblog/Reader.cpp b/media/libnblog/Reader.cpp
index 71ebfd1..d5f16e8 100644
--- a/media/libnblog/Reader.cpp
+++ b/media/libnblog/Reader.cpp
@@ -93,7 +93,7 @@
do {
availToRead = mFifoReader->obtain(iovec, capacity, NULL /*timeout*/, &lostTemp);
lost += lostTemp;
- } while (availToRead < 0 || ++tries <= kMaxObtainTries);
+ } while (availToRead < 0 && ++tries <= kMaxObtainTries);
if (availToRead <= 0) {
ALOGW_IF(availToRead < 0, "NBLog Reader %s failed to catch up with Writer", mName.c_str());
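The Reader.cpp hunk fixes the retry condition: with '||' the loop kept iterating up to kMaxObtainTries even after obtain() succeeded, whereas '&&' retries only while the call failed and the budget is not exhausted. A small standalone sketch of the corrected loop; obtainOnce() is an illustrative stand-in for mFifoReader->obtain().

    #include <cstdio>

    namespace {
    constexpr int kMaxObtainTries = 3;

    // Fails twice, then reports 5 readable entries.
    int obtainOnce() {
        static int calls = 0;
        return (++calls <= 2) ? -1 : 5;
    }

    int obtainWithRetries() {
        int tries = 0;
        int availToRead;
        do {
            availToRead = obtainOnce();
        } while (availToRead < 0 && ++tries <= kMaxObtainTries);  // retry only on failure
        return availToRead;
    }
    }  // namespace

    int main() {
        std::printf("availToRead = %d\n", obtainWithRetries());  // prints 5
        return 0;
    }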
diff --git a/media/libstagefright/ACodec.cpp b/media/libstagefright/ACodec.cpp
index f9ceef2..e06efac 100644
--- a/media/libstagefright/ACodec.cpp
+++ b/media/libstagefright/ACodec.cpp
@@ -21,6 +21,8 @@
#define OMX_ANDROID_COMPILE_AS_32BIT_ON_64BIT_PLATFORMS
#endif
+#include <android_media_codec.h>
+
#include <inttypes.h>
#include <utils/Trace.h>
@@ -7573,6 +7575,22 @@
return true;
}
+ // When ACodec receives an error event in the LoadedToIdleState, it does not release
+ // the allocated buffers, which causes a gralloc buffer leak. Release these buffers
+ // first and then process the error event.
+ case OMX_EventError:
+ {
+ if (mCodec->allYourBuffersAreBelongToUs(kPortIndexInput)) {
+ mCodec->freeBuffersOnPort(kPortIndexInput);
+ }
+
+ if (mCodec->allYourBuffersAreBelongToUs(kPortIndexOutput)) {
+ mCodec->freeBuffersOnPort(kPortIndexOutput);
+ }
+
+ return BaseState::onOMXEvent(event, data1, data2);
+ }
+
default:
return BaseState::onOMXEvent(event, data1, data2);
}
@@ -9314,6 +9332,12 @@
// adaptive playback is not supported
caps->removeDetail(MediaCodecInfo::Capabilities::FEATURE_ADAPTIVE_PLAYBACK);
}
+
+ // all non-tunneled video decoders support detached surface mode
+ if (android::media::codec::provider_->null_output_surface_support() &&
+ android::media::codec::provider_->null_output_surface()) {
+ caps->addDetail(MediaCodecInfo::Capabilities::FEATURE_DETACHED_SURFACE, 0);
+ }
}
}
diff --git a/media/libstagefright/Android.bp b/media/libstagefright/Android.bp
index 896e021..5b6848c 100644
--- a/media/libstagefright/Android.bp
+++ b/media/libstagefright/Android.bp
@@ -318,10 +318,15 @@
"aconfig_mediacodec_flags_c_lib",
],
+ defaults: [
+ "aconfig_lib_cc_static_link.defaults",
+ ],
+
static_libs: [
+ "android.media.codec-aconfig-cc",
"libstagefright_esds",
"libstagefright_color_conversion",
- "libyuv_static",
+ "libyuv",
"libstagefright_webm",
"libstagefright_timedtext",
"libogg",
diff --git a/media/libstagefright/CameraSource.cpp b/media/libstagefright/CameraSource.cpp
index 4441121..e229844 100644
--- a/media/libstagefright/CameraSource.cpp
+++ b/media/libstagefright/CameraSource.cpp
@@ -151,7 +151,8 @@
if (camera == 0) {
mCamera = Camera::connect(cameraId, clientName, clientUid, clientPid,
- /*targetSdkVersion*/__ANDROID_API_FUTURE__, /*overrideToPortrait*/false,
+ /*targetSdkVersion*/__ANDROID_API_FUTURE__,
+ /*rotationOverride*/hardware::ICameraService::ROTATION_OVERRIDE_NONE,
/*forceSlowJpegMode*/false);
if (mCamera == 0) return -EBUSY;
mCameraFlags &= ~FLAGS_HOT_CAMERA;
diff --git a/media/libstagefright/FrameDecoder.cpp b/media/libstagefright/FrameDecoder.cpp
index 1a0bb7f..46703bb 100644
--- a/media/libstagefright/FrameDecoder.cpp
+++ b/media/libstagefright/FrameDecoder.cpp
@@ -48,6 +48,9 @@
static const int64_t kBufferTimeOutUs = 10000LL; // 10 msec
static const size_t kRetryCount = 100; // must be >0
static const int64_t kDefaultSampleDurationUs = 33333LL; // 33ms
+// For a codec, 0 is the highest importance; the higher the number, the less important the codec.
+// To make the thumbnail codec less important, give it a value greater than 0.
+static const int kThumbnailImportance = 1;
sp<IMemory> allocVideoFrame(const sp<MetaData>& trackMeta,
int32_t width, int32_t height, int32_t tileWidth, int32_t tileHeight,
@@ -585,6 +588,9 @@
}
}
+ // Set the importance for thumbnail.
+ videoFormat->setInt32(KEY_IMPORTANCE, kThumbnailImportance);
+
int32_t frameRate;
if (trackMeta()->findInt32(kKeyFrameRate, &frameRate) && frameRate > 0) {
mDefaultSampleDurationUs = 1000000LL / frameRate;
@@ -902,6 +908,10 @@
videoFormat->setInt32("android._num-input-buffers", 1);
videoFormat->setInt32("android._num-output-buffers", 1);
}
+
+ // Set the importance for thumbnail.
+ videoFormat->setInt32(KEY_IMPORTANCE, kThumbnailImportance);
+
return videoFormat;
}
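FrameDecoder now tags the thumbnail decoder's format with KEY_IMPORTANCE so it yields to more important (real-time) codecs; per the comment above, 0 is the most important and larger values are less important. Below is a hedged application-level sketch of setting such a key through the public NDK format API; the literal key string "importance" is an assumption for illustration and is not taken from this patch.

    #include <cstdio>
    #include <media/NdkMediaFormat.h>

    int main() {
        AMediaFormat* format = AMediaFormat_new();
        AMediaFormat_setString(format, AMEDIAFORMAT_KEY_MIME, "video/avc");
        AMediaFormat_setInt32(format, AMEDIAFORMAT_KEY_WIDTH, 1280);
        AMediaFormat_setInt32(format, AMEDIAFORMAT_KEY_HEIGHT, 720);
        // Assumed key string: 0 is the highest importance, larger values are
        // less important, mirroring kThumbnailImportance = 1 in the patch.
        AMediaFormat_setInt32(format, "importance", 1);
        std::printf("%s\n", AMediaFormat_toString(format));
        AMediaFormat_delete(format);
        return 0;
    }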
diff --git a/media/libstagefright/MediaCodec.cpp b/media/libstagefright/MediaCodec.cpp
index e79e55b..0401e82 100644
--- a/media/libstagefright/MediaCodec.cpp
+++ b/media/libstagefright/MediaCodec.cpp
@@ -30,6 +30,8 @@
#include "include/SoftwareRenderer.h"
+#include <android_media_codec.h>
+
#include <android/api-level.h>
#include <android/content/pm/IPackageManagerNative.h>
#include <android/hardware/cas/native/1.0/IDescrambler.h>
@@ -3017,6 +3019,13 @@
return PostAndAwaitResponse(msg, &response);
}
+status_t MediaCodec::detachOutputSurface() {
+ sp<AMessage> msg = new AMessage(kWhatDetachSurface, this);
+
+ sp<AMessage> response;
+ return PostAndAwaitResponse(msg, &response);
+}
+
status_t MediaCodec::setSurface(const sp<Surface> &surface) {
sp<AMessage> msg = new AMessage(kWhatSetSurface, this);
msg->setObject("surface", surface);
@@ -3395,9 +3404,6 @@
if (bufferInfos == nullptr || bufferInfos->value.empty()) {
return BAD_VALUE;
}
- if (cryptoInfos == nullptr || cryptoInfos->value.empty()) {
- return BAD_VALUE;
- }
status_t err = OK;
sp<AMessage> msg = new AMessage(kWhatQueueInputBuffer, this);
msg->setSize("index", index);
@@ -3405,8 +3411,12 @@
new WrapperObject<sp<hardware::HidlMemory>>{buffer}};
msg->setObject("memory", memory);
msg->setSize("offset", offset);
- msg->setSize("ssize", size);
- msg->setObject("cryptoInfos", cryptoInfos);
+ if (cryptoInfos != nullptr) {
+ msg->setSize("ssize", size);
+ msg->setObject("cryptoInfos", cryptoInfos);
+ } else {
+ msg->setSize("size", size);
+ }
msg->setObject("accessUnitInfo", bufferInfos);
if (OK != (err = generateFlagsFromAccessUnitInfo(msg, bufferInfos))) {
return err;
@@ -3965,6 +3975,15 @@
switch (mState) {
case INITIALIZING:
{
+ // Resource error during INITIALIZING state needs to be logged
+ // through metrics, to be able to track such occurrences.
+ if (isResourceError(err)) {
+ mediametrics_setInt32(mMetricsHandle, kCodecError, err);
+ mediametrics_setCString(mMetricsHandle, kCodecErrorState,
+ stateString(mState).c_str());
+ flushMediametrics();
+ initMediametrics();
+ }
setState(UNINITIALIZED);
break;
}
@@ -4675,7 +4694,7 @@
}
mResourceManagerProxy->removeClient();
- mReleaseSurface.reset();
+ mDetachedSurface.reset();
if (mReplyID != nullptr) {
postPendingRepliesAndDeferredMessages("kWhatReleaseCompleted");
@@ -4848,6 +4867,23 @@
mFlags |= kFlagPushBlankBuffersOnShutdown;
}
+ uint32_t flags;
+ CHECK(msg->findInt32("flags", (int32_t *)&flags));
+
+ if (android::media::codec::provider_->null_output_surface_support()) {
+ if (obj == nullptr
+ && (flags & CONFIGURE_FLAG_DETACHED_SURFACE)
+ && !(flags & CONFIGURE_FLAG_ENCODE)) {
+ sp<Surface> surface = getOrCreateDetachedSurface();
+ if (surface == nullptr) {
+ mErrorLog.log(
+ LOG_TAG, "Detached surface mode is not supported by this codec");
+ PostReplyWithError(replyID, INVALID_OPERATION);
+ }
+ obj = surface;
+ }
+ }
+
if (obj != NULL) {
if (!format->findInt32(KEY_ALLOW_FRAME_DROP, &mAllowFrameDroppingBySurface)) {
// allow frame dropping by surface by default
@@ -4871,8 +4907,6 @@
mApiUsageMetrics.isUsingOutputSurface = true;
- uint32_t flags;
- CHECK(msg->findInt32("flags", (int32_t *)&flags));
if (flags & CONFIGURE_FLAG_USE_BLOCK_MODEL ||
flags & CONFIGURE_FLAG_USE_CRYPTO_ASYNC) {
if (!(mFlags & kFlagIsAsync)) {
@@ -4887,8 +4921,8 @@
if (flags & CONFIGURE_FLAG_USE_CRYPTO_ASYNC) {
mFlags |= kFlagUseCryptoAsync;
if ((mFlags & kFlagUseBlockModel)) {
- ALOGW("CrytoAsync not yet enabled for block model,\
- falling back to normal");
+ ALOGW("CrytoAsync not yet enabled for block model, "
+ "falling back to normal");
}
}
}
@@ -4945,8 +4979,7 @@
mDescrambler = static_cast<IDescrambler *>(descrambler);
mBufferChannel->setDescrambler(mDescrambler);
- if ((mFlags & kFlagUseCryptoAsync) &&
- mCrypto && (mDomain == DOMAIN_VIDEO)) {
+ if ((mFlags & kFlagUseCryptoAsync) && mCrypto) {
// set kFlagUseCryptoAsync but do-not use this for block model
// this is to propagate the error in onCryptoError()
// TODO (b/274628160): Enable Use of CONFIG_FLAG_USE_CRYPTO_ASYNC
@@ -4993,6 +5026,23 @@
break;
}
+ case kWhatDetachSurface:
+ {
+ // detach surface is equivalent to setSurface(mDetachedSurface)
+ sp<Surface> surface = getOrCreateDetachedSurface();
+
+ if (surface == nullptr) {
+ sp<AReplyToken> replyID;
+ CHECK(msg->senderAwaitsResponse(&replyID));
+ mErrorLog.log(LOG_TAG, "Detaching surface is not supported by the codec.");
+ PostReplyWithError(replyID, INVALID_OPERATION);
+ break;
+ }
+
+ msg->setObject("surface", surface);
+ }
+ [[fallthrough]];
+
case kWhatSetSurface:
{
sp<AReplyToken> replyID;
@@ -5010,14 +5060,17 @@
sp<Surface> surface = static_cast<Surface *>(obj.get());
if (mSurface == NULL) {
// do not support setting surface if it was not set
- mErrorLog.log(LOG_TAG,
- "Cannot set surface if the codec is not configured with "
- "a surface already");
+ mErrorLog.log(LOG_TAG, base::StringPrintf(
+ "Cannot %s surface if the codec is not configured with "
+ "a surface already",
+ msg->what() == kWhatDetachSurface ? "detach" : "set"));
err = INVALID_OPERATION;
} else if (obj == NULL) {
// do not support unsetting surface
mErrorLog.log(LOG_TAG, "Unsetting surface is not supported");
err = BAD_VALUE;
+ } else if (android::media::codec::provider_->null_output_surface_support()) {
+ err = handleSetSurface(surface, true /* callCodec */);
} else {
uint32_t generation;
err = connectToSurface(surface, &generation);
@@ -5051,7 +5104,8 @@
default:
mErrorLog.log(LOG_TAG, base::StringPrintf(
- "setSurface() is valid only at Executing states; currently %s",
+ "%sSurface() is valid only at Executing states; currently %s",
+ msg->what() == kWhatDetachSurface ? "detach" : "set",
apiStateString().c_str()));
err = INVALID_OPERATION;
break;
@@ -5272,30 +5326,40 @@
bool forceSync = false;
if (asyncNotify != nullptr && mSurface != NULL) {
- if (!mReleaseSurface) {
- uint64_t usage = 0;
- if (mSurface->getConsumerUsage(&usage) != OK) {
- usage = 0;
- }
- mReleaseSurface.reset(new ReleaseSurface(usage));
- }
- if (mSurface != mReleaseSurface->getSurface()) {
- uint32_t generation;
- status_t err = connectToSurface(mReleaseSurface->getSurface(), &generation);
- ALOGW_IF(err != OK, "error connecting to release surface: err = %d", err);
- if (err == OK && !(mFlags & kFlagUsesSoftwareRenderer)) {
- err = mCodec->setSurface(mReleaseSurface->getSurface(), generation);
- ALOGW_IF(err != OK, "error setting release surface: err = %d", err);
- }
- if (err == OK) {
- (void)disconnectFromSurface();
- mSurface = mReleaseSurface->getSurface();
- mSurfaceGeneration = generation;
- } else {
- // We were not able to switch the surface, so force
+ if (android::media::codec::provider_->null_output_surface_support()) {
+ if (handleSetSurface(getOrCreateDetachedSurface(), true /* callCodec */,
+ true /* onShutDown */) != OK) {
+ // We were not able to detach the surface, so force
// synchronous release.
forceSync = true;
}
+ } else {
+ if (!mDetachedSurface) {
+ uint64_t usage = 0;
+ if (mSurface->getConsumerUsage(&usage) != OK) {
+ usage = 0;
+ }
+ mDetachedSurface.reset(new ReleaseSurface(usage));
+ }
+ if (mSurface != mDetachedSurface->getSurface()) {
+ uint32_t generation;
+ status_t err =
+ connectToSurface(mDetachedSurface->getSurface(), &generation);
+ ALOGW_IF(err != OK, "error connecting to release surface: err = %d", err);
+ if (err == OK && !(mFlags & kFlagUsesSoftwareRenderer)) {
+ err = mCodec->setSurface(mDetachedSurface->getSurface(), generation);
+ ALOGW_IF(err != OK, "error setting release surface: err = %d", err);
+ }
+ if (err == OK) {
+ (void)disconnectFromSurface();
+ mSurface = mDetachedSurface->getSurface();
+ mSurfaceGeneration = generation;
+ } else {
+ // We were not able to switch the surface, so force
+ // synchronous release.
+ forceSync = true;
+ }
+ }
}
}
@@ -5996,6 +6060,10 @@
mErrorLog.clear();
}
+ if (android::media::codec::provider_->set_state_early()) {
+ mState = newState;
+ }
+
if (newState == UNINITIALIZED) {
// return any straggling buffers, e.g. if we got here on an error
returnBuffersToCodec();
@@ -6006,7 +6074,9 @@
mFlags &= ~kFlagSawMediaServerDie;
}
- mState = newState;
+ if (!android::media::codec::provider_->set_state_early()) {
+ mState = newState;
+ }
if (mBatteryChecker != nullptr) {
mBatteryChecker->setExecuting(isExecuting());
@@ -6207,15 +6277,8 @@
cryptoInfo->setInt32("secure", mFlags & kFlagIsSecure);
sp<RefBase> obj;
if (msg->findObject("cryptoInfos", &obj)) {
- sp<CryptoInfosWrapper> infos{(CryptoInfosWrapper*)obj.get()};
- sp<CryptoInfosWrapper> asyncInfos{
- new CryptoInfosWrapper(std::vector<std::unique_ptr<CodecCryptoInfo>>())};
- for (std::unique_ptr<CodecCryptoInfo> &info : infos->value) {
- if (info) {
- asyncInfos->value.emplace_back(new CryptoAsync::CryptoAsyncInfo(info));
- }
- }
- buffer->meta()->setObject("cryptoInfos", asyncInfos);
+ // this object is a standalone object when created (no copy required here)
+ buffer->meta()->setObject("cryptoInfos", obj);
} else {
size_t key_len = (key != nullptr)? 16 : 0;
size_t iv_len = (iv != nullptr)? 16 : 0;
@@ -6354,7 +6417,6 @@
}
}
if (mCryptoAsync) {
- // TODO b/316565675 - enable async path for audio
// prepare a message and enqueue
sp<AMessage> cryptoInfo = new AMessage();
buildCryptoInfoAMessage(cryptoInfo, CryptoAsync::kActionDecrypt);
@@ -6590,9 +6652,9 @@
CHECK_EQ(info, &mPortBuffers[portIndex][index]);
availBuffers->erase(availBuffers->begin());
- CHECK(!info->mOwnedByClient);
{
Mutex::Autolock al(mBufferLock);
+ CHECK(!info->mOwnedByClient);
info->mOwnedByClient = true;
// set image-data
@@ -6611,6 +6673,23 @@
return index;
}
+sp<Surface> MediaCodec::getOrCreateDetachedSurface() {
+ if (mDomain != DOMAIN_VIDEO || (mFlags & kFlagIsEncoder)) {
+ return nullptr;
+ }
+
+ if (!mDetachedSurface) {
+ uint64_t usage = 0;
+ if (!mSurface || mSurface->getConsumerUsage(&usage) != OK) {
+ // TODO: should we use a/the default consumer usage?
+ usage = 0;
+ }
+ mDetachedSurface.reset(new ReleaseSurface(usage));
+ }
+
+ return mDetachedSurface->getSurface();
+}
+
status_t MediaCodec::connectToSurface(const sp<Surface> &surface, uint32_t *generation) {
status_t err = OK;
if (surface != NULL) {
@@ -6684,7 +6763,56 @@
return err;
}
+status_t MediaCodec::handleSetSurface(const sp<Surface> &surface, bool callCodec, bool onShutDown) {
+ uint32_t generation;
+ status_t err = OK;
+ if (surface != nullptr) {
+ err = connectToSurface(surface, &generation);
+ if (err == ALREADY_EXISTS) {
+ // reconnecting to same surface
+ return OK;
+ }
+
+ if (err == OK && callCodec) {
+ if (mFlags & kFlagUsesSoftwareRenderer) {
+ if (mSoftRenderer != NULL
+ && (mFlags & kFlagPushBlankBuffersOnShutdown)) {
+ pushBlankBuffersToNativeWindow(mSurface.get());
+ }
+ // do not create a new software renderer on shutdown (release)
+ // as it will not be used anyway
+ if (!onShutDown) {
+ surface->setDequeueTimeout(-1);
+ mSoftRenderer = new SoftwareRenderer(surface);
+ // TODO: check if this was successful
+ }
+ } else {
+ err = mCodec->setSurface(surface, generation);
+ }
+
+ mReliabilityContextMetrics.setOutputSurfaceCount++;
+ }
+ }
+
+ if (err == OK) {
+ if (mSurface != NULL) {
+ (void)disconnectFromSurface();
+ }
+
+ if (surface != NULL) {
+ mSurface = surface;
+ mSurfaceGeneration = generation;
+ }
+ }
+
+ return err;
+}
+
status_t MediaCodec::handleSetSurface(const sp<Surface> &surface) {
+ if (android::media::codec::provider_->null_output_surface_support()) {
+ return handleSetSurface(surface, false /* callCodec */);
+ }
+
status_t err = OK;
if (mSurface != NULL) {
(void)disconnectFromSurface();
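
Note: the detach path added above boils down to lazily creating a placeholder output surface for video decoders and treating "detach" as switching the codec onto that placeholder. A minimal standalone sketch of that flow, using illustrative names (PlaceholderSurface, DetachingCodec) rather than the real ReleaseSurface/handleSetSurface() machinery:

```cpp
#include <memory>

struct PlaceholderSurface {};  // stands in for the real ReleaseSurface

struct DetachingCodec {
    bool isVideoDecoder = true;
    std::shared_ptr<PlaceholderSurface> detached;  // lazily created, cached
    std::shared_ptr<PlaceholderSurface> current;   // currently attached output

    // Mirrors getOrCreateDetachedSurface(): only video decoders get a placeholder.
    std::shared_ptr<PlaceholderSurface> getOrCreateDetached() {
        if (!isVideoDecoder) {
            return nullptr;
        }
        if (!detached) {
            detached = std::make_shared<PlaceholderSurface>();
        }
        return detached;
    }

    // Mirrors kWhatDetachSurface: "detach" is just "attach the placeholder".
    bool detachOutputSurface() {
        std::shared_ptr<PlaceholderSurface> placeholder = getOrCreateDetached();
        if (!placeholder) {
            return false;  // unsupported, e.g. audio codec or encoder
        }
        current = placeholder;  // the real code goes through handleSetSurface()
        return true;
    }
};
```
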
diff --git a/media/libstagefright/SurfaceUtils.cpp b/media/libstagefright/SurfaceUtils.cpp
index 604dcb0..714e312 100644
--- a/media/libstagefright/SurfaceUtils.cpp
+++ b/media/libstagefright/SurfaceUtils.cpp
@@ -111,8 +111,9 @@
}
}
- int finalUsage = usage | consumerUsage;
- ALOGV("gralloc usage: %#x(producer) + %#x(consumer) = %#x", usage, consumerUsage, finalUsage);
+ uint64_t finalUsage = (uint32_t) usage | (uint32_t) consumerUsage;
+ ALOGV("gralloc usage: %#x(producer) + %#x(consumer) = 0x%" PRIx64,
+ usage, consumerUsage, finalUsage);
err = native_window_set_usage(nativeWindow, finalUsage);
if (err != NO_ERROR) {
ALOGE("native_window_set_usage failed: %s (%d)", strerror(-err), -err);
@@ -126,7 +127,7 @@
return err;
}
- ALOGD("set up nativeWindow %p for %dx%d, color %#x, rotation %d, usage %#x",
+ ALOGD("set up nativeWindow %p for %dx%d, color %#x, rotation %d, usage 0x%" PRIx64,
nativeWindow, width, height, format, rotation, finalUsage);
return NO_ERROR;
}
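
Note: the SurfaceUtils change widens the combined gralloc usage to 64 bits and switches the log format to PRIx64. A tiny standalone illustration of the same widening/formatting pattern; the usage values below are arbitrary examples, not real gralloc constants:

```cpp
#include <cinttypes>
#include <cstdint>
#include <cstdio>

int main() {
    int producerUsage = 0x00000100;  // example producer-side mask
    int consumerUsage = 0x00002000;  // example consumer-side mask
    // Cast each 32-bit mask before OR-ing so the result fits a 64-bit field.
    uint64_t finalUsage = (uint32_t) producerUsage | (uint32_t) consumerUsage;
    printf("gralloc usage: %#x(producer) + %#x(consumer) = 0x%" PRIx64 "\n",
           producerUsage, consumerUsage, finalUsage);
    return 0;
}
```
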
diff --git a/media/libstagefright/TEST_MAPPING b/media/libstagefright/TEST_MAPPING
index 5dd8423..b7efbce 100644
--- a/media/libstagefright/TEST_MAPPING
+++ b/media/libstagefright/TEST_MAPPING
@@ -1,13 +1,4 @@
{
- "postsubmit": [
- // writerTest fails about 5 out of 66
- // { "name": "writerTest" },
-
- { "name": "HEVCUtilsUnitTest" },
- { "name": "ExtractorFactoryTest" }
-
- ],
-
"presubmit-large": [
{
"name": "CtsMediaMiscTestCases",
@@ -29,7 +20,6 @@
{
"exclude-annotation": "android.platform.test.annotations.RequiresDevice"
},
- // TODO: b/149314419
{
"exclude-filter": "android.media.audio.cts.AudioPlaybackCaptureTest"
},
@@ -92,8 +82,16 @@
}
],
"postsubmit": [
+ // writerTest fails about 5 out of 66
+ // { "name": "writerTest" },
{
"name": "BatteryChecker_test"
+ },
+ {
+ "name": "ExtractorFactoryTest"
+ },
+ {
+ "name": "HEVCUtilsUnitTest"
}
]
}
diff --git a/media/libstagefright/VideoRenderQualityTracker.cpp b/media/libstagefright/VideoRenderQualityTracker.cpp
index eb9ac0f..bf29b1d 100644
--- a/media/libstagefright/VideoRenderQualityTracker.cpp
+++ b/media/libstagefright/VideoRenderQualityTracker.cpp
@@ -302,13 +302,6 @@
mRenderDurationMs += (actualRenderTimeUs - mLastRenderTimeUs) / 1000;
}
- // Now that a frame has been rendered, the previously skipped frames can be processed as skipped
- // frames since the app is not skipping them to terminate playback.
- for (int64_t contentTimeUs : mPendingSkippedFrameContentTimeUsList) {
- processMetricsForSkippedFrame(contentTimeUs);
- }
- mPendingSkippedFrameContentTimeUsList = {};
-
// We can render a pending queued frame if it's the last frame of the video, so release it
// immediately.
if (contentTimeUs == mTunnelFrameQueuedContentTimeUs && mTunnelFrameQueuedContentTimeUs != -1) {
@@ -332,9 +325,25 @@
(long long) contentTimeUs, (long long) nextExpectedFrame.contentTimeUs);
break;
}
+ // Process all skipped frames before the dropped frame.
+ while (!mPendingSkippedFrameContentTimeUsList.empty()) {
+ if (mPendingSkippedFrameContentTimeUsList.front() >= nextExpectedFrame.contentTimeUs) {
+ break;
+ }
+ processMetricsForSkippedFrame(mPendingSkippedFrameContentTimeUsList.front());
+ mPendingSkippedFrameContentTimeUsList.pop_front();
+ }
processMetricsForDroppedFrame(nextExpectedFrame.contentTimeUs,
nextExpectedFrame.desiredRenderTimeUs);
}
+ // Process all skipped frames before the rendered frame.
+ while (!mPendingSkippedFrameContentTimeUsList.empty()) {
+ if (mPendingSkippedFrameContentTimeUsList.front() >= nextExpectedFrame.contentTimeUs) {
+ break;
+ }
+ processMetricsForSkippedFrame(mPendingSkippedFrameContentTimeUsList.front());
+ mPendingSkippedFrameContentTimeUsList.pop_front();
+ }
processMetricsForRenderedFrame(nextExpectedFrame.contentTimeUs,
nextExpectedFrame.desiredRenderTimeUs, actualRenderTimeUs,
freezeEventOut, judderEventOut);
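
Note: the reordering above makes skipped-frame accounting happen just before the drop or render it precedes, instead of after the next rendered frame. A minimal sketch of that drain loop over the pending list, where process() stands in for processMetricsForSkippedFrame():

```cpp
#include <cstdint>
#include <functional>
#include <list>

void drainSkippedBefore(std::list<int64_t>& pendingSkippedUs,
                        int64_t nextExpectedContentTimeUs,
                        const std::function<void(int64_t)>& process) {
    while (!pendingSkippedUs.empty()) {
        if (pendingSkippedUs.front() >= nextExpectedContentTimeUs) {
            break;  // this skip belongs to a later frame; keep it pending
        }
        process(pendingSkippedUs.front());
        pendingSkippedUs.pop_front();
    }
}
```
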
diff --git a/media/libstagefright/colorconversion/Android.bp b/media/libstagefright/colorconversion/Android.bp
index 7ff9b10..4072bf9 100644
--- a/media/libstagefright/colorconversion/Android.bp
+++ b/media/libstagefright/colorconversion/Android.bp
@@ -36,7 +36,7 @@
"media_plugin_headers",
],
- static_libs: ["libyuv_static"],
+ static_libs: ["libyuv"],
cflags: ["-Werror"],
diff --git a/media/libstagefright/colorconversion/fuzzer/Android.bp b/media/libstagefright/colorconversion/fuzzer/Android.bp
index 237e715..50a2477 100644
--- a/media/libstagefright/colorconversion/fuzzer/Android.bp
+++ b/media/libstagefright/colorconversion/fuzzer/Android.bp
@@ -27,7 +27,7 @@
cc_defaults {
name: "libcolorconversion_fuzzer_defaults",
static_libs: [
- "libyuv_static",
+ "libyuv",
"libstagefright_color_conversion",
"libstagefright",
"liblog",
diff --git a/media/libstagefright/data/media_codecs_sw.xml b/media/libstagefright/data/media_codecs_sw.xml
index dc7d787..d50bc1e 100644
--- a/media/libstagefright/data/media_codecs_sw.xml
+++ b/media/libstagefright/data/media_codecs_sw.xml
@@ -139,6 +139,7 @@
<Limit name="bitrate" range="1-40000000" />
</Variant>
<Feature name="adaptive-playback" />
+ <Feature name="dynamic-color-aspects" />
<Attribute name="software-codec" />
</MediaCodec>
<MediaCodec name="c2.android.hevc.decoder" type="video/hevc" variant="slow-cpu,!slow-cpu">
@@ -160,6 +161,7 @@
<Limit name="bitrate" range="1-5000000" />
</Variant>
<Feature name="adaptive-playback" />
+ <Feature name="dynamic-color-aspects" />
<Attribute name="software-codec" />
</MediaCodec>
<MediaCodec name="c2.android.vp8.decoder" type="video/x-vnd.on2.vp8" variant="slow-cpu,!slow-cpu">
@@ -178,6 +180,7 @@
<Limit name="bitrate" range="1-40000000" />
</Variant>
<Feature name="adaptive-playback" />
+ <Feature name="dynamic-color-aspects" />
<Attribute name="software-codec" />
</MediaCodec>
<MediaCodec name="c2.android.vp9.decoder" type="video/x-vnd.on2.vp9" variant="slow-cpu,!slow-cpu">
@@ -197,6 +200,7 @@
<Limit name="bitrate" range="1-5000000" />
</Variant>
<Feature name="adaptive-playback" />
+ <Feature name="dynamic-color-aspects" />
<Attribute name="software-codec" />
</MediaCodec>
<MediaCodec name="c2.android.av1.decoder" type="video/av01" variant="slow-cpu,!slow-cpu">
@@ -216,6 +220,8 @@
<Limit name="bitrate" range="1-5000000" />
</Variant>
<Feature name="adaptive-playback" />
+ <Feature name="dynamic-color-aspects" />
+ <Feature name="low-latency" />
<Attribute name="software-codec" />
</MediaCodec>
<MediaCodec name="c2.android.av1-dav1d.decoder" type="video/av01" variant="slow-cpu,!slow-cpu" rank="1024">
@@ -234,6 +240,8 @@
<Limit name="bitrate" range="1-5000000" />
</Variant>
<Feature name="adaptive-playback" />
+ <Feature name="dynamic-color-aspects" />
+ <Feature name="low-latency" />
<Attribute name="software-codec" />
</MediaCodec>
<MediaCodec name="c2.android.mpeg2.decoder" type="video/mpeg2" domain="tv">
@@ -335,7 +343,7 @@
</MediaCodec>
<MediaCodec name="c2.android.vp8.encoder" type="video/x-vnd.on2.vp8" variant="slow-cpu,!slow-cpu">
<Alias name="OMX.google.vp8.encoder" />
- <Limit name="alignment" value="2x2" />
+ <Limit name="alignment" value="1x1" />
<Limit name="block-size" value="16x16" />
<Variant name="!slow-cpu">
<Limit name="size" min="2x2" max="2048x2048" />
@@ -351,6 +359,7 @@
<Limit name="bitrate" range="1-20000000" />
</Variant>
<Feature name="bitrate-modes" value="VBR,CBR" />
+ <Feature name="qp-bounds" />
<Attribute name="software-codec" />
</MediaCodec>
<MediaCodec name="c2.android.hevc.encoder" type="video/hevc" variant="!slow-cpu">
@@ -365,22 +374,24 @@
<Limit name="complexity" range="0-10" default="0" />
<Limit name="quality" range="0-100" default="80" />
<Feature name="bitrate-modes" value="VBR,CBR,CQ" />
+ <Feature name="qp-bounds" />
<Attribute name="software-codec" />
</MediaCodec>
<MediaCodec name="c2.android.vp9.encoder" type="video/x-vnd.on2.vp9" variant="!slow-cpu">
<Alias name="OMX.google.vp9.encoder" />
<!-- profiles and levels: ProfileMain : Level_Version0-3 -->
<Limit name="size" min="2x2" max="2048x2048" />
- <Limit name="alignment" value="2x2" />
+ <Limit name="alignment" value="1x1" />
<Limit name="block-size" value="16x16" />
<!-- 2016 devices can encode at about 8fps at this block count -->
<Limit name="block-count" range="1-3600" /> <!-- max 1280x720 -->
<Limit name="bitrate" range="1-40000000" />
<Feature name="bitrate-modes" value="VBR,CBR" />
+ <Feature name="qp-bounds" />
<Attribute name="software-codec" />
</MediaCodec>
<MediaCodec name="c2.android.av1.encoder" type="video/av01" enabled="false" minsdk="34" variant="slow-cpu,!slow-cpu">
- <Limit name="alignment" value="2x2" />
+ <Limit name="alignment" value="1x1" />
<Limit name="block-size" value="16x16" />
<Variant name="!slow-cpu">
<Limit name="size" min="2x2" max="1920x1920" />
@@ -395,6 +406,7 @@
<Limit name="quality" range="0-100" default="80" />
<Limit name="complexity" range="0-5" default="0" />
<Feature name="bitrate-modes" value="VBR,CBR,CQ" />
+ <Feature name="qp-bounds" />
<Attribute name="software-codec" />
</MediaCodec>
</Encoders>
diff --git a/media/libstagefright/include/media/stagefright/MediaCodec.h b/media/libstagefright/include/media/stagefright/MediaCodec.h
index 9ecb12e..7169b1e 100644
--- a/media/libstagefright/include/media/stagefright/MediaCodec.h
+++ b/media/libstagefright/include/media/stagefright/MediaCodec.h
@@ -96,6 +96,7 @@
CONFIGURE_FLAG_ENCODE = 1,
CONFIGURE_FLAG_USE_BLOCK_MODEL = 2,
CONFIGURE_FLAG_USE_CRYPTO_ASYNC = 4,
+ CONFIGURE_FLAG_DETACHED_SURFACE = 8,
};
enum BufferFlags {
@@ -274,6 +275,8 @@
status_t setSurface(const sp<Surface> &nativeWindow);
+ status_t detachOutputSurface();
+
status_t requestIDRFrame();
// Notification will be posted once there "is something to do", i.e.
@@ -368,6 +371,7 @@
kWhatInit = 'init',
kWhatConfigure = 'conf',
kWhatSetSurface = 'sSur',
+ kWhatDetachSurface = 'dSur',
kWhatCreateInputSurface = 'cisf',
kWhatSetInputSurface = 'sisf',
kWhatStart = 'strt',
@@ -474,6 +478,10 @@
uint32_t mSurfaceGeneration = 0;
SoftwareRenderer *mSoftRenderer;
+ // Get the detached BufferQueue surface for a video decoder, and create it
+ // if it does not exist yet.
+ sp<Surface> getOrCreateDetachedSurface();
+
Mutex mMetricsLock;
mediametrics_handle_t mMetricsHandle = 0;
bool mMetricsToUpload = false;
@@ -642,6 +650,13 @@
status_t queueCSDInputBuffer(size_t bufferIndex);
status_t handleSetSurface(const sp<Surface> &surface);
+
+ // Common implementation for changing the output surface.
+ // Handles setting a null surface, which is used during configure and init.
+ // Set |callCodec| to true if the codec needs to be notified (e.g. while in the Executing state).
+ // Setting |onShutdown| to true avoids extra work when this is used for detaching on
+ // delayed release.
+ status_t handleSetSurface(const sp<Surface> &surface, bool callCodec, bool onShutdown = false);
status_t connectToSurface(const sp<Surface> &surface, uint32_t *generation);
status_t disconnectFromSurface();
@@ -714,7 +729,7 @@
sp<AMessage> mMsgPollForRenderedBuffers;
class ReleaseSurface;
- std::unique_ptr<ReleaseSurface> mReleaseSurface;
+ std::unique_ptr<ReleaseSurface> mDetachedSurface;
std::list<sp<AMessage>> mLeftover;
status_t handleLeftover(size_t index);
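
Note: for reference, a hedged sketch of how a framework-level client might drive the constants and methods declared above. The patch only declares CONFIGURE_FLAG_DETACHED_SURFACE and detachOutputSurface(); configuring without a client surface, as shown here, is an assumption based on the flag's name, and the mime/width/height values are arbitrary.

```cpp
#include <gui/Surface.h>
#include <media/stagefright/MediaCodec.h>
#include <media/stagefright/foundation/ALooper.h>
#include <media/stagefright/foundation/AMessage.h>

using namespace android;

status_t runDetachedDecode(const sp<ALooper>& looper, const sp<Surface>& laterSurface) {
    sp<MediaCodec> codec = MediaCodec::CreateByType(looper, "video/avc", false /* encoder */);
    if (codec == nullptr) {
        return NO_INIT;
    }

    sp<AMessage> format = new AMessage;
    format->setString("mime", "video/avc");
    format->setInt32("width", 1280);
    format->setInt32("height", 720);

    // Assumption: the flag lets the codec start on the detached placeholder
    // instead of requiring a client surface at configure time.
    status_t err = codec->configure(format, nullptr /* surface */, nullptr /* crypto */,
                                    MediaCodec::CONFIGURE_FLAG_DETACHED_SURFACE);
    if (err != OK) {
        return err;
    }
    if ((err = codec->start()) != OK) {
        return err;
    }

    // Attach a real surface mid-stream, then drop it again without stopping.
    if ((err = codec->setSurface(laterSurface)) != OK) {
        return err;
    }
    return codec->detachOutputSurface();  // routed through kWhatDetachSurface
}
```
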
diff --git a/media/libstagefright/include/media/stagefright/MediaCodecConstants.h b/media/libstagefright/include/media/stagefright/MediaCodecConstants.h
index 24ac2e8..72785d5 100644
--- a/media/libstagefright/include/media/stagefright/MediaCodecConstants.h
+++ b/media/libstagefright/include/media/stagefright/MediaCodecConstants.h
@@ -893,6 +893,8 @@
inline constexpr char PARAMETER_KEY_SUSPEND_TIME[] = "drop-start-time-us";
inline constexpr char PARAMETER_KEY_TUNNEL_PEEK[] = "tunnel-peek";
inline constexpr char PARAMETER_KEY_VIDEO_BITRATE[] = "video-bitrate";
+inline constexpr char PARAMETER_KEY_QP_OFFSET_MAP[] = "qp-offset-map";
+inline constexpr char PARAMETER_KEY_QP_OFFSET_RECTS[] = "qp-offset-rects";
}
diff --git a/media/libstagefright/include/media/stagefright/PersistentSurface.h b/media/libstagefright/include/media/stagefright/PersistentSurface.h
index f4943c3..554ee43 100644
--- a/media/libstagefright/include/media/stagefright/PersistentSurface.h
+++ b/media/libstagefright/include/media/stagefright/PersistentSurface.h
@@ -18,6 +18,8 @@
#define PERSISTENT_SURFACE_H_
+#include <android/binder_auto_utils.h>
+#include <android/binder_libbinder.h>
#include <binder/Parcel.h>
#include <hidl/HidlSupport.h>
#include <hidl/HybridInterface.h>
@@ -29,24 +31,43 @@
struct PersistentSurface : public RefBase {
PersistentSurface() {}
- // create a persistent surface
+ // create a persistent surface in HIDL
PersistentSurface(
const sp<IGraphicBufferProducer>& bufferProducer,
const sp<hidl::base::V1_0::IBase>& hidlTarget) :
mBufferProducer(bufferProducer),
- mHidlTarget(hidlTarget) { }
+ mHidlTarget(hidlTarget),
+ mAidlTarget(nullptr),
+ mAidl(false) { }
+
+ // create a persistent surface in AIDL
+ PersistentSurface(
+ const sp<IGraphicBufferProducer>& bufferProducer,
+ const ::ndk::SpAIBinder& aidlTarget) :
+ mBufferProducer(bufferProducer),
+ mHidlTarget(nullptr),
+ mAidlTarget(aidlTarget),
+ mAidl(true) { }
sp<IGraphicBufferProducer> getBufferProducer() const {
return mBufferProducer;
}
+ bool isTargetAidl() const {
+ return mAidl;
+ }
+
sp<hidl::base::V1_0::IBase> getHidlTarget() const {
- return mHidlTarget;
+ return mAidl ? nullptr : mHidlTarget;
+ }
+
+ ::ndk::SpAIBinder getAidlTarget() const {
+ return mAidl ? mAidlTarget : nullptr;
}
status_t writeToParcel(Parcel *parcel) const {
parcel->writeStrongBinder(IInterface::asBinder(mBufferProducer));
- // write hidl target
+ // write hidl target if available
if (mHidlTarget != nullptr) {
HalToken token;
bool result = createHalToken(mHidlTarget, &token);
@@ -57,6 +78,22 @@
} else {
parcel->writeBool(false);
}
+ // write aidl target if available
+ if (mAidl) {
+ AIBinder *binder = mAidlTarget.get();
+ if (binder != nullptr) {
+ ::android::sp<::android::IBinder> intf =
+ AIBinder_toPlatformBinder(binder);
+ if (intf) {
+ parcel->writeBool(true);
+ parcel->writeStrongBinder(intf);
+ } else {
+ parcel->writeBool(false);
+ }
+ } else {
+ parcel->writeBool(false);
+ }
+ }
return NO_ERROR;
}
@@ -65,21 +102,43 @@
parcel->readStrongBinder());
// read hidl target
bool haveHidlTarget = parcel->readBool();
+ mAidl = false;
if (haveHidlTarget) {
std::vector<uint8_t> tokenVector;
parcel->readByteVector(&tokenVector);
HalToken token = HalToken(tokenVector);
mHidlTarget = retrieveHalInterface(token);
deleteHalToken(token);
+ return NO_ERROR;
} else {
mHidlTarget.clear();
}
+
+ // read aidl target
+ bool haveAidlTarget = false;
+ if (parcel->readBool(&haveAidlTarget) != NO_ERROR) {
+ return NO_ERROR;
+ }
+ mAidl = true;
+ if (haveAidlTarget) {
+ ::android::sp<::android::IBinder> intf = parcel->readStrongBinder();
+ AIBinder *ndkBinder = AIBinder_fromPlatformBinder(intf);
+ if (ndkBinder) {
+ mAidlTarget.set(ndkBinder);
+ } else {
+ mAidlTarget.set(nullptr);
+ }
+ } else {
+ mAidlTarget.set(nullptr);
+ }
return NO_ERROR;
}
private:
sp<IGraphicBufferProducer> mBufferProducer;
sp<hidl::base::V1_0::IBase> mHidlTarget;
+ ::ndk::SpAIBinder mAidlTarget;
+ bool mAidl;
DISALLOW_EVIL_CONSTRUCTORS(PersistentSurface);
};
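
Note: a minimal consumer-side sketch (not part of the patch) of how code receiving a PersistentSurface can branch on the new isTargetAidl() accessor to pick the right target type; the concrete interface casts are omitted.

```cpp
#include <media/stagefright/PersistentSurface.h>

using namespace android;

void dispatchInputSurfaceTarget(const sp<PersistentSurface>& ps) {
    if (ps == nullptr) {
        return;
    }
    if (ps->isTargetAidl()) {
        ::ndk::SpAIBinder binder = ps->getAidlTarget();
        // cast `binder` to the expected AIDL input-surface interface here
    } else {
        sp<hidl::base::V1_0::IBase> base = ps->getHidlTarget();
        // cast `base` to the expected HIDL input-surface interface here
    }
}
```
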
diff --git a/media/libstagefright/omx/OMXNodeInstance.cpp b/media/libstagefright/omx/OMXNodeInstance.cpp
index 959f43e..458ac9c 100644
--- a/media/libstagefright/omx/OMXNodeInstance.cpp
+++ b/media/libstagefright/omx/OMXNodeInstance.cpp
@@ -626,6 +626,10 @@
// ACodec is waiting for all buffers to be returned, do NOT
// submit any more buffers to the codec.
bufferSource->onOmxIdle();
+ } else if (param == OMX_StateExecuting) {
+ // Initiating transition from Idle -> Executing
+ // Start submitting buffers to codec.
+ bufferSource->onOmxExecuting();
} else if (param == OMX_StateLoaded) {
// Initiating transition from Idle/Executing -> Loaded
// Buffers are about to be freed.
@@ -2404,13 +2408,6 @@
asString(event), event, arg1String, arg1, arg2String, arg2);
const sp<IOMXBufferSource> bufferSource(getBufferSource());
- if (bufferSource != NULL
- && event == OMX_EventCmdComplete
- && arg1 == OMX_CommandStateSet
- && arg2 == OMX_StateExecuting) {
- bufferSource->onOmxExecuting();
- }
-
// allow configuration if we return to the loaded state
if (event == OMX_EventCmdComplete
&& arg1 == OMX_CommandStateSet
diff --git a/media/libstagefright/renderfright/Android.bp b/media/libstagefright/renderfright/Android.bp
index 3598e8d..22b13f6 100644
--- a/media/libstagefright/renderfright/Android.bp
+++ b/media/libstagefright/renderfright/Android.bp
@@ -118,3 +118,11 @@
local_include_dirs: ["include"],
export_include_dirs: ["include"],
}
+
+cc_library_headers {
+ name: "librenderfright_gl_headers",
+ export_include_dirs: ["gl"],
+ visibility: [
+ "//frameworks/av/media/libstagefright/renderfright/fuzzer:__subpackages__",
+ ],
+}
diff --git a/media/libstagefright/renderfright/fuzzer/Android.bp b/media/libstagefright/renderfright/fuzzer/Android.bp
new file mode 100644
index 0000000..574e49f
--- /dev/null
+++ b/media/libstagefright/renderfright/fuzzer/Android.bp
@@ -0,0 +1,64 @@
+/*
+ * Copyright (C) 2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package {
+ default_team: "trendy_team_android_media_codec_framework",
+ // See: http://go/android-license-faq
+ // A large-scale-change added 'default_applicable_licenses' to import
+ // all of the 'license_kinds' from "frameworks_av_media_libstagefright_license"
+ // to get the below license kinds:
+ // SPDX-license-identifier-Apache-2.0
+ default_applicable_licenses: ["frameworks_av_media_libstagefright_license"],
+}
+
+cc_fuzz {
+ name: "libstagefright_renderfright_fuzzer",
+ srcs: [
+ "libstagefright_renderfright_fuzzer.cpp",
+ ],
+ static_libs: [
+ "librenderfright",
+ ],
+ header_libs: [
+ "librenderfright_gl_headers",
+ ],
+ shared_libs: [
+ "libcutils",
+ "libgui",
+ "liblog",
+ "libutils",
+ "libEGL",
+ "libGLESv1_CM",
+ "libGLESv2",
+ "libGLESv3",
+ "libui",
+ "libbase",
+ "libprocessgroup",
+ "libsync",
+ ],
+ fuzz_config: {
+ cc: [
+ "android-media-fuzzing-reports@google.com",
+ ],
+ componentid: 155276,
+ hotlists: ["4593311"],
+ description: "The fuzzer targets the APIs of librenderfright",
+ vector: "local_no_privileges_required",
+ service_privilege: "constrained",
+ users: "multi_user",
+ fuzzed_code_usage: "shipped",
+ },
+}
diff --git a/media/libstagefright/renderfright/fuzzer/README.md b/media/libstagefright/renderfright/fuzzer/README.md
new file mode 100644
index 0000000..742bfdc
--- /dev/null
+++ b/media/libstagefright/renderfright/fuzzer/README.md
@@ -0,0 +1,33 @@
+# Fuzzer for libstagefright_renderfright
+
+RenderFright supports the following parameters:
+1. SetContextPriority (parameter name: "kSetContextPriority")
+2. SetRenderEngineType (parameter name: "kSetRenderEngineType")
+3. CleanupMode (parameter name: "kCleanupMode")
+4. DataSpace (parameter name: "kDataSpace")
+5. ReadBufferUsage (parameter name: "kReadBufferUsage")
+6. WriteBufferUsage (parameter name: "kWriteBufferUsage")
+7. RenderBufferUsage (parameter name: "kRenderBufferUsage")
+
+| Parameter | Valid Values | Configured Value |
+|-----------|--------------|-------------------|
+|`kSetContextPriority`| 0. `RenderEngine::ContextPriority::LOW`<br/>1. `RenderEngine::ContextPriority::MEDIUM`<br/>2. `RenderEngine::ContextPriority::HIGH` |Value obtained from FuzzedDataProvider|
+|`kSetRenderEngineType`| 0. `RenderEngine::RenderEngineType::GLES`<br/>1. `RenderEngine::RenderEngineType::THREADED`|Value obtained from FuzzedDataProvider|
+|`kCleanupMode`| 0. `RenderEngine::CleanupMode::CLEAN_OUTPUT_RESOURCES`<br/>1. `RenderEngine::CleanupMode::CLEAN_ALL`|Value obtained from FuzzedDataProvider|
+|`kDataSpace`| 0. `ui::Dataspace::UNKNOWN`<br/>1. `ui::Dataspace::ARBITRARY`<br/>2. `ui::Dataspace::STANDARD_SHIFT`<br/>3. `ui::Dataspace::STANDARD_MASK`<br/>4. `ui::Dataspace::STANDARD_UNSPECIFIED`<br/>5. `ui::Dataspace::STANDARD_BT709`<br/>6. `ui::Dataspace::STANDARD_BT601_625`<br/>7. `ui::Dataspace::STANDARD_BT601_625_UNADJUSTED`<br/>8. `ui::Dataspace::STANDARD_BT601_525`<br/>9. `ui::Dataspace::STANDARD_BT601_525_UNADJUSTED`<br/>10. `ui::Dataspace::STANDARD_BT2020`<br/>11. `ui::Dataspace::STANDARD_BT2020_CONSTANT_LUMINANCE`<br/>12. `ui::Dataspace::STANDARD_BT470M`<br/>13. `ui::Dataspace::STANDARD_FILM`<br/>14. `ui::Dataspace::STANDARD_DCI_P3`<br/>15. `ui::Dataspace::STANDARD_ADOBE_RGB`<br/>16. `ui::Dataspace::TRANSFER_SHIFT`<br/>17. `ui::Dataspace::TRANSFER_MASK`<br/>18. `ui::Dataspace::TRANSFER_UNSPECIFIED`<br/>19. `ui::Dataspace::TRANSFER_LINEAR`<br/>20. `ui::Dataspace::TRANSFER_SRGB`<br/>21. `ui::Dataspace::TRANSFER_SMPTE_170M`<br/>22. `ui::Dataspace::TRANSFER_GAMMA2_2`<br/>23. `ui::Dataspace::TRANSFER_GAMMA2_6`<br/>24. `ui::Dataspace::TRANSFER_GAMMA2_8`<br/>25. `ui::Dataspace::TRANSFER_ST2084`<br/>26. `ui::Dataspace::TRANSFER_HLG`<br/>27. `ui::Dataspace::RANGE_SHIFT`<br/>28. `ui::Dataspace::RANGE_MASK`<br/>29. `ui::Dataspace::RANGE_UNSPECIFIED`<br/>30. `ui::Dataspace::RANGE_FULL`<br/>31. `ui::Dataspace::RANGE_LIMITED`<br/>32. `ui::Dataspace::RANGE_EXTENDED`<br/>33. `ui::Dataspace::SRGB_LINEAR`<br/>34. `ui::Dataspace::V0_SRGB_LINEAR`<br/>35. `ui::Dataspace::V0_SCRGB_LINEAR`<br/>36. `ui::Dataspace::SRGB`<br/>37. `ui::Dataspace::V0_SRGB`<br/>38. `ui::Dataspace::V0_SCRGB`<br/>39. `ui::Dataspace::JFIF`<br/>40. `ui::Dataspace::V0_JFIF`<br/>41. `ui::Dataspace::BT601_625`<br/>42. `ui::Dataspace::V0_BT601_625`<br/>43. `ui::Dataspace::BT601_525`<br/>44. `ui::Dataspace::V0_BT601_525`<br/>45. `ui::Dataspace::BT709`<br/>46. `ui::Dataspace::V0_BT709`<br/>47. `ui::Dataspace::DCI_P3_LINEAR`<br/>48. `ui::Dataspace::DCI_P3`<br/>49. `ui::Dataspace::DISPLAY_P3_LINEAR`<br/>50. `ui::Dataspace::DISPLAY_P3`<br/>51. `ui::Dataspace::ADOBE_RGB`<br/>52. `ui::Dataspace::BT2020_LINEAR`<br/>53. `ui::Dataspace::BT2020`<br/>54. `ui::Dataspace::BT2020_PQ`<br/>55. `ui::Dataspace::DEPTH`<br/>56. `ui::Dataspace::SENSOR`<br/>57. `ui::Dataspace::BT2020_ITU`<br/>58. `ui::Dataspace::BT2020_ITU_PQ`<br/>59. `ui::Dataspace::BT2020_ITU_HLG`<br/>60. `ui::Dataspace::BT2020_HLG`<br/>61. `ui::Dataspace::DISPLAY_BT2020`<br/>62. `ui::Dataspace::DYNAMIC_DEPTH`<br/>63. `ui::Dataspace::JPEG_APP_SEGMENTS`<br/>64. `ui::Dataspace::HEIF`|Value obtained from FuzzedDataProvider|
+|`kReadBufferUsage`| 0. `GRALLOC_USAGE_SW_READ_NEVER`<br/>1. `GRALLOC_USAGE_SW_READ_RARELY`<br/>2. `GRALLOC_USAGE_SW_READ_OFTEN`<br/>3. `GRALLOC_USAGE_SW_READ_MASK`|Value obtained from FuzzedDataProvider|
+|`kWriteBufferUsage`| 0. `GRALLOC_USAGE_SW_WRITE_NEVER`<br/>1. `GRALLOC_USAGE_SW_WRITE_RARELY`<br/>2. `GRALLOC_USAGE_SW_WRITE_OFTEN`<br/>3. `GRALLOC_USAGE_SW_WRITE_MASK`|Value obtained from FuzzedDataProvider|
+|`kRenderBufferUsage`| 0. `GRALLOC_USAGE_HW_TEXTURE`<br/>1. `GRALLOC_USAGE_HW_RENDER`<br/>2. `GRALLOC_USAGE_HW_2D`<br/>3. `GRALLOC_USAGE_HW_COMPOSER`<br/>4. `GRALLOC_USAGE_HW_FB`<br/>5. `GRALLOC_USAGE_EXTERNAL_DISP`<br/>6. `GRALLOC_USAGE_PROTECTED`<br/>7. `GRALLOC_USAGE_CURSOR`<br/>8. `GRALLOC_USAGE_HW_VIDEO_ENCODER`<br/>9. `GRALLOC_USAGE_HW_CAMERA_WRITE`<br/>10. `GRALLOC_USAGE_HW_CAMERA_READ`<br/>11. `GRALLOC_USAGE_HW_CAMERA_ZSL`<br/>12. `GRALLOC_USAGE_HW_CAMERA_MASK`<br/>13. `GRALLOC_USAGE_HW_MASK`<br/>14. `GRALLOC_USAGE_RENDERSCRIPT`<br/>15. `GRALLOC_USAGE_FOREIGN_BUFFERS`<br/>16. `GRALLOC_USAGE_HW_IMAGE_ENCODER`|Value obtained from FuzzedDataProvider|
+
+
+
+#### Steps to run
+1. Build the fuzzer
+```
+ $ mm -j$(nproc) libstagefright_renderfright_fuzzer
+```
+2. Run on device
+```
+ $ adb sync data
+ $ adb shell /data/fuzz/arm64/libstagefright_renderfright_fuzzer/libstagefright_renderfright_fuzzer
+```
diff --git a/media/libstagefright/renderfright/fuzzer/libstagefright_renderfright_fuzzer.cpp b/media/libstagefright/renderfright/fuzzer/libstagefright_renderfright_fuzzer.cpp
new file mode 100644
index 0000000..b0721e0
--- /dev/null
+++ b/media/libstagefright/renderfright/fuzzer/libstagefright_renderfright_fuzzer.cpp
@@ -0,0 +1,297 @@
+/*
+ * Copyright (C) 2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+#include <GLESRenderEngine.h>
+#include <GLFramebuffer.h>
+#include <GLImage.h>
+#include <Program.h>
+#include <ProgramCache.h>
+#include <fuzzer/FuzzedDataProvider.h>
+#include <renderengine/RenderEngine.h>
+
+using namespace android::renderengine;
+using namespace android;
+
+static constexpr int32_t kMinRenderAPI = 0;
+static constexpr int32_t kMaxRenderAPI = 8;
+static constexpr int32_t kMaxTextureCount = 100;
+static constexpr int32_t KMaxDisplayWidth = 3840;
+static constexpr int32_t KMaxDisplayHeight = 2160;
+static constexpr int32_t kMinPixelFormat = 1;
+static constexpr int32_t kMaxPixelFormat = 55;
+static constexpr int32_t kMaxRenderLayer = 5;
+
+static constexpr ui::Dataspace kDataSpace[] = {
+ ui::Dataspace::UNKNOWN,
+ ui::Dataspace::ARBITRARY,
+ ui::Dataspace::STANDARD_SHIFT,
+ ui::Dataspace::STANDARD_MASK,
+ ui::Dataspace::STANDARD_UNSPECIFIED,
+ ui::Dataspace::STANDARD_BT709,
+ ui::Dataspace::STANDARD_BT601_625,
+ ui::Dataspace::STANDARD_BT601_625_UNADJUSTED,
+ ui::Dataspace::STANDARD_BT601_525,
+ ui::Dataspace::STANDARD_BT601_525_UNADJUSTED,
+ ui::Dataspace::STANDARD_BT2020,
+ ui::Dataspace::STANDARD_BT2020_CONSTANT_LUMINANCE,
+ ui::Dataspace::STANDARD_BT470M,
+ ui::Dataspace::STANDARD_FILM,
+ ui::Dataspace::STANDARD_DCI_P3,
+ ui::Dataspace::STANDARD_ADOBE_RGB,
+ ui::Dataspace::TRANSFER_SHIFT,
+ ui::Dataspace::TRANSFER_MASK,
+ ui::Dataspace::TRANSFER_UNSPECIFIED,
+ ui::Dataspace::TRANSFER_LINEAR,
+ ui::Dataspace::TRANSFER_SRGB,
+ ui::Dataspace::TRANSFER_SMPTE_170M,
+ ui::Dataspace::TRANSFER_GAMMA2_2,
+ ui::Dataspace::TRANSFER_GAMMA2_6,
+ ui::Dataspace::TRANSFER_GAMMA2_8,
+ ui::Dataspace::TRANSFER_ST2084,
+ ui::Dataspace::TRANSFER_HLG,
+ ui::Dataspace::RANGE_SHIFT,
+ ui::Dataspace::RANGE_MASK,
+ ui::Dataspace::RANGE_UNSPECIFIED,
+ ui::Dataspace::RANGE_FULL,
+ ui::Dataspace::RANGE_LIMITED,
+ ui::Dataspace::RANGE_EXTENDED,
+ ui::Dataspace::SRGB_LINEAR,
+ ui::Dataspace::V0_SRGB_LINEAR,
+ ui::Dataspace::V0_SCRGB_LINEAR,
+ ui::Dataspace::SRGB,
+ ui::Dataspace::V0_SRGB,
+ ui::Dataspace::V0_SCRGB,
+ ui::Dataspace::JFIF,
+ ui::Dataspace::V0_JFIF,
+ ui::Dataspace::BT601_625,
+ ui::Dataspace::V0_BT601_625,
+ ui::Dataspace::BT601_525,
+ ui::Dataspace::V0_BT601_525,
+ ui::Dataspace::BT709,
+ ui::Dataspace::V0_BT709,
+ ui::Dataspace::DCI_P3_LINEAR,
+ ui::Dataspace::DCI_P3,
+ ui::Dataspace::DISPLAY_P3_LINEAR,
+ ui::Dataspace::DISPLAY_P3,
+ ui::Dataspace::ADOBE_RGB,
+ ui::Dataspace::BT2020_LINEAR,
+ ui::Dataspace::BT2020,
+ ui::Dataspace::BT2020_PQ,
+ ui::Dataspace::DEPTH,
+ ui::Dataspace::SENSOR,
+ ui::Dataspace::BT2020_ITU,
+ ui::Dataspace::BT2020_ITU_PQ,
+ ui::Dataspace::BT2020_ITU_HLG,
+ ui::Dataspace::BT2020_HLG,
+ ui::Dataspace::DISPLAY_BT2020,
+ ui::Dataspace::DYNAMIC_DEPTH,
+ ui::Dataspace::JPEG_APP_SEGMENTS,
+ ui::Dataspace::HEIF,
+};
+
+static constexpr int32_t kReadBufferUsage[] = {
+ GRALLOC_USAGE_SW_READ_NEVER, GRALLOC_USAGE_SW_READ_RARELY, GRALLOC_USAGE_SW_READ_OFTEN,
+ GRALLOC_USAGE_SW_READ_MASK};
+
+static constexpr int32_t kWriteBufferUsage[] = {
+ GRALLOC_USAGE_SW_WRITE_NEVER, GRALLOC_USAGE_SW_WRITE_RARELY, GRALLOC_USAGE_SW_WRITE_OFTEN,
+ GRALLOC_USAGE_SW_WRITE_MASK};
+
+static constexpr int32_t kRenderBufferUsage[] = {
+ GRALLOC_USAGE_HW_TEXTURE,
+ GRALLOC_USAGE_HW_RENDER,
+ GRALLOC_USAGE_HW_2D,
+ GRALLOC_USAGE_HW_COMPOSER,
+ GRALLOC_USAGE_HW_FB,
+ GRALLOC_USAGE_EXTERNAL_DISP,
+ GRALLOC_USAGE_PROTECTED,
+ GRALLOC_USAGE_CURSOR,
+ GRALLOC_USAGE_HW_VIDEO_ENCODER,
+ GRALLOC_USAGE_HW_CAMERA_WRITE,
+ GRALLOC_USAGE_HW_CAMERA_READ,
+ GRALLOC_USAGE_HW_CAMERA_ZSL,
+ GRALLOC_USAGE_HW_CAMERA_MASK,
+ GRALLOC_USAGE_HW_MASK,
+ GRALLOC_USAGE_RENDERSCRIPT,
+ GRALLOC_USAGE_FOREIGN_BUFFERS,
+ GRALLOC_USAGE_HW_IMAGE_ENCODER,
+};
+
+static constexpr RenderEngine::ContextPriority kSetContextPriority[] = {
+ RenderEngine::ContextPriority::LOW, RenderEngine::ContextPriority::MEDIUM,
+ RenderEngine::ContextPriority::HIGH};
+
+static constexpr RenderEngine::RenderEngineType kSetRenderEngineType[] = {
+ RenderEngine::RenderEngineType::GLES, RenderEngine::RenderEngineType::THREADED};
+
+static constexpr RenderEngine::CleanupMode kCleanupMode[] = {
+ RenderEngine::CleanupMode::CLEAN_OUTPUT_RESOURCES, RenderEngine::CleanupMode::CLEAN_ALL};
+
+class RenderFrightFuzzer {
+ public:
+ RenderFrightFuzzer(const uint8_t* data, size_t size) : mFdp(data, size){};
+ void process();
+
+ private:
+ FuzzedDataProvider mFdp;
+ void getLayerSetting(renderengine::LayerSettings& layerSetting, sp<GraphicBuffer> buffer,
+ const Rect& sourceCrop, uint32_t textureName);
+};
+
+void RenderFrightFuzzer::getLayerSetting(renderengine::LayerSettings& layerSetting,
+ sp<GraphicBuffer> buffer, const Rect& sourceCrop,
+ uint32_t textureName) {
+ layerSetting.geometry.boundaries = sourceCrop.toFloatRect();
+ layerSetting.geometry.roundedCornersRadius = mFdp.ConsumeFloatingPoint<float>();
+ layerSetting.geometry.roundedCornersCrop = sourceCrop.toFloatRect();
+
+ layerSetting.alpha = mFdp.ConsumeFloatingPoint<float>();
+ layerSetting.sourceDataspace = mFdp.PickValueInArray(kDataSpace);
+ layerSetting.backgroundBlurRadius = mFdp.ConsumeFloatingPoint<float>();
+ layerSetting.source.buffer.buffer = buffer;
+ layerSetting.source.buffer.isOpaque = mFdp.ConsumeBool();
+ layerSetting.source.buffer.fence = Fence::NO_FENCE;
+ layerSetting.source.buffer.textureName = textureName;
+ layerSetting.source.buffer.usePremultipliedAlpha = mFdp.ConsumeBool();
+ layerSetting.source.buffer.isY410BT2020 =
+ (layerSetting.sourceDataspace == ui::Dataspace::BT2020_ITU_PQ ||
+ layerSetting.sourceDataspace == ui::Dataspace::BT2020_ITU_HLG);
+ layerSetting.source.buffer.maxMasteringLuminance = mFdp.ConsumeFloatingPoint<float>();
+ layerSetting.source.buffer.maxContentLuminance = mFdp.ConsumeFloatingPoint<float>();
+
+ layerSetting.shadow.lightPos =
+ vec3(mFdp.ConsumeFloatingPoint<float>(), mFdp.ConsumeFloatingPoint<float>(), 0);
+ layerSetting.shadow.ambientColor = {
+ mFdp.ConsumeFloatingPoint<float>(), mFdp.ConsumeFloatingPoint<float>(),
+ mFdp.ConsumeFloatingPoint<float>(), mFdp.ConsumeFloatingPoint<float>()};
+ layerSetting.shadow.spotColor = {
+ mFdp.ConsumeFloatingPoint<float>(), mFdp.ConsumeFloatingPoint<float>(),
+ mFdp.ConsumeFloatingPoint<float>(), mFdp.ConsumeFloatingPoint<float>()};
+ layerSetting.shadow.length = mFdp.ConsumeFloatingPoint<float>();
+ layerSetting.shadow.casterIsTranslucent = mFdp.ConsumeBool();
+}
+
+void RenderFrightFuzzer::process() {
+ auto args = RenderEngineCreationArgs::Builder()
+ .setPixelFormat(mFdp.ConsumeIntegralInRange<int32_t>(kMinPixelFormat,
+ kMaxPixelFormat))
+ .setImageCacheSize(mFdp.ConsumeIntegral<uint32_t>())
+ .setUseColorManagerment(mFdp.ConsumeBool())
+ .setEnableProtectedContext(mFdp.ConsumeBool())
+ .setPrecacheToneMapperShaderOnly(mFdp.ConsumeBool())
+ .setSupportsBackgroundBlur(mFdp.ConsumeBool())
+ .setContextPriority(mFdp.PickValueInArray(kSetContextPriority))
+ .setRenderEngineType(mFdp.PickValueInArray(kSetRenderEngineType))
+ .build();
+ std::unique_ptr<RenderEngine> renderEngine = RenderEngine::create(args);
+
+ std::vector<uint32_t> textures;
+ int32_t maxCount = mFdp.ConsumeIntegralInRange<size_t>(0, kMaxTextureCount);
+ for (size_t i = 0; i < maxCount; ++i) {
+ textures.push_back(mFdp.ConsumeIntegral<uint32_t>());
+ }
+
+ while (mFdp.remaining_bytes()) {
+ int32_t renderFrightAPIs =
+ mFdp.ConsumeIntegralInRange<int32_t>(kMinRenderAPI, kMaxRenderAPI);
+ switch (renderFrightAPIs) {
+ case 0: {
+ renderEngine->genTextures(textures.size(), textures.data());
+ break;
+ }
+ case 1: {
+ renderEngine->deleteTextures(textures.size(), textures.data());
+ break;
+ }
+ case 2: {
+ renderEngine->useProtectedContext(mFdp.ConsumeBool());
+ break;
+ }
+ case 3: {
+ renderEngine->cleanupPostRender(mFdp.PickValueInArray(kCleanupMode));
+ break;
+ }
+ case 4: {
+ renderEngine->unbindExternalTextureBuffer(mFdp.ConsumeIntegral<uint64_t>());
+ break;
+ }
+ case 5: {
+ renderEngine->primeCache();
+ break;
+ }
+ case 6: {
+ sp<Fence> fence = sp<Fence>::make();
+ sp<GraphicBuffer> buffer = sp<GraphicBuffer>::make();
+ renderEngine->bindExternalTextureBuffer(mFdp.ConsumeIntegral<uint32_t>(), buffer,
+ fence);
+ break;
+ }
+ case 7: {
+ sp<GraphicBuffer> buffer = sp<GraphicBuffer>::make();
+ renderEngine->cacheExternalTextureBuffer(buffer);
+ break;
+ }
+ case 8: {
+ std::vector<const renderengine::LayerSettings*> layers;
+ renderengine::LayerSettings layerSetting;
+ int32_t width = mFdp.ConsumeIntegralInRange<int32_t>(0, KMaxDisplayWidth);
+ int32_t height = mFdp.ConsumeIntegralInRange<int32_t>(0, KMaxDisplayHeight);
+ Rect sourceCrop(mFdp.ConsumeIntegralInRange<int32_t>(0, width),
+ mFdp.ConsumeIntegralInRange<int32_t>(0, height));
+ uint32_t textureName = 0;
+ /* Get a single texture name to pass to layers */
+ renderEngine->genTextures(1 /*numTextures*/, &textureName);
+ sp<GraphicBuffer> buffer;
+ const uint32_t usage = (mFdp.PickValueInArray(kReadBufferUsage) |
+ mFdp.PickValueInArray(kWriteBufferUsage) |
+ mFdp.PickValueInArray(kRenderBufferUsage));
+
+ for (int i = 0; i < kMaxRenderLayer; ++i) {
+ buffer = new GraphicBuffer(
+ width, height,
+ mFdp.ConsumeIntegralInRange<int32_t>(PIXEL_FORMAT_RGBA_8888,
+ PIXEL_FORMAT_RGBA_4444),
+ usage, "input");
+ getLayerSetting(layerSetting, buffer, sourceCrop, textureName);
+ layers.push_back(&layerSetting);
+ }
+
+ DisplaySettings settings;
+ settings.physicalDisplay = sourceCrop;
+ settings.clip = sourceCrop;
+ settings.outputDataspace = mFdp.PickValueInArray(kDataSpace);
+ settings.maxLuminance = mFdp.ConsumeFloatingPoint<float>();
+
+ sp<GraphicBuffer> dstBuffer =
+ new GraphicBuffer(width, height,
+ mFdp.ConsumeIntegralInRange<int32_t>(
+ PIXEL_FORMAT_RGBA_8888, PIXEL_FORMAT_RGBA_4444),
+ usage, "output");
+ base::unique_fd bufferFence;
+ base::unique_fd drawFence;
+
+ renderEngine->drawLayers(settings, layers, dstBuffer, mFdp.ConsumeBool(),
+ std::move(bufferFence),
+ (mFdp.ConsumeBool() ? nullptr : &drawFence));
+ }
+ }
+ }
+}
+
+extern "C" int LLVMFuzzerTestOneInput(const uint8_t* data, size_t size) {
+ RenderFrightFuzzer renderFrightFuzzer(data, size);
+ renderFrightFuzzer.process();
+ return 0;
+}
diff --git a/media/libstagefright/rtsp/fuzzer/Android.bp b/media/libstagefright/rtsp/fuzzer/Android.bp
index a2791ba..ff64af5 100644
--- a/media/libstagefright/rtsp/fuzzer/Android.bp
+++ b/media/libstagefright/rtsp/fuzzer/Android.bp
@@ -29,11 +29,19 @@
header_libs: [
"libstagefright_rtsp_headers",
],
- fuzz_config:{
+ fuzz_config: {
cc: [
- "android-media-fuzzing-reports@google.com",
+ "android-media-playback@google.com",
],
componentid: 155276,
+ hotlists: [
+ "4593311",
+ ],
+ description: "This fuzzer targets the APIs of libstagefright_rtsp",
+ vector: "local_privileges_required",
+ service_privilege: "privileged",
+ users: "multi_user",
+ fuzzed_code_usage: "shipped",
},
}
@@ -44,7 +52,7 @@
],
defaults: [
"libstagefright_rtsp_fuzzer_defaults",
- ]
+ ],
}
cc_fuzz {
@@ -55,7 +63,7 @@
defaults: [
"libstagefright_rtsp_fuzzer_defaults",
],
- shared_libs:[
+ shared_libs: [
"libandroid_net",
"libbase",
"libstagefright",
diff --git a/media/libstagefright/tests/fuzzers/Android.bp b/media/libstagefright/tests/fuzzers/Android.bp
index 2bcfd67..43542c5 100644
--- a/media/libstagefright/tests/fuzzers/Android.bp
+++ b/media/libstagefright/tests/fuzzers/Android.bp
@@ -32,6 +32,15 @@
"liblog",
"media_permission-aidl-cpp",
],
+ fuzz_config: {
+ componentid: 42195,
+ hotlists: ["4593311"],
+ description: "The fuzzer targets the APIs of libstagefright",
+ vector: "local_no_privileges_required",
+ service_privilege: "constrained",
+ users: "multi_user",
+ fuzzed_code_usage: "shipped",
+ },
}
cc_fuzz {
diff --git a/media/libstagefright/tests/fuzzers/FrameDecoderFuzzer.cpp b/media/libstagefright/tests/fuzzers/FrameDecoderFuzzer.cpp
index 4218d2d..3f850c2 100644
--- a/media/libstagefright/tests/fuzzers/FrameDecoderFuzzer.cpp
+++ b/media/libstagefright/tests/fuzzers/FrameDecoderFuzzer.cpp
@@ -24,61 +24,64 @@
namespace android {
-#define MAX_MEDIA_BUFFER_SIZE 2048
+static const android_pixel_format_t kColorFormats[] = {
+ HAL_PIXEL_FORMAT_RGBA_8888,
+ HAL_PIXEL_FORMAT_RGB_565,
+ HAL_PIXEL_FORMAT_BGRA_8888,
+ HAL_PIXEL_FORMAT_RGBA_1010102,
+ HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED, /* To cover the default case */
+};
-// Fuzzer entry point.
-extern "C" int LLVMFuzzerTestOneInput(const uint8_t *data, size_t size) {
- // Init our wrapper
+static const MediaSource::ReadOptions::SeekMode kSeekModes[] = {
+ MediaSource::ReadOptions::SeekMode::SEEK_PREVIOUS_SYNC,
+ MediaSource::ReadOptions::SeekMode::SEEK_NEXT_SYNC,
+ MediaSource::ReadOptions::SeekMode::SEEK_CLOSEST_SYNC,
+ MediaSource::ReadOptions::SeekMode::SEEK_CLOSEST,
+ MediaSource::ReadOptions::SeekMode::SEEK_FRAME_INDEX,
+};
+
+static const std::string kComponentNames[] = {
+ "c2.android.avc.decoder", "c2.android.hevc.decoder", "c2.android.vp8.decoder",
+ "c2.android.vp9.decoder", "c2.android.av1.decoder", "c2.android.mpeg4.decoder",
+ "c2.android.h263.decoder",
+};
+
+extern "C" int LLVMFuzzerTestOneInput(const uint8_t* data, size_t size) {
FuzzedDataProvider fdp(data, size);
+ std::string component = fdp.PickValueInArray(kComponentNames);
+ AString componentName(component.c_str());
+ sp<MetaData> trackMeta = generateMetaData(&fdp, component);
+ sp<IMediaSource> source = sp<IMediaSourceFuzzImpl>::make(&fdp, gMaxMediaBufferSize);
- std::string name = fdp.ConsumeRandomLengthString(fdp.remaining_bytes());
- AString componentName(name.c_str());
- sp<MetaData> trackMeta = generateMetaData(&fdp);
- sp<IMediaSource> source = new IMediaSourceFuzzImpl(&fdp, MAX_MEDIA_BUFFER_SIZE);
-
- // Image or video Decoder?
- sp<FrameDecoder> decoder;
- bool isVideoDecoder = fdp.ConsumeBool();
- if (isVideoDecoder) {
- decoder = new VideoFrameDecoder(componentName, trackMeta, source);
+ sp<FrameDecoder> decoder = nullptr;
+ if (fdp.ConsumeBool()) {
+ decoder = sp<MediaImageDecoder>::make(componentName, trackMeta, source);
} else {
- decoder = new MediaImageDecoder(componentName, trackMeta, source);
+ decoder = sp<VideoFrameDecoder>::make(componentName, trackMeta, source);
}
- while (fdp.remaining_bytes()) {
- uint8_t switchCase = fdp.ConsumeIntegralInRange<uint8_t>(0, 3);
- switch (switchCase) {
- case 0: {
- int64_t frameTimeUs = fdp.ConsumeIntegral<int64_t>();
- int option = fdp.ConsumeIntegral<int>();
- int colorFormat = fdp.ConsumeIntegral<int>();
- decoder->init(frameTimeUs, option, colorFormat);
- break;
- }
- case 1:
- decoder->extractFrame();
- break;
- case 2: {
- FrameRect rect;
- rect.left = fdp.ConsumeIntegral<int32_t>();
- rect.top = fdp.ConsumeIntegral<int32_t>();
- rect.right = fdp.ConsumeIntegral<int32_t>();
- rect.bottom = fdp.ConsumeIntegral<int32_t>();
- decoder->extractFrame(&rect);
- break;
- }
- case 3: {
- sp<MetaData> trackMeta = generateMetaData(&fdp);
- decoder->getMetadataOnly(trackMeta,
- /*colorFormat*/ fdp.ConsumeIntegral<int>(),
- /*thumbnail*/ fdp.ConsumeBool());
- break;
- }
- }
+ if (decoder.get() &&
+ decoder->init(fdp.ConsumeIntegral<uint64_t>() /* frameTimeUs */,
+ fdp.PickValueInArray(kSeekModes) /* option */,
+ fdp.PickValueInArray(kColorFormats) /* colorFormat */) == OK) {
+ auto frameDecoderAPI = fdp.PickValueInArray<const std::function<void()>>({
+ [&]() { decoder->extractFrame(); },
+ [&]() {
+ FrameRect rect(fdp.ConsumeIntegral<int32_t>() /* left */,
+ fdp.ConsumeIntegral<int32_t>() /* top */,
+ fdp.ConsumeIntegral<int32_t>() /* right */,
+ fdp.ConsumeIntegral<int32_t>() /* bottom */
+ );
+ decoder->extractFrame(&rect);
+ },
+ [&]() {
+ FrameDecoder::getMetadataOnly(
+ trackMeta, fdp.PickValueInArray(kColorFormats) /* colorFormat */,
+ fdp.ConsumeBool() /* thumbnail */);
+ },
+ });
+ frameDecoderAPI();
}
-
- generated_mime_types.clear();
-
return 0;
}
diff --git a/media/libstagefright/tests/fuzzers/FrameDecoderHelpers.h b/media/libstagefright/tests/fuzzers/FrameDecoderHelpers.h
index 228c04a..5430530 100644
--- a/media/libstagefright/tests/fuzzers/FrameDecoderHelpers.h
+++ b/media/libstagefright/tests/fuzzers/FrameDecoderHelpers.h
@@ -20,69 +20,100 @@
#include <media/stagefright/MetaData.h>
#include "MediaMimeTypes.h"
-#define MAX_METADATA_BUF_SIZE 512
-
namespace android {
std::vector<std::shared_ptr<char>> generated_mime_types;
+constexpr uint8_t kMinKeyHeight = 32;
+constexpr uint8_t kMinKeyWidth = 32;
+constexpr uint16_t kMaxKeyHeight = 2160;
+constexpr uint16_t kMaxKeyWidth = 3840;
+size_t gMaxMediaBufferSize = 0;
-sp<MetaData> generateMetaData(FuzzedDataProvider *fdp) {
- sp<MetaData> newMeta = new MetaData();
+sp<MetaData> generateMetaData(FuzzedDataProvider* fdp, std::string componentName = std::string()) {
+ sp<MetaData> newMeta = sp<MetaData>::make();
- // random MIME Type
- const char *mime_type;
- size_t index = fdp->ConsumeIntegralInRange<size_t>(0, kMimeTypes.size());
- // Let there be a chance of a true random string
- if (index == kMimeTypes.size()) {
- std::string mime_str = fdp->ConsumeRandomLengthString(64);
- std::shared_ptr<char> mime_cstr(new char[mime_str.length()+1]);
- generated_mime_types.push_back(mime_cstr);
- strncpy(mime_cstr.get(), mime_str.c_str(), mime_str.length()+1);
- mime_type = mime_cstr.get();
- } else {
- mime_type = kMimeTypes[index];
+ const char* mime;
+    if (!componentName.empty())
+ {
+ auto it = decoderToMediaType.find(componentName);
+ mime = it->second;
}
- newMeta->setCString(kKeyMIMEType, mime_type);
+ else {
+ size_t index = fdp->ConsumeIntegralInRange<size_t>(0, kMimeTypes.size());
+ // Let there be a chance of a true random string
+ if (index == kMimeTypes.size()) {
+ std::string mime_str = fdp->ConsumeRandomLengthString(64);
+ std::shared_ptr<char> mime_cstr(new char[mime_str.length()+1]);
+ generated_mime_types.push_back(mime_cstr);
+ strncpy(mime_cstr.get(), mime_str.c_str(), mime_str.length()+1);
+ mime = mime_cstr.get();
+ } else {
+ mime = kMimeTypes[index];
+ }
+ }
+ newMeta->setCString(kKeyMIMEType, mime);
- // Thumbnail time
- newMeta->setInt64(kKeyThumbnailTime, fdp->ConsumeIntegral<int64_t>());
+ auto height = fdp->ConsumeIntegralInRange<uint16_t>(kMinKeyHeight, kMaxKeyHeight);
+ auto width = fdp->ConsumeIntegralInRange<uint16_t>(kMinKeyWidth, kMaxKeyWidth);
+ newMeta->setInt32(kKeyHeight, height);
+ newMeta->setInt32(kKeyWidth, width);
- // Values used by allocVideoFrame
- newMeta->setInt32(kKeyRotation, fdp->ConsumeIntegral<int32_t>());
- size_t profile_size =
- fdp->ConsumeIntegralInRange<size_t>(0, MAX_METADATA_BUF_SIZE);
- std::vector<uint8_t> profile_bytes =
- fdp->ConsumeBytes<uint8_t>(profile_size);
- newMeta->setData(kKeyIccProfile,
- fdp->ConsumeIntegral<int32_t>(),
- profile_bytes.empty() ? nullptr : profile_bytes.data(),
- profile_bytes.size());
- newMeta->setInt32(kKeySARWidth, fdp->ConsumeIntegral<int32_t>());
- newMeta->setInt32(kKeySARHeight, fdp->ConsumeIntegral<int32_t>());
- newMeta->setInt32(kKeyDisplayWidth, fdp->ConsumeIntegral<int32_t>());
- newMeta->setInt32(kKeyDisplayHeight, fdp->ConsumeIntegral<int32_t>());
+ gMaxMediaBufferSize = height * width;
- // Values used by findThumbnailInfo
- newMeta->setInt32(kKeyThumbnailWidth, fdp->ConsumeIntegral<int32_t>());
- newMeta->setInt32(kKeyThumbnailHeight, fdp->ConsumeIntegral<int32_t>());
- size_t thumbnail_size =
- fdp->ConsumeIntegralInRange<size_t>(0, MAX_METADATA_BUF_SIZE);
- std::vector<uint8_t> thumb_bytes =
- fdp->ConsumeBytes<uint8_t>(thumbnail_size);
- newMeta->setData(kKeyThumbnailHVCC,
- fdp->ConsumeIntegral<int32_t>(),
- thumb_bytes.empty() ? nullptr : thumb_bytes.data(),
- thumb_bytes.size());
+ if (fdp->ConsumeBool()) {
+ newMeta->setInt32(kKeyTileHeight,
+ fdp->ConsumeIntegralInRange<uint16_t>(kMinKeyHeight, height));
+ newMeta->setInt32(kKeyTileWidth,
+ fdp->ConsumeIntegralInRange<uint16_t>(kMinKeyWidth, width));
+ newMeta->setInt32(kKeyGridRows, fdp->ConsumeIntegral<uint8_t>());
+ newMeta->setInt32(kKeyGridCols, fdp->ConsumeIntegral<uint8_t>());
+ }
- // Values used by findGridInfo
- newMeta->setInt32(kKeyTileWidth, fdp->ConsumeIntegral<int32_t>());
- newMeta->setInt32(kKeyTileHeight, fdp->ConsumeIntegral<int32_t>());
- newMeta->setInt32(kKeyGridRows, fdp->ConsumeIntegral<int32_t>());
- newMeta->setInt32(kKeyGridCols, fdp->ConsumeIntegral<int32_t>());
+ if (fdp->ConsumeBool()) {
+ newMeta->setInt32(kKeySARHeight, fdp->ConsumeIntegral<uint8_t>());
+ newMeta->setInt32(kKeySARWidth, fdp->ConsumeIntegral<uint8_t>());
+ }
- // A few functions perform a CHECK() that height/width are set
- newMeta->setInt32(kKeyHeight, fdp->ConsumeIntegral<int32_t>());
- newMeta->setInt32(kKeyWidth, fdp->ConsumeIntegral<int32_t>());
+ if (fdp->ConsumeBool()) {
+ newMeta->setInt32(kKeyDisplayHeight,
+ fdp->ConsumeIntegralInRange<uint16_t>(height, UINT16_MAX));
+ newMeta->setInt32(kKeyDisplayWidth,
+ fdp->ConsumeIntegralInRange<uint16_t>(width, UINT16_MAX));
+ }
+
+ if (fdp->ConsumeBool()) {
+ newMeta->setRect(kKeyCropRect, fdp->ConsumeIntegral<int32_t>() /* left */,
+ fdp->ConsumeIntegral<int32_t>() /* top */,
+ fdp->ConsumeIntegral<int32_t>() /* right */,
+ fdp->ConsumeIntegral<int32_t>() /* bottom */);
+ }
+
+ if (fdp->ConsumeBool()) {
+ newMeta->setInt32(kKeyRotation, fdp->ConsumeIntegralInRange<uint8_t>(0, 3) * 90);
+ }
+
+ if (fdp->ConsumeBool()) {
+ newMeta->setInt64(kKeyThumbnailTime, fdp->ConsumeIntegral<uint64_t>());
+ newMeta->setInt32(kKeyThumbnailHeight, fdp->ConsumeIntegral<uint8_t>());
+ newMeta->setInt32(kKeyThumbnailWidth, fdp->ConsumeIntegral<uint8_t>());
+
+ size_t thumbnailSize = fdp->ConsumeIntegral<size_t>();
+ std::vector<uint8_t> thumbnailData = fdp->ConsumeBytes<uint8_t>(thumbnailSize);
+ if (mime == MEDIA_MIMETYPE_VIDEO_AV1) {
+ newMeta->setData(kKeyThumbnailAV1C, fdp->ConsumeIntegral<int32_t>() /* type */,
+ thumbnailData.data(), thumbnailData.size());
+ } else {
+ newMeta->setData(kKeyThumbnailHVCC, fdp->ConsumeIntegral<int32_t>() /* type */,
+ thumbnailData.data(), thumbnailData.size());
+ }
+ }
+
+ if (fdp->ConsumeBool()) {
+ size_t profileSize = fdp->ConsumeIntegral<size_t>();
+ std::vector<uint8_t> profileData = fdp->ConsumeBytes<uint8_t>(profileSize);
+ newMeta->setData(kKeyIccProfile, fdp->ConsumeIntegral<int32_t>() /* type */,
+ profileData.data(), profileData.size());
+ }
return newMeta;
}
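
Note: a small sketch of the component-name lookup used above, with an explicit guard for names that are not in decoderToMediaType (the fuzzer only passes names from kComponentNames, so the helper above relies on the entry existing).

```cpp
#include <string>
#include <unordered_map>

static const char* mimeForComponent(
        const std::unordered_map<std::string, const char*>& table,
        const std::string& componentName,
        const char* fallbackMime) {
    auto it = table.find(componentName);
    return it != table.end() ? it->second : fallbackMime;
}
```
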
diff --git a/media/libstagefright/tests/fuzzers/IMediaSourceFuzzImpl.h b/media/libstagefright/tests/fuzzers/IMediaSourceFuzzImpl.h
index e769950..7e6f662 100644
--- a/media/libstagefright/tests/fuzzers/IMediaSourceFuzzImpl.h
+++ b/media/libstagefright/tests/fuzzers/IMediaSourceFuzzImpl.h
@@ -19,31 +19,33 @@
#include <media/stagefright/MediaSource.h>
+#define MAX_FRAMES 5
+
namespace android {
class IMediaSourceFuzzImpl : public IMediaSource {
public:
- IMediaSourceFuzzImpl(FuzzedDataProvider *_fdp, size_t _max_buffer_size) :
- fdp(_fdp),
- max_buffer_size(_max_buffer_size) {}
- status_t start(MetaData*) override { return 0; }
- status_t stop() override { return 0; }
- sp<MetaData> getFormat() override { return nullptr; }
- status_t read(MediaBufferBase**,
- const MediaSource::ReadOptions*) override;
- status_t readMultiple(Vector<MediaBufferBase*>*, uint32_t,
- const MediaSource::ReadOptions*) override;
- bool supportReadMultiple() override { return true; }
- bool supportNonblockingRead() override { return true; }
- status_t pause() override { return 0; }
+ IMediaSourceFuzzImpl(FuzzedDataProvider* _fdp, size_t _max_buffer_size)
+ : frames_read(0), fdp(_fdp), min_buffer_size(32 * 32), max_buffer_size(_max_buffer_size) {}
+ status_t start(MetaData*) override { return 0; }
+ status_t stop() override { return 0; }
+ sp<MetaData> getFormat() override { return nullptr; }
+ status_t read(MediaBufferBase**, const MediaSource::ReadOptions*) override;
+ status_t readMultiple(Vector<MediaBufferBase*>*, uint32_t,
+ const MediaSource::ReadOptions*) override;
+ bool supportReadMultiple() override { return true; }
+ bool supportNonblockingRead() override { return true; }
+ status_t pause() override { return 0; }
protected:
IBinder* onAsBinder() { return nullptr; }
private:
- FuzzedDataProvider *fdp;
- std::vector<std::shared_ptr<MediaBufferBase>> buffer_bases;
- const size_t max_buffer_size;
+ uint8_t frames_read;
+ FuzzedDataProvider* fdp;
+ const size_t min_buffer_size;
+ const size_t max_buffer_size;
+ std::vector<uint8_t> buf;
};
// This class is simply to expose the destructor
@@ -53,32 +55,41 @@
~MediaBufferFuzzImpl() {}
};
-status_t IMediaSourceFuzzImpl::read(MediaBufferBase **buffer,
- const MediaSource::ReadOptions *options) {
+status_t IMediaSourceFuzzImpl::read(MediaBufferBase** buffer, const MediaSource::ReadOptions*) {
Vector<MediaBufferBase*> buffers;
- status_t ret = readMultiple(&buffers, 1, options);
+ status_t ret = readMultiple(&buffers, 1, nullptr);
*buffer = buffers.empty() ? nullptr : buffers[0];
return ret;
}
-status_t IMediaSourceFuzzImpl::readMultiple(Vector<MediaBufferBase*>* buffers,
- uint32_t maxNumBuffers, const MediaSource::ReadOptions*) {
- uint32_t num_buffers =
- fdp->ConsumeIntegralInRange<uint32_t>(0, maxNumBuffers);
- for(uint32_t i = 0; i < num_buffers; i++) {
- std::vector<uint8_t> buf = fdp->ConsumeBytes<uint8_t>(
- fdp->ConsumeIntegralInRange<size_t>(0, max_buffer_size));
+status_t IMediaSourceFuzzImpl::readMultiple(Vector<MediaBufferBase*>* buffers, uint32_t,
+ const MediaSource::ReadOptions*) {
+ if (++frames_read == MAX_FRAMES) {
+ auto size = fdp->ConsumeIntegralInRange<size_t>(min_buffer_size, max_buffer_size);
+ buf = fdp->ConsumeBytes<uint8_t>(size);
+ if (buf.size() < size) {
+ buf.resize(size, 0);
+ }
- std::shared_ptr<MediaBufferBase> mbb(
- new MediaBufferFuzzImpl(buf.data(), buf.size()));
+ MediaBufferBase* mbb = new MediaBufferFuzzImpl(buf.data(), buf.size());
+ mbb->meta_data().setInt64(kKeyTime, fdp->ConsumeIntegral<uint64_t>());
+ buffers->push_back(mbb);
- buffer_bases.push_back(mbb);
- buffers->push_back(mbb.get());
+ return ERROR_END_OF_STREAM;
}
- // STATUS_OK
- return 0;
+ auto size = fdp->ConsumeIntegralInRange<size_t>(min_buffer_size, max_buffer_size);
+ buf = fdp->ConsumeBytes<uint8_t>(size);
+ if (buf.size() < size) {
+ buf.resize(size, 0);
+ }
+
+ MediaBufferBase* mbb = new MediaBufferFuzzImpl(buf.data(), buf.size());
+ mbb->meta_data().setInt64(kKeyTime, fdp->ConsumeIntegral<uint64_t>());
+ buffers->push_back(mbb);
+
+ return OK;
}
} // namespace android
diff --git a/media/libstagefright/tests/fuzzers/MediaMimeTypes.h b/media/libstagefright/tests/fuzzers/MediaMimeTypes.h
index 9f337ac..de7814e 100644
--- a/media/libstagefright/tests/fuzzers/MediaMimeTypes.h
+++ b/media/libstagefright/tests/fuzzers/MediaMimeTypes.h
@@ -18,6 +18,7 @@
#define FUZZER_MEDIAMIMETYPES_H_
#include <media/stagefright/foundation/MediaDefs.h>
+#include <unordered_map>
namespace android {
@@ -80,6 +81,15 @@
MEDIA_MIMETYPE_DATA_TIMED_ID3
};
+static const std::unordered_map<std::string, const char*> decoderToMediaType = {
+ {"c2.android.vp8.decoder", MEDIA_MIMETYPE_VIDEO_VP8},
+ {"c2.android.vp9.decoder", MEDIA_MIMETYPE_VIDEO_VP9},
+ {"c2.android.av1.decoder", MEDIA_MIMETYPE_VIDEO_AV1},
+ {"c2.android.avc.decoder", MEDIA_MIMETYPE_VIDEO_AVC},
+ {"c2.android.hevc.decoder", MEDIA_MIMETYPE_VIDEO_HEVC},
+ {"c2.android.mpeg4.decoder", MEDIA_MIMETYPE_VIDEO_MPEG4},
+ {"c2.android.h263.decoder", MEDIA_MIMETYPE_VIDEO_H263}};
+
} // namespace android
#endif // FUZZER_MEDIAMIMETYPES_H_
diff --git a/media/libstagefright/tests/fuzzers/MediaMuxerFuzzer.cpp b/media/libstagefright/tests/fuzzers/MediaMuxerFuzzer.cpp
index 70d73c8..5ac2a54 100644
--- a/media/libstagefright/tests/fuzzers/MediaMuxerFuzzer.cpp
+++ b/media/libstagefright/tests/fuzzers/MediaMuxerFuzzer.cpp
@@ -13,94 +13,221 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
-// Authors: corbin.souffrant@leviathansecurity.com
-// dylan.katz@leviathansecurity.com
-#include <MediaMuxerFuzzer.h>
-#include <cutils/ashmem.h>
#include <fuzzer/FuzzedDataProvider.h>
#include <media/stagefright/MediaMuxer.h>
#include <media/stagefright/foundation/ABuffer.h>
#include <media/stagefright/foundation/AMessage.h>
+#include <media/stagefright/foundation/MediaDefs.h>
namespace android {
+const uint8_t kMinSize = 0;
+const uint8_t kMinTrackCount = 0;
-// Can't seem to get setBuffer or setString working. It always segfaults on a
-// null pointer read or memleaks. So that functionality is missing.
-void createMessage(AMessage *msg, FuzzedDataProvider *fdp) {
- size_t count = fdp->ConsumeIntegralInRange<size_t>(0, 32);
- while (fdp->remaining_bytes() > 0 && count > 0) {
- uint8_t function_id =
- fdp->ConsumeIntegralInRange<uint8_t>(0, amessage_setvals.size() - 1);
- amessage_setvals[function_id](msg, fdp);
- count--;
- }
+enum kBufferFlags { BUFFER_FLAG_SYNCFRAME = 1, BUFFER_FLAG_CODECCONFIG = 2, BUFFER_FLAG_EOS = 4 };
+
+constexpr char kMuxerFile[] = "MediaMuxer";
+
+const std::string kAudioMimeTypes[] = {
+ MEDIA_MIMETYPE_AUDIO_AMR_NB,
+ MEDIA_MIMETYPE_AUDIO_AMR_WB,
+ MEDIA_MIMETYPE_AUDIO_MPEG,
+ MEDIA_MIMETYPE_AUDIO_MPEG_LAYER_I,
+ MEDIA_MIMETYPE_AUDIO_MPEG_LAYER_II,
+ MEDIA_MIMETYPE_AUDIO_MIDI,
+ MEDIA_MIMETYPE_AUDIO_AAC,
+ MEDIA_MIMETYPE_AUDIO_QCELP,
+ MEDIA_MIMETYPE_AUDIO_VORBIS,
+ MEDIA_MIMETYPE_AUDIO_OPUS,
+ MEDIA_MIMETYPE_AUDIO_G711_ALAW,
+ MEDIA_MIMETYPE_AUDIO_G711_MLAW,
+ MEDIA_MIMETYPE_AUDIO_RAW,
+ MEDIA_MIMETYPE_AUDIO_FLAC,
+ MEDIA_MIMETYPE_AUDIO_AAC_ADTS,
+ MEDIA_MIMETYPE_AUDIO_MSGSM,
+ MEDIA_MIMETYPE_AUDIO_AC3,
+ MEDIA_MIMETYPE_AUDIO_EAC3,
+ MEDIA_MIMETYPE_AUDIO_EAC3_JOC,
+ MEDIA_MIMETYPE_AUDIO_AC4,
+ MEDIA_MIMETYPE_AUDIO_MPEGH_MHA1,
+ MEDIA_MIMETYPE_AUDIO_MPEGH_MHM1,
+ MEDIA_MIMETYPE_AUDIO_MPEGH_BL_L3,
+ MEDIA_MIMETYPE_AUDIO_MPEGH_BL_L4,
+ MEDIA_MIMETYPE_AUDIO_MPEGH_LC_L3,
+ MEDIA_MIMETYPE_AUDIO_MPEGH_LC_L4,
+ MEDIA_MIMETYPE_AUDIO_SCRAMBLED,
+ MEDIA_MIMETYPE_AUDIO_ALAC,
+ MEDIA_MIMETYPE_AUDIO_WMA,
+ MEDIA_MIMETYPE_AUDIO_MS_ADPCM,
+ MEDIA_MIMETYPE_AUDIO_DVI_IMA_ADPCM,
+ MEDIA_MIMETYPE_AUDIO_DTS,
+ MEDIA_MIMETYPE_AUDIO_DTS_HD,
+ MEDIA_MIMETYPE_AUDIO_DTS_HD_MA,
+ MEDIA_MIMETYPE_AUDIO_DTS_UHD,
+ MEDIA_MIMETYPE_AUDIO_DTS_UHD_P1,
+ MEDIA_MIMETYPE_AUDIO_DTS_UHD_P2,
+ MEDIA_MIMETYPE_AUDIO_EVRC,
+ MEDIA_MIMETYPE_AUDIO_EVRCB,
+ MEDIA_MIMETYPE_AUDIO_EVRCWB,
+ MEDIA_MIMETYPE_AUDIO_EVRCNW,
+ MEDIA_MIMETYPE_AUDIO_AMR_WB_PLUS,
+ MEDIA_MIMETYPE_AUDIO_APTX,
+ MEDIA_MIMETYPE_AUDIO_DRA,
+ MEDIA_MIMETYPE_AUDIO_DOLBY_MAT,
+ MEDIA_MIMETYPE_AUDIO_DOLBY_MAT_1_0,
+ MEDIA_MIMETYPE_AUDIO_DOLBY_MAT_2_0,
+ MEDIA_MIMETYPE_AUDIO_DOLBY_MAT_2_1,
+ MEDIA_MIMETYPE_AUDIO_DOLBY_TRUEHD,
+ MEDIA_MIMETYPE_AUDIO_AAC_MP4,
+ MEDIA_MIMETYPE_AUDIO_AAC_MAIN,
+ MEDIA_MIMETYPE_AUDIO_AAC_LC,
+ MEDIA_MIMETYPE_AUDIO_AAC_SSR,
+ MEDIA_MIMETYPE_AUDIO_AAC_LTP,
+ MEDIA_MIMETYPE_AUDIO_AAC_HE_V1,
+ MEDIA_MIMETYPE_AUDIO_AAC_SCALABLE,
+ MEDIA_MIMETYPE_AUDIO_AAC_ERLC,
+ MEDIA_MIMETYPE_AUDIO_AAC_LD,
+ MEDIA_MIMETYPE_AUDIO_AAC_HE_V2,
+ MEDIA_MIMETYPE_AUDIO_AAC_ELD,
+ MEDIA_MIMETYPE_AUDIO_AAC_XHE,
+ MEDIA_MIMETYPE_AUDIO_AAC_ADIF,
+ MEDIA_MIMETYPE_AUDIO_AAC_ADTS_MAIN,
+ MEDIA_MIMETYPE_AUDIO_AAC_ADTS_LC,
+ MEDIA_MIMETYPE_AUDIO_AAC_ADTS_SSR,
+ MEDIA_MIMETYPE_AUDIO_AAC_ADTS_LTP,
+ MEDIA_MIMETYPE_AUDIO_AAC_ADTS_HE_V1,
+ MEDIA_MIMETYPE_AUDIO_AAC_ADTS_SCALABLE,
+ MEDIA_MIMETYPE_AUDIO_AAC_ADTS_ERLC,
+ MEDIA_MIMETYPE_AUDIO_AAC_ADTS_LD,
+ MEDIA_MIMETYPE_AUDIO_AAC_ADTS_HE_V2,
+ MEDIA_MIMETYPE_AUDIO_AAC_ADTS_ELD,
+ MEDIA_MIMETYPE_AUDIO_AAC_ADTS_XHE,
+ MEDIA_MIMETYPE_AUDIO_AAC_LATM_LC,
+ MEDIA_MIMETYPE_AUDIO_AAC_LATM_HE_V1,
+ MEDIA_MIMETYPE_AUDIO_AAC_LATM_HE_V2,
+ MEDIA_MIMETYPE_AUDIO_IEC61937,
+ MEDIA_MIMETYPE_AUDIO_IEC60958,
+};
+
+const std::string kVideoMimeTypes[] = {
+ MEDIA_MIMETYPE_VIDEO_VP8, MEDIA_MIMETYPE_VIDEO_VP9,
+ MEDIA_MIMETYPE_VIDEO_AV1, MEDIA_MIMETYPE_VIDEO_AVC,
+ MEDIA_MIMETYPE_VIDEO_HEVC, MEDIA_MIMETYPE_VIDEO_MPEG4,
+ MEDIA_MIMETYPE_VIDEO_H263, MEDIA_MIMETYPE_VIDEO_MPEG2,
+ MEDIA_MIMETYPE_VIDEO_RAW, MEDIA_MIMETYPE_VIDEO_DOLBY_VISION,
+ MEDIA_MIMETYPE_VIDEO_SCRAMBLED, MEDIA_MIMETYPE_VIDEO_DIVX,
+ MEDIA_MIMETYPE_VIDEO_DIVX3, MEDIA_MIMETYPE_VIDEO_XVID,
+ MEDIA_MIMETYPE_VIDEO_MJPEG,
+};
+
+void getSampleAudioFormat(FuzzedDataProvider& fdp, AMessage* format) {
+ std::string mimeType = fdp.PickValueInArray(kAudioMimeTypes);
+ format->setString("mime", mimeType.c_str(), mimeType.length());
+ format->setInt32("sample-rate", fdp.ConsumeIntegral<int32_t>());
+ format->setInt32("channel-count", fdp.ConsumeIntegral<int32_t>());
+}
+
+void getSampleVideoFormat(FuzzedDataProvider& fdp, AMessage* format) {
+ std::string mimeType = fdp.PickValueInArray(kVideoMimeTypes);
+ format->setString("mime", mimeType.c_str(), mimeType.length());
+ format->setInt32("height", fdp.ConsumeIntegral<int32_t>());
+ format->setInt32("width", fdp.ConsumeIntegral<int32_t>());
+ format->setInt32("time-lapse-fps", fdp.ConsumeIntegral<int32_t>());
}
extern "C" int LLVMFuzzerTestOneInput(const uint8_t *data, size_t size) {
- FuzzedDataProvider fdp = FuzzedDataProvider(data, size);
+ FuzzedDataProvider fdp(data, size);
- size_t data_size = fdp.ConsumeIntegralInRange<size_t>(0, size);
- int fd = ashmem_create_region("mediamuxer_fuzz_region", data_size);
- if (fd < 0)
+ // memfd_create() creates an anonymous file and returns a file
+ // descriptor that refers to it. MFD_ALLOW_SEALING allows sealing
+ // operations on this file.
+ int32_t fd = memfd_create(kMuxerFile, MFD_ALLOW_SEALING);
+ if (fd == -1) {
+ ALOGE("memfd_create failed: %s", strerror(errno));
+ return 0;
+ }
+
+ auto outputFormat = (MediaMuxer::OutputFormat)fdp.ConsumeIntegralInRange<int32_t>(
+ MediaMuxer::OutputFormat::OUTPUT_FORMAT_MPEG_4,
+ MediaMuxer::OutputFormat::OUTPUT_FORMAT_LIST_END);
+
+ sp<MediaMuxer> mMuxer = MediaMuxer::create(fd, outputFormat);
+ if (mMuxer == nullptr) {
+ close(fd);
+ return 0;
+ }
+
+ // Used to consume a maximum of 80% of the data to send buffer data to writeSampleData().
+ // This ensures that we don't completely exhaust the data and leaves the remaining
+ // 20% for fuzzing the APIs.
+ const size_t kMaxSize = (size * 80) / 100;
+ while (fdp.remaining_bytes()) {
+ auto invokeMediaMuxerAPI = fdp.PickValueInArray<const std::function<void()>>({
+ [&]() {
+ // Using 'return' here due to a timeout bug present in OGGWriter.cpp
+ // (b/310316183).
+ if (outputFormat == MediaMuxer::OutputFormat::OUTPUT_FORMAT_OGG) {
+ return;
+ }
+
+ sp<AMessage> format = sp<AMessage>::make();
+ fdp.ConsumeBool() ? getSampleAudioFormat(fdp, format.get())
+ : getSampleVideoFormat(fdp, format.get());
+
+ mMuxer->addTrack(fdp.ConsumeBool() ? format : nullptr);
+ },
+ [&]() {
+ mMuxer->setLocation(fdp.ConsumeIntegral<int32_t>() /* latitude */,
+ fdp.ConsumeIntegral<int32_t>() /* longitude */);
+ },
+ [&]() { mMuxer->setOrientationHint(fdp.ConsumeIntegral<int32_t>() /* degrees */); },
+ [&]() { mMuxer->start(); },
+ [&]() {
+ std::vector<uint8_t> sample = fdp.ConsumeBytes<uint8_t>(
+ fdp.ConsumeIntegralInRange<size_t>(kMinSize, kMaxSize));
+ sp<ABuffer> buffer = sp<ABuffer>::make(sample.data(), sample.size());
+
+ size_t offset = fdp.ConsumeIntegralInRange<size_t>(kMinSize, sample.size());
+ size_t length =
+ fdp.ConsumeIntegralInRange<size_t>(kMinSize, buffer->size() - offset);
+ buffer->setRange(offset, length);
+
+ sp<AMessage> meta = buffer->meta();
+ meta->setInt64("sample-file-offset", fdp.ConsumeIntegral<int64_t>());
+ meta->setInt64("last-sample-index-in-chunk", fdp.ConsumeIntegral<int64_t>());
+
+ uint32_t flags = 0;
+ if (fdp.ConsumeBool()) {
+ flags |= kBufferFlags::BUFFER_FLAG_SYNCFRAME;
+ }
+ if (fdp.ConsumeBool()) {
+ flags |= kBufferFlags::BUFFER_FLAG_CODECCONFIG;
+ }
+ if (fdp.ConsumeBool()) {
+ flags |= kBufferFlags::BUFFER_FLAG_EOS;
+ }
+
+ size_t trackIndex = fdp.ConsumeBool()
+ ? fdp.ConsumeIntegralInRange<size_t>(
+ kMinTrackCount, mMuxer->getTrackCount())
+ : fdp.ConsumeIntegral<size_t>();
+ int64_t timeUs = fdp.ConsumeIntegral<int64_t>();
+ mMuxer->writeSampleData(fdp.ConsumeBool() ? buffer : nullptr, trackIndex,
+ timeUs, flags);
+ },
+ [&]() {
+ mMuxer->getTrackFormat(
+ fdp.ConsumeBool() ? fdp.ConsumeIntegralInRange<size_t>(
+ kMinTrackCount, mMuxer->getTrackCount())
+ : fdp.ConsumeIntegral<size_t>() /* idx */);
+ },
+ [&]() { mMuxer->stop(); },
+ });
+
+ invokeMediaMuxerAPI();
+ }
+
+ close(fd);
return 0;
-
- uint8_t *sh_data = static_cast<uint8_t *>(
- mmap(NULL, data_size, PROT_READ | PROT_WRITE, MAP_SHARED, fd, 0));
- if (sh_data == MAP_FAILED)
- return 0;
-
- MediaMuxer::OutputFormat format =
- (MediaMuxer::OutputFormat)fdp.ConsumeIntegralInRange<int32_t>(0, 4);
- sp<MediaMuxer> mMuxer = MediaMuxer::create(fd, format);
- if (mMuxer == nullptr) {
- return 0;
- }
-
- while (fdp.remaining_bytes() > 1) {
- switch (fdp.ConsumeIntegralInRange<uint8_t>(0, 4)) {
- case 0: {
- // For some reason it only likes mp4s here...
- if (format == 1 || format == 4)
- break;
-
- sp<AMessage> a_format(new AMessage);
- createMessage(a_format.get(), &fdp);
- mMuxer->addTrack(a_format);
- break;
- }
- case 1: {
- mMuxer->start();
- break;
- }
- case 2: {
- int degrees = fdp.ConsumeIntegral<int>();
- mMuxer->setOrientationHint(degrees);
- break;
- }
- case 3: {
- int latitude = fdp.ConsumeIntegral<int>();
- int longitude = fdp.ConsumeIntegral<int>();
- mMuxer->setLocation(latitude, longitude);
- break;
- }
- case 4: {
- size_t buf_size = fdp.ConsumeIntegralInRange<size_t>(0, data_size);
- sp<ABuffer> a_buffer(new ABuffer(buf_size));
-
- size_t trackIndex = fdp.ConsumeIntegral<size_t>();
- int64_t timeUs = fdp.ConsumeIntegral<int64_t>();
- uint32_t flags = fdp.ConsumeIntegral<uint32_t>();
- mMuxer->writeSampleData(a_buffer, trackIndex, timeUs, flags);
- }
- }
- }
-
- if (fdp.ConsumeBool())
- mMuxer->stop();
-
- munmap(sh_data, data_size);
- close(fd);
- return 0;
}
} // namespace android
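
The rewritten fuzzer drives MediaMuxer through an array of lambdas and picks one at random on every loop iteration. Below is a stripped-down sketch of that idiom, with a plain counter standing in for the object under test; everything in it is illustrative and not part of the patch.

    #include <fuzzer/FuzzedDataProvider.h>

    #include <cstdint>
    #include <functional>

    extern "C" int LLVMFuzzerTestOneInput(const uint8_t* data, size_t size) {
        FuzzedDataProvider fdp(data, size);
        int64_t state = 0;  // stands in for the API object being exercised
        while (fdp.remaining_bytes()) {
            // Each iteration consumes a few bytes to pick one "API call" and a
            // few more to parameterize it, until the input bytes run out.
            auto invokeApi = fdp.PickValueInArray<const std::function<void()>>({
                    [&]() { state += fdp.ConsumeIntegral<int32_t>(); },
                    [&]() { state = fdp.ConsumeBool() ? 0 : state; },
            });
            invokeApi();
        }
        return 0;
    }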
diff --git a/media/libstagefright/tests/fuzzers/MediaMuxerFuzzer.h b/media/libstagefright/tests/fuzzers/MediaMuxerFuzzer.h
deleted file mode 100644
index 7d4421d..0000000
--- a/media/libstagefright/tests/fuzzers/MediaMuxerFuzzer.h
+++ /dev/null
@@ -1,109 +0,0 @@
-/*
- * Copyright 2020 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-// Authors: corbin.souffrant@leviathansecurity.com
-// dylan.katz@leviathansecurity.com
-
-#pragma once
-
-#include <fuzzer/FuzzedDataProvider.h>
-#include <media/stagefright/foundation/AMessage.h>
-
-namespace android {
-
-// Mappings vectors are the list of attributes that the MediaMuxer
-// class looks for in the message.
-static std::vector<const char *> floatMappings{
- "capture-rate",
- "time-lapse-fps",
- "frame-rate",
-};
-
-static std::vector<const char *> int64Mappings{
- "exif-offset", "exif-size", "target-time",
- "thumbnail-time", "timeUs", "durationUs",
-};
-
-static std::vector<const char *> int32Mappings{"loop",
- "time-scale",
- "crypto-mode",
- "crypto-default-iv-size",
- "crypto-encrypted-byte-block",
- "crypto-skip-byte-block",
- "frame-count",
- "max-bitrate",
- "pcm-big-endian",
- "temporal-layer-count",
- "temporal-layer-id",
- "thumbnail-width",
- "thumbnail-height",
- "track-id",
- "valid-samples",
- "color-format",
- "ca-system-id",
- "is-sync-frame",
- "bitrate",
- "max-bitrate",
- "width",
- "height",
- "sar-width",
- "sar-height",
- "display-width",
- "display-height",
- "is-default",
- "tile-width",
- "tile-height",
- "grid-rows",
- "grid-cols",
- "rotation-degrees",
- "channel-count",
- "sample-rate",
- "bits-per-sample",
- "channel-mask",
- "encoder-delay",
- "encoder-padding",
- "is-adts",
- "frame-rate",
- "max-height",
- "max-width",
- "max-input-size",
- "haptic-channel-count",
- "pcm-encoding",
- "aac-profile"};
-
-static const std::vector<std::function<void(AMessage *, FuzzedDataProvider *)>>
- amessage_setvals = {
- [](AMessage *msg, FuzzedDataProvider *fdp) -> void {
- msg->setRect("crop", fdp->ConsumeIntegral<int32_t>(),
- fdp->ConsumeIntegral<int32_t>(),
- fdp->ConsumeIntegral<int32_t>(),
- fdp->ConsumeIntegral<int32_t>());
- },
- [](AMessage *msg, FuzzedDataProvider *fdp) -> void {
- msg->setFloat(floatMappings[fdp->ConsumeIntegralInRange<size_t>(
- 0, floatMappings.size() - 1)],
- fdp->ConsumeFloatingPoint<float>());
- },
- [](AMessage *msg, FuzzedDataProvider *fdp) -> void {
- msg->setInt64(int64Mappings[fdp->ConsumeIntegralInRange<size_t>(
- 0, int64Mappings.size() - 1)],
- fdp->ConsumeIntegral<int64_t>());
- },
- [](AMessage *msg, FuzzedDataProvider *fdp) -> void {
- msg->setInt32(int32Mappings[fdp->ConsumeIntegralInRange<size_t>(
- 0, int32Mappings.size() - 1)],
- fdp->ConsumeIntegral<int32_t>());
- }};
-} // namespace android
diff --git a/media/libstagefright/tests/fuzzers/corpus/0ef67b8a074fed50b8875df345ab2e62175c34c9 b/media/libstagefright/tests/fuzzers/corpus/0ef67b8a074fed50b8875df345ab2e62175c34c9
new file mode 100644
index 0000000..652581f
--- /dev/null
+++ b/media/libstagefright/tests/fuzzers/corpus/0ef67b8a074fed50b8875df345ab2e62175c34c9
Binary files differ
diff --git a/media/libstagefright/tests/fuzzers/corpus/60eb43c963545c0b2676dad3e4c38cfe87136bbc b/media/libstagefright/tests/fuzzers/corpus/60eb43c963545c0b2676dad3e4c38cfe87136bbc
new file mode 100644
index 0000000..60ca169
--- /dev/null
+++ b/media/libstagefright/tests/fuzzers/corpus/60eb43c963545c0b2676dad3e4c38cfe87136bbc
Binary files differ
diff --git a/media/libstagefright/tests/fuzzers/corpus/8c7cb9439f81a8e00b651b3658fe24116f37df7e b/media/libstagefright/tests/fuzzers/corpus/8c7cb9439f81a8e00b651b3658fe24116f37df7e
new file mode 100644
index 0000000..c03bcad
--- /dev/null
+++ b/media/libstagefright/tests/fuzzers/corpus/8c7cb9439f81a8e00b651b3658fe24116f37df7e
Binary files differ
diff --git a/media/libstagefright/tests/fuzzers/corpus/c624e73c16c59dfbc3c563416cfc962e3c3a96a0 b/media/libstagefright/tests/fuzzers/corpus/c624e73c16c59dfbc3c563416cfc962e3c3a96a0
new file mode 100644
index 0000000..52f2d5a
--- /dev/null
+++ b/media/libstagefright/tests/fuzzers/corpus/c624e73c16c59dfbc3c563416cfc962e3c3a96a0
Binary files differ
diff --git a/media/libstagefright/tests/fuzzers/corpus/c6aff0d7ccaf58a1964a6bcc51777bf1786503ca b/media/libstagefright/tests/fuzzers/corpus/c6aff0d7ccaf58a1964a6bcc51777bf1786503ca
new file mode 100644
index 0000000..83c522f
--- /dev/null
+++ b/media/libstagefright/tests/fuzzers/corpus/c6aff0d7ccaf58a1964a6bcc51777bf1786503ca
Binary files differ
diff --git a/media/libstagefright/tests/fuzzers/corpus/ec6bd6069f74a2f6e92442f88efb29288ad6f456 b/media/libstagefright/tests/fuzzers/corpus/ec6bd6069f74a2f6e92442f88efb29288ad6f456
new file mode 100644
index 0000000..62d259b
--- /dev/null
+++ b/media/libstagefright/tests/fuzzers/corpus/ec6bd6069f74a2f6e92442f88efb29288ad6f456
Binary files differ
diff --git a/media/libstagefright/tests/fuzzers/corpus/fbf47d9a9173df0a39285c94d89fcbc767d5e774 b/media/libstagefright/tests/fuzzers/corpus/fbf47d9a9173df0a39285c94d89fcbc767d5e774
new file mode 100644
index 0000000..db78b75
--- /dev/null
+++ b/media/libstagefright/tests/fuzzers/corpus/fbf47d9a9173df0a39285c94d89fcbc767d5e774
Binary files differ
diff --git a/media/libstagefright/timedtext/test/fuzzer/Android.bp b/media/libstagefright/timedtext/test/fuzzer/Android.bp
index 6590ebb..8724d51 100644
--- a/media/libstagefright/timedtext/test/fuzzer/Android.bp
+++ b/media/libstagefright/timedtext/test/fuzzer/Android.bp
@@ -48,8 +48,16 @@
],
fuzz_config: {
cc: [
- "android-media-fuzzing-reports@google.com",
+ "android-media-playback@google.com",
],
- componentid: 155276,
+ componentid: 42195,
+ hotlists: [
+ "4593311",
+ ],
+ description: "This fuzzer targets the APIs of libstagefright_timedtext",
+ vector: "local_no_privileges_required",
+ service_privilege: "constrained",
+ users: "multi_user",
+ fuzzed_code_usage: "shipped",
},
}
diff --git a/media/libstagefright/webm/Android.bp b/media/libstagefright/webm/Android.bp
index 6ed3e0e..723131d 100644
--- a/media/libstagefright/webm/Android.bp
+++ b/media/libstagefright/webm/Android.bp
@@ -10,8 +10,6 @@
cc_library_static {
name: "libstagefright_webm",
- cppflags: ["-D__STDINT_LIMITS"],
-
cflags: [
"-Werror",
"-Wall",
diff --git a/media/mediaserver/Android.bp b/media/mediaserver/Android.bp
index 2341af1..6ea40e3 100644
--- a/media/mediaserver/Android.bp
+++ b/media/mediaserver/Android.bp
@@ -85,16 +85,7 @@
"-Wall",
],
- // AIDL is only used when release_aidl_use_unfrozen is true
- // because the swcodec mainline module is a prebuilt from an
- // Android U branch in that case.
- // TODO(b/327508501)
- vintf_fragments: ["manifest_media_c2_software_hidl.xml"],
- product_variables: {
- release_aidl_use_unfrozen: {
- vintf_fragments: ["manifest_media_c2_software_aidl.xml"],
- },
- },
+ vintf_fragments: ["manifest_media_c2_software.xml"],
soong_config_variables: {
TARGET_DYNAMIC_64_32_MEDIASERVER: {
diff --git a/media/mediaserver/manifest_media_c2_software_hidl.xml b/media/mediaserver/manifest_media_c2_software.xml
similarity index 68%
rename from media/mediaserver/manifest_media_c2_software_hidl.xml
rename to media/mediaserver/manifest_media_c2_software.xml
index 69a27be..31dfafb 100644
--- a/media/mediaserver/manifest_media_c2_software_hidl.xml
+++ b/media/mediaserver/manifest_media_c2_software.xml
@@ -8,4 +8,9 @@
<instance>software</instance>
</interface>
</hal>
+ <hal format="aidl">
+ <name>android.hardware.media.c2</name>
+ <version>1</version>
+ <fqname>IComponentStore/software</fqname>
+ </hal>
</manifest>
diff --git a/media/mediaserver/manifest_media_c2_software_aidl.xml b/media/mediaserver/manifest_media_c2_software_aidl.xml
deleted file mode 100644
index e6bcafa..0000000
--- a/media/mediaserver/manifest_media_c2_software_aidl.xml
+++ /dev/null
@@ -1,7 +0,0 @@
-<manifest version="1.0" type="framework">
- <hal format="aidl">
- <name>android.hardware.media.c2</name>
- <version>1</version>
- <fqname>IComponentStore/software</fqname>
- </hal>
-</manifest>
diff --git a/media/module/aidlpersistentsurface/AidlGraphicBufferSource.cpp b/media/module/aidlpersistentsurface/AidlGraphicBufferSource.cpp
new file mode 100644
index 0000000..c208666
--- /dev/null
+++ b/media/module/aidlpersistentsurface/AidlGraphicBufferSource.cpp
@@ -0,0 +1,97 @@
+/*
+ * Copyright (C) 2024 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <inttypes.h>
+
+#define LOG_TAG "AidlGraphicBufferSource"
+//#define LOG_NDEBUG 0
+#include <utils/Log.h>
+
+#include <media/stagefright/bqhelper/ComponentWrapper.h>
+#include <media/stagefright/bqhelper/GraphicBufferSource.h>
+#include <media/stagefright/aidlpersistentsurface/AidlGraphicBufferSource.h>
+#include <media/stagefright/aidlpersistentsurface/C2NodeDef.h>
+
+namespace android::media {
+
+namespace {
+
+class AidlComponentWrapper : public ComponentWrapper {
+public:
+ explicit AidlComponentWrapper(const sp<IAidlNodeWrapper> &node)
+ : mAidlNode(node) {}
+ virtual ~AidlComponentWrapper() = default;
+
+ status_t submitBuffer(
+ int32_t bufferId, const sp<GraphicBuffer> &buffer,
+ int64_t timestamp, int fenceFd) override {
+ return mAidlNode->submitBuffer(
+ bufferId, BUFFERFLAG_ENDOFFRAME, buffer, timestamp, fenceFd);
+ }
+
+ status_t submitEos(int32_t bufferId) override {
+ return mAidlNode->submitBuffer(
+ bufferId, BUFFERFLAG_ENDOFFRAME | BUFFERFLAG_EOS);
+ }
+
+ void dispatchDataSpaceChanged(
+ int32_t dataSpace, int32_t aspects, int32_t pixelFormat) override {
+ mAidlNode->dispatchDataSpaceChanged(dataSpace, aspects, pixelFormat);
+ }
+
+private:
+ sp<IAidlNodeWrapper> mAidlNode;
+
+ DISALLOW_EVIL_CONSTRUCTORS(AidlComponentWrapper);
+};
+
+} // namespace
+
+::ndk::ScopedAStatus AidlGraphicBufferSource::onStart() {
+ status_t err = start();
+ return (OK == err) ? ::ndk::ScopedAStatus::ok() :
+ ::ndk::ScopedAStatus::fromServiceSpecificError(err);
+}
+
+::ndk::ScopedAStatus AidlGraphicBufferSource::onStop() {
+ status_t err = stop();
+ return (OK == err) ? ::ndk::ScopedAStatus::ok() :
+ ::ndk::ScopedAStatus::fromServiceSpecificError(err);
+}
+
+::ndk::ScopedAStatus AidlGraphicBufferSource::onRelease(){
+ status_t err = release();
+ return (OK == err) ? ::ndk::ScopedAStatus::ok() :
+ ::ndk::ScopedAStatus::fromServiceSpecificError(err);
+}
+
+status_t AidlGraphicBufferSource::configure(
+ const sp<IAidlNodeWrapper>& aidlNode,
+ int32_t dataSpace,
+ int32_t bufferCount,
+ uint32_t frameWidth,
+ uint32_t frameHeight,
+ uint64_t consumerUsage) {
+ if (aidlNode == NULL) {
+ return BAD_VALUE;
+ }
+
+ return GraphicBufferSource::configure(
+ new AidlComponentWrapper(aidlNode), dataSpace, bufferCount,
+ frameWidth, frameHeight, consumerUsage);
+}
+
+} // namespace android::media
diff --git a/media/module/aidlpersistentsurface/Android.bp b/media/module/aidlpersistentsurface/Android.bp
new file mode 100644
index 0000000..5c1a010
--- /dev/null
+++ b/media/module/aidlpersistentsurface/Android.bp
@@ -0,0 +1,69 @@
+aidl_interface {
+ name: "graphicbuffersource-aidl",
+ unstable: true,
+ local_include_dir: "aidl",
+ min_sdk_version: "29",
+ srcs: [
+ "aidl/android/media/AidlColorAspects.aidl",
+ "aidl/android/media/IAidlGraphicBufferSource.aidl",
+ "aidl/android/media/IAidlBufferSource.aidl",
+ "aidl/android/media/IAidlNode.aidl",
+ ],
+ headers: [
+ "HardwareBuffer_aidl",
+ ],
+ imports: [
+ "android.hardware.graphics.common-V5",
+ ],
+ include_dirs: [
+ "frameworks/native/aidl/gui",
+ ],
+ backend: {
+ cpp: {
+ enabled: false,
+ },
+ java: {
+ enabled: false,
+ },
+ ndk: {
+ enabled: true,
+ additional_shared_libraries: [
+ "libnativewindow",
+ ],
+ },
+ rust: {
+ // No users, and no rust implementation of android.os.Surface yet
+ enabled: false,
+ },
+ },
+}
+
+cc_library_shared {
+ name: "libstagefright_graphicbuffersource_aidl",
+ min_sdk_version: "29",
+ srcs: [
+ "AidlGraphicBufferSource.cpp",
+ "wrapper/WAidlGraphicBufferSource.cpp",
+ ],
+ export_include_dirs: [
+ "include",
+ ],
+ header_libs: [
+ "media_plugin_headers",
+ ],
+
+ export_header_lib_headers: [
+ "media_plugin_headers",
+ ],
+ shared_libs: [
+ "android.hardware.graphics.common-V5-ndk",
+ "graphicbuffersource-aidl-ndk",
+ "libbinder_ndk",
+ "libcutils",
+ "libgui",
+ "liblog",
+ "libnativewindow",
+ "libstagefright_bufferqueue_helper",
+ "libutils",
+ ],
+}
diff --git a/media/module/aidlpersistentsurface/aidl/android/media/AidlColorAspects.aidl b/media/module/aidlpersistentsurface/aidl/android/media/AidlColorAspects.aidl
new file mode 100644
index 0000000..4edd6ce
--- /dev/null
+++ b/media/module/aidlpersistentsurface/aidl/android/media/AidlColorAspects.aidl
@@ -0,0 +1,89 @@
+/*
+ * Copyright (C) 2024 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.media;
+
+/**
+ * Ref: frameworks/native/include/media/hardware/VideoAPI.h
+ *
+ * Framework defined color aspects. These are based mainly on ISO 23001-8 spec. As this standard
+ * continues to evolve, new values may be defined in the future. Use OTHER for these future values
+ * as well as for values not listed here, as those are not supported by the framework.
+ */
+parcelable AidlColorAspects {
+ @Backing(type="int")
+ enum Range {
+ UNSPECIFIED, // Unspecified
+ FULL, // Full range
+ LIMITED, // Limited range (if defined), or not full range
+
+ OTHER = 0xff, // Not one of the above values
+ }
+
+ // Color primaries
+ @Backing(type="int")
+ enum Primaries {
+ UNSPECIFIED, // Unspecified
+ BT709_5, // Rec.ITU-R BT.709-5 or equivalent
+ BT470_6M, // Rec.ITU-R BT.470-6 System M or equivalent
+ BT601_6_625, // Rec.ITU-R BT.601-6 625 or equivalent
+ BT601_6_525, // Rec.ITU-R BT.601-6 525 or equivalent
+ GENERIC_FILM, // Generic Film
+ BT2020, // Rec.ITU-R BT.2020 or equivalent
+
+ OTHER = 0xff, // Not one of the above values
+ }
+
+ // Transfer characteristics
+ @Backing(type="int")
+ enum Transfer {
+ UNSPECIFIED, // Unspecified
+ LINEAR, // Linear transfer characteristics
+ SRGB, // sRGB or equivalent
+ SMPTE170M, // SMPTE 170M or equivalent (e.g. BT.601/709/2020)
+ GAMMA22, // Assumed display gamma 2.2
+ GAMMA28, // Assumed display gamma 2.8
+ ST2084, // SMPTE ST 2084 for 10/12/14/16 bit systems
+ HLG, // ARIB STD-B67 hybrid-log-gamma
+
+ // values unlikely to be required by Android follow here
+ SMPTE240M = 0x40, // SMPTE 240M
+ XVYCC, // IEC 61966-2-4
+ BT1361, // Rec.ITU-R BT.1361 extended gamut
+ ST428, // SMPTE ST 428-1
+
+ OTHER = 0xff, // Not one of the above values
+ }
+
+ // YUV <-> RGB conversion
+ @Backing(type="int")
+ enum MatrixCoeffs {
+ UNSPECIFIED, // Unspecified
+ BT709_5, // Rec.ITU-R BT.709-5 or equivalent
+ BT470_6M, // KR=0.30, KB=0.11 or equivalent
+ BT601_6, // Rec.ITU-R BT.601-6 625 or equivalent
+ SMPTE240M, // SMPTE 240M or equivalent
+ BT2020, // Rec.ITU-R BT.2020 non-constant luminance
+ BT2020CONSTANT, // Rec.ITU-R BT.2020 constant luminance
+
+ OTHER = 0xff, // Not one of the above values
+ }
+
+ Range range;
+ Primaries primaries;
+ Transfer transfer;
+ MatrixCoeffs matrixCoeffs;
+}
diff --git a/media/module/aidlpersistentsurface/aidl/android/media/IAidlBufferSource.aidl b/media/module/aidlpersistentsurface/aidl/android/media/IAidlBufferSource.aidl
new file mode 100644
index 0000000..d428e99
--- /dev/null
+++ b/media/module/aidlpersistentsurface/aidl/android/media/IAidlBufferSource.aidl
@@ -0,0 +1,64 @@
+/*
+ * Copyright (C) 2024 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.media;
+
+import android.os.ParcelFileDescriptor;
+
+/**
+ * Binder interface for controlling and handling IAidlGraphicBufferSource
+ * from the process which owns IAidlNode.
+ *
+ * In order to support Persistent InputSurface and/or MediaRecorder
+ */
+interface IAidlBufferSource {
+ /**
+ * This is called when IAidlGraphicBufferSource can start handing buffers.
+ * If we already have buffers of data sitting in the BufferQueue,
+ * this will send them to the codec.
+ */
+ void onStart();
+
+ /**
+ * This is called when IAidlGraphicBufferSource indicates that
+ * the codec is meant to return all buffers back to the client for them
+ * to be freed. Do NOT submit any more buffers to the component.
+ */
+ void onStop();
+
+ /**
+ * This is called when IAidlGraphicBufferSource indicates that
+ * we are shutting down.
+ */
+ void onRelease();
+
+ /**
+ * A "codec buffer", i.e. a buffer that can be used to pass data into
+ * the encoder, has been allocated.
+ */
+ void onInputBufferAdded(int bufferID);
+
+ /**
+ * If we have a BQ buffer available,
+ * fill it with a new frame of data; otherwise, just mark it as available.
+ *
+ * fence contains the fence's fd that the callee should wait on before
+ * using the buffer (or pass on to the user of the buffer, if the user supports
+ * fences). The callee takes ownership of the fence fd even if this call fails.
+ */
+ void onInputBufferEmptied(int bufferID, in @nullable ParcelFileDescriptor fence);
+}
+
diff --git a/media/module/aidlpersistentsurface/aidl/android/media/IAidlGraphicBufferSource.aidl b/media/module/aidlpersistentsurface/aidl/android/media/IAidlGraphicBufferSource.aidl
new file mode 100644
index 0000000..6642e89
--- /dev/null
+++ b/media/module/aidlpersistentsurface/aidl/android/media/IAidlGraphicBufferSource.aidl
@@ -0,0 +1,41 @@
+/*
+ * Copyright (C) 2024 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.media;
+
+import android.hardware.graphics.common.Dataspace;
+import android.media.AidlColorAspects;
+import android.media.IAidlNode;
+
+/**
+ * Binder interface for configuring/controlling a Codec2 AIDL encoder instance
+ * on behalf of a Surface which will produce input buffers.
+ *
+ * In order to support Persistent InputSurface and/or MediaRecorder.
+ */
+interface IAidlGraphicBufferSource {
+ void configure(IAidlNode node, Dataspace dataSpace);
+ void setSuspend(boolean suspend, long suspendTimeUs);
+ void setRepeatPreviousFrameDelayUs(long repeatAfterUs);
+ void setMaxFps(float maxFps);
+ void setTimeLapseConfig(double fps, double captureFps);
+ void setStartTimeUs(long startTimeUs);
+ void setStopTimeUs(long stopTimeUs);
+ long getStopTimeOffsetUs();
+ void setColorAspects(in AidlColorAspects aspects);
+ void setTimeOffsetUs(long timeOffsetsUs);
+ void signalEndOfInputStream();
+}
diff --git a/media/module/aidlpersistentsurface/aidl/android/media/IAidlNode.aidl b/media/module/aidlpersistentsurface/aidl/android/media/IAidlNode.aidl
new file mode 100644
index 0000000..cf880c2
--- /dev/null
+++ b/media/module/aidlpersistentsurface/aidl/android/media/IAidlNode.aidl
@@ -0,0 +1,52 @@
+/*
+ * Copyright (C) 2024 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.media;
+
+import android.hardware.HardwareBuffer;
+import android.media.IAidlBufferSource;
+import android.os.ParcelFileDescriptor;
+
+/**
+ * Binder interface abstraction for codec2 encoder instance.
+ *
+ * In order to support Persistent InputSurface and/or MediaRecorder.
+ */
+interface IAidlNode {
+
+ /**
+ * Input buffer parameters retrieved from the Node.
+ */
+ parcelable InputBufferParams {
+ int bufferCountActual;
+ int frameWidth;
+ int frameHeight;
+ }
+
+ void freeNode();
+ long getConsumerUsage();
+ InputBufferParams getInputBufferParams();
+ void setConsumerUsage(long usage);
+ void setAdjustTimestampGapUs(int gapUs);
+ void setInputSurface(IAidlBufferSource bufferSource);
+ void submitBuffer(
+ int buffer,
+ in HardwareBuffer hBuffer,
+ int flags,
+ long timestampUs,
+ in @nullable ParcelFileDescriptor fence);
+ void onDataSpaceChanged(int dataSpace, int aspects, int pixelFormat);
+}
diff --git a/media/module/aidlpersistentsurface/include/media/stagefright/aidlpersistentsurface/AidlGraphicBufferSource.h b/media/module/aidlpersistentsurface/include/media/stagefright/aidlpersistentsurface/AidlGraphicBufferSource.h
new file mode 100644
index 0000000..85de688
--- /dev/null
+++ b/media/module/aidlpersistentsurface/include/media/stagefright/aidlpersistentsurface/AidlGraphicBufferSource.h
@@ -0,0 +1,76 @@
+/*
+ * Copyright (C) 2024 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#pragma once
+
+#include <media/stagefright/bqhelper/GraphicBufferSource.h>
+#include <media/stagefright/foundation/ABase.h>
+
+#include <media/stagefright/aidlpersistentsurface/IAidlNodeWrapper.h>
+
+#include <utils/Errors.h>
+
+#include <aidl/android/media/BnAidlBufferSource.h>
+
+namespace android::media {
+
+/*
+ * This class is used to feed codec encoders from a Surface via BufferQueue or
+ * HW producer using AIDL binder interfaces.
+ *
+ * See media/stagefright/bqhelper/GraphicBufferSource.h for documentation.
+ */
+class AidlGraphicBufferSource : public GraphicBufferSource {
+public:
+ AidlGraphicBufferSource() = default;
+ virtual ~AidlGraphicBufferSource() = default;
+
+ // For IAidlBufferSource interface
+ // ------------------------------
+
+ // When we can start handling buffers. If we already have buffers of data
+ // sitting in the BufferQueue, this will send them to the codec.
+ ::ndk::ScopedAStatus onStart();
+
+ // When the codec is meant to return all buffers back to the client for
+ // them to be freed. Do NOT submit any more buffers to the component.
+ ::ndk::ScopedAStatus onStop();
+
+ // When we are shutting down.
+ ::ndk::ScopedAStatus onRelease();
+
+ // Rest of the interface in GraphicBufferSource.
+
+ // IAidlGraphicBufferSource interface
+ // ------------------------------
+
+ // Configure the buffer source to be used with a codec2 aidl node given
+ // parameters.
+ status_t configure(
+ const sp<IAidlNodeWrapper> &aidlNode,
+ int32_t dataSpace,
+ int32_t bufferCount,
+ uint32_t frameWidth,
+ uint32_t frameHeight,
+ uint64_t consumerUsage);
+
+ // Rest of the interface in GraphicBufferSource.
+
+private:
+ DISALLOW_EVIL_CONSTRUCTORS(AidlGraphicBufferSource);
+};
+
+} // namespace android::media
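
A hedged usage sketch of how this class is expected to be wired up, mirroring what the WAidlGraphicBufferSource wrapper added later in this patch does internally; the helper name makeInputSurfaceSource and the construction site are assumptions, not something the patch adds.

    #include <memory>

    #include <media/stagefright/aidlpersistentsurface/AidlGraphicBufferSource.h>
    #include <media/stagefright/aidlpersistentsurface/wrapper/WAidlGraphicBufferSource.h>

    using ::android::sp;
    using ::android::media::AidlGraphicBufferSource;
    using ::android::media::WAidlGraphicBufferSource;

    // Create the buffer source and expose it over NDK binder through the wrapper.
    std::shared_ptr<WAidlGraphicBufferSource> makeInputSurfaceSource() {
        sp<AidlGraphicBufferSource> source = sp<AidlGraphicBufferSource>::make();
        return ::ndk::SharedRefBase::make<WAidlGraphicBufferSource>(source);
    }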
diff --git a/media/module/aidlpersistentsurface/include/media/stagefright/aidlpersistentsurface/C2NodeDef.h b/media/module/aidlpersistentsurface/include/media/stagefright/aidlpersistentsurface/C2NodeDef.h
new file mode 100644
index 0000000..364efe2
--- /dev/null
+++ b/media/module/aidlpersistentsurface/include/media/stagefright/aidlpersistentsurface/C2NodeDef.h
@@ -0,0 +1,32 @@
+/*
+ * Copyright 2024, The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#pragma once
+
+namespace android::media {
+
+// Node definitions for the AIDL input surface.
+//
+// Copied from the non-AIDL implementation.
+// Definitions that the input surface implementation does not need
+// are omitted.
+
+enum C2NodeBufferFlag : uint32_t {
+ BUFFERFLAG_EOS = 1,
+ BUFFERFLAG_ENDOFFRAME = (1 << 4)
+};
+
+} // namespace android::media
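
A tiny self-contained illustration of how these two flags compose when the final frame is submitted, mirroring AidlComponentWrapper::submitEos() earlier in this patch; the main() harness exists only for illustration.

    #include <cstdint>

    enum C2NodeBufferFlag : uint32_t {
        BUFFERFLAG_EOS = 1,
        BUFFERFLAG_ENDOFFRAME = (1 << 4),
    };

    int main() {
        // The last buffer carries both a complete frame and the end-of-stream mark.
        uint32_t flags = BUFFERFLAG_ENDOFFRAME | BUFFERFLAG_EOS;
        return flags == 0x11 ? 0 : 1;
    }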
diff --git a/media/module/aidlpersistentsurface/include/media/stagefright/aidlpersistentsurface/IAidlNodeWrapper.h b/media/module/aidlpersistentsurface/include/media/stagefright/aidlpersistentsurface/IAidlNodeWrapper.h
new file mode 100644
index 0000000..f23b5e4
--- /dev/null
+++ b/media/module/aidlpersistentsurface/include/media/stagefright/aidlpersistentsurface/IAidlNodeWrapper.h
@@ -0,0 +1,36 @@
+/*
+ * Copyright 2024, The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#pragma once
+
+#include <utils/RefBase.h>
+#include <utils/StrongPointer.h>
+#include <ui/GraphicBuffer.h>
+
+#include <stdint.h>
+
+namespace android::media {
+
+struct IAidlNodeWrapper : public RefBase {
+ virtual status_t submitBuffer(
+ int32_t bufferId, uint32_t flags,
+ const sp<GraphicBuffer> &buffer = nullptr,
+ int64_t timestamp = 0, int fenceFd = -1) = 0;
+ virtual void dispatchDataSpaceChanged(
+ int32_t dataSpace, int32_t aspects, int32_t pixelFormat) = 0;
+};
+
+} // namespace android::media
diff --git a/media/module/aidlpersistentsurface/include/media/stagefright/aidlpersistentsurface/wrapper/Conversion.h b/media/module/aidlpersistentsurface/include/media/stagefright/aidlpersistentsurface/wrapper/Conversion.h
new file mode 100644
index 0000000..dcb83f6
--- /dev/null
+++ b/media/module/aidlpersistentsurface/include/media/stagefright/aidlpersistentsurface/wrapper/Conversion.h
@@ -0,0 +1,60 @@
+/*
+ * Copyright 2024, The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#pragma once
+
+#include <android/binder_auto_utils.h>
+#include <ui/GraphicBuffer.h>
+#include <utils/Errors.h>
+
+#include <aidl/android/hardware/graphics/common/Dataspace.h>
+#include <aidl/android/hardware/graphics/common/PixelFormat.h>
+#include <aidl/android/media/AidlColorAspects.h>
+
+namespace android::media::aidl_conversion {
+
+inline status_t fromAidlStatus(const ::ndk::ScopedAStatus &status) {
+ if (!status.isOk()) {
+ if (status.getExceptionCode() == EX_SERVICE_SPECIFIC) {
+ return static_cast<status_t>(status.getServiceSpecificError());
+ } else {
+ return static_cast<status_t>(FAILED_TRANSACTION);
+ }
+ }
+ return NO_ERROR;
+}
+
+inline ::ndk::ScopedAStatus toAidlStatus(status_t status) {
+ if (status == NO_ERROR) {
+ return ::ndk::ScopedAStatus::ok();
+ }
+ return ::ndk::ScopedAStatus::fromServiceSpecificError(status);
+}
+
+inline int32_t compactFromAidlColorAspects(::aidl::android::media::AidlColorAspects const& s) {
+ return static_cast<int32_t>(
+ (static_cast<uint32_t>(s.range) << 24) |
+ (static_cast<uint32_t>(s.primaries) << 16) |
+ (static_cast<uint32_t>(s.transfer)) |
+ (static_cast<uint32_t>(s.matrixCoeffs) << 8));
+}
+
+inline int32_t rawFromAidlDataspace(
+ ::aidl::android::hardware::graphics::common::Dataspace const& s) {
+ return static_cast<int32_t>(s);
+}
+
+} // namespace android::media::aidl_conversion
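
A worked example of the bit layout produced by compactFromAidlColorAspects() above: one field per byte, with range in bits 31..24, primaries in 23..16, matrixCoeffs in 15..8 and transfer in 7..0. The enum values come from AidlColorAspects.aidl earlier in this patch; the standalone packer below is illustrative only.

    #include <cassert>
    #include <cstdint>

    // Same packing as compactFromAidlColorAspects(), written out with plain integers.
    static int32_t pack(uint32_t range, uint32_t primaries,
                        uint32_t transfer, uint32_t matrixCoeffs) {
        return static_cast<int32_t>(
                (range << 24) | (primaries << 16) | (matrixCoeffs << 8) | transfer);
    }

    int main() {
        // range = FULL (1), primaries = BT709_5 (1),
        // transfer = SMPTE170M (3), matrixCoeffs = BT601_6 (3)
        assert(pack(1, 1, 3, 3) == 0x01010303);
        return 0;
    }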
diff --git a/media/module/aidlpersistentsurface/include/media/stagefright/aidlpersistentsurface/wrapper/WAidlGraphicBufferSource.h b/media/module/aidlpersistentsurface/include/media/stagefright/aidlpersistentsurface/wrapper/WAidlGraphicBufferSource.h
new file mode 100644
index 0000000..f4d7fe8
--- /dev/null
+++ b/media/module/aidlpersistentsurface/include/media/stagefright/aidlpersistentsurface/wrapper/WAidlGraphicBufferSource.h
@@ -0,0 +1,60 @@
+/*
+ * Copyright 2024, The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#pragma once
+
+#include <utils/RefBase.h>
+#include <aidl/android/hardware/graphics/common/Dataspace.h>
+#include <aidl/android/media/IAidlBufferSource.h>
+#include <aidl/android/media/IAidlNode.h>
+#include <aidl/android/media/BnAidlGraphicBufferSource.h>
+
+namespace android::media {
+
+class AidlGraphicBufferSource;
+
+using ::android::sp;
+
+/**
+ * Aidl wrapper implementation for IAidlGraphicBufferSource
+ */
+class WAidlGraphicBufferSource : public ::aidl::android::media::BnAidlGraphicBufferSource {
+public:
+
+ struct WAidlNodeWrapper;
+ class WAidlBufferSource;
+
+ sp<AidlGraphicBufferSource> mBase;
+ std::shared_ptr<::aidl::android::media::IAidlBufferSource> mBufferSource;
+
+ WAidlGraphicBufferSource(sp<AidlGraphicBufferSource> const& base);
+ ::ndk::ScopedAStatus configure(
+ const std::shared_ptr<::aidl::android::media::IAidlNode>& node,
+ aidl::android::hardware::graphics::common::Dataspace dataspace) override;
+ ::ndk::ScopedAStatus setSuspend(bool suspend, int64_t timeUs) override;
+ ::ndk::ScopedAStatus setRepeatPreviousFrameDelayUs(int64_t repeatAfterUs) override;
+ ::ndk::ScopedAStatus setMaxFps(float maxFps) override;
+ ::ndk::ScopedAStatus setTimeLapseConfig(double fps, double captureFps) override;
+ ::ndk::ScopedAStatus setStartTimeUs(int64_t startTimeUs) override;
+ ::ndk::ScopedAStatus setStopTimeUs(int64_t stopTimeUs) override;
+ ::ndk::ScopedAStatus getStopTimeOffsetUs(int64_t *_aidl_return) override;
+ ::ndk::ScopedAStatus setColorAspects(
+ const ::aidl::android::media::AidlColorAspects& aspects) override;
+ ::ndk::ScopedAStatus setTimeOffsetUs(int64_t timeOffsetUs) override;
+ ::ndk::ScopedAStatus signalEndOfInputStream() override;
+};
+
+} // namespace android::media
diff --git a/media/module/aidlpersistentsurface/wrapper/WAidlGraphicBufferSource.cpp b/media/module/aidlpersistentsurface/wrapper/WAidlGraphicBufferSource.cpp
new file mode 100644
index 0000000..5526b10
--- /dev/null
+++ b/media/module/aidlpersistentsurface/wrapper/WAidlGraphicBufferSource.cpp
@@ -0,0 +1,221 @@
+/*
+ * Copyright 2024, The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+//#define LOG_NDEBUG 0
+#define LOG_TAG "WAidlGraphicBufferSource"
+#include <android/hardware_buffer_aidl.h>
+#include <private/android/AHardwareBufferHelpers.h>
+#include <utils/Log.h>
+
+#include <aidl/android/media/BnAidlBufferSource.h>
+#include <aidl/android/media/IAidlNode.h>
+
+#include <media/stagefright/aidlpersistentsurface/AidlGraphicBufferSource.h>
+#include <media/stagefright/aidlpersistentsurface/C2NodeDef.h>
+#include <media/stagefright/aidlpersistentsurface/wrapper/WAidlGraphicBufferSource.h>
+#include <media/stagefright/aidlpersistentsurface/wrapper/Conversion.h>
+
+namespace android::media {
+using ::android::binder::unique_fd;
+using ::aidl::android::hardware::graphics::common::PixelFormat;
+using ::aidl::android::hardware::graphics::common::Dataspace;
+using ::aidl::android::media::AidlColorAspects;
+using ::aidl::android::media::IAidlNode;
+using ::aidl::android::media::BnAidlBufferSource;
+
+// Conversion
+using ::android::media::aidl_conversion::fromAidlStatus;
+using ::android::media::aidl_conversion::toAidlStatus;
+using ::android::media::aidl_conversion::compactFromAidlColorAspects;
+using ::android::media::aidl_conversion::rawFromAidlDataspace;
+
+struct WAidlGraphicBufferSource::WAidlNodeWrapper : public IAidlNodeWrapper {
+ std::shared_ptr<IAidlNode> mNode;
+
+ WAidlNodeWrapper(const std::shared_ptr<IAidlNode> &node): mNode(node) {
+ }
+
+ virtual status_t submitBuffer(
+ int32_t bufferId, uint32_t flags,
+ const sp<GraphicBuffer> &buffer,
+ int64_t timestamp, int fenceFd) override {
+ AHardwareBuffer *ahwBuffer = nullptr;
+ ::aidl::android::hardware::HardwareBuffer hBuffer;
+ if (buffer.get()) {
+ ahwBuffer = AHardwareBuffer_from_GraphicBuffer(buffer.get());
+ AHardwareBuffer_acquire(ahwBuffer);
+ hBuffer.reset(ahwBuffer);
+ }
+
+ ::ndk::ScopedFileDescriptor fence(fenceFd);
+
+ return fromAidlStatus(mNode->submitBuffer(
+ bufferId,
+ hBuffer,
+ flags,
+ timestamp,
+ fence));
+ }
+
+ virtual void dispatchDataSpaceChanged(
+ int32_t dataSpace, int32_t aspects, int32_t pixelFormat) override {
+ ::ndk::ScopedAStatus err = mNode->onDataSpaceChanged(
+ dataSpace, aspects, pixelFormat);
+ status_t status = fromAidlStatus(err);
+ if (status != NO_ERROR) {
+ ALOGE("WAidlNodeWrapper failed to change dataspace (%d): "
+ "dataSpace = %ld, aspects = %ld, pixelFormat = %ld",
+ static_cast<int>(status),
+ static_cast<long>(dataSpace),
+ static_cast<long>(aspects),
+ static_cast<long>(pixelFormat));
+ }
+ }
+};
+
+class WAidlGraphicBufferSource::WAidlBufferSource : public BnAidlBufferSource {
+ sp<AidlGraphicBufferSource> mSource;
+
+public:
+ WAidlBufferSource(const sp<AidlGraphicBufferSource> &source): mSource(source) {
+ }
+
+ ::ndk::ScopedAStatus onStart() override {
+ mSource->onStart();
+ return ::ndk::ScopedAStatus::ok();
+ }
+
+ ::ndk::ScopedAStatus onStop() override {
+ mSource->onStop();
+ return ::ndk::ScopedAStatus::ok();
+ }
+
+ ::ndk::ScopedAStatus onRelease() override {
+ mSource->onRelease();
+ return ::ndk::ScopedAStatus::ok();
+ }
+
+ ::ndk::ScopedAStatus onInputBufferAdded(int32_t bufferId) override {
+ mSource->onInputBufferAdded(bufferId);
+ return ::ndk::ScopedAStatus::ok();
+ }
+
+ ::ndk::ScopedAStatus onInputBufferEmptied(
+ int32_t bufferId, const ::ndk::ScopedFileDescriptor& fence) override {
+ mSource->onInputBufferEmptied(bufferId, ::dup(fence.get()));
+ return ::ndk::ScopedAStatus::ok();
+ }
+};
+
+// WAidlGraphicBufferSource
+WAidlGraphicBufferSource::WAidlGraphicBufferSource(
+ sp<AidlGraphicBufferSource> const& base) :
+ mBase(base),
+ mBufferSource(::ndk::SharedRefBase::make<WAidlBufferSource>(base)) {
+}
+
+::ndk::ScopedAStatus WAidlGraphicBufferSource::configure(
+ const std::shared_ptr<IAidlNode>& node, Dataspace dataspace) {
+ if (node == NULL) {
+ return toAidlStatus(BAD_VALUE);
+ }
+
+ // Do setInputSurface() first; the node will try to enable metadata
+ // mode on input and do the necessary error checking. If this fails,
+ // we can't use this input surface on the node.
+ ::ndk::ScopedAStatus err = node->setInputSurface(mBufferSource);
+ status_t fnStatus = fromAidlStatus(err);
+ if (fnStatus != NO_ERROR) {
+ ALOGE("Unable to set input surface: %d", fnStatus);
+ return err;
+ }
+
+ // use consumer usage bits queried from encoder, but always add
+ // HW_VIDEO_ENCODER for backward compatibility.
+ int64_t consumerUsage;
+ fnStatus = OK;
+ err = node->getConsumerUsage(&consumerUsage);
+ fnStatus = fromAidlStatus(err);
+ if (fnStatus != NO_ERROR) {
+ if (fnStatus == FAILED_TRANSACTION) {
+ return err;
+ }
+ consumerUsage = 0;
+ }
+
+ IAidlNode::InputBufferParams rDef;
+ err = node->getInputBufferParams(&rDef);
+ fnStatus = fromAidlStatus(err);
+ if (fnStatus != NO_ERROR) {
+ ALOGE("Failed to get port definition: %d", fnStatus);
+ return toAidlStatus(fnStatus);
+ }
+
+ return toAidlStatus(mBase->configure(
+ new WAidlNodeWrapper(node),
+ rawFromAidlDataspace(dataspace),
+ rDef.bufferCountActual,
+ rDef.frameWidth,
+ rDef.frameHeight,
+ static_cast<uint64_t>(consumerUsage)));
+}
+
+::ndk::ScopedAStatus WAidlGraphicBufferSource::setSuspend(
+ bool suspend, int64_t timeUs) {
+ return toAidlStatus(mBase->setSuspend(suspend, timeUs));
+}
+
+::ndk::ScopedAStatus WAidlGraphicBufferSource::setRepeatPreviousFrameDelayUs(
+ int64_t repeatAfterUs) {
+ return toAidlStatus(mBase->setRepeatPreviousFrameDelayUs(repeatAfterUs));
+}
+
+::ndk::ScopedAStatus WAidlGraphicBufferSource::setMaxFps(float maxFps) {
+ return toAidlStatus(mBase->setMaxFps(maxFps));
+}
+
+::ndk::ScopedAStatus WAidlGraphicBufferSource::setTimeLapseConfig(
+ double fps, double captureFps) {
+ return toAidlStatus(mBase->setTimeLapseConfig(fps, captureFps));
+}
+
+::ndk::ScopedAStatus WAidlGraphicBufferSource::setStartTimeUs(int64_t startTimeUs) {
+ return toAidlStatus(mBase->setStartTimeUs(startTimeUs));
+}
+
+::ndk::ScopedAStatus WAidlGraphicBufferSource::setStopTimeUs(int64_t stopTimeUs) {
+ return toAidlStatus(mBase->setStopTimeUs(stopTimeUs));
+}
+
+::ndk::ScopedAStatus WAidlGraphicBufferSource::getStopTimeOffsetUs(int64_t* _aidl_return) {
+ status_t status = mBase->getStopTimeOffsetUs(_aidl_return);
+ return toAidlStatus(status);
+}
+
+::ndk::ScopedAStatus WAidlGraphicBufferSource::setColorAspects(
+ const AidlColorAspects& aspects) {
+ return toAidlStatus(mBase->setColorAspects(compactFromAidlColorAspects(aspects)));
+}
+
+::ndk::ScopedAStatus WAidlGraphicBufferSource::setTimeOffsetUs(int64_t timeOffsetUs) {
+ return toAidlStatus(mBase->setTimeOffsetUs(timeOffsetUs));
+}
+
+::ndk::ScopedAStatus WAidlGraphicBufferSource::signalEndOfInputStream() {
+ return toAidlStatus(mBase->signalEndOfInputStream());
+}
+
+
+} // namespace android::media
diff --git a/media/module/bufferpool/1.0/vts/multi.cpp b/media/module/bufferpool/1.0/vts/multi.cpp
index d8cc285..21f47d3 100644
--- a/media/module/bufferpool/1.0/vts/multi.cpp
+++ b/media/module/bufferpool/1.0/vts/multi.cpp
@@ -24,6 +24,7 @@
#include <hidl/HidlSupport.h>
#include <hidl/HidlTransportSupport.h>
#include <hidl/LegacySupport.h>
+#include <hidl/ServiceManagement.h>
#include <hidl/Status.h>
#include <signal.h>
#include <sys/types.h>
@@ -36,6 +37,7 @@
using android::hardware::configureRpcThreadpool;
using android::hardware::hidl_handle;
+using android::hardware::isHidlSupported;
using android::hardware::media::bufferpool::V1_0::IClientManager;
using android::hardware::media::bufferpool::V1_0::ResultStatus;
using android::hardware::media::bufferpool::V1_0::implementation::BufferId;
@@ -178,6 +180,7 @@
ResultStatus status;
PipeMessage message;
+ if (!isHidlSupported()) GTEST_SKIP() << "HIDL is not supported on this device";
ASSERT_TRUE(receiveMessage(mResultPipeFds, &message));
android::sp<IClientManager> receiver = IClientManager::getService();
diff --git a/media/module/bufferpool/2.0/AccessorImpl.cpp b/media/module/bufferpool/2.0/AccessorImpl.cpp
index 1d2562e..202d803 100644
--- a/media/module/bufferpool/2.0/AccessorImpl.cpp
+++ b/media/module/bufferpool/2.0/AccessorImpl.cpp
@@ -609,7 +609,7 @@
}
if (ret == false) {
ALOGW("buffer status message processing failure - message : %d connection : %lld",
- message.newStatus, (long long)message.connectionId);
+ (int)message.newStatus, (long long)message.connectionId);
}
}
messages.clear();
diff --git a/media/module/codecs/amrwb/enc/Android.bp b/media/module/codecs/amrwb/enc/Android.bp
index 8780136..04f36b5 100644
--- a/media/module/codecs/amrwb/enc/Android.bp
+++ b/media/module/codecs/amrwb/enc/Android.bp
@@ -79,67 +79,31 @@
arch: {
arm: {
srcs: [
- "src/asm/ARMV5E/convolve_opt.s",
- "src/asm/ARMV5E/cor_h_vec_opt.s",
- "src/asm/ARMV5E/Deemph_32_opt.s",
- "src/asm/ARMV5E/Dot_p_opt.s",
- "src/asm/ARMV5E/Filt_6k_7k_opt.s",
- "src/asm/ARMV5E/Norm_Corr_opt.s",
- "src/asm/ARMV5E/pred_lt4_1_opt.s",
- "src/asm/ARMV5E/residu_asm_opt.s",
- "src/asm/ARMV5E/scale_sig_opt.s",
- "src/asm/ARMV5E/Syn_filt_32_opt.s",
- "src/asm/ARMV5E/syn_filt_opt.s",
+ "src/asm/ARMV7/convolve_neon.s",
+ "src/asm/ARMV7/cor_h_vec_neon.s",
+ "src/asm/ARMV7/Deemph_32_neon.s",
+ "src/asm/ARMV7/Dot_p_neon.s",
+ "src/asm/ARMV7/Filt_6k_7k_neon.s",
+ "src/asm/ARMV7/Norm_Corr_neon.s",
+ "src/asm/ARMV7/pred_lt4_1_neon.s",
+ "src/asm/ARMV7/residu_asm_neon.s",
+ "src/asm/ARMV7/scale_sig_neon.s",
+ "src/asm/ARMV7/Syn_filt_32_neon.s",
+ "src/asm/ARMV7/syn_filt_neon.s",
],
cflags: [
"-DARM",
+ "-DARMV7",
"-DASM_OPT",
+ // don't actually generate neon instructions, see bug 26932980
+ "-mfpu=vfpv3",
],
- local_include_dirs: ["src/asm/ARMV5E"],
+ local_include_dirs: [
+ "src/asm/ARMV7",
+ ],
instruction_set: "arm",
-
- neon: {
- exclude_srcs: [
- "src/asm/ARMV5E/convolve_opt.s",
- "src/asm/ARMV5E/cor_h_vec_opt.s",
- "src/asm/ARMV5E/Deemph_32_opt.s",
- "src/asm/ARMV5E/Dot_p_opt.s",
- "src/asm/ARMV5E/Filt_6k_7k_opt.s",
- "src/asm/ARMV5E/Norm_Corr_opt.s",
- "src/asm/ARMV5E/pred_lt4_1_opt.s",
- "src/asm/ARMV5E/residu_asm_opt.s",
- "src/asm/ARMV5E/scale_sig_opt.s",
- "src/asm/ARMV5E/Syn_filt_32_opt.s",
- "src/asm/ARMV5E/syn_filt_opt.s",
- ],
-
- srcs: [
- "src/asm/ARMV7/convolve_neon.s",
- "src/asm/ARMV7/cor_h_vec_neon.s",
- "src/asm/ARMV7/Deemph_32_neon.s",
- "src/asm/ARMV7/Dot_p_neon.s",
- "src/asm/ARMV7/Filt_6k_7k_neon.s",
- "src/asm/ARMV7/Norm_Corr_neon.s",
- "src/asm/ARMV7/pred_lt4_1_neon.s",
- "src/asm/ARMV7/residu_asm_neon.s",
- "src/asm/ARMV7/scale_sig_neon.s",
- "src/asm/ARMV7/Syn_filt_32_neon.s",
- "src/asm/ARMV7/syn_filt_neon.s",
- ],
-
- // don't actually generate neon instructions, see bug 26932980
- cflags: [
- "-DARMV7",
- "-mfpu=vfpv3",
- ],
- local_include_dirs: [
- "src/asm/ARMV5E",
- "src/asm/ARMV7",
- ],
- },
-
},
},
diff --git a/media/module/codecserviceregistrant/CodecServiceRegistrant.cpp b/media/module/codecserviceregistrant/CodecServiceRegistrant.cpp
index 4fbfab1..6df9dc8 100644
--- a/media/module/codecserviceregistrant/CodecServiceRegistrant.cpp
+++ b/media/module/codecserviceregistrant/CodecServiceRegistrant.cpp
@@ -49,6 +49,7 @@
using ::android::hardware::Return;
using ::android::sp;
using ::ndk::ScopedAStatus;
+namespace c2_hidl_V1_0 = ::android::hardware::media::c2::V1_0;
namespace c2_hidl = ::android::hardware::media::c2::V1_2;
namespace c2_aidl = ::aidl::android::hardware::media::c2;
@@ -734,6 +735,46 @@
} // unnamed namespace
+static android::sp<c2_hidl_V1_0::IComponentStore> getDeclaredHidlSwcodec(
+ const std::shared_ptr<C2ComponentStore>& store) {
+ using ::android::hidl::manager::V1_2::IServiceManager;
+ using namespace ::android::hardware::media::c2;
+
+ int platformVersion = android_get_device_api_level();
+ // STOPSHIP: Remove code name checking once platform version bumps up to 35.
+ std::string codeName = android::base::GetProperty("ro.build.version.codename", "");
+
+ if (codeName == "VanillaIceCream") {
+ platformVersion = __ANDROID_API_V__;
+ }
+ IServiceManager::Transport transport =
+ android::hardware::defaultServiceManager1_2()->getTransport(
+ V1_2::IComponentStore::descriptor, "software");
+ if (transport == IServiceManager::Transport::HWBINDER) {
+ if (platformVersion < __ANDROID_API_S__) {
+ LOG(ERROR) << "We don't expect V1.2::IComponentStore to be declared on this device";
+ }
+ return ::android::sp<V1_2::utils::ComponentStore>::make(store);
+ }
+ transport = android::hardware::defaultServiceManager1_2()->getTransport(
+ V1_1::IComponentStore::descriptor, "software");
+ if (transport == IServiceManager::Transport::HWBINDER) {
+ if (platformVersion != __ANDROID_API_R__) {
+ LOG(ERROR) << "We don't expect V1.1::IComponentStore to be declared on this device";
+ }
+ return ::android::sp<V1_1::utils::ComponentStore>::make(store);
+ }
+ transport = android::hardware::defaultServiceManager1_2()->getTransport(
+ V1_0::IComponentStore::descriptor, "software");
+ if (transport == IServiceManager::Transport::HWBINDER) {
+ if (platformVersion != __ANDROID_API_Q__) {
+ LOG(ERROR) << "We don't expect V1.0::IComponentStore to be declared on this device";
+ }
+ return ::android::sp<V1_0::utils::ComponentStore>::make(store);
+ }
+ return nullptr;
+}
+
extern "C" void RegisterCodecServices() {
const bool aidlSelected = c2_aidl::utils::IsSelected();
constexpr int kThreadCount = 64;
@@ -751,33 +792,6 @@
using namespace ::android::hardware::media::c2;
- int platformVersion = android_get_device_api_level();
- // STOPSHIP: Remove code name checking once platform version bumps up to 35.
- std::string codeName =
- android::base::GetProperty("ro.build.version.codename", "");
- if (codeName == "VanillaIceCream") {
- platformVersion = __ANDROID_API_V__;
- }
-
- android::sp<V1_0::IComponentStore> hidlStore;
- std::shared_ptr<c2_aidl::IComponentStore> aidlStore;
- const char *hidlVer = "(unknown)";
- if (aidlSelected) {
- aidlStore = ::ndk::SharedRefBase::make<c2_aidl::utils::ComponentStore>(store);
- } else if (platformVersion >= __ANDROID_API_S__) {
- hidlStore = ::android::sp<V1_2::utils::ComponentStore>::make(store);
- hidlVer = "1.2";
- } else if (platformVersion == __ANDROID_API_R__) {
- hidlStore = ::android::sp<V1_1::utils::ComponentStore>::make(store);
- hidlVer = "1.1";
- } else if (platformVersion == __ANDROID_API_Q__) {
- hidlStore = ::android::sp<V1_0::utils::ComponentStore>::make(store);
- hidlVer = "1.0";
- } else { // platformVersion < __ANDROID_API_Q__
- LOG(ERROR) << "The platform version " << platformVersion <<
- " is not supported.";
- return;
- }
if (!ionPropertiesDefined()) {
using IComponentStore =
::android::hardware::media::c2::V1_0::IComponentStore;
@@ -823,7 +837,10 @@
std::string(c2_aidl::IComponentStore::descriptor) + "/software";
if (__builtin_available(android __ANDROID_API_S__, *)) {
if (AServiceManager_isDeclared(aidlServiceName.c_str())) {
- if (!aidlStore) {
+ std::shared_ptr<c2_aidl::IComponentStore> aidlStore;
+ if (aidlSelected) {
+ aidlStore = ::ndk::SharedRefBase::make<c2_aidl::utils::ComponentStore>(store);
+ } else {
aidlStore = ::ndk::SharedRefBase::make<c2_aidl::utils::ComponentStore>(
std::make_shared<H2C2ComponentStore>(nullptr));
}
@@ -837,22 +854,23 @@
}
}
+ android::sp<V1_0::IComponentStore> hidlStore = getDeclaredHidlSwcodec(store);
// If the software component store isn't declared in the manifest, we don't
// need to create the service and register it.
- using ::android::hidl::manager::V1_2::IServiceManager;
- IServiceManager::Transport transport =
- android::hardware::defaultServiceManager1_2()->getTransport(
- V1_2::utils::ComponentStore::descriptor, "software");
- if (transport == IServiceManager::Transport::HWBINDER) {
- if (!hidlStore) {
+ if (hidlStore) {
+ if (registered && aidlSelected) {
+ LOG(INFO) << "Both HIDL and AIDL software codecs are declared in the vintf "
+ << "manifest, but AIDL was selected. "
+ << "Creating a null HIDL service so it's not accidentally "
+ << "used. The AIDL software codec is already registered.";
hidlStore = ::android::sp<V1_2::utils::ComponentStore>::make(
std::make_shared<H2C2ComponentStore>(nullptr));
- hidlVer = "1.2";
}
if (hidlStore->registerAsService("software") == android::OK) {
registered = true;
} else {
- LOG(ERROR) << "Cannot register software Codec2 v" << hidlVer << " service.";
+ LOG(ERROR) << "Cannot register software Codec2 " << hidlStore->descriptor
+ << " service.";
}
} else {
LOG(INFO) << "The HIDL software Codec2 service is deprecated"
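The refactored registration path above factors the HIDL version probing into getDeclaredHidlSwcodec(): it asks the hwservicemanager which IComponentStore descriptor is actually declared, newest first, and only instantiates that version. Below is a minimal, self-contained sketch of the same fall-through pattern; the Candidate struct, pickDeclaredVersion() and the isDeclared callback are hypothetical stand-ins, not the Codec2 types used in the patch.

#include <functional>
#include <optional>
#include <string>
#include <vector>

// Hypothetical candidate: an interface descriptor plus the lowest platform
// API level on which we expect it to be declared in the vintf manifest.
struct Candidate {
    std::string descriptor;
    int minApiLevel;
};

// Walk the candidates from newest to oldest and return the first descriptor
// that the (stubbed) service manager reports as declared, mirroring the
// V1.2 -> V1.1 -> V1.0 fall-through in getDeclaredHidlSwcodec().
std::optional<std::string> pickDeclaredVersion(
        const std::vector<Candidate>& newestFirst, int platformApi,
        const std::function<bool(const std::string&)>& isDeclared) {
    for (const Candidate& c : newestFirst) {
        if (!isDeclared(c.descriptor)) {
            continue;  // not in the manifest; try the next older version
        }
        if (platformApi < c.minApiLevel) {
            // The patch logs an error here but still serves the declared
            // version; this sketch simply falls through as well.
        }
        return c.descriptor;
    }
    return std::nullopt;  // nothing declared: caller skips HIDL registration
}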
diff --git a/media/module/esds/tests/ESDSTest.cpp b/media/module/esds/tests/ESDSTest.cpp
index 33bdcac..ba64f60 100644
--- a/media/module/esds/tests/ESDSTest.cpp
+++ b/media/module/esds/tests/ESDSTest.cpp
@@ -52,7 +52,7 @@
/* BitrateMax */ int32_t,
/* BitrateAvg */ int32_t>> {
public:
- ESDSUnitTest() : mESDSData(nullptr) {
+ ESDSUnitTest() {
mESDSParams.inputFile = get<0>(GetParam());
mESDSParams.objectTypeIndication = get<1>(GetParam());
mESDSParams.codecSpecificInfoData = get<2>(GetParam());
@@ -61,6 +61,13 @@
mESDSParams.bitrateAvg = get<5>(GetParam());
};
+ ~ESDSUnitTest() {
+ if (mESDSData != nullptr) {
+ free(mESDSData);
+ mESDSData = nullptr;
+ }
+ }
+
virtual void TearDown() override {
if (mDataSource) mDataSource.clear();
if (mInputFp) {
@@ -70,8 +77,8 @@
}
virtual void SetUp() override { ASSERT_NO_FATAL_FAILURE(readESDSData()); }
- const void *mESDSData;
- size_t mESDSSize;
+ void *mESDSData = nullptr;
+ size_t mESDSSize = 0;
ESDSParams mESDSParams;
private:
@@ -105,10 +112,19 @@
bool esdsDataPresent(size_t numTracks, sp<IMediaExtractor> extractor) {
bool foundESDS = false;
uint32_t type;
+ if (mESDSData != nullptr) {
+ free(mESDSData);
+ mESDSData = nullptr;
+ }
for (size_t i = 0; i < numTracks; ++i) {
sp<MetaData> trackMeta = extractor->getTrackMetaData(i);
+ const void *esdsData = nullptr;
+ size_t esdsSize = 0;
if (trackMeta != nullptr &&
- trackMeta->findData(kKeyESDS, &type, &mESDSData, &mESDSSize)) {
+ trackMeta->findData(kKeyESDS, &type, &esdsData, &esdsSize)) {
+ mESDSData = malloc(esdsSize);
+ mESDSSize = esdsSize;
+ memcpy(mESDSData, esdsData, esdsSize);
trackMeta->clear();
foundESDS = true;
break;
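The fixture above previously held the raw pointer returned by findData(), which is only valid while the track's MetaData is alive; the fix copies the blob into memory the test owns and frees it in the destructor. A small hedged sketch of the same "copy, don't borrow" pattern (the BlobCopy class is illustrative, not part of the test):

#include <cstdlib>
#include <cstring>

// Hypothetical owner of a copied metadata blob, mirroring the mESDSData fix.
class BlobCopy {
public:
    ~BlobCopy() { reset(); }

    // Replaces any previous copy with a freshly malloc'd duplicate of src.
    bool assign(const void* src, size_t size) {
        reset();
        if (src == nullptr || size == 0) return false;
        mData = malloc(size);
        if (mData == nullptr) return false;
        memcpy(mData, src, size);
        mSize = size;
        return true;
    }

    void reset() {
        free(mData);  // free(nullptr) is a no-op
        mData = nullptr;
        mSize = 0;
    }

    const void* data() const { return mData; }
    size_t size() const { return mSize; }

private:
    void* mData = nullptr;
    size_t mSize = 0;
};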
diff --git a/media/module/extractors/fuzzers/Android.bp b/media/module/extractors/fuzzers/Android.bp
index d096d63..7a49d8e 100644
--- a/media/module/extractors/fuzzers/Android.bp
+++ b/media/module/extractors/fuzzers/Android.bp
@@ -134,6 +134,8 @@
],
dictionary: "mp4_extractor_fuzzer.dict",
+
+ corpus: ["corpus_mp4/*"],
}
cc_fuzz {
@@ -202,7 +204,6 @@
"ogg_extractor_fuzzer.cpp",
],
-
static_libs: [
"libstagefright_metadatautils",
"libvorbisidec",
diff --git a/media/module/extractors/fuzzers/corpus_mp4/164a5bad5340b262316f93932c4160813657e1e0 b/media/module/extractors/fuzzers/corpus_mp4/164a5bad5340b262316f93932c4160813657e1e0
new file mode 100644
index 0000000..c17251b
--- /dev/null
+++ b/media/module/extractors/fuzzers/corpus_mp4/164a5bad5340b262316f93932c4160813657e1e0
Binary files differ
diff --git a/media/module/extractors/fuzzers/corpus_mp4/24f242f3b30fd5c2ff0f9aebed4375a3ab5cdceb b/media/module/extractors/fuzzers/corpus_mp4/24f242f3b30fd5c2ff0f9aebed4375a3ab5cdceb
new file mode 100644
index 0000000..16907fd
--- /dev/null
+++ b/media/module/extractors/fuzzers/corpus_mp4/24f242f3b30fd5c2ff0f9aebed4375a3ab5cdceb
Binary files differ
diff --git a/media/module/extractors/fuzzers/corpus_mp4/2a017927fdab79a8cc3b0bb75224cb44f4c1b35b b/media/module/extractors/fuzzers/corpus_mp4/2a017927fdab79a8cc3b0bb75224cb44f4c1b35b
new file mode 100644
index 0000000..2ec7881
--- /dev/null
+++ b/media/module/extractors/fuzzers/corpus_mp4/2a017927fdab79a8cc3b0bb75224cb44f4c1b35b
Binary files differ
diff --git a/media/module/extractors/fuzzers/corpus_mp4/58b3155e64ac16e4e6c68b68257871bcd769b92f b/media/module/extractors/fuzzers/corpus_mp4/58b3155e64ac16e4e6c68b68257871bcd769b92f
new file mode 100644
index 0000000..cd1fdcc
--- /dev/null
+++ b/media/module/extractors/fuzzers/corpus_mp4/58b3155e64ac16e4e6c68b68257871bcd769b92f
Binary files differ
diff --git a/media/module/extractors/fuzzers/corpus_mp4/64093d4da00ba406310c7679cd8b37562e6344b5 b/media/module/extractors/fuzzers/corpus_mp4/64093d4da00ba406310c7679cd8b37562e6344b5
new file mode 100644
index 0000000..f1ea812
--- /dev/null
+++ b/media/module/extractors/fuzzers/corpus_mp4/64093d4da00ba406310c7679cd8b37562e6344b5
Binary files differ
diff --git a/media/module/extractors/fuzzers/corpus_mp4/7355066d4975de07e9b6d0e9907c896eeb90577a b/media/module/extractors/fuzzers/corpus_mp4/7355066d4975de07e9b6d0e9907c896eeb90577a
new file mode 100644
index 0000000..c5d3eb2
--- /dev/null
+++ b/media/module/extractors/fuzzers/corpus_mp4/7355066d4975de07e9b6d0e9907c896eeb90577a
Binary files differ
diff --git a/media/module/extractors/fuzzers/corpus_mp4/7a48b0237581c794097a15add08517b3c6dc0aa2 b/media/module/extractors/fuzzers/corpus_mp4/7a48b0237581c794097a15add08517b3c6dc0aa2
new file mode 100644
index 0000000..1f6c29d
--- /dev/null
+++ b/media/module/extractors/fuzzers/corpus_mp4/7a48b0237581c794097a15add08517b3c6dc0aa2
Binary files differ
diff --git a/media/module/extractors/fuzzers/corpus_mp4/8706f07041a0cf828a7b40d727533d6c732b5ebc b/media/module/extractors/fuzzers/corpus_mp4/8706f07041a0cf828a7b40d727533d6c732b5ebc
new file mode 100644
index 0000000..40d639d
--- /dev/null
+++ b/media/module/extractors/fuzzers/corpus_mp4/8706f07041a0cf828a7b40d727533d6c732b5ebc
Binary files differ
diff --git a/media/module/extractors/fuzzers/corpus_mp4/911a46d40d60b9a806dbdc70799048df2f546615 b/media/module/extractors/fuzzers/corpus_mp4/911a46d40d60b9a806dbdc70799048df2f546615
new file mode 100644
index 0000000..2056348
--- /dev/null
+++ b/media/module/extractors/fuzzers/corpus_mp4/911a46d40d60b9a806dbdc70799048df2f546615
Binary files differ
diff --git a/media/module/extractors/fuzzers/corpus_mp4/97b1d0e78525c793574cce3e66f86564c2a10271 b/media/module/extractors/fuzzers/corpus_mp4/97b1d0e78525c793574cce3e66f86564c2a10271
new file mode 100644
index 0000000..f50d4f4
--- /dev/null
+++ b/media/module/extractors/fuzzers/corpus_mp4/97b1d0e78525c793574cce3e66f86564c2a10271
Binary files differ
diff --git a/media/module/extractors/fuzzers/corpus_mp4/d52e7095534fdf1f040b10a80df4cbc069a97a4e b/media/module/extractors/fuzzers/corpus_mp4/d52e7095534fdf1f040b10a80df4cbc069a97a4e
new file mode 100644
index 0000000..25ea55b
--- /dev/null
+++ b/media/module/extractors/fuzzers/corpus_mp4/d52e7095534fdf1f040b10a80df4cbc069a97a4e
Binary files differ
diff --git a/media/module/extractors/fuzzers/corpus_mp4/ec6bd6069f74a2f6e92442f88efb29288ad6f456 b/media/module/extractors/fuzzers/corpus_mp4/ec6bd6069f74a2f6e92442f88efb29288ad6f456
new file mode 100644
index 0000000..62d259b
--- /dev/null
+++ b/media/module/extractors/fuzzers/corpus_mp4/ec6bd6069f74a2f6e92442f88efb29288ad6f456
Binary files differ
diff --git a/media/module/extractors/fuzzers/corpus_mp4/faa22bcb745206340d5d411b498a3868d2b1feec b/media/module/extractors/fuzzers/corpus_mp4/faa22bcb745206340d5d411b498a3868d2b1feec
new file mode 100644
index 0000000..d649632
--- /dev/null
+++ b/media/module/extractors/fuzzers/corpus_mp4/faa22bcb745206340d5d411b498a3868d2b1feec
Binary files differ
diff --git a/media/module/extractors/fuzzers/mp4_extractor_fuzzer.dict b/media/module/extractors/fuzzers/mp4_extractor_fuzzer.dict
index 3683649..b48c854 100644
--- a/media/module/extractors/fuzzers/mp4_extractor_fuzzer.dict
+++ b/media/module/extractors/fuzzers/mp4_extractor_fuzzer.dict
@@ -246,3 +246,4 @@
kw245="iso5"
kw246="resv"
kw247="iso6"
+kw248="clap"
diff --git a/media/module/extractors/mkv/MatroskaExtractor.cpp b/media/module/extractors/mkv/MatroskaExtractor.cpp
index 6900341..f326db1 100644
--- a/media/module/extractors/mkv/MatroskaExtractor.cpp
+++ b/media/module/extractors/mkv/MatroskaExtractor.cpp
@@ -1769,6 +1769,30 @@
}
+status_t MatroskaExtractor::synthesizeVP9(TrackInfo* trackInfo, size_t index) {
+ BlockIterator iter(this, trackInfo->mTrackNum, index);
+ if (iter.eos()) {
+ return ERROR_MALFORMED;
+ }
+
+ const mkvparser::Block* block = iter.block();
+ if (block->GetFrameCount() <= 0) {
+ return ERROR_MALFORMED;
+ }
+
+ const mkvparser::Block::Frame& frame = block->GetFrame(0);
+ auto tmpData = heapbuffer<unsigned char>(frame.len);
+ long n = frame.Read(mReader, tmpData.get());
+ if (n != 0) {
+ return ERROR_MALFORMED;
+ }
+
+ if (!MakeVP9CodecSpecificData(trackInfo->mMeta, tmpData.get(), frame.len)) {
+ return ERROR_MALFORMED;
+ }
+
+ return OK;
+}
static inline bool isValidInt32ColourValue(long long value) {
return value != mkvparser::Colour::kValueNotPresent
@@ -2002,6 +2026,8 @@
// specified in http://www.webmproject.org/vp9/profiles/.
AMediaFormat_setBuffer(meta,
AMEDIAFORMAT_KEY_CSD_0, codecPrivate, codecPrivateSize);
+ } else {
+ isSetCsdFrom1stFrame = true;
}
} else if (!strcmp("V_AV1", codecID)) {
AMediaFormat_setString(meta, AMEDIAFORMAT_KEY_MIME, MEDIA_MIMETYPE_VIDEO_AV1);
@@ -2254,6 +2280,13 @@
mTracks.pop();
continue;
}
+ } else if ((!strcmp("V_VP9", codecID) && codecPrivateSize == 0) ||
+ (!strcmp(mimetype, MEDIA_MIMETYPE_VIDEO_VP9) && isSetCsdFrom1stFrame)) {
+ // Attempt to recover from VP9 track without codec private data
+ err = synthesizeVP9(trackInfo, n);
+ if (err != OK) {
+ ALOGW("ignoring error %d in synthesizeVP9", err);
+ }
}
// the TrackInfo owns the metadata now
meta = nullptr;
@@ -2279,6 +2312,8 @@
int64_t thumbnailTimeUs = 0;
size_t maxBlockSize = 0;
while (!iter.eos() && j < 20) {
+ int64_t blockTimeUs = iter.blockTimeUs();
+
if (iter.block()->IsKey()) {
++j;
@@ -2289,9 +2324,13 @@
if (blockSize > maxBlockSize) {
maxBlockSize = blockSize;
- thumbnailTimeUs = iter.blockTimeUs();
+ thumbnailTimeUs = blockTimeUs;
}
}
+ // Exit after 20s if we've already found at least one key frame.
+ if (blockTimeUs > 20000000 && maxBlockSize > 0) {
+ break;
+ }
iter.advance();
}
AMediaFormat_setInt64(info->mMeta,
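The thumbnail scan above now captures each block's timestamp before advancing and bails out once it is past the 20-second mark with at least one key frame in hand, instead of always examining up to 20 key frames. A hedged sketch of that loop shape over a hypothetical, already-parsed block list:

#include <cstddef>
#include <cstdint>
#include <vector>

// Hypothetical stand-in for one parsed Matroska block.
struct Block { bool key; size_t size; int64_t timeUs; };

// Pick the largest key frame seen, but stop 20 s in once one has been found.
int64_t pickThumbnailTimeUs(const std::vector<Block>& blocks) {
    int64_t thumbnailTimeUs = 0;
    size_t maxBlockSize = 0;
    int keyFrames = 0;
    for (const Block& b : blocks) {
        if (keyFrames >= 20) break;
        if (b.key) {
            ++keyFrames;
            if (b.size > maxBlockSize) {
                maxBlockSize = b.size;
                thumbnailTimeUs = b.timeUs;
            }
        }
        // Mirrors the new early exit: past 20 s with a key frame in hand.
        if (b.timeUs > 20'000'000 && maxBlockSize > 0) break;
    }
    return thumbnailTimeUs;
}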
diff --git a/media/module/extractors/mkv/include/MatroskaExtractor.h b/media/module/extractors/mkv/include/MatroskaExtractor.h
index 99fad17..2e4d955 100644
--- a/media/module/extractors/mkv/include/MatroskaExtractor.h
+++ b/media/module/extractors/mkv/include/MatroskaExtractor.h
@@ -95,6 +95,7 @@
status_t synthesizeAVCC(TrackInfo *trackInfo, size_t index);
status_t synthesizeMPEG2(TrackInfo *trackInfo, size_t index);
status_t synthesizeMPEG4(TrackInfo *trackInfo, size_t index);
+ status_t synthesizeVP9(TrackInfo* trackInfo, size_t index);
status_t initTrackInfo(
const mkvparser::Track *track,
AMediaFormat *meta,
diff --git a/media/module/extractors/mp4/MPEG4Extractor.cpp b/media/module/extractors/mp4/MPEG4Extractor.cpp
index b3707c8..cb2994e 100644
--- a/media/module/extractors/mp4/MPEG4Extractor.cpp
+++ b/media/module/extractors/mp4/MPEG4Extractor.cpp
@@ -1615,39 +1615,6 @@
mLastTrack->timescale = ntohl(timescale);
- // 14496-12 says all ones means indeterminate, but some files seem to use
- // 0 instead. We treat both the same.
- int64_t duration = 0;
- if (version == 1) {
- if (mDataSource->readAt(
- timescale_offset + 4, &duration, sizeof(duration))
- < (ssize_t)sizeof(duration)) {
- return ERROR_IO;
- }
- if (duration != -1) {
- duration = ntoh64(duration);
- }
- } else {
- uint32_t duration32;
- if (mDataSource->readAt(
- timescale_offset + 4, &duration32, sizeof(duration32))
- < (ssize_t)sizeof(duration32)) {
- return ERROR_IO;
- }
- if (duration32 != 0xffffffff) {
- duration = ntohl(duration32);
- }
- }
- if (duration != 0 && mLastTrack->timescale != 0) {
- long double durationUs = ((long double)duration * 1000000) / mLastTrack->timescale;
- if (durationUs < 0 || durationUs > INT64_MAX) {
- ALOGE("cannot represent %lld * 1000000 / %lld in 64 bits",
- (long long) duration, (long long) mLastTrack->timescale);
- return ERROR_MALFORMED;
- }
- AMediaFormat_setInt64(mLastTrack->meta, AMEDIAFORMAT_KEY_DURATION, durationUs);
- }
-
uint8_t lang[2];
off64_t lang_offset;
if (version == 1) {
@@ -3907,17 +3874,18 @@
}
int32_t id;
+ int64_t duration;
if (version == 1) {
// we can get ctime value from U64_AT(&buffer[4])
// we can get mtime value from U64_AT(&buffer[12])
id = U32_AT(&buffer[20]);
- // we can get duration value from U64_AT(&buffer[28])
+ duration = U64_AT(&buffer[28]);
} else if (version == 0) {
// we can get ctime value from U32_AT(&buffer[4])
// we can get mtime value from U32_AT(&buffer[8])
id = U32_AT(&buffer[12]);
- // we can get duration value from U32_AT(&buffer[20])
+ duration = U32_AT(&buffer[20]);
} else {
return ERROR_UNSUPPORTED;
}
@@ -3926,6 +3894,15 @@
return ERROR_MALFORMED;
AMediaFormat_setInt32(mLastTrack->meta, AMEDIAFORMAT_KEY_TRACK_ID, id);
+ if (duration != 0 && mHeaderTimescale != 0) {
+ long double durationUs = ((long double)duration * 1000000) / mHeaderTimescale;
+ if (durationUs < 0 || durationUs > INT64_MAX) {
+ ALOGE("cannot represent %lld * 1000000 / %lld in 64 bits",
+ (long long) duration, (long long) mHeaderTimescale);
+ return ERROR_MALFORMED;
+ }
+ AMediaFormat_setInt64(mLastTrack->meta, AMEDIAFORMAT_KEY_DURATION, durationUs);
+ }
size_t matrixOffset = dynSize + 16;
int32_t a00 = U32_AT(&buffer[matrixOffset]);
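With the mvhd/mdhd-based code removed, the track duration now comes from the tkhd box (64-bit in version 1, 32-bit in version 0) and is converted against mHeaderTimescale with the same long double overflow guard. A hedged sketch of just that conversion (tkhdDurationUs is a hypothetical helper):

#include <cstdint>
#include <optional>

// Hypothetical helper: convert tkhd duration ticks to microseconds.
// Returns nullopt when there is nothing to report or the result would
// not fit in int64_t (the patch returns ERROR_MALFORMED in that case).
std::optional<int64_t> tkhdDurationUs(uint64_t durationTicks, uint32_t headerTimescale) {
    if (durationTicks == 0 || headerTimescale == 0) {
        return std::nullopt;
    }
    const long double durationUs =
            (static_cast<long double>(durationTicks) * 1000000) / headerTimescale;
    if (durationUs < 0 || durationUs > INT64_MAX) {
        return std::nullopt;
    }
    return static_cast<int64_t>(durationUs);
}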
diff --git a/media/module/id3/Android.bp b/media/module/id3/Android.bp
index bea3e34..e426796 100644
--- a/media/module/id3/Android.bp
+++ b/media/module/id3/Android.bp
@@ -17,6 +17,24 @@
],
}
+cc_library_headers {
+ name: "libstagefright_id3_headers",
+ export_include_dirs: ["include"],
+ vendor_available: true,
+ apex_available: [
+ "//apex_available:platform",
+ "com.android.media",
+ "com.android.media.swcodec",
+ ],
+ min_sdk_version: "29",
+ host_supported: true,
+ target: {
+ darwin: {
+ enabled: false,
+ },
+ },
+}
+
cc_library_static {
name: "libstagefright_id3",
min_sdk_version: "29",
@@ -25,7 +43,6 @@
"com.android.media",
],
-
srcs: ["ID3.cpp"],
header_libs: [
@@ -35,6 +52,8 @@
"media_ndk_headers",
],
+ export_include_dirs: ["include"],
+
cflags: [
"-Werror",
"-Wall",
diff --git a/media/libstagefright/include/ID3.h b/media/module/id3/include/ID3.h
similarity index 100%
rename from media/libstagefright/include/ID3.h
rename to media/module/id3/include/ID3.h
diff --git a/media/module/libmediatranscoding/TranscodingResourcePolicy.cpp b/media/module/libmediatranscoding/TranscodingResourcePolicy.cpp
index af53f64..43a4628 100644
--- a/media/module/libmediatranscoding/TranscodingResourcePolicy.cpp
+++ b/media/module/libmediatranscoding/TranscodingResourcePolicy.cpp
@@ -21,6 +21,7 @@
#include <aidl/android/media/IResourceObserverService.h>
#include <android/binder_manager.h>
#include <android/binder_process.h>
+#include <map>
#include <media/TranscodingResourcePolicy.h>
#include <utils/Log.h>
@@ -66,11 +67,31 @@
TranscodingResourcePolicy* mOwner;
};
+// Cookie used for death recipients. The TranscodingResourcePolicy
+// that this cookie is associated with must outlive this cookie. It is
+// deleted either by binderDied, or by unregisterSelf, which is also called
+// in the destructor of TranscodingResourcePolicy.
+class TranscodingResourcePolicyCookie {
+ public:
+ TranscodingResourcePolicyCookie(TranscodingResourcePolicy* policy) : mPolicy(policy) {}
+ TranscodingResourcePolicyCookie() = delete;
+ TranscodingResourcePolicy* mPolicy;
+};
+
+static std::map<uintptr_t, std::unique_ptr<TranscodingResourcePolicyCookie>> sCookies;
+static uintptr_t sCookieKeyCounter;
+static std::mutex sCookiesMutex;
+
// static
void TranscodingResourcePolicy::BinderDiedCallback(void* cookie) {
- TranscodingResourcePolicy* owner = reinterpret_cast<TranscodingResourcePolicy*>(cookie);
- if (owner != nullptr) {
- owner->unregisterSelf();
+ std::lock_guard<std::mutex> guard(sCookiesMutex);
+ if (auto it = sCookies.find(reinterpret_cast<uintptr_t>(cookie)); it != sCookies.end()) {
+ ALOGI("BinderDiedCallback unregistering TranscodingResourcePolicy");
+ auto policy = reinterpret_cast<TranscodingResourcePolicy*>(it->second->mPolicy);
+ if (policy) {
+ policy->unregisterSelf();
+ }
+ sCookies.erase(it);
}
// TODO(chz): retry to connecting to IResourceObserverService after failure.
// Also need to have back-up logic if IResourceObserverService is offline for
@@ -88,6 +109,23 @@
}
TranscodingResourcePolicy::~TranscodingResourcePolicy() {
+ {
+ std::lock_guard<std::mutex> guard(sCookiesMutex);
+
+ // delete all of the cookies associated with this TranscodingResourcePolicy
+ // instance since they are holding pointers to this object that will no
+ // longer be valid.
+ std::erase_if(sCookies, [this](const auto& cookieEntry) {
+ auto const& [key, cookie] = cookieEntry;
+ std::lock_guard guard(mCookieKeysLock);
+ if (const auto& it = mCookieKeys.find(key); it != mCookieKeys.end()) {
+ // No longer need to track this cookie
+ mCookieKeys.erase(key);
+ return true;
+ }
+ return false;
+ });
+ }
unregisterSelf();
}
@@ -123,7 +161,17 @@
return;
}
- AIBinder_linkToDeath(binder.get(), mDeathRecipient.get(), reinterpret_cast<void*>(this));
+ std::unique_ptr<TranscodingResourcePolicyCookie> cookie =
+ std::make_unique<TranscodingResourcePolicyCookie>(this);
+ void* cookiePtr = static_cast<void*>(cookie.get());
+ uintptr_t cookieKey = sCookieKeyCounter++;
+ sCookies.emplace(cookieKey, std::move(cookie));
+ {
+ std::lock_guard guard(mCookieKeysLock);
+ mCookieKeys.insert(cookieKey);
+ }
+
+ AIBinder_linkToDeath(binder.get(), mDeathRecipient.get(), reinterpret_cast<void*>(cookieKey));
ALOGD("@@@ registered observer");
mRegistered = true;
@@ -141,7 +189,6 @@
::ndk::SpAIBinder binder = mService->asBinder();
if (binder.get() != nullptr) {
Status status = mService->unregisterObserver(mObserver);
- AIBinder_unlinkToDeath(binder.get(), mDeathRecipient.get(), reinterpret_cast<void*>(this));
}
mService = nullptr;
diff --git a/media/module/libmediatranscoding/include/media/TranscodingResourcePolicy.h b/media/module/libmediatranscoding/include/media/TranscodingResourcePolicy.h
index ee232e7..4d762b5 100644
--- a/media/module/libmediatranscoding/include/media/TranscodingResourcePolicy.h
+++ b/media/module/libmediatranscoding/include/media/TranscodingResourcePolicy.h
@@ -22,6 +22,7 @@
#include <utils/Condition.h>
#include <mutex>
+#include <set>
namespace aidl {
namespace android {
namespace media {
@@ -48,6 +49,8 @@
bool mRegistered GUARDED_BY(mRegisteredLock);
std::shared_ptr<IResourceObserverService> mService GUARDED_BY(mRegisteredLock);
std::shared_ptr<ResourceObserver> mObserver;
+ mutable std::mutex mCookieKeysLock;
+ std::set<uintptr_t> mCookieKeys;
mutable std::mutex mCallbackLock;
std::weak_ptr<ResourcePolicyCallbackInterface> mResourcePolicyCallback
@@ -59,6 +62,7 @@
static void BinderDiedCallback(void* cookie);
void registerSelf();
+ // must delete the associated TranscodingResourcePolicyCookie any time this is called
void unregisterSelf();
void onResourceAvailable(pid_t pid);
}; // class TranscodingUidPolicy
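The change above stops passing the raw TranscodingResourcePolicy pointer as the death-recipient cookie; instead it registers a heap-allocated cookie under an incrementing key, the died callback only dereferences owners whose cookie is still in the global map, and the destructor erases any keys it still owns. A hedged, binder-free sketch of that ownership scheme (Owner, Cookie and binderDiedCallback are illustrative names):

#include <cstdint>
#include <map>
#include <memory>
#include <mutex>
#include <set>

class Owner;
struct Cookie { Owner* owner; };

static std::mutex sCookiesMutex;
static std::map<uintptr_t, std::unique_ptr<Cookie>> sCookies;
static uintptr_t sCookieKeyCounter = 0;

class Owner {
public:
    uintptr_t registerCookie() {
        std::lock_guard<std::mutex> guard(sCookiesMutex);
        const uintptr_t key = sCookieKeyCounter++;
        sCookies.emplace(key, std::make_unique<Cookie>(Cookie{this}));
        mKeys.insert(key);
        return key;  // this value is what gets handed to linkToDeath()
    }

    ~Owner() {
        // Drop every cookie that still points at this object.
        std::lock_guard<std::mutex> guard(sCookiesMutex);
        for (uintptr_t key : mKeys) sCookies.erase(key);
    }

    void onPeerDied() { /* unregister from the remote service */ }

private:
    std::set<uintptr_t> mKeys;
};

// Death callback: only dereference the owner if its cookie is still alive.
void binderDiedCallback(void* cookie) {
    std::lock_guard<std::mutex> guard(sCookiesMutex);
    auto it = sCookies.find(reinterpret_cast<uintptr_t>(cookie));
    if (it != sCookies.end()) {
        it->second->owner->onPeerDied();
        sCookies.erase(it);
    }
}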
diff --git a/media/module/metadatautils/MetaDataUtils.cpp b/media/module/metadatautils/MetaDataUtils.cpp
index db60f04..0895bb5 100644
--- a/media/module/metadatautils/MetaDataUtils.cpp
+++ b/media/module/metadatautils/MetaDataUtils.cpp
@@ -81,6 +81,177 @@
return true;
}
+// Check if the next 24 bits are VP9 SYNC_CODE
+static bool isVp9SyncCode(ABitReader &bits) {
+ if (bits.numBitsLeft() < 24) {
+ return false;
+ }
+ return bits.getBits(24) == 0x498342;
+}
+
+// This parses the bit depth and chroma subsampling from a VP9 uncompressed header
+// (see the bitdepth_colorspace_sampling section in 6.2 of the VP9 bitstream spec).
+static bool getVp9BitdepthChromaSubSampling(ABitReader &bits,
+ int32_t profile,
+ int32_t *bitDepth,
+ int32_t *chromaSubsampling) {
+ if (profile >= 2) {
+ if (bits.numBitsLeft() < 1) {
+ return false;
+ }
+ *bitDepth = bits.getBits(1) ? 12 : 10;
+ } else {
+ *bitDepth = 8;
+ }
+
+ uint32_t colorspace;
+ if (!bits.getBitsGraceful(3, &colorspace)) {
+ return false;
+ }
+
+ *chromaSubsampling = -1;
+ if (colorspace != 7 /*SRGB*/) {
+ // Skip yuv_range_flag
+ if (!bits.skipBits(1)) {
+ return false;
+ }
+ // Check for subsampling only for profiles 1 and 3.
+ if (profile == 1 || profile == 3) {
+ uint32_t ss_x;
+ uint32_t ss_y;
+ if (bits.getBitsGraceful(1, &ss_x) && bits.getBitsGraceful(1, &ss_y)) {
+ *chromaSubsampling = ss_x << 1 & ss_y;
+ } else {
+ return false;
+ }
+ } else {
+ *chromaSubsampling = 3;
+ }
+ } else {
+ if (profile == 1 || profile == 3) {
+ *chromaSubsampling = 0;
+ }
+ }
+ return true;
+}
+// The param data contains the first frame's data, starting with the uncompressed frame
+// header. This uncompressed header (see section 6.2 of the VP9 bitstream spec) is
+// used to parse the profile, bit depth and chroma subsampling.
+bool MakeVP9CodecSpecificData(AMediaFormat* meta, const uint8_t* data, size_t size) {
+ if (meta == nullptr || data == nullptr || size == 0) {
+ return false;
+ }
+
+ ABitReader bits(data, size);
+
+ // First 2 bits of the uncompressed header should be the frame_marker.
+ if (bits.getBits(2) != 0b10) {
+ return false;
+ }
+
+ int32_t profileLowBit = bits.getBits(1);
+ int32_t profileHighBit = bits.getBits(1);
+ int32_t profile = profileHighBit * 2 + profileLowBit;
+
+ // One reserved '0' bit if profile is 3.
+ if (profile == 3 && bits.getBits(1) != 0) {
+ return false;
+ }
+
+ // If show_existing_frame is set, we get no more data. Since this is
+ // expected to be the first frame, we can return false, which will cascade
+ // into ERROR_MALFORMED.
+ if (bits.getBits(1)) {
+ return false;
+ }
+
+ int32_t frame_type = bits.getBits(1);
+
+ // Up to 7 bits may have been read so far; they were guaranteed to be available
+ // since size > 0. From here on, check that bits are available before reading them.
+ if (bits.numBitsLeft() < 2) {
+ return false;
+ }
+
+ int32_t show_frame = bits.getBits(1);
+ int32_t error_resilient_mode = bits.getBits(1);
+ int32_t bitDepth = 8;
+ int32_t chromaSubsampling = -1;
+
+ if (frame_type == 0 /* KEY_FRAME */) {
+ // Check for sync code.
+ if (!isVp9SyncCode(bits)) {
+ return false;
+ }
+
+ if (!getVp9BitdepthChromaSubSampling(bits, profile, &bitDepth, &chromaSubsampling)) {
+ return false;
+ }
+ } else {
+ int32_t intra_only = 0;
+ if (!show_frame) {
+ if (bits.numBitsLeft() < 1) {
+ return false;
+ }
+ intra_only = bits.getBits(1);
+ }
+
+ if (!error_resilient_mode) {
+ if (bits.numBitsLeft() < 2) {
+ return false;
+ }
+ // ignore reset_frame_context
+ bits.skipBits(2);
+ }
+
+ if (!intra_only) {
+ // Require the first frame to be either a KEY_FRAME or an INTER_FRAME with intra_only set to true
+ return false;
+ }
+
+ // Check for sync code.
+ if (!isVp9SyncCode(bits)) {
+ return false;
+ }
+
+ if (profile > 0) {
+ if (!getVp9BitdepthChromaSubSampling(bits, profile, &bitDepth, &chromaSubsampling)) {
+ return false;
+ }
+ } else {
+ bitDepth = 8;
+ chromaSubsampling = 3;
+ }
+ }
+ int32_t csdSize = 6;
+ if (chromaSubsampling != -1) {
+ csdSize += 3;
+ }
+
+ // Create VP9 Codec Feature Metadata (CodecPrivate) that can be parsed
+ // https://www.webmproject.org/docs/container/#vp9-codec-feature-metadata-codecprivate
+ sp<ABuffer> csd = sp<ABuffer>::make(csdSize);
+ uint8_t* csdData = csd->data();
+
+ *csdData++ = 0x01 /* FEATURE PROFILE */;
+ *csdData++ = 0x01 /* length */;
+ *csdData++ = profile;
+
+ *csdData++ = 0x03 /* FEATURE BITDEPTH */;
+ *csdData++ = 0x01 /* length */;
+ *csdData++ = bitDepth;
+
+ // csdSize more than 6 means chroma subsampling data was found.
+ if (csdSize > 6) {
+ *csdData++ = 0x04 /* FEATURE SUBSAMPLING */;
+ *csdData++ = 0x01 /* length */;
+ *csdData++ = chromaSubsampling;
+ }
+
+ AMediaFormat_setBuffer(meta, AMEDIAFORMAT_KEY_CSD_0, csd->data(), csd->size());
+ return true;
+}
+
bool MakeAACCodecSpecificData(MetaDataBase &meta, const uint8_t *data, size_t size) {
if (data == nullptr || size < 7) {
return false;
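MakeVP9CodecSpecificData() above ends by emitting the WebM "VP9 Codec Feature Metadata" (CodecPrivate) layout: a sequence of one-byte (ID, length, value) triples for profile, bit depth and, when it could be determined, chroma subsampling. A hedged sketch of just that packing step, assuming the three values were already parsed (packVp9CodecPrivate is a hypothetical helper):

#include <cstdint>
#include <vector>

// Hypothetical helper building the (ID, length, value) triples described at
// https://www.webmproject.org/docs/container/#vp9-codec-feature-metadata-codecprivate
std::vector<uint8_t> packVp9CodecPrivate(int32_t profile, int32_t bitDepth,
                                         int32_t chromaSubsampling /* -1 if unknown */) {
    std::vector<uint8_t> csd;
    csd.insert(csd.end(), {0x01, 0x01, static_cast<uint8_t>(profile)});   // FEATURE PROFILE
    csd.insert(csd.end(), {0x03, 0x01, static_cast<uint8_t>(bitDepth)});  // FEATURE BITDEPTH
    if (chromaSubsampling != -1) {
        // FEATURE SUBSAMPLING, only when the header actually carried it.
        csd.insert(csd.end(), {0x04, 0x01, static_cast<uint8_t>(chromaSubsampling)});
    }
    return csd;  // caller would attach this as AMEDIAFORMAT_KEY_CSD_0
}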
diff --git a/media/module/metadatautils/include/media/stagefright/MetaDataUtils.h b/media/module/metadatautils/include/media/stagefright/MetaDataUtils.h
index dcaf27f..69cf21a 100644
--- a/media/module/metadatautils/include/media/stagefright/MetaDataUtils.h
+++ b/media/module/metadatautils/include/media/stagefright/MetaDataUtils.h
@@ -38,6 +38,8 @@
void parseVorbisComment(
AMediaFormat *fileMeta, const char *comment, size_t commentLength);
+bool MakeVP9CodecSpecificData(AMediaFormat* meta, const uint8_t* data, size_t size);
+
} // namespace android
#endif // META_DATA_UTILS_H_
diff --git a/media/mtp/MtpServer.cpp b/media/mtp/MtpServer.cpp
index d917772..80fe51a 100644
--- a/media/mtp/MtpServer.cpp
+++ b/media/mtp/MtpServer.cpp
@@ -132,6 +132,10 @@
}
MtpServer::~MtpServer() {
+ if (mHandle) {
+ delete mHandle;
+ mHandle = NULL;
+ }
}
void MtpServer::addStorage(MtpStorage* storage) {
diff --git a/media/tests/benchmark/src/native/decoder/C2Decoder.cpp b/media/tests/benchmark/src/native/decoder/C2Decoder.cpp
index 6539f24..f9a6b1c 100644
--- a/media/tests/benchmark/src/native/decoder/C2Decoder.cpp
+++ b/media/tests/benchmark/src/native/decoder/C2Decoder.cpp
@@ -106,7 +106,7 @@
work->input.ordinal.frameIndex = mNumInputFrame;
work->input.buffers.clear();
int size = frameInfo[mNumInputFrame].size;
- int alignedSize = ALIGN(size, PAGE_SIZE);
+ int alignedSize = ALIGN(size, getpagesize());
if (size) {
std::shared_ptr<C2LinearBlock> block;
status = mLinearPool->fetchLinearBlock(
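PAGE_SIZE is a compile-time constant that no longer matches all devices (for example, 16 KB-page configurations), so the benchmark now rounds the input size up to the runtime page size. A hedged sketch, assuming ALIGN is the usual round-up-to-a-power-of-two-multiple macro:

#include <unistd.h>
#include <cstddef>

// Round size up to the next multiple of align (align must be a power of two).
static inline size_t alignUp(size_t size, size_t align) {
    return (size + align - 1) & ~(align - 1);
}

// Page-size-agnostic: query the page size at runtime instead of PAGE_SIZE.
size_t alignedAllocationSize(size_t size) {
    return alignUp(size, static_cast<size_t>(getpagesize()));
}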
diff --git a/media/utils/ServiceUtilities.cpp b/media/utils/ServiceUtilities.cpp
index c4f2808..4b0192a 100644
--- a/media/utils/ServiceUtilities.cpp
+++ b/media/utils/ServiceUtilities.cpp
@@ -14,6 +14,7 @@
* limitations under the License.
*/
+//#define LOG_NDEBUG 0
#define LOG_TAG "ServiceUtilities"
#include <audio_utils/clock.h>
@@ -85,7 +86,7 @@
}
std::optional<AttributionSourceState> resolveAttributionSource(
- const AttributionSourceState& callerAttributionSource) {
+ const AttributionSourceState& callerAttributionSource, const uint32_t virtualDeviceId) {
AttributionSourceState nextAttributionSource = callerAttributionSource;
if (!nextAttributionSource.packageName.has_value()) {
@@ -100,6 +101,7 @@
return std::nullopt;
}
}
+ nextAttributionSource.deviceId = virtualDeviceId;
AttributionSourceState myAttributionSource;
myAttributionSource.uid = VALUE_OR_FATAL(android::legacy2aidl_uid_t_int32_t(getuid()));
@@ -108,13 +110,15 @@
// audioserver to the app ops system
static sp<BBinder> appOpsToken = sp<BBinder>::make();
myAttributionSource.token = appOpsToken;
+ myAttributionSource.deviceId = virtualDeviceId;
myAttributionSource.next.push_back(nextAttributionSource);
return std::optional<AttributionSourceState>{myAttributionSource};
}
-static bool checkRecordingInternal(const AttributionSourceState& attributionSource,
- const String16& msg, bool start, audio_source_t source) {
+ static bool checkRecordingInternal(const AttributionSourceState &attributionSource,
+ const uint32_t virtualDeviceId,
+ const String16 &msg, bool start, audio_source_t source) {
// Okay to not track in app ops as audio server or media server is us and if
// device is rooted security model is considered compromised.
// system_server loses its RECORD_AUDIO permission when a secondary
@@ -126,8 +130,8 @@
// We specify a pid and uid here as mediaserver (aka MediaRecorder or StageFrightRecorder)
// may open a record track on behalf of a client. Note that pid may be a tid.
// IMPORTANT: DON'T USE PermissionCache - RUNTIME PERMISSIONS CHANGE.
- const std::optional<AttributionSourceState> resolvedAttributionSource =
- resolveAttributionSource(attributionSource);
+ std::optional<AttributionSourceState> resolvedAttributionSource =
+ resolveAttributionSource(attributionSource, virtualDeviceId);
if (!resolvedAttributionSource.has_value()) {
return false;
}
@@ -149,16 +153,30 @@
return permitted;
}
-bool recordingAllowed(const AttributionSourceState& attributionSource, audio_source_t source) {
- return checkRecordingInternal(attributionSource, String16(), /*start*/ false, source);
+static constexpr int DEVICE_ID_DEFAULT = 0;
+
+bool recordingAllowed(const AttributionSourceState &attributionSource, audio_source_t source) {
+ return checkRecordingInternal(attributionSource, DEVICE_ID_DEFAULT, String16(), /*start*/ false,
+ source);
}
-bool startRecording(const AttributionSourceState& attributionSource, const String16& msg,
- audio_source_t source) {
- return checkRecordingInternal(attributionSource, msg, /*start*/ true, source);
+bool recordingAllowed(const AttributionSourceState &attributionSource,
+ const uint32_t virtualDeviceId,
+ audio_source_t source) {
+ return checkRecordingInternal(attributionSource, virtualDeviceId,
+ String16(), /*start*/ false, source);
}
-void finishRecording(const AttributionSourceState& attributionSource, audio_source_t source) {
+bool startRecording(const AttributionSourceState& attributionSource,
+ const uint32_t virtualDeviceId,
+ const String16& msg,
+ audio_source_t source) {
+ return checkRecordingInternal(attributionSource, virtualDeviceId, msg, /*start*/ true,
+ source);
+}
+
+void finishRecording(const AttributionSourceState &attributionSource, uint32_t virtualDeviceId,
+ audio_source_t source) {
// Okay to not track in app ops as audio server is us and if
// device is rooted security model is considered compromised.
uid_t uid = VALUE_OR_FATAL(aidl2legacy_int32_t_uid_t(attributionSource.uid));
@@ -168,7 +186,7 @@
// may open a record track on behalf of a client. Note that pid may be a tid.
// IMPORTANT: DON'T USE PermissionCache - RUNTIME PERMISSIONS CHANGE.
const std::optional<AttributionSourceState> resolvedAttributionSource =
- resolveAttributionSource(attributionSource);
+ resolveAttributionSource(attributionSource, virtualDeviceId);
if (!resolvedAttributionSource.has_value()) {
return;
}
@@ -392,7 +410,7 @@
return false;
}
const std::optional<AttributionSourceState> resolvedAttributionSource =
- resolveAttributionSource(attributionSource);
+ resolveAttributionSource(attributionSource, DEVICE_ID_DEFAULT);
if (!resolvedAttributionSource.has_value()) {
return true;
}
diff --git a/media/utils/fuzzers/ServiceUtilitiesFuzz.cpp b/media/utils/fuzzers/ServiceUtilitiesFuzz.cpp
index 15f043a..449e7de 100644
--- a/media/utils/fuzzers/ServiceUtilitiesFuzz.cpp
+++ b/media/utils/fuzzers/ServiceUtilitiesFuzz.cpp
@@ -53,6 +53,7 @@
int32_t pid = data_provider.ConsumeIntegral<int32_t>();
audio_source_t source = static_cast<audio_source_t>(data_provider
.ConsumeIntegral<std::underlying_type_t<audio_source_t>>());
+ uint32_t deviceId = data_provider.ConsumeIntegral<uint32_t>();
std::string packageNameStr = data_provider.ConsumeRandomLengthString(kMaxStringLen);
std::string msgStr = data_provider.ConsumeRandomLengthString(kMaxStringLen);
@@ -70,8 +71,9 @@
android::isAudioServerOrSystemServerUid(uid);
android::isAudioServerOrMediaServerUid(uid);
android::recordingAllowed(attributionSource);
- android::startRecording(attributionSource, msgStr16, source);
- android::finishRecording(attributionSource, source);
+ android::recordingAllowed(attributionSource, deviceId, source);
+ android::startRecording(attributionSource, deviceId, msgStr16, source);
+ android::finishRecording(attributionSource, deviceId, source);
android::captureAudioOutputAllowed(attributionSource);
android::captureMediaOutputAllowed(attributionSource);
android::captureHotwordAllowed(attributionSource);
diff --git a/media/utils/include/mediautils/ServiceUtilities.h b/media/utils/include/mediautils/ServiceUtilities.h
index 0b3a3f9..9c02cd4 100644
--- a/media/utils/include/mediautils/ServiceUtilities.h
+++ b/media/utils/include/mediautils/ServiceUtilities.h
@@ -87,11 +87,16 @@
bool recordingAllowed(const AttributionSourceState& attributionSource,
audio_source_t source = AUDIO_SOURCE_DEFAULT);
-bool startRecording(const AttributionSourceState& attributionSource,
- const String16& msg, audio_source_t source);
-void finishRecording(const AttributionSourceState& attributionSource, audio_source_t source);
+
+bool recordingAllowed(const AttributionSourceState &attributionSource,
+ uint32_t virtualDeviceId,
+ audio_source_t source);
+bool startRecording(const AttributionSourceState& attributionSource, uint32_t virtualDeviceId,
+ const String16& msg, audio_source_t source);
+void finishRecording(const AttributionSourceState& attributionSource, uint32_t virtualDeviceId,
+ audio_source_t source);
std::optional<AttributionSourceState> resolveAttributionSource(
- const AttributionSourceState& callerAttributionSource);
+ const AttributionSourceState& callerAttributionSource, uint32_t virtualDeviceId);
bool captureAudioOutputAllowed(const AttributionSourceState& attributionSource);
bool captureMediaOutputAllowed(const AttributionSourceState& attributionSource);
bool captureTunerAudioInputAllowed(const AttributionSourceState& attributionSource);
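The new overloads above thread a virtual device id through the recording-permission checks: resolveAttributionSource() stamps it on both the caller's entry and audioserver's own entry before the chain is handed to app-ops, and the legacy overloads keep the default device. A hedged sketch of that chain construction with a simplified, hypothetical attribution struct:

#include <cstdint>
#include <optional>
#include <string>
#include <vector>

// Hypothetical mirror of AttributionSourceState, just enough to show how the
// new virtualDeviceId is stamped on both links of the attribution chain.
struct Attribution {
    int32_t uid = 0;
    std::optional<std::string> packageName;
    int32_t deviceId = 0;
    std::vector<Attribution> next;
};

Attribution resolveWithDevice(const Attribution& caller, uint32_t virtualDeviceId,
                              int32_t myUid) {
    Attribution nextAttribution = caller;
    nextAttribution.deviceId = static_cast<int32_t>(virtualDeviceId);

    Attribution mine;
    mine.uid = myUid;                                      // the server itself
    mine.deviceId = static_cast<int32_t>(virtualDeviceId);
    mine.next.push_back(nextAttribution);                  // chain the caller
    return mine;
}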
diff --git a/services/audioflinger/Android.bp b/services/audioflinger/Android.bp
index 129541f..9016420 100644
--- a/services/audioflinger/Android.bp
+++ b/services/audioflinger/Android.bp
@@ -66,12 +66,12 @@
// Remove some pedantic stylistic requirements.
"-google-readability-casting", // C++ casts not always necessary and may be verbose
- "-google-readability-todo", // do not require TODO(info)
+ "-google-readability-todo", // do not require TODO(info)
- "-bugprone-unhandled-self-assignment",
- "-bugprone-suspicious-string-compare",
- "-cert-oop54-cpp", // found in TransactionLog.h
"-bugprone-narrowing-conversions", // b/182410845
+ "-bugprone-suspicious-string-compare",
+ "-bugprone-unhandled-self-assignment",
+ "-cert-oop54-cpp", // found in TransactionLog.h
]
// TODO(b/275642749) Reenable these warnings
@@ -101,9 +101,9 @@
"-Wall",
"-Wdeprecated",
"-Werror",
+ "-Werror=conditional-uninitialized",
"-Werror=implicit-fallthrough",
"-Werror=sometimes-uninitialized",
- "-Werror=conditional-uninitialized",
"-Wextra",
// suppress some warning chatter.
@@ -113,7 +113,6 @@
"-Wredundant-decls",
"-Wshadow",
"-Wstrict-aliasing",
- "-fstrict-aliasing",
"-Wthread-safety",
//"-Wthread-safety-negative", // experimental - looks broken in R.
"-Wunreachable-code",
@@ -121,6 +120,7 @@
"-Wunreachable-code-return",
"-Wunused",
"-Wused-but-marked-unused",
+ "-fstrict-aliasing",
]
// Eventually use common tidy defaults
@@ -134,7 +134,7 @@
tidy_checks: audioflinger_tidy_errors,
tidy_checks_as_errors: audioflinger_tidy_errors,
tidy_flags: [
- "-format-style=file",
+ "-format-style=file",
],
}
@@ -142,49 +142,48 @@
name: "libaudioflinger_dependencies",
shared_libs: [
- "audioflinger-aidl-cpp",
"audioclient-types-aidl-cpp",
+ "audioflinger-aidl-cpp",
"av-types-aidl-cpp",
"com.android.media.audio-aconfig-cc",
"effect-aidl-cpp",
- "libaudioclient_aidl_conversion",
"libactivitymanager_aidl",
+ "libaudioclient",
+ "libaudioclient_aidl_conversion",
"libaudioflinger_datapath",
"libaudioflinger_fastpath",
"libaudioflinger_timing",
"libaudioflinger_utils",
"libaudiofoundation",
"libaudiohal",
+ "libaudiomanager",
"libaudioprocessing",
"libaudioutils",
- "libcutils",
- "libutils",
- "liblog",
"libbinder",
"libbinder_ndk",
- "libaudioclient",
- "libaudiomanager",
+ "libcutils",
+ "liblog",
+ "libmedia_helper",
"libmediametrics",
"libmediautils",
+ "libmemunreachable",
"libnbaio",
"libnblog",
"libpermission",
"libpowermanager",
- "libmemunreachable",
- "libmedia_helper",
"libshmemcompat",
"libsounddose",
+ "libutils",
"libvibrator",
"packagemanager_aidl-cpp",
],
static_libs: [
- "libmedialogservice",
"libaudiospdif",
+ "libmedialogservice",
],
}
-
cc_library {
name: "libaudioflinger",
@@ -231,9 +230,9 @@
],
cflags: [
- "-fvisibility=hidden",
- "-Werror",
"-Wall",
+ "-Werror",
+ "-fvisibility=hidden",
],
sanitize: {
integer_overflow: true,
diff --git a/services/audioflinger/Effects.cpp b/services/audioflinger/Effects.cpp
index 8b03ccf..ad043c8 100644
--- a/services/audioflinger/Effects.cpp
+++ b/services/audioflinger/Effects.cpp
@@ -31,6 +31,7 @@
#include <media/AudioContainers.h>
#include <media/AudioDeviceTypeAddr.h>
#include <media/AudioEffect.h>
+#include <media/EffectClientAsyncProxy.h>
#include <media/ShmemCompat.h>
#include <media/TypeConverter.h>
#include <media/audiohal/EffectHalInterface.h>
@@ -561,12 +562,9 @@
#undef LOG_TAG
#define LOG_TAG "EffectModule"
-EffectModule::EffectModule(const sp<EffectCallbackInterface>& callback,
- effect_descriptor_t *desc,
- int id,
- audio_session_t sessionId,
- bool pinned,
- audio_port_handle_t deviceId)
+EffectModule::EffectModule(const sp<EffectCallbackInterface>& callback, effect_descriptor_t* desc,
+ int id, audio_session_t sessionId, bool pinned,
+ audio_port_handle_t deviceId)
: EffectBase(callback, desc, id, sessionId, pinned),
// clear mConfig to ensure consistent initial value of buffer framecount
// in case buffers are associated by setInBuffer() or setOutBuffer()
@@ -576,9 +574,9 @@
mMaxDisableWaitCnt(1), // set by configure_l(), should be >= 1
mDisableWaitCnt(0), // set by process() and updateState()
mOffloaded(false),
- mIsOutput(false)
- , mSupportsFloat(false)
-{
+ mIsOutput(false),
+ mSupportsFloat(false),
+ mEffectInterfaceDebug(desc->name) {
ALOGV("Constructor %p pinned %d", this, pinned);
int lStatus;
@@ -586,6 +584,7 @@
mStatus = callback->createEffectHal(
&desc->uuid, sessionId, deviceId, &mEffectInterface);
if (mStatus != NO_ERROR) {
+ ALOGE("%s createEffectHal failed: %d", __func__, mStatus);
return;
}
lStatus = init_l();
@@ -595,12 +594,14 @@
}
setOffloaded_l(callback->isOffload(), callback->io());
- ALOGV("Constructor success name %s, Interface %p", mDescriptor.name, mEffectInterface.get());
+ ALOGV("%s Constructor success name %s, Interface %p", __func__, mDescriptor.name,
+ mEffectInterface.get());
return;
Error:
mEffectInterface.clear();
- ALOGV("Constructor Error %d", mStatus);
+ mEffectInterfaceDebug += " init failed:" + std::to_string(lStatus);
+ ALOGE("%s Constructor Error %d", __func__, mStatus);
}
EffectModule::~EffectModule()
@@ -611,7 +612,7 @@
AudioEffect::guidToString(&mDescriptor.uuid, uuidStr, sizeof(uuidStr));
ALOGW("EffectModule %p destructor called with unreleased interface, effect %s",
this, uuidStr);
- release_l();
+ release_l("~EffectModule");
}
}
@@ -1126,13 +1127,14 @@
}
// must be called with EffectChain::mutex() held
-void EffectModule::release_l()
+void EffectModule::release_l(const std::string& from)
{
if (mEffectInterface != 0) {
removeEffectFromHal_l();
// release effect engine
mEffectInterface->close();
mEffectInterface.clear();
+ mEffectInterfaceDebug += " released by: " + from;
}
}
@@ -1383,6 +1385,7 @@
*right = mReturnedVolume.value()[1];
return NO_ERROR;
}
+ LOG_ALWAYS_FATAL_IF(mEffectInterface == nullptr, "%s", mEffectInterfaceDebug.c_str());
uint32_t volume[2] = {*left, *right};
uint32_t *pVolume = controller ? volume : nullptr;
uint32_t size = sizeof(volume);
@@ -1555,13 +1558,16 @@
return INVALID_OPERATION;
}
- std::vector<uint8_t> request(sizeof(effect_param_t) + 3 * sizeof(uint32_t));
+ std::vector<uint8_t> request(sizeof(effect_param_t) + 3 * sizeof(uint32_t) + sizeof(float));
effect_param_t *param = (effect_param_t*) request.data();
param->psize = sizeof(int32_t);
- param->vsize = sizeof(int32_t) * 2;
+ param->vsize = sizeof(int32_t) * 2 + sizeof(float);
*(int32_t*)param->data = HG_PARAM_HAPTIC_INTENSITY;
- *((int32_t*)param->data + 1) = id;
- *((int32_t*)param->data + 2) = static_cast<int32_t>(hapticScale.getLevel());
+ int32_t* hapticScalePtr = reinterpret_cast<int32_t*>(param->data + sizeof(int32_t));
+ hapticScalePtr[0] = id;
+ hapticScalePtr[1] = static_cast<int32_t>(hapticScale.getLevel());
+ float* adaptiveScaleFactorPtr = reinterpret_cast<float*>(param->data + 3 * sizeof(int32_t));
+ *adaptiveScaleFactorPtr = hapticScale.getAdaptiveScaleFactor();
std::vector<uint8_t> response;
status_t status = command(EFFECT_CMD_SET_PARAM, request, sizeof(int32_t), &response);
if (status == NO_ERROR) {
@@ -1736,7 +1742,8 @@
const sp<media::IEffectClient>& effectClient,
int32_t priority, bool notifyFramesProcessed)
: BnEffect(),
- mEffect(effect), mEffectClient(effectClient), mClient(client), mCblk(NULL),
+ mEffect(effect), mEffectClient(media::EffectClientAsyncProxy::makeIfNeeded(effectClient)),
+ mClient(client), mCblk(nullptr),
mPriority(priority), mHasControl(false), mEnabled(false), mDisconnected(false),
mNotifyFramesProcessed(notifyFramesProcessed)
{
@@ -2525,7 +2532,7 @@
mEffects[i]->stop_l();
}
if (release) {
- mEffects[i]->release_l();
+ mEffects[i]->release_l("EffectChain::removeEffect");
}
// Skip operation when no thread attached (could lead to sigfpe as framecount is 0...)
if (hasThreadAttached && type != EFFECT_FLAG_TYPE_AUXILIARY) {
@@ -3562,7 +3569,7 @@
{
audio_utils::lock_guard _l(proxyMutex());
if (effect == mHalEffect) {
- mHalEffect->release_l();
+ mHalEffect->release_l("DeviceEffectProxy::removeEffect");
mHalEffect.clear();
mDevicePort.id = AUDIO_PORT_HANDLE_NONE;
}
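setHapticScale_l() above grows the HG_PARAM_HAPTIC_INTENSITY request by sizeof(float) so the adaptive scale factor rides along after the track id and haptic level. A hedged sketch of laying out such a parameter blob; the Header struct only mimics the shape of effect_param_t (status/psize/vsize followed by data) and the helper name is hypothetical:

#include <cstdint>
#include <cstring>
#include <vector>

// Hypothetical layout helper for an effect_param_t-style blob:
// [header][param id][track id][haptic level][adaptive scale factor]
std::vector<uint8_t> makeHapticSetParam(int32_t paramId, int32_t trackId,
                                        int32_t hapticLevel, float adaptiveScale) {
    struct Header { uint32_t status; uint32_t psize; uint32_t vsize; };
    std::vector<uint8_t> request(sizeof(Header) + 3 * sizeof(int32_t) + sizeof(float));
    auto* header = reinterpret_cast<Header*>(request.data());
    header->psize = sizeof(int32_t);                      // one int32 of "parameter"
    header->vsize = 2 * sizeof(int32_t) + sizeof(float);  // two int32s + one float of "value"

    uint8_t* data = request.data() + sizeof(Header);
    memcpy(data, &paramId, sizeof(paramId));
    memcpy(data + sizeof(int32_t), &trackId, sizeof(trackId));
    memcpy(data + 2 * sizeof(int32_t), &hapticLevel, sizeof(hapticLevel));
    memcpy(data + 3 * sizeof(int32_t), &adaptiveScale, sizeof(adaptiveScale));
    return request;
}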
diff --git a/services/audioflinger/Effects.h b/services/audioflinger/Effects.h
index d374b2c..b516c37 100644
--- a/services/audioflinger/Effects.h
+++ b/services/audioflinger/Effects.h
@@ -228,7 +228,7 @@
bool isOffloaded_l() const final
REQUIRES(audio_utils::EffectChain_Mutex) EXCLUDES_EffectBase_Mutex;
void addEffectToHal_l() final REQUIRES(audio_utils::EffectChain_Mutex);
- void release_l() final REQUIRES(audio_utils::EffectChain_Mutex);
+ void release_l(const std::string& from = "") final REQUIRES(audio_utils::EffectChain_Mutex);
sp<IAfEffectModule> asEffectModule() final { return this; }
@@ -312,6 +312,8 @@
// Cache the volume that returned from the effect when setting volume successfully. The value
// here is used to indicate the volume to apply before this effect.
std::optional<std::vector<uint32_t>> mReturnedVolume;
+ // TODO: b/315995877, remove this debugging string once the root cause is found
+ std::string mEffectInterfaceDebug;
};
// The EffectHandle class implements the IEffect interface. It provides resources
diff --git a/services/audioflinger/IAfEffect.h b/services/audioflinger/IAfEffect.h
index d5adeb4..b9bb18c 100644
--- a/services/audioflinger/IAfEffect.h
+++ b/services/audioflinger/IAfEffect.h
@@ -211,7 +211,7 @@
virtual status_t stop_l() = 0;
virtual void addEffectToHal_l() = 0;
- virtual void release_l() = 0;
+ virtual void release_l(const std::string& from) = 0;
};
class IAfEffectChain : public RefBase {
diff --git a/services/audioflinger/Threads.cpp b/services/audioflinger/Threads.cpp
index 41914e3..7c248dc 100644
--- a/services/audioflinger/Threads.cpp
+++ b/services/audioflinger/Threads.cpp
@@ -694,6 +694,10 @@
}
// When Thread::requestExitAndWait is made virtual and this method is renamed to
// "virtual status_t requestExitAndWait()", replace by "return Thread::requestExitAndWait();"
+
+ // For TimeCheck: track waiting on the thread join of getTid().
+ audio_utils::mutex::scoped_join_wait_check sjw(getTid());
+
requestExitAndWait();
}
@@ -2928,7 +2932,6 @@
// Set haptic intensity for effect
if (chain != nullptr) {
- // TODO(b/324559333): Add adaptive haptics scaling support for the HapticGenerator.
chain->setHapticScale_l(track->id(), hapticScale);
}
}
@@ -3398,9 +3401,9 @@
return NO_ERROR;
} else {
status_t status;
- uint32_t frames;
+ uint64_t frames = 0;
status = mOutput->getRenderPosition(&frames);
- *dspFrames = (size_t)frames;
+ *dspFrames = (uint32_t)frames;
return status;
}
}
@@ -3510,7 +3513,7 @@
char *endptr;
unsigned long ul = strtoul(value, &endptr, 0);
if (*endptr == '\0' && ul != 0) {
- ALOGD("Silence is golden");
+ ALOGW("%s: mute from ro.audio.silent. Silence is golden", __func__);
// The setprop command will not allow a property to be changed after
// the first time it is set, so we don't have to worry about un-muting.
setMasterMute_l(true);
@@ -5967,7 +5970,7 @@
vaf = v * sendLevel * (1. / MAX_GAIN_INT);
}
- track->setFinalVolume(vrf, vlf);
+ track->setFinalVolume(vlf, vrf);
// Delegate volume control to effect in track effect chain if needed
if (chain != 0 && chain->setVolume(&vl, &vr)) {
@@ -7147,11 +7150,14 @@
{
PlaybackThread::flushHw_l();
mOutput->flush();
- mHwPaused = false;
mFlushPending = false;
mTimestampVerifier.discontinuity(discontinuityForStandbyOrFlush());
mTimestamp.clear();
mMonotonicFrameCounter.onFlush();
+ // We do not reset mHwPaused, which is hidden from the Track client.
+ // Note: the client track in Tracks.cpp and AudioTrack.cpp
+ // has a FLUSHED state but the DirectOutputThread does not;
+ // those tracks will continue to show isStopped().
}
int64_t DirectOutputThread::computeWaitTimeNs_l() const {
@@ -7901,6 +7907,11 @@
if (mSupportedLatencyModes.empty()) {
return;
}
+ // Do not update the HAL latency mode if no track is active
+ if (mActiveTracks.isEmpty()) {
+ return;
+ }
+
audio_latency_mode_t latencyMode = AUDIO_LATENCY_MODE_FREE;
if (mSupportedLatencyModes.size() == 1) {
// If the HAL only support one latency mode currently, confirm the choice
@@ -8219,7 +8230,6 @@
inputStandBy();
reacquire_wakelock:
- sp<IAfRecordTrack> activeTrack;
{
audio_utils::lock_guard _l(mutex());
acquireWakeLock_l();
@@ -8235,6 +8245,9 @@
// loop while there is work to do
for (int64_t loopCount = 0;; ++loopCount) { // loopCount used for statistics tracking
+ // Note: these sp<> are released at the end of the for loop outside of the mutex() lock.
+ sp<IAfRecordTrack> activeTrack;
+ std::vector<sp<IAfRecordTrack>> oldActiveTracks;
Vector<sp<IAfEffectChain>> effectChains;
// activeTracks accumulates a copy of a subset of mActiveTracks
@@ -8284,7 +8297,9 @@
bool doBroadcast = false;
bool allStopped = true;
for (size_t i = 0; i < size; ) {
-
+ if (activeTrack) { // ensure track release is outside lock.
+ oldActiveTracks.emplace_back(std::move(activeTrack));
+ }
activeTrack = mActiveTracks[i];
if (activeTrack->isTerminated()) {
if (activeTrack->isFastTrack()) {
@@ -9160,7 +9175,7 @@
// This is needed for proper patchRecord peer release.
while (recordTrack->state() == IAfTrackBase::PAUSING && !recordTrack->isInvalid()) {
mWaitWorkCV.notify_all(); // signal thread to stop
- mStartStopCV.wait(_l);
+ mStartStopCV.wait(_l, getTid());
}
if (recordTrack->state() == IAfTrackBase::PAUSED) { // successful stop
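The record loop above now keeps activeTrack (plus an oldActiveTracks spill vector) local to each loop iteration so the final strong references are dropped after mutex() has been released; destroying a track can do heavyweight cleanup that must not run under the thread lock. A hedged sketch of the pattern with std::shared_ptr standing in for sp<>:

#include <memory>
#include <mutex>
#include <vector>

// Hypothetical track whose destructor does heavyweight cleanup (IPC, locks).
struct Track {
    ~Track() { /* must not run while the thread lock is held */ }
};

void loopOnce(std::mutex& threadLock, std::vector<std::shared_ptr<Track>>& activeTracks) {
    // Declared before the lock: destroyed at the end of this function,
    // i.e. after the lock_guard below has already been released.
    std::shared_ptr<Track> activeTrack;
    std::vector<std::shared_ptr<Track>> oldActiveTracks;
    {
        std::lock_guard<std::mutex> guard(threadLock);
        for (const auto& t : activeTracks) {
            if (activeTrack) {
                // Keep the previous reference alive until after unlock.
                oldActiveTracks.emplace_back(std::move(activeTrack));
            }
            activeTrack = t;
            // ... per-track work under the lock ...
        }
    }
    // activeTrack and oldActiveTracks go out of scope here, outside the lock.
}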
diff --git a/services/audioflinger/afutils/Android.bp b/services/audioflinger/afutils/Android.bp
index 5e29ce9..e147266 100644
--- a/services/audioflinger/afutils/Android.bp
+++ b/services/audioflinger/afutils/Android.bp
@@ -23,7 +23,7 @@
tidy_checks: audioflinger_utils_tidy_errors,
tidy_checks_as_errors: audioflinger_utils_tidy_errors,
tidy_flags: [
- "-format-style=file",
+ "-format-style=file",
],
}
@@ -64,10 +64,10 @@
],
header_libs: [
- "libaaudio_headers", // PropertyUtils.cpp
+ "libaaudio_headers", // PropertyUtils.cpp
],
include_dirs: [
- "frameworks/av/services/audioflinger", // for configuration
+ "frameworks/av/services/audioflinger", // for configuration
],
}
diff --git a/services/audioflinger/afutils/NBAIO_Tee.cpp b/services/audioflinger/afutils/NBAIO_Tee.cpp
index 86fb128..cdc8e95 100644
--- a/services/audioflinger/afutils/NBAIO_Tee.cpp
+++ b/services/audioflinger/afutils/NBAIO_Tee.cpp
@@ -514,6 +514,12 @@
return NO_ERROR; // return full path
}
+/* static */
+NBAIO_Tee::RunningTees& NBAIO_Tee::getRunningTees() {
+ [[clang::no_destroy]] static RunningTees runningTees;
+ return runningTees;
+}
+
} // namespace android
#endif // TEE_SINK
diff --git a/services/audioflinger/afutils/NBAIO_Tee.h b/services/audioflinger/afutils/NBAIO_Tee.h
index a5c544e..5ab1949 100644
--- a/services/audioflinger/afutils/NBAIO_Tee.h
+++ b/services/audioflinger/afutils/NBAIO_Tee.h
@@ -310,10 +310,7 @@
};
// singleton
- static RunningTees &getRunningTees() {
- static RunningTees runningTees;
- return runningTees;
- }
+ static RunningTees& getRunningTees();
// The NBAIO TeeImpl may have lifetime longer than NBAIO_Tee if
// RunningTees::dump() is being called simultaneous to ~NBAIO_Tee().
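Moving getRunningTees() out of line lets the singleton live in one translation unit and carry [[clang::no_destroy]], so the registry is never destructed at process exit and a late RunningTees::dump() cannot race its teardown. A hedged sketch of the idiom with a hypothetical Registry type:

#include <set>

// Function-local static singleton that is intentionally never destroyed.
// [[clang::no_destroy]] (a clang extension) suppresses the exit-time
// destructor, so threads still running during process teardown cannot
// observe a half-destructed registry.
class Registry {
public:
    void add(int id) { mIds.insert(id); }
private:
    std::set<int> mIds;
};

Registry& getRegistry() {
    [[clang::no_destroy]] static Registry registry;
    return registry;
}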
diff --git a/services/audioflinger/datapath/Android.bp b/services/audioflinger/datapath/Android.bp
index 4235f14..6918881 100644
--- a/services/audioflinger/datapath/Android.bp
+++ b/services/audioflinger/datapath/Android.bp
@@ -29,7 +29,7 @@
tidy_checks: audioflinger_datapath_tidy_errors,
tidy_checks_as_errors: audioflinger_datapath_tidy_errors,
tidy_flags: [
- "-format-style=file",
+ "-format-style=file",
],
}
@@ -70,6 +70,6 @@
],
include_dirs: [
- "frameworks/av/services/audioflinger", // for configuration
+ "frameworks/av/services/audioflinger", // for configuration
],
}
diff --git a/services/audioflinger/datapath/AudioStreamIn.cpp b/services/audioflinger/datapath/AudioStreamIn.cpp
index 76618f4..165ac25 100644
--- a/services/audioflinger/datapath/AudioStreamIn.cpp
+++ b/services/audioflinger/datapath/AudioStreamIn.cpp
@@ -58,7 +58,7 @@
if (mHalFormatHasProportionalFrames &&
(flags & AUDIO_INPUT_FLAG_DIRECT) == AUDIO_INPUT_FLAG_DIRECT) {
- // For DirectRecord reset timestamp to 0 on standby.
+ // For DirectRecord reset position to 0 on standby.
const uint64_t adjustedPosition = (halPosition <= mFramesReadAtStandby) ?
0 : (halPosition - mFramesReadAtStandby);
// Scale from HAL sample rate to application rate.
diff --git a/services/audioflinger/datapath/AudioStreamOut.cpp b/services/audioflinger/datapath/AudioStreamOut.cpp
index aad538f..a686ff6 100644
--- a/services/audioflinger/datapath/AudioStreamOut.cpp
+++ b/services/audioflinger/datapath/AudioStreamOut.cpp
@@ -51,42 +51,17 @@
return NO_INIT;
}
- uint32_t halPosition = 0;
+ uint64_t halPosition = 0;
const status_t status = stream->getRenderPosition(&halPosition);
if (status != NO_ERROR) {
return status;
}
-
- // Maintain a 64-bit render position using the 32-bit result from the HAL.
- // This delta calculation relies on the arithmetic overflow behavior
- // of integers. For example (100 - 0xFFFFFFF0) = 116.
- const auto truncatedPosition = (uint32_t)mRenderPosition;
- int32_t deltaHalPosition; // initialization not needed, overwitten by __builtin_sub_overflow()
- (void) __builtin_sub_overflow(halPosition, truncatedPosition, &deltaHalPosition);
-
- if (deltaHalPosition >= 0) {
- mRenderPosition += deltaHalPosition;
- } else if (mExpectRetrograde) {
- mExpectRetrograde = false;
- mRenderPosition -= static_cast<uint64_t>(-deltaHalPosition);
- }
// Scale from HAL sample rate to application rate.
- *frames = mRenderPosition / mRateMultiplier;
+ *frames = halPosition / mRateMultiplier;
return status;
}
-// return bottom 32-bits of the render position
-status_t AudioStreamOut::getRenderPosition(uint32_t *frames)
-{
- uint64_t position64 = 0;
- const status_t status = getRenderPosition(&position64);
- if (status == NO_ERROR) {
- *frames = (uint32_t)position64;
- }
- return status;
-}
-
status_t AudioStreamOut::getPresentationPosition(uint64_t *frames, struct timespec *timestamp)
{
if (stream == nullptr) {
@@ -101,7 +76,7 @@
if (mHalFormatHasProportionalFrames &&
(flags & AUDIO_OUTPUT_FLAG_DIRECT) == AUDIO_OUTPUT_FLAG_DIRECT) {
- // For DirectTrack reset timestamp to 0 on standby.
+ // For DirectTrack reset position to 0 on standby.
const uint64_t adjustedPosition = (halPosition <= mFramesWrittenAtStandby) ?
0 : (halPosition - mFramesWrittenAtStandby);
// Scale from HAL sample rate to application rate.
@@ -179,8 +154,6 @@
int AudioStreamOut::flush()
{
- mRenderPosition = 0;
- mExpectRetrograde = false;
mFramesWritten = 0;
mFramesWrittenAtStandby = 0;
const status_t result = stream->flush();
@@ -189,12 +162,14 @@
int AudioStreamOut::standby()
{
- mRenderPosition = 0;
- mExpectRetrograde = false;
mFramesWrittenAtStandby = mFramesWritten;
return stream->standby();
}
+void AudioStreamOut::presentationComplete() {
+ stream->presentationComplete();
+}
+
ssize_t AudioStreamOut::write(const void *buffer, size_t numBytes)
{
size_t bytesWritten;
diff --git a/services/audioflinger/datapath/AudioStreamOut.h b/services/audioflinger/datapath/AudioStreamOut.h
index ea41bba..2c9fb3e 100644
--- a/services/audioflinger/datapath/AudioStreamOut.h
+++ b/services/audioflinger/datapath/AudioStreamOut.h
@@ -51,9 +51,6 @@
virtual ~AudioStreamOut();
- // Get the bottom 32-bits of the 64-bit render position.
- status_t getRenderPosition(uint32_t *frames);
-
virtual status_t getRenderPosition(uint64_t *frames);
virtual status_t getPresentationPosition(uint64_t *frames, struct timespec *timestamp);
@@ -91,21 +88,14 @@
virtual status_t flush();
virtual status_t standby();
- // Avoid suppressing retrograde motion in mRenderPosition for gapless offload/direct when
- // transitioning between tracks.
- // The HAL resets the frame position without flush/stop being called, but calls back prior to
- // this event. So, on the next occurrence of retrograde motion, we permit backwards movement of
- // mRenderPosition.
- virtual void presentationComplete() { mExpectRetrograde = true; }
+ virtual void presentationComplete();
protected:
uint64_t mFramesWritten = 0; // reset by flush
uint64_t mFramesWrittenAtStandby = 0;
- uint64_t mRenderPosition = 0; // reset by flush, standby, or presentation complete
int mRateMultiplier = 1;
bool mHalFormatHasProportionalFrames = false;
size_t mHalFrameSize = 0;
- bool mExpectRetrograde = false; // see presentationComplete
};
} // namespace android
diff --git a/services/audioflinger/fastpath/Android.bp b/services/audioflinger/fastpath/Android.bp
index 84a580f..5ebc583 100644
--- a/services/audioflinger/fastpath/Android.bp
+++ b/services/audioflinger/fastpath/Android.bp
@@ -24,7 +24,7 @@
tidy_checks: fastpath_tidy_errors,
tidy_checks_as_errors: fastpath_tidy_errors,
tidy_flags: [
- "-format-style=file",
+ "-format-style=file",
],
}
diff --git a/services/audioflinger/sounddose/Android.bp b/services/audioflinger/sounddose/Android.bp
index 2cab5d1..884622e 100644
--- a/services/audioflinger/sounddose/Android.bp
+++ b/services/audioflinger/sounddose/Android.bp
@@ -29,7 +29,7 @@
tidy_checks: audioflinger_sounddose_tidy_errors,
tidy_checks_as_errors: audioflinger_sounddose_tidy_errors,
tidy_flags: [
- "-format-style=file",
+ "-format-style=file",
],
}
@@ -40,9 +40,9 @@
defaults: [
"audioflinger_sounddose_flags_defaults",
- "latest_android_media_audio_common_types_ndk_shared",
"latest_android_hardware_audio_core_sounddose_ndk_shared",
"latest_android_hardware_audio_sounddose_ndk_shared",
+ "latest_android_media_audio_common_types_ndk_shared",
],
srcs: [
@@ -66,9 +66,9 @@
],
cflags: [
+ "-DBACKEND_NDK",
"-Wall",
"-Werror",
- "-DBACKEND_NDK",
],
}
diff --git a/services/audioflinger/sounddose/tests/Android.bp b/services/audioflinger/sounddose/tests/Android.bp
index 60e170d..fcbebe1 100644
--- a/services/audioflinger/sounddose/tests/Android.bp
+++ b/services/audioflinger/sounddose/tests/Android.bp
@@ -16,9 +16,9 @@
],
defaults: [
- "latest_android_media_audio_common_types_ndk_static",
"latest_android_hardware_audio_core_sounddose_ndk_static",
"latest_android_hardware_audio_sounddose_ndk_static",
+ "latest_android_media_audio_common_types_ndk_static",
],
shared_libs: [
@@ -43,10 +43,10 @@
],
cflags: [
+ "-DBACKEND_NDK",
"-Wall",
"-Werror",
"-Wextra",
- "-DBACKEND_NDK",
],
test_suites: [
diff --git a/services/audioflinger/timing/Android.bp b/services/audioflinger/timing/Android.bp
index 30ebca0..2666ddb 100644
--- a/services/audioflinger/timing/Android.bp
+++ b/services/audioflinger/timing/Android.bp
@@ -29,7 +29,7 @@
tidy_checks: audioflinger_timing_tidy_errors,
tidy_checks_as_errors: audioflinger_timing_tidy_errors,
tidy_flags: [
- "-format-style=file",
+ "-format-style=file",
],
}
diff --git a/services/audioparameterparser/Android.bp b/services/audioparameterparser/Android.bp
index b3da333..1c1c1e1 100644
--- a/services/audioparameterparser/Android.bp
+++ b/services/audioparameterparser/Android.bp
@@ -35,18 +35,18 @@
name: "android.hardware.audio.parameter_parser.example_defaults",
defaults: [
"latest_android_hardware_audio_core_ndk_shared",
+ "latest_av_audio_types_aidl_ndk_shared",
],
shared_libs: [
- "av-audio-types-aidl-V1-ndk",
"libbase",
"libbinder_ndk",
],
cflags: [
"-Wall",
- "-Wextra",
"-Werror",
+ "-Wextra",
"-Wthread-safety",
],
}
diff --git a/services/audiopolicy/AudioPolicyInterface.h b/services/audiopolicy/AudioPolicyInterface.h
index bfc3132..8f17ffc 100644
--- a/services/audiopolicy/AudioPolicyInterface.h
+++ b/services/audiopolicy/AudioPolicyInterface.h
@@ -166,7 +166,8 @@
audio_input_flags_t flags,
audio_port_handle_t *selectedDeviceId,
input_type_t *inputType,
- audio_port_handle_t *portId) = 0;
+ audio_port_handle_t *portId,
+ uint32_t *virtualDeviceId) = 0;
// indicates to the audio policy manager that the input starts being used.
virtual status_t startInput(audio_port_handle_t portId) = 0;
// indicates to the audio policy manager that the input stops being used.
diff --git a/services/audiopolicy/TEST_MAPPING b/services/audiopolicy/TEST_MAPPING
index a2ebb8d..cf1a771 100644
--- a/services/audiopolicy/TEST_MAPPING
+++ b/services/audiopolicy/TEST_MAPPING
@@ -46,6 +46,9 @@
"include-filter": "com.google.android.gts.audio.AudioPolicyHostTest"
}
]
+ },
+ {
+ "name": "spatializer_tests"
}
]
}
diff --git a/services/audiopolicy/common/managerdefinitions/include/AudioOutputDescriptor.h b/services/audiopolicy/common/managerdefinitions/include/AudioOutputDescriptor.h
index 7c70877..00958aa 100644
--- a/services/audiopolicy/common/managerdefinitions/include/AudioOutputDescriptor.h
+++ b/services/audiopolicy/common/managerdefinitions/include/AudioOutputDescriptor.h
@@ -16,7 +16,6 @@
#pragma once
-#define __STDC_LIMIT_MACROS
#include <inttypes.h>
#include <sys/types.h>
diff --git a/services/audiopolicy/common/managerdefinitions/include/AudioProfileVectorHelper.h b/services/audiopolicy/common/managerdefinitions/include/AudioProfileVectorHelper.h
index f84bda7..5fb0ad4 100644
--- a/services/audiopolicy/common/managerdefinitions/include/AudioProfileVectorHelper.h
+++ b/services/audiopolicy/common/managerdefinitions/include/AudioProfileVectorHelper.h
@@ -38,11 +38,40 @@
void appendAudioProfiles(AudioProfileVector &audioProfileVector,
const AudioProfileVector &audioProfileVectorToAppend);
+/**
+ * Check if the profile vector contains a profile that matches the given sampling rate, channel
+ * mask and format. Note that this method uses `audio_formats_match` from policy.h, which
+ * considers two PCM formats to match if both have more than 2 bytes per sample.
+ *
+ * @param audioProfileVector
+ * @param samplingRate
+ * @param channelMask
+ * @param format
+ * @return NO_ERROR if the given profile vector is empty or if it contains a profile that matches
+ * given sampling rate, channel mask and format. Otherwise, returns BAD_VALUE.
+ */
status_t checkExactProfile(const AudioProfileVector &audioProfileVector,
const uint32_t samplingRate,
audio_channel_mask_t channelMask,
audio_format_t format);
+/**
+ * Check if the profile vector contains a profile that has exactly the same sampling rate, channel
+ * mask and format as the given values.
+ *
+ * @param audioProfileVector
+ * @param samplingRate
+ * @param channelMask
+ * @param format
+ * @return NO_ERROR if the given profile vector is empty or if it contains a profile that has
+ * exactly the same sampling rate, channel mask and format as the given values. Otherwise,
+ * returns BAD_VALUE.
+ */
+status_t checkIdenticalProfile(const AudioProfileVector &audioProfileVector,
+ const uint32_t samplingRate,
+ audio_channel_mask_t channelMask,
+ audio_format_t format);
+
status_t checkCompatibleProfile(const AudioProfileVector &audioProfileVector,
uint32_t &samplingRate,
audio_channel_mask_t &channelMask,
diff --git a/services/audiopolicy/common/managerdefinitions/include/PolicyAudioPort.h b/services/audiopolicy/common/managerdefinitions/include/PolicyAudioPort.h
index acf787b..6b21e9f 100644
--- a/services/audiopolicy/common/managerdefinitions/include/PolicyAudioPort.h
+++ b/services/audiopolicy/common/managerdefinitions/include/PolicyAudioPort.h
@@ -56,9 +56,14 @@
// Audio port IDs are in a different namespace than AudioFlinger unique IDs
static audio_port_handle_t getNextUniqueId();
- // searches for an exact match
+ // searches for an exact match; note that this method uses `audio_formats_match` from policy.h,
+ // which considers two PCM formats to match if both have more than 2 bytes per sample.
virtual status_t checkExactAudioProfile(const struct audio_port_config *config) const;
+ // searches for an identical match; unlike `checkExactAudioProfile` above, this also
+ // requires the formats to be exactly the same.
+ virtual status_t checkIdenticalAudioProfile(const struct audio_port_config *config) const;
+
// searches for a compatible match, currently implemented for input
// parameters are input|output, returned value is the best match.
status_t checkCompatibleAudioProfile(uint32_t &samplingRate,
@@ -100,6 +105,12 @@
const ChannelMaskSet &channelMasks) const;
void pickSamplingRate(uint32_t &rate, const SampleRateSet &samplingRates) const;
+ status_t checkAudioProfile(const struct audio_port_config *config,
+ std::function<status_t(const AudioProfileVector&,
+ const uint32_t samplingRate,
+ audio_channel_mask_t,
+ audio_format_t)> checkProfile) const;
+
sp<HwModule> mModule; // audio HW module exposing this I/O stream
AudioRouteVector mRoutes; // Routes involving this port
};
diff --git a/services/audiopolicy/common/managerdefinitions/src/AudioInputDescriptor.cpp b/services/audiopolicy/common/managerdefinitions/src/AudioInputDescriptor.cpp
index 6f71ac5..44f84b9 100644
--- a/services/audiopolicy/common/managerdefinitions/src/AudioInputDescriptor.cpp
+++ b/services/audiopolicy/common/managerdefinitions/src/AudioInputDescriptor.cpp
@@ -235,7 +235,8 @@
&deviceType,
String8(mDevice->address().c_str()),
source,
- flags);
+ static_cast<audio_input_flags_t>(
+ flags & mProfile->getFlags()));
LOG_ALWAYS_FATAL_IF(mDevice->type() != deviceType,
"%s openInput returned device %08x when given device %08x",
__FUNCTION__, mDevice->type(), deviceType);
diff --git a/services/audiopolicy/common/managerdefinitions/src/AudioPolicyMix.cpp b/services/audiopolicy/common/managerdefinitions/src/AudioPolicyMix.cpp
index d1819fd..3430f4b 100644
--- a/services/audiopolicy/common/managerdefinitions/src/AudioPolicyMix.cpp
+++ b/services/audiopolicy/common/managerdefinitions/src/AudioPolicyMix.cpp
@@ -30,7 +30,7 @@
#include <AudioOutputDescriptor.h>
#include <android_media_audiopolicy.h>
-namespace audio_flags = android::media::audiopolicy;
+namespace audiopolicy_flags = android::media::audiopolicy;
namespace android {
namespace {
@@ -193,7 +193,7 @@
mix.mDeviceType, mix.mDeviceAddress.c_str());
return BAD_VALUE;
}
- if (audio_flags::audio_mix_ownership()) {
+ if (audiopolicy_flags::audio_mix_ownership()) {
if (mix.mToken == registeredMix->mToken) {
ALOGE("registerMix(): same mix already registered - skipping");
return BAD_VALUE;
@@ -221,7 +221,7 @@
{
for (size_t i = 0; i < size(); i++) {
const sp<AudioPolicyMix>& registeredMix = itemAt(i);
- if (audio_flags::audio_mix_ownership()) {
+ if (audiopolicy_flags::audio_mix_ownership()) {
if (mix.mToken == registeredMix->mToken) {
ALOGD("unregisterMix(): removing mix for dev=0x%x addr=%s",
mix.mDeviceType, mix.mDeviceAddress.c_str());
diff --git a/services/audiopolicy/common/managerdefinitions/src/AudioProfileVectorHelper.cpp b/services/audiopolicy/common/managerdefinitions/src/AudioProfileVectorHelper.cpp
index 82f51ad..164f70a 100644
--- a/services/audiopolicy/common/managerdefinitions/src/AudioProfileVectorHelper.cpp
+++ b/services/audiopolicy/common/managerdefinitions/src/AudioProfileVectorHelper.cpp
@@ -190,6 +190,18 @@
return BAD_VALUE;
}
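+// Unlike checkExact(), this requires strict format equality instead of audio_formats_match().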
+status_t checkIdentical(const sp<AudioProfile> &audioProfile,
+ uint32_t samplingRate,
+ audio_channel_mask_t channelMask,
+ audio_format_t format) {
+ if (audioProfile->getFormat() == format &&
+ audioProfile->supportsChannels(channelMask) &&
+ audioProfile->supportsRate(samplingRate)) {
+ return NO_ERROR;
+ }
+ return BAD_VALUE;
+}
+
status_t checkCompatibleSamplingRate(const sp<AudioProfile> &audioProfile,
uint32_t samplingRate,
uint32_t &updatedSamplingRate)
@@ -320,23 +332,43 @@
return bestMatch > 0 ? NO_ERROR : BAD_VALUE;
}
-status_t checkExactProfile(const AudioProfileVector& audioProfileVector,
- const uint32_t samplingRate,
- audio_channel_mask_t channelMask,
- audio_format_t format)
-{
+namespace {
+
+status_t checkProfile(const AudioProfileVector& audioProfileVector,
+ const uint32_t samplingRate,
+ audio_channel_mask_t channelMask,
+ audio_format_t format,
+ std::function<status_t(const sp<AudioProfile> &, uint32_t,
+ audio_channel_mask_t, audio_format_t)> check) {
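+ // An empty profile vector places no constraint on the configuration and always matches.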
if (audioProfileVector.empty()) {
return NO_ERROR;
}
for (const auto& profile : audioProfileVector) {
- if (checkExact(profile, samplingRate, channelMask, format) == NO_ERROR) {
+ if (check(profile, samplingRate, channelMask, format) == NO_ERROR) {
return NO_ERROR;
}
}
return BAD_VALUE;
}
+} // namespace
+
+status_t checkExactProfile(const AudioProfileVector& audioProfileVector,
+ const uint32_t samplingRate,
+ audio_channel_mask_t channelMask,
+ audio_format_t format)
+{
+ return checkProfile(audioProfileVector, samplingRate, channelMask, format, checkExact);
+}
+
+status_t checkIdenticalProfile(const AudioProfileVector &audioProfileVector,
+ const uint32_t samplingRate,
+ audio_channel_mask_t channelMask,
+ audio_format_t format) {
+ return checkProfile(audioProfileVector, samplingRate, channelMask, format, checkIdentical);
+}
+
status_t checkCompatibleProfile(const AudioProfileVector &audioProfileVector,
uint32_t &samplingRate,
audio_channel_mask_t &channelMask,
diff --git a/services/audiopolicy/common/managerdefinitions/src/IOProfile.cpp b/services/audiopolicy/common/managerdefinitions/src/IOProfile.cpp
index d9fbd89..991b103 100644
--- a/services/audiopolicy/common/managerdefinitions/src/IOProfile.cpp
+++ b/services/audiopolicy/common/managerdefinitions/src/IOProfile.cpp
@@ -73,7 +73,7 @@
if (isRecordThread)
{
if ((flags & AUDIO_INPUT_FLAG_MMAP_NOIRQ) != 0) {
- if (checkExactAudioProfile(&config) != NO_ERROR) {
+ if (checkIdenticalAudioProfile(&config) != NO_ERROR) {
return result;
}
result = EXACT_MATCH;
@@ -86,7 +86,13 @@
return result;
}
} else {
- if (checkExactAudioProfile(&config) == NO_ERROR) {
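+ // For MMAP no-IRQ and bit-perfect outputs, require an identical profile rather than one
+ // that merely satisfies audio_formats_match().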
+ if ((flags & AUDIO_OUTPUT_FLAG_MMAP_NOIRQ) != 0 ||
+ (flags & AUDIO_OUTPUT_FLAG_BIT_PERFECT) != 0) {
+ if (checkIdenticalAudioProfile(&config) != NO_ERROR) {
+ return result;
+ }
+ result = EXACT_MATCH;
+ } else if (checkExactAudioProfile(&config) == NO_ERROR) {
result = EXACT_MATCH;
} else {
return result;
diff --git a/services/audiopolicy/common/managerdefinitions/src/PolicyAudioPort.cpp b/services/audiopolicy/common/managerdefinitions/src/PolicyAudioPort.cpp
index ce8178f..ae99191 100644
--- a/services/audiopolicy/common/managerdefinitions/src/PolicyAudioPort.cpp
+++ b/services/audiopolicy/common/managerdefinitions/src/PolicyAudioPort.cpp
@@ -63,21 +63,11 @@
status_t PolicyAudioPort::checkExactAudioProfile(const struct audio_port_config *config) const
{
- status_t status = NO_ERROR;
- auto config_mask = config->config_mask;
- if (config_mask & AUDIO_PORT_CONFIG_GAIN) {
- config_mask &= ~AUDIO_PORT_CONFIG_GAIN;
- status = asAudioPort()->checkGain(&config->gain, config->gain.index);
- if (status != NO_ERROR) {
- return status;
- }
- }
- if (config_mask != 0) {
- // TODO should we check sample_rate / channel_mask / format separately?
- status = checkExactProfile(asAudioPort()->getAudioProfiles(), config->sample_rate,
- config->channel_mask, config->format);
- }
- return status;
+ return checkAudioProfile(config, checkExactProfile);
+}
+
+status_t PolicyAudioPort::checkIdenticalAudioProfile(const struct audio_port_config *config) const {
+ return checkAudioProfile(config, checkIdenticalProfile);
}
void PolicyAudioPort::pickSamplingRate(uint32_t &pickedRate,
@@ -266,4 +256,28 @@
asAudioPort()->getName().c_str(), samplingRate, channelMask, format);
}
+status_t PolicyAudioPort::checkAudioProfile(
+ const struct audio_port_config *config,
+ std::function<status_t(const AudioProfileVector &,
+ const uint32_t,
+ audio_channel_mask_t,
+ audio_format_t)> checkProfile) const {
+ status_t status = NO_ERROR;
+ auto config_mask = config->config_mask;
+ if (config_mask & AUDIO_PORT_CONFIG_GAIN) {
+ config_mask &= ~AUDIO_PORT_CONFIG_GAIN;
+ status = asAudioPort()->checkGain(&config->gain, config->gain.index);
+ if (status != NO_ERROR) {
+ return status;
+ }
+ }
+ if (config_mask != 0) {
+ // TODO should we check sample_rate / channel_mask / format separately?
+ status = checkProfile(asAudioPort()->getAudioProfiles(), config->sample_rate,
+ config->channel_mask, config->format);
+ }
+ return status;
+}
+
} // namespace android
diff --git a/services/audiopolicy/engine/common/Android.bp b/services/audiopolicy/engine/common/Android.bp
index a93c816..d8aac37 100644
--- a/services/audiopolicy/engine/common/Android.bp
+++ b/services/audiopolicy/engine/common/Android.bp
@@ -32,10 +32,10 @@
name: "libaudiopolicyengine_common",
srcs: [
"src/EngineBase.cpp",
+ "src/LastRemovableMediaDevices.cpp",
"src/ProductStrategy.cpp",
"src/VolumeCurve.cpp",
"src/VolumeGroup.cpp",
- "src/LastRemovableMediaDevices.cpp",
],
cflags: [
"-Wall",
@@ -43,10 +43,10 @@
"-Wextra",
],
header_libs: [
- "libbase_headers",
"libaudiopolicycommon",
"libaudiopolicyengine_common_headers",
"libaudiopolicyengine_interface_headers",
+ "libbase_headers",
],
export_header_lib_headers: [
"libaudiopolicyengine_common_headers",
@@ -59,7 +59,10 @@
"libaudiopolicycomponents",
],
whole_static_libs: [
- "server_configurable_flags",
"com.android.media.audio-aconfig-cc",
+ "server_configurable_flags",
+ ],
+ defaults: [
+ "aconfig_lib_cc_static_link.defaults",
],
}
diff --git a/services/audiopolicy/engine/common/src/VolumeCurve.cpp b/services/audiopolicy/engine/common/src/VolumeCurve.cpp
index fccbc60..9411155 100644
--- a/services/audiopolicy/engine/common/src/VolumeCurve.cpp
+++ b/services/audiopolicy/engine/common/src/VolumeCurve.cpp
@@ -69,7 +69,7 @@
return mCurvePoints[nbCurvePoints - 1].mAttenuationInMb / 100.0f;
}
if (indexInUiPosition == 0) {
- if (indexInUiPosition != mCurvePoints[0].mIndex) {
+ if ((size_t)volIdx != mCurvePoints[0].mIndex) {
return VOLUME_MIN_DB; // out of bounds
}
return mCurvePoints[0].mAttenuationInMb / 100.0f;
diff --git a/services/audiopolicy/engine/config/Android.bp b/services/audiopolicy/engine/config/Android.bp
index 0864e6a..ab2c134 100644
--- a/services/audiopolicy/engine/config/Android.bp
+++ b/services/audiopolicy/engine/config/Android.bp
@@ -33,7 +33,7 @@
],
header_libs: [
"libaudio_system_headers",
- "libmedia_headers",
"libaudioclient_headers",
+ "libmedia_headers",
],
}
diff --git a/services/audiopolicy/engine/config/tests/Android.bp b/services/audiopolicy/engine/config/tests/Android.bp
index 8c7b7db..2df51d0 100644
--- a/services/audiopolicy/engine/config/tests/Android.bp
+++ b/services/audiopolicy/engine/config/tests/Android.bp
@@ -28,8 +28,8 @@
data: [":audiopolicy_engineconfig_files"],
cflags: [
- "-Werror",
"-Wall",
+ "-Werror",
],
test_suites: ["device-tests"],
diff --git a/services/audiopolicy/engineconfigurable/Android.bp b/services/audiopolicy/engineconfigurable/Android.bp
index d59ab5a..a0a4bdf 100644
--- a/services/audiopolicy/engineconfigurable/Android.bp
+++ b/services/audiopolicy/engineconfigurable/Android.bp
@@ -20,8 +20,8 @@
srcs: [
"src/Engine.cpp",
"src/EngineInstance.cpp",
- "src/Stream.cpp",
"src/InputSource.cpp",
+ "src/Stream.cpp",
],
cflags: [
"-Wall",
@@ -30,10 +30,10 @@
],
local_include_dirs: ["include"],
header_libs: [
- "libbase_headers",
"libaudiopolicycommon",
"libaudiopolicyengine_interface_headers",
"libaudiopolicyengineconfigurable_interface_headers",
+ "libbase_headers",
],
static_libs: [
"libaudiopolicyengine_common",
@@ -44,14 +44,17 @@
shared_libs: [
"libaudio_aidl_conversion_common_cpp",
"libaudiofoundation",
+ "libaudiopolicy",
"libaudiopolicycomponents",
"libbase",
- "liblog",
"libcutils",
- "libutils",
+ "liblog",
"libmedia_helper",
- "libaudiopolicy",
"libparameter",
+ "libutils",
"libxml2",
],
+ defaults: [
+ "aconfig_lib_cc_static_link.defaults",
+ ],
}
diff --git a/services/audiopolicy/engineconfigurable/config/example/automotive/Android.bp b/services/audiopolicy/engineconfigurable/config/example/automotive/Android.bp
index fb1a71c..7e429ef 100644
--- a/services/audiopolicy/engineconfigurable/config/example/automotive/Android.bp
+++ b/services/audiopolicy/engineconfigurable/config/example/automotive/Android.bp
@@ -37,8 +37,8 @@
vendor: true,
src: ":audio_policy_engine_configuration",
required: [
- ":audio_policy_engine_criterion_types.xml",
":audio_policy_engine_criteria.xml",
+ ":audio_policy_engine_criterion_types.xml",
":audio_policy_engine_product_strategies.xml",
":audio_policy_engine_volumes.xml",
],
@@ -69,19 +69,19 @@
name: "audio_policy_engine_criterion_types",
defaults: ["buildpolicycriteriontypesrule"],
srcs: [
- ":audio_policy_configuration_top_file",
":audio_policy_configuration_files",
+ ":audio_policy_configuration_top_file",
],
}
filegroup {
name: "audio_policy_configuration_files",
srcs: [
- ":r_submix_audio_policy_configuration",
- ":default_volume_tables",
":audio_policy_volumes",
- ":surround_sound_configuration_5_0",
+ ":default_volume_tables",
":primary_audio_policy_configuration",
+ ":r_submix_audio_policy_configuration",
+ ":surround_sound_configuration_5_0",
],
}
@@ -104,9 +104,9 @@
name: "audio_policy_engine_configuration_files",
srcs: [
":audio_policy_engine_configuration",
- "audio_policy_engine_product_strategies.xml",
- ":audio_policy_engine_volumes",
- ":audio_policy_engine_criterion_types",
":audio_policy_engine_criteria",
+ ":audio_policy_engine_criterion_types",
+ ":audio_policy_engine_volumes",
+ "audio_policy_engine_product_strategies.xml",
],
}
diff --git a/services/audiopolicy/engineconfigurable/config/example/caremu/Android.bp b/services/audiopolicy/engineconfigurable/config/example/caremu/Android.bp
index b9abb54..12a554d 100644
--- a/services/audiopolicy/engineconfigurable/config/example/caremu/Android.bp
+++ b/services/audiopolicy/engineconfigurable/config/example/caremu/Android.bp
@@ -18,8 +18,8 @@
soong_namespace {
imports: [
- "frameworks/av/services/audiopolicy/engineconfigurable/config/example/automotive",
"frameworks/av/services/audiopolicy/config",
+ "frameworks/av/services/audiopolicy/engineconfigurable/config/example/automotive",
],
}
@@ -38,10 +38,10 @@
vendor: true,
src: ":audio_policy_engine_configuration",
required: [
- "audio_policy_engine_criterion_types.xml",
- "audio_policy_engine_criteria.xml",
- "audio_policy_engine_product_strategies.xml",
":audio_policy_engine_volumes.xml",
+ "audio_policy_engine_criteria.xml",
+ "audio_policy_engine_criterion_types.xml",
+ "audio_policy_engine_product_strategies.xml",
],
}
@@ -64,19 +64,19 @@
name: "audio_policy_engine_criterion_types",
defaults: ["buildpolicycriteriontypesrule"],
srcs: [
- ":audio_policy_configuration_top_file",
":audio_policy_configuration_files",
+ ":audio_policy_configuration_top_file",
],
}
filegroup {
name: "audio_policy_configuration_files",
srcs: [
- ":r_submix_audio_policy_configuration",
- ":default_volume_tables",
":audio_policy_volumes",
- ":surround_sound_configuration_5_0",
+ ":default_volume_tables",
":primary_audio_policy_configuration",
+ ":r_submix_audio_policy_configuration",
+ ":surround_sound_configuration_5_0",
],
}
@@ -89,9 +89,9 @@
name: "audio_policy_engine_configuration_files",
srcs: [
":audio_policy_engine_configuration",
- "audio_policy_engine_product_strategies.xml",
- ":audio_policy_engine_volumes",
- ":audio_policy_engine_criterion_types",
":audio_policy_engine_criteria",
+ ":audio_policy_engine_criterion_types",
+ ":audio_policy_engine_volumes",
+ "audio_policy_engine_product_strategies.xml",
],
}
diff --git a/services/audiopolicy/engineconfigurable/config/example/phone/Android.bp b/services/audiopolicy/engineconfigurable/config/example/phone/Android.bp
index 67a6128..b0a4dfd 100644
--- a/services/audiopolicy/engineconfigurable/config/example/phone/Android.bp
+++ b/services/audiopolicy/engineconfigurable/config/example/phone/Android.bp
@@ -37,8 +37,8 @@
vendor: true,
src: ":audio_policy_engine_configuration",
required: [
- ":audio_policy_engine_criterion_types.xml",
":audio_policy_engine_criteria.xml",
+ ":audio_policy_engine_criterion_types.xml",
":audio_policy_engine_product_strategies.xml",
":audio_policy_engine_volumes.xml",
],
@@ -75,19 +75,19 @@
name: "audio_policy_engine_criterion_types",
defaults: ["buildpolicycriteriontypesrule"],
srcs: [
- ":audio_policy_configuration_top_file",
":audio_policy_configuration_files",
+ ":audio_policy_configuration_top_file",
],
}
filegroup {
name: "audio_policy_configuration_files",
srcs: [
- ":r_submix_audio_policy_configuration",
- ":default_volume_tables",
":audio_policy_volumes",
- ":surround_sound_configuration_5_0",
+ ":default_volume_tables",
":primary_audio_policy_configuration",
+ ":r_submix_audio_policy_configuration",
+ ":surround_sound_configuration_5_0",
],
}
@@ -115,10 +115,10 @@
name: "audio_policy_engine_configuration_files",
srcs: [
":audio_policy_engine_configuration",
- "audio_policy_engine_product_strategies.xml",
- ":audio_policy_engine_stream_volumes",
- ":audio_policy_engine_default_stream_volumes",
- ":audio_policy_engine_criterion_types",
":audio_policy_engine_criteria",
+ ":audio_policy_engine_criterion_types",
+ ":audio_policy_engine_default_stream_volumes",
+ ":audio_policy_engine_stream_volumes",
+ "audio_policy_engine_product_strategies.xml",
],
}
diff --git a/services/audiopolicy/engineconfigurable/parameter-framework/examples/Car/Android.bp b/services/audiopolicy/engineconfigurable/parameter-framework/examples/Car/Android.bp
index 38451f2..42585e9 100644
--- a/services/audiopolicy/engineconfigurable/parameter-framework/examples/Car/Android.bp
+++ b/services/audiopolicy/engineconfigurable/parameter-framework/examples/Car/Android.bp
@@ -18,8 +18,8 @@
soong_namespace {
imports: [
- "frameworks/av/services/audiopolicy/engineconfigurable/config/example/automotive",
"frameworks/av/services/audiopolicy/config",
+ "frameworks/av/services/audiopolicy/engineconfigurable/config/example/automotive",
],
}
@@ -63,10 +63,10 @@
src: ":domaingeneratorpolicyrule_gen",
sub_dir: "parameter-framework/Settings/Policy",
required: [
- "ProductStrategies.xml",
"PolicyClass.xml",
- "PolicySubsystem.xml",
"PolicySubsystem-CommonTypes.xml",
+ "PolicySubsystem.xml",
+ "ProductStrategies.xml",
],
}
@@ -75,9 +75,9 @@
enabled: false, // TODO: This module fails to build
defaults: ["domaingeneratorpolicyrule"],
srcs: [
- ":audio_policy_pfw_toplevel",
- ":audio_policy_pfw_structure_files",
":audio_policy_engine_criterion_types",
+ ":audio_policy_pfw_structure_files",
+ ":audio_policy_pfw_toplevel",
":edd_files",
],
}
diff --git a/services/audiopolicy/engineconfigurable/parameter-framework/examples/CarEmu/Android.bp b/services/audiopolicy/engineconfigurable/parameter-framework/examples/CarEmu/Android.bp
index eae6ae2..efde298 100644
--- a/services/audiopolicy/engineconfigurable/parameter-framework/examples/CarEmu/Android.bp
+++ b/services/audiopolicy/engineconfigurable/parameter-framework/examples/CarEmu/Android.bp
@@ -18,9 +18,9 @@
soong_namespace {
imports: [
+ "frameworks/av/services/audiopolicy/config",
"frameworks/av/services/audiopolicy/engineconfigurable/config/example/caremu",
"frameworks/av/services/audiopolicy/engineconfigurable/parameter-framework/examples/Car",
- "frameworks/av/services/audiopolicy/config",
],
}
@@ -64,10 +64,10 @@
src: ":domaingeneratorpolicyrule_gen",
sub_dir: "parameter-framework/Settings/Policy",
required: [
- "ProductStrategies.xml",
"PolicyClass.xml",
- "PolicySubsystem.xml",
"PolicySubsystem-CommonTypes.xml",
+ "PolicySubsystem.xml",
+ "ProductStrategies.xml",
],
}
@@ -76,9 +76,9 @@
enabled: false, // TODO: This module fails to build
defaults: ["domaingeneratorpolicyrule"],
srcs: [
- ":audio_policy_pfw_toplevel",
- ":audio_policy_pfw_structure_files",
":audio_policy_engine_criterion_types",
+ ":audio_policy_pfw_structure_files",
+ ":audio_policy_pfw_toplevel",
":edd_files",
],
}
diff --git a/services/audiopolicy/engineconfigurable/parameter-framework/examples/Phone/Android.bp b/services/audiopolicy/engineconfigurable/parameter-framework/examples/Phone/Android.bp
index 4e8654b..474094e 100644
--- a/services/audiopolicy/engineconfigurable/parameter-framework/examples/Phone/Android.bp
+++ b/services/audiopolicy/engineconfigurable/parameter-framework/examples/Phone/Android.bp
@@ -18,8 +18,8 @@
soong_namespace {
imports: [
- "frameworks/av/services/audiopolicy/engineconfigurable/config/example/phone",
"frameworks/av/services/audiopolicy/config",
+ "frameworks/av/services/audiopolicy/engineconfigurable/config/example/phone",
],
}
@@ -63,10 +63,10 @@
src: ":domaingeneratorpolicyrule_gen",
sub_dir: "parameter-framework/Settings/Policy",
required: [
- "ProductStrategies.xml",
"PolicyClass.xml",
- "PolicySubsystem.xml",
"PolicySubsystem-CommonTypes.xml",
+ "PolicySubsystem.xml",
+ "ProductStrategies.xml",
],
}
@@ -75,9 +75,9 @@
enabled: false, // TODO: This module fails to build
defaults: ["domaingeneratorpolicyrule"],
srcs: [
- ":audio_policy_pfw_toplevel",
- ":audio_policy_pfw_structure_files",
":audio_policy_engine_criterion_types",
+ ":audio_policy_pfw_structure_files",
+ ":audio_policy_pfw_toplevel",
":edd_files",
],
}
@@ -87,16 +87,16 @@
srcs: [
":device_for_input_source.pfw",
":volumes.pfw",
- "Settings/device_for_product_strategy_media.pfw",
"Settings/device_for_product_strategy_accessibility.pfw",
"Settings/device_for_product_strategy_dtmf.pfw",
"Settings/device_for_product_strategy_enforced_audible.pfw",
+ "Settings/device_for_product_strategy_media.pfw",
+ "Settings/device_for_product_strategy_patch.pfw",
"Settings/device_for_product_strategy_phone.pfw",
+ "Settings/device_for_product_strategy_rerouting.pfw",
"Settings/device_for_product_strategy_sonification.pfw",
"Settings/device_for_product_strategy_sonification_respectful.pfw",
"Settings/device_for_product_strategy_transmitted_through_speaker.pfw",
- "Settings/device_for_product_strategy_rerouting.pfw",
- "Settings/device_for_product_strategy_patch.pfw",
],
}
diff --git a/services/audiopolicy/engineconfigurable/parameter-framework/examples/SettingsNoInput/Android.bp b/services/audiopolicy/engineconfigurable/parameter-framework/examples/SettingsNoInput/Android.bp
index e279a8f..aba9767 100644
--- a/services/audiopolicy/engineconfigurable/parameter-framework/examples/SettingsNoInput/Android.bp
+++ b/services/audiopolicy/engineconfigurable/parameter-framework/examples/SettingsNoInput/Android.bp
@@ -18,8 +18,8 @@
soong_namespace {
imports: [
- "frameworks/av/services/audiopolicy/engineconfigurable/config/example/phone",
"frameworks/av/services/audiopolicy/config",
+ "frameworks/av/services/audiopolicy/engineconfigurable/config/example/phone",
],
}
@@ -42,8 +42,8 @@
sub_dir: "parameter-framework/Settings/Policy",
required: [
"PolicyClass.xml",
- "PolicySubsystem.xml",
"PolicySubsystem-CommonTypes.xml",
+ "PolicySubsystem.xml",
],
}
@@ -52,9 +52,9 @@
enabled: false, // TODO: This module fails to build
defaults: ["domaingeneratorpolicyrule"],
srcs: [
- ":audio_policy_pfw_toplevel",
- ":audio_policy_pfw_structure_files",
":audio_policy_engine_criterion_types",
+ ":audio_policy_pfw_structure_files",
+ ":audio_policy_pfw_toplevel",
":edd_files",
],
}
@@ -76,8 +76,8 @@
filegroup {
name: "edd_files",
srcs: [
- "device_for_input_source.pfw",
":volumes.pfw",
+ "device_for_input_source.pfw",
],
}
diff --git a/services/audiopolicy/engineconfigurable/parameter-framework/examples/SettingsNoOutput/Android.bp b/services/audiopolicy/engineconfigurable/parameter-framework/examples/SettingsNoOutput/Android.bp
index 47b8b54..77677a1 100644
--- a/services/audiopolicy/engineconfigurable/parameter-framework/examples/SettingsNoOutput/Android.bp
+++ b/services/audiopolicy/engineconfigurable/parameter-framework/examples/SettingsNoOutput/Android.bp
@@ -18,8 +18,8 @@
soong_namespace {
imports: [
- "frameworks/av/services/audiopolicy/engineconfigurable/config/example/phone",
"frameworks/av/services/audiopolicy/config",
+ "frameworks/av/services/audiopolicy/engineconfigurable/config/example/phone",
],
}
@@ -42,8 +42,8 @@
sub_dir: "parameter-framework/Settings/Policy",
required: [
"PolicyClass.xml",
- "PolicySubsystem.xml",
"PolicySubsystem-CommonTypes.xml",
+ "PolicySubsystem.xml",
],
}
@@ -52,9 +52,9 @@
enabled: false, // TODO: This module fails to build
defaults: ["domaingeneratorpolicyrule"],
srcs: [
- ":audio_policy_pfw_toplevel",
- ":audio_policy_pfw_structure_files",
":audio_policy_engine_criterion_types",
+ ":audio_policy_pfw_structure_files",
+ ":audio_policy_pfw_toplevel",
":edd_files",
],
}
@@ -76,9 +76,9 @@
filegroup {
name: "edd_files",
srcs: [
- "device_for_strategies.pfw",
- ":volumes.pfw",
":device_for_input_source.pfw",
+ ":volumes.pfw",
+ "device_for_strategies.pfw",
],
}
diff --git a/services/audiopolicy/engineconfigurable/parameter-framework/plugin/Android.bp b/services/audiopolicy/engineconfigurable/parameter-framework/plugin/Android.bp
index aa2163e..3dc2229 100644
--- a/services/audiopolicy/engineconfigurable/parameter-framework/plugin/Android.bp
+++ b/services/audiopolicy/engineconfigurable/parameter-framework/plugin/Android.bp
@@ -11,11 +11,11 @@
cc_library_shared {
name: "libpolicy-subsystem",
srcs: [
- "PolicySubsystemBuilder.cpp",
- "PolicySubsystem.cpp",
"InputSource.cpp",
- "Stream.cpp",
+ "PolicySubsystem.cpp",
+ "PolicySubsystemBuilder.cpp",
"ProductStrategy.cpp",
+ "Stream.cpp",
],
cflags: [
"-Wall",
@@ -25,11 +25,11 @@
"-fvisibility=hidden",
],
header_libs: [
- "libbase_headers",
- "libaudiopolicycommon",
"libaudioclient_headers",
+ "libaudiopolicycommon",
"libaudiopolicyengine_interface_headers",
"libaudiopolicyengineconfigurable_interface_headers",
+ "libbase_headers",
],
static_libs: [
"libaudiopolicyengine_common",
@@ -39,8 +39,8 @@
"libaudiopolicycomponents",
"libaudiopolicyengineconfigurable",
"liblog",
- "libutils",
"libmedia_helper",
"libparameter",
+ "libutils",
],
}
diff --git a/services/audiopolicy/engineconfigurable/tools/Android.bp b/services/audiopolicy/engineconfigurable/tools/Android.bp
index 2f77372..d1fb2fb 100644
--- a/services/audiopolicy/engineconfigurable/tools/Android.bp
+++ b/services/audiopolicy/engineconfigurable/tools/Android.bp
@@ -67,8 +67,8 @@
],
libs: [
"EddParser.py",
- "hostConfig.py",
"PFWScriptGenerator.py",
+ "hostConfig.py",
],
required: [
"domainGeneratorConnector",
@@ -78,8 +78,8 @@
genrule_defaults {
name: "domaingeneratorpolicyrule",
tools: [
- "domainGeneratorPolicy",
"domainGeneratorConnector",
+ "domainGeneratorPolicy",
],
cmd: "mkdir -p $(genDir)/Structure/Policy && " +
"cp $(locations :audio_policy_pfw_structure_files) $(genDir)/Structure/Policy && " +
diff --git a/services/audiopolicy/engineconfigurable/wrapper/Android.bp b/services/audiopolicy/engineconfigurable/wrapper/Android.bp
index a897880..78d5fa3 100644
--- a/services/audiopolicy/engineconfigurable/wrapper/Android.bp
+++ b/services/audiopolicy/engineconfigurable/wrapper/Android.bp
@@ -18,14 +18,14 @@
"-Wextra",
],
header_libs: [
- "libbase_headers",
- "libaudiopolicycommon",
"libaudiofoundation_headers",
+ "libaudiopolicycommon",
+ "libbase_headers",
],
shared_libs: [
"liblog",
- "libutils",
"libmedia_helper",
"libparameter",
+ "libutils",
],
}
diff --git a/services/audiopolicy/enginedefault/Android.bp b/services/audiopolicy/enginedefault/Android.bp
index 98adff0..799b8d9 100644
--- a/services/audiopolicy/enginedefault/Android.bp
+++ b/services/audiopolicy/enginedefault/Android.bp
@@ -15,15 +15,15 @@
"src/EngineInstance.cpp",
],
cflags: [
- "-fvisibility=hidden",
"-Wall",
"-Werror",
"-Wextra",
+ "-fvisibility=hidden",
],
header_libs: [
- "libbase_headers",
"libaudiopolicycommon",
"libaudiopolicyengine_interface_headers",
+ "libbase_headers",
],
static_libs: [
"libaudiopolicyengine_common",
@@ -32,13 +32,16 @@
shared_libs: [
"libaudio_aidl_conversion_common_cpp",
"libaudiofoundation",
+ "libaudiopolicy",
"libaudiopolicycomponents",
"libbase",
- "liblog",
"libcutils",
- "libutils",
+ "liblog",
"libmedia_helper",
- "libaudiopolicy",
+ "libutils",
"libxml2",
],
+ defaults: [
+ "aconfig_lib_cc_static_link.defaults",
+ ],
}
diff --git a/services/audiopolicy/enginedefault/config/example/Android.bp b/services/audiopolicy/enginedefault/config/example/Android.bp
index f305c39..31f9a46 100644
--- a/services/audiopolicy/enginedefault/config/example/Android.bp
+++ b/services/audiopolicy/enginedefault/config/example/Android.bp
@@ -34,9 +34,9 @@
vendor: true,
src: "phone/audio_policy_engine_configuration.xml",
required: [
- ":audio_policy_engine_stream_volumes.xml",
":audio_policy_engine_default_stream_volumes.xml",
":audio_policy_engine_product_strategies.xml",
+ ":audio_policy_engine_stream_volumes.xml",
],
}
diff --git a/services/audiopolicy/fuzzer/Android.bp b/services/audiopolicy/fuzzer/Android.bp
index fca02e4..8cee613 100644
--- a/services/audiopolicy/fuzzer/Android.bp
+++ b/services/audiopolicy/fuzzer/Android.bp
@@ -37,22 +37,22 @@
shared_libs: [
"android.hardware.audio.common-util",
"capture_state_listener-aidl-cpp",
+ "framework-permission-aidl-cpp",
"libaudioclient",
"libaudiofoundation",
+ "libaudiopolicy",
"libaudiopolicycomponents",
+ "libaudiopolicymanagerdefault",
"libbase",
+ "libbinder",
"libcutils",
- "libhidlbase",
"libdl",
+ "libhidlbase",
"liblog",
"libmedia_helper",
"libmediametrics",
"libutils",
"libxml2",
- "libbinder",
- "libaudiopolicy",
- "libaudiopolicymanagerdefault",
- "framework-permission-aidl-cpp",
],
static_libs: [
"android.hardware.audio.common@7.0-enums",
diff --git a/services/audiopolicy/fuzzer/aidl/Android.bp b/services/audiopolicy/fuzzer/aidl/Android.bp
index 8b37d36..2c85955 100644
--- a/services/audiopolicy/fuzzer/aidl/Android.bp
+++ b/services/audiopolicy/fuzzer/aidl/Android.bp
@@ -26,30 +26,31 @@
"audiopolicy-aidl-cpp",
"audiopolicy-types-aidl-cpp",
"framework-permission-aidl-cpp",
+ "libactivitymanager_aidl",
+ "libaudioclient",
+ "libaudioflinger",
+ "libaudiohal",
"libaudiopolicy",
"libaudiopolicymanagerdefault",
- "libactivitymanager_aidl",
- "libaudiohal",
"libaudiopolicyservice",
- "libaudioflinger",
- "libaudioclient",
"libaudioprocessing",
"libhidlbase",
"liblog",
"libmediautils",
- "libnblog",
"libnbaio",
+ "libnblog",
"libpowermanager",
"libvibrator",
"packagemanager_aidl-cpp",
],
static_libs: [
+ "libaudiomockhal",
"libfakeservicemanager",
"libmediaplayerservice",
],
header_libs: [
- "libaudiohal_headers",
"libaudioflinger_headers",
+ "libaudiohal_headers",
"libaudiopolicymanager_interface_headers",
"libbinder_headers",
"libmedia_headers",
@@ -73,6 +74,9 @@
srcs: ["audiopolicy_aidl_fuzzer.cpp"],
defaults: [
"audiopolicy_aidl_fuzzer_defaults",
+ "latest_android_hardware_audio_core_ndk_shared",
+ "latest_android_hardware_audio_core_sounddose_ndk_shared",
+ "latest_android_hardware_audio_effect_ndk_shared",
"service_fuzzer_defaults",
],
}
diff --git a/services/audiopolicy/fuzzer/aidl/audiopolicy_aidl_fuzzer.cpp b/services/audiopolicy/fuzzer/aidl/audiopolicy_aidl_fuzzer.cpp
index ca79c49..f5e72f5 100644
--- a/services/audiopolicy/fuzzer/aidl/audiopolicy_aidl_fuzzer.cpp
+++ b/services/audiopolicy/fuzzer/aidl/audiopolicy_aidl_fuzzer.cpp
@@ -18,8 +18,12 @@
#include <AudioFlinger.h>
#include <android-base/logging.h>
#include <android/binder_interface_utils.h>
+#include <android/binder_manager.h>
#include <android/binder_process.h>
#include <android/media/IAudioPolicyService.h>
+#include <core-mock/ConfigMock.h>
+#include <core-mock/ModuleMock.h>
+#include <effect-mock/FactoryMock.h>
#include <fakeservicemanager/FakeServiceManager.h>
#include <fuzzbinder/libbinder_driver.h>
#include <fuzzbinder/random_binder.h>
@@ -34,6 +38,7 @@
[[clang::no_destroy]] static std::once_flag gSmOnce;
sp<FakeServiceManager> gFakeServiceManager;
+sp<AudioPolicyService> gAudioPolicyService;
bool addService(const String16& serviceName, const sp<FakeServiceManager>& fakeServiceManager,
FuzzedDataProvider& fdp) {
@@ -45,42 +50,58 @@
return true;
}
+extern "C" int LLVMFuzzerInitialize(int* /*argc*/, char*** /*argv*/) {
+ /* Create a FakeServiceManager instance and add required services */
+ gFakeServiceManager = sp<FakeServiceManager>::make();
+ setDefaultServiceManager(gFakeServiceManager);
+
+ auto configService = ndk::SharedRefBase::make<ConfigMock>();
+ CHECK_EQ(NO_ERROR, AServiceManager_addService(configService.get()->asBinder().get(),
+ "android.hardware.audio.core.IConfig/default"));
+
+ auto factoryService = ndk::SharedRefBase::make<FactoryMock>();
+ CHECK_EQ(NO_ERROR,
+ AServiceManager_addService(factoryService.get()->asBinder().get(),
+ "android.hardware.audio.effect.IFactory/default"));
+
+ auto moduleService = ndk::SharedRefBase::make<ModuleMock>();
+ CHECK_EQ(NO_ERROR, AServiceManager_addService(moduleService.get()->asBinder().get(),
+ "android.hardware.audio.core.IModule/default"));
+
+ // Disable creating thread pool for fuzzer instance of audio flinger and audio policy services
+ AudioSystem::disableThreadPool();
+
+ return 0;
+}
+
extern "C" int LLVMFuzzerTestOneInput(const uint8_t* data, size_t size) {
FuzzedDataProvider fdp(data, size);
- std::call_once(gSmOnce, [&] {
- /* Create a FakeServiceManager instance and add required services */
- gFakeServiceManager = sp<FakeServiceManager>::make();
- setDefaultServiceManager(gFakeServiceManager);
- });
- gFakeServiceManager->clear();
-
- for (const char* service :
- {"activity", "sensor_privacy", "permission", "scheduling_policy",
- "android.hardware.audio.core.IConfig", "batterystats", "media.metrics"}) {
+ for (const char* service : {"activity", "sensor_privacy", "permission", "scheduling_policy",
+ "batterystats", "media.metrics"}) {
if (!addService(String16(service), gFakeServiceManager, fdp)) {
return 0;
}
}
- const auto audioFlinger = sp<AudioFlinger>::make();
- const auto afAdapter = sp<AudioFlingerServerAdapter>::make(audioFlinger);
+ // TODO(330882064) : Initialise Audio Flinger and Audio Policy services every time
+ std::call_once(gSmOnce, [&] {
+ const auto audioFlinger = sp<AudioFlinger>::make();
+ const auto audioFlingerServerAdapter = sp<AudioFlingerServerAdapter>::make(audioFlinger);
+ CHECK_EQ(NO_ERROR,
+ gFakeServiceManager->addService(String16(IAudioFlinger::DEFAULT_SERVICE_NAME),
+ IInterface::asBinder(audioFlingerServerAdapter),
+ false /* allowIsolated */,
+ IServiceManager::DUMP_FLAG_PRIORITY_DEFAULT));
- CHECK_EQ(NO_ERROR,
- gFakeServiceManager->addService(
- String16(IAudioFlinger::DEFAULT_SERVICE_NAME), IInterface::asBinder(afAdapter),
- false /* allowIsolated */, IServiceManager::DUMP_FLAG_PRIORITY_DEFAULT));
+ gAudioPolicyService = sp<AudioPolicyService>::make();
+ CHECK_EQ(NO_ERROR,
+ gFakeServiceManager->addService(String16("media.audio_policy"),
+ gAudioPolicyService, false /* allowIsolated */,
+ IServiceManager::DUMP_FLAG_PRIORITY_DEFAULT));
+ });
- AudioSystem::get_audio_flinger_for_fuzzer();
- const auto audioPolicyService = sp<AudioPolicyService>::make();
-
- CHECK_EQ(NO_ERROR,
- gFakeServiceManager->addService(String16("media.audio_policy"), audioPolicyService,
- false /* allowIsolated */,
- IServiceManager::DUMP_FLAG_PRIORITY_DEFAULT));
-
- fuzzService(media::IAudioPolicyService::asBinder(audioPolicyService),
- FuzzedDataProvider(data, size));
+ fuzzService(media::IAudioPolicyService::asBinder(gAudioPolicyService), std::move(fdp));
return 0;
}
diff --git a/services/audiopolicy/fuzzer/audiopolicy_fuzzer.cpp b/services/audiopolicy/fuzzer/audiopolicy_fuzzer.cpp
index 58fcb5c..6416a47 100644
--- a/services/audiopolicy/fuzzer/audiopolicy_fuzzer.cpp
+++ b/services/audiopolicy/fuzzer/audiopolicy_fuzzer.cpp
@@ -207,7 +207,8 @@
audio_port_handle_t *selectedDeviceId, audio_format_t format,
audio_channel_mask_t channelMask, int sampleRate,
audio_input_flags_t flags = AUDIO_INPUT_FLAG_NONE,
- audio_port_handle_t *portId = nullptr);
+ audio_port_handle_t *portId = nullptr,
+ uint32_t *virtualDeviceId = nullptr);
bool findDevicePort(audio_port_role_t role, audio_devices_t deviceType,
const std::string &address, audio_port_v7 *foundPort);
static audio_port_handle_t getDeviceIdFromPatch(const struct audio_patch *patch);
@@ -283,7 +284,7 @@
bool AudioPolicyManagerFuzzer::getInputForAttr(
const audio_attributes_t &attr, audio_unique_id_t riid, audio_port_handle_t *selectedDeviceId,
audio_format_t format, audio_channel_mask_t channelMask, int sampleRate,
- audio_input_flags_t flags, audio_port_handle_t *portId) {
+ audio_input_flags_t flags, audio_port_handle_t *portId, uint32_t *virtualDeviceId) {
audio_io_handle_t input = AUDIO_IO_HANDLE_NONE;
audio_config_base_t config = AUDIO_CONFIG_BASE_INITIALIZER;
config.sample_rate = sampleRate;
@@ -298,7 +299,7 @@
attributionSource.uid = 0;
attributionSource.token = sp<BBinder>::make();
if (mManager->getInputForAttr(&attr, &input, riid, AUDIO_SESSION_NONE, attributionSource,
- &config, flags, selectedDeviceId, &inputType, portId) != OK) {
+ &config, flags, selectedDeviceId, &inputType, portId, virtualDeviceId) != OK) {
return false;
}
if (*portId == AUDIO_PORT_HANDLE_NONE || input == AUDIO_IO_HANDLE_NONE) {
diff --git a/services/audiopolicy/managerdefault/Android.bp b/services/audiopolicy/managerdefault/Android.bp
index 2f46d48..e6f6374 100644
--- a/services/audiopolicy/managerdefault/Android.bp
+++ b/services/audiopolicy/managerdefault/Android.bp
@@ -25,28 +25,28 @@
shared_libs: [
"com.android.media.audio-aconfig-cc",
"libaudiofoundation",
+ "libaudiopolicy",
"libaudiopolicycomponents",
+ "libbinder",
"libcutils",
"libdl",
- "libutils",
+ "libhidlbase",
"liblog",
- "libaudiopolicy",
"libmedia_helper",
"libmediametrics",
- "libbinder",
- "libhidlbase",
+ "libutils",
"libxml2",
// The default audio policy engine is always present in the system image.
// libaudiopolicyengineconfigurable can be built in addition by specifying
// a dependency on it in the device makefile. There will be no build time
// conflict with libaudiopolicyenginedefault.
- "libaudiopolicyenginedefault",
- "framework-permission-aidl-cpp",
- "libaudioclient_aidl_conversion",
"audioclient-types-aidl-cpp",
// Flag support
"android.media.audiopolicy-aconfig-cc",
"com.android.media.audioserver-aconfig-cc",
+ "framework-permission-aidl-cpp",
+ "libaudioclient_aidl_conversion",
+ "libaudiopolicyenginedefault",
],
header_libs: [
diff --git a/services/audiopolicy/managerdefault/AudioPolicyManager.cpp b/services/audiopolicy/managerdefault/AudioPolicyManager.cpp
index 2b0f95f..7f4be79 100644
--- a/services/audiopolicy/managerdefault/AudioPolicyManager.cpp
+++ b/services/audiopolicy/managerdefault/AudioPolicyManager.cpp
@@ -2137,7 +2137,14 @@
// sampling rate match
if (samplingRate > SAMPLE_RATE_HZ_DEFAULT) {
- currentMatchCriteria[4] = outputDesc->getSamplingRate();
+ int diff; // avoid unsigned integer overflow.
+ __builtin_sub_overflow(outputDesc->getSamplingRate(), samplingRate, &diff);
+
+ // prefer the closest output sampling rate greater than or equal to the target;
+ // if none exists, prefer the closest output sampling rate less than the target.
+ //
+ // the criterion is offset to keep the score non-negative.
+ currentMatchCriteria[4] = diff >= 0 ? -diff + 200'000'000 : diff + 100'000'000;
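+ // e.g. an output running exactly at the target rate gets the maximum score of 200'000'000.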
}
// performance flags match
@@ -2713,7 +2720,8 @@
audio_input_flags_t flags,
audio_port_handle_t *selectedDeviceId,
input_type_t *inputType,
- audio_port_handle_t *portId)
+ audio_port_handle_t *portId,
+ uint32_t *virtualDeviceId)
{
ALOGV("%s() source %d, sampling rate %d, format %#x, channel mask %#x, session %d, "
"flags %#x attributes=%s requested device ID %d",
@@ -2815,6 +2823,9 @@
} else {
*inputType = API_INPUT_MIX_EXT_POLICY_REROUTE;
}
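+ // Report the mix's virtual device id so callers can apply device-aware permission checks.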
+ if (virtualDeviceId) {
+ *virtualDeviceId = policyMix->mVirtualDeviceId;
+ }
} else {
if (explicitRoutingDevice != nullptr) {
device = explicitRoutingDevice;
@@ -2838,6 +2849,10 @@
// meaning it receives audio injected into the framework, so the recorder doesn't
// know about it and is therefore considered "legacy"
*inputType = API_INPUT_LEGACY;
+
+ if (virtualDeviceId) {
+ *virtualDeviceId = policyMix->mVirtualDeviceId;
+ }
} else if (audio_is_remote_submix_device(device->type())) {
*inputType = API_INPUT_MIX_CAPTURE;
} else if (device->type() == AUDIO_DEVICE_IN_TELEPHONY_RX) {
@@ -2869,6 +2884,11 @@
goto error;
}
+
+ if (policyMix != nullptr && virtualDeviceId != nullptr) {
+ *virtualDeviceId = policyMix->mVirtualDeviceId;
+ }
+
exit:
*selectedDeviceId = mAvailableInputDevices.contains(device) ?
@@ -3846,7 +3866,6 @@
status_t AudioPolicyManager::unregisterPolicyMixes(Vector<AudioMix> mixes)
{
ALOGV("unregisterPolicyMixes() num mixes %zu", mixes.size());
- status_t endResult = NO_ERROR;
status_t res = NO_ERROR;
bool checkOutputs = false;
sp<HwModule> rSubmixModule;
@@ -3859,7 +3878,6 @@
AUDIO_HARDWARE_MODULE_ID_REMOTE_SUBMIX);
if (rSubmixModule == 0) {
res = INVALID_OPERATION;
- endResult = INVALID_OPERATION;
continue;
}
}
@@ -3868,20 +3886,25 @@
if (mPolicyMixes.unregisterMix(mix) != NO_ERROR) {
res = INVALID_OPERATION;
- endResult = INVALID_OPERATION;
continue;
}
- for (auto device : {AUDIO_DEVICE_IN_REMOTE_SUBMIX, AUDIO_DEVICE_OUT_REMOTE_SUBMIX}) {
+ for (auto device: {AUDIO_DEVICE_IN_REMOTE_SUBMIX, AUDIO_DEVICE_OUT_REMOTE_SUBMIX}) {
if (getDeviceConnectionState(device, address.c_str()) ==
- AUDIO_POLICY_DEVICE_STATE_AVAILABLE) {
- res = setDeviceConnectionStateInt(device, AUDIO_POLICY_DEVICE_STATE_UNAVAILABLE,
- address.c_str(), "remote-submix",
- AUDIO_FORMAT_DEFAULT);
- if (res != OK) {
+ AUDIO_POLICY_DEVICE_STATE_AVAILABLE) {
+ status_t currentRes =
+ setDeviceConnectionStateInt(device,
+ AUDIO_POLICY_DEVICE_STATE_UNAVAILABLE,
+ address.c_str(),
+ "remote-submix",
+ AUDIO_FORMAT_DEFAULT);
+ if (!audio_flags::audio_mix_ownership()) {
+ res = currentRes;
+ }
+ if (currentRes != OK) {
ALOGE("Error making RemoteSubmix device unavailable for mix "
"with type %d, address %s", device, address.c_str());
- endResult = INVALID_OPERATION;
+ res = INVALID_OPERATION;
}
}
}
@@ -3891,24 +3914,16 @@
} else if ((mix.mRouteFlags & MIX_ROUTE_FLAG_RENDER) == MIX_ROUTE_FLAG_RENDER) {
if (mPolicyMixes.unregisterMix(mix) != NO_ERROR) {
res = INVALID_OPERATION;
- endResult = INVALID_OPERATION;
continue;
} else {
checkOutputs = true;
}
}
}
- if (audio_flags::audio_mix_ownership()) {
- res = endResult;
- if (res == NO_ERROR && checkOutputs) {
- checkForDeviceAndOutputChanges();
- updateCallAndOutputRouting();
- }
- } else {
- if (res == NO_ERROR && checkOutputs) {
- checkForDeviceAndOutputChanges();
- updateCallAndOutputRouting();
- }
+
+ if (res == NO_ERROR && checkOutputs) {
+ checkForDeviceAndOutputChanges();
+ updateCallAndOutputRouting();
}
return res;
}
@@ -3926,9 +3941,10 @@
policyMix->mCbFlags);
_aidl_return.back().mDeviceType = policyMix->mDeviceType;
_aidl_return.back().mToken = policyMix->mToken;
+ _aidl_return.back().mVirtualDeviceId = policyMix->mVirtualDeviceId;
}
- ALOGVV("%s() returning %zu registered mixes", __func__, _aidl_return->size());
+ ALOGVV("%s() returning %zu registered mixes", __func__, _aidl_return.size());
return OK;
}
@@ -7338,7 +7354,6 @@
DeviceVector devices;
for (const auto &productStrategy : mEngine->getOrderedProductStrategies()) {
StreamTypeVector streams = mEngine->getStreamTypesForProductStrategy(productStrategy);
- auto attr = mEngine->getAllAttributesForProductStrategy(productStrategy).front();
auto hasStreamActive = [&](auto stream) {
return hasStream(streams, stream) && isStreamActive(stream, 0);
};
@@ -7363,6 +7378,7 @@
mOutputs.isStrategyActiveOnSameModule(productStrategy, outputDesc))) {
// Retrieval of devices for voice DL is done on primary output profile, cannot
// check the route (would force modifying configuration file for this profile)
+ auto attr = mEngine->getAllAttributesForProductStrategy(productStrategy).front();
devices = mEngine->getOutputDevicesForAttributes(attr, nullptr, fromCache);
break;
}
@@ -7878,10 +7894,18 @@
float AudioPolicyManager::computeVolume(IVolumeCurves &curves,
VolumeSource volumeSource,
int index,
- const DeviceTypeSet& deviceTypes)
+ const DeviceTypeSet& deviceTypes,
+ bool computeInternalInteraction)
{
float volumeDb = curves.volIndexToDb(Volume::getDeviceCategory(deviceTypes), index);
+ ALOGV("%s volume source %d, index %d, devices %s, compute internal %d", __func__,
+ volumeSource, index, dumpDeviceTypes(deviceTypes).c_str(), computeInternalInteraction);
+
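+ // Nested calls below pass computeInternalInteraction = false, which stops the recursion here.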
+ if (!computeInternalInteraction) {
+ return volumeDb;
+ }
+
// handle the case of accessibility active while a ringtone is playing: if the ringtone is much
// louder than the accessibility prompt, the prompt cannot be heard, thus masking the touch
// exploration of the dialer UI. In this situation, bring the accessibility volume closer to
@@ -7891,14 +7915,11 @@
const auto musicVolumeSrc = toVolumeSource(AUDIO_STREAM_MUSIC, false);
const auto alarmVolumeSrc = toVolumeSource(AUDIO_STREAM_ALARM, false);
const auto a11yVolumeSrc = toVolumeSource(AUDIO_STREAM_ACCESSIBILITY, false);
- // Verify that the current volume source is not the ringer volume to prevent recursively
- // calling to compute volume. This could happen in cases where a11y and ringer sounds belong
- // to the same volume group.
- if (volumeSource != ringVolumeSrc && volumeSource == a11yVolumeSrc
- && (AUDIO_MODE_RINGTONE == mEngine->getPhoneState()) &&
+ if (AUDIO_MODE_RINGTONE == mEngine->getPhoneState() &&
mOutputs.isActive(ringVolumeSrc, 0)) {
auto &ringCurves = getVolumeCurves(AUDIO_STREAM_RING);
- const float ringVolumeDb = computeVolume(ringCurves, ringVolumeSrc, index, deviceTypes);
+ const float ringVolumeDb = computeVolume(ringCurves, ringVolumeSrc, index, deviceTypes,
+ /* computeInternalInteraction= */ false);
return ringVolumeDb - 4 > volumeDb ? ringVolumeDb - 4 : volumeDb;
}
@@ -7915,7 +7936,8 @@
auto &voiceCurves = getVolumeCurves(callVolumeSrc);
int voiceVolumeIndex = voiceCurves.getVolumeIndex(deviceTypes);
const float maxVoiceVolDb =
- computeVolume(voiceCurves, callVolumeSrc, voiceVolumeIndex, deviceTypes)
+ computeVolume(voiceCurves, callVolumeSrc, voiceVolumeIndex, deviceTypes,
+ /* computeInternalInteraction= */ false)
+ IN_CALL_EARPIECE_HEADROOM_DB;
// FIXME: Workaround for call screening applications until a proper audio mode is defined
// to support this scenario : Exempt the RING stream from the audio cap if the audio was
@@ -7957,12 +7979,8 @@
// when the phone is ringing we must consider that music could have been paused just before
// by the music application and behave as if music was active if the last music track was
// just stopped
- // Verify that the current volume source is not the music volume to prevent recursively
- // calling to compute volume. This could happen in cases where music and
- // (alarm, ring, notification, system, etc.) sounds belong to the same volume group.
- if (volumeSource != musicVolumeSrc &&
- (isStreamActive(AUDIO_STREAM_MUSIC, SONIFICATION_HEADSET_MUSIC_DELAY)
- || mLimitRingtoneVolume)) {
+ if (isStreamActive(AUDIO_STREAM_MUSIC, SONIFICATION_HEADSET_MUSIC_DELAY)
+ || mLimitRingtoneVolume) {
volumeDb += SONIFICATION_HEADSET_VOLUME_FACTOR_DB;
DeviceTypeSet musicDevice =
mEngine->getOutputDevicesForAttributes(attributes_initializer(AUDIO_USAGE_MEDIA),
@@ -7971,7 +7989,8 @@
float musicVolDb = computeVolume(musicCurves,
musicVolumeSrc,
musicCurves.getVolumeIndex(musicDevice),
- musicDevice);
+ musicDevice,
+ /* computeInternalInteraction= */ false);
float minVolDb = (musicVolDb > SONIFICATION_HEADSET_VOLUME_MIN_DB) ?
musicVolDb : SONIFICATION_HEADSET_VOLUME_MIN_DB;
if (volumeDb > minVolDb) {
diff --git a/services/audiopolicy/managerdefault/AudioPolicyManager.h b/services/audiopolicy/managerdefault/AudioPolicyManager.h
index 328cc32..2d015d3 100644
--- a/services/audiopolicy/managerdefault/AudioPolicyManager.h
+++ b/services/audiopolicy/managerdefault/AudioPolicyManager.h
@@ -141,7 +141,8 @@
audio_input_flags_t flags,
audio_port_handle_t *selectedDeviceId,
input_type_t *inputType,
- audio_port_handle_t *portId);
+ audio_port_handle_t *portId,
+ uint32_t *virtualDeviceId);
// indicates to the audio policy manager that the input starts being used.
virtual status_t startInput(audio_port_handle_t portId);
@@ -564,12 +565,36 @@
status_t resetInputDevice(audio_io_handle_t input,
audio_patch_handle_t *patchHandle = NULL);
- // compute the actual volume for a given stream according to the requested index and a particular
- // device
- virtual float computeVolume(IVolumeCurves &curves,
- VolumeSource volumeSource,
- int index,
- const DeviceTypeSet& deviceTypes);
+ /**
+ * Compute the volume in dB that should be applied for a volume source and device types at a
+ * particular volume index.
+ *
+ * <p><b>Note:</b> Internally the compute method recursively calls itself to accurately
+ * determine the volume given the currently active sources and devices. Some of the
+ * interactions that require recursive computation are:
+ * <ul>
+ * <li>Match the accessibility volume if the ringtone volume is much louder</li>
+ * <li>If a voice call is active, cap other volumes (except ringtone and accessibility)</li>
+ * <li>Attenuate notifications if a headset is connected, to prevent a burst in the user's ear</li>
+ * <li>Attenuate the ringtone if a headset is connected, music is not playing and the speaker is
+ * among the target devices, to prevent a burst in the user's ear</li>
+ * <li>Limit the music volume if a headset is connected and a notification is also active</li>
+ * </ul>
+ *
+ * @param curves volume curves to use for calculating volume value given the index
+ * @param volumeSource source (use case) of the volume
+ * @param index index to match in the volume curves for the calculation
+ * @param deviceTypes devices that should be considered in the volume curves for the
+ * calculation
+ * @param computeInternalInteraction boolean indicating whether the recursive volume
+ * computation described above should be performed. Defaults to {@code true} so the
+ * volume interactions can be computed. Calls within the method should always set the
+ * value to {@code false} to prevent infinite recursion.
+ * @return the computed volume in dB
+ */
+ virtual float computeVolume(IVolumeCurves &curves, VolumeSource volumeSource,
+ int index, const DeviceTypeSet& deviceTypes,
+ bool computeInternalInteraction = true);
// rescale volume index from srcStream within range of dstStream
int rescaleVolumeIndex(int srcIndex,
diff --git a/services/audiopolicy/service/Android.bp b/services/audiopolicy/service/Android.bp
index cddbf39..9b7a470 100644
--- a/services/audiopolicy/service/Android.bp
+++ b/services/audiopolicy/service/Android.bp
@@ -12,6 +12,15 @@
name: "libaudiopolicyservice_dependencies",
shared_libs: [
+ "android.media.audiopolicy-aconfig-cc",
+ "audioclient-types-aidl-cpp",
+ "audioflinger-aidl-cpp",
+ "audiopolicy-aidl-cpp",
+ "audiopolicy-types-aidl-cpp",
+ "capture_state_listener-aidl-cpp",
+ "com.android.media.audio-aconfig-cc",
+ "framework-permission-aidl-cpp",
+ "libPlatformProperties",
"libactivitymanager_aidl",
"libaudioclient",
"libaudioclient_aidl_conversion",
@@ -32,27 +41,19 @@
"libmediametrics",
"libmediautils",
"libpermission",
- "libPlatformProperties",
"libsensor",
"libsensorprivacy",
"libshmemcompat",
"libstagefright_foundation",
"libutils",
"libxml2",
- "audioclient-types-aidl-cpp",
- "audioflinger-aidl-cpp",
- "audiopolicy-aidl-cpp",
- "audiopolicy-types-aidl-cpp",
- "capture_state_listener-aidl-cpp",
- "com.android.media.audio-aconfig-cc",
- "framework-permission-aidl-cpp",
"packagemanager_aidl-cpp",
"spatializer-aidl-cpp",
],
static_libs: [
- "libeffectsconfig",
"libaudiopolicycomponents",
+ "libeffectsconfig",
],
}
@@ -60,16 +61,16 @@
name: "libaudiopolicyservice",
defaults: [
- "libaudiopolicyservice_dependencies",
"latest_android_media_audio_common_types_cpp_shared",
+ "libaudiopolicyservice_dependencies",
],
srcs: [
- "AudioRecordClient.cpp",
"AudioPolicyClientImpl.cpp",
"AudioPolicyEffects.cpp",
"AudioPolicyInterfaceImpl.cpp",
"AudioPolicyService.cpp",
+ "AudioRecordClient.cpp",
"CaptureStateNotifier.cpp",
"Spatializer.cpp",
"SpatializerPoseController.cpp",
@@ -92,18 +93,24 @@
],
cflags: [
- "-fvisibility=hidden",
- "-Werror",
"-Wall",
+ "-Werror",
"-Wthread-safety",
+ "-fvisibility=hidden",
],
export_shared_lib_headers: [
+ "framework-permission-aidl-cpp",
"libactivitymanager_aidl",
"libaudiousecasevalidation",
"libheadtracking",
"libheadtracking-binding",
"libsensorprivacy",
- "framework-permission-aidl-cpp",
],
}
+
+cc_library_headers {
+ name: "libaudiopolicyservice_headers",
+ host_supported: true,
+ export_include_dirs: ["."],
+}
diff --git a/services/audiopolicy/service/AudioPolicyInterfaceImpl.cpp b/services/audiopolicy/service/AudioPolicyInterfaceImpl.cpp
index 345bac2..a862037 100644
--- a/services/audiopolicy/service/AudioPolicyInterfaceImpl.cpp
+++ b/services/audiopolicy/service/AudioPolicyInterfaceImpl.cpp
@@ -20,6 +20,7 @@
#include "AudioPolicyService.h"
#include "AudioRecordClient.h"
#include "TypeConverter.h"
+#include <android_media_audiopolicy.h>
#include <media/AidlConversion.h>
#include <media/AudioPolicy.h>
#include <media/AudioValidator.h>
@@ -45,6 +46,7 @@
#define MAX_ITEMS_PER_LIST 1024
namespace android {
+namespace audiopolicy_flags = android::media::audiopolicy;
using binder::Status;
using aidl_utils::binderStatusFromStatusT;
using content::AttributionSourceState;
@@ -62,6 +64,8 @@
using media::audio::common::AudioUuid;
using media::audio::common::Int;
+constexpr int kDefaultVirtualDeviceId = 0;
+
const std::vector<audio_usage_t>& SYSTEM_USAGES = {
AUDIO_USAGE_CALL_ASSISTANT,
AUDIO_USAGE_EMERGENCY,
@@ -627,6 +631,8 @@
RETURN_IF_BINDER_ERROR(binderStatusFromStatusT(validateUsage(attr,
attributionSource)));
+ uint32_t virtualDeviceId = kDefaultVirtualDeviceId;
+
// check calling permissions.
// Capturing from the following sources does not require permission RECORD_AUDIO
// as the captured audio does not come from a microphone:
@@ -698,7 +704,8 @@
status = mAudioPolicyManager->getInputForAttr(&attr, &input, riid, session,
attributionSource, &config,
flags, &selectedDeviceId,
- &inputType, &portId);
+ &inputType, &portId,
+ &virtualDeviceId);
}
audioPolicyEffects = mAudioPolicyEffects;
@@ -737,6 +744,14 @@
LOG_ALWAYS_FATAL("%s encountered an invalid input type %d",
__func__, (int)inputType);
}
+
+ if (audiopolicy_flags::record_audio_device_aware_permission()) {
+ // enforce device-aware RECORD_AUDIO permission
+ if (virtualDeviceId != kDefaultVirtualDeviceId &&
+ !recordingAllowed(attributionSource, virtualDeviceId, inputSource)) {
+ status = PERMISSION_DENIED;
+ }
+ }
}
if (status != NO_ERROR) {
@@ -752,6 +767,7 @@
sp<AudioRecordClient> client = new AudioRecordClient(attr, input, session, portId,
selectedDeviceId, attributionSource,
+ virtualDeviceId,
canCaptureOutput, canCaptureHotword,
mOutputCommandThread);
mAudioRecordClients.add(portId, client);
@@ -807,8 +823,8 @@
msg << "Audio recording on session " << client->session;
// check calling permissions
- if (!(startRecording(client->attributionSource, String16(msg.str().c_str()),
- client->attributes.source)
+ if (!(startRecording(client->attributionSource, client->virtualDeviceId,
+ String16(msg.str().c_str()), client->attributes.source)
|| client->attributes.source == AUDIO_SOURCE_FM_TUNER
|| client->attributes.source == AUDIO_SOURCE_REMOTE_SUBMIX
|| client->attributes.source == AUDIO_SOURCE_ECHO_REFERENCE)) {
@@ -826,7 +842,8 @@
if (client->active) {
ALOGE("Client should never be active before startInput. Uid %d port %d",
client->attributionSource.uid, portId);
- finishRecording(client->attributionSource, client->attributes.source);
+ finishRecording(client->attributionSource, client->virtualDeviceId,
+ client->attributes.source);
return binderStatusFromStatusT(INVALID_OPERATION);
}
@@ -922,7 +939,8 @@
client->active = false;
client->startTimeNs = 0;
updateUidStates_l();
- finishRecording(client->attributionSource, client->attributes.source);
+ finishRecording(client->attributionSource, client->virtualDeviceId,
+ client->attributes.source);
}
return binderStatusFromStatusT(status);
@@ -951,7 +969,7 @@
updateUidStates_l();
// finish the recording app op
- finishRecording(client->attributionSource, client->attributes.source);
+ finishRecording(client->attributionSource, client->virtualDeviceId, client->attributes.source);
AutoCallerClear acc;
return binderStatusFromStatusT(mAudioPolicyManager->stopInput(portId));
}
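
To illustrate the device-aware RECORD_AUDIO gating added to getInputForAttr() above: when the aconfig flag is enabled and the input resolved to a non-default virtual device, the permission is re-checked against that device. A small sketch under those assumptions; checkRecordingAllowedOnDevice() is a hypothetical stand-in for the recordingAllowed() overload used in the diff:

    #include <cstdint>

    constexpr uint32_t kDefaultVirtualDeviceId = 0;

    // Hypothetical stand-in for recordingAllowed(attributionSource, virtualDeviceId, source).
    bool checkRecordingAllowedOnDevice(uint32_t /*virtualDeviceId*/) {
        return false;  // stub; the real check queries the permission infrastructure
    }

    // Returns true when the capture may proceed; false maps to PERMISSION_DENIED.
    bool deviceAwareRecordCheck(bool flagEnabled, uint32_t virtualDeviceId) {
        if (!flagEnabled || virtualDeviceId == kDefaultVirtualDeviceId) {
            return true;  // legacy path: only the default device is considered
        }
        return checkRecordingAllowedOnDevice(virtualDeviceId);
    }
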
diff --git a/services/audiopolicy/service/AudioPolicyService.cpp b/services/audiopolicy/service/AudioPolicyService.cpp
index 2265eb2..f372c71 100644
--- a/services/audiopolicy/service/AudioPolicyService.cpp
+++ b/services/audiopolicy/service/AudioPolicyService.cpp
@@ -18,9 +18,6 @@
//#define LOG_NDEBUG 0
#include "Configuration.h"
-#undef __STRICT_ANSI__
-#define __STDINT_LIMITS
-#define __STDC_LIMIT_MACROS
#include <stdint.h>
#include <sys/time.h>
#include <dlfcn.h>
@@ -868,6 +865,8 @@
// OR client has CAPTURE_AUDIO_OUTPUT privileged permission
// OR the client is the current InputMethodService
// AND a RTT call is active AND the source is VOICE_RECOGNITION
+// OR the client is an active communication owner
+// AND is on TOP or latest started
// OR Any client
// AND The assistant is not on TOP
// AND is on TOP or latest started
@@ -1032,7 +1031,12 @@
bool isTopOrLatestAssistant = latestActiveAssistant == nullptr ? false :
current->attributionSource.uid == latestActiveAssistant->attributionSource.uid;
- auto canCaptureIfInCallOrCommunication = [&](const auto &recordClient) REQUIRES(mMutex) {
+ // TODO: b/339112720
+ // Refine this logic when we have the correct phone state owner UID. The current issue is
+ // that when a VoIP app uses the Telecom API to manage calls, mPhoneStateOwnerUid is
+ // AID_SYSTEM instead of the actual VoIP app UID, so isPhoneStateOwnerActive is inaccurate here.
+ const bool canCaptureIfInCallOrCommunication = [&](const auto& recordClient) REQUIRES(
+ mMutex) {
uid_t recordUid = VALUE_OR_FATAL(aidl2legacy_int32_t_uid_t(
recordClient->attributionSource.uid));
bool canCaptureCall = recordClient->canCaptureOutput;
@@ -1041,19 +1045,26 @@
|| recordUid == mPhoneStateOwnerUid;
return !(isInCall && !canCaptureCall)
&& !(isInCommunication && !canCaptureCommunication);
- };
+ }(current);
// By default allow capture if:
// The assistant is not on TOP
- // AND is on TOP or latest started
- // AND there is no active privacy sensitive capture or call
+ // AND is on TOP or latest started
+ // AND there is no active privacy sensitive capture or call
// OR client has CAPTURE_AUDIO_OUTPUT privileged permission
- bool allowSensitiveCapture =
+ // OR the assistant is on TOP
+ // AND is the ongoing communication owner
+ // AND is on TOP or latest started
+ const bool allowSensitiveCapture =
!isSensitiveActive || isTopOrLatestSensitive || current->canCaptureOutput;
- bool allowCapture = !isAssistantOnTop
- && (isTopOrLatestActive || isTopOrLatestSensitive)
- && allowSensitiveCapture
- && canCaptureIfInCallOrCommunication(current);
+ bool allowCapture = false;
+ if (!isAssistantOnTop) {
+ allowCapture = (isTopOrLatestActive || isTopOrLatestSensitive) &&
+ allowSensitiveCapture && canCaptureIfInCallOrCommunication;
+ } else {
+ allowCapture = isInCommunication && isTopOrLatestSensitive &&
+ canCaptureIfInCallOrCommunication;
+ }
if (!current->hasOp()) {
// Never allow capture if app op is denied
@@ -1076,7 +1087,7 @@
allowCapture = true;
}
} else if (allowSensitiveCapture
- && canCaptureIfInCallOrCommunication(current)) {
+ && canCaptureIfInCallOrCommunication) {
if (isTopOrLatestAssistant
&& (source == AUDIO_SOURCE_VOICE_RECOGNITION
|| source == AUDIO_SOURCE_HOTWORD)) {
@@ -1097,7 +1108,7 @@
allowCapture = true;
}
} else if (allowSensitiveCapture
- && canCaptureIfInCallOrCommunication(current)) {
+ && canCaptureIfInCallOrCommunication) {
if ((source == AUDIO_SOURCE_VOICE_RECOGNITION) || (source == AUDIO_SOURCE_HOTWORD))
{
allowCapture = true;
@@ -1112,7 +1123,7 @@
// Is on TOP AND the source is VOICE_RECOGNITION or HOTWORD
if (!isAssistantOnTop
&& allowSensitiveCapture
- && canCaptureIfInCallOrCommunication(current)) {
+ && canCaptureIfInCallOrCommunication) {
allowCapture = true;
}
if (isA11yOnTop) {
@@ -1126,7 +1137,7 @@
// AND no call is active
// OR client has CAPTURE_AUDIO_OUTPUT privileged permission
if (onlyHotwordActive
- && canCaptureIfInCallOrCommunication(current)) {
+ && canCaptureIfInCallOrCommunication) {
allowCapture = true;
}
} else if (mUidPolicy->isCurrentImeUid(currentUid)) {
@@ -1193,12 +1204,13 @@
if (client->silenced != silenced) {
if (client->active) {
if (silenced) {
- finishRecording(client->attributionSource, client->attributes.source);
+ finishRecording(client->attributionSource, client->virtualDeviceId,
+ client->attributes.source);
} else {
std::stringstream msg;
msg << "Audio recording un-silenced on session " << client->session;
- if (!startRecording(client->attributionSource, String16(msg.str().c_str()),
- client->attributes.source)) {
+ if (!startRecording(client->attributionSource, client->virtualDeviceId,
+ String16(msg.str().c_str()), client->attributes.source)) {
silenced = true;
}
}
@@ -1427,144 +1439,13 @@
if (in == BAD_TYPE || out == BAD_TYPE || err == BAD_TYPE) {
return BAD_VALUE;
}
- if (args.size() >= 3 && args[0] == String16("set-uid-state")) {
- return handleSetUidState(args, err);
- } else if (args.size() >= 2 && args[0] == String16("reset-uid-state")) {
- return handleResetUidState(args, err);
- } else if (args.size() >= 2 && args[0] == String16("get-uid-state")) {
- return handleGetUidState(args, out, err);
- } else if (args.size() >= 1 && args[0] == String16("purge_permission-cache")) {
+ if (args.size() >= 1 && args[0] == String16("purge_permission-cache")) {
purgePermissionCache();
return NO_ERROR;
- } else if (args.size() == 1 && args[0] == String16("help")) {
- printHelp(out);
- return NO_ERROR;
}
- printHelp(err);
return BAD_VALUE;
}
-static status_t getUidForPackage(String16 packageName, int userId, /*inout*/uid_t& uid, int err) {
- if (userId < 0) {
- ALOGE("Invalid user: %d", userId);
- dprintf(err, "Invalid user: %d\n", userId);
- return BAD_VALUE;
- }
-
- PermissionController pc;
- uid = pc.getPackageUid(packageName, 0);
- if (uid <= 0) {
- ALOGE("Unknown package: '%s'", String8(packageName).c_str());
- dprintf(err, "Unknown package: '%s'\n", String8(packageName).c_str());
- return BAD_VALUE;
- }
-
- uid = multiuser_get_uid(userId, uid);
- return NO_ERROR;
-}
-
-status_t AudioPolicyService::handleSetUidState(Vector<String16>& args, int err) {
- // Valid arg.size() is 3 or 5, args.size() is 5 with --user option.
- if (!(args.size() == 3 || args.size() == 5)) {
- printHelp(err);
- return BAD_VALUE;
- }
-
- bool active = false;
- if (args[2] == String16("active")) {
- active = true;
- } else if ((args[2] != String16("idle"))) {
- ALOGE("Expected active or idle but got: '%s'", String8(args[2]).c_str());
- return BAD_VALUE;
- }
-
- int userId = 0;
- if (args.size() >= 5 && args[3] == String16("--user")) {
- userId = atoi(String8(args[4]));
- }
-
- uid_t uid;
- if (getUidForPackage(args[1], userId, uid, err) == BAD_VALUE) {
- return BAD_VALUE;
- }
-
- sp<UidPolicy> uidPolicy;
- {
- audio_utils::lock_guard _l(mMutex);
- uidPolicy = mUidPolicy;
- }
- if (uidPolicy) {
- uidPolicy->addOverrideUid(uid, active);
- return NO_ERROR;
- }
- return NO_INIT;
-}
-
-status_t AudioPolicyService::handleResetUidState(Vector<String16>& args, int err) {
- // Valid arg.size() is 2 or 4, args.size() is 4 with --user option.
- if (!(args.size() == 2 || args.size() == 4)) {
- printHelp(err);
- return BAD_VALUE;
- }
-
- int userId = 0;
- if (args.size() >= 4 && args[2] == String16("--user")) {
- userId = atoi(String8(args[3]));
- }
-
- uid_t uid;
- if (getUidForPackage(args[1], userId, uid, err) == BAD_VALUE) {
- return BAD_VALUE;
- }
-
- sp<UidPolicy> uidPolicy;
- {
- audio_utils::lock_guard _l(mMutex);
- uidPolicy = mUidPolicy;
- }
- if (uidPolicy) {
- uidPolicy->removeOverrideUid(uid);
- return NO_ERROR;
- }
- return NO_INIT;
-}
-
-status_t AudioPolicyService::handleGetUidState(Vector<String16>& args, int out, int err) {
- // Valid arg.size() is 2 or 4, args.size() is 4 with --user option.
- if (!(args.size() == 2 || args.size() == 4)) {
- printHelp(err);
- return BAD_VALUE;
- }
-
- int userId = 0;
- if (args.size() >= 4 && args[2] == String16("--user")) {
- userId = atoi(String8(args[3]));
- }
-
- uid_t uid;
- if (getUidForPackage(args[1], userId, uid, err) == BAD_VALUE) {
- return BAD_VALUE;
- }
-
- sp<UidPolicy> uidPolicy;
- {
- audio_utils::lock_guard _l(mMutex);
- uidPolicy = mUidPolicy;
- }
- if (uidPolicy) {
- return dprintf(out, uidPolicy->isUidActive(uid) ? "active\n" : "idle\n");
- }
- return NO_INIT;
-}
-
-status_t AudioPolicyService::printHelp(int out) {
- return dprintf(out, "Audio policy service commands:\n"
- " get-uid-state <PACKAGE> [--user USER_ID] gets the uid state\n"
- " set-uid-state <PACKAGE> <active|idle> [--user USER_ID] overrides the uid state\n"
- " reset-uid-state <PACKAGE> [--user USER_ID] clears the uid state override\n"
- " help print this message\n");
-}
-
status_t AudioPolicyService::registerOutput(audio_io_handle_t output,
const audio_config_base_t& config,
const audio_output_flags_t flags) {
@@ -1625,10 +1506,6 @@
checkRegistered();
{
audio_utils::lock_guard _l(mMutex);
- auto overrideIter = mOverrideUids.find(uid);
- if (overrideIter != mOverrideUids.end()) {
- return overrideIter->second.first;
- }
// In an absense of the ActivityManager, assume everything to be active.
if (!mObserverRegistered) return true;
auto cacheIter = mCachedUids.find(uid);
@@ -1654,20 +1531,6 @@
checkRegistered();
{
audio_utils::lock_guard _l(mMutex);
- auto overrideIter = mOverrideUids.find(uid);
- if (overrideIter != mOverrideUids.end()) {
- if (overrideIter->second.first) {
- if (overrideIter->second.second != ActivityManager::PROCESS_STATE_UNKNOWN) {
- return overrideIter->second.second;
- } else {
- auto cacheIter = mCachedUids.find(uid);
- if (cacheIter != mCachedUids.end()) {
- return cacheIter->second.second;
- }
- }
- }
- return ActivityManager::PROCESS_STATE_UNKNOWN;
- }
// In an absense of the ActivityManager, assume everything to be active.
if (!mObserverRegistered) {
return ActivityManager::PROCESS_STATE_TOP;
@@ -1720,10 +1583,6 @@
void AudioPolicyService::UidPolicy::onUidProcAdjChanged(uid_t uid __unused, int32_t adj __unused) {
}
-void AudioPolicyService::UidPolicy::updateOverrideUid(uid_t uid, bool active, bool insert) {
- updateUid(&mOverrideUids, uid, active, ActivityManager::PROCESS_STATE_UNKNOWN, insert);
-}
-
void AudioPolicyService::UidPolicy::notifyService() {
sp<AudioPolicyService> service = mService.promote();
if (service != nullptr) {
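
Condensed for reference, the allowCapture rework above expressed as a single free function (plain C++; the variable names follow the diff, everything else is a sketch): capture is granted either on the pre-existing "top or latest client, no conflicting sensitive capture or call" path, or, when the assistant is on top, only to the ongoing communication owner.

    // Condensed restatement of the allowCapture decision in updateUidStates_l().
    bool decideAllowCapture(bool isAssistantOnTop,
                            bool isTopOrLatestActive,
                            bool isTopOrLatestSensitive,
                            bool allowSensitiveCapture,
                            bool canCaptureIfInCallOrCommunication,
                            bool isInCommunication) {
        if (!isAssistantOnTop) {
            // Default path: top/latest client with no conflicting sensitive capture or call.
            return (isTopOrLatestActive || isTopOrLatestSensitive)
                    && allowSensitiveCapture && canCaptureIfInCallOrCommunication;
        }
        // Assistant on top: only the ongoing communication owner keeps capturing.
        return isInCommunication && isTopOrLatestSensitive && canCaptureIfInCallOrCommunication;
    }
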
diff --git a/services/audiopolicy/service/AudioPolicyService.h b/services/audiopolicy/service/AudioPolicyService.h
index 7aa80cf..5297e47 100644
--- a/services/audiopolicy/service/AudioPolicyService.h
+++ b/services/audiopolicy/service/AudioPolicyService.h
@@ -500,8 +500,6 @@
int32_t capability) override;
void onUidProcAdjChanged(uid_t uid, int32_t adj) override;
- void addOverrideUid(uid_t uid, bool active) { updateOverrideUid(uid, active, true); }
- void removeOverrideUid(uid_t uid) { updateOverrideUid(uid, false, false); }
void updateUid(std::unordered_map<uid_t, std::pair<bool, int>> *uids,
uid_t uid, bool active, int state, bool insert);
@@ -510,7 +508,6 @@
private:
void notifyService();
- void updateOverrideUid(uid_t uid, bool active, bool insert);
void updateUidLocked(std::unordered_map<uid_t, std::pair<bool, int>> *uids,
uid_t uid, bool active, int state, bool insert);
void checkRegistered();
@@ -519,7 +516,6 @@
audio_utils::mutex mMutex{audio_utils::MutexOrder::kUidPolicy_Mutex};
ActivityManager mAm;
bool mObserverRegistered = false;
- std::unordered_map<uid_t, std::pair<bool, int>> mOverrideUids GUARDED_BY(mMutex);
std::unordered_map<uid_t, std::pair<bool, int>> mCachedUids GUARDED_BY(mMutex);
std::vector<uid_t> mAssistantUids;
std::vector<uid_t> mActiveAssistantUids;
diff --git a/services/audiopolicy/service/AudioRecordClient.cpp b/services/audiopolicy/service/AudioRecordClient.cpp
index a89a84d..6d8b3cf 100644
--- a/services/audiopolicy/service/AudioRecordClient.cpp
+++ b/services/audiopolicy/service/AudioRecordClient.cpp
@@ -18,9 +18,10 @@
#include "AudioRecordClient.h"
#include "AudioPolicyService.h"
+#include <android_media_audiopolicy.h>
namespace android::media::audiopolicy {
-
+namespace audiopolicy_flags = android::media::audiopolicy;
using android::AudioPolicyService;
namespace {
@@ -59,8 +60,10 @@
// static
sp<OpRecordAudioMonitor>
OpRecordAudioMonitor::createIfNeeded(
- const AttributionSourceState& attributionSource, const audio_attributes_t& attr,
- wp<AudioPolicyService::AudioCommandThread> commandThread)
+ const AttributionSourceState &attributionSource,
+ const uint32_t virtualDeviceId,
+ const audio_attributes_t &attr,
+ wp<AudioPolicyService::AudioCommandThread> commandThread)
{
if (isAudioServerOrRootUid(attributionSource.uid)) {
ALOGV("not silencing record for audio or root source %s",
@@ -78,15 +81,19 @@
|| attributionSource.packageName.value().size() == 0) {
return nullptr;
}
- return new OpRecordAudioMonitor(attributionSource, getOpForSource(attr.source), commandThread);
+
+ return new OpRecordAudioMonitor(attributionSource, virtualDeviceId, attr,
+ getOpForSource(attr.source), commandThread);
}
OpRecordAudioMonitor::OpRecordAudioMonitor(
- const AttributionSourceState& attributionSource, int32_t appOp,
+ const AttributionSourceState &attributionSource,
+ const uint32_t virtualDeviceId, const audio_attributes_t &attr,
+ int32_t appOp,
wp<AudioPolicyService::AudioCommandThread> commandThread) :
- mHasOp(true), mAttributionSource(attributionSource), mAppOp(appOp),
- mCommandThread(commandThread)
-{
+ mHasOp(true), mAttributionSource(attributionSource),
+ mVirtualDeviceId(virtualDeviceId), mAttr(attr), mAppOp(appOp),
+ mCommandThread(commandThread) {
}
OpRecordAudioMonitor::~OpRecordAudioMonitor()
@@ -131,7 +138,12 @@
const int32_t mode = mAppOpsManager.checkOp(mAppOp,
mAttributionSource.uid, VALUE_OR_FATAL(aidl2legacy_string_view_String16(
mAttributionSource.packageName.value_or(""))));
- const bool hasIt = (mode == AppOpsManager::MODE_ALLOWED);
+ bool hasIt = (mode == AppOpsManager::MODE_ALLOWED);
+
+ if (audiopolicy_flags::record_audio_device_aware_permission()) {
+ const bool canRecord = recordingAllowed(mAttributionSource, mVirtualDeviceId, mAttr.source);
+ hasIt = hasIt && canRecord;
+ }
// verbose logging only log when appOp changed
ALOGI_IF(hasIt != mHasOp.load(),
"App op %d missing, %ssilencing record %s",
diff --git a/services/audiopolicy/service/AudioRecordClient.h b/services/audiopolicy/service/AudioRecordClient.h
index d3be316..76aff41 100644
--- a/services/audiopolicy/service/AudioRecordClient.h
+++ b/services/audiopolicy/service/AudioRecordClient.h
@@ -38,12 +38,16 @@
static sp<OpRecordAudioMonitor> createIfNeeded(
const AttributionSourceState& attributionSource,
+ uint32_t virtualDeviceId,
const audio_attributes_t& attr,
wp<AudioPolicyService::AudioCommandThread> commandThread);
private:
- OpRecordAudioMonitor(const AttributionSourceState& attributionSource, int32_t appOp,
- wp<AudioPolicyService::AudioCommandThread> commandThread);
+ OpRecordAudioMonitor(const AttributionSourceState &attributionSource,
+ uint32_t virtualDeviceId,
+ const audio_attributes_t &attr,
+ int32_t appOp,
+ wp<AudioPolicyService::AudioCommandThread> commandThread);
void onFirstRef() override;
@@ -67,6 +71,8 @@
std::atomic_bool mHasOp;
const AttributionSourceState mAttributionSource;
+ const uint32_t mVirtualDeviceId;
+ const audio_attributes_t mAttr;
const int32_t mAppOp;
wp<AudioPolicyService::AudioCommandThread> mCommandThread;
};
@@ -81,15 +87,20 @@
const audio_session_t session, audio_port_handle_t portId,
const audio_port_handle_t deviceId,
const AttributionSourceState& attributionSource,
+ const uint32_t virtualDeviceId,
bool canCaptureOutput, bool canCaptureHotword,
wp<AudioPolicyService::AudioCommandThread> commandThread) :
AudioClient(attributes, io, attributionSource,
session, portId, deviceId), attributionSource(attributionSource),
+ virtualDeviceId(virtualDeviceId),
startTimeNs(0), canCaptureOutput(canCaptureOutput),
canCaptureHotword(canCaptureHotword), silenced(false),
mOpRecordAudioMonitor(
OpRecordAudioMonitor::createIfNeeded(attributionSource,
- attributes, commandThread)) {}
+ virtualDeviceId,
+ attributes, commandThread)) {
+
+ }
~AudioRecordClient() override = default;
bool hasOp() const {
@@ -97,6 +108,7 @@
}
const AttributionSourceState attributionSource; // attribution source of client
+ const uint32_t virtualDeviceId; // id of the virtual device associated with the audio device
nsecs_t startTimeNs;
const bool canCaptureOutput;
const bool canCaptureHotword;
diff --git a/services/audiopolicy/service/Spatializer.cpp b/services/audiopolicy/service/Spatializer.cpp
index bb36df7..c98f8df 100644
--- a/services/audiopolicy/service/Spatializer.cpp
+++ b/services/audiopolicy/service/Spatializer.cpp
@@ -1097,7 +1097,7 @@
}
void Spatializer::checkSensorsState_l() {
- audio_latency_mode_t requestedLatencyMode = AUDIO_LATENCY_MODE_FREE;
+ mRequestedLatencyMode = AUDIO_LATENCY_MODE_FREE;
const bool supportsSetLatencyMode = !mSupportedLatencyModes.empty();
bool supportsLowLatencyMode;
if (com::android::media::audio::dsa_over_bt_le_audio()) {
@@ -1118,7 +1118,7 @@
&& mDesiredHeadTrackingMode != HeadTrackingMode::STATIC
&& mHeadSensor != SpatializerPoseController::INVALID_SENSOR) {
if (supportsLowLatencyMode) {
- requestedLatencyMode = selectHeadtrackingConnectionMode_l();
+ mRequestedLatencyMode = selectHeadtrackingConnectionMode_l();
}
if (mEngine != nullptr) {
setEffectParameter_l(SPATIALIZER_PARAM_HEADTRACKING_MODE,
@@ -1140,9 +1140,9 @@
}
if (mOutput != AUDIO_IO_HANDLE_NONE && supportsSetLatencyMode) {
const status_t status =
- AudioSystem::setRequestedLatencyMode(mOutput, requestedLatencyMode);
+ AudioSystem::setRequestedLatencyMode(mOutput, mRequestedLatencyMode);
ALOGD("%s: setRequestedLatencyMode for output thread(%d) to %s returned %d", __func__,
- mOutput, toString(requestedLatencyMode).c_str(), status);
+ mOutput, toString(mRequestedLatencyMode).c_str(), status);
}
}
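
The Spatializer.cpp change above promotes the requested latency mode from a local variable to the mutex-guarded member mRequestedLatencyMode so the new test-only getter can observe what checkSensorsState_l() last requested. A minimal sketch of that pattern, using std::mutex instead of audio_utils::mutex to stay self-contained:

    #include <mutex>

    enum class LatencyMode { FREE, LOW };

    class LatencyTracker {
    public:
        // State-machine path: recompute and remember the requested mode.
        void recompute(bool headTrackingActive) {
            std::lock_guard<std::mutex> lock(mMutex);
            mRequested = headTrackingActive ? LatencyMode::LOW : LatencyMode::FREE;
            // The real code then forwards the request to the output thread.
        }
        // Test-only observer, mirroring Spatializer::getRequestedLatencyMode().
        LatencyMode requested() const {
            std::lock_guard<std::mutex> lock(mMutex);
            return mRequested;
        }
    private:
        mutable std::mutex mMutex;
        LatencyMode mRequested = LatencyMode::FREE;
    };
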
diff --git a/services/audiopolicy/service/Spatializer.h b/services/audiopolicy/service/Spatializer.h
index 355df18..c5f159c 100644
--- a/services/audiopolicy/service/Spatializer.h
+++ b/services/audiopolicy/service/Spatializer.h
@@ -27,6 +27,7 @@
#include <audio_utils/SimpleLog.h>
#include <math.h>
#include <media/AudioEffect.h>
+#include <media/MediaMetricsItem.h>
#include <media/audiohal/EffectsFactoryHalInterface.h>
#include <media/VectorRecorder.h>
#include <media/audiohal/EffectHalInterface.h>
@@ -153,6 +154,34 @@
return mLevel;
}
+ /** For test only */
+ std::unordered_set<media::audio::common::HeadTracking::ConnectionMode>
+ getSupportedHeadtrackingConnectionModes() const {
+ return mSupportedHeadtrackingConnectionModes;
+ }
+
+ /** For test only */
+ media::audio::common::HeadTracking::ConnectionMode getHeadtrackingConnectionMode() const {
+ return mHeadtrackingConnectionMode;
+ }
+
+ /** For test only */
+ std::vector<audio_latency_mode_t> getSupportedLatencyModes() const {
+ audio_utils::lock_guard lock(mMutex);
+ return mSupportedLatencyModes;
+ }
+
+ /** For test only */
+ std::vector<audio_latency_mode_t> getOrderedLowLatencyModes() const {
+ return mOrderedLowLatencyModes;
+ }
+
+ /** For test only */
+ audio_latency_mode_t getRequestedLatencyMode() const {
+ audio_utils::lock_guard lock(mMutex);
+ return mRequestedLatencyMode;
+ }
+
/** Called by audio policy service when the special output mixer dedicated to spatialization
* is opened and the spatializer engine must be created.
*/
@@ -164,6 +193,12 @@
/** Returns the output stream the spatializer is attached to. */
audio_io_handle_t getOutput() const { audio_utils::lock_guard lock(mMutex); return mOutput; }
+ /** For test only */
+ void setOutput(audio_io_handle_t output) {
+ audio_utils::lock_guard lock(mMutex);
+ mOutput = output;
+ }
+
void updateActiveTracks(size_t numActiveTracks);
/** Gets the channel mask, sampling rate and format set for the spatializer input. */
@@ -188,6 +223,10 @@
// NO_INIT: Spatializer creation failed.
static void sendEmptyCreateSpatializerMetricWithStatus(status_t status);
+ /** Made public for test only */
+ void onSupportedLatencyModesChangedMsg(
+ audio_io_handle_t output, std::vector<audio_latency_mode_t>&& modes);
+
private:
Spatializer(effect_descriptor_t engineDescriptor,
SpatializerPolicyCallback *callback);
@@ -200,8 +239,6 @@
void onHeadToStagePoseMsg(const std::vector<float>& headToStage);
void onActualModeChangeMsg(media::HeadTrackingMode mode);
- void onSupportedLatencyModesChangedMsg(
- audio_io_handle_t output, std::vector<audio_latency_mode_t>&& modes);
static constexpr int kMaxEffectParamValues = 10;
/**
@@ -484,9 +521,11 @@
std::vector<media::audio::common::Spatialization::Mode> mSpatializationModes;
std::vector<audio_channel_mask_t> mChannelMasks;
bool mSupportsHeadTracking;
- /** List of supported headtracking connection modes reported by the spatializer.
+
+ /** List of supported head tracking connection modes reported by the spatializer.
* If the list is empty, the spatializer does not support any optional connection
* mode and mode HeadTracking::ConnectionMode::FRAMEWORK_PROCESSED is assumed.
+ * This is set in the factory constructor and can be accessed without mutex.
*/
std::unordered_set<media::audio::common::HeadTracking::ConnectionMode>
mSupportedHeadtrackingConnectionModes;
@@ -504,6 +543,9 @@
std::vector<audio_latency_mode_t> mSupportedLatencyModes GUARDED_BY(mMutex);
/** preference order for low latency modes according to persist.bluetooth.hid.transport */
std::vector<audio_latency_mode_t> mOrderedLowLatencyModes;
+
+ audio_latency_mode_t mRequestedLatencyMode GUARDED_BY(mMutex) = AUDIO_LATENCY_MODE_FREE;
+
/** string to latency mode map used to parse bluetooth.core.le.dsa_transport_preference */
static const std::map<std::string, audio_latency_mode_t> sStringToLatencyModeMap;
static const std::vector<const char*> sHeadPoseKeys;
diff --git a/services/audiopolicy/tests/Android.bp b/services/audiopolicy/tests/Android.bp
index 34bd3b4..fc349ee 100644
--- a/services/audiopolicy/tests/Android.bp
+++ b/services/audiopolicy/tests/Android.bp
@@ -28,11 +28,11 @@
"libbase",
"libbinder",
"libcutils",
+ "libcutils",
"libhidlbase",
"liblog",
"libmedia_helper",
"libutils",
- "libcutils",
"libxml2",
"server_configurable_flags",
],
@@ -56,13 +56,13 @@
data: [":audiopolicytest_configuration_files"],
cflags: [
- "-Werror",
"-Wall",
+ "-Werror",
],
test_suites: [
- "device-tests",
"automotive-tests",
+ "device-tests",
],
}
@@ -101,10 +101,47 @@
srcs: ["audio_health_tests.cpp"],
cflags: [
- "-Werror",
"-Wall",
+ "-Werror",
],
test_suites: ["device-tests"],
}
+
+cc_test {
+ name: "spatializer_tests",
+
+ defaults: [
+ "latest_android_media_audio_common_types_cpp_shared",
+ "libaudiopolicyservice_dependencies",
+ ],
+
+ require_root: true,
+
+ shared_libs: [
+ "libaudioclient",
+ "libaudiofoundation",
+ "libcutils",
+ "liblog",
+ ],
+
+ static_libs: [
+ "libaudiopolicyservice",
+ ],
+
+ header_libs: [
+ "libaudiohal_headers",
+ "libaudiopolicyservice_headers",
+ "libmediametrics_headers",
+ ],
+
+ srcs: ["spatializer_tests.cpp"],
+
+ cflags: [
+ "-Wall",
+ "-Werror",
+ ],
+
+ test_suites: ["device-tests"],
+}
diff --git a/services/audiopolicy/tests/audiopolicymanager_tests.cpp b/services/audiopolicy/tests/audiopolicymanager_tests.cpp
index dbf3f05..5dc039c 100644
--- a/services/audiopolicy/tests/audiopolicymanager_tests.cpp
+++ b/services/audiopolicy/tests/audiopolicymanager_tests.cpp
@@ -204,7 +204,8 @@
audio_channel_mask_t channelMask,
int sampleRate,
audio_input_flags_t flags = AUDIO_INPUT_FLAG_NONE,
- audio_port_handle_t *portId = nullptr);
+ audio_port_handle_t *portId = nullptr,
+ uint32_t *virtualDeviceId = nullptr);
PatchCountCheck snapshotPatchCount() { return PatchCountCheck(mClient.get()); }
void getAudioPorts(audio_port_type_t type, audio_port_role_t role,
@@ -316,7 +317,8 @@
audio_channel_mask_t channelMask,
int sampleRate,
audio_input_flags_t flags,
- audio_port_handle_t *portId) {
+ audio_port_handle_t *portId,
+ uint32_t *virtualDeviceId) {
audio_config_base_t config = AUDIO_CONFIG_BASE_INITIALIZER;
config.sample_rate = sampleRate;
config.channel_mask = channelMask;
@@ -324,11 +326,12 @@
audio_port_handle_t localPortId;
if (!portId) portId = &localPortId;
*portId = AUDIO_PORT_HANDLE_NONE;
+ if (!virtualDeviceId) virtualDeviceId = 0;
AudioPolicyInterface::input_type_t inputType;
AttributionSourceState attributionSource = createAttributionSourceState(/*uid=*/ 0);
ASSERT_EQ(OK, mManager->getInputForAttr(
&attr, input, riid, session, attributionSource, &config, flags,
- selectedDeviceId, &inputType, portId));
+ selectedDeviceId, &inputType, portId, virtualDeviceId));
ASSERT_NE(AUDIO_PORT_HANDLE_NONE, *portId);
}
@@ -1296,10 +1299,11 @@
};
audio_config_base_t config = requestedConfig;
audio_port_handle_t portId = AUDIO_PORT_HANDLE_NONE;
+ uint32_t *virtualDeviceId = 0;
ASSERT_EQ(OK, mManager->getInputForAttr(
&attr, &input, 1 /*riid*/, AUDIO_SESSION_NONE, attributionSource, &config,
AUDIO_INPUT_FLAG_NONE,
- &selectedDeviceId, &inputType, &portId));
+ &selectedDeviceId, &inputType, &portId, virtualDeviceId));
ASSERT_NE(AUDIO_PORT_HANDLE_NONE, portId);
ASSERT_TRUE(equals(requestedConfig, config));
@@ -1313,7 +1317,7 @@
ASSERT_EQ(OK, mManager->getInputForAttr(
&attr, &input, 1 /*riid*/, AUDIO_SESSION_NONE, attributionSource, &config,
AUDIO_INPUT_FLAG_NONE,
- &selectedDeviceId, &inputType, &portId));
+ &selectedDeviceId, &inputType, &portId, virtualDeviceId));
ASSERT_NE(AUDIO_PORT_HANDLE_NONE, portId);
ASSERT_TRUE(equals(requestedConfig, config));
@@ -1369,6 +1373,7 @@
AudioMix myAudioMix(matchCriteria, mixType, audioConfig, mixFlag,
String8(mixAddress.c_str()), 0);
myAudioMix.mDeviceType = deviceType;
+ myAudioMix.mToken = sp<BBinder>::make();
// Clear mAudioMix before add new one to make sure we don't add already exist mixes.
mAudioMixes.clear();
return addPolicyMix(myAudioMix);
@@ -1569,8 +1574,7 @@
validAudioMix.mDeviceType = AUDIO_DEVICE_OUT_REMOTE_SUBMIX;
mAudioMixes.clear();
- mAudioMixes.add(validAudioMix);
- status_t ret = mManager->registerPolicyMixes(mAudioMixes);
+ status_t ret = addPolicyMix(validAudioMix);
ASSERT_EQ(NO_ERROR, ret);
@@ -1586,8 +1590,7 @@
MIX_ROUTE_FLAG_LOOP_BACK, String8(mMixAddress.c_str()), 0);
validAudioMix.mDeviceType = AUDIO_DEVICE_OUT_REMOTE_SUBMIX;
- mAudioMixes.add(invalidAudioMix);
- ret = mManager->registerPolicyMixes(mAudioMixes);
+ ret = addPolicyMix(invalidAudioMix);
ASSERT_EQ(INVALID_OPERATION, ret);
@@ -1614,8 +1617,7 @@
validAudioMix.mDeviceType = AUDIO_DEVICE_OUT_REMOTE_SUBMIX;
mAudioMixes.clear();
- mAudioMixes.add(validAudioMix);
- status_t ret = mManager->registerPolicyMixes(mAudioMixes);
+ status_t ret = addPolicyMix(validAudioMix);
ASSERT_EQ(NO_ERROR, ret);
@@ -1629,7 +1631,7 @@
AudioMix invalidAudioMix(invalidMixMatchCriteria, MIX_TYPE_PLAYERS, audioConfig,
MIX_ROUTE_FLAG_LOOP_BACK, String8(mMixAddress.c_str()), 0);
- validAudioMix.mDeviceType = AUDIO_DEVICE_OUT_REMOTE_SUBMIX;
+ invalidAudioMix.mDeviceType = AUDIO_DEVICE_OUT_REMOTE_SUBMIX;
Vector<AudioMix> mixes;
mixes.add(invalidAudioMix);
diff --git a/services/audiopolicy/tests/resources/Android.bp b/services/audiopolicy/tests/resources/Android.bp
index 3a4af4c..1c191f5 100644
--- a/services/audiopolicy/tests/resources/Android.bp
+++ b/services/audiopolicy/tests/resources/Android.bp
@@ -16,7 +16,7 @@
"test_audio_policy_primary_only_configuration.xml",
"test_car_ap_atmos_offload_configuration.xml",
"test_invalid_audio_policy_configuration.xml",
- "test_tv_apm_configuration.xml",
"test_settop_box_surround_configuration.xml",
+ "test_tv_apm_configuration.xml",
],
}
diff --git a/services/audiopolicy/tests/spatializer_tests.cpp b/services/audiopolicy/tests/spatializer_tests.cpp
new file mode 100644
index 0000000..73bef43
--- /dev/null
+++ b/services/audiopolicy/tests/spatializer_tests.cpp
@@ -0,0 +1,231 @@
+/*
+ * Copyright (C) 2024 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#define LOG_TAG "Spatializer_Test"
+
+#include "Spatializer.h"
+
+#include <string>
+#include <unordered_set>
+
+#include <gtest/gtest.h>
+
+#include <android/media/audio/common/AudioLatencyMode.h>
+#include <android/media/audio/common/HeadTracking.h>
+#include <android/media/audio/common/Spatialization.h>
+#include <com_android_media_audio.h>
+#include <utils/Log.h>
+
+using namespace android;
+using media::audio::common::HeadTracking;
+using media::audio::common::Spatialization;
+
+class TestSpatializerPolicyCallback :
+ public SpatializerPolicyCallback {
+public:
+ void onCheckSpatializer() override {};
+};
+
+class SpatializerTest : public ::testing::Test {
+protected:
+ void SetUp() override {
+ const sp<EffectsFactoryHalInterface> effectsFactoryHal
+ = EffectsFactoryHalInterface::create();
+ mSpatializer = Spatializer::create(&mTestCallback, effectsFactoryHal);
+ if (mSpatializer == nullptr) {
+ GTEST_SKIP() << "Skipping Spatializer tests: no spatializer";
+ }
+ std::vector<Spatialization::Level> levels;
+ binder::Status status = mSpatializer->getSupportedLevels(&levels);
+ ASSERT_TRUE(status.isOk());
+ for (auto level : levels) {
+ if (level != Spatialization::Level::NONE) {
+ mSpatializer->setLevel(level);
+ break;
+ }
+ }
+ mSpatializer->setOutput(sTestOutput);
+ }
+
+ void TearDown() override {
+ if (mSpatializer == nullptr) {
+ return;
+ }
+ mSpatializer->setLevel(Spatialization::Level::NONE);
+ mSpatializer->setOutput(AUDIO_IO_HANDLE_NONE);
+ mSpatializer->setDesiredHeadTrackingMode(HeadTracking::Mode::DISABLED);
+ mSpatializer->setHeadSensor(SpatializerPoseController::INVALID_SENSOR);
+ mSpatializer->updateActiveTracks(0);
+ }
+
+ static constexpr audio_io_handle_t sTestOutput = 1977;
+ static constexpr int sTestSensorHandle = 1980;
+
+ const static inline std::vector<audio_latency_mode_t> sA2DPLatencyModes = {
+ AUDIO_LATENCY_MODE_LOW,
+ AUDIO_LATENCY_MODE_FREE
+ };
+ const static inline std::vector<audio_latency_mode_t> sBLELatencyModes = {
+ AUDIO_LATENCY_MODE_LOW,
+ AUDIO_LATENCY_MODE_DYNAMIC_SPATIAL_AUDIO_SOFTWARE,
+ AUDIO_LATENCY_MODE_DYNAMIC_SPATIAL_AUDIO_HARDWARE,
+ AUDIO_LATENCY_MODE_FREE
+ };
+
+ bool setUpForHeadtracking() {
+ bool htSupported;
+ mSpatializer->isHeadTrackingSupported(&htSupported);
+ if (!htSupported) {
+ return false;
+ }
+
+ std::vector<HeadTracking::Mode> htModes;
+ mSpatializer->getSupportedHeadTrackingModes(&htModes);
+ for (auto htMode : htModes) {
+ if (htMode != HeadTracking::Mode::DISABLED) {
+ mSpatializer->setDesiredHeadTrackingMode(htMode);
+ break;
+ }
+ }
+
+ mSpatializer->setHeadSensor(sTestSensorHandle);
+ return true;
+ }
+
+ TestSpatializerPolicyCallback mTestCallback;
+ sp<Spatializer> mSpatializer;
+};
+
+TEST_F(SpatializerTest, SupportedA2dpLatencyTest) {
+ if (!setUpForHeadtracking()) {
+ GTEST_SKIP() << "Skipping SupportedA2dpLatencyTest: head tracking not supported";
+ }
+ std::vector<audio_latency_mode_t> latencies = sA2DPLatencyModes;
+ mSpatializer->onSupportedLatencyModesChangedMsg(sTestOutput, std::move(latencies));
+
+ std::vector<audio_latency_mode_t> supportedLatencies =
+ mSpatializer->getSupportedLatencyModes();
+
+ ASSERT_TRUE(supportedLatencies == sA2DPLatencyModes);
+ // Free mode must always be the last of the ordered list
+ ASSERT_TRUE(supportedLatencies.back() == AUDIO_LATENCY_MODE_FREE);
+}
+
+TEST_F(SpatializerTest, SupportedBleLatencyTest) {
+ if (!setUpForHeadtracking()) {
+ GTEST_SKIP() << "Skipping SupportedBleLatencyTest: head tracking not supported";
+ }
+ if (!com::android::media::audio::dsa_over_bt_le_audio()) {
+ GTEST_SKIP() << "Skipping SupportedBleLatencyTest: DSA over LE not enabled";
+ }
+ std::vector<audio_latency_mode_t> latencies = sBLELatencyModes;
+ mSpatializer->onSupportedLatencyModesChangedMsg(sTestOutput, std::move(latencies));
+
+ std::vector<audio_latency_mode_t> supportedLatencies =
+ mSpatializer->getSupportedLatencyModes();
+
+ ASSERT_TRUE(supportedLatencies.back() == AUDIO_LATENCY_MODE_FREE);
+ ASSERT_TRUE(std::find(supportedLatencies.begin(), supportedLatencies.end(),
+ AUDIO_LATENCY_MODE_LOW) != supportedLatencies.end());
+
+ std::vector<audio_latency_mode_t> orderedLowLatencyModes =
+ mSpatializer->getOrderedLowLatencyModes();
+
+ std::vector<audio_latency_mode_t> supportedLowLatencyModes;
+ // remove free mode at the end of the supported list to only retain low latency modes
+ std::copy(supportedLatencies.begin(),
+ supportedLatencies.begin() + supportedLatencies.size() - 1,
+ std::back_inserter(supportedLowLatencyModes));
+
+ // Verify that supported low latency modes are always in ordered latency modes list and
+ // in the same order
+ std::vector<audio_latency_mode_t>::iterator lastIt = orderedLowLatencyModes.begin();
+ for (auto latency : supportedLowLatencyModes) {
+ auto it = std::find(orderedLowLatencyModes.begin(), orderedLowLatencyModes.end(), latency);
+ ASSERT_NE(it, orderedLowLatencyModes.end());
+ ASSERT_LE(lastIt, it);
+ lastIt = it;
+ }
+}
+
+TEST_F(SpatializerTest, RequestedA2dpLatencyTest) {
+ if (!setUpForHeadtracking()) {
+ GTEST_SKIP() << "Skipping RequestedA2dpLatencyTest: head tracking not supported";
+ }
+
+ std::vector<audio_latency_mode_t> latencies = sA2DPLatencyModes;
+ mSpatializer->onSupportedLatencyModesChangedMsg(sTestOutput, std::move(latencies));
+
+ // requested latency mode must be free if no spatialized tracks are active
+ audio_latency_mode_t requestedLatencyMode = mSpatializer->getRequestedLatencyMode();
+ ASSERT_EQ(requestedLatencyMode, AUDIO_LATENCY_MODE_FREE);
+
+ // requested latency mode must be low if at least one spatialized track is active
+ mSpatializer->updateActiveTracks(1);
+ requestedLatencyMode = mSpatializer->getRequestedLatencyMode();
+ ASSERT_EQ(requestedLatencyMode, AUDIO_LATENCY_MODE_LOW);
+
+ // requested latency mode must be free after stopping the last spatialized track
+ mSpatializer->updateActiveTracks(0);
+ requestedLatencyMode = mSpatializer->getRequestedLatencyMode();
+ ASSERT_EQ(requestedLatencyMode, AUDIO_LATENCY_MODE_FREE);
+}
+
+TEST_F(SpatializerTest, RequestedBleLatencyTest) {
+ if (!setUpForHeadtracking()) {
+ GTEST_SKIP() << "Skipping RequestedBleLatencyTest: head tracking not supported";
+ }
+ if (!com::android::media::audio::dsa_over_bt_le_audio()) {
+ GTEST_SKIP() << "Skipping RequestedBleLatencyTest: DSA over LE not enabled";
+ }
+
+ mSpatializer->onSupportedLatencyModesChangedMsg(sTestOutput,
+ { AUDIO_LATENCY_MODE_DYNAMIC_SPATIAL_AUDIO_SOFTWARE,
+ AUDIO_LATENCY_MODE_FREE });
+
+ // requested latency mode must be free if no spatialized tracks are active
+ audio_latency_mode_t requestedLatencyMode = mSpatializer->getRequestedLatencyMode();
+ ASSERT_EQ(requestedLatencyMode, AUDIO_LATENCY_MODE_FREE);
+
+ // requested latency mode must be low software if at least one spatialized track is active
+ // and the only supported low latency mode is low software
+ mSpatializer->updateActiveTracks(1);
+ requestedLatencyMode = mSpatializer->getRequestedLatencyMode();
+ ASSERT_EQ(requestedLatencyMode, AUDIO_LATENCY_MODE_DYNAMIC_SPATIAL_AUDIO_SOFTWARE);
+
+ mSpatializer->onSupportedLatencyModesChangedMsg(sTestOutput,
+ { AUDIO_LATENCY_MODE_DYNAMIC_SPATIAL_AUDIO_SOFTWARE,
+ AUDIO_LATENCY_MODE_DYNAMIC_SPATIAL_AUDIO_HARDWARE,
+ AUDIO_LATENCY_MODE_FREE });
+
+ requestedLatencyMode = mSpatializer->getRequestedLatencyMode();
+ HeadTracking::ConnectionMode connectionMode = mSpatializer->getHeadtrackingConnectionMode();
+
+ // If low hardware mode is used, the spatializer must use one of the sensor
+ // connection tunneled modes.
+ // Otherwise, low software mode must be used.
+ if (requestedLatencyMode == AUDIO_LATENCY_MODE_DYNAMIC_SPATIAL_AUDIO_HARDWARE) {
+ ASSERT_TRUE(connectionMode == HeadTracking::ConnectionMode::DIRECT_TO_SENSOR_TUNNEL
+ || connectionMode == HeadTracking::ConnectionMode::DIRECT_TO_SENSOR_SW);
+ } else {
+ ASSERT_EQ(requestedLatencyMode, AUDIO_LATENCY_MODE_DYNAMIC_SPATIAL_AUDIO_SOFTWARE);
+ }
+
+ // requested latency mode must be free after stopping the last spatialized track
+ mSpatializer->updateActiveTracks(0);
+ requestedLatencyMode = mSpatializer->getRequestedLatencyMode();
+ ASSERT_EQ(requestedLatencyMode, AUDIO_LATENCY_MODE_FREE);
+}
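
The spatializer_tests module defined above is a standard device-side GTest: on a device whose audio HAL exposes a spatializer effect it can typically be run with atest spatializer_tests, and it skips itself (GTEST_SKIP) when no spatializer, head-tracking, or DSA-over-LE support is present.
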
diff --git a/services/camera/libcameraservice/Android.bp b/services/camera/libcameraservice/Android.bp
index 5b76bb0..38476a4 100644
--- a/services/camera/libcameraservice/Android.bp
+++ b/services/camera/libcameraservice/Android.bp
@@ -72,8 +72,10 @@
"libsensorprivacy",
"libstagefright",
"libstagefright_foundation",
+ "libvendorsupport",
"libxml2",
"libyuv",
+ "android.companion.virtual.virtualdevice_aidl-cpp",
"android.companion.virtualdevice.flags-aconfig-cc",
"android.hardware.camera.common@1.0",
"android.hardware.camera.device@1.0",
@@ -86,6 +88,7 @@
"android.hardware.common-V2-ndk",
"android.hardware.common.fmq-V1-ndk",
"camera_platform_flags_c_lib",
+ "com.android.window.flags.window-aconfig_flags_c_lib",
"media_permission-aidl-cpp",
],
@@ -186,17 +189,17 @@
"aidl/AidlCameraServiceListener.cpp",
"aidl/AidlUtils.cpp",
"aidl/DeathPipe.cpp",
+ "utils/AttributionAndPermissionUtils.cpp",
"utils/CameraServiceProxyWrapper.cpp",
- "utils/CameraThreadState.cpp",
"utils/CameraTraces.cpp",
"utils/AutoConditionLock.cpp",
"utils/SchedulingPolicyUtils.cpp",
"utils/SessionConfigurationUtils.cpp",
"utils/SessionConfigurationUtilsHidl.cpp",
- "utils/SessionStatsBuilder.cpp",
"utils/TagMonitor.cpp",
"utils/LatencyHistogram.cpp",
"utils/Utils.cpp",
+ "utils/VirtualDeviceCameraIdMapper.cpp",
],
header_libs: [
@@ -228,7 +231,6 @@
"-Werror",
"-Wno-ignored-qualifiers",
],
-
}
cc_library_static {
@@ -245,6 +247,7 @@
"device3/ZoomRatioMapper.cpp",
"utils/ExifUtils.cpp",
"utils/SessionConfigurationUtilsHost.cpp",
+ "utils/SessionStatsBuilder.cpp",
],
header_libs: [
@@ -262,7 +265,7 @@
"liblog",
"libutils",
"libxml2",
- "camera_platform_flags_c_lib"
+ "camera_platform_flags_c_lib",
],
include_dirs: [
@@ -278,5 +281,4 @@
"-Werror",
"-Wno-ignored-qualifiers",
],
-
}
diff --git a/services/camera/libcameraservice/CameraService.cpp b/services/camera/libcameraservice/CameraService.cpp
index c67b5ed..7a2b434 100644
--- a/services/camera/libcameraservice/CameraService.cpp
+++ b/services/camera/libcameraservice/CameraService.cpp
@@ -38,7 +38,8 @@
#include <aidl/AidlCameraService.h>
#include <android-base/macros.h>
#include <android-base/parseint.h>
-#include <android/permission/PermissionChecker.h>
+#include <android_companion_virtualdevice_flags.h>
+#include <android/companion/virtualnative/IVirtualDeviceManagerNative.h>
#include <binder/ActivityManager.h>
#include <binder/AppOpsManager.h>
#include <binder/IPCThreadState.h>
@@ -73,6 +74,7 @@
#include <system/camera_metadata.h>
#include <binder/IServiceManager.h>
#include <binder/IActivityManager.h>
+#include <camera/CameraUtils.h>
#include <camera/StringUtils.h>
#include <system/camera.h>
@@ -81,7 +83,6 @@
#include "api1/Camera2Client.h"
#include "api2/CameraDeviceClient.h"
#include "utils/CameraServiceProxyWrapper.h"
-#include "utils/CameraThreadState.h"
#include "utils/CameraTraces.h"
#include "utils/SessionConfigurationUtils.h"
#include "utils/TagMonitor.h"
@@ -93,7 +94,18 @@
const char* kSensorPrivacyServiceName = "sensor_privacy";
const char* kAppopsServiceName = "appops";
const char* kProcessInfoServiceName = "processinfo";
-}; // namespace anonymous
+ const char* kVirtualDeviceBackCameraId = "0";
+ const char* kVirtualDeviceFrontCameraId = "1";
+
+ int32_t getDeviceId(const android::CameraMetadata& cameraInfo) {
+ if (!cameraInfo.exists(ANDROID_INFO_DEVICE_ID)) {
+ return android::kDefaultDeviceId;
+ }
+
+ const auto &deviceIdEntry = cameraInfo.find(ANDROID_INFO_DEVICE_ID);
+ return deviceIdEntry.data.i32[0];
+ }
+} // namespace anonymous
namespace android {
@@ -101,6 +113,7 @@
using namespace camera3::SessionConfigurationUtils;
using binder::Status;
+using companion::virtualnative::IVirtualDeviceManagerNative;
using frameworks::cameraservice::service::V2_0::implementation::HidlCameraService;
using frameworks::cameraservice::service::implementation::AidlCameraService;
using hardware::ICamera;
@@ -110,7 +123,9 @@
using hardware::camera2::ICameraInjectionSession;
using hardware::camera2::utils::CameraIdAndSessionConfiguration;
using hardware::camera2::utils::ConcurrentCameraIdCombination;
+
namespace flags = com::android::internal::camera::flags;
+namespace vd_flags = android::companion::virtualdevice::flags;
// ----------------------------------------------------------------------------
// Logging support -- this is for debugging only
@@ -130,20 +145,16 @@
// ----------------------------------------------------------------------------
-static const std::string sDumpPermission("android.permission.DUMP");
-static const std::string sManageCameraPermission("android.permission.MANAGE_CAMERA");
-static const std::string sCameraPermission("android.permission.CAMERA");
-static const std::string sSystemCameraPermission("android.permission.SYSTEM_CAMERA");
-static const std::string sCameraHeadlessSystemUserPermission(
- "android.permission.CAMERA_HEADLESS_SYSTEM_USER");
-static const std::string sCameraPrivacyAllowlistPermission(
- "android.permission.CAMERA_PRIVACY_ALLOWLIST");
-static const std::string
- sCameraSendSystemEventsPermission("android.permission.CAMERA_SEND_SYSTEM_EVENTS");
-static const std::string sCameraOpenCloseListenerPermission(
- "android.permission.CAMERA_OPEN_CLOSE_LISTENER");
-static const std::string
- sCameraInjectExternalCameraPermission("android.permission.CAMERA_INJECT_EXTERNAL_CAMERA");
+// Permission strings (references to AttributionAndPermissionUtils for brevity)
+static const std::string &sDumpPermission =
+ AttributionAndPermissionUtils::sDumpPermission;
+static const std::string &sManageCameraPermission =
+ AttributionAndPermissionUtils::sManageCameraPermission;
+static const std::string &sCameraSendSystemEventsPermission =
+ AttributionAndPermissionUtils::sCameraSendSystemEventsPermission;
+static const std::string &sCameraInjectExternalCameraPermission =
+ AttributionAndPermissionUtils::sCameraInjectExternalCameraPermission;
+
// Constant integer for FGS Logging, used to denote the API type for logger
static const int LOG_FGS_CAMERA_API = 1;
const char *sFileName = "lastOpenSessionDumpFile";
@@ -155,11 +166,17 @@
const std::string CameraService::kOfflineDevice("offline-");
const std::string CameraService::kWatchAllClientsFlag("all");
+constexpr int32_t kInvalidDeviceId = -1;
+
// Set to keep track of logged service error events.
static std::set<std::string> sServiceErrorEventSet;
CameraService::CameraService(
- std::shared_ptr<CameraServiceProxyWrapper> cameraServiceProxyWrapper) :
+ std::shared_ptr<CameraServiceProxyWrapper> cameraServiceProxyWrapper,
+ std::shared_ptr<AttributionAndPermissionUtils> attributionAndPermissionUtils) :
+ AttributionAndPermissionUtilsEncapsulator(attributionAndPermissionUtils == nullptr ?
+ std::make_shared<AttributionAndPermissionUtils>()\
+ : attributionAndPermissionUtils),
mCameraServiceProxyWrapper(cameraServiceProxyWrapper == nullptr ?
std::make_shared<CameraServiceProxyWrapper>() : cameraServiceProxyWrapper),
mEventLog(DEFAULT_EVENT_LOG_LENGTH),
@@ -168,6 +185,7 @@
mSoundRef(0), mInitialized(false),
mAudioRestriction(hardware::camera2::ICameraDeviceUser::AUDIO_RESTRICTION_NONE) {
ALOGI("CameraService started (pid=%d)", getpid());
+ mAttributionAndPermissionUtils->setCameraService(this);
mServiceLockWrapper = std::make_shared<WaitableMutexWrapper>(&mServiceLock);
mMemFd = memfd_create(sFileName, MFD_ALLOW_SEALING);
if (mMemFd == -1) {
@@ -175,12 +193,6 @@
}
}
-// The word 'System' here does not refer to clients only on the system
-// partition. They just need to have a android system uid.
-static bool doesClientHaveSystemUid() {
- return (CameraThreadState::getCallingUid() < AID_APP_START);
-}
-
// Enable processes with isolated AID to request the binder
void CameraService::instantiate() {
CameraService::publish(true);
@@ -197,7 +209,6 @@
void CameraService::onFirstRef()
{
-
ALOGI("CameraService process starting");
BnCameraService::onFirstRef();
@@ -216,7 +227,7 @@
mUidPolicy = new UidPolicy(this);
mUidPolicy->registerSelf();
- mSensorPrivacyPolicy = new SensorPrivacyPolicy(this);
+ mSensorPrivacyPolicy = new SensorPrivacyPolicy(this, mAttributionAndPermissionUtils);
mSensorPrivacyPolicy->registerSelf();
mInjectionStatusListener = new InjectionStatusListener(this);
@@ -269,7 +280,6 @@
}
}
-
// Setup vendor tags before we call get_camera_info the first time
// because HAL might need to setup static vendor keys in get_camera_info
// TODO: maybe put this into CameraProviderManager::initialize()?
@@ -287,7 +297,6 @@
deviceIds = mCameraProviderManager->getCameraDeviceIds(&unavailPhysicalIds);
}
-
for (auto& cameraId : deviceIds) {
if (getCameraState(cameraId) == nullptr) {
onDeviceStatusChanged(cameraId, CameraDeviceStatus::PRESENT);
@@ -316,6 +325,10 @@
void CameraService::broadcastTorchModeStatus(const std::string& cameraId, TorchModeStatus status,
SystemCameraKind systemCameraKind) {
+ // Get the device id and app-visible camera id for the given HAL-visible camera id.
+ auto [deviceId, mappedCameraId] =
+ mVirtualDeviceCameraIdMapper.getDeviceIdAndMappedCameraIdPair(cameraId);
+
Mutex::Autolock lock(mStatusListenerLock);
for (auto& i : mListenerList) {
if (shouldSkipStatusUpdates(systemCameraKind, i->isVendorListener(), i->getListenerPid(),
@@ -324,19 +337,11 @@
__FUNCTION__, cameraId.c_str());
continue;
}
+
auto ret = i->getListener()->onTorchStatusChanged(mapToInterface(status),
- cameraId);
+ mappedCameraId, deviceId);
i->handleBinderStatus(ret, "%s: Failed to trigger onTorchStatusChanged for %d:%d: %d",
__FUNCTION__, i->getListenerUid(), i->getListenerPid(), ret.exceptionCode());
- // Also trigger the torch callbacks for cameras that were remapped to the current cameraId
- // for the specific package that this listener belongs to.
- std::vector<std::string> remappedCameraIds =
- findOriginalIdsForRemappedCameraId(cameraId, i->getListenerUid());
- for (auto& remappedCameraId : remappedCameraIds) {
- ret = i->getListener()->onTorchStatusChanged(mapToInterface(status), remappedCameraId);
- i->handleBinderStatus(ret, "%s: Failed to trigger onTorchStatusChanged for %d:%d: %d",
- __FUNCTION__, i->getListenerUid(), i->getListenerPid(), ret.exceptionCode());
- }
}
}
@@ -354,10 +359,29 @@
void CameraService::filterAPI1SystemCameraLocked(
const std::vector<std::string> &normalDeviceIds) {
mNormalDeviceIdsWithoutSystemCamera.clear();
- for (auto &deviceId : normalDeviceIds) {
+ for (auto &cameraId : normalDeviceIds) {
+ if (vd_flags::camera_device_awareness()) {
+ CameraMetadata cameraInfo;
+ status_t res = mCameraProviderManager->getCameraCharacteristics(
+ cameraId, false, &cameraInfo,
+ hardware::ICameraService::ROTATION_OVERRIDE_NONE);
+ int32_t deviceId = kDefaultDeviceId;
+ if (res != OK) {
+ ALOGW("%s: Not able to get camera characteristics for camera id %s",
+ __FUNCTION__, cameraId.c_str());
+ } else {
+ deviceId = getDeviceId(cameraInfo);
+ }
+ // Cameras associated with non-default device id's (i.e., virtual cameras) can never be
+ // system cameras, so skip for non-default device id's.
+ if (deviceId != kDefaultDeviceId) {
+ continue;
+ }
+ }
+
SystemCameraKind deviceKind = SystemCameraKind::PUBLIC;
- if (getSystemCameraKind(deviceId, &deviceKind) != OK) {
- ALOGE("%s: Invalid camera id %s, skipping", __FUNCTION__, deviceId.c_str());
+ if (getSystemCameraKind(cameraId, &deviceKind) != OK) {
+ ALOGE("%s: Invalid camera id %s, skipping", __FUNCTION__, cameraId.c_str());
continue;
}
if (deviceKind == SystemCameraKind::SYSTEM_ONLY_CAMERA) {
@@ -365,7 +389,7 @@
// device ids as per the HAL interface contract.
break;
}
- mNormalDeviceIdsWithoutSystemCamera.push_back(deviceId);
+ mNormalDeviceIdsWithoutSystemCamera.push_back(cameraId);
}
ALOGV("%s: number of API1 compatible public cameras is %zu", __FUNCTION__,
mNormalDeviceIdsWithoutSystemCamera.size());
@@ -403,8 +427,9 @@
int facing = -1;
int orientation = 0;
int portraitRotation;
- getDeviceVersion(cameraId, /*overrideToPortrait*/false, /*out*/&portraitRotation,
- /*out*/&facing, /*out*/&orientation);
+ getDeviceVersion(cameraId,
+ /*rotationOverride*/hardware::ICameraService::ROTATION_OVERRIDE_NONE,
+ /*out*/&portraitRotation, /*out*/&facing, /*out*/&orientation);
if (facing == -1) {
ALOGE("%s: Unable to get camera device \"%s\" facing", __FUNCTION__, cameraId.c_str());
return;
@@ -519,10 +544,10 @@
if (newStatus == StatusInternal::NOT_PRESENT) {
logDeviceRemoved(cameraId, fmt::format("Device status changed from {} to {}",
oldStatus, newStatus));
-
// Set the device status to NOT_PRESENT, clients will no longer be able to connect
// to this device until the status changes
updateStatus(StatusInternal::NOT_PRESENT, cameraId);
+ mVirtualDeviceCameraIdMapper.removeCamera(cameraId);
sp<BasicClient> clientToDisconnectOnline, clientToDisconnectOffline;
{
@@ -604,7 +629,7 @@
continue;
}
auto ret = listener->getListener()->onPhysicalCameraStatusChanged(
- mapToInterface(newStatus), id, physicalId);
+ mapToInterface(newStatus), id, physicalId, kDefaultDeviceId);
listener->handleBinderStatus(ret,
"%s: Failed to trigger onPhysicalCameraStatusChanged for %d:%d: %d",
__FUNCTION__, listener->getListenerUid(), listener->getListenerPid(),
@@ -638,7 +663,6 @@
onTorchStatusChangedLocked(cameraId, newStatus, systemCameraKind);
}
-
void CameraService::onTorchStatusChanged(const std::string& cameraId,
TorchModeStatus newStatus, SystemCameraKind systemCameraKind) {
Mutex::Autolock al(mTorchStatusMutex);
@@ -647,9 +671,14 @@
void CameraService::broadcastTorchStrengthLevel(const std::string& cameraId,
int32_t newStrengthLevel) {
+ // Get the device id and app-visible camera id for the given HAL-visible camera id.
+ auto [deviceId, mappedCameraId] =
+ mVirtualDeviceCameraIdMapper.getDeviceIdAndMappedCameraIdPair(cameraId);
+
Mutex::Autolock lock(mStatusListenerLock);
for (auto& i : mListenerList) {
- auto ret = i->getListener()->onTorchStrengthLevelChanged(cameraId, newStrengthLevel);
+ auto ret = i->getListener()->onTorchStrengthLevelChanged(mappedCameraId,
+ newStrengthLevel, deviceId);
i->handleBinderStatus(ret,
"%s: Failed to trigger onTorchStrengthLevelChanged for %d:%d: %d", __FUNCTION__,
i->getListenerUid(), i->getListenerPid(), ret.exceptionCode());
@@ -709,34 +738,7 @@
broadcastTorchModeStatus(cameraId, newStatus, systemCameraKind);
}
-static bool isAutomotiveDevice() {
- // Checks the property ro.hardware.type and returns true if it is
- // automotive.
- char value[PROPERTY_VALUE_MAX] = {0};
- property_get("ro.hardware.type", value, "");
- return strncmp(value, "automotive", PROPERTY_VALUE_MAX) == 0;
-}
-
-static bool isHeadlessSystemUserMode() {
- // Checks if the device is running in headless system user mode
- // by checking the property ro.fw.mu.headless_system_user.
- char value[PROPERTY_VALUE_MAX] = {0};
- property_get("ro.fw.mu.headless_system_user", value, "");
- return strncmp(value, "true", PROPERTY_VALUE_MAX) == 0;
-}
-
-static bool isAutomotivePrivilegedClient(int32_t uid) {
- // Returns false if this is not an automotive device type.
- if (!isAutomotiveDevice())
- return false;
-
- // Returns true if the uid is AID_AUTOMOTIVE_EVS which is a
- // privileged client uid used for safety critical use cases such as
- // rear view and surround view.
- return uid == AID_AUTOMOTIVE_EVS;
-}
-
-bool CameraService::isAutomotiveExteriorSystemCamera(const std::string& cam_id) const{
+bool CameraService::isAutomotiveExteriorSystemCamera(const std::string& cam_id) const {
// Returns false if this is not an automotive device type.
if (!isAutomotiveDevice())
return false;
@@ -759,7 +761,7 @@
CameraMetadata cameraInfo;
status_t res = mCameraProviderManager->getCameraCharacteristics(
- cam_id, false, &cameraInfo, false);
+ cam_id, false, &cameraInfo, hardware::ICameraService::ROTATION_OVERRIDE_NONE);
if (res != OK){
ALOGE("%s: Not able to get camera characteristics for camera id %s",__FUNCTION__,
cam_id.c_str());
@@ -781,62 +783,19 @@
return true;
}
-bool CameraService::checkPermission(const std::string& cameraId, const std::string& permission,
- const AttributionSourceState& attributionSource, const std::string& message,
- int32_t attributedOpCode) const{
- if (isAutomotivePrivilegedClient(attributionSource.uid)) {
- // If cameraId is empty, then it means that this check is not used for the
- // purpose of accessing a specific camera, hence grant permission just
- // based on uid to the automotive privileged client.
- if (cameraId.empty())
- return true;
- // If this call is used for accessing a specific camera then cam_id must be provided.
- // In that case, only pre-grants the permission for accessing the exterior system only
- // camera.
- return isAutomotiveExteriorSystemCamera(cameraId);
+Status CameraService::getNumberOfCameras(int32_t type, int32_t deviceId, int32_t devicePolicy,
+ int32_t* numCameras) {
+ ATRACE_CALL();
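+ // For a virtual device with a custom camera policy, report only the cameras owned by that device.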
+ if (vd_flags::camera_device_awareness() && (deviceId != kDefaultDeviceId)
+ && (devicePolicy != IVirtualDeviceManagerNative::DEVICE_POLICY_DEFAULT)) {
+ *numCameras = mVirtualDeviceCameraIdMapper.getNumberOfCameras(deviceId);
+ return Status::ok();
}
-
- return mPermissionChecker->checkPermissionForPreflight(
- toString16(permission), attributionSource, toString16(message),
- attributedOpCode) != permission::PermissionChecker::PERMISSION_HARD_DENIED;
-}
-
-bool CameraService::hasPermissionsForSystemCamera(const std::string& cameraId, int callingPid,
- int callingUid) const{
- AttributionSourceState attributionSource{};
- attributionSource.pid = callingPid;
- attributionSource.uid = callingUid;
- bool checkPermissionForSystemCamera = checkPermission(cameraId,
- sSystemCameraPermission, attributionSource, std::string(), AppOpsManager::OP_NONE);
- bool checkPermissionForCamera = checkPermission(cameraId,
- sCameraPermission, attributionSource, std::string(), AppOpsManager::OP_NONE);
- return checkPermissionForSystemCamera && checkPermissionForCamera;
-}
-
-bool CameraService::hasPermissionsForCameraHeadlessSystemUser(const std::string& cameraId,
- int callingPid, int callingUid) const{
- AttributionSourceState attributionSource{};
- attributionSource.pid = callingPid;
- attributionSource.uid = callingUid;
- return checkPermission(cameraId, sCameraHeadlessSystemUserPermission, attributionSource,
- std::string(), AppOpsManager::OP_NONE);
-}
-
-bool CameraService::hasPermissionsForCameraPrivacyAllowlist(int callingPid, int callingUid) const{
- AttributionSourceState attributionSource{};
- attributionSource.pid = callingPid;
- attributionSource.uid = callingUid;
- return checkPermission(std::string(), sCameraPrivacyAllowlistPermission, attributionSource,
- std::string(), AppOpsManager::OP_NONE);
-}
-
-Status CameraService::getNumberOfCameras(int32_t type, int32_t* numCameras) {
- ATRACE_CALL();
Mutex::Autolock l(mServiceLock);
bool hasSystemCameraPermissions =
- hasPermissionsForSystemCamera(std::string(), CameraThreadState::getCallingPid(),
- CameraThreadState::getCallingUid());
+ hasPermissionsForSystemCamera(std::string(), getCallingPid(),
+ getCallingUid());
switch (type) {
case CAMERA_TYPE_BACKWARD_COMPATIBLE:
if (hasSystemCameraPermissions) {
@@ -861,25 +820,8 @@
return Status::ok();
}
-Status CameraService::remapCameraIds(const hardware::CameraIdRemapping& cameraIdRemapping) {
- if (!checkCallingPermission(toString16(sCameraInjectExternalCameraPermission))) {
- const int pid = CameraThreadState::getCallingPid();
- const int uid = CameraThreadState::getCallingUid();
- ALOGE("%s: Permission Denial: can't configure camera ID mapping pid=%d, uid=%d",
- __FUNCTION__, pid, uid);
- return STATUS_ERROR(ERROR_PERMISSION_DENIED,
- "Permission Denial: no permission to configure camera id mapping");
- }
- TCameraIdRemapping cameraIdRemappingMap{};
- binder::Status parseStatus = parseCameraIdRemapping(cameraIdRemapping, &cameraIdRemappingMap);
- if (!parseStatus.isOk()) {
- return parseStatus;
- }
- remapCameraIds(cameraIdRemappingMap);
- return Status::ok();
-}
-
Status CameraService::createDefaultRequest(const std::string& unresolvedCameraId, int templateId,
+ int32_t deviceId, int32_t devicePolicy,
/* out */
hardware::camera2::impl::CameraMetadataNative* request) {
ATRACE_CALL();
@@ -894,8 +836,15 @@
return STATUS_ERROR(ERROR_DISCONNECTED, "Camera subsystem is not available");
}
- const std::string cameraId = resolveCameraId(unresolvedCameraId,
- CameraThreadState::getCallingUid());
+ std::optional<std::string> cameraIdOptional = resolveCameraId(unresolvedCameraId, deviceId,
+ devicePolicy);
+ if (!cameraIdOptional.has_value()) {
+ std::string msg = fmt::sprintf("Camera %s: Invalid camera id for device id %d",
+ unresolvedCameraId.c_str(), deviceId);
+ ALOGE("%s: %s", __FUNCTION__, msg.c_str());
+ return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT, msg.c_str());
+ }
+ std::string cameraId = cameraIdOptional.value();
binder::Status res;
if (request == nullptr) {
@@ -934,10 +883,10 @@
}
Status CameraService::isSessionConfigurationWithParametersSupported(
- const std::string& unresolvedCameraId,
+ const std::string& unresolvedCameraId, int targetSdkVersion,
const SessionConfiguration& sessionConfiguration,
- /*out*/
- bool* supported) {
+ int32_t deviceId, int32_t devicePolicy,
+ /*out*/ bool* supported) {
ATRACE_CALL();
if (!flags::feature_combination_query()) {
@@ -950,8 +899,16 @@
return STATUS_ERROR(ERROR_DISCONNECTED, "Camera subsystem is not available");
}
- const std::string cameraId = resolveCameraId(unresolvedCameraId,
- CameraThreadState::getCallingUid());
+ std::optional<std::string> cameraIdOptional = resolveCameraId(unresolvedCameraId, deviceId,
+ devicePolicy);
+ if (!cameraIdOptional.has_value()) {
+ std::string msg = fmt::sprintf("Camera %s: Invalid camera id for device id %d",
+ unresolvedCameraId.c_str(), deviceId);
+ ALOGE("%s: %s", __FUNCTION__, msg.c_str());
+ return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT, msg.c_str());
+ }
+ std::string cameraId = cameraIdOptional.value();
+
if (supported == nullptr) {
std::string msg = fmt::sprintf("Camera %s: Invalid 'support' input!",
unresolvedCameraId.c_str());
@@ -965,51 +922,65 @@
cameraId.c_str());
}
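+ // Only apply the performance-class override when the corresponding flag is enabled and this
+ // camera is a perf-class primary camera for the caller's target SDK.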
+ bool overrideForPerfClass = flags::calculate_perf_override_during_session_support() &&
+ SessionConfigurationUtils::targetPerfClassPrimaryCamera(
+ mPerfClassPrimaryCameraIds, cameraId, targetSdkVersion);
+
+ return isSessionConfigurationWithParametersSupportedUnsafe(cameraId, sessionConfiguration,
+ overrideForPerfClass, supported);
+}
+
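+ // Shared helper: expects a camera id that has already been resolved by the caller and performs
+ // no further id resolution or permission checks.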
+Status CameraService::isSessionConfigurationWithParametersSupportedUnsafe(
+ const std::string& cameraId, const SessionConfiguration& sessionConfiguration,
+ bool overrideForPerfClass, /*out*/ bool* supported) {
*supported = false;
- status_t ret = mCameraProviderManager->isSessionConfigurationSupported(cameraId.c_str(),
- sessionConfiguration, /*mOverrideForPerfClass*/false, /*checkSessionParams*/true,
- supported);
+ status_t ret = mCameraProviderManager->isSessionConfigurationSupported(
+ cameraId, sessionConfiguration, overrideForPerfClass,
+ /*checkSessionParams=*/true, supported);
binder::Status res;
switch (ret) {
case OK:
- // Expected, do nothing.
- break;
+ // Expected. Do nothing.
+ return Status::ok();
case INVALID_OPERATION: {
std::string msg = fmt::sprintf(
- "Camera %s: Session configuration query not supported!",
+ "Camera %s: Session configuration with parameters supported query not "
+ "supported!",
cameraId.c_str());
- ALOGD("%s: %s", __FUNCTION__, msg.c_str());
- res = STATUS_ERROR(CameraService::ERROR_INVALID_OPERATION, msg.c_str());
+ ALOGW("%s: %s", __FUNCTION__, msg.c_str());
+ logServiceError(msg, CameraService::ERROR_INVALID_OPERATION);
+ *supported = false;
+ return STATUS_ERROR(CameraService::ERROR_INVALID_OPERATION, msg.c_str());
}
-
+ break;
+ case NAME_NOT_FOUND: {
+ std::string msg = fmt::sprintf("Camera %s: Unknown camera ID.", cameraId.c_str());
+ ALOGW("%s: %s", __FUNCTION__, msg.c_str());
+ logServiceError(msg, CameraService::ERROR_ILLEGAL_ARGUMENT);
+ *supported = false;
+ return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT, msg.c_str());
+ }
break;
default: {
- std::string msg = fmt::sprintf( "Camera %s: Error: %s (%d)", cameraId.c_str(),
- strerror(-ret), ret);
+ std::string msg = fmt::sprintf(
+ "Unable to retrieve session configuration support for camera "
+ "device %s: Error: %s (%d)",
+ cameraId.c_str(), strerror(-ret), ret);
ALOGE("%s: %s", __FUNCTION__, msg.c_str());
- res = STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT,
- msg.c_str());
+ logServiceError(msg, CameraService::ERROR_ILLEGAL_ARGUMENT);
+ *supported = false;
+ return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT, msg.c_str());
}
+ break;
}
-
- return res;
}
Status CameraService::getSessionCharacteristics(const std::string& unresolvedCameraId,
- int targetSdkVersion, bool overrideToPortrait,
- const SessionConfiguration& sessionConfiguration,
- /*out*/ CameraMetadata* outMetadata) {
+ int targetSdkVersion, int rotationOverride,
+ const SessionConfiguration& sessionConfiguration, int32_t deviceId, int32_t devicePolicy,
+ /*out*/ CameraMetadata* outMetadata) {
ATRACE_CALL();
- if (!mInitialized) {
- ALOGE("%s: Camera HAL couldn't be initialized", __FUNCTION__);
- logServiceError("Camera subsystem is not available", ERROR_DISCONNECTED);
- return STATUS_ERROR(ERROR_DISCONNECTED, "Camera subsystem is not available");
- }
-
- const std::string cameraId =
- resolveCameraId(unresolvedCameraId, CameraThreadState::getCallingUid());
-
if (outMetadata == nullptr) {
std::string msg =
fmt::sprintf("Camera %s: Invalid 'outMetadata' input!", unresolvedCameraId.c_str());
@@ -1017,16 +988,53 @@
return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT, msg.c_str());
}
+ if (!mInitialized) {
+ ALOGE("%s: Camera HAL couldn't be initialized", __FUNCTION__);
+ logServiceError("Camera subsystem is not available", ERROR_DISCONNECTED);
+ return STATUS_ERROR(ERROR_DISCONNECTED, "Camera subsystem is not available");
+ }
+
+ std::optional<std::string> cameraIdOptional = resolveCameraId(unresolvedCameraId, deviceId,
+ devicePolicy);
+ if (!cameraIdOptional.has_value()) {
+ std::string msg = fmt::sprintf("Camera %s: Invalid camera id for device id %d",
+ unresolvedCameraId.c_str(), deviceId);
+ ALOGE("%s: %s", __FUNCTION__, msg.c_str());
+ return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT, msg.c_str());
+ }
+ std::string cameraId = cameraIdOptional.value();
+
+ if (shouldRejectSystemCameraConnection(cameraId)) {
+ return STATUS_ERROR_FMT(ERROR_INVALID_OPERATION,
+ "Unable to retrieve camera"
+ "characteristics for system only device %s: ",
+ cameraId.c_str());
+ }
+
bool overrideForPerfClass = SessionConfigurationUtils::targetPerfClassPrimaryCamera(
mPerfClassPrimaryCameraIds, cameraId, targetSdkVersion);
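+ // When enabled, verify the session configuration is actually supported before querying its
+ // characteristics, so unsupported configurations fail fast with a clear error.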
+ if (flags::check_session_support_before_session_char()) {
+ bool sessionConfigSupported;
+ Status res = isSessionConfigurationWithParametersSupportedUnsafe(
+ cameraId, sessionConfiguration, overrideForPerfClass, &sessionConfigSupported);
+ if (!res.isOk()) {
+ // isSessionConfigurationWithParametersSupportedUnsafe should log what went wrong and
+ // report the correct Status to send to the client. Simply forward the error to
+ // the client.
+ outMetadata->clear();
+ return res;
+ }
+ if (!sessionConfigSupported) {
+ std::string msg = fmt::sprintf(
+ "Session configuration not supported for camera device %s.", cameraId.c_str());
+ outMetadata->clear();
+ return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT, msg.c_str());
+ }
+ }
status_t ret = mCameraProviderManager->getSessionCharacteristics(
- cameraId, sessionConfiguration, overrideForPerfClass, overrideToPortrait, outMetadata);
+ cameraId, sessionConfiguration, overrideForPerfClass, rotationOverride, outMetadata);
- // TODO(b/303645857): Remove fingerprintable metadata if the caller process does not have
- // camera access permission.
-
- Status res = Status::ok();
switch (ret) {
case OK:
// Expected, no handling needed.
@@ -1035,131 +1043,116 @@
std::string msg = fmt::sprintf(
"Camera %s: Session characteristics query not supported!",
cameraId.c_str());
- ALOGD("%s: %s", __FUNCTION__, msg.c_str());
- res = STATUS_ERROR(CameraService::ERROR_INVALID_OPERATION, msg.c_str());
+ ALOGW("%s: %s", __FUNCTION__, msg.c_str());
+ logServiceError(msg, CameraService::ERROR_INVALID_OPERATION);
+ outMetadata->clear();
+ return STATUS_ERROR(CameraService::ERROR_INVALID_OPERATION, msg.c_str());
+ }
+ break;
+ case NAME_NOT_FOUND: {
+ std::string msg = fmt::sprintf(
+ "Camera %s: Unknown camera ID.",
+ cameraId.c_str());
+ ALOGW("%s: %s", __FUNCTION__, msg.c_str());
+ logServiceError(msg, CameraService::ERROR_ILLEGAL_ARGUMENT);
+ outMetadata->clear();
+ return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT, msg.c_str());
}
break;
default: {
- std::string msg = fmt::sprintf("Camera %s: Error: %s (%d)", cameraId.c_str(),
- strerror(-ret), ret);
+ std::string msg = fmt::sprintf(
+ "Unable to retrieve session characteristics for camera device %s: "
+ "Error: %s (%d)",
+ cameraId.c_str(), strerror(-ret), ret);
ALOGE("%s: %s", __FUNCTION__, msg.c_str());
- res = STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT, msg.c_str());
+ logServiceError(msg, CameraService::ERROR_INVALID_OPERATION);
+ outMetadata->clear();
+ return STATUS_ERROR(CameraService::ERROR_INVALID_OPERATION, msg.c_str());
}
}
- return res;
+ return filterSensitiveMetadataIfNeeded(cameraId, outMetadata);
}
-Status CameraService::parseCameraIdRemapping(
- const hardware::CameraIdRemapping& cameraIdRemapping,
- /* out */ TCameraIdRemapping* cameraIdRemappingMap) {
- std::string packageName;
- std::string cameraIdToReplace, updatedCameraId;
- for(const auto& packageIdRemapping: cameraIdRemapping.packageIdRemappings) {
- packageName = packageIdRemapping.packageName;
- if (packageName.empty()) {
- return STATUS_ERROR(ERROR_ILLEGAL_ARGUMENT,
- "CameraIdRemapping: Package name cannot be empty");
- }
- if (packageIdRemapping.cameraIdsToReplace.size()
- != packageIdRemapping.updatedCameraIds.size()) {
- return STATUS_ERROR_FMT(ERROR_ILLEGAL_ARGUMENT,
- "CameraIdRemapping: Mismatch in CameraId Remapping lists sizes for package %s",
- packageName.c_str());
- }
- for(size_t i = 0; i < packageIdRemapping.cameraIdsToReplace.size(); i++) {
- cameraIdToReplace = packageIdRemapping.cameraIdsToReplace[i];
- updatedCameraId = packageIdRemapping.updatedCameraIds[i];
- if (cameraIdToReplace.empty() || updatedCameraId.empty()) {
- return STATUS_ERROR_FMT(ERROR_ILLEGAL_ARGUMENT,
- "CameraIdRemapping: Camera Id cannot be empty for package %s",
- packageName.c_str());
- }
- if (cameraIdToReplace == updatedCameraId) {
- return STATUS_ERROR_FMT(ERROR_ILLEGAL_ARGUMENT,
- "CameraIdRemapping: CameraIdToReplace cannot be the same"
- " as updatedCameraId for %s",
- packageName.c_str());
- }
- (*cameraIdRemappingMap)[packageName][cameraIdToReplace] = updatedCameraId;
+Status CameraService::filterSensitiveMetadataIfNeeded(
+ const std::string& cameraId, CameraMetadata* metadata) {
+ int callingPid = getCallingPid();
+ int callingUid = getCallingUid();
+
+ if (callingPid == getpid()) {
+ // Caller is cameraserver; no need to remove keys
+ return Status::ok();
+ }
+
+ SystemCameraKind deviceKind = SystemCameraKind::PUBLIC;
+ if (getSystemCameraKind(cameraId, &deviceKind) != OK) {
+ ALOGE("%s: Couldn't get camera kind for camera id %s", __FUNCTION__, cameraId.c_str());
+ metadata->clear();
+ return STATUS_ERROR_FMT(ERROR_INVALID_OPERATION,
+ "Unable to retrieve camera kind for device %s", cameraId.c_str());
+ }
+ if (deviceKind == SystemCameraKind::SYSTEM_ONLY_CAMERA) {
+ // Querying a system-only camera without the system camera permission would already have
+ // failed the shouldRejectSystemCameraConnection check in the caller. If we get here for a
+ // system-only camera, the caller holds the required permission, so no keys need removing.
+ return Status::ok();
+ }
+
+ std::vector<int32_t> tagsRemoved;
+ // Get the device id that owns this camera.
+ auto [cameraOwnerDeviceId, _] = mVirtualDeviceCameraIdMapper.getDeviceIdAndMappedCameraIdPair(
+ cameraId);
+ bool hasCameraPermission = hasPermissionsForCamera(cameraId, callingPid, callingUid,
+ cameraOwnerDeviceId);
+ if (hasCameraPermission) {
+ // Caller has camera permission; no need to remove keys
+ return Status::ok();
+ }
+
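+ // The caller lacks the camera permission: strip characteristics that require it, keeping
+ // track of the removed tags.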
+ status_t ret = metadata->removePermissionEntries(
+ mCameraProviderManager->getProviderTagIdLocked(cameraId), &tagsRemoved);
+ if (ret != OK) {
+ metadata->clear();
+ return STATUS_ERROR_FMT(ERROR_INVALID_OPERATION,
+ "Failed to remove camera characteristics needing camera permission "
+ "for device %s:%s (%d)",
+ cameraId.c_str(), strerror(-ret), ret);
+ }
+
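+ // Record the removed tags so clients can tell which keys require the camera permission.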
+ if (!tagsRemoved.empty()) {
+ ret = metadata->update(ANDROID_REQUEST_CHARACTERISTIC_KEYS_NEEDING_PERMISSION,
+ tagsRemoved.data(), tagsRemoved.size());
+ if (ret != OK) {
+ metadata->clear();
+ return STATUS_ERROR_FMT(
+ ERROR_INVALID_OPERATION,
+ "Failed to insert camera keys needing permission for device %s: %s (%d)",
+ cameraId.c_str(), strerror(-ret), ret);
}
}
return Status::ok();
}
-void CameraService::remapCameraIds(const TCameraIdRemapping& cameraIdRemapping) {
- // Acquire mServiceLock and prevent other clients from connecting
- std::unique_ptr<AutoConditionLock> serviceLockWrapper =
- AutoConditionLock::waitAndAcquire(mServiceLockWrapper);
-
- // Collect all existing clients for camera Ids that are being
- // remapped in the new cameraIdRemapping, but only if they were being used by a
- // targeted packageName.
- std::vector<sp<BasicClient>> clientsToDisconnect;
- std::vector<std::string> cameraIdsToUpdate;
- for (const auto& [packageName, injectionMap] : cameraIdRemapping) {
- for (auto& [id0, id1] : injectionMap) {
- ALOGI("%s: UPDATE:= %s: %s: %s", __FUNCTION__, packageName.c_str(),
- id0.c_str(), id1.c_str());
- auto clientDescriptor = mActiveClientManager.get(id0);
- if (clientDescriptor != nullptr) {
- sp<BasicClient> clientSp = clientDescriptor->getValue();
- if (clientSp->getPackageName() == packageName) {
- // This camera is being used by a targeted packageName and
- // being remapped to a new camera Id. We should disconnect it.
- clientsToDisconnect.push_back(clientSp);
- cameraIdsToUpdate.push_back(id0);
- }
- }
- }
- }
-
- for (auto& clientSp : clientsToDisconnect) {
- // Notify the clients about the disconnection.
- clientSp->notifyError(hardware::camera2::ICameraDeviceCallbacks::ERROR_CAMERA_DISCONNECTED,
- CaptureResultExtras{});
- }
-
- // Do not hold mServiceLock while disconnecting clients, but retain the condition
- // blocking other clients from connecting in mServiceLockWrapper if held.
- mServiceLock.unlock();
-
- // Clear calling identity for disconnect() PID checks.
- int64_t token = CameraThreadState::clearCallingIdentity();
-
- // Disconnect clients.
- for (auto& clientSp : clientsToDisconnect) {
- // This also triggers a call to updateStatus() which also reads mCameraIdRemapping
- // and requires mCameraIdRemappingLock.
- clientSp->disconnect();
- }
-
- // Invoke destructors (which call disconnect()) now while we don't hold the mServiceLock.
- clientsToDisconnect.clear();
-
- CameraThreadState::restoreCallingIdentity(token);
- mServiceLock.lock();
-
- {
- Mutex::Autolock lock(mCameraIdRemappingLock);
- // Update mCameraIdRemapping.
- mCameraIdRemapping.clear();
- mCameraIdRemapping.insert(cameraIdRemapping.begin(), cameraIdRemapping.end());
- }
-}
-
Status CameraService::injectSessionParams(
- const std::string& cameraId,
- const CameraMetadata& sessionParams) {
- if (!checkCallingPermission(toString16(sCameraInjectExternalCameraPermission))) {
- const int pid = CameraThreadState::getCallingPid();
- const int uid = CameraThreadState::getCallingUid();
+ const std::string& cameraId,
+ const CameraMetadata& sessionParams) {
+ if (!checkCallingPermission(toString16(sCameraInjectExternalCameraPermission))) {
+ const int pid = getCallingPid();
+ const int uid = getCallingUid();
ALOGE("%s: Permission Denial: can't inject session params pid=%d, uid=%d",
__FUNCTION__, pid, uid);
return STATUS_ERROR(ERROR_PERMISSION_DENIED,
"Permission Denial: no permission to inject session params");
}
+ // Do not allow session params injection for a virtual camera.
+ auto [deviceId, _] = mVirtualDeviceCameraIdMapper.getDeviceIdAndMappedCameraIdPair(cameraId);
+ if (deviceId != kDefaultDeviceId) {
+ return STATUS_ERROR(ERROR_ILLEGAL_ARGUMENT,
+ "Cannot inject session params for a virtual camera");
+ }
+
std::unique_ptr<AutoConditionLock> serviceLockWrapper =
AutoConditionLock::waitAndAcquire(mServiceLockWrapper);
@@ -1181,57 +1174,38 @@
return Status::ok();
}
-std::vector<std::string> CameraService::findOriginalIdsForRemappedCameraId(
- const std::string& inputCameraId, int clientUid) {
- std::string packageName = getPackageNameFromUid(clientUid);
- std::vector<std::string> cameraIds;
- Mutex::Autolock lock(mCameraIdRemappingLock);
- if (auto packageMapIter = mCameraIdRemapping.find(packageName);
- packageMapIter != mCameraIdRemapping.end()) {
- for (auto& [id0, id1]: packageMapIter->second) {
- if (id1 == inputCameraId) {
- cameraIds.push_back(id0);
- }
+std::optional<std::string> CameraService::resolveCameraId(
+ const std::string& inputCameraId,
+ int32_t deviceId,
+ int32_t devicePolicy) {
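+ // For the default device (or a default camera policy), the id must not refer to a virtual
+ // device's camera; otherwise resolve the id within the caller's device context.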
+ if ((deviceId == kDefaultDeviceId)
+ || (devicePolicy == IVirtualDeviceManagerNative::DEVICE_POLICY_DEFAULT)) {
+ auto [storedDeviceId, _] =
+ mVirtualDeviceCameraIdMapper.getDeviceIdAndMappedCameraIdPair(inputCameraId);
+ if (storedDeviceId != kDefaultDeviceId) {
+ // The camera belongs to a virtual device, but the caller is in a default-device or
+ // default-policy context; fail.
+ std::string msg = fmt::sprintf("Camera %s: Invalid camera id for device id %d",
+ inputCameraId.c_str(), deviceId);
+ ALOGE("%s: %s", __FUNCTION__, msg.c_str());
+ return std::nullopt;
}
- }
- return cameraIds;
-}
-
-std::string CameraService::resolveCameraId(
- const std::string& inputCameraId,
- int clientUid,
- const std::string& packageName) {
- std::string packageNameVal = packageName;
- if (packageName.empty()) {
- packageNameVal = getPackageNameFromUid(clientUid);
- }
- if (clientUid < AID_APP_START || packageNameVal.empty()) {
- // We shouldn't remap cameras for processes with system/vendor UIDs.
return inputCameraId;
}
- Mutex::Autolock lock(mCameraIdRemappingLock);
- if (auto packageMapIter = mCameraIdRemapping.find(packageNameVal);
- packageMapIter != mCameraIdRemapping.end()) {
- auto packageMap = packageMapIter->second;
- if (auto replacementIdIter = packageMap.find(inputCameraId);
- replacementIdIter != packageMap.end()) {
- ALOGI("%s: resolveCameraId: remapping cameraId %s for %s to %s",
- __FUNCTION__, inputCameraId.c_str(),
- packageNameVal.c_str(),
- replacementIdIter->second.c_str());
- return replacementIdIter->second;
- }
- }
- return inputCameraId;
+
+ return mVirtualDeviceCameraIdMapper.getActualCameraId(deviceId, inputCameraId);
}
-Status CameraService::getCameraInfo(int cameraId, bool overrideToPortrait,
- CameraInfo* cameraInfo) {
+Status CameraService::getCameraInfo(int cameraId, int rotationOverride, int32_t deviceId,
+ int32_t devicePolicy, CameraInfo* cameraInfo) {
ATRACE_CALL();
Mutex::Autolock l(mServiceLock);
- std::string unresolvedCameraId = cameraIdIntToStrLocked(cameraId);
- std::string cameraIdStr = resolveCameraId(
- unresolvedCameraId, CameraThreadState::getCallingUid());
+ std::string cameraIdStr = cameraIdIntToStrLocked(cameraId, deviceId, devicePolicy);
+ if (cameraIdStr.empty()) {
+ std::string msg = fmt::sprintf("Camera %d: Invalid camera id for device id %d",
+ cameraId, deviceId);
+ ALOGE("%s: %s", __FUNCTION__, msg.c_str());
+ return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT, msg.c_str());
+ }
if (shouldRejectSystemCameraConnection(cameraIdStr)) {
return STATUS_ERROR_FMT(ERROR_INVALID_OPERATION, "Unable to retrieve camera"
@@ -1244,7 +1218,7 @@
"Camera subsystem is not available");
}
bool hasSystemCameraPermissions = hasPermissionsForSystemCamera(std::to_string(cameraId),
- CameraThreadState::getCallingPid(), CameraThreadState::getCallingUid());
+ getCallingPid(), getCallingUid());
int cameraIdBound = mNumberOfCamerasWithoutSystemCamera;
if (hasSystemCameraPermissions) {
cameraIdBound = mNumberOfCameras;
@@ -1257,7 +1231,7 @@
Status ret = Status::ok();
int portraitRotation;
status_t err = mCameraProviderManager->getCameraInfo(
- cameraIdStr, overrideToPortrait, &portraitRotation, cameraInfo);
+ cameraIdStr, rotationOverride, &portraitRotation, cameraInfo);
if (err != OK) {
ret = STATUS_ERROR_FMT(ERROR_INVALID_OPERATION,
"Error retrieving camera info from device %d: %s (%d)", cameraId,
@@ -1269,40 +1243,43 @@
return ret;
}
-std::string CameraService::cameraIdIntToStrLocked(int cameraIdInt) {
- const std::vector<std::string> *deviceIds = &mNormalDeviceIdsWithoutSystemCamera;
- auto callingPid = CameraThreadState::getCallingPid();
- auto callingUid = CameraThreadState::getCallingUid();
- AttributionSourceState attributionSource{};
- attributionSource.pid = callingPid;
- attributionSource.uid = callingUid;
- bool checkPermissionForSystemCamera = checkPermission(std::to_string(cameraIdInt),
- sSystemCameraPermission, attributionSource, std::string(),
- AppOpsManager::OP_NONE);
- if (checkPermissionForSystemCamera || getpid() == callingPid) {
- deviceIds = &mNormalDeviceIds;
+std::string CameraService::cameraIdIntToStrLocked(int cameraIdInt,
+ int32_t deviceId, int32_t devicePolicy) {
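+ // For a virtual device with a custom camera policy, map the index through the virtual device
+ // camera id mapper.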
+ if (vd_flags::camera_device_awareness() && (deviceId != kDefaultDeviceId)
+ && (devicePolicy != IVirtualDeviceManagerNative::DEVICE_POLICY_DEFAULT)) {
+ std::optional<std::string> cameraIdOptional =
+ mVirtualDeviceCameraIdMapper.getActualCameraId(cameraIdInt, deviceId);
+ return cameraIdOptional.has_value() ? cameraIdOptional.value() : std::string{};
}
- if (cameraIdInt < 0 || cameraIdInt >= static_cast<int>(deviceIds->size())) {
- ALOGE("%s: input id %d invalid: valid range (0, %zu)",
- __FUNCTION__, cameraIdInt, deviceIds->size());
+
+ const std::vector<std::string> *cameraIds = &mNormalDeviceIdsWithoutSystemCamera;
+ auto callingPid = getCallingPid();
+ auto callingUid = getCallingUid();
+ bool systemCameraPermissions = hasPermissionsForSystemCamera(std::to_string(cameraIdInt),
+ callingPid, callingUid, /* checkCameraPermissions= */ false);
+ if (systemCameraPermissions || getpid() == callingPid) {
+ cameraIds = &mNormalDeviceIds;
+ }
+ if (cameraIdInt < 0 || cameraIdInt >= static_cast<int>(cameraIds->size())) {
+ ALOGE("%s: input id %d invalid: valid range (0, %zu)",
+ __FUNCTION__, cameraIdInt, cameraIds->size());
return std::string{};
}
- return (*deviceIds)[cameraIdInt];
+ return (*cameraIds)[cameraIdInt];
}
-std::string CameraService::cameraIdIntToStr(int cameraIdInt) {
+std::string CameraService::cameraIdIntToStr(int cameraIdInt, int32_t deviceId,
+ int32_t devicePolicy) {
Mutex::Autolock lock(mServiceLock);
- return cameraIdIntToStrLocked(cameraIdInt);
+ return cameraIdIntToStrLocked(cameraIdInt, deviceId, devicePolicy);
}
Status CameraService::getCameraCharacteristics(const std::string& unresolvedCameraId,
- int targetSdkVersion, bool overrideToPortrait, CameraMetadata* cameraInfo) {
+ int targetSdkVersion, int rotationOverride, int32_t deviceId, int32_t devicePolicy,
+ CameraMetadata* cameraInfo) {
ATRACE_CALL();
- const std::string cameraId = resolveCameraId(unresolvedCameraId,
- CameraThreadState::getCallingUid());
-
if (!cameraInfo) {
ALOGE("%s: cameraInfo is NULL", __FUNCTION__);
return STATUS_ERROR(ERROR_ILLEGAL_ARGUMENT, "cameraInfo is NULL");
@@ -1315,18 +1292,26 @@
"Camera subsystem is not available");;
}
+ std::optional<std::string> cameraIdOptional = resolveCameraId(unresolvedCameraId, deviceId,
+ devicePolicy);
+ if (!cameraIdOptional.has_value()) {
+ std::string msg = fmt::sprintf("Camera %s: Invalid camera id for device id %d",
+ unresolvedCameraId.c_str(), deviceId);
+ ALOGE("%s: %s", __FUNCTION__, msg.c_str());
+ return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT, msg.c_str());
+ }
+ std::string cameraId = cameraIdOptional.value();
+
if (shouldRejectSystemCameraConnection(cameraId)) {
return STATUS_ERROR_FMT(ERROR_INVALID_OPERATION, "Unable to retrieve camera"
"characteristics for system only device %s: ", cameraId.c_str());
}
- Status ret{};
-
bool overrideForPerfClass =
SessionConfigurationUtils::targetPerfClassPrimaryCamera(mPerfClassPrimaryCameraIds,
cameraId, targetSdkVersion);
status_t res = mCameraProviderManager->getCameraCharacteristics(
- cameraId, overrideForPerfClass, cameraInfo, overrideToPortrait);
+ cameraId, overrideForPerfClass, cameraInfo, rotationOverride);
if (res != OK) {
if (res == NAME_NOT_FOUND) {
return STATUS_ERROR_FMT(ERROR_ILLEGAL_ARGUMENT, "Unable to retrieve camera "
@@ -1340,63 +1325,31 @@
strerror(-res), res);
}
}
- SystemCameraKind deviceKind = SystemCameraKind::PUBLIC;
- if (getSystemCameraKind(cameraId, &deviceKind) != OK) {
- ALOGE("%s: Invalid camera id %s, skipping", __FUNCTION__, cameraId.c_str());
- return STATUS_ERROR_FMT(ERROR_INVALID_OPERATION, "Unable to retrieve camera kind "
- "for device %s", cameraId.c_str());
- }
- int callingPid = CameraThreadState::getCallingPid();
- int callingUid = CameraThreadState::getCallingUid();
- std::vector<int32_t> tagsRemoved;
- // If it's not calling from cameraserver, check the permission only if
- // android.permission.CAMERA is required. If android.permission.SYSTEM_CAMERA was needed,
- // it would've already been checked in shouldRejectSystemCameraConnection.
- AttributionSourceState attributionSource{};
- attributionSource.pid = callingPid;
- attributionSource.uid = callingUid;
- bool checkPermissionForCamera = checkPermission(cameraId, sCameraPermission,
- attributionSource, std::string(), AppOpsManager::OP_NONE);
- if ((callingPid != getpid()) &&
- (deviceKind != SystemCameraKind::SYSTEM_ONLY_CAMERA) &&
- !checkPermissionForCamera) {
- res = cameraInfo->removePermissionEntries(
- mCameraProviderManager->getProviderTagIdLocked(cameraId),
- &tagsRemoved);
- if (res != OK) {
- cameraInfo->clear();
- return STATUS_ERROR_FMT(ERROR_INVALID_OPERATION, "Failed to remove camera"
- " characteristics needing camera permission for device %s: %s (%d)",
- cameraId.c_str(), strerror(-res), res);
- }
- }
- if (!tagsRemoved.empty()) {
- res = cameraInfo->update(ANDROID_REQUEST_CHARACTERISTIC_KEYS_NEEDING_PERMISSION,
- tagsRemoved.data(), tagsRemoved.size());
- if (res != OK) {
- cameraInfo->clear();
- return STATUS_ERROR_FMT(ERROR_INVALID_OPERATION, "Failed to insert camera "
- "keys needing permission for device %s: %s (%d)", cameraId.c_str(),
- strerror(-res), res);
- }
- }
-
- return ret;
+ return filterSensitiveMetadataIfNeeded(cameraId, cameraInfo);
}
-Status CameraService::getTorchStrengthLevel(const std::string& unresolvedCameraId,
- int32_t* torchStrength) {
+Status CameraService::getTorchStrengthLevel(const std::string& unresolvedCameraId, int32_t deviceId,
+ int32_t devicePolicy, int32_t* torchStrength) {
ATRACE_CALL();
Mutex::Autolock l(mServiceLock);
- const std::string cameraId = resolveCameraId(
- unresolvedCameraId, CameraThreadState::getCallingUid());
+
+ std::optional<std::string> cameraIdOptional = resolveCameraId(unresolvedCameraId, deviceId,
+ devicePolicy);
+ if (!cameraIdOptional.has_value()) {
+ std::string msg = fmt::sprintf("Camera %s: Invalid camera id for device id %d",
+ unresolvedCameraId.c_str(), deviceId);
+ ALOGE("%s: %s", __FUNCTION__, msg.c_str());
+ return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT, msg.c_str());
+ }
+ std::string cameraId = cameraIdOptional.value();
+
if (!mInitialized) {
ALOGE("%s: Camera HAL couldn't be initialized.", __FUNCTION__);
return STATUS_ERROR(ERROR_DISCONNECTED, "Camera HAL couldn't be initialized.");
}
- if(torchStrength == NULL) {
+ if (torchStrength == NULL) {
ALOGE("%s: strength level must not be null.", __FUNCTION__);
return STATUS_ERROR(ERROR_ILLEGAL_ARGUMENT, "Strength level should not be null.");
}
@@ -1454,7 +1407,8 @@
}
std::pair<int, IPCTransport> CameraService::getDeviceVersion(const std::string& cameraId,
- bool overrideToPortrait, int* portraitRotation, int* facing, int* orientation) {
+ int rotationOverride, int* portraitRotation, int* facing,
+ int* orientation) {
ATRACE_CALL();
int deviceVersion = 0;
@@ -1472,7 +1426,7 @@
hardware::CameraInfo info;
if (facing) {
- res = mCameraProviderManager->getCameraInfo(cameraId, overrideToPortrait,
+ res = mCameraProviderManager->getCameraInfo(cameraId, rotationOverride,
portraitRotation, &info);
if (res != OK) {
return std::make_pair(-1, IPCTransport::INVALID);
@@ -1508,7 +1462,7 @@
const std::optional<std::string>& featureId, const std::string& cameraId,
int api1CameraId, int facing, int sensorOrientation, int clientPid, uid_t clientUid,
int servicePid, std::pair<int, IPCTransport> deviceVersionAndTransport,
- apiLevel effectiveApiLevel, bool overrideForPerfClass, bool overrideToPortrait,
+ apiLevel effectiveApiLevel, bool overrideForPerfClass, int rotationOverride,
bool forceSlowJpegMode, const std::string& originalCameraId,
/*out*/sp<BasicClient>* client) {
// For HIDL devices
@@ -1542,20 +1496,21 @@
if (effectiveApiLevel == API_1) { // Camera1 API route
sp<ICameraClient> tmp = static_cast<ICameraClient*>(cameraCb.get());
*client = new Camera2Client(cameraService, tmp, cameraService->mCameraServiceProxyWrapper,
- packageName, featureId, cameraId,
+ cameraService->mAttributionAndPermissionUtils, packageName, featureId, cameraId,
api1CameraId, facing, sensorOrientation,
- clientPid, clientUid, servicePid, overrideForPerfClass, overrideToPortrait,
+ clientPid, clientUid, servicePid, overrideForPerfClass, rotationOverride,
forceSlowJpegMode);
- ALOGI("%s: Camera1 API (legacy), override to portrait %d, forceSlowJpegMode %d",
- __FUNCTION__, overrideToPortrait, forceSlowJpegMode);
+ ALOGI("%s: Camera1 API (legacy), rotationOverride %d, forceSlowJpegMode %d",
+ __FUNCTION__, rotationOverride, forceSlowJpegMode);
} else { // Camera2 API route
sp<hardware::camera2::ICameraDeviceCallbacks> tmp =
static_cast<hardware::camera2::ICameraDeviceCallbacks*>(cameraCb.get());
*client = new CameraDeviceClient(cameraService, tmp,
- cameraService->mCameraServiceProxyWrapper, packageName, systemNativeClient,
+ cameraService->mCameraServiceProxyWrapper,
+ cameraService->mAttributionAndPermissionUtils, packageName, systemNativeClient,
featureId, cameraId, facing, sensorOrientation, clientPid, clientUid, servicePid,
- overrideForPerfClass, overrideToPortrait, originalCameraId);
- ALOGI("%s: Camera2 API, override to portrait %d", __FUNCTION__, overrideToPortrait);
+ overrideForPerfClass, rotationOverride, originalCameraId);
+ ALOGI("%s: Camera2 API, rotationOverride %d", __FUNCTION__, rotationOverride);
}
return Status::ok();
}
@@ -1635,7 +1590,7 @@
}
Status CameraService::initializeShimMetadata(int cameraId) {
- int uid = CameraThreadState::getCallingUid();
+ int uid = getCallingUid();
std::string cameraIdStr = std::to_string(cameraId);
Status ret = Status::ok();
@@ -1644,7 +1599,8 @@
sp<ICameraClient>{nullptr}, cameraIdStr, cameraId,
kServiceName, /*systemNativeClient*/ false, {}, uid, USE_CALLING_PID,
API_1, /*shimUpdateOnly*/ true, /*oomScoreOffset*/ 0,
- /*targetSdkVersion*/ __ANDROID_API_FUTURE__, /*overrideToPortrait*/ true,
+ /*targetSdkVersion*/ __ANDROID_API_FUTURE__,
+ /*rotationOverride*/hardware::ICameraService::ROTATION_OVERRIDE_OVERRIDE_TO_PORTRAIT,
/*forceSlowJpegMode*/false, cameraIdStr, /*out*/ tmp)
).isOk()) {
ALOGE("%s: Error initializing shim metadata: %s", __FUNCTION__, ret.toString8().c_str());
@@ -1665,9 +1621,7 @@
return STATUS_ERROR(ERROR_ILLEGAL_ARGUMENT, "Parameters must not be null");
}
- std::string unresolvedCameraId = std::to_string(cameraId);
- std::string cameraIdStr = resolveCameraId(unresolvedCameraId,
- CameraThreadState::getCallingUid());
+ std::string cameraIdStr = std::to_string(cameraId);
// Check if we already have parameters
{
@@ -1686,9 +1640,9 @@
}
}
- int64_t token = CameraThreadState::clearCallingIdentity();
+ int64_t token = clearCallingIdentity();
ret = initializeShimMetadata(cameraId);
- CameraThreadState::restoreCallingIdentity(token);
+ restoreCallingIdentity(token);
if (!ret.isOk()) {
// Error already logged by callee
return ret;
@@ -1716,38 +1670,6 @@
return STATUS_ERROR(ERROR_INVALID_OPERATION, "Unable to initialize legacy parameters");
}
-// Can camera service trust the caller based on the calling UID?
-static bool isTrustedCallingUid(uid_t uid) {
- switch (uid) {
- case AID_MEDIA: // mediaserver
- case AID_CAMERASERVER: // cameraserver
- case AID_RADIO: // telephony
- return true;
- default:
- return false;
- }
-}
-
-static status_t getUidForPackage(const std::string &packageName, int userId, /*inout*/uid_t& uid,
- int err) {
- PermissionController pc;
- uid = pc.getPackageUid(toString16(packageName), 0);
- if (uid <= 0) {
- ALOGE("Unknown package: '%s'", packageName.c_str());
- dprintf(err, "Unknown package: '%s'\n", packageName.c_str());
- return BAD_VALUE;
- }
-
- if (userId < 0) {
- ALOGE("Invalid user: %d", userId);
- dprintf(err, "Invalid user: %d\n", userId);
- return BAD_VALUE;
- }
-
- uid = multiuser_get_uid(userId, uid);
- return NO_ERROR;
-}
-
Status CameraService::validateConnectLocked(const std::string& cameraId,
const std::string& clientName8, /*inout*/int& clientUid, /*inout*/int& clientPid,
/*out*/int& originalClientPid) const {
@@ -1765,7 +1687,7 @@
}
#endif // __BRILLO__
- int callingPid = CameraThreadState::getCallingPid();
+ int callingPid = getCallingPid();
if (!mInitialized) {
ALOGE("CameraService::connect X (PID %d) rejected (camera HAL module not loaded)",
@@ -1799,10 +1721,8 @@
Status CameraService::validateClientPermissionsLocked(const std::string& cameraId,
const std::string& clientName, int& clientUid, int& clientPid,
/*out*/int& originalClientPid) const {
- AttributionSourceState attributionSource{};
-
- int callingPid = CameraThreadState::getCallingPid();
- int callingUid = CameraThreadState::getCallingUid();
+ int callingPid = getCallingPid();
+ int callingUid = getCallingUid();
// Check if we can trust clientUid
if (clientUid == USE_CALLING_UID) {
@@ -1814,7 +1734,7 @@
"Untrusted caller (calling PID %d, UID %d) trying to "
"forward camera access to camera %s for client %s (PID %d, UID %d)",
callingPid, callingUid, cameraId.c_str(),
- clientName.c_str(), clientUid, clientPid);
+ clientName.c_str(), clientPid, clientUid);
}
// Check if we can trust clientPid
@@ -1827,7 +1747,7 @@
"Untrusted caller (calling PID %d, UID %d) trying to "
"forward camera access to camera %s for client %s (PID %d, UID %d)",
callingPid, callingUid, cameraId.c_str(),
- clientName.c_str(), clientUid, clientPid);
+ clientName.c_str(), clientPid, clientUid);
}
if (shouldRejectSystemCameraConnection(cameraId)) {
@@ -1841,23 +1761,22 @@
ALOGE("%s: Invalid camera id %s, skipping", __FUNCTION__, cameraId.c_str());
return STATUS_ERROR_FMT(ERROR_ILLEGAL_ARGUMENT, "No camera device with ID \"%s\""
"found while trying to query device kind", cameraId.c_str());
-
}
+ // Get the device id that owns this camera.
+ auto [deviceId, _] = mVirtualDeviceCameraIdMapper.getDeviceIdAndMappedCameraIdPair(cameraId);
+
// If it's not calling from cameraserver, check the permission if the
// device isn't a system only camera (shouldRejectSystemCameraConnection already checks for
// android.permission.SYSTEM_CAMERA for system only camera devices).
- attributionSource.pid = clientPid;
- attributionSource.uid = clientUid;
- attributionSource.packageName = clientName;
- bool checkPermissionForCamera = checkPermission(cameraId, sCameraPermission, attributionSource,
- std::string(), AppOpsManager::OP_NONE);
+ bool checkPermissionForCamera =
+ hasPermissionsForCamera(cameraId, clientPid, clientUid, clientName, deviceId);
if (callingPid != getpid() &&
(deviceKind != SystemCameraKind::SYSTEM_ONLY_CAMERA) && !checkPermissionForCamera) {
ALOGE("Permission Denial: can't use the camera pid=%d, uid=%d", clientPid, clientUid);
return STATUS_ERROR_FMT(ERROR_PERMISSION_DENIED,
"Caller \"%s\" (PID %d, UID %d) cannot open camera \"%s\" without camera permission",
- clientName.c_str(), clientUid, clientPid, cameraId.c_str());
+ clientName.c_str(), clientPid, clientUid, cameraId.c_str());
}
// Make sure the UID is in an active state to use the camera
@@ -1868,7 +1787,7 @@
return STATUS_ERROR_FMT(ERROR_DISABLED,
"Caller \"%s\" (PID %d, UID %d) cannot open camera \"%s\" from background ("
"calling UID %d proc state %" PRId32 ")",
- clientName.c_str(), clientUid, clientPid, cameraId.c_str(),
+ clientName.c_str(), clientPid, clientUid, cameraId.c_str(),
callingUid, procState);
}
@@ -1881,7 +1800,7 @@
ALOGE("Access Denial: cannot use the camera when sensor privacy is enabled");
return STATUS_ERROR_FMT(ERROR_DISABLED,
"Caller \"%s\" (PID %d, UID %d) cannot open camera \"%s\" when sensor privacy "
- "is enabled", clientName.c_str(), clientUid, clientPid, cameraId.c_str());
+ "is enabled", clientName.c_str(), clientPid, clientUid, cameraId.c_str());
}
// Only use passed in clientPid to check permission. Use calling PID as the client PID that's
@@ -1893,7 +1812,7 @@
// For non-system clients : Only allow clients who are being used by the current foreground
// device user, unless calling from our own process.
- if (!doesClientHaveSystemUid() && callingPid != getpid() &&
+ if (!callerHasSystemUid() && callingPid != getpid() &&
(mAllowedUsers.find(clientUserId) == mAllowedUsers.end())) {
ALOGE("CameraService::connect X (PID %d) rejected (cannot connect from "
"device user %d, currently allowed device users: %s)", callingPid, clientUserId,
@@ -1907,13 +1826,14 @@
// If the System User tries to access the camera when the device is running in
// headless system user mode, ensure that client has the required permission
// CAMERA_HEADLESS_SYSTEM_USER.
- if (isHeadlessSystemUserMode() && (clientUserId == USER_SYSTEM) &&
- !hasPermissionsForCameraHeadlessSystemUser(cameraId, callingPid, callingUid)) {
+ if (isHeadlessSystemUserMode()
+ && (clientUserId == USER_SYSTEM)
+ && !hasPermissionsForCameraHeadlessSystemUser(cameraId, callingPid, callingUid)) {
ALOGE("Permission Denial: can't use the camera pid=%d, uid=%d", clientPid, clientUid);
return STATUS_ERROR_FMT(ERROR_PERMISSION_DENIED,
"Caller \"%s\" (PID %d, UID %d) cannot open camera \"%s\" as Headless System \
User without camera headless system user permission",
- clientName.c_str(), clientUid, clientPid, cameraId.c_str());
+ clientName.c_str(), clientPid, clientUid, cameraId.c_str());
}
}
@@ -1922,7 +1842,7 @@
status_t CameraService::checkIfDeviceIsUsable(const std::string& cameraId) const {
auto cameraState = getCameraState(cameraId);
- int callingPid = CameraThreadState::getCallingPid();
+ int callingPid = getCallingPid();
if (cameraState == nullptr) {
ALOGE("CameraService::connect X (PID %d) rejected (invalid camera ID %s)", callingPid,
cameraId.c_str());
@@ -2020,7 +1940,7 @@
sp<IServiceManager> sm = defaultServiceManager();
sp<IBinder> binder = sm->checkService(String16(kProcessInfoServiceName));
- if (!binder && isAutomotivePrivilegedClient(CameraThreadState::getCallingUid())) {
+ if (!binder && isAutomotivePrivilegedClient(getCallingUid())) {
// If processinfo service is not available and the client is automotive privileged
// client used for safety critical uses cases such as rear-view and surround-view which
// needs to be available before android boot completes, then use the hardcoded values
@@ -2153,7 +2073,7 @@
mServiceLock.unlock();
// Clear caller identity temporarily so client disconnect PID checks work correctly
- int64_t token = CameraThreadState::clearCallingIdentity();
+ int64_t token = clearCallingIdentity();
// Destroy evicted clients
for (auto& i : evictedClients) {
@@ -2161,7 +2081,7 @@
i->getValue()->disconnect(); // Clients will remove themselves from the active client list
}
- CameraThreadState::restoreCallingIdentity(token);
+ restoreCallingIdentity(token);
for (const auto& i : evictedClients) {
ALOGV("%s: Waiting for disconnect to complete for client for device %s (PID %" PRId32 ")",
@@ -2202,27 +2122,31 @@
int clientUid,
int clientPid,
int targetSdkVersion,
- bool overrideToPortrait,
+ int rotationOverride,
bool forceSlowJpegMode,
+ int32_t deviceId,
+ int32_t devicePolicy,
/*out*/
sp<ICamera>* device) {
-
ATRACE_CALL();
Status ret = Status::ok();
- std::string unresolvedCameraId = cameraIdIntToStr(api1CameraId);
- std::string cameraIdStr = resolveCameraId(unresolvedCameraId,
- CameraThreadState::getCallingUid());
+ std::string cameraIdStr = cameraIdIntToStr(api1CameraId, deviceId, devicePolicy);
+ if (cameraIdStr.empty()) {
+ std::string msg = fmt::sprintf("Camera %d: Invalid camera id for device id %d",
+ api1CameraId, deviceId);
+ ALOGE("%s: %s", __FUNCTION__, msg.c_str());
+ return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT, msg.c_str());
+ }
sp<Client> client = nullptr;
ret = connectHelper<ICameraClient,Client>(cameraClient, cameraIdStr, api1CameraId,
clientPackageName, /*systemNativeClient*/ false, {}, clientUid, clientPid, API_1,
/*shimUpdateOnly*/ false, /*oomScoreOffset*/ 0, targetSdkVersion,
- overrideToPortrait, forceSlowJpegMode, cameraIdStr, /*out*/client);
+ rotationOverride, forceSlowJpegMode, cameraIdStr, /*out*/client);
- if(!ret.isOk()) {
- logRejected(cameraIdStr, CameraThreadState::getCallingPid(), clientPackageName,
- toStdString(ret.toString8()));
+ if (!ret.isOk()) {
+ logRejected(cameraIdStr, getCallingPid(), clientPackageName, toStdString(ret.toString8()));
return ret;
}
@@ -2232,8 +2156,8 @@
const auto& mActivityManager = getActivityManager();
if (mActivityManager) {
mActivityManager->logFgsApiBegin(LOG_FGS_CAMERA_API,
- CameraThreadState::getCallingUid(),
- CameraThreadState::getCallingPid());
+ getCallingUid(),
+ getCallingPid());
}
return ret;
@@ -2263,9 +2187,9 @@
// and the serving thread is a non hwbinder thread, the client must have
// android.permission.SYSTEM_CAMERA permissions to connect.
- int cPid = CameraThreadState::getCallingPid();
- int cUid = CameraThreadState::getCallingUid();
- bool systemClient = doesClientHaveSystemUid();
+ int cPid = getCallingPid();
+ int cUid = getCallingUid();
+ bool systemClient = callerHasSystemUid();
SystemCameraKind systemCameraKind = SystemCameraKind::PUBLIC;
if (getSystemCameraKind(cameraId, &systemCameraKind) != OK) {
// This isn't a known camera ID, so it's not a system camera
@@ -2274,7 +2198,7 @@
}
// (1) Cameraserver trying to connect, accept.
- if (CameraThreadState::getCallingPid() == getpid()) {
+ if (isCallerCameraServerNotDelegating()) {
return false;
}
// (2)
@@ -2302,27 +2226,32 @@
const std::string& clientPackageName,
const std::optional<std::string>& clientFeatureId,
int clientUid, int oomScoreOffset, int targetSdkVersion,
- bool overrideToPortrait,
+ int rotationOverride, int32_t deviceId, int32_t devicePolicy,
/*out*/
sp<hardware::camera2::ICameraDeviceUser>* device) {
-
ATRACE_CALL();
RunThreadWithRealtimePriority priorityBump;
Status ret = Status::ok();
sp<CameraDeviceClient> client = nullptr;
std::string clientPackageNameAdj = clientPackageName;
- int callingPid = CameraThreadState::getCallingPid();
+ int callingPid = getCallingPid();
+ int callingUid = getCallingUid();
bool systemNativeClient = false;
- if (doesClientHaveSystemUid() && (clientPackageNameAdj.size() == 0)) {
- std::string systemClient =
- fmt::sprintf("client.pid<%d>", CameraThreadState::getCallingPid());
+ if (callerHasSystemUid() && (clientPackageNameAdj.size() == 0)) {
+ std::string systemClient = fmt::sprintf("client.pid<%d>", callingPid);
clientPackageNameAdj = systemClient;
systemNativeClient = true;
}
- const std::string cameraId = resolveCameraId(
- unresolvedCameraId,
- CameraThreadState::getCallingUid(),
- clientPackageNameAdj);
+
+ std::optional<std::string> cameraIdOptional = resolveCameraId(unresolvedCameraId, deviceId,
+ devicePolicy);
+ if (!cameraIdOptional.has_value()) {
+ std::string msg = fmt::sprintf("Camera %s: Invalid camera id for device id %d",
+ unresolvedCameraId.c_str(), deviceId);
+ ALOGE("%s: %s", __FUNCTION__, msg.c_str());
+ return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT, msg.c_str());
+ }
+ std::string cameraId = cameraIdOptional.value();
if (oomScoreOffset < 0) {
std::string msg =
@@ -2334,7 +2263,6 @@
}
userid_t clientUserId = multiuser_get_user_id(clientUid);
- int callingUid = CameraThreadState::getCallingUid();
if (clientUid == USE_CALLING_UID) {
clientUserId = multiuser_get_user_id(callingUid);
}
@@ -2351,8 +2279,8 @@
// enforce system camera permissions
if (oomScoreOffset > 0
&& !hasPermissionsForSystemCamera(cameraId, callingPid,
- CameraThreadState::getCallingUid())
- && !isTrustedCallingUid(CameraThreadState::getCallingUid())) {
+ callingUid)
+ && !isTrustedCallingUid(callingUid)) {
std::string msg = fmt::sprintf("Cannot change the priority of a client %s pid %d for "
"camera id %s without SYSTEM_CAMERA permissions",
clientPackageNameAdj.c_str(), callingPid, cameraId.c_str());
@@ -2363,10 +2291,10 @@
ret = connectHelper<hardware::camera2::ICameraDeviceCallbacks,CameraDeviceClient>(cameraCb,
cameraId, /*api1CameraId*/-1, clientPackageNameAdj, systemNativeClient, clientFeatureId,
clientUid, USE_CALLING_PID, API_2, /*shimUpdateOnly*/ false, oomScoreOffset,
- targetSdkVersion, overrideToPortrait, /*forceSlowJpegMode*/false, unresolvedCameraId,
+ targetSdkVersion, rotationOverride, /*forceSlowJpegMode*/false, unresolvedCameraId,
/*out*/client);
- if(!ret.isOk()) {
+ if (!ret.isOk()) {
logRejected(cameraId, callingPid, clientPackageNameAdj, toStdString(ret.toString8()));
return ret;
}
@@ -2392,8 +2320,8 @@
const auto& mActivityManager = getActivityManager();
if (mActivityManager) {
mActivityManager->logFgsApiBegin(LOG_FGS_CAMERA_API,
- CameraThreadState::getCallingUid(),
- CameraThreadState::getCallingPid());
+ callingUid,
+ callingPid);
}
return ret;
}
@@ -2416,12 +2344,8 @@
return true;
} else if (mSensorPrivacyPolicy->getCameraPrivacyState() == SensorPrivacyManager::DISABLED) {
return false;
- } else if ((mSensorPrivacyPolicy->getCameraPrivacyState()
- == SensorPrivacyManager::AUTOMOTIVE_DRIVER_ASSISTANCE_HELPFUL_APPS) ||
- (mSensorPrivacyPolicy->getCameraPrivacyState()
- == SensorPrivacyManager::AUTOMOTIVE_DRIVER_ASSISTANCE_REQUIRED_APPS) ||
- (mSensorPrivacyPolicy->getCameraPrivacyState() ==
- SensorPrivacyManager::AUTOMOTIVE_DRIVER_ASSISTANCE_APPS)) {
+ } else if (mSensorPrivacyPolicy->getCameraPrivacyState()
+ == SensorPrivacyManager::ENABLED_EXCEPT_ALLOWLISTED_APPS) {
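+ // Camera privacy is enabled except for allowlisted apps: only callers holding the
+ // privacy-allowlist permission may proceed.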
if (hasPermissionsForCameraPrivacyAllowlist(callingPid, callingUid)) {
return false;
} else {
@@ -2435,7 +2359,23 @@
std::string packageName("");
sp<IPermissionController> permCtrl;
- permCtrl = getPermissionController();
+ if (flags::cache_permission_services()) {
+ permCtrl = getPermissionController();
+ } else {
+ sp<IServiceManager> sm = defaultServiceManager();
+#pragma clang diagnostic push
+#pragma clang diagnostic ignored "-Wdeprecated-declarations"
+ // Using deprecated function to preserve functionality until the
+ // cache_permission_services flag is removed.
+ sp<IBinder> binder = sm->getService(toString16(kPermissionServiceName));
+#pragma clang diagnostic pop
+ if (binder == 0) {
+ ALOGE("Cannot get permission service");
+ permCtrl = nullptr;
+ } else {
+ permCtrl = interface_cast<IPermissionController>(binder);
+ }
+ }
if (permCtrl == nullptr) {
// Return empty package name and the further interaction
@@ -2465,14 +2405,14 @@
int api1CameraId, const std::string& clientPackageNameMaybe, bool systemNativeClient,
const std::optional<std::string>& clientFeatureId, int clientUid, int clientPid,
apiLevel effectiveApiLevel, bool shimUpdateOnly, int oomScoreOffset, int targetSdkVersion,
- bool overrideToPortrait, bool forceSlowJpegMode, const std::string& originalCameraId,
- /*out*/sp<CLIENT>& device) {
+ int rotationOverride, bool forceSlowJpegMode,
+ const std::string& originalCameraId, /*out*/sp<CLIENT>& device) {
binder::Status ret = binder::Status::ok();
bool isNonSystemNdk = false;
std::string clientPackageName;
int packageUid = (clientUid == USE_CALLING_UID) ?
- CameraThreadState::getCallingUid() : clientUid;
+ getCallingUid() : clientUid;
if (clientPackageNameMaybe.size() <= 0) {
// NDK calls don't come with package names, but we need one for various cases.
// Generally, there's a 1:1 mapping between UID and package name, but shared UIDs
@@ -2488,7 +2428,7 @@
int originalClientPid = 0;
int packagePid = (clientPid == USE_CALLING_PID) ?
- CameraThreadState::getCallingPid() : clientPid;
+ getCallingPid() : clientPid;
ALOGI("CameraService::connect call (PID %d \"%s\", camera ID %s) and "
"Camera API version %d", packagePid, clientPackageName.c_str(), cameraId.c_str(),
static_cast<int>(effectiveApiLevel));
@@ -2513,7 +2453,7 @@
}
// Enforce client permissions and do basic validity checks
- if(!(ret = validateConnectLocked(cameraId, clientPackageName,
+ if (!(ret = validateConnectLocked(cameraId, clientPackageName,
/*inout*/clientUid, /*inout*/clientPid, /*out*/originalClientPid)).isOk()) {
return ret;
}
@@ -2565,7 +2505,7 @@
int portraitRotation;
auto deviceVersionAndTransport =
- getDeviceVersion(cameraId, overrideToPortrait, /*out*/&portraitRotation,
+ getDeviceVersion(cameraId, rotationOverride, /*out*/&portraitRotation,
/*out*/&facing, /*out*/&orientation);
if (facing == -1) {
ALOGE("%s: Unable to get camera device \"%s\" facing", __FUNCTION__, cameraId.c_str());
@@ -2576,11 +2516,12 @@
sp<BasicClient> tmp = nullptr;
bool overrideForPerfClass = SessionConfigurationUtils::targetPerfClassPrimaryCamera(
mPerfClassPrimaryCameraIds, cameraId, targetSdkVersion);
+
if(!(ret = makeClient(this, cameraCb, clientPackageName, systemNativeClient,
clientFeatureId, cameraId, api1CameraId, facing,
orientation, clientPid, clientUid, getpid(),
deviceVersionAndTransport, effectiveApiLevel, overrideForPerfClass,
- overrideToPortrait, forceSlowJpegMode, originalCameraId,
+ rotationOverride, forceSlowJpegMode, originalCameraId,
/*out*/&tmp)).isOk()) {
return ret;
}
@@ -2644,7 +2585,7 @@
CameraMetadata chars;
bool rotateAndCropSupported = true;
err = mCameraProviderManager->getCameraCharacteristics(cameraId, overrideForPerfClass,
- &chars, overrideToPortrait);
+ &chars, rotationOverride);
if (err == OK) {
auto availableRotateCropEntry = chars.find(
ANDROID_SCALER_AVAILABLE_ROTATE_AND_CROP_MODES);
@@ -2660,7 +2601,8 @@
// Set rotate-and-crop override behavior
if (mOverrideRotateAndCropMode != ANDROID_SCALER_ROTATE_AND_CROP_AUTO) {
client->setRotateAndCropOverride(mOverrideRotateAndCropMode);
- } else if (overrideToPortrait && portraitRotation != 0) {
+ } else if (rotationOverride != hardware::ICameraService::ROTATION_OVERRIDE_NONE &&
+ portraitRotation != 0) {
uint8_t rotateAndCropMode = ANDROID_SCALER_ROTATE_AND_CROP_AUTO;
switch (portraitRotation) {
case 90:
@@ -2676,6 +2618,8 @@
ALOGE("Unexpected portrait rotation: %d", portraitRotation);
break;
}
+ // Here we're communicating to the client the chosen rotate
+ // and crop mode to send to the HAL
client->setRotateAndCropOverride(rotateAndCropMode);
} else {
client->setRotateAndCropOverride(
@@ -2725,11 +2669,11 @@
mServiceLock.unlock();
// Clear caller identity temporarily so client disconnect PID
// checks work correctly
- int64_t token = CameraThreadState::clearCallingIdentity();
+ int64_t token = clearCallingIdentity();
// Note AppOp to trigger the "Unblock" dialog
client->noteAppOp();
client->disconnect();
- CameraThreadState::restoreCallingIdentity(token);
+ restoreCallingIdentity(token);
// Reacquire mServiceLock
mServiceLock.lock();
@@ -2871,7 +2815,8 @@
}
Status CameraService::turnOnTorchWithStrengthLevel(const std::string& unresolvedCameraId,
- int32_t torchStrength, const sp<IBinder>& clientBinder) {
+ int32_t torchStrength, const sp<IBinder>& clientBinder, int32_t deviceId,
+ int32_t devicePolicy) {
Mutex::Autolock lock(mServiceLock);
ATRACE_CALL();
@@ -2881,8 +2826,17 @@
"Torch client binder in null.");
}
- int uid = CameraThreadState::getCallingUid();
- const std::string cameraId = resolveCameraId(unresolvedCameraId, uid);
+ int uid = getCallingUid();
+ std::optional<std::string> cameraIdOptional = resolveCameraId(unresolvedCameraId, deviceId,
+ devicePolicy);
+ if (!cameraIdOptional.has_value()) {
+ std::string msg = fmt::sprintf("Camera %s: Invalid camera id for device id %d",
+ unresolvedCameraId.c_str(), deviceId);
+ ALOGE("%s: %s", __FUNCTION__, msg.c_str());
+ return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT, msg.c_str());
+ }
+ std::string cameraId = cameraIdOptional.value();
+
if (shouldRejectSystemCameraConnection(cameraId)) {
return STATUS_ERROR_FMT(ERROR_ILLEGAL_ARGUMENT, "Unable to change the strength level "
"for system-only device %s: ", cameraId.c_str());
@@ -2989,7 +2943,7 @@
clientBinder->linkToDeath(this);
}
- int clientPid = CameraThreadState::getCallingPid();
+ int clientPid = getCallingPid();
ALOGI("%s: Torch strength for camera id %s changed to %d for client PID %d",
__FUNCTION__, cameraId.c_str(), torchStrength, clientPid);
if (!shouldSkipTorchStrengthUpdates) {
@@ -2999,7 +2953,7 @@
}
Status CameraService::setTorchMode(const std::string& unresolvedCameraId, bool enabled,
- const sp<IBinder>& clientBinder) {
+ const sp<IBinder>& clientBinder, int32_t deviceId, int32_t devicePolicy) {
Mutex::Autolock lock(mServiceLock);
ATRACE_CALL();
@@ -3009,8 +2963,16 @@
"Torch client Binder is null");
}
- int uid = CameraThreadState::getCallingUid();
- const std::string cameraId = resolveCameraId(unresolvedCameraId, uid);
+ int uid = getCallingUid();
+ std::optional<std::string> cameraIdOptional = resolveCameraId(unresolvedCameraId, deviceId,
+ devicePolicy);
+ if (!cameraIdOptional.has_value()) {
+ std::string msg = fmt::sprintf("Camera %s: Invalid camera id for device id %d",
+ unresolvedCameraId.c_str(), deviceId);
+ ALOGE("%s: %s", __FUNCTION__, msg.c_str());
+ return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT, msg.c_str());
+ }
+ std::string cameraId = cameraIdOptional.value();
if (shouldRejectSystemCameraConnection(cameraId)) {
return STATUS_ERROR_FMT(ERROR_ILLEGAL_ARGUMENT, "Unable to set torch mode"
@@ -3117,7 +3079,7 @@
}
}
- int clientPid = CameraThreadState::getCallingPid();
+ int clientPid = getCallingPid();
std::string torchState = enabled ? "on" : "off";
ALOGI("Torch for camera id %s turned %s for client PID %d", cameraId.c_str(),
torchState.c_str(), clientPid);
@@ -3137,7 +3099,7 @@
Status CameraService::notifySystemEvent(int32_t eventId,
const std::vector<int32_t>& args) {
- const int pid = CameraThreadState::getCallingPid();
+ const int pid = getCallingPid();
const int selfPid = getpid();
// Permission checks
@@ -3145,7 +3107,7 @@
// Ensure we're being called by system_server, or similar process with
// permissions to notify the camera service about system events
if (!checkCallingPermission(toString16(sCameraSendSystemEventsPermission))) {
- const int uid = CameraThreadState::getCallingUid();
+ const int uid = getCallingUid();
ALOGE("Permission Denial: cannot send updates to camera service about system"
" events from pid=%d, uid=%d", pid, uid);
return STATUS_ERROR_FMT(ERROR_PERMISSION_DENIED,
@@ -3211,7 +3173,7 @@
}
Status CameraService::notifyDeviceStateChange(int64_t newState) {
- const int pid = CameraThreadState::getCallingPid();
+ const int pid = getCallingPid();
const int selfPid = getpid();
// Permission checks
@@ -3219,7 +3181,7 @@
// Ensure we're being called by system_server, or similar process with
// permissions to notify the camera service about system events
if (!checkCallingPermission(toString16(sCameraSendSystemEventsPermission))) {
- const int uid = CameraThreadState::getCallingUid();
+ const int uid = getCallingUid();
ALOGE("Permission Denial: cannot send updates to camera service about device"
" state changes from pid=%d, uid=%d", pid, uid);
return STATUS_ERROR_FMT(ERROR_PERMISSION_DENIED,
@@ -3242,7 +3204,7 @@
Status CameraService::notifyDisplayConfigurationChange() {
ATRACE_CALL();
- const int callingPid = CameraThreadState::getCallingPid();
+ const int callingPid = getCallingPid();
const int selfPid = getpid();
// Permission checks
@@ -3250,7 +3212,7 @@
// Ensure we're being called by system_server, or similar process with
// permissions to notify the camera service about system events
if (!checkCallingPermission(toString16(sCameraSendSystemEventsPermission))) {
- const int uid = CameraThreadState::getCallingUid();
+ const int uid = getCallingUid();
ALOGE("Permission Denial: cannot send updates to camera service about orientation"
" changes from pid=%d, uid=%d", callingPid, uid);
return STATUS_ERROR_FMT(ERROR_PERMISSION_DENIED,
@@ -3300,7 +3262,8 @@
std::vector<std::unordered_set<std::string>> concurrentCameraCombinations =
mCameraProviderManager->getConcurrentCameraIds();
for (auto &combination : concurrentCameraCombinations) {
- std::vector<std::string> validCombination;
+ std::vector<std::pair<std::string, int32_t>> validCombination;
+ int32_t firstDeviceId = kInvalidDeviceId;
for (auto &cameraId : combination) {
// if the camera state is not present, skip
auto state = getCameraState(cameraId);
@@ -3315,7 +3278,17 @@
if (shouldRejectSystemCameraConnection(cameraId)) {
continue;
}
- validCombination.push_back(cameraId);
+ auto [cameraOwnerDeviceId, mappedCameraId] =
+ mVirtualDeviceCameraIdMapper.getDeviceIdAndMappedCameraIdPair(cameraId);
+ if (firstDeviceId == kInvalidDeviceId) {
+ firstDeviceId = cameraOwnerDeviceId;
+ } else if (firstDeviceId != cameraOwnerDeviceId) {
+ // Found an invalid combination that contains cameras with different device ids,
+ // so discard it.
+ validCombination.clear();
+ break;
+ }
+ validCombination.push_back({mappedCameraId, cameraOwnerDeviceId});
}
if (validCombination.size() != 0) {
concurrentCameraIds->push_back(std::move(validCombination));
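The loop above tags every camera in a concurrent combination with its owning device id and discards any combination that mixes cameras from different devices. The grouping rule can be sketched with plain standard-library types; ownerOf stands in for VirtualDeviceCameraIdMapper::getDeviceIdAndMappedCameraIdPair, and kInvalidDeviceId mirrors the service's sentinel.

// Sketch: keep a concurrent combination only if all cameras share one owning device.
#include <string>
#include <utility>
#include <vector>

constexpr int kInvalidDeviceId = -1;  // assumption: sentinel matching the service constant

// ownerOf is any callable mapping a HAL camera id to (deviceId, app-visible id).
template <typename OwnerFn>
std::vector<std::pair<std::string, int>> buildCombination(
        const std::vector<std::string>& halIds, OwnerFn ownerOf) {
    std::vector<std::pair<std::string, int>> combination;
    int firstDeviceId = kInvalidDeviceId;
    for (const auto& halId : halIds) {
        auto [deviceId, mappedId] = ownerOf(halId);
        if (firstDeviceId == kInvalidDeviceId) {
            firstDeviceId = deviceId;
        } else if (firstDeviceId != deviceId) {
            combination.clear();  // cameras span multiple devices, so drop the combination
            break;
        }
        combination.push_back({mappedId, deviceId});
    }
    return combination;
}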
@@ -3324,25 +3297,10 @@
return Status::ok();
}
-bool CameraService::hasCameraPermissions() const {
- int callingPid = CameraThreadState::getCallingPid();
- int callingUid = CameraThreadState::getCallingUid();
- AttributionSourceState attributionSource{};
- attributionSource.pid = callingPid;
- attributionSource.uid = callingUid;
- bool res = checkPermission(std::string(), sCameraPermission,
- attributionSource, std::string(), AppOpsManager::OP_NONE);
-
- bool hasPermission = ((callingPid == getpid()) || res);
- if (!hasPermission) {
- ALOGE("%s: pid %d doesn't have camera permissions", __FUNCTION__, callingPid);
- }
- return hasPermission;
-}
-
Status CameraService::isConcurrentSessionConfigurationSupported(
const std::vector<CameraIdAndSessionConfiguration>& cameraIdsAndSessionConfigurations,
- int targetSdkVersion, /*out*/bool* isSupported) {
+ int targetSdkVersion, int32_t deviceId, int32_t devicePolicy,
+ /*out*/bool* isSupported) {
if (!isSupported) {
ALOGE("%s: isSupported is NULL", __FUNCTION__);
return STATUS_ERROR(ERROR_ILLEGAL_ARGUMENT, "isSupported is NULL");
@@ -3354,8 +3312,26 @@
"Camera subsystem is not available");
}
+ for (auto cameraIdAndSessionConfiguration : cameraIdsAndSessionConfigurations) {
+ std::optional<std::string> cameraIdOptional =
+ resolveCameraId(cameraIdAndSessionConfiguration.mCameraId, deviceId, devicePolicy);
+ if (!cameraIdOptional.has_value()) {
+ std::string msg = fmt::sprintf("Camera %s: Invalid camera id for device id %d",
+ cameraIdAndSessionConfiguration.mCameraId.c_str(), deviceId);
+ ALOGE("%s: %s", __FUNCTION__, msg.c_str());
+ return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT, msg.c_str());
+ }
+ cameraIdAndSessionConfiguration.mCameraId = cameraIdOptional.value();
+ }
+
// Check for camera permissions
- if (!hasCameraPermissions()) {
+ int callingPid = getCallingPid();
+ int callingUid = getCallingUid();
+ bool hasCameraPermission = ((callingPid == getpid()) ||
+ hasPermissionsForCamera(callingPid, callingUid,
+ devicePolicy == IVirtualDeviceManagerNative::DEVICE_POLICY_DEFAULT
+ ? kDefaultDeviceId : deviceId));
+ if (!hasCameraPermission) {
return STATUS_ERROR(ERROR_PERMISSION_DENIED,
"android.permission.CAMERA needed to call"
"isConcurrentSessionConfigurationSupported");
@@ -3389,7 +3365,6 @@
/*out*/
std::vector<hardware::CameraStatus> *cameraStatuses,
bool isVendorListener, bool isProcessLocalTest) {
-
ATRACE_CALL();
ALOGV("%s: Add listener %p", __FUNCTION__, listener.get());
@@ -3399,15 +3374,9 @@
return STATUS_ERROR(ERROR_ILLEGAL_ARGUMENT, "Null listener given to addListener");
}
- auto clientUid = CameraThreadState::getCallingUid();
- auto clientPid = CameraThreadState::getCallingPid();
- AttributionSourceState attributionSource{};
- attributionSource.uid = clientUid;
- attributionSource.pid = clientPid;
-
- bool openCloseCallbackAllowed = checkPermission(std::string(),
- sCameraOpenCloseListenerPermission, attributionSource, std::string(),
- AppOpsManager::OP_NONE);
+ auto clientPid = getCallingPid();
+ auto clientUid = getCallingUid();
+ bool openCloseCallbackAllowed = hasPermissionsForOpenCloseListener(clientPid, clientUid);
Mutex::Autolock lock(mServiceLock);
@@ -3443,9 +3412,14 @@
{
Mutex::Autolock lock(mCameraStatesLock);
for (auto& i : mCameraStates) {
- cameraStatuses->emplace_back(i.first,
+ // Get the device id and app-visible camera id for the given HAL-visible camera id.
+ auto [deviceId, mappedCameraId] =
+ mVirtualDeviceCameraIdMapper.getDeviceIdAndMappedCameraIdPair(i.first);
+
+ cameraStatuses->emplace_back(mappedCameraId,
mapToInterface(i.second->getStatus()), i.second->getUnavailablePhysicalIds(),
- openCloseCallbackAllowed ? i.second->getClientPackage() : std::string());
+ openCloseCallbackAllowed ? i.second->getClientPackage() : std::string(),
+ deviceId);
}
}
// Remove the camera statuses that should be hidden from the client, we do
@@ -3454,19 +3428,37 @@
// the same time.
cameraStatuses->erase(std::remove_if(cameraStatuses->begin(), cameraStatuses->end(),
[this, &isVendorListener, &clientPid, &clientUid](const hardware::CameraStatus& s) {
- SystemCameraKind deviceKind = SystemCameraKind::PUBLIC;
- if (getSystemCameraKind(s.cameraId, &deviceKind) != OK) {
- ALOGE("%s: Invalid camera id %s, skipping status update",
- __FUNCTION__, s.cameraId.c_str());
- return true;
- }
- return shouldSkipStatusUpdates(deviceKind, isVendorListener, clientPid,
- clientUid);}), cameraStatuses->end());
+ std::string cameraId = s.cameraId;
+ std::optional<std::string> cameraIdOptional = resolveCameraId(s.cameraId,
+ s.deviceId, IVirtualDeviceManagerNative::DEVICE_POLICY_CUSTOM);
+ if (!cameraIdOptional.has_value()) {
+ std::string msg =
+ fmt::sprintf(
+ "Camera %s: Invalid camera id for device id %d",
+ s.cameraId.c_str(), s.deviceId);
+ ALOGE("%s: %s", __FUNCTION__, msg.c_str());
+ return true;
+ }
+ cameraId = cameraIdOptional.value();
+ SystemCameraKind deviceKind = SystemCameraKind::PUBLIC;
+ if (getSystemCameraKind(cameraId, &deviceKind) != OK) {
+ ALOGE("%s: Invalid camera id %s, skipping status update",
+ __FUNCTION__, s.cameraId.c_str());
+ return true;
+ }
+ return shouldSkipStatusUpdates(deviceKind, isVendorListener, clientPid,
+ clientUid);
+ }), cameraStatuses->end());
- //cameraStatuses will have non-eligible camera ids removed.
+ // cameraStatuses will have non-eligible camera ids removed.
std::set<std::string> idsChosenForCallback;
for (const auto &s : *cameraStatuses) {
- idsChosenForCallback.insert(s.cameraId);
+ // Only add default-device cameras here, since virtual cameras don't support torch yet.
+ // This is a simplification that should be revisited once virtual cameras gain
+ // torch support.
+ if (s.deviceId == kDefaultDeviceId) {
+ idsChosenForCallback.insert(s.cameraId);
+ }
}
/*
@@ -3480,7 +3472,8 @@
// The camera id is visible to the client. Fine to send torch
// callback.
if (idsChosenForCallback.find(id) != idsChosenForCallback.end()) {
- listener->onTorchStatusChanged(mapToInterface(mTorchStatusMap.valueAt(i)), id);
+ listener->onTorchStatusChanged(mapToInterface(mTorchStatusMap.valueAt(i)), id,
+ kDefaultDeviceId);
}
}
}
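addListener seeds the initial torch callbacks only with cameras owned by the default device, since virtual cameras do not expose torch units yet. A compact sketch of that filtering, with StatusSketch standing in for hardware::CameraStatus:

// Sketch: choose which camera ids receive the initial torch status callbacks.
#include <set>
#include <string>
#include <vector>

constexpr int kDefaultDeviceId = 0;  // assumption: id of the default device

struct StatusSketch {  // stand-in for hardware::CameraStatus
    std::string cameraId;
    int deviceId;
};

std::set<std::string> idsForTorchCallback(const std::vector<StatusSketch>& statuses) {
    std::set<std::string> ids;
    for (const auto& s : statuses) {
        if (s.deviceId == kDefaultDeviceId) {
            ids.insert(s.cameraId);  // virtual-device cameras are skipped: no torch support yet
        }
    }
    return ids;
}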
@@ -3543,13 +3536,10 @@
return ret;
}
-Status CameraService::supportsCameraApi(const std::string& unresolvedCameraId, int apiVersion,
+Status CameraService::supportsCameraApi(const std::string& cameraId, int apiVersion,
/*out*/ bool *isSupported) {
ATRACE_CALL();
- const std::string cameraId = resolveCameraId(
- unresolvedCameraId, CameraThreadState::getCallingUid());
-
ALOGV("%s: for camera ID = %s", __FUNCTION__, cameraId.c_str());
switch (apiVersion) {
@@ -3563,7 +3553,10 @@
}
int portraitRotation;
- auto deviceVersionAndTransport = getDeviceVersion(cameraId, false, &portraitRotation);
+ auto deviceVersionAndTransport =
+ getDeviceVersion(cameraId,
+ /*rotationOverride*/hardware::ICameraService::ROTATION_OVERRIDE_NONE,
+ &portraitRotation);
if (deviceVersionAndTransport.first == -1) {
std::string msg = fmt::sprintf("Unknown camera ID %s", cameraId.c_str());
ALOGE("%s: %s", __FUNCTION__, msg.c_str());
@@ -3608,13 +3601,10 @@
return Status::ok();
}
-Status CameraService::isHiddenPhysicalCamera(const std::string& unresolvedCameraId,
+Status CameraService::isHiddenPhysicalCamera(const std::string& cameraId,
/*out*/ bool *isSupported) {
ATRACE_CALL();
- const std::string cameraId = resolveCameraId(unresolvedCameraId,
- CameraThreadState::getCallingUid());
-
ALOGV("%s: for camera ID = %s", __FUNCTION__, cameraId.c_str());
*isSupported = mCameraProviderManager->isHiddenPhysicalCamera(cameraId);
@@ -3630,11 +3620,25 @@
ATRACE_CALL();
if (!checkCallingPermission(toString16(sCameraInjectExternalCameraPermission))) {
- const int pid = CameraThreadState::getCallingPid();
- const int uid = CameraThreadState::getCallingUid();
+ const int pid = getCallingPid();
+ const int uid = getCallingUid();
ALOGE("Permission Denial: can't inject camera pid=%d, uid=%d", pid, uid);
return STATUS_ERROR(ERROR_PERMISSION_DENIED,
- "Permission Denial: no permission to inject camera");
+ "Permission Denial: no permission to inject camera");
+ }
+
+ // Do not allow any camera injection that injects or replaces a virtual camera.
+ auto [deviceIdForInternalCamera, _] =
+ mVirtualDeviceCameraIdMapper.getDeviceIdAndMappedCameraIdPair(internalCamId);
+ if (deviceIdForInternalCamera != kDefaultDeviceId) {
+ return STATUS_ERROR(ICameraInjectionCallback::ERROR_INJECTION_UNSUPPORTED,
+ "Cannot replace a virtual camera");
+ }
+ [[maybe_unused]] auto [deviceIdForExternalCamera, unusedMappedCameraId] =
+ mVirtualDeviceCameraIdMapper.getDeviceIdAndMappedCameraIdPair(externalCamId);
+ if (deviceIdForExternalCamera != kDefaultDeviceId) {
+ return STATUS_ERROR(ICameraInjectionCallback::ERROR_INJECTION_UNSUPPORTED,
+ "Cannot inject a virtual camera to replace an internal camera");
}
ALOGV(
@@ -3664,7 +3668,7 @@
mInjectionExternalCamId.c_str());
}
res = clientSp->injectCamera(mInjectionExternalCamId, mCameraProviderManager);
- if(res != OK) {
+ if (res != OK) {
mInjectionStatusListener->notifyInjectionError(mInjectionExternalCamId, res);
}
} else {
@@ -3701,7 +3705,6 @@
std::unique_ptr<AutoConditionLock> lock =
AutoConditionLock::waitAndAcquire(mServiceLockWrapper);
-
std::vector<sp<BasicClient>> evicted;
for (auto& i : mActiveClientManager.getAll()) {
auto clientSp = i->getValue();
@@ -3833,13 +3836,13 @@
mServiceLock.unlock();
// Clear caller identity temporarily so client disconnect PID checks work correctly
- int64_t token = CameraThreadState::clearCallingIdentity();
+ int64_t token = clearCallingIdentity();
for (auto& i : evicted) {
i->disconnect();
}
- CameraThreadState::restoreCallingIdentity(token);
+ restoreCallingIdentity(token);
// Reacquire mServiceLock
mServiceLock.lock();
@@ -3970,7 +3973,6 @@
// We share the media players for shutter and recording sound for all clients.
// A reference count is kept to determine when we will actually release the
// media players.
-
sp<MediaPlayer> CameraService::newMediaPlayer(const char *file) {
sp<MediaPlayer> mp = new MediaPlayer();
status_t error;
@@ -4050,21 +4052,23 @@
CameraService::Client::Client(const sp<CameraService>& cameraService,
const sp<ICameraClient>& cameraClient,
+ std::shared_ptr<AttributionAndPermissionUtils> attributionAndPermissionUtils,
const std::string& clientPackageName, bool systemNativeClient,
const std::optional<std::string>& clientFeatureId,
const std::string& cameraIdStr,
int api1CameraId, int cameraFacing, int sensorOrientation,
int clientPid, uid_t clientUid,
- int servicePid, bool overrideToPortrait) :
+ int servicePid, int rotationOverride) :
CameraService::BasicClient(cameraService,
IInterface::asBinder(cameraClient),
+ attributionAndPermissionUtils,
clientPackageName, systemNativeClient, clientFeatureId,
cameraIdStr, cameraFacing, sensorOrientation,
clientPid, clientUid,
- servicePid, overrideToPortrait),
+ servicePid, rotationOverride),
mCameraId(api1CameraId)
{
- int callingPid = CameraThreadState::getCallingPid();
+ int callingPid = getCallingPid();
LOG1("Client::Client E (pid %d, id %d)", callingPid, mCameraId);
mRemoteCallback = cameraClient;
@@ -4088,10 +4092,12 @@
CameraService::BasicClient::BasicClient(const sp<CameraService>& cameraService,
const sp<IBinder>& remoteCallback,
+ std::shared_ptr<AttributionAndPermissionUtils> attributionAndPermissionUtils,
const std::string& clientPackageName, bool nativeClient,
const std::optional<std::string>& clientFeatureId, const std::string& cameraIdStr,
int cameraFacing, int sensorOrientation, int clientPid, uid_t clientUid,
- int servicePid, bool overrideToPortrait):
+ int servicePid, int rotationOverride):
+ AttributionAndPermissionUtilsEncapsulator(attributionAndPermissionUtils),
mDestructionStarted(false),
mCameraIdStr(cameraIdStr), mCameraFacing(cameraFacing), mOrientation(sensorOrientation),
mClientPackageName(clientPackageName), mSystemNativeClient(nativeClient),
@@ -4099,7 +4105,7 @@
mClientPid(clientPid), mClientUid(clientUid),
mServicePid(servicePid),
mDisconnected(false), mUidIsTrusted(false),
- mOverrideToPortrait(overrideToPortrait),
+ mRotationOverride(rotationOverride),
mAudioRestriction(hardware::camera2::ICameraDeviceUser::AUDIO_RESTRICTION_NONE),
mRemoteBinder(remoteCallback),
mOpsActive(false),
@@ -4161,8 +4167,8 @@
const auto& mActivityManager = getActivityManager();
if (mActivityManager) {
mActivityManager->logFgsApiEnd(LOG_FGS_CAMERA_API,
- CameraThreadState::getCallingUid(),
- CameraThreadState::getCallingPid());
+ getCallingUid(),
+ getCallingPid());
}
return res;
@@ -4172,7 +4178,7 @@
// No dumping of clients directly over Binder,
// must go through CameraService::dump
android_errorWriteWithInfoLog(SN_EVENT_LOG_ID, "26265403",
- CameraThreadState::getCallingUid(), NULL, 0);
+ getCallingUid(), NULL, 0);
return OK;
}
@@ -4270,8 +4276,10 @@
// connection has been fully established and at that time camera muting
// capabilities are unknown.
if (!isUidActive || !isCameraPrivacyEnabled) {
- ALOGI("Camera %s: Access for \"%s\" has been restricted",
- mCameraIdStr.c_str(), mClientPackageName.c_str());
+ ALOGI("Camera %s: Access for \"%s\" has been restricted."
+ "uid active: %s, privacy enabled: %s", mCameraIdStr.c_str(),
+ mClientPackageName.c_str(), isUidActive ? "true" : "false",
+ isCameraPrivacyEnabled ? "true" : "false");
// Return the same error as for device policy manager rejection
return -EACCES;
}
@@ -4505,7 +4513,7 @@
// Reset the client PID to allow server-initiated disconnect,
// and to prevent further calls by client.
- mClientPid = CameraThreadState::getCallingPid();
+ mClientPid = getCallingPid();
CaptureResultExtras resultExtras; // a dummy result (invalid)
notifyError(hardware::camera2::ICameraDeviceCallbacks::ERROR_CAMERA_DISABLED, resultExtras);
disconnect();
@@ -4938,7 +4946,7 @@
bool CameraService::SensorPrivacyPolicy::isCameraPrivacyEnabled(const String16& packageName) {
if (!hasCameraPrivacyFeature()) {
- return SensorPrivacyManager::DISABLED;
+ return false;
}
return mSpm.isCameraPrivacyEnabled(packageName);
}
@@ -4979,9 +4987,7 @@
// if sensor privacy is enabled then block all clients from accessing the camera
if (state == SensorPrivacyManager::ENABLED) {
service->blockAllClients();
- } else if ((state == SensorPrivacyManager::AUTOMOTIVE_DRIVER_ASSISTANCE_APPS)
- || (state == SensorPrivacyManager::AUTOMOTIVE_DRIVER_ASSISTANCE_HELPFUL_APPS)
- || (state == SensorPrivacyManager::AUTOMOTIVE_DRIVER_ASSISTANCE_REQUIRED_APPS)) {
+ } else if (state == SensorPrivacyManager::ENABLED_EXCEPT_ALLOWLISTED_APPS) {
service->blockPrivacyEnabledClients();
}
return binder::Status::ok();
@@ -5097,7 +5103,6 @@
}
}
-
// ----------------------------------------------------------------------------
// CameraClientManager
// ----------------------------------------------------------------------------
@@ -5339,8 +5344,8 @@
if (checkCallingPermission(toString16(sDumpPermission)) == false) {
dprintf(fd, "Permission Denial: can't dump CameraService from pid=%d, uid=%d\n",
- CameraThreadState::getCallingPid(),
- CameraThreadState::getCallingUid());
+ getCallingPid(),
+ getCallingUid());
return NO_ERROR;
}
bool locked = tryLock(mServiceLock);
@@ -5589,7 +5594,7 @@
* binder driver
*/
// PID here is approximate and can be wrong.
- logClientDied(CameraThreadState::getCallingPid(), "Binder died unexpectedly");
+ logClientDied(getCallingPid(), "Binder died unexpectedly");
// check torch client
handleTorchClientBinderDied(who);
@@ -5628,6 +5633,36 @@
return;
}
+ if (vd_flags::camera_device_awareness() && status == StatusInternal::PRESENT) {
+ CameraMetadata cameraInfo;
+ status_t res = mCameraProviderManager->getCameraCharacteristics(
+ cameraId, false, &cameraInfo, hardware::ICameraService::ROTATION_OVERRIDE_NONE);
+ if (res != OK) {
+ ALOGW("%s: Not able to get camera characteristics for camera id %s",
+ __FUNCTION__, cameraId.c_str());
+ } else {
+ int32_t deviceId = getDeviceId(cameraInfo);
+ if (deviceId != kDefaultDeviceId) {
+ const auto &lensFacingEntry = cameraInfo.find(ANDROID_LENS_FACING);
+ camera_metadata_enum_android_lens_facing_t androidLensFacing =
+ static_cast<camera_metadata_enum_android_lens_facing_t>(
+ lensFacingEntry.data.u8[0]);
+ std::string mappedCameraId;
+ if (androidLensFacing == ANDROID_LENS_FACING_BACK) {
+ mappedCameraId = kVirtualDeviceBackCameraId;
+ } else if (androidLensFacing == ANDROID_LENS_FACING_FRONT) {
+ mappedCameraId = kVirtualDeviceFrontCameraId;
+ } else {
+ ALOGD("%s: Not adding entry for an external camera of a virtual device",
+ __func__);
+ }
+ if (!mappedCameraId.empty()) {
+ mVirtualDeviceCameraIdMapper.addCamera(cameraId, deviceId, mappedCameraId);
+ }
+ }
+ }
+ }
+
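When the camera_device_awareness flag is set, a newly PRESENT camera that belongs to a virtual device is registered under a fixed app-visible id chosen by lens facing, and external cameras of virtual devices are skipped. A sketch of that decision follows; the enum and the returned ids are stand-ins for ANDROID_LENS_FACING_* and kVirtualDeviceBackCameraId / kVirtualDeviceFrontCameraId (which the header documents as "0" and "1").

// Sketch: pick the app-visible id for a virtual device's camera from its lens facing.
#include <optional>
#include <string>

enum class LensFacing { kFront, kBack, kExternal };  // stand-in for ANDROID_LENS_FACING_*

std::optional<std::string> mappedVirtualCameraId(LensFacing facing) {
    switch (facing) {
        case LensFacing::kBack:  return "0";           // back camera of the virtual device
        case LensFacing::kFront: return "1";           // front camera of the virtual device
        default:                 return std::nullopt;  // external cameras get no mapping
    }
}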
// Collect the logical cameras without holding mStatusLock in updateStatus
// as that can lead to a deadlock(b/162192331).
auto logicalCameraIds = getLogicalCameras(cameraId);
@@ -5636,55 +5671,47 @@
state->updateStatus(status, cameraId, rejectSourceStates, [this, &deviceKind,
&logicalCameraIds]
(const std::string& cameraId, StatusInternal status) {
+ // Get the device id and app-visible camera id for the given HAL-visible camera id.
+ auto [deviceId, mappedCameraId] =
+ mVirtualDeviceCameraIdMapper.getDeviceIdAndMappedCameraIdPair(cameraId);
- if (status != StatusInternal::ENUMERATING) {
- // Update torch status if it has a flash unit.
- Mutex::Autolock al(mTorchStatusMutex);
- TorchModeStatus torchStatus;
- if (getTorchStatusLocked(cameraId, &torchStatus) !=
- NAME_NOT_FOUND) {
- TorchModeStatus newTorchStatus =
- status == StatusInternal::PRESENT ?
- TorchModeStatus::AVAILABLE_OFF :
- TorchModeStatus::NOT_AVAILABLE;
- if (torchStatus != newTorchStatus) {
- onTorchStatusChangedLocked(cameraId, newTorchStatus, deviceKind);
+ if (status != StatusInternal::ENUMERATING) {
+ // Update torch status if it has a flash unit.
+ Mutex::Autolock al(mTorchStatusMutex);
+ TorchModeStatus torchStatus;
+ if (getTorchStatusLocked(cameraId, &torchStatus) !=
+ NAME_NOT_FOUND) {
+ TorchModeStatus newTorchStatus =
+ status == StatusInternal::PRESENT ?
+ TorchModeStatus::AVAILABLE_OFF :
+ TorchModeStatus::NOT_AVAILABLE;
+ if (torchStatus != newTorchStatus) {
+ onTorchStatusChangedLocked(cameraId, newTorchStatus, deviceKind);
+ }
}
}
- }
- Mutex::Autolock lock(mStatusListenerLock);
- notifyPhysicalCameraStatusLocked(mapToInterface(status), cameraId,
- logicalCameraIds, deviceKind);
+ Mutex::Autolock lock(mStatusListenerLock);
+ notifyPhysicalCameraStatusLocked(mapToInterface(status), mappedCameraId,
+ logicalCameraIds, deviceKind, deviceId);
- for (auto& listener : mListenerList) {
- bool isVendorListener = listener->isVendorListener();
- if (shouldSkipStatusUpdates(deviceKind, isVendorListener,
- listener->getListenerPid(), listener->getListenerUid())) {
- ALOGV("Skipping discovery callback for system-only camera device %s",
- cameraId.c_str());
- continue;
- }
- auto ret = listener->getListener()->onStatusChanged(mapToInterface(status),
- cameraId);
- listener->handleBinderStatus(ret,
- "%s: Failed to trigger onStatusChanged callback for %d:%d: %d",
- __FUNCTION__, listener->getListenerUid(), listener->getListenerPid(),
- ret.exceptionCode());
- // Also trigger the callbacks for cameras that were remapped to the current
- // cameraId for the specific package that this listener belongs to.
- std::vector<std::string> remappedCameraIds =
- findOriginalIdsForRemappedCameraId(cameraId, listener->getListenerUid());
- for (auto& remappedCameraId : remappedCameraIds) {
- ret = listener->getListener()->onStatusChanged(
- mapToInterface(status), remappedCameraId);
+ for (auto& listener : mListenerList) {
+ bool isVendorListener = listener->isVendorListener();
+ if (shouldSkipStatusUpdates(deviceKind, isVendorListener,
+ listener->getListenerPid(), listener->getListenerUid())) {
+ ALOGV("Skipping discovery callback for system-only camera device %s",
+ cameraId.c_str());
+ continue;
+ }
+
+ auto ret = listener->getListener()->onStatusChanged(mapToInterface(status),
+ mappedCameraId, deviceId);
listener->handleBinderStatus(ret,
- "%s: Failed to trigger onStatusChanged callback for %d:%d: %d",
+ "%s: Failed to trigger onStatusChanged callback for %d:%d: %d",
__FUNCTION__, listener->getListenerUid(), listener->getListenerPid(),
ret.exceptionCode());
}
- }
- });
+ });
}
void CameraService::updateOpenCloseStatus(const std::string& cameraId, bool open,
@@ -5701,6 +5728,10 @@
state->setClientPackage(std::string());
}
+ // Get the device id and app-visible camera id for the given HAL-visible camera id.
+ auto [deviceId, mappedCameraId] =
+ mVirtualDeviceCameraIdMapper.getDeviceIdAndMappedCameraIdPair(cameraId);
+
Mutex::Autolock lock(mStatusListenerLock);
for (const auto& it : mListenerList) {
@@ -5710,9 +5741,10 @@
binder::Status ret;
if (open) {
- ret = it->getListener()->onCameraOpened(cameraId, clientPackageName);
+ ret = it->getListener()->onCameraOpened(mappedCameraId, clientPackageName,
+ deviceId);
} else {
- ret = it->getListener()->onCameraClosed(cameraId);
+ ret = it->getListener()->onCameraClosed(mappedCameraId, deviceId);
}
it->handleBinderStatus(ret,
@@ -5805,7 +5837,7 @@
void CameraService::notifyPhysicalCameraStatusLocked(int32_t status,
const std::string& physicalCameraId, const std::list<std::string>& logicalCameraIds,
- SystemCameraKind deviceKind) {
+ SystemCameraKind deviceKind, int32_t deviceId) {
// mStatusListenerLock is expected to be locked
for (const auto& logicalCameraId : logicalCameraIds) {
for (auto& listener : mListenerList) {
@@ -5819,7 +5851,7 @@
continue;
}
auto ret = listener->getListener()->onPhysicalCameraStatusChanged(status,
- logicalCameraId, physicalCameraId);
+ logicalCameraId, physicalCameraId, deviceId);
listener->handleBinderStatus(ret,
"%s: Failed to trigger onPhysicalCameraStatusChanged for %d:%d: %d",
__FUNCTION__, listener->getListenerUid(), listener->getListenerPid(),
@@ -5828,7 +5860,6 @@
}
}
-
void CameraService::blockClientsForUid(uid_t uid) {
const auto clients = mActiveClientManager.getAll();
for (auto& current : clients) {
@@ -5909,8 +5940,6 @@
return handleWatchCommand(args, in, out);
} else if (args.size() >= 2 && args[0] == toString16("set-watchdog")) {
return handleSetCameraServiceWatchdog(args);
- } else if (args.size() >= 4 && args[0] == toString16("remap-camera-id")) {
- return handleCameraIdRemapping(args, err);
} else if (args.size() == 1 && args[0] == toString16("help")) {
printHelp(out);
return OK;
@@ -5919,23 +5948,6 @@
return BAD_VALUE;
}
-status_t CameraService::handleCameraIdRemapping(const Vector<String16>& args, int err) {
- uid_t uid = IPCThreadState::self()->getCallingUid();
- if (uid != AID_ROOT) {
- dprintf(err, "Must be adb root\n");
- return PERMISSION_DENIED;
- }
- if (args.size() != 4) {
- dprintf(err, "Expected format: remap-camera-id <PACKAGE> <Id0> <Id1>\n");
- return BAD_VALUE;
- }
- std::string packageName = toStdString(args[1]);
- std::string cameraIdToReplace = toStdString(args[2]);
- std::string cameraIdNew = toStdString(args[3]);
- remapCameraIds({{packageName, {{cameraIdToReplace, cameraIdNew}}}});
- return OK;
-}
-
status_t CameraService::handleSetUidState(const Vector<String16>& args, int err) {
std::string packageName = toStdString(args[1]);
@@ -6218,7 +6230,7 @@
" prints the monitored information in real time\n"
" Hit return to exit\n"
" clear clears all buffers storing information for watch command");
- return BAD_VALUE;
+ return BAD_VALUE;
}
status_t CameraService::startWatchingTags(const Vector<String16> &args, int outFd) {
@@ -6552,7 +6564,6 @@
" set-watchdog <VALUE> enables or disables the camera service watchdog\n"
" Valid values 0=disable, 1=enable\n"
" watch <start|stop|dump|print|clear> manages tag monitoring in connected clients\n"
- " remap-camera-id <PACKAGE> <Id0> <Id1> remaps camera ids. Must use adb root\n"
" help print this message\n");
}
@@ -6605,9 +6616,9 @@
mServiceLock.unlock();
// Clear caller identity temporarily so client disconnect PID checks work correctly
- int64_t token = CameraThreadState::clearCallingIdentity();
+ int64_t token = clearCallingIdentity();
clientSp->disconnect();
- CameraThreadState::restoreCallingIdentity(token);
+ restoreCallingIdentity(token);
// Reacquire mServiceLock
mServiceLock.lock();
@@ -6626,4 +6637,4 @@
mInjectionStatusListener->removeListener();
}
-}; // namespace android
+} // namespace android
diff --git a/services/camera/libcameraservice/CameraService.h b/services/camera/libcameraservice/CameraService.h
index 140121e..9998fb8 100644
--- a/services/camera/libcameraservice/CameraService.h
+++ b/services/camera/libcameraservice/CameraService.h
@@ -21,10 +21,8 @@
#include <android/hardware/BnCameraService.h>
#include <android/hardware/BnSensorPrivacyListener.h>
#include <android/hardware/ICameraServiceListener.h>
-#include <android/hardware/CameraIdRemapping.h>
#include <android/hardware/camera2/BnCameraInjectionSession.h>
#include <android/hardware/camera2/ICameraInjectionCallback.h>
-#include <android/permission/PermissionChecker.h>
#include <cutils/multiuser.h>
#include <utils/Vector.h>
@@ -55,12 +53,15 @@
#include "utils/ClientManager.h"
#include "utils/IPCTransport.h"
#include "utils/CameraServiceProxyWrapper.h"
+#include "utils/AttributionAndPermissionUtils.h"
+#include "utils/VirtualDeviceCameraIdMapper.h"
#include <set>
#include <string>
#include <list>
#include <map>
#include <memory>
+#include <mutex>
#include <optional>
#include <utility>
#include <unordered_map>
@@ -79,7 +80,8 @@
public virtual ::android::hardware::BnCameraService,
public virtual IBinder::DeathRecipient,
public virtual CameraProviderManager::StatusListener,
- public virtual IServiceManager::LocalRegistrationCallback
+ public virtual IServiceManager::LocalRegistrationCallback,
+ public AttributionAndPermissionUtilsEncapsulator
{
friend class BinderService<CameraService>;
friend class CameraOfflineSessionClient;
@@ -121,7 +123,9 @@
// Non-null arguments for cameraServiceProxyWrapper should be provided for
// testing purposes only.
CameraService(std::shared_ptr<CameraServiceProxyWrapper>
- cameraServiceProxyWrapper = nullptr);
+ cameraServiceProxyWrapper = nullptr,
+ std::shared_ptr<AttributionAndPermissionUtils>
+ attributionAndPermissionUtils = nullptr);
virtual ~CameraService();
/////////////////////////////////////////////////////////////////////
@@ -147,14 +151,17 @@
/////////////////////////////////////////////////////////////////////
// ICameraService
// IMPORTANT: All binder calls that deal with logicalCameraId should use
- // resolveCameraId(logicalCameraId) to arrive at the correct cameraId to
- // perform the operation on (in case of Id Remapping).
- virtual binder::Status getNumberOfCameras(int32_t type, int32_t* numCameras);
+ // resolveCameraId(logicalCameraId, deviceId, devicePolicy) to arrive at the correct
+ // cameraId to perform the operation on (in case of contexts
+ // associated with virtual devices).
+ virtual binder::Status getNumberOfCameras(int32_t type, int32_t deviceId,
+ int32_t devicePolicy, int32_t* numCameras);
- virtual binder::Status getCameraInfo(int cameraId, bool overrideToPortrait,
- hardware::CameraInfo* cameraInfo) override;
+ virtual binder::Status getCameraInfo(int cameraId, int rotationOverride,
+ int32_t deviceId, int32_t devicePolicy, hardware::CameraInfo* cameraInfo) override;
virtual binder::Status getCameraCharacteristics(const std::string& cameraId,
- int targetSdkVersion, bool overrideToPortrait, CameraMetadata* cameraInfo) override;
+ int targetSdkVersion, int rotationOverride, int32_t deviceId,
+ int32_t devicePolicy, CameraMetadata* cameraInfo) override;
virtual binder::Status getCameraVendorTagDescriptor(
/*out*/
hardware::camera2::params::VendorTagDescriptor* desc);
@@ -165,15 +172,15 @@
virtual binder::Status connect(const sp<hardware::ICameraClient>& cameraClient,
int32_t cameraId, const std::string& clientPackageName,
int32_t clientUid, int clientPid, int targetSdkVersion,
- bool overrideToPortrait, bool forceSlowJpegMode,
- /*out*/
- sp<hardware::ICamera>* device) override;
+ int rotationOverride, bool forceSlowJpegMode, int32_t deviceId,
+ int32_t devicePolicy, /*out*/ sp<hardware::ICamera>* device) override;
virtual binder::Status connectDevice(
const sp<hardware::camera2::ICameraDeviceCallbacks>& cameraCb,
const std::string& cameraId,
const std::string& clientPackageName, const std::optional<std::string>& clientFeatureId,
- int32_t clientUid, int scoreOffset, int targetSdkVersion, bool overrideToPortrait,
+ int32_t clientUid, int scoreOffset, int targetSdkVersion, int rotationOverride,
+ int32_t deviceId, int32_t devicePolicy,
/*out*/
sp<hardware::camera2::ICameraDeviceUser>* device);
@@ -189,7 +196,8 @@
virtual binder::Status isConcurrentSessionConfigurationSupported(
const std::vector<hardware::camera2::utils::CameraIdAndSessionConfiguration>& sessions,
- int targetSdkVersion, /*out*/bool* supported);
+ int targetSdkVersion, int32_t deviceId, int32_t devicePolicy,
+ /*out*/bool* supported);
virtual binder::Status getLegacyParameters(
int32_t cameraId,
@@ -197,13 +205,14 @@
std::string* parameters);
virtual binder::Status setTorchMode(const std::string& cameraId, bool enabled,
- const sp<IBinder>& clientBinder);
+ const sp<IBinder>& clientBinder, int32_t deviceId, int32_t devicePolicy);
virtual binder::Status turnOnTorchWithStrengthLevel(const std::string& cameraId,
- int32_t torchStrength, const sp<IBinder>& clientBinder);
+ int32_t torchStrength, const sp<IBinder>& clientBinder, int32_t deviceId,
+ int32_t devicePolicy);
- virtual binder::Status getTorchStrengthLevel(const std::string& cameraId,
- int32_t* torchStrength);
+ virtual binder::Status getTorchStrengthLevel(const std::string& cameraId, int32_t deviceId,
+ int32_t devicePolicy, int32_t* torchStrength);
virtual binder::Status notifySystemEvent(int32_t eventId,
const std::vector<int32_t>& args);
@@ -233,27 +242,25 @@
virtual binder::Status reportExtensionSessionStats(
const hardware::CameraExtensionSessionStats& stats, std::string* sessionKey /*out*/);
- virtual binder::Status remapCameraIds(const hardware::CameraIdRemapping&
- cameraIdRemapping);
-
virtual binder::Status injectSessionParams(
const std::string& cameraId,
const hardware::camera2::impl::CameraMetadataNative& sessionParams);
virtual binder::Status createDefaultRequest(const std::string& cameraId, int templateId,
+ int32_t deviceId, int32_t devicePolicy,
/*out*/
hardware::camera2::impl::CameraMetadataNative* request);
virtual binder::Status isSessionConfigurationWithParametersSupported(
- const std::string& cameraId,
+ const std::string& cameraId, int targetSdkVersion,
const SessionConfiguration& sessionConfiguration,
- /*out*/
- bool* supported);
+ int32_t deviceId, int32_t devicePolicy,
+ /*out*/ bool* supported);
virtual binder::Status getSessionCharacteristics(
- const std::string& cameraId, int targetSdkVersion, bool overrideToPortrait,
- const SessionConfiguration& sessionConfiguration,
- /*out*/ CameraMetadata* outMetadata);
+ const std::string& cameraId, int targetSdkVersion, int rotationOverride,
+ const SessionConfiguration& sessionConfiguration, int32_t deviceId,
+ int32_t devicePolicy, /*out*/ CameraMetadata* outMetadata);
// Extra permissions checks
virtual status_t onTransact(uint32_t code, const Parcel& data,
@@ -296,7 +303,8 @@
/////////////////////////////////////////////////////////////////////
// CameraDeviceFactory functionality
std::pair<int, IPCTransport> getDeviceVersion(const std::string& cameraId,
- bool overrideToPortrait, int* portraitRotation,
+ int rotationOverride,
+ int* portraitRotation,
int* facing = nullptr, int* orientation = nullptr);
/////////////////////////////////////////////////////////////////////
@@ -316,10 +324,20 @@
// Shared utilities
static binder::Status filterGetInfoErrorCode(status_t err);
+ /**
+ * Returns true if the device is an automotive device and cameraId is a system-only
+ * camera whose AUTOMOTIVE_LOCATION characteristic is one of
+ * AUTOMOTIVE_LOCATION_EXTERIOR_LEFT, AUTOMOTIVE_LOCATION_EXTERIOR_RIGHT,
+ * AUTOMOTIVE_LOCATION_EXTERIOR_FRONT, or AUTOMOTIVE_LOCATION_EXTERIOR_REAR.
+ */
+ bool isAutomotiveExteriorSystemCamera(const std::string& cameraId) const;
+
/////////////////////////////////////////////////////////////////////
// CameraClient functionality
- class BasicClient : public virtual RefBase {
+ class BasicClient :
+ public virtual RefBase,
+ public AttributionAndPermissionUtilsEncapsulator {
friend class CameraService;
public:
virtual status_t initialize(sp<CameraProviderManager> manager,
@@ -336,7 +354,7 @@
}
bool getOverrideToPortrait() const {
- return mOverrideToPortrait;
+ return mRotationOverride == ICameraService::ROTATION_OVERRIDE_OVERRIDE_TO_PORTRAIT;
}
// Disallows dumping over binder interface
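Throughout this change the bool mOverrideToPortrait becomes an int mRotationOverride backed by the ICameraService ROTATION_OVERRIDE_* constants, and the old boolean accessor is preserved by comparing against the portrait value. A minimal sketch of that migration, with a local enum standing in for the AIDL constants:

// Sketch: widening a bool rotation override into an int-backed setting while keeping
// the legacy boolean accessor (enum values are stand-ins for ICameraService constants).
enum RotationOverrideSketch {
    ROTATION_OVERRIDE_NONE = 0,
    ROTATION_OVERRIDE_OVERRIDE_TO_PORTRAIT = 1,
};

class ClientSketch {
public:
    explicit ClientSketch(int rotationOverride) : mRotationOverride(rotationOverride) {}

    // Legacy callers keep a boolean view of the wider setting.
    bool getOverrideToPortrait() const {
        return mRotationOverride == ROTATION_OVERRIDE_OVERRIDE_TO_PORTRAIT;
    }

private:
    int mRotationOverride;
};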
@@ -430,6 +448,7 @@
protected:
BasicClient(const sp<CameraService>& cameraService,
const sp<IBinder>& remoteCallback,
+ std::shared_ptr<AttributionAndPermissionUtils> attributionAndPermissionUtils,
const std::string& clientPackageName,
bool nativeClient,
const std::optional<std::string>& clientFeatureId,
@@ -439,7 +458,7 @@
int clientPid,
uid_t clientUid,
int servicePid,
- bool overrideToPortrait);
+ int rotationOverride);
virtual ~BasicClient();
@@ -462,7 +481,7 @@
const pid_t mServicePid;
bool mDisconnected;
bool mUidIsTrusted;
- bool mOverrideToPortrait;
+ int mRotationOverride;
mutable Mutex mAudioRestrictionLock;
int32_t mAudioRestriction;
@@ -543,6 +562,7 @@
// Interface used by CameraService
Client(const sp<CameraService>& cameraService,
const sp<hardware::ICameraClient>& cameraClient,
+ std::shared_ptr<AttributionAndPermissionUtils> attributionAndPermissionUtils,
const std::string& clientPackageName,
bool systemNativeClient,
const std::optional<std::string>& clientFeatureId,
@@ -553,7 +573,7 @@
int clientPid,
uid_t clientUid,
int servicePid,
- bool overrideToPortrait);
+ int rotationOverride);
~Client();
// return our camera client
@@ -646,13 +666,6 @@
int32_t updateAudioRestrictionLocked();
private:
- /**
- * Returns true if the device is an automotive device and cameraId is system
- * only camera which has characteristic AUTOMOTIVE_LOCATION value as either
- * AUTOMOTIVE_LOCATION_EXTERIOR_LEFT,AUTOMOTIVE_LOCATION_EXTERIOR_RIGHT,
- * AUTOMOTIVE_LOCATION_EXTERIOR_FRONT or AUTOMOTIVE_LOCATION_EXTERIOR_REAR.
- */
- bool isAutomotiveExteriorSystemCamera(const std::string& cameraId) const;
// TODO: b/263304156 update this to make use of a death callback for more
// robust/fault tolerant logging
@@ -688,29 +701,6 @@
}
/**
- * Pre-grants the permission if the attribution source uid is for an automotive
- * privileged client. Otherwise uses system service permission checker to check
- * for the appropriate permission. If this function is called for accessing a specific
- * camera,then the cameraID must not be empty. CameraId is used only in case of automotive
- * privileged client so that permission is pre-granted only to access system camera device
- * which is located outside of the vehicle body frame because camera located inside the vehicle
- * cabin would need user permission.
- */
- bool checkPermission(const std::string& cameraId, const std::string& permission,
- const content::AttributionSourceState& attributionSource, const std::string& message,
- int32_t attributedOpCode) const;
-
- bool hasPermissionsForSystemCamera(const std::string& cameraId, int callingPid, int callingUid)
- const;
-
- bool hasPermissionsForCameraHeadlessSystemUser(const std::string& cameraId, int callingPid,
- int callingUid) const;
-
- bool hasCameraPermissions() const;
-
- bool hasPermissionsForCameraPrivacyAllowlist(int callingPid, int callingUid) const;
-
- /**
* Typesafe version of device status, containing both the HAL-layer and the service interface-
* layer values.
*/
@@ -894,10 +884,14 @@
// prevented from accessing the camera.
class SensorPrivacyPolicy : public hardware::BnSensorPrivacyListener,
public virtual IBinder::DeathRecipient,
- public virtual IServiceManager::LocalRegistrationCallback {
+ public virtual IServiceManager::LocalRegistrationCallback,
+ public AttributionAndPermissionUtilsEncapsulator {
public:
- explicit SensorPrivacyPolicy(wp<CameraService> service)
- : mService(service), mSensorPrivacyEnabled(false),
+ explicit SensorPrivacyPolicy(wp<CameraService> service,
+ std::shared_ptr<AttributionAndPermissionUtils> attributionAndPermissionUtils)
+ : AttributionAndPermissionUtilsEncapsulator(attributionAndPermissionUtils),
+ mService(service),
+ mSensorPrivacyEnabled(false),
mCameraPrivacyState(SensorPrivacyManager::DISABLED), mRegistered(false) {}
void registerSelf();
@@ -1011,7 +1005,8 @@
int api1CameraId, const std::string& clientPackageNameMaybe, bool systemNativeClient,
const std::optional<std::string>& clientFeatureId, int clientUid, int clientPid,
apiLevel effectiveApiLevel, bool shimUpdateOnly, int scoreOffset, int targetSdkVersion,
- bool overrideToPortrait, bool forceSlowJpegMode, const std::string& originalCameraId,
+ int rotationOverride, bool forceSlowJpegMode,
+ const std::string& originalCameraId,
/*out*/sp<CLIENT>& device);
// Lock guarding camera service state
@@ -1033,6 +1028,10 @@
// Adds client logs during closed session to the file pointed by fd.
void dumpClosedSessionClientLogs(int fd, const std::string& cameraId);
+ binder::Status isSessionConfigurationWithParametersSupportedUnsafe(
+ const std::string& cameraId, const SessionConfiguration& sessionConfiguration,
+ bool overrideForPerfClass, /*out*/ bool* supported);
+
// Mapping from camera ID -> state for each device, map is protected by mCameraStatesLock
std::map<std::string, std::shared_ptr<CameraState>> mCameraStates;
@@ -1040,44 +1039,18 @@
mutable Mutex mCameraStatesLock;
/**
- * Mapping from packageName -> {cameraIdToReplace -> newCameraIdtoUse}.
+ * Resolve the (potentially remapped) camera id for the given input camera id and the given
+ * device id and device policy (for the device associated with the context of the caller).
*
- * This specifies that for packageName, for every binder operation targeting
- * cameraIdToReplace, use newCameraIdToUse instead.
+ * For any context associated with a virtual device with custom camera policy, this will return
+ * the actual camera id if inputCameraId corresponds to the mapped id of a virtual camera
+ * (for virtual devices with custom camera policy, the back and front virtual cameras of that
+ * device would have 0 and 1 respectively as their mapped camera id).
*/
- typedef std::map<std::string, std::map<std::string, std::string>> TCameraIdRemapping;
- TCameraIdRemapping mCameraIdRemapping{};
- /** Mutex guarding mCameraIdRemapping. */
- Mutex mCameraIdRemappingLock;
-
- /** Parses cameraIdRemapping parcelable into the native cameraIdRemappingMap. */
- binder::Status parseCameraIdRemapping(
- const hardware::CameraIdRemapping& cameraIdRemapping,
- /* out */ TCameraIdRemapping* cameraIdRemappingMap);
-
- /**
- * Resolve the (potentially remapped) camera Id to use for packageName.
- *
- * This returns the Camera Id to use in case inputCameraId was remapped to a
- * different Id for the given packageName. Otherwise, it returns the inputCameraId.
- *
- * If the packageName is not provided, it will be inferred from the clientUid.
- */
- std::string resolveCameraId(
+ std::optional<std::string> resolveCameraId(
const std::string& inputCameraId,
- int clientUid,
- const std::string& packageName = "");
-
- /**
- * Updates the state of mCameraIdRemapping, while disconnecting active clients as necessary.
- */
- void remapCameraIds(const TCameraIdRemapping& cameraIdRemapping);
-
- /**
- * Finds the Camera Ids that were remapped to the inputCameraId for the given client.
- */
- std::vector<std::string> findOriginalIdsForRemappedCameraId(
- const std::string& inputCameraId, int clientUid);
+ int32_t deviceId,
+ int32_t devicePolicy);
// Circular buffer for storing event logging for dumps
RingBuffer<std::string> mEventLog;
@@ -1133,13 +1106,13 @@
* Returns the underlying camera Id string mapped to a camera id int
* Empty string is returned when the cameraIdInt is invalid.
*/
- std::string cameraIdIntToStr(int cameraIdInt);
+ std::string cameraIdIntToStr(int cameraIdInt, int32_t deviceId, int32_t devicePolicy);
/**
* Returns the underlying camera Id string mapped to a camera id int
* Empty string is returned when the cameraIdInt is invalid.
*/
- std::string cameraIdIntToStrLocked(int cameraIdInt);
+ std::string cameraIdIntToStrLocked(int cameraIdInt, int32_t deviceId, int32_t devicePolicy);
/**
* Remove a single client corresponding to the given camera id from the list of active clients.
@@ -1339,6 +1312,8 @@
*
* This method must be idempotent.
* This method acquires mStatusLock and mStatusListenerLock.
+ * For any virtual camera, this method must pass its mapped camera id and device id to
+ * ICameraServiceListeners (using mVirtualDeviceCameraIdMapper).
*/
void updateStatus(StatusInternal status,
const std::string& cameraId,
@@ -1392,7 +1367,8 @@
// notify physical camera status when the physical camera is public.
// Expects mStatusListenerLock to be locked.
void notifyPhysicalCameraStatusLocked(int32_t status, const std::string& physicalCameraId,
- const std::list<std::string>& logicalCameraIds, SystemCameraKind deviceKind);
+ const std::list<std::string>& logicalCameraIds, SystemCameraKind deviceKind,
+ int32_t virtualDeviceId);
// get list of logical cameras which are backed by physicalCameraId
std::list<std::string> getLogicalCameras(const std::string& physicalCameraId);
@@ -1507,6 +1483,12 @@
// responsibility to acquire mLogLock before calling this functions.
bool isClientWatchedLocked(const BasicClient *client);
+ // Filters out fingerprintable keys if the calling process does not have CAMERA permission.
+ // Note: function caller should ensure that shouldRejectSystemCameraConnection is checked
+ // for the calling process before calling this function.
+ binder::Status filterSensitiveMetadataIfNeeded(const std::string& cameraId,
+ CameraMetadata* metadata);
+
/**
* Get the current system time as a formatted string.
*/
@@ -1518,7 +1500,7 @@
const std::string& cameraId, int api1CameraId, int facing, int sensorOrientation,
int clientPid, uid_t clientUid, int servicePid,
std::pair<int, IPCTransport> deviceVersionAndIPCTransport, apiLevel effectiveApiLevel,
- bool overrideForPerfClass, bool overrideToPortrait, bool forceSlowJpegMode,
+ bool overrideForPerfClass, int rotationOverride, bool forceSlowJpegMode,
const std::string& originalCameraId,
/*out*/ sp<BasicClient>* client);
@@ -1571,10 +1553,6 @@
// Current zoom override value
int32_t mZoomOverrideValue = -1;
- // Utility instance over IPermissionChecker.
- std::unique_ptr<permission::PermissionChecker> mPermissionChecker =
- std::make_unique<permission::PermissionChecker>();
-
/**
* A listener class that implements the IBinder::DeathRecipient interface
* for use to call back the error state injected by the external camera, and
@@ -1632,6 +1610,8 @@
int64_t mDeviceState;
void updateTorchUidMapLocked(const std::string& cameraId, int uid);
+
+ VirtualDeviceCameraIdMapper mVirtualDeviceCameraIdMapper;
};
} // namespace android
diff --git a/services/camera/libcameraservice/CameraServiceWatchdog.h b/services/camera/libcameraservice/CameraServiceWatchdog.h
index afc432d..165dece 100644
--- a/services/camera/libcameraservice/CameraServiceWatchdog.h
+++ b/services/camera/libcameraservice/CameraServiceWatchdog.h
@@ -26,7 +26,7 @@
* and single call monitoring differently. See function documentation for
* more details.
* To disable/enable:
- * - adb shell cmd media.camera set-cameraservice-watchdog [0/1]
+ * - adb shell cmd media.camera set-watchdog [0/1]
*/
#pragma once
#include <chrono>
diff --git a/services/camera/libcameraservice/TEST_MAPPING b/services/camera/libcameraservice/TEST_MAPPING
index ca6cc58..6257aee 100644
--- a/services/camera/libcameraservice/TEST_MAPPING
+++ b/services/camera/libcameraservice/TEST_MAPPING
@@ -4,6 +4,17 @@
"name": "cameraservice_test"
}
],
+ "postsubmit": [
+ {
+ "name": "CtsVirtualDevicesCameraTestCases",
+ "options": [
+ {
+ "exclude-annotation": "androidx.test.filters.FlakyTest"
+ }
+ ],
+ "keywords": ["primary-device"]
+ }
+ ],
"imports": [
{
"path": "frameworks/av/camera"
diff --git a/services/camera/libcameraservice/aidl/AidlCameraService.cpp b/services/camera/libcameraservice/aidl/AidlCameraService.cpp
index 79dbfed..2886942 100644
--- a/services/camera/libcameraservice/aidl/AidlCameraService.cpp
+++ b/services/camera/libcameraservice/aidl/AidlCameraService.cpp
@@ -26,6 +26,7 @@
#include <android/binder_ibinder.h>
#include <android/binder_manager.h>
#include <binder/Status.h>
+#include <camera/CameraUtils.h>
#include <hidl/HidlTransportSupport.h>
#include <utils/Utils.h>
@@ -37,6 +38,7 @@
using ::android::hardware::cameraservice::utils::conversion::aidl::cloneToAidl;
using ::android::hardware::cameraservice::utils::conversion::aidl::convertToAidl;
using ::android::hardware::cameraservice::utils::conversion::aidl::filterVndkKeys;
+using hardware::BnCameraService::ROTATION_OVERRIDE_NONE;
using ::ndk::ScopedAStatus;
// VNDK classes
@@ -89,7 +91,9 @@
::android::CameraMetadata cameraMetadata;
UStatus ret = mCameraService->getCameraCharacteristics(in_cameraId,
mVndkVersion,
- /* overrideToPortrait= */ false,
+ ROTATION_OVERRIDE_NONE,
+ kDefaultDeviceId,
+ /* devicePolicy= */ 0,
&cameraMetadata);
if (!ret.isOk()) {
if (ret.exceptionCode() != EX_SERVICE_SPECIFIC) {
@@ -147,7 +151,9 @@
hardware::ICameraService::USE_CALLING_UID,
/* scoreOffset= */ 0,
/* targetSdkVersion= */ __ANDROID_API_FUTURE__,
- /* overrideToPortrait= */ false,
+ ROTATION_OVERRIDE_NONE,
+ kDefaultDeviceId,
+ /* devicePolicy= */ 0,
&unstableDevice);
if (!serviceRet.isOk()) {
ALOGE("%s: Unable to connect to camera device: %s", __FUNCTION__,
diff --git a/services/camera/libcameraservice/aidl/AidlCameraServiceListener.cpp b/services/camera/libcameraservice/aidl/AidlCameraServiceListener.cpp
index d7ab0d9..dc5c7f5 100644
--- a/services/camera/libcameraservice/aidl/AidlCameraServiceListener.cpp
+++ b/services/camera/libcameraservice/aidl/AidlCameraServiceListener.cpp
@@ -18,6 +18,7 @@
#include <aidl/AidlUtils.h>
#include <aidl/android/frameworks/cameraservice/common/Status.h>
#include <aidl/android/frameworks/cameraservice/service/CameraStatusAndId.h>
+#include <camera/CameraUtils.h>
#include <camera/StringUtils.h>
namespace android::frameworks::cameraservice::service::implementation {
@@ -28,7 +29,10 @@
using SStatus = ::aidl::android::frameworks::cameraservice::common::Status;
binder::Status AidlCameraServiceListener::onStatusChanged(
- int32_t status, const std::string& cameraId) {
+ int32_t status, const std::string& cameraId, int32_t deviceId) {
+ if (deviceId != kDefaultDeviceId) {
+ return binder::Status::ok();
+ }
SCameraDeviceStatus sStatus = convertCameraStatusToAidl(status);
auto ret = mBase->onStatusChanged(sStatus, cameraId);
LOG_STATUS_ERROR_IF_NOT_OK(ret, "onStatusChanged")
@@ -37,7 +41,10 @@
binder::Status AidlCameraServiceListener::onPhysicalCameraStatusChanged(
int32_t status, const std::string& cameraId,
- const std::string& physicalCameraId) {
+ const std::string& physicalCameraId, int32_t deviceId) {
+ if (deviceId != kDefaultDeviceId) {
+ return binder::Status::ok();
+ }
SCameraDeviceStatus sStatus = convertCameraStatusToAidl(status);
auto ret = mBase->onPhysicalCameraStatusChanged(sStatus, cameraId, physicalCameraId);
@@ -46,20 +53,22 @@
}
::android::binder::Status AidlCameraServiceListener::onTorchStatusChanged(
- int32_t, const std::string&) {
+ [[maybe_unused]] int32_t, [[maybe_unused]] const std::string&, int32_t) {
// We don't implement onTorchStatusChanged
return binder::Status::ok();
}
::android::binder::Status AidlCameraServiceListener::onTorchStrengthLevelChanged(
- const std::string&, int32_t) {
+ [[maybe_unused]] const std::string&, [[maybe_unused]] int32_t, [[maybe_unused]] int32_t) {
// We don't implement onTorchStrengthLevelChanged
return binder::Status::ok();
}
+
status_t AidlCameraServiceListener::linkToDeath(const sp<DeathRecipient>& recipient, void* cookie,
uint32_t flags) {
return mDeathPipe.linkToDeath(recipient, cookie, flags);
}
+
status_t AidlCameraServiceListener::unlinkToDeath(const wp<DeathRecipient>& recipient, void* cookie,
uint32_t flags,
wp<DeathRecipient>* outRecipient) {
diff --git a/services/camera/libcameraservice/aidl/AidlCameraServiceListener.h b/services/camera/libcameraservice/aidl/AidlCameraServiceListener.h
index 6483fe1..a7c32e3 100644
--- a/services/camera/libcameraservice/aidl/AidlCameraServiceListener.h
+++ b/services/camera/libcameraservice/aidl/AidlCameraServiceListener.h
@@ -45,25 +45,28 @@
~AidlCameraServiceListener() = default;
::android::binder::Status onStatusChanged(int32_t status,
- const std::string& cameraId) override;
+ const std::string& cameraId, int32_t deviceId) override;
::android::binder::Status onPhysicalCameraStatusChanged(int32_t status,
const std::string& cameraId,
- const std::string& physicalCameraId) override;
+ const std::string& physicalCameraId,
+ int32_t deviceId) override;
::android::binder::Status onTorchStatusChanged(
- int32_t status, const std::string& cameraId) override;
+ int32_t status, const std::string& cameraId, int32_t deviceId) override;
::android::binder::Status onTorchStrengthLevelChanged(
- const std::string& cameraId, int32_t newStrengthLevel) override;
+ const std::string& cameraId, int32_t newStrengthLevel, int32_t deviceId) override;
binder::Status onCameraAccessPrioritiesChanged() override {
// TODO: no implementation yet.
return binder::Status::ok();
}
- binder::Status onCameraOpened(const std::string& /*cameraId*/,
- const std::string& /*clientPackageId*/) override {
+ binder::Status onCameraOpened([[maybe_unused]] const std::string& /*cameraId*/,
+ [[maybe_unused]] const std::string& /*clientPackageId*/,
+ [[maybe_unused]] int32_t /*deviceId*/) override {
// empty implementation
return binder::Status::ok();
}
- binder::Status onCameraClosed(const std::string& /*cameraId*/) override {
+ binder::Status onCameraClosed([[maybe_unused]] const std::string& /*cameraId*/,
+ [[maybe_unused]] int32_t /*deviceId*/) override {
// empty implementation
return binder::Status::ok();
}
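As the .cpp changes above show, the vendor-facing AIDL listener keeps its single-device interface by simply dropping callbacks for cameras owned by non-default (virtual) devices. A generic version of that guard, with a stand-in callback type rather than the real VNDK listener:

// Sketch: forward status callbacks only for cameras on the default device.
#include <functional>
#include <string>
#include <utility>

constexpr int kDefaultDeviceId = 0;  // assumption: id of the default device

class DefaultDeviceOnlyForwarder {
public:
    using Callback = std::function<void(int status, const std::string& cameraId)>;

    explicit DefaultDeviceOnlyForwarder(Callback cb) : mCallback(std::move(cb)) {}

    // Mirrors onStatusChanged(status, cameraId, deviceId): virtual-device events are dropped
    // because the downstream interface has no notion of device ids.
    void onStatusChanged(int status, const std::string& cameraId, int deviceId) {
        if (deviceId != kDefaultDeviceId) return;
        mCallback(status, cameraId);
    }

private:
    Callback mCallback;
};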
diff --git a/services/camera/libcameraservice/aidl/AidlUtils.cpp b/services/camera/libcameraservice/aidl/AidlUtils.cpp
index 14e5fad..d23566c 100644
--- a/services/camera/libcameraservice/aidl/AidlUtils.cpp
+++ b/services/camera/libcameraservice/aidl/AidlUtils.cpp
@@ -15,9 +15,11 @@
*/
#define LOG_TAG "AidlUtils"
+//#define LOG_NDEBUG 0
#include <aidl/AidlUtils.h>
#include <aidl/ExtensionMetadataTags.h>
+#include <aidl/SessionCharacteristicsTags.h>
#include <aidl/VndkVersionMetadataTags.h>
#include <aidlcommonsupport/NativeHandle.h>
#include <camera/StringUtils.h>
@@ -313,6 +315,7 @@
if (vndkVersion == __ANDROID_API_FUTURE__) {
// VNDK version derived from ro.board.api_level is a version code-name that
// corresponds to the current SDK version.
+ ALOGV("%s: VNDK version is API FUTURE, not filtering any keys", __FUNCTION__);
return OK;
}
const auto &apiLevelToKeys =
@@ -321,9 +324,14 @@
// versions above the given one, need to have their keys filtered from the
// metadata in order to avoid metadata invalidation.
auto it = apiLevelToKeys.upper_bound(vndkVersion);
+ ALOGV("%s: VNDK version for filtering is %d", __FUNCTION__ , vndkVersion);
while (it != apiLevelToKeys.end()) {
for (const auto &key : it->second) {
status_t res = metadata.erase(key);
+ // Should be okay to not use get_local_camera_metadata_tag_name
+ // since we're not filtering vendor tags
+ ALOGV("%s: Metadata key being filtered is %s", __FUNCTION__ ,
+ get_camera_metadata_tag_name(key));
if (res != OK) {
ALOGE("%s metadata key %d could not be erased", __FUNCTION__, key);
return res;
@@ -334,6 +342,49 @@
return OK;
}
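As a concrete example of the filtering above: on a device reporting VNDK version 34, upper_bound(34) points at the {35, ...} entry, so the keys introduced at API level 35 (the low-light-boost, EFV padding and flash-strength keys listed further below in VndkVersionMetadataTags.h) are erased from the metadata, while keys introduced at level 34 or earlier are left in place.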
+status_t copySessionCharacteristics(const CameraMetadata& from, CameraMetadata* to,
+ int queryVersion) {
+ // Ensure the vendor IDs are the same before attempting anything else.
+ // If the vendor IDs differ, we cannot safely copy the characteristics.
+ if (from.getVendorId() != to->getVendorId()) {
+ ALOGE("%s: Incompatible CameraMetadata objects. Vendor IDs differ. From: %lu; To: %lu",
+ __FUNCTION__, from.getVendorId(), to->getVendorId());
+ return BAD_VALUE;
+ }
+
+ // Allow public tags according to the queryVersion
+ std::unordered_set<uint32_t> validPublicTags;
+ auto last = api_level_to_session_characteristic_keys.upper_bound(queryVersion);
+ for (auto it = api_level_to_session_characteristic_keys.begin(); it != last; it++) {
+ validPublicTags.insert(it->second.cbegin(), it->second.cend());
+ }
+
+ const camera_metadata_t* src = from.getAndLock();
+ camera_metadata_ro_entry_t entry{};
+ for (size_t i = 0; i < get_camera_metadata_entry_count(src); i++) {
+ int ret = get_camera_metadata_ro_entry(src, i, &entry);
+ if (ret != OK) {
+ ALOGE("%s: Could not fetch entry at index %lu. Error: %d", __FUNCTION__, i, ret);
+ from.unlock(src);
+ return BAD_VALUE;
+ }
+
+ if (entry.tag < (uint32_t)VENDOR_SECTION_START &&
+ validPublicTags.find(entry.tag) == validPublicTags.end()) {
+ ALOGI("%s: Session Characteristics contains tag %s but not supported by query version "
+ "(%d)",
+ __FUNCTION__, get_camera_metadata_tag_name(entry.tag), queryVersion);
+ continue;
+ }
+
+ // The entry is either a vendor tag, or a valid session characteristic key.
+ // Copy over the value
+ to->update(entry);
+ }
+ from.unlock(src);
+ return OK;
+}
+
bool areExtensionKeysSupported(const CameraMetadata& metadata) {
auto requestKeys = metadata.find(ANDROID_REQUEST_AVAILABLE_REQUEST_KEYS);
if (requestKeys.count == 0) {
diff --git a/services/camera/libcameraservice/aidl/AidlUtils.h b/services/camera/libcameraservice/aidl/AidlUtils.h
index 562aa70..92e878e 100644
--- a/services/camera/libcameraservice/aidl/AidlUtils.h
+++ b/services/camera/libcameraservice/aidl/AidlUtils.h
@@ -122,6 +122,9 @@
status_t filterVndkKeys(int vndkVersion, CameraMetadata &metadata, bool isStatic = true);
+status_t copySessionCharacteristics(const CameraMetadata& from, CameraMetadata* to,
+ int queryVersion);
+
bool areExtensionKeysSupported(const CameraMetadata& metadata);
status_t filterExtensionKeys(CameraMetadata* metadata /*out*/);
diff --git a/services/camera/libcameraservice/aidl/SessionCharacteristicsTags.h b/services/camera/libcameraservice/aidl/SessionCharacteristicsTags.h
new file mode 100644
index 0000000..cefb8a6
--- /dev/null
+++ b/services/camera/libcameraservice/aidl/SessionCharacteristicsTags.h
@@ -0,0 +1,37 @@
+/*
+ * Copyright (C) 2024 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <map>
+#include <vector>
+#pragma once
+/**
+ * ! Do not edit this file directly !
+ *
+ * Generated automatically from session_characteristics_tags.mako. To be included in
+ * libcameraservice only by aidl/AidlUtils.cpp.
+ */
+
+/**
+ * Mapping of session characteristics to the INFO_SESSION_CONFIGURATION_QUERY_VERSION value
+ * at which they were introduced.
+ */
+std::map<int, std::vector<camera_metadata_tag>> api_level_to_session_characteristic_keys {
+ {35,
+ {
+ ANDROID_CONTROL_ZOOM_RATIO_RANGE,
+ ANDROID_SCALER_AVAILABLE_MAX_DIGITAL_ZOOM,
+ }},
+};
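This map is walked by copySessionCharacteristics() in AidlUtils.cpp: every key introduced at or below the device's session-configuration query version is accepted as a public session characteristic. A minimal sketch of that lookup, assuming <unordered_set> is available alongside the includes above (not part of this change):

// Sketch: collect all public session-characteristic tags permitted for a
// given query version, mirroring the upper_bound walk used by
// copySessionCharacteristics().
std::unordered_set<uint32_t> allowedSessionCharacteristicTags(int queryVersion) {
    std::unordered_set<uint32_t> tags;
    auto last = api_level_to_session_characteristic_keys.upper_bound(queryVersion);
    for (auto it = api_level_to_session_characteristic_keys.begin(); it != last; ++it) {
        tags.insert(it->second.cbegin(), it->second.cend());
    }
    return tags;
}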
diff --git a/services/camera/libcameraservice/aidl/VndkVersionMetadataTags.h b/services/camera/libcameraservice/aidl/VndkVersionMetadataTags.h
index 7965474..0e1db5c 100644
--- a/services/camera/libcameraservice/aidl/VndkVersionMetadataTags.h
+++ b/services/camera/libcameraservice/aidl/VndkVersionMetadataTags.h
@@ -77,12 +77,6 @@
{34, {
ANDROID_CONTROL_AUTOFRAMING_AVAILABLE,
ANDROID_CONTROL_AVAILABLE_SETTINGS_OVERRIDES,
- ANDROID_CONTROL_LOW_LIGHT_BOOST_INFO_LUMINANCE_RANGE,
- ANDROID_EFV_PADDING_ZOOM_FACTOR_RANGE,
- ANDROID_FLASH_SINGLE_STRENGTH_DEFAULT_LEVEL,
- ANDROID_FLASH_SINGLE_STRENGTH_MAX_LEVEL,
- ANDROID_FLASH_TORCH_STRENGTH_DEFAULT_LEVEL,
- ANDROID_FLASH_TORCH_STRENGTH_MAX_LEVEL,
ANDROID_JPEGR_AVAILABLE_JPEG_R_MIN_FRAME_DURATIONS,
ANDROID_JPEGR_AVAILABLE_JPEG_R_MIN_FRAME_DURATIONS_MAXIMUM_RESOLUTION,
ANDROID_JPEGR_AVAILABLE_JPEG_R_STALL_DURATIONS,
@@ -91,6 +85,15 @@
ANDROID_JPEGR_AVAILABLE_JPEG_R_STREAM_CONFIGURATIONS_MAXIMUM_RESOLUTION,
ANDROID_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP,
} },
+ {35, {
+ ANDROID_CONTROL_LOW_LIGHT_BOOST_INFO_LUMINANCE_RANGE,
+ ANDROID_EFV_PADDING_ZOOM_FACTOR_RANGE,
+ ANDROID_FLASH_SINGLE_STRENGTH_DEFAULT_LEVEL,
+ ANDROID_FLASH_SINGLE_STRENGTH_MAX_LEVEL,
+ ANDROID_FLASH_TORCH_STRENGTH_DEFAULT_LEVEL,
+ ANDROID_FLASH_TORCH_STRENGTH_MAX_LEVEL,
+ ANDROID_INFO_SESSION_CONFIGURATION_QUERY_VERSION,
+ } },
};
/**
@@ -110,9 +113,14 @@
{34, {
ANDROID_CONTROL_AUTOFRAMING,
ANDROID_CONTROL_AUTOFRAMING_STATE,
- ANDROID_CONTROL_LOW_LIGHT_BOOST_STATE,
ANDROID_CONTROL_SETTINGS_OVERRIDE,
ANDROID_CONTROL_SETTINGS_OVERRIDING_FRAME_NUMBER,
+ ANDROID_EXTENSION_CURRENT_TYPE,
+ ANDROID_EXTENSION_STRENGTH,
+ ANDROID_SCALER_RAW_CROP_REGION,
+ } },
+ {35, {
+ ANDROID_CONTROL_LOW_LIGHT_BOOST_STATE,
ANDROID_EFV_AUTO_ZOOM,
ANDROID_EFV_AUTO_ZOOM_PADDING_REGION,
ANDROID_EFV_MAX_PADDING_ZOOM_FACTOR,
@@ -122,11 +130,8 @@
ANDROID_EFV_STABILIZATION_MODE,
ANDROID_EFV_TARGET_COORDINATES,
ANDROID_EFV_TRANSLATE_VIEWPORT,
- ANDROID_EXTENSION_CURRENT_TYPE,
- ANDROID_EXTENSION_STRENGTH,
ANDROID_FLASH_STRENGTH_LEVEL,
ANDROID_LOGICAL_MULTI_CAMERA_ACTIVE_PHYSICAL_SENSOR_CROP_REGION,
- ANDROID_SCALER_RAW_CROP_REGION,
ANDROID_STATISTICS_LENS_INTRINSIC_SAMPLES,
ANDROID_STATISTICS_LENS_INTRINSIC_TIMESTAMPS,
} },
diff --git a/services/camera/libcameraservice/api1/Camera2Client.cpp b/services/camera/libcameraservice/api1/Camera2Client.cpp
index caa6424..61577e4 100644
--- a/services/camera/libcameraservice/api1/Camera2Client.cpp
+++ b/services/camera/libcameraservice/api1/Camera2Client.cpp
@@ -38,7 +38,6 @@
#include "api1/client2/CallbackProcessor.h"
#include "api1/client2/ZslProcessor.h"
#include "device3/RotateAndCropMapper.h"
-#include "utils/CameraThreadState.h"
#include "utils/CameraServiceProxyWrapper.h"
#define ALOG1(...) ALOGD_IF(gLogLevel >= 1, __VA_ARGS__);
@@ -56,6 +55,7 @@
Camera2Client::Camera2Client(const sp<CameraService>& cameraService,
const sp<hardware::ICameraClient>& cameraClient,
std::shared_ptr<CameraServiceProxyWrapper> cameraServiceProxyWrapper,
+ std::shared_ptr<AttributionAndPermissionUtils> attributionAndPermissionUtils,
const std::string& clientPackageName,
const std::optional<std::string>& clientFeatureId,
const std::string& cameraDeviceId,
@@ -66,12 +66,13 @@
uid_t clientUid,
int servicePid,
bool overrideForPerfClass,
- bool overrideToPortrait,
+ int rotationOverride,
bool forceSlowJpegMode):
- Camera2ClientBase(cameraService, cameraClient, cameraServiceProxyWrapper, clientPackageName,
+ Camera2ClientBase(cameraService, cameraClient, cameraServiceProxyWrapper,
+ attributionAndPermissionUtils, clientPackageName,
false/*systemNativeClient - since no ndk for api1*/, clientFeatureId,
cameraDeviceId, api1CameraId, cameraFacing, sensorOrientation, clientPid,
- clientUid, servicePid, overrideForPerfClass, overrideToPortrait,
+ clientUid, servicePid, overrideForPerfClass, rotationOverride,
/*legacyClient*/ true),
mParameters(api1CameraId, cameraFacing),
mLatestRequestIds(kMaxRequestIds),
@@ -444,7 +445,7 @@
binder::Status res = binder::Status::ok();
// Allow both client and the cameraserver to disconnect at all times
- int callingPid = CameraThreadState::getCallingPid();
+ int callingPid = getCallingPid();
if (callingPid != mClientPid && callingPid != mServicePid) return res;
if (mDevice == 0) return res;
@@ -513,14 +514,14 @@
ALOGV("%s: E", __FUNCTION__);
Mutex::Autolock icl(mBinderSerializationLock);
- if (mClientPid != 0 && CameraThreadState::getCallingPid() != mClientPid) {
+ if (mClientPid != 0 && getCallingPid() != mClientPid) {
ALOGE("%s: Camera %d: Connection attempt from pid %d; "
"current locked to pid %d", __FUNCTION__,
- mCameraId, CameraThreadState::getCallingPid(), mClientPid);
+ mCameraId, getCallingPid(), mClientPid);
return BAD_VALUE;
}
- mClientPid = CameraThreadState::getCallingPid();
+ mClientPid = getCallingPid();
mRemoteCallback = client;
mSharedCameraCallbacks = client;
@@ -533,16 +534,16 @@
ALOGV("%s: E", __FUNCTION__);
Mutex::Autolock icl(mBinderSerializationLock);
ALOGV("%s: Camera %d: Lock call from pid %d; current client pid %d",
- __FUNCTION__, mCameraId, CameraThreadState::getCallingPid(), mClientPid);
+ __FUNCTION__, mCameraId, getCallingPid(), mClientPid);
if (mClientPid == 0) {
- mClientPid = CameraThreadState::getCallingPid();
+ mClientPid = getCallingPid();
return OK;
}
- if (mClientPid != CameraThreadState::getCallingPid()) {
+ if (mClientPid != getCallingPid()) {
ALOGE("%s: Camera %d: Lock call from pid %d; currently locked to pid %d",
- __FUNCTION__, mCameraId, CameraThreadState::getCallingPid(), mClientPid);
+ __FUNCTION__, mCameraId, getCallingPid(), mClientPid);
return EBUSY;
}
@@ -554,9 +555,9 @@
ALOGV("%s: E", __FUNCTION__);
Mutex::Autolock icl(mBinderSerializationLock);
ALOGV("%s: Camera %d: Unlock call from pid %d; current client pid %d",
- __FUNCTION__, mCameraId, CameraThreadState::getCallingPid(), mClientPid);
+ __FUNCTION__, mCameraId, getCallingPid(), mClientPid);
- if (mClientPid == CameraThreadState::getCallingPid()) {
+ if (mClientPid == getCallingPid()) {
SharedParameters::Lock l(mParameters);
if (l.mParameters.state == Parameters::RECORD ||
l.mParameters.state == Parameters::VIDEO_SNAPSHOT) {
@@ -570,7 +571,7 @@
}
ALOGE("%s: Camera %d: Unlock call from pid %d; currently locked to pid %d",
- __FUNCTION__, mCameraId, CameraThreadState::getCallingPid(), mClientPid);
+ __FUNCTION__, mCameraId, getCallingPid(), mClientPid);
return EBUSY;
}
@@ -1460,6 +1461,11 @@
int triggerId;
{
SharedParameters::Lock l(mParameters);
+ if (l.mParameters.state == Parameters::DISCONNECTED) {
+ ALOGE("%s: Camera %d has been disconnected.", __FUNCTION__, mCameraId);
+ return INVALID_OPERATION;
+ }
+
// Canceling does nothing in FIXED or INFINITY modes
if (l.mParameters.focusMode == Parameters::FOCUS_MODE_FIXED ||
l.mParameters.focusMode == Parameters::FOCUS_MODE_INFINITY) {
@@ -1644,7 +1650,7 @@
ALOGV("%s: Camera %d", __FUNCTION__, mCameraId);
Mutex::Autolock icl(mBinderSerializationLock);
// The camera service can unconditionally get the parameters at all times
- if (CameraThreadState::getCallingPid() != mServicePid && checkPid(__FUNCTION__) != OK) return String8();
+ if (getCallingPid() != mServicePid && checkPid(__FUNCTION__) != OK) return String8();
SharedParameters::ReadLock l(mParameters);
diff --git a/services/camera/libcameraservice/api1/Camera2Client.h b/services/camera/libcameraservice/api1/Camera2Client.h
index 2cb7af0..a0c9f2d 100644
--- a/services/camera/libcameraservice/api1/Camera2Client.h
+++ b/services/camera/libcameraservice/api1/Camera2Client.h
@@ -103,6 +103,7 @@
Camera2Client(const sp<CameraService>& cameraService,
const sp<hardware::ICameraClient>& cameraClient,
std::shared_ptr<CameraServiceProxyWrapper> cameraServiceProxyWrapper,
+ std::shared_ptr<AttributionAndPermissionUtils> attributionAndPermissionUtils,
const std::string& clientPackageName,
const std::optional<std::string>& clientFeatureId,
const std::string& cameraDeviceId,
@@ -113,7 +114,7 @@
uid_t clientUid,
int servicePid,
bool overrideForPerfClass,
- bool overrideToPortrait,
+ int rotationOverride,
bool forceSlowJpegMode);
virtual ~Camera2Client();
diff --git a/services/camera/libcameraservice/api2/CameraDeviceClient.cpp b/services/camera/libcameraservice/api2/CameraDeviceClient.cpp
index 508d487..2990099 100644
--- a/services/camera/libcameraservice/api2/CameraDeviceClient.cpp
+++ b/services/camera/libcameraservice/api2/CameraDeviceClient.cpp
@@ -20,7 +20,6 @@
#include <com_android_internal_camera_flags.h>
#include <cutils/properties.h>
-#include <utils/CameraThreadState.h>
#include <utils/Log.h>
#include <utils/SessionConfigurationUtils.h>
#include <utils/Trace.h>
@@ -61,6 +60,7 @@
CameraDeviceClientBase::CameraDeviceClientBase(
const sp<CameraService>& cameraService,
const sp<hardware::camera2::ICameraDeviceCallbacks>& remoteCallback,
+ std::shared_ptr<AttributionAndPermissionUtils> attributionAndPermissionUtils,
const std::string& clientPackageName,
bool systemNativeClient,
const std::optional<std::string>& clientFeatureId,
@@ -71,9 +71,10 @@
int clientPid,
uid_t clientUid,
int servicePid,
- bool overrideToPortrait) :
+ int rotationOverride) :
BasicClient(cameraService,
IInterface::asBinder(remoteCallback),
+ attributionAndPermissionUtils,
clientPackageName,
systemNativeClient,
clientFeatureId,
@@ -83,7 +84,7 @@
clientPid,
clientUid,
servicePid,
- overrideToPortrait),
+ rotationOverride),
mRemoteCallback(remoteCallback) {
}
@@ -92,6 +93,7 @@
CameraDeviceClient::CameraDeviceClient(const sp<CameraService>& cameraService,
const sp<hardware::camera2::ICameraDeviceCallbacks>& remoteCallback,
std::shared_ptr<CameraServiceProxyWrapper> cameraServiceProxyWrapper,
+ std::shared_ptr<AttributionAndPermissionUtils> attributionAndPermissionUtils,
const std::string& clientPackageName,
bool systemNativeClient,
const std::optional<std::string>& clientFeatureId,
@@ -102,12 +104,13 @@
uid_t clientUid,
int servicePid,
bool overrideForPerfClass,
- bool overrideToPortrait,
+ int rotationOverride,
const std::string& originalCameraId) :
- Camera2ClientBase(cameraService, remoteCallback, cameraServiceProxyWrapper, clientPackageName,
+ Camera2ClientBase(cameraService, remoteCallback, cameraServiceProxyWrapper,
+ attributionAndPermissionUtils, clientPackageName,
systemNativeClient, clientFeatureId, cameraId, /*API1 camera ID*/ -1, cameraFacing,
sensorOrientation, clientPid, clientUid, servicePid, overrideForPerfClass,
- overrideToPortrait),
+ rotationOverride),
mInputStream(),
mStreamingRequestId(REQUEST_ID_NONE),
mRequestIdCounter(0),
@@ -534,27 +537,28 @@
// Save certain CaptureRequest settings
if (!request.mUserTag.empty()) {
- mUserTag = request.mUserTag;
+ mRunningSessionStats.mUserTag = request.mUserTag;
}
camera_metadata_entry entry =
physicalSettingsList.begin()->metadata.find(
ANDROID_CONTROL_VIDEO_STABILIZATION_MODE);
if (entry.count == 1) {
- mVideoStabilizationMode = entry.data.u8[0];
+ mRunningSessionStats.mVideoStabilizationMode = entry.data.u8[0];
}
- if (flags::log_ultrawide_usage()) {
+
+ if (!mRunningSessionStats.mUsedUltraWide && flags::log_ultrawide_usage()) {
entry = physicalSettingsList.begin()->metadata.find(
ANDROID_CONTROL_ZOOM_RATIO);
if (entry.count == 1 && entry.data.f[0] < 1.0f ) {
- mUsedUltraWide = true;
+ mRunningSessionStats.mUsedUltraWide = true;
}
}
- if (!mUsedSettingsOverrideZoom && flags::log_zoom_override_usage()) {
+ if (!mRunningSessionStats.mUsedSettingsOverrideZoom && flags::log_zoom_override_usage()) {
entry = physicalSettingsList.begin()->metadata.find(
ANDROID_CONTROL_SETTINGS_OVERRIDE);
if (entry.count == 1 && entry.data.i32[0] ==
ANDROID_CONTROL_SETTINGS_OVERRIDE_ZOOM) {
- mUsedSettingsOverrideZoom = true;
+ mRunningSessionStats.mUsedSettingsOverrideZoom = true;
}
}
}
@@ -888,6 +892,11 @@
Mutex::Autolock icl(mBinderSerializationLock);
+ if (!outputConfiguration.isComplete()) {
+ return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT,
+ "OutputConfiguration isn't valid!");
+ }
+
const std::vector<sp<IGraphicBufferProducer>>& bufferProducers =
outputConfiguration.getGraphicBufferProducers();
size_t numBufferProducers = bufferProducers.size();
@@ -904,7 +913,7 @@
bool useReadoutTimestamp = outputConfiguration.useReadoutTimestamp();
res = SessionConfigurationUtils::checkSurfaceType(numBufferProducers, deferredConsumer,
- outputConfiguration.getSurfaceType());
+ outputConfiguration.getSurfaceType(), /*isConfigurationComplete*/true);
if (!res.isOk()) {
return res;
}
@@ -947,7 +956,7 @@
res = SessionConfigurationUtils::createSurfaceFromGbp(streamInfo,
isStreamInfoValid, surface, bufferProducer, mCameraIdStr,
mDevice->infoPhysical(physicalCameraId), sensorPixelModesUsed, dynamicRangeProfile,
- streamUseCase, timestampBase, mirrorMode, colorSpace);
+ streamUseCase, timestampBase, mirrorMode, colorSpace, /*respectSurfaceSize*/false);
if (!res.isOk())
return res;
@@ -1060,6 +1069,10 @@
if (!mDevice.get()) {
return STATUS_ERROR(CameraService::ERROR_DISCONNECTED, "Camera device no longer alive");
}
+ if (!outputConfiguration.isComplete()) {
+ return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT,
+ "OutputConfiguration isn't valid!");
+ }
// Infer the surface info for deferred surface stream creation.
width = outputConfiguration.getWidth();
@@ -1252,6 +1265,10 @@
if (!mDevice.get()) {
return STATUS_ERROR(CameraService::ERROR_DISCONNECTED, "Camera device no longer alive");
}
+ if (!outputConfiguration.isComplete()) {
+ return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT,
+ "OutputConfiguration isn't valid!");
+ }
const std::vector<sp<IGraphicBufferProducer> >& bufferProducers =
outputConfiguration.getGraphicBufferProducers();
@@ -1319,7 +1336,7 @@
res = SessionConfigurationUtils::createSurfaceFromGbp(outInfo,
/*isStreamInfoValid*/ false, surface, newOutputsMap.valueAt(i), mCameraIdStr,
mDevice->infoPhysical(physicalCameraId), sensorPixelModesUsed, dynamicRangeProfile,
- streamUseCase, timestampBase, mirrorMode, colorSpace);
+ streamUseCase, timestampBase, mirrorMode, colorSpace, /*respectSurfaceSize*/false);
if (!res.isOk())
return res;
@@ -1632,6 +1649,11 @@
Mutex::Autolock icl(mBinderSerializationLock);
+ if (!outputConfiguration.isComplete()) {
+ return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT,
+ "OutputConfiguration isn't valid!");
+ }
+
const std::vector<sp<IGraphicBufferProducer> >& bufferProducers =
outputConfiguration.getGraphicBufferProducers();
const std::string &physicalId = outputConfiguration.getPhysicalCameraId();
@@ -1697,7 +1719,7 @@
res = SessionConfigurationUtils::createSurfaceFromGbp(mStreamInfoMap[streamId],
true /*isStreamInfoValid*/, surface, bufferProducer, mCameraIdStr,
mDevice->infoPhysical(physicalId), sensorPixelModesUsed, dynamicRangeProfile,
- streamUseCase, timestampBase, mirrorMode, colorSpace);
+ streamUseCase, timestampBase, mirrorMode, colorSpace, /*respectSurfaceSize*/false);
if (!res.isOk())
return res;
@@ -1900,9 +1922,9 @@
sp<CameraOfflineSessionClient> offlineClient;
if (offlineSession.get() != nullptr) {
offlineClient = new CameraOfflineSessionClient(sCameraService,
- offlineSession, offlineCompositeStreamMap, cameraCb, mClientPackageName,
- mClientFeatureId, mCameraIdStr, mCameraFacing, mOrientation, mClientPid, mClientUid,
- mServicePid);
+ offlineSession, offlineCompositeStreamMap, cameraCb, mAttributionAndPermissionUtils,
+ mClientPackageName, mClientFeatureId, mCameraIdStr, mCameraFacing, mOrientation,
+ mClientPid, mClientUid, mServicePid);
ret = sCameraService->addOfflineClient(mCameraIdStr, offlineClient);
}
@@ -2043,6 +2065,7 @@
void CameraDeviceClient::notifyIdle(
int64_t requestCount, int64_t resultErrorCount, bool deviceError,
+ std::pair<int32_t, int32_t> mostRequestedFpsRange,
const std::vector<hardware::CameraStreamStats>& streamStats) {
// Thread safe. Don't bother locking.
sp<hardware::camera2::ICameraDeviceCallbacks> remoteCb = getRemoteCallback();
@@ -2063,8 +2086,12 @@
}
}
Camera2ClientBase::notifyIdleWithUserTag(requestCount, resultErrorCount, deviceError,
- fullStreamStats, mUserTag, mVideoStabilizationMode, mUsedUltraWide,
- mUsedSettingsOverrideZoom);
+ mostRequestedFpsRange,
+ fullStreamStats,
+ mRunningSessionStats.mUserTag,
+ mRunningSessionStats.mVideoStabilizationMode,
+ mRunningSessionStats.mUsedUltraWide,
+ mRunningSessionStats.mUsedSettingsOverrideZoom);
}
void CameraDeviceClient::notifyShutter(const CaptureResultExtras& resultExtras,
@@ -2182,7 +2209,7 @@
// TODO: move to Camera2ClientBase
bool CameraDeviceClient::enforceRequestPermissions(CameraMetadata& metadata) {
- const int pid = CameraThreadState::getCallingPid();
+ const int pid = getCallingPid();
const int selfPid = getpid();
camera_metadata_entry_t entry;
@@ -2221,7 +2248,7 @@
String16 permissionString =
toString16("android.permission.CAMERA_DISABLE_TRANSMIT_LED");
if (!checkCallingPermission(permissionString)) {
- const int uid = CameraThreadState::getCallingUid();
+ const int uid = getCallingUid();
ALOGE("Permission Denial: "
"can't disable transmit LED pid=%d, uid=%d", pid, uid);
return false;
diff --git a/services/camera/libcameraservice/api2/CameraDeviceClient.h b/services/camera/libcameraservice/api2/CameraDeviceClient.h
index b2c9626..42f2752 100644
--- a/services/camera/libcameraservice/api2/CameraDeviceClient.h
+++ b/services/camera/libcameraservice/api2/CameraDeviceClient.h
@@ -50,6 +50,7 @@
protected:
CameraDeviceClientBase(const sp<CameraService>& cameraService,
const sp<hardware::camera2::ICameraDeviceCallbacks>& remoteCallback,
+ std::shared_ptr<AttributionAndPermissionUtils> attributionAndPermissionUtils,
const std::string& clientPackageName,
bool systemNativeClient,
const std::optional<std::string>& clientFeatureId,
@@ -60,7 +61,7 @@
int clientPid,
uid_t clientUid,
int servicePid,
- bool overrideToPortrait);
+ int rotationOverride);
sp<hardware::camera2::ICameraDeviceCallbacks> mRemoteCallback;
};
@@ -181,6 +182,7 @@
CameraDeviceClient(const sp<CameraService>& cameraService,
const sp<hardware::camera2::ICameraDeviceCallbacks>& remoteCallback,
std::shared_ptr<CameraServiceProxyWrapper> cameraServiceProxyWrapper,
+ std::shared_ptr<AttributionAndPermissionUtils> attributionAndPermissionUtils,
const std::string& clientPackageName,
bool clientPackageOverride,
const std::optional<std::string>& clientFeatureId,
@@ -191,7 +193,7 @@
uid_t clientUid,
int servicePid,
bool overrideForPerfClass,
- bool overrideToPortrait,
+ int rotationOverride,
const std::string& originalCameraId);
virtual ~CameraDeviceClient();
@@ -227,6 +229,7 @@
*/
virtual void notifyIdle(int64_t requestCount, int64_t resultErrorCount, bool deviceError,
+ std::pair<int32_t, int32_t> mostRequestedFpsRange,
const std::vector<hardware::CameraStreamStats>& streamStats);
virtual void notifyError(int32_t errorCode,
const CaptureResultExtras& resultExtras);
@@ -362,14 +365,17 @@
// Override the camera characteristics for performance class primary cameras.
bool mOverrideForPerfClass;
- // The string representation of object passed into CaptureRequest.setTag.
- std::string mUserTag;
- // The last set video stabilization mode
- int mVideoStabilizationMode = -1;
- // Whether a zoom_ratio < 1.0 has been used during this session
- bool mUsedUltraWide = false;
- // Whether a zoom settings override has been used during this session
- bool mUsedSettingsOverrideZoom = false;
+ // Various fields used to collect session statistics
+ struct RunningSessionStats {
+ // The string representation of object passed into CaptureRequest.setTag.
+ std::string mUserTag;
+ // The last set video stabilization mode
+ int mVideoStabilizationMode = -1;
+ // Whether a zoom_ratio < 1.0 has been used during this session
+ bool mUsedUltraWide = false;
+ // Whether a zoom settings override has been used during this session
+ bool mUsedSettingsOverrideZoom = false;
+ } mRunningSessionStats;
// This only exists in case of camera ID Remapping.
const std::string mOriginalCameraId;
diff --git a/services/camera/libcameraservice/api2/CameraOfflineSessionClient.cpp b/services/camera/libcameraservice/api2/CameraOfflineSessionClient.cpp
index dc9e0c1..9a1fdd6 100644
--- a/services/camera/libcameraservice/api2/CameraOfflineSessionClient.cpp
+++ b/services/camera/libcameraservice/api2/CameraOfflineSessionClient.cpp
@@ -19,7 +19,6 @@
//#define LOG_NDEBUG 0
#include "CameraOfflineSessionClient.h"
-#include "utils/CameraThreadState.h"
#include <utils/Trace.h>
#include <camera/StringUtils.h>
@@ -163,7 +162,7 @@
return res;
}
// Allow both client and the media server to disconnect at all times
- int callingPid = CameraThreadState::getCallingPid();
+ int callingPid = getCallingPid();
if (callingPid != mClientPid &&
callingPid != mServicePid) {
return res;
@@ -326,6 +325,7 @@
void CameraOfflineSessionClient::notifyIdle(
int64_t /*requestCount*/, int64_t /*resultErrorCount*/, bool /*deviceError*/,
+ std::pair<int32_t, int32_t> /*mostRequestedFpsRange*/,
const std::vector<hardware::CameraStreamStats>& /*streamStats*/) {
if (mRemoteCallback.get() != nullptr) {
mRemoteCallback->onDeviceIdle();
diff --git a/services/camera/libcameraservice/api2/CameraOfflineSessionClient.h b/services/camera/libcameraservice/api2/CameraOfflineSessionClient.h
index 804498f..77de874 100644
--- a/services/camera/libcameraservice/api2/CameraOfflineSessionClient.h
+++ b/services/camera/libcameraservice/api2/CameraOfflineSessionClient.h
@@ -17,6 +17,7 @@
#ifndef ANDROID_SERVERS_CAMERA_PHOTOGRAPHY_CAMERAOFFLINESESSIONCLIENT_H
#define ANDROID_SERVERS_CAMERA_PHOTOGRAPHY_CAMERAOFFLINESESSIONCLIENT_H
+#include <android/hardware/ICameraService.h>
#include <android/hardware/camera2/BnCameraOfflineSession.h>
#include <android/hardware/camera2/ICameraDeviceCallbacks.h>
#include "common/FrameProcessorBase.h"
@@ -47,6 +48,7 @@
sp<CameraOfflineSessionBase> session,
const KeyedVector<sp<IBinder>, sp<CompositeStream>>& offlineCompositeStreamMap,
const sp<ICameraDeviceCallbacks>& remoteCallback,
+ std::shared_ptr<AttributionAndPermissionUtils> attributionAndPermissionUtils,
const std::string& clientPackageName,
const std::optional<std::string>& clientFeatureId,
const std::string& cameraIdStr, int cameraFacing, int sensorOrientation,
@@ -54,10 +56,11 @@
CameraService::BasicClient(
cameraService,
IInterface::asBinder(remoteCallback),
+ attributionAndPermissionUtils,
// (v)ndk doesn't have offline session support
clientPackageName, /*overridePackageName*/false, clientFeatureId,
cameraIdStr, cameraFacing, sensorOrientation, clientPid, clientUid, servicePid,
- /*overrideToPortrait*/false),
+ hardware::ICameraService::ROTATION_OVERRIDE_NONE),
mRemoteCallback(remoteCallback), mOfflineSession(session),
mCompositeStreamMap(offlineCompositeStreamMap) {}
@@ -110,6 +113,7 @@
void notifyShutter(const CaptureResultExtras& resultExtras, nsecs_t timestamp) override;
status_t notifyActive(float maxPreviewFps) override;
void notifyIdle(int64_t requestCount, int64_t resultErrorCount, bool deviceError,
+ std::pair<int32_t, int32_t> mostRequestedFpsRange,
const std::vector<hardware::CameraStreamStats>& streamStats) override;
void notifyAutoFocus(uint8_t newState, int triggerId) override;
void notifyAutoExposure(uint8_t newState, int triggerId) override;
diff --git a/services/camera/libcameraservice/api2/JpegRCompositeStream.cpp b/services/camera/libcameraservice/api2/JpegRCompositeStream.cpp
index 988446b..a1b9383 100644
--- a/services/camera/libcameraservice/api2/JpegRCompositeStream.cpp
+++ b/services/camera/libcameraservice/api2/JpegRCompositeStream.cpp
@@ -856,8 +856,9 @@
bool deviceError;
std::map<int, StreamStats> stats;
+ std::pair<int32_t, int32_t> mostRequestedFps;
mSessionStatsBuilder.buildAndReset(&streamStats->mRequestCount, &streamStats->mErrorCount,
- &deviceError, &stats);
+ &deviceError, &mostRequestedFps, &stats);
if (stats.find(mP010StreamId) != stats.end()) {
streamStats->mWidth = mBlobWidth;
streamStats->mHeight = mBlobHeight;
diff --git a/services/camera/libcameraservice/common/Camera2ClientBase.cpp b/services/camera/libcameraservice/common/Camera2ClientBase.cpp
index a126f61..352c6f8 100644
--- a/services/camera/libcameraservice/common/Camera2ClientBase.cpp
+++ b/services/camera/libcameraservice/common/Camera2ClientBase.cpp
@@ -27,8 +27,10 @@
#include <gui/Surface.h>
#include <gui/Surface.h>
+#include <android/hardware/ICameraService.h>
#include <camera/CameraSessionStats.h>
#include <camera/StringUtils.h>
+#include <com_android_window_flags.h>
#include "common/Camera2ClientBase.h"
@@ -37,12 +39,13 @@
#include "device3/Camera3Device.h"
#include "device3/aidl/AidlCamera3Device.h"
#include "device3/hidl/HidlCamera3Device.h"
-#include "utils/CameraThreadState.h"
namespace android {
using namespace camera2;
+namespace wm_flags = com::android::window::flags;
+
// Interface used by CameraService
template <typename TClientBase>
@@ -50,6 +53,7 @@
const sp<CameraService>& cameraService,
const sp<TCamCallbacks>& remoteCallback,
std::shared_ptr<CameraServiceProxyWrapper> cameraServiceProxyWrapper,
+ std::shared_ptr<AttributionAndPermissionUtils> attributionAndPermissionUtils,
const std::string& clientPackageName,
bool systemNativeClient,
const std::optional<std::string>& clientFeatureId,
@@ -61,11 +65,11 @@
uid_t clientUid,
int servicePid,
bool overrideForPerfClass,
- bool overrideToPortrait,
+ int rotationOverride,
bool legacyClient):
- TClientBase(cameraService, remoteCallback, clientPackageName, systemNativeClient,
- clientFeatureId, cameraId, api1CameraId, cameraFacing, sensorOrientation, clientPid,
- clientUid, servicePid, overrideToPortrait),
+ TClientBase(cameraService, remoteCallback, attributionAndPermissionUtils, clientPackageName,
+ systemNativeClient, clientFeatureId, cameraId, api1CameraId, cameraFacing,
+ sensorOrientation, clientPid, clientUid, servicePid, rotationOverride),
mSharedCameraCallbacks(remoteCallback),
mCameraServiceProxyWrapper(cameraServiceProxyWrapper),
mDeviceActive(false), mApi1CameraId(api1CameraId)
@@ -82,7 +86,7 @@
status_t Camera2ClientBase<TClientBase>::checkPid(const char* checkLocation)
const {
- int callingPid = CameraThreadState::getCallingPid();
+ int callingPid = TClientBase::getCallingPid();
if (callingPid == TClientBase::mClientPid) return NO_ERROR;
ALOGE("%s: attempt to use a locked camera from a different process"
@@ -115,14 +119,16 @@
case IPCTransport::HIDL:
mDevice =
new HidlCamera3Device(mCameraServiceProxyWrapper,
+ TClientBase::mAttributionAndPermissionUtils,
TClientBase::mCameraIdStr, mOverrideForPerfClass,
- TClientBase::mOverrideToPortrait, mLegacyClient);
+ TClientBase::mRotationOverride, mLegacyClient);
break;
case IPCTransport::AIDL:
mDevice =
new AidlCamera3Device(mCameraServiceProxyWrapper,
+ TClientBase::mAttributionAndPermissionUtils,
TClientBase::mCameraIdStr, mOverrideForPerfClass,
- TClientBase::mOverrideToPortrait, mLegacyClient);
+ TClientBase::mRotationOverride, mLegacyClient);
break;
default:
ALOGE("%s Invalid transport for camera id %s", __FUNCTION__,
@@ -135,16 +141,17 @@
return NO_INIT;
}
+ // Verify ops permissions
+ res = TClientBase::startCameraOps();
+ if (res != OK) {
+ TClientBase::finishCameraOps();
+ return res;
+ }
+
res = mDevice->initialize(providerPtr, monitorTags);
if (res != OK) {
ALOGE("%s: Camera %s: unable to initialize device: %s (%d)",
__FUNCTION__, TClientBase::mCameraIdStr.c_str(), strerror(-res), res);
- return res;
- }
-
- // Verify ops permissions
- res = TClientBase::startCameraOps();
- if (res != OK) {
TClientBase::finishCameraOps();
return res;
}
@@ -266,7 +273,7 @@
ALOGD("Camera %s: serializationLock acquired", TClientBase::mCameraIdStr.c_str());
binder::Status res = binder::Status::ok();
// Allow both client and the media server to disconnect at all times
- int callingPid = CameraThreadState::getCallingPid();
+ int callingPid = TClientBase::getCallingPid();
if (callingPid != TClientBase::mClientPid &&
callingPid != TClientBase::mServicePid) return res;
@@ -305,18 +312,18 @@
Mutex::Autolock icl(mBinderSerializationLock);
if (TClientBase::mClientPid != 0 &&
- CameraThreadState::getCallingPid() != TClientBase::mClientPid) {
+ TClientBase::getCallingPid() != TClientBase::mClientPid) {
ALOGE("%s: Camera %s: Connection attempt from pid %d; "
"current locked to pid %d",
__FUNCTION__,
TClientBase::mCameraIdStr.c_str(),
- CameraThreadState::getCallingPid(),
+ TClientBase::getCallingPid(),
TClientBase::mClientPid);
return BAD_VALUE;
}
- TClientBase::mClientPid = CameraThreadState::getCallingPid();
+ TClientBase::mClientPid = TClientBase::getCallingPid();
TClientBase::mRemoteCallback = client;
mSharedCameraCallbacks = client;
@@ -336,8 +343,9 @@
template <typename TClientBase>
void Camera2ClientBase<TClientBase>::notifyPhysicalCameraChange(const std::string &physicalId) {
- // We're only interested in this notification if overrideToPortrait is turned on.
- if (!TClientBase::mOverrideToPortrait) {
+ using android::hardware::ICameraService;
+ // We're only interested in this notification if a rotation override is in effect.
+ if (TClientBase::mRotationOverride == ICameraService::ROTATION_OVERRIDE_NONE) {
return;
}
@@ -347,8 +355,13 @@
if (orientationEntry.count == 1) {
int orientation = orientationEntry.data.i32[0];
int rotateAndCropMode = ANDROID_SCALER_ROTATE_AND_CROP_NONE;
-
- if (orientation == 0 || orientation == 180) {
+ bool landscapeSensor = (orientation == 0 || orientation == 180);
+ if (((TClientBase::mRotationOverride ==
+ ICameraService::ROTATION_OVERRIDE_OVERRIDE_TO_PORTRAIT) && landscapeSensor) ||
+ ((wm_flags::camera_compat_for_freeform() &&
+ TClientBase::mRotationOverride ==
+ ICameraService::ROTATION_OVERRIDE_ROTATION_ONLY)
+ && !landscapeSensor)) {
rotateAndCropMode = ANDROID_SCALER_ROTATE_AND_CROP_90;
}
@@ -377,6 +390,7 @@
template <typename TClientBase>
void Camera2ClientBase<TClientBase>::notifyIdleWithUserTag(
int64_t requestCount, int64_t resultErrorCount, bool deviceError,
+ std::pair<int32_t, int32_t> mostRequestedFpsRange,
const std::vector<hardware::CameraStreamStats>& streamStats,
const std::string& userTag, int videoStabilizationMode, bool usedUltraWide,
bool usedZoomOverride) {
@@ -388,7 +402,7 @@
}
mCameraServiceProxyWrapper->logIdle(TClientBase::mCameraIdStr,
requestCount, resultErrorCount, deviceError, userTag, videoStabilizationMode,
- usedUltraWide, usedZoomOverride, streamStats);
+ usedUltraWide, usedZoomOverride, mostRequestedFpsRange, streamStats);
}
mDeviceActive = false;
diff --git a/services/camera/libcameraservice/common/Camera2ClientBase.h b/services/camera/libcameraservice/common/Camera2ClientBase.h
index 2bb90d9..c9d5735 100644
--- a/services/camera/libcameraservice/common/Camera2ClientBase.h
+++ b/services/camera/libcameraservice/common/Camera2ClientBase.h
@@ -21,6 +21,7 @@
#include "camera/CameraMetadata.h"
#include "camera/CaptureResult.h"
#include "utils/CameraServiceProxyWrapper.h"
+#include "utils/AttributionAndPermissionUtils.h"
#include "CameraServiceWatchdog.h"
namespace android {
@@ -51,6 +52,7 @@
Camera2ClientBase(const sp<CameraService>& cameraService,
const sp<TCamCallbacks>& remoteCallback,
std::shared_ptr<CameraServiceProxyWrapper> cameraServiceProxyWrapper,
+ std::shared_ptr<AttributionAndPermissionUtils> attributionAndPermissionUtils,
const std::string& clientPackageName,
bool systemNativeClient,
const std::optional<std::string>& clientFeatureId,
@@ -62,7 +64,7 @@
uid_t clientUid,
int servicePid,
bool overrideForPerfClass,
- bool overrideToPortrait,
+ int rotationOverride,
bool legacyClient = false);
virtual ~Camera2ClientBase();
@@ -84,6 +86,7 @@
virtual status_t notifyActive(float maxPreviewFps);
virtual void notifyIdle(int64_t /*requestCount*/, int64_t /*resultErrorCount*/,
bool /*deviceError*/,
+ std::pair<int32_t, int32_t> /*mostRequestedFpsRange*/,
const std::vector<hardware::CameraStreamStats>&) {}
virtual void notifyShutter(const CaptureResultExtras& resultExtras,
nsecs_t timestamp);
@@ -97,6 +100,7 @@
void notifyIdleWithUserTag(int64_t requestCount, int64_t resultErrorCount,
bool deviceError,
+ std::pair<int32_t, int32_t> mostRequestedFpsRange,
const std::vector<hardware::CameraStreamStats>& streamStats,
const std::string& userTag, int videoStabilizationMode,
bool usedUltraWide, bool usedZoomOverride);
diff --git a/services/camera/libcameraservice/common/CameraOfflineSessionBase.h b/services/camera/libcameraservice/common/CameraOfflineSessionBase.h
index 976c47c..b1ba761 100644
--- a/services/camera/libcameraservice/common/CameraOfflineSessionBase.h
+++ b/services/camera/libcameraservice/common/CameraOfflineSessionBase.h
@@ -46,6 +46,7 @@
// May return an error since it checks appops
virtual status_t notifyActive(float maxPreviewFps) = 0;
virtual void notifyIdle(int64_t requestCount, int64_t resultError, bool deviceError,
+ std::pair<int32_t, int32_t> mostRequestedFpsRange,
const std::vector<hardware::CameraStreamStats>& streamStats) = 0;
// Required only for API2
diff --git a/services/camera/libcameraservice/common/CameraProviderManager.cpp b/services/camera/libcameraservice/common/CameraProviderManager.cpp
index 15e2755..6416c11 100644
--- a/services/camera/libcameraservice/common/CameraProviderManager.cpp
+++ b/services/camera/libcameraservice/common/CameraProviderManager.cpp
@@ -38,6 +38,7 @@
#include <android/hidl/manager/1.2/IServiceManager.h>
#include <hidl/ServiceManagement.h>
#include <com_android_internal_camera_flags.h>
+#include <com_android_window_flags.h>
#include <functional>
#include <camera_metadata_hidden.h>
#include <android-base/parseint.h>
@@ -62,6 +63,7 @@
namespace flags = com::android::internal::camera::flags;
namespace vd_flags = android::companion::virtualdevice::flags;
+namespace wm_flags = com::android::window::flags;
namespace {
const bool kEnableLazyHal(property_get_bool("ro.camera.enableLazyHal", false));
@@ -403,13 +405,14 @@
}
status_t CameraProviderManager::getCameraInfo(const std::string &id,
- bool overrideToPortrait, int *portraitRotation, hardware::CameraInfo* info) const {
+ int rotationOverride, int *portraitRotation,
+ hardware::CameraInfo* info) const {
std::lock_guard<std::mutex> lock(mInterfaceMutex);
auto deviceInfo = findDeviceInfoLocked(id);
if (deviceInfo == nullptr) return NAME_NOT_FOUND;
- return deviceInfo->getCameraInfo(overrideToPortrait, portraitRotation, info);
+ return deviceInfo->getCameraInfo(rotationOverride, portraitRotation, info);
}
status_t CameraProviderManager::isSessionConfigurationSupported(const std::string& id,
@@ -421,8 +424,16 @@
return NAME_NOT_FOUND;
}
+ metadataGetter getMetadata = [this](const std::string &id,
+ bool overrideForPerfClass) {
+ CameraMetadata metadata;
+ this->getCameraCharacteristicsLocked(id, overrideForPerfClass,
+ &metadata,
+ hardware::ICameraService::ROTATION_OVERRIDE_NONE);
+ return metadata;
+ };
return deviceInfo->isSessionConfigurationSupported(configuration,
- overrideForPerfClass, checkSessionParams, status);
+ overrideForPerfClass, getMetadata, checkSessionParams, status);
}
status_t CameraProviderManager::createDefaultRequest(const std::string& cameraId,
@@ -439,9 +450,8 @@
return NAME_NOT_FOUND;
}
- camera_metadata_t *rawRequest;
status_t res = deviceInfo->createDefaultRequest(templateId,
- &rawRequest);
+ metadata);
if (res == BAD_VALUE) {
ALOGI("%s: template %d is not supported on this camera device",
@@ -453,15 +463,12 @@
return res;
}
- set_camera_metadata_vendor_id(rawRequest, deviceInfo->mProviderTagid);
- metadata->acquire(rawRequest);
-
return OK;
}
status_t CameraProviderManager::getSessionCharacteristics(
const std::string& id, const SessionConfiguration& configuration, bool overrideForPerfClass,
- bool overrideToPortrait, CameraMetadata* sessionCharacteristics /*out*/) const {
+ int rotationOverride, CameraMetadata* sessionCharacteristics /*out*/) const {
if (!flags::feature_combination_query()) {
return INVALID_OPERATION;
}
@@ -472,11 +479,11 @@
return NAME_NOT_FOUND;
}
- metadataGetter getMetadata = [this, overrideToPortrait](const std::string& id,
+ metadataGetter getMetadata = [this, rotationOverride](const std::string& id,
bool overrideForPerfClass) {
CameraMetadata metadata;
status_t ret = this->getCameraCharacteristicsLocked(id, overrideForPerfClass, &metadata,
- overrideToPortrait);
+ rotationOverride);
if (ret != OK) {
ALOGE("%s: Could not get CameraCharacteristics for device %s", __FUNCTION__,
id.c_str());
@@ -505,10 +512,10 @@
status_t CameraProviderManager::getCameraCharacteristics(const std::string &id,
bool overrideForPerfClass, CameraMetadata* characteristics,
- bool overrideToPortrait) const {
+ int rotationOverride) const {
std::lock_guard<std::mutex> lock(mInterfaceMutex);
return getCameraCharacteristicsLocked(id, overrideForPerfClass, characteristics,
- overrideToPortrait);
+ rotationOverride);
}
status_t CameraProviderManager::getHighestSupportedVersion(const std::string &id,
@@ -1091,20 +1098,6 @@
}
}
-CameraMetadata CameraProviderManager::ProviderInfo::DeviceInfo3::deviceInfo(
- const std::string &id) {
- if (id.empty()) {
- return mCameraCharacteristics;
- } else {
- if (mPhysicalCameraCharacteristics.find(id) != mPhysicalCameraCharacteristics.end()) {
- return mPhysicalCameraCharacteristics.at(id);
- } else {
- ALOGE("%s: Invalid physical camera id %s", __FUNCTION__, id.c_str());
- return mCameraCharacteristics;
- }
- }
-}
-
SystemCameraKind CameraProviderManager::ProviderInfo::DeviceInfo3::getSystemCameraKind() {
camera_metadata_entry_t entryCap;
entryCap = mCameraCharacteristics.find(ANDROID_REQUEST_AVAILABLE_CAPABILITIES);
@@ -1862,7 +1855,7 @@
auto& c = mCameraCharacteristics;
status_t res = c.update(ANDROID_INFO_SESSION_CONFIGURATION_QUERY_VERSION, &versionCode, 1);
-
+ mSessionConfigQueryVersion = versionCode;
return res;
}
@@ -2482,8 +2475,9 @@
device->hasFlashUnit() ? "true" : "false");
hardware::CameraInfo info;
int portraitRotation;
- status_t res = device->getCameraInfo(/*overrideToPortrait*/false, &portraitRotation,
- &info);
+ status_t res = device->getCameraInfo(
+ /*rotationOverride*/hardware::ICameraService::ROTATION_OVERRIDE_NONE,
+ &portraitRotation, &info);
if (res != OK) {
dprintf(fd, " <Error reading camera info: %s (%d)>\n",
strerror(-res), res);
@@ -2494,7 +2488,7 @@
}
CameraMetadata info2;
res = device->getCameraCharacteristics(true /*overrideForPerfClass*/, &info2,
- /*overrideToPortrait*/false);
+ hardware::ICameraService::ROTATION_OVERRIDE_NONE);
if (res == INVALID_OPERATION) {
dprintf(fd, " API2 not directly supported\n");
} else if (res != OK) {
@@ -2772,10 +2766,15 @@
}
status_t CameraProviderManager::ProviderInfo::DeviceInfo3::getCameraInfo(
- bool overrideToPortrait, int *portraitRotation,
+ int rotationOverride, int *portraitRotation,
hardware::CameraInfo *info) const {
if (info == nullptr) return BAD_VALUE;
+ bool freeform_compat_enabled = wm_flags::camera_compat_for_freeform();
+ if (!freeform_compat_enabled &&
+ rotationOverride > hardware::ICameraService::ROTATION_OVERRIDE_OVERRIDE_TO_PORTRAIT) {
+ ALOGW("Camera compat freeform flag disabled but rotation override is %d", rotationOverride);
+ }
camera_metadata_ro_entry facing =
mCameraCharacteristics.find(ANDROID_LENS_FACING);
if (facing.count == 1) {
@@ -2803,13 +2802,18 @@
return NAME_NOT_FOUND;
}
- if (overrideToPortrait && (info->orientation == 0 || info->orientation == 180)) {
+ if (rotationOverride == hardware::ICameraService::ROTATION_OVERRIDE_OVERRIDE_TO_PORTRAIT
+ && (info->orientation == 0 || info->orientation == 180)) {
*portraitRotation = 90;
if (info->facing == hardware::CAMERA_FACING_FRONT) {
info->orientation = (360 + info->orientation - 90) % 360;
} else {
info->orientation = (360 + info->orientation + 90) % 360;
}
+ } else if (freeform_compat_enabled &&
+ rotationOverride == hardware::ICameraService::ROTATION_OVERRIDE_ROTATION_ONLY
+ && (info->orientation == 90 || info->orientation == 270)) {
+ *portraitRotation = info->facing == hardware::CAMERA_FACING_BACK ? 90 : 270;
} else {
*portraitRotation = 0;
}
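For example, under ROTATION_OVERRIDE_OVERRIDE_TO_PORTRAIT a front-facing sensor mounted at orientation 0 reports portraitRotation = 90 and an adjusted orientation of (360 + 0 - 90) % 360 = 270, while a back-facing sensor at 180 reports (360 + 180 + 90) % 360 = 270. With the freeform-compat flag enabled and ROTATION_OVERRIDE_ROTATION_ONLY, a back-facing sensor mounted at 90 or 270 reports portraitRotation = 90, a front-facing one 270, and the reported orientation is left unchanged.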
@@ -2839,7 +2843,8 @@
}
status_t CameraProviderManager::ProviderInfo::DeviceInfo3::getCameraCharacteristics(
- bool overrideForPerfClass, CameraMetadata *characteristics, bool overrideToPortrait) {
+ bool overrideForPerfClass, CameraMetadata *characteristics,
+ int rotationOverride) {
if (characteristics == nullptr) return BAD_VALUE;
if (!overrideForPerfClass && mCameraCharNoPCOverride != nullptr) {
@@ -2848,7 +2853,7 @@
*characteristics = mCameraCharacteristics;
}
- if (overrideToPortrait) {
+ if (rotationOverride == hardware::ICameraService::ROTATION_OVERRIDE_OVERRIDE_TO_PORTRAIT) {
const auto &lensFacingEntry = characteristics->find(ANDROID_LENS_FACING);
const auto &sensorOrientationEntry = characteristics->find(ANDROID_SENSOR_ORIENTATION);
uint8_t lensFacing = lensFacingEntry.data.u8[0];
@@ -3209,11 +3214,11 @@
status_t CameraProviderManager::getCameraCharacteristicsLocked(const std::string &id,
bool overrideForPerfClass, CameraMetadata* characteristics,
- bool overrideToPortrait) const {
+ int rotationOverride) const {
auto deviceInfo = findDeviceInfoLocked(id);
if (deviceInfo != nullptr) {
return deviceInfo->getCameraCharacteristics(overrideForPerfClass, characteristics,
- overrideToPortrait);
+ rotationOverride);
}
// Find hidden physical camera characteristics
@@ -3249,8 +3254,9 @@
hardware::CameraInfo info;
int portraitRotation;
- status_t res = deviceInfo->getCameraInfo(/*overrideToPortrait*/false, &portraitRotation,
- &info);
+ status_t res = deviceInfo->getCameraInfo(
+ /*rotationOverride*/hardware::ICameraService::ROTATION_OVERRIDE_NONE,
+ &portraitRotation, &info);
if (res != OK) {
ALOGE("%s: Error reading camera info: %s (%d)", __FUNCTION__, strerror(-res), res);
continue;
diff --git a/services/camera/libcameraservice/common/CameraProviderManager.h b/services/camera/libcameraservice/common/CameraProviderManager.h
index 5ff3fcd..4a64b44 100644
--- a/services/camera/libcameraservice/common/CameraProviderManager.h
+++ b/services/camera/libcameraservice/common/CameraProviderManager.h
@@ -292,7 +292,8 @@
* Return the old camera API camera info
*/
status_t getCameraInfo(const std::string &id,
- bool overrideToPortrait, int *portraitRotation, hardware::CameraInfo* info) const;
+ int rotationOverride, int *portraitRotation,
+ hardware::CameraInfo* info) const;
/**
* Return API2 camera characteristics - returns NAME_NOT_FOUND if a device ID does
@@ -300,7 +301,7 @@
*/
status_t getCameraCharacteristics(const std::string &id,
bool overrideForPerfClass, CameraMetadata* characteristics,
- bool overrideToPortrait) const;
+ int rotationOverride) const;
status_t isConcurrentSessionConfigurationSupported(
const std::vector<hardware::camera2::utils::CameraIdAndSessionConfiguration>
@@ -331,7 +332,7 @@
status_t getSessionCharacteristics(const std::string& id,
const SessionConfiguration &configuration,
bool overrideForPerfClass,
- bool overrideToPortrait,
+ int rotationOverride,
CameraMetadata* sessionCharacteristics /*out*/) const;
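A minimal caller-side sketch of the declaration above; providerManager, cameraId and sessionConfig are placeholder names, not identifiers introduced by this change:

// Sketch: query the filtered session characteristics for a session
// configuration, with no rotation override applied.
CameraMetadata sessionChars;
status_t res = providerManager->getSessionCharacteristics(
        cameraId, sessionConfig, /*overrideForPerfClass*/ false,
        hardware::ICameraService::ROTATION_OVERRIDE_NONE, &sessionChars);
if (res != OK) {
    ALOGE("getSessionCharacteristics failed: %s (%d)", strerror(-res), res);
}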
/**
@@ -626,7 +627,8 @@
virtual status_t setTorchMode(bool enabled) = 0;
virtual status_t turnOnTorchWithStrengthLevel(int32_t torchStrength) = 0;
virtual status_t getTorchStrengthLevel(int32_t *torchStrength) = 0;
- virtual status_t getCameraInfo(bool overrideToPortrait,
+ virtual status_t getCameraInfo(
+ int rotationOverride,
int *portraitRotation,
hardware::CameraInfo *info) const = 0;
virtual bool isAPI1Compatible() const = 0;
@@ -634,7 +636,7 @@
virtual status_t getCameraCharacteristics(
[[maybe_unused]] bool overrideForPerfClass,
[[maybe_unused]] CameraMetadata *characteristics,
- [[maybe_unused]] bool overrideToPortrait) {
+ [[maybe_unused]] int rotationOverride) {
return INVALID_OPERATION;
}
virtual status_t getPhysicalCameraCharacteristics(
@@ -646,6 +648,7 @@
virtual status_t isSessionConfigurationSupported(
const SessionConfiguration &/*configuration*/,
bool /*overrideForPerfClass*/,
+ camera3::metadataGetter /*getMetadata*/,
bool /*checkSessionParams*/,
bool * /*status*/) {
return INVALID_OPERATION;
@@ -654,8 +657,7 @@
virtual status_t getSessionCharacteristics(
const SessionConfiguration &/*configuration*/,
bool /*overrideForPerfClass*/,
- camera3::metadataGetter /*getMetadata*/,
- CameraMetadata* /*sessionCharacteristics*/) {
+ camera3::metadataGetter /*getMetadata*/, CameraMetadata* /*outChars*/) {
return INVALID_OPERATION;
}
@@ -663,7 +665,7 @@
virtual void notifyDeviceStateChange(int64_t /*newState*/) {}
virtual status_t createDefaultRequest(
camera3::camera_request_template_t /*templateId*/,
- camera_metadata_t** /*metadata*/) {
+ CameraMetadata* /*metadata*/) {
return INVALID_OPERATION;
}
@@ -705,7 +707,8 @@
virtual status_t setTorchMode(bool enabled) = 0;
virtual status_t turnOnTorchWithStrengthLevel(int32_t torchStrength) = 0;
virtual status_t getTorchStrengthLevel(int32_t *torchStrength) = 0;
- virtual status_t getCameraInfo(bool overrideToPortrait,
+ virtual status_t getCameraInfo(
+ int rotationOverride,
int *portraitRotation,
hardware::CameraInfo *info) const override;
virtual bool isAPI1Compatible() const override;
@@ -713,7 +716,7 @@
virtual status_t getCameraCharacteristics(
bool overrideForPerfClass,
CameraMetadata *characteristics,
- bool overrideToPortrait) override;
+ int rotationOverride) override;
virtual status_t getPhysicalCameraCharacteristics(const std::string& physicalCameraId,
CameraMetadata *characteristics) const override;
virtual status_t filterSmallJpegSizes() override;
@@ -737,6 +740,10 @@
// Only contains characteristics for hidden physical cameras,
// not for public physical cameras.
std::unordered_map<std::string, CameraMetadata> mPhysicalCameraCharacteristics;
+ // Value filled in from addSessionConfigQueryVersionTag.
+ // Cached to make lookups faster
+ int mSessionConfigQueryVersion = 0;
+
void queryPhysicalCameraIds();
SystemCameraKind getSystemCameraKind();
status_t fixupMonochromeTags();
@@ -774,8 +781,6 @@
std::vector<int64_t>* stallDurations,
const camera_metadata_entry& halStreamConfigs,
const camera_metadata_entry& halStreamDurations);
-
- CameraMetadata deviceInfo(const std::string &id);
};
protected:
std::string mType;
@@ -915,7 +920,7 @@
const hardware::camera::common::V1_0::TorchModeStatus&);
status_t getCameraCharacteristicsLocked(const std::string &id, bool overrideForPerfClass,
- CameraMetadata* characteristics, bool overrideToPortrait) const;
+ CameraMetadata* characteristics, int rotationOverride) const;
void filterLogicalCameraIdsLocked(std::vector<std::string>& deviceIds) const;
status_t getSystemCameraKindLocked(const std::string& id, SystemCameraKind *kind) const;
diff --git a/services/camera/libcameraservice/common/aidl/AidlProviderInfo.cpp b/services/camera/libcameraservice/common/aidl/AidlProviderInfo.cpp
index a721d28..e76b750 100644
--- a/services/camera/libcameraservice/common/aidl/AidlProviderInfo.cpp
+++ b/services/camera/libcameraservice/common/aidl/AidlProviderInfo.cpp
@@ -17,6 +17,8 @@
#include "common/HalConversionsTemplated.h"
#include "common/CameraProviderInfoTemplated.h"
+#include <aidl/AidlUtils.h>
+
#include <com_android_internal_camera_flags.h>
#include <cutils/properties.h>
@@ -26,6 +28,7 @@
#include <android/hardware/ICameraService.h>
#include <camera_metadata_hidden.h>
+#include "device3/DistortionMapper.h"
#include "device3/ZoomRatioMapper.h"
#include <utils/SessionConfigurationUtils.h>
#include <utils/Trace.h>
@@ -42,8 +45,10 @@
using namespace aidl::android::hardware;
using namespace hardware::camera;
+using android::hardware::cameraservice::utils::conversion::aidl::copySessionCharacteristics;
using hardware::camera2::utils::CameraIdAndSessionConfiguration;
using hardware::ICameraService;
+using SessionConfigurationUtils::overrideDefaultRequestKeys;
using HalDeviceStatusType = aidl::android::hardware::camera::common::CameraDeviceStatus;
using ICameraProvider = aidl::android::hardware::camera::provider::ICameraProvider;
@@ -690,6 +695,14 @@
}
}
+ int deviceVersion = HARDWARE_DEVICE_API_VERSION(mVersion.get_major(), mVersion.get_minor());
+ if (deviceVersion >= CAMERA_DEVICE_API_VERSION_1_3) {
+ // This additional set of request keys must match the ones specified
+ // in ICameraDevice.isSessionConfigurationWithSettingsSupported.
+ mAdditionalKeysForFeatureQuery.insert(mAdditionalKeysForFeatureQuery.end(),
+ {ANDROID_CONTROL_VIDEO_STABILIZATION_MODE, ANDROID_CONTROL_AE_TARGET_FPS_RANGE});
+ }
+
if (!kEnableLazyHal) {
// Save HAL reference indefinitely
mSavedInterface = interface;
@@ -787,7 +800,7 @@
status_t AidlProviderInfo::AidlDeviceInfo3::isSessionConfigurationSupported(
const SessionConfiguration &configuration, bool overrideForPerfClass,
- bool checkSessionParams, bool *status) {
+ camera3::metadataGetter getMetadata, bool checkSessionParams, bool *status) {
auto operatingMode = configuration.getOperatingMode();
@@ -799,12 +812,10 @@
camera::device::StreamConfiguration streamConfiguration;
bool earlyExit = false;
- camera3::metadataGetter getMetadata = [this](const std::string &id,
- bool /*overrideForPerfClass*/) {return this->deviceInfo(id);};
auto bRes = SessionConfigurationUtils::convertToHALStreamCombination(configuration,
mId, mCameraCharacteristics, mCompositeJpegRDisabled, getMetadata,
mPhysicalIds, streamConfiguration, overrideForPerfClass, mProviderTagid,
- checkSessionParams, &earlyExit);
+ checkSessionParams, mAdditionalKeysForFeatureQuery, &earlyExit);
if (!bRes.isOk()) {
return UNKNOWN_ERROR;
@@ -851,7 +862,7 @@
}
status_t AidlProviderInfo::AidlDeviceInfo3::createDefaultRequest(
- camera3::camera_request_template_t templateId, camera_metadata_t** metadata) {
+ camera3::camera_request_template_t templateId, CameraMetadata* metadata) {
const std::shared_ptr<camera::device::ICameraDevice> interface =
startDeviceInterface();
@@ -887,11 +898,12 @@
}
const camera_metadata *r =
reinterpret_cast<const camera_metadata_t*>(request.metadata.data());
+ camera_metadata *rawRequest = nullptr;
size_t expectedSize = request.metadata.size();
int ret = validate_camera_metadata_structure(r, &expectedSize);
if (ret == OK || ret == CAMERA_METADATA_VALIDATION_SHIFTED) {
- *metadata = clone_camera_metadata(r);
- if (*metadata == nullptr) {
+ rawRequest = clone_camera_metadata(r);
+ if (rawRequest == nullptr) {
ALOGE("%s: Unable to clone camera metadata received from HAL",
__FUNCTION__);
res = UNKNOWN_ERROR;
@@ -901,18 +913,28 @@
res = UNKNOWN_ERROR;
}
+ set_camera_metadata_vendor_id(rawRequest, mProviderTagid);
+ metadata->acquire(rawRequest);
+
+ res = overrideDefaultRequestKeys(metadata);
+ if (res != OK) {
+ ALOGE("Unabled to override default request keys: %s (%d)",
+ strerror(-res), res);
+ return res;
+ }
+
return res;
}
status_t AidlProviderInfo::AidlDeviceInfo3::getSessionCharacteristics(
const SessionConfiguration &configuration, bool overrideForPerfClass,
- camera3::metadataGetter getMetadata, CameraMetadata *sessionCharacteristics) {
+ camera3::metadataGetter getMetadata, CameraMetadata* outChars) {
camera::device::StreamConfiguration streamConfiguration;
bool earlyExit = false;
auto res = SessionConfigurationUtils::convertToHALStreamCombination(configuration,
mId, mCameraCharacteristics, mCompositeJpegRDisabled, getMetadata,
mPhysicalIds, streamConfiguration, overrideForPerfClass, mProviderTagid,
- /*checkSessionParams*/true, &earlyExit);
+ /*checkSessionParams*/true, mAdditionalKeysForFeatureQuery, &earlyExit);
if (!res.isOk()) {
return UNKNOWN_ERROR;
@@ -932,24 +954,32 @@
aidl::android::hardware::camera::device::CameraMetadata chars;
::ndk::ScopedAStatus ret =
interface->getSessionCharacteristics(streamConfiguration, &chars);
- std::vector<uint8_t> &metadata = chars.metadata;
+ if (!ret.isOk()) {
+ ALOGE("%s: Unexpected binder error while getting session characteristics (%d): %s",
+ __FUNCTION__, ret.getExceptionCode(), ret.getMessage());
+ return mapToStatusT(ret);
+ }
- camera_metadata_t *buffer = reinterpret_cast<camera_metadata_t*>(metadata.data());
+ std::vector<uint8_t> &metadata = chars.metadata;
+ auto *buffer = reinterpret_cast<camera_metadata_t*>(metadata.data());
size_t expectedSize = metadata.size();
int resV = validate_camera_metadata_structure(buffer, &expectedSize);
if (resV == OK || resV == CAMERA_METADATA_VALIDATION_SHIFTED) {
set_camera_metadata_vendor_id(buffer, mProviderTagid);
- *sessionCharacteristics = buffer;
} else {
ALOGE("%s: Malformed camera metadata received from HAL", __FUNCTION__);
return BAD_VALUE;
}
- if (!ret.isOk()) {
- ALOGE("%s: Unexpected binder error: %s", __FUNCTION__, ret.getMessage());
- return mapToStatusT(ret);
- }
- return OK;
+ CameraMetadata rawSessionChars;
+ rawSessionChars = buffer; // clone buffer
+ rawSessionChars.sort(); // sort for faster lookups
+
+ *outChars = mCameraCharacteristics;
+ outChars->sort(); // sort for faster reads and (hopefully!) writes
+
+ return copySessionCharacteristics(/*from=*/rawSessionChars, /*to=*/outChars,
+ mSessionConfigQueryVersion);
}
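getSessionCharacteristics() now returns a copy of the static characteristics with the HAL's session-specific values overlaid on top, delegating the actual key copy to copySessionCharacteristics(); both sides are sorted first so the lookups stay cheap. The key selection itself is not visible in this hunk, so the sketch below only illustrates the overlay idea; the helper name, key list, and handled payload types are placeholder assumptions:

    // Illustrative overlay only; the real logic lives in copySessionCharacteristics().
    void overlaySessionKeys(const CameraMetadata& from, CameraMetadata* to,
                            const std::vector<uint32_t>& keys) {
        for (uint32_t key : keys) {
            camera_metadata_ro_entry e = from.find(key);
            if (e.count == 0) continue;
            switch (e.type) {
                case TYPE_INT32: to->update(key, e.data.i32, e.count); break;
                case TYPE_INT64: to->update(key, e.data.i64, e.count); break;
                case TYPE_FLOAT: to->update(key, e.data.f, e.count);   break;
                default: break;  // other payload types omitted in this sketch
            }
        }
    }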
status_t AidlProviderInfo::convertToAidlHALStreamCombinationAndCameraIdsLocked(
@@ -971,16 +1001,16 @@
SessionConfigurationUtils::targetPerfClassPrimaryCamera(
perfClassPrimaryCameraIds, cameraId, targetSdkVersion);
res = mManager->getCameraCharacteristicsLocked(cameraId, overrideForPerfClass, &deviceInfo,
- /*overrideToPortrait*/false);
+ hardware::ICameraService::ROTATION_OVERRIDE_NONE);
if (res != OK) {
return res;
}
camera3::metadataGetter getMetadata =
[this](const std::string &id, bool overrideForPerfClass) {
CameraMetadata physicalDeviceInfo;
- mManager->getCameraCharacteristicsLocked(id, overrideForPerfClass,
- &physicalDeviceInfo,
- /*overrideToPortrait*/false);
+ mManager->getCameraCharacteristicsLocked(
+ id, overrideForPerfClass, &physicalDeviceInfo,
+ hardware::ICameraService::ROTATION_OVERRIDE_NONE);
return physicalDeviceInfo;
};
std::vector<std::string> physicalCameraIds;
@@ -992,7 +1022,8 @@
mManager->isCompositeJpegRDisabledLocked(cameraId), getMetadata,
physicalCameraIds, streamConfiguration,
overrideForPerfClass, mProviderTagid,
- /*checkSessionParams*/false, &shouldExit);
+ /*checkSessionParams*/false, /*additionalKeys*/{},
+ &shouldExit);
if (!bStatus.isOk()) {
ALOGE("%s: convertToHALStreamCombination failed", __FUNCTION__);
return INVALID_OPERATION;
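With the in-place lambdas removed above, both provider paths now require the caller to pass the metadataGetter, so characteristics lookups can honor the caller's rotation override instead of a hard-coded /*overrideToPortrait*/false. A hypothetical call site, assuming camera3::metadataGetter matches the lambda shape used in this file (the 'manager' pointer and the 'deviceInfo' target are placeholders):

    // Hypothetical call site; 'manager' and 'deviceInfo' are placeholders.
    camera3::metadataGetter getMetadata =
            [manager](const std::string& id, bool overrideForPerfClass) {
        CameraMetadata info;
        manager->getCameraCharacteristicsLocked(
                id, overrideForPerfClass, &info,
                hardware::ICameraService::ROTATION_OVERRIDE_NONE);
        return info;
    };
    bool supported = false;
    status_t res = deviceInfo->isSessionConfigurationSupported(
            configuration, /*overrideForPerfClass*/ false, getMetadata,
            /*checkSessionParams*/ true, &supported);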
diff --git a/services/camera/libcameraservice/common/aidl/AidlProviderInfo.h b/services/camera/libcameraservice/common/aidl/AidlProviderInfo.h
index 0bfa7d4..1983cc3 100644
--- a/services/camera/libcameraservice/common/aidl/AidlProviderInfo.h
+++ b/services/camera/libcameraservice/common/aidl/AidlProviderInfo.h
@@ -127,20 +127,21 @@
virtual status_t isSessionConfigurationSupported(
const SessionConfiguration &/*configuration*/,
- bool overrideForPerfClass, bool checkSessionParams,
- bool *status/*status*/);
+ bool overrideForPerfClass, camera3::metadataGetter getMetadata,
+ bool checkSessionParams, bool *status/*status*/);
virtual status_t createDefaultRequest(
camera3::camera_request_template_t templateId,
- camera_metadata_t** metadata) override;
+ CameraMetadata* metadata) override;
virtual status_t getSessionCharacteristics(
const SessionConfiguration &/*configuration*/,
bool overrideForPerfClass, camera3::metadataGetter /*getMetadata*/,
- CameraMetadata *sessionCharacteristics /*sessionCharacteristics*/);
+ CameraMetadata */*outChars*/);
std::shared_ptr<aidl::android::hardware::camera::device::ICameraDevice>
startDeviceInterface();
+ std::vector<int32_t> mAdditionalKeysForFeatureQuery;
};
private:
diff --git a/services/camera/libcameraservice/common/hidl/HidlProviderInfo.cpp b/services/camera/libcameraservice/common/hidl/HidlProviderInfo.cpp
index 065f0c5..1e546fb 100644
--- a/services/camera/libcameraservice/common/hidl/HidlProviderInfo.cpp
+++ b/services/camera/libcameraservice/common/hidl/HidlProviderInfo.cpp
@@ -861,7 +861,7 @@
status_t HidlProviderInfo::HidlDeviceInfo3::isSessionConfigurationSupported(
const SessionConfiguration &configuration, bool overrideForPerfClass,
- bool checkSessionParams, bool *status) {
+ camera3::metadataGetter getMetadata, bool checkSessionParams, bool *status) {
if (checkSessionParams) {
// HIDL device doesn't support checking session parameters
@@ -870,8 +870,6 @@
hardware::camera::device::V3_7::StreamConfiguration configuration_3_7;
bool earlyExit = false;
- camera3::metadataGetter getMetadata = [this](const std::string &id,
- bool /*overrideForPerfClass*/) {return this->deviceInfo(id);};
auto bRes = SessionConfigurationUtils::convertToHALStreamCombination(configuration,
mId, mCameraCharacteristics, getMetadata, mPhysicalIds,
configuration_3_7, overrideForPerfClass, mProviderTagid,
@@ -959,7 +957,7 @@
SessionConfigurationUtils::targetPerfClassPrimaryCamera(
perfClassPrimaryCameraIds, cameraId, targetSdkVersion);
res = mManager->getCameraCharacteristicsLocked(cameraId, overrideForPerfClass, &deviceInfo,
- /*overrideToPortrait*/false);
+ hardware::ICameraService::ROTATION_OVERRIDE_NONE);
if (res != OK) {
return res;
}
@@ -967,7 +965,7 @@
[this](const std::string &id, bool overrideForPerfClass) {
CameraMetadata physicalDeviceInfo;
mManager->getCameraCharacteristicsLocked(id, overrideForPerfClass,
- &physicalDeviceInfo, /*overrideToPortrait*/false);
+ &physicalDeviceInfo, hardware::ICameraService::ROTATION_OVERRIDE_NONE);
return physicalDeviceInfo;
};
std::vector<std::string> physicalCameraIds;
diff --git a/services/camera/libcameraservice/common/hidl/HidlProviderInfo.h b/services/camera/libcameraservice/common/hidl/HidlProviderInfo.h
index 869bba0..2838f03 100644
--- a/services/camera/libcameraservice/common/hidl/HidlProviderInfo.h
+++ b/services/camera/libcameraservice/common/hidl/HidlProviderInfo.h
@@ -105,8 +105,8 @@
virtual status_t isSessionConfigurationSupported(
const SessionConfiguration &/*configuration*/,
- bool overrideForPerfClass, bool checkSessionParams,
- bool *status/*status*/);
+ bool overrideForPerfClass, camera3::metadataGetter /*getMetadata*/,
+ bool checkSessionParams, bool *status/*status*/);
sp<hardware::camera::device::V3_2::ICameraDevice> startDeviceInterface();
};
diff --git a/services/camera/libcameraservice/device3/Camera3Device.cpp b/services/camera/libcameraservice/device3/Camera3Device.cpp
index a51ddb7..97cfdac 100644
--- a/services/camera/libcameraservice/device3/Camera3Device.cpp
+++ b/services/camera/libcameraservice/device3/Camera3Device.cpp
@@ -65,7 +65,6 @@
#include "device3/Camera3InputStream.h"
#include "device3/Camera3OutputStream.h"
#include "device3/Camera3SharedOutputStream.h"
-#include "utils/CameraThreadState.h"
#include "utils/CameraTraces.h"
#include "utils/SchedulingPolicyUtils.h"
#include "utils/SessionConfigurationUtils.h"
@@ -85,8 +84,10 @@
namespace android {
Camera3Device::Camera3Device(std::shared_ptr<CameraServiceProxyWrapper>& cameraServiceProxyWrapper,
- const std::string &id, bool overrideForPerfClass, bool overrideToPortrait,
+ std::shared_ptr<AttributionAndPermissionUtils> attributionAndPermissionUtils,
+ const std::string &id, bool overrideForPerfClass, int rotationOverride,
bool legacyClient):
+ AttributionAndPermissionUtilsEncapsulator(attributionAndPermissionUtils),
mCameraServiceProxyWrapper(cameraServiceProxyWrapper),
mId(id),
mLegacyClient(legacyClient),
@@ -110,7 +111,7 @@
mLastTemplateId(-1),
mNeedFixupMonochromeTags(false),
mOverrideForPerfClass(overrideForPerfClass),
- mOverrideToPortrait(overrideToPortrait),
+ mRotationOverride(rotationOverride),
mRotateAndCropOverride(ANDROID_SCALER_ROTATE_AND_CROP_NONE),
mComposerOutput(false),
mAutoframingOverride(ANDROID_CONTROL_AUTOFRAMING_OFF),
@@ -186,7 +187,7 @@
/** Start up request queue thread */
mRequestThread = createNewRequestThread(
this, mStatusTracker, mInterface, sessionParamKeys,
- mUseHalBufManager, mSupportCameraMute, mOverrideToPortrait,
+ mUseHalBufManager, mSupportCameraMute, mRotationOverride,
mSupportZoomOverride);
res = mRequestThread->run((std::string("C3Dev-") + mId + "-ReqQueue").c_str());
if (res != OK) {
@@ -197,6 +198,8 @@
return res;
}
+ setCameraMuteLocked(mCameraMuteInitial);
+
mPreparerThread = new PreparerThread();
internalUpdateStatusLocked(STATUS_UNCONFIGURED);
@@ -1363,7 +1366,8 @@
status_t Camera3Device::filterParamsAndConfigureLocked(const CameraMetadata& params,
int operatingMode) {
CameraMetadata filteredParams;
- SessionConfigurationUtils::filterParameters(params, mDeviceInfo, mVendorTagId, filteredParams);
+ SessionConfigurationUtils::filterParameters(params, mDeviceInfo,
+ /*additionalKeys*/{}, mVendorTagId, filteredParams);
camera_metadata_entry_t availableSessionKeys = mDeviceInfo.find(
ANDROID_REQUEST_AVAILABLE_SESSION_KEYS);
@@ -1394,7 +1398,7 @@
request->mRotateAndCropAuto = false;
}
- overrideAutoRotateAndCrop(request, mOverrideToPortrait, mRotateAndCropOverride);
+ overrideAutoRotateAndCrop(request, mRotationOverride, mRotateAndCropOverride);
}
if (autoframingSessionKey) {
@@ -1433,7 +1437,7 @@
if (templateId <= 0 || templateId >= CAMERA_TEMPLATE_COUNT) {
android_errorWriteWithInfoLog(CameraService::SN_EVENT_LOG_ID, "26866110",
- CameraThreadState::getCallingUid(), nullptr, 0);
+ getCallingUid(), nullptr, 0);
return BAD_VALUE;
}
@@ -1484,29 +1488,13 @@
set_camera_metadata_vendor_id(rawRequest, mVendorTagId);
mRequestTemplateCache[templateId].acquire(rawRequest);
- // Override the template request with zoomRatioMapper
- res = mZoomRatioMappers[mId].initZoomRatioInTemplate(
- &mRequestTemplateCache[templateId]);
+ res = overrideDefaultRequestKeys(&mRequestTemplateCache[templateId]);
if (res != OK) {
- CLOGE("Failed to update zoom ratio for template %d: %s (%d)",
+ CLOGE("Failed to overrideDefaultRequestKeys for template %d: %s (%d)",
templateId, strerror(-res), res);
return res;
}
- // Fill in JPEG_QUALITY if not available
- if (!mRequestTemplateCache[templateId].exists(ANDROID_JPEG_QUALITY)) {
- static const uint8_t kDefaultJpegQuality = 95;
- mRequestTemplateCache[templateId].update(ANDROID_JPEG_QUALITY,
- &kDefaultJpegQuality, 1);
- }
-
- // Fill in AUTOFRAMING if not available
- if (!mRequestTemplateCache[templateId].exists(ANDROID_CONTROL_AUTOFRAMING)) {
- static const uint8_t kDefaultAutoframingMode = ANDROID_CONTROL_AUTOFRAMING_OFF;
- mRequestTemplateCache[templateId].update(ANDROID_CONTROL_AUTOFRAMING,
- &kDefaultAutoframingMode, 1);
- }
-
*request = mRequestTemplateCache[templateId];
mLastTemplateId = templateId;
}
@@ -1977,9 +1965,10 @@
// Get session stats from the builder, and notify the listener.
int64_t requestCount, resultErrorCount;
bool deviceError;
+ std::pair<int32_t, int32_t> mostRequestedFpsRange;
std::map<int, StreamStats> streamStatsMap;
mSessionStatsBuilder.buildAndReset(&requestCount, &resultErrorCount,
- &deviceError, &streamStatsMap);
+ &deviceError, &mostRequestedFpsRange, &streamStatsMap);
for (size_t i = 0; i < streamIds.size(); i++) {
int streamId = streamIds[i];
auto stats = streamStatsMap.find(streamId);
@@ -1997,7 +1986,8 @@
stats->second.mCaptureLatencyHistogram.end());
}
}
- listener->notifyIdle(requestCount, resultErrorCount, deviceError, streamStats);
+ listener->notifyIdle(requestCount, resultErrorCount, deviceError,
+ mostRequestedFpsRange, streamStats);
} else {
res = listener->notifyActive(sessionMaxPreviewFps);
}
@@ -2943,6 +2933,16 @@
physicalMetadata, outputBuffers, numOutputBuffers, inputStreamId);
}
+void Camera3Device::collectRequestStats(int64_t frameNumber, const CameraMetadata &request) {
+ if (flags::analytics_24q3()) {
+ auto entry = request.find(ANDROID_CONTROL_AE_TARGET_FPS_RANGE);
+ if (entry.count >= 2) {
+ mSessionStatsBuilder.incFpsRequestedCount(
+ entry.data.i32[0], entry.data.i32[1], frameNumber);
+ }
+ }
+}
+
void Camera3Device::cleanupNativeHandles(
std::vector<native_handle_t*> *handles, bool closeFd) {
if (handles == nullptr) {
@@ -3041,7 +3041,7 @@
sp<HalInterface> interface, const Vector<int32_t>& sessionParamKeys,
bool useHalBufManager,
bool supportCameraMute,
- bool overrideToPortrait,
+ int rotationOverride,
bool supportSettingsOverride) :
Thread(/*canCallJava*/false),
mParent(parent),
@@ -3075,7 +3075,7 @@
mLatestSessionParams(sessionParamKeys.size()),
mUseHalBufManager(useHalBufManager),
mSupportCameraMute(supportCameraMute),
- mOverrideToPortrait(overrideToPortrait),
+ mRotationOverride(rotationOverride),
mSupportSettingsOverride(supportSettingsOverride) {
mStatusId = statusTracker->addComponent("RequestThread");
mVndkVersion = getVNDKVersionFromProp(__ANDROID_API_FUTURE__);
@@ -3483,7 +3483,8 @@
void Camera3Device::RequestThread::updateNextRequest(NextRequest& nextRequest) {
// Update the latest request sent to HAL
camera_capture_request_t& halRequest = nextRequest.halRequest;
- if (halRequest.settings != NULL) { // Don't update if they were unchanged
+ sp<Camera3Device> parent = mParent.promote();
+ if (halRequest.settings != nullptr) { // Don't update if they were unchanged
Mutex::Autolock al(mLatestRequestMutex);
camera_metadata_t* cloned = clone_camera_metadata(halRequest.settings);
@@ -3496,8 +3497,7 @@
CameraMetadata(cloned));
}
- sp<Camera3Device> parent = mParent.promote();
- if (parent != NULL) {
+ if (parent != nullptr) {
int32_t inputStreamId = -1;
if (halRequest.input_buffer != nullptr) {
inputStreamId = Camera3Stream::cast(halRequest.input_buffer->stream)->getId();
@@ -3509,8 +3509,11 @@
halRequest.num_output_buffers, inputStreamId);
}
}
+ if (parent != nullptr) {
+ parent->collectRequestStats(halRequest.frame_number, mLatestRequest);
+ }
- if (halRequest.settings != NULL) {
+ if (halRequest.settings != nullptr) {
nextRequest.captureRequest->mSettingsList.begin()->metadata.unlock(
halRequest.settings);
}
@@ -3610,10 +3613,12 @@
sp<CaptureRequest> captureRequest = nextRequest.captureRequest;
captureRequest->mTestPatternChanged = overrideTestPattern(captureRequest);
// Do not override rotate&crop for stream configurations that include
- // SurfaceViews(HW_COMPOSER) output, unless mOverrideToPortrait is set.
+ // SurfaceViews(HW_COMPOSER) output, unless mRotationOverride is set.
// The display rotation there will be compensated by NATIVE_WINDOW_TRANSFORM_INVERSE_DISPLAY
- captureRequest->mRotateAndCropChanged = (mComposerOutput && !mOverrideToPortrait) ? false :
- overrideAutoRotateAndCrop(captureRequest);
+ using hardware::ICameraService::ROTATION_OVERRIDE_NONE;
+ captureRequest->mRotateAndCropChanged =
+ (mComposerOutput && (mRotationOverride == ROTATION_OVERRIDE_NONE)) ?
+ false : overrideAutoRotateAndCrop(captureRequest);
captureRequest->mAutoframingChanged = overrideAutoframing(captureRequest);
if (flags::inject_session_params()) {
injectSessionParams(captureRequest, mInjectedSessionParams);
@@ -3665,19 +3670,18 @@
cleanUpFailedRequests(/*sendRequestError*/ true);
// Check if any stream is abandoned.
checkAndStopRepeatingRequest();
+ // Inform waitUntilRequestProcessed thread of a failed request ID
+ wakeupLatestRequest(/*failedRequestId*/true, latestRequestId);
return true;
} else if (res != OK) {
cleanUpFailedRequests(/*sendRequestError*/ false);
+ // Inform waitUntilRequestProcessed thread of a failed request ID
+ wakeupLatestRequest(/*failedRequestId*/true, latestRequestId);
return false;
}
// Inform waitUntilRequestProcessed thread of a new request ID
- {
- Mutex::Autolock al(mLatestRequestMutex);
-
- mLatestRequestId = latestRequestId;
- mLatestRequestSignal.signal();
- }
+ wakeupLatestRequest(/*failedRequestId*/false, latestRequestId);
// Submit a batch of requests to HAL.
// Use flush lock only when submitting multiple requests in a batch.
@@ -4432,12 +4436,7 @@
hardware::camera2::ICameraDeviceCallbacks::ERROR_CAMERA_REQUEST,
captureRequest->mResultExtras);
}
- {
- Mutex::Autolock al(mLatestRequestMutex);
-
- mLatestFailedRequestId = captureRequest->mResultExtras.requestId;
- mLatestRequestSignal.signal();
- }
+ wakeupLatestRequest(/*failedRequestId*/true, captureRequest->mResultExtras.requestId);
}
// Remove yet-to-be submitted inflight request from inflightMap
@@ -4889,16 +4888,16 @@
bool Camera3Device::RequestThread::overrideAutoRotateAndCrop(const sp<CaptureRequest> &request) {
ATRACE_CALL();
Mutex::Autolock l(mTriggerMutex);
- return Camera3Device::overrideAutoRotateAndCrop(request, this->mOverrideToPortrait,
+ return Camera3Device::overrideAutoRotateAndCrop(request, this->mRotationOverride,
this->mRotateAndCropOverride);
}
bool Camera3Device::overrideAutoRotateAndCrop(const sp<CaptureRequest> &request,
- bool overrideToPortrait,
+ int rotationOverride,
camera_metadata_enum_android_scaler_rotate_and_crop_t rotateAndCropOverride) {
ATRACE_CALL();
- if (overrideToPortrait) {
+ if (rotationOverride != hardware::ICameraService::ROTATION_OVERRIDE_NONE) {
uint8_t rotateAndCrop_u8 = rotateAndCropOverride;
CameraMetadata &metadata = request->mSettingsList.begin()->metadata;
metadata.update(ANDROID_SCALER_ROTATE_AND_CROP,
@@ -5099,6 +5098,20 @@
return OK;
}
+void Camera3Device::RequestThread::wakeupLatestRequest(
+ bool latestRequestFailed,
+ int32_t latestRequestId) {
+ Mutex::Autolock al(mLatestRequestMutex);
+
+ if (latestRequestFailed) {
+ mLatestFailedRequestId = latestRequestId;
+ } else {
+ mLatestRequestId = latestRequestId;
+ }
+ mLatestRequestSignal.signal();
+}
+
+
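wakeupLatestRequest() centralizes the signalling that was previously inlined at each success or failure site, so every outcome now wakes the waiter. The consumer side is not part of this patch; the comments above name waitUntilRequestProcessed as that consumer, and a hedged sketch of what it likely looks like (exact body assumed, not shown here) is:

    // Hedged sketch of the waiter; either the applied or the failed request ID
    // satisfies the wait. The exact body is not part of this patch.
    status_t Camera3Device::RequestThread::waitUntilRequestProcessed(
            int32_t requestId, nsecs_t timeout) {
        Mutex::Autolock l(mLatestRequestMutex);
        while (mLatestRequestId != requestId && mLatestFailedRequestId != requestId) {
            nsecs_t startTime = systemTime();
            status_t res = mLatestRequestSignal.waitRelative(mLatestRequestMutex, timeout);
            if (res != OK) return res;  // typically TIMED_OUT
            timeout -= (systemTime() - startTime);
        }
        return OK;
    }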
/**
* PreparerThread inner class methods
*/
@@ -5567,10 +5580,19 @@
ATRACE_CALL();
Mutex::Autolock il(mInterfaceLock);
Mutex::Autolock l(mLock);
+ return setCameraMuteLocked(enabled);
+}
- if (mRequestThread == nullptr || !mSupportCameraMute) {
+status_t Camera3Device::setCameraMuteLocked(bool enabled) {
+ if (mRequestThread == nullptr) {
+ mCameraMuteInitial = enabled;
+ return OK;
+ }
+
+ if (!mSupportCameraMute) {
return INVALID_OPERATION;
}
+
int32_t muteMode =
!enabled ? ANDROID_SENSOR_TEST_PATTERN_MODE_OFF :
mSupportTestPatternSolidColor ? ANDROID_SENSOR_TEST_PATTERN_MODE_SOLID_COLOR :
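setCameraMute() now forwards to setCameraMuteLocked(), which simply records the request in mCameraMuteInitial when it arrives before the request thread exists; the stored value is replayed by the setCameraMuteLocked(mCameraMuteInitial) call added to the initialization path earlier in this patch. A small self-contained illustration of that defer-and-replay pattern (not framework code; the real implementation keeps a plain bool rather than an optional):

    #include <optional>

    // Toy illustration of defer-and-replay: requests made before start() are
    // remembered and applied once the component is running.
    class DeferredMute {
      public:
        void setMute(bool enabled) {
            if (!mStarted) { mInitial = enabled; return; }  // defer
            apply(enabled);
        }
        void start() {
            mStarted = true;
            if (mInitial.has_value()) apply(*mInitial);     // replay
        }
        bool muted() const { return mMuted; }
      private:
        void apply(bool enabled) { mMuted = enabled; }
        bool mStarted = false;
        bool mMuted = false;
        std::optional<bool> mInitial;
    };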
diff --git a/services/camera/libcameraservice/device3/Camera3Device.h b/services/camera/libcameraservice/device3/Camera3Device.h
index c93e47a..9f414e8 100644
--- a/services/camera/libcameraservice/device3/Camera3Device.h
+++ b/services/camera/libcameraservice/device3/Camera3Device.h
@@ -47,6 +47,7 @@
#include "device3/Camera3OutputInterface.h"
#include "device3/Camera3OfflineSession.h"
#include "device3/Camera3StreamInterface.h"
+#include "utils/AttributionAndPermissionUtils.h"
#include "utils/TagMonitor.h"
#include "utils/IPCTransport.h"
#include "utils/LatencyHistogram.h"
@@ -79,13 +80,15 @@
public camera3::SetErrorInterface,
public camera3::InflightRequestUpdateInterface,
public camera3::RequestBufferInterface,
- public camera3::FlushBufferInterface {
+ public camera3::FlushBufferInterface,
+ public AttributionAndPermissionUtilsEncapsulator {
friend class HidlCamera3Device;
friend class AidlCamera3Device;
public:
explicit Camera3Device(std::shared_ptr<CameraServiceProxyWrapper>& cameraServiceProxyWrapper,
- const std::string& id, bool overrideForPerfClass, bool overrideToPortrait,
+ std::shared_ptr<AttributionAndPermissionUtils> attributionAndPermissionUtils,
+ const std::string& id, bool overrideForPerfClass, int rotationOverride,
bool legacyClient = false);
virtual ~Camera3Device();
@@ -305,6 +308,15 @@
status_t setCameraMute(bool enabled);
/**
+ * Mute the camera.
+ *
+ * When muted, black image data is output on all output streams.
+ * This method assumes the caller already acquired the 'mInterfaceLock'
+ * and 'mLock' locks.
+ */
+ status_t setCameraMuteLocked(bool enabled);
+
+ /**
* Enables/disables camera service watchdog
*/
status_t setCameraServiceWatchdog(bool enabled);
@@ -865,7 +877,7 @@
// Override rotate_and_crop control if needed
static bool overrideAutoRotateAndCrop(const sp<CaptureRequest> &request /*out*/,
- bool overrideToPortrait,
+ int rotationOverride,
camera_metadata_enum_android_scaler_rotate_and_crop_t rotateAndCropOverride);
// Override auto framing control if needed
@@ -902,7 +914,7 @@
const Vector<int32_t>& sessionParamKeys,
bool useHalBufManager,
bool supportCameraMute,
- bool overrideToPortrait,
+ int rotationOverride,
bool supportSettingsOverride);
~RequestThread();
@@ -1024,6 +1036,11 @@
const sp<CaptureRequest> &request,
const CameraMetadata& injectedSessionParams);
+ /**
+ * Signal mLatestRequestSignal, updating either the latest or the latest failed request ID
+ **/
+ void wakeupLatestRequest(bool latestRequestFailed, int32_t latestRequestId);
+
protected:
virtual bool threadLoop();
@@ -1058,7 +1075,7 @@
static const nsecs_t kRequestTimeout = 50e6; // 50 ms
// TODO: does this need to be adjusted for long exposure requests?
- static const nsecs_t kRequestSubmitTimeout = 200e6; // 200 ms
+ static const nsecs_t kRequestSubmitTimeout = 500e6; // 500 ms
// Used to prepare a batch of requests.
struct NextRequest {
@@ -1212,7 +1229,7 @@
bool mUseHalBufManager = false;
std::set<int32_t > mHalBufManagedStreamIds;
const bool mSupportCameraMute;
- const bool mOverrideToPortrait;
+ const bool mRotationOverride;
const bool mSupportSettingsOverride;
int32_t mVndkVersion = -1;
};
@@ -1223,7 +1240,7 @@
const Vector<int32_t>& /*sessionParamKeys*/,
bool /*useHalBufManager*/,
bool /*supportCameraMute*/,
- bool /*overrideToPortrait*/,
+ int /*rotationOverride*/,
bool /*supportSettingsOverride*/) = 0;
sp<RequestThread> mRequestThread;
@@ -1390,6 +1407,10 @@
const camera_stream_buffer_t *outputBuffers, uint32_t numOutputBuffers,
int32_t inputStreamId);
+ // Collect any statistics that are based on the stream of capture requests sent
+ // to the HAL
+ void collectRequestStats(int64_t frameNumber, const CameraMetadata& request);
+
metadata_vendor_id_t mVendorTagId;
// Cached last requested template id
@@ -1501,13 +1522,17 @@
// The rotation override applied when the camera framework overrides the device
// characteristics for app compatibility reasons.
- bool mOverrideToPortrait;
+ int mRotationOverride;
camera_metadata_enum_android_scaler_rotate_and_crop_t mRotateAndCropOverride;
bool mComposerOutput;
// Auto framing override value
camera_metadata_enum_android_control_autoframing mAutoframingOverride;
+ // Initial camera mute state stored before the request thread
+ // is active.
+ bool mCameraMuteInitial = false;
+
// Settings override value
int32_t mSettingsOverride; // -1 = use original, otherwise
// the settings override to use.
diff --git a/services/camera/libcameraservice/device3/Camera3OutputUtils.cpp b/services/camera/libcameraservice/device3/Camera3OutputUtils.cpp
index 89e08a1..31707ec 100644
--- a/services/camera/libcameraservice/device3/Camera3OutputUtils.cpp
+++ b/services/camera/libcameraservice/device3/Camera3OutputUtils.cpp
@@ -36,6 +36,7 @@
#include <utils/SortedVector.h>
#include <utils/Trace.h>
+#include <android/hardware/ICameraService.h>
#include <android/hardware/camera2/ICameraDeviceCallbacks.h>
#include <android/hardware/camera/device/3.4/ICameraDeviceCallback.h>
@@ -675,8 +676,9 @@
states.listener->notifyPhysicalCameraChange(physicalId);
}
states.activePhysicalId = physicalId;
-
- if (!states.legacyClient && !states.overrideToPortrait) {
+ using hardware::ICameraService::ROTATION_OVERRIDE_NONE;
+ if (!states.legacyClient &&
+ states.rotationOverride == ROTATION_OVERRIDE_NONE) {
auto deviceInfo = states.physicalDeviceInfoMap.find(physicalId);
if (deviceInfo != states.physicalDeviceInfoMap.end()) {
auto orientation = deviceInfo->second.find(ANDROID_SENSOR_ORIENTATION);
diff --git a/services/camera/libcameraservice/device3/Camera3OutputUtils.h b/services/camera/libcameraservice/device3/Camera3OutputUtils.h
index 75864d7..21965f5 100644
--- a/services/camera/libcameraservice/device3/Camera3OutputUtils.h
+++ b/services/camera/libcameraservice/device3/Camera3OutputUtils.h
@@ -154,7 +154,7 @@
bool legacyClient;
nsecs_t& minFrameDuration;
bool& isFixedFps;
- bool overrideToPortrait;
+ int rotationOverride;
std::string &activePhysicalId;
};
diff --git a/services/camera/libcameraservice/device3/aidl/AidlCamera3Device.cpp b/services/camera/libcameraservice/device3/aidl/AidlCamera3Device.cpp
index e8ef692..57297bc 100644
--- a/services/camera/libcameraservice/device3/aidl/AidlCamera3Device.cpp
+++ b/services/camera/libcameraservice/device3/aidl/AidlCamera3Device.cpp
@@ -68,7 +68,6 @@
#include "device3/aidl/AidlCamera3OutputUtils.h"
#include "device3/aidl/AidlCamera3OfflineSession.h"
#include "CameraService.h"
-#include "utils/CameraThreadState.h"
#include "utils/SessionConfigurationUtils.h"
#include "utils/TraceHFR.h"
#include "utils/CameraServiceProxyWrapper.h"
@@ -173,10 +172,11 @@
AidlCamera3Device::AidlCamera3Device(
std::shared_ptr<CameraServiceProxyWrapper>& cameraServiceProxyWrapper,
- const std::string& id, bool overrideForPerfClass, bool overrideToPortrait,
+ std::shared_ptr<AttributionAndPermissionUtils> attributionAndPermissionUtils,
+ const std::string& id, bool overrideForPerfClass, int rotationOverride,
bool legacyClient) :
- Camera3Device(cameraServiceProxyWrapper, id, overrideForPerfClass, overrideToPortrait,
- legacyClient) {
+ Camera3Device(cameraServiceProxyWrapper, attributionAndPermissionUtils, id,
+ overrideForPerfClass, rotationOverride, legacyClient) {
mCallbacks = ndk::SharedRefBase::make<AidlCameraDeviceCallbacks>(this);
}
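The Camera3Device family of constructors now also takes the shared AttributionAndPermissionUtils instance and an int rotation override (one of the ICameraService::ROTATION_OVERRIDE_* values) in place of the old bool overrideToPortrait. A hypothetical call site; the wrapper and utils objects are placeholders that the camera service would already own as members:

    // Hypothetical call site, not part of this patch.
    std::shared_ptr<CameraServiceProxyWrapper> proxyWrapper;          // placeholder
    std::shared_ptr<AttributionAndPermissionUtils> attrUtils;         // placeholder
    sp<AidlCamera3Device> device = new AidlCamera3Device(
            proxyWrapper, attrUtils, /*id*/ "0", /*overrideForPerfClass*/ false,
            hardware::ICameraService::ROTATION_OVERRIDE_NONE, /*legacyClient*/ false);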
@@ -207,7 +207,7 @@
return INVALID_OPERATION;
}
res = manager->getCameraCharacteristics(mId, mOverrideForPerfClass, &mDeviceInfo,
- mOverrideToPortrait);
+ mRotationOverride);
if (res != OK) {
SET_ERR_L("Could not retrieve camera characteristics: %s (%d)", strerror(-res), res);
session->close();
@@ -223,7 +223,7 @@
// Do not override characteristics for physical cameras
res = manager->getCameraCharacteristics(
physicalId, /*overrideForPerfClass*/false, &mPhysicalDeviceInfoMap[physicalId],
- mOverrideToPortrait);
+ mRotationOverride);
if (res != OK) {
SET_ERR_L("Could not retrieve camera %s characteristics: %s (%d)",
physicalId.c_str(), strerror(-res), res);
@@ -417,7 +417,7 @@
mDistortionMappers, mZoomRatioMappers, mRotateAndCropMappers,
mTagMonitor, mInputStream, mOutputStreams, mSessionStatsBuilder, listener, *this,
*this, *(mInterface), mLegacyClient, mMinExpectedDuration, mIsFixedFps,
- mOverrideToPortrait, mActivePhysicalId}, mResultMetadataQueue
+ mRotationOverride, mActivePhysicalId}, mResultMetadataQueue
};
for (const auto& result : results) {
@@ -459,7 +459,7 @@
mDistortionMappers, mZoomRatioMappers, mRotateAndCropMappers,
mTagMonitor, mInputStream, mOutputStreams, mSessionStatsBuilder, listener, *this,
*this, *(mInterface), mLegacyClient, mMinExpectedDuration, mIsFixedFps,
- mOverrideToPortrait, mActivePhysicalId}, mResultMetadataQueue
+ mRotationOverride, mActivePhysicalId}, mResultMetadataQueue
};
for (const auto& msg : msgs) {
camera3::notify(states, msg, mSensorReadoutTimestampSupported);
@@ -1480,10 +1480,10 @@
const Vector<int32_t>& sessionParamKeys,
bool useHalBufManager,
bool supportCameraMute,
- bool overrideToPortrait,
+ int rotationOverride,
bool supportSettingsOverride) :
RequestThread(parent, statusTracker, interface, sessionParamKeys,
- useHalBufManager, supportCameraMute, overrideToPortrait,
+ useHalBufManager, supportCameraMute, rotationOverride,
supportSettingsOverride) {}
status_t AidlCamera3Device::AidlRequestThread::switchToOffline(
@@ -1714,10 +1714,10 @@
const Vector<int32_t>& sessionParamKeys,
bool useHalBufManager,
bool supportCameraMute,
- bool overrideToPortrait,
+ int rotationOverride,
bool supportSettingsOverride) {
return new AidlRequestThread(parent, statusTracker, interface, sessionParamKeys,
- useHalBufManager, supportCameraMute, overrideToPortrait,
+ useHalBufManager, supportCameraMute, rotationOverride,
supportSettingsOverride);
};
diff --git a/services/camera/libcameraservice/device3/aidl/AidlCamera3Device.h b/services/camera/libcameraservice/device3/aidl/AidlCamera3Device.h
index f0a5f7e..abc3f9c 100644
--- a/services/camera/libcameraservice/device3/aidl/AidlCamera3Device.h
+++ b/services/camera/libcameraservice/device3/aidl/AidlCamera3Device.h
@@ -41,7 +41,8 @@
friend class AidlCameraDeviceCallbacks;
explicit AidlCamera3Device(
std::shared_ptr<CameraServiceProxyWrapper>& cameraServiceProxyWrapper,
- const std::string& id, bool overrideForPerfClass, bool overrideToPortrait,
+ std::shared_ptr<AttributionAndPermissionUtils> attributionAndPermissionUtils,
+ const std::string& id, bool overrideForPerfClass, int rotationOverride,
bool legacyClient = false);
virtual ~AidlCamera3Device() { }
@@ -183,7 +184,7 @@
const Vector<int32_t>& sessionParamKeys,
bool useHalBufManager,
bool supportCameraMute,
- bool overrideToPortrait,
+ int rotationOverride,
bool supportSettingsOverride);
status_t switchToOffline(
@@ -274,7 +275,7 @@
const Vector<int32_t>& sessionParamKeys,
bool useHalBufManager,
bool supportCameraMute,
- bool overrideToPortrait,
+ int rotationOverride,
bool supportSettingsOverride) override;
virtual sp<Camera3DeviceInjectionMethods>
diff --git a/services/camera/libcameraservice/device3/aidl/AidlCamera3OfflineSession.cpp b/services/camera/libcameraservice/device3/aidl/AidlCamera3OfflineSession.cpp
index f8308df..cc32c2a 100644
--- a/services/camera/libcameraservice/device3/aidl/AidlCamera3OfflineSession.cpp
+++ b/services/camera/libcameraservice/device3/aidl/AidlCamera3OfflineSession.cpp
@@ -29,6 +29,7 @@
#include <utils/Trace.h>
+#include <android/hardware/ICameraService.h>
#include <android/hardware/camera2/ICameraDeviceCallbacks.h>
#include <android/binder_ibinder_platform.h>
#include <camera/StringUtils.h>
@@ -127,7 +128,7 @@
mDistortionMappers, mZoomRatioMappers, mRotateAndCropMappers,
mTagMonitor, mInputStream, mOutputStreams, mSessionStatsBuilder, listener, *this,
*this, mBufferRecords, /*legacyClient*/ false, mMinExpectedDuration, mIsFixedFps,
- /*overrideToPortrait*/false, activePhysicalId}, mResultMetadataQueue
+ hardware::ICameraService::ROTATION_OVERRIDE_NONE, activePhysicalId}, mResultMetadataQueue
};
std::lock_guard<std::mutex> lock(mProcessCaptureResultLock);
@@ -174,7 +175,7 @@
mDistortionMappers, mZoomRatioMappers, mRotateAndCropMappers,
mTagMonitor, mInputStream, mOutputStreams, mSessionStatsBuilder, listener, *this,
*this, mBufferRecords, /*legacyClient*/ false, mMinExpectedDuration, mIsFixedFps,
- /*overrideToPortrait*/false, activePhysicalId}, mResultMetadataQueue
+ hardware::ICameraService::ROTATION_OVERRIDE_NONE, activePhysicalId}, mResultMetadataQueue
};
for (const auto& msg : msgs) {
camera3::notify(states, msg, mSensorReadoutTimestampSupported);
diff --git a/services/camera/libcameraservice/device3/hidl/HidlCamera3Device.cpp b/services/camera/libcameraservice/device3/hidl/HidlCamera3Device.cpp
index f2e618f..9dacaf6 100644
--- a/services/camera/libcameraservice/device3/hidl/HidlCamera3Device.cpp
+++ b/services/camera/libcameraservice/device3/hidl/HidlCamera3Device.cpp
@@ -166,7 +166,7 @@
}
res = manager->getCameraCharacteristics(mId, mOverrideForPerfClass, &mDeviceInfo,
- /*overrideToPortrait*/false);
+ hardware::ICameraService::ROTATION_OVERRIDE_NONE);
if (res != OK) {
SET_ERR_L("Could not retrieve camera characteristics: %s (%d)", strerror(-res), res);
session->close();
@@ -181,7 +181,7 @@
// Do not override characteristics for physical cameras
res = manager->getCameraCharacteristics(
physicalId, /*overrideForPerfClass*/false, &mPhysicalDeviceInfoMap[physicalId],
- /*overrideToPortrait*/false);
+ hardware::ICameraService::ROTATION_OVERRIDE_NONE);
if (res != OK) {
SET_ERR_L("Could not retrieve camera %s characteristics: %s (%d)",
physicalId.c_str(), strerror(-res), res);
@@ -370,7 +370,7 @@
mNumPartialResults, mVendorTagId, mDeviceInfo, mPhysicalDeviceInfoMap,
mDistortionMappers, mZoomRatioMappers, mRotateAndCropMappers,
mTagMonitor, mInputStream, mOutputStreams, mSessionStatsBuilder, listener, *this, *this,
- *mInterface, mLegacyClient, mMinExpectedDuration, mIsFixedFps, mOverrideToPortrait,
+ *mInterface, mLegacyClient, mMinExpectedDuration, mIsFixedFps, mRotationOverride,
mActivePhysicalId}, mResultMetadataQueue
};
@@ -433,7 +433,7 @@
mNumPartialResults, mVendorTagId, mDeviceInfo, mPhysicalDeviceInfoMap,
mDistortionMappers, mZoomRatioMappers, mRotateAndCropMappers,
mTagMonitor, mInputStream, mOutputStreams, mSessionStatsBuilder, listener, *this, *this,
- *mInterface, mLegacyClient, mMinExpectedDuration, mIsFixedFps, mOverrideToPortrait,
+ *mInterface, mLegacyClient, mMinExpectedDuration, mIsFixedFps, mRotationOverride,
mActivePhysicalId}, mResultMetadataQueue
};
@@ -481,7 +481,7 @@
mNumPartialResults, mVendorTagId, mDeviceInfo, mPhysicalDeviceInfoMap,
mDistortionMappers, mZoomRatioMappers, mRotateAndCropMappers,
mTagMonitor, mInputStream, mOutputStreams, mSessionStatsBuilder, listener, *this, *this,
- *mInterface, mLegacyClient, mMinExpectedDuration, mIsFixedFps, mOverrideToPortrait,
+ *mInterface, mLegacyClient, mMinExpectedDuration, mIsFixedFps, mRotationOverride,
mActivePhysicalId}, mResultMetadataQueue
};
for (const auto& msg : msgs) {
@@ -717,10 +717,10 @@
const Vector<int32_t>& sessionParamKeys,
bool useHalBufManager,
bool supportCameraMute,
- bool overrideToPortrait,
+ int rotationOverride,
bool supportSettingsOverride) {
return new HidlRequestThread(parent, statusTracker, interface, sessionParamKeys,
- useHalBufManager, supportCameraMute, overrideToPortrait,
+ useHalBufManager, supportCameraMute, rotationOverride,
supportSettingsOverride);
};
@@ -1721,10 +1721,10 @@
const Vector<int32_t>& sessionParamKeys,
bool useHalBufManager,
bool supportCameraMute,
- bool overrideToPortrait,
+ int rotationOverride,
bool supportSettingsOverride) :
RequestThread(parent, statusTracker, interface, sessionParamKeys, useHalBufManager,
- supportCameraMute, overrideToPortrait, supportSettingsOverride) {}
+ supportCameraMute, rotationOverride, supportSettingsOverride) {}
status_t HidlCamera3Device::HidlRequestThread::switchToOffline(
const std::vector<int32_t>& streamsToKeep,
diff --git a/services/camera/libcameraservice/device3/hidl/HidlCamera3Device.h b/services/camera/libcameraservice/device3/hidl/HidlCamera3Device.h
index 350b072..bcc4d80 100644
--- a/services/camera/libcameraservice/device3/hidl/HidlCamera3Device.h
+++ b/services/camera/libcameraservice/device3/hidl/HidlCamera3Device.h
@@ -33,10 +33,11 @@
explicit HidlCamera3Device(
std::shared_ptr<CameraServiceProxyWrapper>& cameraServiceProxyWrapper,
- const std::string& id, bool overrideForPerfClass, bool overrideToPortrait,
+ std::shared_ptr<AttributionAndPermissionUtils> attributionAndPermissionUtils,
+ const std::string& id, bool overrideForPerfClass, int rotationOverride,
bool legacyClient = false) :
- Camera3Device(cameraServiceProxyWrapper, id, overrideForPerfClass, overrideToPortrait,
- legacyClient) { }
+ Camera3Device(cameraServiceProxyWrapper, attributionAndPermissionUtils, id,
+ overrideForPerfClass, rotationOverride, legacyClient) { }
virtual ~HidlCamera3Device() {}
@@ -178,7 +179,7 @@
const Vector<int32_t>& sessionParamKeys,
bool useHalBufManager,
bool supportCameraMute,
- bool overrideToPortrait,
+ int rotationOverride,
bool supportSettingsOverride);
status_t switchToOffline(
@@ -231,7 +232,7 @@
const Vector<int32_t>& sessionParamKeys,
bool useHalBufManager,
bool supportCameraMute,
- bool overrideToPortrait,
+ int rotationOverride,
bool supportSettingsOverride) override;
virtual sp<Camera3DeviceInjectionMethods>
diff --git a/services/camera/libcameraservice/device3/hidl/HidlCamera3OfflineSession.cpp b/services/camera/libcameraservice/device3/hidl/HidlCamera3OfflineSession.cpp
index aa4b762..c26583e 100644
--- a/services/camera/libcameraservice/device3/hidl/HidlCamera3OfflineSession.cpp
+++ b/services/camera/libcameraservice/device3/hidl/HidlCamera3OfflineSession.cpp
@@ -22,6 +22,7 @@
#include <utils/Trace.h>
+#include <android/hardware/ICameraService.h>
#include <android/hardware/camera2/ICameraDeviceCallbacks.h>
#include <camera/StringUtils.h>
@@ -108,7 +109,7 @@
mDistortionMappers, mZoomRatioMappers, mRotateAndCropMappers,
mTagMonitor, mInputStream, mOutputStreams, mSessionStatsBuilder, listener, *this, *this,
mBufferRecords, /*legacyClient*/ false, mMinExpectedDuration, mIsFixedFps,
- /*overrideToPortrait*/false, activePhysicalId}, mResultMetadataQueue
+ hardware::ICameraService::ROTATION_OVERRIDE_NONE, activePhysicalId}, mResultMetadataQueue
};
std::lock_guard<std::mutex> lock(mProcessCaptureResultLock);
@@ -150,7 +151,7 @@
mDistortionMappers, mZoomRatioMappers, mRotateAndCropMappers,
mTagMonitor, mInputStream, mOutputStreams, mSessionStatsBuilder, listener, *this, *this,
mBufferRecords, /*legacyClient*/ false, mMinExpectedDuration, mIsFixedFps,
- /*overrideToPortrait*/false, activePhysicalId}, mResultMetadataQueue
+ hardware::ICameraService::ROTATION_OVERRIDE_NONE, activePhysicalId}, mResultMetadataQueue
};
std::lock_guard<std::mutex> lock(mProcessCaptureResultLock);
@@ -187,7 +188,7 @@
mDistortionMappers, mZoomRatioMappers, mRotateAndCropMappers,
mTagMonitor, mInputStream, mOutputStreams, mSessionStatsBuilder, listener, *this, *this,
mBufferRecords, /*legacyClient*/ false, mMinExpectedDuration, mIsFixedFps,
- /*overrideToPortrait*/false, activePhysicalId}, mResultMetadataQueue
+ hardware::ICameraService::ROTATION_OVERRIDE_NONE, activePhysicalId}, mResultMetadataQueue
};
for (const auto& msg : msgs) {
camera3::notify(states, msg);
diff --git a/services/camera/libcameraservice/hidl/AidlCameraServiceListener.cpp b/services/camera/libcameraservice/hidl/AidlCameraServiceListener.cpp
index 2b81224..d28c7ab 100644
--- a/services/camera/libcameraservice/hidl/AidlCameraServiceListener.cpp
+++ b/services/camera/libcameraservice/hidl/AidlCameraServiceListener.cpp
@@ -16,6 +16,7 @@
#include <hidl/AidlCameraServiceListener.h>
#include <hidl/Utils.h>
+#include <camera/CameraUtils.h>
#include <camera/StringUtils.h>
namespace android {
@@ -29,7 +30,10 @@
typedef frameworks::cameraservice::service::V2_1::ICameraServiceListener HCameraServiceListener2_1;
binder::Status H2BCameraServiceListener::onStatusChanged(
- int32_t status, const std::string& cameraId) {
+ int32_t status, const std::string& cameraId, int32_t deviceId) {
+ if (deviceId != kDefaultDeviceId) {
+ return binder::Status::ok();
+ }
HCameraDeviceStatus hCameraDeviceStatus = convertToHidlCameraDeviceStatus(status);
CameraStatusAndId cameraStatusAndId;
cameraStatusAndId.deviceStatus = hCameraDeviceStatus;
@@ -44,7 +48,10 @@
binder::Status H2BCameraServiceListener::onPhysicalCameraStatusChanged(
int32_t status, const std::string& cameraId,
- const std::string& physicalCameraId) {
+ const std::string& physicalCameraId, int32_t deviceId) {
+ if (deviceId != kDefaultDeviceId) {
+ return binder::Status::ok();
+ }
auto cast2_1 = HCameraServiceListener2_1::castFrom(mBase);
sp<HCameraServiceListener2_1> interface2_1 = nullptr;
if (cast2_1.isOk()) {
@@ -66,13 +73,13 @@
}
::android::binder::Status H2BCameraServiceListener::onTorchStatusChanged(
- int32_t, const std::string&) {
+ [[maybe_unused]] int32_t, [[maybe_unused]] const std::string&, [[maybe_unused]] int32_t) {
// We don't implement onTorchStatusChanged
return binder::Status::ok();
}
::android::binder::Status H2BCameraServiceListener::onTorchStrengthLevelChanged(
- const std::string&, int32_t) {
+ [[maybe_unused]] const std::string&, [[maybe_unused]] int32_t, [[maybe_unused]] int32_t) {
return binder::Status::ok();
}
diff --git a/services/camera/libcameraservice/hidl/AidlCameraServiceListener.h b/services/camera/libcameraservice/hidl/AidlCameraServiceListener.h
index 91a4c16..78fca4e 100644
--- a/services/camera/libcameraservice/hidl/AidlCameraServiceListener.h
+++ b/services/camera/libcameraservice/hidl/AidlCameraServiceListener.h
@@ -47,25 +47,28 @@
~H2BCameraServiceListener() { }
virtual ::android::binder::Status onStatusChanged(int32_t status,
- const std::string& cameraId) override;
+ const std::string& cameraId, int32_t deviceId) override;
virtual ::android::binder::Status onPhysicalCameraStatusChanged(int32_t status,
const std::string& cameraId,
- const std::string& physicalCameraId) override;
+ const std::string& physicalCameraId,
+ int32_t deviceId) override;
virtual ::android::binder::Status onTorchStatusChanged(
- int32_t status, const std::string& cameraId) override;
+ int32_t status, const std::string& cameraId, int32_t deviceId) override;
virtual ::android::binder::Status onTorchStrengthLevelChanged(
- const std::string& cameraId, int32_t newStrengthLevel) override;
+ const std::string& cameraId, int32_t newStrengthLevel, int32_t deviceId) override;
virtual binder::Status onCameraAccessPrioritiesChanged() {
// TODO: no implementation yet.
return binder::Status::ok();
}
- virtual binder::Status onCameraOpened(const std::string& /*cameraId*/,
- const std::string& /*clientPackageId*/) {
+ virtual binder::Status onCameraOpened([[maybe_unused]] const std::string& /*cameraId*/,
+ [[maybe_unused]] const std::string& /*clientPackageId*/,
+ [[maybe_unused]] int32_t /*deviceId*/) {
// empty implementation
return binder::Status::ok();
}
- virtual binder::Status onCameraClosed(const std::string& /*cameraId*/) {
+ virtual binder::Status onCameraClosed([[maybe_unused]] const std::string& /*cameraId*/,
+ [[maybe_unused]] int32_t /*deviceId*/) {
// empty implementation
return binder::Status::ok();
}
diff --git a/services/camera/libcameraservice/hidl/HidlCameraService.cpp b/services/camera/libcameraservice/hidl/HidlCameraService.cpp
index 1a5a6b9..d3b2a51 100644
--- a/services/camera/libcameraservice/hidl/HidlCameraService.cpp
+++ b/services/camera/libcameraservice/hidl/HidlCameraService.cpp
@@ -25,6 +25,7 @@
#include <hidl/HidlTransportSupport.h>
+#include <camera/CameraUtils.h>
#include <utils/Utils.h>
namespace android {
@@ -36,6 +37,7 @@
using frameworks::cameraservice::service::V2_0::implementation::HidlCameraService;
using hardware::hidl_vec;
+using hardware::BnCameraService::ROTATION_OVERRIDE_NONE;
using hardware::cameraservice::utils::conversion::convertToHidl;
using hardware::cameraservice::utils::conversion::B2HStatus;
using hardware::Void;
@@ -68,8 +70,8 @@
HStatus status = HStatus::NO_ERROR;
binder::Status serviceRet =
mAidlICameraService->getCameraCharacteristics(cameraId,
- /*targetSdkVersion*/__ANDROID_API_FUTURE__, /*overrideToPortrait*/false,
- &cameraMetadata);
+ /*targetSdkVersion*/__ANDROID_API_FUTURE__, ROTATION_OVERRIDE_NONE,
+ kDefaultDeviceId, 0, &cameraMetadata);
HCameraMetadata hidlMetadata;
if (!serviceRet.isOk()) {
switch(serviceRet.serviceSpecificErrorCode()) {
@@ -120,8 +122,8 @@
binder::Status serviceRet = mAidlICameraService->connectDevice(
callbacks, cameraId, std::string(), {},
hardware::ICameraService::USE_CALLING_UID, 0/*oomScoreOffset*/,
- /*targetSdkVersion*/__ANDROID_API_FUTURE__, /*overrideToPortrait*/false,
- /*out*/&deviceRemote);
+ /*targetSdkVersion*/__ANDROID_API_FUTURE__, ROTATION_OVERRIDE_NONE,
+ kDefaultDeviceId, /*devicePolicy*/0, /*out*/&deviceRemote);
HStatus status = HStatus::NO_ERROR;
if (!serviceRet.isOk()) {
ALOGE("%s: Unable to connect to camera device", __FUNCTION__);
diff --git a/services/camera/libcameraservice/libcameraservice_fuzzer/Android.bp b/services/camera/libcameraservice/libcameraservice_fuzzer/Android.bp
index 9dd657c..c710671 100644
--- a/services/camera/libcameraservice/libcameraservice_fuzzer/Android.bp
+++ b/services/camera/libcameraservice/libcameraservice_fuzzer/Android.bp
@@ -57,6 +57,7 @@
"android.hardware.camera.device@3.5",
"android.hardware.camera.device@3.6",
"android.hardware.camera.device@3.7",
+ "camera_platform_flags_c_lib",
],
fuzz_config: {
cc: [
diff --git a/services/camera/libcameraservice/libcameraservice_fuzzer/camera_service_fuzzer.cpp b/services/camera/libcameraservice/libcameraservice_fuzzer/camera_service_fuzzer.cpp
index 854c342..bce0faf 100644
--- a/services/camera/libcameraservice/libcameraservice_fuzzer/camera_service_fuzzer.cpp
+++ b/services/camera/libcameraservice/libcameraservice_fuzzer/camera_service_fuzzer.cpp
@@ -27,6 +27,7 @@
#include <android/hardware/camera2/BnCameraDeviceCallbacks.h>
#include <android/hardware/ICameraServiceListener.h>
#include <android/hardware/camera2/ICameraDeviceUser.h>
+#include <camera/CameraUtils.h>
#include <camera/camera2/OutputConfiguration.h>
#include <gui/BufferItemConsumer.h>
#include <gui/IGraphicBufferProducer.h>
@@ -39,6 +40,9 @@
using namespace hardware;
using namespace std;
+using ICameraService::ROTATION_OVERRIDE_NONE;
+using ICameraService::ROTATION_OVERRIDE_OVERRIDE_TO_PORTRAIT;
+
const int32_t kPreviewThreshold = 8;
const int32_t kNumRequestsTested = 8;
const nsecs_t kPreviewTimeout = 5000000000; // 5 [s]
@@ -147,7 +151,7 @@
mAutoFocusMessage = true;
mAutoFocusCondition.broadcast();
}
-};
+}
void CameraFuzzer::dataCallback(int32_t msgType, const sp<IMemory> & /*data*/,
camera_frame_metadata_t *) {
@@ -169,7 +173,7 @@
default:
break;
}
-};
+}
status_t CameraFuzzer::waitForPreviewStart() {
status_t rc = NO_ERROR;
@@ -215,7 +219,7 @@
} else {
camType = kCamType[mFuzzedDataProvider->ConsumeBool()];
}
- mCameraService->getNumberOfCameras(camType, &mNumCameras);
+ mCameraService->getNumberOfCameras(camType, kDefaultDeviceId, /*devicePolicy*/0, &mNumCameras);
}
void CameraFuzzer::getCameraInformation(int32_t cameraId) {
@@ -235,11 +239,13 @@
mCameraService->getCameraVendorTagCache(&cache);
CameraInfo cameraInfo;
- mCameraService->getCameraInfo(cameraId, /*overrideToPortrait*/false, &cameraInfo);
+ mCameraService->getCameraInfo(cameraId, ROTATION_OVERRIDE_NONE, kDefaultDeviceId,
+ /*devicePolicy*/0, &cameraInfo);
CameraMetadata metadata;
mCameraService->getCameraCharacteristics(cameraIdStr,
- /*targetSdkVersion*/__ANDROID_API_FUTURE__, /*overrideToPortrait*/false, &metadata);
+ /*targetSdkVersion*/__ANDROID_API_FUTURE__, ROTATION_OVERRIDE_NONE,
+ kDefaultDeviceId, /*devicePolicy*/0, &metadata);
}
void CameraFuzzer::invokeCameraSound() {
@@ -321,12 +327,13 @@
std::string cameraIdStr = std::to_string(cameraId);
sp<IBinder> binder = new BBinder;
- mCameraService->setTorchMode(cameraIdStr, true, binder);
+ mCameraService->setTorchMode(cameraIdStr, true, binder, kDefaultDeviceId, /*devicePolicy*/0);
ALOGV("Turned torch on.");
int32_t torchStrength = rand() % 5 + 1;
ALOGV("Changing torch strength level to %d", torchStrength);
- mCameraService->turnOnTorchWithStrengthLevel(cameraIdStr, torchStrength, binder);
- mCameraService->setTorchMode(cameraIdStr, false, binder);
+ mCameraService->turnOnTorchWithStrengthLevel(cameraIdStr, torchStrength, binder,
+ kDefaultDeviceId, /*devicePolicy*/0);
+ mCameraService->setTorchMode(cameraIdStr, false, binder, kDefaultDeviceId, /*devicePolicy*/0);
ALOGV("Turned torch off.");
}
@@ -346,8 +353,9 @@
android::CameraService::USE_CALLING_UID,
android::CameraService::USE_CALLING_PID,
/*targetSdkVersion*/ __ANDROID_API_FUTURE__,
- /*overrideToPortrait*/true, /*forceSlowJpegMode*/false,
- &cameraDevice);
+ ROTATION_OVERRIDE_OVERRIDE_TO_PORTRAIT,
+ /*forceSlowJpegMode*/false,
+ kDefaultDeviceId, /*devicePolicy*/0, &cameraDevice);
if (!rc.isOk()) {
// camera not connected
return;
@@ -484,20 +492,22 @@
public:
virtual ~TestCameraServiceListener() {};
- virtual binder::Status onStatusChanged(int32_t, const std::string&) {
+ virtual binder::Status onStatusChanged(int32_t /*status*/, const std::string& /*cameraId*/,
+ int32_t /*deviceId*/) {
return binder::Status::ok();
- };
+ }
virtual binder::Status onPhysicalCameraStatusChanged(int32_t /*status*/,
- const std::string& /*cameraId*/, const std::string& /*physicalCameraId*/) {
+ const std::string& /*cameraId*/, const std::string& /*physicalCameraId*/,
+ int32_t /*deviceId*/) {
// No op
return binder::Status::ok();
- };
+ }
virtual binder::Status onTorchStatusChanged(int32_t /*status*/,
- const std::string& /*cameraId*/) {
+ const std::string& /*cameraId*/, int32_t /*deviceId*/) {
return binder::Status::ok();
- };
+ }
virtual binder::Status onCameraAccessPrioritiesChanged() {
// No op
@@ -505,18 +515,18 @@
}
virtual binder::Status onCameraOpened(const std::string& /*cameraId*/,
- const std::string& /*clientPackageName*/) {
+ const std::string& /*clientPackageName*/, int32_t /*deviceId*/) {
// No op
return binder::Status::ok();
}
- virtual binder::Status onCameraClosed(const std::string& /*cameraId*/) {
+ virtual binder::Status onCameraClosed(const std::string& /*cameraId*/, int32_t /*deviceId*/) {
// No op
return binder::Status::ok();
}
virtual binder::Status onTorchStrengthLevelChanged(const std::string& /*cameraId*/,
- int32_t /*torchStrength*/) {
+ int32_t /*torchStrength*/, int32_t /*deviceId*/) {
// No op
return binder::Status::ok();
}
@@ -582,8 +592,9 @@
sp<hardware::camera2::ICameraDeviceUser> device;
mCameraService->connectDevice(callbacks, s.cameraId, std::string(), {},
android::CameraService::USE_CALLING_UID, 0/*oomScoreDiff*/,
- /*targetSdkVersion*/__ANDROID_API_FUTURE__, /*overrideToPortrait*/true,
- &device);
+ /*targetSdkVersion*/__ANDROID_API_FUTURE__,
+ ROTATION_OVERRIDE_OVERRIDE_TO_PORTRAIT,
+ kDefaultDeviceId, /*devicePolicy*/0, &device);
if (device == nullptr) {
continue;
}
diff --git a/services/camera/libcameraservice/tests/Android.bp b/services/camera/libcameraservice/tests/Android.bp
index 5072edd..55e2c9d 100644
--- a/services/camera/libcameraservice/tests/Android.bp
+++ b/services/camera/libcameraservice/tests/Android.bp
@@ -18,6 +18,49 @@
],
}
+cc_defaults {
+ name: "cameraservice_test_hostsupported",
+
+ // All test sources that can run on both host and device
+ // should be listed here
+ srcs: [
+ "ClientManagerTest.cpp",
+ "DepthProcessorTest.cpp",
+ "DistortionMapperTest.cpp",
+ "ExifUtilsTest.cpp",
+ "NV12Compressor.cpp",
+ "RotateAndCropMapperTest.cpp",
+ "SessionStatsBuilderTest.cpp",
+ "ZoomRatioTest.cpp",
+ ],
+
+ // All shared libs available on both host and device
+ // should be listed here
+ shared_libs: [
+ "libbase",
+ "libbinder",
+ "libcamera_metadata",
+ "libexif",
+ "libjpeg",
+ "liblog",
+ "libutils",
+ "camera_platform_flags_c_lib",
+ ],
+
+ static_libs: [
+ "libgmock",
+ ],
+
+ cflags: [
+ "-Wall",
+ "-Wextra",
+ "-Werror",
+ ],
+
+ test_suites: ["device-tests"],
+
+}
+
cc_test {
name: "cameraservice_test",
@@ -33,20 +76,15 @@
defaults: [
"libcameraservice_deps",
+ "cameraservice_test_hostsupported"
],
+ // Only include libs that can't be run host-side here
shared_libs: [
- "libbase",
- "libbinder",
"libcutils",
"libhidlbase",
- "liblog",
"libcamera_client",
- "libcamera_metadata",
"libui",
- "libutils",
- "libjpeg",
- "libexif",
"android.companion.virtualdevice.flags-aconfig-cc",
"android.hardware.camera.common@1.0",
"android.hardware.camera.device@1.0",
@@ -57,6 +95,7 @@
"camera_platform_flags_c_lib",
],
+ // Only include libs that can't be run host-side here
static_libs: [
"android.hardware.camera.provider@2.4",
"android.hardware.camera.provider@2.5",
@@ -64,76 +103,40 @@
"android.hardware.camera.provider@2.7",
"android.hardware.camera.provider-V3-ndk",
"libcameraservice",
- "libgmock",
"libflagtest",
],
+ // Only include sources that can't be run host-side here
srcs: [
"CameraPermissionsTest.cpp",
"CameraProviderManagerTest.cpp",
- "ClientManagerTest.cpp",
- "DepthProcessorTest.cpp",
- "DistortionMapperTest.cpp",
- "ExifUtilsTest.cpp",
- "NV12Compressor.cpp",
- "RotateAndCropMapperTest.cpp",
- "ZoomRatioTest.cpp",
],
- cflags: [
- "-Wall",
- "-Wextra",
- "-Werror",
- ],
-
- test_suites: ["device-tests"],
-
}
cc_test_host {
name: "cameraservice_test_host",
+ defaults: [
+ "cameraservice_test_hostsupported"
+ ],
+
include_dirs: [
"frameworks/av/camera/include",
"frameworks/av/camera/include/camera",
"frameworks/native/libs/binder/include_activitymanager"
],
+ // Only include libs that can't be run device-side here
shared_libs: [
"libactivity_manager_procstate_aidl-cpp",
- "libbase",
- "libbinder",
- "libcamera_metadata",
"libdynamic_depth",
- "libexif",
- "libjpeg",
- "liblog",
- "libutils",
- "camera_platform_flags_c_lib",
],
+ // Only include libs that can't be run device-side here
static_libs: [
"libcamera_client_host",
"libcameraservice_device_independent",
- "libgmock",
],
- srcs: [
- "ClientManagerTest.cpp",
- "DepthProcessorTest.cpp",
- "DistortionMapperTest.cpp",
- "ExifUtilsTest.cpp",
- "NV12Compressor.cpp",
- "RotateAndCropMapperTest.cpp",
- "ZoomRatioTest.cpp",
- ],
-
- cflags: [
- "-Wall",
- "-Wextra",
- "-Werror",
- ],
-
- test_suites: ["device-tests"],
-
}
diff --git a/services/camera/libcameraservice/tests/CameraPermissionsTest.cpp b/services/camera/libcameraservice/tests/CameraPermissionsTest.cpp
index db43a02..cf86a05 100644
--- a/services/camera/libcameraservice/tests/CameraPermissionsTest.cpp
+++ b/services/camera/libcameraservice/tests/CameraPermissionsTest.cpp
@@ -21,6 +21,8 @@
#include <private/android_filesystem_config.h>
+#include <camera/CameraUtils.h>
+
#include "../CameraService.h"
#include "../utils/CameraServiceProxyWrapper.h"
@@ -35,22 +37,23 @@
// Empty service listener.
class TestCameraServiceListener : public hardware::BnCameraServiceListener {
public:
- virtual ~TestCameraServiceListener() {};
+ virtual ~TestCameraServiceListener() {}
- virtual binder::Status onStatusChanged(int32_t , const std::string&) {
+ virtual binder::Status onStatusChanged(int32_t , const std::string&, int32_t) {
return binder::Status::ok();
- };
+ }
virtual binder::Status onPhysicalCameraStatusChanged(int32_t /*status*/,
- const std::string& /*cameraId*/, const std::string& /*physicalCameraId*/) {
+ const std::string& /*cameraId*/, const std::string& /*physicalCameraId*/,
+ int32_t /*deviceId*/) {
// No op
return binder::Status::ok();
- };
+ }
virtual binder::Status onTorchStatusChanged(int32_t /*status*/,
- const std::string& /*cameraId*/) {
+ const std::string& /*cameraId*/, int32_t /*deviceId*/) {
return binder::Status::ok();
- };
+ }
virtual binder::Status onCameraAccessPrioritiesChanged() {
// No op
@@ -58,18 +61,18 @@
}
virtual binder::Status onCameraOpened(const std::string& /*cameraId*/,
- const std::string& /*clientPackageName*/) {
+ const std::string& /*clientPackageName*/, int32_t /*deviceId*/) {
// No op
return binder::Status::ok();
}
- virtual binder::Status onCameraClosed(const std::string& /*cameraId*/) {
+ virtual binder::Status onCameraClosed(const std::string& /*cameraId*/, int32_t /*deviceId*/) {
// No op
return binder::Status::ok();
}
virtual binder::Status onTorchStrengthLevelChanged(const std::string& /*cameraId*/,
- int32_t /*torchStrength*/) {
+ int32_t /*torchStrength*/, int32_t /*deviceId*/) {
// No op
return binder::Status::ok();
}
@@ -227,7 +230,9 @@
binder::Status status =
sCameraService->connectDevice(callbacks, s.cameraId, std::string(), {},
android::CameraService::USE_CALLING_UID, 0/*oomScoreDiff*/,
- /*targetSdkVersion*/__ANDROID_API_FUTURE__, /*overrideToPortrait*/false, &device);
+ /*targetSdkVersion*/__ANDROID_API_FUTURE__,
+ hardware::ICameraService::ROTATION_OVERRIDE_NONE,
+ kDefaultDeviceId, /*devicePolicy*/0, &device);
AutoDisconnectDevice autoDisconnect(device);
ASSERT_TRUE(!status.isOk()) << "connectDevice returned OK status";
ASSERT_EQ(status.serviceSpecificErrorCode(), hardware::ICameraService::ERROR_DISABLED)
@@ -241,7 +246,9 @@
binder::Status status =
sCameraService->connectDevice(callbacks, s.cameraId, std::string(), {},
android::CameraService::USE_CALLING_UID, 0/*oomScoreDiff*/,
- /*targetSdkVersion*/__ANDROID_API_FUTURE__, /*overrideToPortrait*/false, &device);
+ /*targetSdkVersion*/__ANDROID_API_FUTURE__,
+ hardware::ICameraService::ROTATION_OVERRIDE_NONE,
+ kDefaultDeviceId, /*devicePolicy*/0, &device);
AutoDisconnectDevice autoDisconnect(device);
ASSERT_TRUE(status.isOk());
}
@@ -260,14 +267,18 @@
binder::Status status =
sCameraService->connectDevice(callbacks, s.cameraId, std::string(), {},
android::CameraService::USE_CALLING_UID, 0/*oomScoreDiff*/,
- /*targetSdkVersion*/__ANDROID_API_FUTURE__, /*overrideToPortrait*/false, &deviceA);
+ /*targetSdkVersion*/__ANDROID_API_FUTURE__,
+ hardware::ICameraService::ROTATION_OVERRIDE_NONE,
+ kDefaultDeviceId, /*devicePolicy*/0, &deviceA);
AutoDisconnectDevice autoDisconnectA(deviceA);
ASSERT_TRUE(status.isOk()) << "Exception code " << status.exceptionCode() <<
" service specific error code " << status.serviceSpecificErrorCode();
status =
sCameraService->connectDevice(callbacks, s.cameraId, std::string(), {},
android::CameraService::USE_CALLING_UID, 0/*oomScoreDiff*/,
- /*targetSdkVersion*/__ANDROID_API_FUTURE__, /*overrideToPortrait*/false, &deviceB);
+ /*targetSdkVersion*/__ANDROID_API_FUTURE__,
+ hardware::ICameraService::ROTATION_OVERRIDE_NONE,
+ kDefaultDeviceId, /*devicePolicy*/0, &deviceB);
AutoDisconnectDevice autoDisconnectB(deviceB);
ASSERT_TRUE(status.isOk()) << "Exception code " << status.exceptionCode() <<
" service specific error code " << status.serviceSpecificErrorCode();
@@ -288,14 +299,18 @@
binder::Status status =
sCameraService->connectDevice(callbacks, s.cameraId, std::string(), {},
android::CameraService::USE_CALLING_UID, 0/*oomScoreDiff*/,
- /*targetSdkVersion*/__ANDROID_API_FUTURE__, /*overrideToPortrait*/false, &deviceA);
+ /*targetSdkVersion*/__ANDROID_API_FUTURE__,
+ hardware::ICameraService::ROTATION_OVERRIDE_NONE,
+ kDefaultDeviceId, /*devicePolicy*/0, &deviceA);
AutoDisconnectDevice autoDisconnectA(deviceA);
ASSERT_TRUE(status.isOk()) << "Exception code " << status.exceptionCode() <<
" service specific error code " << status.serviceSpecificErrorCode();
status =
sCameraService->connectDevice(callbacks, s.cameraId, std::string(), {},
android::CameraService::USE_CALLING_UID, 1/*oomScoreDiff*/,
- /*targetSdkVersion*/__ANDROID_API_FUTURE__, /*overrideToPortrait*/false, &deviceB);
+ /*targetSdkVersion*/__ANDROID_API_FUTURE__,
+ hardware::ICameraService::ROTATION_OVERRIDE_NONE,
+ kDefaultDeviceId, /*devicePolicy*/0, &deviceB);
AutoDisconnectDevice autoDisconnectB(deviceB);
ASSERT_TRUE(status.isOk()) << "Exception code " << status.exceptionCode() <<
" service specific error code " << status.serviceSpecificErrorCode();
diff --git a/services/camera/libcameraservice/tests/SessionStatsBuilderTest.cpp b/services/camera/libcameraservice/tests/SessionStatsBuilderTest.cpp
new file mode 100644
index 0000000..3644358
--- /dev/null
+++ b/services/camera/libcameraservice/tests/SessionStatsBuilderTest.cpp
@@ -0,0 +1,69 @@
+/*
+ * Copyright (C) 2014 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "SessionStatsBuilderTest"
+
+#include <gtest/gtest.h>
+#include <utils/Errors.h>
+
+#include "../utils/SessionStatsBuilder.h"
+
+using namespace std;
+using namespace android;
+
+TEST(SessionStatsBuilderTest, FpsHistogramTest) {
+ SessionStatsBuilder b{};
+
+ int64_t requestCount, resultErrorCount;
+ bool deviceError;
+ pair<int32_t, int32_t> mostRequestedFpsRange;
+ map<int, StreamStats> streamStatsMap;
+
+ // Verify we get the most common FPS
+ int64_t fc = 0;
+ for (size_t i = 0; i < 10; i++, fc++) b.incFpsRequestedCount(30, 30, fc);
+ for (size_t i = 0; i < 15; i++, fc++) b.incFpsRequestedCount(15, 30, fc);
+ for (size_t i = 0; i < 20; i++, fc++) b.incFpsRequestedCount(15, 15, fc);
+ for (size_t i = 0; i < 10; i++, fc++) b.incFpsRequestedCount(60, 60, fc);
+
+ b.buildAndReset(&requestCount, &resultErrorCount,
+ &deviceError, &mostRequestedFpsRange, &streamStatsMap);
+ ASSERT_EQ(mostRequestedFpsRange, make_pair(15, 15)) << "Incorrect most common FPS selected";
+
+ // Verify empty stats behavior
+ b.buildAndReset(&requestCount, &resultErrorCount,
+ &deviceError, &mostRequestedFpsRange, &streamStatsMap);
+ ASSERT_EQ(mostRequestedFpsRange, make_pair(0, 0)) << "Incorrect empty stats FPS reported";
+
+ // Verify one frame behavior
+ b.incFpsRequestedCount(30, 30, 1);
+ b.buildAndReset(&requestCount, &resultErrorCount,
+ &deviceError, &mostRequestedFpsRange, &streamStatsMap);
+ ASSERT_EQ(mostRequestedFpsRange, make_pair(30, 30)) << "Incorrect single-frame FPS reported";
+
+ // Verify overflow stats behavior
+ fc = 0;
+ for (size_t range = 1; range < SessionStatsBuilder::FPS_HISTOGRAM_MAX_SIZE + 2; range++) {
+ int count = SessionStatsBuilder::FPS_HISTOGRAM_MAX_SIZE * 3;
+ for (size_t i = 0; i < count - range; i++, fc++) b.incFpsRequestedCount(range, range, fc);
+ }
+ // Should have the oldest bucket dropped, so second oldest should be most common
+ b.buildAndReset(&requestCount, &resultErrorCount,
+ &deviceError, &mostRequestedFpsRange, &streamStatsMap);
+ ASSERT_EQ(mostRequestedFpsRange, make_pair(2, 2)) << "Incorrect stats overflow behavior";
+
+}
diff --git a/services/camera/libcameraservice/utils/AttributionAndPermissionUtils.cpp b/services/camera/libcameraservice/utils/AttributionAndPermissionUtils.cpp
new file mode 100644
index 0000000..93b440b
--- /dev/null
+++ b/services/camera/libcameraservice/utils/AttributionAndPermissionUtils.cpp
@@ -0,0 +1,221 @@
+/*
+ * Copyright (C) 2024 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include "AttributionAndPermissionUtils.h"
+
+#include <binder/AppOpsManager.h>
+#include <binder/PermissionController.h>
+#include <com_android_internal_camera_flags.h>
+#include <cutils/properties.h>
+#include <private/android_filesystem_config.h>
+
+#include "CameraService.h"
+
+#include <binder/IPCThreadState.h>
+#include <hwbinder/IPCThreadState.h>
+#include <binderthreadstate/CallerUtils.h>
+
+namespace android {
+
+namespace flags = com::android::internal::camera::flags;
+
+const std::string AttributionAndPermissionUtils::sDumpPermission("android.permission.DUMP");
+const std::string AttributionAndPermissionUtils::sManageCameraPermission(
+ "android.permission.MANAGE_CAMERA");
+const std::string AttributionAndPermissionUtils::sCameraPermission(
+ "android.permission.CAMERA");
+const std::string AttributionAndPermissionUtils::sSystemCameraPermission(
+ "android.permission.SYSTEM_CAMERA");
+const std::string AttributionAndPermissionUtils::sCameraHeadlessSystemUserPermission(
+ "android.permission.CAMERA_HEADLESS_SYSTEM_USER");
+const std::string AttributionAndPermissionUtils::sCameraPrivacyAllowlistPermission(
+ "android.permission.CAMERA_PRIVACY_ALLOWLIST");
+const std::string AttributionAndPermissionUtils::sCameraSendSystemEventsPermission(
+ "android.permission.CAMERA_SEND_SYSTEM_EVENTS");
+const std::string AttributionAndPermissionUtils::sCameraOpenCloseListenerPermission(
+ "android.permission.CAMERA_OPEN_CLOSE_LISTENER");
+const std::string AttributionAndPermissionUtils::sCameraInjectExternalCameraPermission(
+ "android.permission.CAMERA_INJECT_EXTERNAL_CAMERA");
+
+int AttributionAndPermissionUtils::getCallingUid() {
+ if (getCurrentServingCall() == BinderCallType::HWBINDER) {
+ return hardware::IPCThreadState::self()->getCallingUid();
+ }
+ return IPCThreadState::self()->getCallingUid();
+}
+
+int AttributionAndPermissionUtils::getCallingPid() {
+ if (getCurrentServingCall() == BinderCallType::HWBINDER) {
+ return hardware::IPCThreadState::self()->getCallingPid();
+ }
+ return IPCThreadState::self()->getCallingPid();
+}
+
+int64_t AttributionAndPermissionUtils::clearCallingIdentity() {
+ if (getCurrentServingCall() == BinderCallType::HWBINDER) {
+ return hardware::IPCThreadState::self()->clearCallingIdentity();
+ }
+ return IPCThreadState::self()->clearCallingIdentity();
+}
+
+void AttributionAndPermissionUtils::restoreCallingIdentity(int64_t token) {
+ if (getCurrentServingCall() == BinderCallType::HWBINDER) {
+ hardware::IPCThreadState::self()->restoreCallingIdentity(token);
+ } else {
+ IPCThreadState::self()->restoreCallingIdentity(token);
+ }
+ return;
+}
+
+bool AttributionAndPermissionUtils::checkAutomotivePrivilegedClient(const std::string &cameraId,
+ const AttributionSourceState &attributionSource) {
+ if (isAutomotivePrivilegedClient(attributionSource.uid)) {
+ // If cameraId is empty, this check is not being used to access a specific
+ // camera, so grant the permission to the automotive privileged client based
+ // on uid alone.
+ if (cameraId.empty())
+ return true;
+
+ auto cameraService = mCameraService.promote();
+ if (cameraService == nullptr) {
+ ALOGE("%s: CameraService unavailable.", __FUNCTION__);
+ return false;
+ }
+
+ // If this call is used to access a specific camera, then cameraId must be provided.
+ // In that case, the permission is pre-granted only for accessing exterior system
+ // cameras.
+ return cameraService->isAutomotiveExteriorSystemCamera(cameraId);
+ }
+
+ return false;
+}
+
+bool AttributionAndPermissionUtils::checkPermissionForPreflight(const std::string &cameraId,
+ const std::string &permission, const AttributionSourceState &attributionSource,
+ const std::string& message, int32_t attributedOpCode) {
+ if (checkAutomotivePrivilegedClient(cameraId, attributionSource)) {
+ return true;
+ }
+
+ if (!flags::cache_permission_services()) {
+ PermissionChecker permissionChecker;
+ return permissionChecker.checkPermissionForPreflight(
+ toString16(permission), attributionSource, toString16(message),
+ attributedOpCode) != PermissionChecker::PERMISSION_HARD_DENIED;
+ } else {
+ return mPermissionChecker->checkPermissionForPreflight(
+ toString16(permission), attributionSource, toString16(message),
+ attributedOpCode) != PermissionChecker::PERMISSION_HARD_DENIED;
+ }
+}
+
+// Can camera service trust the caller based on the calling UID?
+bool AttributionAndPermissionUtils::isTrustedCallingUid(uid_t uid) {
+ switch (uid) {
+ case AID_MEDIA: // mediaserver
+ case AID_CAMERASERVER: // cameraserver
+ case AID_RADIO: // telephony
+ return true;
+ default:
+ return false;
+ }
+}
+
+bool AttributionAndPermissionUtils::isAutomotiveDevice() {
+ // Checks the property ro.hardware.type and returns true if it is
+ // automotive.
+ char value[PROPERTY_VALUE_MAX] = {0};
+ property_get("ro.hardware.type", value, "");
+ return strncmp(value, "automotive", PROPERTY_VALUE_MAX) == 0;
+}
+
+bool AttributionAndPermissionUtils::isHeadlessSystemUserMode() {
+ // Checks if the device is running in headless system user mode
+ // by checking the property ro.fw.mu.headless_system_user.
+ char value[PROPERTY_VALUE_MAX] = {0};
+ property_get("ro.fw.mu.headless_system_user", value, "");
+ return strncmp(value, "true", PROPERTY_VALUE_MAX) == 0;
+}
+
+bool AttributionAndPermissionUtils::isAutomotivePrivilegedClient(int32_t uid) {
+ // Returns false if this is not an automotive device type.
+ if (!isAutomotiveDevice())
+ return false;
+
+ // Returns true if the uid is AID_AUTOMOTIVE_EVS which is a
+ // privileged client uid used for safety critical use cases such as
+ // rear view and surround view.
+ return uid == AID_AUTOMOTIVE_EVS;
+}
+
+status_t AttributionAndPermissionUtils::getUidForPackage(const std::string &packageName,
+ int userId, /*inout*/uid_t& uid, int err) {
+ PermissionController pc;
+ uid = pc.getPackageUid(toString16(packageName), 0);
+ if (uid <= 0) {
+ ALOGE("Unknown package: '%s'", packageName.c_str());
+ dprintf(err, "Unknown package: '%s'\n", packageName.c_str());
+ return BAD_VALUE;
+ }
+
+ if (userId < 0) {
+ ALOGE("Invalid user: %d", userId);
+ dprintf(err, "Invalid user: %d\n", userId);
+ return BAD_VALUE;
+ }
+
+ uid = multiuser_get_uid(userId, uid);
+ return NO_ERROR;
+}
+
+bool AttributionAndPermissionUtils::isCallerCameraServerNotDelegating() {
+ return (getCallingPid() == getpid());
+}
+
+bool AttributionAndPermissionUtils::hasPermissionsForCamera(const std::string& cameraId,
+ const AttributionSourceState& attributionSource) {
+ return checkPermissionForPreflight(cameraId, sCameraPermission,
+ attributionSource, std::string(), AppOpsManager::OP_NONE);
+}
+
+bool AttributionAndPermissionUtils::hasPermissionsForSystemCamera(const std::string& cameraId,
+ const AttributionSourceState& attributionSource, bool checkCameraPermissions) {
+ bool systemCameraPermission = checkPermissionForPreflight(cameraId,
+ sSystemCameraPermission, attributionSource, std::string(), AppOpsManager::OP_NONE);
+ return systemCameraPermission && (!checkCameraPermissions
+ || hasPermissionsForCamera(cameraId, attributionSource));
+}
+
+bool AttributionAndPermissionUtils::hasPermissionsForCameraHeadlessSystemUser(
+ const std::string& cameraId, const AttributionSourceState& attributionSource) {
+ return checkPermissionForPreflight(cameraId, sCameraHeadlessSystemUserPermission,
+ attributionSource, std::string(), AppOpsManager::OP_NONE);
+}
+
+bool AttributionAndPermissionUtils::hasPermissionsForCameraPrivacyAllowlist(
+ const AttributionSourceState& attributionSource) {
+ return checkPermissionForPreflight(std::string(), sCameraPrivacyAllowlistPermission,
+ attributionSource, std::string(), AppOpsManager::OP_NONE);
+}
+
+bool AttributionAndPermissionUtils::hasPermissionsForOpenCloseListener(
+ const AttributionSourceState& attributionSource) {
+ return checkPermissionForPreflight(std::string(), sCameraOpenCloseListenerPermission,
+ attributionSource, std::string(), AppOpsManager::OP_NONE);
+}
+
+} // namespace android
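A minimal sketch of how the clearCallingIdentity()/restoreCallingIdentity() helpers above are typically paired, following the standard Binder pattern (doPrivilegedWork is a hypothetical name used only for illustration, not part of this change):

    void doPrivilegedWork(AttributionAndPermissionUtils& utils) {
        // Temporarily act with the camera service's own identity rather than the
        // Binder caller's identity.
        int64_t token = utils.clearCallingIdentity();
        // ... perform work that must not be attributed to the caller ...
        utils.restoreCallingIdentity(token);
    }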
diff --git a/services/camera/libcameraservice/utils/AttributionAndPermissionUtils.h b/services/camera/libcameraservice/utils/AttributionAndPermissionUtils.h
new file mode 100644
index 0000000..4f238ab
--- /dev/null
+++ b/services/camera/libcameraservice/utils/AttributionAndPermissionUtils.h
@@ -0,0 +1,247 @@
+/*
+ * Copyright (C) 2024 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+#ifndef ANDROID_SERVERS_CAMERA_ATTRIBUTION_AND_PERMISSION_UTILS_H
+#define ANDROID_SERVERS_CAMERA_ATTRIBUTION_AND_PERMISSION_UTILS_H
+
+#include <android/content/AttributionSourceState.h>
+#include <android/permission/PermissionChecker.h>
+#include <binder/BinderService.h>
+#include <private/android_filesystem_config.h>
+
+namespace android {
+
+class CameraService;
+
+using content::AttributionSourceState;
+using permission::PermissionChecker;
+
+/**
+ * Utility class consolidating methods/data for verifying permissions and the identity of the
+ * caller.
+ */
+class AttributionAndPermissionUtils {
+ public:
+ AttributionAndPermissionUtils() { }
+ virtual ~AttributionAndPermissionUtils() {}
+
+ void setCameraService(wp<CameraService> cameraService) {
+ mCameraService = cameraService;
+ }
+
+ // Utilities handling Binder calling identities (previously in CameraThreadState)
+ virtual int getCallingUid();
+ virtual int getCallingPid();
+ virtual int64_t clearCallingIdentity();
+ virtual void restoreCallingIdentity(int64_t token);
+
+ /**
+ * Pre-grants the permission if the attribution source uid belongs to an automotive
+ * privileged client; otherwise uses the system service permission checker to check
+ * for the appropriate permission. If this function is called to access a specific
+ * camera, then the cameraId must not be empty. The cameraId is only used for the
+ * automotive privileged client case, so that the permission is pre-granted solely for
+ * system cameras located outside of the vehicle body frame; cameras located inside the
+ * vehicle cabin still require user permission.
+ */
+ virtual bool checkPermissionForPreflight(const std::string &cameraId,
+ const std::string &permission, const AttributionSourceState& attributionSource,
+ const std::string& message, int32_t attributedOpCode);
+
+ // Can camera service trust the caller based on the calling UID?
+ virtual bool isTrustedCallingUid(uid_t uid);
+
+ virtual bool isAutomotiveDevice();
+ virtual bool isHeadlessSystemUserMode();
+
+ /**
+ * Returns true if the client has uid AID_AUTOMOTIVE_EVS and the device is an automotive device.
+ */
+ virtual bool isAutomotivePrivilegedClient(int32_t uid);
+
+ virtual status_t getUidForPackage(const std::string &packageName, int userId,
+ /*inout*/uid_t& uid, int err);
+ virtual bool isCallerCameraServerNotDelegating();
+
+ // Utils for checking specific permissions
+ virtual bool hasPermissionsForCamera(const std::string& cameraId,
+ const AttributionSourceState& attributionSource);
+ virtual bool hasPermissionsForSystemCamera(const std::string& cameraId,
+ const AttributionSourceState& attributionSource, bool checkCameraPermissions = true);
+ virtual bool hasPermissionsForCameraHeadlessSystemUser(const std::string& cameraId,
+ const AttributionSourceState& attributionSource);
+ virtual bool hasPermissionsForCameraPrivacyAllowlist(
+ const AttributionSourceState& attributionSource);
+ virtual bool hasPermissionsForOpenCloseListener(
+ const AttributionSourceState& attributionSource);
+
+ static const std::string sDumpPermission;
+ static const std::string sManageCameraPermission;
+ static const std::string sCameraPermission;
+ static const std::string sSystemCameraPermission;
+ static const std::string sCameraHeadlessSystemUserPermission;
+ static const std::string sCameraPrivacyAllowlistPermission;
+ static const std::string sCameraSendSystemEventsPermission;
+ static const std::string sCameraOpenCloseListenerPermission;
+ static const std::string sCameraInjectExternalCameraPermission;
+
+ protected:
+ wp<CameraService> mCameraService;
+
+ bool checkAutomotivePrivilegedClient(const std::string &cameraId,
+ const AttributionSourceState &attributionSource);
+
+ private:
+ std::unique_ptr<permission::PermissionChecker> mPermissionChecker =
+ std::make_unique<permission::PermissionChecker>();
+};
+
+/**
+ * Class to be inherited by classes encapsulating AttributionAndPermissionUtils. Provides an
+ * additional utility layer above AttributionAndPermissionUtils calls, and avoids verbosity
+ * in the encapsulating class's methods.
+ */
+class AttributionAndPermissionUtilsEncapsulator {
+protected:
+ std::shared_ptr<AttributionAndPermissionUtils> mAttributionAndPermissionUtils;
+
+public:
+ AttributionAndPermissionUtilsEncapsulator(
+ std::shared_ptr<AttributionAndPermissionUtils> attributionAndPermissionUtils)
+ : mAttributionAndPermissionUtils(attributionAndPermissionUtils) { }
+
+ static AttributionSourceState buildAttributionSource(int callingPid, int callingUid) {
+ AttributionSourceState attributionSource{};
+ attributionSource.pid = callingPid;
+ attributionSource.uid = callingUid;
+ return attributionSource;
+ }
+
+ static AttributionSourceState buildAttributionSource(int callingPid, int callingUid,
+ int32_t deviceId) {
+ AttributionSourceState attributionSource = buildAttributionSource(callingPid, callingUid);
+ attributionSource.deviceId = deviceId;
+ return attributionSource;
+ }
+
+ static AttributionSourceState buildAttributionSource(int callingPid, int callingUid,
+ const std::string& packageName, int32_t deviceId) {
+ AttributionSourceState attributionSource = buildAttributionSource(callingPid, callingUid,
+ deviceId);
+ attributionSource.packageName = packageName;
+ return attributionSource;
+ }
+
+ int getCallingUid() const {
+ return mAttributionAndPermissionUtils->getCallingUid();
+ }
+
+ int getCallingPid() const {
+ return mAttributionAndPermissionUtils->getCallingPid();
+ }
+
+ int64_t clearCallingIdentity() const {
+ return mAttributionAndPermissionUtils->clearCallingIdentity();
+ }
+
+ void restoreCallingIdentity(int64_t token) const {
+ mAttributionAndPermissionUtils->restoreCallingIdentity(token);
+ }
+
+ // The word 'System' here does not refer to callers only on the system
+ // partition. They just need to have an android system uid.
+ bool callerHasSystemUid() const {
+ return (getCallingUid() < AID_APP_START);
+ }
+
+ bool hasPermissionsForCamera(int callingPid, int callingUid, int32_t deviceId) const {
+ return hasPermissionsForCamera(std::string(), callingPid, callingUid, deviceId);
+ }
+
+ bool hasPermissionsForCamera(int callingPid, int callingUid,
+ const std::string& packageName, int32_t deviceId) const {
+ return hasPermissionsForCamera(std::string(), callingPid, callingUid, packageName,
+ deviceId);
+ }
+
+ bool hasPermissionsForCamera(const std::string& cameraId, int callingPid,
+ int callingUid, int32_t deviceId) const {
+ auto attributionSource = buildAttributionSource(callingPid, callingUid,
+ deviceId);
+ return mAttributionAndPermissionUtils->hasPermissionsForCamera(cameraId, attributionSource);
+ }
+
+ bool hasPermissionsForCamera(const std::string& cameraId, int callingPid, int callingUid,
+ const std::string& packageName, int32_t deviceId) const {
+ auto attributionSource = buildAttributionSource(callingPid, callingUid, packageName,
+ deviceId);
+ return mAttributionAndPermissionUtils->hasPermissionsForCamera(cameraId, attributionSource);
+ }
+
+ bool hasPermissionsForSystemCamera(const std::string& cameraId, int callingPid, int callingUid,
+ bool checkCameraPermissions = true) const {
+ auto attributionSource = buildAttributionSource(callingPid, callingUid);
+ return mAttributionAndPermissionUtils->hasPermissionsForSystemCamera(
+ cameraId, attributionSource, checkCameraPermissions);
+ }
+
+ bool hasPermissionsForCameraHeadlessSystemUser(const std::string& cameraId, int callingPid,
+ int callingUid) const {
+ auto attributionSource = buildAttributionSource(callingPid, callingUid);
+ return mAttributionAndPermissionUtils->hasPermissionsForCameraHeadlessSystemUser(
+ cameraId, attributionSource);
+ }
+
+ bool hasPermissionsForCameraPrivacyAllowlist(int callingPid, int callingUid) const {
+ auto attributionSource = buildAttributionSource(callingPid, callingUid);
+ return mAttributionAndPermissionUtils->hasPermissionsForCameraPrivacyAllowlist(
+ attributionSource);
+ }
+
+ bool hasPermissionsForOpenCloseListener(int callingPid, int callingUid) const {
+ auto attributionSource = buildAttributionSource(callingPid, callingUid);
+ return mAttributionAndPermissionUtils->hasPermissionsForOpenCloseListener(
+ attributionSource);
+ }
+
+ bool isAutomotiveDevice() const {
+ return mAttributionAndPermissionUtils->isAutomotiveDevice();
+ }
+
+ bool isAutomotivePrivilegedClient(int32_t uid) const {
+ return mAttributionAndPermissionUtils->isAutomotivePrivilegedClient(uid);
+ }
+
+ bool isTrustedCallingUid(uid_t uid) const {
+ return mAttributionAndPermissionUtils->isTrustedCallingUid(uid);
+ }
+
+ bool isHeadlessSystemUserMode() const {
+ return mAttributionAndPermissionUtils->isHeadlessSystemUserMode();
+ }
+
+ status_t getUidForPackage(const std::string &packageName, int userId,
+ /*inout*/uid_t& uid, int err) const {
+ return mAttributionAndPermissionUtils->getUidForPackage(packageName, userId, uid, err);
+ }
+
+ bool isCallerCameraServerNotDelegating() const {
+ return mAttributionAndPermissionUtils->isCallerCameraServerNotDelegating();
+ }
+};
+
+} // namespace android
+
+#endif // ANDROID_SERVERS_CAMERA_ATTRIBUTION_AND_PERMISSION_UTILS_H
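A minimal sketch, under the assumption that a service-side component inherits AttributionAndPermissionUtilsEncapsulator, of how the helpers above might gate a camera open request (DemoCameraEndpoint is a hypothetical class used only for illustration, not part of this change):

    class DemoCameraEndpoint : public AttributionAndPermissionUtilsEncapsulator {
      public:
        explicit DemoCameraEndpoint(
                std::shared_ptr<AttributionAndPermissionUtils> utils)
            : AttributionAndPermissionUtilsEncapsulator(utils) {}

        bool mayOpenCamera(const std::string& cameraId, int32_t deviceId) const {
            // The encapsulator builds the AttributionSourceState from the Binder
            // calling identity and delegates the preflight permission check.
            return hasPermissionsForCamera(cameraId, getCallingPid(), getCallingUid(),
                    deviceId);
        }
    };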
diff --git a/services/camera/libcameraservice/utils/CameraServiceProxyWrapper.cpp b/services/camera/libcameraservice/utils/CameraServiceProxyWrapper.cpp
index 65e93a9..4afae9b 100644
--- a/services/camera/libcameraservice/utils/CameraServiceProxyWrapper.cpp
+++ b/services/camera/libcameraservice/utils/CameraServiceProxyWrapper.cpp
@@ -95,7 +95,8 @@
sp<hardware::ICameraServiceProxy>& proxyBinder,
int64_t requestCount, int64_t resultErrorCount, bool deviceError,
const std::string& userTag, int32_t videoStabilizationMode, bool usedUltraWide,
- bool usedZoomOverride, const std::vector<hardware::CameraStreamStats>& streamStats) {
+ bool usedZoomOverride, std::pair<int32_t, int32_t> mostRequestedFpsRange,
+ const std::vector<hardware::CameraStreamStats>& streamStats) {
Mutex::Autolock l(mLock);
mSessionStats.mNewCameraState = CameraSessionStats::CAMERA_STATE_IDLE;
@@ -106,6 +107,7 @@
mSessionStats.mVideoStabilizationMode = videoStabilizationMode;
mSessionStats.mUsedUltraWide = usedUltraWide;
mSessionStats.mUsedZoomOverride = usedZoomOverride;
+ mSessionStats.mMostRequestedFpsRange = mostRequestedFpsRange;
mSessionStats.mStreamStats = streamStats;
updateProxyDeviceState(proxyBinder);
@@ -281,7 +283,8 @@
void CameraServiceProxyWrapper::logIdle(const std::string& id,
int64_t requestCount, int64_t resultErrorCount, bool deviceError,
const std::string& userTag, int32_t videoStabilizationMode, bool usedUltraWide,
- bool usedZoomOverride, const std::vector<hardware::CameraStreamStats>& streamStats) {
+ bool usedZoomOverride, std::pair<int32_t, int32_t> mostRequestedFpsRange,
+ const std::vector<hardware::CameraStreamStats>& streamStats) {
std::shared_ptr<CameraSessionStatsWrapper> sessionStats;
{
Mutex::Autolock l(mLock);
@@ -294,8 +297,9 @@
}
ALOGV("%s: id %s, requestCount %" PRId64 ", resultErrorCount %" PRId64 ", deviceError %d"
- ", userTag %s, videoStabilizationMode %d", __FUNCTION__, id.c_str(), requestCount,
- resultErrorCount, deviceError, userTag.c_str(), videoStabilizationMode);
+ ", userTag %s, videoStabilizationMode %d, most common FPS [%d,%d]",
+ __FUNCTION__, id.c_str(), requestCount, resultErrorCount, deviceError, userTag.c_str(),
+ videoStabilizationMode, mostRequestedFpsRange.first, mostRequestedFpsRange.second);
for (size_t i = 0; i < streamStats.size(); i++) {
ALOGV("%s: streamStats[%zu]: w %d h %d, requestedCount %" PRId64 ", dropCount %"
PRId64 ", startTimeMs %d" ,
@@ -306,7 +310,8 @@
sp<hardware::ICameraServiceProxy> proxyBinder = getCameraServiceProxy();
sessionStats->onIdle(proxyBinder, requestCount, resultErrorCount, deviceError, userTag,
- videoStabilizationMode, usedUltraWide, usedZoomOverride, streamStats);
+ videoStabilizationMode, usedUltraWide, usedZoomOverride,
+ mostRequestedFpsRange, streamStats);
}
void CameraServiceProxyWrapper::logOpen(const std::string& id, int facing,
diff --git a/services/camera/libcameraservice/utils/CameraServiceProxyWrapper.h b/services/camera/libcameraservice/utils/CameraServiceProxyWrapper.h
index 49b7a8c..b6a967f 100644
--- a/services/camera/libcameraservice/utils/CameraServiceProxyWrapper.h
+++ b/services/camera/libcameraservice/utils/CameraServiceProxyWrapper.h
@@ -62,7 +62,8 @@
void onIdle(sp<hardware::ICameraServiceProxy>& proxyBinder,
int64_t requestCount, int64_t resultErrorCount, bool deviceError,
const std::string& userTag, int32_t videoStabilizationMode, bool usedUltraWide,
- bool usedZoomOverride, const std::vector<hardware::CameraStreamStats>& streamStats);
+ bool usedZoomOverride, std::pair<int32_t, int32_t> mostRequestedFpsRange,
+ const std::vector<hardware::CameraStreamStats>& streamStats);
std::string updateExtensionSessionStats(
const hardware::CameraExtensionSessionStats& extStats);
@@ -111,7 +112,8 @@
void logIdle(const std::string& id,
int64_t requestCount, int64_t resultErrorCount, bool deviceError,
const std::string& userTag, int32_t videoStabilizationMode, bool usedUltraWide,
- bool usedZoomOverride, const std::vector<hardware::CameraStreamStats>& streamStats);
+ bool usedZoomOverride, std::pair<int32_t, int32_t> mostRequestedFpsRange,
+ const std::vector<hardware::CameraStreamStats>& streamStats);
// Ping camera service proxy for user update
void pingCameraServiceProxy();
diff --git a/services/camera/libcameraservice/utils/CameraThreadState.cpp b/services/camera/libcameraservice/utils/CameraThreadState.cpp
deleted file mode 100644
index 2352b80..0000000
--- a/services/camera/libcameraservice/utils/CameraThreadState.cpp
+++ /dev/null
@@ -1,55 +0,0 @@
-/*
- * Copyright (C) 2018 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#include "CameraThreadState.h"
-#include <binder/IPCThreadState.h>
-#include <hwbinder/IPCThreadState.h>
-#include <binderthreadstate/CallerUtils.h>
-#include <unistd.h>
-
-namespace android {
-
-int CameraThreadState::getCallingUid() {
- if (getCurrentServingCall() == BinderCallType::HWBINDER) {
- return hardware::IPCThreadState::self()->getCallingUid();
- }
- return IPCThreadState::self()->getCallingUid();
-}
-
-int CameraThreadState::getCallingPid() {
- if (getCurrentServingCall() == BinderCallType::HWBINDER) {
- return hardware::IPCThreadState::self()->getCallingPid();
- }
- return IPCThreadState::self()->getCallingPid();
-}
-
-int64_t CameraThreadState::clearCallingIdentity() {
- if (getCurrentServingCall() == BinderCallType::HWBINDER) {
- return hardware::IPCThreadState::self()->clearCallingIdentity();
- }
- return IPCThreadState::self()->clearCallingIdentity();
-}
-
-void CameraThreadState::restoreCallingIdentity(int64_t token) {
- if (getCurrentServingCall() == BinderCallType::HWBINDER) {
- hardware::IPCThreadState::self()->restoreCallingIdentity(token);
- } else {
- IPCThreadState::self()->restoreCallingIdentity(token);
- }
- return;
-}
-
-} // android
diff --git a/services/camera/libcameraservice/utils/SchedulingPolicyUtils.cpp b/services/camera/libcameraservice/utils/SchedulingPolicyUtils.cpp
index 92a1030..f3afc69 100644
--- a/services/camera/libcameraservice/utils/SchedulingPolicyUtils.cpp
+++ b/services/camera/libcameraservice/utils/SchedulingPolicyUtils.cpp
@@ -20,7 +20,6 @@
#include <pthread.h>
#include <sched.h>
-#include "CameraThreadState.h"
#include <private/android_filesystem_config.h>
#include <processgroup/processgroup.h>
#include <processgroup/sched_policy.h>
diff --git a/services/camera/libcameraservice/utils/SessionConfigurationUtils.cpp b/services/camera/libcameraservice/utils/SessionConfigurationUtils.cpp
index 11ef9b7..a7a2b5e 100644
--- a/services/camera/libcameraservice/utils/SessionConfigurationUtils.cpp
+++ b/services/camera/libcameraservice/utils/SessionConfigurationUtils.cpp
@@ -27,6 +27,7 @@
#include "device3/aidl/AidlCamera3Device.h"
#include "device3/hidl/HidlCamera3Device.h"
#include "device3/Camera3OutputStream.h"
+#include "device3/ZoomRatioMapper.h"
#include "system/graphics-base-v1.1.h"
#include <camera/StringUtils.h>
#include <ui/PublicFormat.h>
@@ -432,7 +433,7 @@
const std::string &logicalCameraId, const CameraMetadata &physicalCameraMetadata,
const std::vector<int32_t> &sensorPixelModesUsed, int64_t dynamicRangeProfile,
int64_t streamUseCase, int timestampBase, int mirrorMode,
- int32_t colorSpace) {
+ int32_t colorSpace, bool respectSurfaceSize) {
// bufferProducer must be non-null
if (gbp == nullptr) {
std::string msg = fmt::sprintf("Camera %s: Surface is NULL", logicalCameraId.c_str());
@@ -529,8 +530,10 @@
// we can use the default stream configuration map
foundInMaxRes = true;
}
- // Round dimensions to the nearest dimensions available for this format
- if (flexibleConsumer && isPublicFormat(format) &&
+ // Round dimensions to the nearest dimensions available for this format.
+ // Only do the rounding if the client doesn't ask to respect the surface
+ // size.
+ if (flexibleConsumer && isPublicFormat(format) && !respectSurfaceSize &&
!SessionConfigurationUtils::roundBufferDimensionNearest(width, height,
format, dataSpace, physicalCameraMetadata, foundInMaxRes, /*out*/&width,
/*out*/&height)) {
@@ -684,7 +687,8 @@
metadataGetter getMetadata, const std::vector<std::string> &physicalCameraIds,
aidl::android::hardware::camera::device::StreamConfiguration &streamConfiguration,
bool overrideForPerfClass, metadata_vendor_id_t vendorTagId,
- bool checkSessionParams, bool *earlyExit) {
+ bool checkSessionParams, const std::vector<int32_t>& additionalKeys,
+ bool *earlyExit) {
using SensorPixelMode = aidl::android::hardware::camera::metadata::SensorPixelMode;
auto operatingMode = sessionConfiguration.getOperatingMode();
binder::Status res = checkOperatingMode(operatingMode, deviceInfo,
@@ -753,6 +757,7 @@
const std::vector<sp<IGraphicBufferProducer>>& bufferProducers =
it.getGraphicBufferProducers();
bool deferredConsumer = it.isDeferred();
+ bool isConfigurationComplete = it.isComplete();
const std::string &physicalCameraId = it.getPhysicalCameraId();
int64_t dynamicRangeProfile = it.getDynamicRangeProfile();
@@ -768,7 +773,8 @@
int32_t groupId = it.isMultiResolution() ? it.getSurfaceSetID() : -1;
OutputStreamInfo streamInfo;
- res = checkSurfaceType(numBufferProducers, deferredConsumer, it.getSurfaceType());
+ res = checkSurfaceType(numBufferProducers, deferredConsumer, it.getSurfaceType(),
+ isConfigurationComplete);
if (!res.isOk()) {
return res;
}
@@ -781,15 +787,38 @@
int64_t streamUseCase = it.getStreamUseCase();
int timestampBase = it.getTimestampBase();
int mirrorMode = it.getMirrorMode();
- if (deferredConsumer) {
+ // If the configuration is a deferred consumer, or a not yet completed
+ // configuration with no buffer producers attached.
+ if (deferredConsumer || (!isConfigurationComplete && numBufferProducers == 0)) {
streamInfo.width = it.getWidth();
streamInfo.height = it.getHeight();
- streamInfo.format = HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED;
- streamInfo.dataSpace = android_dataspace_t::HAL_DATASPACE_UNKNOWN;
auto surfaceType = it.getSurfaceType();
- streamInfo.consumerUsage = GraphicBuffer::USAGE_HW_TEXTURE;
- if (surfaceType == OutputConfiguration::SURFACE_TYPE_SURFACE_VIEW) {
- streamInfo.consumerUsage |= GraphicBuffer::USAGE_HW_COMPOSER;
+ switch (surfaceType) {
+ case OutputConfiguration::SURFACE_TYPE_SURFACE_TEXTURE:
+ streamInfo.consumerUsage = GraphicBuffer::USAGE_HW_TEXTURE;
+ streamInfo.format = HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED;
+ streamInfo.dataSpace = android_dataspace_t::HAL_DATASPACE_UNKNOWN;
+ break;
+ case OutputConfiguration::SURFACE_TYPE_SURFACE_VIEW:
+ streamInfo.consumerUsage = GraphicBuffer::USAGE_HW_TEXTURE
+ | GraphicBuffer::USAGE_HW_COMPOSER;
+ streamInfo.format = HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED;
+ streamInfo.dataSpace = android_dataspace_t::HAL_DATASPACE_UNKNOWN;
+ break;
+ case OutputConfiguration::SURFACE_TYPE_MEDIA_RECORDER:
+ case OutputConfiguration::SURFACE_TYPE_MEDIA_CODEC:
+ streamInfo.consumerUsage = GraphicBuffer::USAGE_HW_VIDEO_ENCODER;
+ streamInfo.format = HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED;
+ streamInfo.dataSpace = android_dataspace_t::HAL_DATASPACE_UNKNOWN;
+ break;
+ case OutputConfiguration::SURFACE_TYPE_IMAGE_READER:
+ streamInfo.consumerUsage = it.getUsage();
+ streamInfo.format = it.getFormat();
+ streamInfo.dataSpace = (android_dataspace)it.getDataspace();
+ break;
+ default:
+ return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT,
+ "Invalid surface type.");
}
streamInfo.dynamicRangeProfile = it.getDynamicRangeProfile();
if (checkAndOverrideSensorPixelModesUsed(sensorPixelModesUsed,
@@ -815,7 +844,8 @@
sp<Surface> surface;
res = createSurfaceFromGbp(streamInfo, isStreamInfoValid, surface, bufferProducer,
logicalCameraId, metadataChosen, sensorPixelModesUsed, dynamicRangeProfile,
- streamUseCase, timestampBase, mirrorMode, colorSpace);
+ streamUseCase, timestampBase, mirrorMode, colorSpace,
+ /*respectSurfaceSize*/true);
if (!res.isOk())
return res;
@@ -884,7 +914,7 @@
CameraMetadata filteredParams;
filterParameters(sessionConfiguration.getSessionParameters(), deviceInfo,
- vendorTagId, filteredParams);
+ additionalKeys, vendorTagId, filteredParams);
camera_metadata_t* metadata = const_cast<camera_metadata_t*>(filteredParams.getAndLock());
uint8_t *metadataP = reinterpret_cast<uint8_t*>(metadata);
@@ -912,22 +942,37 @@
}
binder::Status checkSurfaceType(size_t numBufferProducers,
- bool deferredConsumer, int surfaceType) {
+ bool deferredConsumer, int surfaceType, bool isConfigurationComplete) {
if (numBufferProducers > MAX_SURFACES_PER_STREAM) {
ALOGE("%s: GraphicBufferProducer count %zu for stream exceeds limit of %d",
__FUNCTION__, numBufferProducers, MAX_SURFACES_PER_STREAM);
return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT, "Surface count is too high");
- } else if ((numBufferProducers == 0) && (!deferredConsumer)) {
+ } else if ((numBufferProducers == 0) && (!deferredConsumer) && isConfigurationComplete) {
ALOGE("%s: Number of consumers cannot be smaller than 1", __FUNCTION__);
return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT, "No valid consumers.");
}
- bool validSurfaceType = ((surfaceType == OutputConfiguration::SURFACE_TYPE_SURFACE_VIEW) ||
- (surfaceType == OutputConfiguration::SURFACE_TYPE_SURFACE_TEXTURE));
-
- if (deferredConsumer && !validSurfaceType) {
- ALOGE("%s: Target surface has invalid surfaceType = %d.", __FUNCTION__, surfaceType);
- return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT, "Target Surface is invalid");
+ if (deferredConsumer) {
+ bool validSurfaceType = (
+ (surfaceType == OutputConfiguration::SURFACE_TYPE_SURFACE_VIEW) ||
+ (surfaceType == OutputConfiguration::SURFACE_TYPE_SURFACE_TEXTURE));
+ if (!validSurfaceType) {
+ std::string msg = fmt::sprintf("Deferred target surface has invalid "
+ "surfaceType = %d.", surfaceType);
+ ALOGE("%s: %s", __FUNCTION__, msg.c_str());
+ return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT, msg.c_str());
+ }
+ } else if (!isConfigurationComplete && numBufferProducers == 0) {
+ bool validSurfaceType = (
+ (surfaceType == OutputConfiguration::SURFACE_TYPE_MEDIA_RECORDER) ||
+ (surfaceType == OutputConfiguration::SURFACE_TYPE_MEDIA_CODEC) ||
+ (surfaceType == OutputConfiguration::SURFACE_TYPE_IMAGE_READER));
+ if (!validSurfaceType) {
+ std::string msg = fmt::sprintf("OutputConfiguration target surface has invalid "
+ "surfaceType = %d.", surfaceType);
+ ALOGE("%s: %s", __FUNCTION__, msg.c_str());
+ return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT, msg.c_str());
+ }
}
return binder::Status::ok();
@@ -1133,7 +1178,8 @@
}
void filterParameters(const CameraMetadata& src, const CameraMetadata& deviceInfo,
- metadata_vendor_id_t vendorTagId, CameraMetadata& dst) {
+ const std::vector<int32_t>& additionalTags, metadata_vendor_id_t vendorTagId,
+ CameraMetadata& dst) {
const CameraMetadata params(src);
camera_metadata_ro_entry_t availableSessionKeys = deviceInfo.find(
ANDROID_REQUEST_AVAILABLE_SESSION_KEYS);
@@ -1142,9 +1188,12 @@
filteredParams.getAndLock());
set_camera_metadata_vendor_id(meta, vendorTagId);
filteredParams.unlock(meta);
- for (size_t i = 0; i < availableSessionKeys.count; i++) {
- camera_metadata_ro_entry entry = params.find(
- availableSessionKeys.data.i32[i]);
+
+ std::unordered_set<int32_t> filteredTags(availableSessionKeys.data.i32,
+ availableSessionKeys.data.i32 + availableSessionKeys.count);
+ filteredTags.insert(additionalTags.begin(), additionalTags.end());
+ for (int32_t tag : filteredTags) {
+ camera_metadata_ro_entry entry = params.find(tag);
if (entry.count > 0) {
filteredParams.update(entry);
}
@@ -1152,6 +1201,29 @@
dst = std::move(filteredParams);
}
+status_t overrideDefaultRequestKeys(CameraMetadata *request) {
+ // Override the template request with ZoomRatioMapper
+ status_t res = ZoomRatioMapper::initZoomRatioInTemplate(request);
+ if (res != OK) {
+ ALOGE("Failed to update zoom ratio: %s (%d)", strerror(-res), res);
+ return res;
+ }
+
+ // Fill in JPEG_QUALITY if not available
+ if (!request->exists(ANDROID_JPEG_QUALITY)) {
+ static const uint8_t kDefaultJpegQuality = 95;
+ request->update(ANDROID_JPEG_QUALITY, &kDefaultJpegQuality, 1);
+ }
+
+ // Fill in AUTOFRAMING if not available
+ if (!request->exists(ANDROID_CONTROL_AUTOFRAMING)) {
+ static const uint8_t kDefaultAutoframingMode = ANDROID_CONTROL_AUTOFRAMING_OFF;
+ request->update(ANDROID_CONTROL_AUTOFRAMING, &kDefaultAutoframingMode, 1);
+ }
+
+ return OK;
+}
+
} // namespace SessionConfigurationUtils
} // namespace camera3
} // namespace android
diff --git a/services/camera/libcameraservice/utils/SessionConfigurationUtils.h b/services/camera/libcameraservice/utils/SessionConfigurationUtils.h
index 0545cea..3c0f109 100644
--- a/services/camera/libcameraservice/utils/SessionConfigurationUtils.h
+++ b/services/camera/libcameraservice/utils/SessionConfigurationUtils.h
@@ -112,7 +112,7 @@
const std::string &logicalCameraId, const CameraMetadata &physicalCameraMetadata,
const std::vector<int32_t> &sensorPixelModesUsed, int64_t dynamicRangeProfile,
int64_t streamUseCase, int timestampBase, int mirrorMode,
- int32_t colorSpace);
+ int32_t colorSpace, bool respectSurfaceSize);
//check if format is 10-bit output compatible
bool is10bitCompatibleFormat(int32_t format, android_dataspace_t dataSpace);
@@ -143,10 +143,10 @@
const std::string &logicalCameraId);
binder::Status checkSurfaceType(size_t numBufferProducers,
-bool deferredConsumer, int surfaceType);
+ bool deferredConsumer, int surfaceType, bool isConfigurationComplete);
binder::Status checkOperatingMode(int operatingMode,
-const CameraMetadata &staticInfo, const std::string &cameraId);
+ const CameraMetadata &staticInfo, const std::string &cameraId);
binder::Status
convertToHALStreamCombination(
@@ -156,7 +156,8 @@
const std::vector<std::string> &physicalCameraIds,
aidl::android::hardware::camera::device::StreamConfiguration &streamConfiguration,
bool overrideForPerfClass, metadata_vendor_id_t vendorTagId,
- bool checkSessionParams, bool *earlyExit);
+ bool checkSessionParams, const std::vector<int32_t>& additionalKeys,
+ bool *earlyExit);
StreamConfigurationPair getStreamConfigurationPair(const CameraMetadata &metadata);
@@ -177,7 +178,10 @@
aidl::android::hardware::camera::device::RequestTemplate* tempId /*out*/);
void filterParameters(const CameraMetadata& src, const CameraMetadata& deviceInfo,
- metadata_vendor_id_t vendorTagId, CameraMetadata& dst);
+ const std::vector<int32_t>& additionalKeys, metadata_vendor_id_t vendorTagId,
+ CameraMetadata& dst);
+
+status_t overrideDefaultRequestKeys(CameraMetadata *request);
template <typename T> bool contains(std::set<T> container, T value) {
return container.find(value) != container.end();
diff --git a/services/camera/libcameraservice/utils/SessionConfigurationUtilsHidl.cpp b/services/camera/libcameraservice/utils/SessionConfigurationUtilsHidl.cpp
index bf8ea84..cfa1815 100644
--- a/services/camera/libcameraservice/utils/SessionConfigurationUtilsHidl.cpp
+++ b/services/camera/libcameraservice/utils/SessionConfigurationUtilsHidl.cpp
@@ -113,7 +113,7 @@
auto ret = convertToHALStreamCombination(sessionConfiguration, logicalCameraId, deviceInfo,
false /*isCompositeJpegRDisabled*/, getMetadata, physicalCameraIds,
aidlStreamConfiguration, overrideForPerfClass, vendorTagId,
- /*checkSessionParams*/false, earlyExit);
+ /*checkSessionParams*/false, /*additionalKeys*/{}, earlyExit);
if (!ret.isOk()) {
return ret;
}
diff --git a/services/camera/libcameraservice/utils/SessionStatsBuilder.cpp b/services/camera/libcameraservice/utils/SessionStatsBuilder.cpp
index c3aac72..2bca4cb 100644
--- a/services/camera/libcameraservice/utils/SessionStatsBuilder.cpp
+++ b/services/camera/libcameraservice/utils/SessionStatsBuilder.cpp
@@ -48,19 +48,35 @@
void SessionStatsBuilder::buildAndReset(int64_t* requestCount,
int64_t* errorResultCount, bool* deviceError,
- std::map<int, StreamStats> *statsMap) {
+ std::pair<int32_t, int32_t>* mostRequestedFpsRange,
+ std::map<int, StreamStats>* statsMap) {
std::lock_guard<std::mutex> l(mLock);
*requestCount = mRequestCount;
*errorResultCount = mErrorResultCount;
*deviceError = mDeviceError;
*statsMap = mStatsMap;
+ int32_t minFps = 0, maxFps = 0;
+ if (mRequestedFpsRangeHistogram.size() > 0) {
+ auto mostCommonIt = mRequestedFpsRangeHistogram.begin();
+ for (auto it = mostCommonIt; it != mRequestedFpsRangeHistogram.end(); it++) {
+ if (it->second.first > mostCommonIt->second.first) {
+ mostCommonIt = it;
+ }
+ }
+ minFps = mostCommonIt->first >> 32;
+ maxFps = mostCommonIt->first & 0xFFFF'FFFFU;
+ }
+ *mostRequestedFpsRange = std::make_pair(minFps, maxFps);
+
// Reset internal states
mRequestCount = 0;
mErrorResultCount = 0;
mCounterStopped = false;
mDeviceError = false;
mUserTag.clear();
+ mRequestedFpsRangeHistogram.clear();
+
for (auto& streamStats : mStatsMap) {
StreamStats& streamStat = streamStats.second;
streamStat.mRequestedFrameCount = 0;
@@ -125,6 +141,31 @@
mDeviceError = true;
}
+void SessionStatsBuilder::incFpsRequestedCount(int32_t minFps, int32_t maxFps,
+ int64_t frameNumber) {
+ std::lock_guard<std::mutex> l(mLock);
+
+ // Stuff range into a 64-bit value to make hashing simple
+ uint64_t currentFpsTarget = minFps;
+ currentFpsTarget = currentFpsTarget << 32 | maxFps;
+
+ auto &stats = mRequestedFpsRangeHistogram[currentFpsTarget];
+ stats.first++;
+ stats.second = frameNumber;
+
+ // Ensure weird app input of target FPS ranges doesn't cause unbounded memory growth
+ if (mRequestedFpsRangeHistogram.size() > FPS_HISTOGRAM_MAX_SIZE) {
+ // Find oldest used fps to drop by last seen frame number
+ auto deleteIt = mRequestedFpsRangeHistogram.begin();
+ for (auto it = deleteIt; it != mRequestedFpsRangeHistogram.end(); it++) {
+ if (it->second.second < deleteIt->second.second) {
+ deleteIt = it;
+ }
+ }
+ mRequestedFpsRangeHistogram.erase(deleteIt);
+ }
+}
+
void StreamStats::updateLatencyHistogram(int32_t latencyMs) {
size_t i;
for (i = 0; i < mCaptureLatencyBins.size(); i++) {
diff --git a/services/camera/libcameraservice/utils/SessionStatsBuilder.h b/services/camera/libcameraservice/utils/SessionStatsBuilder.h
index 2936531..914c09e 100644
--- a/services/camera/libcameraservice/utils/SessionStatsBuilder.h
+++ b/services/camera/libcameraservice/utils/SessionStatsBuilder.h
@@ -22,6 +22,8 @@
#include <array>
#include <map>
#include <mutex>
+#include <unordered_map>
+#include <utility>
namespace android {
@@ -64,7 +66,8 @@
void buildAndReset(/*out*/int64_t* requestCount,
/*out*/int64_t* errorResultCount,
/*out*/bool* deviceError,
- /*out*/std::map<int, StreamStats> *statsMap);
+ /*out*/std::pair<int32_t, int32_t>* mostRequestedFpsRange,
+ /*out*/std::map<int, StreamStats>* statsMap);
// Stream specific counter
void startCounter(int streamId);
@@ -76,6 +79,13 @@
void incResultCounter(bool dropped);
void onDeviceError();
+ // Session specific statistics
+
+ // Limit on size of FPS range histogram
+ static const size_t FPS_HISTOGRAM_MAX_SIZE = 10;
+
+ void incFpsRequestedCount(int32_t minFps, int32_t maxFps, int64_t frameNumber);
+
SessionStatsBuilder() : mRequestCount(0), mErrorResultCount(0),
mCounterStopped(false), mDeviceError(false) {}
private:
@@ -85,6 +95,11 @@
bool mCounterStopped;
bool mDeviceError;
std::string mUserTag;
+
+ // Histogram of frame counts of requested target FPS ranges
+ // (min_fps << 32 | max_fps) -> (# of frames with this fps, last seen framenumber)
+ std::unordered_map<uint64_t, std::pair<int64_t, int64_t>> mRequestedFpsRangeHistogram;
+
// Map from stream id to stream statistics
std::map<int, StreamStats> mStatsMap;
};
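The histogram comment above packs an FPS range into a single 64-bit key. A minimal standalone sketch of that encoding and its inverse (packFpsRange/unpackFpsRange are hypothetical helper names for illustration, not functions added by this change):

    #include <cstdint>
    #include <utility>

    inline uint64_t packFpsRange(int32_t minFps, int32_t maxFps) {
        // (min_fps << 32 | max_fps), matching the key layout used by the histogram.
        uint64_t key = static_cast<uint32_t>(minFps);
        return (key << 32) | static_cast<uint32_t>(maxFps);
    }

    inline std::pair<int32_t, int32_t> unpackFpsRange(uint64_t key) {
        return {static_cast<int32_t>(key >> 32),
                static_cast<int32_t>(key & 0xFFFFFFFFu)};
    }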
diff --git a/services/camera/libcameraservice/utils/Utils.cpp b/services/camera/libcameraservice/utils/Utils.cpp
index 34c0ed8..76517dc 100644
--- a/services/camera/libcameraservice/utils/Utils.cpp
+++ b/services/camera/libcameraservice/utils/Utils.cpp
@@ -21,18 +21,20 @@
#include <com_android_internal_camera_flags.h>
#include <utils/Errors.h>
#include <utils/Log.h>
+#include <vendorsupport/api_level.h>
namespace android {
namespace flags = com::android::internal::camera::flags;
-constexpr const char *LEGACY_VNDK_VERSION_PROP = "ro.vndk.version";
-constexpr const char *BOARD_API_LEVEL_PROP = "ro.board.api_level";
+namespace {
+constexpr const char* LEGACY_VNDK_VERSION_PROP = "ro.vndk.version";
+constexpr const char* BOARD_API_LEVEL_PROP = "ro.board.api_level";
constexpr int MAX_VENDOR_API_LEVEL = 1000000;
constexpr int FIRST_VNDK_VERSION = 202404;
-int getVNDKVersionFromProp(int defaultVersion) {
- if (!com_android_internal_camera_flags_use_ro_board_api_level_for_vndk_version()) {
+int legacyGetVNDKVersionFromProp(int defaultVersion) {
+ if (!flags::use_ro_board_api_level_for_vndk_version()) {
return base::GetIntProperty(LEGACY_VNDK_VERSION_PROP, defaultVersion);
}
@@ -54,6 +56,24 @@
vndkVersion = (vndkVersion - FIRST_VNDK_VERSION) / 100;
return __ANDROID_API_V__ + vndkVersion;
}
+} // anonymous namespace
+
+int getVNDKVersionFromProp(int defaultVersion) {
+ if (!flags::use_system_api_for_vndk_version()) {
+ return legacyGetVNDKVersionFromProp(defaultVersion);
+ }
+
+ int vendorApiLevel = AVendorSupport_getVendorApiLevel();
+ if (vendorApiLevel == 0) {
+ // Couldn't find vendor API level, return default
+ return defaultVersion;
+ }
+
+ // Vendor API level for Android V and above are of the format YYYYMM starting with 202404.
+ // AVendorSupport_getSdkApiLevelOf maps them back to SDK API levels while leaving older
+ // values unchanged.
+ return AVendorSupport_getSdkApiLevelOf(vendorApiLevel);
+}
RunThreadWithRealtimePriority::RunThreadWithRealtimePriority(int tid)
: mTid(tid), mPreviousPolicy(sched_getscheduler(tid)) {
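As noted in the comment above, vendor API levels from Android V onwards use a YYYYMM format (starting at 202404) that is mapped back to SDK API levels. A small worked sketch of that mapping, assuming the same arithmetic as the legacy helper shown earlier (the constants and sdkLevelFromVendorApiLevel are illustrative, not part of this change):

    constexpr int kFirstYyyymmVendorApiLevel = 202404;  // Android V
    constexpr int kSdkLevelForAndroidV = 35;            // __ANDROID_API_V__

    constexpr int sdkLevelFromVendorApiLevel(int vendorApiLevel) {
        if (vendorApiLevel < kFirstYyyymmVendorApiLevel) {
            return vendorApiLevel;  // older values are already plain SDK levels
        }
        return kSdkLevelForAndroidV +
                (vendorApiLevel - kFirstYyyymmVendorApiLevel) / 100;
    }

    static_assert(sdkLevelFromVendorApiLevel(34) == 34);      // unchanged
    static_assert(sdkLevelFromVendorApiLevel(202404) == 35);  // Android V
    static_assert(sdkLevelFromVendorApiLevel(202504) == 36);  // one release later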
diff --git a/services/camera/libcameraservice/utils/VirtualDeviceCameraIdMapper.cpp b/services/camera/libcameraservice/utils/VirtualDeviceCameraIdMapper.cpp
new file mode 100644
index 0000000..22dd806
--- /dev/null
+++ b/services/camera/libcameraservice/utils/VirtualDeviceCameraIdMapper.cpp
@@ -0,0 +1,143 @@
+/*
+ * Copyright (C) 2024 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "VirtualDeviceCameraIdMapper"
+
+#include <android_companion_virtualdevice_flags.h>
+#include <camera/CameraUtils.h>
+
+#include "VirtualDeviceCameraIdMapper.h"
+
+namespace android {
+
+namespace vd_flags = android::companion::virtualdevice::flags;
+
+void VirtualDeviceCameraIdMapper::addCamera(const std::string& cameraId,
+ int32_t deviceId, const std::string& mappedCameraId) {
+ if (!vd_flags::camera_device_awareness()) {
+ ALOGV("%s: Device-aware camera feature is not enabled", __func__);
+ return;
+ }
+
+ if (deviceId == kDefaultDeviceId) {
+ ALOGV("%s: Not adding entry for a camera of the default device", __func__);
+ return;
+ }
+
+ ALOGV("%s: Adding camera %s for device %d with mapped id %s", __func__, cameraId.c_str(),
+ deviceId, mappedCameraId.c_str());
+
+ std::scoped_lock lock(mLock);
+ mDeviceIdMappedCameraIdPairToCameraIdMap[{deviceId, mappedCameraId}] = cameraId;
+ mCameraIdToDeviceIdMappedCameraIdPairMap[cameraId] = {deviceId, mappedCameraId};
+}
+
+void VirtualDeviceCameraIdMapper::removeCamera(const std::string& cameraId) {
+ if (!vd_flags::camera_device_awareness()) {
+ ALOGV("%s: Device-aware camera feature is not enabled", __func__);
+ return;
+ }
+
+ auto deviceIdAndMappedCameraIdPair = getDeviceIdAndMappedCameraIdPair(cameraId);
+
+ std::scoped_lock lock(mLock);
+ mCameraIdToDeviceIdMappedCameraIdPairMap.erase(cameraId);
+ mDeviceIdMappedCameraIdPairToCameraIdMap.erase(deviceIdAndMappedCameraIdPair);
+}
+
+std::optional<std::string> VirtualDeviceCameraIdMapper::getActualCameraId(
+ int32_t deviceId, const std::string& mappedCameraId) const {
+ if (deviceId == kDefaultDeviceId) {
+ ALOGV("%s: Returning the camera id as the mapped camera id for camera %s, as it "
+ "belongs to the default device", __func__, mappedCameraId.c_str());
+ return mappedCameraId;
+ }
+
+ if (!vd_flags::camera_device_awareness()) {
+ ALOGV("%s: Device-aware camera feature is not enabled, returning the camera id as "
+ "the mapped camera id for camera %s", __func__, mappedCameraId.c_str());
+ return mappedCameraId;
+ }
+
+ std::scoped_lock lock(mLock);
+ auto iterator = mDeviceIdMappedCameraIdPairToCameraIdMap.find(
+ {deviceId, mappedCameraId});
+ if (iterator == mDeviceIdMappedCameraIdPairToCameraIdMap.end()) {
+ ALOGV("%s: No entry found for device id %d and mapped camera id %s", __func__,
+ deviceId, mappedCameraId.c_str());
+ return std::nullopt;
+ }
+ return iterator->second;
+}
+
+std::pair<int32_t, std::string> VirtualDeviceCameraIdMapper::getDeviceIdAndMappedCameraIdPair(
+ const std::string& cameraId) const {
+ if (!vd_flags::camera_device_awareness()) {
+ ALOGV("%s: Device-aware camera feature is not enabled", __func__);
+ return std::make_pair(kDefaultDeviceId, cameraId);
+ }
+
+ std::scoped_lock lock(mLock);
+ auto iterator = mCameraIdToDeviceIdMappedCameraIdPairMap.find(cameraId);
+ if (iterator != mCameraIdToDeviceIdMappedCameraIdPairMap.end()) {
+ return iterator->second;
+ }
+ ALOGV("%s: No device id and mapped camera id found for camera id %s, so it must belong "
+ "to the default device ?", __func__, cameraId.c_str());
+ return std::make_pair(kDefaultDeviceId, cameraId);
+}
+
+int VirtualDeviceCameraIdMapper::getNumberOfCameras(int32_t deviceId) const {
+ if (!vd_flags::camera_device_awareness()) {
+ return 0;
+ }
+
+ int numOfCameras = 0;
+ std::scoped_lock lock(mLock);
+ for (const auto& [deviceIdMappedCameraIdPair, _]
+ : mDeviceIdMappedCameraIdPairToCameraIdMap) {
+ if (deviceIdMappedCameraIdPair.first == deviceId) {
+ numOfCameras++;
+ }
+ }
+ return numOfCameras;
+}
+
+std::optional<std::string> VirtualDeviceCameraIdMapper::getActualCameraId(
+ int api1CameraId, int32_t deviceId) const {
+ if (!vd_flags::camera_device_awareness()) {
+ ALOGV("%s: Device-aware camera feature is not enabled", __func__);
+ return std::nullopt;
+ }
+
+ int matchingCameraIndex = 0;
+ std::scoped_lock lock(mLock);
+ for (const auto& [deviceIdMappedCameraIdPair, actualCameraId]
+ : mDeviceIdMappedCameraIdPairToCameraIdMap) {
+ if (deviceIdMappedCameraIdPair.first == deviceId) {
+ if (matchingCameraIndex == api1CameraId) {
+ return actualCameraId;
+ }
+ matchingCameraIndex++;
+ }
+ }
+ ALOGV("%s: No entry found for device id %d and API 1 camera id %d", __func__,
+ deviceId, api1CameraId);
+ return std::nullopt;
+}
+
+} // namespace android
\ No newline at end of file
diff --git a/services/camera/libcameraservice/utils/VirtualDeviceCameraIdMapper.h b/services/camera/libcameraservice/utils/VirtualDeviceCameraIdMapper.h
new file mode 100644
index 0000000..fdfde23
--- /dev/null
+++ b/services/camera/libcameraservice/utils/VirtualDeviceCameraIdMapper.h
@@ -0,0 +1,89 @@
+/*
+ * Copyright (C) 2024 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef ANDROID_SERVERS_CAMERA_VIRTUAL_DEVICE_CAMERA_ID_MAPPER_H
+#define ANDROID_SERVERS_CAMERA_VIRTUAL_DEVICE_CAMERA_ID_MAPPER_H
+
+#include <string>
+#include <map>
+#include <mutex>
+
+#include <utils/Mutex.h>
+
+namespace android {
+
+class VirtualDeviceCameraIdMapper {
+public:
+ VirtualDeviceCameraIdMapper() {}
+
+ virtual ~VirtualDeviceCameraIdMapper() {}
+
+ void addCamera(const std::string& cameraId, int32_t deviceId,
+ const std::string& mappedCameraId) EXCLUDES(mLock);
+
+ void removeCamera(const std::string& cameraId) EXCLUDES(mLock);
+
+ /**
+ * Return the actual camera id for a given device id (i.e., the id of the device owning
+ * the camera, for a virtual camera this would be the id of the virtual device, and for
+ * any other cameras this would be default device id, i.e., 0) and mapped camera
+ * id (for virtual devices, the back and front virtual cameras of that device would have
+ * 0 and 1 respectively as their mapped camera id, and for any other cameras this
+ * would be their actual camera id). When the camera device awareness flag is disabled,
+ * this will return the given camera id itself.
+ */
+ std::optional<std::string> getActualCameraId(int32_t deviceId,
+ const std::string& mappedCameraId) const EXCLUDES(mLock);
+
+ /**
+ * Return the device id (i.e., the id of the device owning the camera, for a virtual
+ * camera this would be the id of the virtual device, and for any other cameras this
+ * would be default device id, i.e., 0) and the mapped camera id (for virtual
+ * devices, the back and front virtual cameras of that device would have 0 and 1
+ * respectively as their mapped camera id, and for any other cameras this would
+ * be their actual camera id) for a given camera id. When the camera device awareness flag is
+ * disabled, this will return a pair of kDefaultDeviceId and the given cameraId.
+ */
+ std::pair<int32_t, std::string> getDeviceIdAndMappedCameraIdPair(
+ const std::string& cameraId) const EXCLUDES(mLock);
+
+ /**
+ * Return the number of virtual cameras owned by the given device, mirroring the legacy
+ * camera API getNumberOfCameras. When the camera device awareness flag is disabled, this
+ * will return 0.
+ */
+ int getNumberOfCameras(int32_t deviceId) const EXCLUDES(mLock);
+
+ /**
+ * Return the actual camera id corresponding to the virtual camera with the given API 1 camera
+ * id. When the camera device awareness flag is disabled, this will return std::nullopt.
+ */
+ std::optional<std::string> getActualCameraId(int api1CameraId, int32_t deviceId)
+ const EXCLUDES(mLock);
+
+private:
+ mutable std::mutex mLock;
+
+ // Map of (deviceId, app-visible cameraId) -> HAL-visible cameraId
+ std::map<std::pair<int32_t, std::string>, std::string>
+ mDeviceIdMappedCameraIdPairToCameraIdMap GUARDED_BY(mLock);
+ // Map of HAL-visible cameraId -> (deviceId, app-visible cameraId)
+ std::map<std::string, std::pair<int32_t, std::string>>
+ mCameraIdToDeviceIdMappedCameraIdPairMap GUARDED_BY(mLock);
+};
+
+} // namespace android
+
+#endif // ANDROID_SERVERS_CAMERA_VIRTUAL_DEVICE_CAMERA_ID_MAPPER_H
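
To illustrate the mapping contract documented above, here is a hypothetical usage sketch; the ids and the registration call site are invented for illustration and assume the camera device awareness flag is enabled:

// Virtual device 2 registers a camera whose HAL id is "1042" and whose
// app-visible (mapped) id is "0".
VirtualDeviceCameraIdMapper mapper;
mapper.addCamera(/*cameraId=*/"1042", /*deviceId=*/2, /*mappedCameraId=*/"0");

// App-visible -> HAL-visible id for that device.
std::optional<std::string> halId = mapper.getActualCameraId(/*deviceId=*/2, "0");
// halId contains "1042".

// HAL-visible -> (deviceId, app-visible id).
auto [deviceId, mappedId] = mapper.getDeviceIdAndMappedCameraIdPair("1042");
// deviceId == 2, mappedId == "0".
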
diff --git a/services/camera/virtualcamera/TEST_MAPPING b/services/camera/virtualcamera/TEST_MAPPING
index 25fca73..e976704 100644
--- a/services/camera/virtualcamera/TEST_MAPPING
+++ b/services/camera/virtualcamera/TEST_MAPPING
@@ -9,7 +9,8 @@
{
"exclude-annotation": "androidx.test.filters.FlakyTest"
}
- ]
+ ],
+ "keywords": ["primary-device"]
}
]
}
diff --git a/services/camera/virtualcamera/VirtualCameraDevice.cc b/services/camera/virtualcamera/VirtualCameraDevice.cc
index 7636cbd..fe9e0ed 100644
--- a/services/camera/virtualcamera/VirtualCameraDevice.cc
+++ b/services/camera/virtualcamera/VirtualCameraDevice.cc
@@ -28,6 +28,7 @@
#include <string>
#include <vector>
+#include "VirtualCameraService.h"
#include "VirtualCameraSession.h"
#include "aidl/android/companion/virtualcamera/SupportedStreamConfiguration.h"
#include "aidl/android/companion/virtualcamera/VirtualCameraConfiguration.h"
@@ -124,9 +125,9 @@
kOutputFormats.end();
}
-std::vector<MetadataBuilder::FpsRange> fpsRangesForInputConfig(
+std::vector<FpsRange> fpsRangesForInputConfig(
const std::vector<SupportedStreamConfiguration>& configs) {
- std::set<MetadataBuilder::FpsRange> availableRanges;
+ std::set<FpsRange> availableRanges;
for (const SupportedStreamConfiguration& config : configs) {
availableRanges.insert({.minFps = kMinFps, .maxFps = config.maxFps});
@@ -141,8 +142,7 @@
availableRanges.insert({.minFps = 30, .maxFps = 30});
}
- return std::vector<MetadataBuilder::FpsRange>(availableRanges.begin(),
- availableRanges.end());
+ return std::vector<FpsRange>(availableRanges.begin(), availableRanges.end());
}
std::optional<Resolution> getMaxResolution(
@@ -215,7 +215,8 @@
// TODO(b/301023410) - Populate camera characteristics according to camera configuration.
std::optional<CameraMetadata> initCameraCharacteristics(
const std::vector<SupportedStreamConfiguration>& supportedInputConfig,
- const SensorOrientation sensorOrientation, const LensFacing lensFacing) {
+ const SensorOrientation sensorOrientation, const LensFacing lensFacing,
+ const int32_t deviceId) {
if (!std::all_of(supportedInputConfig.begin(), supportedInputConfig.end(),
[](const SupportedStreamConfiguration& config) {
return isFormatSupportedForInput(
@@ -230,6 +231,7 @@
MetadataBuilder()
.setSupportedHardwareLevel(
ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_EXTERNAL)
+ .setDeviceId(deviceId)
.setFlashAvailable(false)
.setLensFacing(
static_cast<camera_metadata_enum_android_lens_facing>(lensFacing))
@@ -243,6 +245,13 @@
{ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF})
.setAvailableNoiseReductionModes({ANDROID_NOISE_REDUCTION_MODE_OFF})
.setAvailableFaceDetectModes({ANDROID_STATISTICS_FACE_DETECT_MODE_OFF})
+ .setAvailableStreamUseCases(
+ {ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_DEFAULT,
+ ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_PREVIEW,
+ ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_STILL_CAPTURE,
+ ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_VIDEO_RECORD,
+ ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_PREVIEW_VIDEO_STILL,
+ ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_VIDEO_CALL})
.setAvailableTestPatternModes({ANDROID_SENSOR_TEST_PATTERN_MODE_OFF})
.setAvailableMaxDigitalZoom(1.0)
.setControlAvailableModes({ANDROID_CONTROL_MODE_AUTO})
@@ -297,18 +306,47 @@
ANDROID_CONTROL_ZOOM_RATIO,
ANDROID_FLASH_MODE,
ANDROID_JPEG_AVAILABLE_THUMBNAIL_SIZES,
+ ANDROID_JPEG_ORIENTATION,
ANDROID_JPEG_QUALITY,
ANDROID_JPEG_THUMBNAIL_QUALITY,
+ ANDROID_JPEG_THUMBNAIL_SIZE,
ANDROID_NOISE_REDUCTION_MODE,
ANDROID_STATISTICS_FACE_DETECT_MODE})
- .setAvailableResultKeys(
- {ANDROID_COLOR_CORRECTION_ABERRATION_MODE, ANDROID_CONTROL_AE_MODE,
- ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER, ANDROID_CONTROL_AF_MODE,
- ANDROID_CONTROL_AWB_MODE, ANDROID_CONTROL_EFFECT_MODE,
- ANDROID_CONTROL_MODE, ANDROID_FLASH_MODE, ANDROID_FLASH_STATE,
- ANDROID_JPEG_AVAILABLE_THUMBNAIL_SIZES, ANDROID_JPEG_QUALITY,
- ANDROID_JPEG_THUMBNAIL_QUALITY, ANDROID_LENS_FOCAL_LENGTH,
- ANDROID_SENSOR_TIMESTAMP, ANDROID_NOISE_REDUCTION_MODE})
+ .setAvailableResultKeys({
+ ANDROID_COLOR_CORRECTION_ABERRATION_MODE,
+ ANDROID_CONTROL_AE_ANTIBANDING_MODE,
+ ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION,
+ ANDROID_CONTROL_AE_LOCK,
+ ANDROID_CONTROL_AE_MODE,
+ ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER,
+ ANDROID_CONTROL_AE_STATE,
+ ANDROID_CONTROL_AE_TARGET_FPS_RANGE,
+ ANDROID_CONTROL_AF_MODE,
+ ANDROID_CONTROL_AF_STATE,
+ ANDROID_CONTROL_AF_TRIGGER,
+ ANDROID_CONTROL_AWB_LOCK,
+ ANDROID_CONTROL_AWB_MODE,
+ ANDROID_CONTROL_AWB_STATE,
+ ANDROID_CONTROL_CAPTURE_INTENT,
+ ANDROID_CONTROL_EFFECT_MODE,
+ ANDROID_CONTROL_MODE,
+ ANDROID_CONTROL_SCENE_MODE,
+ ANDROID_CONTROL_VIDEO_STABILIZATION_MODE,
+ ANDROID_STATISTICS_FACE_DETECT_MODE,
+ ANDROID_FLASH_MODE,
+ ANDROID_FLASH_STATE,
+ ANDROID_JPEG_AVAILABLE_THUMBNAIL_SIZES,
+ ANDROID_JPEG_QUALITY,
+ ANDROID_JPEG_THUMBNAIL_QUALITY,
+ ANDROID_LENS_FOCAL_LENGTH,
+ ANDROID_LENS_OPTICAL_STABILIZATION_MODE,
+ ANDROID_NOISE_REDUCTION_MODE,
+ ANDROID_REQUEST_PIPELINE_DEPTH,
+ ANDROID_SENSOR_TIMESTAMP,
+ ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE,
+ ANDROID_STATISTICS_LENS_SHADING_MAP_MODE,
+ ANDROID_STATISTICS_SCENE_FLICKER,
+ })
.setAvailableCapabilities(
{ANDROID_REQUEST_AVAILABLE_CAPABILITIES_BACKWARD_COMPATIBLE});
@@ -361,13 +399,14 @@
} // namespace
VirtualCameraDevice::VirtualCameraDevice(
- const uint32_t cameraId, const VirtualCameraConfiguration& configuration)
+ const uint32_t cameraId, const VirtualCameraConfiguration& configuration,
+ int32_t deviceId)
: mCameraId(cameraId),
mVirtualCameraClientCallback(configuration.virtualCameraCallback),
mSupportedInputConfigurations(configuration.supportedStreamConfigs) {
std::optional<CameraMetadata> metadata = initCameraCharacteristics(
mSupportedInputConfigurations, configuration.sensorOrientation,
- configuration.lensFacing);
+ configuration.lensFacing, deviceId);
if (metadata.has_value()) {
mCameraCharacteristics = *metadata;
} else {
@@ -537,12 +576,18 @@
return cameraStatus(Status::OPERATION_NOT_SUPPORTED);
}
-binder_status_t VirtualCameraDevice::dump(int fd, const char** args,
- uint32_t numArgs) {
- // TODO(b/301023410) Implement.
- (void)fd;
- (void)args;
- (void)numArgs;
+binder_status_t VirtualCameraDevice::dump(int fd, const char**, uint32_t) {
+ ALOGD("Dumping virtual camera %d", mCameraId);
+ const char* indent = " ";
+ const char* doubleIndent = " ";
+ dprintf(fd, "%svirtual_camera %d belongs to virtual device %d\n", indent,
+ mCameraId,
+ getDeviceId(mCameraCharacteristics)
+ .value_or(VirtualCameraService::kDefaultDeviceId));
+ dprintf(fd, "%sSupportedStreamConfiguration:\n", indent);
+ for (auto& config : mSupportedInputConfigurations) {
+ dprintf(fd, "%s%s", doubleIndent, config.toString().c_str());
+ }
return STATUS_OK;
}
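
Note that availableResultKeys above has to stay in sync with every tag the render thread later writes into a capture result (see the comment in VirtualCameraRenderThread.cc). A hypothetical consistency check, sketched with plain std::set tags rather than the real metadata structures:

#include <algorithm>
#include <cstdint>
#include <set>

// Returns true if every tag written into a capture result is also declared in
// ANDROID_REQUEST_AVAILABLE_RESULT_KEYS.
bool resultKeysAreDeclared(const std::set<uint32_t>& declaredResultKeys,
                           const std::set<uint32_t>& tagsSetInCaptureResult) {
  return std::includes(declaredResultKeys.begin(), declaredResultKeys.end(),
                       tagsSetInCaptureResult.begin(),
                       tagsSetInCaptureResult.end());
}
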
diff --git a/services/camera/virtualcamera/VirtualCameraDevice.h b/services/camera/virtualcamera/VirtualCameraDevice.h
index c274dc9..cba0674 100644
--- a/services/camera/virtualcamera/VirtualCameraDevice.h
+++ b/services/camera/virtualcamera/VirtualCameraDevice.h
@@ -24,6 +24,7 @@
#include "aidl/android/companion/virtualcamera/SupportedStreamConfiguration.h"
#include "aidl/android/companion/virtualcamera/VirtualCameraConfiguration.h"
#include "aidl/android/hardware/camera/device/BnCameraDevice.h"
+#include "system/camera_metadata.h"
#include "util/Util.h"
namespace android {
@@ -38,7 +39,8 @@
explicit VirtualCameraDevice(
uint32_t cameraId,
const aidl::android::companion::virtualcamera::VirtualCameraConfiguration&
- configuration);
+ configuration,
+ int32_t deviceId);
virtual ~VirtualCameraDevice() override = default;
@@ -121,6 +123,15 @@
// Default JPEG compression quality.
static constexpr uint8_t kDefaultJpegQuality = 80;
+ // Default JPEG orientation.
+ static constexpr uint8_t kDefaultJpegOrientation = 0;
+
+ // Default Make and Model for Exif
+ static constexpr char kDefaultMakeAndModel[] = "Android Virtual Camera";
+
+ static constexpr camera_metadata_enum_android_control_capture_intent_t
+ kDefaultCaptureIntent = ANDROID_CONTROL_CAPTURE_INTENT_PREVIEW;
+
private:
std::shared_ptr<VirtualCameraDevice> sharedFromThis();
diff --git a/services/camera/virtualcamera/VirtualCameraProvider.cc b/services/camera/virtualcamera/VirtualCameraProvider.cc
index e4a68f5..67eaec0 100644
--- a/services/camera/virtualcamera/VirtualCameraProvider.cc
+++ b/services/camera/virtualcamera/VirtualCameraProvider.cc
@@ -42,10 +42,6 @@
using ::aidl::android::hardware::camera::provider::ConcurrentCameraIdCombination;
using ::aidl::android::hardware::camera::provider::ICameraProviderCallback;
-// TODO(b/301023410) Make camera id range configurable / dynamic
-// based on already registered devices.
-std::atomic_int VirtualCameraProvider::sNextId{42};
-
ndk::ScopedAStatus VirtualCameraProvider::setCallback(
const std::shared_ptr<ICameraProviderCallback>& in_callback) {
ALOGV("%s", __func__);
@@ -154,9 +150,16 @@
}
std::shared_ptr<VirtualCameraDevice> VirtualCameraProvider::createCamera(
- const VirtualCameraConfiguration& configuration) {
- auto camera =
- ndk::SharedRefBase::make<VirtualCameraDevice>(sNextId++, configuration);
+ const VirtualCameraConfiguration& configuration, const int cameraId,
+ const int32_t deviceId) {
+ if (cameraId < 0) {
+ ALOGE("%s: Cannot create camera with negative id. cameraId: %d", __func__,
+ cameraId);
+ return nullptr;
+ }
+
+ auto camera = ndk::SharedRefBase::make<VirtualCameraDevice>(
+ cameraId, configuration, deviceId);
std::shared_ptr<ICameraProviderCallback> callback;
{
const std::lock_guard<std::mutex> lock(mLock);
diff --git a/services/camera/virtualcamera/VirtualCameraProvider.h b/services/camera/virtualcamera/VirtualCameraProvider.h
index 11d3123..c536547 100644
--- a/services/camera/virtualcamera/VirtualCameraProvider.h
+++ b/services/camera/virtualcamera/VirtualCameraProvider.h
@@ -76,7 +76,8 @@
// Returns nullptr if creation was not successful.
std::shared_ptr<VirtualCameraDevice> createCamera(
const aidl::android::companion::virtualcamera::VirtualCameraConfiguration&
- configuration);
+ configuration,
+ int cameraId, int32_t deviceId);
std::shared_ptr<VirtualCameraDevice> getCamera(const std::string& name);
@@ -91,9 +92,6 @@
std::map<std::string, std::shared_ptr<VirtualCameraDevice>> mCameras
GUARDED_BY(mLock);
-
- // Numerical id to assign to next created camera.
- static std::atomic_int sNextId;
};
} // namespace virtualcamera
diff --git a/services/camera/virtualcamera/VirtualCameraRenderThread.cc b/services/camera/virtualcamera/VirtualCameraRenderThread.cc
index 9b0fc07..f5cf092 100644
--- a/services/camera/virtualcamera/VirtualCameraRenderThread.cc
+++ b/services/camera/virtualcamera/VirtualCameraRenderThread.cc
@@ -14,7 +14,6 @@
* limitations under the License.
*/
-#include "system/camera_metadata.h"
#define LOG_TAG "VirtualCameraRenderThread"
#include "VirtualCameraRenderThread.h"
@@ -45,7 +44,10 @@
#include "android-base/thread_annotations.h"
#include "android/binder_auto_utils.h"
#include "android/hardware_buffer.h"
+#include "hardware/gralloc.h"
+#include "system/camera_metadata.h"
#include "ui/GraphicBuffer.h"
+#include "ui/Rect.h"
#include "util/EglFramebuffer.h"
#include "util/JpegUtil.h"
#include "util/MetadataUtil.h"
@@ -92,30 +94,75 @@
const std::chrono::nanoseconds timestamp,
const RequestSettings& requestSettings,
const Resolution reportedSensorSize) {
- std::unique_ptr<CameraMetadata> metadata =
+ // All of the keys used in the response need to be declared in
+ // availableResultKeys in CameraCharacteristics (see initCameraCharacteristics
+ // in VirtualCameraDevice.cc).
+ MetadataBuilder builder =
MetadataBuilder()
.setAberrationCorrectionMode(
ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF)
+ .setControlAeAvailableAntibandingModes(
+ {ANDROID_CONTROL_AE_ANTIBANDING_MODE_OFF})
+ .setControlAeAntibandingMode(ANDROID_CONTROL_AE_ANTIBANDING_MODE_OFF)
+ .setControlAeExposureCompensation(0)
+ .setControlAeLockAvailable(false)
+ .setControlAeLock(ANDROID_CONTROL_AE_LOCK_OFF)
.setControlAeMode(ANDROID_CONTROL_AE_MODE_ON)
.setControlAePrecaptureTrigger(
- ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER_IDLE)
+ // LIMITED devices are expected to have AE precapture enabled and to
+ // respond to a cancellation request. Since we don't actually support AE
+ // at all, just honor the cancellation request when it is present.
+ requestSettings.aePrecaptureTrigger ==
+ ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER_CANCEL
+ ? ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER_CANCEL
+ : ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER_IDLE)
+ .setControlAeState(ANDROID_CONTROL_AE_STATE_INACTIVE)
.setControlAfMode(ANDROID_CONTROL_AF_MODE_OFF)
+ .setControlAfTrigger(ANDROID_CONTROL_AF_TRIGGER_IDLE)
+ .setControlAfState(ANDROID_CONTROL_AF_STATE_INACTIVE)
.setControlAwbMode(ANDROID_CONTROL_AWB_MODE_AUTO)
+ .setControlAwbLock(ANDROID_CONTROL_AWB_LOCK_OFF)
+ .setControlAwbState(ANDROID_CONTROL_AWB_STATE_INACTIVE)
+ .setControlCaptureIntent(requestSettings.captureIntent)
.setControlEffectMode(ANDROID_CONTROL_EFFECT_MODE_OFF)
.setControlMode(ANDROID_CONTROL_MODE_AUTO)
+ .setControlSceneMode(ANDROID_CONTROL_SCENE_MODE_DISABLED)
+ .setControlVideoStabilizationMode(
+ ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_OFF)
.setCropRegion(0, 0, reportedSensorSize.width,
reportedSensorSize.height)
.setFaceDetectMode(ANDROID_STATISTICS_FACE_DETECT_MODE_OFF)
.setFlashState(ANDROID_FLASH_STATE_UNAVAILABLE)
+ .setFlashMode(ANDROID_FLASH_MODE_OFF)
.setFocalLength(VirtualCameraDevice::kFocalLength)
.setJpegQuality(requestSettings.jpegQuality)
+ .setJpegOrientation(requestSettings.jpegOrientation)
.setJpegThumbnailSize(requestSettings.thumbnailResolution.width,
requestSettings.thumbnailResolution.height)
.setJpegThumbnailQuality(requestSettings.thumbnailJpegQuality)
+ .setLensOpticalStabilizationMode(
+ ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF)
.setNoiseReductionMode(ANDROID_NOISE_REDUCTION_MODE_OFF)
.setPipelineDepth(kPipelineDepth)
.setSensorTimestamp(timestamp)
- .build();
+ .setStatisticsHotPixelMapMode(
+ ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_OFF)
+ .setStatisticsLensShadingMapMode(
+ ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_OFF)
+ .setStatisticsSceneFlicker(ANDROID_STATISTICS_SCENE_FLICKER_NONE);
+
+ if (requestSettings.fpsRange.has_value()) {
+ builder.setControlAeTargetFpsRange(requestSettings.fpsRange.value());
+ }
+
+ if (requestSettings.gpsCoordinates.has_value()) {
+ const GpsCoordinates& coordinates = requestSettings.gpsCoordinates.value();
+ builder.setJpegGpsCoordinates(coordinates);
+ }
+
+ std::unique_ptr<CameraMetadata> metadata = builder.build();
+
if (metadata == nullptr) {
ALOGE("%s: Failed to build capture result metadata", __func__);
return CameraMetadata();
@@ -192,12 +239,24 @@
}
std::vector<uint8_t> createExif(
- Resolution imageSize, const std::vector<uint8_t>& compressedThumbnail = {}) {
+ Resolution imageSize, const CameraMetadata resultMetadata,
+ const std::vector<uint8_t>& compressedThumbnail = {}) {
std::unique_ptr<ExifUtils> exifUtils(ExifUtils::create());
exifUtils->initialize();
- exifUtils->setImageWidth(imageSize.width);
- exifUtils->setImageHeight(imageSize.height);
- // TODO(b/324383963) Set Make/Model and orientation.
+
+ // Make a copy of the metadata in order to convert it to the HAL metadata
+ // format (as opposed to the AIDL class) and use the setFromMetadata method
+ // from ExifUtils.
+ camera_metadata_t* rawSettings =
+ clone_camera_metadata((camera_metadata_t*)resultMetadata.metadata.data());
+ if (rawSettings != nullptr) {
+ android::hardware::camera::common::helper::CameraMetadata halMetadata(
+ rawSettings);
+ exifUtils->setFromMetadata(halMetadata, imageSize.width, imageSize.height);
+ }
+ exifUtils->setMake(VirtualCameraDevice::kDefaultMakeAndModel);
+ exifUtils->setModel(VirtualCameraDevice::kDefaultMakeAndModel);
+ exifUtils->setFlash(0);
std::vector<uint8_t> app1Data;
@@ -341,20 +400,69 @@
EglTextureProgram::TextureFormat::RGBA);
mEglSurfaceTexture = std::make_unique<EglSurfaceTexture>(
mInputSurfaceSize.width, mInputSurfaceSize.height);
+
+ sp<Surface> inputSurface = mEglSurfaceTexture->getSurface();
+ if (mTestMode) {
+ inputSurface->connect(NATIVE_WINDOW_API_CPU, false, nullptr);
+ }
mInputSurfacePromise.set_value(mEglSurfaceTexture->getSurface());
while (std::unique_ptr<ProcessCaptureRequestTask> task = dequeueTask()) {
processCaptureRequest(*task);
}
+ // Destroy EGL utilities still on the render thread.
+ mEglSurfaceTexture.reset();
+ mEglTextureRgbProgram.reset();
+ mEglTextureYuvProgram.reset();
+ mEglDisplayContext.reset();
+
ALOGV("Render thread exiting");
}
void VirtualCameraRenderThread::processCaptureRequest(
const ProcessCaptureRequestTask& request) {
- const std::chrono::nanoseconds timestamp =
+ if (mTestMode) {
+ // In test mode let's just render something to the Surface ourselves.
+ renderTestPatternYCbCr420(mEglSurfaceTexture->getSurface(),
+ request.getFrameNumber());
+ }
+
+ std::chrono::nanoseconds timestamp =
std::chrono::duration_cast<std::chrono::nanoseconds>(
std::chrono::steady_clock::now().time_since_epoch());
+ std::chrono::nanoseconds lastAcquisitionTimestamp(
+ mLastAcquisitionTimestampNanoseconds.exchange(timestamp.count(),
+ std::memory_order_relaxed));
+
+ if (request.getRequestSettings().fpsRange) {
+ const int maxFps =
+ std::max(1, request.getRequestSettings().fpsRange->maxFps);
+ const std::chrono::nanoseconds minFrameDuration(
+ static_cast<uint64_t>(1e9 / maxFps));
+ const std::chrono::nanoseconds frameDuration =
+ timestamp - lastAcquisitionTimestamp;
+ if (frameDuration < minFrameDuration) {
+ // We're too fast for the configured maxFps, let's wait a bit.
+ const std::chrono::nanoseconds sleepTime =
+ minFrameDuration - frameDuration;
+ ALOGV("Current frame duration would be %" PRIu64
+ " ns corresponding to, "
+ "sleeping for %" PRIu64
+ " ns before updating texture to match maxFps %d",
+ static_cast<uint64_t>(frameDuration.count()),
+ static_cast<uint64_t>(sleepTime.count()), maxFps);
+
+ std::this_thread::sleep_for(sleepTime);
+ timestamp = std::chrono::duration_cast<std::chrono::nanoseconds>(
+ std::chrono::steady_clock::now().time_since_epoch());
+ mLastAcquisitionTimestampNanoseconds.store(timestamp.count(),
+ std::memory_order_relaxed);
+ }
+ }
+
+ // Acquire new (most recent) image from the Surface.
+ mEglSurfaceTexture->updateTexture();
CaptureResult captureResult;
captureResult.fmqResultSize = 0;
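
The throttling above derives the minimum frame duration from the requested maxFps and sleeps off any remainder before acquiring the next frame. In isolation the arithmetic looks like this (a standalone sketch with an illustrative helper name, not the render-thread code):

#include <algorithm>
#include <chrono>
#include <cstdint>

// Given the requested maxFps and the time elapsed since the last acquisition,
// return how long to sleep so frames are not produced faster than maxFps allows.
std::chrono::nanoseconds throttleDelay(int maxFps,
                                        std::chrono::nanoseconds sinceLastFrame) {
  maxFps = std::max(1, maxFps);  // guard against zero / negative maxFps, as above
  const std::chrono::nanoseconds minFrameDuration(
      static_cast<uint64_t>(1e9 / maxFps));
  return sinceLastFrame < minFrameDuration
             ? minFrameDuration - sinceLastFrame
             : std::chrono::nanoseconds::zero();
}
// e.g. maxFps = 30 -> minFrameDuration is roughly 33.3 ms; a frame arriving
// after 20 ms yields a sleep of roughly 13.3 ms.
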
@@ -369,14 +477,6 @@
const std::vector<CaptureRequestBuffer>& buffers = request.getBuffers();
captureResult.outputBuffers.resize(buffers.size());
- if (mTestMode) {
- // In test mode let's just render something to the Surface ourselves.
- renderTestPatternYCbCr420(mEglSurfaceTexture->getSurface(),
- request.getFrameNumber());
- }
-
- mEglSurfaceTexture->updateTexture();
-
for (int i = 0; i < buffers.size(); ++i) {
const CaptureRequestBuffer& reqBuffer = buffers[i];
StreamBuffer& resBuffer = captureResult.outputBuffers[i];
@@ -395,7 +495,8 @@
auto status = streamConfig->format == PixelFormat::BLOB
? renderIntoBlobStreamBuffer(
reqBuffer.getStreamId(), reqBuffer.getBufferId(),
- request.getRequestSettings(), reqBuffer.getFence())
+ captureResult.result, request.getRequestSettings(),
+ reqBuffer.getFence())
: renderIntoImageStreamBuffer(reqBuffer.getStreamId(),
reqBuffer.getBufferId(),
reqBuffer.getFence());
@@ -431,7 +532,7 @@
return;
}
- ALOGD("%s: Successfully called processCaptureResult", __func__);
+ ALOGV("%s: Successfully called processCaptureResult", __func__);
}
void VirtualCameraRenderThread::flushCaptureRequest(
@@ -484,8 +585,9 @@
ALOGV("%s: Creating thumbnail with size %d x %d, quality %d", __func__,
resolution.width, resolution.height, quality);
+ Resolution bufferSize = roundTo2DctSize(resolution);
std::shared_ptr<EglFrameBuffer> framebuffer = allocateTemporaryFramebuffer(
- mEglDisplayContext->getEglDisplay(), resolution.width, resolution.height);
+ mEglDisplayContext->getEglDisplay(), bufferSize.width, bufferSize.height);
if (framebuffer == nullptr) {
ALOGE(
"Failed to allocate temporary framebuffer for JPEG thumbnail "
@@ -496,38 +598,23 @@
// TODO(b/324383963) Add support for letterboxing if the thumbnail size
// doesn't correspond
// to input texture aspect ratio.
- if (!renderIntoEglFramebuffer(*framebuffer).isOk()) {
+ if (!renderIntoEglFramebuffer(*framebuffer, /*fence=*/nullptr,
+ Rect(resolution.width, resolution.height))
+ .isOk()) {
ALOGE(
"Failed to render input texture into temporary framebuffer for JPEG "
"thumbnail");
return {};
}
- std::shared_ptr<AHardwareBuffer> inHwBuffer = framebuffer->getHardwareBuffer();
- GraphicBuffer* gBuffer = GraphicBuffer::fromAHardwareBuffer(inHwBuffer.get());
-
- if (gBuffer->getPixelFormat() != HAL_PIXEL_FORMAT_YCbCr_420_888) {
- // This should never happen since we're allocating the temporary buffer
- // with YUV420 layout above.
- ALOGE("%s: Cannot compress non-YUV buffer (pixelFormat %d)", __func__,
- gBuffer->getPixelFormat());
- return {};
- }
-
- YCbCrLockGuard yCbCrLock(inHwBuffer, AHARDWAREBUFFER_USAGE_CPU_READ_OFTEN);
- if (yCbCrLock.getStatus() != NO_ERROR) {
- ALOGE("%s: Failed to lock graphic buffer while generating thumbnail: %d",
- __func__, yCbCrLock.getStatus());
- return {};
- }
-
std::vector<uint8_t> compressedThumbnail;
compressedThumbnail.resize(kJpegThumbnailBufferSize);
- ALOGE("%s: Compressing thumbnail %d x %d", __func__, gBuffer->getWidth(),
- gBuffer->getHeight());
- std::optional<size_t> compressedSize = compressJpeg(
- gBuffer->getWidth(), gBuffer->getHeight(), quality, *yCbCrLock, {},
- compressedThumbnail.size(), compressedThumbnail.data());
+ ALOGE("%s: Compressing thumbnail %d x %d", __func__, resolution.width,
+ resolution.height);
+ std::optional<size_t> compressedSize =
+ compressJpeg(resolution.width, resolution.height, quality,
+ framebuffer->getHardwareBuffer(), {},
+ compressedThumbnail.size(), compressedThumbnail.data());
if (!compressedSize.has_value()) {
ALOGE("%s: Failed to compress jpeg thumbnail", __func__);
return {};
@@ -537,7 +624,7 @@
}
ndk::ScopedAStatus VirtualCameraRenderThread::renderIntoBlobStreamBuffer(
- const int streamId, const int bufferId,
+ const int streamId, const int bufferId, const CameraMetadata& resultMetadata,
const RequestSettings& requestSettings, sp<Fence> fence) {
std::shared_ptr<AHardwareBuffer> hwBuffer =
mSessionContext.fetchHardwareBuffer(streamId, bufferId);
@@ -558,15 +645,22 @@
// Let's create YUV framebuffer and render the surface into this.
// This will take care about rescaling as well as potential format conversion.
+ // The buffer dimensions need to be rounded up to the nearest multiple of the
+ // JPEG DCT size; however, we pass a viewport matching the stream size, so the
+ // image is rendered only into the area corresponding to the stream size.
+ Resolution bufferSize =
+ roundTo2DctSize(Resolution(stream->width, stream->height));
std::shared_ptr<EglFrameBuffer> framebuffer = allocateTemporaryFramebuffer(
- mEglDisplayContext->getEglDisplay(), stream->width, stream->height);
+ mEglDisplayContext->getEglDisplay(), bufferSize.width, bufferSize.height);
if (framebuffer == nullptr) {
ALOGE("Failed to allocate temporary framebuffer for JPEG compression");
return cameraStatus(Status::INTERNAL_ERROR);
}
// Render into temporary framebuffer.
- ndk::ScopedAStatus status = renderIntoEglFramebuffer(*framebuffer);
+ ndk::ScopedAStatus status = renderIntoEglFramebuffer(
+ *framebuffer, /*fence=*/nullptr, Rect(stream->width, stream->height));
if (!status.isOk()) {
ALOGE("Failed to render input texture into temporary framebuffer");
return status;
@@ -578,38 +672,14 @@
return cameraStatus(Status::INTERNAL_ERROR);
}
- std::shared_ptr<AHardwareBuffer> inHwBuffer = framebuffer->getHardwareBuffer();
- GraphicBuffer* gBuffer = GraphicBuffer::fromAHardwareBuffer(inHwBuffer.get());
-
- if (gBuffer == nullptr) {
- ALOGE(
- "%s: Encountered invalid temporary buffer while rendering JPEG "
- "into BLOB stream",
- __func__);
- return cameraStatus(Status::INTERNAL_ERROR);
- }
-
- if (gBuffer->getPixelFormat() != HAL_PIXEL_FORMAT_YCbCr_420_888) {
- // This should never happen since we're allocating the temporary buffer
- // with YUV420 layout above.
- ALOGE("%s: Cannot compress non-YUV buffer (pixelFormat %d)", __func__,
- gBuffer->getPixelFormat());
- return cameraStatus(Status::INTERNAL_ERROR);
- }
-
- YCbCrLockGuard yCbCrLock(inHwBuffer, AHARDWAREBUFFER_USAGE_CPU_READ_OFTEN);
- if (yCbCrLock.getStatus() != OK) {
- return cameraStatus(Status::INTERNAL_ERROR);
- }
-
std::vector<uint8_t> app1ExifData =
- createExif(Resolution(stream->width, stream->height),
+ createExif(Resolution(stream->width, stream->height), resultMetadata,
createThumbnail(requestSettings.thumbnailResolution,
requestSettings.thumbnailJpegQuality));
std::optional<size_t> compressedSize = compressJpeg(
- gBuffer->getWidth(), gBuffer->getHeight(), requestSettings.jpegQuality,
- *yCbCrLock, app1ExifData, stream->bufferSize - sizeof(CameraBlob),
- (*planesLock).planes[0].data);
+ stream->width, stream->height, requestSettings.jpegQuality,
+ framebuffer->getHardwareBuffer(), app1ExifData,
+ stream->bufferSize - sizeof(CameraBlob), (*planesLock).planes[0].data);
if (!compressedSize.has_value()) {
ALOGE("%s: Failed to compress JPEG image", __func__);
@@ -663,7 +733,7 @@
}
ndk::ScopedAStatus VirtualCameraRenderThread::renderIntoEglFramebuffer(
- EglFrameBuffer& framebuffer, sp<Fence> fence) {
+ EglFrameBuffer& framebuffer, sp<Fence> fence, std::optional<Rect> viewport) {
ALOGV("%s", __func__);
// Wait for fence to clear.
if (fence != nullptr && fence->isValid()) {
@@ -677,6 +747,11 @@
mEglDisplayContext->makeCurrent();
framebuffer.beforeDraw();
+ Rect viewportRect =
+ viewport.value_or(Rect(framebuffer.getWidth(), framebuffer.getHeight()));
+ glViewport(viewportRect.leftTop().x, viewportRect.leftTop().y,
+ viewportRect.getWidth(), viewportRect.getHeight());
+
sp<GraphicBuffer> textureBuffer = mEglSurfaceTexture->getCurrentBuffer();
if (textureBuffer == nullptr) {
// If there's no current buffer, nothing was written to the surface and
diff --git a/services/camera/virtualcamera/VirtualCameraRenderThread.h b/services/camera/virtualcamera/VirtualCameraRenderThread.h
index 86dad0b..dfb6f7b 100644
--- a/services/camera/virtualcamera/VirtualCameraRenderThread.h
+++ b/services/camera/virtualcamera/VirtualCameraRenderThread.h
@@ -17,6 +17,7 @@
#ifndef ANDROID_COMPANION_VIRTUALCAMERA_VIRTUALCAMERARENDERTHREAD_H
#define ANDROID_COMPANION_VIRTUALCAMERA_VIRTUALCAMERARENDERTHREAD_H
+#include <atomic>
#include <cstdint>
#include <deque>
#include <future>
@@ -33,6 +34,7 @@
#include "util/EglFramebuffer.h"
#include "util/EglProgram.h"
#include "util/EglSurfaceTexture.h"
+#include "util/MetadataUtil.h"
#include "util/Util.h"
namespace android {
@@ -56,8 +58,15 @@
struct RequestSettings {
int jpegQuality = VirtualCameraDevice::kDefaultJpegQuality;
+ int jpegOrientation = VirtualCameraDevice::kDefaultJpegOrientation;
Resolution thumbnailResolution = Resolution(0, 0);
int thumbnailJpegQuality = VirtualCameraDevice::kDefaultJpegQuality;
+ std::optional<FpsRange> fpsRange;
+ camera_metadata_enum_android_control_capture_intent_t captureIntent =
+ VirtualCameraDevice::kDefaultCaptureIntent;
+ std::optional<GpsCoordinates> gpsCoordinates;
+ std::optional<camera_metadata_enum_android_control_ae_precapture_trigger>
+ aePrecaptureTrigger;
};
// Represents single capture request to fill set of buffers.
@@ -149,6 +158,8 @@
// Always called on render thread.
ndk::ScopedAStatus renderIntoBlobStreamBuffer(
const int streamId, const int bufferId,
+ const ::aidl::android::hardware::camera::device::CameraMetadata&
+ resultMetadata,
const RequestSettings& requestSettings, sp<Fence> fence = nullptr);
// Render current image to the YCbCr buffer.
@@ -162,8 +173,9 @@
// If fence is specified, this function will block until the fence is cleared
// before writing to the buffer.
// Always called on the render thread.
- ndk::ScopedAStatus renderIntoEglFramebuffer(EglFrameBuffer& framebuffer,
- sp<Fence> fence = nullptr);
+ ndk::ScopedAStatus renderIntoEglFramebuffer(
+ EglFrameBuffer& framebuffer, sp<Fence> fence = nullptr,
+ std::optional<Rect> viewport = std::nullopt);
// Camera callback
const std::shared_ptr<
@@ -184,6 +196,9 @@
std::condition_variable mCondVar;
volatile bool mPendingExit GUARDED_BY(mLock);
+ // Acquisition timestamp of last frame.
+ std::atomic<uint64_t> mLastAcquisitionTimestampNanoseconds;
+
// EGL helpers - constructed and accessed only from rendering thread.
std::unique_ptr<EglDisplayContext> mEglDisplayContext;
std::unique_ptr<EglTextureProgram> mEglTextureYuvProgram;
diff --git a/services/camera/virtualcamera/VirtualCameraService.cc b/services/camera/virtualcamera/VirtualCameraService.cc
index 1144997..b5b07f0 100644
--- a/services/camera/virtualcamera/VirtualCameraService.cc
+++ b/services/camera/virtualcamera/VirtualCameraService.cc
@@ -18,19 +18,28 @@
#define LOG_TAG "VirtualCameraService"
#include "VirtualCameraService.h"
+#include <algorithm>
+#include <array>
#include <cinttypes>
#include <cstdint>
-#include <cstdio>
#include <memory>
#include <mutex>
+#include <optional>
+#include <regex>
+#include <variant>
#include "VirtualCameraDevice.h"
#include "VirtualCameraProvider.h"
#include "aidl/android/companion/virtualcamera/Format.h"
+#include "aidl/android/companion/virtualcamera/LensFacing.h"
#include "aidl/android/companion/virtualcamera/VirtualCameraConfiguration.h"
#include "android/binder_auto_utils.h"
#include "android/binder_libbinder.h"
+#include "android/binder_status.h"
#include "binder/Status.h"
+#include "fmt/format.h"
+#include "util/EglDisplayContext.h"
+#include "util/EglUtil.h"
#include "util/Permissions.h"
#include "util/Util.h"
@@ -46,6 +55,10 @@
using ::aidl::android::companion::virtualcamera::SupportedStreamConfiguration;
using ::aidl::android::companion::virtualcamera::VirtualCameraConfiguration;
+// TODO(b/301023410) Make camera id range configurable / dynamic
+// based on already registered devices.
+std::atomic_int VirtualCameraService::sNextId{1000};
+
namespace {
constexpr int kVgaWidth = 640;
@@ -53,14 +66,26 @@
constexpr int kMaxFps = 60;
constexpr char kEnableTestCameraCmd[] = "enable_test_camera";
constexpr char kDisableTestCameraCmd[] = "disable_test_camera";
+constexpr char kHelp[] = "help";
constexpr char kShellCmdHelp[] = R"(
+Usage:
+ cmd virtual_camera command [--option=value]
Available commands:
* enable_test_camera
+ Options:
+ --camera_id=(ID) - override numerical ID for test camera instance
+ --lens_facing=(front|back|external) - specifies lens facing for test camera instance
* disable_test_camera
)";
constexpr char kCreateVirtualDevicePermission[] =
"android.permission.CREATE_VIRTUAL_DEVICE";
+constexpr std::array<const char*, 3> kRequiredEglExtensions = {
+ "GL_OES_EGL_image_external",
+ "GL_OES_EGL_image_external_essl3",
+ "GL_EXT_YUV_target",
+};
+
ndk::ScopedAStatus validateConfiguration(
const VirtualCameraConfiguration& configuration) {
if (configuration.supportedStreamConfigs.empty()) {
@@ -98,6 +123,83 @@
return ndk::ScopedAStatus::ok();
}
+enum class Command {
+ ENABLE_TEST_CAMERA,
+ DISABLE_TEST_CAMERA,
+ HELP,
+};
+
+struct CommandWithOptions {
+ Command command;
+ std::map<std::string, std::string> optionToValueMap;
+};
+
+std::optional<int> parseInt(const std::string& s) {
+ if (!std::all_of(s.begin(), s.end(), [](char c) { return std::isdigit(c); })) {
+ return std::nullopt;
+ }
+ int ret = atoi(s.c_str());
+ return ret > 0 ? std::optional(ret) : std::nullopt;
+}
+
+std::optional<LensFacing> parseLensFacing(const std::string& s) {
+ static const std::map<std::string, LensFacing> strToLensFacing{
+ {"front", LensFacing::FRONT},
+ {"back", LensFacing::BACK},
+ {"external", LensFacing::EXTERNAL}};
+ auto it = strToLensFacing.find(s);
+ return it == strToLensFacing.end() ? std::nullopt : std::optional(it->second);
+}
+
+std::variant<CommandWithOptions, std::string> parseCommand(
+ const char** args, const uint32_t numArgs) {
+ static const std::regex optionRegex("^--(\\w+)(?:=(.+))?$");
+ static const std::map<std::string, Command> strToCommand{
+ {kHelp, Command::HELP},
+ {kEnableTestCameraCmd, Command::ENABLE_TEST_CAMERA},
+ {kDisableTestCameraCmd, Command::DISABLE_TEST_CAMERA}};
+
+ if (numArgs < 1) {
+ return CommandWithOptions{.command = Command::HELP};
+ }
+
+ // We interpret the first argument as the command.
+ auto it = strToCommand.find(args[0]);
+ if (it == strToCommand.end()) {
+ return "Unknown command: " + std::string(args[0]);
+ }
+
+ CommandWithOptions cmd{.command = it->second};
+
+ for (int i = 1; i < numArgs; i++) {
+ std::cmatch cm;
+ if (!std::regex_match(args[i], cm, optionRegex)) {
+ return "Not an option: " + std::string(args[i]);
+ }
+
+ cmd.optionToValueMap[cm[1]] = cm[2];
+ }
+
+ return cmd;
+}
+
+ndk::ScopedAStatus verifyRequiredEglExtensions() {
+ EglDisplayContext context;
+ for (const char* eglExtension : kRequiredEglExtensions) {
+ if (!isGlExtensionSupported(eglExtension)) {
+ ALOGE("%s not supported", eglExtension);
+ return ndk::ScopedAStatus::fromExceptionCodeWithMessage(
+ EX_UNSUPPORTED_OPERATION,
+ fmt::format(
+ "Cannot create virtual camera, because required EGL extension {} "
+ "is not supported on this system",
+ eglExtension)
+ .c_str());
+ }
+ }
+ return ndk::ScopedAStatus::ok();
+}
+
} // namespace
VirtualCameraService::VirtualCameraService(
@@ -109,7 +211,15 @@
ndk::ScopedAStatus VirtualCameraService::registerCamera(
const ::ndk::SpAIBinder& token,
- const VirtualCameraConfiguration& configuration, bool* _aidl_return) {
+ const VirtualCameraConfiguration& configuration, const int32_t deviceId,
+ bool* _aidl_return) {
+ return registerCamera(token, configuration, sNextId++, deviceId, _aidl_return);
+}
+
+ndk::ScopedAStatus VirtualCameraService::registerCamera(
+ const ::ndk::SpAIBinder& token,
+ const VirtualCameraConfiguration& configuration, const int cameraId,
+ const int32_t deviceId, bool* _aidl_return) {
if (!mPermissionProxy.checkCallingPermission(kCreateVirtualDevicePermission)) {
ALOGE("%s: caller (pid %d, uid %d) doesn't hold %s permission", __func__,
getpid(), getuid(), kCreateVirtualDevicePermission);
@@ -121,7 +231,13 @@
Status::EX_ILLEGAL_ARGUMENT);
}
- *_aidl_return = true;
+ if (mVerifyEglExtensions) {
+ auto status = verifyRequiredEglExtensions();
+ if (!status.isOk()) {
+ *_aidl_return = false;
+ return status;
+ }
+ }
auto status = validateConfiguration(configuration);
if (!status.isOk()) {
@@ -141,7 +257,7 @@
}
std::shared_ptr<VirtualCameraDevice> camera =
- mVirtualCameraProvider->createCamera(configuration);
+ mVirtualCameraProvider->createCamera(configuration, cameraId, deviceId);
if (camera == nullptr) {
ALOGE("Failed to create camera for binder token 0x%" PRIxPTR,
reinterpret_cast<uintptr_t>(token.get()));
@@ -151,6 +267,7 @@
}
mTokenToCameraName[token] = camera->getCameraName();
+ *_aidl_return = true;
return ndk::ScopedAStatus::ok();
}
@@ -175,11 +292,12 @@
mVirtualCameraProvider->removeCamera(it->second);
+ mTokenToCameraName.erase(it);
return ndk::ScopedAStatus::ok();
}
ndk::ScopedAStatus VirtualCameraService::getCameraId(
- const ::ndk::SpAIBinder& token, int32_t* _aidl_return) {
+ const ::ndk::SpAIBinder& token, int32_t* _aidl_return) {
if (!mPermissionProxy.checkCallingPermission(kCreateVirtualDevicePermission)) {
ALOGE("%s: caller (pid %d, uid %d) doesn't hold %s permission", __func__,
getpid(), getuid(), kCreateVirtualDevicePermission);
@@ -188,7 +306,7 @@
if (_aidl_return == nullptr) {
return ndk::ScopedAStatus::fromServiceSpecificError(
- Status::EX_ILLEGAL_ARGUMENT);
+ Status::EX_ILLEGAL_ARGUMENT);
}
auto camera = getCamera(token);
@@ -220,8 +338,7 @@
return mVirtualCameraProvider->getCamera(it->second);
}
-binder_status_t VirtualCameraService::handleShellCommand(int in, int out,
- int err,
+binder_status_t VirtualCameraService::handleShellCommand(int, int out, int err,
const char** args,
uint32_t numArgs) {
if (numArgs <= 0) {
@@ -230,27 +347,68 @@
return STATUS_OK;
}
- if (args == nullptr || args[0] == nullptr) {
+ auto isNullptr = [](const char* ptr) { return ptr == nullptr; };
+ if (args == nullptr || std::any_of(args, args + numArgs, isNullptr)) {
return STATUS_BAD_VALUE;
}
- const char* const cmd = args[0];
- if (strcmp(kEnableTestCameraCmd, cmd) == 0) {
- enableTestCameraCmd(in, err);
- } else if (strcmp(kDisableTestCameraCmd, cmd) == 0) {
- disableTestCameraCmd(in);
- } else {
- dprintf(out, kShellCmdHelp);
+
+ std::variant<CommandWithOptions, std::string> cmdOrErrorMessage =
+ parseCommand(args, numArgs);
+ if (std::holds_alternative<std::string>(cmdOrErrorMessage)) {
+ dprintf(err, "Error: %s\n",
+ std::get<std::string>(cmdOrErrorMessage).c_str());
+ return STATUS_BAD_VALUE;
}
+ const CommandWithOptions& cmd =
+ std::get<CommandWithOptions>(cmdOrErrorMessage);
+ binder_status_t status = STATUS_OK;
+ switch (cmd.command) {
+ case Command::HELP:
+ dprintf(out, kShellCmdHelp);
+ break;
+ case Command::ENABLE_TEST_CAMERA:
+ status = enableTestCameraCmd(out, err, cmd.optionToValueMap);
+ break;
+ case Command::DISABLE_TEST_CAMERA:
+ disableTestCameraCmd(out);
+ break;
+ }
+
+ fsync(err);
fsync(out);
- return STATUS_OK;
+ return status;
}
-void VirtualCameraService::enableTestCameraCmd(const int out, const int err) {
+binder_status_t VirtualCameraService::enableTestCameraCmd(
+ const int out, const int err,
+ const std::map<std::string, std::string>& options) {
if (mTestCameraToken != nullptr) {
- dprintf(out, "Test camera is already enabled (%s).",
+ dprintf(out, "Test camera is already enabled (%s).\n",
getCamera(mTestCameraToken)->getCameraName().c_str());
- return;
+ return STATUS_OK;
+ }
+
+ std::optional<int> cameraId;
+ auto it = options.find("camera_id");
+ if (it != options.end()) {
+ cameraId = parseInt(it->second);
+ if (!cameraId.has_value()) {
+ dprintf(err, "Invalid camera_id: %s\n, must be number > 0",
+ it->second.c_str());
+ return STATUS_BAD_VALUE;
+ }
+ }
+
+ std::optional<LensFacing> lensFacing;
+ it = options.find("lens_facing");
+ if (it != options.end()) {
+ lensFacing = parseLensFacing(it->second);
+ if (!lensFacing.has_value()) {
+ dprintf(err, "Invalid lens_facing: %s\n, must be front|back|external",
+ it->second.c_str());
+ return STATUS_BAD_VALUE;
+ }
}
sp<BBinder> token = sp<BBinder>::make();
@@ -262,14 +420,16 @@
.height = kVgaHeight,
Format::YUV_420_888,
.maxFps = kMaxFps});
- configuration.lensFacing = LensFacing::EXTERNAL;
- registerCamera(mTestCameraToken, configuration, &ret);
+ configuration.lensFacing = lensFacing.value_or(LensFacing::EXTERNAL);
+ registerCamera(mTestCameraToken, configuration, cameraId.value_or(sNextId++),
+ kDefaultDeviceId, &ret);
if (ret) {
- dprintf(out, "Successfully registered test camera %s",
+ dprintf(out, "Successfully registered test camera %s\n",
getCamera(mTestCameraToken)->getCameraName().c_str());
} else {
- dprintf(err, "Failed to create test camera");
+ dprintf(err, "Failed to create test camera\n");
}
+ return STATUS_OK;
}
void VirtualCameraService::disableTestCameraCmd(const int out) {
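
For reference, parseCommand above accepts one "--key" or "--key=value" token per argument after the command; combined with the help text this gives shell usage such as "cmd virtual_camera enable_test_camera --camera_id=7 --lens_facing=front". A minimal sketch of the same regex split, with an illustrative helper name:

#include <map>
#include <regex>
#include <string>
#include <vector>

// Split arguments of the form --key or --key=value into an option map,
// mirroring optionRegex above. Returns false on the first malformed argument.
bool splitOptions(const std::vector<std::string>& args,
                  std::map<std::string, std::string>& out) {
  static const std::regex optionRegex("^--(\\w+)(?:=(.+))?$");
  for (const std::string& arg : args) {
    std::smatch match;
    if (!std::regex_match(arg, match, optionRegex)) {
      return false;
    }
    out[match[1]] = match[2];
  }
  return true;
}
// {"--camera_id=7", "--lens_facing=front"} yields
// {"camera_id": "7", "lens_facing": "front"}.
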
diff --git a/services/camera/virtualcamera/VirtualCameraService.h b/services/camera/virtualcamera/VirtualCameraService.h
index d573986..f04acb5 100644
--- a/services/camera/virtualcamera/VirtualCameraService.h
+++ b/services/camera/virtualcamera/VirtualCameraService.h
@@ -43,7 +43,14 @@
const ::ndk::SpAIBinder& token,
const ::aidl::android::companion::virtualcamera::VirtualCameraConfiguration&
configuration,
- bool* _aidl_return) override EXCLUDES(mLock);
+ int32_t deviceId, bool* _aidl_return) override EXCLUDES(mLock);
+
+ // Register camera corresponding to the binder token.
+ ndk::ScopedAStatus registerCamera(
+ const ::ndk::SpAIBinder& token,
+ const ::aidl::android::companion::virtualcamera::VirtualCameraConfiguration&
+ configuration,
+ int cameraId, int32_t deviceId, bool* _aidl_return) EXCLUDES(mLock);
// Unregisters camera corresponding to the binder token.
ndk::ScopedAStatus unregisterCamera(const ::ndk::SpAIBinder& token) override
@@ -62,14 +69,24 @@
binder_status_t handleShellCommand(int in, int out, int err, const char** args,
uint32_t numArgs) override;
+ // Do not verify the presence of required EGL extensions when registering a
+ // virtual camera. Only to be used by unit tests.
+ void disableEglVerificationForTest() {
+ mVerifyEglExtensions = false;
+ }
+
+ // Default virtual device id (the host device id)
+ static constexpr int kDefaultDeviceId = 0;
+
private:
// Create and enable test camera instance if there's none.
- void enableTestCameraCmd(int out, int err);
+ binder_status_t enableTestCameraCmd(
+ int out, int err, const std::map<std::string, std::string>& options);
// Disable and destroy test camera instance if there's one.
void disableTestCameraCmd(int out);
std::shared_ptr<VirtualCameraProvider> mVirtualCameraProvider;
-
+ bool mVerifyEglExtensions = true;
const PermissionsProxy& mPermissionProxy;
std::mutex mLock;
@@ -84,6 +101,9 @@
// Local binder token for test camera instance, or nullptr if there's none.
::ndk::SpAIBinder mTestCameraToken;
+
+ // Numerical id to assign to next created camera.
+ static std::atomic_int sNextId;
};
} // namespace virtualcamera
diff --git a/services/camera/virtualcamera/VirtualCameraSession.cc b/services/camera/virtualcamera/VirtualCameraSession.cc
index 2a691c1..28fa495 100644
--- a/services/camera/virtualcamera/VirtualCameraSession.cc
+++ b/services/camera/virtualcamera/VirtualCameraSession.cc
@@ -148,7 +148,7 @@
.setControlMode(ANDROID_CONTROL_MODE_AUTO)
.setControlAeMode(ANDROID_CONTROL_AE_MODE_ON)
.setControlAeExposureCompensation(0)
- .setControlAeTargetFpsRange(maxFps, maxFps)
+ .setControlAeTargetFpsRange(FpsRange{maxFps, maxFps})
.setControlAeAntibandingMode(ANDROID_CONTROL_AE_ANTIBANDING_MODE_AUTO)
.setControlAePrecaptureTrigger(
ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER_IDLE)
@@ -257,10 +257,16 @@
return RequestSettings{
.jpegQuality = getJpegQuality(metadata).value_or(
VirtualCameraDevice::kDefaultJpegQuality),
+ .jpegOrientation = getJpegOrientation(metadata),
.thumbnailResolution =
getJpegThumbnailSize(metadata).value_or(Resolution(0, 0)),
.thumbnailJpegQuality = getJpegThumbnailQuality(metadata).value_or(
- VirtualCameraDevice::kDefaultJpegQuality)};
+ VirtualCameraDevice::kDefaultJpegQuality),
+ .fpsRange = getFpsRange(metadata),
+ .captureIntent = getCaptureIntent(metadata).value_or(
+ ANDROID_CONTROL_CAPTURE_INTENT_PREVIEW),
+ .gpsCoordinates = getGpsCoordinates(metadata),
+ .aePrecaptureTrigger = getPrecaptureTrigger(metadata)};
}
} // namespace
@@ -513,7 +519,7 @@
ndk::ScopedAStatus VirtualCameraSession::processCaptureRequest(
const CaptureRequest& request) {
- ALOGD("%s: request: %s", __func__, request.toString().c_str());
+ ALOGV("%s: request: %s", __func__, request.toString().c_str());
std::shared_ptr<ICameraDeviceCallback> cameraCallback = nullptr;
RequestSettings requestSettings;
diff --git a/services/camera/virtualcamera/VirtualCameraStream.cc b/services/camera/virtualcamera/VirtualCameraStream.cc
index 03da171..fad6cac 100644
--- a/services/camera/virtualcamera/VirtualCameraStream.cc
+++ b/services/camera/virtualcamera/VirtualCameraStream.cc
@@ -26,8 +26,6 @@
#include "EGL/egl.h"
#include "aidl/android/hardware/camera/device/Stream.h"
-#include "aidl/android/hardware/camera/device/StreamBuffer.h"
-#include "aidl/android/hardware/graphics/common/PixelFormat.h"
#include "aidlcommonsupport/NativeHandle.h"
#include "android/hardware_buffer.h"
#include "cutils/native_handle.h"
@@ -39,52 +37,33 @@
namespace virtualcamera {
using ::aidl::android::hardware::camera::device::Stream;
-using ::aidl::android::hardware::camera::device::StreamBuffer;
using ::aidl::android::hardware::common::NativeHandle;
-using ::aidl::android::hardware::graphics::common::PixelFormat;
namespace {
-sp<GraphicBuffer> createBlobGraphicBuffer(GraphicBufferMapper& mapper,
- buffer_handle_t bufferHandle) {
- uint64_t allocationSize;
- uint64_t usage;
- uint64_t layerCount;
- if (mapper.getAllocationSize(bufferHandle, &allocationSize) != NO_ERROR ||
- mapper.getUsage(bufferHandle, &usage) != NO_ERROR ||
- mapper.getLayerCount(bufferHandle, &layerCount) != NO_ERROR) {
- ALOGE("Error fetching metadata for the imported BLOB buffer handle.");
- return nullptr;
- }
-
- return sp<GraphicBuffer>::make(
- bufferHandle, GraphicBuffer::HandleWrapMethod::TAKE_HANDLE,
- allocationSize, /*height=*/1, static_cast<int>(ui::PixelFormat::BLOB),
- layerCount, usage, 0);
-}
-
-sp<GraphicBuffer> createYCbCr420GraphicBuffer(GraphicBufferMapper& mapper,
- buffer_handle_t bufferHandle) {
+sp<GraphicBuffer> createGraphicBuffer(GraphicBufferMapper& mapper,
+ const buffer_handle_t bufferHandle) {
uint64_t width;
uint64_t height;
uint64_t usage;
uint64_t layerCount;
+ ui::PixelFormat pixelFormat;
if (mapper.getWidth(bufferHandle, &width) != NO_ERROR ||
mapper.getHeight(bufferHandle, &height) != NO_ERROR ||
mapper.getUsage(bufferHandle, &usage) != NO_ERROR ||
- mapper.getLayerCount(bufferHandle, &layerCount) != NO_ERROR) {
+ mapper.getLayerCount(bufferHandle, &layerCount) != NO_ERROR ||
+ mapper.getPixelFormatRequested(bufferHandle, &pixelFormat) != NO_ERROR) {
ALOGE("Error fetching metadata for the imported YCbCr420 buffer handle.");
return nullptr;
}
return sp<GraphicBuffer>::make(
bufferHandle, GraphicBuffer::HandleWrapMethod::TAKE_HANDLE, width, height,
- static_cast<int>(ui::PixelFormat::YCBCR_420_888), /*layers=*/1, usage,
- width);
+ static_cast<int>(pixelFormat), layerCount, usage, width);
}
std::shared_ptr<AHardwareBuffer> importBufferInternal(
- const NativeHandle& aidlHandle, const Stream& streamConfig) {
+ const NativeHandle& aidlHandle) {
if (aidlHandle.fds.empty()) {
ALOGE("Empty handle - nothing to import");
return nullptr;
@@ -103,12 +82,9 @@
return nullptr;
}
- sp<GraphicBuffer> buf =
- streamConfig.format == PixelFormat::BLOB
- ? createBlobGraphicBuffer(mapper, bufferHandle)
- : createYCbCr420GraphicBuffer(mapper, bufferHandle);
+ sp<GraphicBuffer> buf = createGraphicBuffer(mapper, bufferHandle);
- if (buf->initCheck() != NO_ERROR) {
+ if (buf == nullptr || buf->initCheck() != NO_ERROR) {
ALOGE("Imported graphic buffer is not correcly initialized.");
return nullptr;
}
@@ -128,7 +104,7 @@
std::shared_ptr<AHardwareBuffer> VirtualCameraStream::importBuffer(
const ::aidl::android::hardware::camera::device::StreamBuffer& buffer) {
- auto hwBufferPtr = importBufferInternal(buffer.buffer, mStreamConfig);
+ auto hwBufferPtr = importBufferInternal(buffer.buffer);
if (hwBufferPtr != nullptr) {
std::lock_guard<std::mutex> lock(mLock);
mBuffers.emplace(std::piecewise_construct,
diff --git a/services/camera/virtualcamera/aidl/android/companion/virtualcamera/IVirtualCameraService.aidl b/services/camera/virtualcamera/aidl/android/companion/virtualcamera/IVirtualCameraService.aidl
index bb74f5c..1bd99be 100644
--- a/services/camera/virtualcamera/aidl/android/companion/virtualcamera/IVirtualCameraService.aidl
+++ b/services/camera/virtualcamera/aidl/android/companion/virtualcamera/IVirtualCameraService.aidl
@@ -28,7 +28,8 @@
* Registers a new camera with the virtual camera hal.
* @return true if the camera was successfully registered
*/
- boolean registerCamera(in IBinder token, in VirtualCameraConfiguration configuration);
+ boolean registerCamera(in IBinder token, in VirtualCameraConfiguration configuration,
+ int deviceId);
/**
* Unregisters the camera from the virtual camera hal. After this call the virtual camera won't
diff --git a/services/camera/virtualcamera/flags/Android.bp b/services/camera/virtualcamera/flags/Android.bp
index 5fa53d8..5fa8852 100644
--- a/services/camera/virtualcamera/flags/Android.bp
+++ b/services/camera/virtualcamera/flags/Android.bp
@@ -35,27 +35,3 @@
export_include_dirs: ["."],
defaults: ["virtual_device_build_flags_defaults"],
}
-
-soong_config_module_type {
- name: "virtual_device_build_flags_java_library",
- module_type: "java_library",
- config_namespace: "vdm",
- bool_variables: [
- "virtual_camera_service_enabled",
- ],
- properties: [
- "srcs",
- ],
-}
-
-virtual_device_build_flags_java_library {
- name: "virtual_device_build_flag_java",
- soong_config_variables: {
- virtual_camera_service_enabled: {
- srcs: ["java/enabled/**/*.java"],
- conditions_default: {
- srcs: ["java/disabled/**/*.java"],
- },
- },
- },
-}
diff --git a/services/camera/virtualcamera/flags/java/disabled/android/companion/virtualdevice/flags/VirtualCameraServiceBuildFlag.java b/services/camera/virtualcamera/flags/java/disabled/android/companion/virtualdevice/flags/VirtualCameraServiceBuildFlag.java
deleted file mode 100644
index 128d93c..0000000
--- a/services/camera/virtualcamera/flags/java/disabled/android/companion/virtualdevice/flags/VirtualCameraServiceBuildFlag.java
+++ /dev/null
@@ -1,25 +0,0 @@
-/*
- * Copyright 2024 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package android.companion.virtualdevice.flags;
-
-/** This file is included only if RELEASE_PACKAGE_VIRTUAL_CAMERA build flag isn't set.*/
-public class VirtualCameraServiceBuildFlag {
-
- public static boolean isVirtualCameraServiceBuildFlagEnabled() {
- return false;
- }
-}
diff --git a/services/camera/virtualcamera/flags/java/enabled/android/companion/virtualdevice/flags/VirtualCameraServiceBuildFlag.java b/services/camera/virtualcamera/flags/java/enabled/android/companion/virtualdevice/flags/VirtualCameraServiceBuildFlag.java
deleted file mode 100644
index 02816fb..0000000
--- a/services/camera/virtualcamera/flags/java/enabled/android/companion/virtualdevice/flags/VirtualCameraServiceBuildFlag.java
+++ /dev/null
@@ -1,25 +0,0 @@
-/*
- * Copyright 2024 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package android.companion.virtualdevice.flags;
-
-/** This file is included only if RELEASE_PACKAGE_VIRTUAL_CAMERA build flag is set.*/
-public class VirtualCameraServiceBuildFlag {
-
- public static boolean isVirtualCameraServiceBuildFlagEnabled() {
- return true;
- }
-}
diff --git a/services/camera/virtualcamera/tests/Android.bp b/services/camera/virtualcamera/tests/Android.bp
index c51b4a3..543cc10 100644
--- a/services/camera/virtualcamera/tests/Android.bp
+++ b/services/camera/virtualcamera/tests/Android.bp
@@ -17,6 +17,7 @@
],
srcs: [
"EglUtilTest.cc",
+ "JpegUtilTest.cc",
"VirtualCameraDeviceTest.cc",
"VirtualCameraProviderTest.cc",
"VirtualCameraRenderThreadTest.cc",
diff --git a/services/camera/virtualcamera/tests/JpegUtilTest.cc b/services/camera/virtualcamera/tests/JpegUtilTest.cc
new file mode 100644
index 0000000..e6481f0
--- /dev/null
+++ b/services/camera/virtualcamera/tests/JpegUtilTest.cc
@@ -0,0 +1,199 @@
+/*
+ * Copyright 2023 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <sys/types.h>
+
+#include "system/graphics.h"
+#define LOG_TAG "JpegUtilTest"
+
+#include <array>
+#include <cstdint>
+#include <cstring>
+
+#include "android/hardware_buffer.h"
+#include "gmock/gmock.h"
+#include "gtest/gtest.h"
+#include "jpeglib.h"
+#include "util/JpegUtil.h"
+#include "util/Util.h"
+#include "utils/Errors.h"
+
+namespace android {
+namespace companion {
+namespace virtualcamera {
+namespace {
+
+using testing::Eq;
+using testing::Gt;
+using testing::Optional;
+using testing::VariantWith;
+
+constexpr int kOutputBufferSize = 1024 * 1024; // 1 MiB.
+constexpr int kJpegQuality = 80;
+
+// Create black YUV420 buffer for testing purposes.
+std::shared_ptr<AHardwareBuffer> createHardwareBufferForTest(const int width,
+ const int height) {
+ const AHardwareBuffer_Desc desc{.width = static_cast<uint32_t>(width),
+ .height = static_cast<uint32_t>(height),
+ .layers = 1,
+ .format = AHARDWAREBUFFER_FORMAT_Y8Cb8Cr8_420,
+ .usage = AHARDWAREBUFFER_USAGE_CPU_WRITE_OFTEN,
+ .stride = 0,
+ .rfu0 = 0,
+ .rfu1 = 0};
+
+ AHardwareBuffer* hwBufferPtr;
+ int status = AHardwareBuffer_allocate(&desc, &hwBufferPtr);
+ if (status != NO_ERROR) {
+ ALOGE(
+ "%s: Failed to allocate hardware buffer for temporary framebuffer: %d",
+ __func__, status);
+ return nullptr;
+ }
+
+ std::shared_ptr<AHardwareBuffer> hwBuffer(hwBufferPtr,
+ AHardwareBuffer_release);
+
+ YCbCrLockGuard yCbCrLock(hwBuffer, AHARDWAREBUFFER_USAGE_CPU_WRITE_OFTEN);
+ const android_ycbcr& ycbr = (*yCbCrLock);
+
+ uint8_t* y = reinterpret_cast<uint8_t*>(ycbr.y);
+ for (int r = 0; r < height; r++) {
+ memset(y + r * ycbr.ystride, 0x00, width);
+ }
+
+ uint8_t* cb = reinterpret_cast<uint8_t*>(ycbr.cb);
+ uint8_t* cr = reinterpret_cast<uint8_t*>(ycbr.cr);
+ for (int r = 0; r < height / 2; r++) {
+ for (int c = 0; c < width / 2; c++) {
+ cb[r * ycbr.cstride + c * ycbr.chroma_step] = 0xff / 2;
+ cr[r * ycbr.cstride + c * ycbr.chroma_step] = 0xff / 2;
+ }
+ }
+
+ return hwBuffer;
+}
+
+// Decode JPEG header, return image resolution on success or error message on error.
+std::variant<std::string, Resolution> verifyHeaderAndGetResolution(
+ const uint8_t* data, int size) {
+ struct jpeg_decompress_struct ctx;
+ struct jpeg_error_mgr jerr;
+
+ struct DecompressionError {
+ bool success = true;
+ std::string error;
+ } result;
+
+ ctx.client_data = &result;
+
+ ctx.err = jpeg_std_error(&jerr);
+ ctx.err->error_exit = [](j_common_ptr cinfo) {
+ reinterpret_cast<DecompressionError*>(cinfo->client_data)->success = false;
+ };
+ ctx.err->output_message = [](j_common_ptr cinfo) {
+ char buffer[JMSG_LENGTH_MAX];
+ (*cinfo->err->format_message)(cinfo, buffer);
+ reinterpret_cast<DecompressionError*>(cinfo->client_data)->error = buffer;
+ ALOGE("libjpeg error: %s", buffer);
+ };
+
+ jpeg_create_decompress(&ctx);
+ jpeg_mem_src(&ctx, data, size);
+ jpeg_read_header(&ctx, /*require_image=*/true);
+
+ if (!result.success) {
+ jpeg_destroy_decompress(&ctx);
+ return result.error;
+ }
+
+ Resolution resolution(ctx.image_width, ctx.image_height);
+ jpeg_destroy_decompress(&ctx);
+ return resolution;
+}
+
+TEST(JpegUtil, roundToDctSize) {
+ EXPECT_THAT(roundTo2DctSize(Resolution(640, 480)), Eq(Resolution(640, 480)));
+ EXPECT_THAT(roundTo2DctSize(Resolution(5, 5)), Eq(Resolution(16, 16)));
+ EXPECT_THAT(roundTo2DctSize(Resolution(32, 32)), Eq(Resolution(32, 32)));
+ EXPECT_THAT(roundTo2DctSize(Resolution(33, 32)), Eq(Resolution(48, 32)));
+ EXPECT_THAT(roundTo2DctSize(Resolution(32, 33)), Eq(Resolution(32, 48)));
+}
+
+class JpegUtilTest : public ::testing::Test {
+ public:
+ void SetUp() override {
+ std::fill(mOutputBuffer.begin(), mOutputBuffer.end(), 0);
+ }
+
+ protected:
+ std::optional<size_t> compress(int imageWidth, int imageHeight,
+ std::shared_ptr<AHardwareBuffer> inBuffer) {
+ return compressJpeg(imageWidth, imageHeight, kJpegQuality, inBuffer,
+ /*app1ExifData=*/{}, mOutputBuffer.size(),
+ mOutputBuffer.data());
+ }
+
+ std::array<uint8_t, kOutputBufferSize> mOutputBuffer;
+};
+
+TEST_F(JpegUtilTest, compressImageSizeAlignedWithDctSucceeds) {
+ std::shared_ptr<AHardwareBuffer> inBuffer =
+ createHardwareBufferForTest(640, 480);
+
+ std::optional<size_t> compressedSize = compress(640, 480, inBuffer);
+
+ EXPECT_THAT(compressedSize, Optional(Gt(0)));
+ EXPECT_THAT(verifyHeaderAndGetResolution(mOutputBuffer.data(),
+ compressedSize.value()),
+ VariantWith<Resolution>(Resolution(640, 480)));
+}
+
+TEST_F(JpegUtilTest, compressImageSizeNotAlignedWithDctSucceeds) {
+ std::shared_ptr<AHardwareBuffer> inBuffer =
+ createHardwareBufferForTest(640, 480);
+
+ std::optional<size_t> compressedSize = compress(630, 470, inBuffer);
+
+ EXPECT_THAT(compressedSize, Optional(Gt(0)));
+ EXPECT_THAT(verifyHeaderAndGetResolution(mOutputBuffer.data(),
+ compressedSize.value()),
+ VariantWith<Resolution>(Resolution(630, 470)));
+}
+
+TEST_F(JpegUtilTest, compressImageWithBufferNotAlignedWithDctFails) {
+ std::shared_ptr<AHardwareBuffer> inBuffer =
+ createHardwareBufferForTest(641, 480);
+
+ std::optional<size_t> compressedSize = compress(640, 480, inBuffer);
+
+ EXPECT_THAT(compressedSize, Eq(std::nullopt));
+}
+
+TEST_F(JpegUtilTest, compressImageWithBufferTooSmallFails) {
+ std::shared_ptr<AHardwareBuffer> inBuffer =
+ createHardwareBufferForTest(634, 464);
+
+ std::optional<size_t> compressedSize = compress(640, 480, inBuffer);
+
+ EXPECT_THAT(compressedSize, Eq(std::nullopt));
+}
+
+} // namespace
+} // namespace virtualcamera
+} // namespace companion
+} // namespace android
diff --git a/services/camera/virtualcamera/tests/VirtualCameraDeviceTest.cc b/services/camera/virtualcamera/tests/VirtualCameraDeviceTest.cc
index ad9d83b..3fe7c11 100644
--- a/services/camera/virtualcamera/tests/VirtualCameraDeviceTest.cc
+++ b/services/camera/virtualcamera/tests/VirtualCameraDeviceTest.cc
@@ -64,6 +64,7 @@
constexpr int kHdWidth = 1280;
constexpr int kHdHeight = 720;
constexpr int kMaxFps = 30;
+constexpr int kDefaultDeviceId = 0;
const Stream kVgaYUV420Stream = Stream{
.streamType = StreamType::OUTPUT,
@@ -137,8 +138,8 @@
cameraCharacteristicsForInputFormat) {
const VirtualCameraConfigTestParam& param = GetParam();
std::shared_ptr<VirtualCameraDevice> camera =
- ndk::SharedRefBase::make<VirtualCameraDevice>(kCameraId,
- param.inputConfig);
+ ndk::SharedRefBase::make<VirtualCameraDevice>(
+ kCameraId, param.inputConfig, kDefaultDeviceId);
CameraMetadata metadata;
ASSERT_TRUE(camera->getCameraCharacteristics(&metadata).isOk());
@@ -293,15 +294,17 @@
public:
void SetUp() override {
mCamera = ndk::SharedRefBase::make<VirtualCameraDevice>(
- kCameraId, VirtualCameraConfiguration{
- .supportedStreamConfigs = {SupportedStreamConfiguration{
- .width = kVgaWidth,
- .height = kVgaHeight,
- .pixelFormat = Format::YUV_420_888,
- .maxFps = kMaxFps}},
- .virtualCameraCallback = nullptr,
- .sensorOrientation = SensorOrientation::ORIENTATION_0,
- .lensFacing = LensFacing::FRONT});
+ kCameraId,
+ VirtualCameraConfiguration{
+ .supportedStreamConfigs = {SupportedStreamConfiguration{
+ .width = kVgaWidth,
+ .height = kVgaHeight,
+ .pixelFormat = Format::YUV_420_888,
+ .maxFps = kMaxFps}},
+ .virtualCameraCallback = nullptr,
+ .sensorOrientation = SensorOrientation::ORIENTATION_0,
+ .lensFacing = LensFacing::FRONT},
+ kDefaultDeviceId);
}
protected:
@@ -360,6 +363,33 @@
ElementsAre(Resolution(0, 0), Resolution(240, 180)));
}
+TEST_F(VirtualCameraDeviceTest, dump) {
+ std::string expected = R"( virtual_camera 42 belongs to virtual device 0
+ SupportedStreamConfiguration:
+ SupportedStreamConfiguration{width: 640, height: 480, pixelFormat: YUV_420_888, maxFps: 30})";
+ int expectedSize = expected.size() * sizeof(char);
+ char buffer[expectedSize];
+
+  // Create an in-memory fd to capture the dump output
+ int fd = memfd_create("tmpFile", 0);
+ mCamera->dump(fd, {}, 0);
+
+ // Check that we wrote the expected size
+ int dumpSize = lseek(fd, 0, SEEK_END);
+
+ // Rewind and read from the fd
+ lseek(fd, 0, SEEK_SET);
+ read(fd, buffer, expectedSize);
+ close(fd);
+
+ // Check the content of the dump
+ std::string name = std::string(buffer, expectedSize);
+ ASSERT_EQ(expected, name);
+  // Check the size after the content so that the string mismatch is displayed
+  // when the test fails
+ ASSERT_EQ(expectedSize, dumpSize);
+}
+
} // namespace
} // namespace virtualcamera
} // namespace companion
diff --git a/services/camera/virtualcamera/tests/VirtualCameraProviderTest.cc b/services/camera/virtualcamera/tests/VirtualCameraProviderTest.cc
index ab647a4..f1b2a92 100644
--- a/services/camera/virtualcamera/tests/VirtualCameraProviderTest.cc
+++ b/services/camera/virtualcamera/tests/VirtualCameraProviderTest.cc
@@ -53,6 +53,8 @@
constexpr int kVgaWidth = 640;
constexpr int kVgaHeight = 480;
constexpr int kMaxFps = 30;
+constexpr int kCameraId = 9999;
+constexpr int kDefaultDeviceId = 0;
constexpr char kVirtualCameraNameRegex[] =
"device@[0-9]+\\.[0-9]+/virtual/[0-9]+";
@@ -118,7 +120,7 @@
ASSERT_TRUE(mCameraProvider->setCallback(mMockCameraProviderCallback).isOk());
std::shared_ptr<VirtualCameraDevice> camera =
- mCameraProvider->createCamera(mInputConfig);
+ mCameraProvider->createCamera(mInputConfig, kCameraId, kDefaultDeviceId);
EXPECT_THAT(camera, Not(IsNull()));
EXPECT_THAT(camera->getCameraName(), MatchesRegex(kVirtualCameraNameRegex));
@@ -136,7 +138,7 @@
.WillOnce(Return(ndk::ScopedAStatus::ok()));
std::shared_ptr<VirtualCameraDevice> camera =
- mCameraProvider->createCamera(mInputConfig);
+ mCameraProvider->createCamera(mInputConfig, kCameraId, kDefaultDeviceId);
ASSERT_TRUE(mCameraProvider->setCallback(mMockCameraProviderCallback).isOk());
// Created camera should be in the list of cameras.
@@ -148,7 +150,7 @@
TEST_F(VirtualCameraProviderTest, RemoveCamera) {
ASSERT_TRUE(mCameraProvider->setCallback(mMockCameraProviderCallback).isOk());
std::shared_ptr<VirtualCameraDevice> camera =
- mCameraProvider->createCamera(mInputConfig);
+ mCameraProvider->createCamera(mInputConfig, kCameraId, kDefaultDeviceId);
EXPECT_CALL(*mMockCameraProviderCallback,
cameraDeviceStatusChange(Eq(camera->getCameraName()),
@@ -165,7 +167,7 @@
TEST_F(VirtualCameraProviderTest, RemoveNonExistingCamera) {
ASSERT_TRUE(mCameraProvider->setCallback(mMockCameraProviderCallback).isOk());
std::shared_ptr<VirtualCameraDevice> camera =
- mCameraProvider->createCamera(mInputConfig);
+ mCameraProvider->createCamera(mInputConfig, kCameraId, kDefaultDeviceId);
// Removing non-existing camera should fail.
const std::string cameraName = "DefinitelyNoTCamera";
diff --git a/services/camera/virtualcamera/tests/VirtualCameraServiceTest.cc b/services/camera/virtualcamera/tests/VirtualCameraServiceTest.cc
index d4d00a2..5927b05 100644
--- a/services/camera/virtualcamera/tests/VirtualCameraServiceTest.cc
+++ b/services/camera/virtualcamera/tests/VirtualCameraServiceTest.cc
@@ -14,8 +14,11 @@
* limitations under the License.
*/
+#include <algorithm>
#include <cstdio>
+#include <iterator>
#include <memory>
+#include <regex>
#include "VirtualCameraService.h"
#include "aidl/android/companion/virtualcamera/BnVirtualCameraCallback.h"
@@ -29,6 +32,7 @@
#include "binder/Binder.h"
#include "gmock/gmock.h"
#include "gtest/gtest.h"
+#include "util/MetadataUtil.h"
#include "util/Permissions.h"
#include "utils/Errors.h"
@@ -44,15 +48,18 @@
using ::aidl::android::companion::virtualcamera::VirtualCameraConfiguration;
using ::aidl::android::hardware::camera::common::CameraDeviceStatus;
using ::aidl::android::hardware::camera::common::TorchModeStatus;
+using ::aidl::android::hardware::camera::device::CameraMetadata;
using ::aidl::android::hardware::camera::provider::BnCameraProviderCallback;
using ::aidl::android::hardware::graphics::common::PixelFormat;
using ::aidl::android::view::Surface;
using ::testing::_;
+using ::testing::ElementsAre;
using ::testing::Eq;
using ::testing::Ge;
using ::testing::IsEmpty;
using ::testing::IsNull;
using ::testing::Not;
+using ::testing::Optional;
using ::testing::Return;
using ::testing::SizeIs;
@@ -62,6 +69,7 @@
constexpr SensorOrientation kSensorOrientation =
SensorOrientation::ORIENTATION_0;
constexpr LensFacing kLensFacing = LensFacing::FRONT;
+constexpr int kDefaultDeviceId = 0;
constexpr char kCreateVirtualDevicePermissions[] =
"android.permission.CREATE_VIRTUAL_DEVICE";
@@ -110,6 +118,7 @@
mCameraProvider->setCallback(mMockCameraProviderCallback);
mCameraService = ndk::SharedRefBase::make<VirtualCameraService>(
mCameraProvider, mMockPermissionsProxy);
+ mCameraService->disableEglVerificationForTest();
ON_CALL(mMockPermissionsProxy, checkCallingPermission)
.WillByDefault(Return(true));
@@ -124,8 +133,8 @@
bool aidlRet;
ASSERT_TRUE(mCameraService
- ->registerCamera(mNdkOwnerToken,
- mVgaYUV420OnlyConfiguration, &aidlRet)
+ ->registerCamera(mNdkOwnerToken, mVgaYUV420OnlyConfiguration,
+ kDefaultDeviceId, &aidlRet)
.isOk());
ASSERT_TRUE(aidlRet);
}
@@ -134,12 +143,21 @@
close(mDevNullFd);
}
- void execute_shell_command(const std::string cmd) {
- std::array<const char*, 1> args{cmd.data()};
- ASSERT_THAT(
- mCameraService->handleShellCommand(mDevNullFd, mDevNullFd, mDevNullFd,
- args.data(), args.size()),
- Eq(NO_ERROR));
+ binder_status_t execute_shell_command(const std::string& cmd) {
+ const static std::regex whitespaceRegex("\\s+");
+ std::vector<std::string> tokens;
+ std::copy_if(
+ std::sregex_token_iterator(cmd.begin(), cmd.end(), whitespaceRegex, -1),
+ std::sregex_token_iterator(), std::back_inserter(tokens),
+ [](const std::string& token) { return !token.empty(); });
+
+ std::vector<const char*> argv;
+ argv.reserve(tokens.size());
+ std::transform(tokens.begin(), tokens.end(), std::back_inserter(argv),
+ [](const std::string& str) { return str.c_str(); });
+
+ return mCameraService->handleShellCommand(
+ mDevNullFd, mDevNullFd, mDevNullFd, argv.data(), argv.size());
}
std::vector<std::string> getCameraIds() {
@@ -148,6 +166,17 @@
return cameraIds;
}
+ std::optional<camera_metadata_enum_android_lens_facing> getCameraLensFacing(
+ const std::string& id) {
+ std::shared_ptr<VirtualCameraDevice> camera = mCameraProvider->getCamera(id);
+ if (camera == nullptr) {
+ return std::nullopt;
+ }
+ CameraMetadata metadata;
+ camera->getCameraCharacteristics(&metadata);
+ return getLensFacing(metadata);
+ }
+
protected:
std::shared_ptr<VirtualCameraService> mCameraService;
std::shared_ptr<VirtualCameraProvider> mCameraProvider;
@@ -169,10 +198,10 @@
ndk::SpAIBinder ndkToken(AIBinder_fromPlatformBinder(token));
bool aidlRet;
- ASSERT_TRUE(
- mCameraService
- ->registerCamera(ndkToken, mVgaYUV420OnlyConfiguration, &aidlRet)
- .isOk());
+ ASSERT_TRUE(mCameraService
+ ->registerCamera(ndkToken, mVgaYUV420OnlyConfiguration,
+ kDefaultDeviceId, &aidlRet)
+ .isOk());
EXPECT_TRUE(aidlRet);
EXPECT_THAT(getCameraIds(), SizeIs(1));
@@ -186,7 +215,9 @@
VirtualCameraConfiguration config =
createConfiguration(kVgaWidth, kVgaHeight, Format::RGBA_8888, kMaxFps);
- ASSERT_TRUE(mCameraService->registerCamera(ndkToken, config, &aidlRet).isOk());
+ ASSERT_TRUE(mCameraService
+ ->registerCamera(ndkToken, config, kDefaultDeviceId, &aidlRet)
+ .isOk());
EXPECT_TRUE(aidlRet);
EXPECT_THAT(getCameraIds(), SizeIs(1));
@@ -198,7 +229,7 @@
ASSERT_TRUE(mCameraService
->registerCamera(mNdkOwnerToken, mVgaYUV420OnlyConfiguration,
- &aidlRet)
+ kDefaultDeviceId, &aidlRet)
.isOk());
EXPECT_FALSE(aidlRet);
EXPECT_THAT(getCameraIds(), SizeIs(1));
@@ -209,7 +240,8 @@
ASSERT_FALSE(mCameraService
->registerCamera(mNdkOwnerToken,
- kEmptyVirtualCameraConfiguration, &aidlRet)
+ kEmptyVirtualCameraConfiguration,
+ kDefaultDeviceId, &aidlRet)
.isOk());
EXPECT_FALSE(aidlRet);
EXPECT_THAT(getCameraIds(), IsEmpty());
@@ -222,7 +254,9 @@
createConfiguration(kVgaWidth, kVgaHeight, Format::UNKNOWN, kMaxFps);
ASSERT_FALSE(
- mCameraService->registerCamera(mNdkOwnerToken, config, &aidlRet).isOk());
+ mCameraService
+ ->registerCamera(mNdkOwnerToken, config, kDefaultDeviceId, &aidlRet)
+ .isOk());
EXPECT_FALSE(aidlRet);
EXPECT_THAT(getCameraIds(), IsEmpty());
}
@@ -233,18 +267,9 @@
createConfiguration(1000000, 1000000, Format::YUV_420_888, kMaxFps);
ASSERT_FALSE(
- mCameraService->registerCamera(mNdkOwnerToken, config, &aidlRet).isOk());
- EXPECT_FALSE(aidlRet);
- EXPECT_THAT(getCameraIds(), IsEmpty());
-}
-
-TEST_F(VirtualCameraServiceTest, ConfigurationWithUnalignedResolutionFails) {
- bool aidlRet;
- VirtualCameraConfiguration config =
- createConfiguration(641, 481, Format::YUV_420_888, kMaxFps);
-
- ASSERT_FALSE(
- mCameraService->registerCamera(mNdkOwnerToken, config, &aidlRet).isOk());
+ mCameraService
+ ->registerCamera(mNdkOwnerToken, config, kDefaultDeviceId, &aidlRet)
+ .isOk());
EXPECT_FALSE(aidlRet);
EXPECT_THAT(getCameraIds(), IsEmpty());
}
@@ -255,7 +280,9 @@
createConfiguration(-1, kVgaHeight, Format::YUV_420_888, kMaxFps);
ASSERT_FALSE(
- mCameraService->registerCamera(mNdkOwnerToken, config, &aidlRet).isOk());
+ mCameraService
+ ->registerCamera(mNdkOwnerToken, config, kDefaultDeviceId, &aidlRet)
+ .isOk());
EXPECT_FALSE(aidlRet);
EXPECT_THAT(getCameraIds(), IsEmpty());
}
@@ -266,7 +293,9 @@
createConfiguration(kVgaWidth, kVgaHeight, Format::YUV_420_888, 0);
ASSERT_FALSE(
- mCameraService->registerCamera(mNdkOwnerToken, config, &aidlRet).isOk());
+ mCameraService
+ ->registerCamera(mNdkOwnerToken, config, kDefaultDeviceId, &aidlRet)
+ .isOk());
EXPECT_FALSE(aidlRet);
EXPECT_THAT(getCameraIds(), IsEmpty());
}
@@ -277,7 +306,9 @@
createConfiguration(kVgaWidth, kVgaHeight, Format::YUV_420_888, 90);
ASSERT_FALSE(
- mCameraService->registerCamera(mNdkOwnerToken, config, &aidlRet).isOk());
+ mCameraService
+ ->registerCamera(mNdkOwnerToken, config, kDefaultDeviceId, &aidlRet)
+ .isOk());
EXPECT_FALSE(aidlRet);
EXPECT_THAT(getCameraIds(), IsEmpty());
}
@@ -311,7 +342,7 @@
EXPECT_THAT(mCameraService
->registerCamera(mNdkOwnerToken, mVgaYUV420OnlyConfiguration,
- &aidlRet)
+ kDefaultDeviceId, &aidlRet)
.getExceptionCode(),
Eq(EX_SECURITY));
}
@@ -370,17 +401,61 @@
}
TEST_F(VirtualCameraServiceTest, TestCameraShellCmd) {
- execute_shell_command("enable_test_camera");
+ EXPECT_THAT(execute_shell_command("enable_test_camera"), Eq(NO_ERROR));
std::vector<std::string> cameraIdsAfterEnable = getCameraIds();
EXPECT_THAT(cameraIdsAfterEnable, SizeIs(1));
- execute_shell_command("disable_test_camera");
+ EXPECT_THAT(execute_shell_command("disable_test_camera"), Eq(NO_ERROR));
std::vector<std::string> cameraIdsAfterDisable = getCameraIds();
EXPECT_THAT(cameraIdsAfterDisable, IsEmpty());
}
+TEST_F(VirtualCameraServiceTest, TestCameraShellCmdWithId) {
+ EXPECT_THAT(execute_shell_command("enable_test_camera --camera_id=12345"),
+ Eq(NO_ERROR));
+
+ std::vector<std::string> cameraIdsAfterEnable = getCameraIds();
+ EXPECT_THAT(cameraIdsAfterEnable, ElementsAre("device@1.1/virtual/12345"));
+
+ EXPECT_THAT(execute_shell_command("disable_test_camera"), Eq(NO_ERROR));
+
+ std::vector<std::string> cameraIdsAfterDisable = getCameraIds();
+ EXPECT_THAT(cameraIdsAfterDisable, IsEmpty());
+}
+
+TEST_F(VirtualCameraServiceTest, TestCameraShellCmdWithInvalidId) {
+ EXPECT_THAT(
+ execute_shell_command("enable_test_camera --camera_id=NotNumericalId"),
+ Eq(STATUS_BAD_VALUE));
+}
+
+TEST_F(VirtualCameraServiceTest, TestCameraShellCmdWithUnknownCommand) {
+ EXPECT_THAT(execute_shell_command("brew_coffee --flavor=vanilla"),
+ Eq(STATUS_BAD_VALUE));
+}
+
+TEST_F(VirtualCameraServiceTest, TestCameraShellCmdWithMalformedOption) {
+ EXPECT_THAT(execute_shell_command("enable_test_camera **camera_id=12345"),
+ Eq(STATUS_BAD_VALUE));
+}
+
+TEST_F(VirtualCameraServiceTest, TestCameraShellCmdWithLensFacing) {
+ EXPECT_THAT(execute_shell_command("enable_test_camera --lens_facing=front"),
+ Eq(NO_ERROR));
+
+ std::vector<std::string> cameraIds = getCameraIds();
+ ASSERT_THAT(cameraIds, SizeIs(1));
+ EXPECT_THAT(getCameraLensFacing(cameraIds[0]),
+ Optional(Eq(ANDROID_LENS_FACING_FRONT)));
+}
+
+TEST_F(VirtualCameraServiceTest, TestCameraShellCmdWithInvalidLensFacing) {
+ EXPECT_THAT(execute_shell_command("enable_test_camera --lens_facing=west"),
+ Eq(STATUS_BAD_VALUE));
+}
+
} // namespace
} // namespace virtualcamera
} // namespace companion
diff --git a/services/camera/virtualcamera/tests/VirtualCameraSessionTest.cc b/services/camera/virtualcamera/tests/VirtualCameraSessionTest.cc
index 5f313a0..671e031 100644
--- a/services/camera/virtualcamera/tests/VirtualCameraSessionTest.cc
+++ b/services/camera/virtualcamera/tests/VirtualCameraSessionTest.cc
@@ -47,6 +47,7 @@
constexpr int kStreamId = 0;
constexpr int kSecondStreamId = 1;
constexpr int kCameraId = 42;
+constexpr int kDefaultDeviceId = 0;
using ::aidl::android::companion::virtualcamera::BnVirtualCameraCallback;
using ::aidl::android::companion::virtualcamera::Format;
@@ -160,7 +161,8 @@
.maxFps = kMaxFps}},
.virtualCameraCallback = mMockVirtualCameraClientCallback,
.sensorOrientation = SensorOrientation::ORIENTATION_0,
- .lensFacing = LensFacing::FRONT});
+ .lensFacing = LensFacing::FRONT},
+ kDefaultDeviceId);
mVirtualCameraSession = ndk::SharedRefBase::make<VirtualCameraSession>(
mVirtualCameraDevice, mMockCameraDeviceCallback,
mMockVirtualCameraClientCallback);
@@ -305,11 +307,13 @@
std::shared_ptr<VirtualCameraSession> createSession(
const std::vector<SupportedStreamConfiguration>& supportedInputConfigs) {
mVirtualCameraDevice = ndk::SharedRefBase::make<VirtualCameraDevice>(
- kCameraId, VirtualCameraConfiguration{
- .supportedStreamConfigs = supportedInputConfigs,
- .virtualCameraCallback = mMockVirtualCameraClientCallback,
- .sensorOrientation = SensorOrientation::ORIENTATION_0,
- .lensFacing = LensFacing::FRONT});
+ kCameraId,
+ VirtualCameraConfiguration{
+ .supportedStreamConfigs = supportedInputConfigs,
+ .virtualCameraCallback = mMockVirtualCameraClientCallback,
+ .sensorOrientation = SensorOrientation::ORIENTATION_0,
+ .lensFacing = LensFacing::FRONT},
+ kDefaultDeviceId);
return ndk::SharedRefBase::make<VirtualCameraSession>(
mVirtualCameraDevice, mMockCameraDeviceCallback,
mMockVirtualCameraClientCallback);
diff --git a/services/camera/virtualcamera/util/EglUtil.cc b/services/camera/virtualcamera/util/EglUtil.cc
index 481d8f0..1c685f1 100644
--- a/services/camera/virtualcamera/util/EglUtil.cc
+++ b/services/camera/virtualcamera/util/EglUtil.cc
@@ -20,6 +20,7 @@
#include <cstring>
+#include "EglDisplayContext.h"
#include "GLES/gl.h"
#include "log/log.h"
@@ -27,6 +28,9 @@
namespace companion {
namespace virtualcamera {
+// The maximum supported texture size is guaranteed to be at least 2048x2048.
+constexpr int kDefaultMaxTextureSize = 2048;
+
bool checkEglError(const char* operation) {
GLenum err = glGetError();
if (err == GL_NO_ERROR) {
@@ -45,6 +49,20 @@
return strstr(extensions, extension) != nullptr;
}
+int getMaximumTextureSize() {
+ static const int kMaxTextureSize = [] {
+ EglDisplayContext displayContext;
+ displayContext.makeCurrent();
+ int maxTextureSize = -1;
+ glGetIntegerv(GL_MAX_TEXTURE_SIZE, &maxTextureSize);
+ return maxTextureSize;
+ }();
+ if (kMaxTextureSize <= 0) {
+ return kDefaultMaxTextureSize;
+ }
+ return kMaxTextureSize;
+}
+
} // namespace virtualcamera
} // namespace companion
} // namespace android
diff --git a/services/camera/virtualcamera/util/EglUtil.h b/services/camera/virtualcamera/util/EglUtil.h
index 71640e3..f339b4e 100644
--- a/services/camera/virtualcamera/util/EglUtil.h
+++ b/services/camera/virtualcamera/util/EglUtil.h
@@ -27,6 +27,8 @@
// Returns true if the GL extension is supported, false otherwise.
bool isGlExtensionSupported(const char* extension);
+int getMaximumTextureSize();
+
} // namespace virtualcamera
} // namespace companion
} // namespace android
diff --git a/services/camera/virtualcamera/util/JpegUtil.cc b/services/camera/virtualcamera/util/JpegUtil.cc
index 8569eff..b034584 100644
--- a/services/camera/virtualcamera/util/JpegUtil.cc
+++ b/services/camera/virtualcamera/util/JpegUtil.cc
@@ -14,19 +14,20 @@
* limitations under the License.
*/
// #define LOG_NDEBUG 0
+#include "system/graphics.h"
#define LOG_TAG "JpegUtil"
-#include "JpegUtil.h"
-
#include <cstddef>
#include <cstdint>
#include <optional>
#include <vector>
+#include "JpegUtil.h"
#include "android/hardware_buffer.h"
#include "jpeglib.h"
#include "log/log.h"
#include "ui/GraphicBuffer.h"
#include "ui/GraphicBufferMapper.h"
+#include "util/Util.h"
#include "utils/Errors.h"
namespace android {
@@ -34,6 +35,8 @@
namespace virtualcamera {
namespace {
+constexpr int k2DCTSIZE = 2 * DCTSIZE;
+
class LibJpegContext {
public:
LibJpegContext(int width, int height, int quality, const size_t outBufferSize,
@@ -98,23 +101,55 @@
return *this;
}
- std::optional<size_t> compress(const android_ycbcr& ycbr) {
- // TODO(b/301023410) - Add support for compressing image sizes not aligned
- // with DCT size.
- if (mWidth % (2 * DCTSIZE) || (mHeight % (2 * DCTSIZE))) {
+ std::optional<size_t> compress(std::shared_ptr<AHardwareBuffer> inBuffer) {
+ GraphicBuffer* gBuffer = GraphicBuffer::fromAHardwareBuffer(inBuffer.get());
+
+ if (gBuffer == nullptr) {
+ ALOGE("%s: Input graphic buffer is nullptr", __func__);
+ return std::nullopt;
+ }
+
+ if (gBuffer->getPixelFormat() != HAL_PIXEL_FORMAT_YCbCr_420_888) {
+ // This should never happen since we're allocating the temporary buffer
+ // with YUV420 layout above.
+ ALOGE("%s: Cannot compress non-YUV buffer (pixelFormat %d)", __func__,
+ gBuffer->getPixelFormat());
+ return std::nullopt;
+ }
+
+ YCbCrLockGuard yCbCrLock(inBuffer, AHARDWAREBUFFER_USAGE_CPU_READ_OFTEN);
+ if (yCbCrLock.getStatus() != OK) {
+ ALOGE("%s: Failed to lock the input buffer: %s", __func__,
+ statusToString(yCbCrLock.getStatus()).c_str());
+ return std::nullopt;
+ }
+ const android_ycbcr& ycbr = *yCbCrLock;
+
+ const int inBufferWidth = gBuffer->getWidth();
+ const int inBufferHeight = gBuffer->getHeight();
+
+ if (inBufferWidth % k2DCTSIZE || (inBufferHeight % k2DCTSIZE)) {
ALOGE(
- "%s: Compressing YUV420 image with size %dx%d not aligned with 2 * "
+ "%s: Compressing YUV420 buffer with size %dx%d not aligned with 2 * "
"DCTSIZE (%d) is not currently supported.",
- __func__, mWidth, mHeight, 2 * DCTSIZE);
+        __func__, inBufferWidth, inBufferHeight, k2DCTSIZE);
+ return std::nullopt;
+ }
+
+ if (inBufferWidth < mWidth || inBufferHeight < mHeight) {
+ ALOGE(
+ "%s: Input buffer has smaller size (%dx%d) than image to be "
+ "compressed (%dx%d)",
+ __func__, inBufferWidth, inBufferHeight, mWidth, mHeight);
return std::nullopt;
}
// Chroma planes have 1/2 resolution of the original image.
- const int cHeight = mHeight / 2;
- const int cWidth = mWidth / 2;
+ const int cHeight = inBufferHeight / 2;
+ const int cWidth = inBufferWidth / 2;
// Prepare arrays of pointers to scanlines of each plane.
- std::vector<JSAMPROW> yLines(mHeight);
+ std::vector<JSAMPROW> yLines(inBufferHeight);
std::vector<JSAMPROW> cbLines(cHeight);
std::vector<JSAMPROW> crLines(cHeight);
@@ -142,12 +177,12 @@
}
// Collect pointers to individual scanline of each plane.
- for (int i = 0; i < mHeight; ++i) {
+ for (int i = 0; i < inBufferHeight; ++i) {
yLines[i] = y + i * ycbr.ystride;
}
for (int i = 0; i < cHeight; ++i) {
- cbLines[i] = cb_plane.data() + i * (mWidth / 2);
- crLines[i] = cr_plane.data() + i * (mWidth / 2);
+ cbLines[i] = cb_plane.data() + i * cWidth;
+ crLines[i] = cr_plane.data() + i * cWidth;
}
return compress(yLines, cbLines, crLines);
@@ -254,17 +289,28 @@
boolean mSuccess = true;
};
+int roundTo2DCTMultiple(const int n) {
+ const int mod = n % k2DCTSIZE;
+ return mod == 0 ? n : n + (k2DCTSIZE - mod);
+}
+
} // namespace
std::optional<size_t> compressJpeg(const int width, const int height,
- const int quality, const android_ycbcr& ycbcr,
+ const int quality,
+ std::shared_ptr<AHardwareBuffer> inBuffer,
const std::vector<uint8_t>& app1ExifData,
size_t outBufferSize, void* outBuffer) {
LibJpegContext context(width, height, quality, outBufferSize, outBuffer);
if (!app1ExifData.empty()) {
context.setApp1Data(app1ExifData.data(), app1ExifData.size());
}
- return context.compress(ycbcr);
+ return context.compress(inBuffer);
+}
+
+Resolution roundTo2DctSize(const Resolution resolution) {
+ return Resolution(roundTo2DCTMultiple(resolution.width),
+ roundTo2DCTMultiple(resolution.height));
}
} // namespace virtualcamera
diff --git a/services/camera/virtualcamera/util/JpegUtil.h b/services/camera/virtualcamera/util/JpegUtil.h
index 83ed74b..184dd56 100644
--- a/services/camera/virtualcamera/util/JpegUtil.h
+++ b/services/camera/virtualcamera/util/JpegUtil.h
@@ -19,17 +19,20 @@
#include <optional>
-#include "system/graphics.h"
+#include "android/hardware_buffer.h"
+#include "util/Util.h"
namespace android {
namespace companion {
namespace virtualcamera {
// Jpeg-compress image into the output buffer.
-// * width - width of the image
-// * heigh - height of the image
+// * width - width of the image; can be less than the width of inBuffer.
+// * height - height of the image; can be less than the height of inBuffer.
// * quality - 0-100, higher number corresponds to higher quality.
-// * ycbr - android_ycbr structure describing layout of input YUV420 image.
+// * inBuffer - input buffer; its dimensions must be aligned to 2*DCTSIZE (16)
+// so that it contains the padding needed when the image width and height are
+// not themselves aligned to 2*DCTSIZE.
// * app1ExifData - vector containing data to be included in APP1
// segment. Can be empty.
// * outBufferSize - capacity of the output buffer.
@@ -37,10 +40,14 @@
// Returns size of compressed data if the compression was successful,
// empty optional otherwise.
std::optional<size_t> compressJpeg(int width, int height, int quality,
- const android_ycbcr& ycbcr,
+ std::shared_ptr<AHardwareBuffer> inBuffer,
const std::vector<uint8_t>& app1ExifData,
size_t outBufferSize, void* outBuffer);
+// Round the resolution up to the nearest resolution whose width and height
+// are divisible by 2*DCTSIZE (16).
+Resolution roundTo2DctSize(Resolution resolution);
+
} // namespace virtualcamera
} // namespace companion
} // namespace android
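Illustrative only (not part of the patch): with the new signature, callers allocate the temporary YUV420 input buffer at a padded size and pass the real image size to compressJpeg(). roundTo2DctSize() rounds each dimension up to the next multiple of 2 * DCTSIZE = 16, so 630x470 becomes 640x480. A minimal sketch, assuming a hypothetical allocateYuv420Buffer() helper:

  Resolution imageSize(630, 470);
  Resolution bufferSize = roundTo2DctSize(imageSize);  // 640x480

  // Hypothetical helper allocating an AHARDWAREBUFFER_FORMAT_Y8Cb8Cr8_420 buffer.
  std::shared_ptr<AHardwareBuffer> inBuffer =
      allocateYuv420Buffer(bufferSize.width, bufferSize.height);

  std::vector<uint8_t> out(1024 * 1024);
  std::optional<size_t> jpegSize =
      compressJpeg(imageSize.width, imageSize.height, /*quality=*/80, inBuffer,
                   /*app1ExifData=*/{}, out.size(), out.data());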
diff --git a/services/camera/virtualcamera/util/MetadataUtil.cc b/services/camera/virtualcamera/util/MetadataUtil.cc
index e3d9e28..31a8776 100644
--- a/services/camera/virtualcamera/util/MetadataUtil.cc
+++ b/services/camera/virtualcamera/util/MetadataUtil.cc
@@ -23,6 +23,8 @@
#include <cstdint>
#include <iterator>
#include <memory>
+#include <optional>
+#include <string>
#include <utility>
#include <variant>
#include <vector>
@@ -59,12 +61,17 @@
} // namespace
MetadataBuilder& MetadataBuilder::setSupportedHardwareLevel(
- camera_metadata_enum_android_info_supported_hardware_level_t hwLevel) {
+ const camera_metadata_enum_android_info_supported_hardware_level_t hwLevel) {
mEntryMap[ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL] =
asVectorOf<uint8_t>(hwLevel);
return *this;
}
+MetadataBuilder& MetadataBuilder::setDeviceId(int32_t deviceId) {
+ mEntryMap[ANDROID_INFO_DEVICE_ID] = std::vector<int32_t>({deviceId});
+ return *this;
+}
+
MetadataBuilder& MetadataBuilder::setFlashAvailable(bool flashAvailable) {
const uint8_t metadataVal = flashAvailable
? ANDROID_FLASH_INFO_AVAILABLE_TRUE
@@ -86,7 +93,7 @@
}
MetadataBuilder& MetadataBuilder::setLensFacing(
- camera_metadata_enum_android_lens_facing lensFacing) {
+ const camera_metadata_enum_android_lens_facing lensFacing) {
mEntryMap[ANDROID_LENS_FACING] = asVectorOf<uint8_t>(lensFacing);
return *this;
}
@@ -145,6 +152,15 @@
return *this;
}
+MetadataBuilder& MetadataBuilder::setAvailableStreamUseCases(
+ const std::vector<
+ camera_metadata_enum_android_scaler_available_stream_use_cases>&
+ availableUseCases) {
+ mEntryMap[ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES] =
+ convertTo<int64_t>(availableUseCases);
+ return *this;
+}
+
MetadataBuilder& MetadataBuilder::setFaceDetectMode(
const camera_metadata_enum_android_statistics_face_detect_mode_t
faceDetectMode) {
@@ -175,6 +191,12 @@
return *this;
}
+MetadataBuilder& MetadataBuilder::setControlSceneMode(
+ const camera_metadata_enum_android_control_scene_mode sceneMode) {
+ mEntryMap[ANDROID_CONTROL_SCENE_MODE] = asVectorOf<uint8_t>(sceneMode);
+ return *this;
+}
+
MetadataBuilder& MetadataBuilder::setControlAvailableEffects(
const std::vector<camera_metadata_enum_android_control_effect_mode>&
availableEffects) {
@@ -198,6 +220,14 @@
return *this;
}
+MetadataBuilder& MetadataBuilder::setControlVideoStabilizationMode(
+ const camera_metadata_enum_android_control_video_stabilization_mode
+ stabilizationMode) {
+ mEntryMap[ANDROID_CONTROL_VIDEO_STABILIZATION_MODE] =
+ asVectorOf<uint8_t>(stabilizationMode);
+ return *this;
+}
+
MetadataBuilder& MetadataBuilder::setControlAfAvailableModes(
const std::vector<camera_metadata_enum_android_control_af_mode_t>&
availableModes) {
@@ -212,6 +242,12 @@
return *this;
}
+MetadataBuilder& MetadataBuilder::setControlAfState(
+ const camera_metadata_enum_android_control_af_state afState) {
+ mEntryMap[ANDROID_CONTROL_AF_STATE] = asVectorOf<uint8_t>(afState);
+ return *this;
+}
+
// See ANDROID_CONTROL_AF_TRIGGER_MODE in CameraMetadataTag.aidl.
MetadataBuilder& MetadataBuilder::setControlAfTrigger(
const camera_metadata_enum_android_control_af_trigger_t trigger) {
@@ -232,14 +268,14 @@
}
MetadataBuilder& MetadataBuilder::setControlAeTargetFpsRange(
- const int32_t minFps, const int32_t maxFps) {
+ const FpsRange fpsRange) {
mEntryMap[ANDROID_CONTROL_AE_TARGET_FPS_RANGE] =
- std::vector<int32_t>({minFps, maxFps});
+ std::vector<int32_t>({fpsRange.minFps, fpsRange.maxFps});
return *this;
}
MetadataBuilder& MetadataBuilder::setControlAeMode(
- camera_metadata_enum_android_control_ae_mode_t mode) {
+ const camera_metadata_enum_android_control_ae_mode_t mode) {
mEntryMap[ANDROID_CONTROL_AE_MODE] = asVectorOf<uint8_t>(mode);
return *this;
}
@@ -277,6 +313,12 @@
return *this;
}
+MetadataBuilder& MetadataBuilder::setControlAwbState(
+ const camera_metadata_enum_android_control_awb_state awbState) {
+ mEntryMap[ANDROID_CONTROL_AWB_STATE] = asVectorOf<uint8_t>(awbState);
+ return *this;
+}
+
MetadataBuilder& MetadataBuilder::setControlAwbLockAvailable(
const bool awbLockAvailable) {
const uint8_t lockAvailable = awbLockAvailable
@@ -287,6 +329,12 @@
return *this;
}
+MetadataBuilder& MetadataBuilder::setControlAwbLock(
+ const camera_metadata_enum_android_control_awb_lock awbLock) {
+ mEntryMap[ANDROID_CONTROL_AWB_LOCK] = asVectorOf<uint8_t>(awbLock);
+ return *this;
+}
+
MetadataBuilder& MetadataBuilder::setControlAeAvailableAntibandingModes(
const std::vector<camera_metadata_enum_android_control_ae_antibanding_mode_t>&
antibandingModes) {
@@ -313,6 +361,12 @@
return *this;
}
+MetadataBuilder& MetadataBuilder::setControlAeLock(
+ const camera_metadata_enum_android_control_ae_lock aeLock) {
+ mEntryMap[ANDROID_CONTROL_AE_LOCK] = asVectorOf<uint8_t>(aeLock);
+ return *this;
+}
+
MetadataBuilder& MetadataBuilder::setControlAeRegions(
const std::vector<ControlRegion>& aeRegions) {
std::vector<int32_t> regions;
@@ -396,6 +450,29 @@
return *this;
}
+MetadataBuilder& MetadataBuilder::setJpegGpsCoordinates(
+ const GpsCoordinates& gpsCoordinates) {
+ mEntryMap[ANDROID_JPEG_GPS_COORDINATES] =
+ std::vector<double>({gpsCoordinates.latitude, gpsCoordinates.longitude,
+ gpsCoordinates.altitude});
+
+ if (!gpsCoordinates.provider.empty()) {
+ mEntryMap[ANDROID_JPEG_GPS_PROCESSING_METHOD] = std::vector<uint8_t>{
+ gpsCoordinates.provider.begin(), gpsCoordinates.provider.end()};
+ }
+
+ if (gpsCoordinates.timestamp.has_value()) {
+ mEntryMap[ANDROID_JPEG_GPS_TIMESTAMP] =
+ asVectorOf<int64_t>(gpsCoordinates.timestamp.value());
+ }
+ return *this;
+}
+
+MetadataBuilder& MetadataBuilder::setJpegOrientation(const int32_t orientation) {
+ mEntryMap[ANDROID_JPEG_ORIENTATION] = asVectorOf<int32_t>(orientation);
+ return *this;
+}
+
MetadataBuilder& MetadataBuilder::setJpegQuality(const uint8_t quality) {
mEntryMap[ANDROID_JPEG_QUALITY] = asVectorOf<uint8_t>(quality);
return *this;
@@ -421,7 +498,7 @@
}
MetadataBuilder& MetadataBuilder::setSyncMaxLatency(
- camera_metadata_enum_android_sync_max_latency latency) {
+ const camera_metadata_enum_android_sync_max_latency latency) {
mEntryMap[ANDROID_SYNC_MAX_LATENCY] = asVectorOf<int32_t>(latency);
return *this;
}
@@ -506,7 +583,7 @@
}
MetadataBuilder& MetadataBuilder::setNoiseReductionMode(
- camera_metadata_enum_android_noise_reduction_mode noiseReductionMode) {
+ const camera_metadata_enum_android_noise_reduction_mode noiseReductionMode) {
mEntryMap[ANDROID_NOISE_REDUCTION_MODE] =
asVectorOf<uint8_t>(noiseReductionMode);
return *this;
@@ -585,6 +662,43 @@
return *this;
}
+MetadataBuilder& MetadataBuilder::setControlAeState(
+ const camera_metadata_enum_android_control_ae_state aeState) {
+ mEntryMap[ANDROID_CONTROL_AE_STATE] = asVectorOf<uint8_t>(aeState);
+ return *this;
+}
+
+MetadataBuilder& MetadataBuilder::setStatisticsSceneFlicker(
+ const camera_metadata_enum_android_statistics_scene_flicker sceneFlicker) {
+ mEntryMap[ANDROID_STATISTICS_SCENE_FLICKER] =
+ asVectorOf<uint8_t>(sceneFlicker);
+ return *this;
+}
+
+MetadataBuilder& MetadataBuilder::setStatisticsHotPixelMapMode(
+ const camera_metadata_enum_android_statistics_hot_pixel_map_mode
+ hotPixelMapMode) {
+ mEntryMap[ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE] =
+ asVectorOf<uint8_t>(hotPixelMapMode);
+ return *this;
+}
+
+MetadataBuilder& MetadataBuilder::setStatisticsLensShadingMapMode(
+ const camera_metadata_enum_android_statistics_lens_shading_map_mode
+ lensShadingMapMode) {
+ mEntryMap[ANDROID_STATISTICS_LENS_SHADING_MAP_MODE] =
+ asVectorOf<uint8_t>(lensShadingMapMode);
+ return *this;
+}
+
+MetadataBuilder& MetadataBuilder::setLensOpticalStabilizationMode(
+ const camera_metadata_enum_android_lens_optical_stabilization_mode_t
+ opticalStabilizationMode) {
+ mEntryMap[ANDROID_LENS_OPTICAL_STABILIZATION_MODE] =
+ asVectorOf<uint8_t>(opticalStabilizationMode);
+ return *this;
+}
+
MetadataBuilder& MetadataBuilder::setAvailableRequestKeys(
const std::vector<int32_t>& keys) {
mEntryMap[ANDROID_REQUEST_AVAILABLE_REQUEST_KEYS] = keys;
@@ -677,6 +791,20 @@
return *entry.data.i32;
}
+int32_t getJpegOrientation(
+ const aidl::android::hardware::camera::device::CameraMetadata& cameraMetadata) {
+ auto metadata =
+ reinterpret_cast<const camera_metadata_t*>(cameraMetadata.metadata.data());
+
+ camera_metadata_ro_entry_t entry;
+ if (find_camera_metadata_ro_entry(metadata, ANDROID_JPEG_ORIENTATION,
+ &entry) != OK) {
+ return 0;
+ }
+
+ return *entry.data.i32;
+}
+
std::optional<Resolution> getJpegThumbnailSize(
const aidl::android::hardware::camera::device::CameraMetadata& cameraMetadata) {
auto metadata =
@@ -724,6 +852,115 @@
return thumbnailSizes;
}
+std::optional<FpsRange> getFpsRange(
+ const aidl::android::hardware::camera::device::CameraMetadata& cameraMetadata) {
+ auto metadata =
+ reinterpret_cast<const camera_metadata_t*>(cameraMetadata.metadata.data());
+
+ camera_metadata_ro_entry_t entry;
+ if (find_camera_metadata_ro_entry(
+ metadata, ANDROID_CONTROL_AE_TARGET_FPS_RANGE, &entry) != OK ||
+ entry.count != 2) {
+ return {};
+ }
+
+ FpsRange range{.minFps = entry.data.i32[0], .maxFps = entry.data.i32[1]};
+ return range;
+}
+
+std::optional<camera_metadata_enum_android_control_capture_intent>
+getCaptureIntent(const aidl::android::hardware::camera::device::CameraMetadata&
+ cameraMetadata) {
+ auto metadata =
+ reinterpret_cast<const camera_metadata_t*>(cameraMetadata.metadata.data());
+
+ camera_metadata_ro_entry_t entry;
+ if (find_camera_metadata_ro_entry(metadata, ANDROID_CONTROL_CAPTURE_INTENT,
+ &entry) != OK) {
+ return {};
+ }
+
+ return static_cast<camera_metadata_enum_android_control_capture_intent>(
+ entry.data.u8[0]);
+}
+
+std::optional<GpsCoordinates> getGpsCoordinates(
+ const aidl::android::hardware::camera::device::CameraMetadata& cameraMetadata) {
+ auto metadata =
+ reinterpret_cast<const camera_metadata_t*>(cameraMetadata.metadata.data());
+
+ camera_metadata_ro_entry_t entry;
+ if (find_camera_metadata_ro_entry(metadata, ANDROID_JPEG_GPS_COORDINATES,
+ &entry) != OK) {
+ return std::nullopt;
+ }
+
+ GpsCoordinates coordinates{.latitude = entry.data.d[0],
+ .longitude = entry.data.d[1],
+ .altitude = entry.data.d[2]};
+
+ if (find_camera_metadata_ro_entry(metadata, ANDROID_JPEG_GPS_TIMESTAMP,
+ &entry) == OK) {
+ coordinates.timestamp = entry.data.i64[0];
+ }
+
+  // According to types.hal, the string describing the GPS processing method is
+  // at most 32 characters long.
+ static constexpr float kGpsProviderStringLength = 32;
+ if (find_camera_metadata_ro_entry(
+ metadata, ANDROID_JPEG_GPS_PROCESSING_METHOD, &entry) == OK) {
+ coordinates.provider.assign(
+ reinterpret_cast<const char*>(entry.data.u8),
+ std::min(entry.count, static_cast<size_t>(kGpsProviderStringLength)));
+ }
+
+ return coordinates;
+}
+
+std::optional<camera_metadata_enum_android_lens_facing> getLensFacing(
+ const aidl::android::hardware::camera::device::CameraMetadata& cameraMetadata) {
+ auto metadata =
+ reinterpret_cast<const camera_metadata_t*>(cameraMetadata.metadata.data());
+
+ camera_metadata_ro_entry_t entry;
+ if (find_camera_metadata_ro_entry(metadata, ANDROID_LENS_FACING, &entry) !=
+ OK) {
+ return std::nullopt;
+ }
+
+ return static_cast<camera_metadata_enum_android_lens_facing>(entry.data.u8[0]);
+}
+
+std::optional<camera_metadata_enum_android_control_ae_precapture_trigger>
+getPrecaptureTrigger(
+ const aidl::android::hardware::camera::device::CameraMetadata& cameraMetadata) {
+ auto metadata =
+ reinterpret_cast<const camera_metadata_t*>(cameraMetadata.metadata.data());
+
+ camera_metadata_ro_entry_t entry;
+ if (find_camera_metadata_ro_entry(
+ metadata, ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER, &entry) != OK) {
+ return std::nullopt;
+ }
+
+ return static_cast<camera_metadata_enum_android_control_ae_precapture_trigger>(
+ entry.data.u8[0]);
+}
+
+std::optional<int32_t> getDeviceId(
+ const aidl::android::hardware::camera::device::CameraMetadata& cameraMetadata) {
+ auto metadata =
+ reinterpret_cast<const camera_metadata_t*>(cameraMetadata.metadata.data());
+
+ camera_metadata_ro_entry_t entry;
+ if (find_camera_metadata_ro_entry(metadata, ANDROID_INFO_DEVICE_ID, &entry) !=
+ OK) {
+ return std::nullopt;
+ }
+
+ return static_cast<int32_t>(entry.data.i32[0]);
+}
+
} // namespace virtualcamera
} // namespace companion
} // namespace android
diff --git a/services/camera/virtualcamera/util/MetadataUtil.h b/services/camera/virtualcamera/util/MetadataUtil.h
index b4d60cb..ca6f332 100644
--- a/services/camera/virtualcamera/util/MetadataUtil.h
+++ b/services/camera/virtualcamera/util/MetadataUtil.h
@@ -59,16 +59,6 @@
int32_t weight = 0;
};
- struct FpsRange {
- int32_t minFps;
- int32_t maxFps;
-
- bool operator<(const FpsRange& other) const {
- return maxFps == other.maxFps ? minFps < other.minFps
- : maxFps < other.maxFps;
- }
- };
-
MetadataBuilder() = default;
~MetadataBuilder() = default;
@@ -76,6 +66,9 @@
MetadataBuilder& setSupportedHardwareLevel(
camera_metadata_enum_android_info_supported_hardware_level_t hwLevel);
+ // See ANDROID_INFO_DEVICE_ID in CameraMetadataTag.aidl.
+ MetadataBuilder& setDeviceId(int32_t deviceId);
+
// Whether this camera device has a flash unit
// See ANDROID_FLASH_INFO_AVAILABLE in CameraMetadataTag.aidl.
MetadataBuilder& setFlashAvailable(bool flashAvailable);
@@ -136,6 +129,11 @@
const std::vector<camera_metadata_enum_android_sensor_test_pattern_mode>&
testPatternModes);
+ // See ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES in CameraCharacteristics.java
+ MetadataBuilder& setAvailableStreamUseCases(
+ const std::vector<
+ camera_metadata_enum_android_scaler_available_stream_use_cases>& availableUseCases);
+
// See ANDROID_STATISTICS_FACE_DETECT_MODE in CaptureRequest.java.
MetadataBuilder& setFaceDetectMode(
camera_metadata_enum_android_statistics_face_detect_mode_t faceDetectMode);
@@ -193,6 +191,10 @@
const std::vector<camera_metadata_enum_android_control_scene_mode>&
availableSceneModes);
+ // See ANDROID_CONTROL_SCENE_MODE in CameraMetadataTag.aidl
+ MetadataBuilder& setControlSceneMode(
+ camera_metadata_enum_android_control_scene_mode sceneMode);
+
// See ANDROID_CONTROL_AVAILABLE_EFFECTS in CameraMetadataTag.aidl.
MetadataBuilder& setControlAvailableEffects(
const std::vector<camera_metadata_enum_android_control_effect_mode>&
@@ -202,12 +204,17 @@
MetadataBuilder& setControlEffectMode(
camera_metadata_enum_android_control_effect_mode_t effectMode);
- // See ANDROID_CONTROL_AVAILABLE_VIDEO_STABILIZATION_MODES
+ // See ANDROID_CONTROL_AVAILABLE_VIDEO_STABILIZATION_MODES in CameraMetadataTag.aidl.
MetadataBuilder& setControlAvailableVideoStabilizationModes(
const std::vector<
camera_metadata_enum_android_control_video_stabilization_mode_t>&
videoStabilizationModes);
+ // See ANDROID_CONTROL_VIDEO_STABILIZATION_MODE in CameraMetadataTag.aidl.
+ MetadataBuilder& setControlVideoStabilizationMode(
+ camera_metadata_enum_android_control_video_stabilization_mode
+ stabilizationMode);
+
// See CONTROL_AE_AVAILABLE_ANTIBANDING_MODES in CameraCharacteristics.java.
MetadataBuilder& setControlAeAvailableAntibandingModes(
const std::vector<camera_metadata_enum_android_control_ae_antibanding_mode_t>&
@@ -256,7 +263,7 @@
const std::vector<FpsRange>& fpsRanges);
// See ANDROID_CONTROL_AE_TARGET_FPS_RANGE in CaptureRequest.java.
- MetadataBuilder& setControlAeTargetFpsRange(int32_t min, int32_t max);
+ MetadataBuilder& setControlAeTargetFpsRange(FpsRange fpsRange);
// See ANDROID_CONTROL_CAPTURE_INTENT in CameraMetadataTag.aidl.
MetadataBuilder& setControlCaptureIntent(
@@ -278,9 +285,21 @@
// See CONTROL_AWB_LOCK_AVAILABLE in CameraMetadataTag.aidl.
MetadataBuilder& setControlAwbLockAvailable(bool awbLockAvailable);
+ // See CONTROL_AWB_LOCK in CameraMetadataTag.aidl
+ MetadataBuilder& setControlAwbLock(
+ camera_metadata_enum_android_control_awb_lock awbLock);
+
// See CONTROL_AE_LOCK_AVAILABLE in CameraMetadataTag.aidl.
MetadataBuilder& setControlAeLockAvailable(bool aeLockAvailable);
+ // See CONTROL_AE_LOCK in CameraMetadataTag.aidl.
+ MetadataBuilder& setControlAeLock(
+ camera_metadata_enum_android_control_ae_lock aeLock);
+
+ // See CONTROL_AE_STATE in CameraMetadataTag.aidl
+ MetadataBuilder& setControlAeState(
+ camera_metadata_enum_android_control_ae_state aeState);
+
// See ANDROID_CONTROL_AE_REGIONS in CameraMetadataTag.aidl.
MetadataBuilder& setControlAeRegions(
const std::vector<ControlRegion>& aeRegions);
@@ -289,6 +308,10 @@
MetadataBuilder& setControlAwbRegions(
const std::vector<ControlRegion>& awbRegions);
+ // See ANDROID_CONTROL_AWB_STATE in CameraMetadataTag.aidl.
+ MetadataBuilder& setControlAwbState(
+ camera_metadata_enum_android_control_awb_state awbState);
+
// See ANDROID_SCALER_CROP_REGION in CaptureRequest.java.
MetadataBuilder& setCropRegion(int32_t x, int32_t y, int32_t width,
int32_t height);
@@ -297,6 +320,10 @@
MetadataBuilder& setControlAfRegions(
const std::vector<ControlRegion>& afRegions);
+ // See ANDROID_CONTROL_AF_STATE in CameraMetadataTag.aidl.
+ MetadataBuilder& setControlAfState(
+      camera_metadata_enum_android_control_af_state afState);
+
// The size of the compressed JPEG image, in bytes.
//
// See ANDROID_JPEG_SIZE in CameraMetadataTag.aidl.
@@ -309,6 +336,12 @@
MetadataBuilder& setJpegAvailableThumbnailSizes(
const std::vector<Resolution>& thumbnailSizes);
+ // See ANDROID_JPEG_GPS_COORDINATES.
+ MetadataBuilder& setJpegGpsCoordinates(const GpsCoordinates& gpsCoordinates);
+
+ // See JPEG_ORIENTATION in CaptureRequest.java.
+ MetadataBuilder& setJpegOrientation(int32_t orientation);
+
// See JPEG_QUALITY in CaptureRequest.java.
MetadataBuilder& setJpegQuality(uint8_t quality);
@@ -342,6 +375,24 @@
// See ANDROID_CONTROL_ZOOM_RATIO_RANGE in CameraMetadataTag.aidl.
MetadataBuilder& setControlZoomRatioRange(float min, float max);
+ // See ANDROID_STATISTICS_SCENE_FLICKER in CameraMetadataTag.aidl.
+ MetadataBuilder& setStatisticsSceneFlicker(
+ camera_metadata_enum_android_statistics_scene_flicker sceneFlicker);
+
+ // See ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE in CameraMetadataTag.aidl.
+ MetadataBuilder& setStatisticsHotPixelMapMode(
+ camera_metadata_enum_android_statistics_hot_pixel_map_mode mode);
+
+ // See ANDROID_STATISTICS_LENS_SHADING_MAP_MODE in CameraMetadataTag.aidl.
+ MetadataBuilder& setStatisticsLensShadingMapMode(
+ camera_metadata_enum_android_statistics_lens_shading_map_mode
+ lensShadingMapMode);
+
+ // See ANDROID_LENS_OPTICAL_STABILIZATION_MODE in CameraMetadataTag.aidl.
+ MetadataBuilder& setLensOpticalStabilizationMode(
+ camera_metadata_enum_android_lens_optical_stabilization_mode_t
+ opticalStabilizationMode);
+
// See ANDROID_REQUEST_AVAILABLE_CAPABILITIES in CameraMetadataTag.aidl.
MetadataBuilder& setAvailableRequestCapabilities(
const std::vector<
@@ -384,10 +435,11 @@
private:
// Maps metadata tags to vectors of values for the given tag.
- std::map<camera_metadata_tag_t,
- std::variant<std::vector<int64_t>, std::vector<int32_t>,
- std::vector<uint8_t>, std::vector<float>,
- std::vector<camera_metadata_rational_t>>>
+ std::map<
+ camera_metadata_tag_t,
+ std::variant<std::vector<int64_t>, std::vector<int32_t>,
+ std::vector<uint8_t>, std::vector<float>,
+ std::vector<camera_metadata_rational_t>, std::vector<double>>>
mEntryMap;
// Extend metadata with ANDROID_REQUEST_AVAILABLE_CHARACTERISTICS_KEYS.
bool mExtendWithAvailableCharacteristicsKeys = false;
@@ -397,6 +449,10 @@
std::optional<int32_t> getJpegQuality(
const aidl::android::hardware::camera::device::CameraMetadata& metadata);
+// Return JPEG_ORIENTATION from metadata, or 0 if the key is not present
+int32_t getJpegOrientation(
+ const aidl::android::hardware::camera::device::CameraMetadata& metadata);
+
// Returns JPEG_THUMBNAIL_SIZE from metadata, or nullopt if the key is not present.
std::optional<Resolution> getJpegThumbnailSize(
const aidl::android::hardware::camera::device::CameraMetadata& metadata);
@@ -410,6 +466,28 @@
std::vector<Resolution> getJpegAvailableThumbnailSizes(
const aidl::android::hardware::camera::device::CameraMetadata& metadata);
+std::optional<FpsRange> getFpsRange(
+ const aidl::android::hardware::camera::device::CameraMetadata& metadata);
+
+std::optional<camera_metadata_enum_android_control_capture_intent> getCaptureIntent(
+ const aidl::android::hardware::camera::device::CameraMetadata& metadata);
+
+// Returns ANDROID_JPEG_GPS_COORDINATES in a GpsCoordinate object or nullopt if
+// the key is not present.
+std::optional<GpsCoordinates> getGpsCoordinates(
+ const aidl::android::hardware::camera::device::CameraMetadata& metadata);
+
+std::optional<camera_metadata_enum_android_lens_facing> getLensFacing(
+ const aidl::android::hardware::camera::device::CameraMetadata& metadata);
+
+std::optional<camera_metadata_enum_android_control_ae_precapture_trigger>
+getPrecaptureTrigger(
+ const aidl::android::hardware::camera::device::CameraMetadata& cameraMetadata);
+
+// Returns the virtual device id. This is not the camera id.
+std::optional<int32_t> getDeviceId(
+ const aidl::android::hardware::camera::device::CameraMetadata& cameraMetadata);
+
} // namespace virtualcamera
} // namespace companion
} // namespace android
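A quick sketch of how the new builder setters and read-back helpers pair up (illustrative only; the setters populate the metadata a device later reports, and the getters read it back from the AIDL CameraMetadata, as the getCameraLensFacing() test helper above does for lens facing). Here 'camera' stands for any VirtualCameraDevice instance:

  MetadataBuilder builder;
  builder.setDeviceId(0)
      .setJpegOrientation(90)
      .setControlAeTargetFpsRange(FpsRange{.minFps = 15, .maxFps = 30});

  CameraMetadata metadata;
  camera->getCameraCharacteristics(&metadata);
  std::optional<camera_metadata_enum_android_lens_facing> facing = getLensFacing(metadata);
  std::optional<FpsRange> fpsRange = getFpsRange(metadata);
  std::optional<int32_t> deviceId = getDeviceId(metadata);  // virtual device id, not the camera id
  int32_t jpegOrientation = getJpegOrientation(metadata);   // 0 when the tag is absent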
diff --git a/services/camera/virtualcamera/util/TestPatternHelper.cc b/services/camera/virtualcamera/util/TestPatternHelper.cc
index a00a1b8..274996a 100644
--- a/services/camera/virtualcamera/util/TestPatternHelper.cc
+++ b/services/camera/virtualcamera/util/TestPatternHelper.cc
@@ -15,6 +15,7 @@
*/
// #define LOG_NDEBUG 0
+
#define LOG_TAG "TestPatternHelper"
#include "TestPatternHelper.h"
@@ -23,6 +24,9 @@
#include <cstdint>
#include "log/log.h"
+#include "nativebase/nativebase.h"
+#include "system/graphics.h"
+#include "ui/GraphicBuffer.h"
#include "utils/Errors.h"
namespace android {
@@ -31,6 +35,10 @@
namespace {
+using namespace std::chrono_literals;
+
+static constexpr std::chrono::milliseconds kAcquireFenceTimeout = 500ms;
+
uint8_t julia(const std::complex<float> n, const std::complex<float> c) {
std::complex<float> z = n;
for (int i = 0; i < 64; i++) {
@@ -40,72 +48,103 @@
return 0xff;
}
-uint8_t pixelToFractal(const int x, const int y, const std::complex<float> c) {
- std::complex<float> n(float(x) / 640.0f - 0.5, float(y) / 480.0f - 0.5);
+uint8_t pixelToFractal(const int x, const int y, const int width,
+ const int height, const std::complex<float> c) {
+ std::complex<float> n(float(x) / float(width) - 0.5,
+ float(y) / float(height) - 0.5);
return julia(n * 5.f, c);
}
-void renderTestPatternYcbCr420(uint8_t* data_ptr, const int width,
+void renderTestPatternYcbCr420(const android_ycbcr& ycbr, const int width,
const int height, const int frameNumber) {
float time = float(frameNumber) / 120.0f;
const std::complex<float> c(std::sin(time), std::cos(time));
- uint8_t* y_data = data_ptr;
- uint8_t* uv_data = static_cast<uint8_t*>(y_data + width * height);
+ uint8_t* y = reinterpret_cast<uint8_t*>(ycbr.y);
+ uint8_t* cb = reinterpret_cast<uint8_t*>(ycbr.cb);
+ uint8_t* cr = reinterpret_cast<uint8_t*>(ycbr.cr);
- for (int i = 0; i < width; ++i) {
- for (int j = 0; j < height; ++j) {
- y_data[j * width + i] = pixelToFractal(i, j, c * 0.78f);
- if ((i & 1) && (j & 1)) {
- uv_data[((j / 2) * (width / 2) + i / 2) * 2] =
- static_cast<uint8_t>((float(i) / float(width)) * 255.f);
- uv_data[((j / 2) * (width / 2) + i / 2) * 2 + 1] =
- static_cast<uint8_t>((float(j) / float(height)) * 255.f);
- }
+ for (int row = 0; row < height; row++) {
+ for (int col = 0; col < width; col++) {
+ y[row * ycbr.ystride + col] =
+ pixelToFractal(col, row, width, height, c * 0.78f);
+ }
+ }
+
+ int cWidth = width / 2;
+ int cHeight = height / 2;
+ for (int row = 0; row < cHeight; row++) {
+ for (int col = 0; col < cWidth; col++) {
+ cb[row * ycbr.cstride + col * ycbr.chroma_step] =
+ static_cast<uint8_t>((float(col) / float(cWidth)) * 255.f);
+ cr[row * ycbr.cstride + col * ycbr.chroma_step] =
+ static_cast<uint8_t>((float(row) / float(cHeight)) * 255.f);
}
}
}
} // namespace
-// This is just to see some meaningfull image in the buffer for testing, only
-// works with YcbCr420.
-void renderTestPatternYCbCr420(const std::shared_ptr<AHardwareBuffer> buffer,
- const int frameNumber, const int fence) {
- AHardwareBuffer_Planes planes_info;
-
- AHardwareBuffer_Desc hwBufferDesc;
- AHardwareBuffer_describe(buffer.get(), &hwBufferDesc);
-
- const int width = hwBufferDesc.width;
- const int height = hwBufferDesc.height;
-
- int result = AHardwareBuffer_lockPlanes(buffer.get(),
- AHARDWAREBUFFER_USAGE_CPU_READ_RARELY,
- fence, nullptr, &planes_info);
- if (result != OK) {
- ALOGE("%s: Failed to lock planes: %d", __func__, result);
+void renderTestPatternYCbCr420(sp<Surface> surface, int frameNumber) {
+ if (surface == nullptr) {
+ ALOGE("%s: null surface, skipping render", __func__);
return;
}
- renderTestPatternYcbCr420(
- reinterpret_cast<uint8_t*>(planes_info.planes[0].data), width, height,
- frameNumber);
+ ANativeWindowBuffer* buffer;
+ int fenceFd;
+ int ret = ANativeWindow_dequeueBuffer(surface.get(), &buffer, &fenceFd);
+ if (ret != NO_ERROR) {
+ ALOGE(
+ "%s: Error while deuqueing buffer from surface, "
+ "ANativeWindow_dequeueBuffer returned %d",
+ __func__, ret);
+ return;
+ }
- AHardwareBuffer_unlock(buffer.get(), nullptr);
-}
+ if (buffer == nullptr) {
+ ALOGE("%s: ANativeWindowBuffer is null after dequeing", __func__);
+ return;
+ }
-void renderTestPatternYCbCr420(sp<Surface> surface, int frameNumber) {
- ANativeWindow_Buffer buffer;
- surface->lock(&buffer, nullptr);
+ sp<Fence> fence = sp<Fence>::make(fenceFd);
+ if (fence->isValid()) {
+ ret = fence->wait(kAcquireFenceTimeout.count());
+ if (ret != NO_ERROR) {
+ ALOGE("%s: Timeout while waiting for the fence to clear", __func__);
+ ANativeWindow_queueBuffer(surface.get(), buffer, fence->dup());
+ return;
+ }
+ }
- ALOGV("buffer: %dx%d stride %d, pixfmt %d", buffer.width, buffer.height,
- buffer.stride, buffer.format);
+ sp<GraphicBuffer> gBuffer = GraphicBuffer::from(buffer);
+ android_ycbcr ycbr;
- renderTestPatternYcbCr420(reinterpret_cast<uint8_t*>(buffer.bits),
- buffer.width, buffer.height, frameNumber);
+ ret = gBuffer->lockAsyncYCbCr(GraphicBuffer::USAGE_SW_WRITE_OFTEN, &ycbr,
+ fence->dup());
+ if (ret != NO_ERROR) {
+ ALOGE("%s: Failed to lock buffer retrieved from surface, ret %d", __func__,
+ ret);
+ return;
+ }
- surface->unlockAndPost();
+ renderTestPatternYcbCr420(ycbr, gBuffer->getWidth(), gBuffer->getHeight(),
+ frameNumber);
+
+ ret = gBuffer->unlock();
+ if (ret != NO_ERROR) {
+ ALOGE("%s: Failed to unlock buffer, ret %d", __func__, ret);
+ return;
+ }
+
+ ret = ANativeWindow_queueBuffer(surface.get(), buffer, /*fenceFd=*/-1);
+ if (ret != NO_ERROR) {
+ ALOGE(
+ "%s: Error while queing buffer to surface, ANativeWindow_queueBuffer "
+ "returned %d",
+ __func__, ret);
+ return;
+ }
}
} // namespace virtualcamera
diff --git a/services/camera/virtualcamera/util/TestPatternHelper.h b/services/camera/virtualcamera/util/TestPatternHelper.h
index aca1cdd..f842b29 100644
--- a/services/camera/virtualcamera/util/TestPatternHelper.h
+++ b/services/camera/virtualcamera/util/TestPatternHelper.h
@@ -17,20 +17,12 @@
#ifndef ANDROID_COMPANION_VIRTUALCAMERA_TESTPATTERNHELPER_H
#define ANDROID_COMPANION_VIRTUALCAMERA_TESTPATTERNHELPER_H
-#include <memory>
-
-#include "android/hardware_buffer.h"
#include "gui/Surface.h"
namespace android {
namespace companion {
namespace virtualcamera {
-// Helper function filling hardware buffer with test pattern for debugging /
-// testing purposes.
-void renderTestPatternYCbCr420(std::shared_ptr<AHardwareBuffer> buffer,
- int frameNumber, int fence = -1);
-
// Helper function for rendering test pattern into Surface.
void renderTestPatternYCbCr420(sp<Surface> surface, int frameNumber);
diff --git a/services/camera/virtualcamera/util/Util.cc b/services/camera/virtualcamera/util/Util.cc
index b2048bc..4aff60f 100644
--- a/services/camera/virtualcamera/util/Util.cc
+++ b/services/camera/virtualcamera/util/Util.cc
@@ -23,6 +23,7 @@
#include <cstdint>
#include <memory>
+#include "EglUtil.h"
#include "android/hardware_buffer.h"
#include "jpeglib.h"
#include "ui/GraphicBuffer.h"
@@ -35,11 +36,6 @@
using ::aidl::android::companion::virtualcamera::Format;
using ::aidl::android::hardware::common::NativeHandle;
-// Lower bound for maximal supported texture size is at least 2048x2048
-// but on most platforms will be more.
-// TODO(b/301023410) - Query actual max texture size.
-constexpr int kMaxTextureSize = 2048;
-constexpr int kLibJpegDctSize = DCTSIZE;
constexpr int kMaxFpsUpperLimit = 60;
constexpr std::array<Format, 2> kSupportedFormats{Format::YUV_420_888,
@@ -69,7 +65,6 @@
if (gBuffer == nullptr) {
return;
}
- gBuffer->unlock();
status_t status = gBuffer->unlock();
if (status != NO_ERROR) {
ALOGE("Failed to unlock graphic buffer: %s", statusToString(status).c_str());
@@ -141,15 +136,9 @@
return false;
}
- if (width <= 0 || height <= 0 || width > kMaxTextureSize ||
- height > kMaxTextureSize) {
- return false;
- }
-
- if (width % kLibJpegDctSize != 0 || height % kLibJpegDctSize != 0) {
- // Input dimension needs to be multiple of libjpeg DCT size.
- // TODO(b/301023410) This restriction can be removed once we add support for
- // unaligned jpeg compression.
+ int maxTextureSize = getMaximumTextureSize();
+ if (width <= 0 || height <= 0 || width > maxTextureSize ||
+ height > maxTextureSize) {
return false;
}
diff --git a/services/camera/virtualcamera/util/Util.h b/services/camera/virtualcamera/util/Util.h
index faae010..291e105 100644
--- a/services/camera/virtualcamera/util/Util.h
+++ b/services/camera/virtualcamera/util/Util.h
@@ -20,6 +20,8 @@
#include <cmath>
#include <cstdint>
#include <memory>
+#include <optional>
+#include <string>
#include "aidl/android/companion/virtualcamera/Format.h"
#include "aidl/android/hardware/camera/common/Status.h"
@@ -142,6 +144,26 @@
int height = 0;
};
+struct FpsRange {
+ int32_t minFps;
+ int32_t maxFps;
+
+ bool operator<(const FpsRange& other) const {
+ return maxFps == other.maxFps ? minFps < other.minFps
+ : maxFps < other.maxFps;
+ }
+};
+
+struct GpsCoordinates {
+ // Represented by a double[] in metadata with index 0 for
+ // latitude and index 1 for longitude, 2 for altitude.
+ double_t latitude;
+ double_t longitude;
+ double_t altitude;
+ std::optional<int64_t> timestamp;
+ std::string provider;
+};
+
inline bool isApproximatellySameAspectRatio(const Resolution r1,
const Resolution r2) {
static constexpr float kAspectRatioEpsilon = 0.05;
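For orientation (illustrative only): the FpsRange comparator added above orders ranges primarily by maxFps and uses minFps only to break ties, so sorted containers list the range with the highest ceiling last:

  FpsRange a{.minFps = 15, .maxFps = 30};
  FpsRange b{.minFps = 30, .maxFps = 30};
  FpsRange c{.minFps = 15, .maxFps = 60};
  // a < b (equal maxFps, smaller minFps) and b < c (smaller maxFps),
  // so a std::set<FpsRange> iterates a, b, c.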
diff --git a/services/medialog/Android.bp b/services/medialog/Android.bp
index 8088ef0..fdb56e5 100644
--- a/services/medialog/Android.bp
+++ b/services/medialog/Android.bp
@@ -30,7 +30,7 @@
],
cflags: [
- "-Werror",
"-Wall",
+ "-Werror",
],
}
diff --git a/services/medialog/fuzzer/Android.bp b/services/medialog/fuzzer/Android.bp
index bf90f43..6b4ee5f 100644
--- a/services/medialog/fuzzer/Android.bp
+++ b/services/medialog/fuzzer/Android.bp
@@ -31,12 +31,12 @@
"frameworks/av/services/medialog",
],
cflags: [
- "-Werror",
"-Wall",
+ "-Werror",
],
fuzz_config: {
cc: [
- "android-media-fuzzing-reports@google.com",
+ "android-audio-fuzzing-reports@google.com",
],
componentid: 155276,
hotlists: [
diff --git a/services/mediametrics/fuzzer/mediametrics_service_fuzzer.cpp b/services/mediametrics/fuzzer/mediametrics_service_fuzzer.cpp
index 433332c..c6793a9 100644
--- a/services/mediametrics/fuzzer/mediametrics_service_fuzzer.cpp
+++ b/services/mediametrics/fuzzer/mediametrics_service_fuzzer.cpp
@@ -33,6 +33,8 @@
constexpr size_t kLogItemsLowWater = 1;
// high water mark
constexpr size_t kLogItemsHighWater = 2;
+constexpr size_t kMaxItemLength = 16;
+constexpr size_t kMaxApis = 64;
class MediaMetricsServiceFuzzer {
public:
@@ -304,10 +306,11 @@
}
FuzzedDataProvider fdp2 = FuzzedDataProvider(data, size);
-
- while (fdp2.remaining_bytes()) {
+ size_t apiCount = 0;
+ while (fdp2.remaining_bytes() && ++apiCount <= kMaxApis) {
// make a test item
- auto item = std::make_shared<mediametrics::Item>(fdp2.ConsumeRandomLengthString().c_str());
+ auto item = std::make_shared<mediametrics::Item>(
+ fdp2.ConsumeRandomLengthString(kMaxItemLength).c_str());
(*item).set("event", fdp2.ConsumeRandomLengthString().c_str());
// get the actions and execute them
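The two new constants bound how much work a single fuzzer input can trigger: item names are capped at kMaxItemLength bytes and the loop stops after kMaxApis iterations, so one large input can no longer monopolize the fuzzing time budget. A minimal sketch of the same bounded-loop pattern, with a hypothetical processItem() standing in for the mediametrics item handling:

    #include <fuzzer/FuzzedDataProvider.h>
    #include <string>

    constexpr size_t kMaxItemLength = 16;
    constexpr size_t kMaxApis = 64;

    void processItem(const std::string&) {}  // hypothetical stand-in

    void boundedFuzzLoop(const uint8_t* data, size_t size) {
        FuzzedDataProvider fdp(data, size);
        size_t apiCount = 0;
        while (fdp.remaining_bytes() && ++apiCount <= kMaxApis) {
            // Cap the string length so one call cannot swallow the whole input.
            processItem(fdp.ConsumeRandomLengthString(kMaxItemLength));
        }
    }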
diff --git a/services/mediaresourcemanager/ResourceManagerMetrics.cpp b/services/mediaresourcemanager/ResourceManagerMetrics.cpp
index cd00937..a8a1de1 100644
--- a/services/mediaresourcemanager/ResourceManagerMetrics.cpp
+++ b/services/mediaresourcemanager/ResourceManagerMetrics.cpp
@@ -109,23 +109,17 @@
return CodecBucketUnspecified;
}
-static bool getLogMessage(int hwCount, int swCount, std::stringstream& logMsg) {
- bool update = false;
- logMsg.clear();
+static std::string getLogMessage(const std::string& firstKey, const long& firstValue,
+ const std::string& secondKey, const long& secondValue) {
- if (hwCount > 0) {
- logMsg << " HW: " << hwCount;
- update = true;
+ std::stringstream logMsg;
+ if (firstValue > 0) {
+ logMsg << firstKey << firstValue;
}
- if (swCount > 0) {
- logMsg << " SW: " << swCount;
- update = true;
+ if (secondValue > 0) {
+ logMsg << secondKey << secondValue;
}
-
- if (update) {
- logMsg << " ] ";
- }
- return update;
+ return logMsg.str();
}
ResourceManagerMetrics::ResourceManagerMetrics(const sp<ProcessInfoInterface>& processInfo) {
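The refactored getLogMessage() now takes two label/count pairs and returns the formatted fragment directly, producing an empty string when both counts are zero so callers decide whether to emit the surrounding brackets. Illustrative inputs and outputs (values made up):

    // getLogMessage(" HW: ", 2, " SW: ", 0) -> " HW: 2"
    // getLogMessage(" HW: ", 0, " SW: ", 3) -> " SW: 3"
    // getLogMessage(" HW: ", 0, " SW: ", 0) -> ""   (caller skips the "Xxx[...]" block)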
@@ -364,6 +358,15 @@
std::scoped_lock lock(mLock);
// post MediaCodecConcurrentUsageReported for this terminated pid.
pushConcurrentUsageReport(pid, uid);
+ // Remove all the metrics associated with this process.
+ std::map<int32_t, ConcurrentCodecs>::iterator it1 = mProcessConcurrentCodecsMap.find(pid);
+ if (it1 != mProcessConcurrentCodecsMap.end()) {
+ mProcessConcurrentCodecsMap.erase(it1);
+ }
+ std::map<int32_t, PixelCount>::iterator it2 = mProcessPixelsMap.find(pid);
+ if (it2 != mProcessPixelsMap.end()) {
+ mProcessPixelsMap.erase(it2);
+ }
}
void ResourceManagerMetrics::pushConcurrentUsageReport(int32_t pid, uid_t uid) {
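The explicit find/erase pairs above are behavior-equivalent to erasing by key, since std::map::erase(key) does nothing and returns 0 when the key is absent, so the same cleanup could also be written as:

    // Equivalent cleanup: std::map::erase(key) is a no-op for missing keys.
    mProcessConcurrentCodecsMap.erase(pid);
    mProcessPixelsMap.erase(pid);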
@@ -400,24 +403,30 @@
std::stringstream peakCodecLog;
peakCodecLog << "Peak { ";
- std::stringstream logMsg;
- if (getLogMessage(peakHwAudioEncoderCount, peakSwAudioEncoderCount, logMsg)) {
- peakCodecLog << "AudioEnc[" << logMsg.str();
+ std::string logMsg;
+ logMsg = getLogMessage(" HW: ", peakHwAudioEncoderCount, " SW: ", peakSwAudioEncoderCount);
+ if (!logMsg.empty()) {
+ peakCodecLog << "AudioEnc[" << logMsg << " ] ";
}
- if (getLogMessage(peakHwAudioDecoderCount, peakSwAudioDecoderCount, logMsg)) {
- peakCodecLog << "AudioDec[" << logMsg.str();
+ logMsg = getLogMessage(" HW: ", peakHwAudioDecoderCount, " SW: ", peakSwAudioDecoderCount);
+ if (!logMsg.empty()) {
+ peakCodecLog << "AudioDec[" << logMsg << " ] ";
}
- if (getLogMessage(peakHwVideoEncoderCount, peakSwVideoEncoderCount, logMsg)) {
- peakCodecLog << "VideoEnc[" << logMsg.str();
+ logMsg = getLogMessage(" HW: ", peakHwVideoEncoderCount, " SW: ", peakSwVideoEncoderCount);
+ if (!logMsg.empty()) {
+ peakCodecLog << "VideoEnc[" << logMsg << " ] ";
}
- if (getLogMessage(peakHwVideoDecoderCount, peakSwVideoDecoderCount, logMsg)) {
- peakCodecLog << "VideoDec[" << logMsg.str();
+ logMsg = getLogMessage(" HW: ", peakHwVideoDecoderCount, " SW: ", peakSwVideoDecoderCount);
+ if (!logMsg.empty()) {
+ peakCodecLog << "VideoDec[" << logMsg << " ] ";
}
- if (getLogMessage(peakHwImageEncoderCount, peakSwImageEncoderCount, logMsg)) {
- peakCodecLog << "ImageEnc[" << logMsg.str();
+ logMsg = getLogMessage(" HW: ", peakHwImageEncoderCount, " SW: ", peakSwImageEncoderCount);
+ if (!logMsg.empty()) {
+ peakCodecLog << "ImageEnc[" << logMsg << " ] ";
}
- if (getLogMessage(peakHwImageDecoderCount, peakSwImageDecoderCount, logMsg)) {
- peakCodecLog << "ImageDec[" << logMsg.str();
+ logMsg = getLogMessage(" HW: ", peakHwImageDecoderCount, " SW: ", peakSwImageDecoderCount);
+ if (!logMsg.empty()) {
+ peakCodecLog << "ImageDec[" << logMsg << " ] ";
}
peakCodecLog << "}";
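The six per-bucket blocks still repeat the same three lines; the same output could be produced from a small table of label/HW/SW triples. This is a sketch only, reusing the local variables already in scope above, not part of this change:

    struct BucketCounts { const char* label; long hw; long sw; };
    const BucketCounts buckets[] = {
        {"AudioEnc", peakHwAudioEncoderCount, peakSwAudioEncoderCount},
        {"AudioDec", peakHwAudioDecoderCount, peakSwAudioDecoderCount},
        {"VideoEnc", peakHwVideoEncoderCount, peakSwVideoEncoderCount},
        {"VideoDec", peakHwVideoDecoderCount, peakSwVideoDecoderCount},
        {"ImageEnc", peakHwImageEncoderCount, peakSwImageEncoderCount},
        {"ImageDec", peakHwImageDecoderCount, peakSwImageDecoderCount},
    };
    for (const auto& b : buckets) {
        std::string msg = getLogMessage(" HW: ", b.hw, " SW: ", b.sw);
        if (!msg.empty()) {
            peakCodecLog << b.label << "[" << msg << " ] ";
        }
    }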
@@ -705,4 +714,114 @@
return 0;
}
+static std::string getConcurrentInstanceCount(const std::map<std::string, int>& resourceMap) {
+ if (resourceMap.empty()) {
+ return "";
+ }
+ std::stringstream concurrentInstanceInfo;
+ for (const auto& [name, count] : resourceMap) {
+ if (count > 0) {
+ concurrentInstanceInfo << " Name: " << name << " Instances: " << count << "\n";
+ }
+ }
+
+ std::string info = concurrentInstanceInfo.str();
+ if (info.empty()) {
+ return "";
+ }
+ return " Current Concurrent Codec Instances:\n" + info;
+}
+
+static std::string getAppsPixelCount(const std::map<int32_t, PixelCount>& pixelMap) {
+ if (pixelMap.empty()) {
+ return "";
+ }
+ std::stringstream pixelInfo;
+ for (const auto& [pid, pixelCount] : pixelMap) {
+ std::string logMsg = getLogMessage(" Current Pixels: ", pixelCount.mCurrent,
+ " Peak Pixels: ", pixelCount.mPeak);
+ if (!logMsg.empty()) {
+ pixelInfo << " PID[" << pid << "]: {" << logMsg << " }\n";
+ }
+ }
+
+ return " Applications Pixel Usage:\n" + pixelInfo.str();
+}
+
+static std::string getCodecUsageMetrics(const ConcurrentCodecsMap& codecsMap) {
+ int peakHwAudioEncoderCount = codecsMap[HwAudioEncoder];
+ int peakHwAudioDecoderCount = codecsMap[HwAudioDecoder];
+ int peakHwVideoEncoderCount = codecsMap[HwVideoEncoder];
+ int peakHwVideoDecoderCount = codecsMap[HwVideoDecoder];
+ int peakHwImageEncoderCount = codecsMap[HwImageEncoder];
+ int peakHwImageDecoderCount = codecsMap[HwImageDecoder];
+ int peakSwAudioEncoderCount = codecsMap[SwAudioEncoder];
+ int peakSwAudioDecoderCount = codecsMap[SwAudioDecoder];
+ int peakSwVideoEncoderCount = codecsMap[SwVideoEncoder];
+ int peakSwVideoDecoderCount = codecsMap[SwVideoDecoder];
+ int peakSwImageEncoderCount = codecsMap[SwImageEncoder];
+ int peakSwImageDecoderCount = codecsMap[SwImageDecoder];
+ std::stringstream usageMetrics;
+ std::string logMsg;
+ logMsg = getLogMessage(" HW: ", peakHwAudioEncoderCount, " SW: ", peakSwAudioEncoderCount);
+ if (!logMsg.empty()) {
+ usageMetrics << "AudioEnc[" << logMsg << " ] ";
+ }
+ logMsg = getLogMessage(" HW: ", peakHwAudioDecoderCount, " SW: ", peakSwAudioDecoderCount);
+ if (!logMsg.empty()) {
+ usageMetrics << "AudioDec[" << logMsg << " ] ";
+ }
+ logMsg = getLogMessage(" HW: ", peakHwVideoEncoderCount, " SW: ", peakSwVideoEncoderCount);
+ if (!logMsg.empty()) {
+ usageMetrics << "VideoEnc[" << logMsg << " ] ";
+ }
+ logMsg = getLogMessage(" HW: ", peakHwVideoDecoderCount, " SW: ", peakSwVideoDecoderCount);
+ if (!logMsg.empty()) {
+ usageMetrics << "VideoDec[" << logMsg << " ] ";
+ }
+ logMsg = getLogMessage(" HW: ", peakHwImageEncoderCount, " SW: ", peakSwImageEncoderCount);
+ if (!logMsg.empty()) {
+ usageMetrics << "ImageEnc[" << logMsg << " ] ";
+ }
+ logMsg = getLogMessage(" HW: ", peakHwImageDecoderCount, " SW: ", peakSwImageDecoderCount);
+ if (!logMsg.empty()) {
+ usageMetrics << "ImageDec[" << logMsg << " ] ";
+ }
+
+ return usageMetrics.str();
+}
+
+static std::string getAppsCodecUsageMetrics(
+ const std::map<int32_t, ConcurrentCodecs>& processCodecsMap) {
+ if (processCodecsMap.empty()) {
+ return "";
+ }
+ std::stringstream codecUsage;
+ std::string info;
+ for (const auto& [pid, codecMap] : processCodecsMap) {
+ codecUsage << " PID[" << pid << "]: ";
+ info = getCodecUsageMetrics(codecMap.mCurrent);
+ if (!info.empty()) {
+ codecUsage << "Current Codec Usage: { " << info << "} ";
+ }
+ info = getCodecUsageMetrics(codecMap.mPeak);
+ if (!info.empty()) {
+ codecUsage << "Peak Codec Usage: { " << info << "}";
+ }
+ codecUsage << "\n";
+ }
+
+ return " Applications Codec Usage:\n" + codecUsage.str();
+}
+
+std::string ResourceManagerMetrics::dump() const {
+ std::string metricsLog(" Metrics logs:\n");
+ metricsLog += getConcurrentInstanceCount(mConcurrentResourceCountMap);
+ metricsLog += getAppsPixelCount(mProcessPixelsMap);
+ metricsLog += getAppsCodecUsageMetrics(mProcessConcurrentCodecsMap);
+
+ return metricsLog;
+}
+
} // namespace android
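Putting the helpers together, dump() concatenates up to three sections, each emitted only when it has something to report. An illustrative example of the resulting text, with made-up PIDs, names, and counts:

    Metrics logs:
    Current Concurrent Codec Instances:
     Name: <codec-or-resource-name> Instances: 2
    Applications Pixel Usage:
     PID[1234]: { Current Pixels: 2073600 Peak Pixels: 8294400 }
    Applications Codec Usage:
     PID[1234]: Current Codec Usage: { VideoDec[ HW: 1 ] } Peak Codec Usage: { VideoDec[ HW: 2 ] }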
diff --git a/services/mediaresourcemanager/ResourceManagerMetrics.h b/services/mediaresourcemanager/ResourceManagerMetrics.h
index 7a5a89f..9904f7d 100644
--- a/services/mediaresourcemanager/ResourceManagerMetrics.h
+++ b/services/mediaresourcemanager/ResourceManagerMetrics.h
@@ -171,6 +171,9 @@
// Get the current concurrent pixel count (associated with the video codecs) for the process.
long getCurrentConcurrentPixelCount(int pid) const;
+ // Retrieves the metrics log.
+ std::string dump() const;
+
private:
ResourceManagerMetrics(const ResourceManagerMetrics&) = delete;
ResourceManagerMetrics(ResourceManagerMetrics&&) = delete;
@@ -204,9 +207,9 @@
// Map of resources (name) and number of concurrent instances
std::map<std::string, int> mConcurrentResourceCountMap;
- // Map of concurrent codes by CodecBucket across the system.
+ // Map of concurrent codecs by CodecBucket across the system.
ConcurrentCodecsMap mConcurrentCodecsMap;
- // Map of concurrent and peak codes by CodecBucket for each process/application.
+ // Map of concurrent and peak codecs by CodecBucket for each process/application.
std::map<int32_t, ConcurrentCodecs> mProcessConcurrentCodecsMap;
// Uid Observer to monitor the application termination.
diff --git a/services/mediaresourcemanager/ResourceManagerService.cpp b/services/mediaresourcemanager/ResourceManagerService.cpp
index d37d893..9c2fb7c 100644
--- a/services/mediaresourcemanager/ResourceManagerService.cpp
+++ b/services/mediaresourcemanager/ResourceManagerService.cpp
@@ -108,10 +108,17 @@
serviceLog = mServiceLog->toString(" " /* linePrefix */);
}
- // Get all the resource (and overload pid) logs
+ // Get the resource (and overload pid) log.
std::string resourceLog;
getResourceDump(resourceLog);
+ // Get the metrics log.
+ std::string metricsLog;
+ {
+ std::scoped_lock lock{mLock};
+ metricsLog = mResourceManagerMetrics->dump();
+ }
+
const size_t SIZE = 256;
char buffer[SIZE];
snprintf(buffer, SIZE, "ResourceManagerService: %p\n", this);
@@ -123,11 +130,16 @@
supportsSecureWithNonSecureCodec);
result.append(buffer);
+ // Add resource log.
result.append(resourceLog.c_str());
+ // Add service log.
result.append(" Events logs (most recent at top):\n");
result.append(serviceLog);
+ // Add metrics log.
+ result.append(metricsLog.c_str());
+
write(fd, result.c_str(), result.size());
return OK;
}
diff --git a/services/mediaresourcemanager/test/Android.bp b/services/mediaresourcemanager/test/Android.bp
index 6a64823..5dfec30 100644
--- a/services/mediaresourcemanager/test/Android.bp
+++ b/services/mediaresourcemanager/test/Android.bp
@@ -29,6 +29,9 @@
"libactivitymanager_aidl",
"server_configurable_flags",
],
+ defaults: [
+ "aconfig_lib_cc_static_link.defaults",
+ ],
include_dirs: [
"frameworks/av/include",
"frameworks/av/services/mediaresourcemanager",
diff --git a/services/oboeservice/AAudioServiceEndpointPlay.cpp b/services/oboeservice/AAudioServiceEndpointPlay.cpp
index 637405d..5d6e2ae 100644
--- a/services/oboeservice/AAudioServiceEndpointPlay.cpp
+++ b/services/oboeservice/AAudioServiceEndpointPlay.cpp
@@ -88,7 +88,8 @@
}
aaudio_stream_state_t state = clientStream->getState();
- if (state == AAUDIO_STREAM_STATE_STOPPING) {
+ if (state == AAUDIO_STREAM_STATE_STOPPING ||
+ state == AAUDIO_STREAM_STATE_PAUSING) {
allowUnderflow = false; // just read what is already in the FIFO
} else if (state != AAUDIO_STREAM_STATE_STARTED) {
continue; // this stream is not running so skip it.
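With this change a pausing stream is treated the same way as a stopping one: the endpoint only reads whatever audio is already in the stream's FIFO and no longer allows it to underflow. A simplified sketch of the resulting gate (the separate STARTED check that skips idle streams is unchanged), assuming the public AAudio state constants:

    #include <aaudio/AAudio.h>

    // Simplified: streams that are winding down are drained from their FIFO
    // but are not allowed to underflow.
    bool shouldAllowUnderflow(aaudio_stream_state_t state) {
        return state != AAUDIO_STREAM_STATE_STOPPING &&
               state != AAUDIO_STREAM_STATE_PAUSING;
    }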
diff --git a/services/oboeservice/AAudioServiceStreamBase.cpp b/services/oboeservice/AAudioServiceStreamBase.cpp
index dc70c79..adbfc21 100644
--- a/services/oboeservice/AAudioServiceStreamBase.cpp
+++ b/services/oboeservice/AAudioServiceStreamBase.cpp
@@ -307,6 +307,8 @@
.set(AMEDIAMETRICS_PROP_STATUS, (int32_t)result)
.record(); });
+ setState(AAUDIO_STREAM_STATE_PAUSING);
+
sp<AAudioServiceEndpoint> endpoint = mServiceEndpointWeak.promote();
if (endpoint == nullptr) {
ALOGE("%s() has no endpoint", __func__);
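Taken together with the AAudioServiceEndpointPlay change above, the stream now transitions to AAUDIO_STREAM_STATE_PAUSING before the service endpoint is fetched to carry out the pause, so the endpoint's mixing loop already sees the pausing state and stops allowing underflow while the pause is still completing.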
diff --git a/services/oboeservice/Android.bp b/services/oboeservice/Android.bp
index 12ce17f..e3601a1 100644
--- a/services/oboeservice/Android.bp
+++ b/services/oboeservice/Android.bp
@@ -69,10 +69,10 @@
"-android-cloexec-dup", // found in AAudioServiceEndpointMMAP.cpp
"-bugprone-narrowing-conversions", // found in several interface from size_t to int32_t
- "-google-readability-casting", // C++ casts not always necessary and may be verbose
- "-google-readability-todo", // do not require TODO(info)
"-google-build-using-namespace", // Reenable and fix later.
"-google-global-names-in-headers", // found in several files
+ "-google-readability-casting", // C++ casts not always necessary and may be verbose
+ "-google-readability-todo", // do not require TODO(info)
"-misc-non-private-member-variables-in-classes", // found in aidl generated files
@@ -82,27 +82,27 @@
name: "libaaudioservice_dependencies",
shared_libs: [
+ "aaudio-aidl-cpp",
+ "com.android.media.aaudio-aconfig-cc",
+ "framework-permission-aidl-cpp",
"libaaudio_internal",
"libaudioclient",
+ "libaudioclient_aidl_conversion",
"libaudioutils",
- "libmedia_helper",
- "libmediametrics",
- "libmediautils",
"libbase",
"libbinder",
"libcutils",
"liblog",
+ "libmedia_helper",
+ "libmediametrics",
+ "libmediautils",
"libutils",
- "aaudio-aidl-cpp",
- "framework-permission-aidl-cpp",
- "libaudioclient_aidl_conversion",
"packagemanager_aidl-cpp",
- "com.android.media.aaudio-aconfig-cc",
],
static_libs: [
"libaudioflinger",
- ]
+ ],
}
cc_library_static {
@@ -110,8 +110,8 @@
name: "libaaudioservice",
defaults: [
- "libaaudioservice_dependencies",
"latest_android_media_audio_common_types_cpp_shared",
+ "libaaudioservice_dependencies",
],
srcs: [
@@ -137,15 +137,15 @@
],
cflags: [
- "-Wthread-safety",
- "-Wno-unused-parameter",
"-Wall",
"-Werror",
+ "-Wno-unused-parameter",
+ "-Wthread-safety",
],
export_shared_lib_headers: [
- "libaaudio_internal",
"framework-permission-aidl-cpp",
+ "libaaudio_internal",
],
header_libs: [
@@ -153,8 +153,8 @@
],
include_dirs: [
- "frameworks/av/media/libnbaio/include_mono",
"frameworks/av/media/libnbaio/include",
+ "frameworks/av/media/libnbaio/include_mono",
],
tidy: true,
@@ -162,5 +162,5 @@
tidy_checks_as_errors: tidy_errors,
tidy_flags: [
"-format-style=file",
- ]
+ ],
}
diff --git a/services/oboeservice/fuzzer/Android.bp b/services/oboeservice/fuzzer/Android.bp
index 31ed8ac..97825b3 100644
--- a/services/oboeservice/fuzzer/Android.bp
+++ b/services/oboeservice/fuzzer/Android.bp
@@ -37,22 +37,22 @@
"oboeservice_fuzzer.cpp",
],
shared_libs: [
+ "aaudio-aidl-cpp",
+ "com.android.media.aaudio-aconfig-cc",
+ "framework-permission-aidl-cpp",
"libaaudio_internal",
"libaudioclient",
+ "libaudioclient_aidl_conversion",
"libaudioflinger",
"libaudioutils",
- "libmedia_helper",
- "libmediametrics",
- "libmediautils",
"libbase",
"libbinder",
"libcutils",
"liblog",
+ "libmedia_helper",
+ "libmediametrics",
+ "libmediautils",
"libutils",
- "aaudio-aidl-cpp",
- "framework-permission-aidl-cpp",
- "libaudioclient_aidl_conversion",
- "com.android.media.aaudio-aconfig-cc",
],
static_libs: [
"libaaudioservice",
diff --git a/services/tuner/TunerDemux.cpp b/services/tuner/TunerDemux.cpp
index 92fa970..a80a88e 100644
--- a/services/tuner/TunerDemux.cpp
+++ b/services/tuner/TunerDemux.cpp
@@ -50,7 +50,9 @@
}
TunerDemux::~TunerDemux() {
- close();
+ if (!isClosed) {
+ close();
+ }
mDemux = nullptr;
mTunerService = nullptr;
}
@@ -125,6 +127,7 @@
}
::ndk::ScopedAStatus TunerDemux::close() {
+ isClosed = true;
return mDemux->close();
}
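Each Tuner wrapper below gains the same close-once guard: close() sets an isClosed flag, and the destructor only calls close() when the client never did, so the underlying HAL object is not closed a second time on destruction. The identical pattern is applied to TunerDescrambler, TunerDvr, TunerFilter, TunerFrontend, TunerLnb, and TunerTimeFilter in the hunks that follow. A standalone sketch of the pattern, with illustrative names:

    #include <memory>

    struct HalObject { void close() {} };  // stand-in for the Tuner HAL interface

    class Wrapper {
      public:
        explicit Wrapper(std::shared_ptr<HalObject> hal) : mHal(std::move(hal)) {}
        ~Wrapper() {
            if (!isClosed) {
                close();   // close only if the client never called close()
            }
            mHal = nullptr;
        }
        void close() {
            isClosed = true;
            mHal->close();  // forward to the underlying HAL object
        }
      private:
        std::shared_ptr<HalObject> mHal;
        bool isClosed = false;
    };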
diff --git a/services/tuner/TunerDemux.h b/services/tuner/TunerDemux.h
index 0c71987..17dd7e0 100644
--- a/services/tuner/TunerDemux.h
+++ b/services/tuner/TunerDemux.h
@@ -64,6 +64,7 @@
shared_ptr<IDemux> mDemux;
int mDemuxId;
shared_ptr<TunerService> mTunerService;
+ bool isClosed = false;
};
} // namespace tuner
diff --git a/services/tuner/TunerDescrambler.cpp b/services/tuner/TunerDescrambler.cpp
index ffe0be9..c1214bd 100644
--- a/services/tuner/TunerDescrambler.cpp
+++ b/services/tuner/TunerDescrambler.cpp
@@ -41,7 +41,9 @@
}
TunerDescrambler::~TunerDescrambler() {
- close();
+ if (!isClosed) {
+ close();
+ }
mDescrambler = nullptr;
}
@@ -75,6 +77,7 @@
}
::ndk::ScopedAStatus TunerDescrambler::close() {
+ isClosed = true;
return mDescrambler->close();
}
diff --git a/services/tuner/TunerDescrambler.h b/services/tuner/TunerDescrambler.h
index b1d5fb9..434fc5d 100644
--- a/services/tuner/TunerDescrambler.h
+++ b/services/tuner/TunerDescrambler.h
@@ -48,6 +48,7 @@
private:
shared_ptr<IDescrambler> mDescrambler;
+ bool isClosed = false;
};
} // namespace tuner
diff --git a/services/tuner/TunerDvr.cpp b/services/tuner/TunerDvr.cpp
index fcee966..0e1b0fa 100644
--- a/services/tuner/TunerDvr.cpp
+++ b/services/tuner/TunerDvr.cpp
@@ -37,7 +37,9 @@
}
TunerDvr::~TunerDvr() {
- close();
+ if (!isClosed) {
+ close();
+ }
mDvr = nullptr;
}
@@ -92,6 +94,7 @@
}
::ndk::ScopedAStatus TunerDvr::close() {
+ isClosed = true;
return mDvr->close();
}
diff --git a/services/tuner/TunerDvr.h b/services/tuner/TunerDvr.h
index 2330e7b..1fb7a5c 100644
--- a/services/tuner/TunerDvr.h
+++ b/services/tuner/TunerDvr.h
@@ -77,6 +77,7 @@
private:
shared_ptr<IDvr> mDvr;
DvrType mType;
+ bool isClosed = false;
};
} // namespace tuner
diff --git a/services/tuner/TunerFilter.cpp b/services/tuner/TunerFilter.cpp
index 478e7ea..84a2b4e 100644
--- a/services/tuner/TunerFilter.cpp
+++ b/services/tuner/TunerFilter.cpp
@@ -47,7 +47,9 @@
mTunerService(tuner) {}
TunerFilter::~TunerFilter() {
- close();
+ if (!isClosed) {
+ close();
+ }
freeSharedFilterToken("");
{
Mutex::Autolock _l(mLock);
@@ -266,6 +268,7 @@
mStarted = false;
mShared = false;
mClientPid = -1;
+ isClosed = true;
return res;
}
diff --git a/services/tuner/TunerFilter.h b/services/tuner/TunerFilter.h
index f6178c4..06735aa 100644
--- a/services/tuner/TunerFilter.h
+++ b/services/tuner/TunerFilter.h
@@ -116,6 +116,7 @@
shared_ptr<FilterCallback> mFilterCallback;
Mutex mLock;
shared_ptr<TunerService> mTunerService;
+ bool isClosed = false;
};
} // namespace tuner
diff --git a/services/tuner/TunerFrontend.cpp b/services/tuner/TunerFrontend.cpp
index 1e93d95..081596a 100644
--- a/services/tuner/TunerFrontend.cpp
+++ b/services/tuner/TunerFrontend.cpp
@@ -37,7 +37,9 @@
}
TunerFrontend::~TunerFrontend() {
- close();
+ if (!isClosed) {
+ close();
+ }
mFrontend = nullptr;
mId = -1;
}
@@ -89,6 +91,7 @@
}
::ndk::ScopedAStatus TunerFrontend::close() {
+ isClosed = true;
return mFrontend->close();
}
diff --git a/services/tuner/TunerFrontend.h b/services/tuner/TunerFrontend.h
index da471fb..9612124 100644
--- a/services/tuner/TunerFrontend.h
+++ b/services/tuner/TunerFrontend.h
@@ -83,6 +83,7 @@
private:
int mId;
shared_ptr<IFrontend> mFrontend;
+ bool isClosed = false;
};
} // namespace tuner
diff --git a/services/tuner/TunerLnb.cpp b/services/tuner/TunerLnb.cpp
index 2fb6135..d27a978 100644
--- a/services/tuner/TunerLnb.cpp
+++ b/services/tuner/TunerLnb.cpp
@@ -36,7 +36,9 @@
}
TunerLnb::~TunerLnb() {
- close();
+ if (!isClosed) {
+ close();
+ }
mLnb = nullptr;
mId = -1;
}
@@ -70,6 +72,7 @@
}
::ndk::ScopedAStatus TunerLnb::close() {
+ isClosed = true;
return mLnb->close();
}
diff --git a/services/tuner/TunerLnb.h b/services/tuner/TunerLnb.h
index 72988a6..b0222d7 100644
--- a/services/tuner/TunerLnb.h
+++ b/services/tuner/TunerLnb.h
@@ -66,6 +66,7 @@
private:
int mId;
shared_ptr<ILnb> mLnb;
+ bool isClosed = false;
};
} // namespace tuner
diff --git a/services/tuner/TunerTimeFilter.cpp b/services/tuner/TunerTimeFilter.cpp
index 385a063..7a4e200 100644
--- a/services/tuner/TunerTimeFilter.cpp
+++ b/services/tuner/TunerTimeFilter.cpp
@@ -35,7 +35,9 @@
}
TunerTimeFilter::~TunerTimeFilter() {
- close();
+ if (!isClosed) {
+ close();
+ }
mTimeFilter = nullptr;
}
@@ -64,6 +66,7 @@
}
::ndk::ScopedAStatus TunerTimeFilter::close() {
+ isClosed = true;
return mTimeFilter->close();
}
diff --git a/services/tuner/TunerTimeFilter.h b/services/tuner/TunerTimeFilter.h
index 31a47cd..7e40ebe 100644
--- a/services/tuner/TunerTimeFilter.h
+++ b/services/tuner/TunerTimeFilter.h
@@ -45,6 +45,7 @@
private:
shared_ptr<ITimeFilter> mTimeFilter;
+ bool isClosed = false;
};
} // namespace tuner