Merge Android U (ab/10368041)
Bug: 291102124
Merged-In: Ied8e295ae059db07463ba06d3e6d747659b2757f
Change-Id: Ib79234b765308e957b682871b2178b66769f5660
diff --git a/services/camera/libcameraservice/Android.bp b/services/camera/libcameraservice/Android.bp
index 981c569..a45365a 100644
--- a/services/camera/libcameraservice/Android.bp
+++ b/services/camera/libcameraservice/Android.bp
@@ -57,7 +57,6 @@
"api1/client2/StreamingProcessor.cpp",
"api1/client2/JpegProcessor.cpp",
"api1/client2/CallbackProcessor.cpp",
- "api1/client2/JpegCompressor.cpp",
"api1/client2/CaptureSequencer.cpp",
"api1/client2/ZslProcessor.cpp",
"api2/CameraDeviceClient.cpp",
@@ -66,6 +65,7 @@
"api2/DepthCompositeStream.cpp",
"api2/HeicEncoderInfoManager.cpp",
"api2/HeicCompositeStream.cpp",
+ "api2/JpegRCompositeStream.cpp",
"device3/BufferUtils.cpp",
"device3/Camera3Device.cpp",
"device3/Camera3OfflineSession.cpp",
@@ -95,6 +95,12 @@
"hidl/HidlCameraDeviceUser.cpp",
"hidl/HidlCameraService.cpp",
"hidl/Utils.cpp",
+ "aidl/AidlCameraDeviceCallbacks.cpp",
+ "aidl/AidlCameraDeviceUser.cpp",
+ "aidl/AidlCameraService.cpp",
+ "aidl/AidlCameraServiceListener.cpp",
+ "aidl/AidlUtils.cpp",
+ "aidl/DeathPipe.cpp",
"utils/CameraServiceProxyWrapper.cpp",
"utils/CameraThreadState.cpp",
"utils/CameraTraces.cpp",
@@ -114,6 +120,7 @@
],
shared_libs: [
+ "libactivitymanager_aidl",
"libbase",
"libdl",
"libexif",
@@ -137,6 +144,7 @@
"libhidlbase",
"libimage_io",
"libjpeg",
+ "libultrahdr",
"libmedia_codeclist",
"libmedia_omx",
"libmemunreachable",
@@ -151,19 +159,22 @@
"android.frameworks.cameraservice.service@2.2",
"android.frameworks.cameraservice.device@2.0",
"android.frameworks.cameraservice.device@2.1",
+ "android.frameworks.cameraservice.common-V1-ndk",
+ "android.frameworks.cameraservice.service-V1-ndk",
+ "android.frameworks.cameraservice.device-V1-ndk",
"android.hardware.camera.common@1.0",
"android.hardware.camera.provider@2.4",
"android.hardware.camera.provider@2.5",
"android.hardware.camera.provider@2.6",
"android.hardware.camera.provider@2.7",
- "android.hardware.camera.provider-V1-ndk",
+ "android.hardware.camera.provider-V2-ndk",
"android.hardware.camera.device@3.2",
"android.hardware.camera.device@3.3",
"android.hardware.camera.device@3.4",
"android.hardware.camera.device@3.5",
"android.hardware.camera.device@3.6",
"android.hardware.camera.device@3.7",
- "android.hardware.camera.device-V1-ndk",
+ "android.hardware.camera.device-V2-ndk",
"media_permission-aidl-cpp",
],
diff --git a/services/camera/libcameraservice/CameraService.cpp b/services/camera/libcameraservice/CameraService.cpp
index 28bb781..3e7af3d 100644
--- a/services/camera/libcameraservice/CameraService.cpp
+++ b/services/camera/libcameraservice/CameraService.cpp
@@ -35,8 +35,10 @@
#include <android/hardware/ICamera.h>
#include <android/hardware/ICameraClient.h>
+#include <aidl/AidlCameraService.h>
#include <android-base/macros.h>
#include <android-base/parseint.h>
+#include <android/permission/PermissionChecker.h>
#include <binder/ActivityManager.h>
#include <binder/AppOpsManager.h>
#include <binder/IPCThreadState.h>
@@ -68,6 +70,8 @@
#include <private/android_filesystem_config.h>
#include <system/camera_vendor_tags.h>
#include <system/camera_metadata.h>
+#include <binder/IServiceManager.h>
+#include <binder/IActivityManager.h>
#include <camera/StringUtils.h>
#include <system/camera.h>
@@ -82,6 +86,9 @@
namespace {
const char* kPermissionServiceName = "permission";
+ const char* kActivityServiceName = "activity";
+ const char* kSensorPrivacyServiceName = "sensor_privacy";
+ const char* kAppopsServiceName = "appops";
}; // namespace anonymous
namespace android {
@@ -89,6 +96,7 @@
using binder::Status;
using namespace camera3;
using frameworks::cameraservice::service::V2_0::implementation::HidlCameraService;
+using frameworks::cameraservice::service::implementation::AidlCameraService;
using hardware::ICamera;
using hardware::ICameraClient;
using hardware::ICameraServiceListener;
@@ -121,6 +129,8 @@
"android.permission.CAMERA_OPEN_CLOSE_LISTENER");
static const std::string
sCameraInjectExternalCameraPermission("android.permission.CAMERA_INJECT_EXTERNAL_CAMERA");
+// Constant integer for FGS Logging, used to denote the API type for logger
+static const int LOG_FGS_CAMERA_API = 1;
const char *sFileName = "lastOpenSessionDumpFile";
static constexpr int32_t kSystemNativeClientScore = resource_policy::PERCEPTIBLE_APP_ADJ;
static constexpr int32_t kSystemNativeClientState =
@@ -133,7 +143,10 @@
// Set to keep track of logged service error events.
static std::set<std::string> sServiceErrorEventSet;
-CameraService::CameraService() :
+CameraService::CameraService(
+ std::shared_ptr<CameraServiceProxyWrapper> cameraServiceProxyWrapper) :
+ mCameraServiceProxyWrapper(cameraServiceProxyWrapper == nullptr ?
+ std::make_shared<CameraServiceProxyWrapper>() : cameraServiceProxyWrapper),
mEventLog(DEFAULT_EVENT_LOG_LENGTH),
mNumberOfCameras(0),
mNumberOfCamerasWithoutSystemCamera(0),
@@ -153,6 +166,20 @@
return (CameraThreadState::getCallingUid() < AID_APP_START);
}
+// Enable processes with isolated AID to request the binder
+void CameraService::instantiate() {
+ CameraService::publish(true);
+}
+
+void CameraService::onServiceRegistration(const String16& name, const sp<IBinder>&) {
+ if (name != toString16(kAppopsServiceName)) {
+ return;
+ }
+
+ ALOGV("appops service registered. setting camera audio restriction");
+ mAppOps.setCameraAudioRestriction(mAudioRestriction);
+}
+
void CameraService::onFirstRef()
{
@@ -177,16 +204,33 @@
mSensorPrivacyPolicy = new SensorPrivacyPolicy(this);
mSensorPrivacyPolicy->registerSelf();
mInjectionStatusListener = new InjectionStatusListener(this);
- mAppOps.setCameraAudioRestriction(mAudioRestriction);
+
+    // appops function setCameraAudioRestriction uses getService which
+ // is blocking till the appops service is ready. To enable early
+ // boot availability for cameraservice, use checkService which is
+ // non blocking and register for notifications
+ sp<IServiceManager> sm = defaultServiceManager();
+ sp<IBinder> binder = sm->checkService(toString16(kAppopsServiceName));
+ if (!binder) {
+ sm->registerForNotifications(toString16(kAppopsServiceName), this);
+ } else {
+ mAppOps.setCameraAudioRestriction(mAudioRestriction);
+ }
+
sp<HidlCameraService> hcs = HidlCameraService::getInstance(this);
if (hcs->registerAsService() != android::OK) {
- ALOGE("%s: Failed to register default android.frameworks.cameraservice.service@1.0",
+ // Deprecated, so it will fail to register on newer devices
+ ALOGW("%s: Did not register default android.frameworks.cameraservice.service@2.2",
__FUNCTION__);
}
+ if (!AidlCameraService::registerService(this)) {
+ ALOGE("%s: Failed to register default AIDL VNDK CameraService", __FUNCTION__);
+ }
+
// This needs to be last call in this function, so that it's as close to
// ServiceManager::addService() as possible.
- CameraServiceProxyWrapper::pingCameraServiceProxy();
+ mCameraServiceProxyWrapper->pingCameraServiceProxy();
ALOGI("CameraService pinged cameraservice proxy");
}
@@ -265,7 +309,10 @@
cameraId.c_str());
continue;
}
- i->getListener()->onTorchStatusChanged(mapToInterface(status), cameraId);
+ auto ret = i->getListener()->onTorchStatusChanged(mapToInterface(status),
+ cameraId);
+ i->handleBinderStatus(ret, "%s: Failed to trigger onTorchStatusChanged for %d:%d: %d",
+ __FUNCTION__, i->getListenerUid(), i->getListenerPid(), ret.exceptionCode());
}
}
@@ -532,8 +579,12 @@
id.c_str());
continue;
}
- listener->getListener()->onPhysicalCameraStatusChanged(mapToInterface(newStatus),
- id, physicalId);
+ auto ret = listener->getListener()->onPhysicalCameraStatusChanged(
+ mapToInterface(newStatus), id, physicalId);
+ listener->handleBinderStatus(ret,
+ "%s: Failed to trigger onPhysicalCameraStatusChanged for %d:%d: %d",
+ __FUNCTION__, listener->getListenerUid(), listener->getListenerPid(),
+ ret.exceptionCode());
}
}
}
@@ -574,7 +625,10 @@
int32_t newStrengthLevel) {
Mutex::Autolock lock(mStatusListenerLock);
for (auto& i : mListenerList) {
- i->getListener()->onTorchStrengthLevelChanged(cameraId, newStrengthLevel);
+ auto ret = i->getListener()->onTorchStrengthLevelChanged(cameraId, newStrengthLevel);
+ i->handleBinderStatus(ret,
+ "%s: Failed to trigger onTorchStrengthLevelChanged for %d:%d: %d", __FUNCTION__,
+ i->getListenerUid(), i->getListenerPid(), ret.exceptionCode());
}
}
@@ -631,11 +685,18 @@
broadcastTorchModeStatus(cameraId, newStatus, systemCameraKind);
}
-static bool hasPermissionsForSystemCamera(int callingPid, int callingUid,
- bool logPermissionFailure = false) {
- return checkPermission(toString16(sSystemCameraPermission), callingPid, callingUid,
- logPermissionFailure) &&
- checkPermission(toString16(sCameraPermission), callingPid, callingUid);
+static bool hasPermissionsForSystemCamera(int callingPid, int callingUid) {
+ permission::PermissionChecker permissionChecker;
+ AttributionSourceState attributionSource{};
+ attributionSource.pid = callingPid;
+ attributionSource.uid = callingUid;
+ bool checkPermissionForSystemCamera = permissionChecker.checkPermissionForPreflight(
+ toString16(sSystemCameraPermission), attributionSource, String16(),
+ AppOpsManager::OP_NONE) != permission::PermissionChecker::PERMISSION_HARD_DENIED;
+ bool checkPermissionForCamera = permissionChecker.checkPermissionForPreflight(
+ toString16(sCameraPermission), attributionSource, String16(),
+ AppOpsManager::OP_NONE) != permission::PermissionChecker::PERMISSION_HARD_DENIED;
+ return checkPermissionForSystemCamera && checkPermissionForCamera;
}
Status CameraService::getNumberOfCameras(int32_t type, int32_t* numCameras) {
@@ -714,8 +775,14 @@
const std::vector<std::string> *deviceIds = &mNormalDeviceIdsWithoutSystemCamera;
auto callingPid = CameraThreadState::getCallingPid();
auto callingUid = CameraThreadState::getCallingUid();
- if (checkPermission(toString16(sSystemCameraPermission), callingPid, callingUid,
- /*logPermissionFailure*/false) || getpid() == callingPid) {
+ permission::PermissionChecker permissionChecker;
+ AttributionSourceState attributionSource{};
+ attributionSource.pid = callingPid;
+ attributionSource.uid = callingUid;
+ bool checkPermissionForSystemCamera = permissionChecker.checkPermissionForPreflight(
+ toString16(sSystemCameraPermission), attributionSource, String16(),
+ AppOpsManager::OP_NONE) != permission::PermissionChecker::PERMISSION_HARD_DENIED;
+ if (checkPermissionForSystemCamera || getpid() == callingPid) {
deviceIds = &mNormalDeviceIds;
}
if (cameraIdInt < 0 || cameraIdInt >= static_cast<int>(deviceIds->size())) {
@@ -784,9 +851,16 @@
// If it's not calling from cameraserver, check the permission only if
// android.permission.CAMERA is required. If android.permission.SYSTEM_CAMERA was needed,
// it would've already been checked in shouldRejectSystemCameraConnection.
+ permission::PermissionChecker permissionChecker;
+ AttributionSourceState attributionSource{};
+ attributionSource.pid = callingPid;
+ attributionSource.uid = callingUid;
+ bool checkPermissionForCamera = permissionChecker.checkPermissionForPreflight(
+ toString16(sCameraPermission), attributionSource, String16(),
+ AppOpsManager::OP_NONE) != permission::PermissionChecker::PERMISSION_HARD_DENIED;
if ((callingPid != getpid()) &&
(deviceKind != SystemCameraKind::SYSTEM_ONLY_CAMERA) &&
- !checkPermission(toString16(sCameraPermission), callingPid, callingUid)) {
+ !checkPermissionForCamera) {
res = cameraInfo->removePermissionEntries(
mCameraProviderManager->getProviderTagIdLocked(cameraId),
&tagsRemoved);
@@ -965,17 +1039,20 @@
}
if (effectiveApiLevel == API_1) { // Camera1 API route
sp<ICameraClient> tmp = static_cast<ICameraClient*>(cameraCb.get());
- *client = new Camera2Client(cameraService, tmp, packageName, featureId,
- cameraId, api1CameraId, facing, sensorOrientation, clientPid, clientUid,
- servicePid, overrideForPerfClass, overrideToPortrait, forceSlowJpegMode);
+ *client = new Camera2Client(cameraService, tmp, cameraService->mCameraServiceProxyWrapper,
+ packageName, featureId, cameraId,
+ api1CameraId, facing, sensorOrientation,
+ clientPid, clientUid, servicePid, overrideForPerfClass, overrideToPortrait,
+ forceSlowJpegMode);
ALOGI("%s: Camera1 API (legacy), override to portrait %d, forceSlowJpegMode %d",
__FUNCTION__, overrideToPortrait, forceSlowJpegMode);
} else { // Camera2 API route
sp<hardware::camera2::ICameraDeviceCallbacks> tmp =
static_cast<hardware::camera2::ICameraDeviceCallbacks*>(cameraCb.get());
- *client = new CameraDeviceClient(cameraService, tmp, packageName,
- systemNativeClient, featureId, cameraId, facing, sensorOrientation,
- clientPid, clientUid, servicePid, overrideForPerfClass, overrideToPortrait);
+ *client = new CameraDeviceClient(cameraService, tmp,
+ cameraService->mCameraServiceProxyWrapper, packageName, systemNativeClient,
+ featureId, cameraId, facing, sensorOrientation, clientPid, clientUid, servicePid,
+ overrideForPerfClass, overrideToPortrait);
ALOGI("%s: Camera2 API, override to portrait %d", __FUNCTION__, overrideToPortrait);
}
return Status::ok();
@@ -1218,6 +1295,9 @@
Status CameraService::validateClientPermissionsLocked(const std::string& cameraId,
const std::string& clientName, int& clientUid, int& clientPid,
/*out*/int& originalClientPid) const {
+ permission::PermissionChecker permissionChecker;
+ AttributionSourceState attributionSource{};
+
int callingPid = CameraThreadState::getCallingPid();
int callingUid = CameraThreadState::getCallingUid();
@@ -1264,9 +1344,14 @@
// If it's not calling from cameraserver, check the permission if the
// device isn't a system only camera (shouldRejectSystemCameraConnection already checks for
// android.permission.SYSTEM_CAMERA for system only camera devices).
+ attributionSource.pid = clientPid;
+ attributionSource.uid = clientUid;
+ attributionSource.packageName = clientName;
+ bool checkPermissionForCamera = permissionChecker.checkPermissionForPreflight(
+ toString16(sCameraPermission), attributionSource, String16(),
+ AppOpsManager::OP_NONE) != permission::PermissionChecker::PERMISSION_HARD_DENIED;
if (callingPid != getpid() &&
- (deviceKind != SystemCameraKind::SYSTEM_ONLY_CAMERA) &&
- !checkPermission(toString16(sCameraPermission), clientPid, clientUid)) {
+ (deviceKind != SystemCameraKind::SYSTEM_ONLY_CAMERA) && !checkPermissionForCamera) {
ALOGE("Permission Denial: can't use the camera pid=%d, uid=%d", clientPid, clientUid);
return STATUS_ERROR_FMT(ERROR_PERMISSION_DENIED,
"Caller \"%s\" (PID %d, UID %d) cannot open camera \"%s\" without camera permission",
@@ -1605,6 +1690,15 @@
}
*device = client;
+
+ const sp<IServiceManager> sm(defaultServiceManager());
+ const auto& mActivityManager = getActivityManager();
+ if (mActivityManager) {
+ mActivityManager->logFgsApiBegin(LOG_FGS_CAMERA_API,
+ CameraThreadState::getCallingUid(),
+ CameraThreadState::getCallingPid());
+ }
+
return ret;
}
@@ -1656,7 +1750,7 @@
// characteristics) even if clients don't have android.permission.CAMERA. We do not want the
// same behavior for system camera devices.
if (!systemClient && systemCameraKind == SystemCameraKind::SYSTEM_ONLY_CAMERA &&
- !hasPermissionsForSystemCamera(cPid, cUid, /*logPermissionFailure*/true)) {
+ !hasPermissionsForSystemCamera(cPid, cUid)) {
ALOGW("Rejecting access to system only camera %s, inadequete permissions",
cameraId.c_str());
return true;
@@ -1703,7 +1797,7 @@
clientUserId = multiuser_get_user_id(callingUid);
}
- if (CameraServiceProxyWrapper::isCameraDisabled(clientUserId)) {
+ if (mCameraServiceProxyWrapper->isCameraDisabled(clientUserId)) {
std::string msg = "Camera disabled by device policy";
ALOGE("%s: %s", __FUNCTION__, msg.c_str());
return STATUS_ERROR(ERROR_DISABLED, msg.c_str());
@@ -1711,7 +1805,8 @@
// enforce system camera permissions
if (oomScoreOffset > 0 &&
- !hasPermissionsForSystemCamera(callingPid, CameraThreadState::getCallingUid())) {
+ !hasPermissionsForSystemCamera(callingPid, CameraThreadState::getCallingUid()) &&
+ !isTrustedCallingUid(CameraThreadState::getCallingUid())) {
std::string msg = fmt::sprintf("Cannot change the priority of a client %s pid %d for "
"camera id %s without SYSTEM_CAMERA permissions",
clientPackageNameAdj.c_str(), callingPid, cameraId.c_str());
@@ -1747,6 +1842,13 @@
ALOGE("%s: Error while creating the file: %s", __FUNCTION__, sFileName);
}
}
+ const sp<IServiceManager> sm(defaultServiceManager());
+ const auto& mActivityManager = getActivityManager();
+ if (mActivityManager) {
+ mActivityManager->logFgsApiBegin(LOG_FGS_CAMERA_API,
+ CameraThreadState::getCallingUid(),
+ CameraThreadState::getCallingPid());
+ }
return ret;
}
@@ -1980,8 +2082,17 @@
client->setRotateAndCropOverride(rotateAndCropMode);
} else {
client->setRotateAndCropOverride(
- CameraServiceProxyWrapper::getRotateAndCropOverride(
- clientPackageName, facing, multiuser_get_user_id(clientUid)));
+ mCameraServiceProxyWrapper->getRotateAndCropOverride(
+ clientPackageName, facing, multiuser_get_user_id(clientUid)));
+ }
+
+ // Set autoframing override behaviour
+ if (mOverrideAutoframingMode != ANDROID_CONTROL_AUTOFRAMING_AUTO) {
+ client->setAutoframingOverride(mOverrideAutoframingMode);
+ } else {
+ client->setAutoframingOverride(
+ mCameraServiceProxyWrapper->getAutoframingOverride(
+ clientPackageName));
}
// Set camera muting behavior
@@ -2024,6 +2135,7 @@
client->setImageDumpMask(mImageDumpMask);
client->setStreamUseCaseOverrides(mStreamUseCaseOverrides);
+ client->setZoomOverride(mZoomOverrideValue);
} // lock is destroyed, allow further connect calls
// Important: release the mutex here so the client can call back into the service from its
@@ -2031,7 +2143,7 @@
device = client;
int32_t openLatencyMs = ns2ms(systemTime() - openTimeNs);
- CameraServiceProxyWrapper::logOpen(cameraId, facing, clientPackageName,
+ mCameraServiceProxyWrapper->logOpen(cameraId, facing, clientPackageName,
effectiveApiLevel, isNonSystemNdk, openLatencyMs);
{
@@ -2098,6 +2210,10 @@
onlineClientDesc->getOwnerId(), onlinePriority.getState(),
// native clients don't have offline processing support.
/*ommScoreOffset*/ 0, /*systemNativeClient*/false);
+ if (offlineClientDesc == nullptr) {
+ ALOGE("%s: Offline client descriptor was NULL", __FUNCTION__);
+ return BAD_VALUE;
+ }
// Allow only one offline device per camera
auto incompatibleClients = mActiveClientManager.getIncompatibleClients(offlineClientDesc);
@@ -2461,9 +2577,20 @@
for (const auto& it : mListenerList) {
auto ret = it->getListener()->onCameraAccessPrioritiesChanged();
- if (!ret.isOk()) {
- ALOGE("%s: Failed to trigger permission callback: %d", __FUNCTION__,
- ret.exceptionCode());
+ it->handleBinderStatus(ret, "%s: Failed to trigger permission callback for %d:%d: %d",
+ __FUNCTION__, it->getListenerUid(), it->getListenerPid(), ret.exceptionCode());
+ }
+}
+
+void CameraService::notifyMonitoredUids(const std::unordered_set<uid_t> ¬ifyUidSet) {
+ Mutex::Autolock lock(mStatusListenerLock);
+
+ for (const auto& it : mListenerList) {
+ if (notifyUidSet.find(it->getListenerUid()) != notifyUidSet.end()) {
+ ALOGV("%s: notifying uid %d", __FUNCTION__, it->getListenerUid());
+ auto ret = it->getListener()->onCameraAccessPrioritiesChanged();
+ it->handleBinderStatus(ret, "%s: Failed to trigger permission callback for %d:%d: %d",
+ __FUNCTION__, it->getListenerUid(), it->getListenerPid(), ret.exceptionCode());
}
}
}
@@ -2528,7 +2655,7 @@
const auto basicClient = current->getValue();
if (basicClient.get() != nullptr && !basicClient->getOverrideToPortrait()) {
basicClient->setRotateAndCropOverride(
- CameraServiceProxyWrapper::getRotateAndCropOverride(
+ mCameraServiceProxyWrapper->getRotateAndCropOverride(
basicClient->getPackageName(),
basicClient->getCameraFacing(),
multiuser_get_user_id(basicClient->getClientUid())));
@@ -2599,8 +2726,14 @@
// Check for camera permissions
int callingPid = CameraThreadState::getCallingPid();
int callingUid = CameraThreadState::getCallingUid();
- if ((callingPid != getpid()) && !checkPermission(toString16(sCameraPermission), callingPid,
- callingUid)) {
+ permission::PermissionChecker permissionChecker;
+ AttributionSourceState attributionSource{};
+ attributionSource.pid = callingPid;
+ attributionSource.uid = callingUid;
+ bool checkPermissionForCamera = permissionChecker.checkPermissionForPreflight(
+ toString16(sCameraPermission), attributionSource, String16(),
+ AppOpsManager::OP_NONE) != permission::PermissionChecker::PERMISSION_HARD_DENIED;
+ if ((callingPid != getpid()) && !checkPermissionForCamera) {
ALOGE("%s: pid %d doesn't have camera permissions", __FUNCTION__, callingPid);
return STATUS_ERROR(ERROR_PERMISSION_DENIED,
"android.permission.CAMERA needed to call"
@@ -2647,8 +2780,13 @@
auto clientUid = CameraThreadState::getCallingUid();
auto clientPid = CameraThreadState::getCallingPid();
- bool openCloseCallbackAllowed = checkPermission(toString16(sCameraOpenCloseListenerPermission),
- clientPid, clientUid, /*logPermissionFailure*/false);
+ permission::PermissionChecker permissionChecker;
+ AttributionSourceState attributionSource{};
+ attributionSource.uid = clientUid;
+ attributionSource.pid = clientPid;
+ bool openCloseCallbackAllowed = permissionChecker.checkPermissionForPreflight(
+ toString16(sCameraOpenCloseListenerPermission), attributionSource, String16(),
+ AppOpsManager::OP_NONE) != permission::PermissionChecker::PERMISSION_HARD_DENIED;
Mutex::Autolock lock(mServiceLock);
@@ -2677,7 +2815,7 @@
// permissions the listener process has / whether it is a vendor listener. Since it might be
// eligible to listen to other camera ids.
mListenerList.emplace_back(serviceListener);
- mUidPolicy->registerMonitorUid(clientUid);
+ mUidPolicy->registerMonitorUid(clientUid, /*openCamera*/false);
}
/* Collect current devices and status */
@@ -2745,7 +2883,7 @@
Mutex::Autolock lock(mStatusListenerLock);
for (auto it = mListenerList.begin(); it != mListenerList.end(); it++) {
if (IInterface::asBinder((*it)->getListener()) == IInterface::asBinder(listener)) {
- mUidPolicy->unregisterMonitorUid((*it)->getListenerUid());
+ mUidPolicy->unregisterMonitorUid((*it)->getListenerUid(), /*closeCamera*/false);
IInterface::asBinder(listener)->unlinkToDeath(*it);
mListenerList.erase(it);
return Status::ok();
@@ -2910,6 +3048,13 @@
return binder::Status::ok();
}
+Status CameraService::reportExtensionSessionStats(
+ const hardware::CameraExtensionSessionStats& stats, std::string* sessionKey /*out*/) {
+ ALOGV("%s: reported %s", __FUNCTION__, stats.toString().c_str());
+ *sessionKey = mCameraServiceProxyWrapper->updateExtensionStats(stats);
+ return Status::ok();
+}
+
void CameraService::removeByClient(const BasicClient* client) {
Mutex::Autolock lock(mServiceLock);
for (auto& i : mActiveClientManager.getAll()) {
@@ -3386,6 +3531,13 @@
// client shouldn't be able to call into us anymore
mClientPid = 0;
+ const auto& mActivityManager = getActivityManager();
+ if (mActivityManager) {
+ mActivityManager->logFgsApiEnd(LOG_FGS_CAMERA_API,
+ CameraThreadState::getCallingUid(),
+ CameraThreadState::getCallingPid());
+ }
+
return res;
}
@@ -3478,6 +3630,11 @@
mClientPackageName);
bool isCameraPrivacyEnabled =
sCameraService->mSensorPrivacyPolicy->isCameraPrivacyEnabled();
+ // We don't want to return EACCESS if the CameraPrivacy is enabled.
+ // We prefer to successfully open the camera and perform camera muting
+ // or blocking in connectHelper as handleAppOpMode can be called before the
+ // connection has been fully established and at that time camera muting
+ // capabilities are unknown.
if (!isUidActive || !isCameraPrivacyEnabled) {
ALOGI("Camera %s: Access for \"%s\" has been restricted",
mCameraIdStr.c_str(), mClientPackageName.c_str());
@@ -3516,7 +3673,7 @@
// Transition device availability listeners from PRESENT -> NOT_AVAILABLE
sCameraService->updateStatus(StatusInternal::NOT_AVAILABLE, mCameraIdStr);
- sCameraService->mUidPolicy->registerMonitorUid(mClientUid);
+ sCameraService->mUidPolicy->registerMonitorUid(mClientUid, /*openCamera*/true);
// Notify listeners of camera open/close status
sCameraService->updateOpenCloseStatus(mCameraIdStr, true/*open*/, mClientPackageName);
@@ -3625,7 +3782,7 @@
}
mOpsCallback.clear();
- sCameraService->mUidPolicy->unregisterMonitorUid(mClientUid);
+ sCameraService->mUidPolicy->unregisterMonitorUid(mClientUid, /*closeCamera*/true);
// Notify listeners of camera open/close status
sCameraService->updateOpenCloseStatus(mCameraIdStr, false/*open*/, mClientPackageName);
@@ -3732,20 +3889,44 @@
// UidPolicy
// ----------------------------------------------------------------------------
-void CameraService::UidPolicy::registerSelf() {
+void CameraService::UidPolicy::registerWithActivityManager() {
Mutex::Autolock _l(mUidLock);
+ int32_t emptyUidArray[] = { };
if (mRegistered) return;
status_t res = mAm.linkToDeath(this);
- mAm.registerUidObserver(this, ActivityManager::UID_OBSERVER_GONE
+ mAm.registerUidObserverForUids(this, ActivityManager::UID_OBSERVER_GONE
| ActivityManager::UID_OBSERVER_IDLE
| ActivityManager::UID_OBSERVER_ACTIVE | ActivityManager::UID_OBSERVER_PROCSTATE
| ActivityManager::UID_OBSERVER_PROC_OOM_ADJ,
ActivityManager::PROCESS_STATE_UNKNOWN,
- toString16(kServiceName));
+ toString16(kServiceName), emptyUidArray, 0, mObserverToken);
if (res == OK) {
mRegistered = true;
ALOGV("UidPolicy: Registered with ActivityManager");
+ } else {
+ ALOGE("UidPolicy: Failed to register with ActivityManager: 0x%08x", res);
+ }
+}
+
+void CameraService::UidPolicy::onServiceRegistration(const String16& name, const sp<IBinder>&) {
+ if (name != toString16(kActivityServiceName)) {
+ return;
+ }
+
+ registerWithActivityManager();
+}
+
+void CameraService::UidPolicy::registerSelf() {
+ // Use check service to see if the activity service is available
+ // If not available then register for notifications, instead of blocking
+ // till the service is ready
+ sp<IServiceManager> sm = defaultServiceManager();
+ sp<IBinder> binder = sm->checkService(toString16(kActivityServiceName));
+ if (!binder) {
+ sm->registerForNotifications(toString16(kActivityServiceName), this);
+ } else {
+ registerWithActivityManager();
}
}
@@ -3804,24 +3985,54 @@
}
}
-void CameraService::UidPolicy::onUidProcAdjChanged(uid_t uid) {
- bool procAdjChange = false;
+/**
+ * When the OOM adj of the uid owning the camera changes, a different uid waiting on camera
+ * privileges may take precedence if the owner's new OOM adj is greater than the waiting package.
+ * Here, we track which monitoredUid has the camera, and track its adj relative to other
+ * monitoredUids. If it is revised above some other monitoredUid, signal
+ * onCameraAccessPrioritiesChanged. This only needs to capture the case where there are two
+ * foreground apps in split screen - state changes will capture all other cases.
+ */
+void CameraService::UidPolicy::onUidProcAdjChanged(uid_t uid, int32_t adj) {
+ std::unordered_set<uid_t> notifyUidSet;
{
Mutex::Autolock _l(mUidLock);
- if (mMonitoredUids.find(uid) != mMonitoredUids.end()) {
- procAdjChange = true;
+ auto it = mMonitoredUids.find(uid);
+
+ if (it != mMonitoredUids.end()) {
+ if (it->second.hasCamera) {
+ for (auto &monitoredUid : mMonitoredUids) {
+ if (monitoredUid.first != uid && adj > monitoredUid.second.procAdj) {
+ ALOGV("%s: notify uid %d", __FUNCTION__, monitoredUid.first);
+ notifyUidSet.emplace(monitoredUid.first);
+ }
+ }
+ ALOGV("%s: notify uid %d", __FUNCTION__, uid);
+ notifyUidSet.emplace(uid);
+ } else {
+ for (auto &monitoredUid : mMonitoredUids) {
+ if (monitoredUid.second.hasCamera && adj < monitoredUid.second.procAdj) {
+ ALOGV("%s: notify uid %d", __FUNCTION__, uid);
+ notifyUidSet.emplace(uid);
+ }
+ }
+ }
+ it->second.procAdj = adj;
}
}
- if (procAdjChange) {
+ if (notifyUidSet.size() > 0) {
sp<CameraService> service = mService.promote();
if (service != nullptr) {
- service->notifyMonitoredUids();
+ service->notifyMonitoredUids(notifyUidSet);
}
}
}
-void CameraService::UidPolicy::registerMonitorUid(uid_t uid) {
+/**
+ * Register a uid for monitoring, and note whether it owns a camera.
+ */
+void CameraService::UidPolicy::registerMonitorUid(uid_t uid, bool openCamera) {
Mutex::Autolock _l(mUidLock);
auto it = mMonitoredUids.find(uid);
if (it != mMonitoredUids.end()) {
@@ -3829,18 +4040,36 @@
} else {
MonitoredUid monitoredUid;
monitoredUid.procState = ActivityManager::PROCESS_STATE_NONEXISTENT;
+ monitoredUid.procAdj = resource_policy::UNKNOWN_ADJ;
monitoredUid.refCount = 1;
- mMonitoredUids.emplace(std::pair<uid_t, MonitoredUid>(uid, monitoredUid));
+ it = mMonitoredUids.emplace(std::pair<uid_t, MonitoredUid>(uid, monitoredUid)).first;
+ status_t res = mAm.addUidToObserver(mObserverToken, toString16(kServiceName), uid);
+ if (res != OK) {
+ ALOGE("UidPolicy: Failed to add uid to observer: 0x%08x", res);
+ }
+ }
+
+ if (openCamera) {
+ it->second.hasCamera = true;
}
}
-void CameraService::UidPolicy::unregisterMonitorUid(uid_t uid) {
+/**
+ * Unregister a uid for monitoring, and note whether it lost ownership of a camera.
+ */
+void CameraService::UidPolicy::unregisterMonitorUid(uid_t uid, bool closeCamera) {
Mutex::Autolock _l(mUidLock);
auto it = mMonitoredUids.find(uid);
if (it != mMonitoredUids.end()) {
it->second.refCount--;
if (it->second.refCount == 0) {
mMonitoredUids.erase(it);
+ status_t res = mAm.removeUidFromObserver(mObserverToken, toString16(kServiceName), uid);
+ if (res != OK) {
+ ALOGE("UidPolicy: Failed to remove uid from observer: 0x%08x", res);
+ }
+ } else if (closeCamera) {
+ it->second.hasCamera = false;
}
} else {
ALOGE("%s: Trying to unregister uid: %d which is not monitored!", __FUNCTION__, uid);
@@ -3962,7 +4191,9 @@
// ----------------------------------------------------------------------------
// SensorPrivacyPolicy
// ----------------------------------------------------------------------------
-void CameraService::SensorPrivacyPolicy::registerSelf() {
+
+void CameraService::SensorPrivacyPolicy::registerWithSensorPrivacyManager()
+{
Mutex::Autolock _l(mSensorPrivacyLock);
if (mRegistered) {
return;
@@ -3977,6 +4208,27 @@
}
}
+void CameraService::SensorPrivacyPolicy::onServiceRegistration(const String16& name,
+ const sp<IBinder>&) {
+ if (name != toString16(kSensorPrivacyServiceName)) {
+ return;
+ }
+
+ registerWithSensorPrivacyManager();
+}
+
+void CameraService::SensorPrivacyPolicy::registerSelf() {
+    // Use checkService to see if the sensor_privacy service is available
+ // If service is not available then register for notification
+ sp<IServiceManager> sm = defaultServiceManager();
+ sp<IBinder> binder = sm->checkService(toString16(kSensorPrivacyServiceName));
+ if (!binder) {
+ sm->registerForNotifications(toString16(kSensorPrivacyServiceName),this);
+ } else {
+ registerWithSensorPrivacyManager();
+ }
+}
+
void CameraService::SensorPrivacyPolicy::unregisterSelf() {
Mutex::Autolock _l(mSensorPrivacyLock);
mSpm.removeSensorPrivacyListener(this);
@@ -3986,6 +4238,10 @@
}
bool CameraService::SensorPrivacyPolicy::isSensorPrivacyEnabled() {
+ if (!mRegistered) {
+ registerWithSensorPrivacyManager();
+ }
+
Mutex::Autolock _l(mSensorPrivacyLock);
return mSensorPrivacyEnabled;
}
@@ -4692,8 +4948,12 @@
cameraId.c_str());
continue;
}
- listener->getListener()->onStatusChanged(mapToInterface(status),
+ auto ret = listener->getListener()->onStatusChanged(mapToInterface(status),
cameraId);
+ listener->handleBinderStatus(ret,
+ "%s: Failed to trigger onStatusChanged callback for %d:%d: %d",
+ __FUNCTION__, listener->getListenerUid(), listener->getListenerPid(),
+ ret.exceptionCode());
}
});
}
@@ -4725,10 +4985,10 @@
} else {
ret = it->getListener()->onCameraClosed(cameraId);
}
- if (!ret.isOk()) {
- ALOGE("%s: Failed to trigger onCameraOpened/onCameraClosed callback: %d", __FUNCTION__,
- ret.exceptionCode());
- }
+
+ it->handleBinderStatus(ret,
+ "%s: Failed to trigger onCameraOpened/onCameraClosed callback for %d:%d: %d",
+ __FUNCTION__, it->getListenerUid(), it->getListenerPid(), ret.exceptionCode());
}
}
@@ -4829,8 +5089,12 @@
physicalCameraId.c_str());
continue;
}
- listener->getListener()->onPhysicalCameraStatusChanged(status,
+ auto ret = listener->getListener()->onPhysicalCameraStatusChanged(status,
logicalCameraId, physicalCameraId);
+ listener->handleBinderStatus(ret,
+ "%s: Failed to trigger onPhysicalCameraStatusChanged for %d:%d: %d",
+ __FUNCTION__, listener->getListenerUid(), listener->getListenerPid(),
+ ret.exceptionCode());
}
}
}
@@ -4878,6 +5142,10 @@
return handleSetRotateAndCrop(args);
} else if (args.size() >= 1 && args[0] == toString16("get-rotate-and-crop")) {
return handleGetRotateAndCrop(out);
+ } else if (args.size() >= 2 && args[0] == toString16("set-autoframing")) {
+ return handleSetAutoframing(args);
+ } else if (args.size() >= 1 && args[0] == toString16("get-autoframing")) {
+ return handleGetAutoframing(out);
} else if (args.size() >= 2 && args[0] == toString16("set-image-dump-mask")) {
return handleSetImageDumpMask(args);
} else if (args.size() >= 1 && args[0] == toString16("get-image-dump-mask")) {
@@ -4887,7 +5155,10 @@
} else if (args.size() >= 2 && args[0] == toString16("set-stream-use-case-override")) {
return handleSetStreamUseCaseOverrides(args);
} else if (args.size() >= 1 && args[0] == toString16("clear-stream-use-case-override")) {
- return handleClearStreamUseCaseOverrides();
+ handleClearStreamUseCaseOverrides();
+ return OK;
+ } else if (args.size() >= 1 && args[0] == toString16("set-zoom-override")) {
+ return handleSetZoomOverride(args);
} else if (args.size() >= 2 && args[0] == toString16("watch")) {
return handleWatchCommand(args, in, out);
} else if (args.size() >= 2 && args[0] == toString16("set-watchdog")) {
@@ -4985,6 +5256,34 @@
return OK;
}
+status_t CameraService::handleSetAutoframing(const Vector<String16>& args) {
+ char* end;
+ int autoframingValue = (int) strtol(toStdString(args[1]).c_str(), &end, /*base=*/10);
+ if ((*end != '\0') ||
+ (autoframingValue != ANDROID_CONTROL_AUTOFRAMING_OFF &&
+ autoframingValue != ANDROID_CONTROL_AUTOFRAMING_ON &&
+ autoframingValue != ANDROID_CONTROL_AUTOFRAMING_AUTO)) {
+ return BAD_VALUE;
+ }
+
+ Mutex::Autolock lock(mServiceLock);
+ mOverrideAutoframingMode = autoframingValue;
+
+ if (autoframingValue == ANDROID_CONTROL_AUTOFRAMING_AUTO) return OK;
+
+ const auto clients = mActiveClientManager.getAll();
+ for (auto& current : clients) {
+ if (current != nullptr) {
+ const auto basicClient = current->getValue();
+ if (basicClient.get() != nullptr) {
+ basicClient->setAutoframingOverride(autoframingValue);
+ }
+ }
+ }
+
+ return OK;
+}
+
status_t CameraService::handleSetCameraServiceWatchdog(const Vector<String16>& args) {
int enableWatchdog = atoi(toStdString(args[1]).c_str());
@@ -5013,6 +5312,12 @@
return dprintf(out, "rotateAndCrop override: %d\n", mOverrideRotateAndCropMode);
}
+status_t CameraService::handleGetAutoframing(int out) {
+ Mutex::Autolock lock(mServiceLock);
+
+ return dprintf(out, "autoframing override: %d\n", mOverrideAutoframingMode);
+}
+
status_t CameraService::handleSetImageDumpMask(const Vector<String16>& args) {
char *endPtr;
errno = 0;
@@ -5077,6 +5382,8 @@
useCase = ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_PREVIEW_VIDEO_STILL;
} else if (arg == "VIDEO_CALL") {
useCase = ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_VIDEO_CALL;
+ } else if (arg == "CROPPED_RAW") {
+ useCase = ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_CROPPED_RAW;
} else {
ALOGE("%s: Invalid stream use case %s", __FUNCTION__, arg.c_str());
return BAD_VALUE;
@@ -5090,9 +5397,35 @@
return OK;
}
-status_t CameraService::handleClearStreamUseCaseOverrides() {
+void CameraService::handleClearStreamUseCaseOverrides() {
Mutex::Autolock lock(mServiceLock);
mStreamUseCaseOverrides.clear();
+}
+
+status_t CameraService::handleSetZoomOverride(const Vector<String16>& args) {
+ char* end;
+ int zoomOverrideValue = strtol(toStdString(args[1]).c_str(), &end, /*base=*/10);
+ if ((*end != '\0') ||
+ (zoomOverrideValue != -1 &&
+ zoomOverrideValue != ANDROID_CONTROL_SETTINGS_OVERRIDE_OFF &&
+ zoomOverrideValue != ANDROID_CONTROL_SETTINGS_OVERRIDE_ZOOM)) {
+ return BAD_VALUE;
+ }
+
+ Mutex::Autolock lock(mServiceLock);
+ mZoomOverrideValue = zoomOverrideValue;
+
+ const auto clients = mActiveClientManager.getAll();
+ for (auto& current : clients) {
+ if (current != nullptr) {
+ const auto basicClient = current->getValue();
+ if (basicClient.get() != nullptr) {
+ if (basicClient->supportsZoomOverride()) {
+ basicClient->setZoomOverride(mZoomOverrideValue);
+ }
+ }
+ }
+ }
return OK;
}
@@ -5433,6 +5766,9 @@
" set-rotate-and-crop <ROTATION> overrides the rotate-and-crop value for AUTO backcompat\n"
" Valid values 0=0 deg, 1=90 deg, 2=180 deg, 3=270 deg, 4=No override\n"
" get-rotate-and-crop returns the current override rotate-and-crop value\n"
+ " set-autoframing <VALUE> overrides the autoframing value for AUTO\n"
+ " Valid values 0=false, 1=true, 2=auto\n"
+ " get-autoframing returns the current override autoframing value\n"
" set-image-dump-mask <MASK> specifies the formats to be saved to disk\n"
" Valid values 0=OFF, 1=ON for JPEG\n"
" get-image-dump-mask returns the current image-dump-mask value\n"
@@ -5444,8 +5780,10 @@
" last use case is assigned to all the remaining streams. In case of multiple\n"
" streams with the same resolution, the tie-breaker is (JPEG, RAW, YUV, and PRIV)\n"
" Valid values are (case sensitive): DEFAULT, PREVIEW, STILL_CAPTURE, VIDEO_RECORD,\n"
- " PREVIEW_VIDEO_STILL, VIDEO_CALL\n"
+ " PREVIEW_VIDEO_STILL, VIDEO_CALL, CROPPED_RAW\n"
" clear-stream-use-case-override clear the stream use case override\n"
+ " set-zoom-override <-1/0/1> enable or disable zoom override\n"
+ " Valid values -1: do not override, 0: override to OFF, 1: override to ZOOM\n"
" watch <start|stop|dump|print|clear> manages tag monitoring in connected clients\n"
" help print this message\n");
}
diff --git a/services/camera/libcameraservice/CameraService.h b/services/camera/libcameraservice/CameraService.h
index 3ee1e6e..bc65293 100644
--- a/services/camera/libcameraservice/CameraService.h
+++ b/services/camera/libcameraservice/CameraService.h
@@ -29,6 +29,8 @@
#include <binder/ActivityManager.h>
#include <binder/AppOpsManager.h>
#include <binder/BinderService.h>
+#include <binder/IServiceManager.h>
+#include <binder/IActivityManager.h>
#include <binder/IAppOpsCallback.h>
#include <binder/IUidObserver.h>
#include <hardware/camera.h>
@@ -48,6 +50,7 @@
#include "utils/AutoConditionLock.h"
#include "utils/ClientManager.h"
#include "utils/IPCTransport.h"
+#include "utils/CameraServiceProxyWrapper.h"
#include <set>
#include <string>
@@ -70,7 +73,8 @@
public BinderService<CameraService>,
public virtual ::android::hardware::BnCameraService,
public virtual IBinder::DeathRecipient,
- public virtual CameraProviderManager::StatusListener
+ public virtual CameraProviderManager::StatusListener,
+ public virtual IServiceManager::LocalRegistrationCallback
{
friend class BinderService<CameraService>;
friend class CameraOfflineSessionClient;
@@ -97,10 +101,19 @@
// Event log ID
static const int SN_EVENT_LOG_ID = 0x534e4554;
+ // Register camera service
+ static void instantiate();
+
// Implementation of BinderService<T>
static char const* getServiceName() { return "media.camera"; }
- CameraService();
+ // Implementation of IServiceManager::LocalRegistrationCallback
+ virtual void onServiceRegistration(const String16& name, const sp<IBinder>& binder) override;
+
+ // Non-null arguments for cameraServiceProxyWrapper should be provided for
+ // testing purposes only.
+ CameraService(std::shared_ptr<CameraServiceProxyWrapper>
+ cameraServiceProxyWrapper = nullptr);
virtual ~CameraService();
/////////////////////////////////////////////////////////////////////
@@ -206,6 +219,9 @@
/*out*/
sp<hardware::camera2::ICameraInjectionSession>* cameraInjectionSession);
+ virtual binder::Status reportExtensionSessionStats(
+ const hardware::CameraExtensionSessionStats& stats, std::string* sessionKey /*out*/);
+
// Extra permissions checks
virtual status_t onTransact(uint32_t code, const Parcel& data,
Parcel* reply, uint32_t flags);
@@ -221,6 +237,7 @@
// Monitored UIDs availability notification
void notifyMonitoredUids();
+ void notifyMonitoredUids(const std::unordered_set<uid_t> ¬ifyUidSet);
// Stores current open session device info in temp file.
void cacheDump();
@@ -340,6 +357,9 @@
// Override rotate-and-crop AUTO behavior
virtual status_t setRotateAndCropOverride(uint8_t rotateAndCrop) = 0;
+ // Override autoframing AUTO behaviour
+ virtual status_t setAutoframingOverride(uint8_t autoframingValue) = 0;
+
// Whether the client supports camera muting (black only output)
virtual bool supportsCameraMute() = 0;
@@ -356,6 +376,12 @@
// Clear stream use case overrides
virtual void clearStreamUseCaseOverrides() = 0;
+ // Whether the client supports camera zoom override
+ virtual bool supportsZoomOverride() = 0;
+
+ // Set/reset zoom override
+ virtual status_t setZoomOverride(int32_t zoomOverride) = 0;
+
// The injection camera session to replace the internal camera
// session.
virtual status_t injectCamera(const std::string& injectedCamId,
@@ -510,7 +536,6 @@
virtual bool canCastToApiClient(apiLevel level) const;
void setImageDumpMask(int /*mask*/) { }
- void setStreamUseCaseOverrides(const std::vector<int64_t>& /*usecaseOverrides*/) { }
protected:
// Initialized in constructor
@@ -585,6 +610,20 @@
private:
+ // TODO: b/263304156 update this to make use of a death callback for more
+ // robust/fault tolerant logging
+ static const sp<IActivityManager>& getActivityManager() {
+ static const char* kActivityService = "activity";
+ static const auto activityManager = []() -> sp<IActivityManager> {
+ const sp<IServiceManager> sm(defaultServiceManager());
+ if (sm != nullptr) {
+ return interface_cast<IActivityManager>(sm->checkService(String16(kActivityService)));
+ }
+ return nullptr;
+ }();
+ return activityManager;
+ }
+
/**
* Typesafe version of device status, containing both the HAL-layer and the service interface-
* layer values.
@@ -705,7 +744,10 @@
// Observer for UID lifecycle enforcing that UIDs in idle
// state cannot use the camera to protect user privacy.
- class UidPolicy : public BnUidObserver, public virtual IBinder::DeathRecipient {
+ class UidPolicy :
+ public BnUidObserver,
+ public virtual IBinder::DeathRecipient,
+ public virtual IServiceManager::LocalRegistrationCallback {
public:
explicit UidPolicy(sp<CameraService> service)
: mRegistered(false), mService(service) {}
@@ -722,23 +764,30 @@
void onUidIdle(uid_t uid, bool disabled) override;
void onUidStateChanged(uid_t uid, int32_t procState, int64_t procStateSeq,
int32_t capability) override;
- void onUidProcAdjChanged(uid_t uid) override;
+ void onUidProcAdjChanged(uid_t uid, int adj) override;
void addOverrideUid(uid_t uid, const std::string &callingPackage, bool active);
void removeOverrideUid(uid_t uid, const std::string &callingPackage);
- void registerMonitorUid(uid_t uid);
- void unregisterMonitorUid(uid_t uid);
+ void registerMonitorUid(uid_t uid, bool openCamera);
+ void unregisterMonitorUid(uid_t uid, bool closeCamera);
+ // Implementation of IServiceManager::LocalRegistrationCallback
+ virtual void onServiceRegistration(const String16& name,
+ const sp<IBinder>& binder) override;
// IBinder::DeathRecipient implementation
virtual void binderDied(const wp<IBinder> &who);
private:
bool isUidActiveLocked(uid_t uid, const std::string &callingPackage);
int32_t getProcStateLocked(uid_t uid);
- void updateOverrideUid(uid_t uid, const std::string &callingPackage, bool active, bool insert);
+ void updateOverrideUid(uid_t uid, const std::string &callingPackage, bool active,
+ bool insert);
+ void registerWithActivityManager();
struct MonitoredUid {
int32_t procState;
+ int32_t procAdj;
+ bool hasCamera;
size_t refCount;
};
@@ -750,12 +799,14 @@
// Monitored uid map
std::unordered_map<uid_t, MonitoredUid> mMonitoredUids;
std::unordered_map<uid_t, bool> mOverrideUids;
+ sp<IBinder> mObserverToken;
}; // class UidPolicy
// If sensor privacy is enabled then all apps, including those that are active, should be
// prevented from accessing the camera.
class SensorPrivacyPolicy : public hardware::BnSensorPrivacyListener,
- public virtual IBinder::DeathRecipient {
+ public virtual IBinder::DeathRecipient,
+ public virtual IServiceManager::LocalRegistrationCallback {
public:
explicit SensorPrivacyPolicy(wp<CameraService> service)
: mService(service), mSensorPrivacyEnabled(false), mRegistered(false) {}
@@ -769,6 +820,9 @@
binder::Status onSensorPrivacyChanged(int toggleType, int sensor,
bool enabled);
+ // Implementation of IServiceManager::LocalRegistrationCallback
+ virtual void onServiceRegistration(const String16& name,
+ const sp<IBinder>& binder) override;
// IBinder::DeathRecipient implementation
virtual void binderDied(const wp<IBinder> &who);
@@ -780,12 +834,15 @@
bool mRegistered;
bool hasCameraPrivacyFeature();
+ void registerWithSensorPrivacyManager();
};
sp<UidPolicy> mUidPolicy;
sp<SensorPrivacyPolicy> mSensorPrivacyPolicy;
+ std::shared_ptr<CameraServiceProxyWrapper> mCameraServiceProxyWrapper;
+
// Delay-load the Camera HAL module
virtual void onFirstRef();
@@ -1086,6 +1143,29 @@
return IInterface::asBinder(mListener)->linkToDeath(this);
}
+ template<typename... args_t>
+ void handleBinderStatus(const binder::Status &ret, const char *logOnError,
+ args_t... args) {
+ if (!ret.isOk() &&
+ (ret.exceptionCode() != binder::Status::Exception::EX_TRANSACTION_FAILED
+ || !mLastTransactFailed)) {
+ ALOGE(logOnError, args...);
+ }
+
+ // If the transaction failed, the process may have died (or other things, see
+ // b/28321379). Mute consecutive errors from this listener to avoid log spam.
+ if (ret.exceptionCode() == binder::Status::Exception::EX_TRANSACTION_FAILED) {
+ if (!mLastTransactFailed) {
+ ALOGE("%s: Muting similar errors from listener %d:%d", __FUNCTION__,
+ mListenerUid, mListenerPid);
+ }
+ mLastTransactFailed = true;
+ } else {
+ // Reset mLastTransactFailed when binder becomes healthy again.
+ mLastTransactFailed = false;
+ }
+ }
+
virtual void binderDied(const wp<IBinder> &/*who*/) {
auto parent = mParent.promote();
if (parent.get() != nullptr) {
@@ -1106,6 +1186,9 @@
int mListenerPid = -1;
bool mIsVendorListener = false;
bool mOpenCloseCallbackAllowed = false;
+
+ // Flag for preventing log spam when binder becomes unhealthy
+ bool mLastTransactFailed = false;
};
// Guarded by mStatusListenerMutex
@@ -1218,6 +1301,12 @@
// Get the rotate-and-crop AUTO override behavior
status_t handleGetRotateAndCrop(int out);
+ // Set the autoframing AUTO override behaviour.
+ status_t handleSetAutoframing(const Vector<String16>& args);
+
+ // Get the autoframing AUTO override behaviour
+ status_t handleGetAutoframing(int out);
+
// Set the mask for image dump to disk
status_t handleSetImageDumpMask(const Vector<String16>& args);
@@ -1231,7 +1320,10 @@
status_t handleSetStreamUseCaseOverrides(const Vector<String16>& args);
// Clear the stream use case overrides
- status_t handleClearStreamUseCaseOverrides();
+ void handleClearStreamUseCaseOverrides();
+
+ // Set or clear the zoom override flag
+ status_t handleSetZoomOverride(const Vector<String16>& args);
// Handle 'watch' command as passed through 'cmd'
status_t handleWatchCommand(const Vector<String16> &args, int inFd, int outFd);
@@ -1318,6 +1410,9 @@
// Current override cmd rotate-and-crop mode; AUTO means no override
uint8_t mOverrideRotateAndCropMode = ANDROID_SCALER_ROTATE_AND_CROP_AUTO;
+ // Current autoframing mode
+ uint8_t mOverrideAutoframingMode = ANDROID_CONTROL_AUTOFRAMING_AUTO;
+
// Current image dump mask
uint8_t mImageDumpMask = 0;
@@ -1330,6 +1425,9 @@
// Current stream use case overrides
std::vector<int64_t> mStreamUseCaseOverrides;
+ // Current zoom override value
+ int32_t mZoomOverrideValue = -1;
+
/**
* A listener class that implements the IBinder::DeathRecipient interface
* for use to call back the error state injected by the external camera, and
diff --git a/services/camera/libcameraservice/CameraServiceWatchdog.cpp b/services/camera/libcameraservice/CameraServiceWatchdog.cpp
index e101dd3..1c1bd24 100644
--- a/services/camera/libcameraservice/CameraServiceWatchdog.cpp
+++ b/services/camera/libcameraservice/CameraServiceWatchdog.cpp
@@ -17,6 +17,8 @@
#define LOG_TAG "CameraServiceWatchdog"
#include "CameraServiceWatchdog.h"
+#include "android/set_abort_message.h"
+#include "utils/CameraServiceProxyWrapper.h"
namespace android {
@@ -35,13 +37,18 @@
{
AutoMutex _l(mWatchdogLock);
- for (auto it = tidToCycleCounterMap.begin(); it != tidToCycleCounterMap.end(); it++) {
+ for (auto it = mTidMap.begin(); it != mTidMap.end(); it++) {
uint32_t currentThreadId = it->first;
- tidToCycleCounterMap[currentThreadId]++;
+ mTidMap[currentThreadId].cycles++;
- if (tidToCycleCounterMap[currentThreadId] >= mMaxCycles) {
- ALOGW("CameraServiceWatchdog triggering abort for pid: %d", getpid());
+ if (mTidMap[currentThreadId].cycles >= mMaxCycles) {
+ std::string abortMessage = getAbortMessage(mTidMap[currentThreadId].functionName);
+ android_set_abort_message(abortMessage.c_str());
+ ALOGW("CameraServiceWatchdog triggering abort for pid: %d tid: %d", getpid(),
+ currentThreadId);
+ mCameraServiceProxyWrapper->logClose(mCameraId, 0 /*latencyMs*/,
+ true /*deviceError*/);
// We use abort here so we can get a tombstone for better
// debugging.
abort();
@@ -52,13 +59,19 @@
return true;
}
+std::string CameraServiceWatchdog::getAbortMessage(const std::string& functionName) {
+ std::string res = "CameraServiceWatchdog triggering abort during "
+ + functionName;
+ return res;
+}
+
void CameraServiceWatchdog::requestExit()
{
Thread::requestExit();
AutoMutex _l(mWatchdogLock);
- tidToCycleCounterMap.clear();
+ mTidMap.clear();
if (mPause) {
mPause = false;
@@ -81,18 +94,21 @@
{
AutoMutex _l(mWatchdogLock);
- tidToCycleCounterMap.erase(tid);
+ mTidMap.erase(tid);
- if (tidToCycleCounterMap.empty()) {
+ if (mTidMap.empty()) {
mPause = true;
}
}
-void CameraServiceWatchdog::start(uint32_t tid)
+void CameraServiceWatchdog::start(uint32_t tid, const char* functionName)
{
AutoMutex _l(mWatchdogLock);
- tidToCycleCounterMap[tid] = 0;
+ MonitoredFunction monitoredFunction = {};
+ monitoredFunction.cycles = 0;
+ monitoredFunction.functionName = functionName;
+ mTidMap[tid] = monitoredFunction;
if (mPause) {
mPause = false;
diff --git a/services/camera/libcameraservice/CameraServiceWatchdog.h b/services/camera/libcameraservice/CameraServiceWatchdog.h
index e35d69e..9f25865 100644
--- a/services/camera/libcameraservice/CameraServiceWatchdog.h
+++ b/services/camera/libcameraservice/CameraServiceWatchdog.h
@@ -36,10 +36,12 @@
#include <utils/Log.h>
#include <unordered_map>
+#include "utils/CameraServiceProxyWrapper.h"
+
// Used to wrap the call of interest in start and stop calls
-#define WATCH(toMonitor) watchThread([&]() { return toMonitor;}, gettid())
+#define WATCH(toMonitor) watchThread([&]() { return toMonitor;}, gettid(), __FUNCTION__)
#define WATCH_CUSTOM_TIMER(toMonitor, cycles, cycleLength) \
- watchThread([&]() { return toMonitor;}, gettid(), cycles, cycleLength);
+ watchThread([&]() { return toMonitor;}, gettid(), __FUNCTION__, cycles, cycleLength);
// Default cycles and cycle length values used to calculate permitted elapsed time
const static size_t kMaxCycles = 100;
@@ -49,13 +51,24 @@
class CameraServiceWatchdog : public Thread {
-public:
- explicit CameraServiceWatchdog() : mPause(true), mMaxCycles(kMaxCycles),
- mCycleLengthMs(kCycleLengthMs), mEnabled(true) {};
+struct MonitoredFunction {
+ uint32_t cycles;
+ std::string functionName;
+};
- explicit CameraServiceWatchdog (size_t maxCycles, uint32_t cycleLengthMs, bool enabled) :
- mPause(true), mMaxCycles(maxCycles), mCycleLengthMs(cycleLengthMs), mEnabled(enabled)
- {};
+public:
+ explicit CameraServiceWatchdog(const std::string &cameraId,
+ std::shared_ptr<CameraServiceProxyWrapper> cameraServiceProxyWrapper) :
+ mCameraId(cameraId), mPause(true), mMaxCycles(kMaxCycles),
+ mCycleLengthMs(kCycleLengthMs), mEnabled(true),
+ mCameraServiceProxyWrapper(cameraServiceProxyWrapper) {};
+
+ explicit CameraServiceWatchdog (const std::string &cameraId, size_t maxCycles,
+ uint32_t cycleLengthMs, bool enabled,
+ std::shared_ptr<CameraServiceProxyWrapper> cameraServiceProxyWrapper) :
+ mCameraId(cameraId), mPause(true), mMaxCycles(maxCycles),
+ mCycleLengthMs(cycleLengthMs), mEnabled(enabled),
+ mCameraServiceProxyWrapper(cameraServiceProxyWrapper) {};
virtual ~CameraServiceWatchdog() {};
@@ -66,7 +79,8 @@
/** Used to wrap monitored calls in start and stop functions using custom timer values */
template<typename T>
- auto watchThread(T func, uint32_t tid, uint32_t cycles, uint32_t cycleLength) {
+ auto watchThread(T func, uint32_t tid, const char* functionName, uint32_t cycles,
+ uint32_t cycleLength) {
decltype(func()) res;
if (cycles != mMaxCycles || cycleLength != mCycleLengthMs) {
@@ -75,24 +89,24 @@
// Lock for mEnabled
mEnabledLock.lock();
- sp<CameraServiceWatchdog> tempWatchdog =
- new CameraServiceWatchdog(cycles, cycleLength, mEnabled);
+ sp<CameraServiceWatchdog> tempWatchdog = new CameraServiceWatchdog(
+ mCameraId, cycles, cycleLength, mEnabled, mCameraServiceProxyWrapper);
mEnabledLock.unlock();
status_t status = tempWatchdog->run("CameraServiceWatchdog");
if (status != OK) {
ALOGE("Unable to watch thread: %s (%d)", strerror(-status), status);
- res = watchThread(func, tid);
+ res = watchThread(func, tid, functionName);
return res;
}
- res = tempWatchdog->watchThread(func, tid);
+ res = tempWatchdog->watchThread(func, tid, functionName);
tempWatchdog->requestExit();
tempWatchdog.clear();
} else {
// If custom timer values are equivalent to set class timer values, use
// current thread
- res = watchThread(func, tid);
+ res = watchThread(func, tid, functionName);
}
return res;
@@ -100,12 +114,12 @@
/** Used to wrap monitored calls in start and stop functions using class timer values */
template<typename T>
- auto watchThread(T func, uint32_t tid) {
+ auto watchThread(T func, uint32_t tid, const char* functionName) {
decltype(func()) res;
AutoMutex _l(mEnabledLock);
if (mEnabled) {
- start(tid);
+ start(tid, functionName);
res = func();
stop(tid);
} else {
@@ -121,7 +135,7 @@
* Start adds a cycle counter for the calling thread. When threadloop is blocked/paused,
* start() unblocks and starts the watchdog
*/
- void start(uint32_t tid);
+ void start(uint32_t tid, const char* functionName);
/**
* If there are no calls left to be monitored, stop blocks/pauses threadloop
@@ -129,17 +143,24 @@
*/
void stop(uint32_t tid);
+ std::string getAbortMessage(const std::string& functionName);
+
virtual bool threadLoop();
Mutex mWatchdogLock; // Lock for condition variable
Mutex mEnabledLock; // Lock for enabled status
Condition mWatchdogCondition; // Condition variable for stop/start
+ std::string mCameraId; // Camera Id the watchdog belongs to
bool mPause; // True if tid map is empty
uint32_t mMaxCycles; // Max cycles
uint32_t mCycleLengthMs; // Length of time elapsed per cycle
bool mEnabled; // True if watchdog is enabled
- std::unordered_map<uint32_t, uint32_t> tidToCycleCounterMap; // Thread Id to cycle counter map
+ std::shared_ptr<CameraServiceProxyWrapper> mCameraServiceProxyWrapper;
+
+ std::unordered_map<uint32_t, MonitoredFunction> mTidMap; // Thread Id to MonitoredFunction type
+ // which retrieves the num of cycles
+ // and name of the function
};
} // namespace android
diff --git a/services/camera/libcameraservice/aidl/AidlCameraDeviceCallbacks.cpp b/services/camera/libcameraservice/aidl/AidlCameraDeviceCallbacks.cpp
new file mode 100644
index 0000000..e648a36
--- /dev/null
+++ b/services/camera/libcameraservice/aidl/AidlCameraDeviceCallbacks.cpp
@@ -0,0 +1,212 @@
+/*
+ * Copyright (C) 2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#define LOG_TAG "AidlCameraDeviceCallbacks"
+
+#include <aidl/AidlCameraDeviceCallbacks.h>
+#include <aidl/AidlUtils.h>
+#include <aidl/android/frameworks/cameraservice/common/Status.h>
+#include <hidl/Utils.h>
+#include <utility>
+
+namespace android::frameworks::cameraservice::device::implementation {
+
+// VNDK classes
+using SCameraMetadata = ::aidl::android::frameworks::cameraservice::device::CameraMetadata;
+using SCaptureResultExtras =
+ ::aidl::android::frameworks::cameraservice::device::CaptureResultExtras;
+using SPhysicalCaptureResultInfo =
+ ::aidl::android::frameworks::cameraservice::device::PhysicalCaptureResultInfo;
+using SStatus = ::aidl::android::frameworks::cameraservice::common::Status;
+// NDK classes
+using UCaptureResultExtras = ::android::hardware::camera2::impl::CaptureResultExtras;
+using UPhysicalCaptureResultInfo = ::android::hardware::camera2::impl::PhysicalCaptureResultInfo;
+
+using ::android::hardware::cameraservice::utils::conversion::aidl::convertToAidl;
+using ::android::hardware::cameraservice::utils::conversion::aidl::filterVndkKeys;
+
+const char *AidlCameraDeviceCallbacks::kResultKey = "CaptureResult";
+
+
+bool AidlCameraDeviceCallbacks::initializeLooper(int vndkVersion) {
+ mCbLooper = new ALooper;
+ mCbLooper->setName("cs-looper");
+ status_t err = mCbLooper->start(/*runOnCallingThread*/ false, /*canCallJava*/ false,
+ PRIORITY_DEFAULT);
+ if (err !=OK) {
+ ALOGE("Unable to start camera device callback looper");
+ return false;
+ }
+ mHandler = new CallbackHandler(this, vndkVersion);
+ mCbLooper->registerHandler(mHandler);
+ return true;
+}
+
+AidlCameraDeviceCallbacks::AidlCameraDeviceCallbacks(
+ const std::shared_ptr<SICameraDeviceCallback>& base):
+ mBase(base), mDeathPipe(this, base->asBinder()) {}
+
+AidlCameraDeviceCallbacks::~AidlCameraDeviceCallbacks() {
+ if (mCbLooper != nullptr) {
+ if (mHandler != nullptr) {
+ mCbLooper->unregisterHandler(mHandler->id());
+ }
+ mCbLooper->stop();
+ }
+ mCbLooper.clear();
+ mHandler.clear();
+}
+
+binder::Status AidlCameraDeviceCallbacks::onDeviceError(
+ int32_t errorCode, const CaptureResultExtras& resultExtras) {
+ using hardware::cameraservice::utils::conversion::aidl::convertToAidl;
+ SCaptureResultExtras cre = convertToAidl(resultExtras);
+ auto ret = mBase->onDeviceError(convertToAidl(errorCode), cre);
+ LOG_STATUS_ERROR_IF_NOT_OK(ret, "onDeviceError")
+ return binder::Status::ok();
+}
+
+binder::Status AidlCameraDeviceCallbacks::onDeviceIdle() {
+ auto ret = mBase->onDeviceIdle();
+ LOG_STATUS_ERROR_IF_NOT_OK(ret, "onDeviceIdle")
+ return binder::Status::ok();
+}
+
+binder::Status AidlCameraDeviceCallbacks::onCaptureStarted(
+ const CaptureResultExtras& resultExtras, int64_t timestamp) {
+ using hardware::cameraservice::utils::conversion::aidl::convertToAidl;
+ SCaptureResultExtras hCaptureResultExtras = convertToAidl(resultExtras);
+ auto ret = mBase->onCaptureStarted(hCaptureResultExtras, timestamp);
+ LOG_STATUS_ERROR_IF_NOT_OK(ret, "onCaptureStarted")
+ return binder::Status::ok();
+}
+
+void AidlCameraDeviceCallbacks::convertResultMetadataToAidl(const camera_metadata_t* src,
+ SCaptureMetadataInfo* dst) {
+ // First try writing to fmq.
+ size_t metadata_size = get_camera_metadata_size(src);
+ if ((metadata_size > 0) &&
+ (mCaptureResultMetadataQueue->availableToWrite() > 0)) {
+ if (mCaptureResultMetadataQueue->write((int8_t *)src, metadata_size)) {
+ dst->set<SCaptureMetadataInfo::fmqMetadataSize>(metadata_size);
+ } else {
+ ALOGW("%s Couldn't use fmq, falling back to hwbinder", __FUNCTION__);
+ SCameraMetadata metadata;
+ hardware::cameraservice::utils::conversion::aidl::cloneToAidl(src, &metadata);
+ dst->set<SCaptureMetadataInfo::metadata>(std::move(metadata));
+ }
+ }
+}
+
+void AidlCameraDeviceCallbacks::CallbackHandler::onMessageReceived(const sp<AMessage> &msg) {
+ sp<RefBase> obj = nullptr;
+ sp<ResultWrapper> resultWrapper = nullptr;
+ bool found = false;
+ switch (msg->what()) {
+ case kWhatResultReceived:
+ found = msg->findObject(kResultKey, &obj);
+ if (!found || obj == nullptr) {
+ ALOGE("Cannot find result object in callback message");
+ return;
+ }
+ resultWrapper = static_cast<ResultWrapper*>(obj.get());
+ processResultMessage(resultWrapper);
+ break;
+ default:
+ ALOGE("Unknown callback sent");
+ break;
+ }
+ }
+
+void AidlCameraDeviceCallbacks::CallbackHandler::processResultMessage(
+ sp<ResultWrapper> &resultWrapper) {
+ sp<AidlCameraDeviceCallbacks> converter = mConverter.promote();
+ if (converter == nullptr) {
+ ALOGE("Callback wrapper has died, result callback cannot be made");
+ return;
+ }
+ CameraMetadataNative &result = resultWrapper->mResult;
+ auto resultExtras = resultWrapper->mResultExtras;
+ SCaptureResultExtras convResultExtras =
+ hardware::cameraservice::utils::conversion::aidl::convertToAidl(resultExtras);
+
+ // Convert Metadata into HCameraMetadata;
+ SCaptureMetadataInfo captureMetadataInfo;
+ if (filterVndkKeys(mVndkVersion, result, /*isStatic*/false) != OK) {
+ ALOGE("%s: filtering vndk keys from result failed, not sending onResultReceived callback",
+ __FUNCTION__);
+ return;
+ }
+ const camera_metadata_t *rawMetadata = result.getAndLock();
+ converter->convertResultMetadataToAidl(rawMetadata, &captureMetadataInfo);
+ result.unlock(rawMetadata);
+
+ auto &physicalCaptureResultInfos = resultWrapper->mPhysicalCaptureResultInfos;
+ std::vector<SPhysicalCaptureResultInfo> stableCaptureResInfo =
+ convertToAidl(physicalCaptureResultInfos, converter->mCaptureResultMetadataQueue);
+ auto ret = converter->mBase->onResultReceived(captureMetadataInfo,
+ convResultExtras,
+ stableCaptureResInfo);
+
+ LOG_STATUS_ERROR_IF_NOT_OK(ret, "OnResultReceived")
+}
+
+binder::Status AidlCameraDeviceCallbacks::onResultReceived(
+ const CameraMetadataNative& result,
+ const UCaptureResultExtras& resultExtras,
+ const ::std::vector<UPhysicalCaptureResultInfo>& physicalCaptureResultInfos) {
+ // Wrap CameraMetadata, resultExtras and physicalCaptureResultInfos in on
+ // sp<RefBase>-able structure and post it.
+ sp<ResultWrapper> resultWrapper = new ResultWrapper(const_cast<CameraMetadataNative &>(result),
+ resultExtras, physicalCaptureResultInfos);
+ sp<AMessage> msg = new AMessage(kWhatResultReceived, mHandler);
+ msg->setObject(kResultKey, resultWrapper);
+ msg->post();
+ return binder::Status::ok();
+}
+
+binder::Status AidlCameraDeviceCallbacks::onPrepared(int32_t streamId) {
+ auto ret = mBase->onPrepared(streamId);
+ LOG_STATUS_ERROR_IF_NOT_OK(ret, "onPrepared")
+ return binder::Status::ok();
+}
+
+binder::Status AidlCameraDeviceCallbacks::onRepeatingRequestError(
+ int64_t lastFrameNumber,
+ int32_t repeatingRequestId) {
+ auto ret =
+ mBase->onRepeatingRequestError(lastFrameNumber, repeatingRequestId);
+ LOG_STATUS_ERROR_IF_NOT_OK(ret, "onRepeatingRequestError")
+ return binder::Status::ok();
+}
+
+binder::Status AidlCameraDeviceCallbacks::onRequestQueueEmpty() {
+ // not implemented
+ return binder::Status::ok();
+}
+
+status_t AidlCameraDeviceCallbacks::linkToDeath(const sp<DeathRecipient>& recipient,
+ void* cookie, uint32_t flags) {
+ return mDeathPipe.linkToDeath(recipient, cookie, flags);
+}
+status_t AidlCameraDeviceCallbacks::unlinkToDeath(const wp<DeathRecipient>& recipient,
+ void* cookie,
+ uint32_t flags,
+ wp<DeathRecipient>* outRecipient) {
+ return mDeathPipe.unlinkToDeath(recipient, cookie, flags, outRecipient);
+}
+
+} // namespace android::frameworks::cameraservice::device::implementation
diff --git a/services/camera/libcameraservice/aidl/AidlCameraDeviceCallbacks.h b/services/camera/libcameraservice/aidl/AidlCameraDeviceCallbacks.h
new file mode 100644
index 0000000..5cff5b3
--- /dev/null
+++ b/services/camera/libcameraservice/aidl/AidlCameraDeviceCallbacks.h
@@ -0,0 +1,134 @@
+/*
+ * Copyright (C) 2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef FRAMEWORKS_AV_SERVICES_CAMERA_LIBCAMERASERVICE_AIDL_AIDLCAMERADEVICECALLBACKS_H_
+#define FRAMEWORKS_AV_SERVICES_CAMERA_LIBCAMERASERVICE_AIDL_AIDLCAMERADEVICECALLBACKS_H_
+
+#include <CameraService.h>
+#include <aidl/DeathPipe.h>
+#include <aidl/android/frameworks/cameraservice/device/BnCameraDeviceCallback.h>
+#include <aidl/android/frameworks/cameraservice/device/CaptureMetadataInfo.h>
+#include <aidl/android/frameworks/cameraservice/device/PhysicalCaptureResultInfo.h>
+#include <android/hardware/camera2/BnCameraDeviceCallbacks.h>
+#include <fmq/AidlMessageQueue.h>
+#include <media/stagefright/foundation/AHandler.h>
+#include <media/stagefright/foundation/ALooper.h>
+#include <media/stagefright/foundation/AMessage.h>
+#include <mutex>
+#include <thread>
+#include <utility>
+
+namespace android::frameworks::cameraservice::device::implementation {
+
+// VNDK classes
+using SCaptureMetadataInfo = ::aidl::android::frameworks::cameraservice::device::CaptureMetadataInfo;
+using SICameraDeviceCallback =
+ ::aidl::android::frameworks::cameraservice::device::ICameraDeviceCallback;
+// NDK classes
+using UBnCameraDeviceCallbacks = ::android::hardware::camera2::BnCameraDeviceCallbacks;
+
+using ::aidl::android::hardware::common::fmq::MQDescriptor;
+using ::aidl::android::hardware::common::fmq::SynchronizedReadWrite;
+using ::android::AidlMessageQueue;
+using ::android::frameworks::cameraservice::utils::DeathPipe;
+using ::android::hardware::camera2::impl::CameraMetadataNative;
+
+using CaptureResultMetadataQueue = AidlMessageQueue<int8_t, SynchronizedReadWrite>;
+
+class AidlCameraDeviceCallbacks : public UBnCameraDeviceCallbacks {
+ public:
+ explicit AidlCameraDeviceCallbacks(const std::shared_ptr<SICameraDeviceCallback>& base);
+
+ ~AidlCameraDeviceCallbacks() override;
+
+ bool initializeLooper(int vndkVersion);
+
+ binder::Status onDeviceError(int32_t errorCode,
+ const CaptureResultExtras& resultExtras) override;
+
+ binder::Status onDeviceIdle() override;
+
+ binder::Status onCaptureStarted(const CaptureResultExtras& resultExtras,
+ int64_t timestamp) override;
+
+ binder::Status onResultReceived(
+ const CameraMetadataNative& result, const CaptureResultExtras& resultExtras,
+ const std::vector<PhysicalCaptureResultInfo>& physicalCaptureResultInfos) override;
+
+ binder::Status onPrepared(int32_t streamId) override;
+
+ binder::Status onRepeatingRequestError(int64_t lastFrameNumber,
+ int32_t repeatingRequestId) override;
+
+ binder::Status onRequestQueueEmpty() override;
+
+ status_t linkToDeath(const sp<DeathRecipient>& recipient, void* cookie,
+ uint32_t flags) override;
+ status_t unlinkToDeath(const wp<DeathRecipient>& recipient, void* cookie, uint32_t flags,
+ wp<DeathRecipient>* outRecipient) override;
+
+ void setCaptureResultMetadataQueue(std::shared_ptr<CaptureResultMetadataQueue> metadataQueue) {
+ mCaptureResultMetadataQueue = std::move(metadataQueue);
+ }
+
+ private:
+ // Wrapper struct so that parameters to onResultReceived callback may be
+ // sent through an AMessage.
+ struct ResultWrapper : public RefBase {
+ CameraMetadataNative mResult;
+ CaptureResultExtras mResultExtras;
+ std::vector<PhysicalCaptureResultInfo> mPhysicalCaptureResultInfos;
+
+ ResultWrapper(CameraMetadataNative &result,
+ CaptureResultExtras resultExtras,
+ std::vector<PhysicalCaptureResultInfo> physicalCaptureResultInfos) :
+ // TODO: make this std::movable
+ mResult(result),
+ mResultExtras(std::move(resultExtras)),
+ mPhysicalCaptureResultInfos(std::move(physicalCaptureResultInfos)) { }
+ };
+
+ struct CallbackHandler : public AHandler {
+ public:
+ void onMessageReceived(const sp<AMessage> &msg) override;
+ CallbackHandler(AidlCameraDeviceCallbacks *converter, int vndkVersion) :
+ mConverter(converter), mVndkVersion(vndkVersion) { }
+ private:
+ void processResultMessage(sp<ResultWrapper> &resultWrapper);
+ wp<AidlCameraDeviceCallbacks> mConverter = nullptr;
+ int mVndkVersion = -1;
+ };
+
+ void convertResultMetadataToAidl(const camera_metadata * src, SCaptureMetadataInfo * dst);
+ enum {
+ kWhatResultReceived,
+ };
+
+ static const char *kResultKey;
+
+ private:
+ std::shared_ptr<SICameraDeviceCallback> mBase;
+ std::shared_ptr<CaptureResultMetadataQueue> mCaptureResultMetadataQueue = nullptr;
+ sp<CallbackHandler> mHandler = nullptr;
+ sp<ALooper> mCbLooper = nullptr;
+
+ // Pipes death subscription from current NDK interface to VNDK mBase.
+ // Should consume calls to linkToDeath and unlinkToDeath.
+ DeathPipe mDeathPipe;
+};
+
+} // namespace android::frameworks::cameraservice::device::implementation
+#endif // FRAMEWORKS_AV_SERVICES_CAMERA_LIBCAMERASERVICE_AIDL_AIDLCAMERADEVICECALLBACKS_H_
diff --git a/services/camera/libcameraservice/aidl/AidlCameraDeviceUser.cpp b/services/camera/libcameraservice/aidl/AidlCameraDeviceUser.cpp
new file mode 100644
index 0000000..402f8a2
--- /dev/null
+++ b/services/camera/libcameraservice/aidl/AidlCameraDeviceUser.cpp
@@ -0,0 +1,280 @@
+/*
+ * Copyright (C) 2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#define LOG_TAG "AidlCameraDeviceUser"
+
+#include "AidlCameraDeviceUser.h"
+#include <aidl/AidlUtils.h>
+#include <aidl/android/frameworks/cameraservice/device/CaptureMetadataInfo.h>
+#include <android-base/properties.h>
+
+namespace android::frameworks::cameraservice::device::implementation {
+
+// VNDK classes
+using SCaptureMetadataInfo = ::aidl::android::frameworks::cameraservice::device::CaptureMetadataInfo;
+// NDK classes
+using UOutputConfiguration = ::android::hardware::camera2::params::OutputConfiguration;
+using USessionConfiguration = ::android::hardware::camera2::params::SessionConfiguration;
+using UStatus = ::android::binder::Status;
+using USubmitInfo = ::android::hardware::camera2::utils::SubmitInfo;
+
+using ::android::CameraMetadata;
+using ::android::hardware::cameraservice::utils::conversion::aidl::cloneFromAidl;
+using ::android::hardware::cameraservice::utils::conversion::aidl::cloneToAidl;
+using ::android::hardware::cameraservice::utils::conversion::aidl::convertFromAidl;
+using ::android::hardware::cameraservice::utils::conversion::aidl::convertToAidl;
+using ::android::hardware::cameraservice::utils::conversion::aidl::filterVndkKeys;
+using ::ndk::ScopedAStatus;
+
+namespace {
+constexpr int32_t CAMERA_REQUEST_METADATA_QUEUE_SIZE = 1 << 20 /* 1 MB */;
+constexpr int32_t CAMERA_RESULT_METADATA_QUEUE_SIZE = 1 << 20 /* 1 MB */;
+
+inline ScopedAStatus fromSStatus(const SStatus& s) {
+ return s == SStatus::NO_ERROR ? ScopedAStatus::ok()
+ : ScopedAStatus::fromServiceSpecificError(
+ static_cast<int32_t>(s));
+}
+inline ScopedAStatus fromUStatus(const UStatus& status) {
+ return status.isOk() ? ScopedAStatus::ok() : fromSStatus(convertToAidl(status));
+}
+} // anonymous namespace
+
+AidlCameraDeviceUser::AidlCameraDeviceUser(const sp<UICameraDeviceUser>& deviceRemote):
+ mDeviceRemote(deviceRemote) {
+ mInitSuccess = initDevice();
+ mVndkVersion = base::GetIntProperty("ro.vndk.version", __ANDROID_API_FUTURE__);
+}
+
+bool AidlCameraDeviceUser::initDevice() {
+ // TODO: Get request and result metadata queue size from a system property.
+ int32_t reqFMQSize = CAMERA_REQUEST_METADATA_QUEUE_SIZE;
+
+ mCaptureRequestMetadataQueue =
+ std::make_unique<CaptureRequestMetadataQueue>(static_cast<size_t>(reqFMQSize),
+ false /* non blocking */);
+ if (!mCaptureRequestMetadataQueue->isValid()) {
+ ALOGE("%s: invalid request fmq", __FUNCTION__);
+ return false;
+ }
+
+ int32_t resFMQSize = CAMERA_RESULT_METADATA_QUEUE_SIZE;
+ mCaptureResultMetadataQueue =
+ std::make_shared<CaptureResultMetadataQueue>(static_cast<size_t>(resFMQSize),
+ false /* non blocking */);
+ if (!mCaptureResultMetadataQueue->isValid()) {
+ ALOGE("%s: invalid result fmq", __FUNCTION__);
+ return false;
+ }
+ return true;
+}
+
+ndk::ScopedAStatus AidlCameraDeviceUser::getCaptureRequestMetadataQueue(
+ MQDescriptor<int8_t, SynchronizedReadWrite>* _aidl_return) {
+ if (mInitSuccess) {
+ *_aidl_return = mCaptureRequestMetadataQueue->dupeDesc();
+ }
+ return ScopedAStatus::ok();
+}
+
+ndk::ScopedAStatus AidlCameraDeviceUser::getCaptureResultMetadataQueue(
+ MQDescriptor<int8_t, SynchronizedReadWrite>* _aidl_return) {
+ if (mInitSuccess) {
+ *_aidl_return = mCaptureResultMetadataQueue->dupeDesc();
+ }
+ return ScopedAStatus::ok();
+}
+
+ndk::ScopedAStatus AidlCameraDeviceUser::prepare(int32_t in_streamId) {
+ UStatus ret = mDeviceRemote->prepare(in_streamId);
+ return fromUStatus(ret);
+}
+
+ndk::ScopedAStatus AidlCameraDeviceUser::submitRequestList(
+ const std::vector<SCaptureRequest>& in_requestList, bool in_isRepeating,
+ SSubmitInfo* _aidl_return) {
+ USubmitInfo submitInfo;
+ std::vector<UCaptureRequest> requests;
+ for (const auto& req: in_requestList) {
+ requests.emplace_back();
+ if (!convertRequestFromAidl(req, &requests.back())) {
+ ALOGE("%s: Failed to convert AIDL CaptureRequest.", __FUNCTION__);
+ return fromSStatus(SStatus::ILLEGAL_ARGUMENT);
+ }
+ }
+ UStatus ret = mDeviceRemote->submitRequestList(requests,
+ in_isRepeating, &submitInfo);
+ if (!ret.isOk()) {
+ ALOGE("%s: Failed submitRequestList to cameraservice: %s",
+ __FUNCTION__, ret.toString8().string());
+ return fromUStatus(ret);
+ }
+ mRequestId = submitInfo.mRequestId;
+ convertToAidl(submitInfo, _aidl_return);
+ return ScopedAStatus::ok();
+}
+
+ndk::ScopedAStatus AidlCameraDeviceUser::cancelRepeatingRequest(int64_t* _aidl_return) {
+ UStatus ret = mDeviceRemote->cancelRequest(mRequestId, _aidl_return);
+ return fromUStatus(ret);
+}
+
+ScopedAStatus AidlCameraDeviceUser::beginConfigure() {
+ UStatus ret = mDeviceRemote->beginConfigure();
+ return fromUStatus(ret);
+}
+
+ndk::ScopedAStatus AidlCameraDeviceUser::endConfigure(SStreamConfigurationMode in_operatingMode,
+ const SCameraMetadata& in_sessionParams,
+ int64_t in_startTimeNs) {
+ CameraMetadata metadata;
+ if (!cloneFromAidl(in_sessionParams, &metadata)) {
+ return fromSStatus(SStatus::ILLEGAL_ARGUMENT);
+ }
+
+ std::vector<int32_t> offlineStreamIds;
+ UStatus ret = mDeviceRemote->endConfigure(convertFromAidl(in_operatingMode),
+ metadata, in_startTimeNs,
+ &offlineStreamIds);
+ return fromUStatus(ret);
+}
+
+ndk::ScopedAStatus AidlCameraDeviceUser::createStream(
+ const SOutputConfiguration& in_outputConfiguration, int32_t* _aidl_return) {
+ UOutputConfiguration outputConfig = convertFromAidl(in_outputConfiguration);
+ int32_t newStreamId;
+ UStatus ret = mDeviceRemote->createStream(outputConfig, &newStreamId);
+ if (!ret.isOk()) {
+ ALOGE("%s: Failed to create stream: %s", __FUNCTION__, ret.toString8().string());
+ }
+ *_aidl_return = newStreamId;
+ return fromUStatus(ret);
+}
+
+ndk::ScopedAStatus AidlCameraDeviceUser::createDefaultRequest(STemplateId in_templateId,
+ SCameraMetadata* _aidl_return) {
+ CameraMetadata metadata;
+ UStatus ret = mDeviceRemote->createDefaultRequest(convertFromAidl(in_templateId),
+ &metadata);
+ if (!ret.isOk()) {
+ ALOGE("%s: Failed to create default request: %s", __FUNCTION__, ret.toString8().string());
+ return fromUStatus(ret);
+ }
+
+ if (filterVndkKeys(mVndkVersion, metadata, /*isStatic*/false) != OK) {
+ ALOGE("%s: Unable to filter vndk metadata keys for version %d",
+ __FUNCTION__, mVndkVersion);
+ return fromSStatus(SStatus::UNKNOWN_ERROR);
+ }
+
+ const camera_metadata_t* rawMetadata = metadata.getAndLock();
+ cloneToAidl(rawMetadata, _aidl_return);
+ metadata.unlock(rawMetadata);
+ return ScopedAStatus::ok();
+}
+
+ndk::ScopedAStatus AidlCameraDeviceUser::waitUntilIdle() {
+ UStatus ret = mDeviceRemote->waitUntilIdle();
+ return fromUStatus(ret);
+}
+
+ndk::ScopedAStatus AidlCameraDeviceUser::flush(int64_t* _aidl_return) {
+ UStatus ret = mDeviceRemote->flush(_aidl_return);
+ return fromUStatus(ret);
+}
+
+ndk::ScopedAStatus AidlCameraDeviceUser::updateOutputConfiguration(
+ int32_t in_streamId, const SOutputConfiguration& in_outputConfiguration) {
+ UOutputConfiguration outputConfig = convertFromAidl(in_outputConfiguration);
+ UStatus ret = mDeviceRemote->updateOutputConfiguration(in_streamId, outputConfig);
+ if (!ret.isOk()) {
+ ALOGE("%s: Failed to update output config for stream id: %d: %s",
+ __FUNCTION__, in_streamId, ret.toString8().string());
+ }
+ return fromUStatus(ret);
+}
+ndk::ScopedAStatus AidlCameraDeviceUser::isSessionConfigurationSupported(
+ const SSessionConfiguration& in_sessionConfiguration, bool* _aidl_return) {
+ USessionConfiguration sessionConfig = convertFromAidl(in_sessionConfiguration);
+ UStatus ret = mDeviceRemote->isSessionConfigurationSupported(sessionConfig,
+ _aidl_return);
+ return fromUStatus(ret);
+}
+ndk::ScopedAStatus AidlCameraDeviceUser::deleteStream(int32_t in_streamId) {
+ UStatus ret = mDeviceRemote->deleteStream(in_streamId);
+ return fromUStatus(ret);
+}
+ndk::ScopedAStatus AidlCameraDeviceUser::disconnect() {
+ UStatus ret = mDeviceRemote->disconnect();
+ return fromUStatus(ret);
+}
+bool AidlCameraDeviceUser::convertRequestFromAidl(
+ const SCaptureRequest& src, UCaptureRequest* dst) {
+ dst->mIsReprocess = false;
+ for (const auto& streamAndWindowId : src.streamAndWindowIds) {
+ dst->mStreamIdxList.push_back(streamAndWindowId.streamId);
+ dst->mSurfaceIdxList.push_back(streamAndWindowId.windowId);
+ }
+
+ return copyPhysicalCameraSettings(src.physicalCameraSettings,
+ &(dst->mPhysicalCameraSettings));
+}
+bool AidlCameraDeviceUser::copyPhysicalCameraSettings(
+ const std::vector<SPhysicalCameraSettings>& src,
+ std::vector<UCaptureRequest::PhysicalCameraSettings>* dst) {
+ bool converted = false;
+ for (auto &e : src) {
+ dst->emplace_back();
+ CaptureRequest::PhysicalCameraSettings &physicalCameraSetting =
+ dst->back();
+ physicalCameraSetting.id = e.id;
+
+ // Read the settings either from the fmq or straightaway from the
+ // request. We don't need any synchronization, since submitRequestList
+ // is guaranteed to be called serially by the client if it decides to
+ // use fmq.
+ if (e.settings.getTag() == SCaptureMetadataInfo::fmqMetadataSize) {
+ /**
+ * Get settings from the fmq.
+ */
+ SCameraMetadata settingsFmq;
+ int64_t metadataSize = e.settings.get<SCaptureMetadataInfo::fmqMetadataSize>();
+ settingsFmq.metadata.resize(metadataSize);
+ int8_t* metadataPtr = (int8_t*) settingsFmq.metadata.data();
+ bool read = mCaptureRequestMetadataQueue->read(metadataPtr,
+ metadataSize);
+ if (!read) {
+ ALOGE("%s capture request settings could't be read from fmq size", __FUNCTION__);
+ converted = false;
+ } else {
+ converted = cloneFromAidl(settingsFmq, &physicalCameraSetting.settings);
+ }
+ } else {
+ /**
+ * The settings metadata is contained in request settings field.
+ */
+ converted = cloneFromAidl(e.settings.get<SCaptureMetadataInfo::metadata>(),
+ &physicalCameraSetting.settings);
+ }
+ if (!converted) {
+ ALOGE("%s: Unable to convert physicalCameraSettings from HIDL to AIDL.", __FUNCTION__);
+ return false;
+ }
+ }
+ return true;
+}
+
+} // namespace android::frameworks::cameraservice::device::implementation
\ No newline at end of file
diff --git a/services/camera/libcameraservice/aidl/AidlCameraDeviceUser.h b/services/camera/libcameraservice/aidl/AidlCameraDeviceUser.h
new file mode 100644
index 0000000..8014951
--- /dev/null
+++ b/services/camera/libcameraservice/aidl/AidlCameraDeviceUser.h
@@ -0,0 +1,117 @@
+/*
+ * Copyright (C) 2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef FRAMEWORKS_AV_SERVICES_CAMERA_LIBCAMERASERVICE_AIDL_AIDLCAMERADEVICEUSER_H_
+#define FRAMEWORKS_AV_SERVICES_CAMERA_LIBCAMERASERVICE_AIDL_AIDLCAMERADEVICEUSER_H_
+
+#include <CameraService.h>
+#include <aidl/android/frameworks/cameraservice/common/Status.h>
+#include <aidl/android/frameworks/cameraservice/device/BnCameraDeviceUser.h>
+#include <aidl/android/frameworks/cameraservice/device/CameraMetadata.h>
+#include <aidl/android/frameworks/cameraservice/device/OutputConfiguration.h>
+#include <aidl/android/frameworks/cameraservice/device/PhysicalCameraSettings.h>
+#include <aidl/android/frameworks/cameraservice/device/SessionConfiguration.h>
+#include <aidl/android/frameworks/cameraservice/device/StreamConfigurationMode.h>
+#include <aidl/android/frameworks/cameraservice/device/SubmitInfo.h>
+#include <aidl/android/frameworks/cameraservice/device/TemplateId.h>
+#include <aidl/android/hardware/common/fmq/MQDescriptor.h>
+#include <android/hardware/camera2/ICameraDeviceCallbacks.h>
+#include <fmq/AidlMessageQueue.h>
+#include <memory>
+
+namespace android::frameworks::cameraservice::device::implementation {
+
+using ::aidl::android::hardware::common::fmq::MQDescriptor;
+using ::aidl::android::hardware::common::fmq::SynchronizedReadWrite;
+using ::android::AidlMessageQueue;
+using CaptureRequestMetadataQueue = AidlMessageQueue<int8_t, SynchronizedReadWrite>;
+using CaptureResultMetadataQueue = AidlMessageQueue<int8_t, SynchronizedReadWrite>;
+
+// Stable NDK classes
+using SBnCameraDeviceUser = ::aidl::android::frameworks::cameraservice::device::BnCameraDeviceUser;
+using SCameraMetadata = ::aidl::android::frameworks::cameraservice::device::CameraMetadata;
+using SCaptureRequest = ::aidl::android::frameworks::cameraservice::device::CaptureRequest;
+using SOutputConfiguration =
+ ::aidl::android::frameworks::cameraservice::device::OutputConfiguration;
+using SPhysicalCameraSettings =
+ ::aidl::android::frameworks::cameraservice::device::PhysicalCameraSettings;
+using SSessionConfiguration =
+ ::aidl::android::frameworks::cameraservice::device::SessionConfiguration;
+using SStatus = ::aidl::android::frameworks::cameraservice::common::Status;
+using SStreamConfigurationMode =
+ ::aidl::android::frameworks::cameraservice::device::StreamConfigurationMode;
+using SSubmitInfo = ::aidl::android::frameworks::cameraservice::device::SubmitInfo;
+using STemplateId = ::aidl::android::frameworks::cameraservice::device::TemplateId;
+// Unstable NDK classes
+using UCaptureRequest= ::android::hardware::camera2::CaptureRequest;
+using UICameraDeviceUser = ::android::hardware::camera2::ICameraDeviceUser;
+
+static constexpr int32_t REQUEST_ID_NONE = -1;
+
+class AidlCameraDeviceUser final : public SBnCameraDeviceUser {
+ public:
+ explicit AidlCameraDeviceUser(const sp<UICameraDeviceUser> &deviceRemote);
+ ~AidlCameraDeviceUser() override = default;
+
+ ndk::ScopedAStatus beginConfigure() override;
+ ndk::ScopedAStatus cancelRepeatingRequest(int64_t* _aidl_return) override;
+ ndk::ScopedAStatus createDefaultRequest(STemplateId in_templateId,
+ SCameraMetadata* _aidl_return) override;
+ ndk::ScopedAStatus createStream(const SOutputConfiguration& in_outputConfiguration,
+ int32_t* _aidl_return) override;
+ ndk::ScopedAStatus deleteStream(int32_t in_streamId) override;
+ ndk::ScopedAStatus disconnect() override;
+ ndk::ScopedAStatus endConfigure(SStreamConfigurationMode in_operatingMode,
+ const SCameraMetadata& in_sessionParams,
+ int64_t in_startTimeNs) override;
+ ndk::ScopedAStatus flush(int64_t* _aidl_return) override;
+ ndk::ScopedAStatus getCaptureRequestMetadataQueue(
+ MQDescriptor<int8_t, SynchronizedReadWrite>* _aidl_return) override;
+ ndk::ScopedAStatus getCaptureResultMetadataQueue(
+ MQDescriptor<int8_t, SynchronizedReadWrite>* _aidl_return) override;
+ ndk::ScopedAStatus isSessionConfigurationSupported(
+ const SSessionConfiguration& in_sessionConfiguration, bool* _aidl_return) override;
+ ndk::ScopedAStatus prepare(int32_t in_streamId) override;
+ ndk::ScopedAStatus submitRequestList(const std::vector<SCaptureRequest>& in_requestList,
+ bool in_isRepeating, SSubmitInfo* _aidl_return) override;
+ ndk::ScopedAStatus updateOutputConfiguration(
+ int32_t in_streamId, const SOutputConfiguration& in_outputConfiguration) override;
+ ndk::ScopedAStatus waitUntilIdle() override;
+
+ [[nodiscard]] bool initStatus() const { return mInitSuccess; }
+
+ std::shared_ptr<CaptureResultMetadataQueue> getCaptureResultMetadataQueue() {
+ return mCaptureResultMetadataQueue;
+ }
+
+ private:
+ bool initDevice();
+
+ bool convertRequestFromAidl(const SCaptureRequest &src, UCaptureRequest *dst);
+ bool copyPhysicalCameraSettings(const std::vector<SPhysicalCameraSettings> &src,
+ std::vector<CaptureRequest::PhysicalCameraSettings> *dst);
+
+ const sp<UICameraDeviceUser> mDeviceRemote;
+ std::unique_ptr<CaptureRequestMetadataQueue> mCaptureRequestMetadataQueue = nullptr;
+ std::shared_ptr<CaptureResultMetadataQueue> mCaptureResultMetadataQueue = nullptr;
+ bool mInitSuccess = false;
+ int32_t mRequestId = REQUEST_ID_NONE;
+ int mVndkVersion = -1;
+};
+
+} // namespace android::frameworks::cameraservice::device::implementation
+
+#endif // FRAMEWORKS_AV_SERVICES_CAMERA_LIBCAMERASERVICE_AIDL_AIDLCAMERADEVICEUSER_H_
diff --git a/services/camera/libcameraservice/aidl/AidlCameraService.cpp b/services/camera/libcameraservice/aidl/AidlCameraService.cpp
new file mode 100644
index 0000000..a62f6de
--- /dev/null
+++ b/services/camera/libcameraservice/aidl/AidlCameraService.cpp
@@ -0,0 +1,328 @@
+/*
+ * Copyright (C) 2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#define LOG_TAG "AidlCameraService"
+
+#include "AidlCameraService.h"
+#include <aidl/AidlCameraDeviceCallbacks.h>
+#include <aidl/AidlCameraDeviceUser.h>
+#include <aidl/AidlCameraServiceListener.h>
+#include <aidl/AidlUtils.h>
+#include <aidl/android/frameworks/cameraservice/common/CameraMetadataType.h>
+#include <android-base/properties.h>
+#include <android/binder_ibinder.h>
+#include <android/binder_manager.h>
+#include <binder/Status.h>
+#include <hidl/HidlTransportSupport.h>
+
+namespace android::frameworks::cameraservice::service::implementation {
+
+using ::android::frameworks::cameraservice::device::implementation::AidlCameraDeviceCallbacks;
+using ::android::frameworks::cameraservice::device::implementation::AidlCameraDeviceUser;
+using ::android::hardware::cameraservice::utils::conversion::aidl::areBindersEqual;
+using ::android::hardware::cameraservice::utils::conversion::aidl::cloneToAidl;
+using ::android::hardware::cameraservice::utils::conversion::aidl::convertToAidl;
+using ::android::hardware::cameraservice::utils::conversion::aidl::filterVndkKeys;
+using ::ndk::ScopedAStatus;
+
+// VNDK classes
+using SCameraMetadataType = ::aidl::android::frameworks::cameraservice::common::CameraMetadataType;
+using SVendorTag = ::aidl::android::frameworks::cameraservice::common::VendorTag;
+using SVendorTagSection = ::aidl::android::frameworks::cameraservice::common::VendorTagSection;
+// NDK classes
+using UICameraService = ::android::hardware::ICameraService;
+using UStatus = ::android::binder::Status;
+
+namespace {
+inline ScopedAStatus fromSStatus(const SStatus& s) {
+ return s == SStatus::NO_ERROR ? ScopedAStatus::ok()
+ : ScopedAStatus::fromServiceSpecificError(
+ static_cast<int32_t>(s));
+}
+inline ScopedAStatus fromUStatus(const UStatus& s) {
+ return s.isOk() ? ScopedAStatus::ok() : fromSStatus(convertToAidl(s));
+}
+} // anonymous namespace
+
+std::shared_ptr<AidlCameraService> kCameraService;
+
+bool AidlCameraService::registerService(::android::CameraService* cameraService) {
+ kCameraService = SharedRefBase::make<AidlCameraService>(cameraService);
+ std::string serviceName = SBnCameraService::descriptor;
+ serviceName += "/default";
+ bool isDeclared = AServiceManager_isDeclared(serviceName.c_str());
+ if (!isDeclared) {
+ ALOGI("%s: AIDL vndk not declared.", __FUNCTION__);
+ return false;
+ }
+
+ binder_exception_t registered = AServiceManager_addService(
+ kCameraService->asBinder().get(), serviceName.c_str());
+ ALOGE_IF(registered != EX_NONE,
+ "%s: AIDL VNDK declared, but failed to register service: %d",
+ __FUNCTION__, registered);
+ return registered == EX_NONE;
+}
+
+AidlCameraService::AidlCameraService(::android::CameraService* cameraService):
+ mCameraService(cameraService) {
+ mVndkVersion = base::GetIntProperty("ro.vndk.version", __ANDROID_API_FUTURE__);
+}
+ScopedAStatus AidlCameraService::getCameraCharacteristics(const std::string& in_cameraId,
+ SCameraMetadata* _aidl_return) {
+ if (_aidl_return == nullptr) { return fromSStatus(SStatus::ILLEGAL_ARGUMENT); }
+
+ ::android::CameraMetadata cameraMetadata;
+ UStatus ret = mCameraService->getCameraCharacteristics(in_cameraId,
+ mVndkVersion,
+ /* overrideToPortrait= */ false,
+ &cameraMetadata);
+ if (!ret.isOk()) {
+ if (ret.exceptionCode() != EX_SERVICE_SPECIFIC) {
+ ALOGE("%s: Transaction error when getting camera characteristics"
+ " from camera service: %d.",
+ __FUNCTION__ , ret.exceptionCode());
+ return fromUStatus(ret);
+ }
+ switch (ret.serviceSpecificErrorCode()) {
+ case UICameraService::ERROR_ILLEGAL_ARGUMENT:
+ ALOGE("%s: Camera ID %s does not exist!", __FUNCTION__, in_cameraId.c_str());
+ return fromSStatus(SStatus::ILLEGAL_ARGUMENT);
+ default:
+ ALOGE("Get camera characteristics from camera service failed: %s",
+ ret.toString8().string());
+ return fromUStatus(ret);
+ }
+ }
+
+ if (filterVndkKeys(mVndkVersion, cameraMetadata) != OK) {
+ ALOGE("%s: Unable to filter vndk metadata keys for version %d",
+ __FUNCTION__, mVndkVersion);
+ return fromSStatus(SStatus::UNKNOWN_ERROR);
+ }
+
+ const camera_metadata_t* rawMetadata = cameraMetadata.getAndLock();
+ cloneToAidl(rawMetadata, _aidl_return);
+ cameraMetadata.unlock(rawMetadata);
+
+ return ScopedAStatus::ok();
+}
+ndk::ScopedAStatus AidlCameraService::connectDevice(
+ const std::shared_ptr<SICameraDeviceCallback>& in_callback,
+ const std::string& in_cameraId,
+ std::shared_ptr<SICameraDeviceUser>* _aidl_return) {
+ // Here, we first get NDK ICameraDeviceUser from mCameraService, then save
+ // that interface in the newly created AidlCameraDeviceUser impl class.
+ if (mCameraService == nullptr) {
+ return fromSStatus(SStatus::UNKNOWN_ERROR);
+ }
+ sp<hardware::camera2::ICameraDeviceUser> unstableDevice = nullptr;
+ // Create a hardware::camera2::ICameraDeviceCallback object which internally
+ // calls callback functions passed through hCallback.
+ sp<AidlCameraDeviceCallbacks> hybridCallbacks = new AidlCameraDeviceCallbacks(in_callback);
+ if (!hybridCallbacks->initializeLooper(mVndkVersion)) {
+ ALOGE("Unable to handle callbacks on device, cannot connect");
+ return fromSStatus(SStatus::UNKNOWN_ERROR);
+ }
+ sp<hardware::camera2::ICameraDeviceCallbacks> callbacks = hybridCallbacks;
+ binder::Status serviceRet = mCameraService->connectDevice(
+ callbacks,
+ in_cameraId,
+ std::string(),
+ /* clientFeatureId= */{},
+ hardware::ICameraService::USE_CALLING_UID,
+ /* scoreOffset= */ 0,
+ /* targetSdkVersion= */ __ANDROID_API_FUTURE__,
+ /* overrideToPortrait= */ false,
+ &unstableDevice);
+ if (!serviceRet.isOk()) {
+ ALOGE("%s: Unable to connect to camera device: %s", __FUNCTION__,
+ serviceRet.toString8().c_str());
+ return fromUStatus(serviceRet);
+ }
+
+ // Now we create a AidlCameraDeviceUser class, store the unstableDevice in it,
+ // and return that back. All calls on that interface will be forwarded to
+ // the NDK AIDL interface.
+ std::shared_ptr<AidlCameraDeviceUser> stableDevice =
+ ndk::SharedRefBase::make<AidlCameraDeviceUser>(unstableDevice);
+ if (!stableDevice->initStatus()) {
+ ALOGE("%s: Unable to initialize camera device AIDL wrapper", __FUNCTION__);
+ return fromSStatus(SStatus::UNKNOWN_ERROR);
+ }
+ hybridCallbacks->setCaptureResultMetadataQueue(
+ stableDevice->getCaptureResultMetadataQueue());
+ *_aidl_return = stableDevice;
+ return ScopedAStatus::ok();
+}
+void AidlCameraService::addToListenerCacheLocked(
+ std::shared_ptr<SICameraServiceListener> stableCsListener,
+ sp<UICameraServiceListener> csListener) {
+ mListeners.emplace_back(std::make_pair(stableCsListener, csListener));
+}
+sp<UICameraServiceListener> AidlCameraService::searchListenerCacheLocked(
+ const std::shared_ptr<SICameraServiceListener>& listener, bool removeIfFound) {
+ // Go through the mListeners list and compare the listener with the VNDK AIDL
+ // listener registered.
+ if (listener == nullptr) {
+ return nullptr;
+ }
+
+ auto it = mListeners.begin();
+ sp<UICameraServiceListener> csListener = nullptr;
+ for (;it != mListeners.end(); it++) {
+ if (areBindersEqual(listener->asBinder(), it->first->asBinder())) {
+ break;
+ }
+ }
+ if (it != mListeners.end()) {
+ csListener = it->second;
+ if (removeIfFound) {
+ mListeners.erase(it);
+ }
+ }
+ return csListener;
+}
+ndk::ScopedAStatus AidlCameraService::addListener(
+ const std::shared_ptr<SICameraServiceListener>& in_listener,
+ std::vector<SCameraStatusAndId>* _aidl_return) {
+ std::vector<hardware::CameraStatus> cameraStatusAndIds{};
+ SStatus status = addListenerInternal(
+ in_listener, &cameraStatusAndIds);
+ if (status != SStatus::NO_ERROR) {
+ return fromSStatus(status);
+ }
+
+ // Convert cameraStatusAndIds to VNDK AIDL
+ convertToAidl(cameraStatusAndIds, _aidl_return);
+ return ScopedAStatus::ok();
+}
+SStatus AidlCameraService::addListenerInternal(
+ const std::shared_ptr<SICameraServiceListener>& listener,
+ std::vector<hardware::CameraStatus>* cameraStatusAndIds) {
+ if (mCameraService == nullptr) {
+ return SStatus::UNKNOWN_ERROR;
+ }
+ if (listener == nullptr || cameraStatusAndIds == nullptr) {
+ ALOGE("%s listener and cameraStatusAndIds must not be NULL", __FUNCTION__);
+ return SStatus::ILLEGAL_ARGUMENT;
+ }
+ sp<UICameraServiceListener> csListener = nullptr;
+ // Check the cache for previously registered callbacks
+ {
+ Mutex::Autolock l(mListenerListLock);
+ csListener = searchListenerCacheLocked(listener);
+ if (csListener == nullptr) {
+ // Wrap a listener with AidlCameraServiceListener and pass it to
+ // CameraService.
+ csListener = sp<AidlCameraServiceListener>::make(listener);
+ // Add to cache
+ addToListenerCacheLocked(listener, csListener);
+ } else {
+ ALOGE("%s: Trying to add a listener %p already registered",
+ __FUNCTION__, listener.get());
+ return SStatus::ILLEGAL_ARGUMENT;
+ }
+ }
+ binder::Status serviceRet =
+ mCameraService->addListenerHelper(csListener, cameraStatusAndIds, true);
+ if (!serviceRet.isOk()) {
+ ALOGE("%s: Unable to add camera device status listener", __FUNCTION__);
+ return convertToAidl(serviceRet);
+ }
+
+ cameraStatusAndIds->erase(std::remove_if(cameraStatusAndIds->begin(),
+ cameraStatusAndIds->end(),
+ [this](const hardware::CameraStatus& s) {
+ bool supportsHAL3 = false;
+ binder::Status sRet =
+ mCameraService->supportsCameraApi(s.cameraId,
+ UICameraService::API_VERSION_2, &supportsHAL3);
+ return !sRet.isOk() || !supportsHAL3;
+ }), cameraStatusAndIds->end());
+
+ return SStatus::NO_ERROR;
+}
+ndk::ScopedAStatus AidlCameraService::removeListener(
+ const std::shared_ptr<SICameraServiceListener>& in_listener) {
+ if (in_listener == nullptr) {
+ ALOGE("%s listener must not be NULL", __FUNCTION__);
+ return fromSStatus(SStatus::ILLEGAL_ARGUMENT);
+ }
+ sp<UICameraServiceListener> csListener = nullptr;
+ {
+ Mutex::Autolock l(mListenerListLock);
+ csListener = searchListenerCacheLocked(in_listener, /*removeIfFound*/true);
+ }
+ if (csListener != nullptr) {
+ mCameraService->removeListener(csListener);
+ } else {
+ ALOGE("%s Removing unregistered listener %p", __FUNCTION__, in_listener.get());
+ return fromSStatus(SStatus::ILLEGAL_ARGUMENT);
+ }
+ return ScopedAStatus::ok();
+}
+ndk::ScopedAStatus AidlCameraService::getCameraVendorTagSections(
+ std::vector<SProviderIdAndVendorTagSections>* _aidl_return) {
+ sp<VendorTagDescriptorCache> gCache = VendorTagDescriptorCache::getGlobalVendorTagCache();
+ if (gCache == nullptr) {
+ return fromSStatus(SStatus::UNKNOWN_ERROR);
+ }
+
+ const std::unordered_map<metadata_vendor_id_t, sp<android::VendorTagDescriptor>>
+ &vendorIdsAndTagDescs = gCache->getVendorIdsAndTagDescriptors();
+ if (vendorIdsAndTagDescs.empty()) {
+ return fromSStatus(SStatus::UNKNOWN_ERROR);
+ }
+
+ std::vector<SProviderIdAndVendorTagSections>& tagIdAndVendorTagSections = *_aidl_return;
+ tagIdAndVendorTagSections.resize(vendorIdsAndTagDescs.size());
+ size_t j = 0;
+ for (auto &vendorIdAndTagDescs : vendorIdsAndTagDescs) {
+ std::vector<SVendorTagSection> vendorTagSections;
+ sp<VendorTagDescriptor> desc = vendorIdAndTagDescs.second;
+ const SortedVector<String8>* sectionNames = desc->getAllSectionNames();
+ size_t numSections = sectionNames->size();
+ std::vector<std::vector<SVendorTag>> tagsBySection(numSections);
+ int tagCount = desc->getTagCount();
+ if (tagCount <= 0) {
+ continue;
+ }
+ std::vector<uint32_t> tags(tagCount);
+ desc->getTagArray(tags.data());
+ for (int i = 0; i < tagCount; i++) {
+ SVendorTag vt;
+ vt.tagId = tags[i];
+ vt.tagName = desc->getTagName(tags[i]);
+ vt.tagType = (SCameraMetadataType) desc->getTagType(tags[i]);
+ ssize_t sectionIdx = desc->getSectionIndex(tags[i]);
+ tagsBySection[sectionIdx].push_back(vt);
+ }
+ vendorTagSections.resize(numSections);
+ for (size_t s = 0; s < numSections; s++) {
+ vendorTagSections[s].sectionName = (*sectionNames)[s].string();
+ vendorTagSections[s].tags = tagsBySection[s];
+ }
+ SProviderIdAndVendorTagSections & prvdrIdAndVendorTagSection =
+ tagIdAndVendorTagSections[j];
+ prvdrIdAndVendorTagSection.providerId = vendorIdAndTagDescs.first;
+ prvdrIdAndVendorTagSection.vendorTagSections = std::move(vendorTagSections);
+ j++;
+ }
+ return ScopedAStatus::ok();
+}
+
+} // namespace android::frameworks::cameraservice::service::implementation
diff --git a/services/camera/libcameraservice/aidl/AidlCameraService.h b/services/camera/libcameraservice/aidl/AidlCameraService.h
new file mode 100644
index 0000000..4c67ac7
--- /dev/null
+++ b/services/camera/libcameraservice/aidl/AidlCameraService.h
@@ -0,0 +1,85 @@
+/*
+ * Copyright (C) 2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef FRAMEWORKS_AV_SERVICES_CAMERA_LIBCAMERASERVICE_AIDL_AIDLCAMERASERVICE_H_
+#define FRAMEWORKS_AV_SERVICES_CAMERA_LIBCAMERASERVICE_AIDL_AIDLCAMERASERVICE_H_
+
+#include <CameraService.h>
+#include <aidl/android/frameworks/cameraservice/common/Status.h>
+#include <aidl/android/frameworks/cameraservice/service/BnCameraService.h>
+
+namespace android::frameworks::cameraservice::service::implementation {
+
+// VNDK classes
+using SBnCameraService = ::aidl::android::frameworks::cameraservice::service::BnCameraService;
+using SCameraMetadata = ::aidl::android::frameworks::cameraservice::device::CameraMetadata;
+using SCameraStatusAndId = ::aidl::android::frameworks::cameraservice::service::CameraStatusAndId;
+using SICameraDeviceCallback =
+ ::aidl::android::frameworks::cameraservice::device::ICameraDeviceCallback;
+using SICameraDeviceUser = ::aidl::android::frameworks::cameraservice::device::ICameraDeviceUser;
+using SICameraServiceListener =
+ ::aidl::android::frameworks::cameraservice::service::ICameraServiceListener;
+using SProviderIdAndVendorTagSections =
+ ::aidl::android::frameworks::cameraservice::common::ProviderIdAndVendorTagSections;
+using SStatus = ::aidl::android::frameworks::cameraservice::common::Status;
+// NDK classes
+using UICameraServiceListener = ::android::hardware::ICameraServiceListener;
+
+class AidlCameraService: public SBnCameraService {
+ public:
+ static bool registerService(::android::CameraService* cameraService);
+
+ explicit AidlCameraService(::android::CameraService* cameraService);
+ ~AidlCameraService() override = default;
+ ndk::ScopedAStatus getCameraCharacteristics(const std::string& in_cameraId,
+ SCameraMetadata* _aidl_return) override;
+
+ ndk::ScopedAStatus connectDevice(const std::shared_ptr<SICameraDeviceCallback>& in_callback,
+ const std::string& in_cameraId,
+ std::shared_ptr<SICameraDeviceUser>* _aidl_return) override;
+
+ ndk::ScopedAStatus addListener(const std::shared_ptr<SICameraServiceListener>& in_listener,
+ std::vector<SCameraStatusAndId>* _aidl_return) override;
+
+ ndk::ScopedAStatus getCameraVendorTagSections(
+ std::vector<SProviderIdAndVendorTagSections>* _aidl_return) override;
+
+ ndk::ScopedAStatus removeListener(
+ const std::shared_ptr<SICameraServiceListener>& in_listener) override;
+
+ private:
+ void addToListenerCacheLocked(std::shared_ptr<SICameraServiceListener> stableCsListener,
+ sp<hardware::ICameraServiceListener> csListener);
+
+ sp<UICameraServiceListener> searchListenerCacheLocked(
+ const std::shared_ptr<SICameraServiceListener>& listener, bool removeIfFound = false);
+
+ SStatus addListenerInternal(const std::shared_ptr<SICameraServiceListener>& listener,
+ std::vector<hardware::CameraStatus>* cameraStatusAndIds);
+
+
+ ::android::CameraService* mCameraService;
+
+ Mutex mListenerListLock;
+ std::list<std::pair<std::shared_ptr<SICameraServiceListener>,
+ sp<UICameraServiceListener>>> mListeners;
+ int mVndkVersion = -1;
+
+};
+
+} // namespace android::frameworks::cameraservice::service::implementation
+
+#endif // FRAMEWORKS_AV_SERVICES_CAMERA_LIBCAMERASERVICE_AIDL_AIDLCAMERASERVICE_H_
diff --git a/services/camera/libcameraservice/aidl/AidlCameraServiceListener.cpp b/services/camera/libcameraservice/aidl/AidlCameraServiceListener.cpp
new file mode 100644
index 0000000..d7ab0d9
--- /dev/null
+++ b/services/camera/libcameraservice/aidl/AidlCameraServiceListener.cpp
@@ -0,0 +1,69 @@
+/*
+ * Copyright (C) 2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <aidl/AidlCameraServiceListener.h>
+#include <aidl/AidlUtils.h>
+#include <aidl/android/frameworks/cameraservice/common/Status.h>
+#include <aidl/android/frameworks/cameraservice/service/CameraStatusAndId.h>
+#include <camera/StringUtils.h>
+
+namespace android::frameworks::cameraservice::service::implementation {
+
+using ::android::hardware::cameraservice::utils::conversion::aidl::convertCameraStatusToAidl;
+// VNDK classes
+using SCameraStatusAndId = ::aidl::android::frameworks::cameraservice::service::CameraStatusAndId;
+using SStatus = ::aidl::android::frameworks::cameraservice::common::Status;
+
+binder::Status AidlCameraServiceListener::onStatusChanged(
+ int32_t status, const std::string& cameraId) {
+ SCameraDeviceStatus sStatus = convertCameraStatusToAidl(status);
+ auto ret = mBase->onStatusChanged(sStatus, cameraId);
+ LOG_STATUS_ERROR_IF_NOT_OK(ret, "onStatusChanged")
+ return binder::Status::ok();
+}
+
+binder::Status AidlCameraServiceListener::onPhysicalCameraStatusChanged(
+ int32_t status, const std::string& cameraId,
+ const std::string& physicalCameraId) {
+ SCameraDeviceStatus sStatus = convertCameraStatusToAidl(status);
+
+ auto ret = mBase->onPhysicalCameraStatusChanged(sStatus, cameraId, physicalCameraId);
+ LOG_STATUS_ERROR_IF_NOT_OK(ret, "onPhysicalCameraStatusChanged")
+ return binder::Status::ok();
+}
+
+::android::binder::Status AidlCameraServiceListener::onTorchStatusChanged(
+ int32_t, const std::string&) {
+ // We don't implement onTorchStatusChanged
+ return binder::Status::ok();
+}
+
+::android::binder::Status AidlCameraServiceListener::onTorchStrengthLevelChanged(
+ const std::string&, int32_t) {
+ // We don't implement onTorchStrengthLevelChanged
+ return binder::Status::ok();
+}
+status_t AidlCameraServiceListener::linkToDeath(const sp<DeathRecipient>& recipient, void* cookie,
+ uint32_t flags) {
+ return mDeathPipe.linkToDeath(recipient, cookie, flags);
+}
+status_t AidlCameraServiceListener::unlinkToDeath(const wp<DeathRecipient>& recipient, void* cookie,
+ uint32_t flags,
+ wp<DeathRecipient>* outRecipient) {
+ return mDeathPipe.unlinkToDeath(recipient, cookie, flags, outRecipient);
+}
+
+} // namespace android::frameworks::cameraservice::service::implementation
diff --git a/services/camera/libcameraservice/aidl/AidlCameraServiceListener.h b/services/camera/libcameraservice/aidl/AidlCameraServiceListener.h
new file mode 100644
index 0000000..6483fe1
--- /dev/null
+++ b/services/camera/libcameraservice/aidl/AidlCameraServiceListener.h
@@ -0,0 +1,86 @@
+/*
+ * Copyright (C) 2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef FRAMEWORKS_AV_SERVICES_CAMERA_LIBCAMERASERVICE_AIDL_AIDLCAMERASERVICELISTENER_H_
+#define FRAMEWORKS_AV_SERVICES_CAMERA_LIBCAMERASERVICE_AIDL_AIDLCAMERASERVICELISTENER_H_
+
+
+#include <aidl/DeathPipe.h>
+#include <aidl/android/frameworks/cameraservice/service/CameraDeviceStatus.h>
+#include <aidl/android/frameworks/cameraservice/service/ICameraServiceListener.h>
+#include <android/hardware/BnCameraServiceListener.h>
+
+namespace android::frameworks::cameraservice::service::implementation {
+
+using ::android::frameworks::cameraservice::utils::DeathPipe;
+
+// VNDK classes
+using SCameraDeviceStatus = ::aidl::android::frameworks::cameraservice::service::CameraDeviceStatus;
+using SICameraServiceListener =
+ ::aidl::android::frameworks::cameraservice::service::ICameraServiceListener;
+// NDK classes
+using UBnCameraServiceListener = ::android::hardware::BnCameraServiceListener;
+
+/**
+ * A simple shim to pass calls from CameraService to VNDK client.
+ */
+class AidlCameraServiceListener : public UBnCameraServiceListener {
+ public:
+ AidlCameraServiceListener(const std::shared_ptr<SICameraServiceListener>& base):
+ mBase(base), mDeathPipe(this, base->asBinder()) {}
+
+ ~AidlCameraServiceListener() = default;
+
+ ::android::binder::Status onStatusChanged(int32_t status,
+ const std::string& cameraId) override;
+ ::android::binder::Status onPhysicalCameraStatusChanged(int32_t status,
+ const std::string& cameraId,
+ const std::string& physicalCameraId) override;
+
+ ::android::binder::Status onTorchStatusChanged(
+ int32_t status, const std::string& cameraId) override;
+ ::android::binder::Status onTorchStrengthLevelChanged(
+ const std::string& cameraId, int32_t newStrengthLevel) override;
+ binder::Status onCameraAccessPrioritiesChanged() override {
+ // TODO: no implementation yet.
+ return binder::Status::ok();
+ }
+ binder::Status onCameraOpened(const std::string& /*cameraId*/,
+ const std::string& /*clientPackageId*/) override {
+ // empty implementation
+ return binder::Status::ok();
+ }
+ binder::Status onCameraClosed(const std::string& /*cameraId*/) override {
+ // empty implementation
+ return binder::Status::ok();
+ }
+
+ status_t linkToDeath(const sp<DeathRecipient>& recipient, void* cookie,
+ uint32_t flags) override;
+ status_t unlinkToDeath(const wp<DeathRecipient>& recipient, void* cookie, uint32_t flags,
+ wp<DeathRecipient>* outRecipient) override;
+
+ private:
+ std::shared_ptr<SICameraServiceListener> mBase;
+
+ // Pipes death subscription to current NDK AIDL interface to VNDK mBase.
+ // Should consume calls to linkToDeath and unlinkToDeath.
+ DeathPipe mDeathPipe;
+};
+
+} // namespace android::frameworks::cameraservice::service::implementation
+
+#endif // FRAMEWORKS_AV_SERVICES_CAMERA_LIBCAMERASERVICE_AIDL_AIDLCAMERASERVICELISTENER_H_
\ No newline at end of file
diff --git a/services/camera/libcameraservice/aidl/AidlUtils.cpp b/services/camera/libcameraservice/aidl/AidlUtils.cpp
new file mode 100644
index 0000000..7291c5f
--- /dev/null
+++ b/services/camera/libcameraservice/aidl/AidlUtils.cpp
@@ -0,0 +1,305 @@
+/*
+ * Copyright (C) 2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#define LOG_TAG "AidlUtils"
+
+#include <aidl/AidlUtils.h>
+#include <aidl/VndkVersionMetadataTags.h>
+#include <aidlcommonsupport/NativeHandle.h>
+#include <device3/Camera3StreamInterface.h>
+#include <gui/bufferqueue/1.0/H2BGraphicBufferProducer.h>
+#include <mediautils/AImageReaderUtils.h>
+#include <camera/StringUtils.h>
+
+namespace android::hardware::cameraservice::utils::conversion::aidl {
+
+using aimg::AImageReader_getHGBPFromHandle;
+using hardware::graphics::bufferqueue::V1_0::utils::H2BGraphicBufferProducer;
+
+// Note: existing data in dst will be gone. Caller still owns the memory of src
+void cloneToAidl(const camera_metadata_t* src, SCameraMetadata* dst) {
+ if (src == nullptr) {
+ ALOGW("%s:attempt to convert empty metadata to AIDL", __FUNCTION__);
+ return;
+ }
+ size_t size = get_camera_metadata_size(src);
+ uint8_t* startPtr = (uint8_t*)src;
+ uint8_t* endPtr = startPtr + size;
+ dst->metadata.assign(startPtr, endPtr);
+}
+
+// The camera metadata here is cloned. Since we're reading metadata over
+// the binder we would need to clone it in order to avoid alignment issues.
+bool cloneFromAidl(const SCameraMetadata &src, CameraMetadata *dst) {
+ const camera_metadata_t *buffer =
+ reinterpret_cast<const camera_metadata_t*>(src.metadata.data());
+ size_t expectedSize = src.metadata.size();
+ if (buffer != nullptr) {
+ int res = validate_camera_metadata_structure(buffer, &expectedSize);
+ if (res == OK || res == CAMERA_METADATA_VALIDATION_SHIFTED) {
+ *dst = buffer;
+ } else {
+ ALOGE("%s: Malformed camera metadata received from HAL", __FUNCTION__);
+ return false;
+ }
+ }
+ return true;
+}
+
+int32_t convertFromAidl(SStreamConfigurationMode streamConfigurationMode) {
+ switch (streamConfigurationMode) {
+ case SStreamConfigurationMode::CONSTRAINED_HIGH_SPEED_MODE:
+ return camera2::ICameraDeviceUser::CONSTRAINED_HIGH_SPEED_MODE;
+ case SStreamConfigurationMode::NORMAL_MODE:
+ return camera2::ICameraDeviceUser::NORMAL_MODE;
+ default:
+ // TODO: Fix this
+ return camera2::ICameraDeviceUser::VENDOR_MODE_START;
+ }
+}
+
+UOutputConfiguration convertFromAidl(const SOutputConfiguration &src) {
+ std::vector<sp<IGraphicBufferProducer>> iGBPs;
+ auto &windowHandles = src.windowHandles;
+ iGBPs.reserve(windowHandles.size());
+
+ for (auto &handle : windowHandles) {
+ native_handle_t* nh = makeFromAidl(handle);
+ iGBPs.push_back(new H2BGraphicBufferProducer(AImageReader_getHGBPFromHandle(nh)));
+ native_handle_delete(nh);
+ }
+ UOutputConfiguration outputConfiguration(
+ iGBPs, convertFromAidl(src.rotation), src.physicalCameraId,
+ src.windowGroupId, OutputConfiguration::SURFACE_TYPE_UNKNOWN, 0, 0,
+ (windowHandles.size() > 1));
+ return outputConfiguration;
+}
+
+USessionConfiguration convertFromAidl(const SSessionConfiguration &src) {
+ USessionConfiguration sessionConfig(src.inputWidth, src.inputHeight,
+ src.inputFormat, static_cast<int>(src.operationMode));
+
+ for (const auto& os : src.outputStreams) {
+ UOutputConfiguration config = convertFromAidl(os);
+ sessionConfig.addOutputConfiguration(config);
+ }
+
+ return sessionConfig;
+}
+
+int convertFromAidl(SOutputConfiguration::Rotation rotation) {
+ switch(rotation) {
+ case SOutputConfiguration::Rotation::R270:
+ return android::camera3::CAMERA_STREAM_ROTATION_270;
+ case SOutputConfiguration::Rotation::R180:
+ return android::camera3::CAMERA_STREAM_ROTATION_180;
+ case SOutputConfiguration::Rotation::R90:
+ return android::camera3::CAMERA_STREAM_ROTATION_90;
+ case SOutputConfiguration::Rotation::R0:
+ default:
+ return android::camera3::CAMERA_STREAM_ROTATION_0;
+ }
+}
+
+int32_t convertFromAidl(STemplateId templateId) {
+ switch(templateId) {
+ case STemplateId::PREVIEW:
+ return camera2::ICameraDeviceUser::TEMPLATE_PREVIEW;
+ case STemplateId::STILL_CAPTURE:
+ return camera2::ICameraDeviceUser::TEMPLATE_STILL_CAPTURE;
+ case STemplateId::RECORD:
+ return camera2::ICameraDeviceUser::TEMPLATE_RECORD;
+ case STemplateId::VIDEO_SNAPSHOT:
+ return camera2::ICameraDeviceUser::TEMPLATE_VIDEO_SNAPSHOT;
+ case STemplateId::ZERO_SHUTTER_LAG:
+ return camera2::ICameraDeviceUser::TEMPLATE_ZERO_SHUTTER_LAG;
+ case STemplateId::MANUAL:
+ return camera2::ICameraDeviceUser::TEMPLATE_MANUAL;
+ }
+}
+
+void convertToAidl(const camera2::utils::SubmitInfo& submitInfo, SSubmitInfo* hSubmitInfo) {
+ hSubmitInfo->requestId = submitInfo.mRequestId;
+ hSubmitInfo->lastFrameNumber = submitInfo.mLastFrameNumber;
+}
+
+
+SStatus convertToAidl(const binder::Status &status) {
+ if (status.isOk()) {
+ return SStatus::NO_ERROR;
+ }
+ if (status.exceptionCode() != EX_SERVICE_SPECIFIC) {
+ return SStatus::UNKNOWN_ERROR;
+ }
+
+ switch (status.serviceSpecificErrorCode()) {
+ case hardware::ICameraService::ERROR_DISCONNECTED:
+ return SStatus::DISCONNECTED;
+ case hardware::ICameraService::ERROR_CAMERA_IN_USE:
+ return SStatus::CAMERA_IN_USE;
+ case hardware::ICameraService::ERROR_MAX_CAMERAS_IN_USE:
+ return SStatus::MAX_CAMERAS_IN_USE;
+ case hardware::ICameraService::ERROR_ILLEGAL_ARGUMENT:
+ return SStatus::ILLEGAL_ARGUMENT;
+ case hardware::ICameraService::ERROR_DEPRECATED_HAL:
+ // Should not reach here since we filtered legacy HALs earlier
+ return SStatus::DEPRECATED_HAL;
+ case hardware::ICameraService::ERROR_DISABLED:
+ return SStatus::DISABLED;
+ case hardware::ICameraService::ERROR_PERMISSION_DENIED:
+ return SStatus::PERMISSION_DENIED;
+ case hardware::ICameraService::ERROR_INVALID_OPERATION:
+ return SStatus::INVALID_OPERATION;
+ default:
+ return SStatus::UNKNOWN_ERROR;
+ }
+}
+
+SCaptureResultExtras convertToAidl(const UCaptureResultExtras &src) {
+ SCaptureResultExtras dst;
+ dst.requestId = src.requestId;
+ dst.burstId = src.burstId;
+ dst.frameNumber = src.frameNumber;
+ dst.partialResultCount = src.partialResultCount;
+ dst.errorStreamId = src.errorStreamId;
+ dst.errorPhysicalCameraId = src.errorPhysicalCameraId;
+ return dst;
+}
+
+SErrorCode convertToAidl(int32_t errorCode) {
+ switch(errorCode) {
+ case camera2::ICameraDeviceCallbacks::ERROR_CAMERA_DISCONNECTED:
+ return SErrorCode::CAMERA_DISCONNECTED;
+ case camera2::ICameraDeviceCallbacks::ERROR_CAMERA_DEVICE :
+ return SErrorCode::CAMERA_DEVICE;
+ case camera2::ICameraDeviceCallbacks::ERROR_CAMERA_SERVICE:
+ return SErrorCode::CAMERA_SERVICE;
+ case camera2::ICameraDeviceCallbacks::ERROR_CAMERA_REQUEST:
+ return SErrorCode::CAMERA_REQUEST;
+ case camera2::ICameraDeviceCallbacks::ERROR_CAMERA_RESULT:
+ return SErrorCode::CAMERA_RESULT;
+ case camera2::ICameraDeviceCallbacks::ERROR_CAMERA_BUFFER:
+ return SErrorCode::CAMERA_BUFFER;
+ case camera2::ICameraDeviceCallbacks::ERROR_CAMERA_DISABLED:
+ return SErrorCode::CAMERA_DISABLED;
+ case camera2::ICameraDeviceCallbacks::ERROR_CAMERA_INVALID_ERROR:
+ return SErrorCode::CAMERA_INVALID_ERROR;
+ default:
+ return SErrorCode::CAMERA_UNKNOWN_ERROR;
+ }
+}
+
+std::vector<SPhysicalCaptureResultInfo> convertToAidl(
+ const std::vector<UPhysicalCaptureResultInfo>& src,
+ std::shared_ptr<CaptureResultMetadataQueue>& fmq) {
+ std::vector<SPhysicalCaptureResultInfo> dst;
+ dst.resize(src.size());
+ size_t i = 0;
+ for (auto &physicalCaptureResultInfo : src) {
+ dst[i++] = convertToAidl(physicalCaptureResultInfo, fmq);
+ }
+ return dst;
+}
+
+SPhysicalCaptureResultInfo convertToAidl(const UPhysicalCaptureResultInfo & src,
+ std::shared_ptr<CaptureResultMetadataQueue> & fmq) {
+ SPhysicalCaptureResultInfo dst;
+ dst.physicalCameraId = src.mPhysicalCameraId;
+
+ const camera_metadata_t *rawMetadata = src.mPhysicalCameraMetadata.getAndLock();
+ // Try using fmq at first.
+ size_t metadata_size = get_camera_metadata_size(rawMetadata);
+ if ((metadata_size > 0) && (fmq->availableToWrite() > 0)) {
+ if (fmq->write((int8_t *)rawMetadata, metadata_size)) {
+ dst.physicalCameraMetadata.set<SCaptureMetadataInfo::fmqMetadataSize>(metadata_size);
+ } else {
+ ALOGW("%s Couldn't use fmq, falling back to hwbinder", __FUNCTION__);
+ SCameraMetadata metadata;
+ cloneToAidl(rawMetadata, &metadata);
+ dst.physicalCameraMetadata.set<SCaptureMetadataInfo::metadata>(std::move(metadata));
+ }
+ }
+ src.mPhysicalCameraMetadata.unlock(rawMetadata);
+ return dst;
+}
+
+void convertToAidl(const std::vector<hardware::CameraStatus> &src,
+ std::vector<SCameraStatusAndId>* dst) {
+ dst->resize(src.size());
+ size_t i = 0;
+ for (const auto &statusAndId : src) {
+ auto &a = (*dst)[i++];
+ a.cameraId = statusAndId.cameraId;
+ a.deviceStatus = convertCameraStatusToAidl(statusAndId.status);
+ size_t numUnvailPhysicalCameras = statusAndId.unavailablePhysicalIds.size();
+ a.unavailPhysicalCameraIds.resize(numUnvailPhysicalCameras);
+ for (size_t j = 0; j < numUnvailPhysicalCameras; j++) {
+ a.unavailPhysicalCameraIds[j] = statusAndId.unavailablePhysicalIds[j];
+ }
+ }
+}
+
+SCameraDeviceStatus convertCameraStatusToAidl(int32_t src) {
+ SCameraDeviceStatus deviceStatus = SCameraDeviceStatus::STATUS_UNKNOWN;
+ switch(src) {
+ case hardware::ICameraServiceListener::STATUS_NOT_PRESENT:
+ deviceStatus = SCameraDeviceStatus::STATUS_NOT_PRESENT;
+ break;
+ case hardware::ICameraServiceListener::STATUS_PRESENT:
+ deviceStatus = SCameraDeviceStatus::STATUS_PRESENT;
+ break;
+ case hardware::ICameraServiceListener::STATUS_ENUMERATING:
+ deviceStatus = SCameraDeviceStatus::STATUS_ENUMERATING;
+ break;
+ case hardware::ICameraServiceListener::STATUS_NOT_AVAILABLE:
+ deviceStatus = SCameraDeviceStatus::STATUS_NOT_AVAILABLE;
+ break;
+ default:
+ break;
+ }
+ return deviceStatus;
+}
+
+bool areBindersEqual(const ndk::SpAIBinder& b1, const ndk::SpAIBinder& b2) {
+ return !AIBinder_lt(b1.get(), b2.get()) && !AIBinder_lt(b2.get(), b1.get());
+}
+
+status_t filterVndkKeys(int vndkVersion, CameraMetadata &metadata, bool isStatic) {
+ if (vndkVersion == __ANDROID_API_FUTURE__) {
+ // VNDK version in ro.vndk.version is a version code-name that
+ // corresponds to the current version.
+ return OK;
+ }
+ const auto &apiLevelToKeys =
+ isStatic ? static_api_level_to_keys : dynamic_api_level_to_keys;
+ // Find the vndk versions above the given vndk version. All the vndk
+ // versions above the given one, need to have their keys filtered from the
+ // metadata in order to avoid metadata invalidation.
+ auto it = apiLevelToKeys.upper_bound(vndkVersion);
+ while (it != apiLevelToKeys.end()) {
+ for (const auto &key : it->second) {
+ status_t res = metadata.erase(key);
+ if (res != OK) {
+ ALOGE("%s metadata key %d could not be erased", __FUNCTION__, key);
+ return res;
+ }
+ }
+ it++;
+ }
+ return OK;
+}
+
+} // namespace android::hardware::cameraservice::utils::conversion::aidl
diff --git a/services/camera/libcameraservice/aidl/AidlUtils.h b/services/camera/libcameraservice/aidl/AidlUtils.h
new file mode 100644
index 0000000..c89d7ff
--- /dev/null
+++ b/services/camera/libcameraservice/aidl/AidlUtils.h
@@ -0,0 +1,127 @@
+/*
+ * Copyright (C) 2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef FRAMEWORKS_AV_SERVICES_CAMERA_LIBCAMERASERVICE_AIDL_AIDLUTILS_H_
+#define FRAMEWORKS_AV_SERVICES_CAMERA_LIBCAMERASERVICE_AIDL_AIDLUTILS_H_
+
+#include <aidl/android/frameworks/cameraservice/common/Status.h>
+#include <aidl/android/frameworks/cameraservice/device/CameraMetadata.h>
+#include <aidl/android/frameworks/cameraservice/device/CaptureResultExtras.h>
+#include <aidl/android/frameworks/cameraservice/device/ErrorCode.h>
+#include <aidl/android/frameworks/cameraservice/device/CaptureMetadataInfo.h>
+#include <aidl/android/frameworks/cameraservice/device/OutputConfiguration.h>
+#include <aidl/android/frameworks/cameraservice/device/PhysicalCaptureResultInfo.h>
+#include <aidl/android/frameworks/cameraservice/device/SessionConfiguration.h>
+#include <aidl/android/frameworks/cameraservice/device/StreamConfigurationMode.h>
+#include <aidl/android/frameworks/cameraservice/device/SubmitInfo.h>
+#include <aidl/android/frameworks/cameraservice/device/TemplateId.h>
+#include <aidl/android/frameworks/cameraservice/service/CameraDeviceStatus.h>
+#include <aidl/android/frameworks/cameraservice/service/CameraStatusAndId.h>
+#include <android/hardware/ICameraService.h>
+#include <android/hardware/camera2/ICameraDeviceUser.h>
+#include <android/hardware/graphics/bufferqueue/1.0/IGraphicBufferProducer.h>
+#include <camera/CameraMetadata.h>
+#include <fmq/AidlMessageQueue.h>
+#include <hardware/camera.h>
+
+namespace android::hardware::cameraservice::utils::conversion::aidl {
+
+using ::aidl::android::hardware::common::fmq::SynchronizedReadWrite;
+using ::android::AidlMessageQueue;
+using ::android::CameraMetadata;
+using CaptureResultMetadataQueue = AidlMessageQueue<int8_t, SynchronizedReadWrite>;
+
+// VNDK classes
+using SCameraDeviceStatus = ::aidl::android::frameworks::cameraservice::service::CameraDeviceStatus;
+using SCameraMetadata = ::aidl::android::frameworks::cameraservice::device::CameraMetadata;
+using SCameraStatusAndId = ::aidl::android::frameworks::cameraservice::service::CameraStatusAndId;
+using SCaptureResultExtras =
+ ::aidl::android::frameworks::cameraservice::device::CaptureResultExtras;
+using SErrorCode = ::aidl::android::frameworks::cameraservice::device::ErrorCode;
+using SCaptureMetadataInfo = ::aidl::android::frameworks::cameraservice::device::CaptureMetadataInfo;
+using SOutputConfiguration =
+ ::aidl::android::frameworks::cameraservice::device::OutputConfiguration;
+using SPhysicalCaptureResultInfo =
+ ::aidl::android::frameworks::cameraservice::device::PhysicalCaptureResultInfo;
+using SSessionConfiguration =
+ ::aidl::android::frameworks::cameraservice::device::SessionConfiguration;
+using SStatus = ::aidl::android::frameworks::cameraservice::common::Status;
+using SStreamConfigurationMode =
+ ::aidl::android::frameworks::cameraservice::device::StreamConfigurationMode;
+using SSubmitInfo = ::aidl::android::frameworks::cameraservice::device::SubmitInfo;
+using STemplateId = ::aidl::android::frameworks::cameraservice::device::TemplateId;
+// NDK classes
+using UCaptureResultExtras = ::android::hardware::camera2::impl::CaptureResultExtras;
+using UOutputConfiguration = ::android::hardware::camera2::params::OutputConfiguration;
+using UPhysicalCaptureResultInfo = ::android::hardware::camera2::impl::PhysicalCaptureResultInfo;
+using USessionConfiguration = ::android::hardware::camera2::params::SessionConfiguration;
+
+// Common macro to log errors returned from stable AIDL calls
+#define LOG_STATUS_ERROR_IF_NOT_OK(status, callName) \
+ if (!(status).isOk()) { \
+ if ((status).getExceptionCode() == EX_SERVICE_SPECIFIC) { \
+ SStatus errStatus = static_cast<SStatus>((status).getServiceSpecificError()); \
+ ALOGE("%s: %s callback failed: %s", __FUNCTION__, callName, \
+ toString(errStatus).c_str()); \
+ } else { \
+ ALOGE("%s: Transaction failed during %s: %d", __FUNCTION__, callName, \
+ (status).getExceptionCode()); \
+ } \
+ }
+
+// Note: existing data in dst will be gone. Caller still owns the memory of src
+void cloneToAidl(const camera_metadata_t *src, SCameraMetadata* dst);
+
+bool cloneFromAidl(const SCameraMetadata &src, CameraMetadata *dst);
+
+int32_t convertFromAidl(SStreamConfigurationMode streamConfigurationMode);
+
+UOutputConfiguration convertFromAidl(const SOutputConfiguration &src);
+
+USessionConfiguration convertFromAidl(const SSessionConfiguration &src);
+
+int convertFromAidl(SOutputConfiguration::Rotation rotation);
+
+int32_t convertFromAidl(STemplateId templateId);
+
+void convertToAidl(const hardware::camera2::utils::SubmitInfo &submitInfo,
+ SSubmitInfo *hSubmitInfo);
+
+SStatus convertToAidl(const binder::Status &status);
+
+SCaptureResultExtras convertToAidl(const UCaptureResultExtras &captureResultExtras);
+
+SErrorCode convertToAidl(int32_t errorCode);
+
+std::vector<SPhysicalCaptureResultInfo> convertToAidl(
+ const std::vector<UPhysicalCaptureResultInfo>& src,
+ std::shared_ptr<CaptureResultMetadataQueue>& fmq);
+
+SPhysicalCaptureResultInfo convertToAidl(const UPhysicalCaptureResultInfo& src,
+ std::shared_ptr<CaptureResultMetadataQueue>& fmq);
+
+void convertToAidl(const std::vector<hardware::CameraStatus> &src,
+ std::vector<SCameraStatusAndId>* dst);
+
+SCameraDeviceStatus convertCameraStatusToAidl(int32_t src);
+
+bool areBindersEqual(const ndk::SpAIBinder& b1, const ndk::SpAIBinder& b2);
+
+status_t filterVndkKeys(int vndkVersion, CameraMetadata &metadata, bool isStatic = true);
+
+} // namespace android::hardware::cameraservice::utils::conversion::aidl
+
+#endif // FRAMEWORKS_AV_SERVICES_CAMERA_LIBCAMERASERVICE_AIDL_AIDLUTILS_H_
diff --git a/services/camera/libcameraservice/aidl/DeathPipe.cpp b/services/camera/libcameraservice/aidl/DeathPipe.cpp
new file mode 100644
index 0000000..de46411
--- /dev/null
+++ b/services/camera/libcameraservice/aidl/DeathPipe.cpp
@@ -0,0 +1,99 @@
+/*
+ * Copyright (C) 2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#define LOG_TAG "DeathPipe"
+
+#include "DeathPipe.h"
+
+namespace android::frameworks::cameraservice::utils {
+
+DeathPipe::DeathPipe(IBinder* parent, const ::ndk::SpAIBinder& binder):
+ mParent(parent), mAIBinder(binder) {
+ mDeathRecipient = ::ndk::ScopedAIBinder_DeathRecipient(
+ AIBinder_DeathRecipient_new(DeathPipe::onDeathCallback));
+ // Set an unlinked callback that allows Obituaries to be deallocated
+ AIBinder_DeathRecipient_setOnUnlinked(mDeathRecipient.get(),
+ DeathPipe::onUnlinkedCallback);
+}
+
+status_t DeathPipe::linkToDeath(const sp<IBinder::DeathRecipient>& recipient,
+ void* cookie, uint32_t flags) {
+ LOG_ALWAYS_FATAL_IF(recipient == nullptr, "%s: recipient must be non-nullptr", __FUNCTION__);
+ std::lock_guard<std::mutex> _l(mLock);
+
+ // Create and immortalize an obituary before linking it to death.
+ // The created Obituary can now only be garbage collected if it is unlinked from death
+ std::shared_ptr<Obituary> obituary = std::make_shared<Obituary>(recipient, cookie,
+ flags, /* who= */ mParent);
+ obituary->immortalize();
+
+ // Ensure that "cookie" is a pointer to an immortal obituary.
+ // AIBinder_linkToDeath calls DeathPipe::onUnlinkedCallback if linking to death fails, marking
+ // it for garbage collection
+ binder_status_t ret = AIBinder_linkToDeath(mAIBinder.get(),
+ mDeathRecipient.get(),
+ /* cookie= */ obituary.get());
+ if (ret != STATUS_OK) {
+ return DEAD_OBJECT;
+ }
+ mObituaries.emplace_back(obituary);
+ return NO_ERROR;
+}
+
+status_t DeathPipe::unlinkToDeath(const wp<IBinder::DeathRecipient>& recipient,
+ void* cookie, uint32_t flags,
+ wp<IBinder::DeathRecipient>* outRecipient) {
+ std::lock_guard<std::mutex> _l(mLock);
+ // Temporary Obituary for checking equality
+ std::shared_ptr<Obituary> inObituary = std::make_shared<Obituary>(recipient, cookie,
+ flags, mParent);
+ for (auto it = mObituaries.begin(); it != mObituaries.end(); it++) {
+ if ((*inObituary) == (**it)) {
+ if (outRecipient != nullptr) {
+ *outRecipient = (*it)->recipient;
+ }
+ // Unlink the found Obituary from death. AIBinder_unlinkToDeath calls
+ // DeathPipe::onUnlinkedCallback with the given cookie when unlinking is done
+ binder_status_t ret = AIBinder_unlinkToDeath(mAIBinder.get(),
+ mDeathRecipient.get(),
+ /* cookie= */ (*it).get());
+ mObituaries.erase(it);
+ return ret == STATUS_OK ? NO_ERROR : DEAD_OBJECT;
+ }
+ }
+ return NAME_NOT_FOUND;
+}
+
+DeathPipe::~DeathPipe() = default;
+
+
+void DeathPipe::onDeathCallback(void* cookie) {
+ // Cookie will always be a pointer to a valid immortal Obituary
+ Obituary* obituary = static_cast<Obituary*>(cookie);
+ obituary->onDeath();
+ // Don't call Obituary::clear() because VNDK Binder will call DeathPipe::onUnlinkedCallback()
+ // when it is ready
+}
+
+void DeathPipe::onUnlinkedCallback(void* cookie) {
+ // Cookie will always be a pointer to a valid immortal Obituary.
+ Obituary* obituary = static_cast<Obituary*>(cookie);
+ // Mark obituary to be garbage collected if needed. onDeathCallback won't be called with
+ // this particular cookie after this.
+ obituary->clear();
+}
+
+} // namespace android::frameworks::cameraservice::utils
\ No newline at end of file
diff --git a/services/camera/libcameraservice/aidl/DeathPipe.h b/services/camera/libcameraservice/aidl/DeathPipe.h
new file mode 100644
index 0000000..a816dd0
--- /dev/null
+++ b/services/camera/libcameraservice/aidl/DeathPipe.h
@@ -0,0 +1,129 @@
+/*
+ * Copyright (C) 2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef FRAMEWORKS_AV_SERVICES_CAMERA_LIBCAMERASERVICE_AIDL_DEATHPIPE_H_
+#define FRAMEWORKS_AV_SERVICES_CAMERA_LIBCAMERASERVICE_AIDL_DEATHPIPE_H_
+
+#include <android/binder_auto_utils.h>
+#include <android/binder_ibinder.h>
+#include <binder/Parcel.h>
+#include <list>
+
+namespace android::frameworks::cameraservice::utils {
+
+/**
+ * This is a helper class to pipe death notifications from VNDK {@code AIBinder} to
+ * S/NDK {@code IBinder}.
+ *
+ * To use this class, create a DeathPipe member object as a field of NDK interface
+ * implementation, and forward functions {@code BBinder::linkToDeath} and
+ * {@code BBinder::unlinkToDeath} to corresponding DeathPipe functions.
+ */
+class DeathPipe {
+ public:
+ /**
+ * @param parent the NDK Binder object. Assumed to live longer than the DeathPipe
+ * object
+ * @param binder the VNDK Binder object which DeathPipe with subscribe to.
+ */
+ explicit DeathPipe(IBinder* parent, const ::ndk::SpAIBinder& binder);
+ ~DeathPipe();
+
+ status_t linkToDeath(const sp<IBinder::DeathRecipient>& recipient, void* cookie,
+ uint32_t flags);
+ status_t unlinkToDeath(const wp<IBinder::DeathRecipient>& recipient,
+ void* cookie, uint32_t flags, wp<IBinder::DeathRecipient>* outRecipient);
+
+ // Static functions that will be called by VNDK binder upon death or unlinking
+ static void onDeathCallback(void* cookie);
+ static void onUnlinkedCallback(void* cookie);
+
+ private:
+ /**
+ * {@code Obituary} is a tiny container that contains some metadata to pass VNDK binder's
+ * death notification to the NDK binder. A pointer to the Obituary is used as the
+ * {@code cookie} in VNDK binder's death notification.
+ *
+ * Theoretically, the VNDK binder might send out death notification after the DeathPipe
+ * object is destroyed, so care must be taken to ensure that Obituaries aren't accidentally
+ * destroyed before VNDK binder stops using its cookies.
+ *
+ */
+ struct Obituary: public std::enable_shared_from_this<Obituary> {
+ wp<IBinder::DeathRecipient> recipient; // NDK death recipient
+ void *cookie; // cookie sent by the NDK recipient
+ uint32_t flags; // flags sent by the NDK recipient
+ wp<IBinder> who; // NDK binder whose death 'recipient' subscribed to
+
+ // Self ptr to ensure we don't destroy this obituary while it can still be notified by the
+ // VNDK Binder. When populated with Obituary::immortalize, this Obituary won't be
+ // garbage collected until Obituary::clear is called.
+ std::shared_ptr<Obituary> mSelfPtr;
+
+ Obituary(const wp<IBinder::DeathRecipient>& recipient, void* cookie,
+ uint32_t flags, IBinder* who) :
+ recipient(recipient), cookie(cookie), flags(flags),
+ who(who), mSelfPtr(nullptr) {}
+
+ // Function to be called when the VNDK Binder dies. Pipes the notification to the relevant
+ // NDK recipient if it still exists
+ void onDeath() const {
+ sp<IBinder::DeathRecipient> r = recipient.promote();
+ if (r == nullptr) { return; }
+ r->binderDied(who);
+ };
+
+ // Should be called before calling AIBinder_linkToDeath. Once this function returns this
+ // Obituary won't be garbage collected until Obituary::clear is called.
+ void immortalize() {
+ mSelfPtr = shared_from_this();
+ }
+
+ // Should be called when this Obituary can be garbage collected.
+ // Typically, after the Obituary is no longer linked to a VNDK DeathRecipient
+ void clear() {
+ mSelfPtr = nullptr;
+ }
+
+ bool operator==(const Obituary& rhs) const {
+ return recipient == rhs.recipient &&
+ cookie == rhs.cookie &&
+ flags == rhs.flags &&
+ who == rhs.who;
+ }
+ };
+
+ // Parent to which the cameraservice wants to subscribe to for death notification
+ IBinder* mParent;
+
+ // VNDK Binder object to which the death notification will be bound to. If it dies,
+ // cameraservice will be notified as if mParent died.
+ ::ndk::SpAIBinder mAIBinder;
+
+ // Owning VNDK's deathRecipient ensures that all linked death notifications are cleaned up
+ // when this class destructs.
+ ::ndk::ScopedAIBinder_DeathRecipient mDeathRecipient;
+
+ // Lock to protect access to fields below.
+ std::mutex mLock;
+ // List of all obituaries created by DeathPipe, used to unlink death subscription
+ std::list<std::shared_ptr<Obituary>> mObituaries;
+
+};
+
+} // namespace android::frameworks::cameraservice::utils
+
+#endif // FRAMEWORKS_AV_SERVICES_CAMERA_LIBCAMERASERVICE_AIDL_DEATHPIPE_H_
diff --git a/services/camera/libcameraservice/hidl/VndkVersionMetadataTags.h b/services/camera/libcameraservice/aidl/VndkVersionMetadataTags.h
similarity index 80%
rename from services/camera/libcameraservice/hidl/VndkVersionMetadataTags.h
rename to services/camera/libcameraservice/aidl/VndkVersionMetadataTags.h
index ae4d5dd..48c804d 100644
--- a/services/camera/libcameraservice/hidl/VndkVersionMetadataTags.h
+++ b/services/camera/libcameraservice/aidl/VndkVersionMetadataTags.h
@@ -21,7 +21,7 @@
* ! Do not edit this file directly !
*
* Generated automatically from vndk_camera_metadata_tags.mako. To be included in libcameraservice
- * only by hidl/Utils.cpp.
+ * only by aidl/AidlUtils.cpp.
*/
/**
@@ -74,6 +74,17 @@
ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES,
ANDROID_SENSOR_READOUT_TIMESTAMP,
} },
+ {34, {
+ ANDROID_CONTROL_AUTOFRAMING_AVAILABLE,
+ ANDROID_CONTROL_AVAILABLE_SETTINGS_OVERRIDES,
+ ANDROID_JPEGR_AVAILABLE_JPEG_R_MIN_FRAME_DURATIONS,
+ ANDROID_JPEGR_AVAILABLE_JPEG_R_MIN_FRAME_DURATIONS_MAXIMUM_RESOLUTION,
+ ANDROID_JPEGR_AVAILABLE_JPEG_R_STALL_DURATIONS,
+ ANDROID_JPEGR_AVAILABLE_JPEG_R_STALL_DURATIONS_MAXIMUM_RESOLUTION,
+ ANDROID_JPEGR_AVAILABLE_JPEG_R_STREAM_CONFIGURATIONS,
+ ANDROID_JPEGR_AVAILABLE_JPEG_R_STREAM_CONFIGURATIONS_MAXIMUM_RESOLUTION,
+ ANDROID_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP,
+ } },
};
/**
@@ -90,4 +101,13 @@
ANDROID_SENSOR_PIXEL_MODE,
ANDROID_SENSOR_RAW_BINNING_FACTOR_USED,
} },
+ {34, {
+ ANDROID_CONTROL_AUTOFRAMING,
+ ANDROID_CONTROL_AUTOFRAMING_STATE,
+ ANDROID_CONTROL_SETTINGS_OVERRIDE,
+ ANDROID_CONTROL_SETTINGS_OVERRIDING_FRAME_NUMBER,
+ ANDROID_EXTENSION_CURRENT_TYPE,
+ ANDROID_EXTENSION_STRENGTH,
+ ANDROID_SCALER_RAW_CROP_REGION,
+ } },
};
diff --git a/services/camera/libcameraservice/api1/Camera2Client.cpp b/services/camera/libcameraservice/api1/Camera2Client.cpp
index 9dead7f..b388e5a 100644
--- a/services/camera/libcameraservice/api1/Camera2Client.cpp
+++ b/services/camera/libcameraservice/api1/Camera2Client.cpp
@@ -55,6 +55,7 @@
Camera2Client::Camera2Client(const sp<CameraService>& cameraService,
const sp<hardware::ICameraClient>& cameraClient,
+ std::shared_ptr<CameraServiceProxyWrapper> cameraServiceProxyWrapper,
const std::string& clientPackageName,
const std::optional<std::string>& clientFeatureId,
const std::string& cameraDeviceId,
@@ -67,7 +68,7 @@
bool overrideForPerfClass,
bool overrideToPortrait,
bool forceSlowJpegMode):
- Camera2ClientBase(cameraService, cameraClient, clientPackageName,
+ Camera2ClientBase(cameraService, cameraClient, cameraServiceProxyWrapper, clientPackageName,
false/*systemNativeClient - since no ndk for api1*/, clientFeatureId,
cameraDeviceId, api1CameraId, cameraFacing, sensorOrientation, clientPid,
clientUid, servicePid, overrideForPerfClass, overrideToPortrait,
@@ -82,9 +83,7 @@
SharedParameters::Lock l(mParameters);
l.mParameters.state = Parameters::DISCONNECTED;
- if (forceSlowJpegMode) {
- l.mParameters.isSlowJpegModeForced = true;
- }
+ l.mParameters.isSlowJpegModeForced = forceSlowJpegMode;
}
status_t Camera2Client::initialize(sp<CameraProviderManager> manager,
@@ -144,19 +143,44 @@
std::string threadName = std::string("C2-") + std::to_string(mCameraId);
mFrameProcessor = new FrameProcessor(mDevice, this);
- mFrameProcessor->run((threadName + "-FrameProc").c_str());
+ res = mFrameProcessor->run((threadName + "-FrameProc").c_str());
+ if (res != OK) {
+ ALOGE("%s: Unable to start frame processor thread: %s (%d)",
+ __FUNCTION__, strerror(-res), res);
+ return res;
+ }
mCaptureSequencer = new CaptureSequencer(this);
- mCaptureSequencer->run((threadName + "-CaptureSeq").c_str());
+ res = mCaptureSequencer->run((threadName + "-CaptureSeq").c_str());
+ if (res != OK) {
+ ALOGE("%s: Unable to start capture sequencer thread: %s (%d)",
+ __FUNCTION__, strerror(-res), res);
+ return res;
+ }
mJpegProcessor = new JpegProcessor(this, mCaptureSequencer);
- mJpegProcessor->run((threadName + "-JpegProc").c_str());
+ res = mJpegProcessor->run((threadName + "-JpegProc").c_str());
+ if (res != OK) {
+ ALOGE("%s: Unable to start jpeg processor thread: %s (%d)",
+ __FUNCTION__, strerror(-res), res);
+ return res;
+ }
mZslProcessor = new ZslProcessor(this, mCaptureSequencer);
- mZslProcessor->run((threadName + "-ZslProc").c_str());
+ res = mZslProcessor->run((threadName + "-ZslProc").c_str());
+ if (res != OK) {
+ ALOGE("%s: Unable to start zsl processor thread: %s (%d)",
+ __FUNCTION__, strerror(-res), res);
+ return res;
+ }
mCallbackProcessor = new CallbackProcessor(this);
- mCallbackProcessor->run((threadName + "-CallbkProc").c_str());
+ res = mCallbackProcessor->run((threadName + "-CallbkProc").c_str());
+ if (res != OK) {
+ ALOGE("%s: Unable to start callback processor thread: %s (%d)",
+ __FUNCTION__, strerror(-res), res);
+ return res;
+ }
if (gLogLevel >= 1) {
SharedParameters::Lock l(mParameters);
@@ -471,12 +495,13 @@
ALOGV("Camera %d: Disconnecting device", mCameraId);
+ bool hasDeviceError = mDevice->hasDeviceError();
mDevice->disconnect();
CameraService::Client::disconnect();
int32_t closeLatencyMs = ns2ms(systemTime() - startTime);
- CameraServiceProxyWrapper::logClose(mCameraIdStr, closeLatencyMs);
+ mCameraServiceProxyWrapper->logClose(mCameraIdStr, closeLatencyMs, hasDeviceError);
return res;
}
@@ -2332,6 +2357,13 @@
static_cast<camera_metadata_enum_android_scaler_rotate_and_crop_t>(rotateAndCrop));
}
+status_t Camera2Client::setAutoframingOverride(uint8_t autoframingValue) {
+ if (autoframingValue > ANDROID_CONTROL_AUTOFRAMING_AUTO) return BAD_VALUE;
+
+ return mDevice->setAutoframingAutoBehavior(
+ static_cast<camera_metadata_enum_android_control_autoframing_t>(autoframingValue));
+}
+
bool Camera2Client::supportsCameraMute() {
return mDevice->supportsCameraMute();
}
@@ -2349,6 +2381,14 @@
mDevice->clearStreamUseCaseOverrides();
}
+bool Camera2Client::supportsZoomOverride() {
+ return mDevice->supportsZoomOverride();
+}
+
+status_t Camera2Client::setZoomOverride(int zoomOverride) {
+ return mDevice->setZoomOverride(zoomOverride);
+}
+
status_t Camera2Client::waitUntilCurrentRequestIdLocked() {
int32_t activeRequestId = mStreamingProcessor->getActiveRequestId();
if (activeRequestId != 0) {
diff --git a/services/camera/libcameraservice/api1/Camera2Client.h b/services/camera/libcameraservice/api1/Camera2Client.h
index 5b4d547..fe12690 100644
--- a/services/camera/libcameraservice/api1/Camera2Client.h
+++ b/services/camera/libcameraservice/api1/Camera2Client.h
@@ -86,6 +86,7 @@
virtual status_t setAudioRestriction(int mode);
virtual int32_t getGlobalAudioRestriction();
virtual status_t setRotateAndCropOverride(uint8_t rotateAndCrop);
+ virtual status_t setAutoframingOverride(uint8_t autoframingMode);
virtual bool supportsCameraMute();
virtual status_t setCameraMute(bool enabled);
@@ -96,12 +97,16 @@
const std::vector<int64_t>& useCaseOverrides);
virtual void clearStreamUseCaseOverrides();
+ virtual bool supportsZoomOverride();
+ virtual status_t setZoomOverride(int32_t zoomOverride);
+
/**
* Interface used by CameraService
*/
Camera2Client(const sp<CameraService>& cameraService,
const sp<hardware::ICameraClient>& cameraClient,
+ std::shared_ptr<CameraServiceProxyWrapper> cameraServiceProxyWrapper,
const std::string& clientPackageName,
const std::optional<std::string>& clientFeatureId,
const std::string& cameraDeviceId,
diff --git a/services/camera/libcameraservice/api1/client2/JpegCompressor.cpp b/services/camera/libcameraservice/api1/client2/JpegCompressor.cpp
deleted file mode 100644
index 01951a0..0000000
--- a/services/camera/libcameraservice/api1/client2/JpegCompressor.cpp
+++ /dev/null
@@ -1,221 +0,0 @@
-/*
- * Copyright (C) 2012 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-//#define LOG_NDEBUG 0
-#define LOG_TAG "Camera2-JpegCompressor"
-
-#include <utils/Log.h>
-#include <ui/GraphicBufferMapper.h>
-
-#include "JpegCompressor.h"
-
-namespace android {
-namespace camera2 {
-
-JpegCompressor::JpegCompressor():
- Thread(false),
- mIsBusy(false),
- mCaptureTime(0) {
-}
-
-JpegCompressor::~JpegCompressor() {
- ALOGV("%s", __FUNCTION__);
- Mutex::Autolock lock(mMutex);
-}
-
-status_t JpegCompressor::start(const Vector<CpuConsumer::LockedBuffer*>& buffers,
- nsecs_t captureTime) {
- ALOGV("%s", __FUNCTION__);
- Mutex::Autolock busyLock(mBusyMutex);
-
- if (mIsBusy) {
- ALOGE("%s: Already processing a buffer!", __FUNCTION__);
- return INVALID_OPERATION;
- }
-
- mIsBusy = true;
-
- mBuffers = buffers;
- mCaptureTime = captureTime;
-
- status_t res;
- res = run("JpegCompressor");
- if (res != OK) {
- ALOGE("%s: Unable to start up compression thread: %s (%d)",
- __FUNCTION__, strerror(-res), res);
- //delete mBuffers; // necessary?
- }
- return res;
-}
-
-status_t JpegCompressor::cancel() {
- ALOGV("%s", __FUNCTION__);
- requestExitAndWait();
- return OK;
-}
-
-status_t JpegCompressor::readyToRun() {
- ALOGV("%s", __FUNCTION__);
- return OK;
-}
-
-bool JpegCompressor::threadLoop() {
- ALOGV("%s", __FUNCTION__);
-
- mAuxBuffer = mBuffers[0]; // input
- mJpegBuffer = mBuffers[1]; // output
-
- // Set up error management
- mJpegErrorInfo = NULL;
- JpegError error;
- error.parent = this;
-
- mCInfo.err = jpeg_std_error(&error);
- mCInfo.err->error_exit = jpegErrorHandler;
-
- jpeg_create_compress(&mCInfo);
- if (checkError("Error initializing compression")) return false;
-
- // Route compressed data straight to output stream buffer
- JpegDestination jpegDestMgr;
- jpegDestMgr.parent = this;
- jpegDestMgr.init_destination = jpegInitDestination;
- jpegDestMgr.empty_output_buffer = jpegEmptyOutputBuffer;
- jpegDestMgr.term_destination = jpegTermDestination;
-
- mCInfo.dest = &jpegDestMgr;
-
- // Set up compression parameters
- mCInfo.image_width = mAuxBuffer->width;
- mCInfo.image_height = mAuxBuffer->height;
- mCInfo.input_components = 1; // 3;
- mCInfo.in_color_space = JCS_GRAYSCALE; // JCS_RGB
-
- ALOGV("%s: image_width = %d, image_height = %d", __FUNCTION__, mCInfo.image_width, mCInfo.image_height);
-
- jpeg_set_defaults(&mCInfo);
- if (checkError("Error configuring defaults")) return false;
-
- // Do compression
- jpeg_start_compress(&mCInfo, TRUE);
- if (checkError("Error starting compression")) return false;
-
- size_t rowStride = mAuxBuffer->stride;// * 3;
- const size_t kChunkSize = 32;
- while (mCInfo.next_scanline < mCInfo.image_height) {
- JSAMPROW chunk[kChunkSize];
- for (size_t i = 0 ; i < kChunkSize; i++) {
- chunk[i] = (JSAMPROW)
- (mAuxBuffer->data + (i + mCInfo.next_scanline) * rowStride);
- }
- jpeg_write_scanlines(&mCInfo, chunk, kChunkSize);
- if (checkError("Error while compressing")) return false;
- if (exitPending()) {
- ALOGV("%s: Cancel called, exiting early", __FUNCTION__);
- cleanUp();
- return false;
- }
- }
-
- jpeg_finish_compress(&mCInfo);
- if (checkError("Error while finishing compression")) return false;
-
- cleanUp();
- return false;
-}
-
-bool JpegCompressor::isBusy() {
- ALOGV("%s", __FUNCTION__);
- Mutex::Autolock busyLock(mBusyMutex);
- return mIsBusy;
-}
-
-// old function -- TODO: update for new buffer type
-bool JpegCompressor::isStreamInUse(uint32_t /*id*/) {
- ALOGV("%s", __FUNCTION__);
- Mutex::Autolock lock(mBusyMutex);
-
- if (mBuffers.size() && mIsBusy) {
- for (size_t i = 0; i < mBuffers.size(); i++) {
-// if ( mBuffers[i].streamId == (int)id ) return true;
- }
- }
- return false;
-}
-
-bool JpegCompressor::waitForDone(nsecs_t timeout) {
- ALOGV("%s", __FUNCTION__);
- Mutex::Autolock lock(mBusyMutex);
- status_t res = OK;
- if (mIsBusy) {
- res = mDone.waitRelative(mBusyMutex, timeout);
- }
- return (res == OK);
-}
-
-bool JpegCompressor::checkError(const char *msg) {
- ALOGV("%s", __FUNCTION__);
- if (mJpegErrorInfo) {
- char errBuffer[JMSG_LENGTH_MAX];
- mJpegErrorInfo->err->format_message(mJpegErrorInfo, errBuffer);
- ALOGE("%s: %s: %s",
- __FUNCTION__, msg, errBuffer);
- cleanUp();
- mJpegErrorInfo = NULL;
- return true;
- }
- return false;
-}
-
-void JpegCompressor::cleanUp() {
- ALOGV("%s", __FUNCTION__);
- jpeg_destroy_compress(&mCInfo);
- Mutex::Autolock lock(mBusyMutex);
- mIsBusy = false;
- mDone.signal();
-}
-
-void JpegCompressor::jpegErrorHandler(j_common_ptr cinfo) {
- ALOGV("%s", __FUNCTION__);
- JpegError *error = static_cast<JpegError*>(cinfo->err);
- error->parent->mJpegErrorInfo = cinfo;
-}
-
-void JpegCompressor::jpegInitDestination(j_compress_ptr cinfo) {
- ALOGV("%s", __FUNCTION__);
- JpegDestination *dest= static_cast<JpegDestination*>(cinfo->dest);
- ALOGV("%s: Setting destination to %p, size %zu",
- __FUNCTION__, dest->parent->mJpegBuffer->data, kMaxJpegSize);
- dest->next_output_byte = (JOCTET*)(dest->parent->mJpegBuffer->data);
- dest->free_in_buffer = kMaxJpegSize;
-}
-
-boolean JpegCompressor::jpegEmptyOutputBuffer(j_compress_ptr /*cinfo*/) {
- ALOGV("%s", __FUNCTION__);
- ALOGE("%s: JPEG destination buffer overflow!",
- __FUNCTION__);
- return true;
-}
-
-void JpegCompressor::jpegTermDestination(j_compress_ptr cinfo) {
- (void) cinfo; // TODO: clean up
- ALOGV("%s", __FUNCTION__);
- ALOGV("%s: Done writing JPEG data. %zu bytes left in buffer",
- __FUNCTION__, cinfo->dest->free_in_buffer);
-}
-
-}; // namespace camera2
-}; // namespace android
diff --git a/services/camera/libcameraservice/api1/client2/JpegCompressor.h b/services/camera/libcameraservice/api1/client2/JpegCompressor.h
deleted file mode 100644
index 589a2fd..0000000
--- a/services/camera/libcameraservice/api1/client2/JpegCompressor.h
+++ /dev/null
@@ -1,106 +0,0 @@
-/*
- * Copyright (C) 2012 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-
-/**
- * This class simulates a hardware JPEG compressor. It receives image buffers
- * in RGBA_8888 format, processes them in a worker thread, and then pushes them
- * out to their destination stream.
- */
-
-#ifndef ANDROID_SERVERS_CAMERA_JPEGCOMPRESSOR_H
-#define ANDROID_SERVERS_CAMERA_JPEGCOMPRESSOR_H
-
-#include "utils/Thread.h"
-#include "utils/Mutex.h"
-#include "utils/Timers.h"
-#include "utils/Vector.h"
-//#include "Base.h"
-#include <stdio.h>
-#include <gui/CpuConsumer.h>
-
-extern "C" {
-#include <jpeglib.h>
-}
-
-
-namespace android {
-namespace camera2 {
-
-class JpegCompressor: private Thread, public virtual RefBase {
- public:
-
- JpegCompressor();
- ~JpegCompressor();
-
- // Start compressing COMPRESSED format buffers; JpegCompressor takes
- // ownership of the Buffers vector.
- status_t start(const Vector<CpuConsumer::LockedBuffer*>& buffers,
- nsecs_t captureTime);
-
- status_t cancel();
-
- bool isBusy();
- bool isStreamInUse(uint32_t id);
-
- bool waitForDone(nsecs_t timeout);
-
- // TODO: Measure this
- static const size_t kMaxJpegSize = 300000;
-
- private:
- Mutex mBusyMutex;
- Mutex mMutex;
- bool mIsBusy;
- Condition mDone;
- nsecs_t mCaptureTime;
-
- Vector<CpuConsumer::LockedBuffer*> mBuffers;
- CpuConsumer::LockedBuffer *mJpegBuffer;
- CpuConsumer::LockedBuffer *mAuxBuffer;
-
- jpeg_compress_struct mCInfo;
-
- struct JpegError : public jpeg_error_mgr {
- JpegCompressor *parent;
- };
- j_common_ptr mJpegErrorInfo;
-
- struct JpegDestination : public jpeg_destination_mgr {
- JpegCompressor *parent;
- };
-
- static void jpegErrorHandler(j_common_ptr cinfo);
-
- static void jpegInitDestination(j_compress_ptr cinfo);
- static boolean jpegEmptyOutputBuffer(j_compress_ptr cinfo);
- static void jpegTermDestination(j_compress_ptr cinfo);
-
- bool checkError(const char *msg);
- void cleanUp();
-
- /**
- * Inherited Thread virtual overrides
- */
- private:
- virtual status_t readyToRun();
- virtual bool threadLoop();
-};
-
-}; // namespace camera2
-}; // namespace android
-
-#endif
diff --git a/services/camera/libcameraservice/api1/client2/Parameters.cpp b/services/camera/libcameraservice/api1/client2/Parameters.cpp
index d5ea689..aa3d1bb 100644
--- a/services/camera/libcameraservice/api1/client2/Parameters.cpp
+++ b/services/camera/libcameraservice/api1/client2/Parameters.cpp
@@ -43,6 +43,7 @@
int cameraFacing) :
cameraId(cameraId),
cameraFacing(cameraFacing),
+ isSlowJpegModeForced(false),
info(NULL),
mDefaultSceneMode(ANDROID_CONTROL_SCENE_MODE_DISABLED) {
}
diff --git a/services/camera/libcameraservice/api2/CameraDeviceClient.cpp b/services/camera/libcameraservice/api2/CameraDeviceClient.cpp
index d54ba46..c60f327 100644
--- a/services/camera/libcameraservice/api2/CameraDeviceClient.cpp
+++ b/services/camera/libcameraservice/api2/CameraDeviceClient.cpp
@@ -32,12 +32,12 @@
#include "device3/Camera3Device.h"
#include "device3/Camera3OutputStream.h"
#include "api2/CameraDeviceClient.h"
-#include "utils/CameraServiceProxyWrapper.h"
#include <camera_metadata_hidden.h>
#include "DepthCompositeStream.h"
#include "HeicCompositeStream.h"
+#include "JpegRCompositeStream.h"
// Convenience methods for constructing binder::Status objects for error returns
@@ -88,6 +88,7 @@
CameraDeviceClient::CameraDeviceClient(const sp<CameraService>& cameraService,
const sp<hardware::camera2::ICameraDeviceCallbacks>& remoteCallback,
+ std::shared_ptr<CameraServiceProxyWrapper> cameraServiceProxyWrapper,
const std::string& clientPackageName,
bool systemNativeClient,
const std::optional<std::string>& clientFeatureId,
@@ -99,9 +100,10 @@
int servicePid,
bool overrideForPerfClass,
bool overrideToPortrait) :
- Camera2ClientBase(cameraService, remoteCallback, clientPackageName, systemNativeClient,
- clientFeatureId, cameraId, /*API1 camera ID*/ -1, cameraFacing, sensorOrientation,
- clientPid, clientUid, servicePid, overrideForPerfClass, overrideToPortrait),
+ Camera2ClientBase(cameraService, remoteCallback, cameraServiceProxyWrapper, clientPackageName,
+ systemNativeClient, clientFeatureId, cameraId, /*API1 camera ID*/ -1, cameraFacing,
+ sensorOrientation, clientPid, clientUid, servicePid, overrideForPerfClass,
+ overrideToPortrait),
mInputStream(),
mStreamingRequestId(REQUEST_ID_NONE),
mRequestIdCounter(0),
@@ -129,7 +131,12 @@
mFrameProcessor = new FrameProcessorBase(mDevice);
std::string threadName = std::string("CDU-") + mCameraIdStr + "-FrameProc";
- mFrameProcessor->run(threadName.c_str());
+ res = mFrameProcessor->run(threadName.c_str());
+ if (res != OK) {
+ ALOGE("%s: Unable to start frame processor thread: %s (%d)",
+ __FUNCTION__, strerror(-res), res);
+ return res;
+ }
mFrameProcessor->registerListener(camera2::FrameProcessorBase::FRAME_PROCESSOR_LISTENER_MIN_ID,
camera2::FrameProcessorBase::FRAME_PROCESSOR_LISTENER_MAX_ID,
@@ -183,11 +190,11 @@
// Cache physical camera ids corresponding to this device and also the high
// resolution sensors in this device + physical camera ids
mProviderManager->isLogicalCamera(mCameraIdStr, &mPhysicalCameraIds);
- if (isUltraHighResolutionSensor(mCameraIdStr)) {
+ if (supportsUltraHighResolutionCapture(mCameraIdStr)) {
mHighResolutionSensors.insert(mCameraIdStr);
}
for (auto &physicalId : mPhysicalCameraIds) {
- if (isUltraHighResolutionSensor(physicalId)) {
+ if (supportsUltraHighResolutionCapture(physicalId)) {
mHighResolutionSensors.insert(physicalId);
}
}
@@ -694,7 +701,7 @@
nsecs_t configureEnd = systemTime();
int32_t configureDurationMs = ns2ms(configureEnd) - startTimeMs;
- CameraServiceProxyWrapper::logStreamConfigured(mCameraIdStr, operatingMode,
+ mCameraServiceProxyWrapper->logStreamConfigured(mCameraIdStr, operatingMode,
false /*internalReconfig*/, configureDurationMs);
}
@@ -882,6 +889,8 @@
int64_t streamUseCase = outputConfiguration.getStreamUseCase();
int timestampBase = outputConfiguration.getTimestampBase();
int mirrorMode = outputConfiguration.getMirrorMode();
+ int32_t colorSpace = outputConfiguration.getColorSpace();
+ bool useReadoutTimestamp = outputConfiguration.useReadoutTimestamp();
res = SessionConfigurationUtils::checkSurfaceType(numBufferProducers, deferredConsumer,
outputConfiguration.getSurfaceType());
@@ -927,7 +936,7 @@
res = SessionConfigurationUtils::createSurfaceFromGbp(streamInfo,
isStreamInfoValid, surface, bufferProducer, mCameraIdStr,
mDevice->infoPhysical(physicalCameraId), sensorPixelModesUsed, dynamicRangeProfile,
- streamUseCase, timestampBase, mirrorMode);
+ streamUseCase, timestampBase, mirrorMode, colorSpace);
if (!res.isOk())
return res;
@@ -949,19 +958,26 @@
bool isDepthCompositeStream =
camera3::DepthCompositeStream::isDepthCompositeStream(surfaces[0]);
bool isHeicCompositeStream = camera3::HeicCompositeStream::isHeicCompositeStream(surfaces[0]);
- if (isDepthCompositeStream || isHeicCompositeStream) {
+ bool isJpegRCompositeStream =
+ camera3::JpegRCompositeStream::isJpegRCompositeStream(surfaces[0]) &&
+ !mDevice->isCompositeJpegRDisabled();
+ if (isDepthCompositeStream || isHeicCompositeStream || isJpegRCompositeStream) {
sp<CompositeStream> compositeStream;
if (isDepthCompositeStream) {
compositeStream = new camera3::DepthCompositeStream(mDevice, getRemoteCallback());
- } else {
+ } else if (isHeicCompositeStream) {
compositeStream = new camera3::HeicCompositeStream(mDevice, getRemoteCallback());
+ } else {
+ compositeStream = new camera3::JpegRCompositeStream(mDevice, getRemoteCallback());
}
err = compositeStream->createStream(surfaces, deferredConsumer, streamInfo.width,
streamInfo.height, streamInfo.format,
static_cast<camera_stream_rotation_t>(outputConfiguration.getRotation()),
&streamId, physicalCameraId, streamInfo.sensorPixelModesUsed, &surfaceIds,
- outputConfiguration.getSurfaceSetID(), isShared, isMultiResolution);
+ outputConfiguration.getSurfaceSetID(), isShared, isMultiResolution,
+ streamInfo.colorSpace, streamInfo.dynamicRangeProfile, streamInfo.streamUseCase,
+ useReadoutTimestamp);
if (err == OK) {
Mutex::Autolock l(mCompositeLock);
mCompositeStreamMap.add(IInterface::asBinder(surfaces[0]->getIGraphicBufferProducer()),
@@ -974,7 +990,8 @@
&streamId, physicalCameraId, streamInfo.sensorPixelModesUsed, &surfaceIds,
outputConfiguration.getSurfaceSetID(), isShared, isMultiResolution,
/*consumerUsage*/0, streamInfo.dynamicRangeProfile, streamInfo.streamUseCase,
- streamInfo.timestampBase, streamInfo.mirrorMode);
+ streamInfo.timestampBase, streamInfo.mirrorMode, streamInfo.colorSpace,
+ useReadoutTimestamp);
}
if (err != OK) {
@@ -1025,6 +1042,7 @@
int width, height, format, surfaceType;
uint64_t consumerUsage;
android_dataspace dataSpace;
+ int32_t colorSpace;
status_t err;
binder::Status res;
@@ -1038,6 +1056,7 @@
surfaceType = outputConfiguration.getSurfaceType();
format = HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED;
dataSpace = android_dataspace_t::HAL_DATASPACE_UNKNOWN;
+ colorSpace = ANDROID_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP_UNSPECIFIED;
// Hardcode consumer usage flags: SurfaceView--0x900, SurfaceTexture--0x100.
consumerUsage = GraphicBuffer::USAGE_HW_TEXTURE;
if (surfaceType == OutputConfiguration::SURFACE_TYPE_SURFACE_VIEW) {
@@ -1055,7 +1074,7 @@
outputConfiguration.getSensorPixelModesUsed();
if (SessionConfigurationUtils::checkAndOverrideSensorPixelModesUsed(
sensorPixelModesUsed, format, width, height, getStaticInfo(cameraIdUsed),
- /*allowRounding*/ false, &overriddenSensorPixelModesUsed) != OK) {
+ &overriddenSensorPixelModesUsed) != OK) {
return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT,
"sensor pixel modes used not valid for deferred stream");
}
@@ -1070,7 +1089,8 @@
outputConfiguration.isMultiResolution(), consumerUsage,
outputConfiguration.getDynamicRangeProfile(),
outputConfiguration.getStreamUseCase(),
- outputConfiguration.getMirrorMode());
+ outputConfiguration.getMirrorMode(),
+ outputConfiguration.useReadoutTimestamp());
if (err != OK) {
res = STATUS_ERROR_FMT(CameraService::ERROR_INVALID_OPERATION,
@@ -1087,7 +1107,8 @@
outputConfiguration.getDynamicRangeProfile(),
outputConfiguration.getStreamUseCase(),
outputConfiguration.getTimestampBase(),
- outputConfiguration.getMirrorMode()));
+ outputConfiguration.getMirrorMode(),
+ colorSpace));
ALOGV("%s: Camera %s: Successfully created a new stream ID %d for a deferred surface"
" (%d x %d) stream with format 0x%x.",
@@ -1277,6 +1298,7 @@
int64_t streamUseCase = outputConfiguration.getStreamUseCase();
int timestampBase = outputConfiguration.getTimestampBase();
int64_t dynamicRangeProfile = outputConfiguration.getDynamicRangeProfile();
+ int32_t colorSpace = outputConfiguration.getColorSpace();
int mirrorMode = outputConfiguration.getMirrorMode();
for (size_t i = 0; i < newOutputsMap.size(); i++) {
@@ -1285,7 +1307,7 @@
res = SessionConfigurationUtils::createSurfaceFromGbp(outInfo,
/*isStreamInfoValid*/ false, surface, newOutputsMap.valueAt(i), mCameraIdStr,
mDevice->infoPhysical(physicalCameraId), sensorPixelModesUsed, dynamicRangeProfile,
- streamUseCase, timestampBase, mirrorMode);
+ streamUseCase, timestampBase, mirrorMode, colorSpace);
if (!res.isOk())
return res;
@@ -1460,7 +1482,7 @@
binder::Status CameraDeviceClient::prepare(int streamId) {
ATRACE_CALL();
- ALOGV("%s", __FUNCTION__);
+ ALOGV("%s stream id %d", __FUNCTION__, streamId);
binder::Status res;
if (!(res = checkPidStatus(__FUNCTION__)).isOk()) return res;
@@ -1500,7 +1522,7 @@
binder::Status CameraDeviceClient::prepare2(int maxCount, int streamId) {
ATRACE_CALL();
- ALOGV("%s", __FUNCTION__);
+ ALOGV("%s stream id %d", __FUNCTION__, streamId);
binder::Status res;
if (!(res = checkPidStatus(__FUNCTION__)).isOk()) return res;
@@ -1644,7 +1666,8 @@
const std::vector<int32_t> &sensorPixelModesUsed =
outputConfiguration.getSensorPixelModesUsed();
int64_t dynamicRangeProfile = outputConfiguration.getDynamicRangeProfile();
- int64_t streamUseCase= outputConfiguration.getStreamUseCase();
+ int32_t colorSpace = outputConfiguration.getColorSpace();
+ int64_t streamUseCase = outputConfiguration.getStreamUseCase();
int timestampBase = outputConfiguration.getTimestampBase();
int mirrorMode = outputConfiguration.getMirrorMode();
for (auto& bufferProducer : bufferProducers) {
@@ -1660,7 +1683,7 @@
res = SessionConfigurationUtils::createSurfaceFromGbp(mStreamInfoMap[streamId],
true /*isStreamInfoValid*/, surface, bufferProducer, mCameraIdStr,
mDevice->infoPhysical(physicalId), sensorPixelModesUsed, dynamicRangeProfile,
- streamUseCase, timestampBase, mirrorMode);
+ streamUseCase, timestampBase, mirrorMode, colorSpace);
if (!res.isOk())
return res;
@@ -1744,6 +1767,13 @@
static_cast<camera_metadata_enum_android_scaler_rotate_and_crop_t>(rotateAndCrop));
}
+status_t CameraDeviceClient::setAutoframingOverride(uint8_t autoframingValue) {
+ if (autoframingValue > ANDROID_CONTROL_AUTOFRAMING_AUTO) return BAD_VALUE;
+
+ return mDevice->setAutoframingAutoBehavior(
+ static_cast<camera_metadata_enum_android_control_autoframing_t>(autoframingValue));
+}
+
bool CameraDeviceClient::supportsCameraMute() {
return mDevice->supportsCameraMute();
}
@@ -1761,6 +1791,14 @@
mDevice->clearStreamUseCaseOverrides();
}
+bool CameraDeviceClient::supportsZoomOverride() {
+ return mDevice->supportsZoomOverride();
+}
+
+status_t CameraDeviceClient::setZoomOverride(int32_t zoomOverride) {
+ return mDevice->setZoomOverride(zoomOverride);
+}
+
binder::Status CameraDeviceClient::switchToOffline(
const sp<hardware::camera2::ICameraDeviceCallbacks>& cameraCb,
const std::vector<int>& offlineOutputIds,
@@ -1813,7 +1851,9 @@
for (const auto& gbp : mConfiguredOutputs.valueAt(index).getGraphicBufferProducers()) {
sp<Surface> s = new Surface(gbp, false /*controlledByApp*/);
isCompositeStream = camera3::DepthCompositeStream::isDepthCompositeStream(s) ||
- camera3::HeicCompositeStream::isHeicCompositeStream(s);
+ camera3::HeicCompositeStream::isHeicCompositeStream(s) ||
+ (camera3::JpegRCompositeStream::isJpegRCompositeStream(s) &&
+ !mDevice->isCompositeJpegRDisabled());
if (isCompositeStream) {
auto compositeIdx = mCompositeStreamMap.indexOfKey(IInterface::asBinder(gbp));
if (compositeIdx == NAME_NOT_FOUND) {
@@ -1996,8 +2036,20 @@
if (remoteCb != 0) {
remoteCb->onDeviceIdle();
}
+
+ std::vector<hardware::CameraStreamStats> fullStreamStats = streamStats;
+ {
+ Mutex::Autolock l(mCompositeLock);
+ for (size_t i = 0; i < mCompositeStreamMap.size(); i++) {
+ hardware::CameraStreamStats compositeStats;
+ mCompositeStreamMap.valueAt(i)->getStreamStats(&compositeStats);
+ if (compositeStats.mWidth > 0) {
+ fullStreamStats.push_back(compositeStats);
+ }
+ }
+ }
Camera2ClientBase::notifyIdleWithUserTag(requestCount, resultErrorCount, deviceError,
- streamStats, mUserTag, mVideoStabilizationMode);
+ fullStreamStats, mUserTag, mVideoStabilizationMode);
}
void CameraDeviceClient::notifyShutter(const CaptureResultExtras& resultExtras,
@@ -2020,6 +2072,7 @@
// Thread safe. Don't bother locking.
sp<hardware::camera2::ICameraDeviceCallbacks> remoteCb = getRemoteCallback();
if (remoteCb != 0) {
+ ALOGV("%s: stream id %d", __FUNCTION__, streamId);
remoteCb->onPrepared(streamId);
}
}
@@ -2074,10 +2127,11 @@
mCompositeStreamMap.clear();
}
+ bool hasDeviceError = mDevice->hasDeviceError();
Camera2ClientBase::detachDevice();
int32_t closeLatencyMs = ns2ms(systemTime() - startTime);
- CameraServiceProxyWrapper::logClose(mCameraIdStr, closeLatencyMs);
+ mCameraServiceProxyWrapper->logClose(mCameraIdStr, closeLatencyMs, hasDeviceError);
}
/** Device-related methods */
@@ -2215,9 +2269,9 @@
return mDevice->infoPhysical(cameraId);
}
-bool CameraDeviceClient::isUltraHighResolutionSensor(const std::string &cameraId) {
+bool CameraDeviceClient::supportsUltraHighResolutionCapture(const std::string &cameraId) {
const CameraMetadata &deviceInfo = getStaticInfo(cameraId);
- return SessionConfigurationUtils::isUltraHighResolutionSensor(deviceInfo);
+ return SessionConfigurationUtils::supportsUltraHighResolutionCapture(deviceInfo);
}
bool CameraDeviceClient::isSensorPixelModeConsistent(
diff --git a/services/camera/libcameraservice/api2/CameraDeviceClient.h b/services/camera/libcameraservice/api2/CameraDeviceClient.h
index 4b330f3..45c904a 100644
--- a/services/camera/libcameraservice/api2/CameraDeviceClient.h
+++ b/services/camera/libcameraservice/api2/CameraDeviceClient.h
@@ -29,6 +29,7 @@
#include "common/FrameProcessorBase.h"
#include "common/Camera2ClientBase.h"
#include "CompositeStream.h"
+#include "utils/CameraServiceProxyWrapper.h"
#include "utils/SessionConfigurationUtils.h"
using android::camera3::OutputStreamInfo;
@@ -179,6 +180,7 @@
CameraDeviceClient(const sp<CameraService>& cameraService,
const sp<hardware::camera2::ICameraDeviceCallbacks>& remoteCallback,
+ std::shared_ptr<CameraServiceProxyWrapper> cameraServiceProxyWrapper,
const std::string& clientPackageName,
bool clientPackageOverride,
const std::optional<std::string>& clientFeatureId,
@@ -197,9 +199,14 @@
virtual status_t setRotateAndCropOverride(uint8_t rotateAndCrop) override;
+ virtual status_t setAutoframingOverride(uint8_t autoframingValue) override;
+
virtual bool supportsCameraMute();
virtual status_t setCameraMute(bool enabled);
+ virtual bool supportsZoomOverride() override;
+ virtual status_t setZoomOverride(int32_t zoomOverride) override;
+
virtual status_t dump(int fd, const Vector<String16>& args);
virtual status_t dumpClient(int fd, const Vector<String16>& args);
@@ -238,7 +245,7 @@
// Calculate the ANativeWindow transform from android.sensor.orientation
status_t getRotationTransformLocked(int mirrorMode, /*out*/int32_t* transform);
- bool isUltraHighResolutionSensor(const std::string &cameraId);
+ bool supportsUltraHighResolutionCapture(const std::string &cameraId);
bool isSensorPixelModeConsistent(const std::list<int> &streamIdList,
const CameraMetadata &settings);
diff --git a/services/camera/libcameraservice/api2/CameraOfflineSessionClient.cpp b/services/camera/libcameraservice/api2/CameraOfflineSessionClient.cpp
index 89c05b0..99bdb0e 100644
--- a/services/camera/libcameraservice/api2/CameraOfflineSessionClient.cpp
+++ b/services/camera/libcameraservice/api2/CameraOfflineSessionClient.cpp
@@ -49,7 +49,12 @@
mFrameProcessor = new camera2::FrameProcessorBase(mOfflineSession);
std::string threadName = fmt::sprintf("Offline-%s-FrameProc", mCameraIdStr.c_str());
- mFrameProcessor->run(threadName.c_str());
+ res = mFrameProcessor->run(threadName.c_str());
+ if (res != OK) {
+ ALOGE("%s: Unable to start frame processor thread: %s (%d)",
+ __FUNCTION__, strerror(-res), res);
+ return res;
+ }
mFrameProcessor->registerListener(camera2::FrameProcessorBase::FRAME_PROCESSOR_LISTENER_MIN_ID,
camera2::FrameProcessorBase::FRAME_PROCESSOR_LISTENER_MAX_ID,
@@ -81,6 +86,10 @@
return OK;
}
+status_t CameraOfflineSessionClient::setAutoframingOverride(uint8_t) {
+ return OK;
+}
+
bool CameraOfflineSessionClient::supportsCameraMute() {
// Offline mode doesn't support muting
return false;
@@ -97,6 +106,14 @@
void CameraOfflineSessionClient::clearStreamUseCaseOverrides() {
}
+bool CameraOfflineSessionClient::supportsZoomOverride() {
+ return false;
+}
+
+status_t CameraOfflineSessionClient::setZoomOverride(int32_t /*zoomOverride*/) {
+ return INVALID_OPERATION;
+}
+
status_t CameraOfflineSessionClient::dump(int fd, const Vector<String16>& args) {
return BasicClient::dump(fd, args);
}
@@ -247,7 +264,7 @@
mOpsActive = true;
// Transition device state to OPEN
- sCameraService->mUidPolicy->registerMonitorUid(mClientUid);
+ sCameraService->mUidPolicy->registerMonitorUid(mClientUid, /*openCamera*/true);
return OK;
}
@@ -271,7 +288,7 @@
}
mOpsCallback.clear();
- sCameraService->mUidPolicy->unregisterMonitorUid(mClientUid);
+ sCameraService->mUidPolicy->unregisterMonitorUid(mClientUid, /*closeCamera*/true);
return OK;
}
diff --git a/services/camera/libcameraservice/api2/CameraOfflineSessionClient.h b/services/camera/libcameraservice/api2/CameraOfflineSessionClient.h
index 4a5b1f2..70bad03 100644
--- a/services/camera/libcameraservice/api2/CameraOfflineSessionClient.h
+++ b/services/camera/libcameraservice/api2/CameraOfflineSessionClient.h
@@ -82,6 +82,8 @@
status_t setRotateAndCropOverride(uint8_t rotateAndCrop) override;
+ status_t setAutoframingOverride(uint8_t autoframingValue) override;
+
bool supportsCameraMute() override;
status_t setCameraMute(bool enabled) override;
@@ -92,6 +94,10 @@
void clearStreamUseCaseOverrides() override;
+ bool supportsZoomOverride() override;
+
+ status_t setZoomOverride(int32_t zoomOverride) override;
+
// permissions management
status_t startCameraOps() override;
status_t finishCameraOps() override;
diff --git a/services/camera/libcameraservice/api2/CompositeStream.cpp b/services/camera/libcameraservice/api2/CompositeStream.cpp
index 3221d74..8f53458 100644
--- a/services/camera/libcameraservice/api2/CompositeStream.cpp
+++ b/services/camera/libcameraservice/api2/CompositeStream.cpp
@@ -49,7 +49,8 @@
camera_stream_rotation_t rotation, int * id, const std::string& physicalCameraId,
const std::unordered_set<int32_t> &sensorPixelModesUsed,
std::vector<int> * surfaceIds,
- int streamSetId, bool isShared, bool isMultiResolution) {
+ int streamSetId, bool isShared, bool isMultiResolution, int32_t colorSpace,
+ int64_t dynamicProfile, int64_t streamUseCase, bool useReadoutTimestamp) {
if (hasDeferredConsumer) {
ALOGE("%s: Deferred consumers not supported in case of composite streams!",
__FUNCTION__);
@@ -75,7 +76,8 @@
}
return createInternalStreams(consumers, hasDeferredConsumer, width, height, format, rotation,
- id, physicalCameraId, sensorPixelModesUsed, surfaceIds, streamSetId, isShared);
+ id, physicalCameraId, sensorPixelModesUsed, surfaceIds, streamSetId, isShared,
+ colorSpace, dynamicProfile, streamUseCase, useReadoutTimestamp);
}
status_t CompositeStream::deleteStream() {
@@ -85,6 +87,7 @@
mCaptureResults.clear();
mFrameNumberMap.clear();
mErrorFrameNumbers.clear();
+ mRequestTimeMap.clear();
}
return deleteInternalStreams();
@@ -95,6 +98,8 @@
Mutex::Autolock l(mMutex);
if (!mErrorState && (streamId == getStreamId())) {
mPendingCaptureResults.emplace(frameNumber, CameraMetadata());
+ auto ts = systemTime();
+ mRequestTimeMap.emplace(frameNumber, ts);
}
}
@@ -109,6 +114,11 @@
void CompositeStream::eraseResult(int64_t frameNumber) {
Mutex::Autolock l(mMutex);
+ auto requestTimeIt = mRequestTimeMap.find(frameNumber);
+ if (requestTimeIt != mRequestTimeMap.end()) {
+ mRequestTimeMap.erase(requestTimeIt);
+ }
+
auto it = mPendingCaptureResults.find(frameNumber);
if (it == mPendingCaptureResults.end()) {
return;
diff --git a/services/camera/libcameraservice/api2/CompositeStream.h b/services/camera/libcameraservice/api2/CompositeStream.h
index ec16dde..1b7fc6e 100644
--- a/services/camera/libcameraservice/api2/CompositeStream.h
+++ b/services/camera/libcameraservice/api2/CompositeStream.h
@@ -46,7 +46,8 @@
camera_stream_rotation_t rotation, int *id, const std::string& physicalCameraId,
const std::unordered_set<int32_t> &sensorPixelModesUsed,
std::vector<int> *surfaceIds,
- int streamSetId, bool isShared, bool isMultiResolution);
+ int streamSetId, bool isShared, bool isMultiResolution, int32_t colorSpace,
+ int64_t dynamicProfile, int64_t streamUseCase, bool useReadoutTimestamp);
status_t deleteStream();
@@ -59,7 +60,8 @@
camera_stream_rotation_t rotation, int *id, const std::string& physicalCameraId,
const std::unordered_set<int32_t> &sensorPixelModesUsed,
std::vector<int> *surfaceIds,
- int streamSetId, bool isShared) = 0;
+ int streamSetId, bool isShared, int32_t colorSpace,
+ int64_t dynamicProfile, int64_t streamUseCase, bool useReadoutTimestamp) = 0;
// Release all internal streams and corresponding resources.
virtual status_t deleteInternalStreams() = 0;
@@ -81,6 +83,9 @@
// Notify when shutter notify is triggered
virtual void onShutter(const CaptureResultExtras& /*resultExtras*/, nsecs_t /*timestamp*/) {}
+ // Get composite stream stats
+ virtual void getStreamStats(hardware::CameraStreamStats* streamStats /*out*/) = 0;
+
void onResultAvailable(const CaptureResult& result);
bool onError(int32_t errorCode, const CaptureResultExtras& resultExtras);
@@ -138,6 +143,9 @@
// Keeps a set buffer/result frame numbers for any errors detected during processing.
std::set<int64_t> mErrorFrameNumbers;
+ // Frame number to request time map
+ std::unordered_map<int64_t, nsecs_t> mRequestTimeMap;
+
};
}; //namespace camera3
diff --git a/services/camera/libcameraservice/api2/DepthCompositeStream.cpp b/services/camera/libcameraservice/api2/DepthCompositeStream.cpp
index 01fe78b..1bd0b85 100644
--- a/services/camera/libcameraservice/api2/DepthCompositeStream.cpp
+++ b/services/camera/libcameraservice/api2/DepthCompositeStream.cpp
@@ -99,7 +99,7 @@
}
getSupportedDepthSizes(staticInfo, /*maxResolution*/false, &mSupportedDepthSizes);
- if (SessionConfigurationUtils::isUltraHighResolutionSensor(staticInfo)) {
+ if (SessionConfigurationUtils::supportsUltraHighResolutionCapture(staticInfo)) {
getSupportedDepthSizes(staticInfo, true, &mSupportedDepthSizesMaximumResolution);
}
}
@@ -582,7 +582,8 @@
camera_stream_rotation_t rotation, int *id, const std::string& physicalCameraId,
const std::unordered_set<int32_t> &sensorPixelModesUsed,
std::vector<int> *surfaceIds,
- int /*streamSetId*/, bool /*isShared*/) {
+ int /*streamSetId*/, bool /*isShared*/, int32_t /*colorSpace*/,
+ int64_t /*dynamicProfile*/, int64_t /*streamUseCase*/, bool useReadoutTimestamp) {
if (mSupportedDepthSizes.empty()) {
ALOGE("%s: This camera device doesn't support any depth map streams!", __FUNCTION__);
return INVALID_OPERATION;
@@ -613,7 +614,14 @@
mBlobSurface = new Surface(producer);
ret = device->createStream(mBlobSurface, width, height, format, kJpegDataSpace, rotation,
- id, physicalCameraId, sensorPixelModesUsed, surfaceIds);
+ id, physicalCameraId, sensorPixelModesUsed, surfaceIds,
+ camera3::CAMERA3_STREAM_SET_ID_INVALID, /*isShared*/false, /*isMultiResolution*/false,
+ /*consumerUsage*/0, ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD,
+ ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_DEFAULT,
+ OutputConfiguration::TIMESTAMP_BASE_DEFAULT,
+ OutputConfiguration::MIRROR_MODE_AUTO,
+ ANDROID_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP_UNSPECIFIED,
+ useReadoutTimestamp);
if (ret == OK) {
mBlobStreamId = *id;
mBlobSurfaceId = (*surfaceIds)[0];
@@ -630,7 +638,14 @@
std::vector<int> depthSurfaceId;
ret = device->createStream(mDepthSurface, depthWidth, depthHeight, kDepthMapPixelFormat,
kDepthMapDataSpace, rotation, &mDepthStreamId, physicalCameraId, sensorPixelModesUsed,
- &depthSurfaceId);
+ &depthSurfaceId, camera3::CAMERA3_STREAM_SET_ID_INVALID, /*isShared*/false,
+ /*isMultiResolution*/false, /*consumerUsage*/0,
+ ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD,
+ ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_DEFAULT,
+ OutputConfiguration::TIMESTAMP_BASE_DEFAULT,
+ OutputConfiguration::MIRROR_MODE_AUTO,
+ ANDROID_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP_UNSPECIFIED,
+ useReadoutTimestamp);
if (ret == OK) {
mDepthSurfaceId = depthSurfaceId[0];
} else {
@@ -887,7 +902,7 @@
return BAD_VALUE;
}
- if (SessionConfigurationUtils::isUltraHighResolutionSensor(ch)) {
+ if (SessionConfigurationUtils::supportsUltraHighResolutionCapture(ch)) {
getSupportedDepthSizes(ch, /*maxResolution*/true, &depthSizesMaximumResolution);
if (depthSizesMaximumResolution.empty()) {
ALOGE("%s: No depth stream configurations for maximum resolution present",
diff --git a/services/camera/libcameraservice/api2/DepthCompositeStream.h b/services/camera/libcameraservice/api2/DepthCompositeStream.h
index a8c40ae..f797f9c 100644
--- a/services/camera/libcameraservice/api2/DepthCompositeStream.h
+++ b/services/camera/libcameraservice/api2/DepthCompositeStream.h
@@ -53,7 +53,8 @@
camera_stream_rotation_t rotation, int *id, const std::string& physicalCameraId,
const std::unordered_set<int32_t> &sensorPixelModesUsed,
std::vector<int> *surfaceIds,
- int streamSetId, bool isShared) override;
+ int streamSetId, bool isShared, int32_t colorSpace,
+ int64_t dynamicProfile, int64_t streamUseCase, bool useReadoutTimestamp) override;
status_t deleteInternalStreams() override;
status_t configureStream() override;
status_t insertGbp(SurfaceMap* /*out*/outSurfaceMap, Vector<int32_t>* /*out*/outputStreamIds,
@@ -68,6 +69,9 @@
static status_t getCompositeStreamInfo(const OutputStreamInfo &streamInfo,
const CameraMetadata& ch, std::vector<OutputStreamInfo>* compositeOutput /*out*/);
+ // Get composite stream stats
+ void getStreamStats(hardware::CameraStreamStats*) override {};
+
protected:
bool threadLoop() override;
diff --git a/services/camera/libcameraservice/api2/HeicCompositeStream.cpp b/services/camera/libcameraservice/api2/HeicCompositeStream.cpp
index 97c1ae1..68e9ad4 100644
--- a/services/camera/libcameraservice/api2/HeicCompositeStream.cpp
+++ b/services/camera/libcameraservice/api2/HeicCompositeStream.cpp
@@ -121,8 +121,8 @@
camera_stream_rotation_t rotation, int *id, const std::string& physicalCameraId,
const std::unordered_set<int32_t> &sensorPixelModesUsed,
std::vector<int> *surfaceIds,
- int /*streamSetId*/, bool /*isShared*/) {
-
+ int /*streamSetId*/, bool /*isShared*/, int32_t colorSpace,
+ int64_t /*dynamicProfile*/, int64_t /*streamUseCase*/, bool useReadoutTimestamp) {
sp<CameraDeviceBase> device = mDevice.promote();
if (!device.get()) {
ALOGE("%s: Invalid camera device!", __FUNCTION__);
@@ -148,7 +148,14 @@
res = device->createStream(mAppSegmentSurface, mAppSegmentMaxSize, 1, format,
kAppSegmentDataSpace, rotation, &mAppSegmentStreamId, physicalCameraId,
- sensorPixelModesUsed,surfaceIds);
+ sensorPixelModesUsed, surfaceIds, camera3::CAMERA3_STREAM_SET_ID_INVALID,
+ /*isShared*/false, /*isMultiResolution*/false,
+ /*consumerUsage*/0, ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD,
+ ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_DEFAULT,
+ OutputConfiguration::TIMESTAMP_BASE_DEFAULT,
+ OutputConfiguration::MIRROR_MODE_AUTO,
+ colorSpace,
+ useReadoutTimestamp);
if (res == OK) {
mAppSegmentSurfaceId = (*surfaceIds)[0];
} else {
@@ -184,7 +191,14 @@
int srcStreamFmt = mUseGrid ? HAL_PIXEL_FORMAT_YCbCr_420_888 :
HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED;
res = device->createStream(mMainImageSurface, width, height, srcStreamFmt, kHeifDataSpace,
- rotation, id, physicalCameraId, sensorPixelModesUsed, &sourceSurfaceId);
+ rotation, id, physicalCameraId, sensorPixelModesUsed, &sourceSurfaceId,
+ camera3::CAMERA3_STREAM_SET_ID_INVALID, /*isShared*/false, /*isMultiResolution*/false,
+ /*consumerUsage*/0, ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD,
+ ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_DEFAULT,
+ OutputConfiguration::TIMESTAMP_BASE_DEFAULT,
+ OutputConfiguration::MIRROR_MODE_AUTO,
+ colorSpace,
+ useReadoutTimestamp);
if (res == OK) {
mMainImageSurfaceId = sourceSurfaceId[0];
mMainImageStreamId = *id;
diff --git a/services/camera/libcameraservice/api2/HeicCompositeStream.h b/services/camera/libcameraservice/api2/HeicCompositeStream.h
index 78c5f02..b539cdd 100644
--- a/services/camera/libcameraservice/api2/HeicCompositeStream.h
+++ b/services/camera/libcameraservice/api2/HeicCompositeStream.h
@@ -47,8 +47,8 @@
bool hasDeferredConsumer, uint32_t width, uint32_t height, int format,
camera_stream_rotation_t rotation, int *id, const std::string& physicalCameraId,
const std::unordered_set<int32_t> &sensorPixelModesUsed,
- std::vector<int> *surfaceIds,
- int streamSetId, bool isShared) override;
+ std::vector<int> *surfaceIds, int streamSetId, bool isShared, int32_t colorSpace,
+ int64_t dynamicProfile, int64_t streamUseCase, bool useReadoutTimestamp) override;
status_t deleteInternalStreams() override;
@@ -75,6 +75,9 @@
static status_t getCompositeStreamInfo(const OutputStreamInfo &streamInfo,
const CameraMetadata& ch, std::vector<OutputStreamInfo>* compositeOutput /*out*/);
+ // Get composite stream stats
+ void getStreamStats(hardware::CameraStreamStats*) override {};
+
static bool isSizeSupportedByHeifEncoder(int32_t width, int32_t height,
bool* useHeic, bool* useGrid, int64_t* stall, AString* hevcName = nullptr);
static bool isInMemoryTempFileSupported();
diff --git a/services/camera/libcameraservice/api2/JpegRCompositeStream.cpp b/services/camera/libcameraservice/api2/JpegRCompositeStream.cpp
new file mode 100644
index 0000000..988446b
--- /dev/null
+++ b/services/camera/libcameraservice/api2/JpegRCompositeStream.cpp
@@ -0,0 +1,879 @@
+/*
+ * Copyright (C) 2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include "hardware/gralloc.h"
+#include "system/graphics-base-v1.0.h"
+#include "system/graphics-base-v1.1.h"
+#define LOG_TAG "Camera3-JpegRCompositeStream"
+#define ATRACE_TAG ATRACE_TAG_CAMERA
+//#define LOG_NDEBUG 0
+
+#include <aidl/android/hardware/camera/device/CameraBlob.h>
+#include <aidl/android/hardware/camera/device/CameraBlobId.h>
+
+#include "common/CameraProviderManager.h"
+#include <gui/Surface.h>
+#include <ultrahdr/jpegr.h>
+#include <utils/ExifUtils.h>
+#include <utils/Log.h>
+#include "utils/SessionConfigurationUtils.h"
+#include <utils/Trace.h>
+
+#include "JpegRCompositeStream.h"
+
+namespace android {
+namespace camera3 {
+
+using aidl::android::hardware::camera::device::CameraBlob;
+using aidl::android::hardware::camera::device::CameraBlobId;
+
+JpegRCompositeStream::JpegRCompositeStream(sp<CameraDeviceBase> device,
+ wp<hardware::camera2::ICameraDeviceCallbacks> cb) :
+ CompositeStream(device, cb),
+ mBlobStreamId(-1),
+ mBlobSurfaceId(-1),
+ mP010StreamId(-1),
+ mP010SurfaceId(-1),
+ mBlobWidth(0),
+ mBlobHeight(0),
+ mP010BufferAcquired(false),
+ mBlobBufferAcquired(false),
+ mOutputColorSpace(ANDROID_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP_UNSPECIFIED),
+ mOutputStreamUseCase(0),
+ mFirstRequestLatency(-1),
+ mProducerListener(new ProducerListener()),
+ mMaxJpegBufferSize(-1),
+ mUHRMaxJpegBufferSize(-1),
+ mStaticInfo(device->info()) {
+ auto entry = mStaticInfo.find(ANDROID_JPEG_MAX_SIZE);
+ if (entry.count > 0) {
+ mMaxJpegBufferSize = entry.data.i32[0];
+ } else {
+ ALOGW("%s: Maximum jpeg size absent from camera characteristics", __FUNCTION__);
+ }
+
+ mUHRMaxJpegSize =
+ SessionConfigurationUtils::getMaxJpegResolution(mStaticInfo,
+ /*ultraHighResolution*/true);
+ mDefaultMaxJpegSize =
+ SessionConfigurationUtils::getMaxJpegResolution(mStaticInfo,
+ /*isUltraHighResolution*/false);
+
+ mUHRMaxJpegBufferSize =
+ SessionConfigurationUtils::getUHRMaxJpegBufferSize(mUHRMaxJpegSize, mDefaultMaxJpegSize,
+ mMaxJpegBufferSize);
+}
+
+JpegRCompositeStream::~JpegRCompositeStream() {
+ mBlobConsumer.clear(),
+ mBlobSurface.clear(),
+ mBlobStreamId = -1;
+ mBlobSurfaceId = -1;
+ mP010Consumer.clear();
+ mP010Surface.clear();
+ mP010Consumer = nullptr;
+ mP010Surface = nullptr;
+}
+
+void JpegRCompositeStream::compilePendingInputLocked() {
+ CpuConsumer::LockedBuffer imgBuffer;
+
+ while (mSupportInternalJpeg && !mInputJpegBuffers.empty() && !mBlobBufferAcquired) {
+ auto it = mInputJpegBuffers.begin();
+ auto res = mBlobConsumer->lockNextBuffer(&imgBuffer);
+ if (res == NOT_ENOUGH_DATA) {
+ // Can not lock any more buffers.
+ break;
+ } else if (res != OK) {
+ ALOGE("%s: Error locking blob image buffer: %s (%d)", __FUNCTION__,
+ strerror(-res), res);
+ mPendingInputFrames[*it].error = true;
+ mInputJpegBuffers.erase(it);
+ continue;
+ }
+
+ if (*it != imgBuffer.timestamp) {
+ ALOGW("%s: Expecting jpeg buffer with time stamp: %" PRId64 " received buffer with "
+ "time stamp: %" PRId64, __FUNCTION__, *it, imgBuffer.timestamp);
+ }
+
+ if ((mPendingInputFrames.find(imgBuffer.timestamp) != mPendingInputFrames.end()) &&
+ (mPendingInputFrames[imgBuffer.timestamp].error)) {
+ mBlobConsumer->unlockBuffer(imgBuffer);
+ } else {
+ mPendingInputFrames[imgBuffer.timestamp].jpegBuffer = imgBuffer;
+ mBlobBufferAcquired = true;
+ }
+ mInputJpegBuffers.erase(it);
+ }
+
+ while (!mInputP010Buffers.empty() && !mP010BufferAcquired) {
+ auto it = mInputP010Buffers.begin();
+ auto res = mP010Consumer->lockNextBuffer(&imgBuffer);
+ if (res == NOT_ENOUGH_DATA) {
+ // Can not lock any more buffers.
+ break;
+ } else if (res != OK) {
+ ALOGE("%s: Error receiving P010 image buffer: %s (%d)", __FUNCTION__,
+ strerror(-res), res);
+ mPendingInputFrames[*it].error = true;
+ mInputP010Buffers.erase(it);
+ continue;
+ }
+
+ if (*it != imgBuffer.timestamp) {
+ ALOGW("%s: Expecting P010 buffer with time stamp: %" PRId64 " received buffer with "
+ "time stamp: %" PRId64, __FUNCTION__, *it, imgBuffer.timestamp);
+ }
+
+ if ((mPendingInputFrames.find(imgBuffer.timestamp) != mPendingInputFrames.end()) &&
+ (mPendingInputFrames[imgBuffer.timestamp].error)) {
+ mP010Consumer->unlockBuffer(imgBuffer);
+ } else {
+ mPendingInputFrames[imgBuffer.timestamp].p010Buffer = imgBuffer;
+ mP010BufferAcquired = true;
+ }
+ mInputP010Buffers.erase(it);
+ }
+
+ while (!mCaptureResults.empty()) {
+ auto it = mCaptureResults.begin();
+ // Negative timestamp indicates that something went wrong during the capture result
+ // collection process.
+ if (it->first >= 0) {
+ auto frameNumber = std::get<0>(it->second);
+ mPendingInputFrames[it->first].frameNumber = frameNumber;
+ mPendingInputFrames[it->first].result = std::get<1>(it->second);
+ mSessionStatsBuilder.incResultCounter(false /*dropped*/);
+ }
+ mCaptureResults.erase(it);
+ }
+
+ while (!mFrameNumberMap.empty()) {
+ auto it = mFrameNumberMap.begin();
+ auto frameNumber = it->first;
+ mPendingInputFrames[it->second].frameNumber = frameNumber;
+ auto requestTimeIt = mRequestTimeMap.find(frameNumber);
+ if (requestTimeIt != mRequestTimeMap.end()) {
+ mPendingInputFrames[it->second].requestTimeNs = requestTimeIt->second;
+ mRequestTimeMap.erase(requestTimeIt);
+ }
+ mFrameNumberMap.erase(it);
+ }
+
+ auto it = mErrorFrameNumbers.begin();
+ while (it != mErrorFrameNumbers.end()) {
+ bool frameFound = false;
+ for (auto &inputFrame : mPendingInputFrames) {
+ if (inputFrame.second.frameNumber == *it) {
+ inputFrame.second.error = true;
+ frameFound = true;
+ break;
+ }
+ }
+
+ if (frameFound) {
+ mSessionStatsBuilder.incCounter(mP010StreamId, true /*dropped*/,
+ 0 /*captureLatencyMs*/);
+ it = mErrorFrameNumbers.erase(it);
+ } else {
+ ALOGW("%s: Not able to find failing input with frame number: %" PRId64, __FUNCTION__,
+ *it);
+ it++;
+ }
+ }
+}
+
+bool JpegRCompositeStream::getNextReadyInputLocked(int64_t *currentTs /*inout*/) {
+ if (currentTs == nullptr) {
+ return false;
+ }
+
+ bool newInputAvailable = false;
+ for (const auto& it : mPendingInputFrames) {
+ if ((!it.second.error) && (it.second.p010Buffer.data != nullptr) &&
+ (it.second.requestTimeNs != -1) &&
+ ((it.second.jpegBuffer.data != nullptr) || !mSupportInternalJpeg) &&
+ (it.first < *currentTs)) {
+ *currentTs = it.first;
+ newInputAvailable = true;
+ }
+ }
+
+ return newInputAvailable;
+}
+
+int64_t JpegRCompositeStream::getNextFailingInputLocked(int64_t *currentTs /*inout*/) {
+ int64_t ret = -1;
+ if (currentTs == nullptr) {
+ return ret;
+ }
+
+ for (const auto& it : mPendingInputFrames) {
+ if (it.second.error && !it.second.errorNotified && (it.first < *currentTs)) {
+ *currentTs = it.first;
+ ret = it.second.frameNumber;
+ }
+ }
+
+ return ret;
+}
+
+status_t JpegRCompositeStream::processInputFrame(nsecs_t ts, const InputFrame &inputFrame) {
+ status_t res;
+ sp<ANativeWindow> outputANW = mOutputSurface;
+ ANativeWindowBuffer *anb;
+ int fenceFd;
+ void *dstBuffer;
+
+ size_t maxJpegRBufferSize = 0;
+ if (mMaxJpegBufferSize > 0) {
+ // If this is an ultra high resolution sensor and the input frames size
+ // is > default res jpeg.
+ if (mUHRMaxJpegSize.width != 0 &&
+ inputFrame.jpegBuffer.width * inputFrame.jpegBuffer.height >
+ mDefaultMaxJpegSize.width * mDefaultMaxJpegSize.height) {
+ maxJpegRBufferSize = mUHRMaxJpegBufferSize;
+ } else {
+ maxJpegRBufferSize = mMaxJpegBufferSize;
+ }
+ } else {
+ maxJpegRBufferSize = inputFrame.p010Buffer.width * inputFrame.p010Buffer.height;
+ }
+
+ uint8_t jpegQuality = 100;
+ auto entry = inputFrame.result.find(ANDROID_JPEG_QUALITY);
+ if (entry.count > 0) {
+ jpegQuality = entry.data.u8[0];
+ }
+
+ if ((res = native_window_set_buffers_dimensions(mOutputSurface.get(), maxJpegRBufferSize, 1))
+ != OK) {
+ ALOGE("%s: Unable to configure stream buffer dimensions"
+ " %zux%u for stream %d", __FUNCTION__, maxJpegRBufferSize, 1U, mP010StreamId);
+ return res;
+ }
+
+ res = outputANW->dequeueBuffer(mOutputSurface.get(), &anb, &fenceFd);
+ if (res != OK) {
+ ALOGE("%s: Error retrieving output buffer: %s (%d)", __FUNCTION__, strerror(-res),
+ res);
+ return res;
+ }
+
+ sp<GraphicBuffer> gb = GraphicBuffer::from(anb);
+ GraphicBufferLocker gbLocker(gb);
+ res = gbLocker.lockAsync(&dstBuffer, fenceFd);
+ if (res != OK) {
+ ALOGE("%s: Error trying to lock output buffer fence: %s (%d)", __FUNCTION__,
+ strerror(-res), res);
+ outputANW->cancelBuffer(mOutputSurface.get(), anb, /*fence*/ -1);
+ return res;
+ }
+
+ if ((gb->getWidth() < maxJpegRBufferSize) || (gb->getHeight() != 1)) {
+ ALOGE("%s: Blob buffer size mismatch, expected %zux%u received %dx%d", __FUNCTION__,
+ maxJpegRBufferSize, 1, gb->getWidth(), gb->getHeight());
+ outputANW->cancelBuffer(mOutputSurface.get(), anb, /*fence*/ -1);
+ return BAD_VALUE;
+ }
+
+ size_t actualJpegRSize = 0;
+ ultrahdr::jpegr_uncompressed_struct p010;
+ ultrahdr::jpegr_compressed_struct jpegR;
+ ultrahdr::JpegR jpegREncoder;
+
+ p010.height = inputFrame.p010Buffer.height;
+ p010.width = inputFrame.p010Buffer.width;
+ p010.colorGamut = ultrahdr::ultrahdr_color_gamut::ULTRAHDR_COLORGAMUT_BT2100;
+ p010.data = inputFrame.p010Buffer.data;
+ p010.chroma_data = inputFrame.p010Buffer.dataCb;
+ // Strides are expected to be in pixels not bytes
+ p010.luma_stride = inputFrame.p010Buffer.stride / 2;
+ p010.chroma_stride = inputFrame.p010Buffer.chromaStride / 2;
+
+ jpegR.data = dstBuffer;
+ jpegR.maxLength = maxJpegRBufferSize;
+
+ ultrahdr::ultrahdr_transfer_function transferFunction;
+ switch (mP010DynamicRange) {
+ case ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_HDR10:
+ case ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_HDR10_PLUS:
+ transferFunction = ultrahdr::ultrahdr_transfer_function::ULTRAHDR_TF_PQ;
+ break;
+ default:
+ transferFunction = ultrahdr::ultrahdr_transfer_function::ULTRAHDR_TF_HLG;
+ }
+
+ if (mSupportInternalJpeg) {
+ ultrahdr::jpegr_compressed_struct jpeg;
+
+ jpeg.data = inputFrame.jpegBuffer.data;
+ jpeg.length = android::camera2::JpegProcessor::findJpegSize(inputFrame.jpegBuffer.data,
+ inputFrame.jpegBuffer.width);
+ if (jpeg.length == 0) {
+ ALOGW("%s: Failed to find input jpeg size, default to using entire buffer!",
+ __FUNCTION__);
+ jpeg.length = inputFrame.jpegBuffer.width;
+ }
+
+ if (mOutputColorSpace == ANDROID_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP_DISPLAY_P3) {
+ jpeg.colorGamut = ultrahdr::ultrahdr_color_gamut::ULTRAHDR_COLORGAMUT_P3;
+ } else {
+ jpeg.colorGamut = ultrahdr::ultrahdr_color_gamut::ULTRAHDR_COLORGAMUT_BT709;
+ }
+
+ res = jpegREncoder.encodeJPEGR(&p010, &jpeg, transferFunction, &jpegR);
+ } else {
+ const uint8_t* exifBuffer = nullptr;
+ size_t exifBufferSize = 0;
+ std::unique_ptr<ExifUtils> utils(ExifUtils::create());
+ utils->initializeEmpty();
+ utils->setFromMetadata(inputFrame.result, mStaticInfo, inputFrame.p010Buffer.width,
+ inputFrame.p010Buffer.height);
+ if (utils->generateApp1()) {
+ exifBuffer = utils->getApp1Buffer();
+ exifBufferSize = utils->getApp1Length();
+ } else {
+ ALOGE("%s: Unable to generate App1 buffer", __FUNCTION__);
+ }
+
+ ultrahdr::jpegr_exif_struct exif;
+ exif.data = reinterpret_cast<void*>(const_cast<uint8_t*>(exifBuffer));
+ exif.length = exifBufferSize;
+
+ res = jpegREncoder.encodeJPEGR(&p010, transferFunction, &jpegR, jpegQuality, &exif);
+ }
+
+ if (res != OK) {
+ ALOGE("%s: Error trying to encode JPEG/R: %s (%d)", __FUNCTION__, strerror(-res), res);
+ return res;
+ }
+
+ actualJpegRSize = jpegR.length;
+
+ size_t finalJpegRSize = actualJpegRSize + sizeof(CameraBlob);
+ if (finalJpegRSize > maxJpegRBufferSize) {
+ ALOGE("%s: Final jpeg buffer not large enough for the jpeg blob header", __FUNCTION__);
+ outputANW->cancelBuffer(mOutputSurface.get(), anb, /*fence*/ -1);
+ return NO_MEMORY;
+ }
+
+ res = native_window_set_buffers_timestamp(mOutputSurface.get(), ts);
+ if (res != OK) {
+ ALOGE("%s: Stream %d: Error setting timestamp: %s (%d)", __FUNCTION__,
+ getStreamId(), strerror(-res), res);
+ return res;
+ }
+
+ ALOGV("%s: Final jpeg size: %zu", __func__, finalJpegRSize);
+ uint8_t* header = static_cast<uint8_t *> (dstBuffer) +
+ (gb->getWidth() - sizeof(CameraBlob));
+ CameraBlob blobHeader = {
+ .blobId = CameraBlobId::JPEG,
+ .blobSizeBytes = static_cast<int32_t>(actualJpegRSize)
+ };
+ memcpy(header, &blobHeader, sizeof(CameraBlob));
+
+ if (inputFrame.requestTimeNs != -1) {
+ auto captureLatency = ns2ms(systemTime() - inputFrame.requestTimeNs);
+ mSessionStatsBuilder.incCounter(mP010StreamId, false /*dropped*/, captureLatency);
+ if (mFirstRequestLatency == -1) {
+ mFirstRequestLatency = captureLatency;
+ }
+ }
+ outputANW->queueBuffer(mOutputSurface.get(), anb, /*fence*/ -1);
+
+ return res;
+}
+
+void JpegRCompositeStream::releaseInputFrameLocked(InputFrame *inputFrame /*out*/) {
+ if (inputFrame == nullptr) {
+ return;
+ }
+
+ if (inputFrame->p010Buffer.data != nullptr) {
+ mP010Consumer->unlockBuffer(inputFrame->p010Buffer);
+ inputFrame->p010Buffer.data = nullptr;
+ mP010BufferAcquired = false;
+ }
+
+ if (inputFrame->jpegBuffer.data != nullptr) {
+ mBlobConsumer->unlockBuffer(inputFrame->jpegBuffer);
+ inputFrame->jpegBuffer.data = nullptr;
+ mBlobBufferAcquired = false;
+ }
+
+ if ((inputFrame->error || mErrorState) && !inputFrame->errorNotified) {
+ //TODO: Figure out correct requestId
+ notifyError(inputFrame->frameNumber, -1 /*requestId*/);
+ inputFrame->errorNotified = true;
+ mSessionStatsBuilder.incCounter(mP010StreamId, true /*dropped*/, 0 /*captureLatencyMs*/);
+ }
+}
+
+void JpegRCompositeStream::releaseInputFramesLocked(int64_t currentTs) {
+ auto it = mPendingInputFrames.begin();
+ while (it != mPendingInputFrames.end()) {
+ if (it->first <= currentTs) {
+ releaseInputFrameLocked(&it->second);
+ it = mPendingInputFrames.erase(it);
+ } else {
+ it++;
+ }
+ }
+}
+
+bool JpegRCompositeStream::threadLoop() {
+ int64_t currentTs = INT64_MAX;
+ bool newInputAvailable = false;
+
+ {
+ Mutex::Autolock l(mMutex);
+
+ if (mErrorState) {
+ // In case we landed in error state, return any pending buffers and
+ // halt all further processing.
+ compilePendingInputLocked();
+ releaseInputFramesLocked(currentTs);
+ return false;
+ }
+
+ while (!newInputAvailable) {
+ compilePendingInputLocked();
+            newInputAvailable = getNextReadyInputLocked(&currentTs);
+            if (!newInputAvailable) {
+                auto failingFrameNumber = getNextFailingInputLocked(&currentTs);
+ if (failingFrameNumber >= 0) {
+ // We cannot erase 'mPendingInputFrames[currentTs]' at this point because it is
+ // possible for two internal stream buffers to fail. In such scenario the
+ // composite stream should notify the client about a stream buffer error only
+ // once and this information is kept within 'errorNotified'.
+ // Any present failed input frames will be removed on a subsequent call to
+ // 'releaseInputFramesLocked()'.
+ releaseInputFrameLocked(&mPendingInputFrames[currentTs]);
+ currentTs = INT64_MAX;
+ }
+
+ auto ret = mInputReadyCondition.waitRelative(mMutex, kWaitDuration);
+ if (ret == TIMED_OUT) {
+ return true;
+ } else if (ret != OK) {
+ ALOGE("%s: Timed wait on condition failed: %s (%d)", __FUNCTION__,
+ strerror(-ret), ret);
+ return false;
+ }
+ }
+ }
+ }
+
+ auto res = processInputFrame(currentTs, mPendingInputFrames[currentTs]);
+ Mutex::Autolock l(mMutex);
+ if (res != OK) {
+ ALOGE("%s: Failed processing frame with timestamp: %" PRIu64 ": %s (%d)", __FUNCTION__,
+ currentTs, strerror(-res), res);
+ mPendingInputFrames[currentTs].error = true;
+ }
+
+ releaseInputFramesLocked(currentTs);
+
+ return true;
+}
+
+bool JpegRCompositeStream::isJpegRCompositeStream(const sp<Surface> &surface) {
+ if (CameraProviderManager::kFrameworkJpegRDisabled) {
+ return false;
+ }
+ ANativeWindow *anw = surface.get();
+ status_t err;
+ int format;
+ if ((err = anw->query(anw, NATIVE_WINDOW_FORMAT, &format)) != OK) {
+ ALOGE("%s: Failed to query Surface format: %s (%d)", __FUNCTION__, strerror(-err),
+ err);
+ return false;
+ }
+
+ int dataspace;
+ if ((err = anw->query(anw, NATIVE_WINDOW_DEFAULT_DATASPACE, &dataspace)) != OK) {
+ ALOGE("%s: Failed to query Surface dataspace: %s (%d)", __FUNCTION__, strerror(-err),
+ err);
+ return false;
+ }
+
+ if ((format == HAL_PIXEL_FORMAT_BLOB) && (dataspace == static_cast<int>(kJpegRDataSpace))) {
+ return true;
+ }
+
+ return false;
+}
+
+void JpegRCompositeStream::deriveDynamicRangeAndDataspace(int64_t dynamicProfile,
+ int64_t* /*out*/dynamicRange, int64_t* /*out*/dataSpace) {
+ if ((dynamicRange == nullptr) || (dataSpace == nullptr)) {
+ return;
+ }
+
+ switch (dynamicProfile) {
+ case ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_HDR10:
+ case ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_HDR10_PLUS:
+ *dynamicRange = dynamicProfile;
+ *dataSpace = HAL_DATASPACE_BT2020_ITU_PQ;
+ break;
+ case ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_DOLBY_VISION_10B_HDR_REF:
+ case ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_DOLBY_VISION_10B_HDR_REF_PO:
+ case ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_DOLBY_VISION_10B_HDR_OEM:
+ case ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_DOLBY_VISION_10B_HDR_OEM_PO:
+ *dynamicRange = dynamicProfile;
+ *dataSpace = HAL_DATASPACE_BT2020_ITU_HLG;
+ break;
+ default:
+ *dynamicRange = kP010DefaultDynamicRange;
+ *dataSpace = kP010DefaultDataSpace;
+ }
+
+}
+
+status_t JpegRCompositeStream::createInternalStreams(const std::vector<sp<Surface>>& consumers,
+ bool /*hasDeferredConsumer*/, uint32_t width, uint32_t height, int format,
+ camera_stream_rotation_t rotation, int *id, const std::string& physicalCameraId,
+ const std::unordered_set<int32_t> &sensorPixelModesUsed,
+ std::vector<int> *surfaceIds,
+ int /*streamSetId*/, bool /*isShared*/, int32_t colorSpace,
+ int64_t dynamicProfile, int64_t streamUseCase, bool useReadoutTimestamp) {
+ sp<CameraDeviceBase> device = mDevice.promote();
+ if (!device.get()) {
+ ALOGE("%s: Invalid camera device!", __FUNCTION__);
+ return NO_INIT;
+ }
+
+ deriveDynamicRangeAndDataspace(dynamicProfile, &mP010DynamicRange, &mP010DataSpace);
+ mSupportInternalJpeg = CameraProviderManager::isConcurrentDynamicRangeCaptureSupported(
+ mStaticInfo, mP010DynamicRange,
+ ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD);
+
+ sp<IGraphicBufferProducer> producer;
+ sp<IGraphicBufferConsumer> consumer;
+ BufferQueue::createBufferQueue(&producer, &consumer);
+ mP010Consumer = new CpuConsumer(consumer, /*maxLockedBuffers*/1, /*controlledByApp*/ true);
+ mP010Consumer->setFrameAvailableListener(this);
+ mP010Consumer->setName(String8("Camera3-P010CompositeStream"));
+ mP010Surface = new Surface(producer);
+
+ auto ret = device->createStream(mP010Surface, width, height, kP010PixelFormat,
+ static_cast<android_dataspace>(mP010DataSpace), rotation,
+ id, physicalCameraId, sensorPixelModesUsed, surfaceIds,
+ camera3::CAMERA3_STREAM_SET_ID_INVALID, false /*isShared*/, false /*isMultiResolution*/,
+ GRALLOC_USAGE_SW_READ_OFTEN, mP010DynamicRange, streamUseCase,
+ OutputConfiguration::TIMESTAMP_BASE_DEFAULT, OutputConfiguration::MIRROR_MODE_AUTO,
+ ANDROID_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP_UNSPECIFIED, useReadoutTimestamp);
+ if (ret == OK) {
+ mP010StreamId = *id;
+ mP010SurfaceId = (*surfaceIds)[0];
+ mOutputSurface = consumers[0];
+ } else {
+ return ret;
+ }
+
+ if (mSupportInternalJpeg) {
+ BufferQueue::createBufferQueue(&producer, &consumer);
+ mBlobConsumer = new CpuConsumer(consumer, /*maxLockedBuffers*/ 1, /*controlledByApp*/ true);
+ mBlobConsumer->setFrameAvailableListener(this);
+ mBlobConsumer->setName(String8("Camera3-JpegRCompositeStream"));
+ mBlobSurface = new Surface(producer);
+ std::vector<int> blobSurfaceId;
+ ret = device->createStream(mBlobSurface, width, height, format,
+ kJpegDataSpace, rotation, &mBlobStreamId, physicalCameraId, sensorPixelModesUsed,
+ &blobSurfaceId,
+ /*streamSetI*/ camera3::CAMERA3_STREAM_SET_ID_INVALID,
+ /*isShared*/ false,
+ /*isMultiResolution*/ false,
+ /*consumerUsage*/ GRALLOC_USAGE_SW_READ_OFTEN,
+ /*dynamicProfile*/ ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD,
+ streamUseCase,
+ /*timestampBase*/ OutputConfiguration::TIMESTAMP_BASE_DEFAULT,
+ /*mirrorMode*/ OutputConfiguration::MIRROR_MODE_AUTO,
+ /*colorSpace*/ colorSpace, useReadoutTimestamp);
+ if (ret == OK) {
+ mBlobSurfaceId = blobSurfaceId[0];
+ } else {
+ return ret;
+ }
+
+ ret = registerCompositeStreamListener(mBlobStreamId);
+ if (ret != OK) {
+ ALOGE("%s: Failed to register jpeg stream listener!", __FUNCTION__);
+ return ret;
+ }
+ }
+
+ ret = registerCompositeStreamListener(getStreamId());
+ if (ret != OK) {
+ ALOGE("%s: Failed to register P010 stream listener!", __FUNCTION__);
+ return ret;
+ }
+
+ mOutputColorSpace = colorSpace;
+ mOutputStreamUseCase = streamUseCase;
+ mBlobWidth = width;
+ mBlobHeight = height;
+
+ return ret;
+}
+
+status_t JpegRCompositeStream::configureStream() {
+ if (isRunning()) {
+ // Processing thread is already running, nothing more to do.
+ return NO_ERROR;
+ }
+
+ if (mOutputSurface.get() == nullptr) {
+ ALOGE("%s: No valid output surface set!", __FUNCTION__);
+ return NO_INIT;
+ }
+
+ auto res = mOutputSurface->connect(NATIVE_WINDOW_API_CAMERA, mProducerListener);
+ if (res != OK) {
+ ALOGE("%s: Unable to connect to native window for stream %d",
+ __FUNCTION__, mP010StreamId);
+ return res;
+ }
+
+ if ((res = native_window_set_buffers_format(mOutputSurface.get(), HAL_PIXEL_FORMAT_BLOB))
+ != OK) {
+ ALOGE("%s: Unable to configure stream buffer format for stream %d", __FUNCTION__,
+ mP010StreamId);
+ return res;
+ }
+
+ if ((res = native_window_set_usage(mOutputSurface.get(),
+ GRALLOC_USAGE_SW_READ_OFTEN | GRALLOC_USAGE_SW_WRITE_OFTEN)) != OK) {
+ ALOGE("%s: Unable to configure stream buffer usage for stream %d", __FUNCTION__,
+ mP010StreamId);
+ return res;
+ }
+
+ int maxProducerBuffers;
+ ANativeWindow *anw = mP010Surface.get();
+ if ((res = anw->query(anw, NATIVE_WINDOW_MIN_UNDEQUEUED_BUFFERS, &maxProducerBuffers)) != OK) {
+ ALOGE("%s: Unable to query consumer undequeued"
+ " buffer count for stream %d", __FUNCTION__, mP010StreamId);
+ return res;
+ }
+
+ ANativeWindow *anwConsumer = mOutputSurface.get();
+ int maxConsumerBuffers;
+ if ((res = anwConsumer->query(anwConsumer, NATIVE_WINDOW_MIN_UNDEQUEUED_BUFFERS,
+ &maxConsumerBuffers)) != OK) {
+ ALOGE("%s: Unable to query consumer undequeued"
+ " buffer count for stream %d", __FUNCTION__, mP010StreamId);
+ return res;
+ }
+
+ if ((res = native_window_set_buffer_count(
+ anwConsumer, maxProducerBuffers + maxConsumerBuffers)) != OK) {
+ ALOGE("%s: Unable to set buffer count for stream %d", __FUNCTION__, mP010StreamId);
+ return res;
+ }
+
+ mSessionStatsBuilder.addStream(mP010StreamId);
+
+ run("JpegRCompositeStreamProc");
+
+ return NO_ERROR;
+}
+
+status_t JpegRCompositeStream::deleteInternalStreams() {
+ // The 'CameraDeviceClient' parent will delete the P010 stream
+ requestExit();
+
+ auto ret = join();
+ if (ret != OK) {
+ ALOGE("%s: Failed to join with the main processing thread: %s (%d)", __FUNCTION__,
+ strerror(-ret), ret);
+ }
+
+ if (mBlobStreamId >= 0) {
+ // Camera devices may not be valid after switching to offline mode.
+ // In this case, all offline streams including internal composite streams
+ // are managed and released by the offline session.
+ sp<CameraDeviceBase> device = mDevice.promote();
+ if (device.get() != nullptr) {
+ ret = device->deleteStream(mBlobStreamId);
+ }
+
+ mBlobStreamId = -1;
+ }
+
+ if (mOutputSurface != nullptr) {
+ mOutputSurface->disconnect(NATIVE_WINDOW_API_CAMERA);
+ mOutputSurface.clear();
+ }
+
+ return ret;
+}
+
+void JpegRCompositeStream::onFrameAvailable(const BufferItem& item) {
+ if (item.mDataSpace == kJpegDataSpace) {
+ ALOGV("%s: Jpeg buffer with ts: %" PRIu64 " ms. arrived!",
+ __func__, ns2ms(item.mTimestamp));
+
+ Mutex::Autolock l(mMutex);
+ if (!mErrorState) {
+ mInputJpegBuffers.push_back(item.mTimestamp);
+ mInputReadyCondition.signal();
+ }
+ } else if (item.mDataSpace == static_cast<android_dataspace_t>(mP010DataSpace)) {
+ ALOGV("%s: P010 buffer with ts: %" PRIu64 " ms. arrived!", __func__,
+ ns2ms(item.mTimestamp));
+
+ Mutex::Autolock l(mMutex);
+ if (!mErrorState) {
+ mInputP010Buffers.push_back(item.mTimestamp);
+ mInputReadyCondition.signal();
+ }
+ } else {
+ ALOGE("%s: Unexpected data space: 0x%x", __FUNCTION__, item.mDataSpace);
+ }
+}
+
+status_t JpegRCompositeStream::insertGbp(SurfaceMap* /*out*/outSurfaceMap,
+ Vector<int32_t> * /*out*/outputStreamIds, int32_t* /*out*/currentStreamId) {
+ if (outputStreamIds == nullptr) {
+ return BAD_VALUE;
+ }
+
+ if (outSurfaceMap->find(mP010StreamId) == outSurfaceMap->end()) {
+ outputStreamIds->push_back(mP010StreamId);
+ }
+ (*outSurfaceMap)[mP010StreamId].push_back(mP010SurfaceId);
+
+ if (mSupportInternalJpeg) {
+ if (outSurfaceMap->find(mBlobStreamId) == outSurfaceMap->end()) {
+ outputStreamIds->push_back(mBlobStreamId);
+ }
+ (*outSurfaceMap)[mBlobStreamId].push_back(mBlobSurfaceId);
+ }
+
+ if (currentStreamId != nullptr) {
+ *currentStreamId = mP010StreamId;
+ }
+
+ return NO_ERROR;
+}
+
+status_t JpegRCompositeStream::insertCompositeStreamIds(
+ std::vector<int32_t>* compositeStreamIds /*out*/) {
+ if (compositeStreamIds == nullptr) {
+ return BAD_VALUE;
+ }
+
+ compositeStreamIds->push_back(mP010StreamId);
+ if (mSupportInternalJpeg) {
+ compositeStreamIds->push_back(mBlobStreamId);
+ }
+
+ return OK;
+}
+
+void JpegRCompositeStream::onResultError(const CaptureResultExtras& resultExtras) {
+ // Processing can continue even in case of result errors.
+ // At the moment Jpeg/R composite stream processing relies mainly on static camera
+ // characteristics data. The actual result data can be used for the jpeg quality but
+ // in case it is absent we can default to maximum.
+ eraseResult(resultExtras.frameNumber);
+ mSessionStatsBuilder.incResultCounter(true /*dropped*/);
+}
+
+bool JpegRCompositeStream::onStreamBufferError(const CaptureResultExtras& resultExtras) {
+ bool ret = false;
+ // Buffer errors concerning internal composite streams should not be directly visible to
+ // camera clients. They must only receive a single buffer error with the public composite
+ // stream id.
+ if ((resultExtras.errorStreamId == mP010StreamId) ||
+ (resultExtras.errorStreamId == mBlobStreamId)) {
+ flagAnErrorFrameNumber(resultExtras.frameNumber);
+ ret = true;
+ }
+
+ return ret;
+}
+
+status_t JpegRCompositeStream::getCompositeStreamInfo(const OutputStreamInfo &streamInfo,
+ const CameraMetadata& staticInfo,
+ std::vector<OutputStreamInfo>* compositeOutput /*out*/) {
+ if (compositeOutput == nullptr) {
+ return BAD_VALUE;
+ }
+
+ int64_t dynamicRange, dataSpace;
+ deriveDynamicRangeAndDataspace(streamInfo.dynamicRangeProfile, &dynamicRange, &dataSpace);
+
+ compositeOutput->clear();
+ compositeOutput->push_back({});
+ (*compositeOutput)[0].width = streamInfo.width;
+ (*compositeOutput)[0].height = streamInfo.height;
+ (*compositeOutput)[0].format = kP010PixelFormat;
+ (*compositeOutput)[0].dataSpace = static_cast<android_dataspace_t>(dataSpace);
+ (*compositeOutput)[0].consumerUsage = GRALLOC_USAGE_SW_READ_OFTEN;
+ (*compositeOutput)[0].dynamicRangeProfile = dynamicRange;
+ (*compositeOutput)[0].colorSpace =
+ ANDROID_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP_UNSPECIFIED;
+
+ if (CameraProviderManager::isConcurrentDynamicRangeCaptureSupported(staticInfo,
+ streamInfo.dynamicRangeProfile,
+ ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD)) {
+ compositeOutput->push_back({});
+ (*compositeOutput)[1].width = streamInfo.width;
+ (*compositeOutput)[1].height = streamInfo.height;
+ (*compositeOutput)[1].format = HAL_PIXEL_FORMAT_BLOB;
+ (*compositeOutput)[1].dataSpace = kJpegDataSpace;
+ (*compositeOutput)[1].consumerUsage = GRALLOC_USAGE_SW_READ_OFTEN;
+ (*compositeOutput)[1].dynamicRangeProfile =
+ ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD;
+ (*compositeOutput)[1].colorSpace = streamInfo.colorSpace;
+ }
+
+ return NO_ERROR;
+}
+
+void JpegRCompositeStream::getStreamStats(hardware::CameraStreamStats* streamStats) {
+ if ((streamStats == nullptr) || (mFirstRequestLatency != -1)) {
+ return;
+ }
+
+ bool deviceError;
+ std::map<int, StreamStats> stats;
+ mSessionStatsBuilder.buildAndReset(&streamStats->mRequestCount, &streamStats->mErrorCount,
+ &deviceError, &stats);
+ if (stats.find(mP010StreamId) != stats.end()) {
+ streamStats->mWidth = mBlobWidth;
+ streamStats->mHeight = mBlobHeight;
+ streamStats->mFormat = HAL_PIXEL_FORMAT_BLOB;
+ streamStats->mDataSpace = static_cast<int>(kJpegRDataSpace);
+ streamStats->mDynamicRangeProfile = mP010DynamicRange;
+ streamStats->mColorSpace = mOutputColorSpace;
+ streamStats->mStreamUseCase = mOutputStreamUseCase;
+ streamStats->mStartLatencyMs = mFirstRequestLatency;
+ streamStats->mHistogramType = hardware::CameraStreamStats::HISTOGRAM_TYPE_CAPTURE_LATENCY;
+ streamStats->mHistogramBins.assign(stats[mP010StreamId].mCaptureLatencyBins.begin(),
+ stats[mP010StreamId].mCaptureLatencyBins.end());
+ streamStats->mHistogramCounts.assign(stats[mP010StreamId].mCaptureLatencyHistogram.begin(),
+ stats[mP010StreamId].mCaptureLatencyHistogram.end());
+ }
+}
+
+}; // namespace camera3
+}; // namespace android
diff --git a/services/camera/libcameraservice/api2/JpegRCompositeStream.h b/services/camera/libcameraservice/api2/JpegRCompositeStream.h
new file mode 100644
index 0000000..016d57c
--- /dev/null
+++ b/services/camera/libcameraservice/api2/JpegRCompositeStream.h
@@ -0,0 +1,155 @@
+/*
+ * Copyright (C) 2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef ANDROID_SERVERS_CAMERA_CAMERA3_JPEG_R_COMPOSITE_STREAM_H
+#define ANDROID_SERVERS_CAMERA_CAMERA3_JPEG_R_COMPOSITE_STREAM_H
+
+#include <gui/CpuConsumer.h>
+#include "aidl/android/hardware/graphics/common/Dataspace.h"
+#include "system/graphics-base-v1.1.h"
+
+#include "api1/client2/JpegProcessor.h"
+#include "utils/SessionStatsBuilder.h"
+
+#include "CompositeStream.h"
+
+namespace android {
+
+class CameraDeviceClient;
+class CameraMetadata;
+class Surface;
+
+namespace camera3 {
+
+class JpegRCompositeStream : public CompositeStream, public Thread,
+ public CpuConsumer::FrameAvailableListener {
+
+public:
+ JpegRCompositeStream(sp<CameraDeviceBase> device,
+ wp<hardware::camera2::ICameraDeviceCallbacks> cb);
+ ~JpegRCompositeStream() override;
+
+ static bool isJpegRCompositeStream(const sp<Surface> &surface);
+
+ // CompositeStream overrides
+ status_t createInternalStreams(const std::vector<sp<Surface>>& consumers,
+ bool hasDeferredConsumer, uint32_t width, uint32_t height, int format,
+ camera_stream_rotation_t rotation, int *id, const std::string& physicalCameraId,
+ const std::unordered_set<int32_t> &sensorPixelModesUsed,
+ std::vector<int> *surfaceIds,
+ int streamSetId, bool isShared, int32_t colorSpace,
+ int64_t dynamicProfile, int64_t streamUseCase, bool useReadoutTimestamp) override;
+ status_t deleteInternalStreams() override;
+ status_t configureStream() override;
+ status_t insertGbp(SurfaceMap* /*out*/outSurfaceMap, Vector<int32_t>* /*out*/outputStreamIds,
+ int32_t* /*out*/currentStreamId) override;
+ status_t insertCompositeStreamIds(std::vector<int32_t>* compositeStreamIds /*out*/) override;
+ int getStreamId() override { return mP010StreamId; }
+
+ // CpuConsumer listener implementation
+ void onFrameAvailable(const BufferItem& item) override;
+
+ // Return stream information about the internal camera streams
+ static status_t getCompositeStreamInfo(const OutputStreamInfo &streamInfo,
+ const CameraMetadata& ch, std::vector<OutputStreamInfo>* compositeOutput /*out*/);
+
+ // Get composite stream stats
+ void getStreamStats(hardware::CameraStreamStats* streamStats) override;
+
+protected:
+
+ bool threadLoop() override;
+ bool onStreamBufferError(const CaptureResultExtras& resultExtras) override;
+ void onResultError(const CaptureResultExtras& resultExtras) override;
+
+private:
+ struct InputFrame {
+ CpuConsumer::LockedBuffer p010Buffer;
+ CpuConsumer::LockedBuffer jpegBuffer;
+ CameraMetadata result;
+ bool error;
+ bool errorNotified;
+ int64_t frameNumber;
+ int32_t requestId;
+ nsecs_t requestTimeNs;
+
+ InputFrame() : error(false), errorNotified(false), frameNumber(-1), requestId(-1),
+ requestTimeNs(-1) { }
+ };
+
+ status_t processInputFrame(nsecs_t ts, const InputFrame &inputFrame);
+
+ // Buffer/Results handling
+ void compilePendingInputLocked();
+ void releaseInputFrameLocked(InputFrame *inputFrame /*out*/);
+ void releaseInputFramesLocked(int64_t currentTs);
+
+ // Find first complete and valid frame with smallest timestamp
+ bool getNextReadyInputLocked(int64_t *currentTs /*inout*/);
+
+ // Find next failing frame number with smallest timestamp and return respective frame number
+ int64_t getNextFailingInputLocked(int64_t *currentTs /*inout*/);
+
+ static void deriveDynamicRangeAndDataspace(int64_t dynamicProfile, int64_t* /*out*/dynamicRange,
+ int64_t* /*out*/dataSpace);
+
+ static const nsecs_t kWaitDuration = 10000000; // 10 ms
+ static const auto kP010PixelFormat = HAL_PIXEL_FORMAT_YCBCR_P010;
+ static const auto kP010DefaultDataSpace = HAL_DATASPACE_BT2020_ITU_HLG;
+ static const auto kP010DefaultDynamicRange =
+ ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_HLG10;
+ static const auto kJpegDataSpace = HAL_DATASPACE_V0_JFIF;
+ static const auto kJpegRDataSpace =
+ aidl::android::hardware::graphics::common::Dataspace::JPEG_R;
+
+ bool mSupportInternalJpeg = false;
+ int64_t mP010DataSpace = HAL_DATASPACE_BT2020_HLG;
+ int64_t mP010DynamicRange =
+ ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_HLG10;
+ int mBlobStreamId, mBlobSurfaceId, mP010StreamId, mP010SurfaceId;
+ size_t mBlobWidth, mBlobHeight;
+ sp<CpuConsumer> mBlobConsumer, mP010Consumer;
+ bool mP010BufferAcquired, mBlobBufferAcquired;
+ sp<Surface> mP010Surface, mBlobSurface, mOutputSurface;
+ int32_t mOutputColorSpace;
+ int64_t mOutputStreamUseCase;
+ nsecs_t mFirstRequestLatency;
+ sp<ProducerListener> mProducerListener;
+
+ ssize_t mMaxJpegBufferSize;
+ ssize_t mUHRMaxJpegBufferSize;
+
+ camera3::Size mDefaultMaxJpegSize;
+ camera3::Size mUHRMaxJpegSize;
+
+ // Keep all incoming P010 buffer timestamps pending further processing.
+ std::vector<int64_t> mInputP010Buffers;
+
+ // Keep all incoming Jpeg/Blob buffer timestamps pending further processing.
+ std::vector<int64_t> mInputJpegBuffers;
+
+ // Map of all input frames pending further processing.
+ std::unordered_map<int64_t, InputFrame> mPendingInputFrames;
+
+ const CameraMetadata mStaticInfo;
+
+ SessionStatsBuilder mSessionStatsBuilder;
+};
+
+}; //namespace camera3
+}; //namespace android
+
+#endif
diff --git a/services/camera/libcameraservice/common/Camera2ClientBase.cpp b/services/camera/libcameraservice/common/Camera2ClientBase.cpp
index 0fe15a8..a54ba9b 100644
--- a/services/camera/libcameraservice/common/Camera2ClientBase.cpp
+++ b/services/camera/libcameraservice/common/Camera2ClientBase.cpp
@@ -38,7 +38,6 @@
#include "device3/aidl/AidlCamera3Device.h"
#include "device3/hidl/HidlCamera3Device.h"
#include "utils/CameraThreadState.h"
-#include "utils/CameraServiceProxyWrapper.h"
namespace android {
@@ -50,6 +49,7 @@
Camera2ClientBase<TClientBase>::Camera2ClientBase(
const sp<CameraService>& cameraService,
const sp<TCamCallbacks>& remoteCallback,
+ std::shared_ptr<CameraServiceProxyWrapper> cameraServiceProxyWrapper,
const std::string& clientPackageName,
bool systemNativeClient,
const std::optional<std::string>& clientFeatureId,
@@ -67,6 +67,7 @@
clientFeatureId, cameraId, api1CameraId, cameraFacing, sensorOrientation, clientPid,
clientUid, servicePid, overrideToPortrait),
mSharedCameraCallbacks(remoteCallback),
+ mCameraServiceProxyWrapper(cameraServiceProxyWrapper),
mDeviceActive(false), mApi1CameraId(api1CameraId)
{
ALOGI("Camera %s: Opened. Client: %s (PID %d, UID %d)", cameraId.c_str(),
@@ -104,11 +105,6 @@
TClientBase::mCameraIdStr.c_str());
status_t res;
- // Verify ops permissions
- res = TClientBase::startCameraOps();
- if (res != OK) {
- return res;
- }
IPCTransport providerTransport = IPCTransport::INVALID;
res = providerPtr->getCameraIdIPCTransport(TClientBase::mCameraIdStr,
&providerTransport);
@@ -118,12 +114,14 @@
switch (providerTransport) {
case IPCTransport::HIDL:
mDevice =
- new HidlCamera3Device(TClientBase::mCameraIdStr, mOverrideForPerfClass,
+ new HidlCamera3Device(mCameraServiceProxyWrapper,
+ TClientBase::mCameraIdStr, mOverrideForPerfClass,
TClientBase::mOverrideToPortrait, mLegacyClient);
break;
case IPCTransport::AIDL:
mDevice =
- new AidlCamera3Device(TClientBase::mCameraIdStr, mOverrideForPerfClass,
+ new AidlCamera3Device(mCameraServiceProxyWrapper,
+ TClientBase::mCameraIdStr, mOverrideForPerfClass,
TClientBase::mOverrideToPortrait, mLegacyClient);
break;
default:
@@ -144,12 +142,30 @@
return res;
}
+ // Verify ops permissions
+ res = TClientBase::startCameraOps();
+ if (res != OK) {
+ TClientBase::finishCameraOps();
+ return res;
+ }
+
wp<NotificationListener> weakThis(this);
res = mDevice->setNotifyCallback(weakThis);
+ if (res != OK) {
+ ALOGE("%s: Camera %s: Unable to set notify callback: %s (%d)",
+ __FUNCTION__, TClientBase::mCameraIdStr.c_str(), strerror(-res), res);
+ return res;
+ }
/** Start watchdog thread */
- mCameraServiceWatchdog = new CameraServiceWatchdog();
- mCameraServiceWatchdog->run("Camera2ClientBaseWatchdog");
+ mCameraServiceWatchdog = new CameraServiceWatchdog(TClientBase::mCameraIdStr,
+ mCameraServiceProxyWrapper);
+ res = mCameraServiceWatchdog->run("Camera2ClientBaseWatchdog");
+ if (res != OK) {
+ ALOGE("%s: Unable to start camera service watchdog thread: %s (%d)",
+ __FUNCTION__, strerror(-res), res);
+ return res;
+ }
return OK;
}
@@ -167,8 +183,8 @@
mCameraServiceWatchdog.clear();
}
- ALOGI("Closed Camera %s. Client was: %s (PID %d, UID %u)",
- TClientBase::mCameraIdStr.c_str(),
+ ALOGI("%s: Client object's dtor for Camera Id %s completed. Client was: %s (PID %d, UID %u)",
+ __FUNCTION__, TClientBase::mCameraIdStr.c_str(),
TClientBase::mClientPackageName.c_str(),
mInitialClientPid, TClientBase::mClientUid);
}
@@ -374,7 +390,7 @@
TClientBase::mCameraIdStr.c_str(), res);
return res;
}
- CameraServiceProxyWrapper::logActive(TClientBase::mCameraIdStr, maxPreviewFps);
+ mCameraServiceProxyWrapper->logActive(TClientBase::mCameraIdStr, maxPreviewFps);
}
mDeviceActive = true;
@@ -393,7 +409,7 @@
ALOGE("%s: Camera %s: Error finishing streaming ops: %d", __FUNCTION__,
TClientBase::mCameraIdStr.c_str(), res);
}
- CameraServiceProxyWrapper::logIdle(TClientBase::mCameraIdStr,
+ mCameraServiceProxyWrapper->logIdle(TClientBase::mCameraIdStr,
requestCount, resultErrorCount, deviceError, userTag, videoStabilizationMode,
streamStats);
}
diff --git a/services/camera/libcameraservice/common/Camera2ClientBase.h b/services/camera/libcameraservice/common/Camera2ClientBase.h
index 2ad2367..30c763d 100644
--- a/services/camera/libcameraservice/common/Camera2ClientBase.h
+++ b/services/camera/libcameraservice/common/Camera2ClientBase.h
@@ -19,6 +19,7 @@
#include "common/CameraDeviceBase.h"
#include "camera/CaptureResult.h"
+#include "utils/CameraServiceProxyWrapper.h"
#include "CameraServiceWatchdog.h"
namespace android {
@@ -48,6 +49,7 @@
// TODO: too many params, move into a ClientArgs<T>
Camera2ClientBase(const sp<CameraService>& cameraService,
const sp<TCamCallbacks>& remoteCallback,
+ std::shared_ptr<CameraServiceProxyWrapper> cameraServiceProxyWrapper,
const std::string& clientPackageName,
bool systemNativeClient,
const std::optional<std::string>& clientFeatureId,
@@ -142,6 +144,7 @@
pid_t mInitialClientPid;
bool mOverrideForPerfClass = false;
bool mLegacyClient = false;
+ std::shared_ptr<CameraServiceProxyWrapper> mCameraServiceProxyWrapper;
virtual sp<IBinder> asBinderWrapper() {
return IInterface::asBinder(this);
diff --git a/services/camera/libcameraservice/common/CameraDeviceBase.h b/services/camera/libcameraservice/common/CameraDeviceBase.h
index be38b9f..017da0f 100644
--- a/services/camera/libcameraservice/common/CameraDeviceBase.h
+++ b/services/camera/libcameraservice/common/CameraDeviceBase.h
@@ -113,6 +113,8 @@
*/
virtual const CameraMetadata& infoPhysical(const std::string& physicalId) const = 0;
+ virtual bool isCompositeJpegRDisabled() const { return false; };
+
struct PhysicalCameraSettings {
std::string cameraId;
CameraMetadata metadata;
@@ -126,6 +128,9 @@
int32_t mOriginalTestPatternMode = 0;
int32_t mOriginalTestPatternData[4] = {};
+ // Original value of SETTINGS_OVERRIDE so that they can be restored if
+ // camera service isn't overwriting the app value.
+ int32_t mOriginalSettingsOverride = ANDROID_CONTROL_SETTINGS_OVERRIDE_OFF;
};
typedef List<PhysicalCameraSettings> PhysicalCameraSettingsList;
@@ -192,7 +197,10 @@
int64_t dynamicProfile = ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD,
int64_t streamUseCase = ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_DEFAULT,
int timestampBase = OutputConfiguration::TIMESTAMP_BASE_DEFAULT,
- int mirrorMode = OutputConfiguration::MIRROR_MODE_AUTO) = 0;
+ int mirrorMode = OutputConfiguration::MIRROR_MODE_AUTO,
+ int32_t colorSpace = ANDROID_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP_UNSPECIFIED,
+ bool useReadoutTimestamp = false)
+ = 0;
/**
* Create an output stream of the requested size, format, rotation and
@@ -213,7 +221,10 @@
int64_t dynamicProfile = ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD,
int64_t streamUseCase = ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_DEFAULT,
int timestampBase = OutputConfiguration::TIMESTAMP_BASE_DEFAULT,
- int mirrorMode = OutputConfiguration::MIRROR_MODE_AUTO) = 0;
+ int mirrorMode = OutputConfiguration::MIRROR_MODE_AUTO,
+ int32_t colorSpace = ANDROID_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP_UNSPECIFIED,
+ bool useReadoutTimestamp = false)
+ = 0;
/**
* Create an input stream of width, height, and format.
@@ -235,11 +246,13 @@
bool dataSpaceOverridden;
android_dataspace originalDataSpace;
int64_t dynamicRangeProfile;
+ int32_t colorSpace;
StreamInfo() : width(0), height(0), format(0), formatOverridden(false), originalFormat(0),
dataSpace(HAL_DATASPACE_UNKNOWN), dataSpaceOverridden(false),
originalDataSpace(HAL_DATASPACE_UNKNOWN),
- dynamicRangeProfile(ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD){}
+ dynamicRangeProfile(ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD),
+ colorSpace(ANDROID_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP_UNSPECIFIED) {}
/**
* Check whether the format matches the current or the original one in case
* it got overridden.
@@ -434,6 +447,14 @@
camera_metadata_enum_android_scaler_rotate_and_crop_t rotateAndCropValue) = 0;
/**
+ * Set the current behavior for the AUTOFRAMING control when in AUTO.
+ *
+ * The value must be one of the AUTOFRAMING_* values besides AUTO.
+ */
+ virtual status_t setAutoframingAutoBehavior(
+ camera_metadata_enum_android_control_autoframing_t autoframingValue) = 0;
+
+ /**
* Whether camera muting (producing black-only output) is supported.
*
* Calling setCameraMute(true) when this returns false will return an
@@ -449,6 +470,14 @@
virtual status_t setCameraMute(bool enabled) = 0;
/**
+ * Whether the camera device supports zoom override.
+ */
+ virtual bool supportsZoomOverride() = 0;
+
+ // Set/reset zoom override
+ virtual status_t setZoomOverride(int32_t zoomOverride) = 0;
+
+ /**
* Enable/disable camera service watchdog
*/
virtual status_t setCameraServiceWatchdog(bool enabled) = 0;
@@ -459,6 +488,11 @@
virtual wp<camera3::StatusTracker> getStatusTracker() = 0;
/**
+ * If the device is in error state
+ */
+ virtual bool hasDeviceError() = 0;
+
+ /**
* Set bitmask for image dump flag
*/
void setImageDumpMask(int mask) { mImageDumpMask = mask; }
diff --git a/services/camera/libcameraservice/common/CameraProviderManager.cpp b/services/camera/libcameraservice/common/CameraProviderManager.cpp
index 230d5b6..23051ef 100644
--- a/services/camera/libcameraservice/common/CameraProviderManager.cpp
+++ b/services/camera/libcameraservice/common/CameraProviderManager.cpp
@@ -14,6 +14,8 @@
* limitations under the License.
*/
+#include "system/graphics-base-v1.0.h"
+#include "system/graphics-base-v1.1.h"
#define LOG_TAG "CameraProviderManager"
#define ATRACE_TAG ATRACE_TAG_CAMERA
//#define LOG_NDEBUG 0
@@ -40,6 +42,7 @@
#include <cutils/properties.h>
#include <hwbinder/IPCThreadState.h>
#include <utils/Trace.h>
+#include <ui/PublicFormat.h>
#include <camera/StringUtils.h>
#include "api2/HeicCompositeStream.h"
@@ -60,6 +63,8 @@
} // anonymous namespace
const float CameraProviderManager::kDepthARTolerance = .1f;
+const bool CameraProviderManager::kFrameworkJpegRDisabled =
+ property_get_bool("ro.camera.disableJpegR", false);
CameraProviderManager::HidlServiceInteractionProxyImpl
CameraProviderManager::sHidlServiceInteractionProxy{};
@@ -312,6 +317,18 @@
return deviceInfo->supportNativeZoomRatio();
}
+bool CameraProviderManager::isCompositeJpegRDisabled(const std::string &id) const {
+ std::lock_guard<std::mutex> lock(mInterfaceMutex);
+ return isCompositeJpegRDisabledLocked(id);
+}
+
+bool CameraProviderManager::isCompositeJpegRDisabledLocked(const std::string &id) const {
+ auto deviceInfo = findDeviceInfoLocked(id);
+ if (deviceInfo == nullptr) return false;
+
+ return deviceInfo->isCompositeJpegRDisabled();
+}
+
status_t CameraProviderManager::getResourceCost(const std::string &id,
CameraResourceCost* cost) const {
std::lock_guard<std::mutex> lock(mInterfaceMutex);
@@ -1000,19 +1017,21 @@
auto availableDurations = ch.find(tag);
if (availableDurations.count > 0) {
// Duration entry contains 4 elements (format, width, height, duration)
- for (size_t i = 0; i < availableDurations.count; i += 4) {
- for (const auto& size : sizes) {
- int64_t width = std::get<0>(size);
- int64_t height = std::get<1>(size);
+ for (const auto& size : sizes) {
+ int64_t width = std::get<0>(size);
+ int64_t height = std::get<1>(size);
+ for (size_t i = 0; i < availableDurations.count; i += 4) {
if ((availableDurations.data.i64[i] == format) &&
(availableDurations.data.i64[i+1] == width) &&
(availableDurations.data.i64[i+2] == height)) {
durations->push_back(availableDurations.data.i64[i+3]);
+ break;
}
}
}
}
}
+
void CameraProviderManager::ProviderInfo::DeviceInfo3::getSupportedDynamicDepthDurations(
const std::vector<int64_t>& depthDurations, const std::vector<int64_t>& blobDurations,
std::vector<int64_t> *dynamicDepthDurations /*out*/) {
@@ -1072,6 +1091,212 @@
}
}
+bool CameraProviderManager::isConcurrentDynamicRangeCaptureSupported(
+ const CameraMetadata& deviceInfo, int64_t profile, int64_t concurrentProfile) {
+ auto entry = deviceInfo.find(ANDROID_REQUEST_AVAILABLE_CAPABILITIES);
+ if (entry.count == 0) {
+ return false;
+ }
+
+ const auto it = std::find(entry.data.u8, entry.data.u8 + entry.count,
+ ANDROID_REQUEST_AVAILABLE_CAPABILITIES_DYNAMIC_RANGE_TEN_BIT);
+ if (it == entry.data.u8 + entry.count) {
+ return false;
+ }
+
+ entry = deviceInfo.find(ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP);
+ if (entry.count == 0 || ((entry.count % 3) != 0)) {
+ return false;
+ }
+
+ for (size_t i = 0; i < entry.count; i += 3) {
+ if (entry.data.i64[i] == profile) {
+ if ((entry.data.i64[i+1] == 0) || (entry.data.i64[i+1] & concurrentProfile)) {
+ return true;
+ }
+ }
+ }
+
+ return false;
+}
+
+status_t CameraProviderManager::ProviderInfo::DeviceInfo3::deriveJpegRTags(bool maxResolution) {
+ if (kFrameworkJpegRDisabled || mCompositeJpegRDisabled) {
+ return OK;
+ }
+
+ const int32_t scalerSizesTag =
+ SessionConfigurationUtils::getAppropriateModeTag(
+ ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS, maxResolution);
+ const int32_t scalerMinFrameDurationsTag = SessionConfigurationUtils::getAppropriateModeTag(
+ ANDROID_SCALER_AVAILABLE_MIN_FRAME_DURATIONS, maxResolution);
+ const int32_t scalerStallDurationsTag =
+ SessionConfigurationUtils::getAppropriateModeTag(
+ ANDROID_SCALER_AVAILABLE_STALL_DURATIONS, maxResolution);
+
+ const int32_t jpegRSizesTag =
+ SessionConfigurationUtils::getAppropriateModeTag(
+ ANDROID_JPEGR_AVAILABLE_JPEG_R_STREAM_CONFIGURATIONS, maxResolution);
+ const int32_t jpegRStallDurationsTag =
+ SessionConfigurationUtils::getAppropriateModeTag(
+ ANDROID_JPEGR_AVAILABLE_JPEG_R_STALL_DURATIONS, maxResolution);
+ const int32_t jpegRMinFrameDurationsTag =
+ SessionConfigurationUtils::getAppropriateModeTag(
+ ANDROID_JPEGR_AVAILABLE_JPEG_R_MIN_FRAME_DURATIONS, maxResolution);
+
+ auto& c = mCameraCharacteristics;
+ std::vector<int32_t> supportedChTags;
+ auto chTags = c.find(ANDROID_REQUEST_AVAILABLE_CHARACTERISTICS_KEYS);
+ if (chTags.count == 0) {
+ ALOGE("%s: No supported camera characteristics keys!", __FUNCTION__);
+ return BAD_VALUE;
+ }
+
+ std::vector<std::tuple<size_t, size_t>> supportedP010Sizes, supportedBlobSizes;
+ auto capabilities = c.find(ANDROID_REQUEST_AVAILABLE_CAPABILITIES);
+ if (capabilities.count == 0) {
+ ALOGE("%s: Supported camera capabilities is empty!", __FUNCTION__);
+ return BAD_VALUE;
+ }
+
+ auto end = capabilities.data.u8 + capabilities.count;
+ bool isTenBitOutputSupported = std::find(capabilities.data.u8, end,
+ ANDROID_REQUEST_AVAILABLE_CAPABILITIES_DYNAMIC_RANGE_TEN_BIT) != end;
+ if (!isTenBitOutputSupported) {
+ // No 10-bit support, nothing more to do.
+ return OK;
+ }
+
+ if (!isConcurrentDynamicRangeCaptureSupported(c,
+ ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_HLG10,
+ ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD) &&
+ !property_get_bool("ro.camera.enableCompositeAPI0JpegR", false)) {
+ // API0, P010 only Jpeg/R support is meant to be used only as a reference due to possible
+ // impact on quality and performance.
+ // This data path will be turned off by default and individual device builds must enable
+ // 'ro.camera.enableCompositeAPI0JpegR' in order to experiment using it.
+ mCompositeJpegRDisabled = true;
+ return OK;
+ }
+
+ getSupportedSizes(c, scalerSizesTag,
+ static_cast<android_pixel_format_t>(HAL_PIXEL_FORMAT_BLOB), &supportedBlobSizes);
+ getSupportedSizes(c, scalerSizesTag,
+ static_cast<android_pixel_format_t>(HAL_PIXEL_FORMAT_YCBCR_P010), &supportedP010Sizes);
+ auto it = supportedP010Sizes.begin();
+ while (it != supportedP010Sizes.end()) {
+ if (std::find(supportedBlobSizes.begin(), supportedBlobSizes.end(), *it) ==
+ supportedBlobSizes.end()) {
+ it = supportedP010Sizes.erase(it);
+ } else {
+ it++;
+ }
+ }
+ if (supportedP010Sizes.empty()) {
+ // Nothing to do in this case.
+ return OK;
+ }
+
+ std::vector<int32_t> jpegREntries;
+ for (const auto& it : supportedP010Sizes) {
+ int32_t entry[4] = {HAL_PIXEL_FORMAT_BLOB, static_cast<int32_t> (std::get<0>(it)),
+ static_cast<int32_t> (std::get<1>(it)),
+ ANDROID_JPEGR_AVAILABLE_JPEG_R_STREAM_CONFIGURATIONS_OUTPUT };
+ jpegREntries.insert(jpegREntries.end(), entry, entry + 4);
+ }
+
+ std::vector<int64_t> blobMinDurations, blobStallDurations;
+ std::vector<int64_t> jpegRMinDurations, jpegRStallDurations;
+
+ // We use the jpeg stall and min frame durations to approximate the respective jpeg/r
+ // durations.
+ getSupportedDurations(c, scalerMinFrameDurationsTag, HAL_PIXEL_FORMAT_BLOB,
+ supportedP010Sizes, &blobMinDurations);
+ getSupportedDurations(c, scalerStallDurationsTag, HAL_PIXEL_FORMAT_BLOB,
+ supportedP010Sizes, &blobStallDurations);
+ if (blobStallDurations.empty() || blobMinDurations.empty() ||
+ supportedP010Sizes.size() != blobMinDurations.size() ||
+ blobMinDurations.size() != blobStallDurations.size()) {
+ ALOGE("%s: Unexpected number of available blob durations! %zu vs. %zu with "
+ "supportedP010Sizes size: %zu", __FUNCTION__, blobMinDurations.size(),
+ blobStallDurations.size(), supportedP010Sizes.size());
+ return BAD_VALUE;
+ }
+
+ auto itDuration = blobMinDurations.begin();
+ auto itSize = supportedP010Sizes.begin();
+ while (itDuration != blobMinDurations.end()) {
+ int64_t entry[4] = {HAL_PIXEL_FORMAT_BLOB, static_cast<int32_t> (std::get<0>(*itSize)),
+ static_cast<int32_t> (std::get<1>(*itSize)), *itDuration};
+ jpegRMinDurations.insert(jpegRMinDurations.end(), entry, entry + 4);
+ itDuration++; itSize++;
+ }
+
+ itDuration = blobStallDurations.begin();
+ itSize = supportedP010Sizes.begin();
+ while (itDuration != blobStallDurations.end()) {
+ int64_t entry[4] = {HAL_PIXEL_FORMAT_BLOB, static_cast<int32_t> (std::get<0>(*itSize)),
+ static_cast<int32_t> (std::get<1>(*itSize)), *itDuration};
+ jpegRStallDurations.insert(jpegRStallDurations.end(), entry, entry + 4);
+ itDuration++; itSize++;
+ }
+
+ supportedChTags.reserve(chTags.count + 3);
+ supportedChTags.insert(supportedChTags.end(), chTags.data.i32,
+ chTags.data.i32 + chTags.count);
+ supportedChTags.push_back(jpegRSizesTag);
+ supportedChTags.push_back(jpegRMinFrameDurationsTag);
+ supportedChTags.push_back(jpegRStallDurationsTag);
+ c.update(jpegRSizesTag, jpegREntries.data(), jpegREntries.size());
+ c.update(jpegRMinFrameDurationsTag, jpegRMinDurations.data(), jpegRMinDurations.size());
+ c.update(jpegRStallDurationsTag, jpegRStallDurations.data(), jpegRStallDurations.size());
+ c.update(ANDROID_REQUEST_AVAILABLE_CHARACTERISTICS_KEYS, supportedChTags.data(),
+ supportedChTags.size());
+
+ auto colorSpaces = c.find(ANDROID_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP);
+ if (colorSpaces.count > 0 && !maxResolution) {
+ bool displayP3Support = false;
+ int64_t dynamicRange = ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD;
+ for (size_t i = 0; i < colorSpaces.count; i += 3) {
+ auto colorSpace = colorSpaces.data.i64[i];
+ auto format = colorSpaces.data.i64[i+1];
+ bool formatMatch = (format == static_cast<int64_t>(PublicFormat::JPEG)) ||
+ (format == static_cast<int64_t>(PublicFormat::UNKNOWN));
+ bool colorSpaceMatch =
+ colorSpace == ANDROID_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP_DISPLAY_P3;
+ if (formatMatch && colorSpaceMatch) {
+ displayP3Support = true;
+ }
+
+ // Jpeg/R will support the same dynamic range profiles as P010
+ if (format == static_cast<int64_t>(PublicFormat::YCBCR_P010)) {
+ dynamicRange |= colorSpaces.data.i64[i+2];
+ }
+ }
+ if (displayP3Support) {
+ std::vector<int64_t> supportedColorSpaces;
+ // Jpeg/R must support the default system as well as the display P3 color space
+ supportedColorSpaces.reserve(colorSpaces.count + 3*2);
+ supportedColorSpaces.insert(supportedColorSpaces.end(), colorSpaces.data.i64,
+ colorSpaces.data.i64 + colorSpaces.count);
+
+ supportedColorSpaces.push_back(static_cast<int64_t>(
+ ANDROID_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP_SRGB));
+ supportedColorSpaces.push_back(static_cast<int64_t>(PublicFormat::JPEG_R));
+ supportedColorSpaces.push_back(dynamicRange);
+
+ supportedColorSpaces.push_back(static_cast<int64_t>(
+ ANDROID_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP_DISPLAY_P3));
+ supportedColorSpaces.push_back(static_cast<int64_t>(PublicFormat::JPEG_R));
+ supportedColorSpaces.push_back(dynamicRange);
+ c.update(ANDROID_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP,
+ supportedColorSpaces.data(), supportedColorSpaces.size());
+ }
+ }
+
+ return OK;
+}
+
status_t CameraProviderManager::ProviderInfo::DeviceInfo3::addDynamicDepthTags(
bool maxResolution) {
const int32_t depthExclTag = ANDROID_DEPTH_DEPTH_IS_EXCLUSIVE;
@@ -1356,6 +1581,19 @@
return res;
}
+status_t CameraProviderManager::ProviderInfo::DeviceInfo3::addAutoframingTags() {
+ status_t res = OK;
+ auto& c = mCameraCharacteristics;
+
+ auto availableAutoframingEntry = c.find(ANDROID_CONTROL_AUTOFRAMING_AVAILABLE);
+ if (availableAutoframingEntry.count == 0) {
+ uint8_t defaultAutoframingEntry = ANDROID_CONTROL_AUTOFRAMING_AVAILABLE_FALSE;
+ res = c.update(ANDROID_CONTROL_AUTOFRAMING_AVAILABLE,
+ &defaultAutoframingEntry, 1);
+ }
+ return res;
+}
+
status_t CameraProviderManager::ProviderInfo::DeviceInfo3::addPreCorrectionActiveArraySize() {
status_t res = OK;
auto& c = mCameraCharacteristics;
@@ -2302,6 +2540,10 @@
(mDeviceStateOrientationMap.find(newState) != mDeviceStateOrientationMap.end())) {
mCameraCharacteristics.update(ANDROID_SENSOR_ORIENTATION,
&mDeviceStateOrientationMap[newState], 1);
+ if (mCameraCharNoPCOverride.get() != nullptr) {
+ mCameraCharNoPCOverride->update(ANDROID_SENSOR_ORIENTATION,
+ &mDeviceStateOrientationMap[newState], 1);
+ }
}
}
diff --git a/services/camera/libcameraservice/common/CameraProviderManager.h b/services/camera/libcameraservice/common/CameraProviderManager.h
index 98298ea..a2ec576 100644
--- a/services/camera/libcameraservice/common/CameraProviderManager.h
+++ b/services/camera/libcameraservice/common/CameraProviderManager.h
@@ -248,6 +248,11 @@
bool supportNativeZoomRatio(const std::string &id) const;
/**
+ * Return true if the camera device has no composite Jpeg/R support.
+ */
+ bool isCompositeJpegRDisabled(const std::string &id) const;
+
+ /**
* Return the resource cost of this camera device
*/
status_t getResourceCost(const std::string &id,
@@ -407,7 +412,11 @@
status_t notifyUsbDeviceEvent(int32_t eventId, const std::string &usbDeviceId);
+ static bool isConcurrentDynamicRangeCaptureSupported(const CameraMetadata& deviceInfo,
+ int64_t profile, int64_t concurrentProfile);
+
static const float kDepthARTolerance;
+ static const bool kFrameworkJpegRDisabled;
private:
// All private members, unless otherwise noted, expect mInterfaceMutex to be locked before use
mutable std::mutex mInterfaceMutex;
@@ -564,6 +573,7 @@
bool hasFlashUnit() const { return mHasFlashUnit; }
bool supportNativeZoomRatio() const { return mSupportNativeZoomRatio; }
+ bool isCompositeJpegRDisabled() const { return mCompositeJpegRDisabled; }
virtual status_t setTorchMode(bool enabled) = 0;
virtual status_t turnOnTorchWithStrengthLevel(int32_t torchStrength) = 0;
virtual status_t getTorchStrengthLevel(int32_t *torchStrength) = 0;
@@ -605,13 +615,14 @@
mParentProvider(parentProvider), mTorchStrengthLevel(0),
mTorchMaximumStrengthLevel(0), mTorchDefaultStrengthLevel(0),
mHasFlashUnit(false), mSupportNativeZoomRatio(false),
- mPublicCameraIds(publicCameraIds) {}
+ mPublicCameraIds(publicCameraIds), mCompositeJpegRDisabled(false) {}
virtual ~DeviceInfo() {}
protected:
bool mHasFlashUnit; // const after constructor
bool mSupportNativeZoomRatio; // const after constructor
const std::vector<std::string>& mPublicCameraIds;
+ bool mCompositeJpegRDisabled;
};
std::vector<std::unique_ptr<DeviceInfo>> mDevices;
std::unordered_set<std::string> mUniqueCameraIds;
@@ -673,7 +684,9 @@
status_t fixupTorchStrengthTags();
status_t addDynamicDepthTags(bool maxResolution = false);
status_t deriveHeicTags(bool maxResolution = false);
+ status_t deriveJpegRTags(bool maxResolution = false);
status_t addRotateCropTags();
+ status_t addAutoframingTags();
status_t addPreCorrectionActiveArraySize();
status_t addReadoutTimestampTag(bool readoutTimestampSupported = true);
@@ -798,6 +811,8 @@
// No guarantees on the order of traversal
ProviderInfo::DeviceInfo* findDeviceInfoLocked(const std::string& id) const;
+ bool isCompositeJpegRDisabledLocked(const std::string &id) const;
+
// Map external providers to USB devices in order to handle USB hotplug
// events for lazy HALs
std::pair<std::vector<std::string>, sp<ProviderInfo>>
diff --git a/services/camera/libcameraservice/common/aidl/AidlProviderInfo.cpp b/services/camera/libcameraservice/common/aidl/AidlProviderInfo.cpp
index 06d97ce..5e79d6b 100644
--- a/services/camera/libcameraservice/common/aidl/AidlProviderInfo.cpp
+++ b/services/camera/libcameraservice/common/aidl/AidlProviderInfo.cpp
@@ -483,6 +483,9 @@
}
}
+ mCompositeJpegRDisabled = mCameraCharacteristics.exists(
+ ANDROID_JPEGR_AVAILABLE_JPEG_R_STREAM_CONFIGURATIONS);
+
mSystemCameraKind = getSystemCameraKind();
status_t res = fixupMonochromeTags();
@@ -501,8 +504,13 @@
ALOGE("%s: Unable to derive HEIC tags based on camera and media capabilities: %s (%d)",
__FUNCTION__, strerror(-res), res);
}
-
- if (camera3::SessionConfigurationUtils::isUltraHighResolutionSensor(mCameraCharacteristics)) {
+ res = deriveJpegRTags();
+ if (OK != res) {
+ ALOGE("%s: Unable to derive Jpeg/R tags based on camera and media capabilities: %s (%d)",
+ __FUNCTION__, strerror(-res), res);
+ }
+ using camera3::SessionConfigurationUtils::supportsUltraHighResolutionCapture;
+ if (supportsUltraHighResolutionCapture(mCameraCharacteristics)) {
status_t status = addDynamicDepthTags(/*maxResolution*/true);
if (OK != status) {
ALOGE("%s: Failed appending dynamic depth tags for maximum resolution mode: %s (%d)",
@@ -514,6 +522,12 @@
ALOGE("%s: Unable to derive HEIC tags based on camera and media capabilities for"
"maximum resolution mode: %s (%d)", __FUNCTION__, strerror(-status), status);
}
+
+ status = deriveJpegRTags(/*maxResolution*/true);
+ if (OK != status) {
+ ALOGE("%s: Unable to derive Jpeg/R tags based on camera and media capabilities for"
+ "maximum resolution mode: %s (%d)", __FUNCTION__, strerror(-status), status);
+ }
}
res = addRotateCropTags();
@@ -521,6 +535,11 @@
ALOGE("%s: Unable to add default SCALER_ROTATE_AND_CROP tags: %s (%d)", __FUNCTION__,
strerror(-res), res);
}
+ res = addAutoframingTags();
+ if (OK != res) {
+ ALOGE("%s: Unable to add default AUTOFRAMING tags: %s (%d)", __FUNCTION__,
+ strerror(-res), res);
+ }
res = addPreCorrectionActiveArraySize();
if (OK != res) {
ALOGE("%s: Unable to add PRE_CORRECTION_ACTIVE_ARRAY_SIZE: %s (%d)", __FUNCTION__,
@@ -550,6 +569,11 @@
"ANDROID_FLASH_INFO_STRENGTH_MAXIMUM_LEVEL tags: %s (%d)", __FUNCTION__,
strerror(-res), res);
}
+
+ // b/247038031: In case of system_server crash, camera_server is
+ // restarted as well. If flashlight is turned on before the crash, it
+ // may remain stuck on. As a workaround, set torch mode to OFF.
+ interface->setTorchMode(false);
} else {
mHasFlashUnit = false;
}
@@ -711,8 +735,8 @@
camera::device::StreamConfiguration streamConfiguration;
bool earlyExit = false;
auto bRes = SessionConfigurationUtils::convertToHALStreamCombination(configuration,
- mId, mCameraCharacteristics, getMetadata, mPhysicalIds,
- streamConfiguration, overrideForPerfClass, &earlyExit);
+ mId, mCameraCharacteristics, mCompositeJpegRDisabled, getMetadata,
+ mPhysicalIds, streamConfiguration, overrideForPerfClass, &earlyExit);
if (!bRes.isOk()) {
return UNKNOWN_ERROR;
@@ -777,7 +801,8 @@
bStatus =
SessionConfigurationUtils::convertToHALStreamCombination(
cameraIdAndSessionConfig.mSessionConfiguration,
- cameraId, deviceInfo, getMetadata,
+ cameraId, deviceInfo,
+ mManager->isCompositeJpegRDisabledLocked(cameraId), getMetadata,
physicalCameraIds, streamConfiguration,
overrideForPerfClass, &shouldExit);
if (!bStatus.isOk()) {
diff --git a/services/camera/libcameraservice/common/hidl/HidlProviderInfo.cpp b/services/camera/libcameraservice/common/hidl/HidlProviderInfo.cpp
index 3b501dc..bf7a471 100644
--- a/services/camera/libcameraservice/common/hidl/HidlProviderInfo.cpp
+++ b/services/camera/libcameraservice/common/hidl/HidlProviderInfo.cpp
@@ -624,7 +624,7 @@
__FUNCTION__, strerror(-res), res);
}
- if (SessionConfigurationUtils::isUltraHighResolutionSensor(mCameraCharacteristics)) {
+ if (SessionConfigurationUtils::supportsUltraHighResolutionCapture(mCameraCharacteristics)) {
status_t status = addDynamicDepthTags(/*maxResolution*/true);
if (OK != status) {
ALOGE("%s: Failed appending dynamic depth tags for maximum resolution mode: %s (%d)",
@@ -643,6 +643,11 @@
ALOGE("%s: Unable to add default SCALER_ROTATE_AND_CROP tags: %s (%d)", __FUNCTION__,
strerror(-res), res);
}
+ res = addAutoframingTags();
+ if (OK != res) {
+ ALOGE("%s: Unable to add default AUTOFRAMING tags: %s (%d)", __FUNCTION__,
+ strerror(-res), res);
+ }
res = addPreCorrectionActiveArraySize();
if (OK != res) {
ALOGE("%s: Unable to add PRE_CORRECTION_ACTIVE_ARRAY_SIZE: %s (%d)", __FUNCTION__,
diff --git a/services/camera/libcameraservice/device3/Camera3Device.cpp b/services/camera/libcameraservice/device3/Camera3Device.cpp
index 3f380ea..a0e2778 100644
--- a/services/camera/libcameraservice/device3/Camera3Device.cpp
+++ b/services/camera/libcameraservice/device3/Camera3Device.cpp
@@ -54,18 +54,19 @@
#include <android/hardware/camera/device/3.7/ICameraInjectionSession.h>
#include <android/hardware/camera2/ICameraDeviceUser.h>
-#include "utils/CameraTraces.h"
-#include "mediautils/SchedulingPolicyService.h"
-#include "device3/Camera3Device.h"
-#include "device3/Camera3OutputStream.h"
-#include "device3/Camera3InputStream.h"
-#include "device3/Camera3FakeStream.h"
-#include "device3/Camera3SharedOutputStream.h"
#include "CameraService.h"
+#include "aidl/android/hardware/graphics/common/Dataspace.h"
+#include "aidl/AidlUtils.h"
+#include "device3/Camera3Device.h"
+#include "device3/Camera3FakeStream.h"
+#include "device3/Camera3InputStream.h"
+#include "device3/Camera3OutputStream.h"
+#include "device3/Camera3SharedOutputStream.h"
+#include "mediautils/SchedulingPolicyService.h"
#include "utils/CameraThreadState.h"
+#include "utils/CameraTraces.h"
#include "utils/SessionConfigurationUtils.h"
#include "utils/TraceHFR.h"
-#include "utils/CameraServiceProxyWrapper.h"
#include <algorithm>
#include <tuple>
@@ -75,12 +76,15 @@
namespace android {
-Camera3Device::Camera3Device(const std::string &id, bool overrideForPerfClass, bool overrideToPortrait,
+Camera3Device::Camera3Device(std::shared_ptr<CameraServiceProxyWrapper>& cameraServiceProxyWrapper,
+ const std::string &id, bool overrideForPerfClass, bool overrideToPortrait,
bool legacyClient):
+ mCameraServiceProxyWrapper(cameraServiceProxyWrapper),
mId(id),
mLegacyClient(legacyClient),
mOperatingMode(NO_MODE),
mIsConstrainedHighSpeedConfiguration(false),
+ mIsCompositeJpegRDisabled(false),
mStatus(STATUS_UNINITIALIZED),
mStatusWaiters(0),
mUsePartialResult(false),
@@ -101,6 +105,8 @@
mOverrideToPortrait(overrideToPortrait),
mRotateAndCropOverride(ANDROID_SCALER_ROTATE_AND_CROP_NONE),
mComposerOutput(false),
+ mAutoframingOverride(ANDROID_CONTROL_AUTOFRAMING_OFF),
+ mSettingsOverride(-1),
mActivePhysicalId("")
{
ATRACE_CALL();
@@ -170,10 +176,21 @@
}
}
+ camera_metadata_entry_t availableSettingsOverrides = mDeviceInfo.find(
+ ANDROID_CONTROL_AVAILABLE_SETTINGS_OVERRIDES);
+ for (size_t i = 0; i < availableSettingsOverrides.count; i++) {
+ if (availableSettingsOverrides.data.i32[i] ==
+ ANDROID_CONTROL_SETTINGS_OVERRIDE_ZOOM) {
+ mSupportZoomOverride = true;
+ break;
+ }
+ }
+
/** Start up request queue thread */
mRequestThread = createNewRequestThread(
this, mStatusTracker, mInterface, sessionParamKeys,
- mUseHalBufManager, mSupportCameraMute, mOverrideToPortrait);
+ mUseHalBufManager, mSupportCameraMute, mOverrideToPortrait,
+ mSupportZoomOverride);
res = mRequestThread->run((std::string("C3Dev-") + mId + "-ReqQueue").c_str());
if (res != OK) {
SET_ERR_L("Unable to start request queue thread: %s (%d)",
@@ -221,7 +238,7 @@
mZoomRatioMappers[mId] = ZoomRatioMapper(&mDeviceInfo,
mSupportNativeZoomRatio, usePrecorrectArray);
- if (SessionConfigurationUtils::isUltraHighResolutionSensor(mDeviceInfo)) {
+ if (SessionConfigurationUtils::supportsUltraHighResolutionCapture(mDeviceInfo)) {
mUHRCropAndMeteringRegionMappers[mId] =
UHRCropAndMeteringRegionMapper(mDeviceInfo, usePrecorrectArray);
}
@@ -234,7 +251,7 @@
mInjectionMethods = createCamera3DeviceInjectionMethods(this);
/** Start watchdog thread */
- mCameraServiceWatchdog = new CameraServiceWatchdog();
+ mCameraServiceWatchdog = new CameraServiceWatchdog(mId, mCameraServiceProxyWrapper);
res = mCameraServiceWatchdog->run("CameraServiceWatchdog");
if (res != OK) {
SET_ERR_L("Unable to start camera service watchdog thread: %s (%d)",
@@ -251,113 +268,110 @@
status_t Camera3Device::disconnectImpl() {
ATRACE_CALL();
+ Mutex::Autolock il(mInterfaceLock);
+
ALOGI("%s: E", __FUNCTION__);
status_t res = OK;
std::vector<wp<Camera3StreamInterface>> streams;
+ nsecs_t maxExpectedDuration = getExpectedInFlightDuration();
{
- Mutex::Autolock il(mInterfaceLock);
- nsecs_t maxExpectedDuration = getExpectedInFlightDuration();
- {
- Mutex::Autolock l(mLock);
- if (mStatus == STATUS_UNINITIALIZED) return res;
+ Mutex::Autolock l(mLock);
+ if (mStatus == STATUS_UNINITIALIZED) return res;
- if (mRequestThread != NULL) {
- if (mStatus == STATUS_ACTIVE || mStatus == STATUS_ERROR) {
- res = mRequestThread->clear();
+ if (mRequestThread != NULL) {
+ if (mStatus == STATUS_ACTIVE || mStatus == STATUS_ERROR) {
+ res = mRequestThread->clear();
+ if (res != OK) {
+ SET_ERR_L("Can't stop streaming");
+ // Continue to close device even in case of error
+ } else {
+ res = waitUntilStateThenRelock(/*active*/ false, maxExpectedDuration,
+ /*requestThreadInvocation*/ false);
if (res != OK) {
- SET_ERR_L("Can't stop streaming");
+ SET_ERR_L("Timeout waiting for HAL to drain (% " PRIi64 " ns)",
+ maxExpectedDuration);
// Continue to close device even in case of error
- } else {
- res = waitUntilStateThenRelock(/*active*/ false, maxExpectedDuration);
- if (res != OK) {
- SET_ERR_L("Timeout waiting for HAL to drain (% " PRIi64 " ns)",
- maxExpectedDuration);
- // Continue to close device even in case of error
- }
}
}
- // Signal to request thread that we're not expecting any
- // more requests. This will be true since once we're in
- // disconnect and we've cleared off the request queue, the
- // request thread can't receive any new requests through
- // binder calls - since disconnect holds
- // mBinderSerialization lock.
- mRequestThread->setRequestClearing();
}
+ // Signal to request thread that we're not expecting any
+ // more requests. This will be true since once we're in
+ // disconnect and we've cleared off the request queue, the
+ // request thread can't receive any new requests through
+ // binder calls - since disconnect holds
+ // mBinderSerialization lock.
+ mRequestThread->setRequestClearing();
+ }
- if (mStatus == STATUS_ERROR) {
- CLOGE("Shutting down in an error state");
- }
+ if (mStatus == STATUS_ERROR) {
+ CLOGE("Shutting down in an error state");
+ }
- if (mStatusTracker != NULL) {
- mStatusTracker->requestExit();
- }
+ if (mStatusTracker != NULL) {
+ mStatusTracker->requestExit();
+ }
- if (mRequestThread != NULL) {
- mRequestThread->requestExit();
- }
+ if (mRequestThread != NULL) {
+ mRequestThread->requestExit();
+ }
- streams.reserve(mOutputStreams.size() + (mInputStream != nullptr ? 1 : 0));
- for (size_t i = 0; i < mOutputStreams.size(); i++) {
- streams.push_back(mOutputStreams[i]);
- }
- if (mInputStream != nullptr) {
- streams.push_back(mInputStream);
- }
+ streams.reserve(mOutputStreams.size() + (mInputStream != nullptr ? 1 : 0));
+ for (size_t i = 0; i < mOutputStreams.size(); i++) {
+ streams.push_back(mOutputStreams[i]);
+ }
+ if (mInputStream != nullptr) {
+ streams.push_back(mInputStream);
}
}
- // Joining done without holding mLock and mInterfaceLock, otherwise deadlocks may ensue
- // as the threads try to access parent state (b/143513518)
+
+ // Joining done without holding mLock, otherwise deadlocks may ensue
+ // as the threads try to access parent state
if (mRequestThread != NULL && mStatus != STATUS_ERROR) {
// HAL may be in a bad state, so waiting for request thread
// (which may be stuck in the HAL processCaptureRequest call)
// could be dangerous.
- // give up mInterfaceLock here and then lock it again. Could this lead
- // to other deadlocks
mRequestThread->join();
}
+
+ if (mStatusTracker != NULL) {
+ mStatusTracker->join();
+ }
+
+ if (mInjectionMethods->isInjecting()) {
+ mInjectionMethods->stopInjection();
+ }
+
+ HalInterface* interface;
{
- Mutex::Autolock il(mInterfaceLock);
- if (mStatusTracker != NULL) {
- mStatusTracker->join();
- }
+ Mutex::Autolock l(mLock);
+ mRequestThread.clear();
+ Mutex::Autolock stLock(mTrackerLock);
+ mStatusTracker.clear();
+ interface = mInterface.get();
+ }
- if (mInjectionMethods->isInjecting()) {
- mInjectionMethods->stopInjection();
- }
+ // Call close without internal mutex held, as the HAL close may need to
+ // wait on assorted callbacks,etc, to complete before it can return.
+ mCameraServiceWatchdog->WATCH(interface->close());
- HalInterface* interface;
- {
- Mutex::Autolock l(mLock);
- mRequestThread.clear();
- Mutex::Autolock stLock(mTrackerLock);
- mStatusTracker.clear();
- interface = mInterface.get();
- }
+ flushInflightRequests();
- // Call close without internal mutex held, as the HAL close may need to
- // wait on assorted callbacks,etc, to complete before it can return.
- mCameraServiceWatchdog->WATCH(interface->close());
+ {
+ Mutex::Autolock l(mLock);
+ mInterface->clear();
+ mOutputStreams.clear();
+ mInputStream.clear();
+ mDeletedStreams.clear();
+ mBufferManager.clear();
+ internalUpdateStatusLocked(STATUS_UNINITIALIZED);
+ }
- flushInflightRequests();
-
- {
- Mutex::Autolock l(mLock);
- mInterface->clear();
- mOutputStreams.clear();
- mInputStream.clear();
- mDeletedStreams.clear();
- mBufferManager.clear();
- internalUpdateStatusLocked(STATUS_UNINITIALIZED);
- }
-
- for (auto& weakStream : streams) {
- sp<Camera3StreamInterface> stream = weakStream.promote();
- if (stream != nullptr) {
- ALOGE("%s: Stream %d leaked! strong reference (%d)!",
- __FUNCTION__, stream->getId(), stream->getStrongCount() - 1);
- }
+ for (auto& weakStream : streams) {
+ sp<Camera3StreamInterface> stream = weakStream.promote();
+ if (stream != nullptr) {
+ ALOGE("%s: Stream %d leaked! strong reference (%d)!",
+ __FUNCTION__, stream->getId(), stream->getStrongCount() - 1);
}
}
ALOGI("%s: X", __FUNCTION__);
@@ -409,7 +423,7 @@
// Get max jpeg size (area-wise) for default sensor pixel mode
camera3::Size maxDefaultJpegResolution =
SessionConfigurationUtils::getMaxJpegResolution(info,
- /*isUltraHighResolutionSensor*/false);
+ /*supportsUltraHighResolutionCapture*/false);
// Get max jpeg size (area-wise) for max resolution sensor pixel mode / 0 if
// not ultra high res sensor
camera3::Size uhrMaxJpegResolution =
@@ -827,7 +841,7 @@
}
if (res == OK) {
- waitUntilStateThenRelock(/*active*/true, kActiveTimeout);
+ waitUntilStateThenRelock(/*active*/true, kActiveTimeout, /*requestThreadInvocation*/false);
if (res != OK) {
SET_ERR_L("Can't transition to active in %f seconds!",
kActiveTimeout/1e9);
@@ -952,7 +966,8 @@
break;
case STATUS_ACTIVE:
ALOGV("%s: Stopping activity to reconfigure streams", __FUNCTION__);
- res = internalPauseAndWaitLocked(maxExpectedDuration);
+ res = internalPauseAndWaitLocked(maxExpectedDuration,
+ /*requestThreadInvocation*/ false);
if (res != OK) {
SET_ERR_L("Can't pause captures to reconfigure streams!");
return res;
@@ -1003,7 +1018,7 @@
const std::unordered_set<int32_t> &sensorPixelModesUsed,
std::vector<int> *surfaceIds, int streamSetId, bool isShared, bool isMultiResolution,
uint64_t consumerUsage, int64_t dynamicRangeProfile, int64_t streamUseCase,
- int timestampBase, int mirrorMode) {
+ int timestampBase, int mirrorMode, int32_t colorSpace, bool useReadoutTimestamp) {
ATRACE_CALL();
if (consumer == nullptr) {
@@ -1017,7 +1032,7 @@
return createStream(consumers, /*hasDeferredConsumer*/ false, width, height,
format, dataSpace, rotation, id, physicalCameraId, sensorPixelModesUsed, surfaceIds,
streamSetId, isShared, isMultiResolution, consumerUsage, dynamicRangeProfile,
- streamUseCase, timestampBase, mirrorMode);
+ streamUseCase, timestampBase, mirrorMode, colorSpace, useReadoutTimestamp);
}
static bool isRawFormat(int format) {
@@ -1039,7 +1054,7 @@
const std::unordered_set<int32_t> &sensorPixelModesUsed,
std::vector<int> *surfaceIds, int streamSetId, bool isShared, bool isMultiResolution,
uint64_t consumerUsage, int64_t dynamicRangeProfile, int64_t streamUseCase,
- int timestampBase, int mirrorMode) {
+ int timestampBase, int mirrorMode, int32_t colorSpace, bool useReadoutTimestamp) {
ATRACE_CALL();
Mutex::Autolock il(mInterfaceLock);
@@ -1048,10 +1063,11 @@
ALOGV("Camera %s: Creating new stream %d: %d x %d, format %d, dataspace %d rotation %d"
" consumer usage %" PRIu64 ", isShared %d, physicalCameraId %s, isMultiResolution %d"
" dynamicRangeProfile 0x%" PRIx64 ", streamUseCase %" PRId64 ", timestampBase %d,"
- " mirrorMode %d",
+ " mirrorMode %d, colorSpace %d, useReadoutTimestamp %d",
mId.c_str(), mNextStreamId, width, height, format, dataSpace, rotation,
consumerUsage, isShared, physicalCameraId.c_str(), isMultiResolution,
- dynamicRangeProfile, streamUseCase, timestampBase, mirrorMode);
+ dynamicRangeProfile, streamUseCase, timestampBase, mirrorMode, colorSpace,
+ useReadoutTimestamp);
status_t res;
bool wasActive = false;
@@ -1069,7 +1085,8 @@
break;
case STATUS_ACTIVE:
ALOGV("%s: Stopping activity to reconfigure streams", __FUNCTION__);
- res = internalPauseAndWaitLocked(maxExpectedDuration);
+ res = internalPauseAndWaitLocked(maxExpectedDuration,
+ /*requestThreadInvocation*/ false);
if (res != OK) {
SET_ERR_L("Can't pause captures to reconfigure streams!");
return res;
@@ -1122,7 +1139,7 @@
width, height, blobBufferSize, format, dataSpace, rotation,
mTimestampOffset, physicalCameraId, sensorPixelModesUsed, transport, streamSetId,
isMultiResolution, dynamicRangeProfile, streamUseCase, mDeviceTimeBaseIsRealtime,
- timestampBase, mirrorMode);
+ timestampBase, mirrorMode, colorSpace, useReadoutTimestamp);
} else if (format == HAL_PIXEL_FORMAT_RAW_OPAQUE) {
bool maxResolution =
sensorPixelModesUsed.find(ANDROID_SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION) !=
@@ -1137,25 +1154,25 @@
width, height, rawOpaqueBufferSize, format, dataSpace, rotation,
mTimestampOffset, physicalCameraId, sensorPixelModesUsed, transport, streamSetId,
isMultiResolution, dynamicRangeProfile, streamUseCase, mDeviceTimeBaseIsRealtime,
- timestampBase, mirrorMode);
+ timestampBase, mirrorMode, colorSpace, useReadoutTimestamp);
} else if (isShared) {
newStream = new Camera3SharedOutputStream(mNextStreamId, consumers,
width, height, format, consumerUsage, dataSpace, rotation,
mTimestampOffset, physicalCameraId, sensorPixelModesUsed, transport, streamSetId,
mUseHalBufManager, dynamicRangeProfile, streamUseCase, mDeviceTimeBaseIsRealtime,
- timestampBase, mirrorMode);
+ timestampBase, mirrorMode, colorSpace, useReadoutTimestamp);
} else if (consumers.size() == 0 && hasDeferredConsumer) {
newStream = new Camera3OutputStream(mNextStreamId,
width, height, format, consumerUsage, dataSpace, rotation,
mTimestampOffset, physicalCameraId, sensorPixelModesUsed, transport, streamSetId,
isMultiResolution, dynamicRangeProfile, streamUseCase, mDeviceTimeBaseIsRealtime,
- timestampBase, mirrorMode);
+ timestampBase, mirrorMode, colorSpace, useReadoutTimestamp);
} else {
newStream = new Camera3OutputStream(mNextStreamId, consumers[0],
width, height, format, dataSpace, rotation,
mTimestampOffset, physicalCameraId, sensorPixelModesUsed, transport, streamSetId,
isMultiResolution, dynamicRangeProfile, streamUseCase, mDeviceTimeBaseIsRealtime,
- timestampBase, mirrorMode);
+ timestampBase, mirrorMode, colorSpace, useReadoutTimestamp);
}
size_t consumerCount = consumers.size();
@@ -1243,6 +1260,7 @@
streamInfo->dataSpaceOverridden = stream->isDataSpaceOverridden();
streamInfo->originalDataSpace = stream->getOriginalDataSpace();
streamInfo->dynamicRangeProfile = stream->getDynamicRangeProfile();
+ streamInfo->colorSpace = stream->getColorSpace();
return OK;
}
@@ -1364,6 +1382,7 @@
filteredParams.unlock(meta);
if (availableSessionKeys.count > 0) {
bool rotateAndCropSessionKey = false;
+ bool autoframingSessionKey = false;
for (size_t i = 0; i < availableSessionKeys.count; i++) {
camera_metadata_ro_entry entry = params.find(
availableSessionKeys.data.i32[i]);
@@ -1373,23 +1392,37 @@
if (ANDROID_SCALER_ROTATE_AND_CROP == availableSessionKeys.data.i32[i]) {
rotateAndCropSessionKey = true;
}
+ if (ANDROID_CONTROL_AUTOFRAMING == availableSessionKeys.data.i32[i]) {
+ autoframingSessionKey = true;
+ }
}
- if (rotateAndCropSessionKey) {
+ if (rotateAndCropSessionKey || autoframingSessionKey) {
sp<CaptureRequest> request = new CaptureRequest();
PhysicalCameraSettings settingsList;
settingsList.metadata = filteredParams;
request->mSettingsList.push_back(settingsList);
- auto rotateAndCropEntry = filteredParams.find(ANDROID_SCALER_ROTATE_AND_CROP);
- if (rotateAndCropEntry.count > 0 &&
- rotateAndCropEntry.data.u8[0] == ANDROID_SCALER_ROTATE_AND_CROP_AUTO) {
- request->mRotateAndCropAuto = true;
- } else {
- request->mRotateAndCropAuto = false;
+ if (rotateAndCropSessionKey) {
+ auto rotateAndCropEntry = filteredParams.find(ANDROID_SCALER_ROTATE_AND_CROP);
+ if (rotateAndCropEntry.count > 0 &&
+ rotateAndCropEntry.data.u8[0] == ANDROID_SCALER_ROTATE_AND_CROP_AUTO) {
+ request->mRotateAndCropAuto = true;
+ } else {
+ request->mRotateAndCropAuto = false;
+ }
+
+ overrideAutoRotateAndCrop(request, mOverrideToPortrait, mRotateAndCropOverride);
}
- overrideAutoRotateAndCrop(request, mOverrideToPortrait, mRotateAndCropOverride);
+ if (autoframingSessionKey) {
+ auto autoframingEntry = filteredParams.find(ANDROID_CONTROL_AUTOFRAMING);
+ if (autoframingEntry.count > 0 &&
+ autoframingEntry.data.u8[0] == ANDROID_CONTROL_AUTOFRAMING_AUTO) {
+ overrideAutoframing(request, mAutoframingOverride);
+ }
+ }
+
filteredParams = request->mSettingsList.begin()->metadata;
}
}
@@ -1486,6 +1519,13 @@
&kDefaultJpegQuality, 1);
}
+ // Fill in AUTOFRAMING if not available
+ if (!mRequestTemplateCache[templateId].exists(ANDROID_CONTROL_AUTOFRAMING)) {
+ static const uint8_t kDefaultAutoframingMode = ANDROID_CONTROL_AUTOFRAMING_OFF;
+ mRequestTemplateCache[templateId].update(ANDROID_CONTROL_AUTOFRAMING,
+ &kDefaultAutoframingMode, 1);
+ }
+
*request = mRequestTemplateCache[templateId];
mLastTemplateId = templateId;
}
@@ -1519,7 +1559,8 @@
}
ALOGV("%s: Camera %s: Waiting until idle (%" PRIi64 "ns)", __FUNCTION__, mId.c_str(),
maxExpectedDuration);
- status_t res = waitUntilStateThenRelock(/*active*/ false, maxExpectedDuration);
+ status_t res = waitUntilStateThenRelock(/*active*/ false, maxExpectedDuration,
+ /*requestThreadInvocation*/ false);
if (res != OK) {
mStatusTracker->dumpActiveComponents();
SET_ERR_L("Error waiting for HAL to drain: %s (%d)", strerror(-res),
@@ -1530,12 +1571,14 @@
void Camera3Device::internalUpdateStatusLocked(Status status) {
mStatus = status;
- mRecentStatusUpdates.add(mStatus);
+ mStatusIsInternal = mPauseStateNotify ? true : false;
+ mRecentStatusUpdates.add({mStatus, mStatusIsInternal});
mStatusChanged.broadcast();
}
// Pause to reconfigure
-status_t Camera3Device::internalPauseAndWaitLocked(nsecs_t maxExpectedDuration) {
+status_t Camera3Device::internalPauseAndWaitLocked(nsecs_t maxExpectedDuration,
+ bool requestThreadInvocation) {
if (mRequestThread.get() != nullptr) {
mRequestThread->setPaused(true);
} else {
@@ -1544,8 +1587,10 @@
ALOGV("%s: Camera %s: Internal wait until idle (% " PRIi64 " ns)", __FUNCTION__, mId.c_str(),
maxExpectedDuration);
- status_t res = waitUntilStateThenRelock(/*active*/ false, maxExpectedDuration);
+ status_t res = waitUntilStateThenRelock(/*active*/ false, maxExpectedDuration,
+ requestThreadInvocation);
if (res != OK) {
+ mStatusTracker->dumpActiveComponents();
SET_ERR_L("Can't idle device in %f seconds!",
maxExpectedDuration/1e9);
}
@@ -1561,7 +1606,9 @@
ALOGV("%s: Camera %s: Internal wait until active (% " PRIi64 " ns)", __FUNCTION__, mId.c_str(),
kActiveTimeout);
- res = waitUntilStateThenRelock(/*active*/ true, kActiveTimeout);
+ // internalResumeLocked is always called from a binder thread.
+ res = waitUntilStateThenRelock(/*active*/ true, kActiveTimeout,
+ /*requestThreadInvocation*/ false);
if (res != OK) {
SET_ERR_L("Can't transition to active in %f seconds!",
kActiveTimeout/1e9);
@@ -1570,7 +1617,8 @@
return OK;
}
-status_t Camera3Device::waitUntilStateThenRelock(bool active, nsecs_t timeout) {
+status_t Camera3Device::waitUntilStateThenRelock(bool active, nsecs_t timeout,
+ bool requestThreadInvocation) {
status_t res = OK;
size_t startIndex = 0;
@@ -1599,7 +1647,12 @@
bool stateSeen = false;
nsecs_t startTime = systemTime();
do {
- if (active == (mStatus == STATUS_ACTIVE)) {
+ if (mStatus == STATUS_ERROR) {
+ // Device in error state. Return right away.
+ break;
+ }
+ if (active == (mStatus == STATUS_ACTIVE) &&
+ (requestThreadInvocation || !mStatusIsInternal)) {
// Desired state is current
break;
}
@@ -1621,9 +1674,19 @@
"%s: Skipping status updates in Camera3Device, may result in deadlock.",
__FUNCTION__);
- // Encountered desired state since we began waiting
+ // Encountered desired state since we began waiting. Internal invocations coming from
+ // request threads (such as reconfigureCamera) should be woken up immediately, whereas
+ // invocations from binder threads (such as createInputStream) should only be woken up if
+ // they are not paused. This avoids intermediate pause signals from reconfigureCamera as it
+ // changes the status to active right after.
for (size_t i = startIndex; i < mRecentStatusUpdates.size(); i++) {
- if (active == (mRecentStatusUpdates[i] == STATUS_ACTIVE) ) {
+ if (mRecentStatusUpdates[i].status == STATUS_ERROR) {
+ // Device in error state. Return right away.
+ stateSeen = true;
+ break;
+ }
+ if (active == (mRecentStatusUpdates[i].status == STATUS_ACTIVE) &&
+ (requestThreadInvocation || !mRecentStatusUpdates[i].isInternal)) {
stateSeen = true;
break;
}
@@ -1855,7 +1918,7 @@
camera_metadata_entry minDurations =
mDeviceInfo.find(ANDROID_SCALER_AVAILABLE_MIN_FRAME_DURATIONS);
for (size_t i = 0; i < minDurations.count; i += 4) {
- if (minDurations.data.i64[i] == stream->getFormat()
+ if (minDurations.data.i64[i] == stream->getOriginalFormat()
&& minDurations.data.i64[i+1] == stream->getWidth()
&& minDurations.data.i64[i+2] == stream->getHeight()) {
int64_t minFrameDuration = minDurations.data.i64[i+3];
@@ -1911,10 +1974,11 @@
streamUseCase = camera3Stream->getStreamUseCase();
}
streamStats.emplace_back(stream->getWidth(), stream->getHeight(),
- stream->getFormat(), streamMaxPreviewFps, stream->getDataSpace(), usage,
+ stream->getOriginalFormat(), streamMaxPreviewFps, stream->getDataSpace(), usage,
stream->getMaxHalBuffers(),
stream->getMaxTotalBuffers() - stream->getMaxHalBuffers(),
- stream->getDynamicRangeProfile(), streamUseCase);
+ stream->getDynamicRangeProfile(), streamUseCase,
+ stream->getColorSpace());
}
}
}
@@ -2164,6 +2228,15 @@
newRequest->mRotateAndCropAuto = false;
}
+ auto autoframingEntry =
+ newRequest->mSettingsList.begin()->metadata.find(ANDROID_CONTROL_AUTOFRAMING);
+ if (autoframingEntry.count > 0 &&
+ autoframingEntry.data.u8[0] == ANDROID_CONTROL_AUTOFRAMING_AUTO) {
+ newRequest->mAutoframingAuto = true;
+ } else {
+ newRequest->mAutoframingAuto = false;
+ }
+
auto zoomRatioEntry =
newRequest->mSettingsList.begin()->metadata.find(ANDROID_CONTROL_ZOOM_RATIO);
if (zoomRatioEntry.count > 0 &&
@@ -2195,6 +2268,16 @@
}
}
+ if (mSupportZoomOverride) {
+ for (auto& settings : newRequest->mSettingsList) {
+ auto settingsOverrideEntry =
+ settings.metadata.find(ANDROID_CONTROL_SETTINGS_OVERRIDE);
+ settings.mOriginalSettingsOverride = settingsOverrideEntry.count > 0 ?
+ settingsOverrideEntry.data.i32[0] :
+ ANDROID_CONTROL_SETTINGS_OVERRIDE_OFF;
+ }
+ }
+
return newRequest;
}
@@ -2251,7 +2334,9 @@
nsecs_t startTime = systemTime();
- Mutex::Autolock il(mInterfaceLock);
+ // We must not hold mInterfaceLock here since this function is called from
+ // RequestThread::threadLoop and holding mInterfaceLock could lead to
+ // deadlocks (http://b/143513518)
nsecs_t maxExpectedDuration = getExpectedInFlightDuration();
Mutex::Autolock l(mLock);
@@ -2268,7 +2353,16 @@
mPauseStateNotify = true;
mStatusTracker->markComponentIdle(clientStatusId, Fence::NO_FENCE);
- rc = internalPauseAndWaitLocked(maxExpectedDuration);
+ // This is essentially the same as calling rc = internalPauseAndWaitLocked(..), except that
+ // we don't want to call setPaused(true) to avoid it interfering with setPaused() called
+ // from createInputStream/createStream.
+ rc = waitUntilStateThenRelock(/*active*/ false, maxExpectedDuration,
+ /*requestThreadInvocation*/ true);
+ if (rc != OK) {
+ mStatusTracker->dumpActiveComponents();
+ SET_ERR_L("Can't idle device in %f seconds!",
+ maxExpectedDuration/1e9);
+ }
}
if (rc == NO_ERROR) {
@@ -2283,6 +2377,9 @@
//present streams end up with outstanding buffers that will
//not get drained.
internalUpdateStatusLocked(STATUS_ACTIVE);
+
+ mCameraServiceProxyWrapper->logStreamConfigured(mId, mOperatingMode,
+ true /*internalReconfig*/, ns2ms(systemTime() - startTime));
} else if (rc == DEAD_OBJECT) {
// DEAD_OBJECT can be returned if either the consumer surface is
// abandoned, or the HAL has died.
@@ -2298,9 +2395,6 @@
ALOGE("%s: Failed to pause streaming: %d", __FUNCTION__, rc);
}
- CameraServiceProxyWrapper::logStreamConfigured(mId, mOperatingMode, true /*internalReconfig*/,
- ns2ms(systemTime() - startTime));
-
if (markClientActive) {
mStatusTracker->markComponentActive(clientStatusId);
}
@@ -2426,7 +2520,10 @@
if (outputStream->format == HAL_PIXEL_FORMAT_BLOB) {
size_t k = i + ((mInputStream != nullptr) ? 1 : 0); // Input stream if present should
// always occupy the initial entry.
- if (outputStream->data_space == HAL_DATASPACE_V0_JFIF) {
+ if ((outputStream->data_space == HAL_DATASPACE_V0_JFIF) ||
+ (outputStream->data_space ==
+ static_cast<android_dataspace_t>(
+ aidl::android::hardware::graphics::common::Dataspace::JPEG_R))) {
bufferSizes[k] = static_cast<uint32_t>(
getJpegBufferSize(infoPhysical(outputStream->physical_camera_id),
outputStream->width, outputStream->height));
@@ -2456,8 +2553,9 @@
// max_buffers, usage, and priv fields, as well as data_space and format
// fields for IMPLEMENTATION_DEFINED formats.
+ int64_t logId = mCameraServiceProxyWrapper->getCurrentLogIdForCamera(mId);
const camera_metadata_t *sessionBuffer = sessionParams.getAndLock();
- res = mInterface->configureStreams(sessionBuffer, &config, bufferSizes);
+ res = mInterface->configureStreams(sessionBuffer, &config, bufferSizes, logId);
sessionParams.unlock(sessionBuffer);
if (res == BAD_VALUE) {
@@ -2720,7 +2818,7 @@
int32_t numBuffers, CaptureResultExtras resultExtras, bool hasInput,
bool hasAppCallback, nsecs_t minExpectedDuration, nsecs_t maxExpectedDuration,
bool isFixedFps, const std::set<std::set<std::string>>& physicalCameraIds,
- bool isStillCapture, bool isZslCapture, bool rotateAndCropAuto,
+ bool isStillCapture, bool isZslCapture, bool rotateAndCropAuto, bool autoframingAuto,
const std::set<std::string>& cameraIdsWithZoom,
const SurfaceMap& outputSurfaces, nsecs_t requestTimeNs) {
ATRACE_CALL();
@@ -2729,8 +2827,8 @@
ssize_t res;
res = mInFlightMap.add(frameNumber, InFlightRequest(numBuffers, resultExtras, hasInput,
hasAppCallback, minExpectedDuration, maxExpectedDuration, isFixedFps, physicalCameraIds,
- isStillCapture, isZslCapture, rotateAndCropAuto, cameraIdsWithZoom, requestTimeNs,
- outputSurfaces));
+ isStillCapture, isZslCapture, rotateAndCropAuto, autoframingAuto, cameraIdsWithZoom,
+ requestTimeNs, outputSurfaces));
if (res < 0) return res;
if (mInFlightMap.size() == 1) {
@@ -2920,7 +3018,8 @@
sp<HalInterface> interface, const Vector<int32_t>& sessionParamKeys,
bool useHalBufManager,
bool supportCameraMute,
- bool overrideToPortrait) :
+ bool overrideToPortrait,
+ bool supportSettingsOverride) :
Thread(/*canCallJava*/false),
mParent(parent),
mStatusTracker(statusTracker),
@@ -2938,9 +3037,11 @@
mCurrentAfTriggerId(0),
mCurrentPreCaptureTriggerId(0),
mRotateAndCropOverride(ANDROID_SCALER_ROTATE_AND_CROP_NONE),
+ mAutoframingOverride(ANDROID_CONTROL_AUTOFRAMING_OFF),
mComposerOutput(false),
mCameraMute(ANDROID_SENSOR_TEST_PATTERN_MODE_OFF),
mCameraMuteChanged(false),
+ mSettingsOverride(ANDROID_CONTROL_SETTINGS_OVERRIDE_OFF),
mRepeatingLastFrameNumber(
hardware::camera2::ICameraDeviceUser::NO_IN_FLIGHT_REPEATING_FRAMES),
mPrepareVideoStream(false),
@@ -2950,8 +3051,10 @@
mLatestSessionParams(sessionParamKeys.size()),
mUseHalBufManager(useHalBufManager),
mSupportCameraMute(supportCameraMute),
- mOverrideToPortrait(overrideToPortrait) {
+ mOverrideToPortrait(overrideToPortrait),
+ mSupportSettingsOverride(supportSettingsOverride) {
mStatusId = statusTracker->addComponent("RequestThread");
+ mVndkVersion = property_get_int32("ro.vndk.version", __ANDROID_API_FUTURE__);
}
Camera3Device::RequestThread::~RequestThread() {}
@@ -3477,6 +3580,7 @@
// The display rotation there will be compensated by NATIVE_WINDOW_TRANSFORM_INVERSE_DISPLAY
captureRequest->mRotateAndCropChanged = (mComposerOutput && !mOverrideToPortrait) ? false :
overrideAutoRotateAndCrop(captureRequest);
+ captureRequest->mAutoframingChanged = overrideAutoframing(captureRequest);
}
// 'mNextRequests' will at this point contain either a set of HFR batched requests
@@ -3502,7 +3606,6 @@
if (parent != nullptr) {
mReconfigured |= parent->reconfigureCamera(mLatestSessionParams, mStatusId);
}
- setPaused(false);
if (mNextRequests[0].captureRequest->mInputStream != nullptr) {
mNextRequests[0].captureRequest->mInputStream->restoreConfiguredState();
@@ -3625,12 +3728,15 @@
mPrevTriggers = triggerCount;
bool testPatternChanged = overrideTestPattern(captureRequest);
+ bool settingsOverrideChanged = overrideSettingsOverride(captureRequest);
// If the request is the same as last, or we had triggers now or last time or
// changing overrides this time
bool newRequest =
(mPrevRequest != captureRequest || triggersMixedIn ||
- captureRequest->mRotateAndCropChanged || testPatternChanged) &&
+ captureRequest->mRotateAndCropChanged ||
+ captureRequest->mAutoframingChanged ||
+ testPatternChanged || settingsOverrideChanged) &&
// Request settings are all the same within one batch, so only treat the first
// request in a batch as new
!(batchedRequest && i > 0);
@@ -3746,6 +3852,17 @@
}
captureRequest->mRotationAndCropUpdated = true;
}
+
+ for (it = captureRequest->mSettingsList.begin();
+ it != captureRequest->mSettingsList.end(); it++) {
+ res = hardware::cameraservice::utils::conversion::aidl::filterVndkKeys(
+ mVndkVersion, it->metadata, false /*isStatic*/);
+ if (res != OK) {
+ SET_ERR("RequestThread: Failed during VNDK filter of capture requests "
+ "%d: %s (%d)", halRequest->frame_number, strerror(-res), res);
+ return INVALID_OPERATION;
+ }
+ }
}
}
@@ -3959,7 +4076,8 @@
expectedDurationInfo.maxDuration,
expectedDurationInfo.isFixedFps,
requestedPhysicalCameras, isStillCapture, isZslCapture,
- captureRequest->mRotateAndCropAuto, mPrevCameraIdsWithZoom,
+ captureRequest->mRotateAndCropAuto, captureRequest->mAutoframingAuto,
+ mPrevCameraIdsWithZoom,
(mUseHalBufManager) ? uniqueSurfaceIdMap :
SurfaceMap{}, captureRequest->mRequestTimeNs);
ALOGVV("%s: registered in flight requestId = %" PRId32 ", frameNumber = %" PRId64
@@ -4097,6 +4215,14 @@
return OK;
}
+status_t Camera3Device::RequestThread::setAutoframingAutoBehaviour(
+ camera_metadata_enum_android_control_autoframing_t autoframingValue) {
+ ATRACE_CALL();
+ Mutex::Autolock l(mTriggerMutex);
+ mAutoframingOverride = autoframingValue;
+ return OK;
+}
+
status_t Camera3Device::RequestThread::setComposerSurface(bool composerSurfacePresent) {
ATRACE_CALL();
Mutex::Autolock l(mTriggerMutex);
@@ -4114,6 +4240,13 @@
return OK;
}
+status_t Camera3Device::RequestThread::setZoomOverride(int32_t zoomOverride) {
+ ATRACE_CALL();
+ Mutex::Autolock l(mTriggerMutex);
+ mSettingsOverride = zoomOverride;
+ return OK;
+}
+
nsecs_t Camera3Device::getExpectedInFlightDuration() {
ATRACE_CALL();
std::lock_guard<std::mutex> l(mInFlightLock);
@@ -4169,6 +4302,12 @@
mStreamUseCaseOverrides.clear();
}
+bool Camera3Device::hasDeviceError() {
+ Mutex::Autolock il(mInterfaceLock);
+ Mutex::Autolock l(mLock);
+ return mStatus == STATUS_ERROR;
+}
+
void Camera3Device::RequestThread::cleanUpFailedRequests(bool sendRequestError) {
if (mNextRequests.empty()) {
return;
@@ -4715,6 +4854,38 @@
return false;
}
+bool Camera3Device::overrideAutoframing(const sp<CaptureRequest> &request /*out*/,
+ camera_metadata_enum_android_control_autoframing_t autoframingOverride) {
+ CameraMetadata &metadata = request->mSettingsList.begin()->metadata;
+ auto autoframingEntry = metadata.find(ANDROID_CONTROL_AUTOFRAMING);
+ if (autoframingEntry.count > 0) {
+ if (autoframingEntry.data.u8[0] == autoframingOverride) {
+ return false;
+ } else {
+ autoframingEntry.data.u8[0] = autoframingOverride;
+ return true;
+ }
+ } else {
+ uint8_t autoframing_u8 = autoframingOverride;
+ metadata.update(ANDROID_CONTROL_AUTOFRAMING,
+ &autoframing_u8, 1);
+ return true;
+ }
+
+ return false;
+}
+
+bool Camera3Device::RequestThread::overrideAutoframing(const sp<CaptureRequest> &request) {
+ ATRACE_CALL();
+
+ if (request->mAutoframingAuto) {
+ Mutex::Autolock l(mTriggerMutex);
+ return Camera3Device::overrideAutoframing(request, mAutoframingOverride);
+ }
+
+ return false;
+}
+
bool Camera3Device::RequestThread::overrideTestPattern(
const sp<CaptureRequest> &request) {
ATRACE_CALL();
@@ -4777,6 +4948,33 @@
return changed;
}
+bool Camera3Device::RequestThread::overrideSettingsOverride(
+ const sp<CaptureRequest> &request) {
+ ATRACE_CALL();
+
+ if (!mSupportSettingsOverride) return false;
+
+ Mutex::Autolock l(mTriggerMutex);
+
+ // For a multi-camera, only override the logical camera's metadata.
+ CameraMetadata &metadata = request->mSettingsList.begin()->metadata;
+ camera_metadata_entry entry = metadata.find(ANDROID_CONTROL_SETTINGS_OVERRIDE);
+ int32_t originalValue = request->mSettingsList.begin()->mOriginalSettingsOverride;
+ if (mSettingsOverride != -1 &&
+ (entry.count == 0 || entry.data.i32[0] != mSettingsOverride)) {
+ metadata.update(ANDROID_CONTROL_SETTINGS_OVERRIDE,
+ &mSettingsOverride, 1);
+ return true;
+ } else if (mSettingsOverride == -1 &&
+ (entry.count == 0 || entry.data.i32[0] != originalValue)) {
+ metadata.update(ANDROID_CONTROL_SETTINGS_OVERRIDE,
+ &originalValue, 1);
+ return true;
+ }
+
+ return false;
+}
+
status_t Camera3Device::RequestThread::setHalInterface(
sp<HalInterface> newHalInterface) {
if (newHalInterface.get() == nullptr) {
@@ -4846,7 +5044,8 @@
}
// queue up the work
- mPendingStreams.emplace(maxCount, stream);
+ mPendingStreams.push_back(
+ std::tuple<int, sp<camera3::Camera3StreamInterface>>(maxCount, stream));
ALOGV("%s: Stream %d queued for preparing", __FUNCTION__, stream->getId());
return OK;
@@ -4857,8 +5056,8 @@
Mutex::Autolock l(mLock);
- std::unordered_map<int, sp<camera3::Camera3StreamInterface> > pendingStreams;
- pendingStreams.insert(mPendingStreams.begin(), mPendingStreams.end());
+ std::list<std::tuple<int, sp<camera3::Camera3StreamInterface>>> pendingStreams;
+ pendingStreams.insert(pendingStreams.begin(), mPendingStreams.begin(), mPendingStreams.end());
sp<camera3::Camera3StreamInterface> currentStream = mCurrentStream;
int currentMaxCount = mCurrentMaxCount;
mPendingStreams.clear();
@@ -4879,18 +5078,19 @@
//of the streams in the pending list.
if (currentStream != nullptr) {
if (!mCurrentPrepareComplete) {
- pendingStreams.emplace(currentMaxCount, currentStream);
+ pendingStreams.push_back(std::tuple(currentMaxCount, currentStream));
}
}
- mPendingStreams.insert(pendingStreams.begin(), pendingStreams.end());
+ mPendingStreams.insert(mPendingStreams.begin(), pendingStreams.begin(), pendingStreams.end());
for (const auto& it : mPendingStreams) {
- it.second->cancelPrepare();
+ std::get<1>(it)->cancelPrepare();
}
}
status_t Camera3Device::PreparerThread::resume() {
ATRACE_CALL();
+ ALOGV("%s: PreparerThread", __FUNCTION__);
status_t res;
Mutex::Autolock l(mLock);
@@ -4903,10 +5103,10 @@
auto it = mPendingStreams.begin();
for (; it != mPendingStreams.end();) {
- res = it->second->startPrepare(it->first, true /*blockRequest*/);
+ res = std::get<1>(*it)->startPrepare(std::get<0>(*it), true /*blockRequest*/);
if (res == OK) {
if (listener != NULL) {
- listener->notifyPrepared(it->second->getId());
+ listener->notifyPrepared(std::get<1>(*it)->getId());
}
it = mPendingStreams.erase(it);
} else if (res != NOT_ENOUGH_DATA) {
@@ -4940,7 +5140,7 @@
Mutex::Autolock l(mLock);
for (const auto& it : mPendingStreams) {
- it.second->cancelPrepare();
+ std::get<1>(it)->cancelPrepare();
}
mPendingStreams.clear();
mCancelNow = true;
@@ -4971,8 +5171,8 @@
// Get next stream to prepare
auto it = mPendingStreams.begin();
- mCurrentStream = it->second;
- mCurrentMaxCount = it->first;
+ mCurrentMaxCount = std::get<0>(*it);
+ mCurrentStream = std::get<1>(*it);
mCurrentPrepareComplete = false;
mPendingStreams.erase(it);
ATRACE_ASYNC_BEGIN("stream prepare", mCurrentStream->getId());
@@ -5204,6 +5404,21 @@
return mRequestThread->setRotateAndCropAutoBehavior(rotateAndCropValue);
}
+status_t Camera3Device::setAutoframingAutoBehavior(
+ camera_metadata_enum_android_control_autoframing_t autoframingValue) {
+ ATRACE_CALL();
+ Mutex::Autolock il(mInterfaceLock);
+ Mutex::Autolock l(mLock);
+ if (mRequestThread == nullptr) {
+ return INVALID_OPERATION;
+ }
+ if (autoframingValue == ANDROID_CONTROL_AUTOFRAMING_AUTO) {
+ return BAD_VALUE;
+ }
+ mAutoframingOverride = autoframingValue;
+ return mRequestThread->setAutoframingAutoBehaviour(autoframingValue);
+}
+
bool Camera3Device::supportsCameraMute() {
Mutex::Autolock il(mInterfaceLock);
Mutex::Autolock l(mLock);
@@ -5226,6 +5441,25 @@
return mRequestThread->setCameraMute(muteMode);
}
+bool Camera3Device::supportsZoomOverride() {
+ Mutex::Autolock il(mInterfaceLock);
+ Mutex::Autolock l(mLock);
+
+ return mSupportZoomOverride;
+}
+
+status_t Camera3Device::setZoomOverride(int32_t zoomOverride) {
+ ATRACE_CALL();
+ Mutex::Autolock il(mInterfaceLock);
+ Mutex::Autolock l(mLock);
+
+ if (mRequestThread == nullptr || !mSupportZoomOverride) {
+ return INVALID_OPERATION;
+ }
+
+ return mRequestThread->setZoomOverride(zoomOverride);
+}
+
status_t Camera3Device::injectCamera(const std::string& injectedCamId,
sp<CameraProviderManager> manager) {
ALOGI("%s Injection camera: injectedCamId = %s", __FUNCTION__, injectedCamId.c_str());
@@ -5304,6 +5538,7 @@
// Start from an array of indexes in mStreamUseCaseOverrides, and sort them
// based first on size, and second on formats of [JPEG, RAW, YUV, PRIV].
+ // Refer to CameraService::printHelp for details.
std::vector<int> outputStreamsIndices(mOutputStreams.size());
for (size_t i = 0; i < outputStreamsIndices.size(); i++) {
outputStreamsIndices[i] = i;
diff --git a/services/camera/libcameraservice/device3/Camera3Device.h b/services/camera/libcameraservice/device3/Camera3Device.h
index 7b89f9f..0c1bbcb 100644
--- a/services/camera/libcameraservice/device3/Camera3Device.h
+++ b/services/camera/libcameraservice/device3/Camera3Device.h
@@ -20,6 +20,7 @@
#include <utility>
#include <unordered_map>
#include <set>
+#include <tuple>
#include <utils/Condition.h>
#include <utils/Errors.h>
@@ -49,6 +50,7 @@
#include "utils/TagMonitor.h"
#include "utils/IPCTransport.h"
#include "utils/LatencyHistogram.h"
+#include "utils/CameraServiceProxyWrapper.h"
#include <camera_metadata_hidden.h>
using android::camera3::camera_capture_request_t;
@@ -82,7 +84,8 @@
friend class AidlCamera3Device;
public:
- explicit Camera3Device(const std::string& id, bool overrideForPerfClass, bool overrideToPortrait,
+ explicit Camera3Device(std::shared_ptr<CameraServiceProxyWrapper>& cameraServiceProxyWrapper,
+ const std::string& id, bool overrideForPerfClass, bool overrideToPortrait,
bool legacyClient = false);
virtual ~Camera3Device();
@@ -116,6 +119,7 @@
status_t dumpWatchedEventsToVector(std::vector<std::string> &out) override;
const CameraMetadata& info() const override;
const CameraMetadata& infoPhysical(const std::string& physicalId) const override;
+ bool isCompositeJpegRDisabled() const override { return mIsCompositeJpegRDisabled; };
// Capture and setStreamingRequest will configure streams if currently in
// idle state
@@ -150,7 +154,10 @@
ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD,
int64_t streamUseCase = ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_DEFAULT,
int timestampBase = OutputConfiguration::TIMESTAMP_BASE_DEFAULT,
- int mirrorMode = OutputConfiguration::MIRROR_MODE_AUTO) override;
+ int mirrorMode = OutputConfiguration::MIRROR_MODE_AUTO,
+ int32_t colorSpace = ANDROID_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP_UNSPECIFIED,
+ bool useReadoutTimestamp = false)
+ override;
status_t createStream(const std::vector<sp<Surface>>& consumers,
bool hasDeferredConsumer, uint32_t width, uint32_t height, int format,
@@ -165,7 +172,10 @@
ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD,
int64_t streamUseCase = ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_DEFAULT,
int timestampBase = OutputConfiguration::TIMESTAMP_BASE_DEFAULT,
- int mirrorMode = OutputConfiguration::MIRROR_MODE_AUTO) override;
+ int mirrorMode = OutputConfiguration::MIRROR_MODE_AUTO,
+ int32_t colorSpace = ANDROID_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP_UNSPECIFIED,
+ bool useReadoutTimestamp = false)
+ override;
status_t createInputStream(
uint32_t width, uint32_t height, int format, bool isMultiResolution,
@@ -268,6 +278,14 @@
camera_metadata_enum_android_scaler_rotate_and_crop_t rotateAndCropValue);
/**
+ * Set the current behavior for the AUTOFRAMING control when in AUTO.
+ *
+ * The value must be one of the AUTOFRAMING_* values besides AUTO.
+ */
+ status_t setAutoframingAutoBehavior(
+ camera_metadata_enum_android_control_autoframing_t autoframingValue);
+
+ /**
* Whether camera muting (producing black-only output) is supported.
*
* Calling setCameraMute(true) when this returns false will return an
@@ -294,9 +312,20 @@
// Clear stream use case overrides
void clearStreamUseCaseOverrides();
+ /**
+ * Whether the camera device supports zoom override.
+ */
+ bool supportsZoomOverride();
+
+ // Set/reset zoom override
+ status_t setZoomOverride(int32_t zoomOverride);
+
// Get the status trackeer for the camera device
wp<camera3::StatusTracker> getStatusTracker() { return mStatusTracker; }
+ // Whether the device is in error state
+ bool hasDeviceError();
+
/**
* The injection camera session to replace the internal camera
* session.
@@ -333,8 +362,11 @@
// Constant to use for stream ID when one doesn't exist
static const int NO_STREAM = -1;
+ std::shared_ptr<CameraServiceProxyWrapper> mCameraServiceProxyWrapper;
+
// A lock to enforce serialization on the input/configure side
// of the public interface.
+ // Only locked by public methods inherited from CameraDeviceBase.
// Not locked by methods guarded by mOutputLock, since they may act
// concurrently to the input/configure side of the interface.
// Must be locked before mLock if both will be locked by a method
@@ -389,7 +421,7 @@
virtual status_t configureStreams(const camera_metadata_t * sessionParams,
/*inout*/ camera_stream_configuration_t * config,
- const std::vector<uint32_t>& bufferSizes) = 0;
+ const std::vector<uint32_t>& bufferSizes, int64_t logId) = 0;
// The injection camera configures the streams to hal.
virtual status_t configureInjectedStreams(
@@ -521,6 +553,7 @@
CameraMetadata mDeviceInfo;
bool mSupportNativeZoomRatio;
+ bool mIsCompositeJpegRDisabled;
std::unordered_map<std::string, CameraMetadata> mPhysicalDeviceInfoMap;
CameraMetadata mRequestTemplateCache[CAMERA_TEMPLATE_COUNT];
@@ -539,8 +572,15 @@
STATUS_ACTIVE
} mStatus;
+ struct StatusInfo {
+ Status status;
+ bool isInternal; // status triggered by internal reconfigureCamera.
+ };
+
+ bool mStatusIsInternal;
+
// Only clear mRecentStatusUpdates, mStatusWaiters from waitUntilStateThenRelock
- Vector<Status> mRecentStatusUpdates;
+ Vector<StatusInfo> mRecentStatusUpdates;
int mStatusWaiters;
Condition mStatusChanged;
@@ -606,6 +646,11 @@
// Indicates that the ROTATE_AND_CROP value within 'mSettingsList' was modified
// irrespective of the original value.
bool mRotateAndCropChanged = false;
+ // Whether this request has AUTOFRAMING_AUTO set, so need to override the AUTOFRAMING value
+ // in the capture request.
+ bool mAutoframingAuto;
+ // Indicates that the auto framing value within 'mSettingsList' was modified
+ bool mAutoframingChanged = false;
// Whether this capture request has its zoom ratio set to 1.0x before
// the framework overrides it for camera HAL consumption.
@@ -619,6 +664,8 @@
// Whether this capture request's rotation and crop update has been
// done.
bool mRotationAndCropUpdated = false;
+ // Whether this capture request's autoframing has been done.
+ bool mAutoframingUpdated = false;
// Whether this capture request's zoom ratio update has been done.
bool mZoomRatioUpdated = false;
// Whether this max resolution capture request's crop / metering region update has been
@@ -678,7 +725,8 @@
* CameraDeviceBase interface we shouldn't need to.
* Must be called with mLock and mInterfaceLock both held.
*/
- status_t internalPauseAndWaitLocked(nsecs_t maxExpectedDuration);
+ status_t internalPauseAndWaitLocked(nsecs_t maxExpectedDuration,
+ bool requestThreadInvocation);
/**
* Resume work after internalPauseAndWaitLocked()
@@ -697,7 +745,8 @@
* During the wait mLock is released.
*
*/
- status_t waitUntilStateThenRelock(bool active, nsecs_t timeout);
+ status_t waitUntilStateThenRelock(bool active, nsecs_t timeout,
+ bool requestThreadInvocation);
/**
* Implementation of waitUntilDrained. On success, will transition to IDLE state.
@@ -797,6 +846,10 @@
bool overrideToPortrait,
camera_metadata_enum_android_scaler_rotate_and_crop_t rotateAndCropOverride);
+ // Override auto framing control if needed
+ static bool overrideAutoframing(const sp<CaptureRequest> &request /*out*/,
+ camera_metadata_enum_android_control_autoframing_t autoframingOverride);
+
struct RequestTrigger {
// Metadata tag number, e.g. android.control.aePrecaptureTrigger
uint32_t metadataTag;
@@ -827,7 +880,8 @@
const Vector<int32_t>& sessionParamKeys,
bool useHalBufManager,
bool supportCameraMute,
- bool overrideToPortrait);
+ bool overrideToPortrait,
+ bool supportSettingsOverride);
~RequestThread();
void setNotificationListener(wp<NotificationListener> listener);
@@ -926,10 +980,16 @@
status_t setRotateAndCropAutoBehavior(
camera_metadata_enum_android_scaler_rotate_and_crop_t rotateAndCropValue);
+
+ status_t setAutoframingAutoBehaviour(
+ camera_metadata_enum_android_control_autoframing_t autoframingValue);
+
status_t setComposerSurface(bool composerSurfacePresent);
status_t setCameraMute(int32_t muteMode);
+ status_t setZoomOverride(int32_t zoomOverride);
+
status_t setHalInterface(sp<HalInterface> newHalInterface);
protected:
@@ -952,10 +1012,17 @@
// Override rotate_and_crop control if needed; returns true if the current value was changed
bool overrideAutoRotateAndCrop(const sp<CaptureRequest> &request /*out*/);
+ // Override autoframing control if needed; returns true if the current value was changed
+ bool overrideAutoframing(const sp<CaptureRequest> &request);
+
// Override test_pattern control if needed for camera mute; returns true
// if the current value was changed
bool overrideTestPattern(const sp<CaptureRequest> &request);
+ // Override settings override if needed for lower zoom latency; return
+ // true if the current value was changed
+ bool overrideSettingsOverride(const sp<CaptureRequest> &request);
+
static const nsecs_t kRequestTimeout = 50e6; // 50 ms
// TODO: does this need to be adjusted for long exposure requests?
@@ -1086,9 +1153,12 @@
uint32_t mCurrentAfTriggerId;
uint32_t mCurrentPreCaptureTriggerId;
camera_metadata_enum_android_scaler_rotate_and_crop_t mRotateAndCropOverride;
+ camera_metadata_enum_android_control_autoframing_t mAutoframingOverride;
bool mComposerOutput;
int32_t mCameraMute; // 0 = no mute, otherwise the TEST_PATTERN_MODE to use
bool mCameraMuteChanged;
+ int32_t mSettingsOverride; // -1 = use original, otherwise
+ // the settings override to use.
int64_t mRepeatingLastFrameNumber;
@@ -1108,6 +1178,8 @@
const bool mUseHalBufManager;
const bool mSupportCameraMute;
const bool mOverrideToPortrait;
+ const bool mSupportSettingsOverride;
+ int32_t mVndkVersion = -1;
};
virtual sp<RequestThread> createNewRequestThread(wp<Camera3Device> /*parent*/,
@@ -1116,7 +1188,8 @@
const Vector<int32_t>& /*sessionParamKeys*/,
bool /*useHalBufManager*/,
bool /*supportCameraMute*/,
- bool /*overrideToPortrait*/) = 0;
+ bool /*overrideToPortrait*/,
+ bool /*supportSettingsOverride*/) = 0;
sp<RequestThread> mRequestThread;
@@ -1137,7 +1210,7 @@
int32_t numBuffers, CaptureResultExtras resultExtras, bool hasInput,
bool callback, nsecs_t minExpectedDuration, nsecs_t maxExpectedDuration,
bool isFixedFps, const std::set<std::set<std::string>>& physicalCameraIds,
- bool isStillCapture, bool isZslCapture, bool rotateAndCropAuto,
+ bool isStillCapture, bool isZslCapture, bool rotateAndCropAuto, bool autoframingAuto,
const std::set<std::string>& cameraIdsWithZoom, const SurfaceMap& outputSurfaces,
nsecs_t requestTimeNs);
@@ -1194,7 +1267,7 @@
// Guarded by mLock
wp<NotificationListener> mListener;
- std::unordered_map<int, sp<camera3::Camera3StreamInterface> > mPendingStreams;
+ std::list<std::tuple<int, sp<camera3::Camera3StreamInterface>>> mPendingStreams;
bool mActive;
bool mCancelNow;
@@ -1381,6 +1454,8 @@
bool mSupportCameraMute = false;
// Whether the HAL supports SOLID_COLOR or BLACK if mSupportCameraMute is true
bool mSupportTestPatternSolidColor = false;
+ // Whether the HAL supports zoom settings override
+ bool mSupportZoomOverride = false;
// Whether the camera framework overrides the device characteristics for
// performance class.
@@ -1392,6 +1467,13 @@
camera_metadata_enum_android_scaler_rotate_and_crop_t mRotateAndCropOverride;
bool mComposerOutput;
+ // Auto framing override value
+ camera_metadata_enum_android_control_autoframing mAutoframingOverride;
+
+ // Settings override value
+ int32_t mSettingsOverride; // -1 = use original, otherwise
+ // the settings override to use.
+
// Current active physical id of the logical multi-camera, if any
std::string mActivePhysicalId;
diff --git a/services/camera/libcameraservice/device3/Camera3DeviceInjectionMethods.cpp b/services/camera/libcameraservice/device3/Camera3DeviceInjectionMethods.cpp
index 4640f2d..b0e4ca3 100644
--- a/services/camera/libcameraservice/device3/Camera3DeviceInjectionMethods.cpp
+++ b/services/camera/libcameraservice/device3/Camera3DeviceInjectionMethods.cpp
@@ -58,7 +58,8 @@
if (parent->mStatus == STATUS_ACTIVE) {
ALOGV("%s: Let the device be IDLE and the request thread is paused",
__FUNCTION__);
- res = parent->internalPauseAndWaitLocked(maxExpectedDuration);
+ res = parent->internalPauseAndWaitLocked(maxExpectedDuration,
+ /*requestThreadInvocation*/false);
if (res != OK) {
ALOGE("%s: Can't pause captures to inject camera!", __FUNCTION__);
return res;
@@ -117,7 +118,8 @@
if (parent->mStatus == STATUS_ACTIVE) {
ALOGV("%s: Let the device be IDLE and the request thread is paused",
__FUNCTION__);
- res = parent->internalPauseAndWaitLocked(maxExpectedDuration);
+ res = parent->internalPauseAndWaitLocked(maxExpectedDuration,
+ /*requestThreadInvocation*/false);
if (res != OK) {
ALOGE("%s: Can't pause captures to stop injection!", __FUNCTION__);
return res;
diff --git a/services/camera/libcameraservice/device3/Camera3IOStreamBase.cpp b/services/camera/libcameraservice/device3/Camera3IOStreamBase.cpp
index 7d08089..c59138c 100644
--- a/services/camera/libcameraservice/device3/Camera3IOStreamBase.cpp
+++ b/services/camera/libcameraservice/device3/Camera3IOStreamBase.cpp
@@ -38,11 +38,12 @@
const std::string& physicalCameraId,
const std::unordered_set<int32_t> &sensorPixelModesUsed,
int setId, bool isMultiResolution, int64_t dynamicRangeProfile, int64_t streamUseCase,
- bool deviceTimeBaseIsRealtime, int timestampBase) :
+ bool deviceTimeBaseIsRealtime, int timestampBase, int32_t colorSpace) :
Camera3Stream(id, type,
width, height, maxSize, format, dataSpace, rotation,
physicalCameraId, sensorPixelModesUsed, setId, isMultiResolution,
- dynamicRangeProfile, streamUseCase, deviceTimeBaseIsRealtime, timestampBase),
+ dynamicRangeProfile, streamUseCase, deviceTimeBaseIsRealtime, timestampBase,
+ colorSpace),
mTotalBufferCount(0),
mMaxCachedBufferCount(0),
mHandoutTotalBufferCount(0),
@@ -95,6 +96,7 @@
}
lines << fmt::sprintf(" Dynamic Range Profile: 0x%" PRIx64 "\n",
camera_stream::dynamic_range_profile);
+ lines << fmt::sprintf(" Color Space: %d\n", camera_stream::color_space);
lines << fmt::sprintf(" Stream use case: %" PRId64 "\n", camera_stream::use_case);
lines << fmt::sprintf(" Timestamp base: %d\n", getTimestampBase());
lines << fmt::sprintf(" Frames produced: %d, last timestamp: %" PRId64 " ns\n",
diff --git a/services/camera/libcameraservice/device3/Camera3IOStreamBase.h b/services/camera/libcameraservice/device3/Camera3IOStreamBase.h
index 1086955..239fc71 100644
--- a/services/camera/libcameraservice/device3/Camera3IOStreamBase.h
+++ b/services/camera/libcameraservice/device3/Camera3IOStreamBase.h
@@ -41,7 +41,8 @@
int64_t dynamicProfile = ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD,
int64_t streamUseCase = ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_DEFAULT,
bool deviceTimeBaseIsRealtime = false,
- int timestampBase = OutputConfiguration::TIMESTAMP_BASE_DEFAULT);
+ int timestampBase = OutputConfiguration::TIMESTAMP_BASE_DEFAULT,
+ int32_t colorSpace = ANDROID_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP_UNSPECIFIED);
public:
diff --git a/services/camera/libcameraservice/device3/Camera3OutputStream.cpp b/services/camera/libcameraservice/device3/Camera3OutputStream.cpp
index 290836c..7185895 100644
--- a/services/camera/libcameraservice/device3/Camera3OutputStream.cpp
+++ b/services/camera/libcameraservice/device3/Camera3OutputStream.cpp
@@ -24,6 +24,7 @@
#include <aidl/android/hardware/camera/device/CameraBlob.h>
#include <aidl/android/hardware/camera/device/CameraBlobId.h>
+#include "aidl/android/hardware/graphics/common/Dataspace.h"
#include <android-base/unique_fd.h>
#include <cutils/properties.h>
@@ -57,18 +58,18 @@
const std::unordered_set<int32_t> &sensorPixelModesUsed, IPCTransport transport,
int setId, bool isMultiResolution, int64_t dynamicRangeProfile,
int64_t streamUseCase, bool deviceTimeBaseIsRealtime, int timestampBase,
- int mirrorMode) :
+ int mirrorMode, int32_t colorSpace, bool useReadoutTimestamp) :
Camera3IOStreamBase(id, CAMERA_STREAM_OUTPUT, width, height,
/*maxSize*/0, format, dataSpace, rotation,
physicalCameraId, sensorPixelModesUsed, setId, isMultiResolution,
dynamicRangeProfile, streamUseCase, deviceTimeBaseIsRealtime,
- timestampBase),
+ timestampBase, colorSpace),
mConsumer(consumer),
mTransform(0),
mTraceFirstBuffer(true),
mUseBufferManager(false),
mTimestampOffset(timestampOffset),
- mUseReadoutTime(false),
+ mUseReadoutTime(useReadoutTimestamp),
mConsumerUsage(0),
mDropBuffers(false),
mMirrorMode(mirrorMode),
@@ -92,17 +93,17 @@
const std::unordered_set<int32_t> &sensorPixelModesUsed, IPCTransport transport,
int setId, bool isMultiResolution, int64_t dynamicRangeProfile,
int64_t streamUseCase, bool deviceTimeBaseIsRealtime, int timestampBase,
- int mirrorMode) :
+ int mirrorMode, int32_t colorSpace, bool useReadoutTimestamp) :
Camera3IOStreamBase(id, CAMERA_STREAM_OUTPUT, width, height, maxSize,
format, dataSpace, rotation, physicalCameraId, sensorPixelModesUsed,
setId, isMultiResolution, dynamicRangeProfile, streamUseCase,
- deviceTimeBaseIsRealtime, timestampBase),
+ deviceTimeBaseIsRealtime, timestampBase, colorSpace),
mConsumer(consumer),
mTransform(0),
mTraceFirstBuffer(true),
mUseBufferManager(false),
mTimestampOffset(timestampOffset),
- mUseReadoutTime(false),
+ mUseReadoutTime(useReadoutTimestamp),
mConsumerUsage(0),
mDropBuffers(false),
mMirrorMode(mirrorMode),
@@ -132,18 +133,18 @@
const std::unordered_set<int32_t> &sensorPixelModesUsed, IPCTransport transport,
int setId, bool isMultiResolution, int64_t dynamicRangeProfile,
int64_t streamUseCase, bool deviceTimeBaseIsRealtime, int timestampBase,
- int mirrorMode) :
+ int mirrorMode, int32_t colorSpace, bool useReadoutTimestamp) :
Camera3IOStreamBase(id, CAMERA_STREAM_OUTPUT, width, height,
/*maxSize*/0, format, dataSpace, rotation,
physicalCameraId, sensorPixelModesUsed, setId, isMultiResolution,
dynamicRangeProfile, streamUseCase, deviceTimeBaseIsRealtime,
- timestampBase),
+ timestampBase, colorSpace),
mConsumer(nullptr),
mTransform(0),
mTraceFirstBuffer(true),
mUseBufferManager(false),
mTimestampOffset(timestampOffset),
- mUseReadoutTime(false),
+ mUseReadoutTime(useReadoutTimestamp),
mConsumerUsage(consumerUsage),
mDropBuffers(false),
mMirrorMode(mirrorMode),
@@ -181,18 +182,19 @@
int setId, bool isMultiResolution,
int64_t dynamicRangeProfile, int64_t streamUseCase,
bool deviceTimeBaseIsRealtime, int timestampBase,
- int mirrorMode) :
+ int mirrorMode, int32_t colorSpace,
+ bool useReadoutTimestamp) :
Camera3IOStreamBase(id, type, width, height,
/*maxSize*/0,
format, dataSpace, rotation,
physicalCameraId, sensorPixelModesUsed, setId, isMultiResolution,
dynamicRangeProfile, streamUseCase, deviceTimeBaseIsRealtime,
- timestampBase),
+ timestampBase, colorSpace),
mTransform(0),
mTraceFirstBuffer(true),
mUseBufferManager(false),
mTimestampOffset(timestampOffset),
- mUseReadoutTime(false),
+ mUseReadoutTime(useReadoutTimestamp),
mConsumerUsage(consumerUsage),
mDropBuffers(false),
mMirrorMode(mirrorMode),
@@ -332,7 +334,7 @@
status_t res =
gbLocker.lockAsync(
GraphicBuffer::USAGE_SW_READ_OFTEN | GraphicBuffer::USAGE_SW_WRITE_RARELY,
- &mapped, fenceFd.get());
+ &mapped, fenceFd.release());
if (res != OK) {
ALOGE("%s: Failed to lock the buffer: %s (%d)", __FUNCTION__, strerror(-res), res);
return res;
@@ -456,7 +458,10 @@
mTraceFirstBuffer = false;
}
// Fix CameraBlob id type discrepancy between HIDL and AIDL, details : http://b/229688810
- if (getFormat() == HAL_PIXEL_FORMAT_BLOB && getDataSpace() == HAL_DATASPACE_V0_JFIF) {
+ if (getFormat() == HAL_PIXEL_FORMAT_BLOB && (getDataSpace() == HAL_DATASPACE_V0_JFIF ||
+ (getDataSpace() ==
+ static_cast<android_dataspace_t>(
+ aidl::android::hardware::graphics::common::Dataspace::JPEG_R)))) {
if (mIPCTransport == IPCTransport::HIDL) {
fixUpHidlJpegBlobHeader(anwBuffer, anwReleaseFence);
}
@@ -467,7 +472,7 @@
}
}
- nsecs_t captureTime = (mUseReadoutTime && readoutTimestamp != 0 ?
+ nsecs_t captureTime = ((mUseReadoutTime || mSyncToDisplay) && readoutTimestamp != 0 ?
readoutTimestamp : timestamp) - mTimestampOffset;
if (mPreviewFrameSpacer != nullptr) {
nsecs_t readoutTime = (readoutTimestamp != 0 ? readoutTimestamp : timestamp)
@@ -482,7 +487,7 @@
bufferDeferred = true;
} else {
nsecs_t presentTime = mSyncToDisplay ?
- syncTimestampToDisplayLocked(captureTime) : captureTime;
+ syncTimestampToDisplayLocked(captureTime, releaseFence->dup()) : captureTime;
setTransform(transform, true/*mayChangeMirror*/);
res = native_window_set_buffers_timestamp(mConsumer.get(), presentTime);
@@ -513,10 +518,6 @@
mStreamUnpreparable = true;
}
- if (res != OK) {
- close(anwReleaseFence);
- }
-
*releaseFenceOut = releaseFence;
return res;
@@ -713,7 +714,8 @@
res = mPreviewFrameSpacer->run((std::string("PreviewSpacer-")
+ std::to_string(mId)).c_str());
if (res != OK) {
- ALOGE("%s: Unable to start preview spacer", __FUNCTION__);
+ ALOGE("%s: Unable to start preview spacer: %s (%d)", __FUNCTION__,
+ strerror(-res), res);
return res;
}
}
@@ -722,16 +724,12 @@
mFrameCount = 0;
mLastTimestamp = 0;
- mUseReadoutTime =
- (timestampBase == OutputConfiguration::TIMESTAMP_BASE_READOUT_SENSOR || mSyncToDisplay);
-
if (isDeviceTimeBaseRealtime()) {
if (isDefaultTimeBase && !isConsumedByHWComposer() && !isVideoStream()) {
// Default time base, but not hardware composer or video encoder
mTimestampOffset = 0;
} else if (timestampBase == OutputConfiguration::TIMESTAMP_BASE_REALTIME ||
- timestampBase == OutputConfiguration::TIMESTAMP_BASE_SENSOR ||
- timestampBase == OutputConfiguration::TIMESTAMP_BASE_READOUT_SENSOR) {
+ timestampBase == OutputConfiguration::TIMESTAMP_BASE_SENSOR) {
mTimestampOffset = 0;
}
// If timestampBase is CHOREOGRAPHER SYNCED or MONOTONIC, leave
@@ -741,7 +739,7 @@
// Reverse offset for monotonicTime -> bootTime
mTimestampOffset = -mTimestampOffset;
} else {
- // If timestampBase is DEFAULT, MONOTONIC, SENSOR, READOUT_SENSOR or
+ // If timestampBase is DEFAULT, MONOTONIC, SENSOR or
// CHOREOGRAPHER_SYNCED, timestamp offset is 0.
mTimestampOffset = 0;
}
@@ -1327,7 +1325,7 @@
void* mapped = nullptr;
base::unique_fd fenceFd(dup(fence));
status_t res = graphicBuffer->lockAsync(GraphicBuffer::USAGE_SW_READ_OFTEN, &mapped,
- fenceFd.get());
+ fenceFd.release());
if (res != OK) {
ALOGE("%s: Failed to lock the buffer: %s (%d)", __FUNCTION__, strerror(-res), res);
return;
@@ -1414,7 +1412,7 @@
}
}
-nsecs_t Camera3OutputStream::syncTimestampToDisplayLocked(nsecs_t t) {
+nsecs_t Camera3OutputStream::syncTimestampToDisplayLocked(nsecs_t t, int releaseFence) {
nsecs_t currentTime = systemTime();
if (!mFixedFps) {
mLastCaptureTime = t;
@@ -1448,7 +1446,7 @@
//
nsecs_t captureInterval = t - mLastCaptureTime;
if (captureInterval > kSpacingResetIntervalNs) {
- for (size_t i = 0; i < VsyncEventData::kFrameTimelinesLength; i++) {
+ for (size_t i = 0; i < vsyncEventData.frameTimelinesLength; i++) {
const auto& timeline = vsyncEventData.frameTimelines[i];
if (timeline.deadlineTimestamp >= currentTime &&
timeline.expectedPresentationTime > minPresentT) {
@@ -1457,6 +1455,17 @@
mLastCaptureTime = t;
mLastPresentTime = presentT;
+ // If releaseFence is available, store the fence to check signal
+ // time later.
+ mRefVsyncData = vsyncEventData;
+ mReferenceCaptureTime = t;
+ mReferenceArrivalTime = currentTime;
+ if (releaseFence != -1) {
+ mReferenceFrameFence = new Fence(releaseFence);
+ } else {
+ mFenceSignalOffset = 0;
+ }
+
// Move the expected presentation time back by 1/3 of frame interval to
// mitigate the time drift. Due to time drift, if we directly use the
// expected presentation time, often times 2 expected presentation time
@@ -1466,6 +1475,36 @@
}
}
+ // If there is a reference frame release fence, get the signal time and
+ // update the captureToPresentOffset.
+ if (mReferenceFrameFence != nullptr) {
+ mFenceSignalOffset = 0;
+ nsecs_t signalTime = mReferenceFrameFence->getSignalTime();
+ // Now that the fence has signaled, recalculate the offsets based on
+ // the timeline which was actually latched
+ if (signalTime != INT64_MAX) {
+ for (size_t i = 0; i < mRefVsyncData.frameTimelinesLength; i++) {
+ const auto& timeline = mRefVsyncData.frameTimelines[i];
+ if (timeline.deadlineTimestamp >= signalTime) {
+ nsecs_t originalOffset = mCaptureToPresentOffset;
+ mCaptureToPresentOffset = timeline.expectedPresentationTime
+ - mReferenceCaptureTime;
+ mLastPresentTime = timeline.expectedPresentationTime;
+ mFenceSignalOffset = signalTime > mReferenceArrivalTime ?
+ signalTime - mReferenceArrivalTime : 0;
+
+ ALOGV("%s: Last deadline %" PRId64 " signalTime %" PRId64
+ " original offset %" PRId64 " new offset %" PRId64
+ " fencesignal offset %" PRId64, __FUNCTION__,
+ timeline.deadlineTimestamp, signalTime, originalOffset,
+ mCaptureToPresentOffset, mFenceSignalOffset);
+ break;
+ }
+ }
+ mReferenceFrameFence.clear();
+ }
+ }
+
nsecs_t idealPresentT = t + mCaptureToPresentOffset;
nsecs_t expectedPresentT = mLastPresentTime;
nsecs_t minDiff = INT64_MAX;
@@ -1509,6 +1548,7 @@
// Find best timestamp in the vsync timelines:
// - Only use at most kMaxTimelines timelines to avoid long latency
+ // - Add an extra timeline if display fence is used
// - closest to the ideal presentation time,
// - deadline timestamp is greater than the current time, and
// - For fixed FPS, if the capture interval doesn't deviate too much from refresh interval,
@@ -1517,7 +1557,9 @@
// - For variable FPS, or if the capture interval deviates from refresh
// interval for more than 5%, find a presentation time closest to the
// (lastPresentationTime + captureToPresentOffset) instead.
- int maxTimelines = std::min(kMaxTimelines, (int)VsyncEventData::kFrameTimelinesLength);
+ int fenceAdjustment = (mFenceSignalOffset > 0) ? 1 : 0;
+ int maxTimelines = std::min(kMaxTimelines + fenceAdjustment,
+ (int)vsyncEventData.frameTimelinesLength);
float biasForShortDelay = 1.0f;
for (int i = 0; i < maxTimelines; i ++) {
const auto& vsyncTime = vsyncEventData.frameTimelines[i];
@@ -1528,7 +1570,7 @@
biasForShortDelay = 1.0 - 2.0 * i / (maxTimelines - 1);
}
if (std::abs(vsyncTime.expectedPresentationTime - idealPresentT) < minDiff &&
- vsyncTime.deadlineTimestamp >= currentTime &&
+ vsyncTime.deadlineTimestamp >= currentTime + mFenceSignalOffset &&
((!cameraDisplayInSync && vsyncTime.expectedPresentationTime > minPresentT) ||
(cameraDisplayInSync && vsyncTime.expectedPresentationTime >
mLastPresentTime + minInterval +
diff --git a/services/camera/libcameraservice/device3/Camera3OutputStream.h b/services/camera/libcameraservice/device3/Camera3OutputStream.h
index c44b842..0b456c0 100644
--- a/services/camera/libcameraservice/device3/Camera3OutputStream.h
+++ b/services/camera/libcameraservice/device3/Camera3OutputStream.h
@@ -96,7 +96,9 @@
int64_t streamUseCase = ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_DEFAULT,
bool deviceTimeBaseIsRealtime = false,
int timestampBase = OutputConfiguration::TIMESTAMP_BASE_DEFAULT,
- int mirrorMode = OutputConfiguration::MIRROR_MODE_AUTO);
+ int mirrorMode = OutputConfiguration::MIRROR_MODE_AUTO,
+ int32_t colorSpace = ANDROID_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP_UNSPECIFIED,
+ bool useReadoutTimestamp = false);
/**
* Set up a stream for formats that have a variable buffer size for the same
* dimensions, such as compressed JPEG.
@@ -113,7 +115,9 @@
int64_t streamUseCase = ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_DEFAULT,
bool deviceTimeBaseIsRealtime = false,
int timestampBase = OutputConfiguration::TIMESTAMP_BASE_DEFAULT,
- int mirrorMode = OutputConfiguration::MIRROR_MODE_AUTO);
+ int mirrorMode = OutputConfiguration::MIRROR_MODE_AUTO,
+ int32_t colorSpace = ANDROID_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP_UNSPECIFIED,
+ bool useReadoutTimestamp = false);
/**
* Set up a stream with deferred consumer for formats that have 2 dimensions, such as
* RAW and YUV. The consumer must be set before using this stream for output. A valid
@@ -129,7 +133,9 @@
int64_t streamUseCase = ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_DEFAULT,
bool deviceTimeBaseIsRealtime = false,
int timestampBase = OutputConfiguration::TIMESTAMP_BASE_DEFAULT,
- int mirrorMode = OutputConfiguration::MIRROR_MODE_AUTO);
+ int mirrorMode = OutputConfiguration::MIRROR_MODE_AUTO,
+ int32_t colorSpace = ANDROID_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP_UNSPECIFIED,
+ bool useReadoutTimestamp = false);
virtual ~Camera3OutputStream();
@@ -278,7 +284,9 @@
int64_t streamUseCase = ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_DEFAULT,
bool deviceTimeBaseIsRealtime = false,
int timestampBase = OutputConfiguration::TIMESTAMP_BASE_DEFAULT,
- int mirrorMode = OutputConfiguration::MIRROR_MODE_AUTO);
+ int mirrorMode = OutputConfiguration::MIRROR_MODE_AUTO,
+ int32_t colorSpace = ANDROID_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP_UNSPECIFIED,
+ bool useReadoutTimestamp = false);
/**
* Note that we release the lock briefly in this function
@@ -438,7 +446,14 @@
static constexpr nsecs_t kTimelineThresholdNs = 1000000LL; // 1 millisecond
static constexpr float kMaxIntervalRatioDeviation = 0.05f;
static constexpr int kMaxTimelines = 2;
- nsecs_t syncTimestampToDisplayLocked(nsecs_t t);
+ nsecs_t syncTimestampToDisplayLocked(nsecs_t t, int releaseFence);
+
+ // In case of fence being used
+ sp<Fence> mReferenceFrameFence;
+ nsecs_t mReferenceCaptureTime = 0;
+ nsecs_t mReferenceArrivalTime = 0;
+ nsecs_t mFenceSignalOffset = 0;
+ VsyncEventData mRefVsyncData;
// Re-space frames by delaying queueBuffer so that frame delivery has
// the same cadence as capture. Default is on for SurfaceTexture bound
diff --git a/services/camera/libcameraservice/device3/Camera3OutputUtils.cpp b/services/camera/libcameraservice/device3/Camera3OutputUtils.cpp
index f25137a..1e7e337 100644
--- a/services/camera/libcameraservice/device3/Camera3OutputUtils.cpp
+++ b/services/camera/libcameraservice/device3/Camera3OutputUtils.cpp
@@ -129,12 +129,71 @@
return res;
}
+status_t fixupAutoframingTags(CameraMetadata& resultMetadata) {
+ status_t res = OK;
+ camera_metadata_entry autoframingEntry =
+ resultMetadata.find(ANDROID_CONTROL_AUTOFRAMING);
+ if (autoframingEntry.count == 0) {
+ const uint8_t defaultAutoframingEntry = ANDROID_CONTROL_AUTOFRAMING_OFF;
+ res = resultMetadata.update(ANDROID_CONTROL_AUTOFRAMING, &defaultAutoframingEntry, 1);
+ if (res != OK) {
+ ALOGE("%s: Failed to update ANDROID_CONTROL_AUTOFRAMING: %s (%d)",
+ __FUNCTION__, strerror(-res), res);
+ return res;
+ }
+ }
+
+ camera_metadata_entry autoframingStateEntry =
+ resultMetadata.find(ANDROID_CONTROL_AUTOFRAMING_STATE);
+ if (autoframingStateEntry.count == 0) {
+ const uint8_t defaultAutoframingStateEntry = ANDROID_CONTROL_AUTOFRAMING_STATE_INACTIVE;
+ res = resultMetadata.update(ANDROID_CONTROL_AUTOFRAMING_STATE,
+ &defaultAutoframingStateEntry, 1);
+ if (res != OK) {
+ ALOGE("%s: Failed to update ANDROID_CONTROL_AUTOFRAMING_STATE: %s (%d)",
+ __FUNCTION__, strerror(-res), res);
+ return res;
+ }
+ }
+
+ return res;
+}
+
+void correctMeteringRegions(camera_metadata_t *meta) {
+ if (meta == nullptr) return;
+
+ uint32_t meteringRegionKeys[] = {
+ ANDROID_CONTROL_AE_REGIONS,
+ ANDROID_CONTROL_AWB_REGIONS,
+ ANDROID_CONTROL_AF_REGIONS };
+
+ for (uint32_t key : meteringRegionKeys) {
+ camera_metadata_entry_t entry;
+ int res = find_camera_metadata_entry(meta, key, &entry);
+ if (res != OK) continue;
+
+ for (size_t i = 0; i < entry.count; i += 5) {
+ if (entry.data.i32[0] > entry.data.i32[2]) {
+ ALOGW("%s: Invalid metering region (%d): left: %d, right: %d",
+ __FUNCTION__, key, entry.data.i32[0], entry.data.i32[2]);
+ entry.data.i32[2] = entry.data.i32[0];
+ }
+ if (entry.data.i32[1] > entry.data.i32[3]) {
+ ALOGW("%s: Invalid metering region (%d): top: %d, bottom: %d",
+ __FUNCTION__, key, entry.data.i32[1], entry.data.i32[3]);
+ entry.data.i32[3] = entry.data.i32[1];
+ }
+ }
+ }
+}
+
void insertResultLocked(CaptureOutputStates& states, CaptureResult *result, uint32_t frameNumber) {
if (result == nullptr) return;
camera_metadata_t *meta = const_cast<camera_metadata_t *>(
result->mMetadata.getAndLock());
set_camera_metadata_vendor_id(meta, states.vendorTagId);
+ correctMeteringRegions(meta);
result->mMetadata.unlock(meta);
if (result->mMetadata.update(ANDROID_REQUEST_FRAME_COUNT,
@@ -153,6 +212,7 @@
camera_metadata_t *pmeta = const_cast<camera_metadata_t *>(
physicalMetadata.mPhysicalCameraMetadata.getAndLock());
set_camera_metadata_vendor_id(pmeta, states.vendorTagId);
+ correctMeteringRegions(pmeta);
physicalMetadata.mPhysicalCameraMetadata.unlock(pmeta);
}
@@ -325,6 +385,22 @@
}
}
+ // Fix up autoframing metadata
+ res = fixupAutoframingTags(captureResult.mMetadata);
+ if (res != OK) {
+ SET_ERR("Failed to set autoframing defaults in result metadata: %s (%d)",
+ strerror(-res), res);
+ return;
+ }
+ for (auto& physicalMetadata : captureResult.mPhysicalMetadatas) {
+ res = fixupAutoframingTags(physicalMetadata.mPhysicalCameraMetadata);
+ if (res != OK) {
+ SET_ERR("Failed to set autoframing defaults in physical result metadata: %s (%d)",
+ strerror(-res), res);
+ return;
+ }
+ }
+
for (auto& physicalMetadata : captureResult.mPhysicalMetadatas) {
const std::string cameraId = physicalMetadata.mPhysicalCameraId;
auto mapper = states.distortionMappers.find(cameraId);
@@ -465,6 +541,32 @@
return found;
}
+const std::set<std::string>& getCameraIdsWithZoomLocked(
+ const InFlightRequestMap& inflightMap, const CameraMetadata& metadata,
+ const std::set<std::string>& cameraIdsWithZoom) {
+ camera_metadata_ro_entry overrideEntry =
+ metadata.find(ANDROID_CONTROL_SETTINGS_OVERRIDE);
+ camera_metadata_ro_entry frameNumberEntry =
+ metadata.find(ANDROID_CONTROL_SETTINGS_OVERRIDING_FRAME_NUMBER);
+ if (overrideEntry.count != 1
+ || overrideEntry.data.i32[0] != ANDROID_CONTROL_SETTINGS_OVERRIDE_ZOOM
+ || frameNumberEntry.count != 1) {
+ // No valid overriding frame number, skip
+ return cameraIdsWithZoom;
+ }
+
+ uint32_t overridingFrameNumber = frameNumberEntry.data.i32[0];
+ ssize_t idx = inflightMap.indexOfKey(overridingFrameNumber);
+ if (idx < 0) {
+ ALOGE("%s: Failed to find pending request #%d in inflight map",
+ __FUNCTION__, overridingFrameNumber);
+ return cameraIdsWithZoom;
+ }
+
+ const InFlightRequest &r = inflightMap.valueFor(overridingFrameNumber);
+ return r.cameraIdsWithZoom;
+}
+
void processCaptureResult(CaptureOutputStates& states, const camera_capture_result *result) {
ATRACE_CALL();
@@ -676,10 +778,12 @@
} else if (request.hasCallback) {
CameraMetadata metadata;
metadata = result->result;
+ auto cameraIdsWithZoom = getCameraIdsWithZoomLocked(
+ states.inflightMap, metadata, request.cameraIdsWithZoom);
sendCaptureResult(states, metadata, request.resultExtras,
collectedPartialResult, frameNumber,
hasInputBufferInRequest, request.zslCapture && request.stillCapture,
- request.rotateAndCropAuto, request.cameraIdsWithZoom,
+ request.rotateAndCropAuto, cameraIdsWithZoom,
request.physicalMetadatas);
}
}
@@ -906,11 +1010,13 @@
states.listener->notifyShutter(r.resultExtras, msg.timestamp);
}
// send pending result and buffers
+ const auto& cameraIdsWithZoom = getCameraIdsWithZoomLocked(
+ inflightMap, r.pendingMetadata, r.cameraIdsWithZoom);
sendCaptureResult(states,
r.pendingMetadata, r.resultExtras,
r.collectedPartialResult, msg.frame_number,
r.hasInputBuffer, r.zslCapture && r.stillCapture,
- r.rotateAndCropAuto, r.cameraIdsWithZoom, r.physicalMetadatas);
+ r.rotateAndCropAuto, cameraIdsWithZoom, r.physicalMetadatas);
}
returnAndRemovePendingOutputBuffers(
states.useHalBufManager, states.listener, r, states.sessionStatsBuilder);
diff --git a/services/camera/libcameraservice/device3/Camera3SharedOutputStream.cpp b/services/camera/libcameraservice/device3/Camera3SharedOutputStream.cpp
index 64810d4..1191f05 100644
--- a/services/camera/libcameraservice/device3/Camera3SharedOutputStream.cpp
+++ b/services/camera/libcameraservice/device3/Camera3SharedOutputStream.cpp
@@ -35,12 +35,13 @@
const std::unordered_set<int32_t> &sensorPixelModesUsed, IPCTransport transport,
int setId, bool useHalBufManager, int64_t dynamicProfile,
int64_t streamUseCase, bool deviceTimeBaseIsRealtime, int timestampBase,
- int mirrorMode) :
+ int mirrorMode, int32_t colorSpace, bool useReadoutTimestamp) :
Camera3OutputStream(id, CAMERA_STREAM_OUTPUT, width, height,
format, dataSpace, rotation, physicalCameraId, sensorPixelModesUsed,
transport, consumerUsage, timestampOffset, setId,
/*isMultiResolution*/false, dynamicProfile, streamUseCase,
- deviceTimeBaseIsRealtime, timestampBase, mirrorMode),
+ deviceTimeBaseIsRealtime, timestampBase, mirrorMode, colorSpace,
+ useReadoutTimestamp),
mUseHalBufManager(useHalBufManager) {
size_t consumerCount = std::min(surfaces.size(), kMaxOutputs);
if (surfaces.size() > consumerCount) {
diff --git a/services/camera/libcameraservice/device3/Camera3SharedOutputStream.h b/services/camera/libcameraservice/device3/Camera3SharedOutputStream.h
index 0caa90b..c2ff20e 100644
--- a/services/camera/libcameraservice/device3/Camera3SharedOutputStream.h
+++ b/services/camera/libcameraservice/device3/Camera3SharedOutputStream.h
@@ -45,7 +45,9 @@
int64_t streamUseCase = ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_DEFAULT,
bool deviceTimeBaseIsRealtime = false,
int timestampBase = OutputConfiguration::TIMESTAMP_BASE_DEFAULT,
- int mirrorMode = OutputConfiguration::MIRROR_MODE_AUTO);
+ int mirrorMode = OutputConfiguration::MIRROR_MODE_AUTO,
+ int32_t colorSpace = ANDROID_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP_UNSPECIFIED,
+ bool useReadoutTimestamp = false);
virtual ~Camera3SharedOutputStream();
diff --git a/services/camera/libcameraservice/device3/Camera3Stream.cpp b/services/camera/libcameraservice/device3/Camera3Stream.cpp
index 27269a6..23afa6e 100644
--- a/services/camera/libcameraservice/device3/Camera3Stream.cpp
+++ b/services/camera/libcameraservice/device3/Camera3Stream.cpp
@@ -56,7 +56,8 @@
const std::string& physicalCameraId,
const std::unordered_set<int32_t> &sensorPixelModesUsed,
int setId, bool isMultiResolution, int64_t dynamicRangeProfile,
- int64_t streamUseCase, bool deviceTimeBaseIsRealtime, int timestampBase) :
+ int64_t streamUseCase, bool deviceTimeBaseIsRealtime, int timestampBase,
+ int32_t colorSpace) :
camera_stream(),
mId(id),
mSetId(setId),
@@ -96,6 +97,7 @@
camera_stream::sensor_pixel_modes_used = sensorPixelModesUsed;
camera_stream::dynamic_range_profile = dynamicRangeProfile;
camera_stream::use_case = streamUseCase;
+ camera_stream::color_space = colorSpace;
if ((format == HAL_PIXEL_FORMAT_BLOB || format == HAL_PIXEL_FORMAT_RAW_OPAQUE) &&
maxSize == 0) {
@@ -136,6 +138,10 @@
return camera_stream::data_space;
}
+int32_t Camera3Stream::getColorSpace() const {
+ return camera_stream::color_space;
+}
+
uint64_t Camera3Stream::getUsage() const {
return mUsage;
}
diff --git a/services/camera/libcameraservice/device3/Camera3Stream.h b/services/camera/libcameraservice/device3/Camera3Stream.h
index e451fa4..2bfaaab 100644
--- a/services/camera/libcameraservice/device3/Camera3Stream.h
+++ b/services/camera/libcameraservice/device3/Camera3Stream.h
@@ -166,6 +166,7 @@
uint32_t getHeight() const;
int getFormat() const;
android_dataspace getDataSpace() const;
+ int32_t getColorSpace() const;
uint64_t getUsage() const;
void setUsage(uint64_t usage);
void setFormatOverride(bool formatOverridden);
@@ -508,7 +509,8 @@
const std::string& physicalCameraId,
const std::unordered_set<int32_t> &sensorPixelModesUsed,
int setId, bool isMultiResolution, int64_t dynamicRangeProfile,
- int64_t streamUseCase, bool deviceTimeBaseIsRealtime, int timestampBase);
+ int64_t streamUseCase, bool deviceTimeBaseIsRealtime, int timestampBase,
+ int32_t colorSpace);
wp<Camera3StreamBufferFreedListener> mBufferFreedListener;
diff --git a/services/camera/libcameraservice/device3/Camera3StreamInterface.h b/services/camera/libcameraservice/device3/Camera3StreamInterface.h
index d715306..7fa6273 100644
--- a/services/camera/libcameraservice/device3/Camera3StreamInterface.h
+++ b/services/camera/libcameraservice/device3/Camera3StreamInterface.h
@@ -67,6 +67,7 @@
std::unordered_set<int32_t> sensor_pixel_modes_used;
int64_t dynamic_range_profile;
int64_t use_case;
+ int32_t color_space;
} camera_stream_t;
typedef struct camera_stream_buffer {
@@ -114,20 +115,24 @@
int64_t streamUseCase;
int timestampBase;
int mirrorMode;
+ int32_t colorSpace;
OutputStreamInfo() :
width(-1), height(-1), format(-1), dataSpace(HAL_DATASPACE_UNKNOWN),
consumerUsage(0),
dynamicRangeProfile(ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD),
streamUseCase(ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_DEFAULT),
timestampBase(OutputConfiguration::TIMESTAMP_BASE_DEFAULT),
- mirrorMode(OutputConfiguration::MIRROR_MODE_AUTO) {}
+ mirrorMode(OutputConfiguration::MIRROR_MODE_AUTO),
+ colorSpace(ANDROID_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP_UNSPECIFIED) {}
OutputStreamInfo(int _width, int _height, int _format, android_dataspace _dataSpace,
uint64_t _consumerUsage, const std::unordered_set<int32_t>& _sensorPixelModesUsed,
- int64_t _dynamicRangeProfile, int _streamUseCase, int _timestampBase, int _mirrorMode) :
+ int64_t _dynamicRangeProfile, int _streamUseCase, int _timestampBase, int _mirrorMode,
+ int32_t _colorSpace) :
width(_width), height(_height), format(_format),
dataSpace(_dataSpace), consumerUsage(_consumerUsage),
sensorPixelModesUsed(_sensorPixelModesUsed), dynamicRangeProfile(_dynamicRangeProfile),
- streamUseCase(_streamUseCase), timestampBase(_timestampBase), mirrorMode(_mirrorMode) {}
+ streamUseCase(_streamUseCase), timestampBase(_timestampBase), mirrorMode(_mirrorMode),
+ colorSpace(_colorSpace) {}
};
// Utility class to lock and unlock a GraphicBuffer
@@ -206,6 +211,7 @@
virtual int getFormat() const = 0;
virtual int64_t getDynamicRangeProfile() const = 0;
virtual android_dataspace getDataSpace() const = 0;
+ virtual int32_t getColorSpace() const = 0;
virtual void setFormatOverride(bool formatOverriden) = 0;
virtual bool isFormatOverridden() const = 0;
virtual int getOriginalFormat() const = 0;
diff --git a/services/camera/libcameraservice/device3/DistortionMapper.cpp b/services/camera/libcameraservice/device3/DistortionMapper.cpp
index 15807bf..f0764b4 100644
--- a/services/camera/libcameraservice/device3/DistortionMapper.cpp
+++ b/services/camera/libcameraservice/device3/DistortionMapper.cpp
@@ -67,7 +67,7 @@
return res;
}
- bool mMaxResolution = SessionConfigurationUtils::isUltraHighResolutionSensor(deviceInfo);
+ bool mMaxResolution = SessionConfigurationUtils::supportsUltraHighResolutionCapture(deviceInfo);
if (mMaxResolution) {
res = setupStaticInfoLocked(deviceInfo, /*maxResolution*/true);
}
diff --git a/services/camera/libcameraservice/device3/InFlightRequest.h b/services/camera/libcameraservice/device3/InFlightRequest.h
index 4c19349..665ac73 100644
--- a/services/camera/libcameraservice/device3/InFlightRequest.h
+++ b/services/camera/libcameraservice/device3/InFlightRequest.h
@@ -181,6 +181,9 @@
// Indicates that ROTATE_AND_CROP was set to AUTO
bool rotateAndCropAuto;
+ // Indicates that AUTOFRAMING was set to AUTO
+ bool autoframingAuto;
+
// Requested camera ids (both logical and physical) with zoomRatio != 1.0f
std::set<std::string> cameraIdsWithZoom;
@@ -213,6 +216,7 @@
stillCapture(false),
zslCapture(false),
rotateAndCropAuto(false),
+ autoframingAuto(false),
requestTimeNs(0),
transform(-1) {
}
@@ -220,8 +224,9 @@
InFlightRequest(int numBuffers, CaptureResultExtras extras, bool hasInput,
bool hasAppCallback, nsecs_t minDuration, nsecs_t maxDuration, bool fixedFps,
const std::set<std::set<std::string>>& physicalCameraIdSet, bool isStillCapture,
- bool isZslCapture, bool rotateAndCropAuto, const std::set<std::string>& idsWithZoom,
- nsecs_t requestNs, const SurfaceMap& outSurfaces = SurfaceMap{}) :
+ bool isZslCapture, bool rotateAndCropAuto, bool autoframingAuto,
+ const std::set<std::string>& idsWithZoom, nsecs_t requestNs,
+ const SurfaceMap& outSurfaces = SurfaceMap{}) :
shutterTimestamp(0),
sensorTimestamp(0),
requestStatus(OK),
@@ -239,6 +244,7 @@
stillCapture(isStillCapture),
zslCapture(isZslCapture),
rotateAndCropAuto(rotateAndCropAuto),
+ autoframingAuto(autoframingAuto),
cameraIdsWithZoom(idsWithZoom),
requestTimeNs(requestNs),
outputSurfaces(outSurfaces),
diff --git a/services/camera/libcameraservice/device3/UHRCropAndMeteringRegionMapper.cpp b/services/camera/libcameraservice/device3/UHRCropAndMeteringRegionMapper.cpp
index c558d91..ce7097a 100644
--- a/services/camera/libcameraservice/device3/UHRCropAndMeteringRegionMapper.cpp
+++ b/services/camera/libcameraservice/device3/UHRCropAndMeteringRegionMapper.cpp
@@ -91,6 +91,8 @@
if (meteringRegionsSetEntry.count == 1 &&
meteringRegionsSetEntry.data.u8[0] == entry.second.second) {
// metering region set by client, doesn't need to be fixed.
+ ALOGV("%s: Metering region %u set by client, they don't need to be fixed",
+ __FUNCTION__, entry.first);
continue;
}
camera_metadata_entry meteringRegionEntry = request->find(entry.first);
@@ -121,6 +123,7 @@
if (cropRegionSetEntry.count == 1 &&
cropRegionSetEntry.data.u8[0] == ANDROID_SCALER_CROP_REGION_SET_TRUE) {
// crop regions set by client, doesn't need to be fixed.
+ ALOGV("%s: crop region set by client, doesn't need to be fixed", __FUNCTION__);
return;
}
camera_metadata_entry_t cropRegionEntry = request->find(ANDROID_SCALER_CROP_REGION);
diff --git a/services/camera/libcameraservice/device3/ZoomRatioMapper.cpp b/services/camera/libcameraservice/device3/ZoomRatioMapper.cpp
index 27b00c9..aaa1b70 100644
--- a/services/camera/libcameraservice/device3/ZoomRatioMapper.cpp
+++ b/services/camera/libcameraservice/device3/ZoomRatioMapper.cpp
@@ -153,9 +153,9 @@
return;
}
- bool isUltraHighResolutionSensor =
- camera3::SessionConfigurationUtils::isUltraHighResolutionSensor(*deviceInfo);
- if (isUltraHighResolutionSensor) {
+ bool supportsUltraHighResolutionCapture =
+ camera3::SessionConfigurationUtils::supportsUltraHighResolutionCapture(*deviceInfo);
+ if (supportsUltraHighResolutionCapture) {
if (!SessionConfigurationUtils::getArrayWidthAndHeight(deviceInfo,
ANDROID_SENSOR_INFO_PRE_CORRECTION_ACTIVE_ARRAY_SIZE_MAXIMUM_RESOLUTION,
&arrayMaximumResolutionW, &arrayMaximumResolutionH)) {
@@ -354,17 +354,8 @@
if (weight == 0) {
continue;
}
- // Top left (inclusive)
- scaleCoordinates(entry.data.i32 + j, 1, zoomRatio, true /*clamp*/, arrayWidth,
+ scaleRegion(entry.data.i32 + j, zoomRatio, arrayWidth,
arrayHeight);
- // Bottom right (exclusive): Use adjacent inclusive pixel to
- // calculate.
- entry.data.i32[j+2] -= 1;
- entry.data.i32[j+3] -= 1;
- scaleCoordinates(entry.data.i32 + j + 2, 1, zoomRatio, true /*clamp*/, arrayWidth,
- arrayHeight);
- entry.data.i32[j+2] += 1;
- entry.data.i32[j+3] += 1;
}
}
@@ -401,17 +392,8 @@
if (weight == 0) {
continue;
}
- // Top-left (inclusive)
- scaleCoordinates(entry.data.i32 + j, 1, 1.0 / zoomRatio, true /*clamp*/, arrayWidth,
+ scaleRegion(entry.data.i32 + j, 1.0 / zoomRatio, arrayWidth,
arrayHeight);
- // Bottom-right (exclusive): Use adjacent inclusive pixel to
- // calculate.
- entry.data.i32[j+2] -= 1;
- entry.data.i32[j+3] -= 1;
- scaleCoordinates(entry.data.i32 + j + 2, 1, 1.0 / zoomRatio, true /*clamp*/, arrayWidth,
- arrayHeight);
- entry.data.i32[j+2] += 1;
- entry.data.i32[j+3] += 1;
}
}
for (auto rect : kRectsToCorrect) {
@@ -470,6 +452,24 @@
}
}
+void ZoomRatioMapper::scaleRegion(int32_t* region, float scaleRatio,
+ int32_t arrayWidth, int32_t arrayHeight) {
+ // Top-left (inclusive)
+ scaleCoordinates(region, 1, scaleRatio, true /*clamp*/, arrayWidth,
+ arrayHeight);
+ // Bottom-right (exclusive): Use adjacent inclusive pixel to
+ // calculate.
+ region[2] -= 1;
+ region[3] -= 1;
+ scaleCoordinates(region + 2, 1, scaleRatio, true /*clamp*/, arrayWidth,
+ arrayHeight);
+ region[2] += 1;
+ region[3] += 1;
+ // Make sure bottom-right >= top-left
+ region[2] = std::max(region[0], region[2]);
+ region[3] = std::max(region[1], region[3]);
+}
+
void ZoomRatioMapper::scaleRects(int32_t* rects, int rectCount,
float scaleRatio, int32_t arrayWidth, int32_t arrayHeight) {
for (int i = 0; i < rectCount * 4; i += 4) {
diff --git a/services/camera/libcameraservice/device3/ZoomRatioMapper.h b/services/camera/libcameraservice/device3/ZoomRatioMapper.h
index b7a9e41..1aa8e78 100644
--- a/services/camera/libcameraservice/device3/ZoomRatioMapper.h
+++ b/services/camera/libcameraservice/device3/ZoomRatioMapper.h
@@ -69,6 +69,8 @@
public: // Visible for testing. Do not use concurently.
void scaleCoordinates(int32_t* coordPairs, int coordCount,
float scaleRatio, bool clamp, int32_t arrayWidth, int32_t arrayHeight);
+ void scaleRegion(int32_t* region, float scaleRatio,
+ int32_t arrayWidth, int32_t arrayHeight);
bool isValid() { return mIsValid; }
private:
diff --git a/services/camera/libcameraservice/device3/aidl/AidlCamera3Device.cpp b/services/camera/libcameraservice/device3/aidl/AidlCamera3Device.cpp
index 1f9313e..af48dd6 100644
--- a/services/camera/libcameraservice/device3/aidl/AidlCamera3Device.cpp
+++ b/services/camera/libcameraservice/device3/aidl/AidlCamera3Device.cpp
@@ -163,9 +163,12 @@
return (uint64_t)usage;
}
-AidlCamera3Device::AidlCamera3Device(const std::string& id, bool overrideForPerfClass,
- bool overrideToPortrait, bool legacyClient) :
- Camera3Device(id, overrideForPerfClass, overrideToPortrait, legacyClient) {
+AidlCamera3Device::AidlCamera3Device(
+ std::shared_ptr<CameraServiceProxyWrapper>& cameraServiceProxyWrapper,
+ const std::string& id, bool overrideForPerfClass, bool overrideToPortrait,
+ bool legacyClient) :
+ Camera3Device(cameraServiceProxyWrapper, id, overrideForPerfClass, overrideToPortrait,
+ legacyClient) {
mCallbacks = ndk::SharedRefBase::make<AidlCameraDeviceCallbacks>(this);
}
@@ -203,6 +206,7 @@
return res;
}
mSupportNativeZoomRatio = manager->supportNativeZoomRatio(mId);
+ mIsCompositeJpegRDisabled = manager->isCompositeJpegRDisabled(mId);
std::vector<std::string> physicalCameraIds;
bool isLogical = manager->isLogicalCamera(mId, &physicalCameraIds);
@@ -236,7 +240,7 @@
&mPhysicalDeviceInfoMap[physicalId],
mSupportNativeZoomRatio, usePrecorrectArray);
- if (SessionConfigurationUtils::isUltraHighResolutionSensor(
+ if (SessionConfigurationUtils::supportsUltraHighResolutionCapture(
mPhysicalDeviceInfoMap[physicalId])) {
mUHRCropAndMeteringRegionMappers[physicalId] =
UHRCropAndMeteringRegionMapper(mPhysicalDeviceInfoMap[physicalId],
@@ -872,8 +876,9 @@
}
status_t AidlCamera3Device::AidlHalInterface::configureStreams(
- const camera_metadata_t *sessionParams,
- camera_stream_configuration *config, const std::vector<uint32_t>& bufferSizes) {
+ const camera_metadata_t *sessionParams,
+ camera_stream_configuration *config, const std::vector<uint32_t>& bufferSizes,
+ int64_t logId) {
using camera::device::StreamType;
using camera::device::StreamConfigurationMode;
@@ -915,6 +920,7 @@
cam3stream->getOriginalFormat() : src->format);
dst.dataSpace = mapToAidlDataspace(cam3stream->isDataSpaceOverridden() ?
cam3stream->getOriginalDataSpace() : src->data_space);
+ dst.colorSpace = src->color_space;
dst.bufferSize = bufferSizes[i];
if (!src->physical_camera_id.empty()) {
@@ -957,6 +963,7 @@
requestedConfiguration.streamConfigCounter = mNextStreamConfigCounter++;
requestedConfiguration.multiResolutionInputImage = config->input_is_multi_resolution;
+ requestedConfiguration.logId = logId;
auto err = mAidlSession->configureStreams(requestedConfiguration, &finalConfiguration);
if (!err.isOk()) {
ALOGE("%s: Transaction error: %s", __FUNCTION__, err.getMessage());
@@ -1413,9 +1420,10 @@
const Vector<int32_t>& sessionParamKeys,
bool useHalBufManager,
bool supportCameraMute,
- bool overrideToPortrait) :
+ bool overrideToPortrait,
+ bool supportSettingsOverride) :
RequestThread(parent, statusTracker, interface, sessionParamKeys, useHalBufManager,
- supportCameraMute, overrideToPortrait) {}
+ supportCameraMute, overrideToPortrait, supportSettingsOverride) {}
status_t AidlCamera3Device::AidlRequestThread::switchToOffline(
const std::vector<int32_t>& streamsToKeep,
@@ -1585,9 +1593,10 @@
const Vector<int32_t>& sessionParamKeys,
bool useHalBufManager,
bool supportCameraMute,
- bool overrideToPortrait) {
+ bool overrideToPortrait,
+ bool supportSettingsOverride) {
return new AidlRequestThread(parent, statusTracker, interface, sessionParamKeys,
- useHalBufManager, supportCameraMute, overrideToPortrait);
+ useHalBufManager, supportCameraMute, overrideToPortrait, supportSettingsOverride);
};
sp<Camera3Device::Camera3DeviceInjectionMethods>
diff --git a/services/camera/libcameraservice/device3/aidl/AidlCamera3Device.h b/services/camera/libcameraservice/device3/aidl/AidlCamera3Device.h
index f4554d4..e0be367 100644
--- a/services/camera/libcameraservice/device3/aidl/AidlCamera3Device.h
+++ b/services/camera/libcameraservice/device3/aidl/AidlCamera3Device.h
@@ -39,7 +39,9 @@
using AidlRequestMetadataQueue = AidlMessageQueue<int8_t, SynchronizedReadWrite>;
class AidlCameraDeviceCallbacks;
friend class AidlCameraDeviceCallbacks;
- explicit AidlCamera3Device(const std::string& id, bool overrideForPerfClass, bool overrideToPortrait,
+ explicit AidlCamera3Device(
+ std::shared_ptr<CameraServiceProxyWrapper>& cameraServiceProxyWrapper,
+ const std::string& id, bool overrideForPerfClass, bool overrideToPortrait,
bool legacyClient = false);
virtual ~AidlCamera3Device() { }
@@ -99,7 +101,9 @@
virtual status_t configureStreams(const camera_metadata_t *sessionParams,
/*inout*/ camera_stream_configuration_t *config,
- const std::vector<uint32_t>& bufferSizes) override;
+ const std::vector<uint32_t>& bufferSizes,
+ int64_t logId) override;
+
// The injection camera configures the streams to hal.
virtual status_t configureInjectedStreams(
const camera_metadata_t* sessionParams,
@@ -175,7 +179,8 @@
const Vector<int32_t>& sessionParamKeys,
bool useHalBufManager,
bool supportCameraMute,
- bool overrideToPortrait);
+ bool overrideToPortrait,
+ bool supportSettingsOverride);
status_t switchToOffline(
const std::vector<int32_t>& streamsToKeep,
@@ -261,7 +266,8 @@
const Vector<int32_t>& sessionParamKeys,
bool useHalBufManager,
bool supportCameraMute,
- bool overrideToPortrait) override;
+ bool overrideToPortrait,
+ bool supportSettingsOverride) override;
virtual sp<Camera3DeviceInjectionMethods>
createCamera3DeviceInjectionMethods(wp<Camera3Device>) override;
diff --git a/services/camera/libcameraservice/device3/hidl/HidlCamera3Device.cpp b/services/camera/libcameraservice/device3/hidl/HidlCamera3Device.cpp
index c22aad6..06af5ff 100644
--- a/services/camera/libcameraservice/device3/hidl/HidlCamera3Device.cpp
+++ b/services/camera/libcameraservice/device3/hidl/HidlCamera3Device.cpp
@@ -204,7 +204,7 @@
&mPhysicalDeviceInfoMap[physicalId],
mSupportNativeZoomRatio, usePrecorrectArray);
- if (SessionConfigurationUtils::isUltraHighResolutionSensor(
+ if (SessionConfigurationUtils::supportsUltraHighResolutionCapture(
mPhysicalDeviceInfoMap[physicalId])) {
mUHRCropAndMeteringRegionMappers[physicalId] =
UHRCropAndMeteringRegionMapper(mPhysicalDeviceInfoMap[physicalId],
@@ -705,9 +705,10 @@
const Vector<int32_t>& sessionParamKeys,
bool useHalBufManager,
bool supportCameraMute,
- bool overrideToPortrait) {
+ bool overrideToPortrait,
+ bool supportSettingsOverride) {
return new HidlRequestThread(parent, statusTracker, interface, sessionParamKeys,
- useHalBufManager, supportCameraMute, overrideToPortrait);
+ useHalBufManager, supportCameraMute, overrideToPortrait, supportSettingsOverride);
};
sp<Camera3Device::Camera3DeviceInjectionMethods>
@@ -881,7 +882,8 @@
status_t HidlCamera3Device::HidlHalInterface::configureStreams(
const camera_metadata_t *sessionParams,
- camera_stream_configuration *config, const std::vector<uint32_t>& bufferSizes) {
+ camera_stream_configuration *config, const std::vector<uint32_t>& bufferSizes,
+ int64_t /*logId*/) {
ATRACE_NAME("CameraHal::configureStreams");
if (!valid()) return INVALID_OPERATION;
status_t res = OK;
@@ -1701,9 +1703,10 @@
const Vector<int32_t>& sessionParamKeys,
bool useHalBufManager,
bool supportCameraMute,
- bool overrideToPortrait) :
+ bool overrideToPortrait,
+ bool supportSettingsOverride) :
RequestThread(parent, statusTracker, interface, sessionParamKeys, useHalBufManager,
- supportCameraMute, overrideToPortrait) {}
+ supportCameraMute, overrideToPortrait, supportSettingsOverride) {}
status_t HidlCamera3Device::HidlRequestThread::switchToOffline(
const std::vector<int32_t>& streamsToKeep,
diff --git a/services/camera/libcameraservice/device3/hidl/HidlCamera3Device.h b/services/camera/libcameraservice/device3/hidl/HidlCamera3Device.h
index e64bcf0..2cfdf9d 100644
--- a/services/camera/libcameraservice/device3/hidl/HidlCamera3Device.h
+++ b/services/camera/libcameraservice/device3/hidl/HidlCamera3Device.h
@@ -31,9 +31,12 @@
public Camera3Device {
public:
- explicit HidlCamera3Device(const std::string& id, bool overrideForPerfClass, bool overrideToPortrait,
- bool legacyClient = false) : Camera3Device(id, overrideForPerfClass, overrideToPortrait,
- legacyClient) { }
+ explicit HidlCamera3Device(
+ std::shared_ptr<CameraServiceProxyWrapper>& cameraServiceProxyWrapper,
+ const std::string& id, bool overrideForPerfClass, bool overrideToPortrait,
+ bool legacyClient = false) :
+ Camera3Device(cameraServiceProxyWrapper, id, overrideForPerfClass, overrideToPortrait,
+ legacyClient) { }
virtual ~HidlCamera3Device() {}
@@ -108,7 +111,8 @@
virtual status_t configureStreams(const camera_metadata_t *sessionParams,
/*inout*/ camera_stream_configuration_t *config,
- const std::vector<uint32_t>& bufferSizes) override;
+ const std::vector<uint32_t>& bufferSizes,
+ int64_t logId) override;
// The injection camera configures the streams to hal.
virtual status_t configureInjectedStreams(
@@ -174,7 +178,8 @@
const Vector<int32_t>& sessionParamKeys,
bool useHalBufManager,
bool supportCameraMute,
- bool overrideToPortrait);
+ bool overrideToPortrait,
+ bool supportSettingsOverride);
status_t switchToOffline(
const std::vector<int32_t>& streamsToKeep,
@@ -222,7 +227,8 @@
const Vector<int32_t>& sessionParamKeys,
bool useHalBufManager,
bool supportCameraMute,
- bool overrideToPortrait) override;
+ bool overrideToPortrait,
+ bool supportSettingsOverride) override;
virtual sp<Camera3DeviceInjectionMethods>
createCamera3DeviceInjectionMethods(wp<Camera3Device>) override;
diff --git a/services/camera/libcameraservice/hidl/AidlCameraDeviceCallbacks.cpp b/services/camera/libcameraservice/hidl/AidlCameraDeviceCallbacks.cpp
index 3392db1..de51ffa 100644
--- a/services/camera/libcameraservice/hidl/AidlCameraDeviceCallbacks.cpp
+++ b/services/camera/libcameraservice/hidl/AidlCameraDeviceCallbacks.cpp
@@ -17,6 +17,7 @@
#include <hidl/AidlCameraDeviceCallbacks.h>
#include <hidl/Utils.h>
+#include <aidl/AidlUtils.h>
namespace android {
namespace frameworks {
@@ -144,7 +145,7 @@
// Convert Metadata into HCameraMetadata;
FmqSizeOrMetadata hResult;
- using hardware::cameraservice::utils::conversion::filterVndkKeys;
+ using hardware::cameraservice::utils::conversion::aidl::filterVndkKeys;
if (filterVndkKeys(mVndkVersion, result, /*isStatic*/false) != OK) {
ALOGE("%s: filtering vndk keys from result failed, not sending onResultReceived callback",
__FUNCTION__);
diff --git a/services/camera/libcameraservice/hidl/HidlCameraDeviceUser.cpp b/services/camera/libcameraservice/hidl/HidlCameraDeviceUser.cpp
index beedba8..59fc1cd 100644
--- a/services/camera/libcameraservice/hidl/HidlCameraDeviceUser.cpp
+++ b/services/camera/libcameraservice/hidl/HidlCameraDeviceUser.cpp
@@ -19,10 +19,12 @@
#include <gui/Surface.h>
#include <gui/bufferqueue/1.0/H2BGraphicBufferProducer.h>
+#include <aidl/AidlUtils.h>
#include <hidl/AidlCameraDeviceCallbacks.h>
#include <hidl/HidlCameraDeviceUser.h>
#include <hidl/Utils.h>
#include <android/hardware/camera/device/3.2/types.h>
+#include <android-base/properties.h>
namespace android {
namespace frameworks {
@@ -31,6 +33,7 @@
namespace V2_1 {
namespace implementation {
+using hardware::cameraservice::utils::conversion::aidl::filterVndkKeys;
using hardware::cameraservice::utils::conversion::convertToHidl;
using hardware::cameraservice::utils::conversion::convertFromHidl;
using hardware::cameraservice::utils::conversion::B2HStatus;
@@ -55,6 +58,7 @@
const sp<hardware::camera2::ICameraDeviceUser> &deviceRemote)
: mDeviceRemote(deviceRemote) {
mInitSuccess = initDevice();
+ mVndkVersion = base::GetIntProperty("ro.vndk.version", __ANDROID_API_FUTURE__);
}
bool HidlCameraDeviceUser::initDevice() {
@@ -235,8 +239,16 @@
android::CameraMetadata cameraMetadata;
binder::Status ret = mDeviceRemote->createDefaultRequest(convertFromHidl(templateId),
&cameraMetadata);
- HStatus hStatus = B2HStatus(ret);
+
HCameraMetadata hidlMetadata;
+ if (filterVndkKeys(mVndkVersion, cameraMetadata, /*isStatic*/false) != OK) {
+ ALOGE("%s: Unable to filter vndk metadata keys for version %d",
+ __FUNCTION__, mVndkVersion);
+ _hidl_cb(HStatus::UNKNOWN_ERROR, hidlMetadata);
+ return Void();
+ }
+
+ HStatus hStatus = B2HStatus(ret);
const camera_metadata_t *rawMetadata = cameraMetadata.getAndLock();
convertToHidl(rawMetadata, &hidlMetadata);
_hidl_cb(hStatus, hidlMetadata);
diff --git a/services/camera/libcameraservice/hidl/HidlCameraDeviceUser.h b/services/camera/libcameraservice/hidl/HidlCameraDeviceUser.h
index 0e2ab3d..a653ca2 100644
--- a/services/camera/libcameraservice/hidl/HidlCameraDeviceUser.h
+++ b/services/camera/libcameraservice/hidl/HidlCameraDeviceUser.h
@@ -127,6 +127,7 @@
std::shared_ptr<CaptureResultMetadataQueue> mCaptureResultMetadataQueue = nullptr;
bool mInitSuccess = false;
int32_t mRequestId = REQUEST_ID_NONE;
+ int mVndkVersion = -1;
};
} // implementation
diff --git a/services/camera/libcameraservice/hidl/HidlCameraService.cpp b/services/camera/libcameraservice/hidl/HidlCameraService.cpp
index 90ba294..94bf653 100644
--- a/services/camera/libcameraservice/hidl/HidlCameraService.cpp
+++ b/services/camera/libcameraservice/hidl/HidlCameraService.cpp
@@ -21,6 +21,7 @@
#include <hidl/HidlCameraService.h>
#include <hidl/HidlCameraDeviceUser.h>
#include <hidl/Utils.h>
+#include <aidl/AidlUtils.h>
#include <hidl/HidlTransportSupport.h>
@@ -34,9 +35,9 @@
using frameworks::cameraservice::service::V2_0::implementation::HidlCameraService;
using hardware::hidl_vec;
using hardware::cameraservice::utils::conversion::convertToHidl;
-using hardware::cameraservice::utils::conversion::filterVndkKeys;
using hardware::cameraservice::utils::conversion::B2HStatus;
using hardware::Void;
+using hardware::cameraservice::utils::conversion::aidl::filterVndkKeys;
using device::V2_0::implementation::H2BCameraDeviceCallbacks;
using device::V2_1::implementation::HidlCameraDeviceUser;
diff --git a/services/camera/libcameraservice/hidl/Utils.cpp b/services/camera/libcameraservice/hidl/Utils.cpp
index ea05636..b5dddf7 100644
--- a/services/camera/libcameraservice/hidl/Utils.cpp
+++ b/services/camera/libcameraservice/hidl/Utils.cpp
@@ -15,7 +15,6 @@
*/
#include <hidl/Utils.h>
-#include <hidl/VndkVersionMetadataTags.h>
#include <gui/bufferqueue/1.0/H2BGraphicBufferProducer.h>
#include <cutils/native_handle.h>
#include <mediautils/AImageReaderUtils.h>
@@ -299,31 +298,6 @@
return hPhysicalCaptureResultInfos;
}
-status_t filterVndkKeys(int vndkVersion, CameraMetadata &metadata, bool isStatic) {
- if (vndkVersion == __ANDROID_API_FUTURE__) {
- // VNDK version in ro.vndk.version is a version code-name that
- // corresponds to the current version.
- return OK;
- }
- const auto &apiLevelToKeys =
- isStatic ? static_api_level_to_keys : dynamic_api_level_to_keys;
- // Find the vndk versions above the given vndk version. All the vndk
- // versions above the given one, need to have their keys filtered from the
- // metadata in order to avoid metadata invalidation.
- auto it = apiLevelToKeys.upper_bound(vndkVersion);
- while (it != apiLevelToKeys.end()) {
- for (const auto &key : it->second) {
- status_t res = metadata.erase(key);
- if (res != OK) {
- ALOGE("%s metadata key %d could not be erased", __FUNCTION__, key);
- return res;
- }
- }
- it++;
- }
- return OK;
-}
-
} //conversion
} // utils
} //cameraservice
diff --git a/services/camera/libcameraservice/hidl/Utils.h b/services/camera/libcameraservice/hidl/Utils.h
index e6d4393..ec06571 100644
--- a/services/camera/libcameraservice/hidl/Utils.h
+++ b/services/camera/libcameraservice/hidl/Utils.h
@@ -97,8 +97,6 @@
HStatus B2HStatus(const binder::Status &bStatus);
-status_t filterVndkKeys(int vndk_version, CameraMetadata &metadata, bool isStatic = true);
-
} // conversion
} // utils
} // cameraservice
diff --git a/services/camera/libcameraservice/libcameraservice_fuzzer/Android.bp b/services/camera/libcameraservice/libcameraservice_fuzzer/Android.bp
index 4986199..921ad7d 100644
--- a/services/camera/libcameraservice/libcameraservice_fuzzer/Android.bp
+++ b/services/camera/libcameraservice/libcameraservice_fuzzer/Android.bp
@@ -49,7 +49,7 @@
"android.hardware.camera.provider@2.5",
"android.hardware.camera.provider@2.6",
"android.hardware.camera.provider@2.7",
- "android.hardware.camera.provider-V1-ndk",
+ "android.hardware.camera.provider-V2-ndk",
"android.hardware.camera.device@1.0",
"android.hardware.camera.device@3.2",
"android.hardware.camera.device@3.3",
@@ -61,6 +61,7 @@
fuzz_config: {
cc: [
"android-media-fuzzing-reports@google.com",
+ "android-camera-fwk-eng@google.com",
],
componentid: 155276,
libfuzzer_options: [
diff --git a/services/camera/libcameraservice/libcameraservice_fuzzer/camera_service_fuzzer.cpp b/services/camera/libcameraservice/libcameraservice_fuzzer/camera_service_fuzzer.cpp
index 11a2d09..854c342 100644
--- a/services/camera/libcameraservice/libcameraservice_fuzzer/camera_service_fuzzer.cpp
+++ b/services/camera/libcameraservice/libcameraservice_fuzzer/camera_service_fuzzer.cpp
@@ -111,12 +111,15 @@
size_t mPreviewBufferCount = 0;
bool mAutoFocusMessage = false;
bool mSnapshotNotification = false;
+ bool mRecordingNotification = false;
mutable Mutex mPreviewLock;
mutable Condition mPreviewCondition;
mutable Mutex mAutoFocusLock;
mutable Condition mAutoFocusCondition;
mutable Mutex mSnapshotLock;
mutable Condition mSnapshotCondition;
+ mutable Mutex mRecordingLock;
+ mutable Condition mRecordingCondition;
void getNumCameras();
void getCameraInformation(int32_t cameraId);
@@ -125,6 +128,7 @@
void invokeDump();
void invokeShellCommand();
void invokeNotifyCalls();
+ void invokeTorchAPIs(int32_t cameraId);
// CameraClient interface
void notifyCallback(int32_t msgType, int32_t, int32_t) override;
@@ -152,6 +156,8 @@
Mutex::Autolock l(mPreviewLock);
++mPreviewBufferCount;
mPreviewCondition.broadcast();
+ mRecordingNotification = true;
+ mRecordingCondition.broadcast();
break;
}
case CAMERA_MSG_COMPRESSED_IMAGE: {
@@ -311,116 +317,155 @@
mCameraService->notifySystemEvent(eventId, args);
}
+void CameraFuzzer::invokeTorchAPIs(int32_t cameraId) {
+ std::string cameraIdStr = std::to_string(cameraId);
+ sp<IBinder> binder = new BBinder;
+
+ mCameraService->setTorchMode(cameraIdStr, true, binder);
+ ALOGV("Turned torch on.");
+ int32_t torchStrength = rand() % 5 + 1;
+ ALOGV("Changing torch strength level to %d", torchStrength);
+ mCameraService->turnOnTorchWithStrengthLevel(cameraIdStr, torchStrength, binder);
+ mCameraService->setTorchMode(cameraIdStr, false, binder);
+ ALOGV("Turned torch off.");
+}
+
void CameraFuzzer::invokeCameraAPIs() {
- for (int32_t cameraId = 0; cameraId < mNumCameras; ++cameraId) {
- getCameraInformation(cameraId);
+ /** In order to avoid the timeout issue caused due to multiple iteration of loops, the 'for'
+ * loops are removed and the 'cameraId', 'pictureSize' and 'videoSize' are derived using the
+ * FuzzedDataProvider from the available cameras and vectors of 'pictureSizes' and 'videoSizes'
+ */
+ int32_t cameraId = mFuzzedDataProvider->ConsumeIntegralInRange<int32_t>(0, mNumCameras - 1);
+ getCameraInformation(cameraId);
+ invokeTorchAPIs(cameraId);
- ::android::binder::Status rc;
- sp<ICamera> cameraDevice;
+ ::android::binder::Status rc;
+ sp<ICamera> cameraDevice;
- rc = mCameraService->connect(this, cameraId, std::string(),
- android::CameraService::USE_CALLING_UID, android::CameraService::USE_CALLING_PID,
- /*targetSdkVersion*/__ANDROID_API_FUTURE__, /*overrideToPortrait*/true,
- /*forceSlowJpegMode*/false,
- &cameraDevice);
- if (!rc.isOk()) {
- // camera not connected
- return;
+ rc = mCameraService->connect(this, cameraId, std::string(),
+ android::CameraService::USE_CALLING_UID,
+ android::CameraService::USE_CALLING_PID,
+ /*targetSdkVersion*/ __ANDROID_API_FUTURE__,
+ /*overrideToPortrait*/true, /*forceSlowJpegMode*/false,
+ &cameraDevice);
+ if (!rc.isOk()) {
+ // camera not connected
+ return;
+ }
+ if (cameraDevice) {
+ sp<Surface> previewSurface;
+ sp<SurfaceControl> surfaceControl;
+ CameraParameters params(cameraDevice->getParameters());
+ String8 focusModes(params.get(CameraParameters::KEY_SUPPORTED_FOCUS_MODES));
+ bool isAFSupported = false;
+ const char* focusMode = nullptr;
+
+ if (focusModes.contains(CameraParameters::FOCUS_MODE_AUTO)) {
+ isAFSupported = true;
+ } else if (focusModes.contains(CameraParameters::FOCUS_MODE_CONTINUOUS_PICTURE)) {
+ isAFSupported = true;
+ focusMode = CameraParameters::FOCUS_MODE_CONTINUOUS_PICTURE;
+ } else if (focusModes.contains(CameraParameters::FOCUS_MODE_CONTINUOUS_VIDEO)) {
+ isAFSupported = true;
+ focusMode = CameraParameters::FOCUS_MODE_CONTINUOUS_VIDEO;
+ } else if (focusModes.contains(CameraParameters::FOCUS_MODE_MACRO)) {
+ isAFSupported = true;
+ focusMode = CameraParameters::FOCUS_MODE_MACRO;
}
- if (cameraDevice) {
- sp<Surface> previewSurface;
- sp<SurfaceControl> surfaceControl;
- CameraParameters params(cameraDevice->getParameters());
- String8 focusModes(params.get(CameraParameters::KEY_SUPPORTED_FOCUS_MODES));
- bool isAFSupported = false;
- const char *focusMode = nullptr;
+ if (nullptr != focusMode) {
+ params.set(CameraParameters::KEY_FOCUS_MODE, focusMode);
+ cameraDevice->setParameters(params.flatten());
+ }
+ int previewWidth, previewHeight;
+ params.getPreviewSize(&previewWidth, &previewHeight);
- if (focusModes.contains(CameraParameters::FOCUS_MODE_AUTO)) {
- isAFSupported = true;
- } else if (focusModes.contains(CameraParameters::FOCUS_MODE_CONTINUOUS_PICTURE)) {
- isAFSupported = true;
- focusMode = CameraParameters::FOCUS_MODE_CONTINUOUS_PICTURE;
- } else if (focusModes.contains(CameraParameters::FOCUS_MODE_CONTINUOUS_VIDEO)) {
- isAFSupported = true;
- focusMode = CameraParameters::FOCUS_MODE_CONTINUOUS_VIDEO;
- } else if (focusModes.contains(CameraParameters::FOCUS_MODE_MACRO)) {
- isAFSupported = true;
- focusMode = CameraParameters::FOCUS_MODE_MACRO;
- }
- if (nullptr != focusMode) {
- params.set(CameraParameters::KEY_FOCUS_MODE, focusMode);
- cameraDevice->setParameters(params.flatten());
- }
- int previewWidth, previewHeight;
- params.getPreviewSize(&previewWidth, &previewHeight);
+ mComposerClient = new SurfaceComposerClient;
+ mComposerClient->initCheck();
- mComposerClient = new SurfaceComposerClient;
- mComposerClient->initCheck();
-
- bool shouldPassInvalidLayerMetaData = mFuzzedDataProvider->ConsumeBool();
- int layerMetaData;
- if (shouldPassInvalidLayerMetaData) {
- layerMetaData = mFuzzedDataProvider->ConsumeIntegral<int>();
- } else {
- layerMetaData = kLayerMetadata[mFuzzedDataProvider->ConsumeIntegralInRange<size_t>(
+ bool shouldPassInvalidLayerMetaData = mFuzzedDataProvider->ConsumeBool();
+ int layerMetaData;
+ if (shouldPassInvalidLayerMetaData) {
+ layerMetaData = mFuzzedDataProvider->ConsumeIntegral<int>();
+ } else {
+ layerMetaData = kLayerMetadata[mFuzzedDataProvider->ConsumeIntegralInRange<size_t>(
0, kNumLayerMetaData - 1)];
- }
- surfaceControl = mComposerClient->createSurface(
+ }
+ surfaceControl = mComposerClient->createSurface(
String8("Test Surface"), previewWidth, previewHeight,
CameraParameters::previewFormatToEnum(params.getPreviewFormat()), layerMetaData);
- if (surfaceControl.get() != nullptr) {
- SurfaceComposerClient::Transaction{}
+ if (surfaceControl.get()) {
+ SurfaceComposerClient::Transaction{}
.setLayer(surfaceControl, 0x7fffffff)
.show(surfaceControl)
.apply();
- previewSurface = surfaceControl->getSurface();
+ previewSurface = surfaceControl->getSurface();
+ if (previewSurface.get()) {
cameraDevice->setPreviewTarget(previewSurface->getIGraphicBufferProducer());
}
- cameraDevice->setPreviewCallbackFlag(CAMERA_FRAME_CALLBACK_FLAG_CAMCORDER);
+ }
+ cameraDevice->setPreviewCallbackFlag(CAMERA_FRAME_CALLBACK_FLAG_CAMCORDER);
- Vector<Size> pictureSizes;
- params.getSupportedPictureSizes(pictureSizes);
+ Vector<Size> pictureSizes;
+ params.getSupportedPictureSizes(pictureSizes);
- for (size_t i = 0; i < pictureSizes.size(); ++i) {
- params.setPictureSize(pictureSizes[i].width, pictureSizes[i].height);
- cameraDevice->setParameters(params.flatten());
- cameraDevice->startPreview();
- waitForPreviewStart();
- cameraDevice->autoFocus();
- waitForEvent(mAutoFocusLock, mAutoFocusCondition, mAutoFocusMessage);
- bool shouldPassInvalidCameraMsg = mFuzzedDataProvider->ConsumeBool();
- int msgType;
- if (shouldPassInvalidCameraMsg) {
- msgType = mFuzzedDataProvider->ConsumeIntegral<int>();
- } else {
- msgType = kCameraMsg[mFuzzedDataProvider->ConsumeIntegralInRange<size_t>(
+ if (pictureSizes.size()) {
+ Size pictureSize = pictureSizes[mFuzzedDataProvider->ConsumeIntegralInRange<size_t>(
+ 0, pictureSizes.size() - 1)];
+ params.setPictureSize(pictureSize.width, pictureSize.height);
+ cameraDevice->setParameters(params.flatten());
+ cameraDevice->startPreview();
+ waitForPreviewStart();
+ cameraDevice->autoFocus();
+ waitForEvent(mAutoFocusLock, mAutoFocusCondition, mAutoFocusMessage);
+ bool shouldPassInvalidCameraMsg = mFuzzedDataProvider->ConsumeBool();
+ int msgType;
+ if (shouldPassInvalidCameraMsg) {
+ msgType = mFuzzedDataProvider->ConsumeIntegral<int>();
+ } else {
+ msgType = kCameraMsg[mFuzzedDataProvider->ConsumeIntegralInRange<size_t>(
0, kNumCameraMsg - 1)];
- }
- cameraDevice->takePicture(msgType);
-
- waitForEvent(mSnapshotLock, mSnapshotCondition, mSnapshotNotification);
}
+ cameraDevice->takePicture(msgType);
- Vector<Size> videoSizes;
- params.getSupportedVideoSizes(videoSizes);
+ waitForEvent(mSnapshotLock, mSnapshotCondition, mSnapshotNotification);
+ cameraDevice->stopPreview();
+ }
- for (size_t i = 0; i < videoSizes.size(); ++i) {
- params.setVideoSize(videoSizes[i].width, videoSizes[i].height);
+ Vector<Size> videoSizes;
+ params.getSupportedVideoSizes(videoSizes);
- cameraDevice->setParameters(params.flatten());
- cameraDevice->startPreview();
- waitForPreviewStart();
- cameraDevice->setVideoBufferMode(
+ if (videoSizes.size()) {
+ Size videoSize = videoSizes[mFuzzedDataProvider->ConsumeIntegralInRange<size_t>(
+ 0, videoSizes.size() - 1)];
+ params.setVideoSize(videoSize.width, videoSize.height);
+
+ cameraDevice->setParameters(params.flatten());
+ cameraDevice->startPreview();
+ waitForPreviewStart();
+ cameraDevice->setVideoBufferMode(
android::hardware::BnCamera::VIDEO_BUFFER_MODE_BUFFER_QUEUE);
- cameraDevice->setVideoTarget(previewSurface->getIGraphicBufferProducer());
- cameraDevice->startRecording();
- cameraDevice->stopRecording();
+ sp<SurfaceControl> surfaceControlVideo = mComposerClient->createSurface(
+ String8("Test Surface Video"), previewWidth, previewHeight,
+ CameraParameters::previewFormatToEnum(params.getPreviewFormat()),
+ layerMetaData);
+ if (surfaceControlVideo.get()) {
+ SurfaceComposerClient::Transaction{}
+ .setLayer(surfaceControlVideo, 0x7fffffff)
+ .show(surfaceControlVideo)
+ .apply();
+ sp<Surface> previewSurfaceVideo = surfaceControlVideo->getSurface();
+ if (previewSurfaceVideo.get()) {
+ cameraDevice->setVideoTarget(previewSurfaceVideo->getIGraphicBufferProducer());
+ }
}
cameraDevice->stopPreview();
- cameraDevice->disconnect();
+ cameraDevice->startRecording();
+ waitForEvent(mRecordingLock, mRecordingCondition, mRecordingNotification);
+ cameraDevice->stopRecording();
}
+ cameraDevice->disconnect();
}
}
diff --git a/services/camera/libcameraservice/tests/Android.bp b/services/camera/libcameraservice/tests/Android.bp
index 3616572..5e2a3fb 100644
--- a/services/camera/libcameraservice/tests/Android.bp
+++ b/services/camera/libcameraservice/tests/Android.bp
@@ -27,8 +27,13 @@
"external/dynamic_depth/internal",
],
+ header_libs: [
+ "libmedia_headers",
+ ],
+
shared_libs: [
"libbase",
+ "libbinder",
"libcutils",
"libcameraservice",
"libhidlbase",
@@ -44,7 +49,7 @@
"android.hardware.camera.provider@2.5",
"android.hardware.camera.provider@2.6",
"android.hardware.camera.provider@2.7",
- "android.hardware.camera.provider-V1-ndk",
+ "android.hardware.camera.provider-V2-ndk",
"android.hardware.camera.device@1.0",
"android.hardware.camera.device@3.2",
"android.hardware.camera.device@3.4",
@@ -57,6 +62,7 @@
],
srcs: [
+ "CameraPermissionsTest.cpp",
"CameraProviderManagerTest.cpp",
"ClientManagerTest.cpp",
"DepthProcessorTest.cpp",
diff --git a/services/camera/libcameraservice/tests/CameraPermissionsTest.cpp b/services/camera/libcameraservice/tests/CameraPermissionsTest.cpp
new file mode 100644
index 0000000..db43a02
--- /dev/null
+++ b/services/camera/libcameraservice/tests/CameraPermissionsTest.cpp
@@ -0,0 +1,303 @@
+/*
+ * Copyright (C) 2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <android/hardware/BnCameraServiceListener.h>
+#include <android/hardware/BnCameraServiceProxy.h>
+#include <android/hardware/camera2/BnCameraDeviceCallbacks.h>
+#include <android/hardware/ICameraService.h>
+
+#include <private/android_filesystem_config.h>
+
+#include "../CameraService.h"
+#include "../utils/CameraServiceProxyWrapper.h"
+
+#include <gtest/gtest.h>
+
+#include <memory>
+#include <vector>
+
+using namespace android;
+using namespace android::hardware::camera;
+
+// Empty service listener.
+class TestCameraServiceListener : public hardware::BnCameraServiceListener {
+public:
+ virtual ~TestCameraServiceListener() {};
+
+ virtual binder::Status onStatusChanged(int32_t , const std::string&) {
+ return binder::Status::ok();
+ };
+
+ virtual binder::Status onPhysicalCameraStatusChanged(int32_t /*status*/,
+ const std::string& /*cameraId*/, const std::string& /*physicalCameraId*/) {
+ // No op
+ return binder::Status::ok();
+ };
+
+ virtual binder::Status onTorchStatusChanged(int32_t /*status*/,
+ const std::string& /*cameraId*/) {
+ return binder::Status::ok();
+ };
+
+ virtual binder::Status onCameraAccessPrioritiesChanged() {
+ // No op
+ return binder::Status::ok();
+ }
+
+ virtual binder::Status onCameraOpened(const std::string& /*cameraId*/,
+ const std::string& /*clientPackageName*/) {
+ // No op
+ return binder::Status::ok();
+ }
+
+ virtual binder::Status onCameraClosed(const std::string& /*cameraId*/) {
+ // No op
+ return binder::Status::ok();
+ }
+
+ virtual binder::Status onTorchStrengthLevelChanged(const std::string& /*cameraId*/,
+ int32_t /*torchStrength*/) {
+ // No op
+ return binder::Status::ok();
+ }
+};
+
+// Empty device callback.
+class TestCameraDeviceCallbacks : public hardware::camera2::BnCameraDeviceCallbacks {
+public:
+ TestCameraDeviceCallbacks() {}
+
+ virtual ~TestCameraDeviceCallbacks() {}
+
+ virtual binder::Status onDeviceError(int /*errorCode*/,
+ const CaptureResultExtras& /*resultExtras*/) {
+ return binder::Status::ok();
+ }
+
+ virtual binder::Status onDeviceIdle() {
+ return binder::Status::ok();
+ }
+
+ virtual binder::Status onCaptureStarted(const CaptureResultExtras& /*resultExtras*/,
+ int64_t /*timestamp*/) {
+ return binder::Status::ok();
+ }
+
+ virtual binder::Status onResultReceived(const CameraMetadata& /*metadata*/,
+ const CaptureResultExtras& /*resultExtras*/,
+ const std::vector<PhysicalCaptureResultInfo>& /*physicalResultInfos*/) {
+ return binder::Status::ok();
+ }
+
+ virtual binder::Status onPrepared(int /*streamId*/) {
+ return binder::Status::ok();
+ }
+
+ virtual binder::Status onRepeatingRequestError(
+ int64_t /*lastFrameNumber*/, int32_t /*stoppedSequenceId*/) {
+ return binder::Status::ok();
+ }
+
+ virtual binder::Status onRequestQueueEmpty() {
+ return binder::Status::ok();
+ }
+};
+
+// Override isCameraDisabled from the CameraServiceProxy with a flag.
+class CameraServiceProxyOverride : public ::android::hardware::BnCameraServiceProxy {
+public:
+ CameraServiceProxyOverride() :
+ mCameraServiceProxy(CameraServiceProxyWrapper::getDefaultCameraServiceProxy()),
+ mCameraDisabled(false), mOverrideCameraDisabled(false)
+ { }
+
+ virtual binder::Status getRotateAndCropOverride(const std::string& packageName, int lensFacing,
+ int userId, int *ret) override {
+ return mCameraServiceProxy->getRotateAndCropOverride(packageName, lensFacing,
+ userId, ret);
+ }
+
+ virtual binder::Status getAutoframingOverride(const std::string& packageName, int *ret) override {
+ return mCameraServiceProxy->getAutoframingOverride(packageName, ret);
+ }
+
+ virtual binder::Status pingForUserUpdate() override {
+ return mCameraServiceProxy->pingForUserUpdate();
+ }
+
+ virtual binder::Status notifyCameraState(
+ const hardware::CameraSessionStats& cameraSessionStats) override {
+ return mCameraServiceProxy->notifyCameraState(cameraSessionStats);
+ }
+
+ virtual binder::Status isCameraDisabled(int userId, bool *ret) override {
+ if (mOverrideCameraDisabled) {
+ *ret = mCameraDisabled;
+ return binder::Status::ok();
+ }
+ return mCameraServiceProxy->isCameraDisabled(userId, ret);
+ }
+
+ void setCameraDisabled(bool cameraDisabled) {
+ mCameraDisabled = cameraDisabled;
+ }
+
+ void setOverrideCameraDisabled(bool overrideCameraDisabled) {
+ mOverrideCameraDisabled = overrideCameraDisabled;
+ }
+
+protected:
+ sp<hardware::ICameraServiceProxy> mCameraServiceProxy;
+ bool mCameraDisabled;
+ bool mOverrideCameraDisabled;
+};
+
+class AutoDisconnectDevice {
+public:
+ AutoDisconnectDevice(sp<hardware::camera2::ICameraDeviceUser> device) :
+ mDevice(device)
+ { }
+
+ ~AutoDisconnectDevice() {
+ if (mDevice != nullptr) {
+ mDevice->disconnect();
+ }
+ }
+
+private:
+ sp<hardware::camera2::ICameraDeviceUser> mDevice;
+};
+
+class CameraPermissionsTest : public ::testing::Test {
+protected:
+ static sp<CameraService> sCameraService;
+ static sp<CameraServiceProxyOverride> sCameraServiceProxy;
+ static std::shared_ptr<CameraServiceProxyWrapper> sCameraServiceProxyWrapper;
+ static uid_t sOldUid;
+
+ static void SetUpTestSuite() {
+ sOldUid = getuid();
+ setuid(AID_CAMERASERVER);
+ sCameraServiceProxy = new CameraServiceProxyOverride();
+ sCameraServiceProxyWrapper =
+ std::make_shared<CameraServiceProxyWrapper>(sCameraServiceProxy);
+ sCameraService = new CameraService(sCameraServiceProxyWrapper);
+ sCameraService->clearCachedVariables();
+ }
+
+ static void TearDownTestSuite() {
+ sCameraServiceProxyWrapper = nullptr;
+ sCameraServiceProxy = nullptr;
+ sCameraService = nullptr;
+ setuid(sOldUid);
+ }
+};
+
+sp<CameraService> CameraPermissionsTest::sCameraService = nullptr;
+sp<CameraServiceProxyOverride> CameraPermissionsTest::sCameraServiceProxy = nullptr;
+std::shared_ptr<CameraServiceProxyWrapper>
+CameraPermissionsTest::sCameraServiceProxyWrapper = nullptr;
+uid_t CameraPermissionsTest::sOldUid = 0;
+
+// Test that camera connections fail with ERROR_DISABLED when the camera is disabled via device
+// policy, and succeed when it isn't.
+TEST_F(CameraPermissionsTest, TestCameraDisabled) {
+ std::vector<hardware::CameraStatus> statuses;
+ sp<TestCameraServiceListener> serviceListener = new TestCameraServiceListener();
+ sCameraService->addListenerTest(serviceListener, &statuses);
+ sCameraServiceProxy->setOverrideCameraDisabled(true);
+
+ sCameraServiceProxy->setCameraDisabled(true);
+ for (auto s : statuses) {
+ sp<TestCameraDeviceCallbacks> callbacks = new TestCameraDeviceCallbacks();
+ sp<hardware::camera2::ICameraDeviceUser> device;
+ binder::Status status =
+ sCameraService->connectDevice(callbacks, s.cameraId, std::string(), {},
+ android::CameraService::USE_CALLING_UID, 0/*oomScoreDiff*/,
+ /*targetSdkVersion*/__ANDROID_API_FUTURE__, /*overrideToPortrait*/false, &device);
+ AutoDisconnectDevice autoDisconnect(device);
+ ASSERT_TRUE(!status.isOk()) << "connectDevice returned OK status";
+ ASSERT_EQ(status.serviceSpecificErrorCode(), hardware::ICameraService::ERROR_DISABLED)
+ << "connectDevice returned exception code " << status.exceptionCode();
+ }
+
+ sCameraServiceProxy->setCameraDisabled(false);
+ for (auto s : statuses) {
+ sp<TestCameraDeviceCallbacks> callbacks = new TestCameraDeviceCallbacks();
+ sp<hardware::camera2::ICameraDeviceUser> device;
+ binder::Status status =
+ sCameraService->connectDevice(callbacks, s.cameraId, std::string(), {},
+ android::CameraService::USE_CALLING_UID, 0/*oomScoreDiff*/,
+ /*targetSdkVersion*/__ANDROID_API_FUTURE__, /*overrideToPortrait*/false, &device);
+ AutoDisconnectDevice autoDisconnect(device);
+ ASSERT_TRUE(status.isOk());
+ }
+}
+
+// Test that consecutive camera connections succeed.
+TEST_F(CameraPermissionsTest, TestConsecutiveConnections) {
+ std::vector<hardware::CameraStatus> statuses;
+ sp<TestCameraServiceListener> serviceListener = new TestCameraServiceListener();
+ sCameraService->addListenerTest(serviceListener, &statuses);
+ sCameraServiceProxy->setOverrideCameraDisabled(false);
+
+ for (auto s : statuses) {
+ sp<TestCameraDeviceCallbacks> callbacks = new TestCameraDeviceCallbacks();
+ sp<hardware::camera2::ICameraDeviceUser> deviceA, deviceB;
+ binder::Status status =
+ sCameraService->connectDevice(callbacks, s.cameraId, std::string(), {},
+ android::CameraService::USE_CALLING_UID, 0/*oomScoreDiff*/,
+ /*targetSdkVersion*/__ANDROID_API_FUTURE__, /*overrideToPortrait*/false, &deviceA);
+ AutoDisconnectDevice autoDisconnectA(deviceA);
+ ASSERT_TRUE(status.isOk()) << "Exception code " << status.exceptionCode() <<
+ " service specific error code " << status.serviceSpecificErrorCode();
+ status =
+ sCameraService->connectDevice(callbacks, s.cameraId, std::string(), {},
+ android::CameraService::USE_CALLING_UID, 0/*oomScoreDiff*/,
+ /*targetSdkVersion*/__ANDROID_API_FUTURE__, /*overrideToPortrait*/false, &deviceB);
+ AutoDisconnectDevice autoDisconnectB(deviceB);
+ ASSERT_TRUE(status.isOk()) << "Exception code " << status.exceptionCode() <<
+ " service specific error code " << status.serviceSpecificErrorCode();
+ }
+}
+
+// Test that consecutive camera connections succeed even when a nonzero oomScoreOffset is provided
+// in the second call.
+TEST_F(CameraPermissionsTest, TestConflictingOomScoreOffset) {
+ std::vector<hardware::CameraStatus> statuses;
+ sp<TestCameraServiceListener> serviceListener = new TestCameraServiceListener();
+ sCameraService->addListenerTest(serviceListener, &statuses);
+ sCameraServiceProxy->setOverrideCameraDisabled(false);
+
+ for (auto s : statuses) {
+ sp<TestCameraDeviceCallbacks> callbacks = new TestCameraDeviceCallbacks();
+ sp<hardware::camera2::ICameraDeviceUser> deviceA, deviceB;
+ binder::Status status =
+ sCameraService->connectDevice(callbacks, s.cameraId, std::string(), {},
+ android::CameraService::USE_CALLING_UID, 0/*oomScoreDiff*/,
+ /*targetSdkVersion*/__ANDROID_API_FUTURE__, /*overrideToPortrait*/false, &deviceA);
+ AutoDisconnectDevice autoDisconnectA(deviceA);
+ ASSERT_TRUE(status.isOk()) << "Exception code " << status.exceptionCode() <<
+ " service specific error code " << status.serviceSpecificErrorCode();
+ status =
+ sCameraService->connectDevice(callbacks, s.cameraId, std::string(), {},
+ android::CameraService::USE_CALLING_UID, 1/*oomScoreDiff*/,
+ /*targetSdkVersion*/__ANDROID_API_FUTURE__, /*overrideToPortrait*/false, &deviceB);
+ AutoDisconnectDevice autoDisconnectB(deviceB);
+ ASSERT_TRUE(status.isOk()) << "Exception code " << status.exceptionCode() <<
+ " service specific error code " << status.serviceSpecificErrorCode();
+ }
+}
diff --git a/services/camera/libcameraservice/tests/ZoomRatioTest.cpp b/services/camera/libcameraservice/tests/ZoomRatioTest.cpp
index b3a1d18..badd47a 100644
--- a/services/camera/libcameraservice/tests/ZoomRatioTest.cpp
+++ b/services/camera/libcameraservice/tests/ZoomRatioTest.cpp
@@ -252,6 +252,19 @@
for (size_t i = 0; i < coords.size(); i++) {
EXPECT_LE(std::abs(coords[i] - expectedZoomOutCoords[i]), kMaxAllowedPixelError);
}
+
+ // Verify region zoom scaling doesn't generate invalid metering region
+ // (width < 0, or height < 0)
+ std::array<float, 3> scaleRatios = {10.0f, 1.0f, 0.1f};
+ for (float scaleRatio : scaleRatios) {
+ for (size_t i = 0; i < originalCoords.size(); i+= 2) {
+ int32_t coordinates[] = {originalCoords[i], originalCoords[i+1],
+ originalCoords[i], originalCoords[i+1]};
+ mapper.scaleRegion(coordinates, scaleRatio, width, height);
+ EXPECT_LE(coordinates[0], coordinates[2]);
+ EXPECT_LE(coordinates[1], coordinates[3]);
+ }
+ }
}
TEST(ZoomRatioTest, scaleCoordinatesTest) {
diff --git a/services/camera/libcameraservice/utils/CameraServiceProxyWrapper.cpp b/services/camera/libcameraservice/utils/CameraServiceProxyWrapper.cpp
index f6ad2fe..d07bf6d 100644
--- a/services/camera/libcameraservice/utils/CameraServiceProxyWrapper.cpp
+++ b/services/camera/libcameraservice/utils/CameraServiceProxyWrapper.cpp
@@ -28,32 +28,41 @@
namespace android {
-using hardware::ICameraServiceProxy;
+using hardware::CameraExtensionSessionStats;
using hardware::CameraSessionStats;
+using hardware::ICameraServiceProxy;
-Mutex CameraServiceProxyWrapper::sProxyMutex;
-sp<hardware::ICameraServiceProxy> CameraServiceProxyWrapper::sCameraServiceProxy;
-
-Mutex CameraServiceProxyWrapper::mLock;
-std::map<std::string, std::shared_ptr<CameraServiceProxyWrapper::CameraSessionStatsWrapper>>
- CameraServiceProxyWrapper::mSessionStatsMap;
+namespace {
+// Sentinel value to be returned when extension session with a stale or invalid key is reported.
+const std::string POISON_EXT_STATS_KEY("poisoned_stats");
+} // anonymous namespace
/**
* CameraSessionStatsWrapper functions
*/
-void CameraServiceProxyWrapper::CameraSessionStatsWrapper::onOpen() {
- Mutex::Autolock l(mLock);
-
- updateProxyDeviceState(mSessionStats);
+void CameraServiceProxyWrapper::CameraSessionStatsWrapper::updateProxyDeviceState(
+ sp<hardware::ICameraServiceProxy>& proxyBinder) {
+ if (proxyBinder == nullptr) return;
+ proxyBinder->notifyCameraState(mSessionStats);
}
-void CameraServiceProxyWrapper::CameraSessionStatsWrapper::onClose(int32_t latencyMs) {
+void CameraServiceProxyWrapper::CameraSessionStatsWrapper::onOpen(
+ sp<hardware::ICameraServiceProxy>& proxyBinder) {
+ Mutex::Autolock l(mLock);
+ updateProxyDeviceState(proxyBinder);
+}
+
+void CameraServiceProxyWrapper::CameraSessionStatsWrapper::onClose(
+ sp<hardware::ICameraServiceProxy>& proxyBinder, int32_t latencyMs,
+ bool deviceError) {
Mutex::Autolock l(mLock);
mSessionStats.mNewCameraState = CameraSessionStats::CAMERA_STATE_CLOSED;
mSessionStats.mLatencyMs = latencyMs;
- updateProxyDeviceState(mSessionStats);
+ mSessionStats.mDeviceError = deviceError;
+ mSessionStats.mSessionIndex = 0;
+ updateProxyDeviceState(proxyBinder);
}
void CameraServiceProxyWrapper::CameraSessionStatsWrapper::onStreamConfigured(
@@ -68,12 +77,14 @@
}
}
-void CameraServiceProxyWrapper::CameraSessionStatsWrapper::onActive(float maxPreviewFps) {
+void CameraServiceProxyWrapper::CameraSessionStatsWrapper::onActive(
+ sp<hardware::ICameraServiceProxy>& proxyBinder, float maxPreviewFps) {
Mutex::Autolock l(mLock);
mSessionStats.mNewCameraState = CameraSessionStats::CAMERA_STATE_ACTIVE;
mSessionStats.mMaxPreviewFps = maxPreviewFps;
- updateProxyDeviceState(mSessionStats);
+ mSessionStats.mSessionIndex++;
+ updateProxyDeviceState(proxyBinder);
// Reset mCreationDuration to -1 to distinguish between 1st session
// after configuration, and all other sessions after configuration.
@@ -81,6 +92,7 @@
}
void CameraServiceProxyWrapper::CameraSessionStatsWrapper::onIdle(
+ sp<hardware::ICameraServiceProxy>& proxyBinder,
int64_t requestCount, int64_t resultErrorCount, bool deviceError,
const std::string& userTag, int32_t videoStabilizationMode,
const std::vector<hardware::CameraStreamStats>& streamStats) {
@@ -93,10 +105,76 @@
mSessionStats.mUserTag = userTag;
mSessionStats.mVideoStabilizationMode = videoStabilizationMode;
mSessionStats.mStreamStats = streamStats;
- updateProxyDeviceState(mSessionStats);
+
+ updateProxyDeviceState(proxyBinder);
mSessionStats.mInternalReconfigure = 0;
mSessionStats.mStreamStats.clear();
+ mSessionStats.mCameraExtensionSessionStats = {};
+}
+
+int64_t CameraServiceProxyWrapper::CameraSessionStatsWrapper::getLogId() {
+ Mutex::Autolock l(mLock);
+ return mSessionStats.mLogId;
+}
+
+std::string CameraServiceProxyWrapper::CameraSessionStatsWrapper::updateExtensionSessionStats(
+ const hardware::CameraExtensionSessionStats& extStats) {
+ Mutex::Autolock l(mLock);
+ CameraExtensionSessionStats& currStats = mSessionStats.mCameraExtensionSessionStats;
+ if (currStats.key != extStats.key) {
+ // Mismatched keys. Extensions stats likely reported for a closed session
+ ALOGW("%s: mismatched extensions stats key: current='%s' reported='%s'. Dropping stats.",
+ __FUNCTION__, toStdString(currStats.key).c_str(), toStdString(extStats.key).c_str());
+ return POISON_EXT_STATS_KEY; // return poisoned key to so future calls are
+ // definitely dropped.
+ }
+
+ // Matching keys...
+ if (currStats.key.size()) {
+ // non-empty matching keys. overwrite.
+ ALOGV("%s: Overwriting extension session stats: %s", __FUNCTION__,
+ extStats.toString().c_str());
+ currStats = extStats;
+ return toStdString(currStats.key);
+ }
+
+ // Matching empty keys...
+ if (mSessionStats.mClientName != toStdString(extStats.clientName)) {
+ ALOGW("%s: extension stats reported for unexpected package: current='%s' reported='%s'. "
+ "Dropping stats.", __FUNCTION__,
+ mSessionStats.mClientName.c_str(),
+ toStdString(extStats.clientName).c_str());
+ return POISON_EXT_STATS_KEY;
+ }
+
+ // Matching empty keys for the current client...
+ if (mSessionStats.mNewCameraState == CameraSessionStats::CAMERA_STATE_OPEN ||
+ mSessionStats.mNewCameraState == CameraSessionStats::CAMERA_STATE_IDLE) {
+ // Camera is open, but not active. It is possible that the active callback hasn't
+ // occurred yet. Keep the stats, but don't associate it with any session.
+ ALOGV("%s: extension stat reported for an open, but not active camera. "
+ "Saving stats, but not generating key.", __FUNCTION__);
+ currStats = extStats;
+ return {}; // Subsequent calls will handle setting the correct key.
+ }
+
+ if (mSessionStats.mNewCameraState == CameraSessionStats::CAMERA_STATE_ACTIVE) {
+ // camera is active. First call for the session!
+ currStats = extStats;
+
+ // Generate a new key from logId and sessionIndex.
+ std::ostringstream key;
+ key << mSessionStats.mSessionIndex << '/' << mSessionStats.mLogId;
+ currStats.key = String16(key.str().c_str());
+ ALOGV("%s: New extension session stats: %s", __FUNCTION__, currStats.toString().c_str());
+ return toStdString(currStats.key);
+ }
+
+ // Camera is closed. Probably a stale call.
+ ALOGW("%s: extension stats reported for closed camera id '%s'. Dropping stats.",
+ __FUNCTION__, mSessionStats.mCameraId.c_str());
+ return {};
}
/**
@@ -105,19 +183,26 @@
sp<ICameraServiceProxy> CameraServiceProxyWrapper::getCameraServiceProxy() {
#ifndef __BRILLO__
- Mutex::Autolock al(sProxyMutex);
- if (sCameraServiceProxy == nullptr) {
- sp<IServiceManager> sm = defaultServiceManager();
- // Use checkService because cameraserver normally starts before the
- // system server and the proxy service. So the long timeout that getService
- // has before giving up is inappropriate.
- sp<IBinder> binder = sm->checkService(String16("media.camera.proxy"));
- if (binder != nullptr) {
- sCameraServiceProxy = interface_cast<ICameraServiceProxy>(binder);
- }
+ Mutex::Autolock al(mProxyMutex);
+ if (mCameraServiceProxy == nullptr) {
+ mCameraServiceProxy = getDefaultCameraServiceProxy();
}
#endif
- return sCameraServiceProxy;
+ return mCameraServiceProxy;
+}
+
+sp<hardware::ICameraServiceProxy> CameraServiceProxyWrapper::getDefaultCameraServiceProxy() {
+#ifndef __BRILLO__
+ sp<IServiceManager> sm = defaultServiceManager();
+ // Use checkService because cameraserver normally starts before the
+ // system server and the proxy service. So the long timeout that getService
+ // has before giving up is inappropriate.
+ sp<IBinder> binder = sm->checkService(String16("media.camera.proxy"));
+ if (binder != nullptr) {
+ return interface_cast<ICameraServiceProxy>(binder);
+ }
+#endif
+ return nullptr;
}
void CameraServiceProxyWrapper::pingCameraServiceProxy() {
@@ -141,10 +226,19 @@
return ret;
}
-void CameraServiceProxyWrapper::updateProxyDeviceState(const CameraSessionStats& sessionStats) {
+int CameraServiceProxyWrapper::getAutoframingOverride(const std::string& packageName) {
sp<ICameraServiceProxy> proxyBinder = getCameraServiceProxy();
- if (proxyBinder == nullptr) return;
- proxyBinder->notifyCameraState(sessionStats);
+ if (proxyBinder == nullptr) {
+ return ANDROID_CONTROL_AUTOFRAMING_OFF;
+ }
+ int ret = 0;
+ auto status = proxyBinder->getAutoframingOverride(packageName, &ret);
+ if (!status.isOk()) {
+ ALOGE("%s: Failed during autoframing override query: %s", __FUNCTION__,
+ status.exceptionMessage().c_str());
+ }
+
+ return ret;
}
void CameraServiceProxyWrapper::logStreamConfigured(const std::string& id,
@@ -152,12 +246,12 @@
std::shared_ptr<CameraSessionStatsWrapper> sessionStats;
{
Mutex::Autolock l(mLock);
- sessionStats = mSessionStatsMap[id];
- if (sessionStats == nullptr) {
+ if (mSessionStatsMap.count(id) == 0) {
ALOGE("%s: SessionStatsMap should contain camera %s",
__FUNCTION__, id.c_str());
return;
}
+ sessionStats = mSessionStatsMap[id];
}
ALOGV("%s: id %s, operatingMode %d, internalConfig %d, latencyMs %d",
@@ -169,16 +263,17 @@
std::shared_ptr<CameraSessionStatsWrapper> sessionStats;
{
Mutex::Autolock l(mLock);
- sessionStats = mSessionStatsMap[id];
- if (sessionStats == nullptr) {
+ if (mSessionStatsMap.count(id) == 0) {
ALOGE("%s: SessionStatsMap should contain camera %s when logActive is called",
__FUNCTION__, id.c_str());
return;
}
+ sessionStats = mSessionStatsMap[id];
}
ALOGV("%s: id %s", __FUNCTION__, id.c_str());
- sessionStats->onActive(maxPreviewFps);
+ sp<hardware::ICameraServiceProxy> proxyBinder = getCameraServiceProxy();
+ sessionStats->onActive(proxyBinder, maxPreviewFps);
}
void CameraServiceProxyWrapper::logIdle(const std::string& id,
@@ -188,13 +283,12 @@
std::shared_ptr<CameraSessionStatsWrapper> sessionStats;
{
Mutex::Autolock l(mLock);
- sessionStats = mSessionStatsMap[id];
- }
-
- if (sessionStats == nullptr) {
- ALOGE("%s: SessionStatsMap should contain camera %s when logIdle is called",
+ if (mSessionStatsMap.count(id) == 0) {
+ ALOGE("%s: SessionStatsMap should contain camera %s when logIdle is called",
__FUNCTION__, id.c_str());
- return;
+ return;
+ }
+ sessionStats = mSessionStatsMap[id];
}
ALOGV("%s: id %s, requestCount %" PRId64 ", resultErrorCount %" PRId64 ", deviceError %d"
@@ -208,7 +302,8 @@
streamStats[i].mStartLatencyMs);
}
- sessionStats->onIdle(requestCount, resultErrorCount, deviceError, userTag,
+ sp<hardware::ICameraServiceProxy> proxyBinder = getCameraServiceProxy();
+ sessionStats->onIdle(proxyBinder, requestCount, resultErrorCount, deviceError, userTag,
videoStabilizationMode, streamStats);
}
@@ -229,19 +324,24 @@
apiLevel = CameraSessionStats::CAMERA_API_LEVEL_2;
}
- sessionStats = std::make_shared<CameraSessionStatsWrapper>(id, facing,
- CameraSessionStats::CAMERA_STATE_OPEN, clientPackageName,
- apiLevel, isNdk, latencyMs);
+ // Generate a new log ID for open events
+ int64_t logId = generateLogId(mRandomDevice);
+
+ sessionStats = std::make_shared<CameraSessionStatsWrapper>(
+ id, facing, CameraSessionStats::CAMERA_STATE_OPEN, clientPackageName,
+ apiLevel, isNdk, latencyMs, logId);
mSessionStatsMap.emplace(id, sessionStats);
ALOGV("%s: Adding id %s", __FUNCTION__, id.c_str());
}
ALOGV("%s: id %s, facing %d, effectiveApiLevel %d, isNdk %d, latencyMs %d",
__FUNCTION__, id.c_str(), facing, effectiveApiLevel, isNdk, latencyMs);
- sessionStats->onOpen();
+ sp<hardware::ICameraServiceProxy> proxyBinder = getCameraServiceProxy();
+ sessionStats->onOpen(proxyBinder);
}
-void CameraServiceProxyWrapper::logClose(const std::string& id, int32_t latencyMs) {
+void CameraServiceProxyWrapper::logClose(const std::string& id, int32_t latencyMs,
+ bool deviceError) {
std::shared_ptr<CameraSessionStatsWrapper> sessionStats;
{
Mutex::Autolock l(mLock);
@@ -257,12 +357,15 @@
__FUNCTION__, id.c_str());
return;
}
+
mSessionStatsMap.erase(id);
- ALOGV("%s: Erasing id %s", __FUNCTION__, id.c_str());
+ ALOGV("%s: Erasing id %s, deviceError %d", __FUNCTION__, id.c_str(), deviceError);
}
- ALOGV("%s: id %s, latencyMs %d", __FUNCTION__, id.c_str(), latencyMs);
- sessionStats->onClose(latencyMs);
+ ALOGV("%s: id %s, latencyMs %d, deviceError %d", __FUNCTION__,
+ id.c_str(), latencyMs, deviceError);
+ sp<hardware::ICameraServiceProxy> proxyBinder = getCameraServiceProxy();
+ sessionStats->onClose(proxyBinder, latencyMs, deviceError);
}
bool CameraServiceProxyWrapper::isCameraDisabled(int userId) {
@@ -277,4 +380,48 @@
return ret;
}
-}; // namespace android
+int64_t CameraServiceProxyWrapper::getCurrentLogIdForCamera(const std::string& cameraId) {
+ std::shared_ptr<CameraSessionStatsWrapper> stats;
+ {
+ Mutex::Autolock _l(mLock);
+ if (mSessionStatsMap.count(cameraId) == 0) {
+ ALOGE("%s: SessionStatsMap should contain camera %s before asking for its logging ID.",
+ __FUNCTION__, cameraId.c_str());
+ return 0;
+ }
+
+ stats = mSessionStatsMap[cameraId];
+ }
+ return stats->getLogId();
+}
+
+int64_t CameraServiceProxyWrapper::generateLogId(std::random_device& randomDevice) {
+ int64_t ret = 0;
+ do {
+ // std::random_device generates 32 bits per call, so we call it twice
+ ret = randomDevice();
+ ret = ret << 32;
+ ret = ret | randomDevice();
+ } while (ret == 0); // 0 is not a valid identifier
+
+ return ret;
+}
+
+std::string CameraServiceProxyWrapper::updateExtensionStats(
+ const hardware::CameraExtensionSessionStats& extStats) {
+ std::shared_ptr<CameraSessionStatsWrapper> stats;
+ std::string cameraId = toStdString(extStats.cameraId);
+ {
+ Mutex::Autolock _l(mLock);
+ if (mSessionStatsMap.count(cameraId) == 0) {
+ ALOGE("%s CameraExtensionSessionStats reported for camera id that isn't open: %s",
+ __FUNCTION__, cameraId.c_str());
+ return {};
+ }
+
+ stats = mSessionStatsMap[cameraId];
+ return stats->updateExtensionSessionStats(extStats);
+ }
+}
+
+} // namespace android
diff --git a/services/camera/libcameraservice/utils/CameraServiceProxyWrapper.h b/services/camera/libcameraservice/utils/CameraServiceProxyWrapper.h
index aee875f..1afe5b3 100644
--- a/services/camera/libcameraservice/utils/CameraServiceProxyWrapper.h
+++ b/services/camera/libcameraservice/utils/CameraServiceProxyWrapper.h
@@ -22,6 +22,7 @@
#include <utils/Mutex.h>
#include <utils/StrongPointer.h>
#include <utils/Timers.h>
+#include <random>
#include <string>
#include <camera/CameraSessionStats.h>
@@ -31,72 +32,106 @@
class CameraServiceProxyWrapper {
private:
// Guard mCameraServiceProxy
- static Mutex sProxyMutex;
+ Mutex mProxyMutex;
// Cached interface to the camera service proxy in system service
- static sp<hardware::ICameraServiceProxy> sCameraServiceProxy;
+ sp<hardware::ICameraServiceProxy> mCameraServiceProxy;
- struct CameraSessionStatsWrapper {
+ class CameraSessionStatsWrapper {
+ private:
hardware::CameraSessionStats mSessionStats;
Mutex mLock; // lock for per camera session stats
- CameraSessionStatsWrapper(const std::string& cameraId, int facing, int newCameraState,
- const std::string& clientName, int apiLevel, bool isNdk, int32_t latencyMs) :
- mSessionStats(cameraId, facing, newCameraState, clientName, apiLevel, isNdk, latencyMs)
- {}
+ /**
+ * Update the session stats of a given camera device (open/close/active/idle) with
+ * the camera proxy service in the system service
+ */
+ void updateProxyDeviceState(sp<hardware::ICameraServiceProxy>& proxyBinder);
- void onOpen();
- void onClose(int32_t latencyMs);
+ public:
+ CameraSessionStatsWrapper(const std::string& cameraId, int facing, int newCameraState,
+ const std::string& clientName, int apiLevel, bool isNdk,
+ int32_t latencyMs, int64_t logId)
+ : mSessionStats(cameraId, facing, newCameraState, clientName, apiLevel, isNdk,
+ latencyMs, logId) {}
+
+ void onOpen(sp<hardware::ICameraServiceProxy>& proxyBinder);
+ void onClose(sp<hardware::ICameraServiceProxy>& proxyBinder, int32_t latencyMs,
+ bool deviceError);
void onStreamConfigured(int operatingMode, bool internalReconfig, int32_t latencyMs);
- void onActive(float maxPreviewFps);
- void onIdle(int64_t requestCount, int64_t resultErrorCount, bool deviceError,
+ void onActive(sp<hardware::ICameraServiceProxy>& proxyBinder, float maxPreviewFps);
+ void onIdle(sp<hardware::ICameraServiceProxy>& proxyBinder,
+ int64_t requestCount, int64_t resultErrorCount, bool deviceError,
const std::string& userTag, int32_t videoStabilizationMode,
const std::vector<hardware::CameraStreamStats>& streamStats);
+
+ std::string updateExtensionSessionStats(
+ const hardware::CameraExtensionSessionStats& extStats);
+
+ // Returns the logId associated with this event.
+ int64_t getLogId();
};
// Lock for camera session stats map
- static Mutex mLock;
+ Mutex mLock;
// Map from camera id to the camera's session statistics
- static std::map<std::string, std::shared_ptr<CameraSessionStatsWrapper>> mSessionStatsMap;
+ std::map<std::string, std::shared_ptr<CameraSessionStatsWrapper>> mSessionStatsMap;
- /**
- * Update the session stats of a given camera device (open/close/active/idle) with
- * the camera proxy service in the system service
- */
- static void updateProxyDeviceState(
- const hardware::CameraSessionStats& sessionStats);
+ std::random_device mRandomDevice; // pulls 32-bit random numbers from /dev/urandom
- static sp<hardware::ICameraServiceProxy> getCameraServiceProxy();
+ sp<hardware::ICameraServiceProxy> getCameraServiceProxy();
+
+ // Returns a randomly generated ID that is suitable for logging the event. A new identifier
+ // should only be generated for an open event. All other events for the cameraId should use the
+ // ID generated for the open event associated with them.
+ static int64_t generateLogId(std::random_device& randomDevice);
public:
+ CameraServiceProxyWrapper(sp<hardware::ICameraServiceProxy> serviceProxy = nullptr) :
+ mCameraServiceProxy(serviceProxy)
+ { }
+
+ static sp<hardware::ICameraServiceProxy> getDefaultCameraServiceProxy();
+
// Open
- static void logOpen(const std::string& id, int facing,
+ void logOpen(const std::string& id, int facing,
const std::string& clientPackageName, int apiLevel, bool isNdk,
int32_t latencyMs);
// Close
- static void logClose(const std::string& id, int32_t latencyMs);
+ void logClose(const std::string& id, int32_t latencyMs, bool deviceError);
// Stream configuration
- static void logStreamConfigured(const std::string& id, int operatingMode, bool internalReconfig,
+ void logStreamConfigured(const std::string& id, int operatingMode, bool internalReconfig,
int32_t latencyMs);
// Session state becomes active
- static void logActive(const std::string& id, float maxPreviewFps);
+ void logActive(const std::string& id, float maxPreviewFps);
// Session state becomes idle
- static void logIdle(const std::string& id,
+ void logIdle(const std::string& id,
int64_t requestCount, int64_t resultErrorCount, bool deviceError,
const std::string& userTag, int32_t videoStabilizationMode,
const std::vector<hardware::CameraStreamStats>& streamStats);
// Ping camera service proxy for user update
- static void pingCameraServiceProxy();
+ void pingCameraServiceProxy();
// Return the current top activity rotate and crop override.
- static int getRotateAndCropOverride(const std::string &packageName, int lensFacing, int userId);
+ int getRotateAndCropOverride(const std::string &packageName, int lensFacing, int userId);
+
+ // Return the current top activity autoframing.
+ int getAutoframingOverride(const std::string& packageName);
// Detect if the camera is disabled by device policy.
- static bool isCameraDisabled(int userId);
+ bool isCameraDisabled(int userId);
+
+ // Returns the logId currently associated with the given cameraId. See 'mLogId' in
+ // frameworks/av/camera/include/camera/CameraSessionStats.h for more details about this
+ // identifier. Returns a non-0 value on success.
+ int64_t getCurrentLogIdForCamera(const std::string& cameraId);
+
+ // Update the stored extension stats to the latest values
+ std::string updateExtensionStats(const hardware::CameraExtensionSessionStats& extStats);
};
} // android
diff --git a/services/camera/libcameraservice/utils/SessionConfigurationUtils.cpp b/services/camera/libcameraservice/utils/SessionConfigurationUtils.cpp
index c9520d5..f7257e3 100644
--- a/services/camera/libcameraservice/utils/SessionConfigurationUtils.cpp
+++ b/services/camera/libcameraservice/utils/SessionConfigurationUtils.cpp
@@ -19,6 +19,8 @@
#include "SessionConfigurationUtils.h"
#include "../api2/DepthCompositeStream.h"
#include "../api2/HeicCompositeStream.h"
+#include "aidl/android/hardware/graphics/common/Dataspace.h"
+#include "api2/JpegRCompositeStream.h"
#include "common/CameraDeviceBase.h"
#include "common/HalConversionsTemplated.h"
#include "../CameraService.h"
@@ -27,6 +29,7 @@
#include "device3/Camera3OutputStream.h"
#include "system/graphics-base-v1.1.h"
#include <camera/StringUtils.h>
+#include <ui/PublicFormat.h>
using android::camera3::OutputStreamInfo;
using android::camera3::OutputStreamInfo;
@@ -71,11 +74,11 @@
int32_t dynamicDepthKey =
SessionConfigurationUtils::getAppropriateModeTag(
- ANDROID_DEPTH_AVAILABLE_DYNAMIC_DEPTH_STREAM_CONFIGURATIONS);
+ ANDROID_DEPTH_AVAILABLE_DYNAMIC_DEPTH_STREAM_CONFIGURATIONS, maxRes);
int32_t heicKey =
SessionConfigurationUtils::getAppropriateModeTag(
- ANDROID_HEIC_AVAILABLE_HEIC_STREAM_CONFIGURATIONS);
+ ANDROID_HEIC_AVAILABLE_HEIC_STREAM_CONFIGURATIONS, maxRes);
getStreamConfigurations(staticInfo, scalerKey, scm);
getStreamConfigurations(staticInfo, depthKey, scm);
@@ -128,7 +131,7 @@
size_t getUHRMaxJpegBufferSize(camera3::Size uhrMaxJpegSize,
camera3::Size defaultMaxJpegSize, size_t defaultMaxJpegBufferSize) {
- return ((float)uhrMaxJpegSize.width * uhrMaxJpegSize.height) /
+ return ((float)(uhrMaxJpegSize.width * uhrMaxJpegSize.height)) /
(defaultMaxJpegSize.width * defaultMaxJpegSize.height) * defaultMaxJpegBufferSize;
}
@@ -159,8 +162,13 @@
getAppropriateModeTag(ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS, maxResolution);
const int32_t heicSizesTag =
getAppropriateModeTag(ANDROID_HEIC_AVAILABLE_HEIC_STREAM_CONFIGURATIONS, maxResolution);
+ const int32_t jpegRSizesTag = getAppropriateModeTag(
+ ANDROID_JPEGR_AVAILABLE_JPEG_R_STREAM_CONFIGURATIONS, maxResolution);
+ bool isJpegRDataSpace = (dataSpace == static_cast<android_dataspace_t>(
+ ::aidl::android::hardware::graphics::common::Dataspace::JPEG_R));
camera_metadata_ro_entry streamConfigs =
+ (isJpegRDataSpace) ? info.find(jpegRSizesTag) :
(dataSpace == HAL_DATASPACE_DEPTH) ? info.find(depthSizesTag) :
(dataSpace == static_cast<android_dataspace>(HAL_DATASPACE_HEIF)) ?
info.find(heicSizesTag) :
@@ -194,6 +202,8 @@
if (bestWidth == -1) {
// Return false if no configurations for this format were listed
+ ALOGE("%s: No configurations for format %d width %d, height %d, maxResolution ? %s",
+ __FUNCTION__, format, width, height, maxResolution ? "true" : "false");
return false;
}
@@ -210,11 +220,18 @@
}
//check if format is 10-bit compatible
-bool is10bitCompatibleFormat(int32_t format) {
+bool is10bitCompatibleFormat(int32_t format, android_dataspace_t dataSpace) {
switch(format) {
case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
case HAL_PIXEL_FORMAT_YCBCR_P010:
return true;
+ case HAL_PIXEL_FORMAT_BLOB:
+ if (dataSpace == static_cast<android_dataspace_t>(
+ ::aidl::android::hardware::graphics::common::Dataspace::JPEG_R)) {
+ return true;
+ }
+
+ return false;
default:
return false;
}
@@ -283,6 +300,65 @@
}
}
+bool deviceReportsColorSpaces(const CameraMetadata& staticInfo) {
+ camera_metadata_ro_entry_t entry = staticInfo.find(ANDROID_REQUEST_AVAILABLE_CAPABILITIES);
+ for (size_t i = 0; i < entry.count; ++i) {
+ uint8_t capability = entry.data.u8[i];
+ if (capability == ANDROID_REQUEST_AVAILABLE_CAPABILITIES_COLOR_SPACE_PROFILES) {
+ return true;
+ }
+ }
+
+ return false;
+}
+
+bool isColorSpaceSupported(int32_t colorSpace, int32_t format, android_dataspace dataSpace,
+ int64_t dynamicRangeProfile, const CameraMetadata& staticInfo) {
+ int64_t colorSpace64 = colorSpace;
+ int64_t format64 = format;
+
+ // Translate HAL format + data space to public format
+ if (format == HAL_PIXEL_FORMAT_BLOB && dataSpace == HAL_DATASPACE_V0_JFIF) {
+ format64 = 0x100; // JPEG
+ } else if (format == HAL_PIXEL_FORMAT_BLOB
+ && dataSpace == static_cast<android_dataspace>(HAL_DATASPACE_HEIF)) {
+ format64 = 0x48454946; // HEIC
+ } else if (format == HAL_PIXEL_FORMAT_BLOB
+ && dataSpace == static_cast<android_dataspace>(HAL_DATASPACE_DYNAMIC_DEPTH)) {
+ format64 = 0x69656963; // DEPTH_JPEG
+ } else if (format == HAL_PIXEL_FORMAT_BLOB && dataSpace == HAL_DATASPACE_DEPTH) {
+ return false; // DEPTH_POINT_CLOUD, not applicable
+ } else if (format == HAL_PIXEL_FORMAT_Y16 && dataSpace == HAL_DATASPACE_DEPTH) {
+ return false; // DEPTH16, not applicable
+ } else if (format == HAL_PIXEL_FORMAT_RAW16 && dataSpace == HAL_DATASPACE_DEPTH) {
+ return false; // RAW_DEPTH, not applicable
+ } else if (format == HAL_PIXEL_FORMAT_RAW10 && dataSpace == HAL_DATASPACE_DEPTH) {
+ return false; // RAW_DEPTH10, not applicable
+ } else if (format == HAL_PIXEL_FORMAT_BLOB && dataSpace ==
+ static_cast<android_dataspace>(
+ ::aidl::android::hardware::graphics::common::Dataspace::JPEG_R)) {
+ format64 = static_cast<int64_t>(PublicFormat::JPEG_R);
+ }
+
+ camera_metadata_ro_entry_t entry =
+ staticInfo.find(ANDROID_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP);
+ for (size_t i = 0; i < entry.count; i += 3) {
+ bool isFormatCompatible = (format64 == entry.data.i64[i + 1]);
+ bool isDynamicProfileCompatible =
+ (dynamicRangeProfile & entry.data.i64[i + 2]) != 0;
+
+ if (colorSpace64 == entry.data.i64[i]
+ && isFormatCompatible
+ && isDynamicProfileCompatible) {
+ return true;
+ }
+ }
+
+ ALOGE("Color space %d, image format %" PRId64 ", and dynamic range 0x%" PRIx64
+ " combination not found", colorSpace, format64, dynamicRangeProfile);
+ return false;
+}
+
bool isPublicFormat(int32_t format)
{
switch(format) {
@@ -310,6 +386,23 @@
}
}
+bool dataSpaceFromColorSpace(android_dataspace *dataSpace, int32_t colorSpace) {
+ switch (colorSpace) {
+ case ANDROID_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP_SRGB:
+ *dataSpace = HAL_DATASPACE_V0_SRGB;
+ return true;
+ case ANDROID_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP_DISPLAY_P3:
+ *dataSpace = HAL_DATASPACE_DISPLAY_P3;
+ return true;
+ case ANDROID_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP_BT2020_HLG:
+ *(reinterpret_cast<int32_t*>(dataSpace)) = HAL_DATASPACE_BT2020_HLG;
+ return true;
+ default:
+ ALOGE("%s: Unsupported color space %d", __FUNCTION__, colorSpace);
+ return false;
+ }
+}
+
bool isStreamUseCaseSupported(int64_t streamUseCase,
const CameraMetadata &deviceInfo) {
camera_metadata_ro_entry_t availableStreamUseCases =
@@ -337,7 +430,8 @@
sp<Surface>& surface, const sp<IGraphicBufferProducer>& gbp,
const std::string &logicalCameraId, const CameraMetadata &physicalCameraMetadata,
const std::vector<int32_t> &sensorPixelModesUsed, int64_t dynamicRangeProfile,
- int64_t streamUseCase, int timestampBase, int mirrorMode) {
+ int64_t streamUseCase, int timestampBase, int mirrorMode,
+ int32_t colorSpace) {
// bufferProducer must be non-null
if (gbp == nullptr) {
std::string msg = fmt::sprintf("Camera %s: Surface is NULL", logicalCameraId.c_str());
@@ -401,6 +495,16 @@
return STATUS_ERROR(CameraService::ERROR_INVALID_OPERATION, msg.c_str());
}
+ if (colorSpace != ANDROID_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP_UNSPECIFIED &&
+ format != HAL_PIXEL_FORMAT_BLOB) {
+ if (!dataSpaceFromColorSpace(&dataSpace, colorSpace)) {
+ std::string msg = fmt::sprintf("Camera %s: color space %d not supported, failed to "
+ "convert to data space", logicalCameraId.c_str(), colorSpace);
+ ALOGE("%s: %s", __FUNCTION__, msg.c_str());
+ return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT, msg.c_str());
+ }
+ }
+
// FIXME: remove this override since the default format should be
// IMPLEMENTATION_DEFINED. b/9487482 & b/35317944
if ((format >= HAL_PIXEL_FORMAT_RGBA_8888 && format <= HAL_PIXEL_FORMAT_BGRA_8888) &&
@@ -412,7 +516,7 @@
}
std::unordered_set<int32_t> overriddenSensorPixelModes;
if (checkAndOverrideSensorPixelModesUsed(sensorPixelModesUsed, format, width, height,
- physicalCameraMetadata, flexibleConsumer, &overriddenSensorPixelModes) != OK) {
+ physicalCameraMetadata, &overriddenSensorPixelModes) != OK) {
std::string msg = fmt::sprintf("Camera %s: sensor pixel modes for stream with "
"format %#x are not valid",logicalCameraId.c_str(), format);
ALOGE("%s: %s", __FUNCTION__, msg.c_str());
@@ -444,13 +548,23 @@
return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT, msg.c_str());
}
if (SessionConfigurationUtils::is10bitDynamicRangeProfile(dynamicRangeProfile) &&
- !SessionConfigurationUtils::is10bitCompatibleFormat(format)) {
+ !SessionConfigurationUtils::is10bitCompatibleFormat(format, dataSpace)) {
std::string msg = fmt::sprintf("Camera %s: No 10-bit supported stream configurations with "
"format %#x defined and profile %" PRIx64 ", failed to create output stream",
logicalCameraId.c_str(), format, dynamicRangeProfile);
ALOGE("%s: %s", __FUNCTION__, msg.c_str());
return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT, msg.c_str());
}
+ if (colorSpace != ANDROID_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP_UNSPECIFIED &&
+ SessionConfigurationUtils::deviceReportsColorSpaces(physicalCameraMetadata) &&
+ !SessionConfigurationUtils::isColorSpaceSupported(colorSpace, format, dataSpace,
+ dynamicRangeProfile, physicalCameraMetadata)) {
+ std::string msg = fmt::sprintf("Camera %s: Color space %d not supported, failed to "
+ "create output stream (pixel format %d dynamic range profile %" PRId64 ")",
+ logicalCameraId.c_str(), colorSpace, format, dynamicRangeProfile);
+ ALOGE("%s: %s", __FUNCTION__, msg.c_str());
+ return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT, msg.c_str());
+ }
if (!SessionConfigurationUtils::isStreamUseCaseSupported(streamUseCase,
physicalCameraMetadata)) {
std::string msg = fmt::sprintf("Camera %s: stream use case %" PRId64 " not supported,"
@@ -484,6 +598,7 @@
streamInfo.streamUseCase = streamUseCase;
streamInfo.timestampBase = timestampBase;
streamInfo.mirrorMode = mirrorMode;
+ streamInfo.colorSpace = colorSpace;
return binder::Status::ok();
}
if (width != streamInfo.width) {
@@ -539,6 +654,7 @@
camera3::Camera3OutputStream::applyZSLUsageQuirk(streamInfo.format, &u);
stream->usage = AidlCamera3Device::mapToAidlConsumerUsage(u);
stream->dataSpace = AidlCamera3Device::mapToAidlDataspace(streamInfo.dataSpace);
+ stream->colorSpace = streamInfo.colorSpace;
stream->rotation = AidlCamera3Device::mapToAidlStreamRotation(rotation);
stream->id = -1; // Invalid stream id
stream->physicalCameraId = physicalId;
@@ -563,6 +679,7 @@
convertToHALStreamCombination(
const SessionConfiguration& sessionConfiguration,
const std::string &logicalCameraId, const CameraMetadata &deviceInfo,
+ bool isCompositeJpegRDisabled,
metadataGetter getMetadata, const std::vector<std::string> &physicalCameraIds,
aidl::android::hardware::camera::device::StreamConfiguration &streamConfiguration,
bool overrideForPerfClass, bool *earlyExit) {
@@ -637,6 +754,7 @@
const std::string &physicalCameraId = it.getPhysicalCameraId();
int64_t dynamicRangeProfile = it.getDynamicRangeProfile();
+ int32_t colorSpace = it.getColorSpace();
std::vector<int32_t> sensorPixelModesUsed = it.getSensorPixelModesUsed();
const CameraMetadata &physicalDeviceInfo = getMetadata(physicalCameraId,
overrideForPerfClass);
@@ -674,7 +792,7 @@
streamInfo.dynamicRangeProfile = it.getDynamicRangeProfile();
if (checkAndOverrideSensorPixelModesUsed(sensorPixelModesUsed,
streamInfo.format, streamInfo.width,
- streamInfo.height, metadataChosen, false /*flexibleConsumer*/,
+ streamInfo.height, metadataChosen,
&streamInfo.sensorPixelModesUsed) != OK) {
ALOGE("%s: Deferred surface sensor pixel modes not valid",
__FUNCTION__);
@@ -695,7 +813,7 @@
sp<Surface> surface;
res = createSurfaceFromGbp(streamInfo, isStreamInfoValid, surface, bufferProducer,
logicalCameraId, metadataChosen, sensorPixelModesUsed, dynamicRangeProfile,
- streamUseCase, timestampBase, mirrorMode);
+ streamUseCase, timestampBase, mirrorMode, colorSpace);
if (!res.isOk())
return res;
@@ -705,7 +823,10 @@
camera3::DepthCompositeStream::isDepthCompositeStream(surface);
bool isHeicCompositeStream =
camera3::HeicCompositeStream::isHeicCompositeStream(surface);
- if (isDepthCompositeStream || isHeicCompositeStream) {
+ bool isJpegRCompositeStream =
+ camera3::JpegRCompositeStream::isJpegRCompositeStream(surface) &&
+ !isCompositeJpegRDisabled;
+ if (isDepthCompositeStream || isHeicCompositeStream || isJpegRCompositeStream) {
// We need to take in to account that composite streams can have
// additional internal camera streams.
std::vector<OutputStreamInfo> compositeStreams;
@@ -713,10 +834,14 @@
// TODO: Take care of composite streams.
ret = camera3::DepthCompositeStream::getCompositeStreamInfo(streamInfo,
deviceInfo, &compositeStreams);
- } else {
+ } else if (isHeicCompositeStream) {
ret = camera3::HeicCompositeStream::getCompositeStreamInfo(streamInfo,
deviceInfo, &compositeStreams);
+ } else {
+ ret = camera3::JpegRCompositeStream::getCompositeStreamInfo(streamInfo,
+ deviceInfo, &compositeStreams);
}
+
if (ret != OK) {
std::string msg = fmt::sprintf(
"Camera %s: Failed adding composite streams: %s (%d)",
@@ -845,15 +970,17 @@
status_t checkAndOverrideSensorPixelModesUsed(
const std::vector<int32_t> &sensorPixelModesUsed, int format, int width, int height,
- const CameraMetadata &staticInfo, bool flexibleConsumer,
+ const CameraMetadata &staticInfo,
std::unordered_set<int32_t> *overriddenSensorPixelModesUsed) {
const std::unordered_set<int32_t> &sensorPixelModesUsedSet =
convertToSet(sensorPixelModesUsed);
- if (!isUltraHighResolutionSensor(staticInfo)) {
+ if (!supportsUltraHighResolutionCapture(staticInfo)) {
if (sensorPixelModesUsedSet.find(ANDROID_SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION) !=
sensorPixelModesUsedSet.end()) {
// invalid value for non ultra high res sensors
+ ALOGE("%s ANDROID_SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION used on a device which doesn't "
+ "support ultra high resolution capture", __FUNCTION__);
return BAD_VALUE;
}
overriddenSensorPixelModesUsed->clear();
@@ -874,35 +1001,40 @@
// Case 1: The client has not changed the sensor mode defaults. In this case, we check if the
// size + format of the OutputConfiguration is found exclusively in 1.
// If yes, add that sensorPixelMode to overriddenSensorPixelModes.
- // If no, add 'DEFAULT' to sensorPixelMode. This maintains backwards
- // compatibility.
+ // If no, add 'DEFAULT' and MAXIMUM_RESOLUTION to overriddenSensorPixelModes.
+ // This maintains backwards compatibility and also tells the framework the stream
+ // might be used in either sensor pixel mode.
if (sensorPixelModesUsedSet.size() == 0) {
- // Ambiguous case, default to only 'DEFAULT' mode.
+ // Ambiguous case, override to include both cases.
if (isInDefaultStreamConfigurationMap && isInMaximumResolutionStreamConfigurationMap) {
overriddenSensorPixelModesUsed->insert(ANDROID_SENSOR_PIXEL_MODE_DEFAULT);
- return OK;
- }
- // We don't allow flexible consumer for max resolution mode.
- if (isInMaximumResolutionStreamConfigurationMap) {
overriddenSensorPixelModesUsed->insert(ANDROID_SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION);
return OK;
}
- if (isInDefaultStreamConfigurationMap || (flexibleConsumer && width < ROUNDING_WIDTH_CAP)) {
+ if (isInMaximumResolutionStreamConfigurationMap) {
+ overriddenSensorPixelModesUsed->insert(
+ ANDROID_SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION);
+ } else {
overriddenSensorPixelModesUsed->insert(ANDROID_SENSOR_PIXEL_MODE_DEFAULT);
- return OK;
}
- return BAD_VALUE;
+ return OK;
}
// Case2: The app has set sensorPixelModesUsed, we need to verify that they
// are valid / err out.
if (sensorPixelModesUsedSet.find(ANDROID_SENSOR_PIXEL_MODE_DEFAULT) !=
sensorPixelModesUsedSet.end() && !isInDefaultStreamConfigurationMap) {
+ ALOGE("%s: ANDROID_SENSOR_PIXEL_MODE_DEFAULT set by client, but stream f: %d size %d x %d"
+ " isn't present in default stream configuration map", __FUNCTION__, format, width,
+ height);
return BAD_VALUE;
}
if (sensorPixelModesUsedSet.find(ANDROID_SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION) !=
sensorPixelModesUsedSet.end() && !isInMaximumResolutionStreamConfigurationMap) {
+ ALOGE("%s: ANDROID_SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION set by client, but stream f: "
+ "%d size %d x %d isn't present in default stream configuration map", __FUNCTION__,
+ format, width, height);
return BAD_VALUE;
}
*overriddenSensorPixelModesUsed = sensorPixelModesUsedSet;
diff --git a/services/camera/libcameraservice/utils/SessionConfigurationUtils.h b/services/camera/libcameraservice/utils/SessionConfigurationUtils.h
index 220d1f8..79d80ea 100644
--- a/services/camera/libcameraservice/utils/SessionConfigurationUtils.h
+++ b/services/camera/libcameraservice/utils/SessionConfigurationUtils.h
@@ -100,10 +100,11 @@
sp<Surface>& surface, const sp<IGraphicBufferProducer>& gbp,
const std::string &logicalCameraId, const CameraMetadata &physicalCameraMetadata,
const std::vector<int32_t> &sensorPixelModesUsed, int64_t dynamicRangeProfile,
- int64_t streamUseCase, int timestampBase, int mirrorMode);
+ int64_t streamUseCase, int timestampBase, int mirrorMode,
+ int32_t colorSpace);
//check if format is 10-bit output compatible
-bool is10bitCompatibleFormat(int32_t format);
+bool is10bitCompatibleFormat(int32_t format, android_dataspace_t dataSpace);
// check if the dynamic range requires 10-bit output
bool is10bitDynamicRangeProfile(int64_t dynamicRangeProfile);
@@ -111,6 +112,13 @@
// Check if the device supports a given dynamicRangeProfile
bool isDynamicRangeProfileSupported(int64_t dynamicRangeProfile, const CameraMetadata& staticMeta);
+bool deviceReportsColorSpaces(const CameraMetadata& staticMeta);
+
+bool isColorSpaceSupported(int32_t colorSpace, int32_t format, android_dataspace dataSpace,
+ int64_t dynamicRangeProfile, const CameraMetadata& staticMeta);
+
+bool dataSpaceFromColorSpace(android_dataspace *dataSpace, int32_t colorSpace);
+
bool isStreamUseCaseSupported(int64_t streamUseCase, const CameraMetadata &deviceInfo);
void mapStreamInfo(const OutputStreamInfo &streamInfo,
@@ -133,7 +141,8 @@
convertToHALStreamCombination(
const SessionConfiguration& sessionConfiguration,
const std::string &logicalCameraId, const CameraMetadata &deviceInfo,
- metadataGetter getMetadata, const std::vector<std::string> &physicalCameraIds,
+ bool isCompositeJpegRDisabled, metadataGetter getMetadata,
+ const std::vector<std::string> &physicalCameraIds,
aidl::android::hardware::camera::device::StreamConfiguration &streamConfiguration,
bool overrideForPerfClass, bool *earlyExit);
@@ -141,7 +150,7 @@
status_t checkAndOverrideSensorPixelModesUsed(
const std::vector<int32_t> &sensorPixelModesUsed, int format, int width, int height,
- const CameraMetadata &staticInfo, bool flexibleConsumer,
+ const CameraMetadata &staticInfo,
std::unordered_set<int32_t> *overriddenSensorPixelModesUsed);
bool targetPerfClassPrimaryCamera(
diff --git a/services/camera/libcameraservice/utils/SessionConfigurationUtilsHidl.cpp b/services/camera/libcameraservice/utils/SessionConfigurationUtilsHidl.cpp
index f63eea1..cf93d3b 100644
--- a/services/camera/libcameraservice/utils/SessionConfigurationUtilsHidl.cpp
+++ b/services/camera/libcameraservice/utils/SessionConfigurationUtilsHidl.cpp
@@ -111,8 +111,8 @@
bool overrideForPerfClass, bool *earlyExit) {
aidl::android::hardware::camera::device::StreamConfiguration aidlStreamConfiguration;
auto ret = convertToHALStreamCombination(sessionConfiguration, logicalCameraId, deviceInfo,
- getMetadata, physicalCameraIds, aidlStreamConfiguration, overrideForPerfClass,
- earlyExit);
+ false /*isCompositeJpegRDisabled*/, getMetadata, physicalCameraIds,
+ aidlStreamConfiguration, overrideForPerfClass, earlyExit);
if (!ret.isOk()) {
return ret;
}
diff --git a/services/camera/libcameraservice/utils/SessionConfigurationUtilsHost.cpp b/services/camera/libcameraservice/utils/SessionConfigurationUtilsHost.cpp
index 1efdc60..7d344f8 100644
--- a/services/camera/libcameraservice/utils/SessionConfigurationUtilsHost.cpp
+++ b/services/camera/libcameraservice/utils/SessionConfigurationUtilsHost.cpp
@@ -49,12 +49,22 @@
return ANDROID_HEIC_AVAILABLE_HEIC_MIN_FRAME_DURATIONS_MAXIMUM_RESOLUTION;
case ANDROID_HEIC_AVAILABLE_HEIC_STALL_DURATIONS:
return ANDROID_HEIC_AVAILABLE_HEIC_STALL_DURATIONS_MAXIMUM_RESOLUTION;
+ case ANDROID_JPEGR_AVAILABLE_JPEG_R_STREAM_CONFIGURATIONS:
+ return ANDROID_JPEGR_AVAILABLE_JPEG_R_STREAM_CONFIGURATIONS_MAXIMUM_RESOLUTION;
+ case ANDROID_JPEGR_AVAILABLE_JPEG_R_MIN_FRAME_DURATIONS:
+ return ANDROID_JPEGR_AVAILABLE_JPEG_R_MIN_FRAME_DURATIONS_MAXIMUM_RESOLUTION;
+ case ANDROID_JPEGR_AVAILABLE_JPEG_R_STALL_DURATIONS:
+ return ANDROID_JPEGR_AVAILABLE_JPEG_R_STALL_DURATIONS_MAXIMUM_RESOLUTION;
case ANDROID_SENSOR_OPAQUE_RAW_SIZE:
return ANDROID_SENSOR_OPAQUE_RAW_SIZE_MAXIMUM_RESOLUTION;
case ANDROID_LENS_INTRINSIC_CALIBRATION:
return ANDROID_LENS_INTRINSIC_CALIBRATION_MAXIMUM_RESOLUTION;
case ANDROID_LENS_DISTORTION:
return ANDROID_LENS_DISTORTION_MAXIMUM_RESOLUTION;
+ case ANDROID_SENSOR_INFO_PRE_CORRECTION_ACTIVE_ARRAY_SIZE:
+ return ANDROID_SENSOR_INFO_PRE_CORRECTION_ACTIVE_ARRAY_SIZE_MAXIMUM_RESOLUTION;
+ case ANDROID_SENSOR_INFO_ACTIVE_ARRAY_SIZE:
+ return ANDROID_SENSOR_INFO_ACTIVE_ARRAY_SIZE_MAXIMUM_RESOLUTION;
default:
ALOGE("%s: Tag %d doesn't have a maximum resolution counterpart", __FUNCTION__,
defaultTag);
@@ -63,7 +73,62 @@
return -1;
}
-bool isUltraHighResolutionSensor(const CameraMetadata &deviceInfo) {
+static bool isKeyPresentWithCount(const CameraMetadata &deviceInfo, uint32_t tag, uint32_t count) {
+ auto countFound = deviceInfo.find(tag).count;
+ return (countFound != 0) && (countFound % count == 0);
+}
+
+static bool supportsKeysForBasicUltraHighResolutionCapture(const CameraMetadata &deviceInfo) {
+ // Check whether the following conditions are satisfied for reduced ultra high
+ // resolution support:
+ // 1) SENSOR_PIXEL_MODE is advertised in ANDROID_REQUEST_AVAILABLE_REQUEST_KEYS
+ // 2) The following keys are present in CameraCharacteristics for basic functionality
+ // a) ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_MAXIMUM_RESOLUTION
+ // b) ANDROID_SCALER_AVAILABLE_MIN_FRAME_DURATIONS_MAXIMUM_RESOLUTION
+ // c) ANDROID_SCALER_AVAILABLE_STALL_DURATIONS_MAXIMUM_RESOLUTION
+ // d) ANDROID_SENSOR_INFO_ACTIVE_ARRAY_SIZE_MAXIMUM_RESOLUTION
+ // e) ANDROID_SENSOR_INFO_PRE_CORRECTION_ACTIVE_ARRAY_SIZE_MAXIMUM_RESOLUTION
+ // f) ANDROID_SENSOR_INFO_PIXEL_ARRAY_SIZE_MAXIMUM_RESOLUTION
+ camera_metadata_ro_entry_t entryChar;
+ entryChar = deviceInfo.find(ANDROID_REQUEST_AVAILABLE_REQUEST_KEYS);
+ bool supportsSensorPixelMode = false;
+ for (size_t i = 0; i < entryChar.count; i++) {
+ int32_t key = entryChar.data.i32[i];
+ if (key == ANDROID_SENSOR_PIXEL_MODE) {
+ supportsSensorPixelMode = true;
+ break;
+ }
+ }
+ if (!supportsSensorPixelMode) {
+ return false;
+ }
+
+ // Basic sensor array size information tags are present
+ if (!isKeyPresentWithCount(deviceInfo, ANDROID_SENSOR_INFO_PIXEL_ARRAY_SIZE_MAXIMUM_RESOLUTION,
+ /*count*/2) ||
+ !isKeyPresentWithCount(deviceInfo,
+ ANDROID_SENSOR_INFO_PRE_CORRECTION_ACTIVE_ARRAY_SIZE_MAXIMUM_RESOLUTION,
+ /*count*/4) ||
+ !isKeyPresentWithCount(deviceInfo,
+ ANDROID_SENSOR_INFO_ACTIVE_ARRAY_SIZE_MAXIMUM_RESOLUTION, /*count*/4) ||
+ !isKeyPresentWithCount(deviceInfo, ANDROID_SENSOR_INFO_BINNING_FACTOR, /*count*/2)) {
+ return false;
+ }
+
+ // Basic stream configuration tags are present
+ if (!isKeyPresentWithCount(deviceInfo,
+ ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_MAXIMUM_RESOLUTION, /*count*/4) ||
+ !isKeyPresentWithCount(deviceInfo,
+ ANDROID_SCALER_AVAILABLE_MIN_FRAME_DURATIONS_MAXIMUM_RESOLUTION, /*count*/4) ||
+ !isKeyPresentWithCount(deviceInfo,
+ ANDROID_SCALER_AVAILABLE_STALL_DURATIONS_MAXIMUM_RESOLUTION, /*count*/ 4)) {
+ return false;
+ }
+
+ return true;
+}
+
+bool supportsUltraHighResolutionCapture(const CameraMetadata &deviceInfo) {
camera_metadata_ro_entry_t entryCap;
entryCap = deviceInfo.find(ANDROID_REQUEST_AVAILABLE_CAPABILITIES);
// Go through the capabilities and check if it has
@@ -74,7 +139,10 @@
return true;
}
}
- return false;
+
+ // If not, then check that the keys which guarantee basic support for
+ // ultra high resolution capture are supported.
+ return supportsKeysForBasicUltraHighResolutionCapture(deviceInfo);
}
bool getArrayWidthAndHeight(const CameraMetadata *deviceInfo,
@@ -93,4 +161,4 @@
} // namespace SessionConfigurationUtils
} // namespace camera3
-} // namespace android
\ No newline at end of file
+} // namespace android
diff --git a/services/camera/libcameraservice/utils/SessionConfigurationUtilsHost.h b/services/camera/libcameraservice/utils/SessionConfigurationUtilsHost.h
index 45b1e91..dac1824 100644
--- a/services/camera/libcameraservice/utils/SessionConfigurationUtilsHost.h
+++ b/services/camera/libcameraservice/utils/SessionConfigurationUtilsHost.h
@@ -22,7 +22,7 @@
namespace camera3 {
namespace SessionConfigurationUtils {
-bool isUltraHighResolutionSensor(const CameraMetadata &deviceInfo);
+bool supportsUltraHighResolutionCapture(const CameraMetadata &deviceInfo);
int32_t getAppropriateModeTag(int32_t defaultTag, bool maxResolution = false);
@@ -33,4 +33,4 @@
} // camera3
} // android
-#endif
\ No newline at end of file
+#endif