Camera2 multi-client support
Enable multiple clients to establish a shared session. Once the
session is created, each client can independently start and stop
streaming using the `startStreaming` and `stopStreaming` APIs. In
addition, primary clients can create and submit capture requests via
the createCaptureRequest API. All clients can stream camera images
simultaneously.
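
Illustrative usage (a minimal sketch of the secondary native client
flow, assuming a shared capture session and an ImageReader-backed
ANativeWindow have already been set up; error handling abbreviated):

  // `session` is an ACameraCaptureSession for a device opened in
  // shared mode; `window` is the ANativeWindow of the ImageReader.
  ANativeWindow* windows[] = { window };
  int sequenceId = 0;
  camera_status_t status = ACameraCaptureSessionShared_startStreaming(
      session, /*callbacks*/ nullptr, /*numOutputWindows*/ 1, windows,
      &sequenceId);
  // ... the ImageReader receives camera images while streaming ...
  if (status == ACAMERA_OK) {
      status = ACameraCaptureSessionShared_stopStreaming(session);
  }
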
Flag: com.android.internal.camera.flags.camera_multi_client
Bug:265196098
API-Coverage-Bug: 377371012
Test: A session is established with a shared configuration that
supports two streams: SurfaceView and ImageReader. The Java client
can create a session utilizing the SurfaceView stream. The native
client can create a session utilizing the ImageReader stream.
The Java client initiates streaming by creating a capture request and
using the `setRepeatingRequest` method. The native client initiates
streaming using the `startStreaming` API and the ImageReader surface
to receive camera images. Also ran camera CTS tests on these CLs to
verify that no regressions are introduced.
Change-Id: I6ab4e5eef094e75c9b1230ad24afe1c50133df86
diff --git a/camera/aidl/android/hardware/camera2/ICameraDeviceUser.aidl b/camera/aidl/android/hardware/camera2/ICameraDeviceUser.aidl
index a9191eb..2145edd 100644
--- a/camera/aidl/android/hardware/camera2/ICameraDeviceUser.aidl
+++ b/camera/aidl/android/hardware/camera2/ICameraDeviceUser.aidl
@@ -36,6 +36,20 @@
SubmitInfo submitRequest(in CaptureRequest request, boolean streaming);
SubmitInfo submitRequestList(in CaptureRequest[] requestList, boolean streaming);
+ /**
+ * When a camera device is opened in shared mode, only the primary client can change capture
+ * parameters and submit capture requests. Secondary clients can use the startStreaming API to
+ * provide the stream and surface IDs they want to stream on. If the primary client has an
+ * ongoing repeating request, the camera service will attach these surfaces to it. Otherwise,
+ * the camera service will create a default capture request using the preview template.
+ *
+ * @param streamIdxArray stream ids of the target surfaces
+ * @param surfaceIdxArray surface ids of the target surfaces
+ * @return SubmitInfo data structure containing the request id of the capture request and the
+ * frame number of the last request of the previous batch of repeating requests, if
+ * any. If there is no previous batch, the frame number returned will be -1.
+ */
+ SubmitInfo startStreaming(in int[] streamIdxArray, in int[] surfaceIdxArray);
/**
* Cancel the repeating request specified by requestId
diff --git a/camera/ndk/NdkCameraCaptureSession.cpp b/camera/ndk/NdkCameraCaptureSession.cpp
index 58370e5..06ee714 100644
--- a/camera/ndk/NdkCameraCaptureSession.cpp
+++ b/camera/ndk/NdkCameraCaptureSession.cpp
@@ -178,30 +178,39 @@
EXPORT
camera_status_t ACameraCaptureSessionShared_startStreaming(
- ACameraCaptureSession* /*session*/, ACameraCaptureSession_captureCallbacksV2* /*callbacks*/,
- int /*numOutputWindows*/, ANativeWindow** /*window*/,
- int* /*captureSequenceId*/) {
+ ACameraCaptureSession* session,
+ /*optional*/ACameraCaptureSession_captureCallbacksV2* callbacks,
+ int numOutputWindows, ANativeWindow** windows,
+ /*optional*/int* captureSequenceId) {
ATRACE_CALL();
- // Todo: need to add implementation
- return ACAMERA_OK;
+ return startStreamingTemplate(session, callbacks, numOutputWindows, windows,
+ captureSequenceId);
}
EXPORT
camera_status_t ACameraCaptureSessionShared_logicalCamera_startStreaming(
- ACameraCaptureSession* /*session*/,
- ACameraCaptureSession_logicalCamera_captureCallbacksV2* /*callbacks*/,
- int /*numOutputWindows*/, ANativeWindow** /*windows*/,
- int* /*captureSequenceId*/) {
+ ACameraCaptureSession* session,
+ /*optional*/ACameraCaptureSession_logicalCamera_captureCallbacksV2* callbacks,
+ int numOutputWindows, ANativeWindow** windows,
+ /*optional*/int* captureSequenceId) {
ATRACE_CALL();
- // Todo: need to add implementation
- return ACAMERA_OK;
+ return startStreamingTemplate(session, callbacks, numOutputWindows, windows,
+ captureSequenceId);
}
EXPORT
-camera_status_t ACameraCaptureSessionShared_stopStreaming(ACameraCaptureSession* /*session*/) {
+camera_status_t ACameraCaptureSessionShared_stopStreaming(ACameraCaptureSession* session) {
ATRACE_CALL();
- // Todo: need to add implementation
- return ACAMERA_OK;
+ if (session == nullptr) {
+ ALOGE("%s: Error: session is null", __FUNCTION__);
+ return ACAMERA_ERROR_INVALID_PARAMETER;
+ }
+
+ if (session->isClosed()) {
+ ALOGE("%s: session %p is already closed", __FUNCTION__, session);
+ return ACAMERA_ERROR_SESSION_CLOSED;
+ }
+ return session->stopStreaming();
}
EXPORT
diff --git a/camera/ndk/NdkCameraCaptureSession.inc b/camera/ndk/NdkCameraCaptureSession.inc
index 258e20d..3112735 100644
--- a/camera/ndk/NdkCameraCaptureSession.inc
+++ b/camera/ndk/NdkCameraCaptureSession.inc
@@ -68,3 +68,24 @@
return session->setRepeatingRequest(cbs, numRequests, requests, captureSequenceId);
}
+
+template <class CallbackType>
+camera_status_t startStreamingTemplate(ACameraCaptureSession* session,
+ /*optional*/CallbackType* callbacks,
+ int numOutputWindows, ANativeWindow** windows,
+ /*optional*/int* captureSequenceId) {
+ ATRACE_CALL();
+ if (session == nullptr || windows == nullptr || numOutputWindows < 1) {
+ ALOGE("%s: Error: invalid input: session %p, numOutputWindows %d, windows %p",
+ __FUNCTION__, session, numOutputWindows, windows);
+ return ACAMERA_ERROR_INVALID_PARAMETER;
+ }
+ if (session->isClosed()) {
+ ALOGE("%s: session %p is already closed", __FUNCTION__, session);
+ if (captureSequenceId) {
+ *captureSequenceId = CAPTURE_SEQUENCE_ID_NONE;
+ }
+ return ACAMERA_ERROR_SESSION_CLOSED;
+ }
+ return session->startStreaming(callbacks, numOutputWindows, windows, captureSequenceId);
+}
diff --git a/camera/ndk/NdkCameraDevice.cpp b/camera/ndk/NdkCameraDevice.cpp
index f2ec573..bc6b87a 100644
--- a/camera/ndk/NdkCameraDevice.cpp
+++ b/camera/ndk/NdkCameraDevice.cpp
@@ -59,6 +59,9 @@
__FUNCTION__, device, request);
return ACAMERA_ERROR_INVALID_PARAMETER;
}
+ if (device->isSharedMode() && !device->isPrimaryClient()) {
+ return ACAMERA_ERROR_UNSUPPORTED_OPERATION;
+ }
switch (templateId) {
case TEMPLATE_PREVIEW:
case TEMPLATE_STILL_CAPTURE:
@@ -86,6 +89,9 @@
__FUNCTION__, device, request, physicalCameraIdList);
return ACAMERA_ERROR_INVALID_PARAMETER;
}
+ if (device->isSharedMode() && !device->isPrimaryClient()) {
+ return ACAMERA_ERROR_UNSUPPORTED_OPERATION;
+ }
switch (templateId) {
case TEMPLATE_PREVIEW:
case TEMPLATE_STILL_CAPTURE:
diff --git a/camera/ndk/NdkCameraManager.cpp b/camera/ndk/NdkCameraManager.cpp
index 28cc9af..a2c34e3 100644
--- a/camera/ndk/NdkCameraManager.cpp
+++ b/camera/ndk/NdkCameraManager.cpp
@@ -227,6 +227,11 @@
__FUNCTION__, mgr, cameraId, callback, device, primaryClient);
return ACAMERA_ERROR_INVALID_PARAMETER;
}
+ bool sharedMode;
+ camera_status_t status = mgr->isCameraDeviceSharingSupported(cameraId, &sharedMode);
+ if ((status != ACAMERA_OK) || !sharedMode) {
+ return ACAMERA_ERROR_UNSUPPORTED_OPERATION;
+ }
return mgr->openCamera(cameraId, /*sharedMode*/true, callback, device, primaryClient);
}
diff --git a/camera/ndk/impl/ACameraCaptureSession.cpp b/camera/ndk/impl/ACameraCaptureSession.cpp
index 449c0b4..bda1f40 100644
--- a/camera/ndk/impl/ACameraCaptureSession.cpp
+++ b/camera/ndk/impl/ACameraCaptureSession.cpp
@@ -96,6 +96,9 @@
camera_status_t ret;
dev->lockDeviceForSessionOps();
{
+ if (dev->isSharedMode() && !dev->isPrimaryClient()) {
+ return ACAMERA_ERROR_UNSUPPORTED_OPERATION;
+ }
Mutex::Autolock _l(mSessionLock);
ret = dev->stopRepeatingLocked();
}
@@ -103,6 +106,27 @@
return ret;
}
+camera_status_t ACameraCaptureSession::stopStreaming() {
+#ifdef __ANDROID_VNDK__
+ std::shared_ptr<acam::CameraDevice> dev = getDevicePtr();
+#else
+ sp<acam::CameraDevice> dev = getDeviceSp();
+#endif
+ if (dev == nullptr) {
+ ALOGE("Error: Device associated with session %p has been closed!", this);
+ return ACAMERA_ERROR_SESSION_CLOSED;
+ }
+
+ camera_status_t ret;
+ dev->lockDeviceForSessionOps();
+ {
+ Mutex::Autolock _l(mSessionLock);
+ ret = dev->stopStreamingLocked();
+ }
+ dev->unlockDevice();
+ return ret;
+}
+
camera_status_t
ACameraCaptureSession::abortCaptures() {
#ifdef __ANDROID_VNDK__
@@ -118,6 +142,9 @@
camera_status_t ret;
dev->lockDeviceForSessionOps();
{
+ if (dev->isSharedMode() && !dev->isPrimaryClient()) {
+ return ACAMERA_ERROR_UNSUPPORTED_OPERATION;
+ }
Mutex::Autolock _l(mSessionLock);
ret = dev->flushLocked(this);
}
@@ -139,6 +166,9 @@
camera_status_t ret;
dev->lockDeviceForSessionOps();
{
+ if (dev->isSharedMode()) {
+ return ACAMERA_ERROR_UNSUPPORTED_OPERATION;
+ }
Mutex::Autolock _l(mSessionLock);
ret = dev->updateOutputConfigurationLocked(output);
}
@@ -160,6 +190,9 @@
camera_status_t ret;
dev->lockDeviceForSessionOps();
{
+ if (dev->isSharedMode()) {
+ return ACAMERA_ERROR_UNSUPPORTED_OPERATION;
+ }
Mutex::Autolock _l(mSessionLock);
ret = dev->prepareLocked(window);
}
diff --git a/camera/ndk/impl/ACameraCaptureSession.h b/camera/ndk/impl/ACameraCaptureSession.h
index 0d7a2c1..eb13b96 100644
--- a/camera/ndk/impl/ACameraCaptureSession.h
+++ b/camera/ndk/impl/ACameraCaptureSession.h
@@ -150,6 +150,12 @@
ACameraDevice* getDevice();
+ template<class T>
+ camera_status_t startStreaming(/*optional*/T* callbacks,
+ int numOutputWindows, ANativeWindow** windows, /*optional*/int* captureSequenceId);
+
+ camera_status_t stopStreaming();
+
private:
friend class android::acam::CameraDevice;
diff --git a/camera/ndk/impl/ACameraCaptureSession.inc b/camera/ndk/impl/ACameraCaptureSession.inc
index da535f8..695eb37 100644
--- a/camera/ndk/impl/ACameraCaptureSession.inc
+++ b/camera/ndk/impl/ACameraCaptureSession.inc
@@ -42,6 +42,9 @@
camera_status_t ret;
dev->lockDeviceForSessionOps();
{
+ if (dev->isSharedMode() && !dev->isPrimaryClient()) {
+ return ACAMERA_ERROR_UNSUPPORTED_OPERATION;
+ }
Mutex::Autolock _l(mSessionLock);
ret = dev->setRepeatingRequestsLocked(
this, cbs, numRequests, requests, captureSequenceId);
@@ -67,9 +70,37 @@
camera_status_t ret;
dev->lockDeviceForSessionOps();
{
+ if (dev->isSharedMode() && !dev->isPrimaryClient()) {
+ return ACAMERA_ERROR_UNSUPPORTED_OPERATION;
+ }
Mutex::Autolock _l(mSessionLock);
ret = dev->captureLocked(this, cbs, numRequests, requests, captureSequenceId);
}
dev->unlockDevice();
return ret;
}
+
+template <class T>
+camera_status_t ACameraCaptureSession::startStreaming(
+ /*optional*/T* callbacks, int numOutputWindows, ANativeWindow** windows,
+ /*optional*/int* captureSequenceId) {
+#ifdef __ANDROID_VNDK__
+ std::shared_ptr<acam::CameraDevice> dev = getDevicePtr();
+#else
+ sp<acam::CameraDevice> dev = getDeviceSp();
+#endif
+ if (dev == nullptr) {
+ ALOGE("Error: Device associated with session %p has been closed!", this);
+ return ACAMERA_ERROR_SESSION_CLOSED;
+ }
+
+ camera_status_t ret;
+ dev->lockDeviceForSessionOps();
+ {
+ Mutex::Autolock _l(mSessionLock);
+ ret = dev->startStreamingLocked(this, callbacks, numOutputWindows, windows,
+ captureSequenceId);
+ }
+ dev->unlockDevice();
+ return ret;
+}
diff --git a/camera/ndk/impl/ACameraDevice.cpp b/camera/ndk/impl/ACameraDevice.cpp
index 4d21467..704dfc1 100644
--- a/camera/ndk/impl/ACameraDevice.cpp
+++ b/camera/ndk/impl/ACameraDevice.cpp
@@ -272,6 +272,28 @@
}
}
+camera_status_t CameraDevice::stopStreamingLocked() {
+ camera_status_t ret = checkCameraClosedOrErrorLocked();
+ if (ret != ACAMERA_OK) {
+ ALOGE("%s: camera is in closed or error state %d", __FUNCTION__, ret);
+ return ret;
+ }
+ ret = stopRepeatingLocked();
+ if (ret != ACAMERA_OK) {
+ ALOGE("%s: error when trying to stop streaming %d", __FUNCTION__, ret);
+ return ret;
+ }
+ for (auto& outputTarget : mPreviewRequestOutputs) {
+ ACameraOutputTarget_free(outputTarget);
+ }
+ mPreviewRequestOutputs.clear();
+ if (mPreviewRequest) {
+ ACaptureRequest_free(mPreviewRequest);
+ mPreviewRequest = nullptr;
+ }
+ return ACAMERA_OK;
+}
+
camera_status_t CameraDevice::updateOutputConfigurationLocked(ACaptureSessionOutput *output) {
camera_status_t ret = checkCameraClosedOrErrorLocked();
if (ret != ACAMERA_OK) {
@@ -715,10 +737,14 @@
return ret;
}
- ret = waitUntilIdleLocked();
- if (ret != ACAMERA_OK) {
- ALOGE("Camera device %s wait until idle failed, ret %d", getId(), ret);
- return ret;
+ // If the device is opened in shared mode, multiple clients may be accessing the
+ // camera device, so do not wait for it to become idle.
+ if ((!flags::camera_multi_client()) || (!mSharedMode)) {
+ ret = waitUntilIdleLocked();
+ if (ret != ACAMERA_OK) {
+ ALOGE("Camera device %s wait until idle failed, ret %d", getId(), ret);
+ return ret;
+ }
}
// Send onReady to previous session
diff --git a/camera/ndk/impl/ACameraDevice.h b/camera/ndk/impl/ACameraDevice.h
index ea7d9b6..067923c 100644
--- a/camera/ndk/impl/ACameraDevice.h
+++ b/camera/ndk/impl/ACameraDevice.h
@@ -127,6 +127,7 @@
void setPrimaryClient(bool isPrimary) {mIsPrimaryClient = isPrimary;};
bool isPrimaryClient() {return mIsPrimaryClient;};
+ bool isSharedMode() {return mSharedMode;};
private:
friend ACameraCaptureSession;
@@ -143,6 +144,15 @@
camera_status_t waitUntilIdleLocked();
+ camera_status_t stopStreamingLocked();
+
+ template<class T>
+ camera_status_t startStreamingLocked(ACameraCaptureSession* session,
+ /*optional*/T* callbacks,
+ int numOutputWindows, ANativeWindow** windows, /*optional*/int* captureSequenceId);
+
+ ACaptureRequest* mPreviewRequest = nullptr;
+ std::vector<ACameraOutputTarget*> mPreviewRequestOutputs;
template<class T>
camera_status_t captureLocked(sp<ACameraCaptureSession> session,
@@ -476,11 +486,15 @@
mDevice->setPrimaryClient(isPrimary);
}
- inline bool isPrimaryClient() {
+ inline bool isPrimaryClient() const {
return mDevice->isPrimaryClient();
}
- private:
+ inline bool isSharedMode() const {
+ return mDevice->isSharedMode();
+ }
+
+ private:
android::sp<android::acam::CameraDevice> mDevice;
};
diff --git a/camera/ndk/impl/ACameraDevice.inc b/camera/ndk/impl/ACameraDevice.inc
index 1fc5352..7e70d39 100644
--- a/camera/ndk/impl/ACameraDevice.inc
+++ b/camera/ndk/impl/ACameraDevice.inc
@@ -126,5 +126,102 @@
return ACAMERA_OK;
}
+template<class T>
+camera_status_t CameraDevice::startStreamingLocked(ACameraCaptureSession* session,
+ /*optional*/T* callbacks, int numOutputWindows,
+ ANativeWindow** windows, /*optional*/int* captureSequenceId) {
+ camera_status_t ret = checkCameraClosedOrErrorLocked();
+ if (ret != ACAMERA_OK) {
+ ALOGE("%s: camera is in closed or error state %d", __FUNCTION__, ret);
+ return ret;
+ }
+ CameraMetadata rawPreviewRequest;
+ binder::Status remoteRet = mRemote->createDefaultRequest(TEMPLATE_PREVIEW, &rawPreviewRequest);
+ if (!remoteRet.isOk()) {
+ ALOGE("%s: Create capture request failed: %s", __FUNCTION__, remoteRet.toString8().c_str());
+ return ACAMERA_ERROR_UNKNOWN;
+ }
+ // TODO: Check if the memory allocation can be freed automatically using either default_delete
+ // or ScopedAResource.
+ mPreviewRequest = new ACaptureRequest();
+ mPreviewRequest->settings = new ACameraMetadata(rawPreviewRequest.release(),
+ ACameraMetadata::ACM_REQUEST);
+ mPreviewRequest->targets = new ACameraOutputTargets();
+ for (int i = 0; i < numOutputWindows ; i++) {
+ ACameraOutputTarget* outputTarget = nullptr;
+ ret = ACameraOutputTarget_create(windows[i], &outputTarget);
+ if (ret != ACAMERA_OK) {
+ ALOGE("%s: error while ACameraOutputTarget_create %d", __FUNCTION__, ret);
+ return ret;
+ }
+ ret = ACaptureRequest_addTarget(mPreviewRequest, outputTarget);
+ if (ret != ACAMERA_OK) {
+ ALOGE("%s: error while ACaptureRequest_addTarget %d", __FUNCTION__, ret);
+ return ret;
+ }
+ mPreviewRequestOutputs.push_back(outputTarget);
+ }
+
+ sp<CaptureRequest> req;
+ ret = allocateCaptureRequest(mPreviewRequest, req);
+ if (ret != ACAMERA_OK) {
+ ALOGE("Convert capture request to internal format failure! ret %d", ret);
+ return ret;
+ }
+ if (req->mSurfaceList.empty()) {
+ ALOGE("Capture request without output target cannot be submitted!");
+ return ACAMERA_ERROR_INVALID_PARAMETER;
+ }
+
+ // In shared session mode, if other clients are actively streaming then
+ // stopRepeating does not actually ask the HAL to cancel the repeating request.
+ // The camera service uses this call to remove the surfaces this client provided in its
+ // previous streaming request. If this is the only client for the shared camera device,
+ // the camera service asks the HAL to cancel the previous repeating request.
+ ret = stopRepeatingLocked();
+ if (ret != ACAMERA_OK) {
+ ALOGE("Camera %s stop repeating failed! ret %d", getId(), ret);
+ return ret;
+ }
+
+ hardware::camera2::utils::SubmitInfo info;
+ std::vector<int> streamIds(req->mStreamIdxList.begin(), req->mStreamIdxList.end());
+ std::vector<int> surfaceIds(req->mSurfaceIdxList.begin(), req->mSurfaceIdxList.end());
+ remoteRet = mRemote->startStreaming(streamIds, surfaceIds, &info);
+ int sequenceId = info.mRequestId;
+ int64_t lastFrameNumber = info.mLastFrameNumber;
+ if (sequenceId < 0) {
+ ALOGE("Camera %s start streaming remote failure: ret %d", getId(), sequenceId);
+ return ACAMERA_ERROR_UNKNOWN;
+ }
+
+ Vector<sp<CaptureRequest> > requestsV;
+ requestsV.push_back(req);
+ CallbackHolder cbHolder(session, requestsV, true, callbacks);
+ mSequenceCallbackMap.insert(std::make_pair(sequenceId, cbHolder));
+
+ // stopRepeating above should have cleaned up the repeating sequence id
+ if (mRepeatingSequenceId != REQUEST_ID_NONE) {
+ setCameraDeviceErrorLocked(ACAMERA_ERROR_CAMERA_DEVICE);
+ return ACAMERA_ERROR_CAMERA_DEVICE;
+ }
+ mRepeatingSequenceId = sequenceId;
+
+ if (mIdle) {
+ sp<AMessage> msg = new AMessage(kWhatSessionStateCb, mHandler);
+ msg->setPointer(kContextKey, session->mUserSessionCallback.context);
+ msg->setObject(kSessionSpKey, session);
+ msg->setPointer(kCallbackFpKey, (void*) session->mUserSessionCallback.onActive);
+ postSessionMsgAndCleanup(msg);
+ }
+ mIdle = false;
+ mBusySession = session;
+
+ if (captureSequenceId) {
+ *captureSequenceId = sequenceId;
+ }
+ return ACAMERA_OK;
+}
+
} // namespace acam
} // namespace android
diff --git a/camera/ndk/include/camera/NdkCameraCaptureSession.h b/camera/ndk/include/camera/NdkCameraCaptureSession.h
index 06e1d34..c2aae1c 100644
--- a/camera/ndk/include/camera/NdkCameraCaptureSession.h
+++ b/camera/ndk/include/camera/NdkCameraCaptureSession.h
@@ -1242,8 +1242,7 @@
* </ul>
*/
camera_status_t ACameraCaptureSessionShared_stopStreaming(
- ACameraCaptureSession *sharedSession
-) __INTRODUCED_IN(36);
+ ACameraCaptureSession* sharedSession) __INTRODUCED_IN(36);
__END_DECLS
#endif /* _NDK_CAMERA_CAPTURE_SESSION_H */
diff --git a/camera/ndk/ndk_vendor/impl/ACameraDevice.cpp b/camera/ndk/ndk_vendor/impl/ACameraDevice.cpp
index d3a8e0d..b65aedf 100644
--- a/camera/ndk/ndk_vendor/impl/ACameraDevice.cpp
+++ b/camera/ndk/ndk_vendor/impl/ACameraDevice.cpp
@@ -562,6 +562,28 @@
return ACAMERA_OK;
}
+camera_status_t CameraDevice::stopStreamingLocked() {
+ camera_status_t ret = checkCameraClosedOrErrorLocked();
+ if (ret != ACAMERA_OK) {
+ ALOGE("%s: camera is in closed or error state %d", __FUNCTION__, ret);
+ return ret;
+ }
+ ret = stopRepeatingLocked();
+ if (ret != ACAMERA_OK) {
+ ALOGE("%s: error when trying to stop streaming %d", __FUNCTION__, ret);
+ return ret;
+ }
+ for (auto& outputTarget : mPreviewRequestOutputs) {
+ ACameraOutputTarget_free(outputTarget);
+ }
+ mPreviewRequestOutputs.clear();
+ if (mPreviewRequest) {
+ ACaptureRequest_free(mPreviewRequest);
+ mPreviewRequest = nullptr;
+ }
+ return ACAMERA_OK;
+}
+
camera_status_t CameraDevice::flushLocked(ACameraCaptureSession* session) {
camera_status_t ret = checkCameraClosedOrErrorLocked();
if (ret != ACAMERA_OK) {
diff --git a/camera/ndk/ndk_vendor/impl/ACameraDevice.h b/camera/ndk/ndk_vendor/impl/ACameraDevice.h
index 6ba30bb..5d03e95 100644
--- a/camera/ndk/ndk_vendor/impl/ACameraDevice.h
+++ b/camera/ndk/ndk_vendor/impl/ACameraDevice.h
@@ -157,6 +157,7 @@
void stopLooperAndDisconnect();
void setPrimaryClient(bool isPrimary) {mIsPrimaryClient = isPrimary;};
bool isPrimaryClient() {return mIsPrimaryClient;};
+ bool isSharedMode() {return mSharedMode;};
private:
friend ACameraCaptureSession;
@@ -195,6 +196,13 @@
/*out*/int* captureSequenceId,
bool isRepeating);
+ camera_status_t stopStreamingLocked();
+
+ template<class T>
+ camera_status_t startStreamingLocked(ACameraCaptureSession* session,
+ /*optional*/T* callbacks,
+ int numOutputWindows, ANativeWindow** windows, /*optional*/int* captureSequenceId);
+
void addRequestSettingsMetadata(ACaptureRequest *aCaptureRequest, sp<CaptureRequest> &req);
camera_status_t updateOutputConfigurationLocked(ACaptureSessionOutput *output);
@@ -237,6 +245,8 @@
ACameraDevice* mWrapper;
bool mSharedMode;
bool mIsPrimaryClient;
+ ACaptureRequest* mPreviewRequest = nullptr;
+ std::vector<ACameraOutputTarget*> mPreviewRequestOutputs;
// stream id -> pair of (ACameraWindowType* from application, OutputConfiguration used for
// camera service)
@@ -490,9 +500,12 @@
inline void setPrimaryClient(bool isPrimary) {
mDevice->setPrimaryClient(isPrimary);
}
- inline bool isPrimaryClient() {
+ inline bool isPrimaryClient() const {
return mDevice->isPrimaryClient();
}
+ inline bool isSharedMode() const {
+ return mDevice->isSharedMode();
+ }
private:
std::shared_ptr<android::acam::CameraDevice> mDevice;
diff --git a/camera/ndk/ndk_vendor/impl/ACameraDeviceVendor.inc b/camera/ndk/ndk_vendor/impl/ACameraDeviceVendor.inc
index 1e724eb..1f568d2 100644
--- a/camera/ndk/ndk_vendor/impl/ACameraDeviceVendor.inc
+++ b/camera/ndk/ndk_vendor/impl/ACameraDeviceVendor.inc
@@ -147,5 +147,131 @@
return ACAMERA_OK;
}
+template<class T>
+camera_status_t CameraDevice::startStreamingLocked(ACameraCaptureSession* session,
+ /*optional*/T* callbacks, int numOutputWindows,
+ ANativeWindow** windows, /*optional*/int* captureSequenceId) {
+ camera_status_t ret = checkCameraClosedOrErrorLocked();
+ if (ret != ACAMERA_OK) {
+ ALOGE("%s: camera is in closed or error state %d", __FUNCTION__, ret);
+ return ret;
+ }
+ utils::AidlCameraMetadata aidlMetadata;
+ ndk::ScopedAStatus remoteRet = mRemote->createDefaultRequest(utils::TemplateId::PREVIEW,
+ &aidlMetadata);
+ if (!remoteRet.isOk()) {
+ if (remoteRet.getExceptionCode() == EX_SERVICE_SPECIFIC) {
+ Status errStatus = static_cast<Status>(remoteRet.getServiceSpecificError());
+ ALOGE("%s: submitRequestList call failed: %s",
+ __FUNCTION__, toString(errStatus).c_str());
+ return utils::convertFromAidl(errStatus);
+ } else {
+ ALOGE("%s: Transaction error for submitRequestList call: %d", __FUNCTION__,
+ remoteRet.getExceptionCode());
+ return ACAMERA_ERROR_UNKNOWN;
+ }
+ }
+ camera_metadata_t* rawPreviewRequest;
+ utils::cloneFromAidl(aidlMetadata, &rawPreviewRequest);
+ // TODO: Check if the memory allocation can be freed automatically using either default_delete
+ // or ScopedAResource.
+ mPreviewRequest = new ACaptureRequest();
+ mPreviewRequest->settings = new ACameraMetadata(rawPreviewRequest,
+ ACameraMetadata::ACM_REQUEST);
+ mPreviewRequest->targets = new ACameraOutputTargets();
+ for (int i = 0; i < numOutputWindows ; i++) {
+ ACameraOutputTarget* outputTarget = nullptr;
+ ret = ACameraOutputTarget_create(windows[i], &outputTarget);
+ if (ret != ACAMERA_OK) {
+ ALOGE("%s: error while ACameraOutputTarget_create %d", __FUNCTION__, ret);
+ return ret;
+ }
+ ret = ACaptureRequest_addTarget(mPreviewRequest, outputTarget);
+ if (ret != ACAMERA_OK) {
+ ALOGE("%s: error while ACaptureRequest_addTarget %d", __FUNCTION__, ret);
+ return ret;
+ }
+ mPreviewRequestOutputs.push_back(outputTarget);
+ }
+
+ std::vector<sp<CaptureRequest>> requestsV;
+ sp<CaptureRequest> req;
+ ret = allocateCaptureRequestLocked(mPreviewRequest, req);
+ if (ret != ACAMERA_OK) {
+ ALOGE("Convert capture request to internal format failure! ret %d", ret);
+ return ret;
+ }
+ // We need to call this method since, after the request is submitted to the
+ // camera service, the request metadata queue might have removed the capture
+ // request metadata. Therefore we simply add the metadata to its wrapper class,
+ // so that it can be retrieved later.
+ addRequestSettingsMetadata(mPreviewRequest, req);
+ if (req->mCaptureRequest.streamAndWindowIds.size() == 0) {
+ ALOGE("Capture request without output target cannot be submitted!");
+ return ACAMERA_ERROR_INVALID_PARAMETER;
+ }
+ requestsV.push_back(req);
+ // In shared session mode, if other clients are actively streaming then
+ // stopRepeating does not actually ask the HAL to cancel the repeating request.
+ // The camera service uses this call to remove the surfaces this client provided in its
+ // previous streaming request. If this is the only client for the shared camera device,
+ // the camera service asks the HAL to cancel the previous repeating request.
+ ret = stopRepeatingLocked();
+ if (ret != ACAMERA_OK) {
+ ALOGE("Camera %s stop repeating failed! ret %d", getId(), ret);
+ return ret;
+ }
+ SubmitInfo info;
+ std::vector<int> streamIds;
+ std::vector<int> surfaceIds;
+ for (const auto& streamAndWindowId : req->mCaptureRequest.streamAndWindowIds) {
+ streamIds.push_back(streamAndWindowId.streamId);
+ surfaceIds.push_back(streamAndWindowId.windowId);
+ }
+ remoteRet = mRemote->startStreaming(streamIds, surfaceIds, &info);
+ if (!remoteRet.isOk()) {
+ if (remoteRet.getExceptionCode() == EX_SERVICE_SPECIFIC) {
+ Status errStatus = static_cast<Status>(remoteRet.getServiceSpecificError());
+ ALOGE("%s: startStreaming call failed: %s",
+ __FUNCTION__, toString(errStatus).c_str());
+ return utils::convertFromAidl(errStatus);
+ } else {
+ ALOGE("%s: Transaction error for startStreaming call: %d", __FUNCTION__,
+ remoteRet.getExceptionCode());
+ return ACAMERA_ERROR_UNKNOWN;
+ }
+ }
+
+ int32_t sequenceId = info.requestId;
+ int64_t lastFrameNumber = info.lastFrameNumber;
+ if (sequenceId < 0) {
+ ALOGE("Camera %s submit request remote failure: ret %d", getId(), sequenceId);
+ return ACAMERA_ERROR_UNKNOWN;
+ }
+ CallbackHolder cbHolder(session, requestsV, true, callbacks);
+ mSequenceCallbackMap.insert(std::make_pair(sequenceId, cbHolder));
+ // stopRepeating above should have cleaned up the repeating sequence id
+ if (mRepeatingSequenceId != REQUEST_ID_NONE) {
+ setCameraDeviceErrorLocked(ACAMERA_ERROR_CAMERA_DEVICE);
+ return ACAMERA_ERROR_CAMERA_DEVICE;
+ }
+ mRepeatingSequenceId = sequenceId;
+
+ if (mIdle) {
+ sp<AMessage> msg = new AMessage(kWhatSessionStateCb, mHandler);
+ msg->setPointer(kContextKey, session->mUserSessionCallback.context);
+ msg->setObject(kSessionSpKey, session);
+ msg->setPointer(kCallbackFpKey, (void*) session->mUserSessionCallback.onActive);
+ postSessionMsgAndCleanup(msg);
+ }
+ mIdle = false;
+ mBusySession = session;
+
+ if (captureSequenceId) {
+ *captureSequenceId = sequenceId;
+ }
+ return ACAMERA_OK;
+}
+
} // namespace acam
} // namespace android
diff --git a/services/camera/libcameraservice/aidl/AidlCameraDeviceUser.cpp b/services/camera/libcameraservice/aidl/AidlCameraDeviceUser.cpp
index fc987b2..eacfeac 100644
--- a/services/camera/libcameraservice/aidl/AidlCameraDeviceUser.cpp
+++ b/services/camera/libcameraservice/aidl/AidlCameraDeviceUser.cpp
@@ -202,6 +202,20 @@
return fromUStatus(ret);
}
+ndk::ScopedAStatus AidlCameraDeviceUser::startStreaming(
+ const std::vector<int32_t>& in_streamIdxArray,
+ const std::vector<int32_t>& in_surfaceIdxArray, SSubmitInfo* _aidl_return){
+ USubmitInfo submitInfo;
+ UStatus ret = mDeviceRemote->startStreaming(in_streamIdxArray, in_surfaceIdxArray, &submitInfo);
+ if (!ret.isOk()) {
+ ALOGE("%s: Failed to start streaming: %s", __FUNCTION__, ret.toString8().c_str());
+ return fromUStatus(ret);
+ }
+ mRequestId = submitInfo.mRequestId;
+ convertToAidl(submitInfo, _aidl_return);
+ return ScopedAStatus::ok();
+}
+
ndk::ScopedAStatus AidlCameraDeviceUser::flush(int64_t* _aidl_return) {
UStatus ret = mDeviceRemote->flush(_aidl_return);
return fromUStatus(ret);
diff --git a/services/camera/libcameraservice/aidl/AidlCameraDeviceUser.h b/services/camera/libcameraservice/aidl/AidlCameraDeviceUser.h
index 8fa33f7..25464a5 100644
--- a/services/camera/libcameraservice/aidl/AidlCameraDeviceUser.h
+++ b/services/camera/libcameraservice/aidl/AidlCameraDeviceUser.h
@@ -99,6 +99,9 @@
ndk::ScopedAStatus isPrimaryClient(bool* _aidl_return) override;
+ ndk::ScopedAStatus startStreaming(const std::vector<int32_t>& in_streamIdxArray,
+ const std::vector<int32_t>& in_surfaceIdxArray, SSubmitInfo* _aidl_return) override;
+
private:
bool initDevice();
diff --git a/services/camera/libcameraservice/api2/CameraDeviceClient.cpp b/services/camera/libcameraservice/api2/CameraDeviceClient.cpp
index ebd8eb1..0bae305 100644
--- a/services/camera/libcameraservice/api2/CameraDeviceClient.cpp
+++ b/services/camera/libcameraservice/api2/CameraDeviceClient.cpp
@@ -60,6 +60,7 @@
using namespace camera2;
using namespace camera3;
using camera3::camera_stream_rotation_t::CAMERA_STREAM_ROTATION_0;
+using hardware::camera2::ICameraDeviceUser::NO_IN_FLIGHT_REPEATING_FRAMES;
namespace flags = com::android::internal::camera::flags;
@@ -94,6 +95,7 @@
sharedMode),
mInputStream(),
mStreamingRequestId(REQUEST_ID_NONE),
+ mStreamingRequestLastFrameNumber(NO_IN_FLIGHT_REPEATING_FRAMES),
mRequestIdCounter(0),
mOverrideForPerfClass(overrideForPerfClass),
mOriginalCameraId(originalCameraId),
@@ -118,13 +120,23 @@
return res;
}
- mFrameProcessor = new FrameProcessorBase(mDevice);
- std::string threadName = std::string("CDU-") + mCameraIdStr + "-FrameProc";
- res = mFrameProcessor->run(threadName.c_str());
- if (res != OK) {
- ALOGE("%s: Unable to start frame processor thread: %s (%d)",
- __FUNCTION__, strerror(-res), res);
- return res;
+ if (flags::camera_multi_client() && mSharedMode) {
+ // In shared camera device mode, there can be more than one client, and
+ // the frame processor thread is started by the shared camera device.
+ mFrameProcessor = mDevice->getSharedFrameProcessor();
+ if (mFrameProcessor == nullptr) {
+ ALOGE("%s: Unable to start frame processor thread", __FUNCTION__);
+ return UNKNOWN_ERROR;
+ }
+ } else {
+ mFrameProcessor = new FrameProcessorBase(mDevice);
+ std::string threadName = std::string("CDU-") + mCameraIdStr + "-FrameProc";
+ res = mFrameProcessor->run(threadName.c_str());
+ if (res != OK) {
+ ALOGE("%s: Unable to start frame processor thread: %s (%d)",
+ __FUNCTION__, strerror(-res), res);
+ return res;
+ }
}
mFrameProcessor->registerListener(camera2::FrameProcessorBase::FRAME_PROCESSOR_LISTENER_MIN_ID,
@@ -292,6 +304,97 @@
return intersection;
}
+binder::Status CameraDeviceClient::startStreaming(const std::vector<int>& streamIds,
+ const std::vector<int>& surfaceIds,
+ /*out*/
+ hardware::camera2::utils::SubmitInfo *submitInfo) {
+ ATRACE_CALL();
+ ALOGV("%s-start of function. Stream list size %zu. Surface list size %zu", __FUNCTION__,
+ streamIds.size(), surfaceIds.size());
+
+ binder::Status res = binder::Status::ok();
+ status_t err;
+ if ( !(res = checkPidStatus(__FUNCTION__) ).isOk()) {
+ return res;
+ }
+
+ Mutex::Autolock icl(mBinderSerializationLock);
+
+ if (!mDevice.get()) {
+ return STATUS_ERROR(CameraService::ERROR_DISCONNECTED, "Camera device no longer alive");
+ }
+
+ if (!flags::camera_multi_client() || !mSharedMode) {
+ ALOGE("%s: Camera %s: Invalid operation.", __FUNCTION__, mCameraIdStr.c_str());
+ return STATUS_ERROR(CameraService::ERROR_INVALID_OPERATION, "Invalid operation");
+ }
+
+ if (streamIds.empty() || surfaceIds.empty()) {
+ ALOGE("%s: Camera %s: Sent empty streamIds or surface Ids. Rejecting request.",
+ __FUNCTION__, mCameraIdStr.c_str());
+ return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT, "Empty Stream or surface Ids");
+ }
+
+ if (streamIds.size() != surfaceIds.size()) {
+ ALOGE("%s: Camera %s: Sent different size array for stream and surface Ids.",
+ __FUNCTION__, mCameraIdStr.c_str());
+ return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT,
+ "Stream and surface Ids are not of same size");
+ }
+
+ submitInfo->mRequestId = mRequestIdCounter;
+ SurfaceMap surfaceMap;
+ Vector<int32_t> outputStreamIds;
+ for (size_t i = 0; i < streamIds.size(); i++) {
+ int streamId = streamIds[i];
+ int surfaceIdx = surfaceIds[i];
+
+ ssize_t index = mConfiguredOutputs.indexOfKey(streamId);
+ if (index < 0) {
+ ALOGE("%s: Camera %s: Tried to start streaming with a surface that"
+ " we have not called createStream on: stream %d",
+ __FUNCTION__, mCameraIdStr.c_str(), streamId);
+ return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT,
+ "Start streaming targets Surface that is not part of current capture session");
+ }
+
+ const auto& surfaces = mConfiguredOutputs.valueAt(index).getSurfaces();
+ if ((size_t)surfaceIdx >= surfaces.size()) {
+ ALOGE("%s: Camera %s: Tried to start streaming with a surface that"
+ " we have not called createStream on: stream %d, surfaceIdx %d",
+ __FUNCTION__, mCameraIdStr.c_str(), streamId, surfaceIdx);
+ return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT,
+ "Start streaming targets Surface has invalid surface index");
+ }
+
+ res = insertSurfaceLocked(surfaces[surfaceIdx], &surfaceMap, &outputStreamIds, nullptr);
+
+ if (!res.isOk()) {
+ return res;
+ }
+ }
+
+ mRequestIdCounter++;
+ int sharedReqID;
+
+ err = mDevice->startStreaming(submitInfo->mRequestId, surfaceMap, &sharedReqID,
+ &(submitInfo->mLastFrameNumber));
+ if (err != OK) {
+ std::string msg = fmt::sprintf(
+ "Camera %s: Got error %s (%d) after trying to start streaming request",
+ mCameraIdStr.c_str(), strerror(-err), err);
+ ALOGE("%s: %s", __FUNCTION__, msg.c_str());
+ return STATUS_ERROR(CameraService::ERROR_INVALID_OPERATION, msg.c_str());
+ } else {
+ Mutex::Autolock idLock(mStreamingRequestIdLock);
+ mStreamingRequestId = submitInfo->mRequestId;
+ mSharedStreamingRequest = {sharedReqID, submitInfo->mRequestId};
+ }
+
+ ALOGV("%s: Camera %s: End of function", __FUNCTION__, mCameraIdStr.c_str());
+ return binder::Status::ok();
+}
+
binder::Status CameraDeviceClient::submitRequestList(
const std::vector<hardware::camera2::CaptureRequest>& requests,
bool streaming,
@@ -318,6 +421,12 @@
return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT, "Empty request list");
}
+ if (flags::camera_multi_client() && mSharedMode && !mIsPrimaryClient) {
+ ALOGE("%s: Camera %s: This client is not a primary client of the shared camera device.",
+ __FUNCTION__, mCameraIdStr.c_str());
+ return STATUS_ERROR(CameraService::ERROR_INVALID_OPERATION, "Invalid Operation.");
+ }
+
List<const CameraDeviceBase::PhysicalCameraSettingsList> metadataRequestList;
std::list<SurfaceMap> surfaceMapList;
submitInfo->mRequestId = mRequestIdCounter;
@@ -594,9 +703,16 @@
}
mRequestIdCounter++;
+ int32_t sharedReqID;
if (streaming) {
- err = mDevice->setStreamingRequestList(metadataRequestList, surfaceMapList,
- &(submitInfo->mLastFrameNumber));
+ if (flags::camera_multi_client() && mSharedMode) {
+ err = mDevice->setSharedStreamingRequest(*metadataRequestList.begin(),
+ *surfaceMapList.begin(), &sharedReqID, &(submitInfo->mLastFrameNumber));
+ } else {
+ err = mDevice->setStreamingRequestList(metadataRequestList, surfaceMapList,
+ &(submitInfo->mLastFrameNumber));
+ }
+
if (err != OK) {
std::string msg = fmt::sprintf(
"Camera %s: Got error %s (%d) after trying to set streaming request",
@@ -607,10 +723,18 @@
} else {
Mutex::Autolock idLock(mStreamingRequestIdLock);
mStreamingRequestId = submitInfo->mRequestId;
+ if (flags::camera_multi_client() && mSharedMode) {
+ mSharedStreamingRequest = {sharedReqID, submitInfo->mRequestId};
+ }
}
} else {
- err = mDevice->captureList(metadataRequestList, surfaceMapList,
- &(submitInfo->mLastFrameNumber));
+ if (flags::camera_multi_client() && mSharedMode) {
+ err = mDevice->setSharedCaptureRequest(*metadataRequestList.begin(),
+ *surfaceMapList.begin(), &sharedReqID, &(submitInfo->mLastFrameNumber));
+ } else {
+ err = mDevice->captureList(metadataRequestList, surfaceMapList,
+ &(submitInfo->mLastFrameNumber));
+ }
if (err != OK) {
std::string msg = fmt::sprintf(
"Camera %s: Got error %s (%d) after trying to submit capture request",
@@ -619,6 +743,9 @@
res = STATUS_ERROR(CameraService::ERROR_INVALID_OPERATION,
msg.c_str());
}
+ if (flags::camera_multi_client() && mSharedMode) {
+ mSharedRequestMap[sharedReqID] = submitInfo->mRequestId;
+ }
ALOGV("%s: requestId = %d ", __FUNCTION__, submitInfo->mRequestId);
}
@@ -652,12 +779,19 @@
return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT, msg.c_str());
}
- err = mDevice->clearStreamingRequest(lastFrameNumber);
+ if (flags::camera_multi_client() && mSharedMode) {
+ err = mDevice->clearSharedStreamingRequest(lastFrameNumber);
+ } else {
+ err = mDevice->clearStreamingRequest(lastFrameNumber);
+ }
if (err == OK) {
ALOGV("%s: Camera %s: Successfully cleared streaming request",
__FUNCTION__, mCameraIdStr.c_str());
mStreamingRequestId = REQUEST_ID_NONE;
+ if (flags::camera_multi_client() && mSharedMode) {
+ mStreamingRequestLastFrameNumber = *lastFrameNumber;
+ }
} else {
res = STATUS_ERROR_FMT(CameraService::ERROR_INVALID_OPERATION,
"Camera %s: Error clearing streaming request: %s (%d)",
@@ -1042,7 +1176,7 @@
int streamId = camera3::CAMERA3_STREAM_ID_INVALID;
std::vector<int> surfaceIds;
if (flags::camera_multi_client() && mSharedMode) {
- err = mDevice->getSharedStreamId(outputConfiguration, &streamId);
+ err = mDevice->getSharedStreamId(streamInfo, &streamId);
if (err == OK) {
err = mDevice->addSharedSurfaces(streamId, streamInfos, surfaceHolders, &surfaceIds);
}
@@ -1590,6 +1724,10 @@
"Camera %s: Error flushing device: %s (%d)", mCameraIdStr.c_str(), strerror(-err),
err);
}
+ if (flags::camera_multi_client() && mSharedMode) {
+ mSharedRequestMap.clear();
+ mStreamingRequestLastFrameNumber = *lastFrameNumber;
+ }
return res;
}
@@ -2192,6 +2330,19 @@
sp<hardware::camera2::ICameraDeviceCallbacks> remoteCb = getRemoteCallback();
bool skipClientNotification = false;
+ if (flags::camera_multi_client() && mSharedMode) {
+ int clientReqId;
+ if (!matchClientRequest(resultExtras, &clientReqId)) {
+ return;
+ }
+ CaptureResultExtras mutableResultExtras = resultExtras;
+ mutableResultExtras.requestId = clientReqId;
+ mSharedRequestMap.erase(resultExtras.requestId);
+ if (remoteCb != 0) {
+ remoteCb->onDeviceError(errorCode, mutableResultExtras);
+ }
+ return;
+ }
{
// Access to the composite stream map must be synchronized
Mutex::Autolock l(mCompositeLock);
@@ -2254,10 +2405,24 @@
nsecs_t timestamp) {
// Thread safe. Don't bother locking.
sp<hardware::camera2::ICameraDeviceCallbacks> remoteCb = getRemoteCallback();
- if (remoteCb != 0) {
- remoteCb->onCaptureStarted(resultExtras, timestamp);
+ CaptureResultExtras mutableResultExtras = resultExtras;
+ if (flags::camera_multi_client() && mSharedMode) {
+ int clientReqId;
+ if (!matchClientRequest(resultExtras, &clientReqId)) {
+ return;
+ }
+ mutableResultExtras.requestId = clientReqId;
}
- Camera2ClientBase::notifyShutter(resultExtras, timestamp);
+
+ if (remoteCb != 0) {
+ remoteCb->onCaptureStarted(mutableResultExtras, timestamp);
+ }
+ Camera2ClientBase::notifyShutter(mutableResultExtras, timestamp);
+ if (flags::camera_multi_client() && mSharedMode) {
+ // When the camera is opened in shared mode, composite streams are not
+ // supported.
+ return;
+ }
// Access to the composite stream map must be synchronized
Mutex::Autolock l(mCompositeLock);
@@ -2298,13 +2463,16 @@
if (mDevice == 0) return;
nsecs_t startTime = systemTime();
- if (!flags::camera_multi_client() || sCameraService->isOnlyClient(this)){
- ALOGV("Camera %s: Stopping processors", mCameraIdStr.c_str());
-
- if (mFrameProcessor.get() != nullptr) {
+ if (mFrameProcessor.get() != nullptr) {
mFrameProcessor->removeListener(
camera2::FrameProcessorBase::FRAME_PROCESSOR_LISTENER_MIN_ID,
camera2::FrameProcessorBase::FRAME_PROCESSOR_LISTENER_MAX_ID, /*listener*/this);
+ }
+ if (!flags::camera_multi_client() || !mSharedMode ||
+ (mSharedMode && sCameraService->isOnlyClient(this))){
+ ALOGV("Camera %s: Stopping processors", mCameraIdStr.c_str());
+
+ if (mFrameProcessor.get() != nullptr) {
mFrameProcessor->requestExit();
ALOGV("Camera %s: Waiting for threads", mCameraIdStr.c_str());
mFrameProcessor->join();
@@ -2386,9 +2554,49 @@
return retVal;
}
+bool CameraDeviceClient::matchClientRequest(const CaptureResultExtras& resultExtras,
+ int* clientReqId) {
+ if (flags::camera_multi_client() && mSharedMode) {
+ if (resultExtras.requestId == mSharedStreamingRequest.first) {
+ *clientReqId = mSharedStreamingRequest.second;
+ return true;
+ }
+ if (mIsPrimaryClient) {
+ auto iter = mSharedRequestMap.find(resultExtras.requestId);
+ if (iter != mSharedRequestMap.end()) {
+ *clientReqId = iter->second;
+ return true;
+ }
+ }
+ }
+ // No matching client request found; callers should drop this result.
+ return false;
+}
+
void CameraDeviceClient::onResultAvailable(const CaptureResult& result) {
ATRACE_CALL();
ALOGVV("%s E", __FUNCTION__);
+ CaptureResult mutableResult = result;
+ if (flags::camera_multi_client() && mSharedMode) {
+ int clientReqId;
+ if (!matchClientRequest(result.mResultExtras, &clientReqId)) {
+ return;
+ }
+ // When a client stops streaming using cancelRequest, we still need to deliver a couple
+ // more capture results to the client, up to the last frame number returned by
+ // cancelRequest. Therefore, only clear the shared streaming request once all the frames for
+ // the repeating request have been delivered to the client.
+ if ((mStreamingRequestId == REQUEST_ID_NONE) &&
+ (result.mResultExtras.frameNumber > mStreamingRequestLastFrameNumber)) {
+ mSharedStreamingRequest.first = REQUEST_ID_NONE;
+ mSharedStreamingRequest.second = REQUEST_ID_NONE;
+ }
+ mSharedRequestMap.erase(result.mResultExtras.requestId);
+ mutableResult.mResultExtras.requestId = clientReqId;
+ if (mutableResult.mMetadata.update(ANDROID_REQUEST_ID, &clientReqId, 1) != OK) {
+ ALOGE("%s Failed to set request ID in metadata.", __FUNCTION__);
+ return;
+ }
+ }
// Thread-safe. No lock necessary.
sp<hardware::camera2::ICameraDeviceCallbacks> remoteCb = mRemoteCallback;
@@ -2398,29 +2606,29 @@
// Vendor clients need to modify metadata and also this call is in process
// before going through FMQ to vendor clients. So don't use FMQ here.
if (!mIsVendorClient && flags::fmq_metadata()) {
- fmqMetadataSize = writeResultMetadataIntoResultQueue(result.mMetadata);
+ fmqMetadataSize = writeResultMetadataIntoResultQueue(mutableResult.mMetadata);
}
hardware::camera2::impl::CameraMetadataNative resultMetadata;
CameraMetadataInfo resultInfo;
if (fmqMetadataSize == 0) {
// The flag was off / we're serving VNDK shim call or FMQ write failed.
- resultMetadata = result.mMetadata;
+ resultMetadata = mutableResult.mMetadata;
resultInfo.set<CameraMetadataInfo::metadata>(resultMetadata);
} else {
resultInfo.set<CameraMetadataInfo::fmqSize>(fmqMetadataSize);
}
std::vector<PhysicalCaptureResultInfo> physicalMetadatas =
- convertToFMQ(result.mPhysicalMetadatas);
+ convertToFMQ(mutableResult.mPhysicalMetadatas);
- remoteCb->onResultReceived(resultInfo, result.mResultExtras,
+ remoteCb->onResultReceived(resultInfo, mutableResult.mResultExtras,
physicalMetadatas);
}
// Access to the composite stream map must be synchronized
Mutex::Autolock l(mCompositeLock);
for (size_t i = 0; i < mCompositeStreamMap.size(); i++) {
- mCompositeStreamMap.valueAt(i)->onResultAvailable(result);
+ mCompositeStreamMap.valueAt(i)->onResultAvailable(mutableResult);
}
ALOGVV("%s X", __FUNCTION__);
}
diff --git a/services/camera/libcameraservice/api2/CameraDeviceClient.h b/services/camera/libcameraservice/api2/CameraDeviceClient.h
index 4ad3c49..a2fae86 100644
--- a/services/camera/libcameraservice/api2/CameraDeviceClient.h
+++ b/services/camera/libcameraservice/api2/CameraDeviceClient.h
@@ -98,6 +98,11 @@
virtual binder::Status cancelRequest(int requestId,
/*out*/
int64_t* lastFrameNumber = NULL) override;
+ virtual binder::Status startStreaming(
+ const std::vector<int>& streamIds,
+ const std::vector<int>& surfaceIds,
+ /*out*/
+ hardware::camera2::utils::SubmitInfo *submitInfo = nullptr) override;
virtual binder::Status beginConfigure() override;
@@ -329,6 +334,8 @@
// Surface only
status_t getSurfaceKey(sp<Surface> surface, SurfaceKey* out) const;
+ bool matchClientRequest(const CaptureResultExtras& resultExtras, int* clientReqId);
+
// IGraphicsBufferProducer binder -> Stream ID + Surface ID for output streams
KeyedVector<SurfaceKey, StreamSurfaceId> mStreamMap;
@@ -350,6 +357,9 @@
// Streaming request ID
int32_t mStreamingRequestId;
Mutex mStreamingRequestIdLock;
+ std::pair<int32_t, int32_t> mSharedStreamingRequest;
+ std::map<int32_t, int32_t> mSharedRequestMap;
+ int64_t mStreamingRequestLastFrameNumber;
static const int32_t REQUEST_ID_NONE = -1;
int32_t mRequestIdCounter;
diff --git a/services/camera/libcameraservice/common/CameraDeviceBase.h b/services/camera/libcameraservice/common/CameraDeviceBase.h
index 4fe9444..13d4d26 100644
--- a/services/camera/libcameraservice/common/CameraDeviceBase.h
+++ b/services/camera/libcameraservice/common/CameraDeviceBase.h
@@ -34,6 +34,7 @@
#include "device3/Camera3StreamInterface.h"
#include "device3/StatusTracker.h"
#include "binder/Status.h"
+#include "FrameProcessorBase.h"
#include "FrameProducer.h"
#include "utils/IPCTransport.h"
#include "utils/SessionConfigurationUtils.h"
@@ -47,7 +48,6 @@
typedef enum camera_stream_configuration_mode {
CAMERA_STREAM_CONFIGURATION_NORMAL_MODE = 0,
CAMERA_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE = 1,
- CAMERA_STREAM_CONFIGURATION_SHARED_MODE = 2,
CAMERA_VENDOR_STREAM_CONFIGURATION_MODE_START = 0x8000
} camera_stream_configuration_mode_t;
@@ -301,7 +301,8 @@
* In shared session mode, this function retrieves the stream ID associated with a specific
* output configuration.
*/
- virtual status_t getSharedStreamId(const OutputConfiguration &config, int *streamId) = 0;
+ virtual status_t getSharedStreamId(const android::camera3::OutputStreamInfo &config,
+ int *streamId) = 0;
/**
* In shared session mode, this function add surfaces to an existing shared stream ID.
@@ -316,6 +317,47 @@
virtual status_t removeSharedSurfaces(int streamId, const std::vector<size_t> &surfaceIds) = 0;
/**
+ * In shared session mode, this function retrieves the frame processor.
+ */
+ virtual sp<camera2::FrameProcessorBase> getSharedFrameProcessor() = 0;
+
+ /**
+ * Submit a streaming request for a camera device opened in shared mode.
+ * Output lastFrameNumber is the last frame number of the previous streaming request.
+ */
+ virtual status_t setSharedStreamingRequest(
+ const PhysicalCameraSettingsList &request,
+ const SurfaceMap &surfaceMap, int32_t *sharedReqID,
+ int64_t *lastFrameNumber = NULL) = 0;
+
+ /**
+ * Clear the shared streaming request slot.
+ * Output lastFrameNumber is the last frame number of the previous streaming request.
+ */
+ virtual status_t clearSharedStreamingRequest(int64_t *lastFrameNumber = NULL) = 0;
+
+ /**
+ * In shared session mode, only primary clients can change the capture
+ * parameters through capture request or repeating request. When the primary
+ * client sends the capture request to the camera device, the request ID is
+ * overridden by the camera device to maintain unique ID. This API is
+ * similar to captureList API, with only difference that the request ID is
+ * changed by the device before submitting the request to HAL.
+ * Output sharedReqID is the request ID actually used.
+ * Output lastFrameNumber is the expected last frame number of the list of requests.
+ */
+ virtual status_t setSharedCaptureRequest(const PhysicalCameraSettingsList &request,
+ const SurfaceMap &surfaceMap, int32_t *sharedReqID,
+ int64_t *lastFrameNumber = NULL) = 0;
+
+ /**
+ * Submit a start streaming request.
+ * Output lastFrameNumber is the last frame number of the previous streaming request.
+ */
+ virtual status_t startStreaming(const int32_t reqId, const SurfaceMap &surfaceMap,
+ int32_t *sharedReqID, int64_t *lastFrameNumber = NULL) = 0;
+
+ /**
* Take the currently-defined set of streams and configure the HAL to use
* them. This is a long-running operation (may be several hundered ms).
*
diff --git a/services/camera/libcameraservice/common/CameraProviderManager.cpp b/services/camera/libcameraservice/common/CameraProviderManager.cpp
index a8d7480..98cf5a3 100644
--- a/services/camera/libcameraservice/common/CameraProviderManager.cpp
+++ b/services/camera/libcameraservice/common/CameraProviderManager.cpp
@@ -2096,7 +2096,7 @@
const int32_t sharedColorSpaceTag = ANDROID_SHARED_SESSION_COLOR_SPACE;
const int32_t sharedOutputConfigurationsTag = ANDROID_SHARED_SESSION_OUTPUT_CONFIGURATIONS;
auto& c = mCameraCharacteristics;
- uint8_t colorSpace = 0;
+ int32_t colorSpace = ANDROID_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP_UNSPECIFIED;
res = c.update(sharedColorSpaceTag, &colorSpace, 1);
@@ -2104,8 +2104,8 @@
// take these values from XML instead.
std::vector<int64_t> sharedOutputConfigEntries;
int64_t surfaceType1 = OutputConfiguration::SURFACE_TYPE_IMAGE_READER;
- int64_t width = 1280;
- int64_t height = 800;
+ int64_t width = 1920;
+ int64_t height = 1080;
int64_t format1 = HAL_PIXEL_FORMAT_RGBA_8888;
int64_t mirrorMode = OutputConfiguration::MIRROR_MODE_AUTO;
int64_t timestampBase = OutputConfiguration::TIMESTAMP_BASE_DEFAULT;
@@ -2131,7 +2131,7 @@
// Stream 2 configuration hardcoded
int64_t surfaceType2 = OutputConfiguration::SURFACE_TYPE_SURFACE_VIEW;
int64_t format2 = HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED;
- int64_t usage2 = 0;
+ int64_t usage2 = GraphicBuffer::USAGE_HW_TEXTURE | GraphicBuffer::USAGE_HW_COMPOSER;
sharedOutputConfigEntries.push_back(surfaceType2);
sharedOutputConfigEntries.push_back(width);
diff --git a/services/camera/libcameraservice/device3/Camera3Device.cpp b/services/camera/libcameraservice/device3/Camera3Device.cpp
index fb8e5d0..bb113a9 100644
--- a/services/camera/libcameraservice/device3/Camera3Device.cpp
+++ b/services/camera/libcameraservice/device3/Camera3Device.cpp
@@ -2987,6 +2987,36 @@
return retVal;
}
+const sp<Camera3Device::CaptureRequest> Camera3Device::getOngoingRepeatingRequestLocked() {
+ ALOGV("%s", __FUNCTION__);
+
+ if (mRequestThread != NULL) {
+ return mRequestThread->getOngoingRepeatingRequest();
+ }
+
+ return nullptr;
+}
+
+status_t Camera3Device::updateOngoingRepeatingRequestLocked(const SurfaceMap& surfaceMap) {
+ ALOGV("%s", __FUNCTION__);
+
+ if (mRequestThread != NULL) {
+ return mRequestThread->updateOngoingRepeatingRequest(surfaceMap);
+ }
+
+ return INVALID_OPERATION;
+}
+
+int64_t Camera3Device::getRepeatingRequestLastFrameNumberLocked() {
+ ALOGV("%s", __FUNCTION__);
+
+ if (mRequestThread != NULL) {
+ return mRequestThread->getRepeatingRequestLastFrameNumber();
+ }
+
+ return hardware::camera2::ICameraDeviceUser::NO_IN_FLIGHT_REPEATING_FRAMES;
+}
+
void Camera3Device::monitorMetadata(TagMonitor::eventSource source,
int64_t frameNumber, nsecs_t timestamp, const CameraMetadata& metadata,
const std::unordered_map<std::string, CameraMetadata>& physicalMetadata,
@@ -4251,6 +4281,60 @@
return mLatestRequestInfo;
}
+const sp<Camera3Device::CaptureRequest> Camera3Device::RequestThread::getOngoingRepeatingRequest() {
+ ATRACE_CALL();
+ Mutex::Autolock l(mRequestLock);
+
+ ALOGV("RequestThread::%s", __FUNCTION__);
+ if (mRepeatingRequests.empty()) {
+ return nullptr;
+ }
+
+ return *mRepeatingRequests.begin();
+}
+
+status_t Camera3Device::RequestThread::updateOngoingRepeatingRequest(const SurfaceMap& surfaceMap) {
+ ATRACE_CALL();
+ Mutex::Autolock l(mRequestLock);
+ if (mRepeatingRequests.empty()) {
+ return INVALID_OPERATION;
+ }
+
+ sp<CaptureRequest> curRequest = *mRepeatingRequests.begin();
+ std::vector<int32_t> outputStreamIds;
+ Vector<sp<camera3::Camera3OutputStreamInterface>> outputStreams;
+ for (const auto& [key, value] : surfaceMap) {
+ outputStreamIds.push_back(key);
+ }
+ for (auto id : outputStreamIds) {
+ sp<Camera3Device> parent = mParent.promote();
+ if (parent == nullptr) {
+ ALOGE("%s: parent does not exist!", __FUNCTION__);
+ return INVALID_OPERATION;
+ }
+ sp<Camera3OutputStreamInterface> stream = parent->mOutputStreams.get(id);
+ if (stream == nullptr) {
+ CLOGE("Request references unknown stream %d",id);
+ return BAD_VALUE;
+ }
+ outputStreams.push(stream);
+ }
+ curRequest->mOutputStreams = outputStreams;
+ curRequest->mOutputSurfaces = surfaceMap;
+
+ ALOGV("RequestThread::%s", __FUNCTION__);
+ return OK;
+
+}
+
+int64_t Camera3Device::RequestThread::getRepeatingRequestLastFrameNumber() {
+ ATRACE_CALL();
+ Mutex::Autolock l(mRequestLock);
+
+ ALOGV("RequestThread::%s", __FUNCTION__);
+ return mRepeatingLastFrameNumber;
+}
+
bool Camera3Device::RequestThread::isStreamPending(
sp<Camera3StreamInterface>& stream) {
ATRACE_CALL();
diff --git a/services/camera/libcameraservice/device3/Camera3Device.h b/services/camera/libcameraservice/device3/Camera3Device.h
index 5d3c010..12b1770 100644
--- a/services/camera/libcameraservice/device3/Camera3Device.h
+++ b/services/camera/libcameraservice/device3/Camera3Device.h
@@ -38,6 +38,7 @@
#include "common/CameraDeviceBase.h"
#include "common/DepthPhotoProcessor.h"
+#include "common/FrameProcessorBase.h"
#include "device3/BufferUtils.h"
#include "device3/StatusTracker.h"
#include "device3/Camera3BufferManager.h"
@@ -197,7 +198,7 @@
virtual status_t beginConfigure() override {return OK;};
- virtual status_t getSharedStreamId(const OutputConfiguration& /*config*/,
+ virtual status_t getSharedStreamId(const OutputStreamInfo& /*config*/,
int* /*streamId*/) override {return INVALID_OPERATION;};
virtual status_t addSharedSurfaces(int /*streamId*/,
@@ -208,6 +209,27 @@
virtual status_t removeSharedSurfaces(int /*streamId*/,
const std::vector<size_t>& /*surfaceIds*/) override {return INVALID_OPERATION;};
+ virtual status_t setSharedStreamingRequest(
+ const PhysicalCameraSettingsList& /*request*/, const SurfaceMap& /*surfaceMap*/,
+ int32_t* /*sharedReqID*/, int64_t* /*lastFrameNumber = NULL*/) override {
+ return INVALID_OPERATION;
+ };
+
+ virtual status_t clearSharedStreamingRequest(int64_t* /*lastFrameNumber = NULL*/) override {
+ return INVALID_OPERATION;
+ };
+
+ virtual status_t setSharedCaptureRequest(const PhysicalCameraSettingsList& /*request*/,
+ const SurfaceMap& /*surfaceMap*/, int32_t* /*sharedReqID*/,
+ int64_t* /*lastFrameNumber = NULL*/) override {return INVALID_OPERATION;};
+
+ virtual sp<camera2::FrameProcessorBase> getSharedFrameProcessor() override {return nullptr;};
+
+ virtual status_t startStreaming(const int32_t /*reqId*/, const SurfaceMap& /*surfaceMap*/,
+ int32_t* /*sharedReqID*/, int64_t* /*lastFrameNumber = NULL*/)
+ override {return INVALID_OPERATION;};
+
+
status_t configureStreams(const CameraMetadata& sessionParams,
int operatingMode =
camera_stream_configuration_mode_t::CAMERA_STREAM_CONFIGURATION_NORMAL_MODE) override;
@@ -226,7 +248,7 @@
// Transitions to the idle state on success
status_t waitUntilDrained() override;
- status_t setNotifyCallback(wp<NotificationListener> listener) override;
+ virtual status_t setNotifyCallback(wp<NotificationListener> listener) override;
bool willNotify3A() override;
status_t waitForNextFrame(nsecs_t timeout) override;
status_t getNextResult(CaptureResult *frame) override;
@@ -763,6 +785,22 @@
};
/**
+ * Get the first repeating request in the ongoing repeating request list.
+ */
+ const sp<CaptureRequest> getOngoingRepeatingRequestLocked();
+
+ /**
+ * Update the first repeating request in the ongoing repeating request list
+ * with the surface map provided.
+ */
+ status_t updateOngoingRepeatingRequestLocked(const SurfaceMap& surfaceMap);
+
+ /**
+ * Get the repeating request last frame number.
+ */
+ int64_t getRepeatingRequestLastFrameNumberLocked();
+
+ /**
* Get the last request submitted to the hal by the request thread.
*
* Must be called with mLock held.
@@ -1067,6 +1105,20 @@
**/
void wakeupLatestRequest(bool latestRequestFailed, int32_t latestRequestId);
+ /**
+ * Get the first repeating request in the ongoing repeating request list.
+ */
+ const sp<CaptureRequest> getOngoingRepeatingRequest();
+
+ /**
+ * Update the first repeating request in the ongoing repeating request list
+ * with the surface map provided.
+ */
+ status_t updateOngoingRepeatingRequest(const SurfaceMap& surfaceMap);
+
+ // Get the repeating request last frame number.
+ int64_t getRepeatingRequestLastFrameNumber();
+
protected:
virtual bool threadLoop();
diff --git a/services/camera/libcameraservice/device3/Camera3StreamInterface.h b/services/camera/libcameraservice/device3/Camera3StreamInterface.h
index 8f3249d..673b946 100644
--- a/services/camera/libcameraservice/device3/Camera3StreamInterface.h
+++ b/services/camera/libcameraservice/device3/Camera3StreamInterface.h
@@ -131,6 +131,18 @@
dataSpace(_dataSpace), consumerUsage(_consumerUsage),
sensorPixelModesUsed(_sensorPixelModesUsed), dynamicRangeProfile(_dynamicRangeProfile),
streamUseCase(_streamUseCase), timestampBase(_timestampBase), colorSpace(_colorSpace) {}
+ bool operator == (const OutputStreamInfo& other) const {
+ return (width == other.width &&
+ height == other.height &&
+ format == other.format &&
+ (other.format == HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED ||
+ (dataSpace == other.dataSpace && consumerUsage == other.consumerUsage)) &&
+ sensorPixelModesUsed == other.sensorPixelModesUsed &&
+ dynamicRangeProfile == other.dynamicRangeProfile &&
+ colorSpace == other.colorSpace &&
+ streamUseCase == other.streamUseCase &&
+ timestampBase == other.timestampBase);
+ }
};
// A holder containing a surface and its corresponding mirroring mode
diff --git a/services/camera/libcameraservice/device3/aidl/AidlCamera3SharedDevice.cpp b/services/camera/libcameraservice/device3/aidl/AidlCamera3SharedDevice.cpp
index 5bd8d8c..f203ffe 100644
--- a/services/camera/libcameraservice/device3/aidl/AidlCamera3SharedDevice.cpp
+++ b/services/camera/libcameraservice/device3/aidl/AidlCamera3SharedDevice.cpp
@@ -57,6 +57,27 @@
namespace android {
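+// Frame listener for the opaque consumers created in beginConfigure(): every incoming buffer
+// is acquired and immediately released so the consumer's queue does not fill up.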
+class OpaqueConsumerListener : public BufferItemConsumer::FrameAvailableListener {
+public:
+ OpaqueConsumerListener(const wp<BufferItemConsumer>& consumer) : mConsumer(consumer) {}
+
+ virtual void onFrameAvailable(const BufferItem&) {
+ sp<BufferItemConsumer> consumer = mConsumer.promote();
+ if (consumer == nullptr) {
+ return;
+ }
+ BufferItem item;
+ if (consumer->acquireBuffer(&item, 0) != OK) {
+ return;
+ }
+ consumer->releaseBuffer(item, Fence::NO_FENCE);
+ }
+ virtual void onFrameReplaced(const BufferItem&) {}
+ virtual void onFrameDequeued(const uint64_t) {}
+ virtual void onFrameCancelled(const uint64_t) {}
+ virtual void onFrameDetached(const uint64_t) {}
+
+ wp<BufferItemConsumer> mConsumer;
+};
+
// Metadata android.info.availableSharedOutputConfigurations has a list of shared output
// configurations. Each output configuration has a minimum of 11 entries of size long,
// followed by the physical camera id if present.
@@ -64,11 +85,13 @@
static const int SHARED_OUTPUT_CONFIG_NUM_OF_ENTRIES = 11;
std::map<std::string, sp<AidlCamera3SharedDevice>> AidlCamera3SharedDevice::sSharedDevices;
std::map<std::string, std::unordered_set<int>> AidlCamera3SharedDevice::sClientsUid;
+Mutex AidlCamera3SharedDevice::sSharedClientsLock;
sp<AidlCamera3SharedDevice> AidlCamera3SharedDevice::getInstance(
std::shared_ptr<CameraServiceProxyWrapper>& cameraServiceProxyWrapper,
std::shared_ptr<AttributionAndPermissionUtils> attributionAndPermissionUtils,
const std::string& id, bool overrideForPerfClass, int rotationOverride,
bool legacyClient) {
+ Mutex::Autolock l(sSharedClientsLock);
if (sClientsUid[id].empty()) {
AidlCamera3SharedDevice* sharedDevice = new AidlCamera3SharedDevice(
cameraServiceProxyWrapper, attributionAndPermissionUtils, id, overrideForPerfClass,
@@ -85,21 +108,38 @@
const std::string& monitorTags) {
ATRACE_CALL();
status_t res = OK;
-
+ Mutex::Autolock l(mSharedDeviceLock);
if (mStatus == STATUS_UNINITIALIZED) {
res = AidlCamera3Device::initialize(manager, monitorTags);
if (res == OK) {
mSharedOutputConfigurations = getSharedOutputConfiguration();
+ wp<NotificationListener> weakThis(this);
+ res = AidlCamera3Device::setNotifyCallback(weakThis);
+ if (res != OK) {
+ ALOGE("%s: Camera %s: Unable to set notify callback: %s (%d)",
+ __FUNCTION__, mId.c_str(), strerror(-res), res);
+ return res;
+ }
+ mFrameProcessor = new camera2::FrameProcessorBase(this);
+ std::string threadName = std::string("CDU-") + mId + "-FrameProc";
+ res = mFrameProcessor->run(threadName.c_str());
+ if (res != OK) {
+ ALOGE("%s: Unable to start frame processor thread: %s (%d)",
+ __FUNCTION__, strerror(-res), res);
+ return res;
+ }
}
}
return res;
}
status_t AidlCamera3SharedDevice::disconnectClient(int clientUid) {
+ Mutex::Autolock l(mSharedDeviceLock);
if (sClientsUid[mId].erase(clientUid) == 0) {
ALOGW("%s: Camera %s: Client %d is not connected to shared device", __FUNCTION__,
mId.c_str(), clientUid);
}
+
if (sClientsUid[mId].empty()) {
return Camera3Device::disconnect();
}
@@ -108,11 +148,11 @@
std::vector<OutputConfiguration> AidlCamera3SharedDevice::getSharedOutputConfiguration() {
std::vector<OutputConfiguration> sharedConfigs;
- uint8_t colorspace = ANDROID_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP_UNSPECIFIED;
+ int32_t colorspace = ANDROID_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP_UNSPECIFIED;
camera_metadata_entry sharedSessionColorSpace = mDeviceInfo.find(
ANDROID_SHARED_SESSION_COLOR_SPACE);
if (sharedSessionColorSpace.count > 0) {
- colorspace = *sharedSessionColorSpace.data.u8;
+ colorspace = *sharedSessionColorSpace.data.i32;
}
camera_metadata_entry sharedSessionConfigs = mDeviceInfo.find(
ANDROID_SHARED_SESSION_OUTPUT_CONFIGURATIONS);
@@ -170,6 +210,7 @@
}
status_t AidlCamera3SharedDevice::beginConfigure() {
+ Mutex::Autolock l(mSharedDeviceLock);
status_t res;
int i = 0;
@@ -177,13 +218,20 @@
return OK;
}
+ mSharedSurfaces.clear();
+ mOpaqueConsumers.clear();
+ mSharedSurfaceIds.clear();
+ mSharedStreams.clear();
+ mStreamInfoMap.clear();
+
for (auto config : mSharedOutputConfigurations) {
std::vector<SurfaceHolder> consumers;
- android_dataspace dataSpace;
+ android_dataspace dataspace = (android_dataspace)config.getDataspace();
+
if (config.getColorSpace()
!= ANDROID_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP_UNSPECIFIED
&& config.getFormat() != HAL_PIXEL_FORMAT_BLOB) {
- if (!dataSpaceFromColorSpace(&dataSpace, config.getColorSpace())) {
+ if (!dataSpaceFromColorSpace(&dataspace, config.getColorSpace())) {
std::string msg = fmt::sprintf("Camera %s: color space %d not supported, "
" failed to convert to data space", mId.c_str(), config.getColorSpace());
ALOGE("%s: %s", __FUNCTION__, msg.c_str());
@@ -199,25 +247,39 @@
ALOGE("%s: %s", __FUNCTION__, msg.c_str());
return INVALID_OPERATION;
}
- sp<IGraphicBufferProducer> producer;
- sp<IGraphicBufferConsumer> consumer;
- BufferQueue::createBufferQueue(&producer, &consumer);
- mSharedSurfaces[i] = new Surface(producer);
+
+ #if COM_ANDROID_GRAPHICS_LIBGUI_FLAGS(WB_CONSUMER_BASE_OWNS_BQ)
+ sp<BufferItemConsumer> consumer = sp<BufferItemConsumer>::make(
+ AHARDWAREBUFFER_USAGE_CAMERA_READ);
+ mOpaqueConsumers.push_back(consumer);
+ mSharedSurfaces.push_back(consumer->getSurface());
+ #else
+ sp<IGraphicBufferProducer> producer;
+ sp<IGraphicBufferConsumer> consumer;
+ BufferQueue::createBufferQueue(&producer, &consumer);
+ sp<BufferItemConsumer> opaqueConsumer = sp<BufferItemConsumer>::make(consumer,
+ AHARDWAREBUFFER_USAGE_CAMERA_READ);
+ mOpaqueConsumers.push_back(opaqueConsumer);
+ mSharedSurfaces.push_back(new Surface(producer));
+ #endif // COM_ANDROID_GRAPHICS_LIBGUI_FLAGS(WB_CONSUMER_BASE_OWNS_BQ)
+ sp<OpaqueConsumerListener> consumerListener = sp<OpaqueConsumerListener>::make(
+ mOpaqueConsumers[i]);
+ mOpaqueConsumers[i]->setFrameAvailableListener(consumerListener);
consumers.push_back({mSharedSurfaces[i], config.getMirrorMode()});
- mSharedStreams[i] = new Camera3SharedOutputStream(mNextStreamId, consumers,
+ mSharedStreams.push_back(new Camera3SharedOutputStream(mNextStreamId, consumers,
config.getWidth(),config.getHeight(), config.getFormat(), config.getUsage(),
- dataSpace, static_cast<camera_stream_rotation_t>(config.getRotation()),
+ dataspace, static_cast<camera_stream_rotation_t>(config.getRotation()),
mTimestampOffset, config.getPhysicalCameraId(), overriddenSensorPixelModes,
getTransportType(), config.getSurfaceSetID(), mUseHalBufManager,
config.getDynamicRangeProfile(), config.getStreamUseCase(),
mDeviceTimeBaseIsRealtime, config.getTimestampBase(),
- config.getColorSpace(), config.useReadoutTimestamp());
+ config.getColorSpace(), config.useReadoutTimestamp()));
int id = mSharedStreams[i]->getSurfaceId(consumers[0].mSurface);
if (id < 0) {
SET_ERR_L("Invalid surface id");
return BAD_VALUE;
}
- mSharedSurfaceIds[i] = id;
+ mSharedSurfaceIds.push_back(id);
mSharedStreams[i]->setStatusTracker(mStatusTracker);
mSharedStreams[i]->setBufferManager(mBufferManager);
mSharedStreams[i]->setImageDumpMask(mImageDumpMask);
@@ -227,11 +289,15 @@
return res;
}
mSessionStatsBuilder.addStream(mNextStreamId);
- mConfiguredOutputs.add(mNextStreamId++, config);
+ OutputStreamInfo streamInfo(config.getWidth(),config.getHeight(), config.getFormat(),
+ dataspace, config.getUsage(), overriddenSensorPixelModes,
+ config.getDynamicRangeProfile(), config.getStreamUseCase(),
+ config.getTimestampBase(), config.getColorSpace());
+ mStreamInfoMap[mNextStreamId++] = streamInfo;
i++;
}
CameraMetadata sessionParams;
- res = configureStreams(sessionParams, CAMERA_STREAM_CONFIGURATION_SHARED_MODE);
+ res = configureStreams(sessionParams, CAMERA_STREAM_CONFIGURATION_NORMAL_MODE);
if (res != OK) {
std::string msg = fmt::sprintf("Camera %s: Error configuring streams: %s (%d)",
mId.c_str(), strerror(-res), res);
@@ -241,15 +307,17 @@
return OK;
}
-status_t AidlCamera3SharedDevice::getSharedStreamId(const OutputConfiguration &config,
+status_t AidlCamera3SharedDevice::getSharedStreamId(const OutputStreamInfo &config,
int *streamId) {
+ Mutex::Autolock l(mSharedDeviceLock);
if (streamId == nullptr) {
return BAD_VALUE;
}
- for (size_t i = 0 ; i < mConfiguredOutputs.size(); i++){
- OutputConfiguration sharedConfig = mConfiguredOutputs.valueAt(i);
- if (config.sharedConfigEqual(sharedConfig)) {
- *streamId = mConfiguredOutputs.keyAt(i);
+
+ for (const auto& streamInfo : mStreamInfoMap) {
+ OutputStreamInfo info = streamInfo.second;
+ if (info == config) {
+ *streamId = streamInfo.first;
return OK;
}
}
@@ -259,6 +327,7 @@
status_t AidlCamera3SharedDevice::addSharedSurfaces(int streamId,
const std::vector<android::camera3::OutputStreamInfo> &outputInfo,
const std::vector<SurfaceHolder> &surfaces, std::vector<int> *surfaceIds) {
+ Mutex::Autolock l(mSharedDeviceLock);
KeyedVector<sp<Surface>, size_t> outputMap;
std::vector<size_t> removedSurfaceIds;
status_t res;
@@ -285,6 +354,7 @@
status_t AidlCamera3SharedDevice::removeSharedSurfaces(int streamId,
const std::vector<size_t> &removedSurfaceIds) {
+ Mutex::Autolock l(mSharedDeviceLock);
KeyedVector<sp<Surface>, size_t> outputMap;
std::vector<SurfaceHolder> surfaces;
std::vector<OutputStreamInfo> outputInfo;
@@ -303,4 +373,257 @@
}
return OK;
}
+
+SurfaceMap AidlCamera3SharedDevice::mergeSurfaceMaps(const SurfaceMap& map1,
+ const SurfaceMap& map2) {
+ SurfaceMap mergedMap = map1;
+
+ for (const auto& [key, value] : map2) {
+ // If the key exists in map1, append the values
+ if (mergedMap.count(key) > 0) {
+ mergedMap[key].insert(mergedMap[key].end(), value.begin(), value.end());
+ } else {
+ // Otherwise, insert the key-value pair from map2
+ mergedMap[key] = value;
+ }
+ }
+ return mergedMap;
+}
+
+SurfaceMap AidlCamera3SharedDevice::removeClientSurfaceMap(const SurfaceMap& map1,
+ const SurfaceMap& map2) {
+ SurfaceMap resultMap = map1;
+
+ for (const auto& [key, value2] : map2) {
+ auto it1 = resultMap.find(key);
+ if (it1 != resultMap.end()) {
+ // Key exists in both maps, remove matching values
+ std::vector<size_t>& value1 = it1->second;
+ for (size_t val2 : value2) {
+ value1.erase(std::remove(value1.begin(), value1.end(), val2), value1.end());
+ }
+
+ // If the vector is empty after removing, remove the key
+ if (value1.empty()) {
+ resultMap.erase(it1);
+ }
+ }
+ }
+ return resultMap;
+}
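+// Worked example: given map1 = {5: [0, 1]} and map2 = {5: [2], 7: [0]},
+// mergeSurfaceMaps(map1, map2) yields {5: [0, 1, 2], 7: [0]}, and
+// removeClientSurfaceMap(merged, map2) restores {5: [0, 1]}, dropping key 7 once its
+// surface list becomes empty.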
+
+status_t AidlCamera3SharedDevice::setSharedStreamingRequest(
+ const CameraDeviceBase::PhysicalCameraSettingsList &clientSettings,
+ const SurfaceMap &surfaceMap, int32_t *sharedReqID,
+ int64_t *lastFrameNumber) {
+ if ((sharedReqID == nullptr) || (lastFrameNumber == nullptr)) {
+ return BAD_VALUE;
+ }
+
+ Mutex::Autolock l(mSharedDeviceLock);
+ auto requestIdEntry = clientSettings.begin()->metadata.find(ANDROID_REQUEST_ID);
+ if (requestIdEntry.count == 0) {
+ CLOGE("RequestID does not exist in metadata");
+ return BAD_VALUE;
+ }
+ int clientRequestId = requestIdEntry.data.i32[0];
+ CameraDeviceBase::PhysicalCameraSettingsList newSettings = clientSettings;
+ SurfaceMap newSurfaceMap = surfaceMap;
+ List<const CameraDeviceBase::PhysicalCameraSettingsList> settingsList;
+ std::list<SurfaceMap> surfaceMaps;
+ int32_t requestID = mRequestIdCounter;
+ const sp<CaptureRequest> curRequest = getOngoingRepeatingRequestLocked();
+
+ if (curRequest != nullptr) {
+ // If secondary clients already have streaming in progress, merge their
+ // surface maps into the new repeating request.
+ newSurfaceMap = mergeSurfaceMaps(surfaceMap, curRequest->mOutputSurfaces);
+ }
+
+ std::vector<int32_t> outputStreamIds;
+ for (const auto& [key, value] : newSurfaceMap) {
+ outputStreamIds.push_back(key);
+ }
+ surfaceMaps.push_back(newSurfaceMap);
+ newSettings.begin()->metadata.update(ANDROID_REQUEST_ID, &requestID, /*size*/1);
+ mRequestIdCounter++;
+ newSettings.begin()->metadata.update(ANDROID_REQUEST_OUTPUT_STREAMS,
+ &outputStreamIds[0], outputStreamIds.size());
+ settingsList.push_back(newSettings);
+ status_t err = setStreamingRequestList(settingsList, surfaceMaps, lastFrameNumber);
+ if (err != OK) {
+ CLOGE("Cannot start shared streaming request");
+ return err;
+ }
+ mStreamingRequestId = requestID;
+ uid_t clientUid = mAttributionAndPermissionUtils->getCallingUid();
+ mClientRequestIds[clientUid] = clientRequestId;
+ mClientSurfaces[clientUid] = surfaceMap;
+ *sharedReqID = mStreamingRequestId;
+
+ return err;
+}
+
+status_t AidlCamera3SharedDevice::clearSharedStreamingRequest(int64_t *lastFrameNumber) {
+ Mutex::Autolock l(mSharedDeviceLock);
+ uid_t clientUid = mAttributionAndPermissionUtils->getCallingUid();
+ const sp<CaptureRequest> curRequest = getOngoingRepeatingRequestLocked();
+ if (curRequest == nullptr) {
+ CLOGE("No streaming ongoing");
+ return INVALID_OPERATION;
+ }
+
+ SurfaceMap newSurfaceMap;
+ newSurfaceMap = removeClientSurfaceMap(curRequest->mOutputSurfaces, mClientSurfaces[clientUid]);
+ mClientRequestIds.erase(clientUid);
+ mClientSurfaces.erase(clientUid);
+ if (newSurfaceMap.empty()) {
+ status_t err = clearStreamingRequest(lastFrameNumber);
+ if (err != OK) {
+ CLOGE("Error clearing streaming request");
+ }
+ return err;
+ }
+ if (lastFrameNumber != nullptr) {
+ *lastFrameNumber = getRepeatingRequestLastFrameNumberLocked();
+ }
+ return updateOngoingRepeatingRequestLocked(newSurfaceMap);
+}
+
+status_t AidlCamera3SharedDevice::setSharedCaptureRequest(const PhysicalCameraSettingsList &request,
+ const SurfaceMap &surfaceMap, int32_t *sharedReqID, int64_t *lastFrameNumber) {
+ Mutex::Autolock l(mSharedDeviceLock);
+ if (sharedReqID == nullptr) {
+ return BAD_VALUE;
+ }
+ CameraDeviceBase::PhysicalCameraSettingsList newRequest = request;
+ int newReqID = mRequestIdCounter;
+ List<const CameraDeviceBase::PhysicalCameraSettingsList> settingsList;
+ std::list<SurfaceMap> surfaceMaps;
+ surfaceMaps.push_back(surfaceMap);
+ newRequest.begin()->metadata.update(ANDROID_REQUEST_ID, &newReqID, /*size*/1);
+ settingsList.push_back(newRequest);
+ mRequestIdCounter++;
+ status_t err = captureList(settingsList, surfaceMaps, lastFrameNumber);
+ if (err != OK) {
+ CLOGE("Cannot start shared capture request");
+ return err;
+ }
+ *sharedReqID = newReqID;
+
+ return err;
+}
+
+status_t AidlCamera3SharedDevice::startStreaming(const int32_t reqId, const SurfaceMap& surfaceMap,
+ int32_t* sharedReqID, int64_t* lastFrameNumber) {
+ ATRACE_CALL();
+
+ if ((sharedReqID == nullptr) || (lastFrameNumber == nullptr)) {
+ return BAD_VALUE;
+ }
+
+ Mutex::Autolock l(mSharedDeviceLock);
+ const sp<CaptureRequest> curRequest = getOngoingRepeatingRequestLocked();
+ if (curRequest != nullptr) {
+ // If a repeating request is already ongoing, attach the new surfaces to it.
+ SurfaceMap newSurfaceMap = mergeSurfaceMaps(surfaceMap, curRequest->mOutputSurfaces);
+ status_t err = updateOngoingRepeatingRequestLocked(newSurfaceMap);
+ if (err != OK) {
+ CLOGE("Cannot update the ongoing repeating request");
+ return err;
+ }
+ *lastFrameNumber = getRepeatingRequestLastFrameNumberLocked();
+ } else {
+ // If there is no ongoing repeating request, submit a default repeating
+ // request built from the PREVIEW template.
+ std::vector<int32_t> outputStreamIds;
+ for (const auto& [key, value] : surfaceMap) {
+ outputStreamIds.push_back(key);
+ }
+
+ CameraMetadata previewTemplate;
+ status_t err = createDefaultRequest(CAMERA_TEMPLATE_PREVIEW, &previewTemplate);
+ if (err != OK) {
+ ALOGE("%s: Failed to create default PREVIEW request: %s (%d)",
+ __FUNCTION__, strerror(-err), err);
+ return err;
+ }
+ int32_t requestID = mRequestIdCounter;
+ previewTemplate.update(ANDROID_REQUEST_ID, &requestID, /*size*/1);
+ mRequestIdCounter++;
+ previewTemplate.update(ANDROID_REQUEST_OUTPUT_STREAMS, &outputStreamIds[0],
+ outputStreamIds.size());
+ CameraDeviceBase::PhysicalCameraSettingsList previewSettings;
+ previewSettings.push_back({mId, previewTemplate});
+
+ List<const CameraDeviceBase::PhysicalCameraSettingsList> settingsList;
+ std::list<SurfaceMap> surfaceMaps;
+ settingsList.push_back(previewSettings);
+ surfaceMaps.push_back(surfaceMap);
+ err = setStreamingRequestList(settingsList, surfaceMaps, lastFrameNumber);
+ if (err != OK) {
+ CLOGE("Cannot start shared streaming request");
+ return err;
+ }
+ mStreamingRequestId = requestID;
+ }
+
+ uid_t clientUid = mAttributionAndPermissionUtils->getCallingUid();
+ mClientRequestIds[clientUid] = reqId;
+ mClientSurfaces[clientUid] = surfaceMap;
+ *sharedReqID = mStreamingRequestId;
+ return OK;
+}
+
+status_t AidlCamera3SharedDevice::setNotifyCallback(wp<NotificationListener> listener) {
+ ATRACE_CALL();
+ Mutex::Autolock l(mSharedDeviceLock);
+
+ if (listener == NULL) {
+ return BAD_VALUE;
+ }
+ mClientListeners[mAttributionAndPermissionUtils->getCallingUid()] = listener;
+ return OK;
+}
+
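+// The shared device registers itself as the AidlCamera3Device notification listener in
+// initialize(); the notify* callbacks below fan each event out to the per-client listeners
+// recorded via setNotifyCallback() (notifyActive only to clients with an active request).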
+void AidlCamera3SharedDevice::notifyError(
+ int32_t errorCode,
+ const CaptureResultExtras& resultExtras) {
+ for (const auto& clientListener : mClientListeners) {
+ sp<NotificationListener> listener = clientListener.second.promote();
+ if (listener != NULL) {
+ listener->notifyError(errorCode, resultExtras);
+ }
+ }
+}
+
+status_t AidlCamera3SharedDevice::notifyActive(float maxPreviewFps) {
+ for (const auto& activeClient : mClientRequestIds) {
+ sp<NotificationListener> listener = mClientListeners[activeClient.first].promote();
+ if (listener != NULL) {
+ listener->notifyActive(maxPreviewFps);
+ }
+ }
+
+ return OK;
+}
+
+void AidlCamera3SharedDevice::notifyIdle(int64_t requestCount, int64_t resultErrorCount,
+ bool deviceError,
+ std::pair<int32_t, int32_t> mostRequestedFpsRange,
+ const std::vector<hardware::CameraStreamStats>& stats) {
+ for (const auto& clientListener : mClientListeners) {
+ sp<NotificationListener> listener = clientListener.second.promote();
+ if (listener != NULL) {
+ listener->notifyIdle(requestCount, resultErrorCount, deviceError, mostRequestedFpsRange,
+ stats);
+ }
+ }
+}
+
+void AidlCamera3SharedDevice::notifyShutter(const CaptureResultExtras& resultExtras,
+ nsecs_t timestamp) {
+ for (const auto& clientListener : mClientListeners) {
+ sp<NotificationListener> listener = clientListener.second.promote();
+ if (listener != NULL) {
+ listener->notifyShutter(resultExtras, timestamp);
+ }
+ }
+}
+
}
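
For reference, the client lifecycle implied by getInstance() and disconnectClient(): every caller asking for the same camera id receives the same shared device instance, and the underlying Camera3Device is only disconnected once the last tracked client UID is erased. A rough usage sketch with illustrative variable names (how a client's UID gets registered is handled elsewhere in the camera service and is not shown in this diff):

    // Hypothetical sketch of two clients sharing camera "0".
    sp<AidlCamera3SharedDevice> device = AidlCamera3SharedDevice::getInstance(
            proxyWrapper, attrUtils, /*id*/ "0", /*overrideForPerfClass*/ false,
            /*rotationOverride*/ 0, /*legacyClient*/ false);
    // ... both clients configure and stream through the same instance ...
    device->disconnectClient(primaryClientUid);    // the other client keeps streaming
    device->disconnectClient(secondaryClientUid);  // last UID removed -> Camera3Device::disconnect()
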
diff --git a/services/camera/libcameraservice/device3/aidl/AidlCamera3SharedDevice.h b/services/camera/libcameraservice/device3/aidl/AidlCamera3SharedDevice.h
index b2ee2d6..d1aa27c 100644
--- a/services/camera/libcameraservice/device3/aidl/AidlCamera3SharedDevice.h
+++ b/services/camera/libcameraservice/device3/aidl/AidlCamera3SharedDevice.h
@@ -18,6 +18,7 @@
#define ANDROID_SERVERS_AIDLCAMERA3SHAREDDEVICE_H
#include <camera/camera2/OutputConfiguration.h>
+#include "common/FrameProcessorBase.h"
#include "../Camera3SharedOutputStream.h"
#include "AidlCamera3Device.h"
namespace android {
@@ -27,7 +28,8 @@
*/
using ::android::camera3::Camera3SharedOutputStream;
class AidlCamera3SharedDevice :
- public AidlCamera3Device {
+ public AidlCamera3Device,
+ public NotificationListener {
public:
static sp<AidlCamera3SharedDevice> getInstance(
std::shared_ptr<CameraServiceProxyWrapper>& cameraServiceProxyWrapper,
@@ -38,29 +40,73 @@
const std::string& monitorTags) override;
status_t disconnectClient(int clientUid) override;
status_t beginConfigure() override;
- status_t getSharedStreamId(const OutputConfiguration &config, int *streamId) override;
+ status_t getSharedStreamId(const OutputStreamInfo &config, int *streamId) override;
status_t addSharedSurfaces(int streamId,
const std::vector<android::camera3::OutputStreamInfo> &outputInfo,
const std::vector<SurfaceHolder>& surfaces,
std::vector<int> *surfaceIds = nullptr) override;
status_t removeSharedSurfaces(int streamId,
const std::vector<size_t> &surfaceIds) override;
+ status_t setSharedStreamingRequest(const PhysicalCameraSettingsList &request,
+ const SurfaceMap &surfaceMap, int32_t *sharedReqID, int64_t *lastFrameNumber = NULL)
+ override;
+ status_t clearSharedStreamingRequest(int64_t *lastFrameNumber = NULL) override;
+ status_t setSharedCaptureRequest(const PhysicalCameraSettingsList &request,
+ const SurfaceMap &surfaceMap, int32_t *sharedReqID, int64_t *lastFrameNumber = NULL)
+ override;
+ sp<camera2::FrameProcessorBase> getSharedFrameProcessor() override {return mFrameProcessor;};
+ status_t startStreaming(const int32_t reqId, const SurfaceMap &surfaceMap,
+ int32_t *sharedReqID, int64_t *lastFrameNumber = NULL) override;
+
+ status_t setNotifyCallback(wp<NotificationListener> listener) override;
+ virtual void notifyError(int32_t errorCode,
+ const CaptureResultExtras &resultExtras) override;
+ virtual status_t notifyActive(float maxPreviewFps) override;
+ virtual void notifyIdle(int64_t requestCount, int64_t resultError, bool deviceError,
+ std::pair<int32_t, int32_t> mostRequestedFpsRange,
+ const std::vector<hardware::CameraStreamStats>& streamStats) override;
+ virtual void notifyShutter(const CaptureResultExtras &resultExtras,
+ nsecs_t timestamp) override;
+ virtual void notifyRequestQueueEmpty() {};
+ // The prepare API is not supported for shared sessions.
+ virtual void notifyPrepared(int /*streamId*/) {};
+ // Required only for API1
+ virtual void notifyAutoFocus(uint8_t /*newState*/, int /*triggerId*/) {};
+ virtual void notifyAutoExposure(uint8_t /*newState*/, int /*triggerId*/) {};
+ virtual void notifyAutoWhitebalance(uint8_t /*newState*/,
+ int /*triggerId*/) {};
+ virtual void notifyRepeatingRequestError(long /*lastFrameNumber*/) {};
private:
static std::map<std::string, sp<AidlCamera3SharedDevice>> sSharedDevices;
static std::map<std::string, std::unordered_set<int>> sClientsUid;
+ static Mutex sSharedClientsLock;
AidlCamera3SharedDevice(
std::shared_ptr<CameraServiceProxyWrapper>& cameraServiceProxyWrapper,
std::shared_ptr<AttributionAndPermissionUtils> attributionAndPermissionUtils,
const std::string& id, bool overrideForPerfClass, int rotationOverride,
bool legacyClient)
: AidlCamera3Device(cameraServiceProxyWrapper, attributionAndPermissionUtils, id,
- overrideForPerfClass, rotationOverride, legacyClient) {}
+ overrideForPerfClass, rotationOverride, legacyClient),
+ mStreamingRequestId(REQUEST_ID_NONE),
+ mRequestIdCounter(0) {}
std::vector<OutputConfiguration> getSharedOutputConfiguration();
std::vector<OutputConfiguration> mSharedOutputConfigurations;
std::vector<int> mSharedSurfaceIds;
std::vector<sp<Surface>> mSharedSurfaces;
+ std::vector<sp<BufferItemConsumer>> mOpaqueConsumers;
std::vector<sp<Camera3SharedOutputStream>> mSharedStreams;
- KeyedVector<int32_t, OutputConfiguration> mConfiguredOutputs;
+ std::unordered_map<int32_t, OutputStreamInfo> mStreamInfoMap;
+ // Streaming request ID
+ int32_t mStreamingRequestId;
+ static const int32_t REQUEST_ID_NONE = -1;
+ int32_t mRequestIdCounter;
+ std::unordered_map<uid_t, int32_t> mClientRequestIds;
+ std::unordered_map<uid_t, SurfaceMap> mClientSurfaces;
+ std::unordered_map<uid_t, wp<NotificationListener>> mClientListeners;
+ SurfaceMap mergeSurfaceMaps(const SurfaceMap& map1, const SurfaceMap& map2);
+ SurfaceMap removeClientSurfaceMap(const SurfaceMap& map1, const SurfaceMap& map2);
+ Mutex mSharedDeviceLock;
+ sp<camera2::FrameProcessorBase> mFrameProcessor;
}; // class AidlCamera3SharedDevice
}; // namespace android
#endif
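
Taken together, the new device-level entry points are intended to be driven by the service-side client code in roughly the following order. This is a hedged sketch only; the calling code is outside this diff and the variable names are illustrative:

    device->beginConfigure();                               // create the shared streams
    int streamId = -1;
    device->getSharedStreamId(streamInfo, &streamId);       // match a client output to a shared stream
    std::vector<int> surfaceIds;
    device->addSharedSurfaces(streamId, {streamInfo}, {surfaceHolder}, &surfaceIds);
    int32_t sharedReqId = -1;
    int64_t lastFrameNumber = -1;
    device->startStreaming(clientRequestId, surfaceMap, &sharedReqId, &lastFrameNumber);
    // ...
    device->clearSharedStreamingRequest(&lastFrameNumber);
    device->removeSharedSurfaces(streamId, removedSurfaceIds);
    device->disconnectClient(clientUid);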