Merge Android U (ab/10368041)
Bug: 291102124
Merged-In: Ied8e295ae059db07463ba06d3e6d747659b2757f
Change-Id: Ib79234b765308e957b682871b2178b66769f5660
diff --git a/services/camera/libcameraservice/api2/CameraDeviceClient.cpp b/services/camera/libcameraservice/api2/CameraDeviceClient.cpp
index d54ba46..c60f327 100644
--- a/services/camera/libcameraservice/api2/CameraDeviceClient.cpp
+++ b/services/camera/libcameraservice/api2/CameraDeviceClient.cpp
@@ -32,12 +32,12 @@
#include "device3/Camera3Device.h"
#include "device3/Camera3OutputStream.h"
#include "api2/CameraDeviceClient.h"
-#include "utils/CameraServiceProxyWrapper.h"
#include <camera_metadata_hidden.h>
#include "DepthCompositeStream.h"
#include "HeicCompositeStream.h"
+#include "JpegRCompositeStream.h"
// Convenience methods for constructing binder::Status objects for error returns
@@ -88,6 +88,7 @@
CameraDeviceClient::CameraDeviceClient(const sp<CameraService>& cameraService,
const sp<hardware::camera2::ICameraDeviceCallbacks>& remoteCallback,
+ std::shared_ptr<CameraServiceProxyWrapper> cameraServiceProxyWrapper,
const std::string& clientPackageName,
bool systemNativeClient,
const std::optional<std::string>& clientFeatureId,
@@ -99,9 +100,10 @@
int servicePid,
bool overrideForPerfClass,
bool overrideToPortrait) :
- Camera2ClientBase(cameraService, remoteCallback, clientPackageName, systemNativeClient,
- clientFeatureId, cameraId, /*API1 camera ID*/ -1, cameraFacing, sensorOrientation,
- clientPid, clientUid, servicePid, overrideForPerfClass, overrideToPortrait),
+ Camera2ClientBase(cameraService, remoteCallback, cameraServiceProxyWrapper, clientPackageName,
+ systemNativeClient, clientFeatureId, cameraId, /*API1 camera ID*/ -1, cameraFacing,
+ sensorOrientation, clientPid, clientUid, servicePid, overrideForPerfClass,
+ overrideToPortrait),
mInputStream(),
mStreamingRequestId(REQUEST_ID_NONE),
mRequestIdCounter(0),
@@ -129,7 +131,12 @@
mFrameProcessor = new FrameProcessorBase(mDevice);
std::string threadName = std::string("CDU-") + mCameraIdStr + "-FrameProc";
- mFrameProcessor->run(threadName.c_str());
+ res = mFrameProcessor->run(threadName.c_str());
+ if (res != OK) {
+ ALOGE("%s: Unable to start frame processor thread: %s (%d)",
+ __FUNCTION__, strerror(-res), res);
+ return res;
+ }
mFrameProcessor->registerListener(camera2::FrameProcessorBase::FRAME_PROCESSOR_LISTENER_MIN_ID,
camera2::FrameProcessorBase::FRAME_PROCESSOR_LISTENER_MAX_ID,
@@ -183,11 +190,11 @@
// Cache physical camera ids corresponding to this device and also the high
// resolution sensors in this device + physical camera ids
mProviderManager->isLogicalCamera(mCameraIdStr, &mPhysicalCameraIds);
- if (isUltraHighResolutionSensor(mCameraIdStr)) {
+ if (supportsUltraHighResolutionCapture(mCameraIdStr)) {
mHighResolutionSensors.insert(mCameraIdStr);
}
for (auto &physicalId : mPhysicalCameraIds) {
- if (isUltraHighResolutionSensor(physicalId)) {
+ if (supportsUltraHighResolutionCapture(physicalId)) {
mHighResolutionSensors.insert(physicalId);
}
}
@@ -694,7 +701,7 @@
nsecs_t configureEnd = systemTime();
int32_t configureDurationMs = ns2ms(configureEnd) - startTimeMs;
- CameraServiceProxyWrapper::logStreamConfigured(mCameraIdStr, operatingMode,
+ mCameraServiceProxyWrapper->logStreamConfigured(mCameraIdStr, operatingMode,
false /*internalReconfig*/, configureDurationMs);
}
@@ -882,6 +889,8 @@
int64_t streamUseCase = outputConfiguration.getStreamUseCase();
int timestampBase = outputConfiguration.getTimestampBase();
int mirrorMode = outputConfiguration.getMirrorMode();
+ int32_t colorSpace = outputConfiguration.getColorSpace();
+ bool useReadoutTimestamp = outputConfiguration.useReadoutTimestamp();
res = SessionConfigurationUtils::checkSurfaceType(numBufferProducers, deferredConsumer,
outputConfiguration.getSurfaceType());
@@ -927,7 +936,7 @@
res = SessionConfigurationUtils::createSurfaceFromGbp(streamInfo,
isStreamInfoValid, surface, bufferProducer, mCameraIdStr,
mDevice->infoPhysical(physicalCameraId), sensorPixelModesUsed, dynamicRangeProfile,
- streamUseCase, timestampBase, mirrorMode);
+ streamUseCase, timestampBase, mirrorMode, colorSpace);
if (!res.isOk())
return res;
@@ -949,19 +958,26 @@
bool isDepthCompositeStream =
camera3::DepthCompositeStream::isDepthCompositeStream(surfaces[0]);
bool isHeicCompositeStream = camera3::HeicCompositeStream::isHeicCompositeStream(surfaces[0]);
- if (isDepthCompositeStream || isHeicCompositeStream) {
+ bool isJpegRCompositeStream =
+ camera3::JpegRCompositeStream::isJpegRCompositeStream(surfaces[0]) &&
+ !mDevice->isCompositeJpegRDisabled();
+ if (isDepthCompositeStream || isHeicCompositeStream || isJpegRCompositeStream) {
sp<CompositeStream> compositeStream;
if (isDepthCompositeStream) {
compositeStream = new camera3::DepthCompositeStream(mDevice, getRemoteCallback());
- } else {
+ } else if (isHeicCompositeStream) {
compositeStream = new camera3::HeicCompositeStream(mDevice, getRemoteCallback());
+ } else {
+ compositeStream = new camera3::JpegRCompositeStream(mDevice, getRemoteCallback());
}
err = compositeStream->createStream(surfaces, deferredConsumer, streamInfo.width,
streamInfo.height, streamInfo.format,
static_cast<camera_stream_rotation_t>(outputConfiguration.getRotation()),
&streamId, physicalCameraId, streamInfo.sensorPixelModesUsed, &surfaceIds,
- outputConfiguration.getSurfaceSetID(), isShared, isMultiResolution);
+ outputConfiguration.getSurfaceSetID(), isShared, isMultiResolution,
+ streamInfo.colorSpace, streamInfo.dynamicRangeProfile, streamInfo.streamUseCase,
+ useReadoutTimestamp);
if (err == OK) {
Mutex::Autolock l(mCompositeLock);
mCompositeStreamMap.add(IInterface::asBinder(surfaces[0]->getIGraphicBufferProducer()),
@@ -974,7 +990,8 @@
&streamId, physicalCameraId, streamInfo.sensorPixelModesUsed, &surfaceIds,
outputConfiguration.getSurfaceSetID(), isShared, isMultiResolution,
/*consumerUsage*/0, streamInfo.dynamicRangeProfile, streamInfo.streamUseCase,
- streamInfo.timestampBase, streamInfo.mirrorMode);
+ streamInfo.timestampBase, streamInfo.mirrorMode, streamInfo.colorSpace,
+ useReadoutTimestamp);
}
if (err != OK) {
@@ -1025,6 +1042,7 @@
int width, height, format, surfaceType;
uint64_t consumerUsage;
android_dataspace dataSpace;
+ int32_t colorSpace;
status_t err;
binder::Status res;
@@ -1038,6 +1056,7 @@
surfaceType = outputConfiguration.getSurfaceType();
format = HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED;
dataSpace = android_dataspace_t::HAL_DATASPACE_UNKNOWN;
+ colorSpace = ANDROID_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP_UNSPECIFIED;
// Hardcode consumer usage flags: SurfaceView--0x900, SurfaceTexture--0x100.
consumerUsage = GraphicBuffer::USAGE_HW_TEXTURE;
if (surfaceType == OutputConfiguration::SURFACE_TYPE_SURFACE_VIEW) {
@@ -1055,7 +1074,7 @@
outputConfiguration.getSensorPixelModesUsed();
if (SessionConfigurationUtils::checkAndOverrideSensorPixelModesUsed(
sensorPixelModesUsed, format, width, height, getStaticInfo(cameraIdUsed),
- /*allowRounding*/ false, &overriddenSensorPixelModesUsed) != OK) {
+ &overriddenSensorPixelModesUsed) != OK) {
return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT,
"sensor pixel modes used not valid for deferred stream");
}
@@ -1070,7 +1089,8 @@
outputConfiguration.isMultiResolution(), consumerUsage,
outputConfiguration.getDynamicRangeProfile(),
outputConfiguration.getStreamUseCase(),
- outputConfiguration.getMirrorMode());
+ outputConfiguration.getMirrorMode(),
+ outputConfiguration.useReadoutTimestamp());
if (err != OK) {
res = STATUS_ERROR_FMT(CameraService::ERROR_INVALID_OPERATION,
@@ -1087,7 +1107,8 @@
outputConfiguration.getDynamicRangeProfile(),
outputConfiguration.getStreamUseCase(),
outputConfiguration.getTimestampBase(),
- outputConfiguration.getMirrorMode()));
+ outputConfiguration.getMirrorMode(),
+ colorSpace));
ALOGV("%s: Camera %s: Successfully created a new stream ID %d for a deferred surface"
" (%d x %d) stream with format 0x%x.",
@@ -1277,6 +1298,7 @@
int64_t streamUseCase = outputConfiguration.getStreamUseCase();
int timestampBase = outputConfiguration.getTimestampBase();
int64_t dynamicRangeProfile = outputConfiguration.getDynamicRangeProfile();
+ int32_t colorSpace = outputConfiguration.getColorSpace();
int mirrorMode = outputConfiguration.getMirrorMode();
for (size_t i = 0; i < newOutputsMap.size(); i++) {
@@ -1285,7 +1307,7 @@
res = SessionConfigurationUtils::createSurfaceFromGbp(outInfo,
/*isStreamInfoValid*/ false, surface, newOutputsMap.valueAt(i), mCameraIdStr,
mDevice->infoPhysical(physicalCameraId), sensorPixelModesUsed, dynamicRangeProfile,
- streamUseCase, timestampBase, mirrorMode);
+ streamUseCase, timestampBase, mirrorMode, colorSpace);
if (!res.isOk())
return res;
@@ -1460,7 +1482,7 @@
binder::Status CameraDeviceClient::prepare(int streamId) {
ATRACE_CALL();
- ALOGV("%s", __FUNCTION__);
+ ALOGV("%s stream id %d", __FUNCTION__, streamId);
binder::Status res;
if (!(res = checkPidStatus(__FUNCTION__)).isOk()) return res;
@@ -1500,7 +1522,7 @@
binder::Status CameraDeviceClient::prepare2(int maxCount, int streamId) {
ATRACE_CALL();
- ALOGV("%s", __FUNCTION__);
+ ALOGV("%s stream id %d", __FUNCTION__, streamId);
binder::Status res;
if (!(res = checkPidStatus(__FUNCTION__)).isOk()) return res;
@@ -1644,7 +1666,8 @@
const std::vector<int32_t> &sensorPixelModesUsed =
outputConfiguration.getSensorPixelModesUsed();
int64_t dynamicRangeProfile = outputConfiguration.getDynamicRangeProfile();
- int64_t streamUseCase= outputConfiguration.getStreamUseCase();
+ int32_t colorSpace = outputConfiguration.getColorSpace();
+ int64_t streamUseCase = outputConfiguration.getStreamUseCase();
int timestampBase = outputConfiguration.getTimestampBase();
int mirrorMode = outputConfiguration.getMirrorMode();
for (auto& bufferProducer : bufferProducers) {
@@ -1660,7 +1683,7 @@
res = SessionConfigurationUtils::createSurfaceFromGbp(mStreamInfoMap[streamId],
true /*isStreamInfoValid*/, surface, bufferProducer, mCameraIdStr,
mDevice->infoPhysical(physicalId), sensorPixelModesUsed, dynamicRangeProfile,
- streamUseCase, timestampBase, mirrorMode);
+ streamUseCase, timestampBase, mirrorMode, colorSpace);
if (!res.isOk())
return res;
@@ -1744,6 +1767,13 @@
static_cast<camera_metadata_enum_android_scaler_rotate_and_crop_t>(rotateAndCrop));
}
+status_t CameraDeviceClient::setAutoframingOverride(uint8_t autoframingValue) {
+ if (autoframingValue > ANDROID_CONTROL_AUTOFRAMING_AUTO) return BAD_VALUE;
+
+ return mDevice->setAutoframingAutoBehavior(
+ static_cast<camera_metadata_enum_android_control_autoframing_t>(autoframingValue));
+}
+
bool CameraDeviceClient::supportsCameraMute() {
return mDevice->supportsCameraMute();
}
@@ -1761,6 +1791,14 @@
mDevice->clearStreamUseCaseOverrides();
}
+bool CameraDeviceClient::supportsZoomOverride() {
+ return mDevice->supportsZoomOverride();
+}
+
+status_t CameraDeviceClient::setZoomOverride(int32_t zoomOverride) {
+ return mDevice->setZoomOverride(zoomOverride);
+}
+
binder::Status CameraDeviceClient::switchToOffline(
const sp<hardware::camera2::ICameraDeviceCallbacks>& cameraCb,
const std::vector<int>& offlineOutputIds,
@@ -1813,7 +1851,9 @@
for (const auto& gbp : mConfiguredOutputs.valueAt(index).getGraphicBufferProducers()) {
sp<Surface> s = new Surface(gbp, false /*controlledByApp*/);
isCompositeStream = camera3::DepthCompositeStream::isDepthCompositeStream(s) ||
- camera3::HeicCompositeStream::isHeicCompositeStream(s);
+ camera3::HeicCompositeStream::isHeicCompositeStream(s) ||
+ (camera3::JpegRCompositeStream::isJpegRCompositeStream(s) &&
+ !mDevice->isCompositeJpegRDisabled());
if (isCompositeStream) {
auto compositeIdx = mCompositeStreamMap.indexOfKey(IInterface::asBinder(gbp));
if (compositeIdx == NAME_NOT_FOUND) {
@@ -1996,8 +2036,20 @@
if (remoteCb != 0) {
remoteCb->onDeviceIdle();
}
+
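+ // Composite streams keep their own per-stream stats; fold them into the set
+ // reported to the service proxy below so they are logged like regular streams.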
+ std::vector<hardware::CameraStreamStats> fullStreamStats = streamStats;
+ {
+ Mutex::Autolock l(mCompositeLock);
+ for (size_t i = 0; i < mCompositeStreamMap.size(); i++) {
+ hardware::CameraStreamStats compositeStats;
+ mCompositeStreamMap.valueAt(i)->getStreamStats(&compositeStats);
+ if (compositeStats.mWidth > 0) {
+ fullStreamStats.push_back(compositeStats);
+ }
+ }
+ }
Camera2ClientBase::notifyIdleWithUserTag(requestCount, resultErrorCount, deviceError,
- streamStats, mUserTag, mVideoStabilizationMode);
+ fullStreamStats, mUserTag, mVideoStabilizationMode);
}
void CameraDeviceClient::notifyShutter(const CaptureResultExtras& resultExtras,
@@ -2020,6 +2072,7 @@
// Thread safe. Don't bother locking.
sp<hardware::camera2::ICameraDeviceCallbacks> remoteCb = getRemoteCallback();
if (remoteCb != 0) {
+ ALOGV("%s: stream id %d", __FUNCTION__, streamId);
remoteCb->onPrepared(streamId);
}
}
@@ -2074,10 +2127,11 @@
mCompositeStreamMap.clear();
}
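+ // Read the device error state before detachDevice() below tears the device down.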
+ bool hasDeviceError = mDevice->hasDeviceError();
Camera2ClientBase::detachDevice();
int32_t closeLatencyMs = ns2ms(systemTime() - startTime);
- CameraServiceProxyWrapper::logClose(mCameraIdStr, closeLatencyMs);
+ mCameraServiceProxyWrapper->logClose(mCameraIdStr, closeLatencyMs, hasDeviceError);
}
/** Device-related methods */
@@ -2215,9 +2269,9 @@
return mDevice->infoPhysical(cameraId);
}
-bool CameraDeviceClient::isUltraHighResolutionSensor(const std::string &cameraId) {
+bool CameraDeviceClient::supportsUltraHighResolutionCapture(const std::string &cameraId) {
const CameraMetadata &deviceInfo = getStaticInfo(cameraId);
- return SessionConfigurationUtils::isUltraHighResolutionSensor(deviceInfo);
+ return SessionConfigurationUtils::supportsUltraHighResolutionCapture(deviceInfo);
}
bool CameraDeviceClient::isSensorPixelModeConsistent(
diff --git a/services/camera/libcameraservice/api2/CameraDeviceClient.h b/services/camera/libcameraservice/api2/CameraDeviceClient.h
index 4b330f3..45c904a 100644
--- a/services/camera/libcameraservice/api2/CameraDeviceClient.h
+++ b/services/camera/libcameraservice/api2/CameraDeviceClient.h
@@ -29,6 +29,7 @@
#include "common/FrameProcessorBase.h"
#include "common/Camera2ClientBase.h"
#include "CompositeStream.h"
+#include "utils/CameraServiceProxyWrapper.h"
#include "utils/SessionConfigurationUtils.h"
using android::camera3::OutputStreamInfo;
@@ -179,6 +180,7 @@
CameraDeviceClient(const sp<CameraService>& cameraService,
const sp<hardware::camera2::ICameraDeviceCallbacks>& remoteCallback,
+ std::shared_ptr<CameraServiceProxyWrapper> cameraServiceProxyWrapper,
const std::string& clientPackageName,
bool clientPackageOverride,
const std::optional<std::string>& clientFeatureId,
@@ -197,9 +199,14 @@
virtual status_t setRotateAndCropOverride(uint8_t rotateAndCrop) override;
+ virtual status_t setAutoframingOverride(uint8_t autoframingValue) override;
+
virtual bool supportsCameraMute();
virtual status_t setCameraMute(bool enabled);
+ virtual bool supportsZoomOverride() override;
+ virtual status_t setZoomOverride(int32_t zoomOverride) override;
+
virtual status_t dump(int fd, const Vector<String16>& args);
virtual status_t dumpClient(int fd, const Vector<String16>& args);
@@ -238,7 +245,7 @@
// Calculate the ANativeWindow transform from android.sensor.orientation
status_t getRotationTransformLocked(int mirrorMode, /*out*/int32_t* transform);
- bool isUltraHighResolutionSensor(const std::string &cameraId);
+ bool supportsUltraHighResolutionCapture(const std::string &cameraId);
bool isSensorPixelModeConsistent(const std::list<int> &streamIdList,
const CameraMetadata &settings);
diff --git a/services/camera/libcameraservice/api2/CameraOfflineSessionClient.cpp b/services/camera/libcameraservice/api2/CameraOfflineSessionClient.cpp
index 89c05b0..99bdb0e 100644
--- a/services/camera/libcameraservice/api2/CameraOfflineSessionClient.cpp
+++ b/services/camera/libcameraservice/api2/CameraOfflineSessionClient.cpp
@@ -49,7 +49,12 @@
mFrameProcessor = new camera2::FrameProcessorBase(mOfflineSession);
std::string threadName = fmt::sprintf("Offline-%s-FrameProc", mCameraIdStr.c_str());
- mFrameProcessor->run(threadName.c_str());
+ res = mFrameProcessor->run(threadName.c_str());
+ if (res != OK) {
+ ALOGE("%s: Unable to start frame processor thread: %s (%d)",
+ __FUNCTION__, strerror(-res), res);
+ return res;
+ }
mFrameProcessor->registerListener(camera2::FrameProcessorBase::FRAME_PROCESSOR_LISTENER_MIN_ID,
camera2::FrameProcessorBase::FRAME_PROCESSOR_LISTENER_MAX_ID,
@@ -81,6 +86,10 @@
return OK;
}
+status_t CameraOfflineSessionClient::setAutoframingOverride(uint8_t) {
+ return OK;
+}
+
bool CameraOfflineSessionClient::supportsCameraMute() {
// Offline mode doesn't support muting
return false;
@@ -97,6 +106,14 @@
void CameraOfflineSessionClient::clearStreamUseCaseOverrides() {
}
+bool CameraOfflineSessionClient::supportsZoomOverride() {
+ return false;
+}
+
+status_t CameraOfflineSessionClient::setZoomOverride(int32_t /*zoomOverride*/) {
+ return INVALID_OPERATION;
+}
+
status_t CameraOfflineSessionClient::dump(int fd, const Vector<String16>& args) {
return BasicClient::dump(fd, args);
}
@@ -247,7 +264,7 @@
mOpsActive = true;
// Transition device state to OPEN
- sCameraService->mUidPolicy->registerMonitorUid(mClientUid);
+ sCameraService->mUidPolicy->registerMonitorUid(mClientUid, /*openCamera*/true);
return OK;
}
@@ -271,7 +288,7 @@
}
mOpsCallback.clear();
- sCameraService->mUidPolicy->unregisterMonitorUid(mClientUid);
+ sCameraService->mUidPolicy->unregisterMonitorUid(mClientUid, /*closeCamera*/true);
return OK;
}
diff --git a/services/camera/libcameraservice/api2/CameraOfflineSessionClient.h b/services/camera/libcameraservice/api2/CameraOfflineSessionClient.h
index 4a5b1f2..70bad03 100644
--- a/services/camera/libcameraservice/api2/CameraOfflineSessionClient.h
+++ b/services/camera/libcameraservice/api2/CameraOfflineSessionClient.h
@@ -82,6 +82,8 @@
status_t setRotateAndCropOverride(uint8_t rotateAndCrop) override;
+ status_t setAutoframingOverride(uint8_t autoframingValue) override;
+
bool supportsCameraMute() override;
status_t setCameraMute(bool enabled) override;
@@ -92,6 +94,10 @@
void clearStreamUseCaseOverrides() override;
+ bool supportsZoomOverride() override;
+
+ status_t setZoomOverride(int32_t zoomOverride) override;
+
// permissions management
status_t startCameraOps() override;
status_t finishCameraOps() override;
diff --git a/services/camera/libcameraservice/api2/CompositeStream.cpp b/services/camera/libcameraservice/api2/CompositeStream.cpp
index 3221d74..8f53458 100644
--- a/services/camera/libcameraservice/api2/CompositeStream.cpp
+++ b/services/camera/libcameraservice/api2/CompositeStream.cpp
@@ -49,7 +49,8 @@
camera_stream_rotation_t rotation, int * id, const std::string& physicalCameraId,
const std::unordered_set<int32_t> &sensorPixelModesUsed,
std::vector<int> * surfaceIds,
- int streamSetId, bool isShared, bool isMultiResolution) {
+ int streamSetId, bool isShared, bool isMultiResolution, int32_t colorSpace,
+ int64_t dynamicProfile, int64_t streamUseCase, bool useReadoutTimestamp) {
if (hasDeferredConsumer) {
ALOGE("%s: Deferred consumers not supported in case of composite streams!",
__FUNCTION__);
@@ -75,7 +76,8 @@
}
return createInternalStreams(consumers, hasDeferredConsumer, width, height, format, rotation,
- id, physicalCameraId, sensorPixelModesUsed, surfaceIds, streamSetId, isShared);
+ id, physicalCameraId, sensorPixelModesUsed, surfaceIds, streamSetId, isShared,
+ colorSpace, dynamicProfile, streamUseCase, useReadoutTimestamp);
}
status_t CompositeStream::deleteStream() {
@@ -85,6 +87,7 @@
mCaptureResults.clear();
mFrameNumberMap.clear();
mErrorFrameNumbers.clear();
+ mRequestTimeMap.clear();
}
return deleteInternalStreams();
@@ -95,6 +98,8 @@
Mutex::Autolock l(mMutex);
if (!mErrorState && (streamId == getStreamId())) {
mPendingCaptureResults.emplace(frameNumber, CameraMetadata());
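+ // Record when this capture was requested so composite streams can later
+ // derive and report capture latency in their stream stats.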
+ auto ts = systemTime();
+ mRequestTimeMap.emplace(frameNumber, ts);
}
}
@@ -109,6 +114,11 @@
void CompositeStream::eraseResult(int64_t frameNumber) {
Mutex::Autolock l(mMutex);
+ auto requestTimeIt = mRequestTimeMap.find(frameNumber);
+ if (requestTimeIt != mRequestTimeMap.end()) {
+ mRequestTimeMap.erase(requestTimeIt);
+ }
+
auto it = mPendingCaptureResults.find(frameNumber);
if (it == mPendingCaptureResults.end()) {
return;
diff --git a/services/camera/libcameraservice/api2/CompositeStream.h b/services/camera/libcameraservice/api2/CompositeStream.h
index ec16dde..1b7fc6e 100644
--- a/services/camera/libcameraservice/api2/CompositeStream.h
+++ b/services/camera/libcameraservice/api2/CompositeStream.h
@@ -46,7 +46,8 @@
camera_stream_rotation_t rotation, int *id, const std::string& physicalCameraId,
const std::unordered_set<int32_t> &sensorPixelModesUsed,
std::vector<int> *surfaceIds,
- int streamSetId, bool isShared, bool isMultiResolution);
+ int streamSetId, bool isShared, bool isMultiResolution, int32_t colorSpace,
+ int64_t dynamicProfile, int64_t streamUseCase, bool useReadoutTimestamp);
status_t deleteStream();
@@ -59,7 +60,8 @@
camera_stream_rotation_t rotation, int *id, const std::string& physicalCameraId,
const std::unordered_set<int32_t> &sensorPixelModesUsed,
std::vector<int> *surfaceIds,
- int streamSetId, bool isShared) = 0;
+ int streamSetId, bool isShared, int32_t colorSpace,
+ int64_t dynamicProfile, int64_t streamUseCase, bool useReadoutTimestamp) = 0;
// Release all internal streams and corresponding resources.
virtual status_t deleteInternalStreams() = 0;
@@ -81,6 +83,9 @@
// Notify when shutter notify is triggered
virtual void onShutter(const CaptureResultExtras& /*resultExtras*/, nsecs_t /*timestamp*/) {}
+ // Get composite stream stats
+ virtual void getStreamStats(hardware::CameraStreamStats* streamStats /*out*/) = 0;
+
void onResultAvailable(const CaptureResult& result);
bool onError(int32_t errorCode, const CaptureResultExtras& resultExtras);
@@ -138,6 +143,9 @@
// Keeps a set buffer/result frame numbers for any errors detected during processing.
std::set<int64_t> mErrorFrameNumbers;
+ // Frame number to request time map
+ std::unordered_map<int64_t, nsecs_t> mRequestTimeMap;
+
};
}; //namespace camera3
diff --git a/services/camera/libcameraservice/api2/DepthCompositeStream.cpp b/services/camera/libcameraservice/api2/DepthCompositeStream.cpp
index 01fe78b..1bd0b85 100644
--- a/services/camera/libcameraservice/api2/DepthCompositeStream.cpp
+++ b/services/camera/libcameraservice/api2/DepthCompositeStream.cpp
@@ -99,7 +99,7 @@
}
getSupportedDepthSizes(staticInfo, /*maxResolution*/false, &mSupportedDepthSizes);
- if (SessionConfigurationUtils::isUltraHighResolutionSensor(staticInfo)) {
+ if (SessionConfigurationUtils::supportsUltraHighResolutionCapture(staticInfo)) {
getSupportedDepthSizes(staticInfo, true, &mSupportedDepthSizesMaximumResolution);
}
}
@@ -582,7 +582,8 @@
camera_stream_rotation_t rotation, int *id, const std::string& physicalCameraId,
const std::unordered_set<int32_t> &sensorPixelModesUsed,
std::vector<int> *surfaceIds,
- int /*streamSetId*/, bool /*isShared*/) {
+ int /*streamSetId*/, bool /*isShared*/, int32_t /*colorSpace*/,
+ int64_t /*dynamicProfile*/, int64_t /*streamUseCase*/, bool useReadoutTimestamp) {
if (mSupportedDepthSizes.empty()) {
ALOGE("%s: This camera device doesn't support any depth map streams!", __FUNCTION__);
return INVALID_OPERATION;
@@ -613,7 +614,14 @@
mBlobSurface = new Surface(producer);
ret = device->createStream(mBlobSurface, width, height, format, kJpegDataSpace, rotation,
- id, physicalCameraId, sensorPixelModesUsed, surfaceIds);
+ id, physicalCameraId, sensorPixelModesUsed, surfaceIds,
+ camera3::CAMERA3_STREAM_SET_ID_INVALID, /*isShared*/false, /*isMultiResolution*/false,
+ /*consumerUsage*/0, ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD,
+ ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_DEFAULT,
+ OutputConfiguration::TIMESTAMP_BASE_DEFAULT,
+ OutputConfiguration::MIRROR_MODE_AUTO,
+ ANDROID_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP_UNSPECIFIED,
+ useReadoutTimestamp);
if (ret == OK) {
mBlobStreamId = *id;
mBlobSurfaceId = (*surfaceIds)[0];
@@ -630,7 +638,14 @@
std::vector<int> depthSurfaceId;
ret = device->createStream(mDepthSurface, depthWidth, depthHeight, kDepthMapPixelFormat,
kDepthMapDataSpace, rotation, &mDepthStreamId, physicalCameraId, sensorPixelModesUsed,
- &depthSurfaceId);
+ &depthSurfaceId, camera3::CAMERA3_STREAM_SET_ID_INVALID, /*isShared*/false,
+ /*isMultiResolution*/false, /*consumerUsage*/0,
+ ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD,
+ ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_DEFAULT,
+ OutputConfiguration::TIMESTAMP_BASE_DEFAULT,
+ OutputConfiguration::MIRROR_MODE_AUTO,
+ ANDROID_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP_UNSPECIFIED,
+ useReadoutTimestamp);
if (ret == OK) {
mDepthSurfaceId = depthSurfaceId[0];
} else {
@@ -887,7 +902,7 @@
return BAD_VALUE;
}
- if (SessionConfigurationUtils::isUltraHighResolutionSensor(ch)) {
+ if (SessionConfigurationUtils::supportsUltraHighResolutionCapture(ch)) {
getSupportedDepthSizes(ch, /*maxResolution*/true, &depthSizesMaximumResolution);
if (depthSizesMaximumResolution.empty()) {
ALOGE("%s: No depth stream configurations for maximum resolution present",
diff --git a/services/camera/libcameraservice/api2/DepthCompositeStream.h b/services/camera/libcameraservice/api2/DepthCompositeStream.h
index a8c40ae..f797f9c 100644
--- a/services/camera/libcameraservice/api2/DepthCompositeStream.h
+++ b/services/camera/libcameraservice/api2/DepthCompositeStream.h
@@ -53,7 +53,8 @@
camera_stream_rotation_t rotation, int *id, const std::string& physicalCameraId,
const std::unordered_set<int32_t> &sensorPixelModesUsed,
std::vector<int> *surfaceIds,
- int streamSetId, bool isShared) override;
+ int streamSetId, bool isShared, int32_t colorSpace,
+ int64_t dynamicProfile, int64_t streamUseCase, bool useReadoutTimestamp) override;
status_t deleteInternalStreams() override;
status_t configureStream() override;
status_t insertGbp(SurfaceMap* /*out*/outSurfaceMap, Vector<int32_t>* /*out*/outputStreamIds,
@@ -68,6 +69,9 @@
static status_t getCompositeStreamInfo(const OutputStreamInfo &streamInfo,
const CameraMetadata& ch, std::vector<OutputStreamInfo>* compositeOutput /*out*/);
+ // Get composite stream stats
+ void getStreamStats(hardware::CameraStreamStats*) override {};
+
protected:
bool threadLoop() override;
diff --git a/services/camera/libcameraservice/api2/HeicCompositeStream.cpp b/services/camera/libcameraservice/api2/HeicCompositeStream.cpp
index 97c1ae1..68e9ad4 100644
--- a/services/camera/libcameraservice/api2/HeicCompositeStream.cpp
+++ b/services/camera/libcameraservice/api2/HeicCompositeStream.cpp
@@ -121,8 +121,8 @@
camera_stream_rotation_t rotation, int *id, const std::string& physicalCameraId,
const std::unordered_set<int32_t> &sensorPixelModesUsed,
std::vector<int> *surfaceIds,
- int /*streamSetId*/, bool /*isShared*/) {
-
+ int /*streamSetId*/, bool /*isShared*/, int32_t colorSpace,
+ int64_t /*dynamicProfile*/, int64_t /*streamUseCase*/, bool useReadoutTimestamp) {
sp<CameraDeviceBase> device = mDevice.promote();
if (!device.get()) {
ALOGE("%s: Invalid camera device!", __FUNCTION__);
@@ -148,7 +148,14 @@
res = device->createStream(mAppSegmentSurface, mAppSegmentMaxSize, 1, format,
kAppSegmentDataSpace, rotation, &mAppSegmentStreamId, physicalCameraId,
- sensorPixelModesUsed,surfaceIds);
+ sensorPixelModesUsed, surfaceIds, camera3::CAMERA3_STREAM_SET_ID_INVALID,
+ /*isShared*/false, /*isMultiResolution*/false,
+ /*consumerUsage*/0, ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD,
+ ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_DEFAULT,
+ OutputConfiguration::TIMESTAMP_BASE_DEFAULT,
+ OutputConfiguration::MIRROR_MODE_AUTO,
+ colorSpace,
+ useReadoutTimestamp);
if (res == OK) {
mAppSegmentSurfaceId = (*surfaceIds)[0];
} else {
@@ -184,7 +191,14 @@
int srcStreamFmt = mUseGrid ? HAL_PIXEL_FORMAT_YCbCr_420_888 :
HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED;
res = device->createStream(mMainImageSurface, width, height, srcStreamFmt, kHeifDataSpace,
- rotation, id, physicalCameraId, sensorPixelModesUsed, &sourceSurfaceId);
+ rotation, id, physicalCameraId, sensorPixelModesUsed, &sourceSurfaceId,
+ camera3::CAMERA3_STREAM_SET_ID_INVALID, /*isShared*/false, /*isMultiResolution*/false,
+ /*consumerUsage*/0, ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD,
+ ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_DEFAULT,
+ OutputConfiguration::TIMESTAMP_BASE_DEFAULT,
+ OutputConfiguration::MIRROR_MODE_AUTO,
+ colorSpace,
+ useReadoutTimestamp);
if (res == OK) {
mMainImageSurfaceId = sourceSurfaceId[0];
mMainImageStreamId = *id;
diff --git a/services/camera/libcameraservice/api2/HeicCompositeStream.h b/services/camera/libcameraservice/api2/HeicCompositeStream.h
index 78c5f02..b539cdd 100644
--- a/services/camera/libcameraservice/api2/HeicCompositeStream.h
+++ b/services/camera/libcameraservice/api2/HeicCompositeStream.h
@@ -47,8 +47,8 @@
bool hasDeferredConsumer, uint32_t width, uint32_t height, int format,
camera_stream_rotation_t rotation, int *id, const std::string& physicalCameraId,
const std::unordered_set<int32_t> &sensorPixelModesUsed,
- std::vector<int> *surfaceIds,
- int streamSetId, bool isShared) override;
+ std::vector<int> *surfaceIds, int streamSetId, bool isShared, int32_t colorSpace,
+ int64_t dynamicProfile, int64_t streamUseCase, bool useReadoutTimestamp) override;
status_t deleteInternalStreams() override;
@@ -75,6 +75,9 @@
static status_t getCompositeStreamInfo(const OutputStreamInfo &streamInfo,
const CameraMetadata& ch, std::vector<OutputStreamInfo>* compositeOutput /*out*/);
+ // Get composite stream stats
+ void getStreamStats(hardware::CameraStreamStats*) override {};
+
static bool isSizeSupportedByHeifEncoder(int32_t width, int32_t height,
bool* useHeic, bool* useGrid, int64_t* stall, AString* hevcName = nullptr);
static bool isInMemoryTempFileSupported();
diff --git a/services/camera/libcameraservice/api2/JpegRCompositeStream.cpp b/services/camera/libcameraservice/api2/JpegRCompositeStream.cpp
new file mode 100644
index 0000000..988446b
--- /dev/null
+++ b/services/camera/libcameraservice/api2/JpegRCompositeStream.cpp
@@ -0,0 +1,879 @@
+/*
+ * Copyright (C) 2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include "hardware/gralloc.h"
+#include "system/graphics-base-v1.0.h"
+#include "system/graphics-base-v1.1.h"
+#define LOG_TAG "Camera3-JpegRCompositeStream"
+#define ATRACE_TAG ATRACE_TAG_CAMERA
+//#define LOG_NDEBUG 0
+
+#include <aidl/android/hardware/camera/device/CameraBlob.h>
+#include <aidl/android/hardware/camera/device/CameraBlobId.h>
+
+#include "common/CameraProviderManager.h"
+#include <gui/Surface.h>
+#include <ultrahdr/jpegr.h>
+#include <utils/ExifUtils.h>
+#include <utils/Log.h>
+#include "utils/SessionConfigurationUtils.h"
+#include <utils/Trace.h>
+
+#include "JpegRCompositeStream.h"
+
+namespace android {
+namespace camera3 {
+
+using aidl::android::hardware::camera::device::CameraBlob;
+using aidl::android::hardware::camera::device::CameraBlobId;
+
+JpegRCompositeStream::JpegRCompositeStream(sp<CameraDeviceBase> device,
+ wp<hardware::camera2::ICameraDeviceCallbacks> cb) :
+ CompositeStream(device, cb),
+ mBlobStreamId(-1),
+ mBlobSurfaceId(-1),
+ mP010StreamId(-1),
+ mP010SurfaceId(-1),
+ mBlobWidth(0),
+ mBlobHeight(0),
+ mP010BufferAcquired(false),
+ mBlobBufferAcquired(false),
+ mOutputColorSpace(ANDROID_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP_UNSPECIFIED),
+ mOutputStreamUseCase(0),
+ mFirstRequestLatency(-1),
+ mProducerListener(new ProducerListener()),
+ mMaxJpegBufferSize(-1),
+ mUHRMaxJpegBufferSize(-1),
+ mStaticInfo(device->info()) {
+ auto entry = mStaticInfo.find(ANDROID_JPEG_MAX_SIZE);
+ if (entry.count > 0) {
+ mMaxJpegBufferSize = entry.data.i32[0];
+ } else {
+ ALOGW("%s: Maximum jpeg size absent from camera characteristics", __FUNCTION__);
+ }
+
+ mUHRMaxJpegSize =
+ SessionConfigurationUtils::getMaxJpegResolution(mStaticInfo,
+ /*ultraHighResolution*/true);
+ mDefaultMaxJpegSize =
+ SessionConfigurationUtils::getMaxJpegResolution(mStaticInfo,
+ /*isUltraHighResolution*/false);
+
+ mUHRMaxJpegBufferSize =
+ SessionConfigurationUtils::getUHRMaxJpegBufferSize(mUHRMaxJpegSize, mDefaultMaxJpegSize,
+ mMaxJpegBufferSize);
+}
+
+JpegRCompositeStream::~JpegRCompositeStream() {
+ mBlobConsumer.clear();
+ mBlobSurface.clear();
+ mBlobStreamId = -1;
+ mBlobSurfaceId = -1;
+ mP010Consumer.clear();
+ mP010Surface.clear();
+ mP010Consumer = nullptr;
+ mP010Surface = nullptr;
+}
+
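+// Move any newly arrived jpeg/P010 buffers, capture results and request times into
+// 'mPendingInputFrames', and flag entries whose frame numbers were reported as failed.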
+void JpegRCompositeStream::compilePendingInputLocked() {
+ CpuConsumer::LockedBuffer imgBuffer;
+
+ while (mSupportInternalJpeg && !mInputJpegBuffers.empty() && !mBlobBufferAcquired) {
+ auto it = mInputJpegBuffers.begin();
+ auto res = mBlobConsumer->lockNextBuffer(&imgBuffer);
+ if (res == NOT_ENOUGH_DATA) {
+ // Can not lock any more buffers.
+ break;
+ } else if (res != OK) {
+ ALOGE("%s: Error locking blob image buffer: %s (%d)", __FUNCTION__,
+ strerror(-res), res);
+ mPendingInputFrames[*it].error = true;
+ mInputJpegBuffers.erase(it);
+ continue;
+ }
+
+ if (*it != imgBuffer.timestamp) {
+ ALOGW("%s: Expecting jpeg buffer with time stamp: %" PRId64 " received buffer with "
+ "time stamp: %" PRId64, __FUNCTION__, *it, imgBuffer.timestamp);
+ }
+
+ if ((mPendingInputFrames.find(imgBuffer.timestamp) != mPendingInputFrames.end()) &&
+ (mPendingInputFrames[imgBuffer.timestamp].error)) {
+ mBlobConsumer->unlockBuffer(imgBuffer);
+ } else {
+ mPendingInputFrames[imgBuffer.timestamp].jpegBuffer = imgBuffer;
+ mBlobBufferAcquired = true;
+ }
+ mInputJpegBuffers.erase(it);
+ }
+
+ while (!mInputP010Buffers.empty() && !mP010BufferAcquired) {
+ auto it = mInputP010Buffers.begin();
+ auto res = mP010Consumer->lockNextBuffer(&imgBuffer);
+ if (res == NOT_ENOUGH_DATA) {
+ // Can not lock any more buffers.
+ break;
+ } else if (res != OK) {
+ ALOGE("%s: Error receiving P010 image buffer: %s (%d)", __FUNCTION__,
+ strerror(-res), res);
+ mPendingInputFrames[*it].error = true;
+ mInputP010Buffers.erase(it);
+ continue;
+ }
+
+ if (*it != imgBuffer.timestamp) {
+ ALOGW("%s: Expecting P010 buffer with time stamp: %" PRId64 " received buffer with "
+ "time stamp: %" PRId64, __FUNCTION__, *it, imgBuffer.timestamp);
+ }
+
+ if ((mPendingInputFrames.find(imgBuffer.timestamp) != mPendingInputFrames.end()) &&
+ (mPendingInputFrames[imgBuffer.timestamp].error)) {
+ mP010Consumer->unlockBuffer(imgBuffer);
+ } else {
+ mPendingInputFrames[imgBuffer.timestamp].p010Buffer = imgBuffer;
+ mP010BufferAcquired = true;
+ }
+ mInputP010Buffers.erase(it);
+ }
+
+ while (!mCaptureResults.empty()) {
+ auto it = mCaptureResults.begin();
+ // Negative timestamp indicates that something went wrong during the capture result
+ // collection process.
+ if (it->first >= 0) {
+ auto frameNumber = std::get<0>(it->second);
+ mPendingInputFrames[it->first].frameNumber = frameNumber;
+ mPendingInputFrames[it->first].result = std::get<1>(it->second);
+ mSessionStatsBuilder.incResultCounter(false /*dropped*/);
+ }
+ mCaptureResults.erase(it);
+ }
+
+ while (!mFrameNumberMap.empty()) {
+ auto it = mFrameNumberMap.begin();
+ auto frameNumber = it->first;
+ mPendingInputFrames[it->second].frameNumber = frameNumber;
+ auto requestTimeIt = mRequestTimeMap.find(frameNumber);
+ if (requestTimeIt != mRequestTimeMap.end()) {
+ mPendingInputFrames[it->second].requestTimeNs = requestTimeIt->second;
+ mRequestTimeMap.erase(requestTimeIt);
+ }
+ mFrameNumberMap.erase(it);
+ }
+
+ auto it = mErrorFrameNumbers.begin();
+ while (it != mErrorFrameNumbers.end()) {
+ bool frameFound = false;
+ for (auto &inputFrame : mPendingInputFrames) {
+ if (inputFrame.second.frameNumber == *it) {
+ inputFrame.second.error = true;
+ frameFound = true;
+ break;
+ }
+ }
+
+ if (frameFound) {
+ mSessionStatsBuilder.incCounter(mP010StreamId, true /*dropped*/,
+ 0 /*captureLatencyMs*/);
+ it = mErrorFrameNumbers.erase(it);
+ } else {
+ ALOGW("%s: Not able to find failing input with frame number: %" PRId64, __FUNCTION__,
+ *it);
+ it++;
+ }
+ }
+}
+
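+// Find the oldest pending frame that has all of its inputs ready: a P010 buffer, a
+// request time and, when internal jpeg capture is supported, a jpeg buffer.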
+bool JpegRCompositeStream::getNextReadyInputLocked(int64_t *currentTs /*inout*/) {
+ if (currentTs == nullptr) {
+ return false;
+ }
+
+ bool newInputAvailable = false;
+ for (const auto& it : mPendingInputFrames) {
+ if ((!it.second.error) && (it.second.p010Buffer.data != nullptr) &&
+ (it.second.requestTimeNs != -1) &&
+ ((it.second.jpegBuffer.data != nullptr) || !mSupportInternalJpeg) &&
+ (it.first < *currentTs)) {
+ *currentTs = it.first;
+ newInputAvailable = true;
+ }
+ }
+
+ return newInputAvailable;
+}
+
+int64_t JpegRCompositeStream::getNextFailingInputLocked(int64_t *currentTs /*inout*/) {
+ int64_t ret = -1;
+ if (currentTs == nullptr) {
+ return ret;
+ }
+
+ for (const auto& it : mPendingInputFrames) {
+ if (it.second.error && !it.second.errorNotified && (it.first < *currentTs)) {
+ *currentTs = it.first;
+ ret = it.second.frameNumber;
+ }
+ }
+
+ return ret;
+}
+
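+// Encode one complete input frame to JPEG/R: dequeue a blob buffer from the client
+// surface, run the ultrahdr encoder on the P010 (and optional SDR jpeg) inputs,
+// append the CameraBlob transport header at the end of the buffer and queue it back.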
+status_t JpegRCompositeStream::processInputFrame(nsecs_t ts, const InputFrame &inputFrame) {
+ status_t res;
+ sp<ANativeWindow> outputANW = mOutputSurface;
+ ANativeWindowBuffer *anb;
+ int fenceFd;
+ void *dstBuffer;
+
+ size_t maxJpegRBufferSize = 0;
+ if (mMaxJpegBufferSize > 0) {
+ // If this is an ultra high resolution sensor and the input frames size
+ // is > default res jpeg.
+ if (mUHRMaxJpegSize.width != 0 &&
+ inputFrame.jpegBuffer.width * inputFrame.jpegBuffer.height >
+ mDefaultMaxJpegSize.width * mDefaultMaxJpegSize.height) {
+ maxJpegRBufferSize = mUHRMaxJpegBufferSize;
+ } else {
+ maxJpegRBufferSize = mMaxJpegBufferSize;
+ }
+ } else {
+ maxJpegRBufferSize = inputFrame.p010Buffer.width * inputFrame.p010Buffer.height;
+ }
+
+ uint8_t jpegQuality = 100;
+ auto entry = inputFrame.result.find(ANDROID_JPEG_QUALITY);
+ if (entry.count > 0) {
+ jpegQuality = entry.data.u8[0];
+ }
+
+ if ((res = native_window_set_buffers_dimensions(mOutputSurface.get(), maxJpegRBufferSize, 1))
+ != OK) {
+ ALOGE("%s: Unable to configure stream buffer dimensions"
+ " %zux%u for stream %d", __FUNCTION__, maxJpegRBufferSize, 1U, mP010StreamId);
+ return res;
+ }
+
+ res = outputANW->dequeueBuffer(mOutputSurface.get(), &anb, &fenceFd);
+ if (res != OK) {
+ ALOGE("%s: Error retrieving output buffer: %s (%d)", __FUNCTION__, strerror(-res),
+ res);
+ return res;
+ }
+
+ sp<GraphicBuffer> gb = GraphicBuffer::from(anb);
+ GraphicBufferLocker gbLocker(gb);
+ res = gbLocker.lockAsync(&dstBuffer, fenceFd);
+ if (res != OK) {
+ ALOGE("%s: Error trying to lock output buffer fence: %s (%d)", __FUNCTION__,
+ strerror(-res), res);
+ outputANW->cancelBuffer(mOutputSurface.get(), anb, /*fence*/ -1);
+ return res;
+ }
+
+ if ((gb->getWidth() < maxJpegRBufferSize) || (gb->getHeight() != 1)) {
+ ALOGE("%s: Blob buffer size mismatch, expected %zux%u received %dx%d", __FUNCTION__,
+ maxJpegRBufferSize, 1, gb->getWidth(), gb->getHeight());
+ outputANW->cancelBuffer(mOutputSurface.get(), anb, /*fence*/ -1);
+ return BAD_VALUE;
+ }
+
+ size_t actualJpegRSize = 0;
+ ultrahdr::jpegr_uncompressed_struct p010;
+ ultrahdr::jpegr_compressed_struct jpegR;
+ ultrahdr::JpegR jpegREncoder;
+
+ p010.height = inputFrame.p010Buffer.height;
+ p010.width = inputFrame.p010Buffer.width;
+ p010.colorGamut = ultrahdr::ultrahdr_color_gamut::ULTRAHDR_COLORGAMUT_BT2100;
+ p010.data = inputFrame.p010Buffer.data;
+ p010.chroma_data = inputFrame.p010Buffer.dataCb;
+ // Strides are expected to be in pixels not bytes
+ p010.luma_stride = inputFrame.p010Buffer.stride / 2;
+ p010.chroma_stride = inputFrame.p010Buffer.chromaStride / 2;
+
+ jpegR.data = dstBuffer;
+ jpegR.maxLength = maxJpegRBufferSize;
+
+ ultrahdr::ultrahdr_transfer_function transferFunction;
+ switch (mP010DynamicRange) {
+ case ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_HDR10:
+ case ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_HDR10_PLUS:
+ transferFunction = ultrahdr::ultrahdr_transfer_function::ULTRAHDR_TF_PQ;
+ break;
+ default:
+ transferFunction = ultrahdr::ultrahdr_transfer_function::ULTRAHDR_TF_HLG;
+ }
+
+ if (mSupportInternalJpeg) {
+ ultrahdr::jpegr_compressed_struct jpeg;
+
+ jpeg.data = inputFrame.jpegBuffer.data;
+ jpeg.length = android::camera2::JpegProcessor::findJpegSize(inputFrame.jpegBuffer.data,
+ inputFrame.jpegBuffer.width);
+ if (jpeg.length == 0) {
+ ALOGW("%s: Failed to find input jpeg size, default to using entire buffer!",
+ __FUNCTION__);
+ jpeg.length = inputFrame.jpegBuffer.width;
+ }
+
+ if (mOutputColorSpace == ANDROID_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP_DISPLAY_P3) {
+ jpeg.colorGamut = ultrahdr::ultrahdr_color_gamut::ULTRAHDR_COLORGAMUT_P3;
+ } else {
+ jpeg.colorGamut = ultrahdr::ultrahdr_color_gamut::ULTRAHDR_COLORGAMUT_BT709;
+ }
+
+ res = jpegREncoder.encodeJPEGR(&p010, &jpeg, transferFunction, &jpegR);
+ } else {
+ const uint8_t* exifBuffer = nullptr;
+ size_t exifBufferSize = 0;
+ std::unique_ptr<ExifUtils> utils(ExifUtils::create());
+ utils->initializeEmpty();
+ utils->setFromMetadata(inputFrame.result, mStaticInfo, inputFrame.p010Buffer.width,
+ inputFrame.p010Buffer.height);
+ if (utils->generateApp1()) {
+ exifBuffer = utils->getApp1Buffer();
+ exifBufferSize = utils->getApp1Length();
+ } else {
+ ALOGE("%s: Unable to generate App1 buffer", __FUNCTION__);
+ }
+
+ ultrahdr::jpegr_exif_struct exif;
+ exif.data = reinterpret_cast<void*>(const_cast<uint8_t*>(exifBuffer));
+ exif.length = exifBufferSize;
+
+ res = jpegREncoder.encodeJPEGR(&p010, transferFunction, &jpegR, jpegQuality, &exif);
+ }
+
+ if (res != OK) {
+ ALOGE("%s: Error trying to encode JPEG/R: %s (%d)", __FUNCTION__, strerror(-res), res);
+ return res;
+ }
+
+ actualJpegRSize = jpegR.length;
+
+ size_t finalJpegRSize = actualJpegRSize + sizeof(CameraBlob);
+ if (finalJpegRSize > maxJpegRBufferSize) {
+ ALOGE("%s: Final jpeg buffer not large enough for the jpeg blob header", __FUNCTION__);
+ outputANW->cancelBuffer(mOutputSurface.get(), anb, /*fence*/ -1);
+ return NO_MEMORY;
+ }
+
+ res = native_window_set_buffers_timestamp(mOutputSurface.get(), ts);
+ if (res != OK) {
+ ALOGE("%s: Stream %d: Error setting timestamp: %s (%d)", __FUNCTION__,
+ getStreamId(), strerror(-res), res);
+ return res;
+ }
+
+ ALOGV("%s: Final jpeg size: %zu", __func__, finalJpegRSize);
+ uint8_t* header = static_cast<uint8_t *> (dstBuffer) +
+ (gb->getWidth() - sizeof(CameraBlob));
+ CameraBlob blobHeader = {
+ .blobId = CameraBlobId::JPEG,
+ .blobSizeBytes = static_cast<int32_t>(actualJpegRSize)
+ };
+ memcpy(header, &blobHeader, sizeof(CameraBlob));
+
+ if (inputFrame.requestTimeNs != -1) {
+ auto captureLatency = ns2ms(systemTime() - inputFrame.requestTimeNs);
+ mSessionStatsBuilder.incCounter(mP010StreamId, false /*dropped*/, captureLatency);
+ if (mFirstRequestLatency == -1) {
+ mFirstRequestLatency = captureLatency;
+ }
+ }
+ outputANW->queueBuffer(mOutputSurface.get(), anb, /*fence*/ -1);
+
+ return res;
+}
+
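+// Unlock any consumer buffers still held by this frame and, for failed frames, send
+// a single buffer error notification to the client.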
+void JpegRCompositeStream::releaseInputFrameLocked(InputFrame *inputFrame /*out*/) {
+ if (inputFrame == nullptr) {
+ return;
+ }
+
+ if (inputFrame->p010Buffer.data != nullptr) {
+ mP010Consumer->unlockBuffer(inputFrame->p010Buffer);
+ inputFrame->p010Buffer.data = nullptr;
+ mP010BufferAcquired = false;
+ }
+
+ if (inputFrame->jpegBuffer.data != nullptr) {
+ mBlobConsumer->unlockBuffer(inputFrame->jpegBuffer);
+ inputFrame->jpegBuffer.data = nullptr;
+ mBlobBufferAcquired = false;
+ }
+
+ if ((inputFrame->error || mErrorState) && !inputFrame->errorNotified) {
+ //TODO: Figure out correct requestId
+ notifyError(inputFrame->frameNumber, -1 /*requestId*/);
+ inputFrame->errorNotified = true;
+ mSessionStatsBuilder.incCounter(mP010StreamId, true /*dropped*/, 0 /*captureLatencyMs*/);
+ }
+}
+
+void JpegRCompositeStream::releaseInputFramesLocked(int64_t currentTs) {
+ auto it = mPendingInputFrames.begin();
+ while (it != mPendingInputFrames.end()) {
+ if (it->first <= currentTs) {
+ releaseInputFrameLocked(&it->second);
+ it = mPendingInputFrames.erase(it);
+ } else {
+ it++;
+ }
+ }
+}
+
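+// One processing pass: gather pending inputs under the lock and wait for a complete
+// frame (dropping failed ones along the way), then encode outside the lock and
+// release all frames up to the processed timestamp.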
+bool JpegRCompositeStream::threadLoop() {
+ int64_t currentTs = INT64_MAX;
+ bool newInputAvailable = false;
+
+ {
+ Mutex::Autolock l(mMutex);
+
+ if (mErrorState) {
+ // In case we landed in error state, return any pending buffers and
+ // halt all further processing.
+ compilePendingInputLocked();
+ releaseInputFramesLocked(currentTs);
+ return false;
+ }
+
+ while (!newInputAvailable) {
+ compilePendingInputLocked();
+ newInputAvailable = getNextReadyInputLocked(&currentTs);
+ if (!newInputAvailable) {
+ auto failingFrameNumber = getNextFailingInputLocked(&currentTs);
+ if (failingFrameNumber >= 0) {
+ // We cannot erase 'mPendingInputFrames[currentTs]' at this point because it is
+ // possible for two internal stream buffers to fail. In such scenario the
+ // composite stream should notify the client about a stream buffer error only
+ // once and this information is kept within 'errorNotified'.
+ // Any present failed input frames will be removed on a subsequent call to
+ // 'releaseInputFramesLocked()'.
+ releaseInputFrameLocked(&mPendingInputFrames[currentTs]);
+ currentTs = INT64_MAX;
+ }
+
+ auto ret = mInputReadyCondition.waitRelative(mMutex, kWaitDuration);
+ if (ret == TIMED_OUT) {
+ return true;
+ } else if (ret != OK) {
+ ALOGE("%s: Timed wait on condition failed: %s (%d)", __FUNCTION__,
+ strerror(-ret), ret);
+ return false;
+ }
+ }
+ }
+ }
+
+ auto res = processInputFrame(currentTs, mPendingInputFrames[currentTs]);
+ Mutex::Autolock l(mMutex);
+ if (res != OK) {
+ ALOGE("%s: Failed processing frame with timestamp: %" PRIu64 ": %s (%d)", __FUNCTION__,
+ currentTs, strerror(-res), res);
+ mPendingInputFrames[currentTs].error = true;
+ }
+
+ releaseInputFramesLocked(currentTs);
+
+ return true;
+}
+
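+// A surface maps to a Jpeg/R composite stream when framework Jpeg/R support is
+// enabled and the surface is BLOB formatted with the JPEG/R dataspace.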
+bool JpegRCompositeStream::isJpegRCompositeStream(const sp<Surface> &surface) {
+ if (CameraProviderManager::kFrameworkJpegRDisabled) {
+ return false;
+ }
+ ANativeWindow *anw = surface.get();
+ status_t err;
+ int format;
+ if ((err = anw->query(anw, NATIVE_WINDOW_FORMAT, &format)) != OK) {
+ ALOGE("%s: Failed to query Surface format: %s (%d)", __FUNCTION__, strerror(-err),
+ err);
+ return false;
+ }
+
+ int dataspace;
+ if ((err = anw->query(anw, NATIVE_WINDOW_DEFAULT_DATASPACE, &dataspace)) != OK) {
+ ALOGE("%s: Failed to query Surface dataspace: %s (%d)", __FUNCTION__, strerror(-err),
+ err);
+ return false;
+ }
+
+ if ((format == HAL_PIXEL_FORMAT_BLOB) && (dataspace == static_cast<int>(kJpegRDataSpace))) {
+ return true;
+ }
+
+ return false;
+}
+
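+// Map the requested dynamic range profile to the internal P010 stream's profile and
+// dataspace: PQ for HDR10/HDR10+, HLG for the 10-bit Dolby Vision profiles, and the
+// P010 defaults otherwise.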
+void JpegRCompositeStream::deriveDynamicRangeAndDataspace(int64_t dynamicProfile,
+ int64_t* /*out*/dynamicRange, int64_t* /*out*/dataSpace) {
+ if ((dynamicRange == nullptr) || (dataSpace == nullptr)) {
+ return;
+ }
+
+ switch (dynamicProfile) {
+ case ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_HDR10:
+ case ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_HDR10_PLUS:
+ *dynamicRange = dynamicProfile;
+ *dataSpace = HAL_DATASPACE_BT2020_ITU_PQ;
+ break;
+ case ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_DOLBY_VISION_10B_HDR_REF:
+ case ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_DOLBY_VISION_10B_HDR_REF_PO:
+ case ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_DOLBY_VISION_10B_HDR_OEM:
+ case ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_DOLBY_VISION_10B_HDR_OEM_PO:
+ *dynamicRange = dynamicProfile;
+ *dataSpace = HAL_DATASPACE_BT2020_ITU_HLG;
+ break;
+ default:
+ *dynamicRange = kP010DefaultDynamicRange;
+ *dataSpace = kP010DefaultDataSpace;
+ }
+
+}
+
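+// Create the hidden internal streams backing the composite output: a P010 stream is
+// always created; an additional BLOB jpeg stream is added when the device supports
+// capturing SDR jpeg concurrently with the configured dynamic range profile.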
+status_t JpegRCompositeStream::createInternalStreams(const std::vector<sp<Surface>>& consumers,
+ bool /*hasDeferredConsumer*/, uint32_t width, uint32_t height, int format,
+ camera_stream_rotation_t rotation, int *id, const std::string& physicalCameraId,
+ const std::unordered_set<int32_t> &sensorPixelModesUsed,
+ std::vector<int> *surfaceIds,
+ int /*streamSetId*/, bool /*isShared*/, int32_t colorSpace,
+ int64_t dynamicProfile, int64_t streamUseCase, bool useReadoutTimestamp) {
+ sp<CameraDeviceBase> device = mDevice.promote();
+ if (!device.get()) {
+ ALOGE("%s: Invalid camera device!", __FUNCTION__);
+ return NO_INIT;
+ }
+
+ deriveDynamicRangeAndDataspace(dynamicProfile, &mP010DynamicRange, &mP010DataSpace);
+ mSupportInternalJpeg = CameraProviderManager::isConcurrentDynamicRangeCaptureSupported(
+ mStaticInfo, mP010DynamicRange,
+ ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD);
+
+ sp<IGraphicBufferProducer> producer;
+ sp<IGraphicBufferConsumer> consumer;
+ BufferQueue::createBufferQueue(&producer, &consumer);
+ mP010Consumer = new CpuConsumer(consumer, /*maxLockedBuffers*/1, /*controlledByApp*/ true);
+ mP010Consumer->setFrameAvailableListener(this);
+ mP010Consumer->setName(String8("Camera3-P010CompositeStream"));
+ mP010Surface = new Surface(producer);
+
+ auto ret = device->createStream(mP010Surface, width, height, kP010PixelFormat,
+ static_cast<android_dataspace>(mP010DataSpace), rotation,
+ id, physicalCameraId, sensorPixelModesUsed, surfaceIds,
+ camera3::CAMERA3_STREAM_SET_ID_INVALID, false /*isShared*/, false /*isMultiResolution*/,
+ GRALLOC_USAGE_SW_READ_OFTEN, mP010DynamicRange, streamUseCase,
+ OutputConfiguration::TIMESTAMP_BASE_DEFAULT, OutputConfiguration::MIRROR_MODE_AUTO,
+ ANDROID_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP_UNSPECIFIED, useReadoutTimestamp);
+ if (ret == OK) {
+ mP010StreamId = *id;
+ mP010SurfaceId = (*surfaceIds)[0];
+ mOutputSurface = consumers[0];
+ } else {
+ return ret;
+ }
+
+ if (mSupportInternalJpeg) {
+ BufferQueue::createBufferQueue(&producer, &consumer);
+ mBlobConsumer = new CpuConsumer(consumer, /*maxLockedBuffers*/ 1, /*controlledByApp*/ true);
+ mBlobConsumer->setFrameAvailableListener(this);
+ mBlobConsumer->setName(String8("Camera3-JpegRCompositeStream"));
+ mBlobSurface = new Surface(producer);
+ std::vector<int> blobSurfaceId;
+ ret = device->createStream(mBlobSurface, width, height, format,
+ kJpegDataSpace, rotation, &mBlobStreamId, physicalCameraId, sensorPixelModesUsed,
+ &blobSurfaceId,
+ /*streamSetId*/ camera3::CAMERA3_STREAM_SET_ID_INVALID,
+ /*isShared*/ false,
+ /*isMultiResolution*/ false,
+ /*consumerUsage*/ GRALLOC_USAGE_SW_READ_OFTEN,
+ /*dynamicProfile*/ ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD,
+ streamUseCase,
+ /*timestampBase*/ OutputConfiguration::TIMESTAMP_BASE_DEFAULT,
+ /*mirrorMode*/ OutputConfiguration::MIRROR_MODE_AUTO,
+ /*colorSpace*/ colorSpace, useReadoutTimestamp);
+ if (ret == OK) {
+ mBlobSurfaceId = blobSurfaceId[0];
+ } else {
+ return ret;
+ }
+
+ ret = registerCompositeStreamListener(mBlobStreamId);
+ if (ret != OK) {
+ ALOGE("%s: Failed to register jpeg stream listener!", __FUNCTION__);
+ return ret;
+ }
+ }
+
+ ret = registerCompositeStreamListener(getStreamId());
+ if (ret != OK) {
+ ALOGE("%s: Failed to register P010 stream listener!", __FUNCTION__);
+ return ret;
+ }
+
+ mOutputColorSpace = colorSpace;
+ mOutputStreamUseCase = streamUseCase;
+ mBlobWidth = width;
+ mBlobHeight = height;
+
+ return ret;
+}
+
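+// Connect and configure the client-visible output surface (BLOB format, CPU usage,
+// combined producer/consumer buffer count) and start the processing thread.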
+status_t JpegRCompositeStream::configureStream() {
+ if (isRunning()) {
+ // Processing thread is already running, nothing more to do.
+ return NO_ERROR;
+ }
+
+ if (mOutputSurface.get() == nullptr) {
+ ALOGE("%s: No valid output surface set!", __FUNCTION__);
+ return NO_INIT;
+ }
+
+ auto res = mOutputSurface->connect(NATIVE_WINDOW_API_CAMERA, mProducerListener);
+ if (res != OK) {
+ ALOGE("%s: Unable to connect to native window for stream %d",
+ __FUNCTION__, mP010StreamId);
+ return res;
+ }
+
+ if ((res = native_window_set_buffers_format(mOutputSurface.get(), HAL_PIXEL_FORMAT_BLOB))
+ != OK) {
+ ALOGE("%s: Unable to configure stream buffer format for stream %d", __FUNCTION__,
+ mP010StreamId);
+ return res;
+ }
+
+ if ((res = native_window_set_usage(mOutputSurface.get(),
+ GRALLOC_USAGE_SW_READ_OFTEN | GRALLOC_USAGE_SW_WRITE_OFTEN)) != OK) {
+ ALOGE("%s: Unable to configure stream buffer usage for stream %d", __FUNCTION__,
+ mP010StreamId);
+ return res;
+ }
+
+ int maxProducerBuffers;
+ ANativeWindow *anw = mP010Surface.get();
+ if ((res = anw->query(anw, NATIVE_WINDOW_MIN_UNDEQUEUED_BUFFERS, &maxProducerBuffers)) != OK) {
+ ALOGE("%s: Unable to query consumer undequeued"
+ " buffer count for stream %d", __FUNCTION__, mP010StreamId);
+ return res;
+ }
+
+ ANativeWindow *anwConsumer = mOutputSurface.get();
+ int maxConsumerBuffers;
+ if ((res = anwConsumer->query(anwConsumer, NATIVE_WINDOW_MIN_UNDEQUEUED_BUFFERS,
+ &maxConsumerBuffers)) != OK) {
+ ALOGE("%s: Unable to query consumer undequeued"
+ " buffer count for stream %d", __FUNCTION__, mP010StreamId);
+ return res;
+ }
+
+ if ((res = native_window_set_buffer_count(
+ anwConsumer, maxProducerBuffers + maxConsumerBuffers)) != OK) {
+ ALOGE("%s: Unable to set buffer count for stream %d", __FUNCTION__, mP010StreamId);
+ return res;
+ }
+
+ mSessionStatsBuilder.addStream(mP010StreamId);
+
+ run("JpegRCompositeStreamProc");
+
+ return NO_ERROR;
+}
+
+status_t JpegRCompositeStream::deleteInternalStreams() {
+ // The 'CameraDeviceClient' parent will delete the P010 stream
+ requestExit();
+
+ auto ret = join();
+ if (ret != OK) {
+ ALOGE("%s: Failed to join with the main processing thread: %s (%d)", __FUNCTION__,
+ strerror(-ret), ret);
+ }
+
+ if (mBlobStreamId >= 0) {
+ // Camera devices may not be valid after switching to offline mode.
+ // In this case, all offline streams including internal composite streams
+ // are managed and released by the offline session.
+ sp<CameraDeviceBase> device = mDevice.promote();
+ if (device.get() != nullptr) {
+ ret = device->deleteStream(mBlobStreamId);
+ }
+
+ mBlobStreamId = -1;
+ }
+
+ if (mOutputSurface != nullptr) {
+ mOutputSurface->disconnect(NATIVE_WINDOW_API_CAMERA);
+ mOutputSurface.clear();
+ }
+
+ return ret;
+}
+
+void JpegRCompositeStream::onFrameAvailable(const BufferItem& item) {
+ if (item.mDataSpace == kJpegDataSpace) {
+ ALOGV("%s: Jpeg buffer with ts: %" PRIu64 " ms. arrived!",
+ __func__, ns2ms(item.mTimestamp));
+
+ Mutex::Autolock l(mMutex);
+ if (!mErrorState) {
+ mInputJpegBuffers.push_back(item.mTimestamp);
+ mInputReadyCondition.signal();
+ }
+ } else if (item.mDataSpace == static_cast<android_dataspace_t>(mP010DataSpace)) {
+ ALOGV("%s: P010 buffer with ts: %" PRIu64 " ms. arrived!", __func__,
+ ns2ms(item.mTimestamp));
+
+ Mutex::Autolock l(mMutex);
+ if (!mErrorState) {
+ mInputP010Buffers.push_back(item.mTimestamp);
+ mInputReadyCondition.signal();
+ }
+ } else {
+ ALOGE("%s: Unexpected data space: 0x%x", __FUNCTION__, item.mDataSpace);
+ }
+}
+
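+// Inject the internal stream and surface ids into the capture request so the HAL
+// targets the hidden streams; the P010 stream id doubles as the composite stream id.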
+status_t JpegRCompositeStream::insertGbp(SurfaceMap* /*out*/outSurfaceMap,
+ Vector<int32_t> * /*out*/outputStreamIds, int32_t* /*out*/currentStreamId) {
+ if (outputStreamIds == nullptr) {
+ return BAD_VALUE;
+ }
+
+ if (outSurfaceMap->find(mP010StreamId) == outSurfaceMap->end()) {
+ outputStreamIds->push_back(mP010StreamId);
+ }
+ (*outSurfaceMap)[mP010StreamId].push_back(mP010SurfaceId);
+
+ if (mSupportInternalJpeg) {
+ if (outSurfaceMap->find(mBlobStreamId) == outSurfaceMap->end()) {
+ outputStreamIds->push_back(mBlobStreamId);
+ }
+ (*outSurfaceMap)[mBlobStreamId].push_back(mBlobSurfaceId);
+ }
+
+ if (currentStreamId != nullptr) {
+ *currentStreamId = mP010StreamId;
+ }
+
+ return NO_ERROR;
+}
+
+status_t JpegRCompositeStream::insertCompositeStreamIds(
+ std::vector<int32_t>* compositeStreamIds /*out*/) {
+ if (compositeStreamIds == nullptr) {
+ return BAD_VALUE;
+ }
+
+ compositeStreamIds->push_back(mP010StreamId);
+ if (mSupportInternalJpeg) {
+ compositeStreamIds->push_back(mBlobStreamId);
+ }
+
+ return OK;
+}
+
+void JpegRCompositeStream::onResultError(const CaptureResultExtras& resultExtras) {
+ // Processing can continue even in case of result errors.
+ // At the moment Jpeg/R composite stream processing relies mainly on static camera
+ // characteristics data. The actual result data can be used for the jpeg quality but
+ // in case it is absent we can default to maximum.
+ eraseResult(resultExtras.frameNumber);
+ mSessionStatsBuilder.incResultCounter(true /*dropped*/);
+}
+
+bool JpegRCompositeStream::onStreamBufferError(const CaptureResultExtras& resultExtras) {
+ bool ret = false;
+ // Buffer errors concerning internal composite streams should not be directly visible to
+ // camera clients. They must only receive a single buffer error with the public composite
+ // stream id.
+ if ((resultExtras.errorStreamId == mP010StreamId) ||
+ (resultExtras.errorStreamId == mBlobStreamId)) {
+ flagAnErrorFrameNumber(resultExtras.frameNumber);
+ ret = true;
+ }
+
+ return ret;
+}
+
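+// Translate the requested Jpeg/R stream info into the internal streams that
+// implement it: a P010 stream matching the requested dynamic range profile plus,
+// when the device supports concurrent standard/HDR capture, a standard dynamic
+// range JPEG blob stream.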
+status_t JpegRCompositeStream::getCompositeStreamInfo(const OutputStreamInfo &streamInfo,
+ const CameraMetadata& staticInfo,
+ std::vector<OutputStreamInfo>* compositeOutput /*out*/) {
+ if (compositeOutput == nullptr) {
+ return BAD_VALUE;
+ }
+
+ int64_t dynamicRange, dataSpace;
+ deriveDynamicRangeAndDataspace(streamInfo.dynamicRangeProfile, &dynamicRange, &dataSpace);
+
+ compositeOutput->clear();
+ compositeOutput->push_back({});
+ (*compositeOutput)[0].width = streamInfo.width;
+ (*compositeOutput)[0].height = streamInfo.height;
+ (*compositeOutput)[0].format = kP010PixelFormat;
+ (*compositeOutput)[0].dataSpace = static_cast<android_dataspace_t>(dataSpace);
+ (*compositeOutput)[0].consumerUsage = GRALLOC_USAGE_SW_READ_OFTEN;
+ (*compositeOutput)[0].dynamicRangeProfile = dynamicRange;
+ (*compositeOutput)[0].colorSpace =
+ ANDROID_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP_UNSPECIFIED;
+
+ if (CameraProviderManager::isConcurrentDynamicRangeCaptureSupported(staticInfo,
+ streamInfo.dynamicRangeProfile,
+ ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD)) {
+ compositeOutput->push_back({});
+ (*compositeOutput)[1].width = streamInfo.width;
+ (*compositeOutput)[1].height = streamInfo.height;
+ (*compositeOutput)[1].format = HAL_PIXEL_FORMAT_BLOB;
+ (*compositeOutput)[1].dataSpace = kJpegDataSpace;
+ (*compositeOutput)[1].consumerUsage = GRALLOC_USAGE_SW_READ_OFTEN;
+ (*compositeOutput)[1].dynamicRangeProfile =
+ ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD;
+ (*compositeOutput)[1].colorSpace = streamInfo.colorSpace;
+ }
+
+ return NO_ERROR;
+}
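+// Session statistics are reported against the composite Jpeg/R output rather than
+// the internal P010 stream, hence the blob dimensions and Jpeg/R data space below.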
+
+void JpegRCompositeStream::getStreamStats(hardware::CameraStreamStats* streamStats) {
+    // Nothing to report until the first request has completed.
+    if ((streamStats == nullptr) || (mFirstRequestLatency == -1)) {
+ return;
+ }
+
+ bool deviceError;
+ std::map<int, StreamStats> stats;
+ mSessionStatsBuilder.buildAndReset(&streamStats->mRequestCount, &streamStats->mErrorCount,
+ &deviceError, &stats);
+ if (stats.find(mP010StreamId) != stats.end()) {
+ streamStats->mWidth = mBlobWidth;
+ streamStats->mHeight = mBlobHeight;
+ streamStats->mFormat = HAL_PIXEL_FORMAT_BLOB;
+ streamStats->mDataSpace = static_cast<int>(kJpegRDataSpace);
+ streamStats->mDynamicRangeProfile = mP010DynamicRange;
+ streamStats->mColorSpace = mOutputColorSpace;
+ streamStats->mStreamUseCase = mOutputStreamUseCase;
+ streamStats->mStartLatencyMs = mFirstRequestLatency;
+ streamStats->mHistogramType = hardware::CameraStreamStats::HISTOGRAM_TYPE_CAPTURE_LATENCY;
+ streamStats->mHistogramBins.assign(stats[mP010StreamId].mCaptureLatencyBins.begin(),
+ stats[mP010StreamId].mCaptureLatencyBins.end());
+ streamStats->mHistogramCounts.assign(stats[mP010StreamId].mCaptureLatencyHistogram.begin(),
+ stats[mP010StreamId].mCaptureLatencyHistogram.end());
+ }
+}
+
+} // namespace camera3
+} // namespace android
diff --git a/services/camera/libcameraservice/api2/JpegRCompositeStream.h b/services/camera/libcameraservice/api2/JpegRCompositeStream.h
new file mode 100644
index 0000000..016d57c
--- /dev/null
+++ b/services/camera/libcameraservice/api2/JpegRCompositeStream.h
@@ -0,0 +1,155 @@
+/*
+ * Copyright (C) 2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef ANDROID_SERVERS_CAMERA_CAMERA3_JPEG_R_COMPOSITE_STREAM_H
+#define ANDROID_SERVERS_CAMERA_CAMERA3_JPEG_R_COMPOSITE_STREAM_H
+
+#include <gui/CpuConsumer.h>
+#include "aidl/android/hardware/graphics/common/Dataspace.h"
+#include "system/graphics-base-v1.1.h"
+
+#include "api1/client2/JpegProcessor.h"
+#include "utils/SessionStatsBuilder.h"
+
+#include "CompositeStream.h"
+
+namespace android {
+
+class CameraDeviceClient;
+class CameraMetadata;
+class Surface;
+
+namespace camera3 {
+
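+/*
+ * Composite stream that produces a client-visible Jpeg/R output from two internal
+ * streams: a 10-bit P010 stream and, when the device supports concurrent
+ * standard/HDR capture, an internal JPEG blob stream. Incoming buffers from both
+ * streams are matched by timestamp on a dedicated processing thread and combined
+ * into the final Jpeg/R output delivered to the client surface.
+ *
+ * Rough lifecycle (illustrative sketch, not a verbatim call sequence):
+ *   createInternalStreams() -> configureStream() -> insertGbp() per capture request
+ *   -> onFrameAvailable() / result notifications -> threadLoop()/processInputFrame()
+ *   -> deleteInternalStreams()
+ */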
+class JpegRCompositeStream : public CompositeStream, public Thread,
+ public CpuConsumer::FrameAvailableListener {
+
+public:
+ JpegRCompositeStream(sp<CameraDeviceBase> device,
+ wp<hardware::camera2::ICameraDeviceCallbacks> cb);
+ ~JpegRCompositeStream() override;
+
+ static bool isJpegRCompositeStream(const sp<Surface> &surface);
+
+ // CompositeStream overrides
+ status_t createInternalStreams(const std::vector<sp<Surface>>& consumers,
+ bool hasDeferredConsumer, uint32_t width, uint32_t height, int format,
+ camera_stream_rotation_t rotation, int *id, const std::string& physicalCameraId,
+ const std::unordered_set<int32_t> &sensorPixelModesUsed,
+ std::vector<int> *surfaceIds,
+ int streamSetId, bool isShared, int32_t colorSpace,
+ int64_t dynamicProfile, int64_t streamUseCase, bool useReadoutTimestamp) override;
+ status_t deleteInternalStreams() override;
+ status_t configureStream() override;
+ status_t insertGbp(SurfaceMap* /*out*/outSurfaceMap, Vector<int32_t>* /*out*/outputStreamIds,
+ int32_t* /*out*/currentStreamId) override;
+ status_t insertCompositeStreamIds(std::vector<int32_t>* compositeStreamIds /*out*/) override;
+ int getStreamId() override { return mP010StreamId; }
+
+ // CpuConsumer listener implementation
+ void onFrameAvailable(const BufferItem& item) override;
+
+ // Return stream information about the internal camera streams
+ static status_t getCompositeStreamInfo(const OutputStreamInfo &streamInfo,
+ const CameraMetadata& ch, std::vector<OutputStreamInfo>* compositeOutput /*out*/);
+
+ // Get composite stream stats
+ void getStreamStats(hardware::CameraStreamStats* streamStats) override;
+
+protected:
+
+ bool threadLoop() override;
+ bool onStreamBufferError(const CaptureResultExtras& resultExtras) override;
+ void onResultError(const CaptureResultExtras& resultExtras) override;
+
+private:
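+    // All inputs associated with a single capture: the locked P010 and (optional)
+    // jpeg buffers, the capture result metadata, and bookkeeping for error
+    // propagation and latency statistics.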
+ struct InputFrame {
+ CpuConsumer::LockedBuffer p010Buffer;
+ CpuConsumer::LockedBuffer jpegBuffer;
+ CameraMetadata result;
+ bool error;
+ bool errorNotified;
+ int64_t frameNumber;
+ int32_t requestId;
+ nsecs_t requestTimeNs;
+
+ InputFrame() : error(false), errorNotified(false), frameNumber(-1), requestId(-1),
+ requestTimeNs(-1) { }
+ };
+
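+    // Combine the paired P010/jpeg inputs identified by timestamp 'ts' into the
+    // Jpeg/R output delivered on mOutputSurface.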
+ status_t processInputFrame(nsecs_t ts, const InputFrame &inputFrame);
+
+ // Buffer/Results handling
+ void compilePendingInputLocked();
+ void releaseInputFrameLocked(InputFrame *inputFrame /*out*/);
+ void releaseInputFramesLocked(int64_t currentTs);
+
+    // Find the first complete and valid frame with the smallest timestamp
+ bool getNextReadyInputLocked(int64_t *currentTs /*inout*/);
+
+    // Find the failing frame with the smallest timestamp and return its frame number
+ int64_t getNextFailingInputLocked(int64_t *currentTs /*inout*/);
+
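+    // Map the requested dynamic range profile to the dynamic range and data space
+    // used for the internal P010 stream (see the kP010Default* constants).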
+ static void deriveDynamicRangeAndDataspace(int64_t dynamicProfile, int64_t* /*out*/dynamicRange,
+ int64_t* /*out*/dataSpace);
+
+ static const nsecs_t kWaitDuration = 10000000; // 10 ms
+ static const auto kP010PixelFormat = HAL_PIXEL_FORMAT_YCBCR_P010;
+ static const auto kP010DefaultDataSpace = HAL_DATASPACE_BT2020_ITU_HLG;
+ static const auto kP010DefaultDynamicRange =
+ ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_HLG10;
+ static const auto kJpegDataSpace = HAL_DATASPACE_V0_JFIF;
+ static const auto kJpegRDataSpace =
+ aidl::android::hardware::graphics::common::Dataspace::JPEG_R;
+
+ bool mSupportInternalJpeg = false;
+ int64_t mP010DataSpace = HAL_DATASPACE_BT2020_HLG;
+ int64_t mP010DynamicRange =
+ ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_HLG10;
+ int mBlobStreamId, mBlobSurfaceId, mP010StreamId, mP010SurfaceId;
+ size_t mBlobWidth, mBlobHeight;
+ sp<CpuConsumer> mBlobConsumer, mP010Consumer;
+ bool mP010BufferAcquired, mBlobBufferAcquired;
+ sp<Surface> mP010Surface, mBlobSurface, mOutputSurface;
+ int32_t mOutputColorSpace;
+ int64_t mOutputStreamUseCase;
+ nsecs_t mFirstRequestLatency;
+ sp<ProducerListener> mProducerListener;
+
+ ssize_t mMaxJpegBufferSize;
+ ssize_t mUHRMaxJpegBufferSize;
+
+ camera3::Size mDefaultMaxJpegSize;
+ camera3::Size mUHRMaxJpegSize;
+
+ // Keep all incoming P010 buffer timestamps pending further processing.
+ std::vector<int64_t> mInputP010Buffers;
+
+ // Keep all incoming Jpeg/Blob buffer timestamps pending further processing.
+ std::vector<int64_t> mInputJpegBuffers;
+
+ // Map of all input frames pending further processing.
+ std::unordered_map<int64_t, InputFrame> mPendingInputFrames;
+
+ const CameraMetadata mStaticInfo;
+
+ SessionStatsBuilder mSessionStatsBuilder;
+};
+
+} // namespace camera3
+} // namespace android
+
+#endif // ANDROID_SERVERS_CAMERA_CAMERA3_JPEG_R_COMPOSITE_STREAM_H