Merge Android U (ab/10368041)
Bug: 291102124
Merged-In: Ied8e295ae059db07463ba06d3e6d747659b2757f
Change-Id: Ib79234b765308e957b682871b2178b66769f5660
diff --git a/services/camera/libcameraservice/utils/CameraServiceProxyWrapper.cpp b/services/camera/libcameraservice/utils/CameraServiceProxyWrapper.cpp
index f6ad2fe..d07bf6d 100644
--- a/services/camera/libcameraservice/utils/CameraServiceProxyWrapper.cpp
+++ b/services/camera/libcameraservice/utils/CameraServiceProxyWrapper.cpp
@@ -28,32 +28,41 @@
namespace android {
-using hardware::ICameraServiceProxy;
+using hardware::CameraExtensionSessionStats;
using hardware::CameraSessionStats;
+using hardware::ICameraServiceProxy;
-Mutex CameraServiceProxyWrapper::sProxyMutex;
-sp<hardware::ICameraServiceProxy> CameraServiceProxyWrapper::sCameraServiceProxy;
-
-Mutex CameraServiceProxyWrapper::mLock;
-std::map<std::string, std::shared_ptr<CameraServiceProxyWrapper::CameraSessionStatsWrapper>>
- CameraServiceProxyWrapper::mSessionStatsMap;
+namespace {
+// Sentinel value to be returned when an extension session with a stale or invalid key is reported.
+const std::string POISON_EXT_STATS_KEY("poisoned_stats");
+} // anonymous namespace
/**
* CameraSessionStatsWrapper functions
*/
-void CameraServiceProxyWrapper::CameraSessionStatsWrapper::onOpen() {
- Mutex::Autolock l(mLock);
-
- updateProxyDeviceState(mSessionStats);
+void CameraServiceProxyWrapper::CameraSessionStatsWrapper::updateProxyDeviceState(
+ sp<hardware::ICameraServiceProxy>& proxyBinder) {
+ if (proxyBinder == nullptr) return;
+ proxyBinder->notifyCameraState(mSessionStats);
}
-void CameraServiceProxyWrapper::CameraSessionStatsWrapper::onClose(int32_t latencyMs) {
+void CameraServiceProxyWrapper::CameraSessionStatsWrapper::onOpen(
+ sp<hardware::ICameraServiceProxy>& proxyBinder) {
+ Mutex::Autolock l(mLock);
+ updateProxyDeviceState(proxyBinder);
+}
+
+void CameraServiceProxyWrapper::CameraSessionStatsWrapper::onClose(
+ sp<hardware::ICameraServiceProxy>& proxyBinder, int32_t latencyMs,
+ bool deviceError) {
Mutex::Autolock l(mLock);
mSessionStats.mNewCameraState = CameraSessionStats::CAMERA_STATE_CLOSED;
mSessionStats.mLatencyMs = latencyMs;
- updateProxyDeviceState(mSessionStats);
+ mSessionStats.mDeviceError = deviceError;
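+ // Clear the per-open session counter; onActive() increments it for each capture session.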
+ mSessionStats.mSessionIndex = 0;
+ updateProxyDeviceState(proxyBinder);
}
void CameraServiceProxyWrapper::CameraSessionStatsWrapper::onStreamConfigured(
@@ -68,12 +77,14 @@
}
}
-void CameraServiceProxyWrapper::CameraSessionStatsWrapper::onActive(float maxPreviewFps) {
+void CameraServiceProxyWrapper::CameraSessionStatsWrapper::onActive(
+ sp<hardware::ICameraServiceProxy>& proxyBinder, float maxPreviewFps) {
Mutex::Autolock l(mLock);
mSessionStats.mNewCameraState = CameraSessionStats::CAMERA_STATE_ACTIVE;
mSessionStats.mMaxPreviewFps = maxPreviewFps;
- updateProxyDeviceState(mSessionStats);
+ mSessionStats.mSessionIndex++;
+ updateProxyDeviceState(proxyBinder);
// Reset mCreationDuration to -1 to distinguish between 1st session
// after configuration, and all other sessions after configuration.
@@ -81,6 +92,7 @@
}
void CameraServiceProxyWrapper::CameraSessionStatsWrapper::onIdle(
+ sp<hardware::ICameraServiceProxy>& proxyBinder,
int64_t requestCount, int64_t resultErrorCount, bool deviceError,
const std::string& userTag, int32_t videoStabilizationMode,
const std::vector<hardware::CameraStreamStats>& streamStats) {
@@ -93,10 +105,76 @@
mSessionStats.mUserTag = userTag;
mSessionStats.mVideoStabilizationMode = videoStabilizationMode;
mSessionStats.mStreamStats = streamStats;
- updateProxyDeviceState(mSessionStats);
+
+ updateProxyDeviceState(proxyBinder);
mSessionStats.mInternalReconfigure = 0;
mSessionStats.mStreamStats.clear();
+ mSessionStats.mCameraExtensionSessionStats = {};
+}
+
+int64_t CameraServiceProxyWrapper::CameraSessionStatsWrapper::getLogId() {
+ Mutex::Autolock l(mLock);
+ return mSessionStats.mLogId;
+}
+
+std::string CameraServiceProxyWrapper::CameraSessionStatsWrapper::updateExtensionSessionStats(
+ const hardware::CameraExtensionSessionStats& extStats) {
+ Mutex::Autolock l(mLock);
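+ // A stats key is only generated once the camera turns ACTIVE; before that, matching reports
+ // are cached without a key, while mismatched or stale reports are poisoned or dropped.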
+ CameraExtensionSessionStats& currStats = mSessionStats.mCameraExtensionSessionStats;
+ if (currStats.key != extStats.key) {
+ // Mismatched keys. Extension stats were likely reported for a closed session.
+ ALOGW("%s: mismatched extension stats key: current='%s' reported='%s'. Dropping stats.",
+ __FUNCTION__, toStdString(currStats.key).c_str(), toStdString(extStats.key).c_str());
+ return POISON_EXT_STATS_KEY; // return poisoned key so future calls are
+ // definitely dropped.
+ }
+
+ // Matching keys...
+ if (currStats.key.size()) {
+ // Non-empty matching keys: overwrite.
+ ALOGV("%s: Overwriting extension session stats: %s", __FUNCTION__,
+ extStats.toString().c_str());
+ currStats = extStats;
+ return toStdString(currStats.key);
+ }
+
+ // Matching empty keys...
+ if (mSessionStats.mClientName != toStdString(extStats.clientName)) {
+ ALOGW("%s: extension stats reported for unexpected package: current='%s' reported='%s'. "
+ "Dropping stats.", __FUNCTION__,
+ mSessionStats.mClientName.c_str(),
+ toStdString(extStats.clientName).c_str());
+ return POISON_EXT_STATS_KEY;
+ }
+
+ // Matching empty keys for the current client...
+ if (mSessionStats.mNewCameraState == CameraSessionStats::CAMERA_STATE_OPEN ||
+ mSessionStats.mNewCameraState == CameraSessionStats::CAMERA_STATE_IDLE) {
+ // Camera is open, but not active. It is possible that the active callback hasn't
+ // occurred yet. Keep the stats, but don't associate it with any session.
+ ALOGV("%s: extension stat reported for an open, but not active camera. "
+ "Saving stats, but not generating key.", __FUNCTION__);
+ currStats = extStats;
+ return {}; // Subsequent calls will handle setting the correct key.
+ }
+
+ if (mSessionStats.mNewCameraState == CameraSessionStats::CAMERA_STATE_ACTIVE) {
+ // camera is active. First call for the session!
+ currStats = extStats;
+
+ // Generate a new key from logId and sessionIndex.
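+ // e.g. sessionIndex 2 and logId 123456789 produce the key "2/123456789".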
+ std::ostringstream key;
+ key << mSessionStats.mSessionIndex << '/' << mSessionStats.mLogId;
+ currStats.key = String16(key.str().c_str());
+ ALOGV("%s: New extension session stats: %s", __FUNCTION__, currStats.toString().c_str());
+ return toStdString(currStats.key);
+ }
+
+ // Camera is closed. Probably a stale call.
+ ALOGW("%s: extension stats reported for closed camera id '%s'. Dropping stats.",
+ __FUNCTION__, mSessionStats.mCameraId.c_str());
+ return {};
}
/**
@@ -105,19 +183,26 @@
sp<ICameraServiceProxy> CameraServiceProxyWrapper::getCameraServiceProxy() {
#ifndef __BRILLO__
- Mutex::Autolock al(sProxyMutex);
- if (sCameraServiceProxy == nullptr) {
- sp<IServiceManager> sm = defaultServiceManager();
- // Use checkService because cameraserver normally starts before the
- // system server and the proxy service. So the long timeout that getService
- // has before giving up is inappropriate.
- sp<IBinder> binder = sm->checkService(String16("media.camera.proxy"));
- if (binder != nullptr) {
- sCameraServiceProxy = interface_cast<ICameraServiceProxy>(binder);
- }
+ Mutex::Autolock al(mProxyMutex);
+ if (mCameraServiceProxy == nullptr) {
+ mCameraServiceProxy = getDefaultCameraServiceProxy();
}
#endif
- return sCameraServiceProxy;
+ return mCameraServiceProxy;
+}
+
+sp<hardware::ICameraServiceProxy> CameraServiceProxyWrapper::getDefaultCameraServiceProxy() {
+#ifndef __BRILLO__
+ sp<IServiceManager> sm = defaultServiceManager();
+ // Use checkService because cameraserver normally starts before the
+ // system server and the proxy service. So the long timeout that getService
+ // has before giving up is inappropriate.
+ sp<IBinder> binder = sm->checkService(String16("media.camera.proxy"));
+ if (binder != nullptr) {
+ return interface_cast<ICameraServiceProxy>(binder);
+ }
+#endif
+ return nullptr;
}
void CameraServiceProxyWrapper::pingCameraServiceProxy() {
@@ -141,10 +226,19 @@
return ret;
}
-void CameraServiceProxyWrapper::updateProxyDeviceState(const CameraSessionStats& sessionStats) {
+int CameraServiceProxyWrapper::getAutoframingOverride(const std::string& packageName) {
sp<ICameraServiceProxy> proxyBinder = getCameraServiceProxy();
- if (proxyBinder == nullptr) return;
- proxyBinder->notifyCameraState(sessionStats);
+ if (proxyBinder == nullptr) {
+ return ANDROID_CONTROL_AUTOFRAMING_OFF;
+ }
+ int ret = 0;
+ auto status = proxyBinder->getAutoframingOverride(packageName, &ret);
+ if (!status.isOk()) {
+ ALOGE("%s: Failed during autoframing override query: %s", __FUNCTION__,
+ status.exceptionMessage().c_str());
+ }
+
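+ // On failure 'ret' keeps its initial value of 0 (autoframing off).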
+ return ret;
}
void CameraServiceProxyWrapper::logStreamConfigured(const std::string& id,
@@ -152,12 +246,12 @@
std::shared_ptr<CameraSessionStatsWrapper> sessionStats;
{
Mutex::Autolock l(mLock);
- sessionStats = mSessionStatsMap[id];
- if (sessionStats == nullptr) {
+ if (mSessionStatsMap.count(id) == 0) {
ALOGE("%s: SessionStatsMap should contain camera %s",
__FUNCTION__, id.c_str());
return;
}
+ sessionStats = mSessionStatsMap[id];
}
ALOGV("%s: id %s, operatingMode %d, internalConfig %d, latencyMs %d",
@@ -169,16 +263,17 @@
std::shared_ptr<CameraSessionStatsWrapper> sessionStats;
{
Mutex::Autolock l(mLock);
- sessionStats = mSessionStatsMap[id];
- if (sessionStats == nullptr) {
+ if (mSessionStatsMap.count(id) == 0) {
ALOGE("%s: SessionStatsMap should contain camera %s when logActive is called",
__FUNCTION__, id.c_str());
return;
}
+ sessionStats = mSessionStatsMap[id];
}
ALOGV("%s: id %s", __FUNCTION__, id.c_str());
- sessionStats->onActive(maxPreviewFps);
+ sp<hardware::ICameraServiceProxy> proxyBinder = getCameraServiceProxy();
+ sessionStats->onActive(proxyBinder, maxPreviewFps);
}
void CameraServiceProxyWrapper::logIdle(const std::string& id,
@@ -188,13 +283,12 @@
std::shared_ptr<CameraSessionStatsWrapper> sessionStats;
{
Mutex::Autolock l(mLock);
- sessionStats = mSessionStatsMap[id];
- }
-
- if (sessionStats == nullptr) {
- ALOGE("%s: SessionStatsMap should contain camera %s when logIdle is called",
+ if (mSessionStatsMap.count(id) == 0) {
+ ALOGE("%s: SessionStatsMap should contain camera %s when logIdle is called",
__FUNCTION__, id.c_str());
- return;
+ return;
+ }
+ sessionStats = mSessionStatsMap[id];
}
ALOGV("%s: id %s, requestCount %" PRId64 ", resultErrorCount %" PRId64 ", deviceError %d"
@@ -208,7 +302,8 @@
streamStats[i].mStartLatencyMs);
}
- sessionStats->onIdle(requestCount, resultErrorCount, deviceError, userTag,
+ sp<hardware::ICameraServiceProxy> proxyBinder = getCameraServiceProxy();
+ sessionStats->onIdle(proxyBinder, requestCount, resultErrorCount, deviceError, userTag,
videoStabilizationMode, streamStats);
}
@@ -229,19 +324,24 @@
apiLevel = CameraSessionStats::CAMERA_API_LEVEL_2;
}
- sessionStats = std::make_shared<CameraSessionStatsWrapper>(id, facing,
- CameraSessionStats::CAMERA_STATE_OPEN, clientPackageName,
- apiLevel, isNdk, latencyMs);
+ // Generate a new log ID for open events
+ int64_t logId = generateLogId(mRandomDevice);
+
+ sessionStats = std::make_shared<CameraSessionStatsWrapper>(
+ id, facing, CameraSessionStats::CAMERA_STATE_OPEN, clientPackageName,
+ apiLevel, isNdk, latencyMs, logId);
mSessionStatsMap.emplace(id, sessionStats);
ALOGV("%s: Adding id %s", __FUNCTION__, id.c_str());
}
ALOGV("%s: id %s, facing %d, effectiveApiLevel %d, isNdk %d, latencyMs %d",
__FUNCTION__, id.c_str(), facing, effectiveApiLevel, isNdk, latencyMs);
- sessionStats->onOpen();
+ sp<hardware::ICameraServiceProxy> proxyBinder = getCameraServiceProxy();
+ sessionStats->onOpen(proxyBinder);
}
-void CameraServiceProxyWrapper::logClose(const std::string& id, int32_t latencyMs) {
+void CameraServiceProxyWrapper::logClose(const std::string& id, int32_t latencyMs,
+ bool deviceError) {
std::shared_ptr<CameraSessionStatsWrapper> sessionStats;
{
Mutex::Autolock l(mLock);
@@ -257,12 +357,15 @@
__FUNCTION__, id.c_str());
return;
}
+
mSessionStatsMap.erase(id);
- ALOGV("%s: Erasing id %s", __FUNCTION__, id.c_str());
+ ALOGV("%s: Erasing id %s, deviceError %d", __FUNCTION__, id.c_str(), deviceError);
}
- ALOGV("%s: id %s, latencyMs %d", __FUNCTION__, id.c_str(), latencyMs);
- sessionStats->onClose(latencyMs);
+ ALOGV("%s: id %s, latencyMs %d, deviceError %d", __FUNCTION__,
+ id.c_str(), latencyMs, deviceError);
+ sp<hardware::ICameraServiceProxy> proxyBinder = getCameraServiceProxy();
+ sessionStats->onClose(proxyBinder, latencyMs, deviceError);
}
bool CameraServiceProxyWrapper::isCameraDisabled(int userId) {
@@ -277,4 +380,48 @@
return ret;
}
-}; // namespace android
+int64_t CameraServiceProxyWrapper::getCurrentLogIdForCamera(const std::string& cameraId) {
+ std::shared_ptr<CameraSessionStatsWrapper> stats;
+ {
+ Mutex::Autolock _l(mLock);
+ if (mSessionStatsMap.count(cameraId) == 0) {
+ ALOGE("%s: SessionStatsMap should contain camera %s before asking for its logging ID.",
+ __FUNCTION__, cameraId.c_str());
+ return 0;
+ }
+
+ stats = mSessionStatsMap[cameraId];
+ }
+ return stats->getLogId();
+}
+
+int64_t CameraServiceProxyWrapper::generateLogId(std::random_device& randomDevice) {
+ int64_t ret = 0;
+ do {
+ // std::random_device generates 32 bits per call, so we call it twice
+ ret = randomDevice();
+ ret = ret << 32;
+ ret = ret | randomDevice();
+ } while (ret == 0); // 0 is not a valid identifier
+
+ return ret;
+}
+
+std::string CameraServiceProxyWrapper::updateExtensionStats(
+ const hardware::CameraExtensionSessionStats& extStats) {
+ std::shared_ptr<CameraSessionStatsWrapper> stats;
+ std::string cameraId = toStdString(extStats.cameraId);
+ {
+ Mutex::Autolock _l(mLock);
+ if (mSessionStatsMap.count(cameraId) == 0) {
+ ALOGE("%s CameraExtensionSessionStats reported for camera id that isn't open: %s",
+ __FUNCTION__, cameraId.c_str());
+ return {};
+ }
+
+ stats = mSessionStatsMap[cameraId];
+ return stats->updateExtensionSessionStats(extStats);
+ }
+}
+
+} // namespace android
diff --git a/services/camera/libcameraservice/utils/CameraServiceProxyWrapper.h b/services/camera/libcameraservice/utils/CameraServiceProxyWrapper.h
index aee875f..1afe5b3 100644
--- a/services/camera/libcameraservice/utils/CameraServiceProxyWrapper.h
+++ b/services/camera/libcameraservice/utils/CameraServiceProxyWrapper.h
@@ -22,6 +22,7 @@
#include <utils/Mutex.h>
#include <utils/StrongPointer.h>
#include <utils/Timers.h>
+#include <random>
#include <string>
#include <camera/CameraSessionStats.h>
@@ -31,72 +32,106 @@
class CameraServiceProxyWrapper {
private:
// Guard mCameraServiceProxy
- static Mutex sProxyMutex;
+ Mutex mProxyMutex;
// Cached interface to the camera service proxy in system service
- static sp<hardware::ICameraServiceProxy> sCameraServiceProxy;
+ sp<hardware::ICameraServiceProxy> mCameraServiceProxy;
- struct CameraSessionStatsWrapper {
+ class CameraSessionStatsWrapper {
+ private:
hardware::CameraSessionStats mSessionStats;
Mutex mLock; // lock for per camera session stats
- CameraSessionStatsWrapper(const std::string& cameraId, int facing, int newCameraState,
- const std::string& clientName, int apiLevel, bool isNdk, int32_t latencyMs) :
- mSessionStats(cameraId, facing, newCameraState, clientName, apiLevel, isNdk, latencyMs)
- {}
+ /**
+ * Update the session stats of a given camera device (open/close/active/idle) with
+ * the camera proxy service in the system service
+ */
+ void updateProxyDeviceState(sp<hardware::ICameraServiceProxy>& proxyBinder);
- void onOpen();
- void onClose(int32_t latencyMs);
+ public:
+ CameraSessionStatsWrapper(const std::string& cameraId, int facing, int newCameraState,
+ const std::string& clientName, int apiLevel, bool isNdk,
+ int32_t latencyMs, int64_t logId)
+ : mSessionStats(cameraId, facing, newCameraState, clientName, apiLevel, isNdk,
+ latencyMs, logId) {}
+
+ void onOpen(sp<hardware::ICameraServiceProxy>& proxyBinder);
+ void onClose(sp<hardware::ICameraServiceProxy>& proxyBinder, int32_t latencyMs,
+ bool deviceError);
void onStreamConfigured(int operatingMode, bool internalReconfig, int32_t latencyMs);
- void onActive(float maxPreviewFps);
- void onIdle(int64_t requestCount, int64_t resultErrorCount, bool deviceError,
+ void onActive(sp<hardware::ICameraServiceProxy>& proxyBinder, float maxPreviewFps);
+ void onIdle(sp<hardware::ICameraServiceProxy>& proxyBinder,
+ int64_t requestCount, int64_t resultErrorCount, bool deviceError,
const std::string& userTag, int32_t videoStabilizationMode,
const std::vector<hardware::CameraStreamStats>& streamStats);
+
+ std::string updateExtensionSessionStats(
+ const hardware::CameraExtensionSessionStats& extStats);
+
+ // Returns the logId associated with this event.
+ int64_t getLogId();
};
// Lock for camera session stats map
- static Mutex mLock;
+ Mutex mLock;
// Map from camera id to the camera's session statistics
- static std::map<std::string, std::shared_ptr<CameraSessionStatsWrapper>> mSessionStatsMap;
+ std::map<std::string, std::shared_ptr<CameraSessionStatsWrapper>> mSessionStatsMap;
- /**
- * Update the session stats of a given camera device (open/close/active/idle) with
- * the camera proxy service in the system service
- */
- static void updateProxyDeviceState(
- const hardware::CameraSessionStats& sessionStats);
+ std::random_device mRandomDevice; // pulls 32-bit random numbers from /dev/urandom
- static sp<hardware::ICameraServiceProxy> getCameraServiceProxy();
+ sp<hardware::ICameraServiceProxy> getCameraServiceProxy();
+
+ // Returns a randomly generated ID that is suitable for logging the event. A new identifier
+ // should only be generated for an open event. All other events for the cameraId should use the
+ // ID generated for the open event associated with them.
+ static int64_t generateLogId(std::random_device& randomDevice);
public:
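+ // Allows a proxy binder to be injected (e.g. a mock for testing); by default the real
+ // camera service proxy is looked up lazily via getCameraServiceProxy().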
+ CameraServiceProxyWrapper(sp<hardware::ICameraServiceProxy> serviceProxy = nullptr) :
+ mCameraServiceProxy(serviceProxy)
+ { }
+
+ static sp<hardware::ICameraServiceProxy> getDefaultCameraServiceProxy();
+
// Open
- static void logOpen(const std::string& id, int facing,
+ void logOpen(const std::string& id, int facing,
const std::string& clientPackageName, int apiLevel, bool isNdk,
int32_t latencyMs);
// Close
- static void logClose(const std::string& id, int32_t latencyMs);
+ void logClose(const std::string& id, int32_t latencyMs, bool deviceError);
// Stream configuration
- static void logStreamConfigured(const std::string& id, int operatingMode, bool internalReconfig,
+ void logStreamConfigured(const std::string& id, int operatingMode, bool internalReconfig,
int32_t latencyMs);
// Session state becomes active
- static void logActive(const std::string& id, float maxPreviewFps);
+ void logActive(const std::string& id, float maxPreviewFps);
// Session state becomes idle
- static void logIdle(const std::string& id,
+ void logIdle(const std::string& id,
int64_t requestCount, int64_t resultErrorCount, bool deviceError,
const std::string& userTag, int32_t videoStabilizationMode,
const std::vector<hardware::CameraStreamStats>& streamStats);
// Ping camera service proxy for user update
- static void pingCameraServiceProxy();
+ void pingCameraServiceProxy();
// Return the current top activity rotate and crop override.
- static int getRotateAndCropOverride(const std::string &packageName, int lensFacing, int userId);
+ int getRotateAndCropOverride(const std::string &packageName, int lensFacing, int userId);
+
+ // Return the current top activity autoframing.
+ int getAutoframingOverride(const std::string& packageName);
// Detect if the camera is disabled by device policy.
- static bool isCameraDisabled(int userId);
+ bool isCameraDisabled(int userId);
+
+ // Returns the logId currently associated with the given cameraId. See 'mLogId' in
+ // frameworks/av/camera/include/camera/CameraSessionStats.h for more details about this
+ // identifier. Returns a non-0 value on success.
+ int64_t getCurrentLogIdForCamera(const std::string& cameraId);
+
+ // Update the stored extension stats to the latest values
+ std::string updateExtensionStats(const hardware::CameraExtensionSessionStats& extStats);
};
} // android
diff --git a/services/camera/libcameraservice/utils/SessionConfigurationUtils.cpp b/services/camera/libcameraservice/utils/SessionConfigurationUtils.cpp
index c9520d5..f7257e3 100644
--- a/services/camera/libcameraservice/utils/SessionConfigurationUtils.cpp
+++ b/services/camera/libcameraservice/utils/SessionConfigurationUtils.cpp
@@ -19,6 +19,8 @@
#include "SessionConfigurationUtils.h"
#include "../api2/DepthCompositeStream.h"
#include "../api2/HeicCompositeStream.h"
+#include "aidl/android/hardware/graphics/common/Dataspace.h"
+#include "api2/JpegRCompositeStream.h"
#include "common/CameraDeviceBase.h"
#include "common/HalConversionsTemplated.h"
#include "../CameraService.h"
@@ -27,6 +29,7 @@
#include "device3/Camera3OutputStream.h"
#include "system/graphics-base-v1.1.h"
#include <camera/StringUtils.h>
+#include <ui/PublicFormat.h>
using android::camera3::OutputStreamInfo;
using android::camera3::OutputStreamInfo;
@@ -71,11 +74,11 @@
int32_t dynamicDepthKey =
SessionConfigurationUtils::getAppropriateModeTag(
- ANDROID_DEPTH_AVAILABLE_DYNAMIC_DEPTH_STREAM_CONFIGURATIONS);
+ ANDROID_DEPTH_AVAILABLE_DYNAMIC_DEPTH_STREAM_CONFIGURATIONS, maxRes);
int32_t heicKey =
SessionConfigurationUtils::getAppropriateModeTag(
- ANDROID_HEIC_AVAILABLE_HEIC_STREAM_CONFIGURATIONS);
+ ANDROID_HEIC_AVAILABLE_HEIC_STREAM_CONFIGURATIONS, maxRes);
getStreamConfigurations(staticInfo, scalerKey, scm);
getStreamConfigurations(staticInfo, depthKey, scm);
@@ -128,7 +131,7 @@
size_t getUHRMaxJpegBufferSize(camera3::Size uhrMaxJpegSize,
camera3::Size defaultMaxJpegSize, size_t defaultMaxJpegBufferSize) {
- return ((float)uhrMaxJpegSize.width * uhrMaxJpegSize.height) /
+ return ((float)(uhrMaxJpegSize.width * uhrMaxJpegSize.height)) /
(defaultMaxJpegSize.width * defaultMaxJpegSize.height) * defaultMaxJpegBufferSize;
}
@@ -159,8 +162,13 @@
getAppropriateModeTag(ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS, maxResolution);
const int32_t heicSizesTag =
getAppropriateModeTag(ANDROID_HEIC_AVAILABLE_HEIC_STREAM_CONFIGURATIONS, maxResolution);
+ const int32_t jpegRSizesTag = getAppropriateModeTag(
+ ANDROID_JPEGR_AVAILABLE_JPEG_R_STREAM_CONFIGURATIONS, maxResolution);
+ bool isJpegRDataSpace = (dataSpace == static_cast<android_dataspace_t>(
+ ::aidl::android::hardware::graphics::common::Dataspace::JPEG_R));
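+ // Jpeg/R outputs are keyed by the JPEG_R dataspace and use their own stream configuration tag.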
camera_metadata_ro_entry streamConfigs =
+ (isJpegRDataSpace) ? info.find(jpegRSizesTag) :
(dataSpace == HAL_DATASPACE_DEPTH) ? info.find(depthSizesTag) :
(dataSpace == static_cast<android_dataspace>(HAL_DATASPACE_HEIF)) ?
info.find(heicSizesTag) :
@@ -194,6 +202,8 @@
if (bestWidth == -1) {
// Return false if no configurations for this format were listed
+ ALOGE("%s: No configurations for format %d width %d, height %d, maxResolution ? %s",
+ __FUNCTION__, format, width, height, maxResolution ? "true" : "false");
return false;
}
@@ -210,11 +220,18 @@
}
//check if format is 10-bit compatible
-bool is10bitCompatibleFormat(int32_t format) {
+bool is10bitCompatibleFormat(int32_t format, android_dataspace_t dataSpace) {
switch(format) {
case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
case HAL_PIXEL_FORMAT_YCBCR_P010:
return true;
+ case HAL_PIXEL_FORMAT_BLOB:
+ if (dataSpace == static_cast<android_dataspace_t>(
+ ::aidl::android::hardware::graphics::common::Dataspace::JPEG_R)) {
+ return true;
+ }
+
+ return false;
default:
return false;
}
@@ -283,6 +300,65 @@
}
}
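+// Returns true when ANDROID_REQUEST_AVAILABLE_CAPABILITIES advertises the
+// COLOR_SPACE_PROFILES capability.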
+bool deviceReportsColorSpaces(const CameraMetadata& staticInfo) {
+ camera_metadata_ro_entry_t entry = staticInfo.find(ANDROID_REQUEST_AVAILABLE_CAPABILITIES);
+ for (size_t i = 0; i < entry.count; ++i) {
+ uint8_t capability = entry.data.u8[i];
+ if (capability == ANDROID_REQUEST_AVAILABLE_CAPABILITIES_COLOR_SPACE_PROFILES) {
+ return true;
+ }
+ }
+
+ return false;
+}
+
+bool isColorSpaceSupported(int32_t colorSpace, int32_t format, android_dataspace dataSpace,
+ int64_t dynamicRangeProfile, const CameraMetadata& staticInfo) {
+ int64_t colorSpace64 = colorSpace;
+ int64_t format64 = format;
+
+ // Translate HAL format + data space to public format
+ if (format == HAL_PIXEL_FORMAT_BLOB && dataSpace == HAL_DATASPACE_V0_JFIF) {
+ format64 = 0x100; // JPEG
+ } else if (format == HAL_PIXEL_FORMAT_BLOB
+ && dataSpace == static_cast<android_dataspace>(HAL_DATASPACE_HEIF)) {
+ format64 = 0x48454946; // HEIC
+ } else if (format == HAL_PIXEL_FORMAT_BLOB
+ && dataSpace == static_cast<android_dataspace>(HAL_DATASPACE_DYNAMIC_DEPTH)) {
+ format64 = 0x69656963; // DEPTH_JPEG
+ } else if (format == HAL_PIXEL_FORMAT_BLOB && dataSpace == HAL_DATASPACE_DEPTH) {
+ return false; // DEPTH_POINT_CLOUD, not applicable
+ } else if (format == HAL_PIXEL_FORMAT_Y16 && dataSpace == HAL_DATASPACE_DEPTH) {
+ return false; // DEPTH16, not applicable
+ } else if (format == HAL_PIXEL_FORMAT_RAW16 && dataSpace == HAL_DATASPACE_DEPTH) {
+ return false; // RAW_DEPTH, not applicable
+ } else if (format == HAL_PIXEL_FORMAT_RAW10 && dataSpace == HAL_DATASPACE_DEPTH) {
+ return false; // RAW_DEPTH10, not applicable
+ } else if (format == HAL_PIXEL_FORMAT_BLOB && dataSpace ==
+ static_cast<android_dataspace>(
+ ::aidl::android::hardware::graphics::common::Dataspace::JPEG_R)) {
+ format64 = static_cast<int64_t>(PublicFormat::JPEG_R);
+ }
+
+ camera_metadata_ro_entry_t entry =
+ staticInfo.find(ANDROID_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP);
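+ // The profile map holds triples of (color space, image format, dynamic range profile bitmap).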
+ for (size_t i = 0; i < entry.count; i += 3) {
+ bool isFormatCompatible = (format64 == entry.data.i64[i + 1]);
+ bool isDynamicProfileCompatible =
+ (dynamicRangeProfile & entry.data.i64[i + 2]) != 0;
+
+ if (colorSpace64 == entry.data.i64[i]
+ && isFormatCompatible
+ && isDynamicProfileCompatible) {
+ return true;
+ }
+ }
+
+ ALOGE("Color space %d, image format %" PRId64 ", and dynamic range 0x%" PRIx64
+ " combination not found", colorSpace, format64, dynamicRangeProfile);
+ return false;
+}
+
bool isPublicFormat(int32_t format)
{
switch(format) {
@@ -310,6 +386,23 @@
}
}
+bool dataSpaceFromColorSpace(android_dataspace *dataSpace, int32_t colorSpace) {
+ switch (colorSpace) {
+ case ANDROID_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP_SRGB:
+ *dataSpace = HAL_DATASPACE_V0_SRGB;
+ return true;
+ case ANDROID_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP_DISPLAY_P3:
+ *dataSpace = HAL_DATASPACE_DISPLAY_P3;
+ return true;
+ case ANDROID_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP_BT2020_HLG:
+ *(reinterpret_cast<int32_t*>(dataSpace)) = HAL_DATASPACE_BT2020_HLG;
+ return true;
+ default:
+ ALOGE("%s: Unsupported color space %d", __FUNCTION__, colorSpace);
+ return false;
+ }
+}
+
bool isStreamUseCaseSupported(int64_t streamUseCase,
const CameraMetadata &deviceInfo) {
camera_metadata_ro_entry_t availableStreamUseCases =
@@ -337,7 +430,8 @@
sp<Surface>& surface, const sp<IGraphicBufferProducer>& gbp,
const std::string &logicalCameraId, const CameraMetadata &physicalCameraMetadata,
const std::vector<int32_t> &sensorPixelModesUsed, int64_t dynamicRangeProfile,
- int64_t streamUseCase, int timestampBase, int mirrorMode) {
+ int64_t streamUseCase, int timestampBase, int mirrorMode,
+ int32_t colorSpace) {
// bufferProducer must be non-null
if (gbp == nullptr) {
std::string msg = fmt::sprintf("Camera %s: Surface is NULL", logicalCameraId.c_str());
@@ -401,6 +495,16 @@
return STATUS_ERROR(CameraService::ERROR_INVALID_OPERATION, msg.c_str());
}
+ if (colorSpace != ANDROID_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP_UNSPECIFIED &&
+ format != HAL_PIXEL_FORMAT_BLOB) {
+ if (!dataSpaceFromColorSpace(&dataSpace, colorSpace)) {
+ std::string msg = fmt::sprintf("Camera %s: color space %d not supported, failed to "
+ "convert to data space", logicalCameraId.c_str(), colorSpace);
+ ALOGE("%s: %s", __FUNCTION__, msg.c_str());
+ return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT, msg.c_str());
+ }
+ }
+
// FIXME: remove this override since the default format should be
// IMPLEMENTATION_DEFINED. b/9487482 & b/35317944
if ((format >= HAL_PIXEL_FORMAT_RGBA_8888 && format <= HAL_PIXEL_FORMAT_BGRA_8888) &&
@@ -412,7 +516,7 @@
}
std::unordered_set<int32_t> overriddenSensorPixelModes;
if (checkAndOverrideSensorPixelModesUsed(sensorPixelModesUsed, format, width, height,
- physicalCameraMetadata, flexibleConsumer, &overriddenSensorPixelModes) != OK) {
+ physicalCameraMetadata, &overriddenSensorPixelModes) != OK) {
std::string msg = fmt::sprintf("Camera %s: sensor pixel modes for stream with "
"format %#x are not valid",logicalCameraId.c_str(), format);
ALOGE("%s: %s", __FUNCTION__, msg.c_str());
@@ -444,13 +548,23 @@
return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT, msg.c_str());
}
if (SessionConfigurationUtils::is10bitDynamicRangeProfile(dynamicRangeProfile) &&
- !SessionConfigurationUtils::is10bitCompatibleFormat(format)) {
+ !SessionConfigurationUtils::is10bitCompatibleFormat(format, dataSpace)) {
std::string msg = fmt::sprintf("Camera %s: No 10-bit supported stream configurations with "
"format %#x defined and profile %" PRIx64 ", failed to create output stream",
logicalCameraId.c_str(), format, dynamicRangeProfile);
ALOGE("%s: %s", __FUNCTION__, msg.c_str());
return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT, msg.c_str());
}
+ if (colorSpace != ANDROID_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP_UNSPECIFIED &&
+ SessionConfigurationUtils::deviceReportsColorSpaces(physicalCameraMetadata) &&
+ !SessionConfigurationUtils::isColorSpaceSupported(colorSpace, format, dataSpace,
+ dynamicRangeProfile, physicalCameraMetadata)) {
+ std::string msg = fmt::sprintf("Camera %s: Color space %d not supported, failed to "
+ "create output stream (pixel format %d dynamic range profile %" PRId64 ")",
+ logicalCameraId.c_str(), colorSpace, format, dynamicRangeProfile);
+ ALOGE("%s: %s", __FUNCTION__, msg.c_str());
+ return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT, msg.c_str());
+ }
if (!SessionConfigurationUtils::isStreamUseCaseSupported(streamUseCase,
physicalCameraMetadata)) {
std::string msg = fmt::sprintf("Camera %s: stream use case %" PRId64 " not supported,"
@@ -484,6 +598,7 @@
streamInfo.streamUseCase = streamUseCase;
streamInfo.timestampBase = timestampBase;
streamInfo.mirrorMode = mirrorMode;
+ streamInfo.colorSpace = colorSpace;
return binder::Status::ok();
}
if (width != streamInfo.width) {
@@ -539,6 +654,7 @@
camera3::Camera3OutputStream::applyZSLUsageQuirk(streamInfo.format, &u);
stream->usage = AidlCamera3Device::mapToAidlConsumerUsage(u);
stream->dataSpace = AidlCamera3Device::mapToAidlDataspace(streamInfo.dataSpace);
+ stream->colorSpace = streamInfo.colorSpace;
stream->rotation = AidlCamera3Device::mapToAidlStreamRotation(rotation);
stream->id = -1; // Invalid stream id
stream->physicalCameraId = physicalId;
@@ -563,6 +679,7 @@
convertToHALStreamCombination(
const SessionConfiguration& sessionConfiguration,
const std::string &logicalCameraId, const CameraMetadata &deviceInfo,
+ bool isCompositeJpegRDisabled,
metadataGetter getMetadata, const std::vector<std::string> &physicalCameraIds,
aidl::android::hardware::camera::device::StreamConfiguration &streamConfiguration,
bool overrideForPerfClass, bool *earlyExit) {
@@ -637,6 +754,7 @@
const std::string &physicalCameraId = it.getPhysicalCameraId();
int64_t dynamicRangeProfile = it.getDynamicRangeProfile();
+ int32_t colorSpace = it.getColorSpace();
std::vector<int32_t> sensorPixelModesUsed = it.getSensorPixelModesUsed();
const CameraMetadata &physicalDeviceInfo = getMetadata(physicalCameraId,
overrideForPerfClass);
@@ -674,7 +792,7 @@
streamInfo.dynamicRangeProfile = it.getDynamicRangeProfile();
if (checkAndOverrideSensorPixelModesUsed(sensorPixelModesUsed,
streamInfo.format, streamInfo.width,
- streamInfo.height, metadataChosen, false /*flexibleConsumer*/,
+ streamInfo.height, metadataChosen,
&streamInfo.sensorPixelModesUsed) != OK) {
ALOGE("%s: Deferred surface sensor pixel modes not valid",
__FUNCTION__);
@@ -695,7 +813,7 @@
sp<Surface> surface;
res = createSurfaceFromGbp(streamInfo, isStreamInfoValid, surface, bufferProducer,
logicalCameraId, metadataChosen, sensorPixelModesUsed, dynamicRangeProfile,
- streamUseCase, timestampBase, mirrorMode);
+ streamUseCase, timestampBase, mirrorMode, colorSpace);
if (!res.isOk())
return res;
@@ -705,7 +823,10 @@
camera3::DepthCompositeStream::isDepthCompositeStream(surface);
bool isHeicCompositeStream =
camera3::HeicCompositeStream::isHeicCompositeStream(surface);
- if (isDepthCompositeStream || isHeicCompositeStream) {
+ bool isJpegRCompositeStream =
+ camera3::JpegRCompositeStream::isJpegRCompositeStream(surface) &&
+ !isCompositeJpegRDisabled;
+ if (isDepthCompositeStream || isHeicCompositeStream || isJpegRCompositeStream) {
// We need to take in to account that composite streams can have
// additional internal camera streams.
std::vector<OutputStreamInfo> compositeStreams;
@@ -713,10 +834,14 @@
// TODO: Take care of composite streams.
ret = camera3::DepthCompositeStream::getCompositeStreamInfo(streamInfo,
deviceInfo, &compositeStreams);
- } else {
+ } else if (isHeicCompositeStream) {
ret = camera3::HeicCompositeStream::getCompositeStreamInfo(streamInfo,
deviceInfo, &compositeStreams);
+ } else {
+ ret = camera3::JpegRCompositeStream::getCompositeStreamInfo(streamInfo,
+ deviceInfo, &compositeStreams);
}
+
if (ret != OK) {
std::string msg = fmt::sprintf(
"Camera %s: Failed adding composite streams: %s (%d)",
@@ -845,15 +970,17 @@
status_t checkAndOverrideSensorPixelModesUsed(
const std::vector<int32_t> &sensorPixelModesUsed, int format, int width, int height,
- const CameraMetadata &staticInfo, bool flexibleConsumer,
+ const CameraMetadata &staticInfo,
std::unordered_set<int32_t> *overriddenSensorPixelModesUsed) {
const std::unordered_set<int32_t> &sensorPixelModesUsedSet =
convertToSet(sensorPixelModesUsed);
- if (!isUltraHighResolutionSensor(staticInfo)) {
+ if (!supportsUltraHighResolutionCapture(staticInfo)) {
if (sensorPixelModesUsedSet.find(ANDROID_SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION) !=
sensorPixelModesUsedSet.end()) {
// invalid value for non ultra high res sensors
+ ALOGE("%s ANDROID_SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION used on a device which doesn't "
+ "support ultra high resolution capture", __FUNCTION__);
return BAD_VALUE;
}
overriddenSensorPixelModesUsed->clear();
@@ -874,35 +1001,40 @@
// Case 1: The client has not changed the sensor mode defaults. In this case, we check if the
// size + format of the OutputConfiguration is found exclusively in 1.
// If yes, add that sensorPixelMode to overriddenSensorPixelModes.
- // If no, add 'DEFAULT' to sensorPixelMode. This maintains backwards
- // compatibility.
+ // If no, add 'DEFAULT' and MAXIMUM_RESOLUTION to overriddenSensorPixelModes.
+ // This maintains backwards compatibility and also tells the framework the stream
+ // might be used in either sensor pixel mode.
if (sensorPixelModesUsedSet.size() == 0) {
- // Ambiguous case, default to only 'DEFAULT' mode.
+ // Ambiguous case, override to include both cases.
if (isInDefaultStreamConfigurationMap && isInMaximumResolutionStreamConfigurationMap) {
overriddenSensorPixelModesUsed->insert(ANDROID_SENSOR_PIXEL_MODE_DEFAULT);
- return OK;
- }
- // We don't allow flexible consumer for max resolution mode.
- if (isInMaximumResolutionStreamConfigurationMap) {
overriddenSensorPixelModesUsed->insert(ANDROID_SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION);
return OK;
}
- if (isInDefaultStreamConfigurationMap || (flexibleConsumer && width < ROUNDING_WIDTH_CAP)) {
+ if (isInMaximumResolutionStreamConfigurationMap) {
+ overriddenSensorPixelModesUsed->insert(
+ ANDROID_SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION);
+ } else {
overriddenSensorPixelModesUsed->insert(ANDROID_SENSOR_PIXEL_MODE_DEFAULT);
- return OK;
}
- return BAD_VALUE;
+ return OK;
}
// Case2: The app has set sensorPixelModesUsed, we need to verify that they
// are valid / err out.
if (sensorPixelModesUsedSet.find(ANDROID_SENSOR_PIXEL_MODE_DEFAULT) !=
sensorPixelModesUsedSet.end() && !isInDefaultStreamConfigurationMap) {
+ ALOGE("%s: ANDROID_SENSOR_PIXEL_MODE_DEFAULT set by client, but stream f: %d size %d x %d"
+ " isn't present in default stream configuration map", __FUNCTION__, format, width,
+ height);
return BAD_VALUE;
}
if (sensorPixelModesUsedSet.find(ANDROID_SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION) !=
sensorPixelModesUsedSet.end() && !isInMaximumResolutionStreamConfigurationMap) {
+ ALOGE("%s: ANDROID_SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION set by client, but stream f: "
+ "%d size %d x %d isn't present in default stream configuration map", __FUNCTION__,
+ format, width, height);
return BAD_VALUE;
}
*overriddenSensorPixelModesUsed = sensorPixelModesUsedSet;
diff --git a/services/camera/libcameraservice/utils/SessionConfigurationUtils.h b/services/camera/libcameraservice/utils/SessionConfigurationUtils.h
index 220d1f8..79d80ea 100644
--- a/services/camera/libcameraservice/utils/SessionConfigurationUtils.h
+++ b/services/camera/libcameraservice/utils/SessionConfigurationUtils.h
@@ -100,10 +100,11 @@
sp<Surface>& surface, const sp<IGraphicBufferProducer>& gbp,
const std::string &logicalCameraId, const CameraMetadata &physicalCameraMetadata,
const std::vector<int32_t> &sensorPixelModesUsed, int64_t dynamicRangeProfile,
- int64_t streamUseCase, int timestampBase, int mirrorMode);
+ int64_t streamUseCase, int timestampBase, int mirrorMode,
+ int32_t colorSpace);
//check if format is 10-bit output compatible
-bool is10bitCompatibleFormat(int32_t format);
+bool is10bitCompatibleFormat(int32_t format, android_dataspace_t dataSpace);
// check if the dynamic range requires 10-bit output
bool is10bitDynamicRangeProfile(int64_t dynamicRangeProfile);
@@ -111,6 +112,13 @@
// Check if the device supports a given dynamicRangeProfile
bool isDynamicRangeProfileSupported(int64_t dynamicRangeProfile, const CameraMetadata& staticMeta);
+bool deviceReportsColorSpaces(const CameraMetadata& staticMeta);
+
+bool isColorSpaceSupported(int32_t colorSpace, int32_t format, android_dataspace dataSpace,
+ int64_t dynamicRangeProfile, const CameraMetadata& staticMeta);
+
+bool dataSpaceFromColorSpace(android_dataspace *dataSpace, int32_t colorSpace);
+
bool isStreamUseCaseSupported(int64_t streamUseCase, const CameraMetadata &deviceInfo);
void mapStreamInfo(const OutputStreamInfo &streamInfo,
@@ -133,7 +141,8 @@
convertToHALStreamCombination(
const SessionConfiguration& sessionConfiguration,
const std::string &logicalCameraId, const CameraMetadata &deviceInfo,
- metadataGetter getMetadata, const std::vector<std::string> &physicalCameraIds,
+ bool isCompositeJpegRDisabled, metadataGetter getMetadata,
+ const std::vector<std::string> &physicalCameraIds,
aidl::android::hardware::camera::device::StreamConfiguration &streamConfiguration,
bool overrideForPerfClass, bool *earlyExit);
@@ -141,7 +150,7 @@
status_t checkAndOverrideSensorPixelModesUsed(
const std::vector<int32_t> &sensorPixelModesUsed, int format, int width, int height,
- const CameraMetadata &staticInfo, bool flexibleConsumer,
+ const CameraMetadata &staticInfo,
std::unordered_set<int32_t> *overriddenSensorPixelModesUsed);
bool targetPerfClassPrimaryCamera(
diff --git a/services/camera/libcameraservice/utils/SessionConfigurationUtilsHidl.cpp b/services/camera/libcameraservice/utils/SessionConfigurationUtilsHidl.cpp
index f63eea1..cf93d3b 100644
--- a/services/camera/libcameraservice/utils/SessionConfigurationUtilsHidl.cpp
+++ b/services/camera/libcameraservice/utils/SessionConfigurationUtilsHidl.cpp
@@ -111,8 +111,8 @@
bool overrideForPerfClass, bool *earlyExit) {
aidl::android::hardware::camera::device::StreamConfiguration aidlStreamConfiguration;
auto ret = convertToHALStreamCombination(sessionConfiguration, logicalCameraId, deviceInfo,
- getMetadata, physicalCameraIds, aidlStreamConfiguration, overrideForPerfClass,
- earlyExit);
+ false /*isCompositeJpegRDisabled*/, getMetadata, physicalCameraIds,
+ aidlStreamConfiguration, overrideForPerfClass, earlyExit);
if (!ret.isOk()) {
return ret;
}
diff --git a/services/camera/libcameraservice/utils/SessionConfigurationUtilsHost.cpp b/services/camera/libcameraservice/utils/SessionConfigurationUtilsHost.cpp
index 1efdc60..7d344f8 100644
--- a/services/camera/libcameraservice/utils/SessionConfigurationUtilsHost.cpp
+++ b/services/camera/libcameraservice/utils/SessionConfigurationUtilsHost.cpp
@@ -49,12 +49,22 @@
return ANDROID_HEIC_AVAILABLE_HEIC_MIN_FRAME_DURATIONS_MAXIMUM_RESOLUTION;
case ANDROID_HEIC_AVAILABLE_HEIC_STALL_DURATIONS:
return ANDROID_HEIC_AVAILABLE_HEIC_STALL_DURATIONS_MAXIMUM_RESOLUTION;
+ case ANDROID_JPEGR_AVAILABLE_JPEG_R_STREAM_CONFIGURATIONS:
+ return ANDROID_JPEGR_AVAILABLE_JPEG_R_STREAM_CONFIGURATIONS_MAXIMUM_RESOLUTION;
+ case ANDROID_JPEGR_AVAILABLE_JPEG_R_MIN_FRAME_DURATIONS:
+ return ANDROID_JPEGR_AVAILABLE_JPEG_R_MIN_FRAME_DURATIONS_MAXIMUM_RESOLUTION;
+ case ANDROID_JPEGR_AVAILABLE_JPEG_R_STALL_DURATIONS:
+ return ANDROID_JPEGR_AVAILABLE_JPEG_R_STALL_DURATIONS_MAXIMUM_RESOLUTION;
case ANDROID_SENSOR_OPAQUE_RAW_SIZE:
return ANDROID_SENSOR_OPAQUE_RAW_SIZE_MAXIMUM_RESOLUTION;
case ANDROID_LENS_INTRINSIC_CALIBRATION:
return ANDROID_LENS_INTRINSIC_CALIBRATION_MAXIMUM_RESOLUTION;
case ANDROID_LENS_DISTORTION:
return ANDROID_LENS_DISTORTION_MAXIMUM_RESOLUTION;
+ case ANDROID_SENSOR_INFO_PRE_CORRECTION_ACTIVE_ARRAY_SIZE:
+ return ANDROID_SENSOR_INFO_PRE_CORRECTION_ACTIVE_ARRAY_SIZE_MAXIMUM_RESOLUTION;
+ case ANDROID_SENSOR_INFO_ACTIVE_ARRAY_SIZE:
+ return ANDROID_SENSOR_INFO_ACTIVE_ARRAY_SIZE_MAXIMUM_RESOLUTION;
default:
ALOGE("%s: Tag %d doesn't have a maximum resolution counterpart", __FUNCTION__,
defaultTag);
@@ -63,7 +73,62 @@
return -1;
}
-bool isUltraHighResolutionSensor(const CameraMetadata &deviceInfo) {
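+// Returns true when 'tag' is present in 'deviceInfo' and its entry count is a non-zero
+// multiple of 'count'.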
+static bool isKeyPresentWithCount(const CameraMetadata &deviceInfo, uint32_t tag, uint32_t count) {
+ auto countFound = deviceInfo.find(tag).count;
+ return (countFound != 0) && (countFound % count == 0);
+}
+
+static bool supportsKeysForBasicUltraHighResolutionCapture(const CameraMetadata &deviceInfo) {
+ // Check whether the following conditions are satisfied for reduced ultra high
+ // resolution support :
+ // 1) SENSOR_PIXEL_MODE is advertised in ANDROID_REQUEST_AVAILABLE_REQUEST_KEYS
+ // 2) The following keys are present in CameraCharacteristics for basic functionality
+ // a) ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_MAXIMUM_RESOLUTION
+ // b) ANDROID_SCALER_AVAILABLE_MIN_FRAME_DURATIONS_MAXIMUM_RESOLUTION
+ // c) ANDROID_SCALER_AVAILABLE_STALL_DURATIONS_MAXIMUM_RESOLUTION
+ // d) ANDROID_SENSOR_INFO_ACTIVE_ARRAY_SIZE_MAXIMUM_RESOLUTION
+ // e) ANDROID_SENSOR_INFO_PRE_CORRECTION_ACTIVE_ARRAY_SIZE_MAXIMUM_RESOLUTION
+ // f) ANDROID_SENSOR_INFO_PIXEL_ARRAY_SIZE_MAXIMUM_RESOLUTION
+ camera_metadata_ro_entry_t entryChar;
+ entryChar = deviceInfo.find(ANDROID_REQUEST_AVAILABLE_REQUEST_KEYS);
+ bool supportsSensorPixelMode = false;
+ for (size_t i = 0; i < entryChar.count; i++) {
+ int32_t key = entryChar.data.i32[i];
+ if (key == ANDROID_SENSOR_PIXEL_MODE) {
+ supportsSensorPixelMode = true;
+ break;
+ }
+ }
+ if (!supportsSensorPixelMode) {
+ return false;
+ }
+
+ // Basic sensor array size information tags are present
+ if (!isKeyPresentWithCount(deviceInfo, ANDROID_SENSOR_INFO_PIXEL_ARRAY_SIZE_MAXIMUM_RESOLUTION,
+ /*count*/2) ||
+ !isKeyPresentWithCount(deviceInfo,
+ ANDROID_SENSOR_INFO_PRE_CORRECTION_ACTIVE_ARRAY_SIZE_MAXIMUM_RESOLUTION,
+ /*count*/4) ||
+ !isKeyPresentWithCount(deviceInfo,
+ ANDROID_SENSOR_INFO_ACTIVE_ARRAY_SIZE_MAXIMUM_RESOLUTION, /*count*/4) ||
+ !isKeyPresentWithCount(deviceInfo, ANDROID_SENSOR_INFO_BINNING_FACTOR, /*count*/2)) {
+ return false;
+ }
+
+ // Basic stream configuration tags are present
+ if (!isKeyPresentWithCount(deviceInfo,
+ ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_MAXIMUM_RESOLUTION, /*count*/4) ||
+ !isKeyPresentWithCount(deviceInfo,
+ ANDROID_SCALER_AVAILABLE_MIN_FRAME_DURATIONS_MAXIMUM_RESOLUTION, /*count*/4) ||
+ !isKeyPresentWithCount(deviceInfo,
+ ANDROID_SCALER_AVAILABLE_STALL_DURATIONS_MAXIMUM_RESOLUTION, /*count*/ 4)) {
+ return false;
+ }
+
+ return true;
+}
+
+bool supportsUltraHighResolutionCapture(const CameraMetadata &deviceInfo) {
camera_metadata_ro_entry_t entryCap;
entryCap = deviceInfo.find(ANDROID_REQUEST_AVAILABLE_CAPABILITIES);
// Go through the capabilities and check if it has
@@ -74,7 +139,10 @@
return true;
}
}
- return false;
+
+ // If not, check that the keys which guarantee basic support for
+ // ultra high resolution capture are present.
+ return supportsKeysForBasicUltraHighResolutionCapture(deviceInfo);
}
bool getArrayWidthAndHeight(const CameraMetadata *deviceInfo,
@@ -93,4 +161,4 @@
} // namespace SessionConfigurationUtils
} // namespace camera3
-} // namespace android
\ No newline at end of file
+} // namespace android
diff --git a/services/camera/libcameraservice/utils/SessionConfigurationUtilsHost.h b/services/camera/libcameraservice/utils/SessionConfigurationUtilsHost.h
index 45b1e91..dac1824 100644
--- a/services/camera/libcameraservice/utils/SessionConfigurationUtilsHost.h
+++ b/services/camera/libcameraservice/utils/SessionConfigurationUtilsHost.h
@@ -22,7 +22,7 @@
namespace camera3 {
namespace SessionConfigurationUtils {
-bool isUltraHighResolutionSensor(const CameraMetadata &deviceInfo);
+bool supportsUltraHighResolutionCapture(const CameraMetadata &deviceInfo);
int32_t getAppropriateModeTag(int32_t defaultTag, bool maxResolution = false);
@@ -33,4 +33,4 @@
} // camera3
} // android
-#endif
\ No newline at end of file
+#endif