Create API for reporting color space support to Camera2 framework consumers.
Test: Ran CtsCameraTestCases
Test: Ran new VTS test for color space reporting
Test: Created app to test display P3 camera, switching between color spaces
Bug: 238359088
Change-Id: I229b6bc32d13882e852edbdc353f00f0e7c25852
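
For reference, a minimal consumer-side sketch (not part of this change) of reading the reported color space support back through the new characteristics tag. It assumes an NDK client with a valid ACameraManager and camera ID, and the helper name is hypothetical; error handling is trimmed:

    #include <camera/NdkCameraManager.h>
    #include <camera/NdkCameraMetadata.h>
    #include <camera/NdkCameraMetadataTags.h>

    // Hypothetical helper: walk every color space profile advertised by a camera.
    void dumpColorSpaceProfiles(ACameraManager* manager, const char* cameraId) {
        ACameraMetadata* chars = nullptr;
        if (ACameraManager_getCameraCharacteristics(manager, cameraId, &chars) != ACAMERA_OK) {
            return;
        }
        ACameraMetadata_const_entry entry = {};
        if (ACameraMetadata_getConstEntry(chars,
                ACAMERA_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP, &entry) == ACAMERA_OK) {
            // Entries are int64 triples: color space, image format, dynamic range profile bitmap.
            for (uint32_t i = 0; i + 2 < entry.count; i += 3) {
                int64_t colorSpace = entry.data.i64[i];
                int64_t imageFormat = entry.data.i64[i + 1];
                int64_t dynamicRangeProfiles = entry.data.i64[i + 2];
                (void)colorSpace; (void)imageFormat; (void)dynamicRangeProfiles;
            }
        }
        ACameraMetadata_free(chars);
    }

The triple layout matches the tag documentation added to NdkCameraMetadataTags.h below.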
diff --git a/camera/CameraSessionStats.cpp b/camera/CameraSessionStats.cpp
index d1aa36a..0706ac1 100644
--- a/camera/CameraSessionStats.cpp
+++ b/camera/CameraSessionStats.cpp
@@ -130,6 +130,12 @@
return err;
}
+ int32_t colorSpace = ANDROID_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP_UNSPECIFIED;
+ if ((err = parcel->readInt32(&colorSpace)) != OK) {
+ ALOGE("%s: Failed to read color space from parcel", __FUNCTION__);
+ return err;
+ }
+
mWidth = width;
mHeight = height;
mFormat = format;
@@ -146,6 +152,7 @@
mHistogramCounts = std::move(histogramCounts);
mDynamicRangeProfile = dynamicRangeProfile;
mStreamUseCase = streamUseCase;
+ mColorSpace = colorSpace;
return OK;
}
@@ -238,6 +245,11 @@
return err;
}
+ if ((err = parcel->writeInt32(mColorSpace)) != OK) {
+ ALOGE("%s: Failed to write color space", __FUNCTION__);
+ return err;
+ }
+
return OK;
}
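
The new CameraStreamStats field is appended to the parcel, so writeToParcel() and readFromParcel() must stay in the same order. A round-trip sketch (assuming libbinder's android::Parcel; the real stats serialize many more fields before these two, and the function names here are illustrative only):

    #include <binder/Parcel.h>
    #include <utils/Errors.h>

    // Sketch only: colorSpace is written/read last, mirroring the hunks above.
    android::status_t writeTail(android::Parcel* parcel, int64_t streamUseCase, int32_t colorSpace) {
        android::status_t err;
        if ((err = parcel->writeInt64(streamUseCase)) != android::OK) return err;
        return parcel->writeInt32(colorSpace);
    }

    android::status_t readTail(const android::Parcel* parcel, int64_t* streamUseCase, int32_t* colorSpace) {
        android::status_t err;
        if ((err = parcel->readInt64(streamUseCase)) != android::OK) return err;
        return parcel->readInt32(colorSpace);
    }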
diff --git a/camera/camera2/OutputConfiguration.cpp b/camera/camera2/OutputConfiguration.cpp
index 11d4960..d50566d 100644
--- a/camera/camera2/OutputConfiguration.cpp
+++ b/camera/camera2/OutputConfiguration.cpp
@@ -26,8 +26,8 @@
#include <system/camera_metadata.h>
#include <utils/String8.h>
-namespace android {
+namespace android {
const int OutputConfiguration::INVALID_ROTATION = -1;
const int OutputConfiguration::INVALID_SET_ID = -1;
@@ -81,6 +81,10 @@
return mDynamicRangeProfile;
}
+int32_t OutputConfiguration::getColorSpace() const {
+ return mColorSpace;
+}
+
int64_t OutputConfiguration::getStreamUseCase() const {
return mStreamUseCase;
}
@@ -103,6 +107,7 @@
mIsShared(false),
mIsMultiResolution(false),
mDynamicRangeProfile(ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD),
+ mColorSpace(ANDROID_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP_UNSPECIFIED),
mStreamUseCase(ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_DEFAULT),
mTimestampBase(TIMESTAMP_BASE_DEFAULT),
mMirrorMode(MIRROR_MODE_AUTO) {
@@ -191,6 +196,11 @@
ALOGE("%s: Failed to read surface dynamic range profile flag from parcel", __FUNCTION__);
return err;
}
+ int32_t colorSpace;
+ if ((err = parcel->readInt32(&colorSpace)) != OK) {
+ ALOGE("%s: Failed to read surface color space flag from parcel", __FUNCTION__);
+ return err;
+ }
int64_t streamUseCase = ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_DEFAULT;
if ((err = parcel->readInt64(&streamUseCase)) != OK) {
@@ -230,6 +240,7 @@
mSensorPixelModesUsed = std::move(sensorPixelModesUsed);
mDynamicRangeProfile = dynamicProfile;
+ mColorSpace = colorSpace;
ALOGV("%s: OutputConfiguration: rotation = %d, setId = %d, surfaceType = %d,"
" physicalCameraId = %s, isMultiResolution = %d, streamUseCase = %" PRId64
@@ -252,6 +263,7 @@
mPhysicalCameraId = physicalId;
mIsMultiResolution = false;
mDynamicRangeProfile = ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD;
+ mColorSpace = ANDROID_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP_UNSPECIFIED;
mStreamUseCase = ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_DEFAULT;
mTimestampBase = TIMESTAMP_BASE_DEFAULT;
mMirrorMode = MIRROR_MODE_AUTO;
@@ -265,6 +277,7 @@
mWidth(width), mHeight(height), mIsDeferred(false), mIsShared(isShared),
mPhysicalCameraId(physicalCameraId), mIsMultiResolution(false),
mDynamicRangeProfile(ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD),
+ mColorSpace(ANDROID_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP_UNSPECIFIED),
mStreamUseCase(ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_DEFAULT),
mTimestampBase(TIMESTAMP_BASE_DEFAULT),
mMirrorMode(MIRROR_MODE_AUTO) { }
@@ -317,6 +330,9 @@
err = parcel->writeInt64(mDynamicRangeProfile);
if (err != OK) return err;
+ err = parcel->writeInt32(mColorSpace);
+ if (err != OK) return err;
+
err = parcel->writeInt64(mStreamUseCase);
if (err != OK) return err;
diff --git a/camera/cameraserver/Android.bp b/camera/cameraserver/Android.bp
index 094a3c1..27ebb7a 100644
--- a/camera/cameraserver/Android.bp
+++ b/camera/cameraserver/Android.bp
@@ -43,7 +43,7 @@
"android.hardware.camera.provider@2.5",
"android.hardware.camera.provider@2.6",
"android.hardware.camera.provider@2.7",
- "android.hardware.camera.provider-V1-ndk",
+ "android.hardware.camera.provider-V2-ndk",
"android.hardware.camera.device@1.0",
"android.hardware.camera.device@3.2",
"android.hardware.camera.device@3.4",
diff --git a/camera/include/camera/CameraSessionStats.h b/camera/include/camera/CameraSessionStats.h
index aaa88b2..90ee924 100644
--- a/camera/include/camera/CameraSessionStats.h
+++ b/camera/include/camera/CameraSessionStats.h
@@ -67,22 +67,26 @@
int64_t mDynamicRangeProfile;
// Stream use case
int64_t mStreamUseCase;
+ // Color space
+ int32_t mColorSpace;
CameraStreamStats() :
mWidth(0), mHeight(0), mFormat(0), mMaxPreviewFps(0), mDataSpace(0), mUsage(0),
mRequestCount(0), mErrorCount(0), mStartLatencyMs(0),
mMaxHalBuffers(0), mMaxAppBuffers(0), mHistogramType(HISTOGRAM_TYPE_UNKNOWN),
mDynamicRangeProfile(ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD),
- mStreamUseCase(ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_DEFAULT) {}
+ mStreamUseCase(ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_DEFAULT),
+ mColorSpace(ANDROID_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP_UNSPECIFIED) {}
CameraStreamStats(int width, int height, int format, float maxPreviewFps, int dataSpace,
int64_t usage, int maxHalBuffers, int maxAppBuffers, int dynamicRangeProfile,
- int streamUseCase)
+ int streamUseCase, int32_t colorSpace)
: mWidth(width), mHeight(height), mFormat(format), mMaxPreviewFps(maxPreviewFps),
mDataSpace(dataSpace), mUsage(usage), mRequestCount(0), mErrorCount(0),
mStartLatencyMs(0), mMaxHalBuffers(maxHalBuffers), mMaxAppBuffers(maxAppBuffers),
mHistogramType(HISTOGRAM_TYPE_UNKNOWN),
mDynamicRangeProfile(dynamicRangeProfile),
- mStreamUseCase(streamUseCase) {}
+ mStreamUseCase(streamUseCase),
+ mColorSpace(colorSpace) {}
virtual status_t readFromParcel(const android::Parcel* parcel) override;
virtual status_t writeToParcel(android::Parcel* parcel) const override;
diff --git a/camera/include/camera/camera2/OutputConfiguration.h b/camera/include/camera/camera2/OutputConfiguration.h
index b7c7f7f..a713b40 100644
--- a/camera/include/camera/camera2/OutputConfiguration.h
+++ b/camera/include/camera/camera2/OutputConfiguration.h
@@ -61,6 +61,7 @@
int getWidth() const;
int getHeight() const;
int64_t getDynamicRangeProfile() const;
+ int32_t getColorSpace() const;
bool isDeferred() const;
bool isShared() const;
String16 getPhysicalCameraId() const;
@@ -111,6 +112,7 @@
mIsMultiResolution == other.mIsMultiResolution &&
sensorPixelModesUsedEqual(other) &&
mDynamicRangeProfile == other.mDynamicRangeProfile &&
+ mColorSpace == other.mColorSpace &&
mStreamUseCase == other.mStreamUseCase &&
mTimestampBase == other.mTimestampBase &&
mMirrorMode == other.mMirrorMode);
@@ -153,6 +155,9 @@
if (mDynamicRangeProfile != other.mDynamicRangeProfile) {
return mDynamicRangeProfile < other.mDynamicRangeProfile;
}
+ if (mColorSpace != other.mColorSpace) {
+ return mColorSpace < other.mColorSpace;
+ }
if (mStreamUseCase != other.mStreamUseCase) {
return mStreamUseCase < other.mStreamUseCase;
}
@@ -187,6 +192,7 @@
bool mIsMultiResolution;
std::vector<int32_t> mSensorPixelModesUsed;
int64_t mDynamicRangeProfile;
+ int32_t mColorSpace;
int64_t mStreamUseCase;
int mTimestampBase;
int mMirrorMode;
diff --git a/camera/ndk/include/camera/NdkCameraMetadataTags.h b/camera/ndk/include/camera/NdkCameraMetadataTags.h
index 0d156a5..3769b8c 100644
--- a/camera/ndk/include/camera/NdkCameraMetadataTags.h
+++ b/camera/ndk/include/camera/NdkCameraMetadataTags.h
@@ -3520,6 +3520,26 @@
*/
ACAMERA_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP = // int64[n*3] (acamera_metadata_enum_android_request_available_dynamic_range_profiles_map_t)
ACAMERA_REQUEST_START + 19,
+ /**
+ * <p>A list of all possible color space profiles supported by a camera device.</p>
+ *
+ * <p>Type: int64[n*3] (acamera_metadata_enum_android_request_available_color_space_profiles_map_t)</p>
+ *
+ * <p>This tag may appear in:
+ * <ul>
+ * <li>ACameraMetadata from ACameraManager_getCameraCharacteristics</li>
+ * </ul></p>
+ *
+ * <p>A color space profile is a combination of a color space, an image format, and a dynamic range
+ * profile. If a camera does not support the
+ * <a href="https://developer.android.com/reference/android/hardware/camera2/CameraCharacteristics.html#REQUEST_AVAILABLE_CAPABILITIES_DYNAMIC_RANGE_TEN_BIT">CameraCharacteristics#REQUEST_AVAILABLE_CAPABILITIES_DYNAMIC_RANGE_TEN_BIT</a>
+ * capability, the dynamic range profile will always be
+ * <a href="https://developer.android.com/reference/android/hardware/camera2/params/DynamicRangeProfiles.html#STANDARD">DynamicRangeProfiles#STANDARD</a>. Camera clients can
+ * use <a href="https://developer.android.com/reference/android/hardware/camera2/params/SessionConfiguration.html#setColorSpace">SessionConfiguration#setColorSpace</a> to select
+ * a color space.</p>
+ */
+ ACAMERA_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP = // int64[n*3] (acamera_metadata_enum_android_request_available_color_space_profiles_map_t)
+ ACAMERA_REQUEST_START + 21,
ACAMERA_REQUEST_END,
/**
@@ -9448,6 +9468,99 @@
} acamera_metadata_enum_android_request_available_dynamic_range_profiles_map_t;
+// ACAMERA_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP
+typedef enum acamera_metadata_enum_acamera_request_available_color_space_profiles_map {
+ /**
+ * <p>Default value, when not explicitly specified. The Camera device will choose the color
+ * space to employ.</p>
+ */
+ ACAMERA_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP_UNSPECIFIED = -1,
+
+ /**
+ * <p>RGB color space sRGB standardized as IEC 61966-2.1:1999.</p>
+ */
+ ACAMERA_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP_SRGB = 0,
+
+ /**
+ * <p>RGB color space sRGB standardized as IEC 61966-2.1:1999, using a linear transfer function.</p>
+ */
+ ACAMERA_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP_LINEAR_SRGB = 1,
+
+ /**
+ * <p>RGB color space scRGB-nl standardized as IEC 61966-2-2:2003.</p>
+ */
+ ACAMERA_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP_EXTENDED_SRGB = 2,
+
+ /**
+ * <p>RGB color space scRGB standardized as IEC 61966-2-2:2003.</p>
+ */
+ ACAMERA_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP_LINEAR_EXTENDED_SRGB
+ = 3,
+
+ /**
+ * <p>RGB color space BT.709 standardized as Rec. ITU-R BT.709-5.</p>
+ */
+ ACAMERA_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP_BT709 = 4,
+
+ /**
+ * <p>RGB color space BT.2020 standardized as Rec. ITU-R BT.2020-1.</p>
+ */
+ ACAMERA_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP_BT2020 = 5,
+
+ /**
+ * <p>RGB color space DCI-P3 standardized as SMPTE RP 431-2-2007.</p>
+ */
+ ACAMERA_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP_DCI_P3 = 6,
+
+ /**
+ * <p>RGB color space Display P3 based on SMPTE RP 431-2-2007 and IEC 61966-2.1:1999.</p>
+ */
+ ACAMERA_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP_DISPLAY_P3 = 7,
+
+ /**
+ * <p>RGB color space NTSC, 1953 standard.</p>
+ */
+ ACAMERA_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP_NTSC_1953 = 8,
+
+ /**
+ * <p>RGB color space SMPTE C.</p>
+ */
+ ACAMERA_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP_SMPTE_C = 9,
+
+ /**
+ * <p>RGB color space Adobe RGB (1998).</p>
+ */
+ ACAMERA_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP_ADOBE_RGB = 10,
+
+ /**
+ * <p>RGB color space ProPhoto RGB standardized as ROMM RGB ISO 22028-2:2013.</p>
+ */
+ ACAMERA_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP_PRO_PHOTO_RGB = 11,
+
+ /**
+ * <p>RGB color space ACES standardized as SMPTE ST 2065-1:2012.</p>
+ */
+ ACAMERA_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP_ACES = 12,
+
+ /**
+ * <p>RGB color space ACEScg standardized as Academy S-2014-004.</p>
+ */
+ ACAMERA_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP_ACESCG = 13,
+
+ /**
+ * <p>XYZ color space CIE XYZ. This color space assumes standard illuminant D50 as its white
+ * point.</p>
+ */
+ ACAMERA_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP_CIE_XYZ = 14,
+
+ /**
+ * <p>Lab color space CIE L*a*b*. This color space uses CIE XYZ D50 as a profile conversion
+ * space.</p>
+ */
+ ACAMERA_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP_CIE_LAB = 15,
+
+} acamera_metadata_enum_android_request_available_color_space_profiles_map_t;
+
// ACAMERA_SCALER_AVAILABLE_STREAM_CONFIGURATIONS
typedef enum acamera_metadata_enum_acamera_scaler_available_stream_configurations {
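
The tag documentation above defines each map entry as a (color space, image format, dynamic range profile) triple, with the last element treated as a bitmap of compatible dynamic range profiles. A hypothetical client-side check (assuming the entry was fetched with ACameraMetadata_getConstEntry, as in the sketch near the top of this change):

    #include <camera/NdkCameraMetadata.h>

    // Hypothetical helper: true if the color space is advertised for the given
    // public image format and at least one of the requested dynamic range bits.
    bool supportsColorSpace(const ACameraMetadata_const_entry& entry,
                            int64_t colorSpace, int64_t imageFormat, int64_t dynamicRangeProfile) {
        for (uint32_t i = 0; i + 2 < entry.count; i += 3) {
            if (entry.data.i64[i] == colorSpace &&
                    entry.data.i64[i + 1] == imageFormat &&
                    (entry.data.i64[i + 2] & dynamicRangeProfile) != 0) {
                return true;
            }
        }
        return false;
    }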
diff --git a/camera/tests/fuzzer/camera_SessionStats_fuzzer.cpp b/camera/tests/fuzzer/camera_SessionStats_fuzzer.cpp
index 1e94d47..5866aaf 100644
--- a/camera/tests/fuzzer/camera_SessionStats_fuzzer.cpp
+++ b/camera/tests/fuzzer/camera_SessionStats_fuzzer.cpp
@@ -79,10 +79,14 @@
if (fdp.ConsumeBool()) {
parcelCamStreamStats.writeInt32(streamUseCase);
}
+ int32_t colorSpace = fdp.ConsumeIntegral<int32_t>();
+ if (fdp.ConsumeBool()) {
+ parcelCamStreamStats.writeInt32(colorSpace);
+ }
cameraStreamStats = new CameraStreamStats(width, height, format, maxPreviewFps, dataSpace,
usage, maxHalBuffers, maxAppBuffers,
- dynamicRangeProfile, streamUseCase);
+ dynamicRangeProfile, streamUseCase, colorSpace);
}
parcelCamStreamStats.setDataPosition(0);
diff --git a/services/camera/libcameraservice/Android.bp b/services/camera/libcameraservice/Android.bp
index 981c569..f4ac2a1 100644
--- a/services/camera/libcameraservice/Android.bp
+++ b/services/camera/libcameraservice/Android.bp
@@ -156,14 +156,14 @@
"android.hardware.camera.provider@2.5",
"android.hardware.camera.provider@2.6",
"android.hardware.camera.provider@2.7",
- "android.hardware.camera.provider-V1-ndk",
+ "android.hardware.camera.provider-V2-ndk",
"android.hardware.camera.device@3.2",
"android.hardware.camera.device@3.3",
"android.hardware.camera.device@3.4",
"android.hardware.camera.device@3.5",
"android.hardware.camera.device@3.6",
"android.hardware.camera.device@3.7",
- "android.hardware.camera.device-V1-ndk",
+ "android.hardware.camera.device-V2-ndk",
"media_permission-aidl-cpp",
],
diff --git a/services/camera/libcameraservice/api2/CameraDeviceClient.cpp b/services/camera/libcameraservice/api2/CameraDeviceClient.cpp
index ba26ac4..c49ecb2 100644
--- a/services/camera/libcameraservice/api2/CameraDeviceClient.cpp
+++ b/services/camera/libcameraservice/api2/CameraDeviceClient.cpp
@@ -884,6 +884,7 @@
int64_t streamUseCase = outputConfiguration.getStreamUseCase();
int timestampBase = outputConfiguration.getTimestampBase();
int mirrorMode = outputConfiguration.getMirrorMode();
+ int32_t colorSpace = outputConfiguration.getColorSpace();
res = SessionConfigurationUtils::checkSurfaceType(numBufferProducers, deferredConsumer,
outputConfiguration.getSurfaceType());
@@ -928,7 +929,7 @@
res = SessionConfigurationUtils::createSurfaceFromGbp(streamInfo,
isStreamInfoValid, surface, bufferProducer, mCameraIdStr,
mDevice->infoPhysical(physicalCameraId), sensorPixelModesUsed, dynamicRangeProfile,
- streamUseCase, timestampBase, mirrorMode);
+ streamUseCase, timestampBase, mirrorMode, colorSpace);
if (!res.isOk())
return res;
@@ -975,7 +976,7 @@
&streamId, physicalCameraId, streamInfo.sensorPixelModesUsed, &surfaceIds,
outputConfiguration.getSurfaceSetID(), isShared, isMultiResolution,
/*consumerUsage*/0, streamInfo.dynamicRangeProfile, streamInfo.streamUseCase,
- streamInfo.timestampBase, streamInfo.mirrorMode);
+ streamInfo.timestampBase, streamInfo.mirrorMode, streamInfo.colorSpace);
}
if (err != OK) {
@@ -1027,6 +1028,7 @@
int width, height, format, surfaceType;
uint64_t consumerUsage;
android_dataspace dataSpace;
+ int32_t colorSpace;
status_t err;
binder::Status res;
@@ -1040,6 +1042,7 @@
surfaceType = outputConfiguration.getSurfaceType();
format = HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED;
dataSpace = android_dataspace_t::HAL_DATASPACE_UNKNOWN;
+ colorSpace = ANDROID_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP_UNSPECIFIED;
// Hardcode consumer usage flags: SurfaceView--0x900, SurfaceTexture--0x100.
consumerUsage = GraphicBuffer::USAGE_HW_TEXTURE;
if (surfaceType == OutputConfiguration::SURFACE_TYPE_SURFACE_VIEW) {
@@ -1089,7 +1092,8 @@
outputConfiguration.getDynamicRangeProfile(),
outputConfiguration.getStreamUseCase(),
outputConfiguration.getTimestampBase(),
- outputConfiguration.getMirrorMode()));
+ outputConfiguration.getMirrorMode(),
+ colorSpace));
ALOGV("%s: Camera %s: Successfully created a new stream ID %d for a deferred surface"
" (%d x %d) stream with format 0x%x.",
@@ -1280,6 +1284,7 @@
int64_t streamUseCase = outputConfiguration.getStreamUseCase();
int timestampBase = outputConfiguration.getTimestampBase();
int64_t dynamicRangeProfile = outputConfiguration.getDynamicRangeProfile();
+ int32_t colorSpace = outputConfiguration.getColorSpace();
int mirrorMode = outputConfiguration.getMirrorMode();
for (size_t i = 0; i < newOutputsMap.size(); i++) {
@@ -1288,7 +1293,7 @@
res = SessionConfigurationUtils::createSurfaceFromGbp(outInfo,
/*isStreamInfoValid*/ false, surface, newOutputsMap.valueAt(i), mCameraIdStr,
mDevice->infoPhysical(physicalCameraId), sensorPixelModesUsed, dynamicRangeProfile,
- streamUseCase, timestampBase, mirrorMode);
+ streamUseCase, timestampBase, mirrorMode, colorSpace);
if (!res.isOk())
return res;
@@ -1646,7 +1651,8 @@
const std::vector<int32_t> &sensorPixelModesUsed =
outputConfiguration.getSensorPixelModesUsed();
int64_t dynamicRangeProfile = outputConfiguration.getDynamicRangeProfile();
- int64_t streamUseCase= outputConfiguration.getStreamUseCase();
+ int32_t colorSpace = outputConfiguration.getColorSpace();
+ int64_t streamUseCase = outputConfiguration.getStreamUseCase();
int timestampBase = outputConfiguration.getTimestampBase();
int mirrorMode = outputConfiguration.getMirrorMode();
for (auto& bufferProducer : bufferProducers) {
@@ -1662,7 +1668,7 @@
res = SessionConfigurationUtils::createSurfaceFromGbp(mStreamInfoMap[streamId],
true /*isStreamInfoValid*/, surface, bufferProducer, mCameraIdStr,
mDevice->infoPhysical(physicalId), sensorPixelModesUsed, dynamicRangeProfile,
- streamUseCase, timestampBase, mirrorMode);
+ streamUseCase, timestampBase, mirrorMode, colorSpace);
if (!res.isOk())
return res;
diff --git a/services/camera/libcameraservice/common/CameraDeviceBase.h b/services/camera/libcameraservice/common/CameraDeviceBase.h
index 69514f3..89a2af8 100644
--- a/services/camera/libcameraservice/common/CameraDeviceBase.h
+++ b/services/camera/libcameraservice/common/CameraDeviceBase.h
@@ -192,7 +192,9 @@
int64_t dynamicProfile = ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD,
int64_t streamUseCase = ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_DEFAULT,
int timestampBase = OutputConfiguration::TIMESTAMP_BASE_DEFAULT,
- int mirrorMode = OutputConfiguration::MIRROR_MODE_AUTO) = 0;
+ int mirrorMode = OutputConfiguration::MIRROR_MODE_AUTO,
+ int32_t colorSpace = ANDROID_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP_UNSPECIFIED)
+ = 0;
/**
* Create an output stream of the requested size, format, rotation and
@@ -213,7 +215,9 @@
int64_t dynamicProfile = ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD,
int64_t streamUseCase = ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_DEFAULT,
int timestampBase = OutputConfiguration::TIMESTAMP_BASE_DEFAULT,
- int mirrorMode = OutputConfiguration::MIRROR_MODE_AUTO) = 0;
+ int mirrorMode = OutputConfiguration::MIRROR_MODE_AUTO,
+ int32_t colorSpace = ANDROID_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP_UNSPECIFIED)
+ = 0;
/**
* Create an input stream of width, height, and format.
@@ -235,11 +239,13 @@
bool dataSpaceOverridden;
android_dataspace originalDataSpace;
int64_t dynamicRangeProfile;
+ int32_t colorSpace;
StreamInfo() : width(0), height(0), format(0), formatOverridden(false), originalFormat(0),
dataSpace(HAL_DATASPACE_UNKNOWN), dataSpaceOverridden(false),
originalDataSpace(HAL_DATASPACE_UNKNOWN),
- dynamicRangeProfile(ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD){}
+ dynamicRangeProfile(ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD),
+ colorSpace(ANDROID_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP_UNSPECIFIED) {}
/**
* Check whether the format matches the current or the original one in case
* it got overridden.
diff --git a/services/camera/libcameraservice/device3/Camera3Device.cpp b/services/camera/libcameraservice/device3/Camera3Device.cpp
index ca75102..17a4a44 100644
--- a/services/camera/libcameraservice/device3/Camera3Device.cpp
+++ b/services/camera/libcameraservice/device3/Camera3Device.cpp
@@ -992,7 +992,7 @@
const std::unordered_set<int32_t> &sensorPixelModesUsed,
std::vector<int> *surfaceIds, int streamSetId, bool isShared, bool isMultiResolution,
uint64_t consumerUsage, int64_t dynamicRangeProfile, int64_t streamUseCase,
- int timestampBase, int mirrorMode) {
+ int timestampBase, int mirrorMode, int32_t colorSpace) {
ATRACE_CALL();
if (consumer == nullptr) {
@@ -1006,7 +1006,7 @@
return createStream(consumers, /*hasDeferredConsumer*/ false, width, height,
format, dataSpace, rotation, id, physicalCameraId, sensorPixelModesUsed, surfaceIds,
streamSetId, isShared, isMultiResolution, consumerUsage, dynamicRangeProfile,
- streamUseCase, timestampBase, mirrorMode);
+ streamUseCase, timestampBase, mirrorMode, colorSpace);
}
static bool isRawFormat(int format) {
@@ -1027,7 +1027,7 @@
const String8& physicalCameraId, const std::unordered_set<int32_t> &sensorPixelModesUsed,
std::vector<int> *surfaceIds, int streamSetId, bool isShared, bool isMultiResolution,
uint64_t consumerUsage, int64_t dynamicRangeProfile, int64_t streamUseCase,
- int timestampBase, int mirrorMode) {
+ int timestampBase, int mirrorMode, int32_t colorSpace) {
ATRACE_CALL();
Mutex::Autolock il(mInterfaceLock);
@@ -1036,10 +1036,10 @@
ALOGV("Camera %s: Creating new stream %d: %d x %d, format %d, dataspace %d rotation %d"
" consumer usage %" PRIu64 ", isShared %d, physicalCameraId %s, isMultiResolution %d"
" dynamicRangeProfile 0x%" PRIx64 ", streamUseCase %" PRId64 ", timestampBase %d,"
- " mirrorMode %d",
+ " mirrorMode %d colorSpace %d",
mId.string(), mNextStreamId, width, height, format, dataSpace, rotation,
consumerUsage, isShared, physicalCameraId.string(), isMultiResolution,
- dynamicRangeProfile, streamUseCase, timestampBase, mirrorMode);
+ dynamicRangeProfile, streamUseCase, timestampBase, mirrorMode, colorSpace);
status_t res;
bool wasActive = false;
@@ -1110,7 +1110,7 @@
width, height, blobBufferSize, format, dataSpace, rotation,
mTimestampOffset, physicalCameraId, sensorPixelModesUsed, transport, streamSetId,
isMultiResolution, dynamicRangeProfile, streamUseCase, mDeviceTimeBaseIsRealtime,
- timestampBase, mirrorMode);
+ timestampBase, mirrorMode, colorSpace);
} else if (format == HAL_PIXEL_FORMAT_RAW_OPAQUE) {
bool maxResolution =
sensorPixelModesUsed.find(ANDROID_SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION) !=
@@ -1125,25 +1125,25 @@
width, height, rawOpaqueBufferSize, format, dataSpace, rotation,
mTimestampOffset, physicalCameraId, sensorPixelModesUsed, transport, streamSetId,
isMultiResolution, dynamicRangeProfile, streamUseCase, mDeviceTimeBaseIsRealtime,
- timestampBase, mirrorMode);
+ timestampBase, mirrorMode, colorSpace);
} else if (isShared) {
newStream = new Camera3SharedOutputStream(mNextStreamId, consumers,
width, height, format, consumerUsage, dataSpace, rotation,
mTimestampOffset, physicalCameraId, sensorPixelModesUsed, transport, streamSetId,
mUseHalBufManager, dynamicRangeProfile, streamUseCase, mDeviceTimeBaseIsRealtime,
- timestampBase, mirrorMode);
+ timestampBase, mirrorMode, colorSpace);
} else if (consumers.size() == 0 && hasDeferredConsumer) {
newStream = new Camera3OutputStream(mNextStreamId,
width, height, format, consumerUsage, dataSpace, rotation,
mTimestampOffset, physicalCameraId, sensorPixelModesUsed, transport, streamSetId,
isMultiResolution, dynamicRangeProfile, streamUseCase, mDeviceTimeBaseIsRealtime,
- timestampBase, mirrorMode);
+ timestampBase, mirrorMode, colorSpace);
} else {
newStream = new Camera3OutputStream(mNextStreamId, consumers[0],
width, height, format, dataSpace, rotation,
mTimestampOffset, physicalCameraId, sensorPixelModesUsed, transport, streamSetId,
isMultiResolution, dynamicRangeProfile, streamUseCase, mDeviceTimeBaseIsRealtime,
- timestampBase, mirrorMode);
+ timestampBase, mirrorMode, colorSpace);
}
size_t consumerCount = consumers.size();
@@ -1231,6 +1231,7 @@
streamInfo->dataSpaceOverridden = stream->isDataSpaceOverridden();
streamInfo->originalDataSpace = stream->getOriginalDataSpace();
streamInfo->dynamicRangeProfile = stream->getDynamicRangeProfile();
+ streamInfo->colorSpace = stream->getColorSpace();
return OK;
}
@@ -1880,7 +1881,8 @@
stream->getFormat(), streamMaxPreviewFps, stream->getDataSpace(), usage,
stream->getMaxHalBuffers(),
stream->getMaxTotalBuffers() - stream->getMaxHalBuffers(),
- stream->getDynamicRangeProfile(), streamUseCase);
+ stream->getDynamicRangeProfile(), streamUseCase,
+ stream->getColorSpace());
}
}
}
diff --git a/services/camera/libcameraservice/device3/Camera3Device.h b/services/camera/libcameraservice/device3/Camera3Device.h
index 80b1722..f5e167e 100644
--- a/services/camera/libcameraservice/device3/Camera3Device.h
+++ b/services/camera/libcameraservice/device3/Camera3Device.h
@@ -151,7 +151,9 @@
ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD,
int64_t streamUseCase = ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_DEFAULT,
int timestampBase = OutputConfiguration::TIMESTAMP_BASE_DEFAULT,
- int mirrorMode = OutputConfiguration::MIRROR_MODE_AUTO) override;
+ int mirrorMode = OutputConfiguration::MIRROR_MODE_AUTO,
+ int32_t colorSpace = ANDROID_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP_UNSPECIFIED)
+ override;
status_t createStream(const std::vector<sp<Surface>>& consumers,
bool hasDeferredConsumer, uint32_t width, uint32_t height, int format,
@@ -166,7 +168,9 @@
ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD,
int64_t streamUseCase = ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_DEFAULT,
int timestampBase = OutputConfiguration::TIMESTAMP_BASE_DEFAULT,
- int mirrorMode = OutputConfiguration::MIRROR_MODE_AUTO) override;
+ int mirrorMode = OutputConfiguration::MIRROR_MODE_AUTO,
+ int32_t colorSpace = ANDROID_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP_UNSPECIFIED)
+ override;
status_t createInputStream(
uint32_t width, uint32_t height, int format, bool isMultiResolution,
diff --git a/services/camera/libcameraservice/device3/Camera3IOStreamBase.cpp b/services/camera/libcameraservice/device3/Camera3IOStreamBase.cpp
index f594f84..a78d01e 100644
--- a/services/camera/libcameraservice/device3/Camera3IOStreamBase.cpp
+++ b/services/camera/libcameraservice/device3/Camera3IOStreamBase.cpp
@@ -35,11 +35,12 @@
const String8& physicalCameraId,
const std::unordered_set<int32_t> &sensorPixelModesUsed,
int setId, bool isMultiResolution, int64_t dynamicRangeProfile, int64_t streamUseCase,
- bool deviceTimeBaseIsRealtime, int timestampBase) :
+ bool deviceTimeBaseIsRealtime, int timestampBase, int32_t colorSpace) :
Camera3Stream(id, type,
width, height, maxSize, format, dataSpace, rotation,
physicalCameraId, sensorPixelModesUsed, setId, isMultiResolution,
- dynamicRangeProfile, streamUseCase, deviceTimeBaseIsRealtime, timestampBase),
+ dynamicRangeProfile, streamUseCase, deviceTimeBaseIsRealtime, timestampBase,
+ colorSpace),
mTotalBufferCount(0),
mMaxCachedBufferCount(0),
mHandoutTotalBufferCount(0),
@@ -93,6 +94,7 @@
}
lines.appendFormat(" Dynamic Range Profile: 0x%" PRIx64 "\n",
camera_stream::dynamic_range_profile);
+ lines.appendFormat(" Color Space: %d\n", camera_stream::color_space);
lines.appendFormat(" Stream use case: %" PRId64 "\n", camera_stream::use_case);
lines.appendFormat(" Timestamp base: %d\n", getTimestampBase());
lines.appendFormat(" Frames produced: %d, last timestamp: %" PRId64 " ns\n",
diff --git a/services/camera/libcameraservice/device3/Camera3IOStreamBase.h b/services/camera/libcameraservice/device3/Camera3IOStreamBase.h
index ca1f238..6af0875 100644
--- a/services/camera/libcameraservice/device3/Camera3IOStreamBase.h
+++ b/services/camera/libcameraservice/device3/Camera3IOStreamBase.h
@@ -41,7 +41,8 @@
int64_t dynamicProfile = ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD,
int64_t streamUseCase = ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_DEFAULT,
bool deviceTimeBaseIsRealtime = false,
- int timestampBase = OutputConfiguration::TIMESTAMP_BASE_DEFAULT);
+ int timestampBase = OutputConfiguration::TIMESTAMP_BASE_DEFAULT,
+ int32_t colorSpace = ANDROID_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP_UNSPECIFIED);
public:
diff --git a/services/camera/libcameraservice/device3/Camera3OutputStream.cpp b/services/camera/libcameraservice/device3/Camera3OutputStream.cpp
index 3ef29bc..1abcd86 100644
--- a/services/camera/libcameraservice/device3/Camera3OutputStream.cpp
+++ b/services/camera/libcameraservice/device3/Camera3OutputStream.cpp
@@ -56,12 +56,12 @@
const std::unordered_set<int32_t> &sensorPixelModesUsed, IPCTransport transport,
int setId, bool isMultiResolution, int64_t dynamicRangeProfile,
int64_t streamUseCase, bool deviceTimeBaseIsRealtime, int timestampBase,
- int mirrorMode) :
+ int mirrorMode, int32_t colorSpace) :
Camera3IOStreamBase(id, CAMERA_STREAM_OUTPUT, width, height,
/*maxSize*/0, format, dataSpace, rotation,
physicalCameraId, sensorPixelModesUsed, setId, isMultiResolution,
dynamicRangeProfile, streamUseCase, deviceTimeBaseIsRealtime,
- timestampBase),
+ timestampBase, colorSpace),
mConsumer(consumer),
mTransform(0),
mTraceFirstBuffer(true),
@@ -91,11 +91,11 @@
const std::unordered_set<int32_t> &sensorPixelModesUsed, IPCTransport transport,
int setId, bool isMultiResolution, int64_t dynamicRangeProfile,
int64_t streamUseCase, bool deviceTimeBaseIsRealtime, int timestampBase,
- int mirrorMode) :
+ int mirrorMode, int32_t colorSpace) :
Camera3IOStreamBase(id, CAMERA_STREAM_OUTPUT, width, height, maxSize,
format, dataSpace, rotation, physicalCameraId, sensorPixelModesUsed,
setId, isMultiResolution, dynamicRangeProfile, streamUseCase,
- deviceTimeBaseIsRealtime, timestampBase),
+ deviceTimeBaseIsRealtime, timestampBase, colorSpace),
mConsumer(consumer),
mTransform(0),
mTraceFirstBuffer(true),
@@ -131,12 +131,12 @@
const std::unordered_set<int32_t> &sensorPixelModesUsed, IPCTransport transport,
int setId, bool isMultiResolution, int64_t dynamicRangeProfile,
int64_t streamUseCase, bool deviceTimeBaseIsRealtime, int timestampBase,
- int mirrorMode) :
+ int mirrorMode, int32_t colorSpace) :
Camera3IOStreamBase(id, CAMERA_STREAM_OUTPUT, width, height,
/*maxSize*/0, format, dataSpace, rotation,
physicalCameraId, sensorPixelModesUsed, setId, isMultiResolution,
dynamicRangeProfile, streamUseCase, deviceTimeBaseIsRealtime,
- timestampBase),
+ timestampBase, colorSpace),
mConsumer(nullptr),
mTransform(0),
mTraceFirstBuffer(true),
@@ -180,13 +180,13 @@
int setId, bool isMultiResolution,
int64_t dynamicRangeProfile, int64_t streamUseCase,
bool deviceTimeBaseIsRealtime, int timestampBase,
- int mirrorMode) :
+ int mirrorMode, int32_t colorSpace) :
Camera3IOStreamBase(id, type, width, height,
/*maxSize*/0,
format, dataSpace, rotation,
physicalCameraId, sensorPixelModesUsed, setId, isMultiResolution,
dynamicRangeProfile, streamUseCase, deviceTimeBaseIsRealtime,
- timestampBase),
+ timestampBase, colorSpace),
mTransform(0),
mTraceFirstBuffer(true),
mUseBufferManager(false),
diff --git a/services/camera/libcameraservice/device3/Camera3OutputStream.h b/services/camera/libcameraservice/device3/Camera3OutputStream.h
index db988a0..0d758bc 100644
--- a/services/camera/libcameraservice/device3/Camera3OutputStream.h
+++ b/services/camera/libcameraservice/device3/Camera3OutputStream.h
@@ -96,7 +96,8 @@
int64_t streamUseCase = ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_DEFAULT,
bool deviceTimeBaseIsRealtime = false,
int timestampBase = OutputConfiguration::TIMESTAMP_BASE_DEFAULT,
- int mirrorMode = OutputConfiguration::MIRROR_MODE_AUTO);
+ int mirrorMode = OutputConfiguration::MIRROR_MODE_AUTO,
+ int32_t colorSpace = ANDROID_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP_UNSPECIFIED);
/**
* Set up a stream for formats that have a variable buffer size for the same
* dimensions, such as compressed JPEG.
@@ -113,7 +114,8 @@
int64_t streamUseCase = ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_DEFAULT,
bool deviceTimeBaseIsRealtime = false,
int timestampBase = OutputConfiguration::TIMESTAMP_BASE_DEFAULT,
- int mirrorMode = OutputConfiguration::MIRROR_MODE_AUTO);
+ int mirrorMode = OutputConfiguration::MIRROR_MODE_AUTO,
+ int32_t colorSpace = ANDROID_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP_UNSPECIFIED);
/**
* Set up a stream with deferred consumer for formats that have 2 dimensions, such as
* RAW and YUV. The consumer must be set before using this stream for output. A valid
@@ -129,7 +131,8 @@
int64_t streamUseCase = ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_DEFAULT,
bool deviceTimeBaseIsRealtime = false,
int timestampBase = OutputConfiguration::TIMESTAMP_BASE_DEFAULT,
- int mirrorMode = OutputConfiguration::MIRROR_MODE_AUTO);
+ int mirrorMode = OutputConfiguration::MIRROR_MODE_AUTO,
+ int32_t colorSpace = ANDROID_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP_UNSPECIFIED);
virtual ~Camera3OutputStream();
@@ -273,7 +276,8 @@
int64_t streamUseCase = ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_DEFAULT,
bool deviceTimeBaseIsRealtime = false,
int timestampBase = OutputConfiguration::TIMESTAMP_BASE_DEFAULT,
- int mirrorMode = OutputConfiguration::MIRROR_MODE_AUTO);
+ int mirrorMode = OutputConfiguration::MIRROR_MODE_AUTO,
+ int32_t colorSpace = ANDROID_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP_UNSPECIFIED);
/**
* Note that we release the lock briefly in this function
diff --git a/services/camera/libcameraservice/device3/Camera3SharedOutputStream.cpp b/services/camera/libcameraservice/device3/Camera3SharedOutputStream.cpp
index 9215f23..da45227 100644
--- a/services/camera/libcameraservice/device3/Camera3SharedOutputStream.cpp
+++ b/services/camera/libcameraservice/device3/Camera3SharedOutputStream.cpp
@@ -35,12 +35,12 @@
const std::unordered_set<int32_t> &sensorPixelModesUsed, IPCTransport transport,
int setId, bool useHalBufManager, int64_t dynamicProfile,
int64_t streamUseCase, bool deviceTimeBaseIsRealtime, int timestampBase,
- int mirrorMode) :
+ int mirrorMode, int32_t colorSpace) :
Camera3OutputStream(id, CAMERA_STREAM_OUTPUT, width, height,
format, dataSpace, rotation, physicalCameraId, sensorPixelModesUsed,
transport, consumerUsage, timestampOffset, setId,
/*isMultiResolution*/false, dynamicProfile, streamUseCase,
- deviceTimeBaseIsRealtime, timestampBase, mirrorMode),
+ deviceTimeBaseIsRealtime, timestampBase, mirrorMode, colorSpace),
mUseHalBufManager(useHalBufManager) {
size_t consumerCount = std::min(surfaces.size(), kMaxOutputs);
if (surfaces.size() > consumerCount) {
diff --git a/services/camera/libcameraservice/device3/Camera3SharedOutputStream.h b/services/camera/libcameraservice/device3/Camera3SharedOutputStream.h
index aac3c2a..5167225 100644
--- a/services/camera/libcameraservice/device3/Camera3SharedOutputStream.h
+++ b/services/camera/libcameraservice/device3/Camera3SharedOutputStream.h
@@ -45,7 +45,8 @@
int64_t streamUseCase = ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_DEFAULT,
bool deviceTimeBaseIsRealtime = false,
int timestampBase = OutputConfiguration::TIMESTAMP_BASE_DEFAULT,
- int mirrorMode = OutputConfiguration::MIRROR_MODE_AUTO);
+ int mirrorMode = OutputConfiguration::MIRROR_MODE_AUTO,
+ int32_t colorSpace = ANDROID_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP_UNSPECIFIED);
virtual ~Camera3SharedOutputStream();
diff --git a/services/camera/libcameraservice/device3/Camera3Stream.cpp b/services/camera/libcameraservice/device3/Camera3Stream.cpp
index 88be9ff..4d8495f 100644
--- a/services/camera/libcameraservice/device3/Camera3Stream.cpp
+++ b/services/camera/libcameraservice/device3/Camera3Stream.cpp
@@ -55,7 +55,8 @@
const String8& physicalCameraId,
const std::unordered_set<int32_t> &sensorPixelModesUsed,
int setId, bool isMultiResolution, int64_t dynamicRangeProfile,
- int64_t streamUseCase, bool deviceTimeBaseIsRealtime, int timestampBase) :
+ int64_t streamUseCase, bool deviceTimeBaseIsRealtime, int timestampBase,
+ int32_t colorSpace) :
camera_stream(),
mId(id),
mSetId(setId),
@@ -95,6 +96,7 @@
camera_stream::sensor_pixel_modes_used = sensorPixelModesUsed;
camera_stream::dynamic_range_profile = dynamicRangeProfile;
camera_stream::use_case = streamUseCase;
+ camera_stream::color_space = colorSpace;
if ((format == HAL_PIXEL_FORMAT_BLOB || format == HAL_PIXEL_FORMAT_RAW_OPAQUE) &&
maxSize == 0) {
@@ -135,6 +137,10 @@
return camera_stream::data_space;
}
+int32_t Camera3Stream::getColorSpace() const {
+ return camera_stream::color_space;
+}
+
uint64_t Camera3Stream::getUsage() const {
return mUsage;
}
diff --git a/services/camera/libcameraservice/device3/Camera3Stream.h b/services/camera/libcameraservice/device3/Camera3Stream.h
index 214618a..f32053b 100644
--- a/services/camera/libcameraservice/device3/Camera3Stream.h
+++ b/services/camera/libcameraservice/device3/Camera3Stream.h
@@ -167,6 +167,7 @@
uint32_t getHeight() const;
int getFormat() const;
android_dataspace getDataSpace() const;
+ int32_t getColorSpace() const;
uint64_t getUsage() const;
void setUsage(uint64_t usage);
void setFormatOverride(bool formatOverriden);
@@ -509,7 +510,8 @@
const String8& physicalCameraId,
const std::unordered_set<int32_t> &sensorPixelModesUsed,
int setId, bool isMultiResolution, int64_t dynamicRangeProfile,
- int64_t streamUseCase, bool deviceTimeBaseIsRealtime, int timestampBase);
+ int64_t streamUseCase, bool deviceTimeBaseIsRealtime, int timestampBase,
+ int32_t colorSpace);
wp<Camera3StreamBufferFreedListener> mBufferFreedListener;
diff --git a/services/camera/libcameraservice/device3/Camera3StreamInterface.h b/services/camera/libcameraservice/device3/Camera3StreamInterface.h
index 6812e89..823be2e 100644
--- a/services/camera/libcameraservice/device3/Camera3StreamInterface.h
+++ b/services/camera/libcameraservice/device3/Camera3StreamInterface.h
@@ -67,6 +67,7 @@
std::unordered_set<int32_t> sensor_pixel_modes_used;
int64_t dynamic_range_profile;
int64_t use_case;
+ int32_t color_space;
} camera_stream_t;
typedef struct camera_stream_buffer {
@@ -114,20 +115,24 @@
int64_t streamUseCase;
int timestampBase;
int mirrorMode;
+ int32_t colorSpace;
OutputStreamInfo() :
width(-1), height(-1), format(-1), dataSpace(HAL_DATASPACE_UNKNOWN),
consumerUsage(0),
dynamicRangeProfile(ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD),
streamUseCase(ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_DEFAULT),
timestampBase(OutputConfiguration::TIMESTAMP_BASE_DEFAULT),
- mirrorMode(OutputConfiguration::MIRROR_MODE_AUTO) {}
+ mirrorMode(OutputConfiguration::MIRROR_MODE_AUTO),
+ colorSpace(ANDROID_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP_UNSPECIFIED) {}
OutputStreamInfo(int _width, int _height, int _format, android_dataspace _dataSpace,
uint64_t _consumerUsage, const std::unordered_set<int32_t>& _sensorPixelModesUsed,
- int64_t _dynamicRangeProfile, int _streamUseCase, int _timestampBase, int _mirrorMode) :
+ int64_t _dynamicRangeProfile, int _streamUseCase, int _timestampBase, int _mirrorMode,
+ int32_t _colorSpace) :
width(_width), height(_height), format(_format),
dataSpace(_dataSpace), consumerUsage(_consumerUsage),
sensorPixelModesUsed(_sensorPixelModesUsed), dynamicRangeProfile(_dynamicRangeProfile),
- streamUseCase(_streamUseCase), timestampBase(_timestampBase), mirrorMode(_mirrorMode) {}
+ streamUseCase(_streamUseCase), timestampBase(_timestampBase), mirrorMode(_mirrorMode),
+ colorSpace(_colorSpace) {}
};
// Utility class to lock and unlock a GraphicBuffer
@@ -206,6 +211,7 @@
virtual int getFormat() const = 0;
virtual int64_t getDynamicRangeProfile() const = 0;
virtual android_dataspace getDataSpace() const = 0;
+ virtual int32_t getColorSpace() const = 0;
virtual void setFormatOverride(bool formatOverriden) = 0;
virtual bool isFormatOverridden() const = 0;
virtual int getOriginalFormat() const = 0;
diff --git a/services/camera/libcameraservice/device3/aidl/AidlCamera3Device.cpp b/services/camera/libcameraservice/device3/aidl/AidlCamera3Device.cpp
index 4f4e581..99c067e 100644
--- a/services/camera/libcameraservice/device3/aidl/AidlCamera3Device.cpp
+++ b/services/camera/libcameraservice/device3/aidl/AidlCamera3Device.cpp
@@ -913,6 +913,7 @@
cam3stream->getOriginalFormat() : src->format);
dst.dataSpace = mapToAidlDataspace(cam3stream->isDataSpaceOverridden() ?
cam3stream->getOriginalDataSpace() : src->data_space);
+ dst.colorSpace = src->color_space;
dst.bufferSize = bufferSizes[i];
if (src->physical_camera_id != nullptr) {
diff --git a/services/camera/libcameraservice/hidl/VndkVersionMetadataTags.h b/services/camera/libcameraservice/hidl/VndkVersionMetadataTags.h
index 74b3700..4ea6972 100644
--- a/services/camera/libcameraservice/hidl/VndkVersionMetadataTags.h
+++ b/services/camera/libcameraservice/hidl/VndkVersionMetadataTags.h
@@ -73,6 +73,9 @@
ANDROID_REQUEST_RECOMMENDED_TEN_BIT_DYNAMIC_RANGE_PROFILE,
ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES,
} },
+ {34, {
+ ANDROID_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP,
+ } },
};
/**
diff --git a/services/camera/libcameraservice/libcameraservice_fuzzer/Android.bp b/services/camera/libcameraservice/libcameraservice_fuzzer/Android.bp
index d909624..b1bf41e 100644
--- a/services/camera/libcameraservice/libcameraservice_fuzzer/Android.bp
+++ b/services/camera/libcameraservice/libcameraservice_fuzzer/Android.bp
@@ -52,7 +52,7 @@
"android.hardware.camera.provider@2.5",
"android.hardware.camera.provider@2.6",
"android.hardware.camera.provider@2.7",
- "android.hardware.camera.provider-V1-ndk",
+ "android.hardware.camera.provider-V2-ndk",
"android.hardware.camera.device@1.0",
"android.hardware.camera.device@3.2",
"android.hardware.camera.device@3.3",
diff --git a/services/camera/libcameraservice/tests/Android.bp b/services/camera/libcameraservice/tests/Android.bp
index 4d7798c..5e2a3fb 100644
--- a/services/camera/libcameraservice/tests/Android.bp
+++ b/services/camera/libcameraservice/tests/Android.bp
@@ -49,7 +49,7 @@
"android.hardware.camera.provider@2.5",
"android.hardware.camera.provider@2.6",
"android.hardware.camera.provider@2.7",
- "android.hardware.camera.provider-V1-ndk",
+ "android.hardware.camera.provider-V2-ndk",
"android.hardware.camera.device@1.0",
"android.hardware.camera.device@3.2",
"android.hardware.camera.device@3.4",
diff --git a/services/camera/libcameraservice/utils/SessionConfigurationUtils.cpp b/services/camera/libcameraservice/utils/SessionConfigurationUtils.cpp
index ed490a8..f9afd41 100644
--- a/services/camera/libcameraservice/utils/SessionConfigurationUtils.cpp
+++ b/services/camera/libcameraservice/utils/SessionConfigurationUtils.cpp
@@ -282,6 +282,61 @@
}
}
+bool deviceReportsColorSpaces(const CameraMetadata& staticInfo) {
+ camera_metadata_ro_entry_t entry = staticInfo.find(ANDROID_REQUEST_AVAILABLE_CAPABILITIES);
+ for (size_t i = 0; i < entry.count; ++i) {
+ uint8_t capability = entry.data.u8[i];
+ if (capability == ANDROID_REQUEST_AVAILABLE_CAPABILITIES_COLOR_SPACE_PROFILES) {
+ return true;
+ }
+ }
+
+ return false;
+}
+
+bool isColorSpaceSupported(int32_t colorSpace, int32_t format, android_dataspace dataSpace,
+ int64_t dynamicRangeProfile, const CameraMetadata& staticInfo) {
+ int64_t colorSpace64 = colorSpace;
+ int64_t format64 = format;
+
+ // Translate HAL format + data space to public format
+ if (format == HAL_PIXEL_FORMAT_BLOB && dataSpace == HAL_DATASPACE_V0_JFIF) {
+ format64 = 0x100; // JPEG
+ } else if (format == HAL_PIXEL_FORMAT_BLOB
+ && dataSpace == static_cast<android_dataspace>(HAL_DATASPACE_HEIF)) {
+ format64 = 0x48454946; // HEIC
+ } else if (format == HAL_PIXEL_FORMAT_BLOB
+ && dataSpace == static_cast<android_dataspace>(HAL_DATASPACE_DYNAMIC_DEPTH)) {
+ format64 = 0x69656963; // DEPTH_JPEG
+ } else if (format == HAL_PIXEL_FORMAT_BLOB && dataSpace == HAL_DATASPACE_DEPTH) {
+ return false; // DEPTH_POINT_CLOUD, not applicable
+ } else if (format == HAL_PIXEL_FORMAT_Y16 && dataSpace == HAL_DATASPACE_DEPTH) {
+ return false; // DEPTH16, not applicable
+ } else if (format == HAL_PIXEL_FORMAT_RAW16 && dataSpace == HAL_DATASPACE_DEPTH) {
+ return false; // RAW_DEPTH, not applicable
+ } else if (format == HAL_PIXEL_FORMAT_RAW10 && dataSpace == HAL_DATASPACE_DEPTH) {
+ return false; // RAW_DEPTH10, not applicable
+ }
+
+ camera_metadata_ro_entry_t entry =
+ staticInfo.find(ANDROID_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP);
+ for (size_t i = 0; i < entry.count; i += 3) {
+ bool isFormatCompatible = (format64 == entry.data.i64[i + 1]);
+ bool isDynamicProfileCompatible =
+ (dynamicRangeProfile & entry.data.i64[i + 2]) != 0;
+
+ if (colorSpace64 == entry.data.i64[i]
+ && isFormatCompatible
+ && isDynamicProfileCompatible) {
+ return true;
+ }
+ }
+
+ ALOGE("Color space %d, image format %" PRId64 ", and dynamic range 0x%" PRIx64
+ " combination not found", colorSpace, format64, dynamicRangeProfile);
+ return false;
+}
+
bool isPublicFormat(int32_t format)
{
switch(format) {
@@ -336,7 +391,8 @@
sp<Surface>& surface, const sp<IGraphicBufferProducer>& gbp,
const String8 &logicalCameraId, const CameraMetadata &physicalCameraMetadata,
const std::vector<int32_t> &sensorPixelModesUsed, int64_t dynamicRangeProfile,
- int64_t streamUseCase, int timestampBase, int mirrorMode) {
+ int64_t streamUseCase, int timestampBase, int mirrorMode,
+ int32_t colorSpace) {
// bufferProducer must be non-null
if (gbp == nullptr) {
String8 msg = String8::format("Camera %s: Surface is NULL", logicalCameraId.string());
@@ -450,6 +506,16 @@
ALOGE("%s: %s", __FUNCTION__, msg.string());
return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT, msg.string());
}
+ if (colorSpace != ANDROID_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP_UNSPECIFIED &&
+ SessionConfigurationUtils::deviceReportsColorSpaces(physicalCameraMetadata) &&
+ !SessionConfigurationUtils::isColorSpaceSupported(colorSpace, format, dataSpace,
+ dynamicRangeProfile, physicalCameraMetadata)) {
+ String8 msg = String8::format("Camera %s: Color space %d not supported, failed to "
+ "create output stream (pixel format %d dynamic range profile %" PRId64 ")",
+ logicalCameraId.string(), colorSpace, format, dynamicRangeProfile);
+ ALOGE("%s: %s", __FUNCTION__, msg.string());
+ return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT, msg.string());
+ }
if (!SessionConfigurationUtils::isStreamUseCaseSupported(streamUseCase,
physicalCameraMetadata)) {
String8 msg = String8::format("Camera %s: stream use case %" PRId64 " not supported,"
@@ -483,6 +549,7 @@
streamInfo.streamUseCase = streamUseCase;
streamInfo.timestampBase = timestampBase;
streamInfo.mirrorMode = mirrorMode;
+ streamInfo.colorSpace = colorSpace;
return binder::Status::ok();
}
if (width != streamInfo.width) {
@@ -538,6 +605,7 @@
camera3::Camera3OutputStream::applyZSLUsageQuirk(streamInfo.format, &u);
stream->usage = AidlCamera3Device::mapToAidlConsumerUsage(u);
stream->dataSpace = AidlCamera3Device::mapToAidlDataspace(streamInfo.dataSpace);
+ stream->colorSpace = streamInfo.colorSpace;
stream->rotation = AidlCamera3Device::mapToAidlStreamRotation(rotation);
stream->id = -1; // Invalid stream id
stream->physicalCameraId = std::string(physicalId.string());
@@ -635,6 +703,7 @@
String8 physicalCameraId = String8(it.getPhysicalCameraId());
int64_t dynamicRangeProfile = it.getDynamicRangeProfile();
+ int32_t colorSpace = it.getColorSpace();
std::vector<int32_t> sensorPixelModesUsed = it.getSensorPixelModesUsed();
const CameraMetadata &physicalDeviceInfo = getMetadata(physicalCameraId,
overrideForPerfClass);
@@ -693,7 +762,7 @@
sp<Surface> surface;
res = createSurfaceFromGbp(streamInfo, isStreamInfoValid, surface, bufferProducer,
logicalCameraId, metadataChosen, sensorPixelModesUsed, dynamicRangeProfile,
- streamUseCase, timestampBase, mirrorMode);
+ streamUseCase, timestampBase, mirrorMode, colorSpace);
if (!res.isOk())
return res;
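
The new validation only rejects a requested color space when the device advertises the COLOR_SPACE_PROFILES capability and the (color space, format, dynamic range) combination is missing from the map. A test-style sketch of the two helpers added here (assumptions: the android::camera3::SessionConfigurationUtils namespace used by the callers above, ANDROID_* enum names mirroring the ACAMERA_* values, and an include path relative to libcameraservice; this is not one of the CTS/VTS tests listed in the commit message):

    #include <system/graphics.h>
    #include <camera/CameraMetadata.h>
    #include <system/camera_metadata_tags.h>
    #include "utils/SessionConfigurationUtils.h"

    using android::CameraMetadata;
    namespace SCU = android::camera3::SessionConfigurationUtils;

    static CameraMetadata makeStaticInfo() {
        CameraMetadata info;
        // Advertise the color space profiles capability...
        uint8_t caps[] = { ANDROID_REQUEST_AVAILABLE_CAPABILITIES_COLOR_SPACE_PROFILES };
        info.update(ANDROID_REQUEST_AVAILABLE_CAPABILITIES, caps, 1);
        // ...and one profile: DISPLAY_P3 for JPEG (public format 0x100), standard dynamic range.
        int64_t profiles[] = {
                ANDROID_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP_DISPLAY_P3,
                0x100,
                ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD };
        info.update(ANDROID_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP, profiles, 3);
        return info;
    }

    static void checkColorSpaceSupport() {
        CameraMetadata info = makeStaticInfo();
        bool reports = SCU::deviceReportsColorSpaces(info);      // expected: true
        bool p3Jpeg = SCU::isColorSpaceSupported(
                ANDROID_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP_DISPLAY_P3,
                HAL_PIXEL_FORMAT_BLOB, HAL_DATASPACE_V0_JFIF,
                ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD, info);  // expected: true
        // Any color space missing from the map causes createSurfaceFromGbp() to
        // return ERROR_ILLEGAL_ARGUMENT, as shown in the hunk above.
        (void)reports; (void)p3Jpeg;
    }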
diff --git a/services/camera/libcameraservice/utils/SessionConfigurationUtils.h b/services/camera/libcameraservice/utils/SessionConfigurationUtils.h
index a127c7b..264045e 100644
--- a/services/camera/libcameraservice/utils/SessionConfigurationUtils.h
+++ b/services/camera/libcameraservice/utils/SessionConfigurationUtils.h
@@ -98,7 +98,8 @@
sp<Surface>& surface, const sp<IGraphicBufferProducer>& gbp,
const String8 &logicalCameraId, const CameraMetadata &physicalCameraMetadata,
const std::vector<int32_t> &sensorPixelModesUsed, int64_t dynamicRangeProfile,
- int64_t streamUseCase, int timestampBase, int mirrorMode);
+ int64_t streamUseCase, int timestampBase, int mirrorMode,
+ int32_t colorSpace);
//check if format is 10-bit output compatible
bool is10bitCompatibleFormat(int32_t format);
@@ -109,6 +110,11 @@
// Check if the device supports a given dynamicRangeProfile
bool isDynamicRangeProfileSupported(int64_t dynamicRangeProfile, const CameraMetadata& staticMeta);
+bool deviceReportsColorSpaces(const CameraMetadata& staticMeta);
+
+bool isColorSpaceSupported(int32_t colorSpace, int32_t format, android_dataspace dataSpace,
+ int64_t dynamicRangeProfile, const CameraMetadata& staticMeta);
+
bool isStreamUseCaseSupported(int64_t streamUseCase, const CameraMetadata &deviceInfo);
void mapStreamInfo(const OutputStreamInfo &streamInfo,