Camera: Support feature combination query without surfaces
- Support non-completed MediaRecorder, MediaCodec, and ImageReader
output targets in addition to SurfaceView and SurfaceTexture.
A non-completed target means that the actual surface isn't
available yet.
- The non-completed ImageReader output target can specify
format, usage, and dataspace (see the sketch below).
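
As a rough illustration only (not framework API), the completeness
rule introduced here can be restated as follows. The helper name is
hypothetical; it just mirrors OutputConfiguration::isComplete() and
the new SurfaceType values from this change:

  #include <cstddef>

  // Sketch only: a target is "non-completed" when its consumer type is
  // already known (MediaRecorder, MediaCodec, or ImageReader) but no
  // buffer producer has been attached yet.
  enum SurfaceType {
      SURFACE_TYPE_UNKNOWN = -1,
      SURFACE_TYPE_SURFACE_VIEW = 0,
      SURFACE_TYPE_SURFACE_TEXTURE = 1,
      SURFACE_TYPE_MEDIA_RECORDER = 2,
      SURFACE_TYPE_MEDIA_CODEC = 3,
      SURFACE_TYPE_IMAGE_READER = 4,
  };

  bool isCompleteTarget(SurfaceType type, size_t numBufferProducers) {
      return !((type == SURFACE_TYPE_MEDIA_RECORDER ||
                type == SURFACE_TYPE_MEDIA_CODEC ||
                type == SURFACE_TYPE_IMAGE_READER) &&
               numBufferProducers == 0);
  }

The camera service accepts such a non-completed target without a
surface; for the ImageReader case it derives the stream info from the
configuration's format, dataspace, and usage instead of querying an
attached surface.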
Test: atest FeatureCombinationTest
Test: atest CameraDeviceSetupTest
Bug: 298033056
Bug: 309627704
Change-Id: Ia2a7c956fcf05b39756bbb07f938bfcbabff4522
diff --git a/camera/camera2/OutputConfiguration.cpp b/camera/camera2/OutputConfiguration.cpp
index 73b153c..2d1af32 100644
--- a/camera/camera2/OutputConfiguration.cpp
+++ b/camera/camera2/OutputConfiguration.cpp
@@ -25,6 +25,7 @@
#include <binder/Parcel.h>
#include <gui/view/Surface.h>
#include <system/camera_metadata.h>
+#include <system/graphics.h>
#include <utils/String8.h>
@@ -102,6 +103,25 @@
return mUseReadoutTimestamp;
}
+int OutputConfiguration::getFormat() const {
+ return mFormat;
+}
+
+int OutputConfiguration::getDataspace() const {
+ return mDataspace;
+}
+
+int64_t OutputConfiguration::getUsage() const {
+ return mUsage;
+}
+
+bool OutputConfiguration::isComplete() const {
+ return !((mSurfaceType == SURFACE_TYPE_MEDIA_RECORDER ||
+ mSurfaceType == SURFACE_TYPE_MEDIA_CODEC ||
+ mSurfaceType == SURFACE_TYPE_IMAGE_READER) &&
+ mGbps.empty());
+}
+
OutputConfiguration::OutputConfiguration() :
mRotation(INVALID_ROTATION),
mSurfaceSetID(INVALID_SET_ID),
@@ -116,7 +136,10 @@
mStreamUseCase(ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_DEFAULT),
mTimestampBase(TIMESTAMP_BASE_DEFAULT),
mMirrorMode(MIRROR_MODE_AUTO),
- mUseReadoutTimestamp(false) {
+ mUseReadoutTimestamp(false),
+ mFormat(HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED),
+ mDataspace(0),
+ mUsage(0) {
}
OutputConfiguration::OutputConfiguration(const android::Parcel& parcel) :
@@ -234,6 +257,24 @@
return err;
}
+ int format = HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED;
+ if ((err = parcel->readInt32(&format)) != OK) {
+ ALOGE("%s: Failed to read format from parcel", __FUNCTION__);
+ return err;
+ }
+
+ int dataspace = 0;
+ if ((err = parcel->readInt32(&dataspace)) != OK) {
+ ALOGE("%s: Failed to read dataspace from parcel", __FUNCTION__);
+ return err;
+ }
+
+ int64_t usage = 0;
+ if ((err = parcel->readInt64(&usage)) != OK) {
+ ALOGE("%s: Failed to read usage flag from parcel", __FUNCTION__);
+ return err;
+ }
+
mRotation = rotation;
mSurfaceSetID = setID;
mSurfaceType = surfaceType;
@@ -256,13 +297,17 @@
mSensorPixelModesUsed = std::move(sensorPixelModesUsed);
mDynamicRangeProfile = dynamicProfile;
mColorSpace = colorSpace;
+ mFormat = format;
+ mDataspace = dataspace;
+ mUsage = usage;
ALOGV("%s: OutputConfiguration: rotation = %d, setId = %d, surfaceType = %d,"
" physicalCameraId = %s, isMultiResolution = %d, streamUseCase = %" PRId64
- ", timestampBase = %d, mirrorMode = %d, useReadoutTimestamp = %d",
+ ", timestampBase = %d, mirrorMode = %d, useReadoutTimestamp = %d, format = %d, "
+ "dataspace = %d, usage = %" PRId64,
__FUNCTION__, mRotation, mSurfaceSetID, mSurfaceType,
mPhysicalCameraId.c_str(), mIsMultiResolution, mStreamUseCase, timestampBase,
- mMirrorMode, mUseReadoutTimestamp);
+ mMirrorMode, mUseReadoutTimestamp, mFormat, mDataspace, mUsage);
return err;
}
@@ -283,6 +328,9 @@
mTimestampBase = TIMESTAMP_BASE_DEFAULT;
mMirrorMode = MIRROR_MODE_AUTO;
mUseReadoutTimestamp = false;
+ mFormat = HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED;
+ mDataspace = 0;
+ mUsage = 0;
}
OutputConfiguration::OutputConfiguration(
@@ -296,7 +344,9 @@
mColorSpace(ANDROID_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP_UNSPECIFIED),
mStreamUseCase(ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_DEFAULT),
mTimestampBase(TIMESTAMP_BASE_DEFAULT),
- mMirrorMode(MIRROR_MODE_AUTO), mUseReadoutTimestamp(false) { }
+ mMirrorMode(MIRROR_MODE_AUTO), mUseReadoutTimestamp(false),
+ mFormat(HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED), mDataspace(0),
+ mUsage(0) { }
status_t OutputConfiguration::writeToParcel(android::Parcel* parcel) const {
@@ -362,6 +412,15 @@
err = parcel->writeInt32(mUseReadoutTimestamp ? 1 : 0);
if (err != OK) return err;
+ err = parcel->writeInt32(mFormat);
+ if (err != OK) return err;
+
+ err = parcel->writeInt32(mDataspace);
+ if (err != OK) return err;
+
+ err = parcel->writeInt64(mUsage);
+ if (err != OK) return err;
+
return OK;
}
diff --git a/camera/include/camera/camera2/OutputConfiguration.h b/camera/include/camera/camera2/OutputConfiguration.h
index 3f74b4a..83ce39d 100644
--- a/camera/include/camera/camera2/OutputConfiguration.h
+++ b/camera/include/camera/camera2/OutputConfiguration.h
@@ -35,10 +35,13 @@
static const int INVALID_ROTATION;
static const int INVALID_SET_ID;
- enum SurfaceType{
+ enum SurfaceType {
SURFACE_TYPE_UNKNOWN = -1,
SURFACE_TYPE_SURFACE_VIEW = 0,
- SURFACE_TYPE_SURFACE_TEXTURE = 1
+ SURFACE_TYPE_SURFACE_TEXTURE = 1,
+ SURFACE_TYPE_MEDIA_RECORDER = 2,
+ SURFACE_TYPE_MEDIA_CODEC = 3,
+ SURFACE_TYPE_IMAGE_READER = 4
};
enum TimestampBaseType {
TIMESTAMP_BASE_DEFAULT = 0,
@@ -71,6 +74,10 @@
int getTimestampBase() const;
int getMirrorMode() const;
bool useReadoutTimestamp() const;
+ int getFormat() const;
+ int getDataspace() const;
+ int64_t getUsage() const;
+ bool isComplete() const;
// set of sensor pixel mode resolutions allowed {MAX_RESOLUTION, DEFAULT_MODE};
const std::vector<int32_t>& getSensorPixelModesUsed() const;
@@ -98,7 +105,7 @@
OutputConfiguration(const std::vector<sp<IGraphicBufferProducer>>& gbps,
int rotation, const std::string& physicalCameraId,
int surfaceSetID = INVALID_SET_ID,
- int surfaceType = OutputConfiguration::SURFACE_TYPE_UNKNOWN, int width = 0,
+ int surfaceType = SURFACE_TYPE_UNKNOWN, int width = 0,
int height = 0, bool isShared = false);
bool operator == (const OutputConfiguration& other) const {
@@ -118,7 +125,10 @@
mStreamUseCase == other.mStreamUseCase &&
mTimestampBase == other.mTimestampBase &&
mMirrorMode == other.mMirrorMode &&
- mUseReadoutTimestamp == other.mUseReadoutTimestamp);
+ mUseReadoutTimestamp == other.mUseReadoutTimestamp &&
+ mFormat == other.mFormat &&
+ mDataspace == other.mDataspace &&
+ mUsage == other.mUsage);
}
bool operator != (const OutputConfiguration& other) const {
return !(*this == other);
@@ -173,6 +183,15 @@
if (mUseReadoutTimestamp != other.mUseReadoutTimestamp) {
return mUseReadoutTimestamp < other.mUseReadoutTimestamp;
}
+ if (mFormat != other.mFormat) {
+ return mFormat < other.mFormat;
+ }
+ if (mDataspace != other.mDataspace) {
+ return mDataspace < other.mDataspace;
+ }
+ if (mUsage != other.mUsage) {
+ return mUsage < other.mUsage;
+ }
return gbpsLessThan(other);
}
@@ -203,6 +222,9 @@
int mTimestampBase;
int mMirrorMode;
bool mUseReadoutTimestamp;
+ int mFormat;
+ int mDataspace;
+ int64_t mUsage;
};
} // namespace params
} // namespace camera2
diff --git a/services/camera/libcameraservice/api2/CameraDeviceClient.cpp b/services/camera/libcameraservice/api2/CameraDeviceClient.cpp
index 7c2f71c..59828fb 100644
--- a/services/camera/libcameraservice/api2/CameraDeviceClient.cpp
+++ b/services/camera/libcameraservice/api2/CameraDeviceClient.cpp
@@ -892,6 +892,11 @@
Mutex::Autolock icl(mBinderSerializationLock);
+ if (!outputConfiguration.isComplete()) {
+ return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT,
+ "OutputConfiguration isn't valid!");
+ }
+
const std::vector<sp<IGraphicBufferProducer>>& bufferProducers =
outputConfiguration.getGraphicBufferProducers();
size_t numBufferProducers = bufferProducers.size();
@@ -908,7 +913,7 @@
bool useReadoutTimestamp = outputConfiguration.useReadoutTimestamp();
res = SessionConfigurationUtils::checkSurfaceType(numBufferProducers, deferredConsumer,
- outputConfiguration.getSurfaceType());
+ outputConfiguration.getSurfaceType(), /*isConfigurationComplete*/true);
if (!res.isOk()) {
return res;
}
@@ -951,7 +956,7 @@
res = SessionConfigurationUtils::createSurfaceFromGbp(streamInfo,
isStreamInfoValid, surface, bufferProducer, mCameraIdStr,
mDevice->infoPhysical(physicalCameraId), sensorPixelModesUsed, dynamicRangeProfile,
- streamUseCase, timestampBase, mirrorMode, colorSpace);
+ streamUseCase, timestampBase, mirrorMode, colorSpace, /*respectSurfaceSize*/false);
if (!res.isOk())
return res;
@@ -1064,6 +1069,10 @@
if (!mDevice.get()) {
return STATUS_ERROR(CameraService::ERROR_DISCONNECTED, "Camera device no longer alive");
}
+ if (!outputConfiguration.isComplete()) {
+ return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT,
+ "OutputConfiguration isn't valid!");
+ }
// Infer the surface info for deferred surface stream creation.
width = outputConfiguration.getWidth();
@@ -1256,6 +1265,10 @@
if (!mDevice.get()) {
return STATUS_ERROR(CameraService::ERROR_DISCONNECTED, "Camera device no longer alive");
}
+ if (!outputConfiguration.isComplete()) {
+ return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT,
+ "OutputConfiguration isn't valid!");
+ }
const std::vector<sp<IGraphicBufferProducer> >& bufferProducers =
outputConfiguration.getGraphicBufferProducers();
@@ -1323,7 +1336,7 @@
res = SessionConfigurationUtils::createSurfaceFromGbp(outInfo,
/*isStreamInfoValid*/ false, surface, newOutputsMap.valueAt(i), mCameraIdStr,
mDevice->infoPhysical(physicalCameraId), sensorPixelModesUsed, dynamicRangeProfile,
- streamUseCase, timestampBase, mirrorMode, colorSpace);
+ streamUseCase, timestampBase, mirrorMode, colorSpace, /*respectSurfaceSize*/false);
if (!res.isOk())
return res;
@@ -1636,6 +1649,11 @@
Mutex::Autolock icl(mBinderSerializationLock);
+ if (!outputConfiguration.isComplete()) {
+ return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT,
+ "OutputConfiguration isn't valid!");
+ }
+
const std::vector<sp<IGraphicBufferProducer> >& bufferProducers =
outputConfiguration.getGraphicBufferProducers();
const std::string &physicalId = outputConfiguration.getPhysicalCameraId();
@@ -1701,7 +1719,7 @@
res = SessionConfigurationUtils::createSurfaceFromGbp(mStreamInfoMap[streamId],
true /*isStreamInfoValid*/, surface, bufferProducer, mCameraIdStr,
mDevice->infoPhysical(physicalId), sensorPixelModesUsed, dynamicRangeProfile,
- streamUseCase, timestampBase, mirrorMode, colorSpace);
+ streamUseCase, timestampBase, mirrorMode, colorSpace, /*respectSurfaceSize*/false);
if (!res.isOk())
return res;
diff --git a/services/camera/libcameraservice/utils/SessionConfigurationUtils.cpp b/services/camera/libcameraservice/utils/SessionConfigurationUtils.cpp
index 11ef9b7..23aed6e 100644
--- a/services/camera/libcameraservice/utils/SessionConfigurationUtils.cpp
+++ b/services/camera/libcameraservice/utils/SessionConfigurationUtils.cpp
@@ -432,7 +432,7 @@
const std::string &logicalCameraId, const CameraMetadata &physicalCameraMetadata,
const std::vector<int32_t> &sensorPixelModesUsed, int64_t dynamicRangeProfile,
int64_t streamUseCase, int timestampBase, int mirrorMode,
- int32_t colorSpace) {
+ int32_t colorSpace, bool respectSurfaceSize) {
// bufferProducer must be non-null
if (gbp == nullptr) {
std::string msg = fmt::sprintf("Camera %s: Surface is NULL", logicalCameraId.c_str());
@@ -529,8 +529,10 @@
// we can use the default stream configuration map
foundInMaxRes = true;
}
- // Round dimensions to the nearest dimensions available for this format
- if (flexibleConsumer && isPublicFormat(format) &&
+ // Round dimensions to the nearest dimensions available for this format.
+ // Only do the rounding if the client doesn't ask to respect the surface
+ // size.
+ if (flexibleConsumer && isPublicFormat(format) && !respectSurfaceSize &&
!SessionConfigurationUtils::roundBufferDimensionNearest(width, height,
format, dataSpace, physicalCameraMetadata, foundInMaxRes, /*out*/&width,
/*out*/&height)) {
@@ -753,6 +755,7 @@
const std::vector<sp<IGraphicBufferProducer>>& bufferProducers =
it.getGraphicBufferProducers();
bool deferredConsumer = it.isDeferred();
+ bool isConfigurationComplete = it.isComplete();
const std::string &physicalCameraId = it.getPhysicalCameraId();
int64_t dynamicRangeProfile = it.getDynamicRangeProfile();
@@ -768,7 +771,8 @@
int32_t groupId = it.isMultiResolution() ? it.getSurfaceSetID() : -1;
OutputStreamInfo streamInfo;
- res = checkSurfaceType(numBufferProducers, deferredConsumer, it.getSurfaceType());
+ res = checkSurfaceType(numBufferProducers, deferredConsumer, it.getSurfaceType(),
+ isConfigurationComplete);
if (!res.isOk()) {
return res;
}
@@ -781,15 +785,38 @@
int64_t streamUseCase = it.getStreamUseCase();
int timestampBase = it.getTimestampBase();
int mirrorMode = it.getMirrorMode();
- if (deferredConsumer) {
+ // If the configuration is a deferred consumer, or a not yet completed
+ // configuration with no buffer producers attached.
+ if (deferredConsumer || (!isConfigurationComplete && numBufferProducers == 0)) {
streamInfo.width = it.getWidth();
streamInfo.height = it.getHeight();
- streamInfo.format = HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED;
- streamInfo.dataSpace = android_dataspace_t::HAL_DATASPACE_UNKNOWN;
auto surfaceType = it.getSurfaceType();
- streamInfo.consumerUsage = GraphicBuffer::USAGE_HW_TEXTURE;
- if (surfaceType == OutputConfiguration::SURFACE_TYPE_SURFACE_VIEW) {
- streamInfo.consumerUsage |= GraphicBuffer::USAGE_HW_COMPOSER;
+ switch (surfaceType) {
+ case OutputConfiguration::SURFACE_TYPE_SURFACE_TEXTURE:
+ streamInfo.consumerUsage = GraphicBuffer::USAGE_HW_TEXTURE;
+ streamInfo.format = HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED;
+ streamInfo.dataSpace = android_dataspace_t::HAL_DATASPACE_UNKNOWN;
+ break;
+ case OutputConfiguration::SURFACE_TYPE_SURFACE_VIEW:
+ streamInfo.consumerUsage = GraphicBuffer::USAGE_HW_TEXTURE
+ | GraphicBuffer::USAGE_HW_COMPOSER;
+ streamInfo.format = HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED;
+ streamInfo.dataSpace = android_dataspace_t::HAL_DATASPACE_UNKNOWN;
+ break;
+ case OutputConfiguration::SURFACE_TYPE_MEDIA_RECORDER:
+ case OutputConfiguration::SURFACE_TYPE_MEDIA_CODEC:
+ streamInfo.consumerUsage = GraphicBuffer::USAGE_HW_VIDEO_ENCODER;
+ streamInfo.format = HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED;
+ streamInfo.dataSpace = android_dataspace_t::HAL_DATASPACE_UNKNOWN;
+ break;
+ case OutputConfiguration::SURFACE_TYPE_IMAGE_READER:
+ streamInfo.consumerUsage = it.getUsage();
+ streamInfo.format = it.getFormat();
+ streamInfo.dataSpace = (android_dataspace)it.getDataspace();
+ break;
+ default:
+ return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT,
+ "Invalid surface type.");
}
streamInfo.dynamicRangeProfile = it.getDynamicRangeProfile();
if (checkAndOverrideSensorPixelModesUsed(sensorPixelModesUsed,
@@ -815,7 +842,8 @@
sp<Surface> surface;
res = createSurfaceFromGbp(streamInfo, isStreamInfoValid, surface, bufferProducer,
logicalCameraId, metadataChosen, sensorPixelModesUsed, dynamicRangeProfile,
- streamUseCase, timestampBase, mirrorMode, colorSpace);
+ streamUseCase, timestampBase, mirrorMode, colorSpace,
+ /*respectSurfaceSize*/true);
if (!res.isOk())
return res;
@@ -912,22 +940,37 @@
}
binder::Status checkSurfaceType(size_t numBufferProducers,
- bool deferredConsumer, int surfaceType) {
+ bool deferredConsumer, int surfaceType, bool isConfigurationComplete) {
if (numBufferProducers > MAX_SURFACES_PER_STREAM) {
ALOGE("%s: GraphicBufferProducer count %zu for stream exceeds limit of %d",
__FUNCTION__, numBufferProducers, MAX_SURFACES_PER_STREAM);
return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT, "Surface count is too high");
- } else if ((numBufferProducers == 0) && (!deferredConsumer)) {
+ } else if ((numBufferProducers == 0) && (!deferredConsumer) && isConfigurationComplete) {
ALOGE("%s: Number of consumers cannot be smaller than 1", __FUNCTION__);
return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT, "No valid consumers.");
}
- bool validSurfaceType = ((surfaceType == OutputConfiguration::SURFACE_TYPE_SURFACE_VIEW) ||
- (surfaceType == OutputConfiguration::SURFACE_TYPE_SURFACE_TEXTURE));
-
- if (deferredConsumer && !validSurfaceType) {
- ALOGE("%s: Target surface has invalid surfaceType = %d.", __FUNCTION__, surfaceType);
- return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT, "Target Surface is invalid");
+ if (deferredConsumer) {
+ bool validSurfaceType = (
+ (surfaceType == OutputConfiguration::SURFACE_TYPE_SURFACE_VIEW) ||
+ (surfaceType == OutputConfiguration::SURFACE_TYPE_SURFACE_TEXTURE));
+ if (!validSurfaceType) {
+ std::string msg = fmt::sprintf("Deferred target surface has invalid "
+ "surfaceType = %d.", surfaceType);
+ ALOGE("%s: %s", __FUNCTION__, msg.c_str());
+ return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT, msg.c_str());
+ }
+ } else if (!isConfigurationComplete && numBufferProducers == 0) {
+ bool validSurfaceType = (
+ (surfaceType == OutputConfiguration::SURFACE_TYPE_MEDIA_RECORDER) ||
+ (surfaceType == OutputConfiguration::SURFACE_TYPE_MEDIA_CODEC) ||
+ (surfaceType == OutputConfiguration::SURFACE_TYPE_IMAGE_READER));
+ if (!validSurfaceType) {
+ std::string msg = fmt::sprintf("OutputConfiguration target surface has invalid "
+ "surfaceType = %d.", surfaceType);
+ ALOGE("%s: %s", __FUNCTION__, msg.c_str());
+ return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT, msg.c_str());
+ }
}
return binder::Status::ok();
diff --git a/services/camera/libcameraservice/utils/SessionConfigurationUtils.h b/services/camera/libcameraservice/utils/SessionConfigurationUtils.h
index 0545cea..a226829 100644
--- a/services/camera/libcameraservice/utils/SessionConfigurationUtils.h
+++ b/services/camera/libcameraservice/utils/SessionConfigurationUtils.h
@@ -112,7 +112,7 @@
const std::string &logicalCameraId, const CameraMetadata &physicalCameraMetadata,
const std::vector<int32_t> &sensorPixelModesUsed, int64_t dynamicRangeProfile,
int64_t streamUseCase, int timestampBase, int mirrorMode,
- int32_t colorSpace);
+ int32_t colorSpace, bool respectSurfaceSize);
//check if format is 10-bit output compatible
bool is10bitCompatibleFormat(int32_t format, android_dataspace_t dataSpace);
@@ -143,10 +143,10 @@
const std::string &logicalCameraId);
binder::Status checkSurfaceType(size_t numBufferProducers,
-bool deferredConsumer, int surfaceType);
+ bool deferredConsumer, int surfaceType, bool isConfigurationComplete);
binder::Status checkOperatingMode(int operatingMode,
-const CameraMetadata &staticInfo, const std::string &cameraId);
+ const CameraMetadata &staticInfo, const std::string &cameraId);
binder::Status
convertToHALStreamCombination(