Camera: Move readoutTimestamp to OutputConfiguration

Whether the readout timestamp is used should be orthogonal to the
timestamp base in use (DEFAULT, SENSOR, REALTIME, etc.). Decouple the
two by carrying the readout choice as a separate useReadoutTimestamp
flag on OutputConfiguration, instead of as the
TIMESTAMP_BASE_READOUT_SENSOR timestamp base, and plumb it through
createStream() down to Camera3OutputStream.

Test: Camera CTS
Bug: 232437315
Change-Id: I7e3e45bc7955da141bd47b3217e30f45ab429cab
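
To illustrate the intended orthogonality, the standalone sketch below (hypothetical
helper names, not framework code) mirrors the captureTime computation in the
Camera3OutputStream.cpp hunk further down: the readout choice is a plain boolean that
can be combined with any timestamp base.

    // Standalone sketch; pickCaptureTime() is a made-up helper modeling the
    // new selection:
    //   ((mUseReadoutTime || mSyncToDisplay) && readoutTimestamp != 0 ?
    //        readoutTimestamp : timestamp) - mTimestampOffset
    #include <cstdint>
    #include <cstdio>

    using nsecs_t = int64_t;

    static nsecs_t pickCaptureTime(bool useReadoutTimestamp, bool syncToDisplay,
                                   nsecs_t startTimestamp, nsecs_t readoutTimestamp,
                                   nsecs_t timestampOffset) {
        nsecs_t base = ((useReadoutTimestamp || syncToDisplay) && readoutTimestamp != 0)
                ? readoutTimestamp : startTimestamp;
        return base - timestampOffset;
    }

    int main() {
        nsecs_t start = 1000000000, readout = 1010000000;  // 10 ms apart
        printf("start-of-exposure: %lld\n",
                (long long) pickCaptureTime(false, false, start, readout, 0));
        printf("readout:           %lld\n",
                (long long) pickCaptureTime(true, false, start, readout, 0));
        return 0;
    }

With the flag living on OutputConfiguration, the same choice applies whether the base
is DEFAULT, SENSOR, REALTIME, etc.; the base now only drives the offset handling.
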
diff --git a/camera/camera2/OutputConfiguration.cpp b/camera/camera2/OutputConfiguration.cpp
index d50566d..da4484a 100644
--- a/camera/camera2/OutputConfiguration.cpp
+++ b/camera/camera2/OutputConfiguration.cpp
@@ -97,6 +97,10 @@
return mMirrorMode;
}
+bool OutputConfiguration::useReadoutTimestamp() const {
+ return mUseReadoutTimestamp;
+}
+
OutputConfiguration::OutputConfiguration() :
mRotation(INVALID_ROTATION),
mSurfaceSetID(INVALID_SET_ID),
@@ -110,7 +114,8 @@
mColorSpace(ANDROID_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP_UNSPECIFIED),
mStreamUseCase(ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_DEFAULT),
mTimestampBase(TIMESTAMP_BASE_DEFAULT),
- mMirrorMode(MIRROR_MODE_AUTO) {
+ mMirrorMode(MIRROR_MODE_AUTO),
+ mUseReadoutTimestamp(false) {
}
OutputConfiguration::OutputConfiguration(const android::Parcel& parcel) :
@@ -220,6 +225,12 @@
return err;
}
+ int useReadoutTimestamp = 0;
+ if ((err = parcel->readInt32(&useReadoutTimestamp)) != OK) {
+ ALOGE("%s: Failed to read useReadoutTimestamp flag from parcel", __FUNCTION__);
+ return err;
+ }
+
mRotation = rotation;
mSurfaceSetID = setID;
mSurfaceType = surfaceType;
@@ -231,6 +242,7 @@
mStreamUseCase = streamUseCase;
mTimestampBase = timestampBase;
mMirrorMode = mirrorMode;
+ mUseReadoutTimestamp = useReadoutTimestamp != 0;
for (auto& surface : surfaceShims) {
ALOGV("%s: OutputConfiguration: %p, name %s", __FUNCTION__,
surface.graphicBufferProducer.get(),
@@ -244,10 +256,10 @@
ALOGV("%s: OutputConfiguration: rotation = %d, setId = %d, surfaceType = %d,"
" physicalCameraId = %s, isMultiResolution = %d, streamUseCase = %" PRId64
- ", timestampBase = %d, mirrorMode = %d",
+ ", timestampBase = %d, mirrorMode = %d, useReadoutTimestamp = %d",
__FUNCTION__, mRotation, mSurfaceSetID, mSurfaceType,
String8(mPhysicalCameraId).string(), mIsMultiResolution, mStreamUseCase, timestampBase,
- mMirrorMode);
+ mMirrorMode, mUseReadoutTimestamp);
return err;
}
@@ -267,6 +279,7 @@
mStreamUseCase = ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_DEFAULT;
mTimestampBase = TIMESTAMP_BASE_DEFAULT;
mMirrorMode = MIRROR_MODE_AUTO;
+ mUseReadoutTimestamp = false;
}
OutputConfiguration::OutputConfiguration(
@@ -280,7 +293,7 @@
mColorSpace(ANDROID_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP_UNSPECIFIED),
mStreamUseCase(ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_DEFAULT),
mTimestampBase(TIMESTAMP_BASE_DEFAULT),
- mMirrorMode(MIRROR_MODE_AUTO) { }
+ mMirrorMode(MIRROR_MODE_AUTO), mUseReadoutTimestamp(false) { }
status_t OutputConfiguration::writeToParcel(android::Parcel* parcel) const {
@@ -342,6 +355,9 @@
err = parcel->writeInt32(mMirrorMode);
if (err != OK) return err;
+ err = parcel->writeInt32(mUseReadoutTimestamp ? 1 : 0);
+ if (err != OK) return err;
+
return OK;
}
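
Note that the new field is appended at the end of the parcel layout, after mirrorMode,
and readFromParcel() must consume it in the matching position. The toy stand-in below
(FakeParcel is not android::Parcel) only illustrates that round-trip ordering
invariant.

    // Toy stand-in for the parcel round trip; not the real android::Parcel.
    #include <cassert>
    #include <cstdint>
    #include <deque>

    struct FakeParcel {
        std::deque<int32_t> data;
        void writeInt32(int32_t v) { data.push_back(v); }
        int32_t readInt32() { int32_t v = data.front(); data.pop_front(); return v; }
    };

    int main() {
        FakeParcel p;
        // Write side: same order as OutputConfiguration::writeToParcel().
        p.writeInt32(/*mirrorMode*/ 0);
        p.writeInt32(/*useReadoutTimestamp*/ 1);
        // Read side: same order as the parcel constructor.
        int32_t mirrorMode = p.readInt32();
        bool useReadoutTimestamp = p.readInt32() != 0;
        assert(mirrorMode == 0 && useReadoutTimestamp);
        return 0;
    }
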
diff --git a/camera/include/camera/camera2/OutputConfiguration.h b/camera/include/camera/camera2/OutputConfiguration.h
index a713b40..16fddb5 100644
--- a/camera/include/camera/camera2/OutputConfiguration.h
+++ b/camera/include/camera/camera2/OutputConfiguration.h
@@ -44,8 +44,7 @@
TIMESTAMP_BASE_MONOTONIC = 2,
TIMESTAMP_BASE_REALTIME = 3,
TIMESTAMP_BASE_CHOREOGRAPHER_SYNCED = 4,
- TIMESTAMP_BASE_READOUT_SENSOR = 5,
- TIMESTAMP_BASE_MAX = TIMESTAMP_BASE_READOUT_SENSOR,
+ TIMESTAMP_BASE_MAX = TIMESTAMP_BASE_CHOREOGRAPHER_SYNCED,
};
enum MirrorModeType {
MIRROR_MODE_AUTO = 0,
@@ -69,6 +68,7 @@
int64_t getStreamUseCase() const;
int getTimestampBase() const;
int getMirrorMode() const;
+ bool useReadoutTimestamp() const;
// set of sensor pixel mode resolutions allowed {MAX_RESOLUTION, DEFAULT_MODE};
const std::vector<int32_t>& getSensorPixelModesUsed() const;
@@ -115,7 +115,8 @@
mColorSpace == other.mColorSpace &&
mStreamUseCase == other.mStreamUseCase &&
mTimestampBase == other.mTimestampBase &&
- mMirrorMode == other.mMirrorMode);
+ mMirrorMode == other.mMirrorMode &&
+ mUseReadoutTimestamp == other.mUseReadoutTimestamp);
}
bool operator != (const OutputConfiguration& other) const {
return !(*this == other);
@@ -167,6 +168,9 @@
if (mMirrorMode != other.mMirrorMode) {
return mMirrorMode < other.mMirrorMode;
}
+ if (mUseReadoutTimestamp != other.mUseReadoutTimestamp) {
+ return mUseReadoutTimestamp < other.mUseReadoutTimestamp;
+ }
return gbpsLessThan(other);
}
@@ -196,6 +200,7 @@
int64_t mStreamUseCase;
int mTimestampBase;
int mMirrorMode;
+ bool mUseReadoutTimestamp;
};
} // namespace params
} // namespace camera2
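
A toy illustration (again, not the real OutputConfiguration class) of why the flag is
also folded into operator== and operator< above: two configurations that differ only
in useReadoutTimestamp must compare as distinct, for example when used as keys in an
ordered container.

    // MiniOutputConfig is a made-up miniature of the comparison logic only.
    #include <cassert>
    #include <set>
    #include <tuple>

    struct MiniOutputConfig {
        int timestampBase;        // e.g. TIMESTAMP_BASE_DEFAULT
        int mirrorMode;           // e.g. MIRROR_MODE_AUTO
        bool useReadoutTimestamp;

        bool operator<(const MiniOutputConfig& o) const {
            return std::tie(timestampBase, mirrorMode, useReadoutTimestamp) <
                   std::tie(o.timestampBase, o.mirrorMode, o.useReadoutTimestamp);
        }
    };

    int main() {
        std::set<MiniOutputConfig> configs;
        configs.insert({0, 0, /*useReadoutTimestamp*/ false});
        configs.insert({0, 0, /*useReadoutTimestamp*/ true});
        assert(configs.size() == 2);  // distinct because the flag participates in ordering
        return 0;
    }
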
diff --git a/services/camera/libcameraservice/api2/CameraDeviceClient.cpp b/services/camera/libcameraservice/api2/CameraDeviceClient.cpp
index c08aff3..720ffd7 100644
--- a/services/camera/libcameraservice/api2/CameraDeviceClient.cpp
+++ b/services/camera/libcameraservice/api2/CameraDeviceClient.cpp
@@ -890,6 +890,7 @@
int timestampBase = outputConfiguration.getTimestampBase();
int mirrorMode = outputConfiguration.getMirrorMode();
int32_t colorSpace = outputConfiguration.getColorSpace();
+ bool useReadoutTimestamp = outputConfiguration.useReadoutTimestamp();
res = SessionConfigurationUtils::checkSurfaceType(numBufferProducers, deferredConsumer,
outputConfiguration.getSurfaceType());
@@ -973,7 +974,8 @@
static_cast<camera_stream_rotation_t>(outputConfiguration.getRotation()),
&streamId, physicalCameraId, streamInfo.sensorPixelModesUsed, &surfaceIds,
outputConfiguration.getSurfaceSetID(), isShared, isMultiResolution,
- streamInfo.colorSpace, streamInfo.dynamicRangeProfile, streamInfo.streamUseCase);
+ streamInfo.colorSpace, streamInfo.dynamicRangeProfile, streamInfo.streamUseCase,
+ useReadoutTimestamp);
if (err == OK) {
Mutex::Autolock l(mCompositeLock);
mCompositeStreamMap.add(IInterface::asBinder(surfaces[0]->getIGraphicBufferProducer()),
@@ -986,7 +988,8 @@
&streamId, physicalCameraId, streamInfo.sensorPixelModesUsed, &surfaceIds,
outputConfiguration.getSurfaceSetID(), isShared, isMultiResolution,
/*consumerUsage*/0, streamInfo.dynamicRangeProfile, streamInfo.streamUseCase,
- streamInfo.timestampBase, streamInfo.mirrorMode, streamInfo.colorSpace);
+ streamInfo.timestampBase, streamInfo.mirrorMode, streamInfo.colorSpace,
+ useReadoutTimestamp);
}
if (err != OK) {
@@ -1085,7 +1088,8 @@
outputConfiguration.isMultiResolution(), consumerUsage,
outputConfiguration.getDynamicRangeProfile(),
outputConfiguration.getStreamUseCase(),
- outputConfiguration.getMirrorMode());
+ outputConfiguration.getMirrorMode(),
+ outputConfiguration.useReadoutTimestamp());
if (err != OK) {
res = STATUS_ERROR_FMT(CameraService::ERROR_INVALID_OPERATION,
diff --git a/services/camera/libcameraservice/api2/CompositeStream.cpp b/services/camera/libcameraservice/api2/CompositeStream.cpp
index 8cc47ee..503cf23 100644
--- a/services/camera/libcameraservice/api2/CompositeStream.cpp
+++ b/services/camera/libcameraservice/api2/CompositeStream.cpp
@@ -50,7 +50,7 @@
const std::unordered_set<int32_t> &sensorPixelModesUsed,
std::vector<int> * surfaceIds,
int streamSetId, bool isShared, bool isMultiResolution, int32_t colorSpace,
- int64_t dynamicProfile, int64_t streamUseCase) {
+ int64_t dynamicProfile, int64_t streamUseCase, bool useReadoutTimestamp) {
if (hasDeferredConsumer) {
ALOGE("%s: Deferred consumers not supported in case of composite streams!",
__FUNCTION__);
@@ -77,7 +77,7 @@
return createInternalStreams(consumers, hasDeferredConsumer, width, height, format, rotation,
id, physicalCameraId, sensorPixelModesUsed, surfaceIds, streamSetId, isShared,
- colorSpace, dynamicProfile, streamUseCase);
+ colorSpace, dynamicProfile, streamUseCase, useReadoutTimestamp);
}
status_t CompositeStream::deleteStream() {
diff --git a/services/camera/libcameraservice/api2/CompositeStream.h b/services/camera/libcameraservice/api2/CompositeStream.h
index 99067dd..c27faba 100644
--- a/services/camera/libcameraservice/api2/CompositeStream.h
+++ b/services/camera/libcameraservice/api2/CompositeStream.h
@@ -47,7 +47,7 @@
const std::unordered_set<int32_t> &sensorPixelModesUsed,
std::vector<int> *surfaceIds,
int streamSetId, bool isShared, bool isMultiResolution, int32_t colorSpace,
- int64_t dynamicProfile, int64_t streamUseCase);
+ int64_t dynamicProfile, int64_t streamUseCase, bool useReadoutTimestamp);
status_t deleteStream();
@@ -61,7 +61,7 @@
const std::unordered_set<int32_t> &sensorPixelModesUsed,
std::vector<int> *surfaceIds,
int streamSetId, bool isShared, int32_t colorSpace,
- int64_t dynamicProfile, int64_t streamUseCase) = 0;
+ int64_t dynamicProfile, int64_t streamUseCase, bool useReadoutTimestamp) = 0;
// Release all internal streams and corresponding resources.
virtual status_t deleteInternalStreams() = 0;
diff --git a/services/camera/libcameraservice/api2/DepthCompositeStream.cpp b/services/camera/libcameraservice/api2/DepthCompositeStream.cpp
index 2746289..a3547dd 100644
--- a/services/camera/libcameraservice/api2/DepthCompositeStream.cpp
+++ b/services/camera/libcameraservice/api2/DepthCompositeStream.cpp
@@ -582,7 +582,7 @@
const std::unordered_set<int32_t> &sensorPixelModesUsed,
std::vector<int> *surfaceIds,
int /*streamSetId*/, bool /*isShared*/, int32_t /*colorSpace*/,
- int64_t /*dynamicProfile*/, int64_t /*streamUseCase*/) {
+ int64_t /*dynamicProfile*/, int64_t /*streamUseCase*/, bool useReadoutTimestamp) {
if (mSupportedDepthSizes.empty()) {
ALOGE("%s: This camera device doesn't support any depth map streams!", __FUNCTION__);
return INVALID_OPERATION;
@@ -613,7 +613,14 @@
mBlobSurface = new Surface(producer);
ret = device->createStream(mBlobSurface, width, height, format, kJpegDataSpace, rotation,
- id, physicalCameraId, sensorPixelModesUsed, surfaceIds);
+ id, physicalCameraId, sensorPixelModesUsed, surfaceIds,
+ camera3::CAMERA3_STREAM_SET_ID_INVALID, /*isShared*/false, /*isMultiResolution*/false,
+ /*consumerUsage*/0, ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD,
+ ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_DEFAULT,
+ OutputConfiguration::TIMESTAMP_BASE_DEFAULT,
+ OutputConfiguration::MIRROR_MODE_AUTO,
+ ANDROID_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP_UNSPECIFIED,
+ useReadoutTimestamp);
if (ret == OK) {
mBlobStreamId = *id;
mBlobSurfaceId = (*surfaceIds)[0];
@@ -630,7 +637,14 @@
std::vector<int> depthSurfaceId;
ret = device->createStream(mDepthSurface, depthWidth, depthHeight, kDepthMapPixelFormat,
kDepthMapDataSpace, rotation, &mDepthStreamId, physicalCameraId, sensorPixelModesUsed,
- &depthSurfaceId);
+ &depthSurfaceId, camera3::CAMERA3_STREAM_SET_ID_INVALID, /*isShared*/false,
+ /*isMultiResolution*/false, /*consumerUsage*/0,
+ ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD,
+ ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_DEFAULT,
+ OutputConfiguration::TIMESTAMP_BASE_DEFAULT,
+ OutputConfiguration::MIRROR_MODE_AUTO,
+ ANDROID_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP_UNSPECIFIED,
+ useReadoutTimestamp);
if (ret == OK) {
mDepthSurfaceId = depthSurfaceId[0];
} else {
diff --git a/services/camera/libcameraservice/api2/DepthCompositeStream.h b/services/camera/libcameraservice/api2/DepthCompositeStream.h
index b4a4b05..de0ed67 100644
--- a/services/camera/libcameraservice/api2/DepthCompositeStream.h
+++ b/services/camera/libcameraservice/api2/DepthCompositeStream.h
@@ -54,7 +54,7 @@
const std::unordered_set<int32_t> &sensorPixelModesUsed,
std::vector<int> *surfaceIds,
int streamSetId, bool isShared, int32_t colorSpace,
- int64_t dynamicProfile, int64_t streamUseCase) override;
+ int64_t dynamicProfile, int64_t streamUseCase, bool useReadoutTimestamp) override;
status_t deleteInternalStreams() override;
status_t configureStream() override;
status_t insertGbp(SurfaceMap* /*out*/outSurfaceMap, Vector<int32_t>* /*out*/outputStreamIds,
diff --git a/services/camera/libcameraservice/api2/HeicCompositeStream.cpp b/services/camera/libcameraservice/api2/HeicCompositeStream.cpp
index 52ab22f..8b8dbe8 100644
--- a/services/camera/libcameraservice/api2/HeicCompositeStream.cpp
+++ b/services/camera/libcameraservice/api2/HeicCompositeStream.cpp
@@ -121,8 +121,7 @@
const std::unordered_set<int32_t> &sensorPixelModesUsed,
std::vector<int> *surfaceIds,
int /*streamSetId*/, bool /*isShared*/, int32_t /*colorSpace*/,
- int64_t /*dynamicProfile*/, int64_t /*streamUseCase*/) {
-
+ int64_t /*dynamicProfile*/, int64_t /*streamUseCase*/, bool useReadoutTimestamp) {
sp<CameraDeviceBase> device = mDevice.promote();
if (!device.get()) {
ALOGE("%s: Invalid camera device!", __FUNCTION__);
@@ -148,7 +147,14 @@
res = device->createStream(mAppSegmentSurface, mAppSegmentMaxSize, 1, format,
kAppSegmentDataSpace, rotation, &mAppSegmentStreamId, physicalCameraId,
- sensorPixelModesUsed,surfaceIds);
+ sensorPixelModesUsed, surfaceIds, camera3::CAMERA3_STREAM_SET_ID_INVALID,
+ /*isShared*/false, /*isMultiResolution*/false,
+ /*consumerUsage*/0, ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD,
+ ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_DEFAULT,
+ OutputConfiguration::TIMESTAMP_BASE_DEFAULT,
+ OutputConfiguration::MIRROR_MODE_AUTO,
+ ANDROID_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP_UNSPECIFIED,
+ useReadoutTimestamp);
if (res == OK) {
mAppSegmentSurfaceId = (*surfaceIds)[0];
} else {
@@ -184,7 +190,14 @@
int srcStreamFmt = mUseGrid ? HAL_PIXEL_FORMAT_YCbCr_420_888 :
HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED;
res = device->createStream(mMainImageSurface, width, height, srcStreamFmt, kHeifDataSpace,
- rotation, id, physicalCameraId, sensorPixelModesUsed, &sourceSurfaceId);
+ rotation, id, physicalCameraId, sensorPixelModesUsed, &sourceSurfaceId,
+ camera3::CAMERA3_STREAM_SET_ID_INVALID, /*isShared*/false, /*isMultiResolution*/false,
+ /*consumerUsage*/0, ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD,
+ ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_DEFAULT,
+ OutputConfiguration::TIMESTAMP_BASE_DEFAULT,
+ OutputConfiguration::MIRROR_MODE_AUTO,
+ ANDROID_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP_UNSPECIFIED,
+ useReadoutTimestamp);
if (res == OK) {
mMainImageSurfaceId = sourceSurfaceId[0];
mMainImageStreamId = *id;
diff --git a/services/camera/libcameraservice/api2/HeicCompositeStream.h b/services/camera/libcameraservice/api2/HeicCompositeStream.h
index cdcaded..3132183 100644
--- a/services/camera/libcameraservice/api2/HeicCompositeStream.h
+++ b/services/camera/libcameraservice/api2/HeicCompositeStream.h
@@ -47,9 +47,8 @@
bool hasDeferredConsumer, uint32_t width, uint32_t height, int format,
camera_stream_rotation_t rotation, int *id, const String8& physicalCameraId,
const std::unordered_set<int32_t> &sensorPixelModesUsed,
- std::vector<int> *surfaceIds,
- int streamSetId, bool isShared, int32_t colorSpace,
- int64_t dynamicProfile, int64_t streamUseCase) override;
+ std::vector<int> *surfaceIds, int streamSetId, bool isShared, int32_t colorSpace,
+ int64_t dynamicProfile, int64_t streamUseCase, bool useReadoutTimestamp) override;
status_t deleteInternalStreams() override;
diff --git a/services/camera/libcameraservice/api2/JpegRCompositeStream.cpp b/services/camera/libcameraservice/api2/JpegRCompositeStream.cpp
index 8f9d813..fb8979d 100644
--- a/services/camera/libcameraservice/api2/JpegRCompositeStream.cpp
+++ b/services/camera/libcameraservice/api2/JpegRCompositeStream.cpp
@@ -532,7 +532,7 @@
const std::unordered_set<int32_t> &sensorPixelModesUsed,
std::vector<int> *surfaceIds,
int /*streamSetId*/, bool /*isShared*/, int32_t colorSpace,
- int64_t dynamicProfile, int64_t streamUseCase) {
+ int64_t dynamicProfile, int64_t streamUseCase, bool useReadoutTimestamp) {
sp<CameraDeviceBase> device = mDevice.promote();
if (!device.get()) {
ALOGE("%s: Invalid camera device!", __FUNCTION__);
@@ -556,9 +556,9 @@
static_cast<android_dataspace>(mP010DataSpace), rotation,
id, physicalCameraId, sensorPixelModesUsed, surfaceIds,
camera3::CAMERA3_STREAM_SET_ID_INVALID, false /*isShared*/, false /*isMultiResolution*/,
- GRALLOC_USAGE_SW_READ_OFTEN,
- mP010DynamicRange,
- streamUseCase);
+ GRALLOC_USAGE_SW_READ_OFTEN, mP010DynamicRange, streamUseCase,
+ OutputConfiguration::TIMESTAMP_BASE_DEFAULT, OutputConfiguration::MIRROR_MODE_AUTO,
+ ANDROID_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP_UNSPECIFIED, useReadoutTimestamp);
if (ret == OK) {
mP010StreamId = *id;
mP010SurfaceId = (*surfaceIds)[0];
@@ -585,7 +585,7 @@
streamUseCase,
/*timestampBase*/ OutputConfiguration::TIMESTAMP_BASE_DEFAULT,
/*mirrorMode*/ OutputConfiguration::MIRROR_MODE_AUTO,
- /*colorSpace*/ colorSpace);
+ /*colorSpace*/ colorSpace, useReadoutTimestamp);
if (ret == OK) {
mBlobSurfaceId = blobSurfaceId[0];
} else {
diff --git a/services/camera/libcameraservice/api2/JpegRCompositeStream.h b/services/camera/libcameraservice/api2/JpegRCompositeStream.h
index 181a05d..4b462b5 100644
--- a/services/camera/libcameraservice/api2/JpegRCompositeStream.h
+++ b/services/camera/libcameraservice/api2/JpegRCompositeStream.h
@@ -50,7 +50,7 @@
const std::unordered_set<int32_t> &sensorPixelModesUsed,
std::vector<int> *surfaceIds,
int streamSetId, bool isShared, int32_t colorSpace,
- int64_t dynamicProfile, int64_t streamUseCase) override;
+ int64_t dynamicProfile, int64_t streamUseCase, bool useReadoutTimestamp) override;
status_t deleteInternalStreams() override;
status_t configureStream() override;
status_t insertGbp(SurfaceMap* /*out*/outSurfaceMap, Vector<int32_t>* /*out*/outputStreamIds,
diff --git a/services/camera/libcameraservice/common/CameraDeviceBase.h b/services/camera/libcameraservice/common/CameraDeviceBase.h
index 977ab7c..6c30606 100644
--- a/services/camera/libcameraservice/common/CameraDeviceBase.h
+++ b/services/camera/libcameraservice/common/CameraDeviceBase.h
@@ -193,7 +193,8 @@
int64_t streamUseCase = ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_DEFAULT,
int timestampBase = OutputConfiguration::TIMESTAMP_BASE_DEFAULT,
int mirrorMode = OutputConfiguration::MIRROR_MODE_AUTO,
- int32_t colorSpace = ANDROID_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP_UNSPECIFIED)
+ int32_t colorSpace = ANDROID_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP_UNSPECIFIED,
+ bool useReadoutTimestamp = false)
= 0;
/**
@@ -216,7 +217,8 @@
int64_t streamUseCase = ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_DEFAULT,
int timestampBase = OutputConfiguration::TIMESTAMP_BASE_DEFAULT,
int mirrorMode = OutputConfiguration::MIRROR_MODE_AUTO,
- int32_t colorSpace = ANDROID_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP_UNSPECIFIED)
+ int32_t colorSpace = ANDROID_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP_UNSPECIFIED,
+ bool useReadoutTimestamp = false)
= 0;
/**
diff --git a/services/camera/libcameraservice/device3/Camera3Device.cpp b/services/camera/libcameraservice/device3/Camera3Device.cpp
index 5e99389..28a150c 100644
--- a/services/camera/libcameraservice/device3/Camera3Device.cpp
+++ b/services/camera/libcameraservice/device3/Camera3Device.cpp
@@ -1001,7 +1001,7 @@
const std::unordered_set<int32_t> &sensorPixelModesUsed,
std::vector<int> *surfaceIds, int streamSetId, bool isShared, bool isMultiResolution,
uint64_t consumerUsage, int64_t dynamicRangeProfile, int64_t streamUseCase,
- int timestampBase, int mirrorMode, int32_t colorSpace) {
+ int timestampBase, int mirrorMode, int32_t colorSpace, bool useReadoutTimestamp) {
ATRACE_CALL();
if (consumer == nullptr) {
@@ -1015,7 +1015,7 @@
return createStream(consumers, /*hasDeferredConsumer*/ false, width, height,
format, dataSpace, rotation, id, physicalCameraId, sensorPixelModesUsed, surfaceIds,
streamSetId, isShared, isMultiResolution, consumerUsage, dynamicRangeProfile,
- streamUseCase, timestampBase, mirrorMode, colorSpace);
+ streamUseCase, timestampBase, mirrorMode, colorSpace, useReadoutTimestamp);
}
static bool isRawFormat(int format) {
@@ -1036,7 +1036,7 @@
const String8& physicalCameraId, const std::unordered_set<int32_t> &sensorPixelModesUsed,
std::vector<int> *surfaceIds, int streamSetId, bool isShared, bool isMultiResolution,
uint64_t consumerUsage, int64_t dynamicRangeProfile, int64_t streamUseCase,
- int timestampBase, int mirrorMode, int32_t colorSpace) {
+ int timestampBase, int mirrorMode, int32_t colorSpace, bool useReadoutTimestamp) {
ATRACE_CALL();
Mutex::Autolock il(mInterfaceLock);
@@ -1045,10 +1045,11 @@
ALOGV("Camera %s: Creating new stream %d: %d x %d, format %d, dataspace %d rotation %d"
" consumer usage %" PRIu64 ", isShared %d, physicalCameraId %s, isMultiResolution %d"
" dynamicRangeProfile 0x%" PRIx64 ", streamUseCase %" PRId64 ", timestampBase %d,"
- " mirrorMode %d colorSpace %d",
+ " mirrorMode %d, colorSpace %d, useReadoutTimestamp %d",
mId.string(), mNextStreamId, width, height, format, dataSpace, rotation,
consumerUsage, isShared, physicalCameraId.string(), isMultiResolution,
- dynamicRangeProfile, streamUseCase, timestampBase, mirrorMode, colorSpace);
+ dynamicRangeProfile, streamUseCase, timestampBase, mirrorMode, colorSpace,
+ useReadoutTimestamp);
status_t res;
bool wasActive = false;
@@ -1119,7 +1120,7 @@
width, height, blobBufferSize, format, dataSpace, rotation,
mTimestampOffset, physicalCameraId, sensorPixelModesUsed, transport, streamSetId,
isMultiResolution, dynamicRangeProfile, streamUseCase, mDeviceTimeBaseIsRealtime,
- timestampBase, mirrorMode, colorSpace);
+ timestampBase, mirrorMode, colorSpace, useReadoutTimestamp);
} else if (format == HAL_PIXEL_FORMAT_RAW_OPAQUE) {
bool maxResolution =
sensorPixelModesUsed.find(ANDROID_SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION) !=
@@ -1134,25 +1135,25 @@
width, height, rawOpaqueBufferSize, format, dataSpace, rotation,
mTimestampOffset, physicalCameraId, sensorPixelModesUsed, transport, streamSetId,
isMultiResolution, dynamicRangeProfile, streamUseCase, mDeviceTimeBaseIsRealtime,
- timestampBase, mirrorMode, colorSpace);
+ timestampBase, mirrorMode, colorSpace, useReadoutTimestamp);
} else if (isShared) {
newStream = new Camera3SharedOutputStream(mNextStreamId, consumers,
width, height, format, consumerUsage, dataSpace, rotation,
mTimestampOffset, physicalCameraId, sensorPixelModesUsed, transport, streamSetId,
mUseHalBufManager, dynamicRangeProfile, streamUseCase, mDeviceTimeBaseIsRealtime,
- timestampBase, mirrorMode, colorSpace);
+ timestampBase, mirrorMode, colorSpace, useReadoutTimestamp);
} else if (consumers.size() == 0 && hasDeferredConsumer) {
newStream = new Camera3OutputStream(mNextStreamId,
width, height, format, consumerUsage, dataSpace, rotation,
mTimestampOffset, physicalCameraId, sensorPixelModesUsed, transport, streamSetId,
isMultiResolution, dynamicRangeProfile, streamUseCase, mDeviceTimeBaseIsRealtime,
- timestampBase, mirrorMode, colorSpace);
+ timestampBase, mirrorMode, colorSpace, useReadoutTimestamp);
} else {
newStream = new Camera3OutputStream(mNextStreamId, consumers[0],
width, height, format, dataSpace, rotation,
mTimestampOffset, physicalCameraId, sensorPixelModesUsed, transport, streamSetId,
isMultiResolution, dynamicRangeProfile, streamUseCase, mDeviceTimeBaseIsRealtime,
- timestampBase, mirrorMode, colorSpace);
+ timestampBase, mirrorMode, colorSpace, useReadoutTimestamp);
}
size_t consumerCount = consumers.size();
diff --git a/services/camera/libcameraservice/device3/Camera3Device.h b/services/camera/libcameraservice/device3/Camera3Device.h
index 9b75ac5..990f556 100644
--- a/services/camera/libcameraservice/device3/Camera3Device.h
+++ b/services/camera/libcameraservice/device3/Camera3Device.h
@@ -153,7 +153,8 @@
int64_t streamUseCase = ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_DEFAULT,
int timestampBase = OutputConfiguration::TIMESTAMP_BASE_DEFAULT,
int mirrorMode = OutputConfiguration::MIRROR_MODE_AUTO,
- int32_t colorSpace = ANDROID_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP_UNSPECIFIED)
+ int32_t colorSpace = ANDROID_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP_UNSPECIFIED,
+ bool useReadoutTimestamp = false)
override;
status_t createStream(const std::vector<sp<Surface>>& consumers,
@@ -170,7 +171,8 @@
int64_t streamUseCase = ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_DEFAULT,
int timestampBase = OutputConfiguration::TIMESTAMP_BASE_DEFAULT,
int mirrorMode = OutputConfiguration::MIRROR_MODE_AUTO,
- int32_t colorSpace = ANDROID_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP_UNSPECIFIED)
+ int32_t colorSpace = ANDROID_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP_UNSPECIFIED,
+ bool useReadoutTimestamp = false)
override;
status_t createInputStream(
diff --git a/services/camera/libcameraservice/device3/Camera3OutputStream.cpp b/services/camera/libcameraservice/device3/Camera3OutputStream.cpp
index 1abcd86..88075d6 100644
--- a/services/camera/libcameraservice/device3/Camera3OutputStream.cpp
+++ b/services/camera/libcameraservice/device3/Camera3OutputStream.cpp
@@ -56,7 +56,7 @@
const std::unordered_set<int32_t> &sensorPixelModesUsed, IPCTransport transport,
int setId, bool isMultiResolution, int64_t dynamicRangeProfile,
int64_t streamUseCase, bool deviceTimeBaseIsRealtime, int timestampBase,
- int mirrorMode, int32_t colorSpace) :
+ int mirrorMode, int32_t colorSpace, bool useReadoutTimestamp) :
Camera3IOStreamBase(id, CAMERA_STREAM_OUTPUT, width, height,
/*maxSize*/0, format, dataSpace, rotation,
physicalCameraId, sensorPixelModesUsed, setId, isMultiResolution,
@@ -67,7 +67,7 @@
mTraceFirstBuffer(true),
mUseBufferManager(false),
mTimestampOffset(timestampOffset),
- mUseReadoutTime(false),
+ mUseReadoutTime(useReadoutTimestamp),
mConsumerUsage(0),
mDropBuffers(false),
mMirrorMode(mirrorMode),
@@ -91,7 +91,7 @@
const std::unordered_set<int32_t> &sensorPixelModesUsed, IPCTransport transport,
int setId, bool isMultiResolution, int64_t dynamicRangeProfile,
int64_t streamUseCase, bool deviceTimeBaseIsRealtime, int timestampBase,
- int mirrorMode, int32_t colorSpace) :
+ int mirrorMode, int32_t colorSpace, bool useReadoutTimestamp) :
Camera3IOStreamBase(id, CAMERA_STREAM_OUTPUT, width, height, maxSize,
format, dataSpace, rotation, physicalCameraId, sensorPixelModesUsed,
setId, isMultiResolution, dynamicRangeProfile, streamUseCase,
@@ -101,7 +101,7 @@
mTraceFirstBuffer(true),
mUseBufferManager(false),
mTimestampOffset(timestampOffset),
- mUseReadoutTime(false),
+ mUseReadoutTime(useReadoutTimestamp),
mConsumerUsage(0),
mDropBuffers(false),
mMirrorMode(mirrorMode),
@@ -131,7 +131,7 @@
const std::unordered_set<int32_t> &sensorPixelModesUsed, IPCTransport transport,
int setId, bool isMultiResolution, int64_t dynamicRangeProfile,
int64_t streamUseCase, bool deviceTimeBaseIsRealtime, int timestampBase,
- int mirrorMode, int32_t colorSpace) :
+ int mirrorMode, int32_t colorSpace, bool useReadoutTimestamp) :
Camera3IOStreamBase(id, CAMERA_STREAM_OUTPUT, width, height,
/*maxSize*/0, format, dataSpace, rotation,
physicalCameraId, sensorPixelModesUsed, setId, isMultiResolution,
@@ -142,7 +142,7 @@
mTraceFirstBuffer(true),
mUseBufferManager(false),
mTimestampOffset(timestampOffset),
- mUseReadoutTime(false),
+ mUseReadoutTime(useReadoutTimestamp),
mConsumerUsage(consumerUsage),
mDropBuffers(false),
mMirrorMode(mirrorMode),
@@ -180,7 +180,8 @@
int setId, bool isMultiResolution,
int64_t dynamicRangeProfile, int64_t streamUseCase,
bool deviceTimeBaseIsRealtime, int timestampBase,
- int mirrorMode, int32_t colorSpace) :
+ int mirrorMode, int32_t colorSpace,
+ bool useReadoutTimestamp) :
Camera3IOStreamBase(id, type, width, height,
/*maxSize*/0,
format, dataSpace, rotation,
@@ -191,7 +192,7 @@
mTraceFirstBuffer(true),
mUseBufferManager(false),
mTimestampOffset(timestampOffset),
- mUseReadoutTime(false),
+ mUseReadoutTime(useReadoutTimestamp),
mConsumerUsage(consumerUsage),
mDropBuffers(false),
mMirrorMode(mirrorMode),
@@ -467,7 +468,7 @@
}
}
- nsecs_t captureTime = (mUseReadoutTime && readoutTimestamp != 0 ?
+ nsecs_t captureTime = ((mUseReadoutTime || mSyncToDisplay) && readoutTimestamp != 0 ?
readoutTimestamp : timestamp) - mTimestampOffset;
if (mPreviewFrameSpacer != nullptr) {
nsecs_t readoutTime = (readoutTimestamp != 0 ? readoutTimestamp : timestamp)
@@ -719,16 +720,12 @@
mFrameCount = 0;
mLastTimestamp = 0;
- mUseReadoutTime =
- (timestampBase == OutputConfiguration::TIMESTAMP_BASE_READOUT_SENSOR || mSyncToDisplay);
-
if (isDeviceTimeBaseRealtime()) {
if (isDefaultTimeBase && !isConsumedByHWComposer() && !isVideoStream()) {
// Default time base, but not hardware composer or video encoder
mTimestampOffset = 0;
} else if (timestampBase == OutputConfiguration::TIMESTAMP_BASE_REALTIME ||
- timestampBase == OutputConfiguration::TIMESTAMP_BASE_SENSOR ||
- timestampBase == OutputConfiguration::TIMESTAMP_BASE_READOUT_SENSOR) {
+ timestampBase == OutputConfiguration::TIMESTAMP_BASE_SENSOR) {
mTimestampOffset = 0;
}
// If timestampBase is CHOREOGRAPHER SYNCED or MONOTONIC, leave
@@ -738,7 +735,7 @@
// Reverse offset for monotonicTime -> bootTime
mTimestampOffset = -mTimestampOffset;
} else {
- // If timestampBase is DEFAULT, MONOTONIC, SENSOR, READOUT_SENSOR or
+ // If timestampBase is DEFAULT, MONOTONIC, SENSOR or
// CHOREOGRAPHER_SYNCED, timestamp offset is 0.
mTimestampOffset = 0;
}
diff --git a/services/camera/libcameraservice/device3/Camera3OutputStream.h b/services/camera/libcameraservice/device3/Camera3OutputStream.h
index 0d758bc..a2f16d4 100644
--- a/services/camera/libcameraservice/device3/Camera3OutputStream.h
+++ b/services/camera/libcameraservice/device3/Camera3OutputStream.h
@@ -97,7 +97,8 @@
bool deviceTimeBaseIsRealtime = false,
int timestampBase = OutputConfiguration::TIMESTAMP_BASE_DEFAULT,
int mirrorMode = OutputConfiguration::MIRROR_MODE_AUTO,
- int32_t colorSpace = ANDROID_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP_UNSPECIFIED);
+ int32_t colorSpace = ANDROID_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP_UNSPECIFIED,
+ bool useReadoutTimestamp = false);
/**
* Set up a stream for formats that have a variable buffer size for the same
* dimensions, such as compressed JPEG.
@@ -115,7 +116,8 @@
bool deviceTimeBaseIsRealtime = false,
int timestampBase = OutputConfiguration::TIMESTAMP_BASE_DEFAULT,
int mirrorMode = OutputConfiguration::MIRROR_MODE_AUTO,
- int32_t colorSpace = ANDROID_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP_UNSPECIFIED);
+ int32_t colorSpace = ANDROID_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP_UNSPECIFIED,
+ bool useReadoutTimestamp = false);
/**
* Set up a stream with deferred consumer for formats that have 2 dimensions, such as
* RAW and YUV. The consumer must be set before using this stream for output. A valid
@@ -132,7 +134,8 @@
bool deviceTimeBaseIsRealtime = false,
int timestampBase = OutputConfiguration::TIMESTAMP_BASE_DEFAULT,
int mirrorMode = OutputConfiguration::MIRROR_MODE_AUTO,
- int32_t colorSpace = ANDROID_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP_UNSPECIFIED);
+ int32_t colorSpace = ANDROID_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP_UNSPECIFIED,
+ bool useReadoutTimestamp = false);
virtual ~Camera3OutputStream();
@@ -277,7 +280,8 @@
bool deviceTimeBaseIsRealtime = false,
int timestampBase = OutputConfiguration::TIMESTAMP_BASE_DEFAULT,
int mirrorMode = OutputConfiguration::MIRROR_MODE_AUTO,
- int32_t colorSpace = ANDROID_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP_UNSPECIFIED);
+ int32_t colorSpace = ANDROID_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP_UNSPECIFIED,
+ bool useReadoutTimestamp = false);
/**
* Note that we release the lock briefly in this function
diff --git a/services/camera/libcameraservice/device3/Camera3SharedOutputStream.cpp b/services/camera/libcameraservice/device3/Camera3SharedOutputStream.cpp
index da45227..f3a7359 100644
--- a/services/camera/libcameraservice/device3/Camera3SharedOutputStream.cpp
+++ b/services/camera/libcameraservice/device3/Camera3SharedOutputStream.cpp
@@ -35,12 +35,13 @@
const std::unordered_set<int32_t> &sensorPixelModesUsed, IPCTransport transport,
int setId, bool useHalBufManager, int64_t dynamicProfile,
int64_t streamUseCase, bool deviceTimeBaseIsRealtime, int timestampBase,
- int mirrorMode, int32_t colorSpace) :
+ int mirrorMode, int32_t colorSpace, bool useReadoutTimestamp) :
Camera3OutputStream(id, CAMERA_STREAM_OUTPUT, width, height,
format, dataSpace, rotation, physicalCameraId, sensorPixelModesUsed,
transport, consumerUsage, timestampOffset, setId,
/*isMultiResolution*/false, dynamicProfile, streamUseCase,
- deviceTimeBaseIsRealtime, timestampBase, mirrorMode, colorSpace),
+ deviceTimeBaseIsRealtime, timestampBase, mirrorMode, colorSpace,
+ useReadoutTimestamp),
mUseHalBufManager(useHalBufManager) {
size_t consumerCount = std::min(surfaces.size(), kMaxOutputs);
if (surfaces.size() > consumerCount) {
diff --git a/services/camera/libcameraservice/device3/Camera3SharedOutputStream.h b/services/camera/libcameraservice/device3/Camera3SharedOutputStream.h
index 5167225..1102ecb 100644
--- a/services/camera/libcameraservice/device3/Camera3SharedOutputStream.h
+++ b/services/camera/libcameraservice/device3/Camera3SharedOutputStream.h
@@ -46,7 +46,8 @@
bool deviceTimeBaseIsRealtime = false,
int timestampBase = OutputConfiguration::TIMESTAMP_BASE_DEFAULT,
int mirrorMode = OutputConfiguration::MIRROR_MODE_AUTO,
- int32_t colorSpace = ANDROID_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP_UNSPECIFIED);
+ int32_t colorSpace = ANDROID_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP_UNSPECIFIED,
+ bool useReadoutTimestamp = false);
virtual ~Camera3SharedOutputStream();