Camera: Add PreviewFrameScheduler to address preview jitter
Ideally, viewfinder frames are presented to the user at the same cadence
as they are output by the camera sensor. In practice, per-frame processing
latency varies due to factors such as CPU load and differences in request
settings. This variation in processing latency shows up as jitter in when
frames are presented to the user.
Improve the user experience by:
1. Caching preview buffers in PreviewFrameScheduler.
2. On each choreographer callback, queueing the oldest cached preview buffer
with the best matching presentation timestamp. Frame N's presentation
timestamp is the choreographer timeline timestamp closest to
(frame N-1's presentation time + the capture interval between frames N-1
and N); see the sketch after this list.
3. Maintaining at most 2 queueable buffers. If a 3rd preview buffer becomes
available, queue it to the buffer queue right away.
Test: Run GoogleCamera video mode and observe smoother viewfinder
Test: Observe surfaceflinger trace when running viewfinder
Test: Camera CTS
Bug: 200306379
Change-Id: I791c841aaded2acd112de8f7e99a131443b21e11
diff --git a/services/camera/libcameraservice/device3/Camera3OutputStream.cpp b/services/camera/libcameraservice/device3/Camera3OutputStream.cpp
index 3738d01..d8bcc8a 100644
--- a/services/camera/libcameraservice/device3/Camera3OutputStream.cpp
+++ b/services/camera/libcameraservice/device3/Camera3OutputStream.cpp
@@ -22,6 +22,7 @@
#include <fstream>
#include <android-base/unique_fd.h>
+#include <cutils/properties.h>
#include <ui/GraphicBuffer.h>
#include <utils/Log.h>
#include <utils/Trace.h>
@@ -347,20 +348,6 @@
mTraceFirstBuffer = false;
}
- if (transform != -1) {
- setTransformLocked(transform);
- }
-
- /* Certain consumers (such as AudioSource or HardwareComposer) use
- * MONOTONIC time, causing time misalignment if camera timestamp is
- * in BOOTTIME. Do the conversion if necessary. */
- res = native_window_set_buffers_timestamp(mConsumer.get(),
- mUseMonoTimestamp ? timestamp - mTimestampOffset : timestamp);
- if (res != OK) {
- ALOGE("%s: Stream %d: Error setting timestamp: %s (%d)",
- __FUNCTION__, mId, strerror(-res), res);
- return res;
- }
// If this is a JPEG output, and image dump mask is set, save image to
// disk.
if (getFormat() == HAL_PIXEL_FORMAT_BLOB && getDataSpace() == HAL_DATASPACE_V0_JFIF &&
@@ -368,10 +355,31 @@
dumpImageToDisk(timestamp, anwBuffer, anwReleaseFence);
}
- res = queueBufferToConsumer(currentConsumer, anwBuffer, anwReleaseFence, surface_ids);
- if (shouldLogError(res, state)) {
- ALOGE("%s: Stream %d: Error queueing buffer to native window:"
- " %s (%d)", __FUNCTION__, mId, strerror(-res), res);
+ /* Certain consumers (such as AudioSource or HardwareComposer) use
+ * MONOTONIC time, causing time misalignment if camera timestamp is
+ * in BOOTTIME. Do the conversion if necessary. */
+ nsecs_t adjustedTs = mUseMonoTimestamp ? timestamp - mTimestampOffset : timestamp;
+ if (mPreviewFrameScheduler != nullptr) {
+ res = mPreviewFrameScheduler->queuePreviewBuffer(adjustedTs, transform,
+ anwBuffer, anwReleaseFence);
+ if (res != OK) {
+ ALOGE("%s: Stream %d: Error queuing buffer to preview buffer scheduler: %s (%d)",
+ __FUNCTION__, mId, strerror(-res), res);
+ return res;
+ }
+ } else {
+ setTransform(transform);
+ res = native_window_set_buffers_timestamp(mConsumer.get(), adjustedTs);
+ if (res != OK) {
+ ALOGE("%s: Stream %d: Error setting timestamp: %s (%d)",
+ __FUNCTION__, mId, strerror(-res), res);
+ return res;
+ }
+ res = queueBufferToConsumer(currentConsumer, anwBuffer, anwReleaseFence, surface_ids);
+ if (shouldLogError(res, state)) {
+ ALOGE("%s: Stream %d: Error queueing buffer to native window:"
+ " %s (%d)", __FUNCTION__, mId, strerror(-res), res);
+ }
}
}
mLock.lock();
@@ -412,6 +420,9 @@
status_t Camera3OutputStream::setTransformLocked(int transform) {
status_t res = OK;
+
+ if (transform == -1) return res;
+
if (mState == STATE_ERROR) {
ALOGE("%s: Stream in error state", __FUNCTION__);
return INVALID_OPERATION;
@@ -437,7 +448,7 @@
return res;
}
- if ((res = configureConsumerQueueLocked()) != OK) {
+ if ((res = configureConsumerQueueLocked(true /*allowPreviewScheduler*/)) != OK) {
return res;
}
@@ -461,7 +472,7 @@
return OK;
}
-status_t Camera3OutputStream::configureConsumerQueueLocked() {
+status_t Camera3OutputStream::configureConsumerQueueLocked(bool allowPreviewScheduler) {
status_t res;
mTraceFirstBuffer = true;
@@ -547,6 +558,15 @@
}
mTotalBufferCount = maxConsumerBuffers + camera_stream::max_buffers;
+ if (allowPreviewScheduler && isConsumedByHWComposer()) {
+ // We cannot distinguish between a SurfaceView and an ImageReader of
+ // preview buffer format. The PreviewFrameScheduler needs to handle both.
+ if (!property_get_bool("camera.disable_preview_scheduler", false)) {
+ mPreviewFrameScheduler = std::make_unique<PreviewFrameScheduler>(*this, mConsumer);
+ mTotalBufferCount += PreviewFrameScheduler::kQueueDepthWatermark;
+ }
+ }
+
mHandoutTotalBufferCount = 0;
mFrameCount = 0;
mLastTimestamp = 0;
@@ -1185,6 +1205,11 @@
}
}
+bool Camera3OutputStream::shouldLogError(status_t res) {
+ Mutex::Autolock l(mLock);
+ return shouldLogError(res, mState);
+}
+
}; // namespace camera3
}; // namespace android