/*
 * Copyright (C) 2013-2018 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#define LOG_TAG "Camera3-OutputStream"
#define ATRACE_TAG ATRACE_TAG_CAMERA
//#define LOG_NDEBUG 0

#include <algorithm>
#include <ctime>
#include <fstream>

#include <aidl/android/hardware/camera/device/CameraBlob.h>
#include <aidl/android/hardware/camera/device/CameraBlobId.h>
#include "aidl/android/hardware/graphics/common/Dataspace.h"

#include <android-base/unique_fd.h>
#include <cutils/properties.h>
#include <ui/GraphicBuffer.h>
#include <utils/Log.h>
#include <utils/Trace.h>

#include <common/CameraDeviceBase.h>
#include "api1/client2/JpegProcessor.h"
#include "Camera3OutputStream.h"
#include "utils/TraceHFR.h"

#ifndef container_of
#define container_of(ptr, type, member) \
    (type *)((char*)(ptr) - offsetof(type, member))
#endif

namespace android {

namespace camera3 {

using aidl::android::hardware::camera::device::CameraBlob;
using aidl::android::hardware::camera::device::CameraBlobId;

Camera3OutputStream::Camera3OutputStream(int id,
        sp<Surface> consumer,
        uint32_t width, uint32_t height, int format,
        android_dataspace dataSpace, camera_stream_rotation_t rotation,
        nsecs_t timestampOffset, const String8& physicalCameraId,
        const std::unordered_set<int32_t> &sensorPixelModesUsed, IPCTransport transport,
        int setId, bool isMultiResolution, int64_t dynamicRangeProfile,
        int64_t streamUseCase, bool deviceTimeBaseIsRealtime, int timestampBase,
        int mirrorMode, int32_t colorSpace, bool useReadoutTimestamp) :
        Camera3IOStreamBase(id, CAMERA_STREAM_OUTPUT, width, height,
                            /*maxSize*/0, format, dataSpace, rotation,
                            physicalCameraId, sensorPixelModesUsed, setId, isMultiResolution,
                            dynamicRangeProfile, streamUseCase, deviceTimeBaseIsRealtime,
                            timestampBase, colorSpace),
        mConsumer(consumer),
        mTransform(0),
        mTraceFirstBuffer(true),
        mUseBufferManager(false),
        mTimestampOffset(timestampOffset),
        mUseReadoutTime(useReadoutTimestamp),
        mConsumerUsage(0),
        mDropBuffers(false),
        mMirrorMode(mirrorMode),
        mDequeueBufferLatency(kDequeueLatencyBinSize),
        mIPCTransport(transport) {

    if (mConsumer == NULL) {
        ALOGE("%s: Consumer is NULL!", __FUNCTION__);
        mState = STATE_ERROR;
    }

    bool needsReleaseNotify = setId > CAMERA3_STREAM_SET_ID_INVALID;
    mBufferProducerListener = new BufferProducerListener(this, needsReleaseNotify);
}

Camera3OutputStream::Camera3OutputStream(int id,
        sp<Surface> consumer,
        uint32_t width, uint32_t height, size_t maxSize, int format,
        android_dataspace dataSpace, camera_stream_rotation_t rotation,
        nsecs_t timestampOffset, const String8& physicalCameraId,
        const std::unordered_set<int32_t> &sensorPixelModesUsed, IPCTransport transport,
        int setId, bool isMultiResolution, int64_t dynamicRangeProfile,
        int64_t streamUseCase, bool deviceTimeBaseIsRealtime, int timestampBase,
        int mirrorMode, int32_t colorSpace, bool useReadoutTimestamp) :
        Camera3IOStreamBase(id, CAMERA_STREAM_OUTPUT, width, height, maxSize,
                            format, dataSpace, rotation, physicalCameraId, sensorPixelModesUsed,
                            setId, isMultiResolution, dynamicRangeProfile, streamUseCase,
                            deviceTimeBaseIsRealtime, timestampBase, colorSpace),
        mConsumer(consumer),
        mTransform(0),
        mTraceFirstBuffer(true),
        mUseBufferManager(false),
        mTimestampOffset(timestampOffset),
        mUseReadoutTime(useReadoutTimestamp),
        mConsumerUsage(0),
        mDropBuffers(false),
        mMirrorMode(mirrorMode),
        mDequeueBufferLatency(kDequeueLatencyBinSize),
        mIPCTransport(transport) {

    if (format != HAL_PIXEL_FORMAT_BLOB && format != HAL_PIXEL_FORMAT_RAW_OPAQUE) {
        ALOGE("%s: Bad format for size-only stream: %d", __FUNCTION__,
                format);
        mState = STATE_ERROR;
    }

    if (mConsumer == NULL) {
        ALOGE("%s: Consumer is NULL!", __FUNCTION__);
        mState = STATE_ERROR;
    }

    bool needsReleaseNotify = setId > CAMERA3_STREAM_SET_ID_INVALID;
    mBufferProducerListener = new BufferProducerListener(this, needsReleaseNotify);
}

Camera3OutputStream::Camera3OutputStream(int id,
        uint32_t width, uint32_t height, int format,
        uint64_t consumerUsage, android_dataspace dataSpace,
        camera_stream_rotation_t rotation, nsecs_t timestampOffset,
        const String8& physicalCameraId,
        const std::unordered_set<int32_t> &sensorPixelModesUsed, IPCTransport transport,
        int setId, bool isMultiResolution, int64_t dynamicRangeProfile,
        int64_t streamUseCase, bool deviceTimeBaseIsRealtime, int timestampBase,
        int mirrorMode, int32_t colorSpace, bool useReadoutTimestamp) :
        Camera3IOStreamBase(id, CAMERA_STREAM_OUTPUT, width, height,
                            /*maxSize*/0, format, dataSpace, rotation,
                            physicalCameraId, sensorPixelModesUsed, setId, isMultiResolution,
                            dynamicRangeProfile, streamUseCase, deviceTimeBaseIsRealtime,
                            timestampBase, colorSpace),
        mConsumer(nullptr),
        mTransform(0),
        mTraceFirstBuffer(true),
        mUseBufferManager(false),
        mTimestampOffset(timestampOffset),
        mUseReadoutTime(useReadoutTimestamp),
        mConsumerUsage(consumerUsage),
        mDropBuffers(false),
        mMirrorMode(mirrorMode),
        mDequeueBufferLatency(kDequeueLatencyBinSize),
        mIPCTransport(transport) {
    // A deferred consumer only supports the preview surface format for now.
    if (format != HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED) {
        ALOGE("%s: Deferred consumer only supports IMPLEMENTATION_DEFINED format now!",
                __FUNCTION__);
        mState = STATE_ERROR;
    }

    // Validation check for the consumer usage flag.
    if ((consumerUsage & GraphicBuffer::USAGE_HW_TEXTURE) == 0 &&
            (consumerUsage & GraphicBuffer::USAGE_HW_COMPOSER) == 0) {
        ALOGE("%s: Deferred consumer usage flag is illegal %" PRIu64 "!",
                __FUNCTION__, consumerUsage);
        mState = STATE_ERROR;
    }

    mConsumerName = String8("Deferred");
    bool needsReleaseNotify = setId > CAMERA3_STREAM_SET_ID_INVALID;
    mBufferProducerListener = new BufferProducerListener(this, needsReleaseNotify);
}

Camera3OutputStream::Camera3OutputStream(int id, camera_stream_type_t type,
        uint32_t width, uint32_t height,
        int format,
        android_dataspace dataSpace,
        camera_stream_rotation_t rotation,
        const String8& physicalCameraId,
        const std::unordered_set<int32_t> &sensorPixelModesUsed,
        IPCTransport transport,
        uint64_t consumerUsage, nsecs_t timestampOffset,
        int setId, bool isMultiResolution,
        int64_t dynamicRangeProfile, int64_t streamUseCase,
        bool deviceTimeBaseIsRealtime, int timestampBase,
        int mirrorMode, int32_t colorSpace,
        bool useReadoutTimestamp) :
        Camera3IOStreamBase(id, type, width, height,
                            /*maxSize*/0,
                            format, dataSpace, rotation,
                            physicalCameraId, sensorPixelModesUsed, setId, isMultiResolution,
                            dynamicRangeProfile, streamUseCase, deviceTimeBaseIsRealtime,
                            timestampBase, colorSpace),
        mTransform(0),
        mTraceFirstBuffer(true),
        mUseBufferManager(false),
        mTimestampOffset(timestampOffset),
        mUseReadoutTime(useReadoutTimestamp),
        mConsumerUsage(consumerUsage),
        mDropBuffers(false),
        mMirrorMode(mirrorMode),
        mDequeueBufferLatency(kDequeueLatencyBinSize),
        mIPCTransport(transport) {

    bool needsReleaseNotify = setId > CAMERA3_STREAM_SET_ID_INVALID;
    mBufferProducerListener = new BufferProducerListener(this, needsReleaseNotify);

    // Subclasses are expected to initialize mConsumer themselves.
}


Camera3OutputStream::~Camera3OutputStream() {
    disconnectLocked();
}

status_t Camera3OutputStream::getBufferLocked(camera_stream_buffer *buffer,
        const std::vector<size_t>&) {
    ATRACE_HFR_CALL();

    ANativeWindowBuffer* anb;
    int fenceFd = -1;

    status_t res;
    res = getBufferLockedCommon(&anb, &fenceFd);
    if (res != OK) {
        return res;
    }

    /**
     * FenceFD now owned by HAL except in case of error,
     * in which case we reassign it to acquire_fence
     */
    handoutBufferLocked(*buffer, &(anb->handle), /*acquireFence*/fenceFd,
                        /*releaseFence*/-1, CAMERA_BUFFER_STATUS_OK, /*output*/true);

    return OK;
}

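// Batched variant of getBufferLocked(): dequeues several buffers from the consumer
// Surface in a single call and hands each of them out to the HAL.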
status_t Camera3OutputStream::getBuffersLocked(std::vector<OutstandingBuffer>* outBuffers) {
    status_t res;

    if ((res = getBufferPreconditionCheckLocked()) != OK) {
        return res;
    }

    if (mUseBufferManager) {
        ALOGE("%s: stream %d is managed by buffer manager and does not support batch operation",
                __FUNCTION__, mId);
        return INVALID_OPERATION;
    }

    sp<Surface> consumer = mConsumer;
    /**
     * Release the lock briefly to avoid deadlock in the following scenario:
     * Thread 1: StreamingProcessor::startStream -> Camera3Stream::isConfiguring().
     * This thread holds the StreamingProcessor lock and tries to lock the Camera3Stream lock.
     * Thread 2: Camera3Stream::returnBuffer -> StreamingProcessor::onFrameAvailable().
     * This thread holds the Camera3Stream lock and the bufferQueue lock, and tries to lock
     * the StreamingProcessor lock.
     * Thread 3: Camera3Stream::getBuffer(). This thread holds the Camera3Stream lock
     * and tries to lock the bufferQueue lock.
     * The result is a circular locking dependency.
     */
    mLock.unlock();

    size_t numBuffersRequested = outBuffers->size();
    std::vector<Surface::BatchBuffer> buffers(numBuffersRequested);

    nsecs_t dequeueStart = systemTime(SYSTEM_TIME_MONOTONIC);
    res = consumer->dequeueBuffers(&buffers);
    nsecs_t dequeueEnd = systemTime(SYSTEM_TIME_MONOTONIC);
    mDequeueBufferLatency.add(dequeueStart, dequeueEnd);

    mLock.lock();

    if (res != OK) {
        if (shouldLogError(res, mState)) {
            ALOGE("%s: Stream %d: Can't dequeue %zu output buffers: %s (%d)",
                    __FUNCTION__, mId, numBuffersRequested, strerror(-res), res);
        }
        checkRetAndSetAbandonedLocked(res);
        return res;
    }
    checkRemovedBuffersLocked();

    /**
     * FenceFD now owned by HAL except in case of error,
     * in which case we reassign it to acquire_fence
     */
    for (size_t i = 0; i < numBuffersRequested; i++) {
        handoutBufferLocked(*(outBuffers->at(i).outBuffer),
                &(buffers[i].buffer->handle), /*acquireFence*/buffers[i].fenceFd,
                /*releaseFence*/-1, CAMERA_BUFFER_STATUS_OK, /*output*/true);
    }
    return OK;
}

status_t Camera3OutputStream::queueBufferToConsumer(sp<ANativeWindow>& consumer,
            ANativeWindowBuffer* buffer, int anwReleaseFence,
            const std::vector<size_t>&) {
    return consumer->queueBuffer(consumer.get(), buffer, anwReleaseFence);
}

status_t Camera3OutputStream::returnBufferLocked(
        const camera_stream_buffer &buffer,
        nsecs_t timestamp, nsecs_t readoutTimestamp,
        int32_t transform, const std::vector<size_t>& surface_ids) {
    ATRACE_HFR_CALL();

    if (mHandoutTotalBufferCount == 1) {
        returnPrefetchedBuffersLocked();
    }

    status_t res = returnAnyBufferLocked(buffer, timestamp, readoutTimestamp,
                                         /*output*/true, transform, surface_ids);

    if (res != OK) {
        return res;
    }

    mLastTimestamp = timestamp;
    mFrameCount++;

    return OK;
}

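// For BLOB (JPEG) buffers produced by a HIDL HAL: locks the buffer, validates the legacy
// camera_jpeg_blob_t trailer at the end of the buffer, and rewrites it using the AIDL
// CameraBlob layout so consumers always see the same transport header.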
status_t Camera3OutputStream::fixUpHidlJpegBlobHeader(ANativeWindowBuffer* anwBuffer, int fence) {
    // Lock the JPEG buffer for CPU read
    sp<GraphicBuffer> graphicBuffer = GraphicBuffer::from(anwBuffer);
    void* mapped = nullptr;
    base::unique_fd fenceFd(dup(fence));
    // Use USAGE_SW_WRITE_RARELY since we're going to re-write the CameraBlob
    // header.
    GraphicBufferLocker gbLocker(graphicBuffer);
    status_t res =
            gbLocker.lockAsync(
                    GraphicBuffer::USAGE_SW_READ_OFTEN | GraphicBuffer::USAGE_SW_WRITE_RARELY,
                    &mapped, fenceFd.release());
    if (res != OK) {
        ALOGE("%s: Failed to lock the buffer: %s (%d)", __FUNCTION__, strerror(-res), res);
        return res;
    }

    uint8_t *hidlHeaderStart =
            static_cast<uint8_t*>(mapped) + graphicBuffer->getWidth() - sizeof(camera_jpeg_blob_t);
    // Check that the jpeg buffer is big enough to contain HIDL camera blob
    if (hidlHeaderStart < static_cast<uint8_t *>(mapped)) {
        ALOGE("%s, jpeg buffer not large enough to fit HIDL camera blob %" PRIu32, __FUNCTION__,
                graphicBuffer->getWidth());
        return BAD_VALUE;
    }
    camera_jpeg_blob_t *hidlBlobHeader = reinterpret_cast<camera_jpeg_blob_t *>(hidlHeaderStart);

    // Check that the blob is indeed the jpeg blob id.
    if (hidlBlobHeader->jpeg_blob_id != CAMERA_JPEG_BLOB_ID) {
        ALOGE("%s, jpeg blob id %d is not correct", __FUNCTION__, hidlBlobHeader->jpeg_blob_id);
        return BAD_VALUE;
    }

    // Retrieve id and blob size
    CameraBlobId blobId = static_cast<CameraBlobId>(hidlBlobHeader->jpeg_blob_id);
    uint32_t blobSizeBytes = hidlBlobHeader->jpeg_size;

    if (blobSizeBytes > (graphicBuffer->getWidth() - sizeof(camera_jpeg_blob_t))) {
        ALOGE("%s, blobSize in HIDL jpeg blob : %d is corrupt, buffer size %" PRIu32, __FUNCTION__,
                blobSizeBytes, graphicBuffer->getWidth());
    }

    uint8_t *aidlHeaderStart =
            static_cast<uint8_t*>(mapped) + graphicBuffer->getWidth() - sizeof(CameraBlob);

    // Check that the jpeg buffer is big enough to contain AIDL camera blob
    if (aidlHeaderStart < static_cast<uint8_t *>(mapped)) {
        ALOGE("%s, jpeg buffer not large enough to fit AIDL camera blob %" PRIu32, __FUNCTION__,
                graphicBuffer->getWidth());
        return BAD_VALUE;
    }

    if (static_cast<uint8_t*>(mapped) + blobSizeBytes > aidlHeaderStart) {
        ALOGE("%s, jpeg blob with size %d , buffer size %" PRIu32 " not large enough to fit"
                " AIDL camera blob without corrupting jpeg", __FUNCTION__, blobSizeBytes,
                graphicBuffer->getWidth());
        return BAD_VALUE;
    }

    // Fill in JPEG header
    CameraBlob aidlHeader = {
            .blobId = blobId,
            .blobSizeBytes = static_cast<int32_t>(blobSizeBytes)
    };
    memcpy(aidlHeaderStart, &aidlHeader, sizeof(CameraBlob));
    graphicBuffer->unlock();
    return OK;
}

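// Returns a buffer to the consumer Surface: cancels it on error/drop/zero timestamp,
// otherwise fixes up the JPEG blob header for HIDL HALs when needed, optionally dumps the
// image to disk, and either defers the buffer to the preview frame spacer or queues it
// directly with the appropriate presentation timestamp.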
status_t Camera3OutputStream::returnBufferCheckedLocked(
        const camera_stream_buffer &buffer,
        nsecs_t timestamp,
        nsecs_t readoutTimestamp,
        [[maybe_unused]] bool output,
        int32_t transform,
        const std::vector<size_t>& surface_ids,
        /*out*/
        sp<Fence> *releaseFenceOut) {

    ALOG_ASSERT(output, "Expected output to be true");

    status_t res;

    // Fence management - always honor release fence from HAL
    sp<Fence> releaseFence = new Fence(buffer.release_fence);
    int anwReleaseFence = releaseFence->dup();

    /**
     * Release the lock briefly to avoid deadlock with
     * StreamingProcessor::startStream -> Camera3Stream::isConfiguring (this
     * thread will go into StreamingProcessor::onFrameAvailable) during
     * queueBuffer
     */
    sp<ANativeWindow> currentConsumer = mConsumer;
    StreamState state = mState;
    mLock.unlock();

    ANativeWindowBuffer *anwBuffer = container_of(buffer.buffer, ANativeWindowBuffer, handle);
    bool bufferDeferred = false;
    /**
     * Return buffer back to ANativeWindow
     */
    if (buffer.status == CAMERA_BUFFER_STATUS_ERROR || mDropBuffers || timestamp == 0) {
        // Cancel buffer
        if (mDropBuffers) {
            ALOGV("%s: Dropping a frame for stream %d.", __FUNCTION__, mId);
        } else if (buffer.status == CAMERA_BUFFER_STATUS_ERROR) {
            ALOGV("%s: A frame is dropped for stream %d due to buffer error.", __FUNCTION__, mId);
        } else {
            ALOGE("%s: Stream %d: timestamp shouldn't be 0", __FUNCTION__, mId);
        }

        res = currentConsumer->cancelBuffer(currentConsumer.get(),
                anwBuffer,
                anwReleaseFence);
        if (shouldLogError(res, state)) {
            ALOGE("%s: Stream %d: Error cancelling buffer to native window:"
                    " %s (%d)", __FUNCTION__, mId, strerror(-res), res);
        }

        notifyBufferReleased(anwBuffer);
        if (mUseBufferManager) {
            // Return this buffer back to buffer manager.
            mBufferProducerListener->onBufferReleased();
        }
    } else {
        if (mTraceFirstBuffer && (stream_type == CAMERA_STREAM_OUTPUT)) {
            {
                char traceLog[48];
                snprintf(traceLog, sizeof(traceLog), "Stream %d: first full buffer\n", mId);
                ATRACE_NAME(traceLog);
            }
            mTraceFirstBuffer = false;
        }
        // Fix CameraBlob id type discrepancy between HIDL and AIDL, details: http://b/229688810
        if (getFormat() == HAL_PIXEL_FORMAT_BLOB && (getDataSpace() == HAL_DATASPACE_V0_JFIF ||
                    (getDataSpace() ==
                     static_cast<android_dataspace_t>(
                         aidl::android::hardware::graphics::common::Dataspace::JPEG_R)))) {
            if (mIPCTransport == IPCTransport::HIDL) {
                fixUpHidlJpegBlobHeader(anwBuffer, anwReleaseFence);
            }
            // If this is a JPEG output, and image dump mask is set, save image to
            // disk.
            if (mImageDumpMask) {
                dumpImageToDisk(timestamp, anwBuffer, anwReleaseFence);
            }
        }

        nsecs_t captureTime = ((mUseReadoutTime || mSyncToDisplay) && readoutTimestamp != 0 ?
                readoutTimestamp : timestamp) - mTimestampOffset;
        if (mPreviewFrameSpacer != nullptr) {
            nsecs_t readoutTime = (readoutTimestamp != 0 ? readoutTimestamp : timestamp)
                    - mTimestampOffset;
            res = mPreviewFrameSpacer->queuePreviewBuffer(captureTime, readoutTime,
                    transform, anwBuffer, anwReleaseFence);
            if (res != OK) {
                ALOGE("%s: Stream %d: Error queuing buffer to preview buffer spacer: %s (%d)",
                        __FUNCTION__, mId, strerror(-res), res);
                return res;
            }
            bufferDeferred = true;
        } else {
            nsecs_t presentTime = mSyncToDisplay ?
                    syncTimestampToDisplayLocked(captureTime, releaseFence->dup()) : captureTime;

            setTransform(transform, true/*mayChangeMirror*/);
            res = native_window_set_buffers_timestamp(mConsumer.get(), presentTime);
            if (res != OK) {
                ALOGE("%s: Stream %d: Error setting timestamp: %s (%d)",
                        __FUNCTION__, mId, strerror(-res), res);
                return res;
            }

            queueHDRMetadata(anwBuffer->handle, currentConsumer, dynamic_range_profile);

            res = queueBufferToConsumer(currentConsumer, anwBuffer, anwReleaseFence, surface_ids);
            if (shouldLogError(res, state)) {
                ALOGE("%s: Stream %d: Error queueing buffer to native window:"
                        " %s (%d)", __FUNCTION__, mId, strerror(-res), res);
            }
        }
    }
    mLock.lock();

    if (bufferDeferred) {
        mCachedOutputBufferCount++;
    }

    // Once a valid buffer has been returned to the queue, we can no longer
    // dequeue all buffers for preallocation.
    if (buffer.status != CAMERA_BUFFER_STATUS_ERROR) {
        mStreamUnpreparable = true;
    }

    *releaseFenceOut = releaseFence;

    return res;
}

void Camera3OutputStream::dump(int fd, [[maybe_unused]] const Vector<String16> &args) const {
    String8 lines;
    lines.appendFormat("    Stream[%d]: Output\n", mId);
    lines.appendFormat("      Consumer name: %s\n", mConsumerName.string());
    write(fd, lines.string(), lines.size());

    Camera3IOStreamBase::dump(fd, args);

    mDequeueBufferLatency.dump(fd,
            "      DequeueBuffer latency histogram:");
}

status_t Camera3OutputStream::setTransform(int transform, bool mayChangeMirror) {
    ATRACE_CALL();
    Mutex::Autolock l(mLock);
    if (mMirrorMode != OutputConfiguration::MIRROR_MODE_AUTO && mayChangeMirror) {
        // If the mirroring mode is not AUTO, do not allow a transform update
        // which may change the mirroring.
        return OK;
    }

    return setTransformLocked(transform);
}

status_t Camera3OutputStream::setTransformLocked(int transform) {
    status_t res = OK;

    if (transform == -1) return res;

    if (mState == STATE_ERROR) {
        ALOGE("%s: Stream in error state", __FUNCTION__);
        return INVALID_OPERATION;
    }

    mTransform = transform;
    if (mState == STATE_CONFIGURED) {
        res = native_window_set_buffers_transform(mConsumer.get(),
                transform);
        if (res != OK) {
            ALOGE("%s: Unable to configure stream transform to %x: %s (%d)",
                    __FUNCTION__, transform, strerror(-res), res);
        }
    }
    return res;
}

status_t Camera3OutputStream::configureQueueLocked() {
    status_t res;

    mTraceFirstBuffer = true;
    if ((res = Camera3IOStreamBase::configureQueueLocked()) != OK) {
        return res;
    }

    if ((res = configureConsumerQueueLocked(true /*allowPreviewRespace*/)) != OK) {
        return res;
    }

    // Set dequeueBuffer/attachBuffer timeout if the consumer is not hw composer or hw texture.
    // We need to skip these cases as the timeout will disable the non-blocking (async) mode.
    if (!(isConsumedByHWComposer() || isConsumedByHWTexture())) {
        if (mUseBufferManager) {
            // When the buffer manager is handling the buffer, we should have available buffers in
            // the buffer queue before we call into dequeueBuffer because the buffer manager is
            // tracking free buffers.
            // There are however some consumer-side features (ImageReader::discardFreeBuffers) that
            // can discard free buffers without notifying the buffer manager. We want the timeout
            // to happen immediately here so the buffer manager can try to update its internal
            // state and try to allocate a buffer instead of waiting.
            mConsumer->setDequeueTimeout(0);
        } else {
            mConsumer->setDequeueTimeout(kDequeueBufferTimeout);
        }
    }

    return OK;
}

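// Connects to the consumer Surface and applies usage, scaling mode, dimensions, format,
// dataspace, buffer count and transform. Also decides whether the stream should be
// display-synced or paced by the preview frame spacer, adjusts the timestamp offset for
// the selected timestamp base, and registers with the buffer manager when applicable.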
status_t Camera3OutputStream::configureConsumerQueueLocked(bool allowPreviewRespace) {
    status_t res;

    mTraceFirstBuffer = true;

    ALOG_ASSERT(mConsumer != 0, "mConsumer should never be NULL");

    // Configure consumer-side ANativeWindow interface. The listener may be used
    // to notify buffer manager (if it is used) of the returned buffers.
    res = mConsumer->connect(NATIVE_WINDOW_API_CAMERA,
            /*reportBufferRemoval*/true,
            /*listener*/mBufferProducerListener);
    if (res != OK) {
        ALOGE("%s: Unable to connect to native window for stream %d",
                __FUNCTION__, mId);
        return res;
    }

    mConsumerName = mConsumer->getConsumerName();

    res = native_window_set_usage(mConsumer.get(), mUsage);
    if (res != OK) {
        ALOGE("%s: Unable to configure usage %" PRIu64 " for stream %d",
                __FUNCTION__, mUsage, mId);
        return res;
    }

    res = native_window_set_scaling_mode(mConsumer.get(),
            NATIVE_WINDOW_SCALING_MODE_SCALE_TO_WINDOW);
    if (res != OK) {
        ALOGE("%s: Unable to configure stream scaling: %s (%d)",
                __FUNCTION__, strerror(-res), res);
        return res;
    }

    if (mMaxSize == 0) {
        // For buffers of known size
        res = native_window_set_buffers_dimensions(mConsumer.get(),
                camera_stream::width, camera_stream::height);
    } else {
        // For buffers with bounded size
        res = native_window_set_buffers_dimensions(mConsumer.get(),
                mMaxSize, 1);
    }
    if (res != OK) {
        ALOGE("%s: Unable to configure stream buffer dimensions"
                " %d x %d (maxSize %zu) for stream %d",
                __FUNCTION__, camera_stream::width, camera_stream::height,
                mMaxSize, mId);
        return res;
    }
    res = native_window_set_buffers_format(mConsumer.get(),
            camera_stream::format);
    if (res != OK) {
        ALOGE("%s: Unable to configure stream buffer format %#x for stream %d",
                __FUNCTION__, camera_stream::format, mId);
        return res;
    }

    res = native_window_set_buffers_data_space(mConsumer.get(),
            camera_stream::data_space);
    if (res != OK) {
        ALOGE("%s: Unable to configure stream dataspace %#x for stream %d",
                __FUNCTION__, camera_stream::data_space, mId);
        return res;
    }

    int maxConsumerBuffers;
    res = static_cast<ANativeWindow*>(mConsumer.get())->query(
            mConsumer.get(),
            NATIVE_WINDOW_MIN_UNDEQUEUED_BUFFERS, &maxConsumerBuffers);
    if (res != OK) {
        ALOGE("%s: Unable to query consumer undequeued"
                " buffer count for stream %d", __FUNCTION__, mId);
        return res;
    }

    ALOGV("%s: Consumer wants %d buffers, HAL wants %d", __FUNCTION__,
            maxConsumerBuffers, camera_stream::max_buffers);
    if (camera_stream::max_buffers == 0) {
        ALOGE("%s: Camera HAL requested max_buffer count: %d, requires at least 1",
                __FUNCTION__, camera_stream::max_buffers);
        return INVALID_OPERATION;
    }

    mTotalBufferCount = maxConsumerBuffers + camera_stream::max_buffers;

    int timestampBase = getTimestampBase();
    bool isDefaultTimeBase = (timestampBase ==
            OutputConfiguration::TIMESTAMP_BASE_DEFAULT);
    if (allowPreviewRespace) {
        bool forceChoreographer = (timestampBase ==
                OutputConfiguration::TIMESTAMP_BASE_CHOREOGRAPHER_SYNCED);
        bool defaultToChoreographer = (isDefaultTimeBase &&
                isConsumedByHWComposer());
        bool defaultToSpacer = (isDefaultTimeBase &&
                isConsumedByHWTexture() &&
                !isConsumedByCPU() &&
                !isVideoStream());
        if (forceChoreographer || defaultToChoreographer) {
            mSyncToDisplay = true;
            // For a choreographer-synced stream, extra buffers aren't kept by the
            // camera service, so there is no need to update mMaxCachedBufferCount.
            mTotalBufferCount += kDisplaySyncExtraBuffer;
        } else if (defaultToSpacer) {
            mPreviewFrameSpacer = new PreviewFrameSpacer(this, mConsumer);
            // For the preview frame spacer, the extra buffer is kept by the camera
            // service, so update mMaxCachedBufferCount.
            mMaxCachedBufferCount = 1;
            mTotalBufferCount += mMaxCachedBufferCount;
            res = mPreviewFrameSpacer->run(String8::format("PreviewSpacer-%d", mId).string());
            if (res != OK) {
                ALOGE("%s: Unable to start preview spacer: %s (%d)", __FUNCTION__,
                        strerror(-res), res);
                return res;
            }
        }
    }
    mHandoutTotalBufferCount = 0;
    mFrameCount = 0;
    mLastTimestamp = 0;

    if (isDeviceTimeBaseRealtime()) {
        if (isDefaultTimeBase && !isConsumedByHWComposer() && !isVideoStream()) {
            // Default time base, but not hardware composer or video encoder
            mTimestampOffset = 0;
        } else if (timestampBase == OutputConfiguration::TIMESTAMP_BASE_REALTIME ||
                timestampBase == OutputConfiguration::TIMESTAMP_BASE_SENSOR) {
            mTimestampOffset = 0;
        }
        // If timestampBase is CHOREOGRAPHER_SYNCED or MONOTONIC, leave
        // timestamp offset as bootTime - monotonicTime.
    } else {
        if (timestampBase == OutputConfiguration::TIMESTAMP_BASE_REALTIME) {
            // Reverse offset for monotonicTime -> bootTime
            mTimestampOffset = -mTimestampOffset;
        } else {
            // If timestampBase is DEFAULT, MONOTONIC, SENSOR or
            // CHOREOGRAPHER_SYNCED, the timestamp offset is 0.
            mTimestampOffset = 0;
        }
    }

    res = native_window_set_buffer_count(mConsumer.get(),
            mTotalBufferCount);
    if (res != OK) {
        ALOGE("%s: Unable to set buffer count for stream %d",
                __FUNCTION__, mId);
        return res;
    }

    res = native_window_set_buffers_transform(mConsumer.get(),
            mTransform);
    if (res != OK) {
        ALOGE("%s: Unable to configure stream transform to %x: %s (%d)",
                __FUNCTION__, mTransform, strerror(-res), res);
        return res;
    }

    /**
     * The Camera3 buffer manager is only supported from HAL3.3 onwards, as older HALs require
     * buffers to be statically allocated for internal static buffer registration, while the
     * buffers provided by the buffer manager are dynamically allocated. Camera3Device only
     * sets mBufferManager if the device version is > HAL3.2, which guarantees that the buffer
     * manager setup is skipped in the code below. Note that HAL3.2 is also excluded here, as
     * some HAL3.2 devices may not support dynamic buffer registration.
     * Also, Camera3BufferManager does not support display/texture streams as they have their
     * own buffer management logic.
     */
    if (mBufferManager != 0 && mSetId > CAMERA3_STREAM_SET_ID_INVALID &&
            !(isConsumedByHWComposer() || isConsumedByHWTexture())) {
        uint64_t consumerUsage = 0;
        getEndpointUsage(&consumerUsage);
        uint32_t width = (mMaxSize == 0) ? getWidth() : mMaxSize;
        uint32_t height = (mMaxSize == 0) ? getHeight() : 1;
        StreamInfo streamInfo(
                getId(), getStreamSetId(), width, height, getFormat(), getDataSpace(),
                mUsage | consumerUsage, mTotalBufferCount,
                /*isConfigured*/true, isMultiResolution());
        wp<Camera3OutputStream> weakThis(this);
        res = mBufferManager->registerStream(weakThis,
                streamInfo);
        if (res == OK) {
            // Disable buffer allocation for this BufferQueue, buffer manager will take over
            // the buffer allocation responsibility.
            mConsumer->getIGraphicBufferProducer()->allowAllocation(false);
            mUseBufferManager = true;
        } else {
            ALOGE("%s: Unable to register stream %d to camera3 buffer manager, "
                    "(error %d %s), fall back to BufferQueue for buffer management!",
                    __FUNCTION__, mId, res, strerror(-res));
        }
    }

    return OK;
}

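// Dequeues the next output buffer, either by attaching one from Camera3BufferManager or by
// dequeueing from the Surface (single or batched). On a dequeue timeout with the buffer
// manager in use, retries via the manager without allocating a new free buffer.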
status_t Camera3OutputStream::getBufferLockedCommon(ANativeWindowBuffer** anb, int* fenceFd) {
    ATRACE_HFR_CALL();
    status_t res;

    if ((res = getBufferPreconditionCheckLocked()) != OK) {
        return res;
    }

    bool gotBufferFromManager = false;

    if (mUseBufferManager) {
        sp<GraphicBuffer> gb;
        res = mBufferManager->getBufferForStream(getId(), getStreamSetId(),
                isMultiResolution(), &gb, fenceFd);
        if (res == OK) {
            // Attach this buffer to the bufferQueue: the buffer will be in dequeue state after a
            // successful return.
            *anb = gb.get();
            res = mConsumer->attachBuffer(*anb);
            if (shouldLogError(res, mState)) {
                ALOGE("%s: Stream %d: Can't attach the output buffer to this surface: %s (%d)",
                        __FUNCTION__, mId, strerror(-res), res);
            }
            if (res != OK) {
                checkRetAndSetAbandonedLocked(res);
                return res;
            }
            gotBufferFromManager = true;
            ALOGV("Stream %d: Attached new buffer", getId());
        } else if (res == ALREADY_EXISTS) {
            // Have sufficient free buffers already attached, can just
            // dequeue from buffer queue
            ALOGV("Stream %d: Reusing attached buffer", getId());
            gotBufferFromManager = false;
        } else if (res != OK) {
            ALOGE("%s: Stream %d: Can't get next output buffer from buffer manager: %s (%d)",
                    __FUNCTION__, mId, strerror(-res), res);
            return res;
        }
    }
    if (!gotBufferFromManager) {
        /**
         * Release the lock briefly to avoid deadlock in the following scenario:
         * Thread 1: StreamingProcessor::startStream -> Camera3Stream::isConfiguring().
         * This thread holds the StreamingProcessor lock and tries to lock the Camera3Stream lock.
         * Thread 2: Camera3Stream::returnBuffer -> StreamingProcessor::onFrameAvailable().
         * This thread holds the Camera3Stream lock and the bufferQueue lock, and tries to lock
         * the StreamingProcessor lock.
         * Thread 3: Camera3Stream::getBuffer(). This thread holds the Camera3Stream lock
         * and tries to lock the bufferQueue lock.
         * The result is a circular locking dependency.
         */
        sp<Surface> consumer = mConsumer;
        size_t remainingBuffers = (mState == STATE_PREPARING ? mTotalBufferCount :
                                   camera_stream::max_buffers) - mHandoutTotalBufferCount;
        mLock.unlock();

        nsecs_t dequeueStart = systemTime(SYSTEM_TIME_MONOTONIC);

        size_t batchSize = mBatchSize.load();
        if (batchSize == 1) {
            sp<ANativeWindow> anw = consumer;
            res = anw->dequeueBuffer(anw.get(), anb, fenceFd);
        } else {
            std::unique_lock<std::mutex> batchLock(mBatchLock);
            res = OK;
            if (mBatchedBuffers.size() == 0) {
                if (remainingBuffers == 0) {
                    ALOGE("%s: cannot get buffer while all buffers are handed out", __FUNCTION__);
                    return INVALID_OPERATION;
                }
                if (batchSize > remainingBuffers) {
                    batchSize = remainingBuffers;
                }
                batchLock.unlock();
                // Refill batched buffers
                std::vector<Surface::BatchBuffer> batchedBuffers;
                batchedBuffers.resize(batchSize);
                res = consumer->dequeueBuffers(&batchedBuffers);
                batchLock.lock();
                if (res != OK) {
                    ALOGE("%s: batch dequeueBuffers call failed! %s (%d)",
                            __FUNCTION__, strerror(-res), res);
                } else {
                    mBatchedBuffers = std::move(batchedBuffers);
                }
            }

            if (res == OK) {
                // Dispatch batch buffers
                *anb = mBatchedBuffers.back().buffer;
                *fenceFd = mBatchedBuffers.back().fenceFd;
                mBatchedBuffers.pop_back();
            }
        }

        nsecs_t dequeueEnd = systemTime(SYSTEM_TIME_MONOTONIC);
        mDequeueBufferLatency.add(dequeueStart, dequeueEnd);

        mLock.lock();

        if (mUseBufferManager && res == TIMED_OUT) {
            checkRemovedBuffersLocked();

            sp<GraphicBuffer> gb;
            res = mBufferManager->getBufferForStream(
                    getId(), getStreamSetId(), isMultiResolution(),
                    &gb, fenceFd, /*noFreeBuffer*/true);

            if (res == OK) {
                // Attach this buffer to the bufferQueue: the buffer will be in dequeue state after
                // a successful return.
                *anb = gb.get();
                res = mConsumer->attachBuffer(*anb);
                gotBufferFromManager = true;
                ALOGV("Stream %d: Attached new buffer", getId());

                if (res != OK) {
                    if (shouldLogError(res, mState)) {
                        ALOGE("%s: Stream %d: Can't attach the output buffer to this surface:"
                                " %s (%d)", __FUNCTION__, mId, strerror(-res), res);
                    }
                    checkRetAndSetAbandonedLocked(res);
                    return res;
                }
            } else {
                ALOGE("%s: Stream %d: Can't get next output buffer from buffer manager:"
                        " %s (%d)", __FUNCTION__, mId, strerror(-res), res);
                return res;
            }
        } else if (res != OK) {
            if (shouldLogError(res, mState)) {
                ALOGE("%s: Stream %d: Can't dequeue next output buffer: %s (%d)",
                        __FUNCTION__, mId, strerror(-res), res);
            }
            checkRetAndSetAbandonedLocked(res);
            return res;
        }
    }

    if (res == OK) {
        checkRemovedBuffersLocked();
    }

    return res;
}

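// Flushes buffers that the consumer has removed from the queue and, if requested, notifies
// the buffer manager so it can update its accounting.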
void Camera3OutputStream::checkRemovedBuffersLocked(bool notifyBufferManager) {
    std::vector<sp<GraphicBuffer>> removedBuffers;
    status_t res = mConsumer->getAndFlushRemovedBuffers(&removedBuffers);
    if (res == OK) {
        onBuffersRemovedLocked(removedBuffers);

        if (notifyBufferManager && mUseBufferManager && removedBuffers.size() > 0) {
            mBufferManager->onBuffersRemoved(getId(), getStreamSetId(), isMultiResolution(),
                    removedBuffers.size());
        }
    }
}

void Camera3OutputStream::checkRetAndSetAbandonedLocked(status_t res) {
    // Only transition to STATE_ABANDONED from STATE_CONFIGURED. (If it is
    // STATE_PREPARING, let prepareNextBuffer handle the error.)
    if ((res == NO_INIT || res == DEAD_OBJECT) && mState == STATE_CONFIGURED) {
        mState = STATE_ABANDONED;
    }
}

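// Suppresses error logging for failures that are expected once the consumer surface has
// been abandoned (DEAD_OBJECT / NO_INIT while in STATE_ABANDONED).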
bool Camera3OutputStream::shouldLogError(status_t res, StreamState state) {
    if (res == OK) {
        return false;
    }
    if ((res == DEAD_OBJECT || res == NO_INIT) && state == STATE_ABANDONED) {
        return false;
    }
    return true;
}

void Camera3OutputStream::onCachedBufferQueued() {
    Mutex::Autolock l(mLock);
    mCachedOutputBufferCount--;
    // Signal whoever is waiting for the buffer to be returned to the buffer
    // queue.
    mOutputBufferReturnedSignal.signal();
}

status_t Camera3OutputStream::disconnectLocked() {
    status_t res;

    if ((res = Camera3IOStreamBase::disconnectLocked()) != OK) {
        return res;
    }

    // Stream configuration was not finished (can only be in STATE_IN_CONFIG or STATE_CONSTRUCTED
    // state), so there is no need to change the stream state; return OK.
    if (mConsumer == nullptr) {
        return OK;
    }

    returnPrefetchedBuffersLocked();

    if (mPreviewFrameSpacer != nullptr) {
        mPreviewFrameSpacer->requestExit();
    }

    ALOGV("%s: disconnecting stream %d from native window", __FUNCTION__, getId());

    res = native_window_api_disconnect(mConsumer.get(),
                                       NATIVE_WINDOW_API_CAMERA);
    /**
     * This is not an error. If the client's calling process dies, the window will
     * also die and all calls to it will return DEAD_OBJECT, thus it's already
     * "disconnected"
     */
    if (res == DEAD_OBJECT) {
        ALOGW("%s: While disconnecting stream %d from native window, the"
                " native window died from under us", __FUNCTION__, mId);
    }
    else if (res != OK) {
        ALOGE("%s: Unable to disconnect stream %d from native window "
                "(error %d %s)",
                __FUNCTION__, mId, res, strerror(-res));
        mState = STATE_ERROR;
        return res;
    }

    // Since the device is already idle, there is no getBuffer call to the buffer manager, so
    // unregistering the stream at this point should be safe.
    if (mUseBufferManager) {
        res = mBufferManager->unregisterStream(getId(), getStreamSetId(), isMultiResolution());
        if (res != OK) {
            ALOGE("%s: Unable to unregister stream %d from buffer manager "
                    "(error %d %s)", __FUNCTION__, mId, res, strerror(-res));
            mState = STATE_ERROR;
            return res;
        }
        // Note that, to make the prepare/teardown case work, we must not call
        // mBufferManager.clear(), as the stream is still in a usable state after this call.
        mUseBufferManager = false;
    }

    mState = (mState == STATE_IN_RECONFIG) ? STATE_IN_CONFIG
                                           : STATE_CONSTRUCTED;

    mDequeueBufferLatency.log("Stream %d dequeueBuffer latency histogram", mId);
    mDequeueBufferLatency.reset();
    return OK;
}

Emilian Peev050f5dc2017-05-18 14:43:56 +01001049status_t Camera3OutputStream::getEndpointUsage(uint64_t *usage) const {
Eino-Ville Talvalab2f5b192013-07-30 14:36:03 -07001050
1051 status_t res;
Shuzhen Wang0129d522016-10-30 22:43:41 -07001052
Zhijun He5d677d12016-05-29 16:52:39 -07001053 if (mConsumer == nullptr) {
1054 // mConsumerUsage was sanitized before the Camera3OutputStream was constructed.
1055 *usage = mConsumerUsage;
1056 return OK;
1057 }
1058
Shuzhen Wang0129d522016-10-30 22:43:41 -07001059 res = getEndpointUsageForSurface(usage, mConsumer);
1060
1061 return res;
1062}
1063
Emilian Peev35ae8262018-11-08 13:11:32 +00001064void Camera3OutputStream::applyZSLUsageQuirk(int format, uint64_t *consumerUsage /*inout*/) {
1065 if (consumerUsage == nullptr) {
1066 return;
1067 }
Shuzhen Wang0129d522016-10-30 22:43:41 -07001068
Chien-Yu Chen618ff8a2015-03-13 11:27:17 -07001069 // If an opaque output stream's endpoint is ImageReader, add
Yin-Chia Yeh47cf8e62017-04-04 13:00:03 -07001070 // GRALLOC_USAGE_HW_CAMERA_ZSL to the usage so HAL knows it will be used
Chien-Yu Chen618ff8a2015-03-13 11:27:17 -07001071 // for the ZSL use case.
1072 // Assume it's for ImageReader if the consumer usage doesn't have any of these bits set:
1073 // 1. GRALLOC_USAGE_HW_TEXTURE
1074 // 2. GRALLOC_USAGE_HW_RENDER
1075 // 3. GRALLOC_USAGE_HW_COMPOSER
1076 // 4. GRALLOC_USAGE_HW_VIDEO_ENCODER
Emilian Peev35ae8262018-11-08 13:11:32 +00001077 if (format == HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED &&
1078 (*consumerUsage & (GRALLOC_USAGE_HW_TEXTURE | GRALLOC_USAGE_HW_RENDER |
Shuzhen Wang0129d522016-10-30 22:43:41 -07001079 GRALLOC_USAGE_HW_COMPOSER | GRALLOC_USAGE_HW_VIDEO_ENCODER)) == 0) {
Emilian Peev35ae8262018-11-08 13:11:32 +00001080 *consumerUsage |= GRALLOC_USAGE_HW_CAMERA_ZSL;
Chien-Yu Chen618ff8a2015-03-13 11:27:17 -07001081 }
Emilian Peev35ae8262018-11-08 13:11:32 +00001082}
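// Illustrative example (not from the original source): an ImageReader-backed
// consumer typically reports none of the HW bits listed above, e.g.
//
//     uint64_t usage = GRALLOC_USAGE_SW_READ_OFTEN;  // hypothetical value
//     applyZSLUsageQuirk(HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED, &usage);
//     // usage now also contains GRALLOC_USAGE_HW_CAMERA_ZSL.
//
// A SurfaceView consumer, which does carry GRALLOC_USAGE_HW_COMPOSER, would be
// left unchanged by this quirk.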
Chien-Yu Chen618ff8a2015-03-13 11:27:17 -07001083
Emilian Peev35ae8262018-11-08 13:11:32 +00001084status_t Camera3OutputStream::getEndpointUsageForSurface(uint64_t *usage,
1085 const sp<Surface>& surface) const {
1086 status_t res;
1087 uint64_t u = 0;
1088
1089 res = native_window_get_consumer_usage(static_cast<ANativeWindow*>(surface.get()), &u);
Emilian Peevf4816702020-04-03 15:44:51 -07001090 applyZSLUsageQuirk(camera_stream::format, &u);
Chien-Yu Chen618ff8a2015-03-13 11:27:17 -07001091 *usage = u;
Eino-Ville Talvalab2f5b192013-07-30 14:36:03 -07001092 return res;
1093}
1094
Chien-Yu Chen85a64552015-08-28 15:46:12 -07001095bool Camera3OutputStream::isVideoStream() const {
Emilian Peev050f5dc2017-05-18 14:43:56 +01001096 uint64_t usage = 0;
Chien-Yu Chen85a64552015-08-28 15:46:12 -07001097 status_t res = getEndpointUsage(&usage);
1098 if (res != OK) {
1099 ALOGE("%s: getting end point usage failed: %s (%d).", __FUNCTION__, strerror(-res), res);
1100 return false;
1101 }
1102
1103 return (usage & GRALLOC_USAGE_HW_VIDEO_ENCODER) != 0;
1104}
1105
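// Attach the shared Camera3BufferManager so this stream's buffers can be
// managed and shared across streams. Only valid while the stream is still in
// the CONSTRUCTED state, i.e. before it has been configured.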
Zhijun He125684a2015-12-26 15:07:30 -08001106status_t Camera3OutputStream::setBufferManager(sp<Camera3BufferManager> bufferManager) {
1107 Mutex::Autolock l(mLock);
1108 if (mState != STATE_CONSTRUCTED) {
Zhijun He5d677d12016-05-29 16:52:39 -07001109 ALOGE("%s: this method can only be called when the stream is in CONSTRUCTED state.",
Zhijun He125684a2015-12-26 15:07:30 -08001110 __FUNCTION__);
1111 return INVALID_OPERATION;
1112 }
1113 mBufferManager = bufferManager;
1114
1115 return OK;
1116}
1117
Emilian Peev40ead602017-09-26 15:46:36 +01001118status_t Camera3OutputStream::updateStream(const std::vector<sp<Surface>> &/*outputSurfaces*/,
1119 const std::vector<OutputStreamInfo> &/*outputInfo*/,
1120 const std::vector<size_t> &/*removedSurfaceIds*/,
1121 KeyedVector<sp<Surface>, size_t> * /*outputMap*/) {
1122 ALOGE("%s: this method is not supported!", __FUNCTION__);
1123 return INVALID_OPERATION;
1124}
1125
Shuzhen Wang0160ddd2019-08-15 09:11:56 -07001126void Camera3OutputStream::BufferProducerListener::onBufferReleased() {
Zhijun He125684a2015-12-26 15:07:30 -08001127 sp<Camera3OutputStream> stream = mParent.promote();
1128 if (stream == nullptr) {
1129 ALOGV("%s: Parent camera3 output stream was destroyed", __FUNCTION__);
1130 return;
1131 }
1132
1133 Mutex::Autolock l(stream->mLock);
1134 if (!(stream->mUseBufferManager)) {
1135 return;
1136 }
1137
Eino-Ville Talvala77c1a352016-06-13 12:32:43 -07001138 ALOGV("Stream %d: Buffer released", stream->getId());
Yin-Chia Yeh89954d92017-05-21 17:28:53 -07001139 bool shouldFreeBuffer = false;
Eino-Ville Talvala77c1a352016-06-13 12:32:43 -07001140 status_t res = stream->mBufferManager->onBufferReleased(
Shuzhen Wang83bff122020-11-20 15:51:39 -08001141 stream->getId(), stream->getStreamSetId(), stream->isMultiResolution(),
1142 &shouldFreeBuffer);
Eino-Ville Talvala77c1a352016-06-13 12:32:43 -07001143 if (res != OK) {
1144 ALOGE("%s: signaling buffer release to buffer manager failed: %s (%d).", __FUNCTION__,
1145 strerror(-res), res);
1146 stream->mState = STATE_ERROR;
1147 }
Yin-Chia Yeh89954d92017-05-21 17:28:53 -07001148
1149 if (shouldFreeBuffer) {
1150 sp<GraphicBuffer> buffer;
1151 // Detach and free a buffer (when buffer goes out of scope)
1152 stream->detachBufferLocked(&buffer, /*fenceFd*/ nullptr);
1153 if (buffer.get() != nullptr) {
1154 stream->mBufferManager->notifyBufferRemoved(
Shuzhen Wang83bff122020-11-20 15:51:39 -08001155 stream->getId(), stream->getStreamSetId(), stream->isMultiResolution());
Yin-Chia Yeh89954d92017-05-21 17:28:53 -07001156 }
1157 }
Eino-Ville Talvala77c1a352016-06-13 12:32:43 -07001158}
1159
Shuzhen Wang0160ddd2019-08-15 09:11:56 -07001160void Camera3OutputStream::BufferProducerListener::onBuffersDiscarded(
1161 const std::vector<sp<GraphicBuffer>>& buffers) {
1162 sp<Camera3OutputStream> stream = mParent.promote();
1163 if (stream == nullptr) {
1164 ALOGV("%s: Parent camera3 output stream was destroyed", __FUNCTION__);
1165 return;
1166 }
1167
1168 if (buffers.size() > 0) {
1169 Mutex::Autolock l(stream->mLock);
1170 stream->onBuffersRemovedLocked(buffers);
1171 if (stream->mUseBufferManager) {
1172 stream->mBufferManager->onBuffersRemoved(stream->getId(),
Shuzhen Wang83bff122020-11-20 15:51:39 -08001173 stream->getStreamSetId(), stream->isMultiResolution(), buffers.size());
Shuzhen Wang0160ddd2019-08-15 09:11:56 -07001174 }
1175 ALOGV("Stream %d: %zu Buffers discarded.", stream->getId(), buffers.size());
1176 }
1177}
1178
Yin-Chia Yeh017d49c2017-03-31 19:11:00 -07001179void Camera3OutputStream::onBuffersRemovedLocked(
1180 const std::vector<sp<GraphicBuffer>>& removedBuffers) {
Yin-Chia Yehdb1e8642017-07-14 15:19:30 -07001181 sp<Camera3StreamBufferFreedListener> callback = mBufferFreedListener.promote();
Yin-Chia Yeh017d49c2017-03-31 19:11:00 -07001182 if (callback != nullptr) {
Chih-Hung Hsieh48fc6192017-08-04 14:37:31 -07001183 for (const auto& gb : removedBuffers) {
Yin-Chia Yeh017d49c2017-03-31 19:11:00 -07001184 callback->onBufferFreed(mId, gb->handle);
1185 }
1186 }
1187}
1188
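// Detach a free buffer from the consumer so it can be reclaimed (this is the
// path the buffer manager uses to take buffers back from a stream); the
// buffer's release fence, if any, is duplicated into *fenceFd for the caller.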
Eino-Ville Talvala77c1a352016-06-13 12:32:43 -07001189status_t Camera3OutputStream::detachBuffer(sp<GraphicBuffer>* buffer, int* fenceFd) {
1190 Mutex::Autolock l(mLock);
Yin-Chia Yeh89954d92017-05-21 17:28:53 -07001191 return detachBufferLocked(buffer, fenceFd);
1192}
Eino-Ville Talvala77c1a352016-06-13 12:32:43 -07001193
Yin-Chia Yeh89954d92017-05-21 17:28:53 -07001194status_t Camera3OutputStream::detachBufferLocked(sp<GraphicBuffer>* buffer, int* fenceFd) {
Eino-Ville Talvala77c1a352016-06-13 12:32:43 -07001195 ALOGV("Stream %d: detachBuffer", getId());
1196 if (buffer == nullptr) {
1197 return BAD_VALUE;
1198 }
1199
Zhijun He125684a2015-12-26 15:07:30 -08001200 sp<Fence> fence;
Eino-Ville Talvala77c1a352016-06-13 12:32:43 -07001201 status_t res = mConsumer->detachNextBuffer(buffer, &fence);
Zhijun He125684a2015-12-26 15:07:30 -08001202 if (res == NO_MEMORY) {
1203 // This may rarely happen, which indicates that the released buffer was freed by another
1204 // call (e.g., attachBuffer, dequeueBuffer, etc.) before reaching here. We should notify the
1205 // buffer manager that this buffer has been freed. It's not fatal, but should be avoided,
1206 // therefore log a warning.
Eino-Ville Talvala77c1a352016-06-13 12:32:43 -07001207 *buffer = 0;
Zhijun He125684a2015-12-26 15:07:30 -08001208 ALOGW("%s: the released buffer has already been freed by the buffer queue!", __FUNCTION__);
1209 } else if (res != OK) {
Eino-Ville Talvalaff51b472016-06-28 15:26:19 -07001210 // Treat other errors as abandonment
Yin-Chia Yeha1b56c82019-03-27 15:50:39 -07001211 if (shouldLogError(res, mState)) {
1212 ALOGE("%s: detach next buffer failed: %s (%d).", __FUNCTION__, strerror(-res), res);
1213 }
Eino-Ville Talvalaff51b472016-06-28 15:26:19 -07001214 mState = STATE_ABANDONED;
Eino-Ville Talvala77c1a352016-06-13 12:32:43 -07001215 return res;
Zhijun He125684a2015-12-26 15:07:30 -08001216 }
1217
Eino-Ville Talvala77c1a352016-06-13 12:32:43 -07001218 if (fenceFd != nullptr) {
1219 if (fence != 0 && fence->isValid()) {
1220 *fenceFd = fence->dup();
1221 } else {
1222 *fenceFd = -1;
1223 }
Zhijun He125684a2015-12-26 15:07:30 -08001224 }
Eino-Ville Talvala77c1a352016-06-13 12:32:43 -07001225
Yin-Chia Yehbf1b8b92019-03-06 14:56:08 -08001226 // Here we assume detachBuffer is called by the buffer manager, so it doesn't need to be notified
1227 checkRemovedBuffersLocked(/*notifyBufferManager*/false);
Yin-Chia Yeh017d49c2017-03-31 19:11:00 -07001228 return res;
Zhijun He125684a2015-12-26 15:07:30 -08001229}
Shuzhen Wang13a69632016-01-26 09:51:07 -08001230
Chien-Yu Chena936ac22017-10-23 15:59:49 -07001231status_t Camera3OutputStream::dropBuffers(bool dropping) {
1232 Mutex::Autolock l(mLock);
1233 mDropBuffers = dropping;
1234 return OK;
1235}
1236
Shuzhen Wang5c22c152017-12-31 17:12:25 -08001237const String8& Camera3OutputStream::getPhysicalCameraId() const {
1238 Mutex::Autolock l(mLock);
1239 return physicalCameraId();
1240}
1241
Shuzhen Wangbee0f0a2017-01-24 14:51:37 -08001242status_t Camera3OutputStream::notifyBufferReleased(ANativeWindowBuffer* /*anwBuffer*/) {
Shuzhen Wang0129d522016-10-30 22:43:41 -07001243 return OK;
1244}
1245
1246bool Camera3OutputStream::isConsumerConfigurationDeferred(size_t surface_id) const {
Zhijun He5d677d12016-05-29 16:52:39 -07001247 Mutex::Autolock l(mLock);
Shuzhen Wang0129d522016-10-30 22:43:41 -07001248
1249 if (surface_id != 0) {
Shuzhen Wang758c2152017-01-10 18:26:18 -08001250 ALOGE("%s: surface_id %zu for Camera3OutputStream should be 0!", __FUNCTION__, surface_id);
Shuzhen Wang0129d522016-10-30 22:43:41 -07001251 }
Zhijun He5d677d12016-05-29 16:52:39 -07001252 return mConsumer == nullptr;
1253}
1254
Shuzhen Wang758c2152017-01-10 18:26:18 -08001255status_t Camera3OutputStream::setConsumers(const std::vector<sp<Surface>>& consumers) {
Shuzhen Wangbee0f0a2017-01-24 14:51:37 -08001256 Mutex::Autolock l(mLock);
Shuzhen Wang758c2152017-01-10 18:26:18 -08001257 if (consumers.size() != 1) {
1258 ALOGE("%s: it's illegal to set %zu consumer surfaces!",
1259 __FUNCTION__, consumers.size());
1260 return INVALID_OPERATION;
1261 }
1262 if (consumers[0] == nullptr) {
1263 ALOGE("%s: it's illegal to set null consumer surface!", __FUNCTION__);
Zhijun He5d677d12016-05-29 16:52:39 -07001264 return INVALID_OPERATION;
1265 }
1266
1267 if (mConsumer != nullptr) {
1268 ALOGE("%s: consumer surface was already set!", __FUNCTION__);
1269 return INVALID_OPERATION;
1270 }
1271
Shuzhen Wang758c2152017-01-10 18:26:18 -08001272 mConsumer = consumers[0];
Zhijun He5d677d12016-05-29 16:52:39 -07001273 return OK;
1274}
1275
Shuzhen Wang13a69632016-01-26 09:51:07 -08001276bool Camera3OutputStream::isConsumedByHWComposer() const {
Emilian Peev050f5dc2017-05-18 14:43:56 +01001277 uint64_t usage = 0;
Shuzhen Wang13a69632016-01-26 09:51:07 -08001278 status_t res = getEndpointUsage(&usage);
1279 if (res != OK) {
1280 ALOGE("%s: getting end point usage failed: %s (%d).", __FUNCTION__, strerror(-res), res);
1281 return false;
1282 }
1283
1284 return (usage & GRALLOC_USAGE_HW_COMPOSER) != 0;
1285}
1286
Zhijun Hef0645c12016-08-02 00:58:11 -07001287bool Camera3OutputStream::isConsumedByHWTexture() const {
Emilian Peev050f5dc2017-05-18 14:43:56 +01001288 uint64_t usage = 0;
Zhijun Hef0645c12016-08-02 00:58:11 -07001289 status_t res = getEndpointUsage(&usage);
1290 if (res != OK) {
1291 ALOGE("%s: getting end point usage failed: %s (%d).", __FUNCTION__, strerror(-res), res);
1292 return false;
1293 }
1294
1295 return (usage & GRALLOC_USAGE_HW_TEXTURE) != 0;
1296}
1297
Shuzhen Wangfe8a2a32022-05-10 18:18:54 -07001298bool Camera3OutputStream::isConsumedByCPU() const {
1299 uint64_t usage = 0;
1300 status_t res = getEndpointUsage(&usage);
1301 if (res != OK) {
1302 ALOGE("%s: getting end point usage failed: %s (%d).", __FUNCTION__, strerror(-res), res);
1303 return false;
1304 }
1305
1306 return (usage & GRALLOC_USAGE_SW_READ_MASK) != 0;
1307}
1308
Shuzhen Wangabbcb6b2020-12-09 22:32:44 -08001309void Camera3OutputStream::dumpImageToDisk(nsecs_t timestamp,
1310 ANativeWindowBuffer* anwBuffer, int fence) {
1311 // Derive the output file name
1312 std::string fileExtension = "jpg";
1313 char imageFileName[64];
1314 time_t now = time(0);
1315 tm *localTime = localtime(&now);
1316 snprintf(imageFileName, sizeof(imageFileName), "IMG_%4d%02d%02d_%02d%02d%02d_%" PRId64 ".%s",
Shuzhen Wang6a8237f2021-07-13 14:42:57 -07001317 1900 + localTime->tm_year, localTime->tm_mon + 1, localTime->tm_mday,
Shuzhen Wangabbcb6b2020-12-09 22:32:44 -08001318 localTime->tm_hour, localTime->tm_min, localTime->tm_sec,
1319 timestamp, fileExtension.c_str());
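    // Example (illustrative): a capture taken at 2023-04-07 12:48:05 with a
    // timestamp of 123456789 ns would be written out as
    // "IMG_20230407_124805_123456789.jpg".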
1320
1321 // Lock the image for CPU read
1322 sp<GraphicBuffer> graphicBuffer = GraphicBuffer::from(anwBuffer);
1323 void* mapped = nullptr;
1324 base::unique_fd fenceFd(dup(fence));
1325 status_t res = graphicBuffer->lockAsync(GraphicBuffer::USAGE_SW_READ_OFTEN, &mapped,
Emilian Peev293bd972022-08-05 17:28:06 -07001326 fenceFd.release());
Shuzhen Wangabbcb6b2020-12-09 22:32:44 -08001327 if (res != OK) {
1328 ALOGE("%s: Failed to lock the buffer: %s (%d)", __FUNCTION__, strerror(-res), res);
1329 return;
1330 }
1331
1332 // Figure out actual file size
1333 auto actualJpegSize = android::camera2::JpegProcessor::findJpegSize((uint8_t*)mapped, mMaxSize);
1334 if (actualJpegSize == 0) {
1335 actualJpegSize = mMaxSize;
1336 }
1337
1338 // Output image data to file
1339 std::string filePath = "/data/misc/cameraserver/";
1340 filePath += imageFileName;
1341 std::ofstream imageFile(filePath.c_str(), std::ofstream::binary);
1342 if (!imageFile.is_open()) {
1343 ALOGE("%s: Unable to create file %s", __FUNCTION__, filePath.c_str());
1344 graphicBuffer->unlock();
1345 return;
1346 }
1347 imageFile.write((const char*)mapped, actualJpegSize);
1348
1349 graphicBuffer->unlock();
1350}
1351
Yin-Chia Yeh14ef48d2020-02-10 15:06:37 -08001352status_t Camera3OutputStream::setBatchSize(size_t batchSize) {
1353 Mutex::Autolock l(mLock);
Yin-Chia Yeh14ef48d2020-02-10 15:06:37 -08001354 if (batchSize == 0) {
1355 ALOGE("%s: invalid batch size 0", __FUNCTION__);
1356 return BAD_VALUE;
1357 }
1358
1359 if (mUseBufferManager) {
1360 ALOGE("%s: batch operation is not supported with buffer manager", __FUNCTION__);
1361 return INVALID_OPERATION;
1362 }
1363
1364 if (!isVideoStream()) {
1365 ALOGE("%s: batch operation is not supported with non-video stream", __FUNCTION__);
1366 return INVALID_OPERATION;
1367 }
1368
Shuzhen Wangc7629462021-07-12 15:02:58 -07001369 if (camera_stream::max_buffers < batchSize) {
1370 ALOGW("%s: batch size is capped by max_buffers %d", __FUNCTION__,
1371 camera_stream::max_buffers);
1372 batchSize = camera_stream::max_buffers;
Yin-Chia Yeh14ef48d2020-02-10 15:06:37 -08001373 }
Shuzhen Wangc7629462021-07-12 15:02:58 -07001374
1375 size_t defaultBatchSize = 1;
1376 if (!mBatchSize.compare_exchange_strong(defaultBatchSize, batchSize)) {
1377 ALOGE("%s: change batch size from %zu to %zu dynamically is not supported",
1378 __FUNCTION__, defaultBatchSize, batchSize);
1379 return INVALID_OPERATION;
1380 }
1381
Yin-Chia Yeh14ef48d2020-02-10 15:06:37 -08001382 return OK;
1383}
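// Illustrative usage (hypothetical caller, not part of this file): a high
// frame-rate recording session could request batched buffer operations with
//
//     videoStream->setBatchSize(4);
//
// subject to the checks above: the batch size is capped at
// camera_stream::max_buffers, batching requires a video stream that does not
// use the buffer manager, and the size cannot be changed again once it leaves
// the default of 1.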
1384
Shuzhen Wang696e4da2022-09-08 14:31:13 -07001385void Camera3OutputStream::onMinDurationChanged(nsecs_t duration, bool fixedFps) {
Shuzhen Wang00abbeb2022-02-25 17:14:42 -08001386 Mutex::Autolock l(mLock);
1387 mMinExpectedDuration = duration;
Shuzhen Wang696e4da2022-09-08 14:31:13 -07001388 mFixedFps = fixedFps;
Shuzhen Wang00abbeb2022-02-25 17:14:42 -08001389}
1390
Shuzhen Wang16610a62022-12-15 22:38:07 -08001391void Camera3OutputStream::setStreamUseCase(int64_t streamUseCase) {
1392 Mutex::Autolock l(mLock);
1393 camera_stream::use_case = streamUseCase;
1394}
1395
Yin-Chia Yeh14ef48d2020-02-10 15:06:37 -08001396void Camera3OutputStream::returnPrefetchedBuffersLocked() {
Shuzhen Wangc7629462021-07-12 15:02:58 -07001397 std::vector<Surface::BatchBuffer> batchedBuffers;
Yin-Chia Yeh14ef48d2020-02-10 15:06:37 -08001398
Shuzhen Wangc7629462021-07-12 15:02:58 -07001399 {
1400 std::lock_guard<std::mutex> batchLock(mBatchLock);
1401 if (mBatchedBuffers.size() != 0) {
1402 ALOGW("%s: %zu extra prefetched buffers detected. Returning",
1403 __FUNCTION__, mBatchedBuffers.size());
1404 batchedBuffers = std::move(mBatchedBuffers);
1405 }
1406 }
1407
1408 if (batchedBuffers.size() > 0) {
1409 mConsumer->cancelBuffers(batchedBuffers);
Yin-Chia Yeh14ef48d2020-02-10 15:06:37 -08001410 }
1411}
1412
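// Snap the capture timestamp onto one of the display's upcoming vsync
// timelines so that, for fixed-fps use cases, queued preview buffers are
// presented at evenly spaced vsyncs instead of drifting against the display
// clock. For variable-fps streams the timestamp is passed through unchanged.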
Shuzhen Wang0897d592023-04-07 12:48:05 -07001413nsecs_t Camera3OutputStream::syncTimestampToDisplayLocked(nsecs_t t, int releaseFence) {
Shuzhen Wang35bd3552022-09-21 16:56:04 -07001414 nsecs_t currentTime = systemTime();
1415 if (!mFixedFps) {
1416 mLastCaptureTime = t;
1417 mLastPresentTime = currentTime;
1418 return t;
1419 }
1420
Shuzhen Wang00abbeb2022-02-25 17:14:42 -08001421 ParcelableVsyncEventData parcelableVsyncEventData;
1422 auto res = mDisplayEventReceiver.getLatestVsyncEventData(&parcelableVsyncEventData);
1423 if (res != OK) {
1424 ALOGE("%s: Stream %d: Error getting latest vsync event data: %s (%d)",
1425 __FUNCTION__, mId, strerror(-res), res);
Shuzhen Wang14c62b82022-04-11 09:37:05 -07001426 mLastCaptureTime = t;
Shuzhen Wang35bd3552022-09-21 16:56:04 -07001427 mLastPresentTime = currentTime;
Shuzhen Wang00abbeb2022-02-25 17:14:42 -08001428 return t;
1429 }
1430
1431 const VsyncEventData& vsyncEventData = parcelableVsyncEventData.vsync;
Shuzhen Wang696e4da2022-09-08 14:31:13 -07001432 nsecs_t minPresentT = mLastPresentTime + vsyncEventData.frameInterval / 2;
Shuzhen Wang00abbeb2022-02-25 17:14:42 -08001433
Shuzhen Wang696e4da2022-09-08 14:31:13 -07001434 // Find the best presentation time without worrying about the previous frame's
1435 // presentation time if the capture interval is more than kSpacingResetIntervalNs.
1436 //
1437 // When consecutive captures are more than 50 ms apart (3 vsyncs for a 60hz refresh rate),
1438 // there is little risk in starting over and finding the earliest vsync to latch onto.
1439 // - Update captureToPresentTime offset to be used for later frames.
1440 // - Example use cases:
1441 // - when frame rate drops down to below 20 fps, or
1442 // - A new streaming session starts (stopPreview followed by
1443 // startPreview)
1444 //
Shuzhen Wang34a5e282022-06-17 14:48:35 -07001445 nsecs_t captureInterval = t - mLastCaptureTime;
Shuzhen Wang696e4da2022-09-08 14:31:13 -07001446 if (captureInterval > kSpacingResetIntervalNs) {
Rachel Lee86d90eb2023-04-25 14:37:34 -07001447 for (size_t i = 0; i < vsyncEventData.frameTimelinesLength; i++) {
Shuzhen Wang34a5e282022-06-17 14:48:35 -07001448 const auto& timeline = vsyncEventData.frameTimelines[i];
1449 if (timeline.deadlineTimestamp >= currentTime &&
1450 timeline.expectedPresentationTime > minPresentT) {
1451 nsecs_t presentT = vsyncEventData.frameTimelines[i].expectedPresentationTime;
1452 mCaptureToPresentOffset = presentT - t;
1453 mLastCaptureTime = t;
1454 mLastPresentTime = presentT;
1455
Shuzhen Wang0897d592023-04-07 12:48:05 -07001456 // If releaseFence is available, store the fence to check signal
1457 // time later.
1458 mRefVsyncData = vsyncEventData;
1459 mReferenceCaptureTime = t;
1460 mReferenceArrivalTime = currentTime;
1461 if (releaseFence != -1) {
1462 mReferenceFrameFence = new Fence(releaseFence);
1463 } else {
1464 mFenceSignalOffset = 0;
1465 }
1466
Shuzhen Wang34a5e282022-06-17 14:48:35 -07001467 // Move the expected presentation time back by 1/3 of frame interval to
1468 // mitigate the time drift. Due to time drift, if we directly use the
1469 // expected presentation time, two expected presentation times often
1470 // fall into the same VSYNC interval.
1471 return presentT - vsyncEventData.frameInterval/3;
Shuzhen Wang00abbeb2022-02-25 17:14:42 -08001472 }
1473 }
1474 }
1475
Shuzhen Wang0897d592023-04-07 12:48:05 -07001476 // If there is a reference frame release fence, get the signal time and
1477 // update the captureToPresentOffset.
1478 if (mReferenceFrameFence != nullptr) {
1479 mFenceSignalOffset = 0;
1480 nsecs_t signalTime = mReferenceFrameFence->getSignalTime();
1481 // Now that the fence has signaled, recalculate the offsets based on
1482 // the timeline which was actually latched
1483 if (signalTime != INT64_MAX) {
1484 for (size_t i = 0; i < mRefVsyncData.frameTimelinesLength; i++) {
1485 const auto& timeline = mRefVsyncData.frameTimelines[i];
1486 if (timeline.deadlineTimestamp >= signalTime) {
1487 nsecs_t originalOffset = mCaptureToPresentOffset;
1488 mCaptureToPresentOffset = timeline.expectedPresentationTime
1489 - mReferenceCaptureTime;
1490 mLastPresentTime = timeline.expectedPresentationTime;
1491 mFenceSignalOffset = signalTime > mReferenceArrivalTime ?
1492 signalTime - mReferenceArrivalTime : 0;
1493
1494 ALOGV("%s: Last deadline %" PRId64 " signalTime %" PRId64
1495 " original offset %" PRId64 " new offset %" PRId64
1496 " fencesignal offset %" PRId64, __FUNCTION__,
1497 timeline.deadlineTimestamp, signalTime, originalOffset,
1498 mCaptureToPresentOffset, mFenceSignalOffset);
1499 break;
1500 }
1501 }
1502 mReferenceFrameFence.clear();
1503 }
1504 }
1505
Shuzhen Wang00abbeb2022-02-25 17:14:42 -08001506 nsecs_t idealPresentT = t + mCaptureToPresentOffset;
Shuzhen Wang14c62b82022-04-11 09:37:05 -07001507 nsecs_t expectedPresentT = mLastPresentTime;
Shuzhen Wang00abbeb2022-02-25 17:14:42 -08001508 nsecs_t minDiff = INT64_MAX;
Shuzhen Wang696e4da2022-09-08 14:31:13 -07001509
1510 // In the fixed FPS case, when frame durations are close to multiples of the display refresh
1511 // rate, derive minimum intervals between presentation times based on the minimal
Shuzhen Wang661b34f2022-05-18 22:00:19 -07001512 // expected duration. The minimum number of Vsyncs is:
1513 // - 0 if minFrameDuration in (0, 1.5] * vSyncInterval,
1514 // - 1 if minFrameDuration in (1.5, 2.5] * vSyncInterval,
1515 // - and so on.
Shuzhen Wang696e4da2022-09-08 14:31:13 -07001516 //
1517 // This spaces out the displaying of the frames so that the frame
1518 // presentations are roughly in sync with frame captures.
Shuzhen Wang661b34f2022-05-18 22:00:19 -07001519 int minVsyncs = (mMinExpectedDuration - vsyncEventData.frameInterval / 2) /
1520 vsyncEventData.frameInterval;
1521 if (minVsyncs < 0) minVsyncs = 0;
Shuzhen Wanged08fbe2022-06-21 01:00:50 -07001522 nsecs_t minInterval = minVsyncs * vsyncEventData.frameInterval;
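    // Worked example (illustrative): for a 30 fps fixed-rate stream on a 60 Hz
    // display, mMinExpectedDuration is ~33.3 ms and frameInterval is ~16.7 ms,
    // so minVsyncs = (33.3 - 8.3) / 16.7 ~= 1 and minInterval ~= 16.7 ms, i.e.
    // consecutive presentations must be at least one vsync apart.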
Shuzhen Wang696e4da2022-09-08 14:31:13 -07001523
1524 // In the fixed FPS case, if the frame duration deviates from multiples of the
1525 // display refresh rate, find the closest Vsync without requiring a minimum
1526 // number of Vsyncs.
1527 //
1528 // Example: (24fps camera, 60hz refresh):
1529 // capture readout: | t1 | t1 | .. | t1 | .. | t1 | .. | t1 |
1530 // display VSYNC: | t2 | t2 | ... | t2 | ... | t2 | ... | t2 |
1531 // | : 1 frame
1532 // t1 : 41.67ms
1533 // t2 : 16.67ms
1534 // t1/t2 = 2.5
1535 //
1536 // 24fps is a commonly used video frame rate. Because the capture
1537 // interval is 2.5 times the display refresh interval, the minVsyncs
1538 // calculation falls directly on the boundary condition. In this case,
1539 // we should fall back to the basic logic of finding the closest vsync
1540 // timestamp without worrying about minVsyncs.
1541 float captureToVsyncIntervalRatio = 1.0f * mMinExpectedDuration / vsyncEventData.frameInterval;
1542 float ratioDeviation = std::fabs(
1543 captureToVsyncIntervalRatio - std::roundf(captureToVsyncIntervalRatio));
1544 bool captureDeviateFromVsync = ratioDeviation >= kMaxIntervalRatioDeviation;
1545 bool cameraDisplayInSync = (mFixedFps && !captureDeviateFromVsync);
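    // Worked example (illustrative): at 24 fps on a 60 Hz display the ratio is
    // 41.67 / 16.67 ~= 2.5, so ratioDeviation ~= 0.5 and the capture is treated
    // as deviating from vsync; at 30 fps the ratio is ~2.0, ratioDeviation is
    // ~0, and cameraDisplayInSync is true when mFixedFps is set.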
1546
Shuzhen Wanged08fbe2022-06-21 01:00:50 -07001547 // Find best timestamp in the vsync timelines:
Shuzhen Wang696e4da2022-09-08 14:31:13 -07001548 // - Only use at most kMaxTimelines timelines to avoid long latency
Shuzhen Wang0897d592023-04-07 12:48:05 -07001549 // - Add an extra timeline if display fence is used
Shuzhen Wang696e4da2022-09-08 14:31:13 -07001550 // - closest to the ideal presentation time,
Shuzhen Wang00abbeb2022-02-25 17:14:42 -08001551 // - deadline timestamp is greater than the current time, and
Shuzhen Wang696e4da2022-09-08 14:31:13 -07001552 // - For fixed FPS, if the capture interval doesn't deviate too much from refresh interval,
1553 // the candidate presentation time is at least minInterval in the future compared to the last
1554 // presentation time.
1555 // - For variable FPS, or if the capture interval deviates from the refresh
1556 // interval by more than 5%, find a presentation time closest to
1557 // (lastPresentationTime + captureToPresentOffset) instead.
Shuzhen Wang0897d592023-04-07 12:48:05 -07001558 int fenceAdjustment = (mFenceSignalOffset > 0) ? 1 : 0;
1559 int maxTimelines = std::min(kMaxTimelines + fenceAdjustment,
1560 (int)vsyncEventData.frameTimelinesLength);
Shuzhen Wanged08fbe2022-06-21 01:00:50 -07001561 float biasForShortDelay = 1.0f;
1562 for (int i = 0; i < maxTimelines; i ++) {
1563 const auto& vsyncTime = vsyncEventData.frameTimelines[i];
1564 if (minVsyncs > 0) {
1565 // Bias towards using smaller timeline index:
1566 // i = 0: bias = 1
1567 // i = maxTimelines-1: bias = -1
1568 biasForShortDelay = 1.0 - 2.0 * i / (maxTimelines - 1);
1569 }
Shuzhen Wang00abbeb2022-02-25 17:14:42 -08001570 if (std::abs(vsyncTime.expectedPresentationTime - idealPresentT) < minDiff &&
Shuzhen Wang0897d592023-04-07 12:48:05 -07001571 vsyncTime.deadlineTimestamp >= currentTime + mFenceSignalOffset &&
Shuzhen Wang696e4da2022-09-08 14:31:13 -07001572 ((!cameraDisplayInSync && vsyncTime.expectedPresentationTime > minPresentT) ||
1573 (cameraDisplayInSync && vsyncTime.expectedPresentationTime >
Kwangkyu Park1c0042b2022-12-20 00:03:17 +09001574 mLastPresentTime + minInterval +
1575 static_cast<nsecs_t>(biasForShortDelay * kTimelineThresholdNs)))) {
Shuzhen Wang00abbeb2022-02-25 17:14:42 -08001576 expectedPresentT = vsyncTime.expectedPresentationTime;
1577 minDiff = std::abs(vsyncTime.expectedPresentationTime - idealPresentT);
1578 }
1579 }
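    // For illustration: with maxTimelines == 3, biasForShortDelay evaluates to
    // +1, 0 and -1 for timeline indices 0, 1 and 2, so the minimum-spacing
    // check in the loop above is shifted by up to +/- kTimelineThresholdNs
    // depending on which timeline is being considered.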
Shuzhen Wang696e4da2022-09-08 14:31:13 -07001580
Shuzhen Wang35bd3552022-09-21 16:56:04 -07001581 if (expectedPresentT == mLastPresentTime && expectedPresentT <
1582 vsyncEventData.frameTimelines[maxTimelines-1].expectedPresentationTime) {
Shuzhen Wang696e4da2022-09-08 14:31:13 -07001583 // Couldn't find a reasonable presentation time. Using last frame's
1584 // presentation time would cause a frame drop. The best option now
1585 // is to use the next VSync as long as the last presentation time
1586 // doesn't already have the maximum latency, in which case dropping the
1587 // buffer is preferable to increasing latency.
1588 //
1589 // Example: (60fps camera, 59.9hz refresh):
1590 // capture readout: | t1 | t1 | .. | t1 | .. | t1 | .. | t1 |
1591 // \ \ \ \ \ \ \ \ \
1592 // queue to BQ: | | | | | | | | |
1593 // \ \ \ \ \ \ \ \ \
1594 // display VSYNC: | t2 | t2 | ... | t2 | ... | t2 | ... | t2 |
1595 //
1596 // |: 1 frame
1597 // t1 : 16.67ms
1598 // t2 : 16.69ms
1599 //
1600 // It takes 833 frames for capture readout count and display VSYNC count to be off
1601 // by 1.
1602 // - At frames [0, 832], presentationTime is set to timeline[0]
1603 // - At frames [833, 833*2-1], presentationTime is set to timeline[1]
1604 // - At frames [833*2, 833*3-1] presentationTime is set to timeline[2]
1605 // - At frame 833*3, no presentation time is found because we only
1606 // search for timeline[0..2].
1607 // - Dropping one buffer is better than further extending the presentation
1608 // time.
1609 //
1610 // However, if frame 833*2 arrives 16.67ms early (right after frame
1611 // 833*2-1), no presentation time can be found because
1612 // getLatestVsyncEventData is called early. In that case, it's better to
1613 // set the presentation time by offsetting the last presentation time.
1614 expectedPresentT += vsyncEventData.frameInterval;
1615 }
1616
Shuzhen Wang14c62b82022-04-11 09:37:05 -07001617 mLastCaptureTime = t;
1618 mLastPresentTime = expectedPresentT;
1619
1620 // Move the expected presentation time back by 1/3 of frame interval to
1621 // mitigate the time drift. Due to time drift, if we directly use the
1622 // expected presentation time, two expected presentation times often
1623 // fall into the same VSYNC interval.
1624 return expectedPresentT - vsyncEventData.frameInterval/3;
Shuzhen Wange4adddb2021-09-21 15:24:44 -07001625}
1626
Shuzhen Wangba92d772022-04-11 11:47:24 -07001627bool Camera3OutputStream::shouldLogError(status_t res) {
1628 Mutex::Autolock l(mLock);
1629 return shouldLogError(res, mState);
1630}
1631
Eino-Ville Talvalafd58f1a2013-03-06 16:20:06 -08001632}; // namespace camera3
1633
1634}; // namespace android