/*
 * Copyright (C) 2013-2018 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#define LOG_TAG "Camera3-OutputStream"
#define ATRACE_TAG ATRACE_TAG_CAMERA
//#define LOG_NDEBUG 0

#include <algorithm>
#include <ctime>
#include <fstream>

#include <aidl/android/hardware/camera/device/CameraBlob.h>
#include <aidl/android/hardware/camera/device/CameraBlobId.h>

#include <android-base/unique_fd.h>
#include <cutils/properties.h>
#include <ui/GraphicBuffer.h>
#include <utils/Log.h>
#include <utils/Trace.h>

#include <common/CameraDeviceBase.h>
#include "api1/client2/JpegProcessor.h"
#include "Camera3OutputStream.h"
#include "utils/TraceHFR.h"

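// Fallback definition of the kernel-style container_of() macro: given a pointer to a member,
// recover a pointer to the enclosing struct by subtracting the member's offset.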
#ifndef container_of
#define container_of(ptr, type, member) \
    (type *)((char*)(ptr) - offsetof(type, member))
#endif

namespace android {

namespace camera3 {

using aidl::android::hardware::camera::device::CameraBlob;
using aidl::android::hardware::camera::device::CameraBlobId;

Camera3OutputStream::Camera3OutputStream(int id,
        sp<Surface> consumer,
        uint32_t width, uint32_t height, int format,
        android_dataspace dataSpace, camera_stream_rotation_t rotation,
        nsecs_t timestampOffset, const String8& physicalCameraId,
        const std::unordered_set<int32_t> &sensorPixelModesUsed, IPCTransport transport,
        int setId, bool isMultiResolution, int64_t dynamicRangeProfile,
        int64_t streamUseCase, bool deviceTimeBaseIsRealtime, int timestampBase,
        int mirrorMode, int32_t colorSpace, bool useReadoutTimestamp) :
        Camera3IOStreamBase(id, CAMERA_STREAM_OUTPUT, width, height,
                            /*maxSize*/0, format, dataSpace, rotation,
                            physicalCameraId, sensorPixelModesUsed, setId, isMultiResolution,
                            dynamicRangeProfile, streamUseCase, deviceTimeBaseIsRealtime,
                            timestampBase, colorSpace),
        mConsumer(consumer),
        mTransform(0),
        mTraceFirstBuffer(true),
        mUseBufferManager(false),
        mTimestampOffset(timestampOffset),
        mUseReadoutTime(useReadoutTimestamp),
        mConsumerUsage(0),
        mDropBuffers(false),
        mMirrorMode(mirrorMode),
        mDequeueBufferLatency(kDequeueLatencyBinSize),
        mIPCTransport(transport) {

    if (mConsumer == NULL) {
        ALOGE("%s: Consumer is NULL!", __FUNCTION__);
        mState = STATE_ERROR;
    }

    bool needsReleaseNotify = setId > CAMERA3_STREAM_SET_ID_INVALID;
    mBufferProducerListener = new BufferProducerListener(this, needsReleaseNotify);
}

Camera3OutputStream::Camera3OutputStream(int id,
        sp<Surface> consumer,
        uint32_t width, uint32_t height, size_t maxSize, int format,
        android_dataspace dataSpace, camera_stream_rotation_t rotation,
        nsecs_t timestampOffset, const String8& physicalCameraId,
        const std::unordered_set<int32_t> &sensorPixelModesUsed, IPCTransport transport,
        int setId, bool isMultiResolution, int64_t dynamicRangeProfile,
        int64_t streamUseCase, bool deviceTimeBaseIsRealtime, int timestampBase,
        int mirrorMode, int32_t colorSpace, bool useReadoutTimestamp) :
        Camera3IOStreamBase(id, CAMERA_STREAM_OUTPUT, width, height, maxSize,
                            format, dataSpace, rotation, physicalCameraId, sensorPixelModesUsed,
                            setId, isMultiResolution, dynamicRangeProfile, streamUseCase,
                            deviceTimeBaseIsRealtime, timestampBase, colorSpace),
        mConsumer(consumer),
        mTransform(0),
        mTraceFirstBuffer(true),
        mUseBufferManager(false),
        mTimestampOffset(timestampOffset),
        mUseReadoutTime(useReadoutTimestamp),
        mConsumerUsage(0),
        mDropBuffers(false),
        mMirrorMode(mirrorMode),
        mDequeueBufferLatency(kDequeueLatencyBinSize),
        mIPCTransport(transport) {

    if (format != HAL_PIXEL_FORMAT_BLOB && format != HAL_PIXEL_FORMAT_RAW_OPAQUE) {
        ALOGE("%s: Bad format for size-only stream: %d", __FUNCTION__,
                format);
        mState = STATE_ERROR;
    }

    if (mConsumer == NULL) {
        ALOGE("%s: Consumer is NULL!", __FUNCTION__);
        mState = STATE_ERROR;
    }

    bool needsReleaseNotify = setId > CAMERA3_STREAM_SET_ID_INVALID;
    mBufferProducerListener = new BufferProducerListener(this, needsReleaseNotify);
}

Camera3OutputStream::Camera3OutputStream(int id,
        uint32_t width, uint32_t height, int format,
        uint64_t consumerUsage, android_dataspace dataSpace,
        camera_stream_rotation_t rotation, nsecs_t timestampOffset,
        const String8& physicalCameraId,
        const std::unordered_set<int32_t> &sensorPixelModesUsed, IPCTransport transport,
        int setId, bool isMultiResolution, int64_t dynamicRangeProfile,
        int64_t streamUseCase, bool deviceTimeBaseIsRealtime, int timestampBase,
        int mirrorMode, int32_t colorSpace, bool useReadoutTimestamp) :
        Camera3IOStreamBase(id, CAMERA_STREAM_OUTPUT, width, height,
                            /*maxSize*/0, format, dataSpace, rotation,
                            physicalCameraId, sensorPixelModesUsed, setId, isMultiResolution,
                            dynamicRangeProfile, streamUseCase, deviceTimeBaseIsRealtime,
                            timestampBase, colorSpace),
        mConsumer(nullptr),
        mTransform(0),
        mTraceFirstBuffer(true),
        mUseBufferManager(false),
        mTimestampOffset(timestampOffset),
        mUseReadoutTime(useReadoutTimestamp),
        mConsumerUsage(consumerUsage),
        mDropBuffers(false),
        mMirrorMode(mirrorMode),
        mDequeueBufferLatency(kDequeueLatencyBinSize),
        mIPCTransport(transport) {
    // A deferred consumer currently only supports the preview surface format.
    if (format != HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED) {
        ALOGE("%s: Deferred consumer only supports IMPLEMENTATION_DEFINED format now!",
                __FUNCTION__);
        mState = STATE_ERROR;
    }

    // Validation check for the consumer usage flag.
    if ((consumerUsage & GraphicBuffer::USAGE_HW_TEXTURE) == 0 &&
            (consumerUsage & GraphicBuffer::USAGE_HW_COMPOSER) == 0) {
        ALOGE("%s: Deferred consumer usage flag is illegal %" PRIu64 "!",
              __FUNCTION__, consumerUsage);
        mState = STATE_ERROR;
    }

    mConsumerName = String8("Deferred");
    bool needsReleaseNotify = setId > CAMERA3_STREAM_SET_ID_INVALID;
    mBufferProducerListener = new BufferProducerListener(this, needsReleaseNotify);
}

Camera3OutputStream::Camera3OutputStream(int id, camera_stream_type_t type,
        uint32_t width, uint32_t height,
        int format,
        android_dataspace dataSpace,
        camera_stream_rotation_t rotation,
        const String8& physicalCameraId,
        const std::unordered_set<int32_t> &sensorPixelModesUsed,
        IPCTransport transport,
        uint64_t consumerUsage, nsecs_t timestampOffset,
        int setId, bool isMultiResolution,
        int64_t dynamicRangeProfile, int64_t streamUseCase,
        bool deviceTimeBaseIsRealtime, int timestampBase,
        int mirrorMode, int32_t colorSpace,
        bool useReadoutTimestamp) :
        Camera3IOStreamBase(id, type, width, height,
                            /*maxSize*/0,
                            format, dataSpace, rotation,
                            physicalCameraId, sensorPixelModesUsed, setId, isMultiResolution,
                            dynamicRangeProfile, streamUseCase, deviceTimeBaseIsRealtime,
                            timestampBase, colorSpace),
        mTransform(0),
        mTraceFirstBuffer(true),
        mUseBufferManager(false),
        mTimestampOffset(timestampOffset),
        mUseReadoutTime(useReadoutTimestamp),
        mConsumerUsage(consumerUsage),
        mDropBuffers(false),
        mMirrorMode(mirrorMode),
        mDequeueBufferLatency(kDequeueLatencyBinSize),
        mIPCTransport(transport) {

    bool needsReleaseNotify = setId > CAMERA3_STREAM_SET_ID_INVALID;
    mBufferProducerListener = new BufferProducerListener(this, needsReleaseNotify);

    // Subclasses expected to initialize mConsumer themselves
}


Camera3OutputStream::~Camera3OutputStream() {
    disconnectLocked();
}

status_t Camera3OutputStream::getBufferLocked(camera_stream_buffer *buffer,
        const std::vector<size_t>&) {
    ATRACE_HFR_CALL();

    ANativeWindowBuffer* anb;
    int fenceFd = -1;

    status_t res;
    res = getBufferLockedCommon(&anb, &fenceFd);
    if (res != OK) {
        return res;
    }

    /**
     * FenceFD now owned by HAL except in case of error,
     * in which case we reassign it to acquire_fence
     */
    handoutBufferLocked(*buffer, &(anb->handle), /*acquireFence*/fenceFd,
                        /*releaseFence*/-1, CAMERA_BUFFER_STATUS_OK, /*output*/true);

    return OK;
}

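// Batched variant of getBufferLocked(): dequeues outBuffers->size() buffers from the consumer
// surface in a single dequeueBuffers() call and hands each one out to the HAL. Not supported
// when the stream is managed by the buffer manager.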
status_t Camera3OutputStream::getBuffersLocked(std::vector<OutstandingBuffer>* outBuffers) {
    status_t res;

    if ((res = getBufferPreconditionCheckLocked()) != OK) {
        return res;
    }

    if (mUseBufferManager) {
        ALOGE("%s: stream %d is managed by buffer manager and does not support batch operation",
                __FUNCTION__, mId);
        return INVALID_OPERATION;
    }

    sp<Surface> consumer = mConsumer;
    /**
     * Release the lock briefly to avoid deadlock in the following scenario:
     * Thread 1: StreamingProcessor::startStream -> Camera3Stream::isConfiguring().
     * This thread has acquired the StreamingProcessor lock and tries to lock the Camera3Stream
     * lock.
     * Thread 2: Camera3Stream::returnBuffer -> StreamingProcessor::onFrameAvailable().
     * This thread has acquired the Camera3Stream lock and the bufferQueue lock, and tries to
     * lock the StreamingProcessor lock.
     * Thread 3: Camera3Stream::getBuffer(). This thread has acquired the Camera3Stream lock
     * and tries to lock the bufferQueue lock.
     * Together these form a circular locking dependency.
     */
    mLock.unlock();

    size_t numBuffersRequested = outBuffers->size();
    std::vector<Surface::BatchBuffer> buffers(numBuffersRequested);

    nsecs_t dequeueStart = systemTime(SYSTEM_TIME_MONOTONIC);
    res = consumer->dequeueBuffers(&buffers);
    nsecs_t dequeueEnd = systemTime(SYSTEM_TIME_MONOTONIC);
    mDequeueBufferLatency.add(dequeueStart, dequeueEnd);

    mLock.lock();

    if (res != OK) {
        if (shouldLogError(res, mState)) {
            ALOGE("%s: Stream %d: Can't dequeue %zu output buffers: %s (%d)",
                    __FUNCTION__, mId, numBuffersRequested, strerror(-res), res);
        }
        checkRetAndSetAbandonedLocked(res);
        return res;
    }
    checkRemovedBuffersLocked();

    /**
     * FenceFD now owned by HAL except in case of error,
     * in which case we reassign it to acquire_fence
     */
    for (size_t i = 0; i < numBuffersRequested; i++) {
        handoutBufferLocked(*(outBuffers->at(i).outBuffer),
                &(buffers[i].buffer->handle), /*acquireFence*/buffers[i].fenceFd,
                /*releaseFence*/-1, CAMERA_BUFFER_STATUS_OK, /*output*/true);
    }
    return OK;
}

status_t Camera3OutputStream::queueBufferToConsumer(sp<ANativeWindow>& consumer,
        ANativeWindowBuffer* buffer, int anwReleaseFence,
        const std::vector<size_t>&) {
    return consumer->queueBuffer(consumer.get(), buffer, anwReleaseFence);
}

status_t Camera3OutputStream::returnBufferLocked(
        const camera_stream_buffer &buffer,
        nsecs_t timestamp, nsecs_t readoutTimestamp,
        int32_t transform, const std::vector<size_t>& surface_ids) {
    ATRACE_HFR_CALL();

    if (mHandoutTotalBufferCount == 1) {
        returnPrefetchedBuffersLocked();
    }

    status_t res = returnAnyBufferLocked(buffer, timestamp, readoutTimestamp,
                                         /*output*/true, transform, surface_ids);

    if (res != OK) {
        return res;
    }

    mLastTimestamp = timestamp;
    mFrameCount++;

    return OK;
}

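// For BLOB (JPEG) buffers filled by a HIDL HAL, a legacy camera_jpeg_blob_t trailer sits at the
// end of the buffer (the gralloc "width" of a BLOB buffer is its total size in bytes). Rewrite
// that trailer in place using the AIDL CameraBlob layout to fix the id-type discrepancy between
// HIDL and AIDL (see b/229688810).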
status_t Camera3OutputStream::fixUpHidlJpegBlobHeader(ANativeWindowBuffer* anwBuffer, int fence) {
    // Lock the JPEG buffer for CPU read
    sp<GraphicBuffer> graphicBuffer = GraphicBuffer::from(anwBuffer);
    void* mapped = nullptr;
    base::unique_fd fenceFd(dup(fence));
    // Use USAGE_SW_WRITE_RARELY since we're going to re-write the CameraBlob
    // header.
    GraphicBufferLocker gbLocker(graphicBuffer);
    status_t res =
            gbLocker.lockAsync(
                    GraphicBuffer::USAGE_SW_READ_OFTEN | GraphicBuffer::USAGE_SW_WRITE_RARELY,
                    &mapped, fenceFd.release());
    if (res != OK) {
        ALOGE("%s: Failed to lock the buffer: %s (%d)", __FUNCTION__, strerror(-res), res);
        return res;
    }

    uint8_t *hidlHeaderStart =
            static_cast<uint8_t*>(mapped) + graphicBuffer->getWidth() - sizeof(camera_jpeg_blob_t);
    // Check that the jpeg buffer is big enough to contain HIDL camera blob
    if (hidlHeaderStart < static_cast<uint8_t *>(mapped)) {
        ALOGE("%s, jpeg buffer not large enough to fit HIDL camera blob %" PRIu32, __FUNCTION__,
                graphicBuffer->getWidth());
        return BAD_VALUE;
    }
    camera_jpeg_blob_t *hidlBlobHeader = reinterpret_cast<camera_jpeg_blob_t *>(hidlHeaderStart);

    // Check that the blob is indeed the jpeg blob id.
    if (hidlBlobHeader->jpeg_blob_id != CAMERA_JPEG_BLOB_ID) {
        ALOGE("%s, jpeg blob id %d is not correct", __FUNCTION__, hidlBlobHeader->jpeg_blob_id);
        return BAD_VALUE;
    }

    // Retrieve id and blob size
    CameraBlobId blobId = static_cast<CameraBlobId>(hidlBlobHeader->jpeg_blob_id);
    uint32_t blobSizeBytes = hidlBlobHeader->jpeg_size;

    if (blobSizeBytes > (graphicBuffer->getWidth() - sizeof(camera_jpeg_blob_t))) {
        ALOGE("%s, blobSize in HIDL jpeg blob : %d is corrupt, buffer size %" PRIu32, __FUNCTION__,
                blobSizeBytes, graphicBuffer->getWidth());
    }

    uint8_t *aidlHeaderStart =
            static_cast<uint8_t*>(mapped) + graphicBuffer->getWidth() - sizeof(CameraBlob);

    // Check that the jpeg buffer is big enough to contain AIDL camera blob
    if (aidlHeaderStart < static_cast<uint8_t *>(mapped)) {
        ALOGE("%s, jpeg buffer not large enough to fit AIDL camera blob %" PRIu32, __FUNCTION__,
                graphicBuffer->getWidth());
        return BAD_VALUE;
    }

    if (static_cast<uint8_t*>(mapped) + blobSizeBytes > aidlHeaderStart) {
        ALOGE("%s, jpeg blob with size %d , buffer size %" PRIu32 " not large enough to fit"
                " AIDL camera blob without corrupting jpeg", __FUNCTION__, blobSizeBytes,
                graphicBuffer->getWidth());
        return BAD_VALUE;
    }

    // Fill in JPEG header
    CameraBlob aidlHeader = {
            .blobId = blobId,
            .blobSizeBytes = static_cast<int32_t>(blobSizeBytes)
    };
    memcpy(aidlHeaderStart, &aidlHeader, sizeof(CameraBlob));
    graphicBuffer->unlock();
    return OK;
}

status_t Camera3OutputStream::returnBufferCheckedLocked(
        const camera_stream_buffer &buffer,
        nsecs_t timestamp,
        nsecs_t readoutTimestamp,
        [[maybe_unused]] bool output,
        int32_t transform,
        const std::vector<size_t>& surface_ids,
        /*out*/
        sp<Fence> *releaseFenceOut) {

    ALOG_ASSERT(output, "Expected output to be true");

    status_t res;

    // Fence management - always honor release fence from HAL
    sp<Fence> releaseFence = new Fence(buffer.release_fence);
    int anwReleaseFence = releaseFence->dup();

    /**
     * Release the lock briefly to avoid deadlock with
     * StreamingProcessor::startStream -> Camera3Stream::isConfiguring (this
     * thread will go into StreamingProcessor::onFrameAvailable) during
     * queueBuffer
     */
    sp<ANativeWindow> currentConsumer = mConsumer;
    StreamState state = mState;
    mLock.unlock();

    ANativeWindowBuffer *anwBuffer = container_of(buffer.buffer, ANativeWindowBuffer, handle);
    bool bufferDeferred = false;
    /**
     * Return buffer back to ANativeWindow
     */
    if (buffer.status == CAMERA_BUFFER_STATUS_ERROR || mDropBuffers || timestamp == 0) {
        // Cancel buffer
        if (mDropBuffers) {
            ALOGV("%s: Dropping a frame for stream %d.", __FUNCTION__, mId);
        } else if (buffer.status == CAMERA_BUFFER_STATUS_ERROR) {
            ALOGV("%s: A frame is dropped for stream %d due to buffer error.", __FUNCTION__, mId);
        } else {
            ALOGE("%s: Stream %d: timestamp shouldn't be 0", __FUNCTION__, mId);
        }

        res = currentConsumer->cancelBuffer(currentConsumer.get(),
                anwBuffer,
                anwReleaseFence);
        if (shouldLogError(res, state)) {
            ALOGE("%s: Stream %d: Error cancelling buffer to native window:"
                  " %s (%d)", __FUNCTION__, mId, strerror(-res), res);
        }

        notifyBufferReleased(anwBuffer);
        if (mUseBufferManager) {
            // Return this buffer back to buffer manager.
            mBufferProducerListener->onBufferReleased();
        }
    } else {
        if (mTraceFirstBuffer && (stream_type == CAMERA_STREAM_OUTPUT)) {
            {
                char traceLog[48];
                snprintf(traceLog, sizeof(traceLog), "Stream %d: first full buffer\n", mId);
                ATRACE_NAME(traceLog);
            }
            mTraceFirstBuffer = false;
        }
        // Fix CameraBlob id type discrepancy between HIDL and AIDL, details : http://b/229688810
        if (getFormat() == HAL_PIXEL_FORMAT_BLOB && getDataSpace() == HAL_DATASPACE_V0_JFIF) {
            if (mIPCTransport == IPCTransport::HIDL) {
                fixUpHidlJpegBlobHeader(anwBuffer, anwReleaseFence);
            }
            // If this is a JPEG output, and image dump mask is set, save image to
            // disk.
            if (mImageDumpMask) {
                dumpImageToDisk(timestamp, anwBuffer, anwReleaseFence);
            }
        }

        nsecs_t captureTime = ((mUseReadoutTime || mSyncToDisplay) && readoutTimestamp != 0 ?
                readoutTimestamp : timestamp) - mTimestampOffset;
        if (mPreviewFrameSpacer != nullptr) {
            nsecs_t readoutTime = (readoutTimestamp != 0 ? readoutTimestamp : timestamp)
                    - mTimestampOffset;
            res = mPreviewFrameSpacer->queuePreviewBuffer(captureTime, readoutTime,
                    transform, anwBuffer, anwReleaseFence);
            if (res != OK) {
                ALOGE("%s: Stream %d: Error queuing buffer to preview buffer spacer: %s (%d)",
                        __FUNCTION__, mId, strerror(-res), res);
                return res;
            }
            bufferDeferred = true;
        } else {
            nsecs_t presentTime = mSyncToDisplay ?
                    syncTimestampToDisplayLocked(captureTime) : captureTime;

            setTransform(transform, true/*mayChangeMirror*/);
            res = native_window_set_buffers_timestamp(mConsumer.get(), presentTime);
            if (res != OK) {
                ALOGE("%s: Stream %d: Error setting timestamp: %s (%d)",
                      __FUNCTION__, mId, strerror(-res), res);
                return res;
            }

            queueHDRMetadata(anwBuffer->handle, currentConsumer, dynamic_range_profile);

            res = queueBufferToConsumer(currentConsumer, anwBuffer, anwReleaseFence, surface_ids);
            if (shouldLogError(res, state)) {
                ALOGE("%s: Stream %d: Error queueing buffer to native window:"
                      " %s (%d)", __FUNCTION__, mId, strerror(-res), res);
            }
        }
    }
    mLock.lock();

    if (bufferDeferred) {
        mCachedOutputBufferCount++;
    }

    // Once a valid buffer has been returned to the queue, can no longer
    // dequeue all buffers for preallocation.
    if (buffer.status != CAMERA_BUFFER_STATUS_ERROR) {
        mStreamUnpreparable = true;
    }

    *releaseFenceOut = releaseFence;

    return res;
}

void Camera3OutputStream::dump(int fd, [[maybe_unused]] const Vector<String16> &args) const {
    String8 lines;
    lines.appendFormat("    Stream[%d]: Output\n", mId);
    lines.appendFormat("      Consumer name: %s\n", mConsumerName.string());
    write(fd, lines.string(), lines.size());

    Camera3IOStreamBase::dump(fd, args);

    mDequeueBufferLatency.dump(fd,
            "      DequeueBuffer latency histogram:");
}

status_t Camera3OutputStream::setTransform(int transform, bool mayChangeMirror) {
    ATRACE_CALL();
    Mutex::Autolock l(mLock);
    if (mMirrorMode != OutputConfiguration::MIRROR_MODE_AUTO && mayChangeMirror) {
        // If the mirroring mode is not AUTO, do not allow transform update
        // which may change mirror.
        return OK;
    }

    return setTransformLocked(transform);
}

status_t Camera3OutputStream::setTransformLocked(int transform) {
    status_t res = OK;

    if (transform == -1) return res;

    if (mState == STATE_ERROR) {
        ALOGE("%s: Stream in error state", __FUNCTION__);
        return INVALID_OPERATION;
    }

    mTransform = transform;
    if (mState == STATE_CONFIGURED) {
        res = native_window_set_buffers_transform(mConsumer.get(),
                transform);
        if (res != OK) {
            ALOGE("%s: Unable to configure stream transform to %x: %s (%d)",
                    __FUNCTION__, transform, strerror(-res), res);
        }
    }
    return res;
}

status_t Camera3OutputStream::configureQueueLocked() {
    status_t res;

    mTraceFirstBuffer = true;
    if ((res = Camera3IOStreamBase::configureQueueLocked()) != OK) {
        return res;
    }

    if ((res = configureConsumerQueueLocked(true /*allowPreviewRespace*/)) != OK) {
        return res;
    }

    // Set dequeueBuffer/attachBuffer timeout if the consumer is not hw composer or hw texture.
    // We need to skip these cases as the timeout would disable the non-blocking (async) mode.
    if (!(isConsumedByHWComposer() || isConsumedByHWTexture())) {
        if (mUseBufferManager) {
            // When the buffer manager is handling the buffer, we should have available buffers in
            // the buffer queue before we call into dequeueBuffer because the buffer manager is
            // tracking free buffers.
            // There are however some consumer-side features (ImageReader::discardFreeBuffers) that
            // can discard free buffers without notifying the buffer manager. We want the timeout
            // to happen immediately here so the buffer manager can try to update its internal
            // state and try to allocate a buffer instead of waiting.
            mConsumer->setDequeueTimeout(0);
        } else {
            mConsumer->setDequeueTimeout(kDequeueBufferTimeout);
        }
    }

    return OK;
}

status_t Camera3OutputStream::configureConsumerQueueLocked(bool allowPreviewRespace) {
    status_t res;

    mTraceFirstBuffer = true;

    ALOG_ASSERT(mConsumer != 0, "mConsumer should never be NULL");

    // Configure consumer-side ANativeWindow interface. The listener may be used
    // to notify the buffer manager (if it is used) of the returned buffers.
    res = mConsumer->connect(NATIVE_WINDOW_API_CAMERA,
            /*reportBufferRemoval*/true,
            /*listener*/mBufferProducerListener);
    if (res != OK) {
        ALOGE("%s: Unable to connect to native window for stream %d",
                __FUNCTION__, mId);
        return res;
    }

    mConsumerName = mConsumer->getConsumerName();

    res = native_window_set_usage(mConsumer.get(), mUsage);
    if (res != OK) {
        ALOGE("%s: Unable to configure usage %" PRIu64 " for stream %d",
                __FUNCTION__, mUsage, mId);
        return res;
    }

    res = native_window_set_scaling_mode(mConsumer.get(),
            NATIVE_WINDOW_SCALING_MODE_SCALE_TO_WINDOW);
    if (res != OK) {
        ALOGE("%s: Unable to configure stream scaling: %s (%d)",
                __FUNCTION__, strerror(-res), res);
        return res;
    }

    if (mMaxSize == 0) {
        // For buffers of known size
        res = native_window_set_buffers_dimensions(mConsumer.get(),
                camera_stream::width, camera_stream::height);
    } else {
        // For buffers with bounded size
        res = native_window_set_buffers_dimensions(mConsumer.get(),
                mMaxSize, 1);
    }
    if (res != OK) {
        ALOGE("%s: Unable to configure stream buffer dimensions"
                " %d x %d (maxSize %zu) for stream %d",
                __FUNCTION__, camera_stream::width, camera_stream::height,
                mMaxSize, mId);
        return res;
    }
    res = native_window_set_buffers_format(mConsumer.get(),
            camera_stream::format);
    if (res != OK) {
        ALOGE("%s: Unable to configure stream buffer format %#x for stream %d",
                __FUNCTION__, camera_stream::format, mId);
        return res;
    }

    res = native_window_set_buffers_data_space(mConsumer.get(),
            camera_stream::data_space);
    if (res != OK) {
        ALOGE("%s: Unable to configure stream dataspace %#x for stream %d",
                __FUNCTION__, camera_stream::data_space, mId);
        return res;
    }

    int maxConsumerBuffers;
    res = static_cast<ANativeWindow*>(mConsumer.get())->query(
            mConsumer.get(),
            NATIVE_WINDOW_MIN_UNDEQUEUED_BUFFERS, &maxConsumerBuffers);
    if (res != OK) {
        ALOGE("%s: Unable to query consumer undequeued"
                " buffer count for stream %d", __FUNCTION__, mId);
        return res;
    }

    ALOGV("%s: Consumer wants %d buffers, HAL wants %d", __FUNCTION__,
            maxConsumerBuffers, camera_stream::max_buffers);
    if (camera_stream::max_buffers == 0) {
        ALOGE("%s: Camera HAL requested max_buffer count: %d, requires at least 1",
                __FUNCTION__, camera_stream::max_buffers);
        return INVALID_OPERATION;
    }

    mTotalBufferCount = maxConsumerBuffers + camera_stream::max_buffers;

    int timestampBase = getTimestampBase();
    bool isDefaultTimeBase = (timestampBase ==
            OutputConfiguration::TIMESTAMP_BASE_DEFAULT);
    if (allowPreviewRespace) {
        bool forceChoreographer = (timestampBase ==
                OutputConfiguration::TIMESTAMP_BASE_CHOREOGRAPHER_SYNCED);
        bool defaultToChoreographer = (isDefaultTimeBase &&
                isConsumedByHWComposer());
        bool defaultToSpacer = (isDefaultTimeBase &&
                isConsumedByHWTexture() &&
                !isConsumedByCPU() &&
                !isVideoStream());
        if (forceChoreographer || defaultToChoreographer) {
            mSyncToDisplay = true;
            // For a choreographer-synced stream, extra buffers aren't kept by
            // camera service, so there is no need to update mMaxCachedBufferCount.
            mTotalBufferCount += kDisplaySyncExtraBuffer;
        } else if (defaultToSpacer) {
            mPreviewFrameSpacer = new PreviewFrameSpacer(this, mConsumer);
            // For the preview frame spacer, the extra buffer is kept by camera
            // service, so update mMaxCachedBufferCount.
            mMaxCachedBufferCount = 1;
            mTotalBufferCount += mMaxCachedBufferCount;
            res = mPreviewFrameSpacer->run(String8::format("PreviewSpacer-%d", mId).string());
            if (res != OK) {
                ALOGE("%s: Unable to start preview spacer: %s (%d)", __FUNCTION__,
                        strerror(-res), res);
                return res;
            }
        }
    }
    mHandoutTotalBufferCount = 0;
    mFrameCount = 0;
    mLastTimestamp = 0;

    if (isDeviceTimeBaseRealtime()) {
        if (isDefaultTimeBase && !isConsumedByHWComposer() && !isVideoStream()) {
            // Default time base, but not hardware composer or video encoder
            mTimestampOffset = 0;
        } else if (timestampBase == OutputConfiguration::TIMESTAMP_BASE_REALTIME ||
                timestampBase == OutputConfiguration::TIMESTAMP_BASE_SENSOR) {
            mTimestampOffset = 0;
        }
        // If timestampBase is CHOREOGRAPHER SYNCED or MONOTONIC, leave
        // timestamp offset as bootTime - monotonicTime.
    } else {
        if (timestampBase == OutputConfiguration::TIMESTAMP_BASE_REALTIME) {
            // Reverse offset for monotonicTime -> bootTime
            mTimestampOffset = -mTimestampOffset;
        } else {
            // If timestampBase is DEFAULT, MONOTONIC, SENSOR or
            // CHOREOGRAPHER_SYNCED, timestamp offset is 0.
            mTimestampOffset = 0;
        }
    }

    res = native_window_set_buffer_count(mConsumer.get(),
            mTotalBufferCount);
    if (res != OK) {
        ALOGE("%s: Unable to set buffer count for stream %d",
                __FUNCTION__, mId);
        return res;
    }

    res = native_window_set_buffers_transform(mConsumer.get(),
            mTransform);
    if (res != OK) {
        ALOGE("%s: Unable to configure stream transform to %x: %s (%d)",
                __FUNCTION__, mTransform, strerror(-res), res);
        return res;
    }

    /**
     * The Camera3 buffer manager is only supported from HAL3.3 onwards, as older HALs require
     * buffers to be statically allocated for internal static buffer registration, while the
     * buffers provided by the buffer manager are really dynamically allocated. Camera3Device only
     * sets mBufferManager if the device version is > HAL3.2, which guarantees that the buffer
     * manager setup is skipped in the code below. Note that HAL3.2 is also excluded here, as some
     * HAL3.2 devices may not support dynamic buffer registration.
     * Also, Camera3BufferManager does not support display/texture streams as they have their own
     * buffer management logic.
     */
    if (mBufferManager != 0 && mSetId > CAMERA3_STREAM_SET_ID_INVALID &&
            !(isConsumedByHWComposer() || isConsumedByHWTexture())) {
        uint64_t consumerUsage = 0;
        getEndpointUsage(&consumerUsage);
        uint32_t width = (mMaxSize == 0) ? getWidth() : mMaxSize;
        uint32_t height = (mMaxSize == 0) ? getHeight() : 1;
        StreamInfo streamInfo(
                getId(), getStreamSetId(), width, height, getFormat(), getDataSpace(),
                mUsage | consumerUsage, mTotalBufferCount,
                /*isConfigured*/true, isMultiResolution());
        wp<Camera3OutputStream> weakThis(this);
        res = mBufferManager->registerStream(weakThis,
                streamInfo);
        if (res == OK) {
            // Disable buffer allocation for this BufferQueue, buffer manager will take over
            // the buffer allocation responsibility.
            mConsumer->getIGraphicBufferProducer()->allowAllocation(false);
            mUseBufferManager = true;
        } else {
            ALOGE("%s: Unable to register stream %d to camera3 buffer manager, "
                    "(error %d %s), fall back to BufferQueue for buffer management!",
                    __FUNCTION__, mId, res, strerror(-res));
        }
    }

    return OK;
}

status_t Camera3OutputStream::getBufferLockedCommon(ANativeWindowBuffer** anb, int* fenceFd) {
    ATRACE_HFR_CALL();
    status_t res;

    if ((res = getBufferPreconditionCheckLocked()) != OK) {
        return res;
    }

    bool gotBufferFromManager = false;

    if (mUseBufferManager) {
        sp<GraphicBuffer> gb;
        res = mBufferManager->getBufferForStream(getId(), getStreamSetId(),
                isMultiResolution(), &gb, fenceFd);
        if (res == OK) {
            // Attach this buffer to the bufferQueue: the buffer will be in dequeue state after a
            // successful return.
            *anb = gb.get();
            res = mConsumer->attachBuffer(*anb);
            if (shouldLogError(res, mState)) {
                ALOGE("%s: Stream %d: Can't attach the output buffer to this surface: %s (%d)",
                        __FUNCTION__, mId, strerror(-res), res);
            }
            if (res != OK) {
                checkRetAndSetAbandonedLocked(res);
                return res;
            }
            gotBufferFromManager = true;
            ALOGV("Stream %d: Attached new buffer", getId());
        } else if (res == ALREADY_EXISTS) {
            // Have sufficient free buffers already attached, can just
            // dequeue from buffer queue
            ALOGV("Stream %d: Reusing attached buffer", getId());
            gotBufferFromManager = false;
        } else if (res != OK) {
            ALOGE("%s: Stream %d: Can't get next output buffer from buffer manager: %s (%d)",
                    __FUNCTION__, mId, strerror(-res), res);
            return res;
        }
    }
    if (!gotBufferFromManager) {
        /**
         * Release the lock briefly to avoid deadlock in the following scenario:
         * Thread 1: StreamingProcessor::startStream -> Camera3Stream::isConfiguring().
         * This thread has acquired the StreamingProcessor lock and tries to lock the
         * Camera3Stream lock.
         * Thread 2: Camera3Stream::returnBuffer -> StreamingProcessor::onFrameAvailable().
         * This thread has acquired the Camera3Stream lock and the bufferQueue lock, and tries
         * to lock the StreamingProcessor lock.
         * Thread 3: Camera3Stream::getBuffer(). This thread has acquired the Camera3Stream lock
         * and tries to lock the bufferQueue lock.
         * Together these form a circular locking dependency.
         */
        sp<Surface> consumer = mConsumer;
        size_t remainingBuffers = (mState == STATE_PREPARING ? mTotalBufferCount :
                camera_stream::max_buffers) - mHandoutTotalBufferCount;
        mLock.unlock();

        nsecs_t dequeueStart = systemTime(SYSTEM_TIME_MONOTONIC);

        size_t batchSize = mBatchSize.load();
        if (batchSize == 1) {
            sp<ANativeWindow> anw = consumer;
            res = anw->dequeueBuffer(anw.get(), anb, fenceFd);
        } else {
            std::unique_lock<std::mutex> batchLock(mBatchLock);
            res = OK;
            if (mBatchedBuffers.size() == 0) {
                if (remainingBuffers == 0) {
                    ALOGE("%s: cannot get buffer while all buffers are handed out", __FUNCTION__);
                    return INVALID_OPERATION;
                }
                if (batchSize > remainingBuffers) {
                    batchSize = remainingBuffers;
                }
                batchLock.unlock();
                // Refill batched buffers
                std::vector<Surface::BatchBuffer> batchedBuffers;
                batchedBuffers.resize(batchSize);
                res = consumer->dequeueBuffers(&batchedBuffers);
                batchLock.lock();
                if (res != OK) {
                    ALOGE("%s: batch dequeueBuffers call failed! %s (%d)",
                            __FUNCTION__, strerror(-res), res);
                } else {
                    mBatchedBuffers = std::move(batchedBuffers);
                }
            }

            if (res == OK) {
                // Dispatch batch buffers
                *anb = mBatchedBuffers.back().buffer;
                *fenceFd = mBatchedBuffers.back().fenceFd;
                mBatchedBuffers.pop_back();
            }
        }

        nsecs_t dequeueEnd = systemTime(SYSTEM_TIME_MONOTONIC);
        mDequeueBufferLatency.add(dequeueStart, dequeueEnd);

        mLock.lock();

        if (mUseBufferManager && res == TIMED_OUT) {
            checkRemovedBuffersLocked();

            sp<GraphicBuffer> gb;
            res = mBufferManager->getBufferForStream(
                    getId(), getStreamSetId(), isMultiResolution(),
                    &gb, fenceFd, /*noFreeBuffer*/true);

            if (res == OK) {
                // Attach this buffer to the bufferQueue: the buffer will be in dequeue state
                // after a successful return.
                *anb = gb.get();
                res = mConsumer->attachBuffer(*anb);
                gotBufferFromManager = true;
                ALOGV("Stream %d: Attached new buffer", getId());

                if (res != OK) {
                    if (shouldLogError(res, mState)) {
                        ALOGE("%s: Stream %d: Can't attach the output buffer to this surface:"
                              " %s (%d)", __FUNCTION__, mId, strerror(-res), res);
                    }
                    checkRetAndSetAbandonedLocked(res);
                    return res;
                }
            } else {
                ALOGE("%s: Stream %d: Can't get next output buffer from buffer manager:"
                      " %s (%d)", __FUNCTION__, mId, strerror(-res), res);
                return res;
            }
        } else if (res != OK) {
            if (shouldLogError(res, mState)) {
                ALOGE("%s: Stream %d: Can't dequeue next output buffer: %s (%d)",
                        __FUNCTION__, mId, strerror(-res), res);
            }
            checkRetAndSetAbandonedLocked(res);
            return res;
        }
    }

    if (res == OK) {
        checkRemovedBuffersLocked();
    }

    return res;
}

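// Drain the consumer's list of buffers that have been removed from the surface and, when
// requested and the buffer manager is in use, report how many were removed so the manager can
// update its buffer accounting.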
void Camera3OutputStream::checkRemovedBuffersLocked(bool notifyBufferManager) {
    std::vector<sp<GraphicBuffer>> removedBuffers;
    status_t res = mConsumer->getAndFlushRemovedBuffers(&removedBuffers);
    if (res == OK) {
        onBuffersRemovedLocked(removedBuffers);

        if (notifyBufferManager && mUseBufferManager && removedBuffers.size() > 0) {
            mBufferManager->onBuffersRemoved(getId(), getStreamSetId(), isMultiResolution(),
                    removedBuffers.size());
        }
    }
}

void Camera3OutputStream::checkRetAndSetAbandonedLocked(status_t res) {
    // Only transition to STATE_ABANDONED from STATE_CONFIGURED. (If it is
    // STATE_PREPARING, let prepareNextBuffer handle the error.)
    if ((res == NO_INIT || res == DEAD_OBJECT) && mState == STATE_CONFIGURED) {
        mState = STATE_ABANDONED;
    }
}

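// Decide whether a failed surface operation should be logged as an error; DEAD_OBJECT/NO_INIT
// on an already-abandoned stream are expected and are not worth logging.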
bool Camera3OutputStream::shouldLogError(status_t res, StreamState state) {
    if (res == OK) {
        return false;
    }
    if ((res == DEAD_OBJECT || res == NO_INIT) && state == STATE_ABANDONED) {
        return false;
    }
    return true;
}

void Camera3OutputStream::onCachedBufferQueued() {
    Mutex::Autolock l(mLock);
    mCachedOutputBufferCount--;
    // Signal whoever is waiting for the buffer to be returned to the buffer
    // queue.
    mOutputBufferReturnedSignal.signal();
}

status_t Camera3OutputStream::disconnectLocked() {
    status_t res;

    if ((res = Camera3IOStreamBase::disconnectLocked()) != OK) {
        return res;
    }

    // Stream configuration was not finished (can only be in STATE_IN_CONFIG or STATE_CONSTRUCTED
    // state); no need to change the stream state, return OK.
    if (mConsumer == nullptr) {
        return OK;
    }

    returnPrefetchedBuffersLocked();

    if (mPreviewFrameSpacer != nullptr) {
        mPreviewFrameSpacer->requestExit();
    }

    ALOGV("%s: disconnecting stream %d from native window", __FUNCTION__, getId());

    res = native_window_api_disconnect(mConsumer.get(),
                                       NATIVE_WINDOW_API_CAMERA);
    /**
     * This is not an error. If the client's calling process dies, the window will
     * also die and all calls to it will return DEAD_OBJECT, thus it's already
     * "disconnected"
     */
    if (res == DEAD_OBJECT) {
        ALOGW("%s: While disconnecting stream %d from native window, the"
                " native window died from under us", __FUNCTION__, mId);
    }
    else if (res != OK) {
        ALOGE("%s: Unable to disconnect stream %d from native window "
              "(error %d %s)",
              __FUNCTION__, mId, res, strerror(-res));
        mState = STATE_ERROR;
        return res;
    }

    // Since the device is already idle, there is no getBuffer call to the buffer manager;
    // unregistering the stream at this point should be safe.
    if (mUseBufferManager) {
        res = mBufferManager->unregisterStream(getId(), getStreamSetId(), isMultiResolution());
        if (res != OK) {
            ALOGE("%s: Unable to unregister stream %d from buffer manager "
                    "(error %d %s)", __FUNCTION__, mId, res, strerror(-res));
            mState = STATE_ERROR;
            return res;
        }
        // Note that, to make the prepare/teardown case work, we must not call
        // mBufferManager.clear(), as the stream is still in a usable state after this call.
        mUseBufferManager = false;
    }

    mState = (mState == STATE_IN_RECONFIG) ? STATE_IN_CONFIG
                                           : STATE_CONSTRUCTED;

    mDequeueBufferLatency.log("Stream %d dequeueBuffer latency histogram", mId);
    mDequeueBufferLatency.reset();
    return OK;
}

status_t Camera3OutputStream::getEndpointUsage(uint64_t *usage) const {

    status_t res;

    if (mConsumer == nullptr) {
        // mConsumerUsage was sanitized before the Camera3OutputStream was constructed.
        *usage = mConsumerUsage;
        return OK;
    }

    res = getEndpointUsageForSurface(usage, mConsumer);

    return res;
}

void Camera3OutputStream::applyZSLUsageQuirk(int format, uint64_t *consumerUsage /*inout*/) {
    if (consumerUsage == nullptr) {
        return;
    }

    // If an opaque output stream's endpoint is ImageReader, add
    // GRALLOC_USAGE_HW_CAMERA_ZSL to the usage so HAL knows it will be used
    // for the ZSL use case.
    // Assume it's for ImageReader if the consumer usage doesn't have any of these bits set:
    //     1. GRALLOC_USAGE_HW_TEXTURE
    //     2. GRALLOC_USAGE_HW_RENDER
    //     3. GRALLOC_USAGE_HW_COMPOSER
    //     4. GRALLOC_USAGE_HW_VIDEO_ENCODER
    if (format == HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED &&
            (*consumerUsage & (GRALLOC_USAGE_HW_TEXTURE | GRALLOC_USAGE_HW_RENDER |
            GRALLOC_USAGE_HW_COMPOSER | GRALLOC_USAGE_HW_VIDEO_ENCODER)) == 0) {
        *consumerUsage |= GRALLOC_USAGE_HW_CAMERA_ZSL;
    }
}

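// Illustrative sketch (not part of the original implementation): how the quirk above behaves
// for two hypothetical consumers. The variable names and usage values are assumptions made
// purely for illustration.
//
//     uint64_t readerUsage = GRALLOC_USAGE_SW_READ_OFTEN;   // ImageReader-style consumer; no
//                                                           // HW_TEXTURE/RENDER/COMPOSER/VIDEO bits
//     applyZSLUsageQuirk(HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED, &readerUsage);
//     // readerUsage now also carries GRALLOC_USAGE_HW_CAMERA_ZSL
//
//     uint64_t previewUsage = GRALLOC_USAGE_HW_TEXTURE;     // SurfaceTexture/SurfaceView consumer
//     applyZSLUsageQuirk(HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED, &previewUsage);
//     // previewUsage is unchanged: a HW consumer bit is present, so no ZSL bit is added
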
status_t Camera3OutputStream::getEndpointUsageForSurface(uint64_t *usage,
        const sp<Surface>& surface) const {
    status_t res;
    uint64_t u = 0;

    res = native_window_get_consumer_usage(static_cast<ANativeWindow*>(surface.get()), &u);
    applyZSLUsageQuirk(camera_stream::format, &u);
    *usage = u;
    return res;
}

bool Camera3OutputStream::isVideoStream() const {
    uint64_t usage = 0;
    status_t res = getEndpointUsage(&usage);
    if (res != OK) {
        ALOGE("%s: getting end point usage failed: %s (%d).", __FUNCTION__, strerror(-res), res);
        return false;
    }

    return (usage & GRALLOC_USAGE_HW_VIDEO_ENCODER) != 0;
}

status_t Camera3OutputStream::setBufferManager(sp<Camera3BufferManager> bufferManager) {
    Mutex::Autolock l(mLock);
    if (mState != STATE_CONSTRUCTED) {
        ALOGE("%s: this method can only be called when stream in CONSTRUCTED state.",
                __FUNCTION__);
        return INVALID_OPERATION;
    }
    mBufferManager = bufferManager;

    return OK;
}

status_t Camera3OutputStream::updateStream(const std::vector<sp<Surface>> &/*outputSurfaces*/,
            const std::vector<OutputStreamInfo> &/*outputInfo*/,
            const std::vector<size_t> &/*removedSurfaceIds*/,
            KeyedVector<sp<Surface>, size_t> * /*outputMap*/) {
    ALOGE("%s: this method is not supported!", __FUNCTION__);
    return INVALID_OPERATION;
}

void Camera3OutputStream::BufferProducerListener::onBufferReleased() {
    sp<Camera3OutputStream> stream = mParent.promote();
    if (stream == nullptr) {
        ALOGV("%s: Parent camera3 output stream was destroyed", __FUNCTION__);
        return;
    }

    Mutex::Autolock l(stream->mLock);
    if (!(stream->mUseBufferManager)) {
        return;
    }

    ALOGV("Stream %d: Buffer released", stream->getId());
    bool shouldFreeBuffer = false;
    status_t res = stream->mBufferManager->onBufferReleased(
        stream->getId(), stream->getStreamSetId(), stream->isMultiResolution(),
        &shouldFreeBuffer);
    if (res != OK) {
        ALOGE("%s: signaling buffer release to buffer manager failed: %s (%d).", __FUNCTION__,
                strerror(-res), res);
        stream->mState = STATE_ERROR;
    }

    if (shouldFreeBuffer) {
        sp<GraphicBuffer> buffer;
        // Detach and free a buffer (when buffer goes out of scope)
        stream->detachBufferLocked(&buffer, /*fenceFd*/ nullptr);
        if (buffer.get() != nullptr) {
            stream->mBufferManager->notifyBufferRemoved(
                    stream->getId(), stream->getStreamSetId(), stream->isMultiResolution());
        }
    }
}

void Camera3OutputStream::BufferProducerListener::onBuffersDiscarded(
        const std::vector<sp<GraphicBuffer>>& buffers) {
    sp<Camera3OutputStream> stream = mParent.promote();
    if (stream == nullptr) {
        ALOGV("%s: Parent camera3 output stream was destroyed", __FUNCTION__);
        return;
    }

    if (buffers.size() > 0) {
        Mutex::Autolock l(stream->mLock);
        stream->onBuffersRemovedLocked(buffers);
        if (stream->mUseBufferManager) {
            stream->mBufferManager->onBuffersRemoved(stream->getId(),
                    stream->getStreamSetId(), stream->isMultiResolution(), buffers.size());
        }
        ALOGV("Stream %d: %zu Buffers discarded.", stream->getId(), buffers.size());
    }
}

void Camera3OutputStream::onBuffersRemovedLocked(
        const std::vector<sp<GraphicBuffer>>& removedBuffers) {
    sp<Camera3StreamBufferFreedListener> callback = mBufferFreedListener.promote();
    if (callback != nullptr) {
        for (const auto& gb : removedBuffers) {
            callback->onBufferFreed(mId, gb->handle);
        }
    }
}

status_t Camera3OutputStream::detachBuffer(sp<GraphicBuffer>* buffer, int* fenceFd) {
    Mutex::Autolock l(mLock);
    return detachBufferLocked(buffer, fenceFd);
}

status_t Camera3OutputStream::detachBufferLocked(sp<GraphicBuffer>* buffer, int* fenceFd) {
    ALOGV("Stream %d: detachBuffer", getId());
    if (buffer == nullptr) {
        return BAD_VALUE;
    }

    sp<Fence> fence;
    status_t res = mConsumer->detachNextBuffer(buffer, &fence);
    if (res == NO_MEMORY) {
        // This may rarely happen, which indicates that the released buffer was freed by another
        // call (e.g., attachBuffer, dequeueBuffer etc.) before reaching here. We should notify the
        // buffer manager that this buffer has been freed. It's not fatal, but should be avoided,
        // therefore log a warning.
        *buffer = 0;
        ALOGW("%s: the released buffer has already been freed by the buffer queue!", __FUNCTION__);
    } else if (res != OK) {
        // Treat other errors as abandonment
        if (shouldLogError(res, mState)) {
            ALOGE("%s: detach next buffer failed: %s (%d).", __FUNCTION__, strerror(-res), res);
        }
        mState = STATE_ABANDONED;
        return res;
    }

    if (fenceFd != nullptr) {
        if (fence != 0 && fence->isValid()) {
            *fenceFd = fence->dup();
        } else {
            *fenceFd = -1;
        }
    }

    // Here we assume detachBuffer is called by the buffer manager, so it doesn't need to be
    // notified.
    checkRemovedBuffersLocked(/*notifyBufferManager*/false);
    return res;
}

status_t Camera3OutputStream::dropBuffers(bool dropping) {
    Mutex::Autolock l(mLock);
    mDropBuffers = dropping;
    return OK;
}

const String8& Camera3OutputStream::getPhysicalCameraId() const {
    Mutex::Autolock l(mLock);
    return physicalCameraId();
}

status_t Camera3OutputStream::notifyBufferReleased(ANativeWindowBuffer* /*anwBuffer*/) {
    return OK;
}

bool Camera3OutputStream::isConsumerConfigurationDeferred(size_t surface_id) const {
    Mutex::Autolock l(mLock);

    if (surface_id != 0) {
        ALOGE("%s: surface_id %zu for Camera3OutputStream should be 0!", __FUNCTION__, surface_id);
    }
    return mConsumer == nullptr;
}

status_t Camera3OutputStream::setConsumers(const std::vector<sp<Surface>>& consumers) {
    Mutex::Autolock l(mLock);
    if (consumers.size() != 1) {
        ALOGE("%s: it's illegal to set %zu consumer surfaces!",
                __FUNCTION__, consumers.size());
        return INVALID_OPERATION;
    }
    if (consumers[0] == nullptr) {
        ALOGE("%s: it's illegal to set null consumer surface!", __FUNCTION__);
        return INVALID_OPERATION;
    }

    if (mConsumer != nullptr) {
        ALOGE("%s: consumer surface was already set!", __FUNCTION__);
        return INVALID_OPERATION;
    }

    mConsumer = consumers[0];
    return OK;
}

bool Camera3OutputStream::isConsumedByHWComposer() const {
    uint64_t usage = 0;
    status_t res = getEndpointUsage(&usage);
    if (res != OK) {
        ALOGE("%s: getting end point usage failed: %s (%d).", __FUNCTION__, strerror(-res), res);
        return false;
    }

    return (usage & GRALLOC_USAGE_HW_COMPOSER) != 0;
}

bool Camera3OutputStream::isConsumedByHWTexture() const {
    uint64_t usage = 0;
    status_t res = getEndpointUsage(&usage);
    if (res != OK) {
        ALOGE("%s: getting end point usage failed: %s (%d).", __FUNCTION__, strerror(-res), res);
        return false;
    }

    return (usage & GRALLOC_USAGE_HW_TEXTURE) != 0;
}

bool Camera3OutputStream::isConsumedByCPU() const {
    uint64_t usage = 0;
    status_t res = getEndpointUsage(&usage);
    if (res != OK) {
        ALOGE("%s: getting end point usage failed: %s (%d).", __FUNCTION__, strerror(-res), res);
        return false;
    }

    return (usage & GRALLOC_USAGE_SW_READ_MASK) != 0;
}

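// Illustrative note (an assumption-based sketch, not part of the original file): the four
// predicates above classify the endpoint purely from its gralloc consumer usage bits. Typical
// mappings, for illustration only:
//
//     // MediaCodec/MediaRecorder input surface  -> GRALLOC_USAGE_HW_VIDEO_ENCODER
//     //     => isVideoStream() == true
//     // SurfaceView surface                     -> GRALLOC_USAGE_HW_COMPOSER (often with HW_TEXTURE)
//     //     => isConsumedByHWComposer() == true
//     // GLES SurfaceTexture consumer            -> GRALLOC_USAGE_HW_TEXTURE
//     //     => isConsumedByHWTexture() == true
//     // ImageReader with a CPU-readable format  -> GRALLOC_USAGE_SW_READ_* bits
//     //     => isConsumedByCPU() == true
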
void Camera3OutputStream::dumpImageToDisk(nsecs_t timestamp,
        ANativeWindowBuffer* anwBuffer, int fence) {
    // Derive output file name
    std::string fileExtension = "jpg";
    char imageFileName[64];
    time_t now = time(0);
    tm *localTime = localtime(&now);
    snprintf(imageFileName, sizeof(imageFileName), "IMG_%4d%02d%02d_%02d%02d%02d_%" PRId64 ".%s",
            1900 + localTime->tm_year, localTime->tm_mon + 1, localTime->tm_mday,
            localTime->tm_hour, localTime->tm_min, localTime->tm_sec,
            timestamp, fileExtension.c_str());

    // Lock the image for CPU read
    sp<GraphicBuffer> graphicBuffer = GraphicBuffer::from(anwBuffer);
    void* mapped = nullptr;
    base::unique_fd fenceFd(dup(fence));
    status_t res = graphicBuffer->lockAsync(GraphicBuffer::USAGE_SW_READ_OFTEN, &mapped,
            fenceFd.release());
    if (res != OK) {
        ALOGE("%s: Failed to lock the buffer: %s (%d)", __FUNCTION__, strerror(-res), res);
        return;
    }

    // Figure out actual file size
    auto actualJpegSize = android::camera2::JpegProcessor::findJpegSize((uint8_t*)mapped, mMaxSize);
    if (actualJpegSize == 0) {
        actualJpegSize = mMaxSize;
    }

    // Output image data to file
    std::string filePath = "/data/misc/cameraserver/";
    filePath += imageFileName;
    std::ofstream imageFile(filePath.c_str(), std::ofstream::binary);
    if (!imageFile.is_open()) {
        ALOGE("%s: Unable to create file %s", __FUNCTION__, filePath.c_str());
        graphicBuffer->unlock();
        return;
    }
    imageFile.write((const char*)mapped, actualJpegSize);

    graphicBuffer->unlock();
}

status_t Camera3OutputStream::setBatchSize(size_t batchSize) {
    Mutex::Autolock l(mLock);
    if (batchSize == 0) {
        ALOGE("%s: invalid batch size 0", __FUNCTION__);
        return BAD_VALUE;
    }

    if (mUseBufferManager) {
        ALOGE("%s: batch operation is not supported with buffer manager", __FUNCTION__);
        return INVALID_OPERATION;
    }

    if (!isVideoStream()) {
        ALOGE("%s: batch operation is not supported with non-video stream", __FUNCTION__);
        return INVALID_OPERATION;
    }

    if (camera_stream::max_buffers < batchSize) {
        ALOGW("%s: batch size is capped by max_buffers %d", __FUNCTION__,
                camera_stream::max_buffers);
        batchSize = camera_stream::max_buffers;
    }

    size_t defaultBatchSize = 1;
    if (!mBatchSize.compare_exchange_strong(defaultBatchSize, batchSize)) {
        ALOGE("%s: change batch size from %zu to %zu dynamically is not supported",
                __FUNCTION__, defaultBatchSize, batchSize);
        return INVALID_OPERATION;
    }

    return OK;
}

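// Illustrative sketch (hypothetical caller, not part of the original file): a high-speed
// recording session might latch a batch size once, before streaming starts. The stream
// handle and the sizes below are assumptions for illustration only.
//
//     sp<Camera3OutputStream> stream = ...;    // video stream, buffer manager not in use
//     status_t res = stream->setBatchSize(4);  // OK: latched while mBatchSize is still the
//                                              // default value of 1 (capped at max_buffers)
//     res = stream->setBatchSize(8);           // INVALID_OPERATION: a dynamic change is
//                                              // rejected by compare_exchange_strong above
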
void Camera3OutputStream::onMinDurationChanged(nsecs_t duration, bool fixedFps) {
    Mutex::Autolock l(mLock);
    mMinExpectedDuration = duration;
    mFixedFps = fixedFps;
}

void Camera3OutputStream::setStreamUseCase(int64_t streamUseCase) {
    Mutex::Autolock l(mLock);
    camera_stream::use_case = streamUseCase;
}

void Camera3OutputStream::returnPrefetchedBuffersLocked() {
    std::vector<Surface::BatchBuffer> batchedBuffers;

    {
        std::lock_guard<std::mutex> batchLock(mBatchLock);
        if (mBatchedBuffers.size() != 0) {
            ALOGW("%s: %zu extra prefetched buffers detected. Returning",
                    __FUNCTION__, mBatchedBuffers.size());
            batchedBuffers = std::move(mBatchedBuffers);
        }
    }

    if (batchedBuffers.size() > 0) {
        mConsumer->cancelBuffers(batchedBuffers);
    }
}

nsecs_t Camera3OutputStream::syncTimestampToDisplayLocked(nsecs_t t) {
    nsecs_t currentTime = systemTime();
    if (!mFixedFps) {
        mLastCaptureTime = t;
        mLastPresentTime = currentTime;
        return t;
    }

    ParcelableVsyncEventData parcelableVsyncEventData;
    auto res = mDisplayEventReceiver.getLatestVsyncEventData(&parcelableVsyncEventData);
    if (res != OK) {
        ALOGE("%s: Stream %d: Error getting latest vsync event data: %s (%d)",
                __FUNCTION__, mId, strerror(-res), res);
        mLastCaptureTime = t;
        mLastPresentTime = currentTime;
        return t;
    }

    const VsyncEventData& vsyncEventData = parcelableVsyncEventData.vsync;
    nsecs_t minPresentT = mLastPresentTime + vsyncEventData.frameInterval / 2;

    // Find the best presentation time without worrying about the previous frame's
    // presentation time if the capture interval is more than kSpacingResetIntervalNs.
    //
    // When capture intervals are more than 50 ms apart (3 vsyncs for a 60hz refresh rate),
    // there is little risk in starting over and finding the earliest vsync to latch onto.
    // - Update the captureToPresentTime offset to be used for later frames.
    // - Example use cases:
    //   - when the frame rate drops below 20 fps, or
    //   - a new streaming session starts (stopPreview followed by
    //     startPreview)
    //
    nsecs_t captureInterval = t - mLastCaptureTime;
    if (captureInterval > kSpacingResetIntervalNs) {
        for (size_t i = 0; i < VsyncEventData::kFrameTimelinesLength; i++) {
            const auto& timeline = vsyncEventData.frameTimelines[i];
            if (timeline.deadlineTimestamp >= currentTime &&
                    timeline.expectedPresentationTime > minPresentT) {
                nsecs_t presentT = vsyncEventData.frameTimelines[i].expectedPresentationTime;
                mCaptureToPresentOffset = presentT - t;
                mLastCaptureTime = t;
                mLastPresentTime = presentT;

                // Move the expected presentation time back by 1/3 of the frame interval to
                // mitigate time drift. Due to time drift, if we directly use the expected
                // presentation time, often times two expected presentation times fall into
                // the same VSYNC interval.
                return presentT - vsyncEventData.frameInterval/3;
            }
        }
    }

    nsecs_t idealPresentT = t + mCaptureToPresentOffset;
    nsecs_t expectedPresentT = mLastPresentTime;
    nsecs_t minDiff = INT64_MAX;

    // In the fixed FPS case, when frame durations are close to multiples of the display refresh
    // rate, derive minimum intervals between presentation times based on the minimal
    // expected duration. The minimum number of Vsyncs is:
    // - 0 if minFrameDuration in (0, 1.5] * vSyncInterval,
    // - 1 if minFrameDuration in (1.5, 2.5] * vSyncInterval,
    // - and so on.
    //
    // This spaces out the displaying of the frames so that the frame
    // presentations are roughly in sync with frame captures.
    int minVsyncs = (mMinExpectedDuration - vsyncEventData.frameInterval / 2) /
            vsyncEventData.frameInterval;
    if (minVsyncs < 0) minVsyncs = 0;
    nsecs_t minInterval = minVsyncs * vsyncEventData.frameInterval;
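    // Worked example (illustrative only, assuming a 60Hz display, frameInterval ~= 16.67ms):
    //   - 30fps fixed-rate stream: mMinExpectedDuration ~= 33.33ms
    //       minVsyncs = (33.33ms - 8.33ms) / 16.67ms = 1 (integer division),
    //       minInterval = 16.67ms, i.e. consecutive presentations are at least one vsync apart.
    //   - 60fps fixed-rate stream: mMinExpectedDuration ~= 16.67ms
    //       minVsyncs = (16.67ms - 8.33ms) / 16.67ms = 0,
    //       minInterval = 0, so no extra spacing is enforced.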

    // In the fixed FPS case, if the frame duration deviates from multiples of the
    // display refresh rate, find the closest Vsync without requiring a minimum
    // number of Vsyncs.
    //
    // Example: (24fps camera, 60hz refresh):
    //   capture readout:  |  t1  |  t1  | .. |  t1  | .. |  t1  | .. |  t1  |
    //   display VSYNC:      | t2 | t2 | ... | t2 | ... | t2 | ... | t2 |
    //   |  : 1 frame
    //   t1 : 41.67ms
    //   t2 : 16.67ms
    //   t1/t2 = 2.5
    //
    //   24fps is a commonly used video frame rate. Because the capture
    //   interval is 2.5 times the display refresh interval, the minVsyncs
    //   calculation will directly fall at the boundary condition. In this case,
    //   we should fall back to the basic logic of finding the closest vsync
    //   timestamp without worrying about minVsyncs.
    float captureToVsyncIntervalRatio = 1.0f * mMinExpectedDuration / vsyncEventData.frameInterval;
    float ratioDeviation = std::fabs(
            captureToVsyncIntervalRatio - std::roundf(captureToVsyncIntervalRatio));
    bool captureDeviateFromVsync = ratioDeviation >= kMaxIntervalRatioDeviation;
    bool cameraDisplayInSync = (mFixedFps && !captureDeviateFromVsync);

    // Find the best timestamp in the vsync timelines:
    // - Only use at most kMaxTimelines timelines to avoid long latency,
    // - closest to the ideal presentation time,
    // - deadline timestamp is greater than the current time, and
    // - For fixed FPS, if the capture interval doesn't deviate too much from the refresh
    //   interval, the candidate presentation time is at least minInterval in the future
    //   compared to the last presentation time.
    // - For variable FPS, or if the capture interval deviates from the refresh
    //   interval by more than 5%, find a presentation time closest to the
    //   (lastPresentationTime + captureToPresentOffset) instead.
    int maxTimelines = std::min(kMaxTimelines, (int)VsyncEventData::kFrameTimelinesLength);
    float biasForShortDelay = 1.0f;
    for (int i = 0; i < maxTimelines; i ++) {
        const auto& vsyncTime = vsyncEventData.frameTimelines[i];
        if (minVsyncs > 0) {
            // Bias towards using smaller timeline index:
            //   i = 0:                bias = 1
            //   i = maxTimelines-1:   bias = -1
            biasForShortDelay = 1.0 - 2.0 * i / (maxTimelines - 1);
        }
        if (std::abs(vsyncTime.expectedPresentationTime - idealPresentT) < minDiff &&
                vsyncTime.deadlineTimestamp >= currentTime &&
                ((!cameraDisplayInSync && vsyncTime.expectedPresentationTime > minPresentT) ||
                 (cameraDisplayInSync && vsyncTime.expectedPresentationTime >
                        mLastPresentTime + minInterval +
                        static_cast<nsecs_t>(biasForShortDelay * kTimelineThresholdNs)))) {
            expectedPresentT = vsyncTime.expectedPresentationTime;
            minDiff = std::abs(vsyncTime.expectedPresentationTime - idealPresentT);
        }
    }

    if (expectedPresentT == mLastPresentTime && expectedPresentT <
            vsyncEventData.frameTimelines[maxTimelines-1].expectedPresentationTime) {
        // Couldn't find a reasonable presentation time. Using the last frame's
        // presentation time would cause a frame drop. The best option now
        // is to use the next VSync as long as the last presentation time
        // doesn't already have the maximum latency, in which case dropping the
        // buffer is preferable to increasing latency.
        //
        // Example: (60fps camera, 59.9hz refresh):
        //   capture readout:  | t1 | t1 | .. | t1 | .. | t1 | .. | t1 |
        //                       \    \         \         \         \
        //   queue to BQ:         |    |         |         |         |
        //                         \    \         \         \         \
        //   display VSYNC:         | t2 | t2 | ... | t2 | ... | t2 | ... | t2 |
        //
        //   |: 1 frame
        //   t1 : 16.67ms
        //   t2 : 16.69ms
        //
        //   It takes 833 frames for the capture readout count and display VSYNC count to be off
        //   by 1.
        //    - At frames [0, 832], presentationTime is set to timeline[0]
        //    - At frames [833, 833*2-1], presentationTime is set to timeline[1]
        //    - At frames [833*2, 833*3-1], presentationTime is set to timeline[2]
        //    - At frame 833*3, no presentation time is found because we only
        //      search timeline[0..2].
        //    - Dropping one buffer is better than further extending the presentation
        //      time.
        //
        // However, if frame 833*2 arrives 16.67ms early (right after frame
        // 833*2-1), no presentation time can be found because
        // getLatestVsyncEventData is called early. In that case, it's better to
        // set the presentation time by offsetting the last presentation time.
        expectedPresentT += vsyncEventData.frameInterval;
    }

    mLastCaptureTime = t;
    mLastPresentTime = expectedPresentT;

    // Move the expected presentation time back by 1/3 of the frame interval to
    // mitigate time drift. Due to time drift, if we directly use the expected
    // presentation time, often times two expected presentation times fall into
    // the same VSYNC interval.
    return expectedPresentT - vsyncEventData.frameInterval/3;
}

bool Camera3OutputStream::shouldLogError(status_t res) {
    Mutex::Autolock l(mLock);
    return shouldLogError(res, mState);
}

}; // namespace camera3

}; // namespace android