/*
 * Copyright (C) 2013-2018 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#define LOG_TAG "Camera3-OutputStream"
#define ATRACE_TAG ATRACE_TAG_CAMERA
//#define LOG_NDEBUG 0

#include <algorithm>
#include <ctime>
#include <fstream>

#include <aidl/android/hardware/camera/device/CameraBlob.h>
#include <aidl/android/hardware/camera/device/CameraBlobId.h>
#include "aidl/android/hardware/graphics/common/Dataspace.h"

#include <android-base/unique_fd.h>
#include <cutils/properties.h>
#include <ui/GraphicBuffer.h>
#include <utils/Log.h>
#include <utils/Trace.h>
#include <camera/StringUtils.h>

#include <common/CameraDeviceBase.h>
#include "api1/client2/JpegProcessor.h"
#include "Camera3OutputStream.h"
#include "utils/TraceHFR.h"

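// Fallback definition of container_of: recover a pointer to the enclosing
// struct from a pointer to one of its members by subtracting the member's
// offset. Used below to get the ANativeWindowBuffer that owns a
// buffer_handle_t.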
#ifndef container_of
#define container_of(ptr, type, member) \
    (type *)((char*)(ptr) - offsetof(type, member))
#endif

namespace android {

namespace camera3 {

using aidl::android::hardware::camera::device::CameraBlob;
using aidl::android::hardware::camera::device::CameraBlobId;

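// Camera3OutputStream has three public constructor variants: one for a
// Surface-backed stream of known dimensions, one for a bounded-size stream
// (BLOB / RAW_OPAQUE), and one for a deferred-consumer stream whose Surface is
// attached later. A fourth variant takes the stream type explicitly and leaves
// mConsumer for subclasses to initialize.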
Camera3OutputStream::Camera3OutputStream(int id,
        sp<Surface> consumer,
        uint32_t width, uint32_t height, int format,
        android_dataspace dataSpace, camera_stream_rotation_t rotation,
        nsecs_t timestampOffset, const std::string& physicalCameraId,
        const std::unordered_set<int32_t> &sensorPixelModesUsed, IPCTransport transport,
        int setId, bool isMultiResolution, int64_t dynamicRangeProfile,
        int64_t streamUseCase, bool deviceTimeBaseIsRealtime, int timestampBase,
        int mirrorMode, int32_t colorSpace, bool useReadoutTimestamp) :
        Camera3IOStreamBase(id, CAMERA_STREAM_OUTPUT, width, height,
                            /*maxSize*/0, format, dataSpace, rotation,
                            physicalCameraId, sensorPixelModesUsed, setId, isMultiResolution,
                            dynamicRangeProfile, streamUseCase, deviceTimeBaseIsRealtime,
                            timestampBase, colorSpace),
        mConsumer(consumer),
        mTransform(0),
        mTraceFirstBuffer(true),
        mUseBufferManager(false),
        mTimestampOffset(timestampOffset),
        mUseReadoutTime(useReadoutTimestamp),
        mConsumerUsage(0),
        mDropBuffers(false),
        mMirrorMode(mirrorMode),
        mDequeueBufferLatency(kDequeueLatencyBinSize),
        mIPCTransport(transport) {

    if (mConsumer == NULL) {
        ALOGE("%s: Consumer is NULL!", __FUNCTION__);
        mState = STATE_ERROR;
    }

    bool needsReleaseNotify = setId > CAMERA3_STREAM_SET_ID_INVALID;
    mBufferProducerListener = new BufferProducerListener(this, needsReleaseNotify);
}

Camera3OutputStream::Camera3OutputStream(int id,
        sp<Surface> consumer,
        uint32_t width, uint32_t height, size_t maxSize, int format,
        android_dataspace dataSpace, camera_stream_rotation_t rotation,
        nsecs_t timestampOffset, const std::string& physicalCameraId,
        const std::unordered_set<int32_t> &sensorPixelModesUsed, IPCTransport transport,
        int setId, bool isMultiResolution, int64_t dynamicRangeProfile,
        int64_t streamUseCase, bool deviceTimeBaseIsRealtime, int timestampBase,
        int mirrorMode, int32_t colorSpace, bool useReadoutTimestamp) :
        Camera3IOStreamBase(id, CAMERA_STREAM_OUTPUT, width, height, maxSize,
                            format, dataSpace, rotation, physicalCameraId, sensorPixelModesUsed,
                            setId, isMultiResolution, dynamicRangeProfile, streamUseCase,
                            deviceTimeBaseIsRealtime, timestampBase, colorSpace),
        mConsumer(consumer),
        mTransform(0),
        mTraceFirstBuffer(true),
        mUseBufferManager(false),
        mTimestampOffset(timestampOffset),
        mUseReadoutTime(useReadoutTimestamp),
        mConsumerUsage(0),
        mDropBuffers(false),
        mMirrorMode(mirrorMode),
        mDequeueBufferLatency(kDequeueLatencyBinSize),
        mIPCTransport(transport) {

    if (format != HAL_PIXEL_FORMAT_BLOB && format != HAL_PIXEL_FORMAT_RAW_OPAQUE) {
        ALOGE("%s: Bad format for size-only stream: %d", __FUNCTION__,
                format);
        mState = STATE_ERROR;
    }

    if (mConsumer == NULL) {
        ALOGE("%s: Consumer is NULL!", __FUNCTION__);
        mState = STATE_ERROR;
    }

    bool needsReleaseNotify = setId > CAMERA3_STREAM_SET_ID_INVALID;
    mBufferProducerListener = new BufferProducerListener(this, needsReleaseNotify);
}

Camera3OutputStream::Camera3OutputStream(int id,
        uint32_t width, uint32_t height, int format,
        uint64_t consumerUsage, android_dataspace dataSpace,
        camera_stream_rotation_t rotation, nsecs_t timestampOffset,
        const std::string& physicalCameraId,
        const std::unordered_set<int32_t> &sensorPixelModesUsed, IPCTransport transport,
        int setId, bool isMultiResolution, int64_t dynamicRangeProfile,
        int64_t streamUseCase, bool deviceTimeBaseIsRealtime, int timestampBase,
        int mirrorMode, int32_t colorSpace, bool useReadoutTimestamp) :
        Camera3IOStreamBase(id, CAMERA_STREAM_OUTPUT, width, height,
                            /*maxSize*/0, format, dataSpace, rotation,
                            physicalCameraId, sensorPixelModesUsed, setId, isMultiResolution,
                            dynamicRangeProfile, streamUseCase, deviceTimeBaseIsRealtime,
                            timestampBase, colorSpace),
        mConsumer(nullptr),
        mTransform(0),
        mTraceFirstBuffer(true),
        mUseBufferManager(false),
        mTimestampOffset(timestampOffset),
        mUseReadoutTime(useReadoutTimestamp),
        mConsumerUsage(consumerUsage),
        mDropBuffers(false),
        mMirrorMode(mirrorMode),
        mDequeueBufferLatency(kDequeueLatencyBinSize),
        mIPCTransport(transport) {
    // Deferred consumers only support the preview surface format for now.
    if (format != HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED) {
        ALOGE("%s: Deferred consumer only supports IMPLEMENTATION_DEFINED format now!",
                __FUNCTION__);
        mState = STATE_ERROR;
    }

    // Validation check for the consumer usage flag.
    if ((consumerUsage & GraphicBuffer::USAGE_HW_TEXTURE) == 0 &&
            (consumerUsage & GraphicBuffer::USAGE_HW_COMPOSER) == 0) {
        ALOGE("%s: Deferred consumer usage flag is illegal %" PRIu64 "!",
                __FUNCTION__, consumerUsage);
        mState = STATE_ERROR;
    }

    mConsumerName = "Deferred";
    bool needsReleaseNotify = setId > CAMERA3_STREAM_SET_ID_INVALID;
    mBufferProducerListener = new BufferProducerListener(this, needsReleaseNotify);
}

Camera3OutputStream::Camera3OutputStream(int id, camera_stream_type_t type,
        uint32_t width, uint32_t height,
        int format,
        android_dataspace dataSpace,
        camera_stream_rotation_t rotation,
        const std::string& physicalCameraId,
        const std::unordered_set<int32_t> &sensorPixelModesUsed,
        IPCTransport transport,
        uint64_t consumerUsage, nsecs_t timestampOffset,
        int setId, bool isMultiResolution,
        int64_t dynamicRangeProfile, int64_t streamUseCase,
        bool deviceTimeBaseIsRealtime, int timestampBase,
        int mirrorMode, int32_t colorSpace,
        bool useReadoutTimestamp) :
        Camera3IOStreamBase(id, type, width, height,
                            /*maxSize*/0,
                            format, dataSpace, rotation,
                            physicalCameraId, sensorPixelModesUsed, setId, isMultiResolution,
                            dynamicRangeProfile, streamUseCase, deviceTimeBaseIsRealtime,
                            timestampBase, colorSpace),
        mTransform(0),
        mTraceFirstBuffer(true),
        mUseBufferManager(false),
        mTimestampOffset(timestampOffset),
        mUseReadoutTime(useReadoutTimestamp),
        mConsumerUsage(consumerUsage),
        mDropBuffers(false),
        mMirrorMode(mirrorMode),
        mDequeueBufferLatency(kDequeueLatencyBinSize),
        mIPCTransport(transport) {

    bool needsReleaseNotify = setId > CAMERA3_STREAM_SET_ID_INVALID;
    mBufferProducerListener = new BufferProducerListener(this, needsReleaseNotify);

    // Subclasses are expected to initialize mConsumer themselves
}

Camera3OutputStream::~Camera3OutputStream() {
    disconnectLocked();
}

status_t Camera3OutputStream::getBufferLocked(camera_stream_buffer *buffer,
        const std::vector<size_t>&) {
    ATRACE_HFR_CALL();

    ANativeWindowBuffer* anb;
    int fenceFd = -1;

    status_t res;
    res = getBufferLockedCommon(&anb, &fenceFd);
    if (res != OK) {
        return res;
    }

    /**
     * FenceFD now owned by HAL except in case of error,
     * in which case we reassign it to acquire_fence
     */
    handoutBufferLocked(*buffer, &(anb->handle), /*acquireFence*/fenceFd,
                        /*releaseFence*/-1, CAMERA_BUFFER_STATUS_OK, /*output*/true);

    return OK;
}

status_t Camera3OutputStream::queueBufferToConsumer(sp<ANativeWindow>& consumer,
        ANativeWindowBuffer* buffer, int anwReleaseFence,
        const std::vector<size_t>&) {
    return consumer->queueBuffer(consumer.get(), buffer, anwReleaseFence);
}

status_t Camera3OutputStream::returnBufferLocked(
        const camera_stream_buffer &buffer,
        nsecs_t timestamp, nsecs_t readoutTimestamp,
        int32_t transform, const std::vector<size_t>& surface_ids) {
    ATRACE_HFR_CALL();

    if (mHandoutTotalBufferCount == 1) {
        returnPrefetchedBuffersLocked();
    }

    status_t res = returnAnyBufferLocked(buffer, timestamp, readoutTimestamp,
                                         /*output*/true, transform, surface_ids);

    if (res != OK) {
        return res;
    }

    mLastTimestamp = timestamp;
    mFrameCount++;

    return OK;
}

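// HIDL HALs write the JPEG transport header at the end of the buffer as the
// legacy camera_jpeg_blob_t. Rewrite it in place as the AIDL CameraBlob struct
// expected by the rest of the framework (see b/229688810 at the call site),
// validating the blob id and size along the way.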
status_t Camera3OutputStream::fixUpHidlJpegBlobHeader(ANativeWindowBuffer* anwBuffer, int fence) {
    // Lock the JPEG buffer for CPU read
    sp<GraphicBuffer> graphicBuffer = GraphicBuffer::from(anwBuffer);
    void* mapped = nullptr;
    base::unique_fd fenceFd(dup(fence));
    // Use USAGE_SW_WRITE_RARELY since we're going to re-write the CameraBlob
    // header.
    GraphicBufferLocker gbLocker(graphicBuffer);
    status_t res =
            gbLocker.lockAsync(
                    GraphicBuffer::USAGE_SW_READ_OFTEN | GraphicBuffer::USAGE_SW_WRITE_RARELY,
                    &mapped, fenceFd.release());
    if (res != OK) {
        ALOGE("%s: Failed to lock the buffer: %s (%d)", __FUNCTION__, strerror(-res), res);
        return res;
    }

    uint8_t *hidlHeaderStart =
            static_cast<uint8_t*>(mapped) + graphicBuffer->getWidth() - sizeof(camera_jpeg_blob_t);
    // Check that the jpeg buffer is big enough to contain HIDL camera blob
    if (hidlHeaderStart < static_cast<uint8_t *>(mapped)) {
        ALOGE("%s, jpeg buffer not large enough to fit HIDL camera blob %" PRIu32, __FUNCTION__,
                graphicBuffer->getWidth());
        return BAD_VALUE;
    }
    camera_jpeg_blob_t *hidlBlobHeader = reinterpret_cast<camera_jpeg_blob_t *>(hidlHeaderStart);

    // Check that the blob is indeed the jpeg blob id.
    if (hidlBlobHeader->jpeg_blob_id != CAMERA_JPEG_BLOB_ID) {
        ALOGE("%s, jpeg blob id %d is not correct", __FUNCTION__, hidlBlobHeader->jpeg_blob_id);
        return BAD_VALUE;
    }

    // Retrieve id and blob size
    CameraBlobId blobId = static_cast<CameraBlobId>(hidlBlobHeader->jpeg_blob_id);
    uint32_t blobSizeBytes = hidlBlobHeader->jpeg_size;

    if (blobSizeBytes > (graphicBuffer->getWidth() - sizeof(camera_jpeg_blob_t))) {
        ALOGE("%s, blobSize in HIDL jpeg blob : %d is corrupt, buffer size %" PRIu32, __FUNCTION__,
                blobSizeBytes, graphicBuffer->getWidth());
    }

    uint8_t *aidlHeaderStart =
            static_cast<uint8_t*>(mapped) + graphicBuffer->getWidth() - sizeof(CameraBlob);

    // Check that the jpeg buffer is big enough to contain AIDL camera blob
    if (aidlHeaderStart < static_cast<uint8_t *>(mapped)) {
        ALOGE("%s, jpeg buffer not large enough to fit AIDL camera blob %" PRIu32, __FUNCTION__,
                graphicBuffer->getWidth());
        return BAD_VALUE;
    }

    if (static_cast<uint8_t*>(mapped) + blobSizeBytes > aidlHeaderStart) {
        ALOGE("%s, jpeg blob with size %d , buffer size %" PRIu32 " not large enough to fit"
                " AIDL camera blob without corrupting jpeg", __FUNCTION__, blobSizeBytes,
                graphicBuffer->getWidth());
        return BAD_VALUE;
    }

    // Fill in JPEG header
    CameraBlob aidlHeader = {
            .blobId = blobId,
            .blobSizeBytes = static_cast<int32_t>(blobSizeBytes)
    };
    memcpy(aidlHeaderStart, &aidlHeader, sizeof(CameraBlob));
    graphicBuffer->unlock();
    return OK;
}

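// Returns a completed buffer to the consumer. Error, dropped, and
// zero-timestamp buffers are cancelled back to the ANativeWindow; good buffers
// get their presentation timestamp set and are either queued directly or
// handed to the preview frame spacer when one is configured.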
status_t Camera3OutputStream::returnBufferCheckedLocked(
        const camera_stream_buffer &buffer,
        nsecs_t timestamp,
        nsecs_t readoutTimestamp,
        [[maybe_unused]] bool output,
        int32_t transform,
        const std::vector<size_t>& surface_ids,
        /*out*/
        sp<Fence> *releaseFenceOut) {

    ALOG_ASSERT(output, "Expected output to be true");

    status_t res;

    // Fence management - always honor release fence from HAL
    sp<Fence> releaseFence = new Fence(buffer.release_fence);
    int anwReleaseFence = releaseFence->dup();

    /**
     * Release the lock briefly to avoid deadlock with
     * StreamingProcessor::startStream -> Camera3Stream::isConfiguring (this
     * thread will go into StreamingProcessor::onFrameAvailable) during
     * queueBuffer
     */
    sp<ANativeWindow> currentConsumer = mConsumer;
    StreamState state = mState;
    mLock.unlock();

    ANativeWindowBuffer *anwBuffer = container_of(buffer.buffer, ANativeWindowBuffer, handle);
    bool bufferDeferred = false;
    /**
     * Return buffer back to ANativeWindow
     */
    if (buffer.status == CAMERA_BUFFER_STATUS_ERROR || mDropBuffers || timestamp == 0) {
        // Cancel buffer
        if (mDropBuffers) {
            ALOGV("%s: Dropping a frame for stream %d.", __FUNCTION__, mId);
        } else if (buffer.status == CAMERA_BUFFER_STATUS_ERROR) {
            ALOGV("%s: A frame is dropped for stream %d due to buffer error.", __FUNCTION__, mId);
        } else {
            ALOGE("%s: Stream %d: timestamp shouldn't be 0", __FUNCTION__, mId);
        }

        res = currentConsumer->cancelBuffer(currentConsumer.get(),
                anwBuffer,
                anwReleaseFence);
        if (shouldLogError(res, state)) {
            ALOGE("%s: Stream %d: Error cancelling buffer to native window:"
                  " %s (%d)", __FUNCTION__, mId, strerror(-res), res);
        }

        notifyBufferReleased(anwBuffer);
        if (mUseBufferManager) {
            // Return this buffer back to buffer manager.
            mBufferProducerListener->onBufferReleased();
        }
    } else {
        if (mTraceFirstBuffer && (stream_type == CAMERA_STREAM_OUTPUT)) {
            {
                char traceLog[48];
                snprintf(traceLog, sizeof(traceLog), "Stream %d: first full buffer\n", mId);
                ATRACE_NAME(traceLog);
            }
            mTraceFirstBuffer = false;
        }
        // Fix CameraBlob id type discrepancy between HIDL and AIDL, details: http://b/229688810
        if (getFormat() == HAL_PIXEL_FORMAT_BLOB && (getDataSpace() == HAL_DATASPACE_V0_JFIF ||
                    (getDataSpace() ==
                     static_cast<android_dataspace_t>(
                         aidl::android::hardware::graphics::common::Dataspace::JPEG_R)))) {
            if (mIPCTransport == IPCTransport::HIDL) {
                fixUpHidlJpegBlobHeader(anwBuffer, anwReleaseFence);
            }
            // If this is a JPEG output, and image dump mask is set, save image to
            // disk.
            if (mImageDumpMask) {
                dumpImageToDisk(timestamp, anwBuffer, anwReleaseFence);
            }
        }

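        // Choose the timestamp to present: the readout timestamp when readout
        // time or display sync is in use (and a readout timestamp was
        // provided), otherwise the capture (shutter) timestamp, both shifted
        // by the stream's timestamp offset.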
        nsecs_t captureTime = ((mUseReadoutTime || mSyncToDisplay) && readoutTimestamp != 0 ?
                readoutTimestamp : timestamp) - mTimestampOffset;
        if (mPreviewFrameSpacer != nullptr) {
            nsecs_t readoutTime = (readoutTimestamp != 0 ? readoutTimestamp : timestamp)
                    - mTimestampOffset;
            res = mPreviewFrameSpacer->queuePreviewBuffer(captureTime, readoutTime,
                    transform, anwBuffer, anwReleaseFence);
            if (res != OK) {
                ALOGE("%s: Stream %d: Error queuing buffer to preview buffer spacer: %s (%d)",
                        __FUNCTION__, mId, strerror(-res), res);
                return res;
            }
            bufferDeferred = true;
        } else {
            nsecs_t presentTime = mSyncToDisplay ?
                    syncTimestampToDisplayLocked(captureTime, releaseFence) : captureTime;

            setTransform(transform, true/*mayChangeMirror*/);
            res = native_window_set_buffers_timestamp(mConsumer.get(), presentTime);
            if (res != OK) {
                ALOGE("%s: Stream %d: Error setting timestamp: %s (%d)",
                        __FUNCTION__, mId, strerror(-res), res);
                return res;
            }

            queueHDRMetadata(anwBuffer->handle, currentConsumer, dynamic_range_profile);

            res = queueBufferToConsumer(currentConsumer, anwBuffer, anwReleaseFence, surface_ids);
            if (shouldLogError(res, state)) {
                ALOGE("%s: Stream %d: Error queueing buffer to native window:"
                      " %s (%d)", __FUNCTION__, mId, strerror(-res), res);
            }
        }
    }
    mLock.lock();

    if (bufferDeferred) {
        mCachedOutputBufferCount++;
    }

    // Once a valid buffer has been returned to the queue, can no longer
    // dequeue all buffers for preallocation.
    if (buffer.status != CAMERA_BUFFER_STATUS_ERROR) {
        mStreamUnpreparable = true;
    }

    *releaseFenceOut = releaseFence;

    return res;
}

void Camera3OutputStream::dump(int fd, [[maybe_unused]] const Vector<String16> &args) const {
    std::string lines;
    lines += fmt::sprintf("    Stream[%d]: Output\n", mId);
    lines += fmt::sprintf("      Consumer name: %s\n", mConsumerName);
    write(fd, lines.c_str(), lines.size());

    Camera3IOStreamBase::dump(fd, args);

    mDequeueBufferLatency.dump(fd,
            "      DequeueBuffer latency histogram:");
}

status_t Camera3OutputStream::setTransform(int transform, bool mayChangeMirror) {
    ATRACE_CALL();
    Mutex::Autolock l(mLock);
    if (mMirrorMode != OutputConfiguration::MIRROR_MODE_AUTO && mayChangeMirror) {
        // If the mirroring mode is not AUTO, do not allow transform update
        // which may change mirror.
        return OK;
    }

    return setTransformLocked(transform);
}

status_t Camera3OutputStream::setTransformLocked(int transform) {
    status_t res = OK;

    if (transform == -1) return res;

    if (mState == STATE_ERROR) {
        ALOGE("%s: Stream in error state", __FUNCTION__);
        return INVALID_OPERATION;
    }

    mTransform = transform;
    if (mState == STATE_CONFIGURED) {
        res = native_window_set_buffers_transform(mConsumer.get(),
                transform);
        if (res != OK) {
            ALOGE("%s: Unable to configure stream transform to %x: %s (%d)",
                    __FUNCTION__, transform, strerror(-res), res);
        }
    }
    return res;
}

status_t Camera3OutputStream::configureQueueLocked() {
    status_t res;

    mTraceFirstBuffer = true;
    if ((res = Camera3IOStreamBase::configureQueueLocked()) != OK) {
        return res;
    }

    if ((res = configureConsumerQueueLocked(true /*allowPreviewRespace*/)) != OK) {
        return res;
    }

    // Set dequeueBuffer/attachBuffer timeout if the consumer is not hw composer or hw texture.
    // We need to skip these cases as a timeout will disable the non-blocking (async) mode.
    if (!(isConsumedByHWComposer() || isConsumedByHWTexture())) {
        if (mUseBufferManager) {
            // When buffer manager is handling the buffer, we should have available buffers in
            // buffer queue before we call into dequeueBuffer because buffer manager is tracking
            // free buffers.
            // There are however some consumer side features (ImageReader::discardFreeBuffers)
            // that can discard free buffers without notifying buffer manager. We want the
            // timeout to happen immediately here so buffer manager can try to update its
            // internal state and try to allocate a buffer instead of waiting.
            mConsumer->setDequeueTimeout(0);
        } else {
            mConsumer->setDequeueTimeout(kDequeueBufferTimeout);
        }
    }

    return OK;
}

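// Connects to the ANativeWindow consumer and applies the stream configuration:
// usage flags, scaling mode, dimensions (or max size for bounded streams),
// format, dataspace, buffer count, and transform. Also decides whether to sync
// preview to the display or route buffers through the preview frame spacer,
// adjusts the timestamp offset for the selected timestamp base, and registers
// with the buffer manager when one is in use.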
status_t Camera3OutputStream::configureConsumerQueueLocked(bool allowPreviewRespace) {
    status_t res;

    mTraceFirstBuffer = true;

    ALOG_ASSERT(mConsumer != 0, "mConsumer should never be NULL");

    // Configure consumer-side ANativeWindow interface. The listener may be used
    // to notify buffer manager (if it is used) of the returned buffers.
    res = mConsumer->connect(NATIVE_WINDOW_API_CAMERA,
            /*reportBufferRemoval*/true,
            /*listener*/mBufferProducerListener);
    if (res != OK) {
        ALOGE("%s: Unable to connect to native window for stream %d",
                __FUNCTION__, mId);
        return res;
    }

    mConsumerName = mConsumer->getConsumerName();

    res = native_window_set_usage(mConsumer.get(), mUsage);
    if (res != OK) {
        ALOGE("%s: Unable to configure usage %" PRIu64 " for stream %d",
                __FUNCTION__, mUsage, mId);
        return res;
    }

    res = native_window_set_scaling_mode(mConsumer.get(),
            NATIVE_WINDOW_SCALING_MODE_SCALE_TO_WINDOW);
    if (res != OK) {
        ALOGE("%s: Unable to configure stream scaling: %s (%d)",
                __FUNCTION__, strerror(-res), res);
        return res;
    }

    if (mMaxSize == 0) {
        // For buffers of known size
        res = native_window_set_buffers_dimensions(mConsumer.get(),
                camera_stream::width, camera_stream::height);
    } else {
        // For buffers with bounded size
        res = native_window_set_buffers_dimensions(mConsumer.get(),
                mMaxSize, 1);
    }
    if (res != OK) {
        ALOGE("%s: Unable to configure stream buffer dimensions"
                " %d x %d (maxSize %zu) for stream %d",
                __FUNCTION__, camera_stream::width, camera_stream::height,
                mMaxSize, mId);
        return res;
    }
    res = native_window_set_buffers_format(mConsumer.get(),
            camera_stream::format);
    if (res != OK) {
        ALOGE("%s: Unable to configure stream buffer format %#x for stream %d",
                __FUNCTION__, camera_stream::format, mId);
        return res;
    }

    res = native_window_set_buffers_data_space(mConsumer.get(),
            camera_stream::data_space);
    if (res != OK) {
        ALOGE("%s: Unable to configure stream dataspace %#x for stream %d",
                __FUNCTION__, camera_stream::data_space, mId);
        return res;
    }

    int maxConsumerBuffers;
    res = static_cast<ANativeWindow*>(mConsumer.get())->query(
            mConsumer.get(),
            NATIVE_WINDOW_MIN_UNDEQUEUED_BUFFERS, &maxConsumerBuffers);
    if (res != OK) {
        ALOGE("%s: Unable to query consumer undequeued"
                " buffer count for stream %d", __FUNCTION__, mId);
        return res;
    }

    ALOGV("%s: Consumer wants %d buffers, HAL wants %d", __FUNCTION__,
            maxConsumerBuffers, camera_stream::max_buffers);
    if (camera_stream::max_buffers == 0) {
        ALOGE("%s: Camera HAL requested max_buffer count: %d, requires at least 1",
                __FUNCTION__, camera_stream::max_buffers);
        return INVALID_OPERATION;
    }

    mTotalBufferCount = maxConsumerBuffers + camera_stream::max_buffers;

    int timestampBase = getTimestampBase();
    bool isDefaultTimeBase = (timestampBase ==
            OutputConfiguration::TIMESTAMP_BASE_DEFAULT);
    if (allowPreviewRespace) {
        bool forceChoreographer = (timestampBase ==
                OutputConfiguration::TIMESTAMP_BASE_CHOREOGRAPHER_SYNCED);
        bool defaultToChoreographer = (isDefaultTimeBase &&
                isConsumedByHWComposer());
        bool defaultToSpacer = (isDefaultTimeBase &&
                isConsumedByHWTexture() &&
                !isConsumedByCPU() &&
                !isVideoStream());
        if (forceChoreographer || defaultToChoreographer) {
            mSyncToDisplay = true;
            // For choreographer synced stream, extra buffers aren't kept by
            // camera service. So no need to update mMaxCachedBufferCount.
            mTotalBufferCount += kDisplaySyncExtraBuffer;
        } else if (defaultToSpacer) {
            mPreviewFrameSpacer = new PreviewFrameSpacer(this, mConsumer);
            // For preview frame spacer, the extra buffer is kept by camera
            // service. So update mMaxCachedBufferCount.
            mMaxCachedBufferCount = 1;
            mTotalBufferCount += mMaxCachedBufferCount;
            res = mPreviewFrameSpacer->run((std::string("PreviewSpacer-")
                    + std::to_string(mId)).c_str());
            if (res != OK) {
                ALOGE("%s: Unable to start preview spacer: %s (%d)", __FUNCTION__,
                        strerror(-res), res);
                return res;
            }
        }
    }
    mHandoutTotalBufferCount = 0;
    mFrameCount = 0;
    mLastTimestamp = 0;

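    // Adjust the timestamp offset applied to this stream's buffers, based on
    // the requested timestamp base and whether the device time base is
    // realtime, so queued timestamps end up in the clock domain the consumer
    // expects.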
    if (isDeviceTimeBaseRealtime()) {
        if (isDefaultTimeBase && !isConsumedByHWComposer() && !isVideoStream()) {
            // Default time base, but not hardware composer or video encoder
            mTimestampOffset = 0;
        } else if (timestampBase == OutputConfiguration::TIMESTAMP_BASE_REALTIME ||
                timestampBase == OutputConfiguration::TIMESTAMP_BASE_SENSOR) {
            mTimestampOffset = 0;
        }
        // If timestampBase is CHOREOGRAPHER SYNCED or MONOTONIC, leave
        // timestamp offset as bootTime - monotonicTime.
    } else {
        if (timestampBase == OutputConfiguration::TIMESTAMP_BASE_REALTIME) {
            // Reverse offset for monotonicTime -> bootTime
            mTimestampOffset = -mTimestampOffset;
        } else {
            // If timestampBase is DEFAULT, MONOTONIC, SENSOR or
            // CHOREOGRAPHER_SYNCED, timestamp offset is 0.
            mTimestampOffset = 0;
        }
    }

    res = native_window_set_buffer_count(mConsumer.get(),
            mTotalBufferCount);
    if (res != OK) {
        ALOGE("%s: Unable to set buffer count for stream %d",
                __FUNCTION__, mId);
        return res;
    }

    res = native_window_set_buffers_transform(mConsumer.get(),
            mTransform);
    if (res != OK) {
        ALOGE("%s: Unable to configure stream transform to %x: %s (%d)",
                __FUNCTION__, mTransform, strerror(-res), res);
        return res;
    }

    /**
     * Camera3 Buffer manager is only supported by HAL3.3 onwards, as older HALs require
     * buffers to be statically allocated for internal static buffer registration, while the
     * buffers provided by buffer manager are really dynamically allocated. Camera3Device only
     * sets the mBufferManager if device version is > HAL3.2, which guarantees that the buffer
     * manager setup is skipped in the code below. Note that HAL3.2 is also excluded here, as
     * some HAL3.2 devices may not support the dynamic buffer registration.
     * Also Camera3BufferManager does not support display/texture streams as they have their own
     * buffer management logic.
     */
    if (mBufferManager != 0 && mSetId > CAMERA3_STREAM_SET_ID_INVALID &&
            !(isConsumedByHWComposer() || isConsumedByHWTexture())) {
        uint64_t consumerUsage = 0;
        getEndpointUsage(&consumerUsage);
        uint32_t width = (mMaxSize == 0) ? getWidth() : mMaxSize;
        uint32_t height = (mMaxSize == 0) ? getHeight() : 1;
        StreamInfo streamInfo(
                getId(), getStreamSetId(), width, height, getFormat(), getDataSpace(),
                mUsage | consumerUsage, mTotalBufferCount,
                /*isConfigured*/true, isMultiResolution());
        wp<Camera3OutputStream> weakThis(this);
        res = mBufferManager->registerStream(weakThis,
                streamInfo);
        if (res == OK) {
            // Disable buffer allocation for this BufferQueue, buffer manager will take over
            // the buffer allocation responsibility.
            mConsumer->getIGraphicBufferProducer()->allowAllocation(false);
            mUseBufferManager = true;
        } else {
            ALOGE("%s: Unable to register stream %d to camera3 buffer manager, "
                    "(error %d %s), fall back to BufferQueue for buffer management!",
                    __FUNCTION__, mId, res, strerror(-res));
        }
    }

    return OK;
}

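// Obtains the next output buffer, either by attaching one handed out by the
// buffer manager or by dequeueing (singly or in batches) from the Surface, and
// records dequeue latency. If a managed dequeue times out, the buffer manager
// is asked again for a buffer to attach.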
status_t Camera3OutputStream::getBufferLockedCommon(ANativeWindowBuffer** anb, int* fenceFd) {
    ATRACE_HFR_CALL();
    status_t res;

    if ((res = getBufferPreconditionCheckLocked()) != OK) {
        return res;
    }

    bool gotBufferFromManager = false;

    if (mUseBufferManager) {
        sp<GraphicBuffer> gb;
        res = mBufferManager->getBufferForStream(getId(), getStreamSetId(),
                isMultiResolution(), &gb, fenceFd);
        if (res == OK) {
            // Attach this buffer to the bufferQueue: the buffer will be in dequeue state after a
            // successful return.
            *anb = gb.get();
            res = mConsumer->attachBuffer(*anb);
            if (shouldLogError(res, mState)) {
                ALOGE("%s: Stream %d: Can't attach the output buffer to this surface: %s (%d)",
                        __FUNCTION__, mId, strerror(-res), res);
            }
            if (res != OK) {
                checkRetAndSetAbandonedLocked(res);
                return res;
            }
            gotBufferFromManager = true;
            ALOGV("Stream %d: Attached new buffer", getId());
        } else if (res == ALREADY_EXISTS) {
            // Have sufficient free buffers already attached, can just
            // dequeue from buffer queue
            ALOGV("Stream %d: Reusing attached buffer", getId());
            gotBufferFromManager = false;
        } else if (res != OK) {
            ALOGE("%s: Stream %d: Can't get next output buffer from buffer manager: %s (%d)",
                    __FUNCTION__, mId, strerror(-res), res);
            return res;
        }
    }
    if (!gotBufferFromManager) {
        /**
         * Release the lock briefly to avoid deadlock in the scenario below:
         * Thread 1: StreamingProcessor::startStream -> Camera3Stream::isConfiguring().
         * This thread acquired the StreamingProcessor lock and tries to lock the Camera3Stream
         * lock.
         * Thread 2: Camera3Stream::returnBuffer->StreamingProcessor::onFrameAvailable().
         * This thread acquired the Camera3Stream lock and bufferQueue lock, and tries to lock
         * the StreamingProcessor lock.
         * Thread 3: Camera3Stream::getBuffer(). This thread acquired the Camera3Stream lock
         * and tries to lock the bufferQueue lock.
         * That would be a circular locking dependency.
         */
        sp<Surface> consumer = mConsumer;
        size_t remainingBuffers = (mState == STATE_PREPARING ? mTotalBufferCount :
                camera_stream::max_buffers) - mHandoutTotalBufferCount;
        mLock.unlock();

        nsecs_t dequeueStart = systemTime(SYSTEM_TIME_MONOTONIC);

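        // With a batch size of 1 this is a plain dequeueBuffer call. For
        // larger batch sizes, the cached batch is refilled via dequeueBuffers()
        // when it runs empty and buffers are then handed out from the cache.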
        size_t batchSize = mBatchSize.load();
        if (batchSize == 1) {
            sp<ANativeWindow> anw = consumer;
            res = anw->dequeueBuffer(anw.get(), anb, fenceFd);
        } else {
            std::unique_lock<std::mutex> batchLock(mBatchLock);
            res = OK;
            if (mBatchedBuffers.size() == 0) {
                if (remainingBuffers == 0) {
                    ALOGE("%s: cannot get buffer while all buffers are handed out", __FUNCTION__);
                    return INVALID_OPERATION;
                }
                if (batchSize > remainingBuffers) {
                    batchSize = remainingBuffers;
                }
                batchLock.unlock();
                // Refill batched buffers
                std::vector<Surface::BatchBuffer> batchedBuffers;
                batchedBuffers.resize(batchSize);
                res = consumer->dequeueBuffers(&batchedBuffers);
                batchLock.lock();
                if (res != OK) {
                    ALOGE("%s: batch dequeueBuffers call failed! %s (%d)",
                            __FUNCTION__, strerror(-res), res);
                } else {
                    mBatchedBuffers = std::move(batchedBuffers);
                }
            }

            if (res == OK) {
                // Dispatch batch buffers
                *anb = mBatchedBuffers.back().buffer;
                *fenceFd = mBatchedBuffers.back().fenceFd;
                mBatchedBuffers.pop_back();
            }
        }

        nsecs_t dequeueEnd = systemTime(SYSTEM_TIME_MONOTONIC);
        mDequeueBufferLatency.add(dequeueStart, dequeueEnd);

        mLock.lock();

        if (mUseBufferManager && res == TIMED_OUT) {
            checkRemovedBuffersLocked();

            sp<GraphicBuffer> gb;
            res = mBufferManager->getBufferForStream(
                    getId(), getStreamSetId(), isMultiResolution(),
                    &gb, fenceFd, /*noFreeBuffer*/true);

            if (res == OK) {
                // Attach this buffer to the bufferQueue: the buffer will be in dequeue state after
                // a successful return.
                *anb = gb.get();
                res = mConsumer->attachBuffer(*anb);
                gotBufferFromManager = true;
                ALOGV("Stream %d: Attached new buffer", getId());

                if (res != OK) {
                    if (shouldLogError(res, mState)) {
                        ALOGE("%s: Stream %d: Can't attach the output buffer to this surface:"
                                " %s (%d)", __FUNCTION__, mId, strerror(-res), res);
                    }
                    checkRetAndSetAbandonedLocked(res);
                    return res;
                }
            } else {
                ALOGE("%s: Stream %d: Can't get next output buffer from buffer manager:"
                        " %s (%d)", __FUNCTION__, mId, strerror(-res), res);
                return res;
            }
        } else if (res != OK) {
            if (shouldLogError(res, mState)) {
                ALOGE("%s: Stream %d: Can't dequeue next output buffer: %s (%d)",
                        __FUNCTION__, mId, strerror(-res), res);
            }
            checkRetAndSetAbandonedLocked(res);
            return res;
        }
    }

    if (res == OK) {
        checkRemovedBuffersLocked();
    }

    return res;
}

void Camera3OutputStream::checkRemovedBuffersLocked(bool notifyBufferManager) {
    std::vector<sp<GraphicBuffer>> removedBuffers;
    status_t res = mConsumer->getAndFlushRemovedBuffers(&removedBuffers);
    if (res == OK) {
        onBuffersRemovedLocked(removedBuffers);

        if (notifyBufferManager && mUseBufferManager && removedBuffers.size() > 0) {
            mBufferManager->onBuffersRemoved(getId(), getStreamSetId(), isMultiResolution(),
                    removedBuffers.size());
        }
    }
}

void Camera3OutputStream::checkRetAndSetAbandonedLocked(status_t res) {
    // Only transition to STATE_ABANDONED from STATE_CONFIGURED. (If it is
    // STATE_PREPARING, let prepareNextBuffer handle the error.)
    if ((res == NO_INIT || res == DEAD_OBJECT) && mState == STATE_CONFIGURED) {
        mState = STATE_ABANDONED;
    }
}

bool Camera3OutputStream::shouldLogError(status_t res, StreamState state) {
    if (res == OK) {
        return false;
    }
    if ((res == DEAD_OBJECT || res == NO_INIT) && state == STATE_ABANDONED) {
        return false;
    }
    return true;
}

void Camera3OutputStream::onCachedBufferQueued() {
    Mutex::Autolock l(mLock);
    mCachedOutputBufferCount--;
    // Signal whoever is waiting for the buffer to be returned to the buffer
    // queue.
    mOutputBufferReturnedSignal.signal();
}

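// Disconnects from the ANativeWindow and, when the buffer manager is in use,
// unregisters the stream from it. A DEAD_OBJECT result from the native window
// is tolerated, since the consumer's process may already be gone.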
Eino-Ville Talvalafd58f1a2013-03-06 16:20:06 -0800929status_t Camera3OutputStream::disconnectLocked() {
930 status_t res;
931
Igor Murashkine3a9f962013-05-08 18:03:15 -0700932 if ((res = Camera3IOStreamBase::disconnectLocked()) != OK) {
933 return res;
Eino-Ville Talvalafd58f1a2013-03-06 16:20:06 -0800934 }
935
Zhijun He5d677d12016-05-29 16:52:39 -0700936 // Stream configuration was not finished (can only be in STATE_IN_CONFIG or STATE_CONSTRUCTED
937 // state), don't need change the stream state, return OK.
938 if (mConsumer == nullptr) {
939 return OK;
940 }
941
Yin-Chia Yeh14ef48d2020-02-10 15:06:37 -0800942 returnPrefetchedBuffersLocked();
943
Shuzhen Wangdc9aa822022-05-16 10:04:17 -0700944 if (mPreviewFrameSpacer != nullptr) {
945 mPreviewFrameSpacer->requestExit();
946 }
947
Zhijun He125684a2015-12-26 15:07:30 -0800948 ALOGV("%s: disconnecting stream %d from native window", __FUNCTION__, getId());
949
Igor Murashkine3a9f962013-05-08 18:03:15 -0700950 res = native_window_api_disconnect(mConsumer.get(),
951 NATIVE_WINDOW_API_CAMERA);
Eino-Ville Talvalafd58f1a2013-03-06 16:20:06 -0800952 /**
953 * This is not an error. if client calling process dies, the window will
954 * also die and all calls to it will return DEAD_OBJECT, thus it's already
955 * "disconnected"
956 */
957 if (res == DEAD_OBJECT) {
958 ALOGW("%s: While disconnecting stream %d from native window, the"
959 " native window died from under us", __FUNCTION__, mId);
960 }
961 else if (res != OK) {
Igor Murashkine3a9f962013-05-08 18:03:15 -0700962 ALOGE("%s: Unable to disconnect stream %d from native window "
963 "(error %d %s)",
964 __FUNCTION__, mId, res, strerror(-res));
Eino-Ville Talvalafd58f1a2013-03-06 16:20:06 -0800965 mState = STATE_ERROR;
966 return res;
967 }
968
Zhijun He125684a2015-12-26 15:07:30 -0800969 // Since device is already idle, there is no getBuffer call to buffer manager, unregister the
970 // stream at this point should be safe.
971 if (mUseBufferManager) {
Shuzhen Wang83bff122020-11-20 15:51:39 -0800972 res = mBufferManager->unregisterStream(getId(), getStreamSetId(), isMultiResolution());
Zhijun He125684a2015-12-26 15:07:30 -0800973 if (res != OK) {
974 ALOGE("%s: Unable to unregister stream %d from buffer manager "
975 "(error %d %s)", __FUNCTION__, mId, res, strerror(-res));
976 mState = STATE_ERROR;
977 return res;
978 }
979 // Note that, to make prepare/teardown case work, we must not mBufferManager.clear(), as
980 // the stream is still in usable state after this call.
981 mUseBufferManager = false;
982 }
983
Igor Murashkine3a9f962013-05-08 18:03:15 -0700984 mState = (mState == STATE_IN_RECONFIG) ? STATE_IN_CONFIG
985 : STATE_CONSTRUCTED;
Shuzhen Wang686f6442017-06-20 16:16:04 -0700986
987 mDequeueBufferLatency.log("Stream %d dequeueBuffer latency histogram", mId);
988 mDequeueBufferLatency.reset();
Eino-Ville Talvalafd58f1a2013-03-06 16:20:06 -0800989 return OK;
990}
991
Emilian Peev050f5dc2017-05-18 14:43:56 +0100992status_t Camera3OutputStream::getEndpointUsage(uint64_t *usage) const {
Eino-Ville Talvalab2f5b192013-07-30 14:36:03 -0700993
994 status_t res;
Shuzhen Wang0129d522016-10-30 22:43:41 -0700995
Zhijun He5d677d12016-05-29 16:52:39 -0700996 if (mConsumer == nullptr) {
997 // mConsumerUsage was sanitized before the Camera3OutputStream was constructed.
998 *usage = mConsumerUsage;
999 return OK;
1000 }
1001
Shuzhen Wang0129d522016-10-30 22:43:41 -07001002 res = getEndpointUsageForSurface(usage, mConsumer);
1003
1004 return res;
1005}
1006
Emilian Peev35ae8262018-11-08 13:11:32 +00001007void Camera3OutputStream::applyZSLUsageQuirk(int format, uint64_t *consumerUsage /*inout*/) {
1008 if (consumerUsage == nullptr) {
1009 return;
1010 }
Shuzhen Wang0129d522016-10-30 22:43:41 -07001011
Chien-Yu Chen618ff8a2015-03-13 11:27:17 -07001012 // If an opaque output stream's endpoint is ImageReader, add
Yin-Chia Yeh47cf8e62017-04-04 13:00:03 -07001013 // GRALLOC_USAGE_HW_CAMERA_ZSL to the usage so HAL knows it will be used
Chien-Yu Chen618ff8a2015-03-13 11:27:17 -07001014 // for the ZSL use case.
1015 // Assume it's for ImageReader if the consumer usage doesn't have any of these bits set:
1016 // 1. GRALLOC_USAGE_HW_TEXTURE
1017 // 2. GRALLOC_USAGE_HW_RENDER
1018 // 3. GRALLOC_USAGE_HW_COMPOSER
1019 // 4. GRALLOC_USAGE_HW_VIDEO_ENCODER
Emilian Peev35ae8262018-11-08 13:11:32 +00001020 if (format == HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED &&
1021 (*consumerUsage & (GRALLOC_USAGE_HW_TEXTURE | GRALLOC_USAGE_HW_RENDER |
Shuzhen Wang0129d522016-10-30 22:43:41 -07001022 GRALLOC_USAGE_HW_COMPOSER | GRALLOC_USAGE_HW_VIDEO_ENCODER)) == 0) {
Emilian Peev35ae8262018-11-08 13:11:32 +00001023 *consumerUsage |= GRALLOC_USAGE_HW_CAMERA_ZSL;
Chien-Yu Chen618ff8a2015-03-13 11:27:17 -07001024 }
Emilian Peev35ae8262018-11-08 13:11:32 +00001025}
Chien-Yu Chen618ff8a2015-03-13 11:27:17 -07001026
Emilian Peev35ae8262018-11-08 13:11:32 +00001027status_t Camera3OutputStream::getEndpointUsageForSurface(uint64_t *usage,
1028 const sp<Surface>& surface) const {
1029 status_t res;
1030 uint64_t u = 0;
1031
1032 res = native_window_get_consumer_usage(static_cast<ANativeWindow*>(surface.get()), &u);
Emilian Peevf4816702020-04-03 15:44:51 -07001033 applyZSLUsageQuirk(camera_stream::format, &u);
Chien-Yu Chen618ff8a2015-03-13 11:27:17 -07001034 *usage = u;
Eino-Ville Talvalab2f5b192013-07-30 14:36:03 -07001035 return res;
1036}
1037
Chien-Yu Chen85a64552015-08-28 15:46:12 -07001038bool Camera3OutputStream::isVideoStream() const {
Emilian Peev050f5dc2017-05-18 14:43:56 +01001039 uint64_t usage = 0;
Chien-Yu Chen85a64552015-08-28 15:46:12 -07001040 status_t res = getEndpointUsage(&usage);
1041 if (res != OK) {
1042 ALOGE("%s: getting end point usage failed: %s (%d).", __FUNCTION__, strerror(-res), res);
1043 return false;
1044 }
1045
1046 return (usage & GRALLOC_USAGE_HW_VIDEO_ENCODER) != 0;
1047}
1048
Zhijun He125684a2015-12-26 15:07:30 -08001049status_t Camera3OutputStream::setBufferManager(sp<Camera3BufferManager> bufferManager) {
1050 Mutex::Autolock l(mLock);
1051 if (mState != STATE_CONSTRUCTED) {
Zhijun He5d677d12016-05-29 16:52:39 -07001052        ALOGE("%s: this method can only be called when the stream is in CONSTRUCTED state.",
Zhijun He125684a2015-12-26 15:07:30 -08001053 __FUNCTION__);
1054 return INVALID_OPERATION;
1055 }
1056 mBufferManager = bufferManager;
1057
1058 return OK;
1059}
1060
Emilian Peev40ead602017-09-26 15:46:36 +01001061status_t Camera3OutputStream::updateStream(const std::vector<sp<Surface>> &/*outputSurfaces*/,
1062 const std::vector<OutputStreamInfo> &/*outputInfo*/,
1063 const std::vector<size_t> &/*removedSurfaceIds*/,
1064 KeyedVector<sp<Surface>, size_t> * /*outputMapo*/) {
1065 ALOGE("%s: this method is not supported!", __FUNCTION__);
1066 return INVALID_OPERATION;
1067}
1068
Shuzhen Wang0160ddd2019-08-15 09:11:56 -07001069void Camera3OutputStream::BufferProducerListener::onBufferReleased() {
Zhijun He125684a2015-12-26 15:07:30 -08001070 sp<Camera3OutputStream> stream = mParent.promote();
1071 if (stream == nullptr) {
1072 ALOGV("%s: Parent camera3 output stream was destroyed", __FUNCTION__);
1073 return;
1074 }
1075
1076 Mutex::Autolock l(stream->mLock);
1077 if (!(stream->mUseBufferManager)) {
1078 return;
1079 }
1080
Eino-Ville Talvala77c1a352016-06-13 12:32:43 -07001081 ALOGV("Stream %d: Buffer released", stream->getId());
Yin-Chia Yeh89954d92017-05-21 17:28:53 -07001082 bool shouldFreeBuffer = false;
Eino-Ville Talvala77c1a352016-06-13 12:32:43 -07001083 status_t res = stream->mBufferManager->onBufferReleased(
Shuzhen Wang83bff122020-11-20 15:51:39 -08001084 stream->getId(), stream->getStreamSetId(), stream->isMultiResolution(),
1085 &shouldFreeBuffer);
Eino-Ville Talvala77c1a352016-06-13 12:32:43 -07001086 if (res != OK) {
1087 ALOGE("%s: signaling buffer release to buffer manager failed: %s (%d).", __FUNCTION__,
1088 strerror(-res), res);
1089 stream->mState = STATE_ERROR;
1090 }
Yin-Chia Yeh89954d92017-05-21 17:28:53 -07001091
1092 if (shouldFreeBuffer) {
1093 sp<GraphicBuffer> buffer;
1094 // Detach and free a buffer (when buffer goes out of scope)
1095 stream->detachBufferLocked(&buffer, /*fenceFd*/ nullptr);
1096 if (buffer.get() != nullptr) {
1097 stream->mBufferManager->notifyBufferRemoved(
Shuzhen Wang83bff122020-11-20 15:51:39 -08001098 stream->getId(), stream->getStreamSetId(), stream->isMultiResolution());
Yin-Chia Yeh89954d92017-05-21 17:28:53 -07001099 }
1100 }
Eino-Ville Talvala77c1a352016-06-13 12:32:43 -07001101}
1102
Shuzhen Wang0160ddd2019-08-15 09:11:56 -07001103void Camera3OutputStream::BufferProducerListener::onBuffersDiscarded(
1104 const std::vector<sp<GraphicBuffer>>& buffers) {
1105 sp<Camera3OutputStream> stream = mParent.promote();
1106 if (stream == nullptr) {
1107 ALOGV("%s: Parent camera3 output stream was destroyed", __FUNCTION__);
1108 return;
1109 }
1110
1111 if (buffers.size() > 0) {
1112 Mutex::Autolock l(stream->mLock);
1113 stream->onBuffersRemovedLocked(buffers);
1114 if (stream->mUseBufferManager) {
1115 stream->mBufferManager->onBuffersRemoved(stream->getId(),
Shuzhen Wang83bff122020-11-20 15:51:39 -08001116 stream->getStreamSetId(), stream->isMultiResolution(), buffers.size());
Shuzhen Wang0160ddd2019-08-15 09:11:56 -07001117 }
1118 ALOGV("Stream %d: %zu Buffers discarded.", stream->getId(), buffers.size());
1119 }
1120}
1121
Yin-Chia Yeh017d49c2017-03-31 19:11:00 -07001122void Camera3OutputStream::onBuffersRemovedLocked(
1123 const std::vector<sp<GraphicBuffer>>& removedBuffers) {
Yin-Chia Yehdb1e8642017-07-14 15:19:30 -07001124 sp<Camera3StreamBufferFreedListener> callback = mBufferFreedListener.promote();
Yin-Chia Yeh017d49c2017-03-31 19:11:00 -07001125 if (callback != nullptr) {
Chih-Hung Hsieh48fc6192017-08-04 14:37:31 -07001126 for (const auto& gb : removedBuffers) {
Yin-Chia Yeh017d49c2017-03-31 19:11:00 -07001127 callback->onBufferFreed(mId, gb->handle);
1128 }
1129 }
1130}
1131
Eino-Ville Talvala77c1a352016-06-13 12:32:43 -07001132status_t Camera3OutputStream::detachBuffer(sp<GraphicBuffer>* buffer, int* fenceFd) {
1133 Mutex::Autolock l(mLock);
Yin-Chia Yeh89954d92017-05-21 17:28:53 -07001134 return detachBufferLocked(buffer, fenceFd);
1135}
Eino-Ville Talvala77c1a352016-06-13 12:32:43 -07001136
Yin-Chia Yeh89954d92017-05-21 17:28:53 -07001137status_t Camera3OutputStream::detachBufferLocked(sp<GraphicBuffer>* buffer, int* fenceFd) {
Eino-Ville Talvala77c1a352016-06-13 12:32:43 -07001138 ALOGV("Stream %d: detachBuffer", getId());
1139 if (buffer == nullptr) {
1140 return BAD_VALUE;
1141 }
1142
Zhijun He125684a2015-12-26 15:07:30 -08001143 sp<Fence> fence;
Eino-Ville Talvala77c1a352016-06-13 12:32:43 -07001144 status_t res = mConsumer->detachNextBuffer(buffer, &fence);
Zhijun He125684a2015-12-26 15:07:30 -08001145 if (res == NO_MEMORY) {
1146        // This may rarely happen; it indicates that the released buffer was freed by another
1147        // call (e.g., attachBuffer, dequeueBuffer, etc.) before reaching here. We should notify the
1148 // buffer manager that this buffer has been freed. It's not fatal, but should be avoided,
1149 // therefore log a warning.
Eino-Ville Talvala77c1a352016-06-13 12:32:43 -07001150 *buffer = 0;
Zhijun He125684a2015-12-26 15:07:30 -08001151 ALOGW("%s: the released buffer has already been freed by the buffer queue!", __FUNCTION__);
1152 } else if (res != OK) {
Eino-Ville Talvalaff51b472016-06-28 15:26:19 -07001153 // Treat other errors as abandonment
Yin-Chia Yeha1b56c82019-03-27 15:50:39 -07001154 if (shouldLogError(res, mState)) {
1155 ALOGE("%s: detach next buffer failed: %s (%d).", __FUNCTION__, strerror(-res), res);
1156 }
Eino-Ville Talvalaff51b472016-06-28 15:26:19 -07001157 mState = STATE_ABANDONED;
Eino-Ville Talvala77c1a352016-06-13 12:32:43 -07001158 return res;
Zhijun He125684a2015-12-26 15:07:30 -08001159 }
1160
Eino-Ville Talvala77c1a352016-06-13 12:32:43 -07001161 if (fenceFd != nullptr) {
1162        if (fence != 0 && fence->isValid()) {
1163 *fenceFd = fence->dup();
1164 } else {
1165 *fenceFd = -1;
1166 }
Zhijun He125684a2015-12-26 15:07:30 -08001167 }
Eino-Ville Talvala77c1a352016-06-13 12:32:43 -07001168
Yin-Chia Yehbf1b8b92019-03-06 14:56:08 -08001169 // Here we assume detachBuffer is called by buffer manager so it doesn't need to be notified
1170 checkRemovedBuffersLocked(/*notifyBufferManager*/false);
Yin-Chia Yeh017d49c2017-03-31 19:11:00 -07001171 return res;
Zhijun He125684a2015-12-26 15:07:30 -08001172}
Shuzhen Wang13a69632016-01-26 09:51:07 -08001173
Chien-Yu Chena936ac22017-10-23 15:59:49 -07001174status_t Camera3OutputStream::dropBuffers(bool dropping) {
1175 Mutex::Autolock l(mLock);
1176 mDropBuffers = dropping;
1177 return OK;
1178}
1179
Austin Borger1c1bee02023-06-01 16:51:35 -07001180const std::string& Camera3OutputStream::getPhysicalCameraId() const {
Shuzhen Wang5c22c152017-12-31 17:12:25 -08001181 Mutex::Autolock l(mLock);
1182 return physicalCameraId();
1183}
1184
Shuzhen Wangbee0f0a2017-01-24 14:51:37 -08001185status_t Camera3OutputStream::notifyBufferReleased(ANativeWindowBuffer* /*anwBuffer*/) {
Shuzhen Wang0129d522016-10-30 22:43:41 -07001186 return OK;
1187}
1188
1189bool Camera3OutputStream::isConsumerConfigurationDeferred(size_t surface_id) const {
Zhijun He5d677d12016-05-29 16:52:39 -07001190 Mutex::Autolock l(mLock);
Shuzhen Wang0129d522016-10-30 22:43:41 -07001191
1192 if (surface_id != 0) {
Shuzhen Wang758c2152017-01-10 18:26:18 -08001193 ALOGE("%s: surface_id %zu for Camera3OutputStream should be 0!", __FUNCTION__, surface_id);
Shuzhen Wang0129d522016-10-30 22:43:41 -07001194 }
Zhijun He5d677d12016-05-29 16:52:39 -07001195 return mConsumer == nullptr;
1196}
1197
Shuzhen Wang758c2152017-01-10 18:26:18 -08001198status_t Camera3OutputStream::setConsumers(const std::vector<sp<Surface>>& consumers) {
Shuzhen Wangbee0f0a2017-01-24 14:51:37 -08001199 Mutex::Autolock l(mLock);
Shuzhen Wang758c2152017-01-10 18:26:18 -08001200 if (consumers.size() != 1) {
1201 ALOGE("%s: it's illegal to set %zu consumer surfaces!",
1202 __FUNCTION__, consumers.size());
1203 return INVALID_OPERATION;
1204 }
1205 if (consumers[0] == nullptr) {
1206        ALOGE("%s: it's illegal to set a null consumer surface!", __FUNCTION__);
Zhijun He5d677d12016-05-29 16:52:39 -07001207 return INVALID_OPERATION;
1208 }
1209
1210 if (mConsumer != nullptr) {
1211 ALOGE("%s: consumer surface was already set!", __FUNCTION__);
1212 return INVALID_OPERATION;
1213 }
1214
Shuzhen Wang758c2152017-01-10 18:26:18 -08001215 mConsumer = consumers[0];
Zhijun He5d677d12016-05-29 16:52:39 -07001216 return OK;
1217}
1218
Shuzhen Wang13a69632016-01-26 09:51:07 -08001219bool Camera3OutputStream::isConsumedByHWComposer() const {
Emilian Peev050f5dc2017-05-18 14:43:56 +01001220 uint64_t usage = 0;
Shuzhen Wang13a69632016-01-26 09:51:07 -08001221 status_t res = getEndpointUsage(&usage);
1222 if (res != OK) {
1223 ALOGE("%s: getting end point usage failed: %s (%d).", __FUNCTION__, strerror(-res), res);
1224 return false;
1225 }
1226
1227 return (usage & GRALLOC_USAGE_HW_COMPOSER) != 0;
1228}
1229
Zhijun Hef0645c12016-08-02 00:58:11 -07001230bool Camera3OutputStream::isConsumedByHWTexture() const {
Emilian Peev050f5dc2017-05-18 14:43:56 +01001231 uint64_t usage = 0;
Zhijun Hef0645c12016-08-02 00:58:11 -07001232 status_t res = getEndpointUsage(&usage);
1233 if (res != OK) {
1234 ALOGE("%s: getting end point usage failed: %s (%d).", __FUNCTION__, strerror(-res), res);
1235 return false;
1236 }
1237
1238 return (usage & GRALLOC_USAGE_HW_TEXTURE) != 0;
1239}
1240
Shuzhen Wangfe8a2a32022-05-10 18:18:54 -07001241bool Camera3OutputStream::isConsumedByCPU() const {
1242 uint64_t usage = 0;
1243 status_t res = getEndpointUsage(&usage);
1244 if (res != OK) {
1245 ALOGE("%s: getting end point usage failed: %s (%d).", __FUNCTION__, strerror(-res), res);
1246 return false;
1247 }
1248
1249 return (usage & GRALLOC_USAGE_SW_READ_MASK) != 0;
1250}
1251
Shuzhen Wangabbcb6b2020-12-09 22:32:44 -08001252void Camera3OutputStream::dumpImageToDisk(nsecs_t timestamp,
1253 ANativeWindowBuffer* anwBuffer, int fence) {
1254    // Derive the output file name
1255 std::string fileExtension = "jpg";
1256 char imageFileName[64];
1257 time_t now = time(0);
1258 tm *localTime = localtime(&now);
1259 snprintf(imageFileName, sizeof(imageFileName), "IMG_%4d%02d%02d_%02d%02d%02d_%" PRId64 ".%s",
Shuzhen Wang6a8237f2021-07-13 14:42:57 -07001260 1900 + localTime->tm_year, localTime->tm_mon + 1, localTime->tm_mday,
Shuzhen Wangabbcb6b2020-12-09 22:32:44 -08001261 localTime->tm_hour, localTime->tm_min, localTime->tm_sec,
1262 timestamp, fileExtension.c_str());
1263
1264 // Lock the image for CPU read
1265 sp<GraphicBuffer> graphicBuffer = GraphicBuffer::from(anwBuffer);
1266 void* mapped = nullptr;
1267 base::unique_fd fenceFd(dup(fence));
1268 status_t res = graphicBuffer->lockAsync(GraphicBuffer::USAGE_SW_READ_OFTEN, &mapped,
Emilian Peev293bd972022-08-05 17:28:06 -07001269 fenceFd.release());
Shuzhen Wangabbcb6b2020-12-09 22:32:44 -08001270 if (res != OK) {
1271 ALOGE("%s: Failed to lock the buffer: %s (%d)", __FUNCTION__, strerror(-res), res);
1272 return;
1273 }
1274
1275 // Figure out actual file size
1276 auto actualJpegSize = android::camera2::JpegProcessor::findJpegSize((uint8_t*)mapped, mMaxSize);
1277 if (actualJpegSize == 0) {
1278 actualJpegSize = mMaxSize;
1279 }
1280
1281 // Output image data to file
1282 std::string filePath = "/data/misc/cameraserver/";
1283 filePath += imageFileName;
Austin Borger1c1bee02023-06-01 16:51:35 -07001284 std::ofstream imageFile(filePath, std::ofstream::binary);
Shuzhen Wangabbcb6b2020-12-09 22:32:44 -08001285 if (!imageFile.is_open()) {
1286 ALOGE("%s: Unable to create file %s", __FUNCTION__, filePath.c_str());
1287 graphicBuffer->unlock();
1288 return;
1289 }
1290 imageFile.write((const char*)mapped, actualJpegSize);
1291
1292 graphicBuffer->unlock();
1293}
1294
Yin-Chia Yeh14ef48d2020-02-10 15:06:37 -08001295status_t Camera3OutputStream::setBatchSize(size_t batchSize) {
1296 Mutex::Autolock l(mLock);
Yin-Chia Yeh14ef48d2020-02-10 15:06:37 -08001297 if (batchSize == 0) {
1298 ALOGE("%s: invalid batch size 0", __FUNCTION__);
1299 return BAD_VALUE;
1300 }
1301
1302 if (mUseBufferManager) {
1303        ALOGE("%s: batch operation is not supported with the buffer manager", __FUNCTION__);
1304 return INVALID_OPERATION;
1305 }
1306
1307 if (!isVideoStream()) {
1308        ALOGE("%s: batch operation is not supported with a non-video stream", __FUNCTION__);
1309 return INVALID_OPERATION;
1310 }
1311
Shuzhen Wangc7629462021-07-12 15:02:58 -07001312 if (camera_stream::max_buffers < batchSize) {
1313 ALOGW("%s: batch size is capped by max_buffers %d", __FUNCTION__,
1314 camera_stream::max_buffers);
1315 batchSize = camera_stream::max_buffers;
Yin-Chia Yeh14ef48d2020-02-10 15:06:37 -08001316 }
Shuzhen Wangc7629462021-07-12 15:02:58 -07001317
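    // compare_exchange_strong only succeeds while mBatchSize still holds its default of 1,
    // so the batch size can effectively be set once per stream; any later attempt to change
    // it falls through to the error below.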
1318 size_t defaultBatchSize = 1;
1319 if (!mBatchSize.compare_exchange_strong(defaultBatchSize, batchSize)) {
1320        ALOGE("%s: changing batch size from %zu to %zu dynamically is not supported",
1321 __FUNCTION__, defaultBatchSize, batchSize);
1322 return INVALID_OPERATION;
1323 }
1324
Yin-Chia Yeh14ef48d2020-02-10 15:06:37 -08001325 return OK;
1326}
1327
Shuzhen Wang696e4da2022-09-08 14:31:13 -07001328void Camera3OutputStream::onMinDurationChanged(nsecs_t duration, bool fixedFps) {
Shuzhen Wang00abbeb2022-02-25 17:14:42 -08001329 Mutex::Autolock l(mLock);
1330 mMinExpectedDuration = duration;
Shuzhen Wang696e4da2022-09-08 14:31:13 -07001331 mFixedFps = fixedFps;
Shuzhen Wang00abbeb2022-02-25 17:14:42 -08001332}
1333
Shuzhen Wang16610a62022-12-15 22:38:07 -08001334void Camera3OutputStream::setStreamUseCase(int64_t streamUseCase) {
1335 Mutex::Autolock l(mLock);
1336 camera_stream::use_case = streamUseCase;
1337}
1338
Yin-Chia Yeh14ef48d2020-02-10 15:06:37 -08001339void Camera3OutputStream::returnPrefetchedBuffersLocked() {
Shuzhen Wangc7629462021-07-12 15:02:58 -07001340 std::vector<Surface::BatchBuffer> batchedBuffers;
Yin-Chia Yeh14ef48d2020-02-10 15:06:37 -08001341
Shuzhen Wangc7629462021-07-12 15:02:58 -07001342 {
1343 std::lock_guard<std::mutex> batchLock(mBatchLock);
1344 if (mBatchedBuffers.size() != 0) {
1345 ALOGW("%s: %zu extra prefetched buffers detected. Returning",
1346 __FUNCTION__, mBatchedBuffers.size());
1347 batchedBuffers = std::move(mBatchedBuffers);
1348 }
1349 }
1350
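    // Note: cancelBuffers() is called outside the mBatchLock scope above, likely so the
    // batch lock is not held across the (potentially blocking) Surface/BufferQueue call.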
1351 if (batchedBuffers.size() > 0) {
1352 mConsumer->cancelBuffers(batchedBuffers);
Yin-Chia Yeh14ef48d2020-02-10 15:06:37 -08001353 }
1354}
1355
Ravneet446b3bf2023-07-11 19:26:38 +00001356nsecs_t Camera3OutputStream::syncTimestampToDisplayLocked(nsecs_t t, sp<Fence> releaseFence) {
Shuzhen Wang35bd3552022-09-21 16:56:04 -07001357 nsecs_t currentTime = systemTime();
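    // Variable-FPS streams are not synced to the display: just record the capture and
    // arrival times and return the timestamp unchanged.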
1358 if (!mFixedFps) {
1359 mLastCaptureTime = t;
1360 mLastPresentTime = currentTime;
1361 return t;
1362 }
1363
Shuzhen Wang00abbeb2022-02-25 17:14:42 -08001364 ParcelableVsyncEventData parcelableVsyncEventData;
1365 auto res = mDisplayEventReceiver.getLatestVsyncEventData(&parcelableVsyncEventData);
1366 if (res != OK) {
1367 ALOGE("%s: Stream %d: Error getting latest vsync event data: %s (%d)",
1368 __FUNCTION__, mId, strerror(-res), res);
Shuzhen Wang14c62b82022-04-11 09:37:05 -07001369 mLastCaptureTime = t;
Shuzhen Wang35bd3552022-09-21 16:56:04 -07001370 mLastPresentTime = currentTime;
Shuzhen Wang00abbeb2022-02-25 17:14:42 -08001371 return t;
1372 }
1373
1374 const VsyncEventData& vsyncEventData = parcelableVsyncEventData.vsync;
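    // Require candidate presentation times to be at least half a vsync after the previous
    // frame's presentation time, so that two frames don't collapse onto the same vsync.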
Shuzhen Wang696e4da2022-09-08 14:31:13 -07001375 nsecs_t minPresentT = mLastPresentTime + vsyncEventData.frameInterval / 2;
Shuzhen Wang00abbeb2022-02-25 17:14:42 -08001376
Shuzhen Wang696e4da2022-09-08 14:31:13 -07001377    // Find the best presentation time without worrying about the previous frame's
1378    // presentation time if the capture interval is more than kSpacingResetIntervalNs.
1379    //
1380    // When consecutive frames are more than 50 ms apart (3 vsyncs for a 60hz refresh rate),
1381 // there is little risk in starting over and finding the earliest vsync to latch onto.
1382 // - Update captureToPresentTime offset to be used for later frames.
1383 // - Example use cases:
1384 // - when frame rate drops down to below 20 fps, or
1385 // - A new streaming session starts (stopPreview followed by
1386 // startPreview)
1387 //
Shuzhen Wang34a5e282022-06-17 14:48:35 -07001388 nsecs_t captureInterval = t - mLastCaptureTime;
Shuzhen Wang696e4da2022-09-08 14:31:13 -07001389 if (captureInterval > kSpacingResetIntervalNs) {
Rachel Lee86d90eb2023-04-25 14:37:34 -07001390 for (size_t i = 0; i < vsyncEventData.frameTimelinesLength; i++) {
Shuzhen Wang34a5e282022-06-17 14:48:35 -07001391 const auto& timeline = vsyncEventData.frameTimelines[i];
1392 if (timeline.deadlineTimestamp >= currentTime &&
1393 timeline.expectedPresentationTime > minPresentT) {
1394 nsecs_t presentT = vsyncEventData.frameTimelines[i].expectedPresentationTime;
1395 mCaptureToPresentOffset = presentT - t;
1396 mLastCaptureTime = t;
1397 mLastPresentTime = presentT;
1398
Shuzhen Wang0897d592023-04-07 12:48:05 -07001399 // If releaseFence is available, store the fence to check signal
1400 // time later.
1401 mRefVsyncData = vsyncEventData;
1402 mReferenceCaptureTime = t;
1403 mReferenceArrivalTime = currentTime;
Ravneet446b3bf2023-07-11 19:26:38 +00001404 if (releaseFence->isValid()) {
1405 mReferenceFrameFence = new Fence(releaseFence->dup());
Shuzhen Wang0897d592023-04-07 12:48:05 -07001406 } else {
1407 mFenceSignalOffset = 0;
1408 }
1409
Shuzhen Wang34a5e282022-06-17 14:48:35 -07001410                // Move the expected presentation time back by 1/3 of the frame interval to
1411                // mitigate the time drift. Due to time drift, if we directly use the
1412                // expected presentation time, two expected presentation times often
1413                // fall into the same VSYNC interval.
1414 return presentT - vsyncEventData.frameInterval/3;
Shuzhen Wang00abbeb2022-02-25 17:14:42 -08001415 }
1416 }
1417 }
1418
Shuzhen Wang0897d592023-04-07 12:48:05 -07001419 // If there is a reference frame release fence, get the signal time and
1420 // update the captureToPresentOffset.
1421 if (mReferenceFrameFence != nullptr) {
1422 mFenceSignalOffset = 0;
1423 nsecs_t signalTime = mReferenceFrameFence->getSignalTime();
1424 // Now that the fence has signaled, recalculate the offsets based on
1425 // the timeline which was actually latched
1426 if (signalTime != INT64_MAX) {
1427 for (size_t i = 0; i < mRefVsyncData.frameTimelinesLength; i++) {
1428 const auto& timeline = mRefVsyncData.frameTimelines[i];
1429 if (timeline.deadlineTimestamp >= signalTime) {
1430 nsecs_t originalOffset = mCaptureToPresentOffset;
1431 mCaptureToPresentOffset = timeline.expectedPresentationTime
1432 - mReferenceCaptureTime;
1433 mLastPresentTime = timeline.expectedPresentationTime;
1434 mFenceSignalOffset = signalTime > mReferenceArrivalTime ?
1435 signalTime - mReferenceArrivalTime : 0;
1436
1437 ALOGV("%s: Last deadline %" PRId64 " signalTime %" PRId64
1438 " original offset %" PRId64 " new offset %" PRId64
1439 " fencesignal offset %" PRId64, __FUNCTION__,
1440 timeline.deadlineTimestamp, signalTime, originalOffset,
1441 mCaptureToPresentOffset, mFenceSignalOffset);
1442 break;
1443 }
1444 }
1445 mReferenceFrameFence.clear();
1446 }
1447 }
1448
Shuzhen Wang00abbeb2022-02-25 17:14:42 -08001449 nsecs_t idealPresentT = t + mCaptureToPresentOffset;
Shuzhen Wang14c62b82022-04-11 09:37:05 -07001450 nsecs_t expectedPresentT = mLastPresentTime;
Shuzhen Wang00abbeb2022-02-25 17:14:42 -08001451 nsecs_t minDiff = INT64_MAX;
Shuzhen Wang696e4da2022-09-08 14:31:13 -07001452
1453 // In fixed FPS case, when frame durations are close to multiples of display refresh
1454 // rate, derive minimum intervals between presentation times based on minimal
Shuzhen Wang661b34f2022-05-18 22:00:19 -07001455 // expected duration. The minimum number of Vsyncs is:
1456 // - 0 if minFrameDuration in (0, 1.5] * vSyncInterval,
1457 // - 1 if minFrameDuration in (1.5, 2.5] * vSyncInterval,
1458 // - and so on.
Shuzhen Wang696e4da2022-09-08 14:31:13 -07001459 //
1460 // This spaces out the displaying of the frames so that the frame
1461 // presentations are roughly in sync with frame captures.
Shuzhen Wang661b34f2022-05-18 22:00:19 -07001462 int minVsyncs = (mMinExpectedDuration - vsyncEventData.frameInterval / 2) /
1463 vsyncEventData.frameInterval;
1464 if (minVsyncs < 0) minVsyncs = 0;
Shuzhen Wanged08fbe2022-06-21 01:00:50 -07001465 nsecs_t minInterval = minVsyncs * vsyncEventData.frameInterval;
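    // Worked example (illustrative numbers): a 30 fps stream (~33.3 ms min duration) on a
    // 60 Hz display (~16.7 ms vsync) gives (33.3 - 8.3) / 16.7 -> minVsyncs = 1, so
    // minInterval is ~16.7 ms and consecutive frames present at least one vsync apart.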
Shuzhen Wang696e4da2022-09-08 14:31:13 -07001466
1467 // In fixed FPS case, if the frame duration deviates from multiples of
1468 // display refresh rate, find the closest Vsync without requiring a minimum
1469 // number of Vsync.
1470 //
1471 // Example: (24fps camera, 60hz refresh):
1472 // capture readout: | t1 | t1 | .. | t1 | .. | t1 | .. | t1 |
1473 // display VSYNC: | t2 | t2 | ... | t2 | ... | t2 | ... | t2 |
1474 // | : 1 frame
1475 // t1 : 41.67ms
1476 // t2 : 16.67ms
1477 // t1/t2 = 2.5
1478 //
1479 // 24fps is a commonly used video frame rate. Because the capture
1480 // interval is 2.5 times of display refresh interval, the minVsyncs
1481 // calculation will directly fall at the boundary condition. In this case,
1482 // we should fall back to the basic logic of finding closest vsync
1483 // timestamp without worrying about minVsyncs.
1484 float captureToVsyncIntervalRatio = 1.0f * mMinExpectedDuration / vsyncEventData.frameInterval;
1485 float ratioDeviation = std::fabs(
1486 captureToVsyncIntervalRatio - std::roundf(captureToVsyncIntervalRatio));
1487 bool captureDeviateFromVsync = ratioDeviation >= kMaxIntervalRatioDeviation;
1488 bool cameraDisplayInSync = (mFixedFps && !captureDeviateFromVsync);
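    // Example from the 24 fps / 60 Hz case above: ratio = 41.67 / 16.67 = 2.5, so the
    // deviation is 0.5, well above the ~5% threshold; captureDeviateFromVsync is true and
    // the minVsyncs spacing constraint is skipped in favor of the closest-vsync logic.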
1489
Shuzhen Wanged08fbe2022-06-21 01:00:50 -07001490 // Find best timestamp in the vsync timelines:
Shuzhen Wang696e4da2022-09-08 14:31:13 -07001491 // - Only use at most kMaxTimelines timelines to avoid long latency
Shuzhen Wang0897d592023-04-07 12:48:05 -07001492 // - Add an extra timeline if display fence is used
Shuzhen Wang696e4da2022-09-08 14:31:13 -07001493 // - closest to the ideal presentation time,
Shuzhen Wang00abbeb2022-02-25 17:14:42 -08001494 // - deadline timestamp is greater than the current time, and
Shuzhen Wang696e4da2022-09-08 14:31:13 -07001495 // - For fixed FPS, if the capture interval doesn't deviate too much from refresh interval,
1496 // the candidate presentation time is at least minInterval in the future compared to last
1497 // presentation time.
1498 // - For variable FPS, or if the capture interval deviates from refresh
1499    // interval by more than 5%, find a presentation time closest to the
1500 // (lastPresentationTime + captureToPresentOffset) instead.
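    // mFenceSignalOffset (measured above from the reference frame's release fence) shifts
    // the deadline check forward so that only timelines the display can still latch, given
    // the observed arrival-to-fence-signal delay, are considered.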
Shuzhen Wang0897d592023-04-07 12:48:05 -07001501 int fenceAdjustment = (mFenceSignalOffset > 0) ? 1 : 0;
1502 int maxTimelines = std::min(kMaxTimelines + fenceAdjustment,
1503 (int)vsyncEventData.frameTimelinesLength);
Shuzhen Wanged08fbe2022-06-21 01:00:50 -07001504 float biasForShortDelay = 1.0f;
1505 for (int i = 0; i < maxTimelines; i ++) {
1506 const auto& vsyncTime = vsyncEventData.frameTimelines[i];
1507 if (minVsyncs > 0) {
1508 // Bias towards using smaller timeline index:
1509 // i = 0: bias = 1
1510 // i = maxTimelines-1: bias = -1
1511 biasForShortDelay = 1.0 - 2.0 * i / (maxTimelines - 1);
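            // e.g. with maxTimelines == 3 the bias runs 1, 0, -1 for i = 0, 1, 2.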
1512 }
Shuzhen Wang00abbeb2022-02-25 17:14:42 -08001513 if (std::abs(vsyncTime.expectedPresentationTime - idealPresentT) < minDiff &&
Shuzhen Wang0897d592023-04-07 12:48:05 -07001514 vsyncTime.deadlineTimestamp >= currentTime + mFenceSignalOffset &&
Shuzhen Wang696e4da2022-09-08 14:31:13 -07001515 ((!cameraDisplayInSync && vsyncTime.expectedPresentationTime > minPresentT) ||
1516 (cameraDisplayInSync && vsyncTime.expectedPresentationTime >
Kwangkyu Park1c0042b2022-12-20 00:03:17 +09001517 mLastPresentTime + minInterval +
1518 static_cast<nsecs_t>(biasForShortDelay * kTimelineThresholdNs)))) {
Shuzhen Wang00abbeb2022-02-25 17:14:42 -08001519 expectedPresentT = vsyncTime.expectedPresentationTime;
1520 minDiff = std::abs(vsyncTime.expectedPresentationTime - idealPresentT);
1521 }
1522 }
Shuzhen Wang696e4da2022-09-08 14:31:13 -07001523
Shuzhen Wang35bd3552022-09-21 16:56:04 -07001524 if (expectedPresentT == mLastPresentTime && expectedPresentT <
1525 vsyncEventData.frameTimelines[maxTimelines-1].expectedPresentationTime) {
Shuzhen Wang696e4da2022-09-08 14:31:13 -07001526 // Couldn't find a reasonable presentation time. Using last frame's
1527 // presentation time would cause a frame drop. The best option now
1528 // is to use the next VSync as long as the last presentation time
1529        // doesn't already have the maximum latency, in which case dropping the
1530 // buffer is more desired than increasing latency.
1531 //
1532 // Example: (60fps camera, 59.9hz refresh):
1533 // capture readout: | t1 | t1 | .. | t1 | .. | t1 | .. | t1 |
1534 // \ \ \ \ \ \ \ \ \
1535 // queue to BQ: | | | | | | | | |
1536 // \ \ \ \ \ \ \ \ \
1537 // display VSYNC: | t2 | t2 | ... | t2 | ... | t2 | ... | t2 |
1538 //
1539 // |: 1 frame
1540 // t1 : 16.67ms
1541 // t2 : 16.69ms
1542 //
1543 // It takes 833 frames for capture readout count and display VSYNC count to be off
1544 // by 1.
1545 // - At frames [0, 832], presentationTime is set to timeline[0]
1546 // - At frames [833, 833*2-1], presentationTime is set to timeline[1]
1547 // - At frames [833*2, 833*3-1] presentationTime is set to timeline[2]
1548 // - At frame 833*3, no presentation time is found because we only
1549 // search for timeline[0..2].
1550        //   - Dropping one buffer is better than further extending the presentation
1551 // time.
1552 //
1553 // However, if frame 833*2 arrives 16.67ms early (right after frame
1554 // 833*2-1), no presentation time can be found because
1555 // getLatestVsyncEventData is called early. In that case, it's better to
1556        // set the presentation time by offsetting the last presentation time.
1557 expectedPresentT += vsyncEventData.frameInterval;
1558 }
1559
Shuzhen Wang14c62b82022-04-11 09:37:05 -07001560 mLastCaptureTime = t;
1561 mLastPresentTime = expectedPresentT;
1562
1563    // Move the expected presentation time back by 1/3 of the frame interval to
1564    // mitigate the time drift. Due to time drift, if we directly use the
1565    // expected presentation time, two expected presentation times often
1566    // fall into the same VSYNC interval.
1567 return expectedPresentT - vsyncEventData.frameInterval/3;
Shuzhen Wange4adddb2021-09-21 15:24:44 -07001568}
1569
Shuzhen Wangba92d772022-04-11 11:47:24 -07001570bool Camera3OutputStream::shouldLogError(status_t res) {
1571 Mutex::Autolock l(mLock);
1572 return shouldLogError(res, mState);
1573}
1574
Eino-Ville Talvalafd58f1a2013-03-06 16:20:06 -08001575}; // namespace camera3
1576
1577}; // namespace android