/*
 * Copyright (C) 2013-2018 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#define LOG_TAG "Camera3-OutputStream"
#define ATRACE_TAG ATRACE_TAG_CAMERA
//#define LOG_NDEBUG 0

#include <algorithm>
#include <ctime>
#include <fstream>

#include <aidl/android/hardware/camera/device/CameraBlob.h>
#include <aidl/android/hardware/camera/device/CameraBlobId.h>
#include "aidl/android/hardware/graphics/common/Dataspace.h"

#include <android-base/unique_fd.h>
#include <com_android_internal_camera_flags.h>
#include <cutils/properties.h>
#include <ui/GraphicBuffer.h>
#include <utils/Log.h>
#include <utils/Trace.h>
#include <camera/StringUtils.h>

#include <common/CameraDeviceBase.h>
#include "api1/client2/JpegProcessor.h"
#include "Camera3OutputStream.h"
#include "utils/TraceHFR.h"

#ifndef container_of
#define container_of(ptr, type, member) \
    (type *)((char*)(ptr) - offsetof(type, member))
#endif

namespace flags = com::android::internal::camera::flags;

namespace android {

namespace camera3 {

using aidl::android::hardware::camera::device::CameraBlob;
using aidl::android::hardware::camera::device::CameraBlobId;

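// Constructor for an output stream whose consumer Surface is already available.
// The overloads below cover size-only (BLOB / RAW_OPAQUE) streams, streams whose
// consumer is provided later (deferred), and a base variant for subclasses that
// set up mConsumer themselves.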
Camera3OutputStream::Camera3OutputStream(int id,
        sp<Surface> consumer,
        uint32_t width, uint32_t height, int format,
        android_dataspace dataSpace, camera_stream_rotation_t rotation,
        nsecs_t timestampOffset, const std::string& physicalCameraId,
        const std::unordered_set<int32_t> &sensorPixelModesUsed, IPCTransport transport,
        int setId, bool isMultiResolution, int64_t dynamicRangeProfile,
        int64_t streamUseCase, bool deviceTimeBaseIsRealtime, int timestampBase,
        int mirrorMode, int32_t colorSpace, bool useReadoutTimestamp) :
        Camera3IOStreamBase(id, CAMERA_STREAM_OUTPUT, width, height,
                            /*maxSize*/0, format, dataSpace, rotation,
                            physicalCameraId, sensorPixelModesUsed, setId, isMultiResolution,
                            dynamicRangeProfile, streamUseCase, deviceTimeBaseIsRealtime,
                            timestampBase, colorSpace),
        mConsumer(consumer),
        mTransform(0),
        mTraceFirstBuffer(true),
        mUseBufferManager(false),
        mTimestampOffset(timestampOffset),
        mUseReadoutTime(useReadoutTimestamp),
        mConsumerUsage(0),
        mDropBuffers(false),
        mMirrorMode(mirrorMode),
        mDequeueBufferLatency(kDequeueLatencyBinSize),
        mIPCTransport(transport) {

    if (mConsumer == NULL) {
        ALOGE("%s: Consumer is NULL!", __FUNCTION__);
        mState = STATE_ERROR;
    }

    bool needsReleaseNotify = setId > CAMERA3_STREAM_SET_ID_INVALID;
    mBufferProducerListener = new BufferProducerListener(this, needsReleaseNotify);
}

Camera3OutputStream::Camera3OutputStream(int id,
        sp<Surface> consumer,
        uint32_t width, uint32_t height, size_t maxSize, int format,
        android_dataspace dataSpace, camera_stream_rotation_t rotation,
        nsecs_t timestampOffset, const std::string& physicalCameraId,
        const std::unordered_set<int32_t> &sensorPixelModesUsed, IPCTransport transport,
        int setId, bool isMultiResolution, int64_t dynamicRangeProfile,
        int64_t streamUseCase, bool deviceTimeBaseIsRealtime, int timestampBase,
        int mirrorMode, int32_t colorSpace, bool useReadoutTimestamp) :
        Camera3IOStreamBase(id, CAMERA_STREAM_OUTPUT, width, height, maxSize,
                            format, dataSpace, rotation, physicalCameraId, sensorPixelModesUsed,
                            setId, isMultiResolution, dynamicRangeProfile, streamUseCase,
                            deviceTimeBaseIsRealtime, timestampBase, colorSpace),
        mConsumer(consumer),
        mTransform(0),
        mTraceFirstBuffer(true),
        mUseBufferManager(false),
        mTimestampOffset(timestampOffset),
        mUseReadoutTime(useReadoutTimestamp),
        mConsumerUsage(0),
        mDropBuffers(false),
        mMirrorMode(mirrorMode),
        mDequeueBufferLatency(kDequeueLatencyBinSize),
        mIPCTransport(transport) {

    if (format != HAL_PIXEL_FORMAT_BLOB && format != HAL_PIXEL_FORMAT_RAW_OPAQUE) {
        ALOGE("%s: Bad format for size-only stream: %d", __FUNCTION__,
                format);
        mState = STATE_ERROR;
    }

    if (mConsumer == NULL) {
        ALOGE("%s: Consumer is NULL!", __FUNCTION__);
        mState = STATE_ERROR;
    }

    bool needsReleaseNotify = setId > CAMERA3_STREAM_SET_ID_INVALID;
    mBufferProducerListener = new BufferProducerListener(this, needsReleaseNotify);
}

Camera3OutputStream::Camera3OutputStream(int id,
        uint32_t width, uint32_t height, int format,
        uint64_t consumerUsage, android_dataspace dataSpace,
        camera_stream_rotation_t rotation, nsecs_t timestampOffset,
        const std::string& physicalCameraId,
        const std::unordered_set<int32_t> &sensorPixelModesUsed, IPCTransport transport,
        int setId, bool isMultiResolution, int64_t dynamicRangeProfile,
        int64_t streamUseCase, bool deviceTimeBaseIsRealtime, int timestampBase,
        int mirrorMode, int32_t colorSpace, bool useReadoutTimestamp) :
        Camera3IOStreamBase(id, CAMERA_STREAM_OUTPUT, width, height,
                            /*maxSize*/0, format, dataSpace, rotation,
                            physicalCameraId, sensorPixelModesUsed, setId, isMultiResolution,
                            dynamicRangeProfile, streamUseCase, deviceTimeBaseIsRealtime,
                            timestampBase, colorSpace),
        mConsumer(nullptr),
        mTransform(0),
        mTraceFirstBuffer(true),
        mUseBufferManager(false),
        mTimestampOffset(timestampOffset),
        mUseReadoutTime(useReadoutTimestamp),
        mConsumerUsage(consumerUsage),
        mDropBuffers(false),
        mMirrorMode(mirrorMode),
        mDequeueBufferLatency(kDequeueLatencyBinSize),
        mIPCTransport(transport) {
    // Deferred consumers only support the preview surface format for now.
    if (format != HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED) {
        ALOGE("%s: Deferred consumer only supports IMPLEMENTATION_DEFINED format now!",
                __FUNCTION__);
        mState = STATE_ERROR;
    }

    // Validation check for the consumer usage flag.
    if ((consumerUsage & GraphicBuffer::USAGE_HW_TEXTURE) == 0 &&
            (consumerUsage & GraphicBuffer::USAGE_HW_COMPOSER) == 0) {
        ALOGE("%s: Deferred consumer usage flag is illegal %" PRIu64 "!",
                __FUNCTION__, consumerUsage);
        mState = STATE_ERROR;
    }

    bool needsReleaseNotify = setId > CAMERA3_STREAM_SET_ID_INVALID;
    mBufferProducerListener = new BufferProducerListener(this, needsReleaseNotify);
}

Camera3OutputStream::Camera3OutputStream(int id, camera_stream_type_t type,
        uint32_t width, uint32_t height,
        int format,
        android_dataspace dataSpace,
        camera_stream_rotation_t rotation,
        const std::string& physicalCameraId,
        const std::unordered_set<int32_t> &sensorPixelModesUsed,
        IPCTransport transport,
        uint64_t consumerUsage, nsecs_t timestampOffset,
        int setId, bool isMultiResolution,
        int64_t dynamicRangeProfile, int64_t streamUseCase,
        bool deviceTimeBaseIsRealtime, int timestampBase,
        int mirrorMode, int32_t colorSpace,
        bool useReadoutTimestamp) :
        Camera3IOStreamBase(id, type, width, height,
                            /*maxSize*/0,
                            format, dataSpace, rotation,
                            physicalCameraId, sensorPixelModesUsed, setId, isMultiResolution,
                            dynamicRangeProfile, streamUseCase, deviceTimeBaseIsRealtime,
                            timestampBase, colorSpace),
        mTransform(0),
        mTraceFirstBuffer(true),
        mUseBufferManager(false),
        mTimestampOffset(timestampOffset),
        mUseReadoutTime(useReadoutTimestamp),
        mConsumerUsage(consumerUsage),
        mDropBuffers(false),
        mMirrorMode(mirrorMode),
        mDequeueBufferLatency(kDequeueLatencyBinSize),
        mIPCTransport(transport) {

    bool needsReleaseNotify = setId > CAMERA3_STREAM_SET_ID_INVALID;
    mBufferProducerListener = new BufferProducerListener(this, needsReleaseNotify);

    // Subclasses expected to initialize mConsumer themselves
}


Camera3OutputStream::~Camera3OutputStream() {
    disconnectLocked();
}

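// Dequeue the next buffer from the consumer endpoint and hand it out to the
// HAL; on success the dequeue fence becomes the buffer's acquire fence.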
status_t Camera3OutputStream::getBufferLocked(camera_stream_buffer *buffer,
        const std::vector<size_t>&) {
    ATRACE_HFR_CALL();

    ANativeWindowBuffer* anb;
    int fenceFd = -1;

    status_t res;
    res = getBufferLockedCommon(&anb, &fenceFd);
    if (res != OK) {
        return res;
    }

    /**
     * FenceFD now owned by HAL except in case of error,
     * in which case we reassign it to acquire_fence
     */
    handoutBufferLocked(*buffer, &(anb->handle), /*acquireFence*/fenceFd,
                        /*releaseFence*/-1, CAMERA_BUFFER_STATUS_OK, /*output*/true);

    return OK;
}

status_t Camera3OutputStream::queueBufferToConsumer(sp<ANativeWindow>& consumer,
            ANativeWindowBuffer* buffer, int anwReleaseFence,
            const std::vector<size_t>&) {
    return consumer->queueBuffer(consumer.get(), buffer, anwReleaseFence);
}

status_t Camera3OutputStream::returnBufferLocked(
        const camera_stream_buffer &buffer,
        nsecs_t timestamp, nsecs_t readoutTimestamp,
        int32_t transform, const std::vector<size_t>& surface_ids) {
    ATRACE_HFR_CALL();

    if (mHandoutTotalBufferCount == 1) {
        returnPrefetchedBuffersLocked();
    }

    status_t res = returnAnyBufferLocked(buffer, timestamp, readoutTimestamp,
                                         /*output*/true, transform, surface_ids);

    if (res != OK) {
        return res;
    }

    mLastTimestamp = timestamp;
    mFrameCount++;

    return OK;
}

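// For HIDL HALs the JPEG blob trailer at the end of the buffer uses the legacy
// camera_jpeg_blob_t layout; rewrite it in place into the AIDL CameraBlob layout
// so that downstream readers expecting the AIDL trailer can parse the blob size
// (see the HIDL/AIDL discrepancy referenced at b/229688810 below).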
status_t Camera3OutputStream::fixUpHidlJpegBlobHeader(ANativeWindowBuffer* anwBuffer, int fence) {
    // Lock the JPEG buffer for CPU read
    sp<GraphicBuffer> graphicBuffer = GraphicBuffer::from(anwBuffer);
    void* mapped = nullptr;
    base::unique_fd fenceFd(dup(fence));
    // Use USAGE_SW_WRITE_RARELY since we're going to re-write the CameraBlob
    // header.
    GraphicBufferLocker gbLocker(graphicBuffer);
    status_t res =
            gbLocker.lockAsync(
                    GraphicBuffer::USAGE_SW_READ_OFTEN | GraphicBuffer::USAGE_SW_WRITE_RARELY,
                    &mapped, fenceFd.release());
    if (res != OK) {
        ALOGE("%s: Failed to lock the buffer: %s (%d)", __FUNCTION__, strerror(-res), res);
        return res;
    }

    uint8_t *hidlHeaderStart =
            static_cast<uint8_t*>(mapped) + graphicBuffer->getWidth() - sizeof(camera_jpeg_blob_t);
    // Check that the jpeg buffer is big enough to contain HIDL camera blob
    if (hidlHeaderStart < static_cast<uint8_t *>(mapped)) {
        ALOGE("%s, jpeg buffer not large enough to fit HIDL camera blob %" PRIu32, __FUNCTION__,
                graphicBuffer->getWidth());
        return BAD_VALUE;
    }
    camera_jpeg_blob_t *hidlBlobHeader = reinterpret_cast<camera_jpeg_blob_t *>(hidlHeaderStart);

    // Check that the blob is indeed the jpeg blob id.
    if (hidlBlobHeader->jpeg_blob_id != CAMERA_JPEG_BLOB_ID) {
        ALOGE("%s, jpeg blob id %d is not correct", __FUNCTION__, hidlBlobHeader->jpeg_blob_id);
        return BAD_VALUE;
    }

    // Retrieve id and blob size
    CameraBlobId blobId = static_cast<CameraBlobId>(hidlBlobHeader->jpeg_blob_id);
    uint32_t blobSizeBytes = hidlBlobHeader->jpeg_size;

    if (blobSizeBytes > (graphicBuffer->getWidth() - sizeof(camera_jpeg_blob_t))) {
        ALOGE("%s, blobSize in HIDL jpeg blob : %d is corrupt, buffer size %" PRIu32, __FUNCTION__,
                blobSizeBytes, graphicBuffer->getWidth());
    }

    uint8_t *aidlHeaderStart =
            static_cast<uint8_t*>(mapped) + graphicBuffer->getWidth() - sizeof(CameraBlob);

    // Check that the jpeg buffer is big enough to contain AIDL camera blob
    if (aidlHeaderStart < static_cast<uint8_t *>(mapped)) {
        ALOGE("%s, jpeg buffer not large enough to fit AIDL camera blob %" PRIu32, __FUNCTION__,
                graphicBuffer->getWidth());
        return BAD_VALUE;
    }

    if (static_cast<uint8_t*>(mapped) + blobSizeBytes > aidlHeaderStart) {
        ALOGE("%s, jpeg blob with size %d , buffer size %" PRIu32 " not large enough to fit"
                " AIDL camera blob without corrupting jpeg", __FUNCTION__, blobSizeBytes,
                graphicBuffer->getWidth());
        return BAD_VALUE;
    }

    // Fill in JPEG header
    CameraBlob aidlHeader = {
            .blobId = blobId,
            .blobSizeBytes = static_cast<int32_t>(blobSizeBytes)
    };
    memcpy(aidlHeaderStart, &aidlHeader, sizeof(CameraBlob));
    graphicBuffer->unlock();
    return OK;
}

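// Return a filled (or failed) buffer to the consumer: error/dropped buffers are
// cancelled back to the queue, JPEG buffers may have their blob trailer fixed up
// and be dumped to disk, and normal buffers are timestamped and queued either
// directly or through the preview frame spacer / display-sync path.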
status_t Camera3OutputStream::returnBufferCheckedLocked(
        const camera_stream_buffer &buffer,
        nsecs_t timestamp,
        nsecs_t readoutTimestamp,
        [[maybe_unused]] bool output,
        int32_t transform,
        const std::vector<size_t>& surface_ids,
        /*out*/
        sp<Fence> *releaseFenceOut) {

    ALOG_ASSERT(output, "Expected output to be true");

    status_t res;

    // Fence management - always honor release fence from HAL
    sp<Fence> releaseFence = new Fence(buffer.release_fence);
    int anwReleaseFence = releaseFence->dup();

    /**
     * Release the lock briefly to avoid deadlock with
     * StreamingProcessor::startStream -> Camera3Stream::isConfiguring (this
     * thread will go into StreamingProcessor::onFrameAvailable) during
     * queueBuffer
     */
    sp<ANativeWindow> currentConsumer = mConsumer;
    StreamState state = mState;
    mLock.unlock();

    ANativeWindowBuffer *anwBuffer = container_of(buffer.buffer, ANativeWindowBuffer, handle);
    bool bufferDeferred = false;
    /**
     * Return buffer back to ANativeWindow
     */
    if (buffer.status == CAMERA_BUFFER_STATUS_ERROR || mDropBuffers || timestamp == 0) {
        // Cancel buffer
        if (mDropBuffers) {
            ALOGV("%s: Dropping a frame for stream %d.", __FUNCTION__, mId);
        } else if (buffer.status == CAMERA_BUFFER_STATUS_ERROR) {
            ALOGV("%s: A frame is dropped for stream %d due to buffer error.", __FUNCTION__, mId);
        } else {
            ALOGE("%s: Stream %d: timestamp shouldn't be 0", __FUNCTION__, mId);
        }

        res = currentConsumer->cancelBuffer(currentConsumer.get(),
                anwBuffer,
                anwReleaseFence);
        if (shouldLogError(res, state)) {
            ALOGE("%s: Stream %d: Error cancelling buffer to native window:"
                  " %s (%d)", __FUNCTION__, mId, strerror(-res), res);
        }

        notifyBufferReleased(anwBuffer);
        if (mUseBufferManager) {
            // Return this buffer back to buffer manager.
            mBufferProducerListener->onBufferReleased();
        }
    } else {
        if (mTraceFirstBuffer && (stream_type == CAMERA_STREAM_OUTPUT)) {
            {
                char traceLog[48];
                snprintf(traceLog, sizeof(traceLog), "Stream %d: first full buffer\n", mId);
                ATRACE_NAME(traceLog);
            }
            mTraceFirstBuffer = false;
        }
        // Fix CameraBlob id type discrepancy between HIDL and AIDL, details : http://b/229688810
        if (getFormat() == HAL_PIXEL_FORMAT_BLOB && (getDataSpace() == HAL_DATASPACE_V0_JFIF ||
                    (getDataSpace() ==
                     static_cast<android_dataspace_t>(
                         aidl::android::hardware::graphics::common::Dataspace::JPEG_R)))) {
            if (mIPCTransport == IPCTransport::HIDL) {
                fixUpHidlJpegBlobHeader(anwBuffer, anwReleaseFence);
            }
            // If this is a JPEG output, and image dump mask is set, save image to
            // disk.
            if (mImageDumpMask) {
                dumpImageToDisk(timestamp, anwBuffer, anwReleaseFence);
            }
        }

        nsecs_t captureTime = ((mUseReadoutTime || mSyncToDisplay) && readoutTimestamp != 0 ?
                readoutTimestamp : timestamp) - mTimestampOffset;
        if (mPreviewFrameSpacer != nullptr) {
            nsecs_t readoutTime = (readoutTimestamp != 0 ? readoutTimestamp : timestamp)
                    - mTimestampOffset;
            res = mPreviewFrameSpacer->queuePreviewBuffer(captureTime, readoutTime,
                    transform, anwBuffer, anwReleaseFence);
            if (res != OK) {
                ALOGE("%s: Stream %d: Error queuing buffer to preview buffer spacer: %s (%d)",
                        __FUNCTION__, mId, strerror(-res), res);
                return res;
            }
            bufferDeferred = true;
        } else {
            nsecs_t presentTime = mSyncToDisplay ?
                    syncTimestampToDisplayLocked(captureTime, releaseFence) : captureTime;

            setTransform(transform, true/*mayChangeMirror*/);
            res = native_window_set_buffers_timestamp(mConsumer.get(), presentTime);
            if (res != OK) {
                ALOGE("%s: Stream %d: Error setting timestamp: %s (%d)",
                        __FUNCTION__, mId, strerror(-res), res);
                return res;
            }

            queueHDRMetadata(anwBuffer->handle, currentConsumer, dynamic_range_profile);

            res = queueBufferToConsumer(currentConsumer, anwBuffer, anwReleaseFence, surface_ids);
            if (shouldLogError(res, state)) {
                ALOGE("%s: Stream %d: Error queueing buffer to native window:"
                      " %s (%d)", __FUNCTION__, mId, strerror(-res), res);
            }
        }
    }
    mLock.lock();

    if (bufferDeferred) {
        mCachedOutputBufferCount++;
    }

    // Once a valid buffer has been returned to the queue, can no longer
    // dequeue all buffers for preallocation.
    if (buffer.status != CAMERA_BUFFER_STATUS_ERROR) {
        mStreamUnpreparable = true;
    }

    *releaseFenceOut = releaseFence;

    return res;
}
468
Emilian Peev3b93acb2024-03-11 21:09:48 +0000469void Camera3OutputStream::dump(int fd, [[maybe_unused]] const Vector<String16> &args) {
Austin Borger1c1bee02023-06-01 16:51:35 -0700470 std::string lines;
471 lines += fmt::sprintf(" Stream[%d]: Output\n", mId);
Emilian Peev3b93acb2024-03-11 21:09:48 +0000472 lines += fmt::sprintf(" Consumer name: %s\n", (mConsumer.get() != nullptr) ?
473 mConsumer->getConsumerName() : "Deferred");
Austin Borger1c1bee02023-06-01 16:51:35 -0700474 write(fd, lines.c_str(), lines.size());
Igor Murashkine3a9f962013-05-08 18:03:15 -0700475
476 Camera3IOStreamBase::dump(fd, args);
Shuzhen Wang686f6442017-06-20 16:16:04 -0700477
478 mDequeueBufferLatency.dump(fd,
479 " DequeueBuffer latency histogram:");
Eino-Ville Talvalafd58f1a2013-03-06 16:20:06 -0800480}
481
Shuzhen Wang610d7b82022-02-08 14:37:22 -0800482status_t Camera3OutputStream::setTransform(int transform, bool mayChangeMirror) {
Eino-Ville Talvalafd58f1a2013-03-06 16:20:06 -0800483 ATRACE_CALL();
484 Mutex::Autolock l(mLock);
Shuzhen Wang610d7b82022-02-08 14:37:22 -0800485 if (mMirrorMode != OutputConfiguration::MIRROR_MODE_AUTO && mayChangeMirror) {
486 // If the mirroring mode is not AUTO, do not allow transform update
487 // which may change mirror.
488 return OK;
489 }
490
Eino-Ville Talvalafd58f1a2013-03-06 16:20:06 -0800491 return setTransformLocked(transform);
492}
493
494status_t Camera3OutputStream::setTransformLocked(int transform) {
495 status_t res = OK;
Shuzhen Wange4adddb2021-09-21 15:24:44 -0700496
497 if (transform == -1) return res;
498
Eino-Ville Talvalafd58f1a2013-03-06 16:20:06 -0800499 if (mState == STATE_ERROR) {
500 ALOGE("%s: Stream in error state", __FUNCTION__);
501 return INVALID_OPERATION;
502 }
503
504 mTransform = transform;
505 if (mState == STATE_CONFIGURED) {
506 res = native_window_set_buffers_transform(mConsumer.get(),
507 transform);
508 if (res != OK) {
509 ALOGE("%s: Unable to configure stream transform to %x: %s (%d)",
510 __FUNCTION__, transform, strerror(-res), res);
511 }
512 }
513 return res;
514}
515
516status_t Camera3OutputStream::configureQueueLocked() {
517 status_t res;
518
Ruchit Sharmae0711f22014-08-18 13:48:24 -0400519 mTraceFirstBuffer = true;
Igor Murashkine3a9f962013-05-08 18:03:15 -0700520 if ((res = Camera3IOStreamBase::configureQueueLocked()) != OK) {
521 return res;
Eino-Ville Talvalafd58f1a2013-03-06 16:20:06 -0800522 }
523
Shuzhen Wangba92d772022-04-11 11:47:24 -0700524 if ((res = configureConsumerQueueLocked(true /*allowPreviewRespace*/)) != OK) {
Shuzhen Wang0129d522016-10-30 22:43:41 -0700525 return res;
526 }
527
528 // Set dequeueBuffer/attachBuffer timeout if the consumer is not hw composer or hw texture.
529 // We need skip these cases as timeout will disable the non-blocking (async) mode.
530 if (!(isConsumedByHWComposer() || isConsumedByHWTexture())) {
Yin-Chia Yehbf1b8b92019-03-06 14:56:08 -0800531 if (mUseBufferManager) {
532 // When buffer manager is handling the buffer, we should have available buffers in
533 // buffer queue before we calls into dequeueBuffer because buffer manager is tracking
534 // free buffers.
535 // There are however some consumer side feature (ImageReader::discardFreeBuffers) that
536 // can discard free buffers without notifying buffer manager. We want the timeout to
537 // happen immediately here so buffer manager can try to update its internal state and
538 // try to allocate a buffer instead of waiting.
539 mConsumer->setDequeueTimeout(0);
540 } else {
541 mConsumer->setDequeueTimeout(kDequeueBufferTimeout);
542 }
Shuzhen Wang0129d522016-10-30 22:43:41 -0700543 }
544
545 return OK;
546}
547
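// Connect and configure the consumer-side ANativeWindow: usage, scaling mode,
// dimensions, format, dataspace, buffer counts, transform, timestamp handling,
// and (optionally) registration with the buffer manager.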
status_t Camera3OutputStream::configureConsumerQueueLocked(bool allowPreviewRespace) {
    status_t res;

    mTraceFirstBuffer = true;

    ALOG_ASSERT(mConsumer != 0, "mConsumer should never be NULL");

    // Configure consumer-side ANativeWindow interface. The listener may be used
    // to notify buffer manager (if it is used) of the returned buffers.
    res = mConsumer->connect(NATIVE_WINDOW_API_CAMERA,
            /*reportBufferRemoval*/true,
            /*listener*/mBufferProducerListener);
    if (res != OK) {
        ALOGE("%s: Unable to connect to native window for stream %d",
                __FUNCTION__, mId);
        return res;
    }

    res = native_window_set_usage(mConsumer.get(), mUsage);
    if (res != OK) {
        ALOGE("%s: Unable to configure usage %" PRIu64 " for stream %d",
                __FUNCTION__, mUsage, mId);
        return res;
    }

    res = native_window_set_scaling_mode(mConsumer.get(),
            NATIVE_WINDOW_SCALING_MODE_SCALE_TO_WINDOW);
    if (res != OK) {
        ALOGE("%s: Unable to configure stream scaling: %s (%d)",
                __FUNCTION__, strerror(-res), res);
        return res;
    }

    if (mMaxSize == 0) {
        // For buffers of known size
        res = native_window_set_buffers_dimensions(mConsumer.get(),
                camera_stream::width, camera_stream::height);
    } else {
        // For buffers with bounded size
        res = native_window_set_buffers_dimensions(mConsumer.get(),
                mMaxSize, 1);
    }
    if (res != OK) {
        ALOGE("%s: Unable to configure stream buffer dimensions"
                " %d x %d (maxSize %zu) for stream %d",
                __FUNCTION__, camera_stream::width, camera_stream::height,
                mMaxSize, mId);
        return res;
    }
    res = native_window_set_buffers_format(mConsumer.get(),
            camera_stream::format);
    if (res != OK) {
        ALOGE("%s: Unable to configure stream buffer format %#x for stream %d",
                __FUNCTION__, camera_stream::format, mId);
        return res;
    }

    res = native_window_set_buffers_data_space(mConsumer.get(),
            camera_stream::data_space);
    if (res != OK) {
        ALOGE("%s: Unable to configure stream dataspace %#x for stream %d",
                __FUNCTION__, camera_stream::data_space, mId);
        return res;
    }

    int maxConsumerBuffers = 0;
    res = static_cast<ANativeWindow*>(mConsumer.get())->query(
            mConsumer.get(),
            NATIVE_WINDOW_MIN_UNDEQUEUED_BUFFERS, &maxConsumerBuffers);
    if (res != OK) {
        ALOGE("%s: Unable to query consumer undequeued"
                " buffer count for stream %d", __FUNCTION__, mId);
        return res;
    }

    ALOGV("%s: Consumer wants %d buffers, HAL wants %d", __FUNCTION__,
            maxConsumerBuffers, camera_stream::max_buffers);
    if (camera_stream::max_buffers == 0) {
        ALOGE("%s: Camera HAL requested max_buffer count: %d, requires at least 1",
                __FUNCTION__, camera_stream::max_buffers);
        return INVALID_OPERATION;
    }

    mTotalBufferCount = maxConsumerBuffers + camera_stream::max_buffers;

    int timestampBase = getTimestampBase();
    bool isDefaultTimeBase = (timestampBase ==
            OutputConfiguration::TIMESTAMP_BASE_DEFAULT);
    if (allowPreviewRespace) {
        bool forceChoreographer = (timestampBase ==
                OutputConfiguration::TIMESTAMP_BASE_CHOREOGRAPHER_SYNCED);
        bool defaultToChoreographer = (isDefaultTimeBase &&
                isConsumedByHWComposer());
        bool defaultToSpacer = (isDefaultTimeBase &&
                isConsumedByHWTexture() &&
                !isConsumedByCPU() &&
                !isVideoStream());
        if (forceChoreographer || defaultToChoreographer) {
            mSyncToDisplay = true;
            // For choreographer synced stream, extra buffers aren't kept by
            // camera service. So no need to update mMaxCachedBufferCount.
            mTotalBufferCount += kDisplaySyncExtraBuffer;
        } else if (defaultToSpacer) {
            mPreviewFrameSpacer = new PreviewFrameSpacer(this, mConsumer);
            // For preview frame spacer, the extra buffer is kept by camera
            // service. So update mMaxCachedBufferCount.
            mMaxCachedBufferCount = 1;
            mTotalBufferCount += mMaxCachedBufferCount;
            res = mPreviewFrameSpacer->run((std::string("PreviewSpacer-")
                    + std::to_string(mId)).c_str());
            if (res != OK) {
                ALOGE("%s: Unable to start preview spacer: %s (%d)", __FUNCTION__,
                        strerror(-res), res);
                return res;
            }
        }
    }
    mHandoutTotalBufferCount = 0;
    mFrameCount = 0;
    mLastTimestamp = 0;

    if (isDeviceTimeBaseRealtime()) {
        if (isDefaultTimeBase && !isConsumedByHWComposer() && !isVideoStream()) {
            // Default time base, but not hardware composer or video encoder
            mTimestampOffset = 0;
        } else if (timestampBase == OutputConfiguration::TIMESTAMP_BASE_REALTIME ||
                timestampBase == OutputConfiguration::TIMESTAMP_BASE_SENSOR) {
            mTimestampOffset = 0;
        }
        // If timestampBase is CHOREOGRAPHER SYNCED or MONOTONIC, leave
        // timestamp offset as bootTime - monotonicTime.
    } else {
        if (timestampBase == OutputConfiguration::TIMESTAMP_BASE_REALTIME) {
            // Reverse offset for monotonicTime -> bootTime
            mTimestampOffset = -mTimestampOffset;
        } else {
            // If timestampBase is DEFAULT, MONOTONIC, SENSOR or
            // CHOREOGRAPHER_SYNCED, timestamp offset is 0.
            mTimestampOffset = 0;
        }
    }

    if (flags::surface_ipc()) {
        res = mConsumer->setMaxDequeuedBufferCount(mTotalBufferCount - maxConsumerBuffers);
    } else {
        res = native_window_set_buffer_count(mConsumer.get(), mTotalBufferCount);
    }
    if (res != OK) {
        ALOGE("%s: Unable to set buffer count for stream %d",
                __FUNCTION__, mId);
        return res;
    }

    res = native_window_set_buffers_transform(mConsumer.get(),
            mTransform);
    if (res != OK) {
        ALOGE("%s: Unable to configure stream transform to %x: %s (%d)",
                __FUNCTION__, mTransform, strerror(-res), res);
        return res;
    }

    /**
     * The Camera3 buffer manager is only supported from HAL3.3 onwards, as older HALs require
     * buffers to be statically allocated for internal static buffer registration, while the
     * buffers provided by the buffer manager are dynamically allocated. Camera3Device only
     * sets mBufferManager if the device version is > HAL3.2, which guarantees that the buffer
     * manager setup is skipped in the code below. Note that HAL3.2 is also excluded here, as
     * some HAL3.2 devices may not support dynamic buffer registration.
     * Also, Camera3BufferManager does not support display/texture streams as they have their
     * own buffer management logic.
     */
    if (mBufferManager != 0 && mSetId > CAMERA3_STREAM_SET_ID_INVALID &&
            !(isConsumedByHWComposer() || isConsumedByHWTexture())) {
        uint64_t consumerUsage = 0;
        getEndpointUsage(&consumerUsage);
        uint32_t width = (mMaxSize == 0) ? getWidth() : mMaxSize;
        uint32_t height = (mMaxSize == 0) ? getHeight() : 1;
        StreamInfo streamInfo(
                getId(), getStreamSetId(), width, height, getFormat(), getDataSpace(),
                mUsage | consumerUsage, mTotalBufferCount,
                /*isConfigured*/true, isMultiResolution());
        wp<Camera3OutputStream> weakThis(this);
        res = mBufferManager->registerStream(weakThis,
                streamInfo);
        if (res == OK) {
            // Disable buffer allocation for this BufferQueue, buffer manager will take over
            // the buffer allocation responsibility.
            mConsumer->getIGraphicBufferProducer()->allowAllocation(false);
            mUseBufferManager = true;
        } else {
            ALOGE("%s: Unable to register stream %d to camera3 buffer manager, "
                  "(error %d %s), fall back to BufferQueue for buffer management!",
                  __FUNCTION__, mId, res, strerror(-res));
        }
    }

    return OK;
}

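// Obtain a dequeued buffer either from the buffer manager (attach path) or by
// dequeueing from the consumer surface, optionally in batches when mBatchSize is
// greater than one; if a plain dequeue times out while the buffer manager is in
// use, retry through the buffer manager.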
status_t Camera3OutputStream::getBufferLockedCommon(ANativeWindowBuffer** anb, int* fenceFd) {
    ATRACE_HFR_CALL();
    status_t res;

    if ((res = getBufferPreconditionCheckLocked()) != OK) {
        return res;
    }

    bool gotBufferFromManager = false;

    if (mUseBufferManager) {
        sp<GraphicBuffer> gb;
        res = mBufferManager->getBufferForStream(getId(), getStreamSetId(),
                isMultiResolution(), &gb, fenceFd);
        if (res == OK) {
            // Attach this buffer to the bufferQueue: the buffer will be in dequeue state after a
            // successful return.
            *anb = gb.get();
            res = mConsumer->attachBuffer(*anb);
            if (shouldLogError(res, mState)) {
                ALOGE("%s: Stream %d: Can't attach the output buffer to this surface: %s (%d)",
                        __FUNCTION__, mId, strerror(-res), res);
            }
            if (res != OK) {
                checkRetAndSetAbandonedLocked(res);
                return res;
            }
            gotBufferFromManager = true;
            ALOGV("Stream %d: Attached new buffer", getId());
        } else if (res == ALREADY_EXISTS) {
            // Have sufficient free buffers already attached, can just
            // dequeue from buffer queue
            ALOGV("Stream %d: Reusing attached buffer", getId());
            gotBufferFromManager = false;
        } else if (res != OK) {
            ALOGE("%s: Stream %d: Can't get next output buffer from buffer manager: %s (%d)",
                    __FUNCTION__, mId, strerror(-res), res);
            return res;
        }
    }
    if (!gotBufferFromManager) {
        /**
         * Release the lock briefly to avoid a deadlock in the following scenario:
         * Thread 1: StreamingProcessor::startStream -> Camera3Stream::isConfiguring().
         * This thread holds the StreamingProcessor lock and tries to take the Camera3Stream lock.
         * Thread 2: Camera3Stream::returnBuffer -> StreamingProcessor::onFrameAvailable().
         * This thread holds the Camera3Stream lock and bufferQueue lock, and tries to take the
         * StreamingProcessor lock.
         * Thread 3: Camera3Stream::getBuffer(). This thread holds the Camera3Stream lock
         * and tries to take the bufferQueue lock.
         * That creates a circular locking dependency.
         */
        sp<Surface> consumer = mConsumer;
        size_t remainingBuffers = (mState == STATE_PREPARING ? mTotalBufferCount :
                camera_stream::max_buffers) - mHandoutTotalBufferCount;
        mLock.unlock();

        nsecs_t dequeueStart = systemTime(SYSTEM_TIME_MONOTONIC);

        size_t batchSize = mBatchSize.load();
        if (batchSize == 1) {
            sp<ANativeWindow> anw = consumer;
            res = anw->dequeueBuffer(anw.get(), anb, fenceFd);
        } else {
            std::unique_lock<std::mutex> batchLock(mBatchLock);
            res = OK;
            if (mBatchedBuffers.size() == 0) {
                if (remainingBuffers == 0) {
                    ALOGE("%s: cannot get buffer while all buffers are handed out", __FUNCTION__);
                    return INVALID_OPERATION;
                }
                if (batchSize > remainingBuffers) {
                    batchSize = remainingBuffers;
                }
                batchLock.unlock();
                // Refill batched buffers
                std::vector<Surface::BatchBuffer> batchedBuffers;
                batchedBuffers.resize(batchSize);
                res = consumer->dequeueBuffers(&batchedBuffers);
                batchLock.lock();
                if (res != OK) {
                    ALOGE("%s: batch dequeueBuffers call failed! %s (%d)",
                            __FUNCTION__, strerror(-res), res);
                } else {
                    mBatchedBuffers = std::move(batchedBuffers);
                }
            }

            if (res == OK) {
                // Dispatch batch buffers
                *anb = mBatchedBuffers.back().buffer;
                *fenceFd = mBatchedBuffers.back().fenceFd;
                mBatchedBuffers.pop_back();
            }
        }

        nsecs_t dequeueEnd = systemTime(SYSTEM_TIME_MONOTONIC);
        mDequeueBufferLatency.add(dequeueStart, dequeueEnd);

        mLock.lock();

        if (mUseBufferManager && res == TIMED_OUT) {
            checkRemovedBuffersLocked();

            sp<GraphicBuffer> gb;
            res = mBufferManager->getBufferForStream(
                    getId(), getStreamSetId(), isMultiResolution(),
                    &gb, fenceFd, /*noFreeBuffer*/true);

            if (res == OK) {
                // Attach this buffer to the bufferQueue: the buffer will be in dequeue state after
                // a successful return.
                *anb = gb.get();
                res = mConsumer->attachBuffer(*anb);
                gotBufferFromManager = true;
                ALOGV("Stream %d: Attached new buffer", getId());

                if (res != OK) {
                    if (shouldLogError(res, mState)) {
                        ALOGE("%s: Stream %d: Can't attach the output buffer to this surface:"
                              " %s (%d)", __FUNCTION__, mId, strerror(-res), res);
                    }
                    checkRetAndSetAbandonedLocked(res);
                    return res;
                }
            } else {
                ALOGE("%s: Stream %d: Can't get next output buffer from buffer manager:"
                      " %s (%d)", __FUNCTION__, mId, strerror(-res), res);
                return res;
            }
        } else if (res != OK) {
            if (shouldLogError(res, mState)) {
                ALOGE("%s: Stream %d: Can't dequeue next output buffer: %s (%d)",
                        __FUNCTION__, mId, strerror(-res), res);
            }
            checkRetAndSetAbandonedLocked(res);
            return res;
        }
    }

    if (res == OK) {
        checkRemovedBuffersLocked();
    }

    return res;
}

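// Flush buffers that the consumer has removed from its side and, if requested,
// report the removal count to the buffer manager so its accounting stays in sync.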
void Camera3OutputStream::checkRemovedBuffersLocked(bool notifyBufferManager) {
    std::vector<sp<GraphicBuffer>> removedBuffers;
    status_t res = mConsumer->getAndFlushRemovedBuffers(&removedBuffers);
    if (res == OK) {
        onBuffersRemovedLocked(removedBuffers);

        if (notifyBufferManager && mUseBufferManager && removedBuffers.size() > 0) {
            mBufferManager->onBuffersRemoved(getId(), getStreamSetId(), isMultiResolution(),
                    removedBuffers.size());
        }
    }
}

void Camera3OutputStream::checkRetAndSetAbandonedLocked(status_t res) {
    // Only transition to STATE_ABANDONED from STATE_CONFIGURED. (If it is
    // STATE_PREPARING, let prepareNextBuffer handle the error.)
    if ((res == NO_INIT || res == DEAD_OBJECT) && mState == STATE_CONFIGURED) {
        mState = STATE_ABANDONED;
    }
}

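// Suppress error logging for DEAD_OBJECT/NO_INIT on streams that are already
// abandoned, since those failures are expected once the consumer has gone away.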
bool Camera3OutputStream::shouldLogError(status_t res, StreamState state) {
    if (res == OK) {
        return false;
    }
    if ((res == DEAD_OBJECT || res == NO_INIT) && state == STATE_ABANDONED) {
        return false;
    }
    return true;
}

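// Called when a buffer cached on the camera service side (e.g. by the preview
// frame spacer) has been queued to the consumer, so the cached-buffer count can
// be decremented and any waiter signaled.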
void Camera3OutputStream::onCachedBufferQueued() {
    Mutex::Autolock l(mLock);
    mCachedOutputBufferCount--;
    // Signal whoever is waiting for the buffer to be returned to the buffer
    // queue.
    mOutputBufferReturnedSignal.signal();
}

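// Tear down the connection to the consumer surface and, if needed, unregister
// from the buffer manager; the stream returns to the CONSTRUCTED (or IN_CONFIG)
// state so it can be reconfigured later.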
status_t Camera3OutputStream::disconnectLocked() {
    status_t res;

    if ((res = Camera3IOStreamBase::disconnectLocked()) != OK) {
        return res;
    }

    // Stream configuration was not finished (can only be in STATE_IN_CONFIG or STATE_CONSTRUCTED
    // state); no need to change the stream state, return OK.
    if (mConsumer == nullptr) {
        return OK;
    }

    returnPrefetchedBuffersLocked();

    if (mPreviewFrameSpacer != nullptr) {
        mPreviewFrameSpacer->requestExit();
    }

    ALOGV("%s: disconnecting stream %d from native window", __FUNCTION__, getId());

    res = native_window_api_disconnect(mConsumer.get(),
                                       NATIVE_WINDOW_API_CAMERA);
    /**
     * This is not an error. If the client's calling process dies, the window will
     * also die and all calls to it will return DEAD_OBJECT, thus it's already
     * "disconnected"
     */
    if (res == DEAD_OBJECT) {
        ALOGW("%s: While disconnecting stream %d from native window, the"
              " native window died from under us", __FUNCTION__, mId);
    }
    else if (res != OK) {
        ALOGE("%s: Unable to disconnect stream %d from native window "
              "(error %d %s)",
              __FUNCTION__, mId, res, strerror(-res));
        mState = STATE_ERROR;
        return res;
    }

    // Since the device is already idle, there are no getBuffer calls to the buffer manager,
    // so unregistering the stream at this point should be safe.
    if (mUseBufferManager) {
        res = mBufferManager->unregisterStream(getId(), getStreamSetId(), isMultiResolution());
        if (res != OK) {
            ALOGE("%s: Unable to unregister stream %d from buffer manager "
                  "(error %d %s)", __FUNCTION__, mId, res, strerror(-res));
            mState = STATE_ERROR;
            return res;
        }
        // Note that, to make the prepare/teardown case work, we must not call
        // mBufferManager.clear(), as the stream is still in a usable state after this call.
        mUseBufferManager = false;
    }

    mState = (mState == STATE_IN_RECONFIG) ? STATE_IN_CONFIG
                                           : STATE_CONSTRUCTED;

    mDequeueBufferLatency.log("Stream %d dequeueBuffer latency histogram", mId);
    mDequeueBufferLatency.reset();
    return OK;
}

status_t Camera3OutputStream::getEndpointUsage(uint64_t *usage) {

    status_t res;

    if (mConsumer == nullptr) {
        // mConsumerUsage was sanitized before the Camera3OutputStream was constructed.
        *usage = mConsumerUsage;
        return OK;
    }

    res = getEndpointUsageForSurface(usage, mConsumer);

    return res;
}

void Camera3OutputStream::applyZSLUsageQuirk(int format, uint64_t *consumerUsage /*inout*/) {
    if (consumerUsage == nullptr) {
        return;
    }

    // If an opaque output stream's endpoint is ImageReader, add
    // GRALLOC_USAGE_HW_CAMERA_ZSL to the usage so HAL knows it will be used
    // for the ZSL use case.
    // Assume it's for ImageReader if the consumer usage doesn't have any of these bits set:
    //     1. GRALLOC_USAGE_HW_TEXTURE
    //     2. GRALLOC_USAGE_HW_RENDER
    //     3. GRALLOC_USAGE_HW_COMPOSER
    //     4. GRALLOC_USAGE_HW_VIDEO_ENCODER
    if (format == HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED &&
            (*consumerUsage & (GRALLOC_USAGE_HW_TEXTURE | GRALLOC_USAGE_HW_RENDER |
            GRALLOC_USAGE_HW_COMPOSER | GRALLOC_USAGE_HW_VIDEO_ENCODER)) == 0) {
        *consumerUsage |= GRALLOC_USAGE_HW_CAMERA_ZSL;
    }
}

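// Query the consumer usage flags for a specific surface, applying the ZSL usage
// quirk; results for the stream's own consumer are cached (and reused when the
// surface_ipc flag is enabled) to avoid repeated queries to the consumer.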
Emilian Peev35ae8262018-11-08 13:11:32 +00001031status_t Camera3OutputStream::getEndpointUsageForSurface(uint64_t *usage,
Emilian Peev3b93acb2024-03-11 21:09:48 +00001032 const sp<Surface>& surface) {
Emilian Peeva495a7b2024-04-02 22:52:42 +00001033 bool internalConsumer = (mConsumer.get() != nullptr) && (mConsumer == surface);
1034 if (mConsumerUsageCachedValue.has_value() && flags::surface_ipc() && internalConsumer) {
Emilian Peev3b93acb2024-03-11 21:09:48 +00001035 *usage = mConsumerUsageCachedValue.value();
1036 return OK;
1037 }
Emilian Peev35ae8262018-11-08 13:11:32 +00001038
Emilian Peev3b93acb2024-03-11 21:09:48 +00001039 status_t res;
1040
1041 res = native_window_get_consumer_usage(static_cast<ANativeWindow*>(surface.get()), usage);
1042 applyZSLUsageQuirk(camera_stream::format, usage);
Emilian Peeva495a7b2024-04-02 22:52:42 +00001043 if (internalConsumer) {
1044 mConsumerUsageCachedValue = *usage;
1045 }
Eino-Ville Talvalab2f5b192013-07-30 14:36:03 -07001046 return res;
1047}
1048
Emilian Peev3b93acb2024-03-11 21:09:48 +00001049bool Camera3OutputStream::isVideoStream() {
Emilian Peev050f5dc2017-05-18 14:43:56 +01001050 uint64_t usage = 0;
Chien-Yu Chen85a64552015-08-28 15:46:12 -07001051 status_t res = getEndpointUsage(&usage);
1052 if (res != OK) {
1053 ALOGE("%s: getting end point usage failed: %s (%d).", __FUNCTION__, strerror(-res), res);
1054 return false;
1055 }
1056
1057 return (usage & GRALLOC_USAGE_HW_VIDEO_ENCODER) != 0;
1058}
1059
Zhijun He125684a2015-12-26 15:07:30 -08001060status_t Camera3OutputStream::setBufferManager(sp<Camera3BufferManager> bufferManager) {
1061 Mutex::Autolock l(mLock);
1062 if (mState != STATE_CONSTRUCTED) {
Zhijun He5d677d12016-05-29 16:52:39 -07001063        ALOGE("%s: this method can only be called when the stream is in CONSTRUCTED state.",
Zhijun He125684a2015-12-26 15:07:30 -08001064 __FUNCTION__);
1065 return INVALID_OPERATION;
1066 }
1067 mBufferManager = bufferManager;
1068
1069 return OK;
1070}
1071
Emilian Peev40ead602017-09-26 15:46:36 +01001072status_t Camera3OutputStream::updateStream(const std::vector<sp<Surface>> &/*outputSurfaces*/,
1073 const std::vector<OutputStreamInfo> &/*outputInfo*/,
1074 const std::vector<size_t> &/*removedSurfaceIds*/,
1075            KeyedVector<sp<Surface>, size_t> * /*outputMap*/) {
1076 ALOGE("%s: this method is not supported!", __FUNCTION__);
1077 return INVALID_OPERATION;
1078}
1079
Shuzhen Wang0160ddd2019-08-15 09:11:56 -07001080void Camera3OutputStream::BufferProducerListener::onBufferReleased() {
Zhijun He125684a2015-12-26 15:07:30 -08001081 sp<Camera3OutputStream> stream = mParent.promote();
1082 if (stream == nullptr) {
1083 ALOGV("%s: Parent camera3 output stream was destroyed", __FUNCTION__);
1084 return;
1085 }
1086
1087 Mutex::Autolock l(stream->mLock);
1088 if (!(stream->mUseBufferManager)) {
1089 return;
1090 }
1091
Eino-Ville Talvala77c1a352016-06-13 12:32:43 -07001092 ALOGV("Stream %d: Buffer released", stream->getId());
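    // Ask the buffer manager whether this stream should also detach and free one buffer
    // now, e.g. so that another stream sharing the same buffer set can allocate it.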
Yin-Chia Yeh89954d92017-05-21 17:28:53 -07001093 bool shouldFreeBuffer = false;
Eino-Ville Talvala77c1a352016-06-13 12:32:43 -07001094 status_t res = stream->mBufferManager->onBufferReleased(
Shuzhen Wang83bff122020-11-20 15:51:39 -08001095 stream->getId(), stream->getStreamSetId(), stream->isMultiResolution(),
1096 &shouldFreeBuffer);
Eino-Ville Talvala77c1a352016-06-13 12:32:43 -07001097 if (res != OK) {
1098 ALOGE("%s: signaling buffer release to buffer manager failed: %s (%d).", __FUNCTION__,
1099 strerror(-res), res);
1100 stream->mState = STATE_ERROR;
1101 }
Yin-Chia Yeh89954d92017-05-21 17:28:53 -07001102
1103 if (shouldFreeBuffer) {
1104 sp<GraphicBuffer> buffer;
1105 // Detach and free a buffer (when buffer goes out of scope)
1106 stream->detachBufferLocked(&buffer, /*fenceFd*/ nullptr);
1107 if (buffer.get() != nullptr) {
1108 stream->mBufferManager->notifyBufferRemoved(
Shuzhen Wang83bff122020-11-20 15:51:39 -08001109 stream->getId(), stream->getStreamSetId(), stream->isMultiResolution());
Yin-Chia Yeh89954d92017-05-21 17:28:53 -07001110 }
1111 }
Eino-Ville Talvala77c1a352016-06-13 12:32:43 -07001112}
1113
Shuzhen Wang0160ddd2019-08-15 09:11:56 -07001114void Camera3OutputStream::BufferProducerListener::onBuffersDiscarded(
1115 const std::vector<sp<GraphicBuffer>>& buffers) {
1116 sp<Camera3OutputStream> stream = mParent.promote();
1117 if (stream == nullptr) {
1118 ALOGV("%s: Parent camera3 output stream was destroyed", __FUNCTION__);
1119 return;
1120 }
1121
1122 if (buffers.size() > 0) {
1123 Mutex::Autolock l(stream->mLock);
1124 stream->onBuffersRemovedLocked(buffers);
1125 if (stream->mUseBufferManager) {
1126 stream->mBufferManager->onBuffersRemoved(stream->getId(),
Shuzhen Wang83bff122020-11-20 15:51:39 -08001127 stream->getStreamSetId(), stream->isMultiResolution(), buffers.size());
Shuzhen Wang0160ddd2019-08-15 09:11:56 -07001128 }
1129 ALOGV("Stream %d: %zu Buffers discarded.", stream->getId(), buffers.size());
1130 }
1131}
1132
Yin-Chia Yeh017d49c2017-03-31 19:11:00 -07001133void Camera3OutputStream::onBuffersRemovedLocked(
1134 const std::vector<sp<GraphicBuffer>>& removedBuffers) {
Yin-Chia Yehdb1e8642017-07-14 15:19:30 -07001135 sp<Camera3StreamBufferFreedListener> callback = mBufferFreedListener.promote();
Yin-Chia Yeh017d49c2017-03-31 19:11:00 -07001136 if (callback != nullptr) {
Chih-Hung Hsieh48fc6192017-08-04 14:37:31 -07001137 for (const auto& gb : removedBuffers) {
Yin-Chia Yeh017d49c2017-03-31 19:11:00 -07001138 callback->onBufferFreed(mId, gb->handle);
1139 }
1140 }
1141}
1142
Eino-Ville Talvala77c1a352016-06-13 12:32:43 -07001143status_t Camera3OutputStream::detachBuffer(sp<GraphicBuffer>* buffer, int* fenceFd) {
1144 Mutex::Autolock l(mLock);
Yin-Chia Yeh89954d92017-05-21 17:28:53 -07001145 return detachBufferLocked(buffer, fenceFd);
1146}
Eino-Ville Talvala77c1a352016-06-13 12:32:43 -07001147
Yin-Chia Yeh89954d92017-05-21 17:28:53 -07001148status_t Camera3OutputStream::detachBufferLocked(sp<GraphicBuffer>* buffer, int* fenceFd) {
Eino-Ville Talvala77c1a352016-06-13 12:32:43 -07001149 ALOGV("Stream %d: detachBuffer", getId());
1150 if (buffer == nullptr) {
1151 return BAD_VALUE;
1152 }
1153
Zhijun He125684a2015-12-26 15:07:30 -08001154 sp<Fence> fence;
Eino-Ville Talvala77c1a352016-06-13 12:32:43 -07001155 status_t res = mConsumer->detachNextBuffer(buffer, &fence);
Zhijun He125684a2015-12-26 15:07:30 -08001156 if (res == NO_MEMORY) {
1157        // This may rarely happen, and indicates that the released buffer was already freed by
1158        // another call (e.g., attachBuffer, dequeueBuffer, etc.) before reaching here. We should
1159        // notify the buffer manager that this buffer has been freed. It's not fatal, but should be
1160        // avoided, so log a warning.
Eino-Ville Talvala77c1a352016-06-13 12:32:43 -07001161 *buffer = 0;
Zhijun He125684a2015-12-26 15:07:30 -08001162 ALOGW("%s: the released buffer has already been freed by the buffer queue!", __FUNCTION__);
1163 } else if (res != OK) {
Eino-Ville Talvalaff51b472016-06-28 15:26:19 -07001164 // Treat other errors as abandonment
Yin-Chia Yeha1b56c82019-03-27 15:50:39 -07001165 if (shouldLogError(res, mState)) {
1166 ALOGE("%s: detach next buffer failed: %s (%d).", __FUNCTION__, strerror(-res), res);
1167 }
Eino-Ville Talvalaff51b472016-06-28 15:26:19 -07001168 mState = STATE_ABANDONED;
Eino-Ville Talvala77c1a352016-06-13 12:32:43 -07001169 return res;
Zhijun He125684a2015-12-26 15:07:30 -08001170 }
1171
Eino-Ville Talvala77c1a352016-06-13 12:32:43 -07001172 if (fenceFd != nullptr) {
1173        if (fence != 0 && fence->isValid()) {
1174 *fenceFd = fence->dup();
1175 } else {
1176 *fenceFd = -1;
1177 }
Zhijun He125684a2015-12-26 15:07:30 -08001178 }
Eino-Ville Talvala77c1a352016-06-13 12:32:43 -07001179
Yin-Chia Yehbf1b8b92019-03-06 14:56:08 -08001180    // Here we assume detachBuffer is called by the buffer manager, so it doesn't need to be notified
1181 checkRemovedBuffersLocked(/*notifyBufferManager*/false);
Yin-Chia Yeh017d49c2017-03-31 19:11:00 -07001182 return res;
Zhijun He125684a2015-12-26 15:07:30 -08001183}
Shuzhen Wang13a69632016-01-26 09:51:07 -08001184
Chien-Yu Chena936ac22017-10-23 15:59:49 -07001185status_t Camera3OutputStream::dropBuffers(bool dropping) {
1186 Mutex::Autolock l(mLock);
1187 mDropBuffers = dropping;
1188 return OK;
1189}
1190
Austin Borger1c1bee02023-06-01 16:51:35 -07001191const std::string& Camera3OutputStream::getPhysicalCameraId() const {
Shuzhen Wang5c22c152017-12-31 17:12:25 -08001192 Mutex::Autolock l(mLock);
1193 return physicalCameraId();
1194}
1195
Shuzhen Wangbee0f0a2017-01-24 14:51:37 -08001196status_t Camera3OutputStream::notifyBufferReleased(ANativeWindowBuffer* /*anwBuffer*/) {
Shuzhen Wang0129d522016-10-30 22:43:41 -07001197 return OK;
1198}
1199
1200bool Camera3OutputStream::isConsumerConfigurationDeferred(size_t surface_id) const {
Zhijun He5d677d12016-05-29 16:52:39 -07001201 Mutex::Autolock l(mLock);
Shuzhen Wang0129d522016-10-30 22:43:41 -07001202
1203 if (surface_id != 0) {
Shuzhen Wang758c2152017-01-10 18:26:18 -08001204 ALOGE("%s: surface_id %zu for Camera3OutputStream should be 0!", __FUNCTION__, surface_id);
Shuzhen Wang0129d522016-10-30 22:43:41 -07001205 }
Zhijun He5d677d12016-05-29 16:52:39 -07001206 return mConsumer == nullptr;
1207}
1208
Shuzhen Wang758c2152017-01-10 18:26:18 -08001209status_t Camera3OutputStream::setConsumers(const std::vector<sp<Surface>>& consumers) {
Shuzhen Wangbee0f0a2017-01-24 14:51:37 -08001210 Mutex::Autolock l(mLock);
Shuzhen Wang758c2152017-01-10 18:26:18 -08001211 if (consumers.size() != 1) {
1212 ALOGE("%s: it's illegal to set %zu consumer surfaces!",
1213 __FUNCTION__, consumers.size());
1214 return INVALID_OPERATION;
1215 }
1216 if (consumers[0] == nullptr) {
1217 ALOGE("%s: it's illegal to set null consumer surface!", __FUNCTION__);
Zhijun He5d677d12016-05-29 16:52:39 -07001218 return INVALID_OPERATION;
1219 }
1220
1221 if (mConsumer != nullptr) {
1222 ALOGE("%s: consumer surface was already set!", __FUNCTION__);
1223 return INVALID_OPERATION;
1224 }
1225
Shuzhen Wang758c2152017-01-10 18:26:18 -08001226 mConsumer = consumers[0];
Zhijun He5d677d12016-05-29 16:52:39 -07001227 return OK;
1228}
1229
Emilian Peev3b93acb2024-03-11 21:09:48 +00001230bool Camera3OutputStream::isConsumedByHWComposer() {
Emilian Peev050f5dc2017-05-18 14:43:56 +01001231 uint64_t usage = 0;
Shuzhen Wang13a69632016-01-26 09:51:07 -08001232 status_t res = getEndpointUsage(&usage);
1233 if (res != OK) {
1234 ALOGE("%s: getting end point usage failed: %s (%d).", __FUNCTION__, strerror(-res), res);
1235 return false;
1236 }
1237
1238 return (usage & GRALLOC_USAGE_HW_COMPOSER) != 0;
1239}
1240
Emilian Peev3b93acb2024-03-11 21:09:48 +00001241bool Camera3OutputStream::isConsumedByHWTexture() {
Emilian Peev050f5dc2017-05-18 14:43:56 +01001242 uint64_t usage = 0;
Zhijun Hef0645c12016-08-02 00:58:11 -07001243 status_t res = getEndpointUsage(&usage);
1244 if (res != OK) {
1245 ALOGE("%s: getting end point usage failed: %s (%d).", __FUNCTION__, strerror(-res), res);
1246 return false;
1247 }
1248
1249 return (usage & GRALLOC_USAGE_HW_TEXTURE) != 0;
1250}
1251
Emilian Peev3b93acb2024-03-11 21:09:48 +00001252bool Camera3OutputStream::isConsumedByCPU() {
Shuzhen Wangfe8a2a32022-05-10 18:18:54 -07001253 uint64_t usage = 0;
1254 status_t res = getEndpointUsage(&usage);
1255 if (res != OK) {
1256 ALOGE("%s: getting end point usage failed: %s (%d).", __FUNCTION__, strerror(-res), res);
1257 return false;
1258 }
1259
1260 return (usage & GRALLOC_USAGE_SW_READ_MASK) != 0;
1261}
1262
Shuzhen Wangabbcb6b2020-12-09 22:32:44 -08001263void Camera3OutputStream::dumpImageToDisk(nsecs_t timestamp,
1264 ANativeWindowBuffer* anwBuffer, int fence) {
1265    // Derive the output file name
1266 std::string fileExtension = "jpg";
1267 char imageFileName[64];
1268 time_t now = time(0);
1269 tm *localTime = localtime(&now);
1270 snprintf(imageFileName, sizeof(imageFileName), "IMG_%4d%02d%02d_%02d%02d%02d_%" PRId64 ".%s",
Shuzhen Wang6a8237f2021-07-13 14:42:57 -07001271 1900 + localTime->tm_year, localTime->tm_mon + 1, localTime->tm_mday,
Shuzhen Wangabbcb6b2020-12-09 22:32:44 -08001272 localTime->tm_hour, localTime->tm_min, localTime->tm_sec,
1273 timestamp, fileExtension.c_str());
1274
1275 // Lock the image for CPU read
1276 sp<GraphicBuffer> graphicBuffer = GraphicBuffer::from(anwBuffer);
1277 void* mapped = nullptr;
1278 base::unique_fd fenceFd(dup(fence));
1279 status_t res = graphicBuffer->lockAsync(GraphicBuffer::USAGE_SW_READ_OFTEN, &mapped,
Emilian Peev293bd972022-08-05 17:28:06 -07001280 fenceFd.release());
Shuzhen Wangabbcb6b2020-12-09 22:32:44 -08001281 if (res != OK) {
1282 ALOGE("%s: Failed to lock the buffer: %s (%d)", __FUNCTION__, strerror(-res), res);
1283 return;
1284 }
1285
1286 // Figure out actual file size
1287 auto actualJpegSize = android::camera2::JpegProcessor::findJpegSize((uint8_t*)mapped, mMaxSize);
1288 if (actualJpegSize == 0) {
1289 actualJpegSize = mMaxSize;
1290 }
1291
1292 // Output image data to file
1293 std::string filePath = "/data/misc/cameraserver/";
1294 filePath += imageFileName;
Austin Borger1c1bee02023-06-01 16:51:35 -07001295 std::ofstream imageFile(filePath, std::ofstream::binary);
Shuzhen Wangabbcb6b2020-12-09 22:32:44 -08001296 if (!imageFile.is_open()) {
1297 ALOGE("%s: Unable to create file %s", __FUNCTION__, filePath.c_str());
1298 graphicBuffer->unlock();
1299 return;
1300 }
1301 imageFile.write((const char*)mapped, actualJpegSize);
1302
1303 graphicBuffer->unlock();
1304}
1305
Yin-Chia Yeh14ef48d2020-02-10 15:06:37 -08001306status_t Camera3OutputStream::setBatchSize(size_t batchSize) {
1307 Mutex::Autolock l(mLock);
Yin-Chia Yeh14ef48d2020-02-10 15:06:37 -08001308 if (batchSize == 0) {
1309 ALOGE("%s: invalid batch size 0", __FUNCTION__);
1310 return BAD_VALUE;
1311 }
1312
1313 if (mUseBufferManager) {
1314 ALOGE("%s: batch operation is not supported with buffer manager", __FUNCTION__);
1315 return INVALID_OPERATION;
1316 }
1317
1318 if (!isVideoStream()) {
1319 ALOGE("%s: batch operation is not supported with non-video stream", __FUNCTION__);
1320 return INVALID_OPERATION;
1321 }
1322
Shuzhen Wangc7629462021-07-12 15:02:58 -07001323 if (camera_stream::max_buffers < batchSize) {
1324 ALOGW("%s: batch size is capped by max_buffers %d", __FUNCTION__,
1325 camera_stream::max_buffers);
1326 batchSize = camera_stream::max_buffers;
Yin-Chia Yeh14ef48d2020-02-10 15:06:37 -08001327 }
Shuzhen Wangc7629462021-07-12 15:02:58 -07001328
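    // The batch size may only be changed once from its default of 1; if it was already
    // updated, compare_exchange_strong fails and the new value is rejected.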
1329 size_t defaultBatchSize = 1;
1330 if (!mBatchSize.compare_exchange_strong(defaultBatchSize, batchSize)) {
1331        ALOGE("%s: changing the batch size from %zu to %zu dynamically is not supported",
1332 __FUNCTION__, defaultBatchSize, batchSize);
1333 return INVALID_OPERATION;
1334 }
1335
Yin-Chia Yeh14ef48d2020-02-10 15:06:37 -08001336 return OK;
1337}
1338
Shuzhen Wang696e4da2022-09-08 14:31:13 -07001339void Camera3OutputStream::onMinDurationChanged(nsecs_t duration, bool fixedFps) {
Shuzhen Wang00abbeb2022-02-25 17:14:42 -08001340 Mutex::Autolock l(mLock);
1341 mMinExpectedDuration = duration;
Shuzhen Wang696e4da2022-09-08 14:31:13 -07001342 mFixedFps = fixedFps;
Shuzhen Wang00abbeb2022-02-25 17:14:42 -08001343}
1344
Shuzhen Wang16610a62022-12-15 22:38:07 -08001345void Camera3OutputStream::setStreamUseCase(int64_t streamUseCase) {
1346 Mutex::Autolock l(mLock);
1347 camera_stream::use_case = streamUseCase;
1348}
1349
Yin-Chia Yeh14ef48d2020-02-10 15:06:37 -08001350void Camera3OutputStream::returnPrefetchedBuffersLocked() {
Shuzhen Wangc7629462021-07-12 15:02:58 -07001351 std::vector<Surface::BatchBuffer> batchedBuffers;
Yin-Chia Yeh14ef48d2020-02-10 15:06:37 -08001352
Shuzhen Wangc7629462021-07-12 15:02:58 -07001353 {
1354 std::lock_guard<std::mutex> batchLock(mBatchLock);
1355 if (mBatchedBuffers.size() != 0) {
1356 ALOGW("%s: %zu extra prefetched buffers detected. Returning",
1357 __FUNCTION__, mBatchedBuffers.size());
1358 batchedBuffers = std::move(mBatchedBuffers);
1359 }
1360 }
1361
1362 if (batchedBuffers.size() > 0) {
1363 mConsumer->cancelBuffers(batchedBuffers);
Yin-Chia Yeh14ef48d2020-02-10 15:06:37 -08001364 }
1365}
1366
Ravneet446b3bf2023-07-11 19:26:38 +00001367nsecs_t Camera3OutputStream::syncTimestampToDisplayLocked(nsecs_t t, sp<Fence> releaseFence) {
Shuzhen Wang35bd3552022-09-21 16:56:04 -07001368 nsecs_t currentTime = systemTime();
1369 if (!mFixedFps) {
1370 mLastCaptureTime = t;
1371 mLastPresentTime = currentTime;
1372 return t;
1373 }
1374
Shuzhen Wang00abbeb2022-02-25 17:14:42 -08001375 ParcelableVsyncEventData parcelableVsyncEventData;
1376 auto res = mDisplayEventReceiver.getLatestVsyncEventData(&parcelableVsyncEventData);
1377 if (res != OK) {
1378 ALOGE("%s: Stream %d: Error getting latest vsync event data: %s (%d)",
1379 __FUNCTION__, mId, strerror(-res), res);
Shuzhen Wang14c62b82022-04-11 09:37:05 -07001380 mLastCaptureTime = t;
Shuzhen Wang35bd3552022-09-21 16:56:04 -07001381 mLastPresentTime = currentTime;
Shuzhen Wang00abbeb2022-02-25 17:14:42 -08001382 return t;
1383 }
1384
1385 const VsyncEventData& vsyncEventData = parcelableVsyncEventData.vsync;
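    // Earliest acceptable presentation time: at least half a vsync after the previous
    // frame's presentation, so that two frames do not latch onto the same vsync.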
Shuzhen Wang696e4da2022-09-08 14:31:13 -07001386 nsecs_t minPresentT = mLastPresentTime + vsyncEventData.frameInterval / 2;
Shuzhen Wang00abbeb2022-02-25 17:14:42 -08001387
Shuzhen Wang696e4da2022-09-08 14:31:13 -07001388 // Find the best presentation time without worrying about previous frame's
1389 // presentation time if capture interval is more than kSpacingResetIntervalNs.
1390 //
1391 // When frame interval is more than 50 ms apart (3 vsyncs for 60hz refresh rate),
1392 // there is little risk in starting over and finding the earliest vsync to latch onto.
1393 // - Update captureToPresentTime offset to be used for later frames.
1394 // - Example use cases:
1395 // - when frame rate drops down to below 20 fps, or
1396 // - A new streaming session starts (stopPreview followed by
1397 // startPreview)
1398 //
Shuzhen Wang34a5e282022-06-17 14:48:35 -07001399 nsecs_t captureInterval = t - mLastCaptureTime;
Shuzhen Wang696e4da2022-09-08 14:31:13 -07001400 if (captureInterval > kSpacingResetIntervalNs) {
Rachel Lee86d90eb2023-04-25 14:37:34 -07001401 for (size_t i = 0; i < vsyncEventData.frameTimelinesLength; i++) {
Shuzhen Wang34a5e282022-06-17 14:48:35 -07001402 const auto& timeline = vsyncEventData.frameTimelines[i];
1403 if (timeline.deadlineTimestamp >= currentTime &&
1404 timeline.expectedPresentationTime > minPresentT) {
1405 nsecs_t presentT = vsyncEventData.frameTimelines[i].expectedPresentationTime;
1406 mCaptureToPresentOffset = presentT - t;
1407 mLastCaptureTime = t;
1408 mLastPresentTime = presentT;
1409
Shuzhen Wang0897d592023-04-07 12:48:05 -07001410 // If releaseFence is available, store the fence to check signal
1411 // time later.
1412 mRefVsyncData = vsyncEventData;
1413 mReferenceCaptureTime = t;
1414 mReferenceArrivalTime = currentTime;
Ravneet446b3bf2023-07-11 19:26:38 +00001415 if (releaseFence->isValid()) {
1416 mReferenceFrameFence = new Fence(releaseFence->dup());
Shuzhen Wang0897d592023-04-07 12:48:05 -07001417 } else {
1418 mFenceSignalOffset = 0;
1419 }
1420
Shuzhen Wang34a5e282022-06-17 14:48:35 -07001421 // Move the expected presentation time back by 1/3 of frame interval to
1422 // mitigate the time drift. Due to time drift, if we directly use the
1423                // expected presentation time, two consecutive expected presentation times often
1424                // fall into the same VSYNC interval.
1425 return presentT - vsyncEventData.frameInterval/3;
Shuzhen Wang00abbeb2022-02-25 17:14:42 -08001426 }
1427 }
1428 }
1429
Shuzhen Wang0897d592023-04-07 12:48:05 -07001430 // If there is a reference frame release fence, get the signal time and
1431 // update the captureToPresentOffset.
1432 if (mReferenceFrameFence != nullptr) {
1433 mFenceSignalOffset = 0;
1434 nsecs_t signalTime = mReferenceFrameFence->getSignalTime();
1435 // Now that the fence has signaled, recalculate the offsets based on
1436 // the timeline which was actually latched
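        // mFenceSignalOffset records how long after the reference frame's arrival its
        // release fence signaled; it is later added as extra headroom when checking
        // vsync deadline timestamps.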
1437 if (signalTime != INT64_MAX) {
1438 for (size_t i = 0; i < mRefVsyncData.frameTimelinesLength; i++) {
1439 const auto& timeline = mRefVsyncData.frameTimelines[i];
1440 if (timeline.deadlineTimestamp >= signalTime) {
1441 nsecs_t originalOffset = mCaptureToPresentOffset;
1442 mCaptureToPresentOffset = timeline.expectedPresentationTime
1443 - mReferenceCaptureTime;
1444 mLastPresentTime = timeline.expectedPresentationTime;
1445 mFenceSignalOffset = signalTime > mReferenceArrivalTime ?
1446 signalTime - mReferenceArrivalTime : 0;
1447
1448 ALOGV("%s: Last deadline %" PRId64 " signalTime %" PRId64
1449 " original offset %" PRId64 " new offset %" PRId64
1450 " fencesignal offset %" PRId64, __FUNCTION__,
1451 timeline.deadlineTimestamp, signalTime, originalOffset,
1452 mCaptureToPresentOffset, mFenceSignalOffset);
1453 break;
1454 }
1455 }
1456 mReferenceFrameFence.clear();
1457 }
1458 }
1459
Shuzhen Wang00abbeb2022-02-25 17:14:42 -08001460 nsecs_t idealPresentT = t + mCaptureToPresentOffset;
Shuzhen Wang14c62b82022-04-11 09:37:05 -07001461 nsecs_t expectedPresentT = mLastPresentTime;
Shuzhen Wang00abbeb2022-02-25 17:14:42 -08001462 nsecs_t minDiff = INT64_MAX;
Shuzhen Wang696e4da2022-09-08 14:31:13 -07001463
1464    // In the fixed FPS case, when frame durations are close to multiples of the display refresh
1465 // rate, derive minimum intervals between presentation times based on minimal
Shuzhen Wang661b34f2022-05-18 22:00:19 -07001466 // expected duration. The minimum number of Vsyncs is:
1467 // - 0 if minFrameDuration in (0, 1.5] * vSyncInterval,
1468 // - 1 if minFrameDuration in (1.5, 2.5] * vSyncInterval,
1469 // - and so on.
Shuzhen Wang696e4da2022-09-08 14:31:13 -07001470 //
1471 // This spaces out the displaying of the frames so that the frame
1472 // presentations are roughly in sync with frame captures.
Shuzhen Wang661b34f2022-05-18 22:00:19 -07001473 int minVsyncs = (mMinExpectedDuration - vsyncEventData.frameInterval / 2) /
1474 vsyncEventData.frameInterval;
1475 if (minVsyncs < 0) minVsyncs = 0;
Shuzhen Wanged08fbe2022-06-21 01:00:50 -07001476 nsecs_t minInterval = minVsyncs * vsyncEventData.frameInterval;
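    // Worked example (a sketch assuming a steady 30 fps capture on a 60 Hz display):
    // mMinExpectedDuration ~= 33.3 ms and frameInterval ~= 16.7 ms, so
    // minVsyncs = (33.3 - 8.3) / 16.7 = 1 and minInterval ~= 16.7 ms, i.e. consecutive
    // frames are presented at least one vsync apart.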
Shuzhen Wang696e4da2022-09-08 14:31:13 -07001477
1478 // In fixed FPS case, if the frame duration deviates from multiples of
1479 // display refresh rate, find the closest Vsync without requiring a minimum
1480 // number of Vsync.
1481 //
1482 // Example: (24fps camera, 60hz refresh):
1483 // capture readout: | t1 | t1 | .. | t1 | .. | t1 | .. | t1 |
1484 // display VSYNC: | t2 | t2 | ... | t2 | ... | t2 | ... | t2 |
1485 // | : 1 frame
1486 // t1 : 41.67ms
1487 // t2 : 16.67ms
1488 // t1/t2 = 2.5
1489 //
1490 // 24fps is a commonly used video frame rate. Because the capture
1491    // interval is 2.5 times the display refresh interval, the minVsyncs
1492 // calculation will directly fall at the boundary condition. In this case,
1493 // we should fall back to the basic logic of finding closest vsync
1494 // timestamp without worrying about minVsyncs.
1495 float captureToVsyncIntervalRatio = 1.0f * mMinExpectedDuration / vsyncEventData.frameInterval;
1496 float ratioDeviation = std::fabs(
1497 captureToVsyncIntervalRatio - std::roundf(captureToVsyncIntervalRatio));
1498 bool captureDeviateFromVsync = ratioDeviation >= kMaxIntervalRatioDeviation;
1499 bool cameraDisplayInSync = (mFixedFps && !captureDeviateFromVsync);
1500
Shuzhen Wanged08fbe2022-06-21 01:00:50 -07001501 // Find best timestamp in the vsync timelines:
Shuzhen Wang696e4da2022-09-08 14:31:13 -07001502 // - Only use at most kMaxTimelines timelines to avoid long latency
Shuzhen Wang0897d592023-04-07 12:48:05 -07001503 // - Add an extra timeline if display fence is used
Shuzhen Wang696e4da2022-09-08 14:31:13 -07001504 // - closest to the ideal presentation time,
Shuzhen Wang00abbeb2022-02-25 17:14:42 -08001505 // - deadline timestamp is greater than the current time, and
Shuzhen Wang696e4da2022-09-08 14:31:13 -07001506 // - For fixed FPS, if the capture interval doesn't deviate too much from refresh interval,
1507 // the candidate presentation time is at least minInterval in the future compared to last
1508 // presentation time.
1509    //   - For variable FPS, or if the capture interval deviates from the refresh
1510    //     interval by more than 5%, find a presentation time closest to the
1511 // (lastPresentationTime + captureToPresentOffset) instead.
Shuzhen Wang0897d592023-04-07 12:48:05 -07001512 int fenceAdjustment = (mFenceSignalOffset > 0) ? 1 : 0;
1513 int maxTimelines = std::min(kMaxTimelines + fenceAdjustment,
1514 (int)vsyncEventData.frameTimelinesLength);
Shuzhen Wanged08fbe2022-06-21 01:00:50 -07001515 float biasForShortDelay = 1.0f;
1516 for (int i = 0; i < maxTimelines; i ++) {
1517 const auto& vsyncTime = vsyncEventData.frameTimelines[i];
1518 if (minVsyncs > 0) {
1519 // Bias towards using smaller timeline index:
1520 // i = 0: bias = 1
1521 // i = maxTimelines-1: bias = -1
1522 biasForShortDelay = 1.0 - 2.0 * i / (maxTimelines - 1);
1523 }
Shuzhen Wang00abbeb2022-02-25 17:14:42 -08001524 if (std::abs(vsyncTime.expectedPresentationTime - idealPresentT) < minDiff &&
Shuzhen Wang0897d592023-04-07 12:48:05 -07001525 vsyncTime.deadlineTimestamp >= currentTime + mFenceSignalOffset &&
Shuzhen Wang696e4da2022-09-08 14:31:13 -07001526 ((!cameraDisplayInSync && vsyncTime.expectedPresentationTime > minPresentT) ||
1527 (cameraDisplayInSync && vsyncTime.expectedPresentationTime >
Kwangkyu Park1c0042b2022-12-20 00:03:17 +09001528 mLastPresentTime + minInterval +
1529 static_cast<nsecs_t>(biasForShortDelay * kTimelineThresholdNs)))) {
Shuzhen Wang00abbeb2022-02-25 17:14:42 -08001530 expectedPresentT = vsyncTime.expectedPresentationTime;
1531 minDiff = std::abs(vsyncTime.expectedPresentationTime - idealPresentT);
1532 }
1533 }
Shuzhen Wang696e4da2022-09-08 14:31:13 -07001534
Shuzhen Wang35bd3552022-09-21 16:56:04 -07001535 if (expectedPresentT == mLastPresentTime && expectedPresentT <
1536 vsyncEventData.frameTimelines[maxTimelines-1].expectedPresentationTime) {
Shuzhen Wang696e4da2022-09-08 14:31:13 -07001537 // Couldn't find a reasonable presentation time. Using last frame's
1538 // presentation time would cause a frame drop. The best option now
1539 // is to use the next VSync as long as the last presentation time
1540        // doesn't already have the maximum latency, in which case dropping the
1541        // buffer is preferable to increasing latency.
1542 //
1543 // Example: (60fps camera, 59.9hz refresh):
1544 // capture readout: | t1 | t1 | .. | t1 | .. | t1 | .. | t1 |
1545 // \ \ \ \ \ \ \ \ \
1546 // queue to BQ: | | | | | | | | |
1547 // \ \ \ \ \ \ \ \ \
1548 // display VSYNC: | t2 | t2 | ... | t2 | ... | t2 | ... | t2 |
1549 //
1550 // |: 1 frame
1551 // t1 : 16.67ms
1552 // t2 : 16.69ms
1553 //
1554 // It takes 833 frames for capture readout count and display VSYNC count to be off
1555 // by 1.
1556 // - At frames [0, 832], presentationTime is set to timeline[0]
1557 // - At frames [833, 833*2-1], presentationTime is set to timeline[1]
1558 // - At frames [833*2, 833*3-1] presentationTime is set to timeline[2]
1559 // - At frame 833*3, no presentation time is found because we only
1560 // search for timeline[0..2].
1561        //   - Dropping one buffer is better than further extending the presentation
1562        //     time.
1563 //
1564 // However, if frame 833*2 arrives 16.67ms early (right after frame
1565 // 833*2-1), no presentation time can be found because
1566 // getLatestVsyncEventData is called early. In that case, it's better to
1567        // set the presentation time by offsetting the last presentation time.
1568 expectedPresentT += vsyncEventData.frameInterval;
1569 }
1570
Shuzhen Wang14c62b82022-04-11 09:37:05 -07001571 mLastCaptureTime = t;
1572 mLastPresentTime = expectedPresentT;
1573
1574 // Move the expected presentation time back by 1/3 of frame interval to
1575 // mitigate the time drift. Due to time drift, if we directly use the
1576    // expected presentation time, two consecutive expected presentation times often
1577    // fall into the same VSYNC interval.
1578 return expectedPresentT - vsyncEventData.frameInterval/3;
Shuzhen Wange4adddb2021-09-21 15:24:44 -07001579}
1580
Shuzhen Wangba92d772022-04-11 11:47:24 -07001581bool Camera3OutputStream::shouldLogError(status_t res) {
1582 Mutex::Autolock l(mLock);
1583 return shouldLogError(res, mState);
1584}
1585
Eino-Ville Talvalafd58f1a2013-03-06 16:20:06 -08001586}; // namespace camera3
1587
1588}; // namespace android