/*
 * Copyright (C) 2018 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#define LOG_TAG "Camera3-DepthCompositeStream"
#define ATRACE_TAG ATRACE_TAG_CAMERA
//#define LOG_NDEBUG 0

#include <aidl/android/hardware/camera/device/CameraBlob.h>
#include <aidl/android/hardware/camera/device/CameraBlobId.h>
#include <camera/StringUtils.h>

#include <com_android_graphics_libgui_flags.h>
#include <gui/Surface.h>
#include <utils/Log.h>
#include <utils/Trace.h>

#include "api1/client2/JpegProcessor.h"
#include "common/CameraProviderManager.h"
#include "utils/SessionConfigurationUtils.h"

#include "DepthCompositeStream.h"

namespace android {
namespace camera3 {

using aidl::android::hardware::camera::device::CameraBlob;
using aidl::android::hardware::camera::device::CameraBlobId;

DepthCompositeStream::DepthCompositeStream(sp<CameraDeviceBase> device,
        wp<hardware::camera2::ICameraDeviceCallbacks> cb) :
        CompositeStream(device, cb),
        mBlobStreamId(-1),
        mBlobSurfaceId(-1),
        mDepthStreamId(-1),
        mDepthSurfaceId(-1),
        mBlobWidth(0),
        mBlobHeight(0),
        mDepthBufferAcquired(false),
        mBlobBufferAcquired(false),
        mStreamSurfaceListener(new StreamSurfaceListener()),
        mMaxJpegBufferSize(-1),
        mUHRMaxJpegBufferSize(-1),
        mIsLogicalCamera(false) {
    if (device != nullptr) {
        CameraMetadata staticInfo = device->info();
        auto entry = staticInfo.find(ANDROID_JPEG_MAX_SIZE);
        if (entry.count > 0) {
            mMaxJpegBufferSize = entry.data.i32[0];
        } else {
            ALOGW("%s: Maximum jpeg size absent from camera characteristics", __FUNCTION__);
        }

        mUHRMaxJpegSize =
                SessionConfigurationUtils::getMaxJpegResolution(staticInfo,
                        /*ultraHighResolution*/true);
        mDefaultMaxJpegSize =
                SessionConfigurationUtils::getMaxJpegResolution(staticInfo,
                        /*isUltraHighResolution*/false);

        mUHRMaxJpegBufferSize =
            SessionConfigurationUtils::getUHRMaxJpegBufferSize(mUHRMaxJpegSize, mDefaultMaxJpegSize,
                    mMaxJpegBufferSize);

        entry = staticInfo.find(ANDROID_LENS_INTRINSIC_CALIBRATION);
        if (entry.count == 5) {
            mIntrinsicCalibration.reserve(5);
            mIntrinsicCalibration.insert(mIntrinsicCalibration.end(), entry.data.f,
                    entry.data.f + 5);
        } else {
            ALOGW("%s: Intrinsic calibration absent from camera characteristics!", __FUNCTION__);
        }

        entry = staticInfo.find(ANDROID_LENS_DISTORTION);
        if (entry.count == 5) {
            mLensDistortion.reserve(5);
            mLensDistortion.insert(mLensDistortion.end(), entry.data.f, entry.data.f + 5);
        } else {
            ALOGW("%s: Lens distortion absent from camera characteristics!", __FUNCTION__);
        }

        entry = staticInfo.find(ANDROID_REQUEST_AVAILABLE_CAPABILITIES);
        for (size_t i = 0; i < entry.count; ++i) {
            uint8_t capability = entry.data.u8[i];
            if (capability == ANDROID_REQUEST_AVAILABLE_CAPABILITIES_LOGICAL_MULTI_CAMERA) {
                mIsLogicalCamera = true;
                break;
            }
        }

        getSupportedDepthSizes(staticInfo, /*maxResolution*/false, &mSupportedDepthSizes);
        if (SessionConfigurationUtils::supportsUltraHighResolutionCapture(staticInfo)) {
            getSupportedDepthSizes(staticInfo, true, &mSupportedDepthSizesMaximumResolution);
        }
    }
}

DepthCompositeStream::~DepthCompositeStream() {
    mBlobConsumer.clear();
    mBlobSurface.clear();
    mBlobStreamId = -1;
    mBlobSurfaceId = -1;
    mDepthConsumer.clear();
    mDepthSurface.clear();
    mDepthConsumer = nullptr;
    mDepthSurface = nullptr;
}

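// Gathers all newly arrived jpeg/depth buffers, capture results and frame numbers into
// 'mPendingInputFrames', keyed by buffer timestamp. Buffers that belong to failed captures
// are unlocked right away and the matching pending entries are marked as errors.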
void DepthCompositeStream::compilePendingInputLocked() {
    CpuConsumer::LockedBuffer imgBuffer;

    while (!mInputJpegBuffers.empty() && !mBlobBufferAcquired) {
        auto it = mInputJpegBuffers.begin();
        auto res = mBlobConsumer->lockNextBuffer(&imgBuffer);
        if (res == NOT_ENOUGH_DATA) {
            // Can not lock any more buffers.
            break;
        } else if (res != OK) {
            ALOGE("%s: Error locking blob image buffer: %s (%d)", __FUNCTION__,
                    strerror(-res), res);
            mPendingInputFrames[*it].error = true;
            mInputJpegBuffers.erase(it);
            continue;
        }

        if (*it != imgBuffer.timestamp) {
            ALOGW("%s: Expecting jpeg buffer with time stamp: %" PRId64 " received buffer with "
                    "time stamp: %" PRId64, __FUNCTION__, *it, imgBuffer.timestamp);
        }

        if ((mPendingInputFrames.find(imgBuffer.timestamp) != mPendingInputFrames.end()) &&
                (mPendingInputFrames[imgBuffer.timestamp].error)) {
            mBlobConsumer->unlockBuffer(imgBuffer);
        } else {
            mPendingInputFrames[imgBuffer.timestamp].jpegBuffer = imgBuffer;
            mBlobBufferAcquired = true;
        }
        mInputJpegBuffers.erase(it);
    }

    while (!mInputDepthBuffers.empty() && !mDepthBufferAcquired) {
        auto it = mInputDepthBuffers.begin();
        auto res = mDepthConsumer->lockNextBuffer(&imgBuffer);
        if (res == NOT_ENOUGH_DATA) {
            // Can not lock any more buffers.
            break;
        } else if (res != OK) {
            ALOGE("%s: Error receiving depth image buffer: %s (%d)", __FUNCTION__,
                    strerror(-res), res);
            mPendingInputFrames[*it].error = true;
            mInputDepthBuffers.erase(it);
            continue;
        }

        if (*it != imgBuffer.timestamp) {
            ALOGW("%s: Expecting depth buffer with time stamp: %" PRId64 " received buffer with "
                    "time stamp: %" PRId64, __FUNCTION__, *it, imgBuffer.timestamp);
        }

        if ((mPendingInputFrames.find(imgBuffer.timestamp) != mPendingInputFrames.end()) &&
                (mPendingInputFrames[imgBuffer.timestamp].error)) {
            mDepthConsumer->unlockBuffer(imgBuffer);
        } else {
            mPendingInputFrames[imgBuffer.timestamp].depthBuffer = imgBuffer;
            mDepthBufferAcquired = true;
        }
        mInputDepthBuffers.erase(it);
    }

    while (!mCaptureResults.empty()) {
        auto it = mCaptureResults.begin();
        // Negative timestamp indicates that something went wrong during the capture result
        // collection process.
        if (it->first >= 0) {
            mPendingInputFrames[it->first].frameNumber = std::get<0>(it->second);
            mPendingInputFrames[it->first].result = std::get<1>(it->second);
        }
        mCaptureResults.erase(it);
    }

    while (!mFrameNumberMap.empty()) {
        auto it = mFrameNumberMap.begin();
        mPendingInputFrames[it->second].frameNumber = it->first;
        mFrameNumberMap.erase(it);
    }

    auto it = mErrorFrameNumbers.begin();
    while (it != mErrorFrameNumbers.end()) {
        bool frameFound = false;
        for (auto &inputFrame : mPendingInputFrames) {
            if (inputFrame.second.frameNumber == *it) {
                inputFrame.second.error = true;
                frameFound = true;
                break;
            }
        }

        if (frameFound) {
            it = mErrorFrameNumbers.erase(it);
        } else {
            ALOGW("%s: Not able to find failing input with frame number: %" PRId64, __FUNCTION__,
                    *it);
            it++;
        }
    }
}

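// Looks for the oldest pending frame that has both a jpeg and a depth buffer and no error.
// Returns true and updates 'currentTs' with its timestamp when such a frame exists.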
bool DepthCompositeStream::getNextReadyInputLocked(int64_t *currentTs /*inout*/) {
    if (currentTs == nullptr) {
        return false;
    }

    bool newInputAvailable = false;
    for (const auto& it : mPendingInputFrames) {
        if ((!it.second.error) && (it.second.depthBuffer.data != nullptr) &&
                (it.second.jpegBuffer.data != nullptr) && (it.first < *currentTs)) {
            *currentTs = it.first;
            newInputAvailable = true;
        }
    }

    return newInputAvailable;
}

int64_t DepthCompositeStream::getNextFailingInputLocked(int64_t *currentTs /*inout*/) {
    int64_t ret = -1;
    if (currentTs == nullptr) {
        return ret;
    }

    for (const auto& it : mPendingInputFrames) {
        if (it.second.error && !it.second.errorNotified && (it.first < *currentTs)) {
            *currentTs = it.first;
            ret = it.second.frameNumber;
        }
    }

    return ret;
}

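// Composes a single dynamic depth output buffer from a matched jpeg + depth buffer pair.
// The result is written into the client-facing blob output surface (width equals the buffer
// size, height is 1) and terminated with a CameraBlob transport header, matching the layout
// of regular jpeg blob streams.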
status_t DepthCompositeStream::processInputFrame(nsecs_t ts, const InputFrame &inputFrame) {
    status_t res;
    sp<ANativeWindow> outputANW = mOutputSurface;
    ANativeWindowBuffer *anb;
    int fenceFd;
    void *dstBuffer;

    auto jpegSize = android::camera2::JpegProcessor::findJpegSize(inputFrame.jpegBuffer.data,
            inputFrame.jpegBuffer.width);
    if (jpegSize == 0) {
        ALOGW("%s: Failed to find input jpeg size, default to using entire buffer!", __FUNCTION__);
        jpegSize = inputFrame.jpegBuffer.width;
    }

    size_t maxDepthJpegBufferSize = 0;
    if (mMaxJpegBufferSize > 0) {
        // If this is an ultra high resolution sensor and the input frame size
        // is > default res jpeg.
        if (mUHRMaxJpegSize.width != 0 &&
                inputFrame.jpegBuffer.width * inputFrame.jpegBuffer.height >
                mDefaultMaxJpegSize.width * mDefaultMaxJpegSize.height) {
            maxDepthJpegBufferSize = mUHRMaxJpegBufferSize;
        } else {
            maxDepthJpegBufferSize = mMaxJpegBufferSize;
        }
    } else {
        maxDepthJpegBufferSize = std::max<size_t> (jpegSize,
                inputFrame.depthBuffer.width * inputFrame.depthBuffer.height * 3 / 2);
    }

    uint8_t jpegQuality = 100;
    auto entry = inputFrame.result.find(ANDROID_JPEG_QUALITY);
    if (entry.count > 0) {
        jpegQuality = entry.data.u8[0];
    }

    // The final depth photo will consist of the main jpeg buffer, the depth map buffer (also in
    // jpeg format) and confidence map (jpeg as well). Assume worst case that all 3 jpegs need
    // max jpeg size.
    size_t finalJpegBufferSize = maxDepthJpegBufferSize * 3;

    if ((res = native_window_set_buffers_dimensions(mOutputSurface.get(), finalJpegBufferSize, 1))
            != OK) {
        ALOGE("%s: Unable to configure stream buffer dimensions"
                " %zux%u for stream %d", __FUNCTION__, finalJpegBufferSize, 1U, mBlobStreamId);
        return res;
    }

    res = outputANW->dequeueBuffer(mOutputSurface.get(), &anb, &fenceFd);
    if (res != OK) {
        ALOGE("%s: Error retrieving output buffer: %s (%d)", __FUNCTION__, strerror(-res),
                res);
        return res;
    }

    sp<GraphicBuffer> gb = GraphicBuffer::from(anb);
    GraphicBufferLocker gbLocker(gb);
    res = gbLocker.lockAsync(&dstBuffer, fenceFd);
    if (res != OK) {
        ALOGE("%s: Error trying to lock output buffer fence: %s (%d)", __FUNCTION__,
                strerror(-res), res);
        outputANW->cancelBuffer(mOutputSurface.get(), anb, /*fence*/ -1);
        return res;
    }

    if ((gb->getWidth() < finalJpegBufferSize) || (gb->getHeight() != 1)) {
        ALOGE("%s: Blob buffer size mismatch, expected %dx%d received %zux%u", __FUNCTION__,
                gb->getWidth(), gb->getHeight(), finalJpegBufferSize, 1U);
        outputANW->cancelBuffer(mOutputSurface.get(), anb, /*fence*/ -1);
        return BAD_VALUE;
    }

    DepthPhotoInputFrame depthPhoto;
    depthPhoto.mMainJpegBuffer = reinterpret_cast<const char*> (inputFrame.jpegBuffer.data);
    depthPhoto.mMainJpegWidth = mBlobWidth;
    depthPhoto.mMainJpegHeight = mBlobHeight;
    depthPhoto.mMainJpegSize = jpegSize;
    depthPhoto.mDepthMapBuffer = reinterpret_cast<uint16_t*> (inputFrame.depthBuffer.data);
    depthPhoto.mDepthMapWidth = inputFrame.depthBuffer.width;
    depthPhoto.mDepthMapHeight = inputFrame.depthBuffer.height;
    depthPhoto.mDepthMapStride = inputFrame.depthBuffer.stride;
    depthPhoto.mJpegQuality = jpegQuality;
    depthPhoto.mIsLogical = mIsLogicalCamera;
    depthPhoto.mMaxJpegSize = maxDepthJpegBufferSize;
    // The camera intrinsic calibration layout is as follows:
    // [focalLengthX, focalLengthY, opticalCenterX, opticalCenterY, skew]
    if (mIntrinsicCalibration.size() == 5) {
        memcpy(depthPhoto.mIntrinsicCalibration, mIntrinsicCalibration.data(),
                sizeof(depthPhoto.mIntrinsicCalibration));
        depthPhoto.mIsIntrinsicCalibrationValid = 1;
    } else {
        depthPhoto.mIsIntrinsicCalibrationValid = 0;
    }
    // The camera lens distortion contains the following lens correction coefficients.
    // [kappa_1, kappa_2, kappa_3 kappa_4, kappa_5]
    if (mLensDistortion.size() == 5) {
        memcpy(depthPhoto.mLensDistortion, mLensDistortion.data(),
                sizeof(depthPhoto.mLensDistortion));
        depthPhoto.mIsLensDistortionValid = 1;
    } else {
        depthPhoto.mIsLensDistortionValid = 0;
    }
    entry = inputFrame.result.find(ANDROID_JPEG_ORIENTATION);
    if (entry.count > 0) {
        // The camera jpeg orientation values must be within [0, 90, 180, 270].
        switch (entry.data.i32[0]) {
            case 0:
            case 90:
            case 180:
            case 270:
                depthPhoto.mOrientation = static_cast<DepthPhotoOrientation> (entry.data.i32[0]);
                break;
            default:
                ALOGE("%s: Unexpected jpeg orientation value: %d, default to 0 degrees",
                        __FUNCTION__, entry.data.i32[0]);
        }
    }

    size_t actualJpegSize = 0;
    res = processDepthPhotoFrame(depthPhoto, finalJpegBufferSize, dstBuffer, &actualJpegSize);
    if (res != 0) {
        ALOGE("%s: Depth photo processing failed: %s (%d)", __FUNCTION__, strerror(-res), res);
        outputANW->cancelBuffer(mOutputSurface.get(), anb, /*fence*/ -1);
        return res;
    }

    size_t finalJpegSize = actualJpegSize + sizeof(CameraBlob);
    if (finalJpegSize > finalJpegBufferSize) {
        ALOGE("%s: Final jpeg buffer not large enough for the jpeg blob header", __FUNCTION__);
        outputANW->cancelBuffer(mOutputSurface.get(), anb, /*fence*/ -1);
        return NO_MEMORY;
    }

    res = native_window_set_buffers_timestamp(mOutputSurface.get(), ts);
    if (res != OK) {
        ALOGE("%s: Stream %d: Error setting timestamp: %s (%d)", __FUNCTION__,
                getStreamId(), strerror(-res), res);
        return res;
    }

    ALOGV("%s: Final jpeg size: %zu", __func__, finalJpegSize);
    uint8_t* header = static_cast<uint8_t *> (dstBuffer) +
            (gb->getWidth() - sizeof(CameraBlob));
    CameraBlob *blob = reinterpret_cast<CameraBlob*> (header);
    blob->blobId = CameraBlobId::JPEG;
    blob->blobSizeBytes = actualJpegSize;
    outputANW->queueBuffer(mOutputSurface.get(), anb, /*fence*/ -1);

    return res;
}

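// Unlocks any consumer buffers still held by 'inputFrame' and notifies the client about a
// stream buffer error at most once per failed frame.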
void DepthCompositeStream::releaseInputFrameLocked(InputFrame *inputFrame /*out*/) {
    if (inputFrame == nullptr) {
        return;
    }

    if (inputFrame->depthBuffer.data != nullptr) {
        mDepthConsumer->unlockBuffer(inputFrame->depthBuffer);
        inputFrame->depthBuffer.data = nullptr;
        mDepthBufferAcquired = false;
    }

    if (inputFrame->jpegBuffer.data != nullptr) {
        mBlobConsumer->unlockBuffer(inputFrame->jpegBuffer);
        inputFrame->jpegBuffer.data = nullptr;
        mBlobBufferAcquired = false;
    }

    if ((inputFrame->error || mErrorState) && !inputFrame->errorNotified) {
        //TODO: Figure out correct requestId
        notifyError(inputFrame->frameNumber, -1 /*requestId*/);
        inputFrame->errorNotified = true;
    }
}

void DepthCompositeStream::releaseInputFramesLocked(int64_t currentTs) {
    auto it = mPendingInputFrames.begin();
    while (it != mPendingInputFrames.end()) {
        if (it->first <= currentTs) {
            releaseInputFrameLocked(&it->second);
            it = mPendingInputFrames.erase(it);
        } else {
            it++;
        }
    }
}

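// Main processing loop: collects pending input, waits for the next complete (jpeg + depth)
// frame, composes the depth photo and releases all input frames at least as old as the one
// that was just processed.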
bool DepthCompositeStream::threadLoop() {
    int64_t currentTs = INT64_MAX;
    bool newInputAvailable = false;

    {
        Mutex::Autolock l(mMutex);

        if (mErrorState) {
            // In case we landed in error state, return any pending buffers and
            // halt all further processing.
            compilePendingInputLocked();
            releaseInputFramesLocked(currentTs);
            return false;
        }

        while (!newInputAvailable) {
            compilePendingInputLocked();
            newInputAvailable = getNextReadyInputLocked(&currentTs);
            if (!newInputAvailable) {
                auto failingFrameNumber = getNextFailingInputLocked(&currentTs);
                if (failingFrameNumber >= 0) {
                    // We cannot erase 'mPendingInputFrames[currentTs]' at this point because it is
                    // possible for two internal stream buffers to fail. In such scenario the
                    // composite stream should notify the client about a stream buffer error only
                    // once and this information is kept within 'errorNotified'.
                    // Any present failed input frames will be removed on a subsequent call to
                    // 'releaseInputFramesLocked()'.
                    releaseInputFrameLocked(&mPendingInputFrames[currentTs]);
                    currentTs = INT64_MAX;
                }

                auto ret = mInputReadyCondition.waitRelative(mMutex, kWaitDuration);
                if (ret == TIMED_OUT) {
                    return true;
                } else if (ret != OK) {
                    ALOGE("%s: Timed wait on condition failed: %s (%d)", __FUNCTION__,
                            strerror(-ret), ret);
                    return false;
                }
            }
        }
    }

    auto res = processInputFrame(currentTs, mPendingInputFrames[currentTs]);
    Mutex::Autolock l(mMutex);
    if (res != OK) {
        ALOGE("%s: Failed processing frame with timestamp: %" PRIu64 ": %s (%d)", __FUNCTION__,
                currentTs, strerror(-res), res);
        mPendingInputFrames[currentTs].error = true;
    }

    releaseInputFramesLocked(currentTs);

    return true;
}

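// A surface is treated as a depth composite stream when it is configured as a BLOB stream
// with the DYNAMIC_DEPTH dataspace.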
bool DepthCompositeStream::isDepthCompositeStream(const sp<Surface> &surface) {
    ANativeWindow *anw = surface.get();
    status_t err;
    int format;
    if ((err = anw->query(anw, NATIVE_WINDOW_FORMAT, &format)) != OK) {
        std::string msg = fmt::sprintf("Failed to query Surface format: %s (%d)", strerror(-err),
                err);
        ALOGE("%s: %s", __FUNCTION__, msg.c_str());
        return false;
    }

    int dataspace;
    if ((err = anw->query(anw, NATIVE_WINDOW_DEFAULT_DATASPACE, &dataspace)) != OK) {
        std::string msg = fmt::sprintf("Failed to query Surface dataspace: %s (%d)", strerror(-err),
                err);
        ALOGE("%s: %s", __FUNCTION__, msg.c_str());
        return false;
    }

    if ((format == HAL_PIXEL_FORMAT_BLOB) && (dataspace == HAL_DATASPACE_DYNAMIC_DEPTH)) {
        return true;
    }

    return false;
}

bool DepthCompositeStream::isDepthCompositeStreamInfo(const OutputStreamInfo& streamInfo) {
    if ((streamInfo.dataSpace == static_cast<android_dataspace_t>(HAL_DATASPACE_DYNAMIC_DEPTH)) &&
            (streamInfo.format == HAL_PIXEL_FORMAT_BLOB)) {
        return true;
    }

    return false;
}

static bool setContains(const std::unordered_set<int32_t>& containerSet, int32_t value) {
    return containerSet.find(value) != containerSet.end();
}

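// Selects the depth stream size matching the given blob dimensions for the sensor pixel
// modes in use. If both default and maximum resolution modes are requested, the matched
// sizes are expected to be identical.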
status_t DepthCompositeStream::checkAndGetMatchingDepthSize(size_t width, size_t height,
        const std::vector<std::tuple<size_t, size_t>> &depthSizes,
        const std::vector<std::tuple<size_t, size_t>> &depthSizesMaximumResolution,
        const std::unordered_set<int32_t> &sensorPixelModesUsed,
        size_t *depthWidth, size_t *depthHeight) {
    if (depthWidth == nullptr || depthHeight == nullptr) {
        return BAD_VALUE;
    }
    size_t chosenDepthWidth = 0, chosenDepthHeight = 0;
    bool hasDefaultSensorPixelMode =
            setContains(sensorPixelModesUsed, ANDROID_SENSOR_PIXEL_MODE_DEFAULT);

    bool hasMaximumResolutionSensorPixelMode =
            setContains(sensorPixelModesUsed, ANDROID_SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION);

    if (!hasDefaultSensorPixelMode && !hasMaximumResolutionSensorPixelMode) {
        ALOGE("%s: sensor pixel modes don't contain either maximum resolution or default modes",
                __FUNCTION__);
        return BAD_VALUE;
    }

    if (hasDefaultSensorPixelMode) {
        auto ret = getMatchingDepthSize(width, height, depthSizes, &chosenDepthWidth,
                &chosenDepthHeight);
        if (ret != OK) {
            ALOGE("%s: No matching depth stream size found", __FUNCTION__);
            return ret;
        }
    }

    if (hasMaximumResolutionSensorPixelMode) {
        size_t depthWidth = 0, depthHeight = 0;
        auto ret = getMatchingDepthSize(width, height,
                depthSizesMaximumResolution, &depthWidth, &depthHeight);
        if (ret != OK) {
            ALOGE("%s: No matching max resolution depth stream size found", __FUNCTION__);
            return ret;
        }
        // Both matching depth sizes should be the same.
        if (chosenDepthWidth != 0 && chosenDepthWidth != depthWidth &&
                chosenDepthHeight != depthHeight) {
            ALOGE("%s: Maximum resolution sensor pixel mode and default sensor pixel mode don't"
                    " have matching depth sizes", __FUNCTION__);
            return BAD_VALUE;
        }
        if (chosenDepthWidth == 0) {
            chosenDepthWidth = depthWidth;
            chosenDepthHeight = depthHeight;
        }
    }
    *depthWidth = chosenDepthWidth;
    *depthHeight = chosenDepthHeight;
    return OK;
}

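// Creates the two internal streams that back the composite stream: a jpeg BLOB stream and a
// depth map stream, each feeding its own CPU consumer. The client-facing dynamic depth
// surface is stored in 'mOutputSurface' and only receives the final composed buffers.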
status_t DepthCompositeStream::createInternalStreams(const std::vector<sp<Surface>>& consumers,
        bool /*hasDeferredConsumer*/, uint32_t width, uint32_t height, int format,
        camera_stream_rotation_t rotation, int *id, const std::string& physicalCameraId,
        const std::unordered_set<int32_t> &sensorPixelModesUsed,
        std::vector<int> *surfaceIds,
        int /*streamSetId*/, bool /*isShared*/, int32_t /*colorSpace*/,
        int64_t /*dynamicProfile*/, int64_t /*streamUseCase*/, bool useReadoutTimestamp) {
    if (mSupportedDepthSizes.empty()) {
        ALOGE("%s: This camera device doesn't support any depth map streams!", __FUNCTION__);
        return INVALID_OPERATION;
    }

    size_t depthWidth, depthHeight;
    auto ret =
            checkAndGetMatchingDepthSize(width, height, mSupportedDepthSizes,
                    mSupportedDepthSizesMaximumResolution, sensorPixelModesUsed, &depthWidth,
                    &depthHeight);
    if (ret != OK) {
        ALOGE("%s: Failed to find an appropriate depth stream size!", __FUNCTION__);
        return ret;
    }

    sp<CameraDeviceBase> device = mDevice.promote();
    if (!device.get()) {
        ALOGE("%s: Invalid camera device!", __FUNCTION__);
        return NO_INIT;
    }

#if COM_ANDROID_GRAPHICS_LIBGUI_FLAGS(WB_CONSUMER_BASE_OWNS_BQ)
    mBlobConsumer = new CpuConsumer(/*maxLockedBuffers*/ 1, /*controlledByApp*/ true);
    mBlobConsumer->setFrameAvailableListener(this);
    mBlobConsumer->setName(String8("Camera3-JpegCompositeStream"));
    mBlobSurface = mBlobConsumer->getSurface();
#else
    sp<IGraphicBufferProducer> producer;
    sp<IGraphicBufferConsumer> consumer;
    BufferQueue::createBufferQueue(&producer, &consumer);
    mBlobConsumer = new CpuConsumer(consumer, /*maxLockedBuffers*/1, /*controlledByApp*/ true);
    mBlobConsumer->setFrameAvailableListener(this);
    mBlobConsumer->setName(String8("Camera3-JpegCompositeStream"));
    mBlobSurface = new Surface(producer);
#endif // COM_ANDROID_GRAPHICS_LIBGUI_FLAGS(WB_CONSUMER_BASE_OWNS_BQ)

    ret = device->createStream(mBlobSurface, width, height, format, kJpegDataSpace, rotation,
            id, physicalCameraId, sensorPixelModesUsed, surfaceIds,
            camera3::CAMERA3_STREAM_SET_ID_INVALID, /*isShared*/false, /*isMultiResolution*/false,
            /*consumerUsage*/0, ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD,
            ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_DEFAULT,
            OutputConfiguration::TIMESTAMP_BASE_DEFAULT,
            OutputConfiguration::MIRROR_MODE_AUTO,
            ANDROID_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP_UNSPECIFIED,
            useReadoutTimestamp);
    if (ret == OK) {
        mBlobStreamId = *id;
        mBlobSurfaceId = (*surfaceIds)[0];
        mOutputSurface = consumers[0];
    } else {
        return ret;
    }

#if COM_ANDROID_GRAPHICS_LIBGUI_FLAGS(WB_CONSUMER_BASE_OWNS_BQ)
    mDepthConsumer = new CpuConsumer(/*maxLockedBuffers*/ 1, /*controlledByApp*/ true);
    mDepthConsumer->setFrameAvailableListener(this);
    mDepthConsumer->setName(String8("Camera3-DepthCompositeStream"));
    mDepthSurface = mDepthConsumer->getSurface();
#else
    BufferQueue::createBufferQueue(&producer, &consumer);
    mDepthConsumer = new CpuConsumer(consumer, /*maxLockedBuffers*/ 1, /*controlledByApp*/ true);
    mDepthConsumer->setFrameAvailableListener(this);
    mDepthConsumer->setName(String8("Camera3-DepthCompositeStream"));
    mDepthSurface = new Surface(producer);
#endif // COM_ANDROID_GRAPHICS_LIBGUI_FLAGS(WB_CONSUMER_BASE_OWNS_BQ)
    std::vector<int> depthSurfaceId;
    ret = device->createStream(mDepthSurface, depthWidth, depthHeight, kDepthMapPixelFormat,
            kDepthMapDataSpace, rotation, &mDepthStreamId, physicalCameraId, sensorPixelModesUsed,
            &depthSurfaceId, camera3::CAMERA3_STREAM_SET_ID_INVALID, /*isShared*/false,
            /*isMultiResolution*/false, /*consumerUsage*/0,
            ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD,
            ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_DEFAULT,
            OutputConfiguration::TIMESTAMP_BASE_DEFAULT,
            OutputConfiguration::MIRROR_MODE_AUTO,
            ANDROID_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP_UNSPECIFIED,
            useReadoutTimestamp);
    if (ret == OK) {
        mDepthSurfaceId = depthSurfaceId[0];
    } else {
        return ret;
    }

    ret = registerCompositeStreamListener(getStreamId());
    if (ret != OK) {
        ALOGE("%s: Failed to register blob stream listener!", __FUNCTION__);
        return ret;
    }

    ret = registerCompositeStreamListener(mDepthStreamId);
    if (ret != OK) {
        ALOGE("%s: Failed to register depth stream listener!", __FUNCTION__);
        return ret;
    }

    mBlobWidth = width;
    mBlobHeight = height;

    return ret;
}

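// Connects the client output surface, sizes its buffer queue and starts the processing thread.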
status_t DepthCompositeStream::configureStream() {
    if (isRunning()) {
        // Processing thread is already running, nothing more to do.
        return NO_ERROR;
    }

    if (mOutputSurface.get() == nullptr) {
        ALOGE("%s: No valid output surface set!", __FUNCTION__);
        return NO_INIT;
    }

    auto res = mOutputSurface->connect(NATIVE_WINDOW_API_CAMERA, mStreamSurfaceListener);
    if (res != OK) {
        ALOGE("%s: Unable to connect to native window for stream %d",
                __FUNCTION__, mBlobStreamId);
        return res;
    }

    if ((res = native_window_set_buffers_format(mOutputSurface.get(), HAL_PIXEL_FORMAT_BLOB))
            != OK) {
        ALOGE("%s: Unable to configure stream buffer format for stream %d", __FUNCTION__,
                mBlobStreamId);
        return res;
    }

    int maxProducerBuffers;
    ANativeWindow *anw = mBlobSurface.get();
    if ((res = anw->query(anw, NATIVE_WINDOW_MIN_UNDEQUEUED_BUFFERS, &maxProducerBuffers)) != OK) {
        ALOGE("%s: Unable to query consumer undequeued"
                " buffer count for stream %d", __FUNCTION__, mBlobStreamId);
        return res;
    }

    ANativeWindow *anwConsumer = mOutputSurface.get();
    int maxConsumerBuffers;
    if ((res = anwConsumer->query(anwConsumer, NATIVE_WINDOW_MIN_UNDEQUEUED_BUFFERS,
                    &maxConsumerBuffers)) != OK) {
        ALOGE("%s: Unable to query consumer undequeued"
                " buffer count for stream %d", __FUNCTION__, mBlobStreamId);
        return res;
    }

    if ((res = native_window_set_buffer_count(
                    anwConsumer, maxProducerBuffers + maxConsumerBuffers)) != OK) {
        ALOGE("%s: Unable to set buffer count for stream %d", __FUNCTION__, mBlobStreamId);
        return res;
    }

    run("DepthCompositeStreamProc");

    return NO_ERROR;
}

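// Stops the processing thread, removes the internal depth stream (when the camera device is
// still valid) and disconnects the client output surface.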
status_t DepthCompositeStream::deleteInternalStreams() {
    // The 'CameraDeviceClient' parent will delete the blob stream
    requestExit();

    auto ret = join();
    if (ret != OK) {
        ALOGE("%s: Failed to join with the main processing thread: %s (%d)", __FUNCTION__,
                strerror(-ret), ret);
    }

    if (mDepthStreamId >= 0) {
        // Camera devices may not be valid after switching to offline mode.
        // In this case, all offline streams including internal composite streams
        // are managed and released by the offline session.
        sp<CameraDeviceBase> device = mDevice.promote();
        if (device.get() != nullptr) {
            ret = device->deleteStream(mDepthStreamId);
        }

        mDepthStreamId = -1;
    }

    if (mOutputSurface != nullptr) {
        mOutputSurface->disconnect(NATIVE_WINDOW_API_CAMERA);
        mOutputSurface.clear();
    }

    return ret;
}

void DepthCompositeStream::onFrameAvailable(const BufferItem& item) {
    if (item.mDataSpace == kJpegDataSpace) {
        ALOGV("%s: Jpeg buffer with ts: %" PRIu64 " ms. arrived!",
                __func__, ns2ms(item.mTimestamp));

        Mutex::Autolock l(mMutex);
        if (!mErrorState) {
            mInputJpegBuffers.push_back(item.mTimestamp);
            mInputReadyCondition.signal();
        }
    } else if (item.mDataSpace == kDepthMapDataSpace) {
        ALOGV("%s: Depth buffer with ts: %" PRIu64 " ms. arrived!", __func__,
                ns2ms(item.mTimestamp));

        Mutex::Autolock l(mMutex);
        if (!mErrorState) {
            mInputDepthBuffers.push_back(item.mTimestamp);
            mInputReadyCondition.signal();
        }
    } else {
        ALOGE("%s: Unexpected data space: 0x%x", __FUNCTION__, item.mDataSpace);
    }
}

status_t DepthCompositeStream::insertGbp(SurfaceMap* /*out*/outSurfaceMap,
        Vector<int32_t> * /*out*/outputStreamIds, int32_t* /*out*/currentStreamId) {
    if (outSurfaceMap->find(mDepthStreamId) == outSurfaceMap->end()) {
        outputStreamIds->push_back(mDepthStreamId);
    }
    (*outSurfaceMap)[mDepthStreamId].push_back(mDepthSurfaceId);

    if (outSurfaceMap->find(mBlobStreamId) == outSurfaceMap->end()) {
        outputStreamIds->push_back(mBlobStreamId);
    }
    (*outSurfaceMap)[mBlobStreamId].push_back(mBlobSurfaceId);

    if (currentStreamId != nullptr) {
        *currentStreamId = mBlobStreamId;
    }

    return NO_ERROR;
}

status_t DepthCompositeStream::insertCompositeStreamIds(
        std::vector<int32_t>* compositeStreamIds /*out*/) {
    if (compositeStreamIds == nullptr) {
        return BAD_VALUE;
    }

    compositeStreamIds->push_back(mDepthStreamId);
    compositeStreamIds->push_back(mBlobStreamId);

    return OK;
}

void DepthCompositeStream::onResultError(const CaptureResultExtras& resultExtras) {
    // Processing can continue even in case of result errors.
    // At the moment depth composite stream processing relies mainly on static camera
    // characteristics data. The actual result data can be used for the jpeg quality but
    // in case it is absent we can default to maximum.
    eraseResult(resultExtras.frameNumber);
}

bool DepthCompositeStream::onStreamBufferError(const CaptureResultExtras& resultExtras) {
    bool ret = false;
    // Buffer errors concerning internal composite streams should not be directly visible to
    // camera clients. They must only receive a single buffer error with the public composite
    // stream id.
    if ((resultExtras.errorStreamId == mDepthStreamId) ||
            (resultExtras.errorStreamId == mBlobStreamId)) {
        flagAnErrorFrameNumber(resultExtras.frameNumber);
        ret = true;
    }

    return ret;
}

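// Returns the supported depth size that either matches the requested dimensions exactly or,
// failing that, has the largest area among sizes whose aspect ratio is within
// kDepthARTolerance of the requested aspect ratio.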
status_t DepthCompositeStream::getMatchingDepthSize(size_t width, size_t height,
        const std::vector<std::tuple<size_t, size_t>>& supportedDepthSizes,
        size_t *depthWidth /*out*/, size_t *depthHeight /*out*/) {
    if ((depthWidth == nullptr) || (depthHeight == nullptr)) {
        return BAD_VALUE;
    }

    float arTol = CameraProviderManager::kDepthARTolerance;
    *depthWidth = *depthHeight = 0;

    float aspectRatio = static_cast<float> (width) / static_cast<float> (height);
    for (const auto& it : supportedDepthSizes) {
        auto currentWidth = std::get<0>(it);
        auto currentHeight = std::get<1>(it);
        if ((currentWidth == width) && (currentHeight == height)) {
            *depthWidth = width;
            *depthHeight = height;
            break;
        } else {
            float currentRatio = static_cast<float> (currentWidth) /
                    static_cast<float> (currentHeight);
            auto currentSize = currentWidth * currentHeight;
            auto oldSize = (*depthWidth) * (*depthHeight);
            if ((fabs(aspectRatio - currentRatio) <= arTol) && (currentSize > oldSize)) {
                *depthWidth = currentWidth;
                *depthHeight = currentHeight;
            }
        }
    }

    return ((*depthWidth > 0) && (*depthHeight > 0)) ? OK : BAD_VALUE;
}

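// Collects all depth map output sizes advertised by the (maximum resolution) depth stream
// configurations in the given static metadata.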
void DepthCompositeStream::getSupportedDepthSizes(const CameraMetadata& ch, bool maxResolution,
        std::vector<std::tuple<size_t, size_t>>* depthSizes /*out*/) {
    if (depthSizes == nullptr) {
        return;
    }

    auto entry = ch.find(
            camera3::SessionConfigurationUtils::getAppropriateModeTag(
                    ANDROID_DEPTH_AVAILABLE_DEPTH_STREAM_CONFIGURATIONS, maxResolution));
    if (entry.count > 0) {
        // Depth stream dimensions have four int32_t components
        // (pixelformat, width, height, type)
        size_t entryCount = entry.count / 4;
        depthSizes->reserve(entryCount);
        for (size_t i = 0; i < entry.count; i += 4) {
            if ((entry.data.i32[i] == kDepthMapPixelFormat) &&
                    (entry.data.i32[i+3] ==
                            ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT)) {
                depthSizes->push_back(std::make_tuple(entry.data.i32[i+1],
                        entry.data.i32[i+2]));
            }
        }
    }
}

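// Derives the stream info of the two internal streams (jpeg/blob and depth map) from the
// client-visible dynamic depth stream info.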
status_t DepthCompositeStream::getCompositeStreamInfo(const OutputStreamInfo &streamInfo,
        const CameraMetadata& ch, std::vector<OutputStreamInfo>* compositeOutput /*out*/) {
    if (compositeOutput == nullptr) {
        return BAD_VALUE;
    }

    std::vector<std::tuple<size_t, size_t>> depthSizes;
    std::vector<std::tuple<size_t, size_t>> depthSizesMaximumResolution;
    getSupportedDepthSizes(ch, /*maxResolution*/false, &depthSizes);
    if (depthSizes.empty()) {
        ALOGE("%s: No depth stream configurations present", __FUNCTION__);
        return BAD_VALUE;
    }

    if (SessionConfigurationUtils::supportsUltraHighResolutionCapture(ch)) {
        getSupportedDepthSizes(ch, /*maxResolution*/true, &depthSizesMaximumResolution);
        if (depthSizesMaximumResolution.empty()) {
            ALOGE("%s: No depth stream configurations for maximum resolution present",
                    __FUNCTION__);
            return BAD_VALUE;
        }
    }

    size_t chosenDepthWidth = 0, chosenDepthHeight = 0;
    auto ret = checkAndGetMatchingDepthSize(streamInfo.width, streamInfo.height, depthSizes,
            depthSizesMaximumResolution, streamInfo.sensorPixelModesUsed, &chosenDepthWidth,
            &chosenDepthHeight);

    if (ret != OK) {
        ALOGE("%s: Couldn't get matching depth sizes", __FUNCTION__);
        return ret;
    }

    compositeOutput->clear();
    compositeOutput->insert(compositeOutput->end(), 2, streamInfo);

    // Sensor pixel modes should stay the same here. They're already overridden.
    // Jpeg/Blob stream info
    (*compositeOutput)[0].dataSpace = kJpegDataSpace;
    (*compositeOutput)[0].consumerUsage = GRALLOC_USAGE_SW_READ_OFTEN;

    // Depth stream info
    (*compositeOutput)[1].width = chosenDepthWidth;
    (*compositeOutput)[1].height = chosenDepthHeight;
    (*compositeOutput)[1].format = kDepthMapPixelFormat;
    (*compositeOutput)[1].dataSpace = kDepthMapDataSpace;
    (*compositeOutput)[1].consumerUsage = GRALLOC_USAGE_SW_READ_OFTEN;

    return NO_ERROR;
}

}; // namespace camera3
}; // namespace android