/*
 * Copyright (C) 2018 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#define LOG_TAG "Camera3-DepthCompositeStream"
#define ATRACE_TAG ATRACE_TAG_CAMERA
//#define LOG_NDEBUG 0

#include <aidl/android/hardware/camera/device/CameraBlob.h>
#include <aidl/android/hardware/camera/device/CameraBlobId.h>
#include <camera/StringUtils.h>

#include "api1/client2/JpegProcessor.h"
#include "common/CameraProviderManager.h"
#include "utils/SessionConfigurationUtils.h"
#include <gui/Surface.h>
#include <utils/Log.h>
#include <utils/Trace.h>

#include "DepthCompositeStream.h"

namespace android {
namespace camera3 {

using aidl::android::hardware::camera::device::CameraBlob;
using aidl::android::hardware::camera::device::CameraBlobId;

DepthCompositeStream::DepthCompositeStream(sp<CameraDeviceBase> device,
        wp<hardware::camera2::ICameraDeviceCallbacks> cb) :
        CompositeStream(device, cb),
        mBlobStreamId(-1),
        mBlobSurfaceId(-1),
        mDepthStreamId(-1),
        mDepthSurfaceId(-1),
        mBlobWidth(0),
        mBlobHeight(0),
        mDepthBufferAcquired(false),
        mBlobBufferAcquired(false),
        mProducerListener(new ProducerListener()),
        mMaxJpegBufferSize(-1),
        mUHRMaxJpegBufferSize(-1),
        mIsLogicalCamera(false) {
    if (device != nullptr) {
        CameraMetadata staticInfo = device->info();
        auto entry = staticInfo.find(ANDROID_JPEG_MAX_SIZE);
        if (entry.count > 0) {
            mMaxJpegBufferSize = entry.data.i32[0];
        } else {
            ALOGW("%s: Maximum jpeg size absent from camera characteristics", __FUNCTION__);
        }

        mUHRMaxJpegSize =
                SessionConfigurationUtils::getMaxJpegResolution(staticInfo,
                        /*isUltraHighResolution*/true);
        mDefaultMaxJpegSize =
                SessionConfigurationUtils::getMaxJpegResolution(staticInfo,
                        /*isUltraHighResolution*/false);

        mUHRMaxJpegBufferSize =
                SessionConfigurationUtils::getUHRMaxJpegBufferSize(mUHRMaxJpegSize,
                        mDefaultMaxJpegSize, mMaxJpegBufferSize);

        entry = staticInfo.find(ANDROID_LENS_INTRINSIC_CALIBRATION);
        if (entry.count == 5) {
            mIntrinsicCalibration.reserve(5);
            mIntrinsicCalibration.insert(mIntrinsicCalibration.end(), entry.data.f,
                    entry.data.f + 5);
        } else {
            ALOGW("%s: Intrinsic calibration absent from camera characteristics!", __FUNCTION__);
        }

        entry = staticInfo.find(ANDROID_LENS_DISTORTION);
        if (entry.count == 5) {
            mLensDistortion.reserve(5);
            mLensDistortion.insert(mLensDistortion.end(), entry.data.f, entry.data.f + 5);
        } else {
            ALOGW("%s: Lens distortion absent from camera characteristics!", __FUNCTION__);
        }

        entry = staticInfo.find(ANDROID_REQUEST_AVAILABLE_CAPABILITIES);
        for (size_t i = 0; i < entry.count; ++i) {
            uint8_t capability = entry.data.u8[i];
            if (capability == ANDROID_REQUEST_AVAILABLE_CAPABILITIES_LOGICAL_MULTI_CAMERA) {
                mIsLogicalCamera = true;
                break;
            }
        }

        getSupportedDepthSizes(staticInfo, /*maxResolution*/false, &mSupportedDepthSizes);
        if (SessionConfigurationUtils::supportsUltraHighResolutionCapture(staticInfo)) {
            getSupportedDepthSizes(staticInfo, true, &mSupportedDepthSizesMaximumResolution);
        }
    }
}

DepthCompositeStream::~DepthCompositeStream() {
    mBlobConsumer.clear();
    mBlobSurface.clear();
    mBlobStreamId = -1;
    mBlobSurfaceId = -1;
    mDepthConsumer.clear();
    mDepthSurface.clear();
    mDepthConsumer = nullptr;
    mDepthSurface = nullptr;
}

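// Gather all pending work while 'mMutex' is held: lock at most one new jpeg and one new depth
// buffer from the internal consumers, merge collected capture results and frame numbers into
// 'mPendingInputFrames' (keyed by sensor timestamp), and mark entries whose frame numbers were
// flagged as failed.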
void DepthCompositeStream::compilePendingInputLocked() {
    CpuConsumer::LockedBuffer imgBuffer;

    while (!mInputJpegBuffers.empty() && !mBlobBufferAcquired) {
        auto it = mInputJpegBuffers.begin();
        auto res = mBlobConsumer->lockNextBuffer(&imgBuffer);
        if (res == NOT_ENOUGH_DATA) {
            // Can not lock any more buffers.
            break;
        } else if (res != OK) {
            ALOGE("%s: Error locking blob image buffer: %s (%d)", __FUNCTION__,
                    strerror(-res), res);
            mPendingInputFrames[*it].error = true;
            mInputJpegBuffers.erase(it);
            continue;
        }

        if (*it != imgBuffer.timestamp) {
            ALOGW("%s: Expecting jpeg buffer with time stamp: %" PRId64 " received buffer with "
                    "time stamp: %" PRId64, __FUNCTION__, *it, imgBuffer.timestamp);
        }

        if ((mPendingInputFrames.find(imgBuffer.timestamp) != mPendingInputFrames.end()) &&
                (mPendingInputFrames[imgBuffer.timestamp].error)) {
            mBlobConsumer->unlockBuffer(imgBuffer);
        } else {
            mPendingInputFrames[imgBuffer.timestamp].jpegBuffer = imgBuffer;
            mBlobBufferAcquired = true;
        }
        mInputJpegBuffers.erase(it);
    }

    while (!mInputDepthBuffers.empty() && !mDepthBufferAcquired) {
        auto it = mInputDepthBuffers.begin();
        auto res = mDepthConsumer->lockNextBuffer(&imgBuffer);
        if (res == NOT_ENOUGH_DATA) {
            // Can not lock any more buffers.
            break;
        } else if (res != OK) {
            ALOGE("%s: Error receiving depth image buffer: %s (%d)", __FUNCTION__,
                    strerror(-res), res);
            mPendingInputFrames[*it].error = true;
            mInputDepthBuffers.erase(it);
            continue;
        }

        if (*it != imgBuffer.timestamp) {
            ALOGW("%s: Expecting depth buffer with time stamp: %" PRId64 " received buffer with "
                    "time stamp: %" PRId64, __FUNCTION__, *it, imgBuffer.timestamp);
        }

        if ((mPendingInputFrames.find(imgBuffer.timestamp) != mPendingInputFrames.end()) &&
                (mPendingInputFrames[imgBuffer.timestamp].error)) {
            mDepthConsumer->unlockBuffer(imgBuffer);
        } else {
            mPendingInputFrames[imgBuffer.timestamp].depthBuffer = imgBuffer;
            mDepthBufferAcquired = true;
        }
        mInputDepthBuffers.erase(it);
    }

    while (!mCaptureResults.empty()) {
        auto it = mCaptureResults.begin();
        // Negative timestamp indicates that something went wrong during the capture result
        // collection process.
        if (it->first >= 0) {
            mPendingInputFrames[it->first].frameNumber = std::get<0>(it->second);
            mPendingInputFrames[it->first].result = std::get<1>(it->second);
        }
        mCaptureResults.erase(it);
    }

    while (!mFrameNumberMap.empty()) {
        auto it = mFrameNumberMap.begin();
        mPendingInputFrames[it->second].frameNumber = it->first;
        mFrameNumberMap.erase(it);
    }

    auto it = mErrorFrameNumbers.begin();
    while (it != mErrorFrameNumbers.end()) {
        bool frameFound = false;
        for (auto &inputFrame : mPendingInputFrames) {
            if (inputFrame.second.frameNumber == *it) {
                inputFrame.second.error = true;
                frameFound = true;
                break;
            }
        }

        if (frameFound) {
            it = mErrorFrameNumbers.erase(it);
        } else {
            ALOGW("%s: Not able to find failing input with frame number: %" PRId64, __FUNCTION__,
                    *it);
            it++;
        }
    }
}

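// Find the oldest pending entry that has both its jpeg and depth buffers available and is not
// in an error state. Returns true and updates 'currentTs' when such an entry exists.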
bool DepthCompositeStream::getNextReadyInputLocked(int64_t *currentTs /*inout*/) {
    if (currentTs == nullptr) {
        return false;
    }

    bool newInputAvailable = false;
    for (const auto& it : mPendingInputFrames) {
        if ((!it.second.error) && (it.second.depthBuffer.data != nullptr) &&
                (it.second.jpegBuffer.data != nullptr) && (it.first < *currentTs)) {
            *currentTs = it.first;
            newInputAvailable = true;
        }
    }

    return newInputAvailable;
}

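// Find the oldest pending entry that failed and has not yet been reported to the client.
// Returns its frame number (or -1) and updates 'currentTs' with its timestamp.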
int64_t DepthCompositeStream::getNextFailingInputLocked(int64_t *currentTs /*inout*/) {
    int64_t ret = -1;
    if (currentTs == nullptr) {
        return ret;
    }

    for (const auto& it : mPendingInputFrames) {
        if (it.second.error && !it.second.errorNotified && (it.first < *currentTs)) {
            *currentTs = it.first;
            ret = it.second.frameNumber;
        }
    }

    return ret;
}

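// Compose the dynamic depth output for a single input frame: size a blob buffer large enough
// for the main jpeg, the depth map and the confidence map, run the depth photo processor into
// the dequeued output buffer, append the mandatory CameraBlob header at the end of the buffer,
// and queue it to the client surface with the frame timestamp.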
status_t DepthCompositeStream::processInputFrame(nsecs_t ts, const InputFrame &inputFrame) {
    status_t res;
    sp<ANativeWindow> outputANW = mOutputSurface;
    ANativeWindowBuffer *anb;
    int fenceFd;
    void *dstBuffer;

    auto jpegSize = android::camera2::JpegProcessor::findJpegSize(inputFrame.jpegBuffer.data,
            inputFrame.jpegBuffer.width);
    if (jpegSize == 0) {
        ALOGW("%s: Failed to find input jpeg size, default to using entire buffer!", __FUNCTION__);
        jpegSize = inputFrame.jpegBuffer.width;
    }

    size_t maxDepthJpegBufferSize = 0;
    if (mMaxJpegBufferSize > 0) {
        // If this is an ultra high resolution sensor and the input frames size
        // is > default res jpeg.
        if (mUHRMaxJpegSize.width != 0 &&
                inputFrame.jpegBuffer.width * inputFrame.jpegBuffer.height >
                mDefaultMaxJpegSize.width * mDefaultMaxJpegSize.height) {
            maxDepthJpegBufferSize = mUHRMaxJpegBufferSize;
        } else {
            maxDepthJpegBufferSize = mMaxJpegBufferSize;
        }
    } else {
        maxDepthJpegBufferSize = std::max<size_t> (jpegSize,
                inputFrame.depthBuffer.width * inputFrame.depthBuffer.height * 3 / 2);
    }

    uint8_t jpegQuality = 100;
    auto entry = inputFrame.result.find(ANDROID_JPEG_QUALITY);
    if (entry.count > 0) {
        jpegQuality = entry.data.u8[0];
    }

    // The final depth photo will consist of the main jpeg buffer, the depth map buffer (also in
    // jpeg format) and confidence map (jpeg as well). Assume worst case that all 3 jpeg need
    // max jpeg size.
    size_t finalJpegBufferSize = maxDepthJpegBufferSize * 3;

    if ((res = native_window_set_buffers_dimensions(mOutputSurface.get(), finalJpegBufferSize, 1))
            != OK) {
        ALOGE("%s: Unable to configure stream buffer dimensions"
                " %zux%u for stream %d", __FUNCTION__, finalJpegBufferSize, 1U, mBlobStreamId);
        return res;
    }

    res = outputANW->dequeueBuffer(mOutputSurface.get(), &anb, &fenceFd);
    if (res != OK) {
        ALOGE("%s: Error retrieving output buffer: %s (%d)", __FUNCTION__, strerror(-res),
                res);
        return res;
    }

    sp<GraphicBuffer> gb = GraphicBuffer::from(anb);
    GraphicBufferLocker gbLocker(gb);
    res = gbLocker.lockAsync(&dstBuffer, fenceFd);
    if (res != OK) {
        ALOGE("%s: Error trying to lock output buffer fence: %s (%d)", __FUNCTION__,
                strerror(-res), res);
        outputANW->cancelBuffer(mOutputSurface.get(), anb, /*fence*/ -1);
        return res;
    }

    if ((gb->getWidth() < finalJpegBufferSize) || (gb->getHeight() != 1)) {
        ALOGE("%s: Blob buffer size mismatch, expected %dx%d received %zux%u", __FUNCTION__,
                gb->getWidth(), gb->getHeight(), finalJpegBufferSize, 1U);
        outputANW->cancelBuffer(mOutputSurface.get(), anb, /*fence*/ -1);
        return BAD_VALUE;
    }

    DepthPhotoInputFrame depthPhoto;
    depthPhoto.mMainJpegBuffer = reinterpret_cast<const char*> (inputFrame.jpegBuffer.data);
    depthPhoto.mMainJpegWidth = mBlobWidth;
    depthPhoto.mMainJpegHeight = mBlobHeight;
    depthPhoto.mMainJpegSize = jpegSize;
    depthPhoto.mDepthMapBuffer = reinterpret_cast<uint16_t*> (inputFrame.depthBuffer.data);
    depthPhoto.mDepthMapWidth = inputFrame.depthBuffer.width;
    depthPhoto.mDepthMapHeight = inputFrame.depthBuffer.height;
    depthPhoto.mDepthMapStride = inputFrame.depthBuffer.stride;
    depthPhoto.mJpegQuality = jpegQuality;
    depthPhoto.mIsLogical = mIsLogicalCamera;
    depthPhoto.mMaxJpegSize = maxDepthJpegBufferSize;
    // The camera intrinsic calibration layout is as follows:
    // [focalLengthX, focalLengthY, opticalCenterX, opticalCenterY, skew]
    if (mIntrinsicCalibration.size() == 5) {
        memcpy(depthPhoto.mIntrinsicCalibration, mIntrinsicCalibration.data(),
                sizeof(depthPhoto.mIntrinsicCalibration));
        depthPhoto.mIsIntrinsicCalibrationValid = 1;
    } else {
        depthPhoto.mIsIntrinsicCalibrationValid = 0;
    }
    // The camera lens distortion contains the following lens correction coefficients.
    // [kappa_1, kappa_2, kappa_3, kappa_4, kappa_5]
    if (mLensDistortion.size() == 5) {
        memcpy(depthPhoto.mLensDistortion, mLensDistortion.data(),
                sizeof(depthPhoto.mLensDistortion));
        depthPhoto.mIsLensDistortionValid = 1;
    } else {
        depthPhoto.mIsLensDistortionValid = 0;
    }
    entry = inputFrame.result.find(ANDROID_JPEG_ORIENTATION);
    if (entry.count > 0) {
        // The camera jpeg orientation values must be within [0, 90, 180, 270].
        switch (entry.data.i32[0]) {
            case 0:
            case 90:
            case 180:
            case 270:
                depthPhoto.mOrientation = static_cast<DepthPhotoOrientation> (entry.data.i32[0]);
                break;
            default:
                ALOGE("%s: Unexpected jpeg orientation value: %d, default to 0 degrees",
                        __FUNCTION__, entry.data.i32[0]);
        }
    }

    size_t actualJpegSize = 0;
    res = processDepthPhotoFrame(depthPhoto, finalJpegBufferSize, dstBuffer, &actualJpegSize);
    if (res != 0) {
        ALOGE("%s: Depth photo processing failed: %s (%d)", __FUNCTION__, strerror(-res), res);
        outputANW->cancelBuffer(mOutputSurface.get(), anb, /*fence*/ -1);
        return res;
    }

    size_t finalJpegSize = actualJpegSize + sizeof(CameraBlob);
    if (finalJpegSize > finalJpegBufferSize) {
        ALOGE("%s: Final jpeg buffer not large enough for the jpeg blob header", __FUNCTION__);
        outputANW->cancelBuffer(mOutputSurface.get(), anb, /*fence*/ -1);
        return NO_MEMORY;
    }

    res = native_window_set_buffers_timestamp(mOutputSurface.get(), ts);
    if (res != OK) {
        ALOGE("%s: Stream %d: Error setting timestamp: %s (%d)", __FUNCTION__,
                getStreamId(), strerror(-res), res);
        return res;
    }

    ALOGV("%s: Final jpeg size: %zu", __func__, finalJpegSize);
    uint8_t* header = static_cast<uint8_t *> (dstBuffer) +
            (gb->getWidth() - sizeof(CameraBlob));
    CameraBlob *blob = reinterpret_cast<CameraBlob*> (header);
    blob->blobId = CameraBlobId::JPEG;
    blob->blobSizeBytes = actualJpegSize;
    outputANW->queueBuffer(mOutputSurface.get(), anb, /*fence*/ -1);

    return res;
}

void DepthCompositeStream::releaseInputFrameLocked(InputFrame *inputFrame /*out*/) {
    if (inputFrame == nullptr) {
        return;
    }

    if (inputFrame->depthBuffer.data != nullptr) {
        mDepthConsumer->unlockBuffer(inputFrame->depthBuffer);
        inputFrame->depthBuffer.data = nullptr;
        mDepthBufferAcquired = false;
    }

    if (inputFrame->jpegBuffer.data != nullptr) {
        mBlobConsumer->unlockBuffer(inputFrame->jpegBuffer);
        inputFrame->jpegBuffer.data = nullptr;
        mBlobBufferAcquired = false;
    }

    if ((inputFrame->error || mErrorState) && !inputFrame->errorNotified) {
        //TODO: Figure out correct requestId
        notifyError(inputFrame->frameNumber, -1 /*requestId*/);
        inputFrame->errorNotified = true;
    }
}

void DepthCompositeStream::releaseInputFramesLocked(int64_t currentTs) {
    auto it = mPendingInputFrames.begin();
    while (it != mPendingInputFrames.end()) {
        if (it->first <= currentTs) {
            releaseInputFrameLocked(&it->second);
            it = mPendingInputFrames.erase(it);
        } else {
            it++;
        }
    }
}

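// Main processing loop: collect pending input under the lock, wait for a frame that has both
// buffers ready (handling failed frames along the way), then process it and release all
// entries up to and including its timestamp.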
bool DepthCompositeStream::threadLoop() {
    int64_t currentTs = INT64_MAX;
    bool newInputAvailable = false;

    {
        Mutex::Autolock l(mMutex);

        if (mErrorState) {
            // In case we landed in error state, return any pending buffers and
            // halt all further processing.
            compilePendingInputLocked();
            releaseInputFramesLocked(currentTs);
            return false;
        }

        while (!newInputAvailable) {
            compilePendingInputLocked();
            newInputAvailable = getNextReadyInputLocked(&currentTs);
            if (!newInputAvailable) {
                auto failingFrameNumber = getNextFailingInputLocked(&currentTs);
                if (failingFrameNumber >= 0) {
                    // We cannot erase 'mPendingInputFrames[currentTs]' at this point because it is
                    // possible for two internal stream buffers to fail. In such scenario the
                    // composite stream should notify the client about a stream buffer error only
                    // once and this information is kept within 'errorNotified'.
                    // Any present failed input frames will be removed on a subsequent call to
                    // 'releaseInputFramesLocked()'.
                    releaseInputFrameLocked(&mPendingInputFrames[currentTs]);
                    currentTs = INT64_MAX;
                }

                auto ret = mInputReadyCondition.waitRelative(mMutex, kWaitDuration);
                if (ret == TIMED_OUT) {
                    return true;
                } else if (ret != OK) {
                    ALOGE("%s: Timed wait on condition failed: %s (%d)", __FUNCTION__,
                            strerror(-ret), ret);
                    return false;
                }
            }
        }
    }

    auto res = processInputFrame(currentTs, mPendingInputFrames[currentTs]);
    Mutex::Autolock l(mMutex);
    if (res != OK) {
        ALOGE("%s: Failed processing frame with timestamp: %" PRIu64 ": %s (%d)", __FUNCTION__,
                currentTs, strerror(-res), res);
        mPendingInputFrames[currentTs].error = true;
    }

    releaseInputFramesLocked(currentTs);

    return true;
}

bool DepthCompositeStream::isDepthCompositeStream(const sp<Surface> &surface) {
    ANativeWindow *anw = surface.get();
    status_t err;
    int format;
    if ((err = anw->query(anw, NATIVE_WINDOW_FORMAT, &format)) != OK) {
        std::string msg = fmt::sprintf("Failed to query Surface format: %s (%d)", strerror(-err),
                err);
        ALOGE("%s: %s", __FUNCTION__, msg.c_str());
        return false;
    }

    int dataspace;
    if ((err = anw->query(anw, NATIVE_WINDOW_DEFAULT_DATASPACE, &dataspace)) != OK) {
        std::string msg = fmt::sprintf("Failed to query Surface dataspace: %s (%d)", strerror(-err),
                err);
        ALOGE("%s: %s", __FUNCTION__, msg.c_str());
        return false;
    }

    if ((format == HAL_PIXEL_FORMAT_BLOB) && (dataspace == HAL_DATASPACE_DYNAMIC_DEPTH)) {
        return true;
    }

    return false;
}

bool DepthCompositeStream::isDepthCompositeStreamInfo(const OutputStreamInfo& streamInfo) {
    if ((streamInfo.dataSpace == static_cast<android_dataspace_t>(HAL_DATASPACE_DYNAMIC_DEPTH)) &&
            (streamInfo.format == HAL_PIXEL_FORMAT_BLOB)) {
        return true;
    }

    return false;
}

static bool setContains(std::unordered_set<int32_t> containerSet, int32_t value) {
    return containerSet.find(value) != containerSet.end();
}

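// Pick a depth stream size that matches the requested blob dimensions for the sensor pixel
// modes in use. When both default and maximum resolution modes are requested, the matching
// sizes from both tables are expected to agree.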
status_t DepthCompositeStream::checkAndGetMatchingDepthSize(size_t width, size_t height,
        const std::vector<std::tuple<size_t, size_t>> &depthSizes,
        const std::vector<std::tuple<size_t, size_t>> &depthSizesMaximumResolution,
        const std::unordered_set<int32_t> &sensorPixelModesUsed,
        size_t *depthWidth, size_t *depthHeight) {
    if (depthWidth == nullptr || depthHeight == nullptr) {
        return BAD_VALUE;
    }
    size_t chosenDepthWidth = 0, chosenDepthHeight = 0;
    bool hasDefaultSensorPixelMode =
            setContains(sensorPixelModesUsed, ANDROID_SENSOR_PIXEL_MODE_DEFAULT);

    bool hasMaximumResolutionSensorPixelMode =
            setContains(sensorPixelModesUsed, ANDROID_SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION);

    if (!hasDefaultSensorPixelMode && !hasMaximumResolutionSensorPixelMode) {
        ALOGE("%s: sensor pixel modes don't contain either maximum resolution or default modes",
                __FUNCTION__);
        return BAD_VALUE;
    }

    if (hasDefaultSensorPixelMode) {
        auto ret = getMatchingDepthSize(width, height, depthSizes, &chosenDepthWidth,
                &chosenDepthHeight);
        if (ret != OK) {
            ALOGE("%s: No matching depth stream size found", __FUNCTION__);
            return ret;
        }
    }

    if (hasMaximumResolutionSensorPixelMode) {
        size_t depthWidth = 0, depthHeight = 0;
        auto ret = getMatchingDepthSize(width, height,
                depthSizesMaximumResolution, &depthWidth, &depthHeight);
        if (ret != OK) {
            ALOGE("%s: No matching max resolution depth stream size found", __FUNCTION__);
            return ret;
        }
        // Both matching depth sizes should be the same.
        if (chosenDepthWidth != 0 && chosenDepthWidth != depthWidth &&
                chosenDepthHeight != depthHeight) {
            ALOGE("%s: Maximum resolution sensor pixel mode and default sensor pixel mode don't"
                    " have matching depth sizes", __FUNCTION__);
            return BAD_VALUE;
        }
        if (chosenDepthWidth == 0) {
            chosenDepthWidth = depthWidth;
            chosenDepthHeight = depthHeight;
        }
    }
    *depthWidth = chosenDepthWidth;
    *depthHeight = chosenDepthHeight;
    return OK;
}

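// Create the two internal streams backing the composite: a blob (jpeg) stream with the
// requested dimensions and a depth stream whose size is chosen from the supported depth
// configurations. Both streams register this object as a composite stream listener; the
// client-facing surface is kept in 'mOutputSurface'.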
status_t DepthCompositeStream::createInternalStreams(const std::vector<sp<Surface>>& consumers,
        bool /*hasDeferredConsumer*/, uint32_t width, uint32_t height, int format,
        camera_stream_rotation_t rotation, int *id, const std::string& physicalCameraId,
        const std::unordered_set<int32_t> &sensorPixelModesUsed,
        std::vector<int> *surfaceIds,
        int /*streamSetId*/, bool /*isShared*/, int32_t /*colorSpace*/,
        int64_t /*dynamicProfile*/, int64_t /*streamUseCase*/, bool useReadoutTimestamp) {
    if (mSupportedDepthSizes.empty()) {
        ALOGE("%s: This camera device doesn't support any depth map streams!", __FUNCTION__);
        return INVALID_OPERATION;
    }

    size_t depthWidth, depthHeight;
    auto ret =
            checkAndGetMatchingDepthSize(width, height, mSupportedDepthSizes,
                    mSupportedDepthSizesMaximumResolution, sensorPixelModesUsed, &depthWidth,
                    &depthHeight);
    if (ret != OK) {
        ALOGE("%s: Failed to find an appropriate depth stream size!", __FUNCTION__);
        return ret;
    }

    sp<CameraDeviceBase> device = mDevice.promote();
    if (!device.get()) {
        ALOGE("%s: Invalid camera device!", __FUNCTION__);
        return NO_INIT;
    }

    sp<IGraphicBufferProducer> producer;
    sp<IGraphicBufferConsumer> consumer;
    BufferQueue::createBufferQueue(&producer, &consumer);
    mBlobConsumer = new CpuConsumer(consumer, /*maxLockedBuffers*/1, /*controlledByApp*/ true);
    mBlobConsumer->setFrameAvailableListener(this);
    mBlobConsumer->setName(String8("Camera3-JpegCompositeStream"));
    mBlobSurface = new Surface(producer);

    ret = device->createStream(mBlobSurface, width, height, format, kJpegDataSpace, rotation,
            id, physicalCameraId, sensorPixelModesUsed, surfaceIds,
            camera3::CAMERA3_STREAM_SET_ID_INVALID, /*isShared*/false, /*isMultiResolution*/false,
            /*consumerUsage*/0, ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD,
            ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_DEFAULT,
            OutputConfiguration::TIMESTAMP_BASE_DEFAULT,
            OutputConfiguration::MIRROR_MODE_AUTO,
            ANDROID_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP_UNSPECIFIED,
            useReadoutTimestamp);
    if (ret == OK) {
        mBlobStreamId = *id;
        mBlobSurfaceId = (*surfaceIds)[0];
        mOutputSurface = consumers[0];
    } else {
        return ret;
    }

    BufferQueue::createBufferQueue(&producer, &consumer);
    mDepthConsumer = new CpuConsumer(consumer, /*maxLockedBuffers*/ 1, /*controlledByApp*/ true);
    mDepthConsumer->setFrameAvailableListener(this);
    mDepthConsumer->setName(String8("Camera3-DepthCompositeStream"));
    mDepthSurface = new Surface(producer);
    std::vector<int> depthSurfaceId;
    ret = device->createStream(mDepthSurface, depthWidth, depthHeight, kDepthMapPixelFormat,
            kDepthMapDataSpace, rotation, &mDepthStreamId, physicalCameraId, sensorPixelModesUsed,
            &depthSurfaceId, camera3::CAMERA3_STREAM_SET_ID_INVALID, /*isShared*/false,
            /*isMultiResolution*/false, /*consumerUsage*/0,
            ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD,
            ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_DEFAULT,
            OutputConfiguration::TIMESTAMP_BASE_DEFAULT,
            OutputConfiguration::MIRROR_MODE_AUTO,
            ANDROID_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP_UNSPECIFIED,
            useReadoutTimestamp);
    if (ret == OK) {
        mDepthSurfaceId = depthSurfaceId[0];
    } else {
        return ret;
    }

    ret = registerCompositeStreamListener(getStreamId());
    if (ret != OK) {
        ALOGE("%s: Failed to register blob stream listener!", __FUNCTION__);
        return ret;
    }

    ret = registerCompositeStreamListener(mDepthStreamId);
    if (ret != OK) {
        ALOGE("%s: Failed to register depth stream listener!", __FUNCTION__);
        return ret;
    }

    mBlobWidth = width;
    mBlobHeight = height;

    return ret;
}

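// Connect to the client-facing output surface, configure it for blob output with enough
// buffers to cover both producer and consumer needs, then start the processing thread.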
status_t DepthCompositeStream::configureStream() {
    if (isRunning()) {
        // Processing thread is already running, nothing more to do.
        return NO_ERROR;
    }

    if (mOutputSurface.get() == nullptr) {
        ALOGE("%s: No valid output surface set!", __FUNCTION__);
        return NO_INIT;
    }

    auto res = mOutputSurface->connect(NATIVE_WINDOW_API_CAMERA, mProducerListener);
    if (res != OK) {
        ALOGE("%s: Unable to connect to native window for stream %d",
                __FUNCTION__, mBlobStreamId);
        return res;
    }

    if ((res = native_window_set_buffers_format(mOutputSurface.get(), HAL_PIXEL_FORMAT_BLOB))
            != OK) {
        ALOGE("%s: Unable to configure stream buffer format for stream %d", __FUNCTION__,
                mBlobStreamId);
        return res;
    }

    int maxProducerBuffers;
    ANativeWindow *anw = mBlobSurface.get();
    if ((res = anw->query(anw, NATIVE_WINDOW_MIN_UNDEQUEUED_BUFFERS, &maxProducerBuffers)) != OK) {
        ALOGE("%s: Unable to query consumer undequeued"
                " buffer count for stream %d", __FUNCTION__, mBlobStreamId);
        return res;
    }

    ANativeWindow *anwConsumer = mOutputSurface.get();
    int maxConsumerBuffers;
    if ((res = anwConsumer->query(anwConsumer, NATIVE_WINDOW_MIN_UNDEQUEUED_BUFFERS,
                    &maxConsumerBuffers)) != OK) {
        ALOGE("%s: Unable to query consumer undequeued"
                " buffer count for stream %d", __FUNCTION__, mBlobStreamId);
        return res;
    }

    if ((res = native_window_set_buffer_count(
                    anwConsumer, maxProducerBuffers + maxConsumerBuffers)) != OK) {
        ALOGE("%s: Unable to set buffer count for stream %d", __FUNCTION__, mBlobStreamId);
        return res;
    }

    run("DepthCompositeStreamProc");

    return NO_ERROR;
}

status_t DepthCompositeStream::deleteInternalStreams() {
    // The 'CameraDeviceClient' parent will delete the blob stream
    requestExit();

    auto ret = join();
    if (ret != OK) {
        ALOGE("%s: Failed to join with the main processing thread: %s (%d)", __FUNCTION__,
                strerror(-ret), ret);
    }

    if (mDepthStreamId >= 0) {
        // Camera devices may not be valid after switching to offline mode.
        // In this case, all offline streams including internal composite streams
        // are managed and released by the offline session.
        sp<CameraDeviceBase> device = mDevice.promote();
        if (device.get() != nullptr) {
            ret = device->deleteStream(mDepthStreamId);
        }

        mDepthStreamId = -1;
    }

    if (mOutputSurface != nullptr) {
        mOutputSurface->disconnect(NATIVE_WINDOW_API_CAMERA);
        mOutputSurface.clear();
    }

    return ret;
}

void DepthCompositeStream::onFrameAvailable(const BufferItem& item) {
    if (item.mDataSpace == kJpegDataSpace) {
        ALOGV("%s: Jpeg buffer with ts: %" PRIu64 " ms. arrived!",
                __func__, ns2ms(item.mTimestamp));

        Mutex::Autolock l(mMutex);
        if (!mErrorState) {
            mInputJpegBuffers.push_back(item.mTimestamp);
            mInputReadyCondition.signal();
        }
    } else if (item.mDataSpace == kDepthMapDataSpace) {
        ALOGV("%s: Depth buffer with ts: %" PRIu64 " ms. arrived!", __func__,
                ns2ms(item.mTimestamp));

        Mutex::Autolock l(mMutex);
        if (!mErrorState) {
            mInputDepthBuffers.push_back(item.mTimestamp);
            mInputReadyCondition.signal();
        }
    } else {
        ALOGE("%s: Unexpected data space: 0x%x", __FUNCTION__, item.mDataSpace);
    }
}

status_t DepthCompositeStream::insertGbp(SurfaceMap* /*out*/outSurfaceMap,
        Vector<int32_t> * /*out*/outputStreamIds, int32_t* /*out*/currentStreamId) {
    if (outSurfaceMap->find(mDepthStreamId) == outSurfaceMap->end()) {
        outputStreamIds->push_back(mDepthStreamId);
    }
    (*outSurfaceMap)[mDepthStreamId].push_back(mDepthSurfaceId);

    if (outSurfaceMap->find(mBlobStreamId) == outSurfaceMap->end()) {
        outputStreamIds->push_back(mBlobStreamId);
    }
    (*outSurfaceMap)[mBlobStreamId].push_back(mBlobSurfaceId);

    if (currentStreamId != nullptr) {
        *currentStreamId = mBlobStreamId;
    }

    return NO_ERROR;
}

status_t DepthCompositeStream::insertCompositeStreamIds(
        std::vector<int32_t>* compositeStreamIds /*out*/) {
    if (compositeStreamIds == nullptr) {
        return BAD_VALUE;
    }

    compositeStreamIds->push_back(mDepthStreamId);
    compositeStreamIds->push_back(mBlobStreamId);

    return OK;
}

void DepthCompositeStream::onResultError(const CaptureResultExtras& resultExtras) {
    // Processing can continue even in case of result errors.
    // At the moment depth composite stream processing relies mainly on static camera
    // characteristics data. The actual result data can be used for the jpeg quality but
    // in case it is absent we can default to maximum.
    eraseResult(resultExtras.frameNumber);
}

bool DepthCompositeStream::onStreamBufferError(const CaptureResultExtras& resultExtras) {
    bool ret = false;
    // Buffer errors concerning internal composite streams should not be directly visible to
    // camera clients. They must only receive a single buffer error with the public composite
    // stream id.
    if ((resultExtras.errorStreamId == mDepthStreamId) ||
            (resultExtras.errorStreamId == mBlobStreamId)) {
        flagAnErrorFrameNumber(resultExtras.frameNumber);
        ret = true;
    }

    return ret;
}

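// Select a depth size for the given blob dimensions: prefer an exact match, otherwise the
// largest available size whose aspect ratio is within kDepthARTolerance of the blob's.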
status_t DepthCompositeStream::getMatchingDepthSize(size_t width, size_t height,
        const std::vector<std::tuple<size_t, size_t>>& supportedDepthSizes,
        size_t *depthWidth /*out*/, size_t *depthHeight /*out*/) {
    if ((depthWidth == nullptr) || (depthHeight == nullptr)) {
        return BAD_VALUE;
    }

    float arTol = CameraProviderManager::kDepthARTolerance;
    *depthWidth = *depthHeight = 0;

    float aspectRatio = static_cast<float> (width) / static_cast<float> (height);
    for (const auto& it : supportedDepthSizes) {
        auto currentWidth = std::get<0>(it);
        auto currentHeight = std::get<1>(it);
        if ((currentWidth == width) && (currentHeight == height)) {
            *depthWidth = width;
            *depthHeight = height;
            break;
        } else {
            float currentRatio = static_cast<float> (currentWidth) /
                    static_cast<float> (currentHeight);
            auto currentSize = currentWidth * currentHeight;
            auto oldSize = (*depthWidth) * (*depthHeight);
            if ((fabs(aspectRatio - currentRatio) <= arTol) && (currentSize > oldSize)) {
                *depthWidth = currentWidth;
                *depthHeight = currentHeight;
            }
        }
    }

    return ((*depthWidth > 0) && (*depthHeight > 0)) ? OK : BAD_VALUE;
}

void DepthCompositeStream::getSupportedDepthSizes(const CameraMetadata& ch, bool maxResolution,
        std::vector<std::tuple<size_t, size_t>>* depthSizes /*out*/) {
    if (depthSizes == nullptr) {
        return;
    }

    auto entry = ch.find(
            camera3::SessionConfigurationUtils::getAppropriateModeTag(
                    ANDROID_DEPTH_AVAILABLE_DEPTH_STREAM_CONFIGURATIONS, maxResolution));
    if (entry.count > 0) {
        // Depth stream dimensions have four int32_t components
        // (pixelformat, width, height, type)
        size_t entryCount = entry.count / 4;
        depthSizes->reserve(entryCount);
        for (size_t i = 0; i < entry.count; i += 4) {
            if ((entry.data.i32[i] == kDepthMapPixelFormat) &&
                    (entry.data.i32[i+3] ==
                            ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT)) {
                depthSizes->push_back(std::make_tuple(entry.data.i32[i+1],
                        entry.data.i32[i+2]));
            }
        }
    }
}

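// Expand the client-visible dynamic depth stream info into the two internal streams that need
// to be configured: a jpeg/blob stream and a depth stream with a matching size.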
status_t DepthCompositeStream::getCompositeStreamInfo(const OutputStreamInfo &streamInfo,
        const CameraMetadata& ch, std::vector<OutputStreamInfo>* compositeOutput /*out*/) {
    if (compositeOutput == nullptr) {
        return BAD_VALUE;
    }

    std::vector<std::tuple<size_t, size_t>> depthSizes;
    std::vector<std::tuple<size_t, size_t>> depthSizesMaximumResolution;
    getSupportedDepthSizes(ch, /*maxResolution*/false, &depthSizes);
    if (depthSizes.empty()) {
        ALOGE("%s: No depth stream configurations present", __FUNCTION__);
        return BAD_VALUE;
    }

    if (SessionConfigurationUtils::supportsUltraHighResolutionCapture(ch)) {
        getSupportedDepthSizes(ch, /*maxResolution*/true, &depthSizesMaximumResolution);
        if (depthSizesMaximumResolution.empty()) {
            ALOGE("%s: No depth stream configurations for maximum resolution present",
                    __FUNCTION__);
            return BAD_VALUE;
        }
    }

    size_t chosenDepthWidth = 0, chosenDepthHeight = 0;
    auto ret = checkAndGetMatchingDepthSize(streamInfo.width, streamInfo.height, depthSizes,
            depthSizesMaximumResolution, streamInfo.sensorPixelModesUsed, &chosenDepthWidth,
            &chosenDepthHeight);

    if (ret != OK) {
        ALOGE("%s: Couldn't get matching depth sizes", __FUNCTION__);
        return ret;
    }

    compositeOutput->clear();
    compositeOutput->insert(compositeOutput->end(), 2, streamInfo);

    // Sensor pixel modes should stay the same here. They're already overridden.
    // Jpeg/Blob stream info
    (*compositeOutput)[0].dataSpace = kJpegDataSpace;
    (*compositeOutput)[0].consumerUsage = GRALLOC_USAGE_SW_READ_OFTEN;

    // Depth stream info
    (*compositeOutput)[1].width = chosenDepthWidth;
    (*compositeOutput)[1].height = chosenDepthHeight;
    (*compositeOutput)[1].format = kDepthMapPixelFormat;
    (*compositeOutput)[1].dataSpace = kDepthMapDataSpace;
    (*compositeOutput)[1].consumerUsage = GRALLOC_USAGE_SW_READ_OFTEN;

    return NO_ERROR;
}

}; // namespace camera3
}; // namespace android