/*
 * Copyright (C) 2018 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#define LOG_TAG "Camera3-DepthCompositeStream"
#define ATRACE_TAG ATRACE_TAG_CAMERA
//#define LOG_NDEBUG 0

#include "api1/client2/JpegProcessor.h"
#include "common/CameraProviderManager.h"
#include "utils/SessionConfigurationUtils.h"
#include <gui/Surface.h>
#include <utils/Log.h>
#include <utils/Trace.h>

#include "DepthCompositeStream.h"

namespace android {
namespace camera3 {

DepthCompositeStream::DepthCompositeStream(sp<CameraDeviceBase> device,
        wp<hardware::camera2::ICameraDeviceCallbacks> cb) :
        CompositeStream(device, cb),
        mBlobStreamId(-1),
        mBlobSurfaceId(-1),
        mDepthStreamId(-1),
        mDepthSurfaceId(-1),
        mBlobWidth(0),
        mBlobHeight(0),
        mDepthBufferAcquired(false),
        mBlobBufferAcquired(false),
        mProducerListener(new ProducerListener()),
        mMaxJpegBufferSize(-1),
        mUHRMaxJpegBufferSize(-1),
        mIsLogicalCamera(false) {
    if (device != nullptr) {
        CameraMetadata staticInfo = device->info();
        auto entry = staticInfo.find(ANDROID_JPEG_MAX_SIZE);
        if (entry.count > 0) {
            mMaxJpegBufferSize = entry.data.i32[0];
        } else {
            ALOGW("%s: Maximum jpeg size absent from camera characteristics", __FUNCTION__);
        }

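        // Precompute the maximum JPEG resolutions and buffer sizes for both the default and
        // the ultra high resolution sensor pixel modes; processInputFrame() later picks between
        // the two when sizing the final dynamic depth blob.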
        mUHRMaxJpegSize =
                SessionConfigurationUtils::getMaxJpegResolution(staticInfo,
                        /*ultraHighResolution*/true);
        mDefaultMaxJpegSize =
                SessionConfigurationUtils::getMaxJpegResolution(staticInfo,
                        /*ultraHighResolution*/false);

        mUHRMaxJpegBufferSize =
                SessionConfigurationUtils::getUHRMaxJpegBufferSize(mUHRMaxJpegSize,
                        mDefaultMaxJpegSize, mMaxJpegBufferSize);

        entry = staticInfo.find(ANDROID_LENS_INTRINSIC_CALIBRATION);
        if (entry.count == 5) {
            mIntrinsicCalibration.reserve(5);
            mIntrinsicCalibration.insert(mIntrinsicCalibration.end(), entry.data.f,
                    entry.data.f + 5);
        } else {
            ALOGW("%s: Intrinsic calibration absent from camera characteristics!", __FUNCTION__);
        }

        entry = staticInfo.find(ANDROID_LENS_DISTORTION);
        if (entry.count == 5) {
            mLensDistortion.reserve(5);
            mLensDistortion.insert(mLensDistortion.end(), entry.data.f, entry.data.f + 5);
        } else {
            ALOGW("%s: Lens distortion absent from camera characteristics!", __FUNCTION__);
        }

        entry = staticInfo.find(ANDROID_REQUEST_AVAILABLE_CAPABILITIES);
        for (size_t i = 0; i < entry.count; ++i) {
            uint8_t capability = entry.data.u8[i];
            if (capability == ANDROID_REQUEST_AVAILABLE_CAPABILITIES_LOGICAL_MULTI_CAMERA) {
                mIsLogicalCamera = true;
                break;
            }
        }

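        // Cache the supported depth map sizes for both sensor pixel modes up front; they are
        // consulted later when matching the depth stream resolution to the requested blob size.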
        getSupportedDepthSizes(staticInfo, /*maxResolution*/false, &mSupportedDepthSizes);
        if (SessionConfigurationUtils::isUltraHighResolutionSensor(staticInfo)) {
            getSupportedDepthSizes(staticInfo, /*maxResolution*/true,
                    &mSupportedDepthSizesMaximumResolution);
        }
    }
}

DepthCompositeStream::~DepthCompositeStream() {
    mBlobConsumer.clear();
    mBlobSurface.clear();
    mBlobStreamId = -1;
    mBlobSurfaceId = -1;
    mDepthConsumer.clear();
    mDepthSurface.clear();
    mDepthConsumer = nullptr;
    mDepthSurface = nullptr;
}

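// Drains all queued blob/depth buffer timestamps, capture results, frame numbers and error
// markers into 'mPendingInputFrames', locking at most one buffer from each internal consumer
// at a time.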
void DepthCompositeStream::compilePendingInputLocked() {
    CpuConsumer::LockedBuffer imgBuffer;

    while (!mInputJpegBuffers.empty() && !mBlobBufferAcquired) {
        auto it = mInputJpegBuffers.begin();
        auto res = mBlobConsumer->lockNextBuffer(&imgBuffer);
        if (res == NOT_ENOUGH_DATA) {
            // Cannot lock any more buffers.
            break;
        } else if (res != OK) {
            ALOGE("%s: Error locking blob image buffer: %s (%d)", __FUNCTION__,
                    strerror(-res), res);
            mPendingInputFrames[*it].error = true;
            mInputJpegBuffers.erase(it);
            continue;
        }

        if (*it != imgBuffer.timestamp) {
            ALOGW("%s: Expecting jpeg buffer with time stamp: %" PRId64 " received buffer with "
                    "time stamp: %" PRId64, __FUNCTION__, *it, imgBuffer.timestamp);
        }

        if ((mPendingInputFrames.find(imgBuffer.timestamp) != mPendingInputFrames.end()) &&
                (mPendingInputFrames[imgBuffer.timestamp].error)) {
            mBlobConsumer->unlockBuffer(imgBuffer);
        } else {
            mPendingInputFrames[imgBuffer.timestamp].jpegBuffer = imgBuffer;
            mBlobBufferAcquired = true;
        }
        mInputJpegBuffers.erase(it);
    }

    while (!mInputDepthBuffers.empty() && !mDepthBufferAcquired) {
        auto it = mInputDepthBuffers.begin();
        auto res = mDepthConsumer->lockNextBuffer(&imgBuffer);
        if (res == NOT_ENOUGH_DATA) {
            // Cannot lock any more buffers.
            break;
        } else if (res != OK) {
            ALOGE("%s: Error receiving depth image buffer: %s (%d)", __FUNCTION__,
                    strerror(-res), res);
            mPendingInputFrames[*it].error = true;
            mInputDepthBuffers.erase(it);
            continue;
        }

        if (*it != imgBuffer.timestamp) {
            ALOGW("%s: Expecting depth buffer with time stamp: %" PRId64 " received buffer with "
                    "time stamp: %" PRId64, __FUNCTION__, *it, imgBuffer.timestamp);
        }

        if ((mPendingInputFrames.find(imgBuffer.timestamp) != mPendingInputFrames.end()) &&
                (mPendingInputFrames[imgBuffer.timestamp].error)) {
            mDepthConsumer->unlockBuffer(imgBuffer);
        } else {
            mPendingInputFrames[imgBuffer.timestamp].depthBuffer = imgBuffer;
            mDepthBufferAcquired = true;
        }
        mInputDepthBuffers.erase(it);
    }

    while (!mCaptureResults.empty()) {
        auto it = mCaptureResults.begin();
        // Negative timestamp indicates that something went wrong during the capture result
        // collection process.
        if (it->first >= 0) {
            mPendingInputFrames[it->first].frameNumber = std::get<0>(it->second);
            mPendingInputFrames[it->first].result = std::get<1>(it->second);
        }
        mCaptureResults.erase(it);
    }

    while (!mFrameNumberMap.empty()) {
        auto it = mFrameNumberMap.begin();
        mPendingInputFrames[it->second].frameNumber = it->first;
        mFrameNumberMap.erase(it);
    }

    auto it = mErrorFrameNumbers.begin();
    while (it != mErrorFrameNumbers.end()) {
        bool frameFound = false;
        for (auto &inputFrame : mPendingInputFrames) {
            if (inputFrame.second.frameNumber == *it) {
                inputFrame.second.error = true;
                frameFound = true;
                break;
            }
        }

        if (frameFound) {
            it = mErrorFrameNumbers.erase(it);
        } else {
            ALOGW("%s: Not able to find failing input with frame number: %" PRId64, __FUNCTION__,
                    *it);
            it++;
        }
    }
}

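// Returns the oldest pending timestamp (strictly smaller than the passed-in *currentTs) that
// has both its jpeg and depth buffers locked and no error flagged.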
bool DepthCompositeStream::getNextReadyInputLocked(int64_t *currentTs /*inout*/) {
    if (currentTs == nullptr) {
        return false;
    }

    bool newInputAvailable = false;
    for (const auto& it : mPendingInputFrames) {
        if ((!it.second.error) && (it.second.depthBuffer.data != nullptr) &&
                (it.second.jpegBuffer.data != nullptr) && (it.first < *currentTs)) {
            *currentTs = it.first;
            newInputAvailable = true;
        }
    }

    return newInputAvailable;
}

int64_t DepthCompositeStream::getNextFailingInputLocked(int64_t *currentTs /*inout*/) {
    int64_t ret = -1;
    if (currentTs == nullptr) {
        return ret;
    }

    for (const auto& it : mPendingInputFrames) {
        if (it.second.error && !it.second.errorNotified && (it.first < *currentTs)) {
            *currentTs = it.first;
            ret = it.second.frameNumber;
        }
    }

    return ret;
}

status_t DepthCompositeStream::processInputFrame(nsecs_t ts, const InputFrame &inputFrame) {
    status_t res;
    sp<ANativeWindow> outputANW = mOutputSurface;
    ANativeWindowBuffer *anb;
    int fenceFd;
    void *dstBuffer;

    auto jpegSize = android::camera2::JpegProcessor::findJpegSize(inputFrame.jpegBuffer.data,
            inputFrame.jpegBuffer.width);
    if (jpegSize == 0) {
        ALOGW("%s: Failed to find input jpeg size, default to using entire buffer!", __FUNCTION__);
        jpegSize = inputFrame.jpegBuffer.width;
    }

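    // Upper bound for a single depth-photo sub-image. With a known static max JPEG size, pick
    // either the default or the ultra high resolution limit depending on the input frame
    // dimensions; otherwise fall back to a conservative estimate based on the depth map area.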
    size_t maxDepthJpegBufferSize = 0;
    if (mMaxJpegBufferSize > 0) {
        // If this is an ultra high resolution sensor and the input frame size is larger than
        // the default resolution jpeg, use the UHR buffer size limit.
        if (mUHRMaxJpegSize.width != 0 &&
                inputFrame.jpegBuffer.width * inputFrame.jpegBuffer.height >
                mDefaultMaxJpegSize.width * mDefaultMaxJpegSize.height) {
            maxDepthJpegBufferSize = mUHRMaxJpegBufferSize;
        } else {
            maxDepthJpegBufferSize = mMaxJpegBufferSize;
        }
    } else {
        maxDepthJpegBufferSize = std::max<size_t> (jpegSize,
                inputFrame.depthBuffer.width * inputFrame.depthBuffer.height * 3 / 2);
    }

    uint8_t jpegQuality = 100;
    auto entry = inputFrame.result.find(ANDROID_JPEG_QUALITY);
    if (entry.count > 0) {
        jpegQuality = entry.data.u8[0];
    }

    // The final depth photo will consist of the main jpeg buffer, the depth map buffer (also in
    // jpeg format) and the confidence map (jpeg as well). Assume the worst case that all 3 jpegs
    // need the max jpeg size.
    size_t finalJpegBufferSize = maxDepthJpegBufferSize * 3;

    if ((res = native_window_set_buffers_dimensions(mOutputSurface.get(), finalJpegBufferSize, 1))
            != OK) {
        ALOGE("%s: Unable to configure stream buffer dimensions"
                " %zux%u for stream %d", __FUNCTION__, finalJpegBufferSize, 1U, mBlobStreamId);
        return res;
    }

    res = outputANW->dequeueBuffer(mOutputSurface.get(), &anb, &fenceFd);
    if (res != OK) {
        ALOGE("%s: Error retrieving output buffer: %s (%d)", __FUNCTION__, strerror(-res),
                res);
        return res;
    }

    sp<GraphicBuffer> gb = GraphicBuffer::from(anb);
    GraphicBufferLocker gbLocker(gb);
    res = gbLocker.lockAsync(&dstBuffer, fenceFd);
    if (res != OK) {
        ALOGE("%s: Error trying to lock output buffer fence: %s (%d)", __FUNCTION__,
                strerror(-res), res);
        outputANW->cancelBuffer(mOutputSurface.get(), anb, /*fence*/ -1);
        return res;
    }

    if ((gb->getWidth() < finalJpegBufferSize) || (gb->getHeight() != 1)) {
        ALOGE("%s: Blob buffer size mismatch, expected %zux%u received %dx%d", __FUNCTION__,
                finalJpegBufferSize, 1U, gb->getWidth(), gb->getHeight());
        outputANW->cancelBuffer(mOutputSurface.get(), anb, /*fence*/ -1);
        return BAD_VALUE;
    }

    DepthPhotoInputFrame depthPhoto;
    depthPhoto.mMainJpegBuffer = reinterpret_cast<const char*> (inputFrame.jpegBuffer.data);
    depthPhoto.mMainJpegWidth = mBlobWidth;
    depthPhoto.mMainJpegHeight = mBlobHeight;
    depthPhoto.mMainJpegSize = jpegSize;
    depthPhoto.mDepthMapBuffer = reinterpret_cast<uint16_t*> (inputFrame.depthBuffer.data);
    depthPhoto.mDepthMapWidth = inputFrame.depthBuffer.width;
    depthPhoto.mDepthMapHeight = inputFrame.depthBuffer.height;
    depthPhoto.mDepthMapStride = inputFrame.depthBuffer.stride;
    depthPhoto.mJpegQuality = jpegQuality;
    depthPhoto.mIsLogical = mIsLogicalCamera;
    depthPhoto.mMaxJpegSize = maxDepthJpegBufferSize;
    // The camera intrinsic calibration layout is as follows:
    // [focalLengthX, focalLengthY, opticalCenterX, opticalCenterY, skew]
    if (mIntrinsicCalibration.size() == 5) {
        memcpy(depthPhoto.mIntrinsicCalibration, mIntrinsicCalibration.data(),
                sizeof(depthPhoto.mIntrinsicCalibration));
        depthPhoto.mIsIntrinsicCalibrationValid = 1;
    } else {
        depthPhoto.mIsIntrinsicCalibrationValid = 0;
    }
    // The camera lens distortion contains the following lens correction coefficients:
    // [kappa_1, kappa_2, kappa_3, kappa_4, kappa_5]
    if (mLensDistortion.size() == 5) {
        memcpy(depthPhoto.mLensDistortion, mLensDistortion.data(),
                sizeof(depthPhoto.mLensDistortion));
        depthPhoto.mIsLensDistortionValid = 1;
    } else {
        depthPhoto.mIsLensDistortionValid = 0;
    }
    entry = inputFrame.result.find(ANDROID_JPEG_ORIENTATION);
    if (entry.count > 0) {
        // The camera jpeg orientation value must be one of {0, 90, 180, 270} degrees.
        switch (entry.data.i32[0]) {
            case 0:
            case 90:
            case 180:
            case 270:
                depthPhoto.mOrientation = static_cast<DepthPhotoOrientation> (entry.data.i32[0]);
                break;
            default:
                ALOGE("%s: Unexpected jpeg orientation value: %d, default to 0 degrees",
                        __FUNCTION__, entry.data.i32[0]);
        }
    }

    size_t actualJpegSize = 0;
    res = processDepthPhotoFrame(depthPhoto, finalJpegBufferSize, dstBuffer, &actualJpegSize);
    if (res != 0) {
        ALOGE("%s: Depth photo processing failed: %s (%d)", __FUNCTION__, strerror(-res), res);
        outputANW->cancelBuffer(mOutputSurface.get(), anb, /*fence*/ -1);
        return res;
    }

    size_t finalJpegSize = actualJpegSize + sizeof(struct camera_jpeg_blob);
    if (finalJpegSize > finalJpegBufferSize) {
        ALOGE("%s: Final jpeg buffer not large enough for the jpeg blob header", __FUNCTION__);
        outputANW->cancelBuffer(mOutputSurface.get(), anb, /*fence*/ -1);
        return NO_MEMORY;
    }

    res = native_window_set_buffers_timestamp(mOutputSurface.get(), ts);
    if (res != OK) {
        ALOGE("%s: Stream %d: Error setting timestamp: %s (%d)", __FUNCTION__,
                getStreamId(), strerror(-res), res);
        return res;
    }

    ALOGV("%s: Final jpeg size: %zu", __func__, finalJpegSize);
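    // Android blob streams carry the actual payload size in a camera_jpeg_blob transport header
    // written at the very end of the buffer (the buffer is allocated as width x 1), which is how
    // downstream consumers recover the real jpeg size from the oversized allocation.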
    uint8_t* header = static_cast<uint8_t *> (dstBuffer) +
            (gb->getWidth() - sizeof(struct camera_jpeg_blob));
    struct camera_jpeg_blob *blob = reinterpret_cast<struct camera_jpeg_blob*> (header);
    blob->jpeg_blob_id = CAMERA_JPEG_BLOB_ID;
    blob->jpeg_size = actualJpegSize;
    outputANW->queueBuffer(mOutputSurface.get(), anb, /*fence*/ -1);

    return res;
}

void DepthCompositeStream::releaseInputFrameLocked(InputFrame *inputFrame /*out*/) {
    if (inputFrame == nullptr) {
        return;
    }

    if (inputFrame->depthBuffer.data != nullptr) {
        mDepthConsumer->unlockBuffer(inputFrame->depthBuffer);
        inputFrame->depthBuffer.data = nullptr;
        mDepthBufferAcquired = false;
    }

    if (inputFrame->jpegBuffer.data != nullptr) {
        mBlobConsumer->unlockBuffer(inputFrame->jpegBuffer);
        inputFrame->jpegBuffer.data = nullptr;
        mBlobBufferAcquired = false;
    }

    if ((inputFrame->error || mErrorState) && !inputFrame->errorNotified) {
        //TODO: Figure out correct requestId
        notifyError(inputFrame->frameNumber, -1 /*requestId*/);
        inputFrame->errorNotified = true;
    }
}

void DepthCompositeStream::releaseInputFramesLocked(int64_t currentTs) {
    auto it = mPendingInputFrames.begin();
    while (it != mPendingInputFrames.end()) {
        if (it->first <= currentTs) {
            releaseInputFrameLocked(&it->second);
            it = mPendingInputFrames.erase(it);
        } else {
            it++;
        }
    }
}

bool DepthCompositeStream::threadLoop() {
    int64_t currentTs = INT64_MAX;
    bool newInputAvailable = false;

    {
        Mutex::Autolock l(mMutex);

        if (mErrorState) {
            // In case we landed in error state, return any pending buffers and
            // halt all further processing.
            compilePendingInputLocked();
            releaseInputFramesLocked(currentTs);
            return false;
        }

        while (!newInputAvailable) {
            compilePendingInputLocked();
            newInputAvailable = getNextReadyInputLocked(&currentTs);
            if (!newInputAvailable) {
                auto failingFrameNumber = getNextFailingInputLocked(&currentTs);
                if (failingFrameNumber >= 0) {
                    // We cannot erase 'mPendingInputFrames[currentTs]' at this point because it is
                    // possible for two internal stream buffers to fail. In such scenario the
                    // composite stream should notify the client about a stream buffer error only
                    // once and this information is kept within 'errorNotified'.
                    // Any present failed input frames will be removed on a subsequent call to
                    // 'releaseInputFramesLocked()'.
                    releaseInputFrameLocked(&mPendingInputFrames[currentTs]);
                    currentTs = INT64_MAX;
                }

                auto ret = mInputReadyCondition.waitRelative(mMutex, kWaitDuration);
                if (ret == TIMED_OUT) {
                    return true;
                } else if (ret != OK) {
                    ALOGE("%s: Timed wait on condition failed: %s (%d)", __FUNCTION__,
                            strerror(-ret), ret);
                    return false;
                }
            }
        }
    }

    auto res = processInputFrame(currentTs, mPendingInputFrames[currentTs]);
    Mutex::Autolock l(mMutex);
    if (res != OK) {
        ALOGE("%s: Failed processing frame with timestamp: %" PRId64 ": %s (%d)", __FUNCTION__,
                currentTs, strerror(-res), res);
        mPendingInputFrames[currentTs].error = true;
    }

    releaseInputFramesLocked(currentTs);

    return true;
}

bool DepthCompositeStream::isDepthCompositeStream(const sp<Surface> &surface) {
    ANativeWindow *anw = surface.get();
    status_t err;
    int format;
    if ((err = anw->query(anw, NATIVE_WINDOW_FORMAT, &format)) != OK) {
        String8 msg = String8::format("Failed to query Surface format: %s (%d)", strerror(-err),
                err);
        ALOGE("%s: %s", __FUNCTION__, msg.string());
        return false;
    }

    int dataspace;
    if ((err = anw->query(anw, NATIVE_WINDOW_DEFAULT_DATASPACE, &dataspace)) != OK) {
        String8 msg = String8::format("Failed to query Surface dataspace: %s (%d)", strerror(-err),
                err);
        ALOGE("%s: %s", __FUNCTION__, msg.string());
        return false;
    }

    if ((format == HAL_PIXEL_FORMAT_BLOB) && (dataspace == HAL_DATASPACE_DYNAMIC_DEPTH)) {
        return true;
    }

    return false;
}

static bool setContains(const std::unordered_set<int32_t>& containerSet, int32_t value) {
    return containerSet.find(value) != containerSet.end();
}

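// Both sensor pixel modes may be requested for the same stream; in that case the default and
// maximum resolution depth configurations must resolve to the same depth size, since only a
// single internal depth stream is created.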
status_t DepthCompositeStream::checkAndGetMatchingDepthSize(size_t width, size_t height,
        const std::vector<std::tuple<size_t, size_t>> &depthSizes,
        const std::vector<std::tuple<size_t, size_t>> &depthSizesMaximumResolution,
        const std::unordered_set<int32_t> &sensorPixelModesUsed,
        size_t *depthWidth, size_t *depthHeight) {
    if (depthWidth == nullptr || depthHeight == nullptr) {
        return BAD_VALUE;
    }
    size_t chosenDepthWidth = 0, chosenDepthHeight = 0;
    bool hasDefaultSensorPixelMode =
            setContains(sensorPixelModesUsed, ANDROID_SENSOR_PIXEL_MODE_DEFAULT);

    bool hasMaximumResolutionSensorPixelMode =
            setContains(sensorPixelModesUsed, ANDROID_SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION);

    if (!hasDefaultSensorPixelMode && !hasMaximumResolutionSensorPixelMode) {
        ALOGE("%s: sensor pixel modes don't contain either maximum resolution or default modes",
                __FUNCTION__);
        return BAD_VALUE;
    }

    if (hasDefaultSensorPixelMode) {
        auto ret = getMatchingDepthSize(width, height, depthSizes, &chosenDepthWidth,
                &chosenDepthHeight);
        if (ret != OK) {
            ALOGE("%s: No matching depth stream size found", __FUNCTION__);
            return ret;
        }
    }

    if (hasMaximumResolutionSensorPixelMode) {
        size_t depthWidth = 0, depthHeight = 0;
        auto ret = getMatchingDepthSize(width, height,
                depthSizesMaximumResolution, &depthWidth, &depthHeight);
        if (ret != OK) {
            ALOGE("%s: No matching max resolution depth stream size found", __FUNCTION__);
            return ret;
        }
        // Both matching depth sizes should be the same.
        if (chosenDepthWidth != 0 && (chosenDepthWidth != depthWidth ||
                chosenDepthHeight != depthHeight)) {
            ALOGE("%s: Maximum resolution sensor pixel mode and default sensor pixel mode don't"
                    " have matching depth sizes", __FUNCTION__);
            return BAD_VALUE;
        }
        if (chosenDepthWidth == 0) {
            chosenDepthWidth = depthWidth;
            chosenDepthHeight = depthHeight;
        }
    }
    *depthWidth = chosenDepthWidth;
    *depthHeight = chosenDepthHeight;
    return OK;
}


status_t DepthCompositeStream::createInternalStreams(const std::vector<sp<Surface>>& consumers,
        bool /*hasDeferredConsumer*/, uint32_t width, uint32_t height, int format,
        camera_stream_rotation_t rotation, int *id, const String8& physicalCameraId,
        const std::unordered_set<int32_t> &sensorPixelModesUsed,
        std::vector<int> *surfaceIds,
        int /*streamSetId*/, bool /*isShared*/) {
    if (mSupportedDepthSizes.empty()) {
        ALOGE("%s: This camera device doesn't support any depth map streams!", __FUNCTION__);
        return INVALID_OPERATION;
    }

    size_t depthWidth, depthHeight;
    auto ret =
            checkAndGetMatchingDepthSize(width, height, mSupportedDepthSizes,
                    mSupportedDepthSizesMaximumResolution, sensorPixelModesUsed, &depthWidth,
                    &depthHeight);
    if (ret != OK) {
        ALOGE("%s: Failed to find an appropriate depth stream size!", __FUNCTION__);
        return ret;
    }

    sp<CameraDeviceBase> device = mDevice.promote();
    if (!device.get()) {
        ALOGE("%s: Invalid camera device!", __FUNCTION__);
        return NO_INIT;
    }

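    // Two internal streams back the dynamic depth output: a JPEG blob stream at the requested
    // blob resolution and a 16-bit depth stream at the matched depth resolution. The
    // client-facing surface is kept in mOutputSurface and only receives the final composite.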
    sp<IGraphicBufferProducer> producer;
    sp<IGraphicBufferConsumer> consumer;
    BufferQueue::createBufferQueue(&producer, &consumer);
    mBlobConsumer = new CpuConsumer(consumer, /*maxLockedBuffers*/1, /*controlledByApp*/ true);
    mBlobConsumer->setFrameAvailableListener(this);
    mBlobConsumer->setName(String8("Camera3-JpegCompositeStream"));
    mBlobSurface = new Surface(producer);

    ret = device->createStream(mBlobSurface, width, height, format, kJpegDataSpace, rotation,
            id, physicalCameraId, sensorPixelModesUsed, surfaceIds);
    if (ret == OK) {
        mBlobStreamId = *id;
        mBlobSurfaceId = (*surfaceIds)[0];
        mOutputSurface = consumers[0];
    } else {
        return ret;
    }

    BufferQueue::createBufferQueue(&producer, &consumer);
    mDepthConsumer = new CpuConsumer(consumer, /*maxLockedBuffers*/ 1, /*controlledByApp*/ true);
    mDepthConsumer->setFrameAvailableListener(this);
    mDepthConsumer->setName(String8("Camera3-DepthCompositeStream"));
    mDepthSurface = new Surface(producer);
    std::vector<int> depthSurfaceId;
    ret = device->createStream(mDepthSurface, depthWidth, depthHeight, kDepthMapPixelFormat,
            kDepthMapDataSpace, rotation, &mDepthStreamId, physicalCameraId, sensorPixelModesUsed,
            &depthSurfaceId);
    if (ret == OK) {
        mDepthSurfaceId = depthSurfaceId[0];
    } else {
        return ret;
    }

    ret = registerCompositeStreamListener(getStreamId());
    if (ret != OK) {
        ALOGE("%s: Failed to register blob stream listener!", __FUNCTION__);
        return ret;
    }

    ret = registerCompositeStreamListener(mDepthStreamId);
    if (ret != OK) {
        ALOGE("%s: Failed to register depth stream listener!", __FUNCTION__);
        return ret;
    }

    mBlobWidth = width;
    mBlobHeight = height;

    return ret;
}

status_t DepthCompositeStream::configureStream() {
    if (isRunning()) {
        // Processing thread is already running, nothing more to do.
        return NO_ERROR;
    }

    if (mOutputSurface.get() == nullptr) {
        ALOGE("%s: No valid output surface set!", __FUNCTION__);
        return NO_INIT;
    }

    auto res = mOutputSurface->connect(NATIVE_WINDOW_API_CAMERA, mProducerListener);
    if (res != OK) {
        ALOGE("%s: Unable to connect to native window for stream %d",
                __FUNCTION__, mBlobStreamId);
        return res;
    }

    if ((res = native_window_set_buffers_format(mOutputSurface.get(), HAL_PIXEL_FORMAT_BLOB))
            != OK) {
        ALOGE("%s: Unable to configure stream buffer format for stream %d", __FUNCTION__,
                mBlobStreamId);
        return res;
    }

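    // Size the client-facing output queue to cover the minimum undequeued buffer requirements of
    // both the internal blob surface and the output surface itself, so that dequeueBuffer() in
    // processInputFrame() does not stall while buffers are held downstream.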
    int maxProducerBuffers;
    ANativeWindow *anw = mBlobSurface.get();
    if ((res = anw->query(anw, NATIVE_WINDOW_MIN_UNDEQUEUED_BUFFERS, &maxProducerBuffers)) != OK) {
        ALOGE("%s: Unable to query consumer undequeued"
                " buffer count for stream %d", __FUNCTION__, mBlobStreamId);
        return res;
    }

    ANativeWindow *anwConsumer = mOutputSurface.get();
    int maxConsumerBuffers;
    if ((res = anwConsumer->query(anwConsumer, NATIVE_WINDOW_MIN_UNDEQUEUED_BUFFERS,
                    &maxConsumerBuffers)) != OK) {
        ALOGE("%s: Unable to query consumer undequeued"
                " buffer count for stream %d", __FUNCTION__, mBlobStreamId);
        return res;
    }

    if ((res = native_window_set_buffer_count(
                    anwConsumer, maxProducerBuffers + maxConsumerBuffers)) != OK) {
        ALOGE("%s: Unable to set buffer count for stream %d", __FUNCTION__, mBlobStreamId);
        return res;
    }

    run("DepthCompositeStreamProc");

    return NO_ERROR;
}

status_t DepthCompositeStream::deleteInternalStreams() {
    // The 'CameraDeviceClient' parent will delete the blob stream
    requestExit();

    auto ret = join();
    if (ret != OK) {
        ALOGE("%s: Failed to join with the main processing thread: %s (%d)", __FUNCTION__,
                strerror(-ret), ret);
    }

    if (mDepthStreamId >= 0) {
        // Camera devices may not be valid after switching to offline mode.
        // In this case, all offline streams including internal composite streams
        // are managed and released by the offline session.
        sp<CameraDeviceBase> device = mDevice.promote();
        if (device.get() != nullptr) {
            ret = device->deleteStream(mDepthStreamId);
        }

        mDepthStreamId = -1;
    }

    if (mOutputSurface != nullptr) {
        mOutputSurface->disconnect(NATIVE_WINDOW_API_CAMERA);
        mOutputSurface.clear();
    }

    return ret;
}

void DepthCompositeStream::onFrameAvailable(const BufferItem& item) {
    if (item.mDataSpace == kJpegDataSpace) {
        ALOGV("%s: Jpeg buffer with ts: %" PRIu64 " ms. arrived!",
                __func__, ns2ms(item.mTimestamp));

        Mutex::Autolock l(mMutex);
        if (!mErrorState) {
            mInputJpegBuffers.push_back(item.mTimestamp);
            mInputReadyCondition.signal();
        }
    } else if (item.mDataSpace == kDepthMapDataSpace) {
        ALOGV("%s: Depth buffer with ts: %" PRIu64 " ms. arrived!", __func__,
                ns2ms(item.mTimestamp));

        Mutex::Autolock l(mMutex);
        if (!mErrorState) {
            mInputDepthBuffers.push_back(item.mTimestamp);
            mInputReadyCondition.signal();
        }
    } else {
        ALOGE("%s: Unexpected data space: 0x%x", __FUNCTION__, item.mDataSpace);
    }
}

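// Attaches both internal stream ids (and their surface ids) to an outgoing capture request so
// that the blob and depth buffers for the same frame are produced together; the blob stream id
// is reported back as the current, client-visible stream id.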
status_t DepthCompositeStream::insertGbp(SurfaceMap* /*out*/outSurfaceMap,
        Vector<int32_t> * /*out*/outputStreamIds, int32_t* /*out*/currentStreamId) {
    if (outSurfaceMap->find(mDepthStreamId) == outSurfaceMap->end()) {
        outputStreamIds->push_back(mDepthStreamId);
    }
    (*outSurfaceMap)[mDepthStreamId].push_back(mDepthSurfaceId);

    if (outSurfaceMap->find(mBlobStreamId) == outSurfaceMap->end()) {
        outputStreamIds->push_back(mBlobStreamId);
    }
    (*outSurfaceMap)[mBlobStreamId].push_back(mBlobSurfaceId);

    if (currentStreamId != nullptr) {
        *currentStreamId = mBlobStreamId;
    }

    return NO_ERROR;
}

status_t DepthCompositeStream::insertCompositeStreamIds(
        std::vector<int32_t>* compositeStreamIds /*out*/) {
    if (compositeStreamIds == nullptr) {
        return BAD_VALUE;
    }

    compositeStreamIds->push_back(mDepthStreamId);
    compositeStreamIds->push_back(mBlobStreamId);

    return OK;
}

void DepthCompositeStream::onResultError(const CaptureResultExtras& resultExtras) {
    // Processing can continue even in case of result errors.
    // At the moment depth composite stream processing relies mainly on static camera
    // characteristics data. The actual result data can be used for the jpeg quality but
    // in case it is absent we can default to maximum.
    eraseResult(resultExtras.frameNumber);
}

bool DepthCompositeStream::onStreamBufferError(const CaptureResultExtras& resultExtras) {
    bool ret = false;
    // Buffer errors concerning internal composite streams should not be directly visible to
    // camera clients. They must only receive a single buffer error with the public composite
    // stream id.
    if ((resultExtras.errorStreamId == mDepthStreamId) ||
            (resultExtras.errorStreamId == mBlobStreamId)) {
        flagAnErrorFrameNumber(resultExtras.frameNumber);
        ret = true;
    }

    return ret;
}

status_t DepthCompositeStream::getMatchingDepthSize(size_t width, size_t height,
        const std::vector<std::tuple<size_t, size_t>>& supportedDepthSizes,
        size_t *depthWidth /*out*/, size_t *depthHeight /*out*/) {
    if ((depthWidth == nullptr) || (depthHeight == nullptr)) {
        return BAD_VALUE;
    }

    float arTol = CameraProviderManager::kDepthARTolerance;
    *depthWidth = *depthHeight = 0;

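    // Prefer an exact dimension match; otherwise pick the largest supported depth size whose
    // aspect ratio is within kDepthARTolerance of the requested blob aspect ratio.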
    float aspectRatio = static_cast<float> (width) / static_cast<float> (height);
    for (const auto& it : supportedDepthSizes) {
        auto currentWidth = std::get<0>(it);
        auto currentHeight = std::get<1>(it);
        if ((currentWidth == width) && (currentHeight == height)) {
            *depthWidth = width;
            *depthHeight = height;
            break;
        } else {
            float currentRatio = static_cast<float> (currentWidth) /
                    static_cast<float> (currentHeight);
            auto currentSize = currentWidth * currentHeight;
            auto oldSize = (*depthWidth) * (*depthHeight);
            if ((fabs(aspectRatio - currentRatio) <= arTol) && (currentSize > oldSize)) {
                *depthWidth = currentWidth;
                *depthHeight = currentHeight;
            }
        }
    }

    return ((*depthWidth > 0) && (*depthHeight > 0)) ? OK : BAD_VALUE;
}

void DepthCompositeStream::getSupportedDepthSizes(const CameraMetadata& ch, bool maxResolution,
        std::vector<std::tuple<size_t, size_t>>* depthSizes /*out*/) {
    if (depthSizes == nullptr) {
        return;
    }

    auto entry = ch.find(
            camera3::SessionConfigurationUtils::getAppropriateModeTag(
                    ANDROID_DEPTH_AVAILABLE_DEPTH_STREAM_CONFIGURATIONS, maxResolution));
    if (entry.count > 0) {
        // Depth stream dimensions have four int32_t components
        // (pixelformat, width, height, type)
        size_t entryCount = entry.count / 4;
        depthSizes->reserve(entryCount);
        for (size_t i = 0; i < entry.count; i += 4) {
            if ((entry.data.i32[i] == kDepthMapPixelFormat) &&
                    (entry.data.i32[i+3] ==
                            ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT)) {
                depthSizes->push_back(std::make_tuple(entry.data.i32[i+1],
                        entry.data.i32[i+2]));
            }
        }
    }
}

status_t DepthCompositeStream::getCompositeStreamInfo(const OutputStreamInfo &streamInfo,
        const CameraMetadata& ch, std::vector<OutputStreamInfo>* compositeOutput /*out*/) {
    if (compositeOutput == nullptr) {
        return BAD_VALUE;
    }

    std::vector<std::tuple<size_t, size_t>> depthSizes;
    std::vector<std::tuple<size_t, size_t>> depthSizesMaximumResolution;
    getSupportedDepthSizes(ch, /*maxResolution*/false, &depthSizes);
    if (depthSizes.empty()) {
        ALOGE("%s: No depth stream configurations present", __FUNCTION__);
        return BAD_VALUE;
    }

    if (SessionConfigurationUtils::isUltraHighResolutionSensor(ch)) {
        getSupportedDepthSizes(ch, /*maxResolution*/true, &depthSizesMaximumResolution);
        if (depthSizesMaximumResolution.empty()) {
            ALOGE("%s: No depth stream configurations for maximum resolution present",
                    __FUNCTION__);
            return BAD_VALUE;
        }
    }

    size_t chosenDepthWidth = 0, chosenDepthHeight = 0;
    auto ret = checkAndGetMatchingDepthSize(streamInfo.width, streamInfo.height, depthSizes,
            depthSizesMaximumResolution, streamInfo.sensorPixelModesUsed, &chosenDepthWidth,
            &chosenDepthHeight);

    if (ret != OK) {
        ALOGE("%s: Couldn't get matching depth sizes", __FUNCTION__);
        return ret;
    }

    compositeOutput->clear();
    compositeOutput->insert(compositeOutput->end(), 2, streamInfo);

    // Sensor pixel modes should stay the same here. They're already overridden.
    // Jpeg/Blob stream info
    (*compositeOutput)[0].dataSpace = kJpegDataSpace;
    (*compositeOutput)[0].consumerUsage = GRALLOC_USAGE_SW_READ_OFTEN;

    // Depth stream info
    (*compositeOutput)[1].width = chosenDepthWidth;
    (*compositeOutput)[1].height = chosenDepthHeight;
    (*compositeOutput)[1].format = kDepthMapPixelFormat;
    (*compositeOutput)[1].dataSpace = kDepthMapDataSpace;
    (*compositeOutput)[1].consumerUsage = GRALLOC_USAGE_SW_READ_OFTEN;

    return NO_ERROR;
}

}; // namespace camera3
}; // namespace android