/*
 * Copyright (C) 2022 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
16
17#include "hardware/gralloc.h"
18#include "system/graphics-base-v1.0.h"
19#include "system/graphics-base-v1.1.h"
20#define LOG_TAG "Camera3-JpegRCompositeStream"
21#define ATRACE_TAG ATRACE_TAG_CAMERA
22//#define LOG_NDEBUG 0
23
24#include <aidl/android/hardware/camera/device/CameraBlob.h>
25#include <aidl/android/hardware/camera/device/CameraBlobId.h>
26
27#include "common/CameraProviderManager.h"
28#include <gui/Surface.h>
29#include <utils/ExifUtils.h>
30#include <utils/Log.h>
31#include "utils/SessionConfigurationUtils.h"
32#include <utils/Trace.h>
33
34#include "JpegRCompositeStream.h"
35
36namespace android {
37namespace camera3 {
38
39using aidl::android::hardware::camera::device::CameraBlob;
40using aidl::android::hardware::camera::device::CameraBlobId;
41
// Constructs the Jpeg/R composite stream wrapper. Stream and surface ids start
// out invalid (-1) until createInternalStreams() assigns them; buffer-acquired
// flags start false. The camera's static characteristics are snapshotted from
// 'device->info()' and used to precompute the maximum jpeg buffer sizes.
JpegRCompositeStream::JpegRCompositeStream(sp<CameraDeviceBase> device,
        wp<hardware::camera2::ICameraDeviceCallbacks> cb) :
        CompositeStream(device, cb),
        mBlobStreamId(-1),
        mBlobSurfaceId(-1),
        mP010StreamId(-1),
        mP010SurfaceId(-1),
        mBlobWidth(0),
        mBlobHeight(0),
        mP010BufferAcquired(false),
        mBlobBufferAcquired(false),
        mOutputColorSpace(ANDROID_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP_UNSPECIFIED),
        mProducerListener(new ProducerListener()),
        mMaxJpegBufferSize(-1),
        mUHRMaxJpegBufferSize(-1),
        mStaticInfo(device->info()) {
    // ANDROID_JPEG_MAX_SIZE is optional; when absent mMaxJpegBufferSize stays -1
    // and processInputFrame() falls back to sizing from the P010 input dimensions.
    auto entry = mStaticInfo.find(ANDROID_JPEG_MAX_SIZE);
    if (entry.count > 0) {
        mMaxJpegBufferSize = entry.data.i32[0];
    } else {
        ALOGW("%s: Maximum jpeg size absent from camera characteristics", __FUNCTION__);
    }

    // Largest jpeg resolutions for the ultra-high-resolution and default sensor
    // pixel modes respectively; both feed into the UHR buffer-size heuristic below.
    mUHRMaxJpegSize =
            SessionConfigurationUtils::getMaxJpegResolution(mStaticInfo,
                    /*isUltraHighResolution*/true);
    mDefaultMaxJpegSize =
            SessionConfigurationUtils::getMaxJpegResolution(mStaticInfo,
                    /*isUltraHighResolution*/false);

    mUHRMaxJpegBufferSize =
            SessionConfigurationUtils::getUHRMaxJpegBufferSize(mUHRMaxJpegSize, mDefaultMaxJpegSize,
                    mMaxJpegBufferSize);
}
76
77JpegRCompositeStream::~JpegRCompositeStream() {
78 mBlobConsumer.clear(),
79 mBlobSurface.clear(),
80 mBlobStreamId = -1;
81 mBlobSurfaceId = -1;
82 mP010Consumer.clear();
83 mP010Surface.clear();
84 mP010Consumer = nullptr;
85 mP010Surface = nullptr;
86}
87
// Drains all producer-side queues (jpeg timestamps, P010 timestamps, capture
// results, frame-number map, error frame numbers) into 'mPendingInputFrames',
// keyed by buffer timestamp. At most one jpeg and one P010 buffer are held
// locked at any time (tracked by mBlobBufferAcquired / mP010BufferAcquired)
// because both CpuConsumers are created with maxLockedBuffers == 1.
// Caller must hold mMutex (enforced by convention — "Locked" suffix).
void JpegRCompositeStream::compilePendingInputLocked() {
    CpuConsumer::LockedBuffer imgBuffer;

    // Acquire at most one pending jpeg (blob) buffer, matching it to a pending
    // frame by timestamp. Skipped entirely when internal jpeg is unsupported.
    while (mSupportInternalJpeg && !mInputJpegBuffers.empty() && !mBlobBufferAcquired) {
        auto it = mInputJpegBuffers.begin();
        auto res = mBlobConsumer->lockNextBuffer(&imgBuffer);
        if (res == NOT_ENOUGH_DATA) {
            // Can not lock any more buffers.
            break;
        } else if (res != OK) {
            ALOGE("%s: Error locking blob image buffer: %s (%d)", __FUNCTION__,
                    strerror(-res), res);
            mPendingInputFrames[*it].error = true;
            mInputJpegBuffers.erase(it);
            continue;
        }

        // Mismatches are logged but the locked buffer's own timestamp wins as
        // the pending-frame key below.
        if (*it != imgBuffer.timestamp) {
            ALOGW("%s: Expecting jpeg buffer with time stamp: %" PRId64 " received buffer with "
                    "time stamp: %" PRId64, __FUNCTION__, *it, imgBuffer.timestamp);
        }

        // If this frame was already flagged as failed, return the buffer
        // immediately instead of stashing it.
        if ((mPendingInputFrames.find(imgBuffer.timestamp) != mPendingInputFrames.end()) &&
                (mPendingInputFrames[imgBuffer.timestamp].error)) {
            mBlobConsumer->unlockBuffer(imgBuffer);
        } else {
            mPendingInputFrames[imgBuffer.timestamp].jpegBuffer = imgBuffer;
            mBlobBufferAcquired = true;
        }
        mInputJpegBuffers.erase(it);
    }

    // Same acquisition logic for the P010 input buffers.
    while (!mInputP010Buffers.empty() && !mP010BufferAcquired) {
        auto it = mInputP010Buffers.begin();
        auto res = mP010Consumer->lockNextBuffer(&imgBuffer);
        if (res == NOT_ENOUGH_DATA) {
            // Can not lock any more buffers.
            break;
        } else if (res != OK) {
            ALOGE("%s: Error receiving P010 image buffer: %s (%d)", __FUNCTION__,
                    strerror(-res), res);
            mPendingInputFrames[*it].error = true;
            mInputP010Buffers.erase(it);
            continue;
        }

        if (*it != imgBuffer.timestamp) {
            ALOGW("%s: Expecting P010 buffer with time stamp: %" PRId64 " received buffer with "
                    "time stamp: %" PRId64, __FUNCTION__, *it, imgBuffer.timestamp);
        }

        if ((mPendingInputFrames.find(imgBuffer.timestamp) != mPendingInputFrames.end()) &&
                (mPendingInputFrames[imgBuffer.timestamp].error)) {
            mP010Consumer->unlockBuffer(imgBuffer);
        } else {
            mPendingInputFrames[imgBuffer.timestamp].p010Buffer = imgBuffer;
            mP010BufferAcquired = true;
        }
        mInputP010Buffers.erase(it);
    }

    // Attach collected capture results (frame number + metadata) to their
    // pending frames, keyed by sensor timestamp.
    while (!mCaptureResults.empty()) {
        auto it = mCaptureResults.begin();
        // Negative timestamp indicates that something went wrong during the capture result
        // collection process.
        if (it->first >= 0) {
            mPendingInputFrames[it->first].frameNumber = std::get<0>(it->second);
            mPendingInputFrames[it->first].result = std::get<1>(it->second);
        }
        mCaptureResults.erase(it);
    }

    // Record frame numbers for frames whose shutter arrived (timestamp -> frame
    // number mapping) so later error notifications can be correlated.
    while (!mFrameNumberMap.empty()) {
        auto it = mFrameNumberMap.begin();
        mPendingInputFrames[it->second].frameNumber = it->first;
        mFrameNumberMap.erase(it);
    }

    // Propagate buffered error frame numbers onto matching pending frames.
    // Unmatched entries are kept for a later pass — their frame may not have
    // been registered in mPendingInputFrames yet.
    auto it = mErrorFrameNumbers.begin();
    while (it != mErrorFrameNumbers.end()) {
        bool frameFound = false;
        for (auto &inputFrame : mPendingInputFrames) {
            if (inputFrame.second.frameNumber == *it) {
                inputFrame.second.error = true;
                frameFound = true;
                break;
            }
        }

        if (frameFound) {
            it = mErrorFrameNumbers.erase(it);
        } else {
            ALOGW("%s: Not able to find failing input with frame number: %" PRId64, __FUNCTION__,
                    *it);
            it++;
        }
    }
}
186
187bool JpegRCompositeStream::getNextReadyInputLocked(int64_t *currentTs /*inout*/) {
188 if (currentTs == nullptr) {
189 return false;
190 }
191
192 bool newInputAvailable = false;
193 for (const auto& it : mPendingInputFrames) {
194 if ((!it.second.error) && (it.second.p010Buffer.data != nullptr) &&
195 ((it.second.jpegBuffer.data != nullptr) || !mSupportInternalJpeg) &&
196 (it.first < *currentTs)) {
197 *currentTs = it.first;
198 newInputAvailable = true;
199 }
200 }
201
202 return newInputAvailable;
203}
204
205int64_t JpegRCompositeStream::getNextFailingInputLocked(int64_t *currentTs /*inout*/) {
206 int64_t ret = -1;
207 if (currentTs == nullptr) {
208 return ret;
209 }
210
211 for (const auto& it : mPendingInputFrames) {
212 if (it.second.error && !it.second.errorNotified && (it.first < *currentTs)) {
213 *currentTs = it.first;
214 ret = it.second.frameNumber;
215 }
216 }
217
218 return ret;
219}
220
221status_t JpegRCompositeStream::processInputFrame(nsecs_t ts, const InputFrame &inputFrame) {
222 status_t res;
223 sp<ANativeWindow> outputANW = mOutputSurface;
224 ANativeWindowBuffer *anb;
225 int fenceFd;
226 void *dstBuffer;
227
228 size_t maxJpegBufferSize = 0;
229 if (mMaxJpegBufferSize > 0) {
230 // If this is an ultra high resolution sensor and the input frames size
231 // is > default res jpeg.
232 if (mUHRMaxJpegSize.width != 0 &&
233 inputFrame.jpegBuffer.width * inputFrame.jpegBuffer.height >
234 mDefaultMaxJpegSize.width * mDefaultMaxJpegSize.height) {
235 maxJpegBufferSize = mUHRMaxJpegBufferSize;
236 } else {
237 maxJpegBufferSize = mMaxJpegBufferSize;
238 }
239 } else {
240 maxJpegBufferSize = inputFrame.p010Buffer.width * inputFrame.p010Buffer.height;
241 }
242
243 uint8_t jpegQuality = 100;
244 auto entry = inputFrame.result.find(ANDROID_JPEG_QUALITY);
245 if (entry.count > 0) {
246 jpegQuality = entry.data.u8[0];
247 }
248
249 uint8_t jpegOrientation = 0;
250 entry = inputFrame.result.find(ANDROID_JPEG_ORIENTATION);
251 if (entry.count > 0) {
252 jpegOrientation = entry.data.i32[0];
253 }
254
255 if ((res = native_window_set_buffers_dimensions(mOutputSurface.get(), maxJpegBufferSize, 1))
256 != OK) {
257 ALOGE("%s: Unable to configure stream buffer dimensions"
258 " %zux%u for stream %d", __FUNCTION__, maxJpegBufferSize, 1U, mP010StreamId);
259 return res;
260 }
261
262 res = outputANW->dequeueBuffer(mOutputSurface.get(), &anb, &fenceFd);
263 if (res != OK) {
264 ALOGE("%s: Error retrieving output buffer: %s (%d)", __FUNCTION__, strerror(-res),
265 res);
266 return res;
267 }
268
269 sp<GraphicBuffer> gb = GraphicBuffer::from(anb);
270 GraphicBufferLocker gbLocker(gb);
271 res = gbLocker.lockAsync(&dstBuffer, fenceFd);
272 if (res != OK) {
273 ALOGE("%s: Error trying to lock output buffer fence: %s (%d)", __FUNCTION__,
274 strerror(-res), res);
275 outputANW->cancelBuffer(mOutputSurface.get(), anb, /*fence*/ -1);
276 return res;
277 }
278
279 if ((gb->getWidth() < maxJpegBufferSize) || (gb->getHeight() != 1)) {
280 ALOGE("%s: Blob buffer size mismatch, expected %zux%u received %dx%d", __FUNCTION__,
281 maxJpegBufferSize, 1, gb->getWidth(), gb->getHeight());
282 outputANW->cancelBuffer(mOutputSurface.get(), anb, /*fence*/ -1);
283 return BAD_VALUE;
284 }
285
286 if (mOutputColorSpace == ANDROID_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP_DISPLAY_P3) {
287 // Configure Jpeg/R for P3 output and possibly input in case of concurrent SDR Jpeg support
288 } else {
289 // Configure Jpeg/R for SRGB output
290 }
291
292 size_t actualJpegSize = 0;
293 if (mSupportInternalJpeg) {
294 actualJpegSize = android::camera2::JpegProcessor::findJpegSize(inputFrame.jpegBuffer.data,
295 inputFrame.jpegBuffer.width);
296 if (actualJpegSize == 0) {
297 ALOGW("%s: Failed to find input jpeg size, default to using entire buffer!",
298 __FUNCTION__);
299 actualJpegSize = inputFrame.jpegBuffer.width;
300 }
301 if (actualJpegSize <= maxJpegBufferSize) {
302 memcpy(dstBuffer, inputFrame.jpegBuffer.data, actualJpegSize);
303 }
304 } else {
305 const uint8_t* exifBuffer = nullptr;
306 size_t exifBufferSize = 0;
307 std::unique_ptr<ExifUtils> utils(ExifUtils::create());
308 utils->initializeEmpty();
309 utils->setFromMetadata(inputFrame.result, mStaticInfo, inputFrame.p010Buffer.width,
310 inputFrame.p010Buffer.height);
311 if (utils->generateApp1()) {
312 exifBuffer = utils->getApp1Buffer();
313 exifBufferSize = utils->getApp1Length();
314 } else {
315 ALOGE("%s: Unable to generate App1 buffer", __FUNCTION__);
316 }
317 }
318
319 //TODO: Process JpegR here and retrieve the final jpeg/r size
320
321 size_t finalJpegSize = actualJpegSize + sizeof(CameraBlob);
322 if (finalJpegSize > maxJpegBufferSize) {
323 ALOGE("%s: Final jpeg buffer not large enough for the jpeg blob header", __FUNCTION__);
324 outputANW->cancelBuffer(mOutputSurface.get(), anb, /*fence*/ -1);
325 return NO_MEMORY;
326 }
327
328 res = native_window_set_buffers_timestamp(mOutputSurface.get(), ts);
329 if (res != OK) {
330 ALOGE("%s: Stream %d: Error setting timestamp: %s (%d)", __FUNCTION__,
331 getStreamId(), strerror(-res), res);
332 return res;
333 }
334
335 ALOGV("%s: Final jpeg size: %zu", __func__, finalJpegSize);
336 uint8_t* header = static_cast<uint8_t *> (dstBuffer) +
337 (gb->getWidth() - sizeof(CameraBlob));
338 CameraBlob blobHeader = {
339 .blobId = CameraBlobId::JPEG,
340 .blobSizeBytes = static_cast<int32_t>(actualJpegSize)
341 };
342 memcpy(header, &blobHeader, sizeof(CameraBlob));
343 outputANW->queueBuffer(mOutputSurface.get(), anb, /*fence*/ -1);
344
345 return res;
346}
347
348void JpegRCompositeStream::releaseInputFrameLocked(InputFrame *inputFrame /*out*/) {
349 if (inputFrame == nullptr) {
350 return;
351 }
352
353 if (inputFrame->p010Buffer.data != nullptr) {
354 mP010Consumer->unlockBuffer(inputFrame->p010Buffer);
355 inputFrame->p010Buffer.data = nullptr;
356 mP010BufferAcquired = false;
357 }
358
359 if (inputFrame->jpegBuffer.data != nullptr) {
360 mBlobConsumer->unlockBuffer(inputFrame->jpegBuffer);
361 inputFrame->jpegBuffer.data = nullptr;
362 mBlobBufferAcquired = false;
363 }
364
365 if ((inputFrame->error || mErrorState) && !inputFrame->errorNotified) {
366 //TODO: Figure out correct requestId
367 notifyError(inputFrame->frameNumber, -1 /*requestId*/);
368 inputFrame->errorNotified = true;
369 }
370}
371
372void JpegRCompositeStream::releaseInputFramesLocked(int64_t currentTs) {
373 auto it = mPendingInputFrames.begin();
374 while (it != mPendingInputFrames.end()) {
375 if (it->first <= currentTs) {
376 releaseInputFrameLocked(&it->second);
377 it = mPendingInputFrames.erase(it);
378 } else {
379 it++;
380 }
381 }
382}
383
384bool JpegRCompositeStream::threadLoop() {
385 int64_t currentTs = INT64_MAX;
386 bool newInputAvailable = false;
387
388 {
389 Mutex::Autolock l(mMutex);
390
391 if (mErrorState) {
392 // In case we landed in error state, return any pending buffers and
393 // halt all further processing.
394 compilePendingInputLocked();
395 releaseInputFramesLocked(currentTs);
396 return false;
397 }
398
399 while (!newInputAvailable) {
400 compilePendingInputLocked();
401 newInputAvailable = getNextReadyInputLocked(&currentTs);
402 if (!newInputAvailable) {
403 auto failingFrameNumber = getNextFailingInputLocked(&currentTs);
404 if (failingFrameNumber >= 0) {
405 // We cannot erase 'mPendingInputFrames[currentTs]' at this point because it is
406 // possible for two internal stream buffers to fail. In such scenario the
407 // composite stream should notify the client about a stream buffer error only
408 // once and this information is kept within 'errorNotified'.
409 // Any present failed input frames will be removed on a subsequent call to
410 // 'releaseInputFramesLocked()'.
411 releaseInputFrameLocked(&mPendingInputFrames[currentTs]);
412 currentTs = INT64_MAX;
413 }
414
415 auto ret = mInputReadyCondition.waitRelative(mMutex, kWaitDuration);
416 if (ret == TIMED_OUT) {
417 return true;
418 } else if (ret != OK) {
419 ALOGE("%s: Timed wait on condition failed: %s (%d)", __FUNCTION__,
420 strerror(-ret), ret);
421 return false;
422 }
423 }
424 }
425 }
426
427 auto res = processInputFrame(currentTs, mPendingInputFrames[currentTs]);
428 Mutex::Autolock l(mMutex);
429 if (res != OK) {
430 ALOGE("%s: Failed processing frame with timestamp: %" PRIu64 ": %s (%d)", __FUNCTION__,
431 currentTs, strerror(-res), res);
432 mPendingInputFrames[currentTs].error = true;
433 }
434
435 releaseInputFramesLocked(currentTs);
436
437 return true;
438}
439
440bool JpegRCompositeStream::isJpegRCompositeStream(const sp<Surface> &surface) {
441 if (CameraProviderManager::kFrameworkJpegRDisabled) {
442 return false;
443 }
444 ANativeWindow *anw = surface.get();
445 status_t err;
446 int format;
447 if ((err = anw->query(anw, NATIVE_WINDOW_FORMAT, &format)) != OK) {
448 ALOGE("%s: Failed to query Surface format: %s (%d)", __FUNCTION__, strerror(-err),
449 err);
450 return false;
451 }
452
453 int dataspace;
454 if ((err = anw->query(anw, NATIVE_WINDOW_DEFAULT_DATASPACE, &dataspace)) != OK) {
455 ALOGE("%s: Failed to query Surface dataspace: %s (%d)", __FUNCTION__, strerror(-err),
456 err);
457 return false;
458 }
459
460 if ((format == HAL_PIXEL_FORMAT_BLOB) && (dataspace == static_cast<int>(kJpegRDataSpace))) {
461 return true;
462 }
463
464 return false;
465}
466
467void JpegRCompositeStream::deriveDynamicRangeAndDataspace(int64_t dynamicProfile,
468 int64_t* /*out*/dynamicRange, int64_t* /*out*/dataSpace) {
469 if ((dynamicRange == nullptr) || (dataSpace == nullptr)) {
470 return;
471 }
472
473 switch (dynamicProfile) {
474 case ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_HDR10:
475 case ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_HDR10_PLUS:
476 *dynamicRange = dynamicProfile;
477 *dataSpace = HAL_DATASPACE_BT2020_ITU_PQ;
478 break;
479 case ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_DOLBY_VISION_10B_HDR_REF:
480 case ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_DOLBY_VISION_10B_HDR_REF_PO:
481 case ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_DOLBY_VISION_10B_HDR_OEM:
482 case ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_DOLBY_VISION_10B_HDR_OEM_PO:
483 *dynamicRange = dynamicProfile;
484 *dataSpace = HAL_DATASPACE_BT2020_ITU_HLG;
485 break;
486 default:
487 *dynamicRange = kP010DefaultDynamicRange;
488 *dataSpace = kP010DefaultDataSpace;
489 }
490
491}
492
// Creates the internal camera streams backing the composite Jpeg/R output:
// always a P010 stream (whose id becomes the composite's public id), plus an
// internal jpeg blob stream when the device supports capturing the derived
// dynamic range concurrently with STANDARD. Registers this object as listener
// on both streams and stores the client-facing output surface.
status_t JpegRCompositeStream::createInternalStreams(const std::vector<sp<Surface>>& consumers,
        bool /*hasDeferredConsumer*/, uint32_t width, uint32_t height, int format,
        camera_stream_rotation_t rotation, int *id, const String8& physicalCameraId,
        const std::unordered_set<int32_t> &sensorPixelModesUsed,
        std::vector<int> *surfaceIds,
        int /*streamSetId*/, bool /*isShared*/, int32_t colorSpace,
        int64_t dynamicProfile, int64_t streamUseCase) {
    sp<CameraDeviceBase> device = mDevice.promote();
    if (!device.get()) {
        ALOGE("%s: Invalid camera device!", __FUNCTION__);
        return NO_INIT;
    }

    // Resolve the P010 dynamic range/dataspace from the requested profile and
    // check whether an internal SDR jpeg can be captured alongside it.
    deriveDynamicRangeAndDataspace(dynamicProfile, &mP010DynamicRange, &mP010DataSpace);
    mSupportInternalJpeg = CameraProviderManager::isConcurrentDynamicRangeCaptureSupported(
            mStaticInfo, mP010DynamicRange,
            ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD);

    // P010 input: CPU consumer limited to a single locked buffer (see
    // compilePendingInputLocked()).
    sp<IGraphicBufferProducer> producer;
    sp<IGraphicBufferConsumer> consumer;
    BufferQueue::createBufferQueue(&producer, &consumer);
    mP010Consumer = new CpuConsumer(consumer, /*maxLockedBuffers*/1, /*controlledByApp*/ true);
    mP010Consumer->setFrameAvailableListener(this);
    mP010Consumer->setName(String8("Camera3-P010CompositeStream"));
    mP010Surface = new Surface(producer);

    auto ret = device->createStream(mP010Surface, width, height, kP010PixelFormat,
            static_cast<android_dataspace>(mP010DataSpace), rotation,
            id, physicalCameraId, sensorPixelModesUsed, surfaceIds,
            camera3::CAMERA3_STREAM_SET_ID_INVALID, false /*isShared*/, false /*isMultiResolution*/,
            GRALLOC_USAGE_SW_READ_OFTEN,
            mP010DynamicRange,
            streamUseCase);
    if (ret == OK) {
        // NOTE(review): assumes 'surfaceIds' is non-null and populated on
        // success, and that 'consumers' holds at least one surface — both
        // presumably guaranteed by the caller; confirm at the call site.
        mP010StreamId = *id;
        mP010SurfaceId = (*surfaceIds)[0];
        mOutputSurface = consumers[0];
    } else {
        return ret;
    }

    if (mSupportInternalJpeg) {
        // Internal jpeg blob stream, same single-locked-buffer CPU consumer
        // setup; the producer/consumer locals are reused for the new queue.
        BufferQueue::createBufferQueue(&producer, &consumer);
        mBlobConsumer = new CpuConsumer(consumer, /*maxLockedBuffers*/ 1, /*controlledByApp*/ true);
        mBlobConsumer->setFrameAvailableListener(this);
        mBlobConsumer->setName(String8("Camera3-JpegRCompositeStream"));
        mBlobSurface = new Surface(producer);
        std::vector<int> blobSurfaceId;
        ret = device->createStream(mBlobSurface, width, height, format,
                kJpegDataSpace, rotation, &mBlobStreamId, physicalCameraId, sensorPixelModesUsed,
                &blobSurfaceId,
                /*streamSetI*/ camera3::CAMERA3_STREAM_SET_ID_INVALID,
                /*isShared*/ false,
                /*isMultiResolution*/ false,
                /*consumerUsage*/ GRALLOC_USAGE_SW_READ_OFTEN,
                /*dynamicProfile*/ ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD,
                streamUseCase,
                /*timestampBase*/ OutputConfiguration::TIMESTAMP_BASE_DEFAULT,
                /*mirrorMode*/ OutputConfiguration::MIRROR_MODE_AUTO,
                /*colorSpace*/ colorSpace);
        if (ret == OK) {
            mBlobSurfaceId = blobSurfaceId[0];
        } else {
            return ret;
        }

        ret = registerCompositeStreamListener(mBlobStreamId);
        if (ret != OK) {
            ALOGE("%s: Failed to register jpeg stream listener!", __FUNCTION__);
            return ret;
        }
    }

    // getStreamId() is expected to resolve to the P010 stream id set above.
    ret = registerCompositeStreamListener(getStreamId());
    if (ret != OK) {
        ALOGE("%s: Failed to register P010 stream listener!", __FUNCTION__);
        return ret;
    }

    mOutputColorSpace = colorSpace;
    mBlobWidth = width;
    mBlobHeight = height;

    return ret;
}
578
// Connects to the client output surface, configures it as a BLOB producer,
// sizes its buffer count from the undequeued-buffer requirements of both the
// internal P010 surface and the output window, and starts the processing
// thread. Idempotent while the thread is already running.
status_t JpegRCompositeStream::configureStream() {
    if (isRunning()) {
        // Processing thread is already running, nothing more to do.
        return NO_ERROR;
    }

    if (mOutputSurface.get() == nullptr) {
        ALOGE("%s: No valid output surface set!", __FUNCTION__);
        return NO_INIT;
    }

    auto res = mOutputSurface->connect(NATIVE_WINDOW_API_CAMERA, mProducerListener);
    if (res != OK) {
        ALOGE("%s: Unable to connect to native window for stream %d",
                __FUNCTION__, mP010StreamId);
        return res;
    }

    // Output is always a jpeg-style blob regardless of the input formats.
    if ((res = native_window_set_buffers_format(mOutputSurface.get(), HAL_PIXEL_FORMAT_BLOB))
            != OK) {
        ALOGE("%s: Unable to configure stream buffer format for stream %d", __FUNCTION__,
                mP010StreamId);
        return res;
    }

    // Minimum undequeued buffers required on the internal P010 surface.
    int maxProducerBuffers;
    ANativeWindow *anw = mP010Surface.get();
    if ((res = anw->query(anw, NATIVE_WINDOW_MIN_UNDEQUEUED_BUFFERS, &maxProducerBuffers)) != OK) {
        ALOGE("%s: Unable to query consumer undequeued"
                " buffer count for stream %d", __FUNCTION__, mP010StreamId);
        return res;
    }

    // Same query on the client-facing output window.
    ANativeWindow *anwConsumer = mOutputSurface.get();
    int maxConsumerBuffers;
    if ((res = anwConsumer->query(anwConsumer, NATIVE_WINDOW_MIN_UNDEQUEUED_BUFFERS,
                    &maxConsumerBuffers)) != OK) {
        ALOGE("%s: Unable to query consumer undequeued"
                " buffer count for stream %d", __FUNCTION__, mP010StreamId);
        return res;
    }

    // Provision the output queue with the sum of both requirements so neither
    // side can starve the other.
    if ((res = native_window_set_buffer_count(
                    anwConsumer, maxProducerBuffers + maxConsumerBuffers)) != OK) {
        ALOGE("%s: Unable to set buffer count for stream %d", __FUNCTION__, mP010StreamId);
        return res;
    }

    run("JpegRCompositeStreamProc");

    return NO_ERROR;
}
631
// Stops the processing thread, deletes the internal blob stream (when one was
// created), and disconnects/releases the client output surface. Returns the
// last non-trivial status encountered.
status_t JpegRCompositeStream::deleteInternalStreams() {
    // The 'CameraDeviceClient' parent will delete the P010 stream
    requestExit();

    // Join failure is logged but not fatal — teardown continues regardless.
    auto ret = join();
    if (ret != OK) {
        ALOGE("%s: Failed to join with the main processing thread: %s (%d)", __FUNCTION__,
                strerror(-ret), ret);
    }

    if (mBlobStreamId >= 0) {
        // Camera devices may not be valid after switching to offline mode.
        // In this case, all offline streams including internal composite streams
        // are managed and released by the offline session.
        sp<CameraDeviceBase> device = mDevice.promote();
        if (device.get() != nullptr) {
            ret = device->deleteStream(mBlobStreamId);
        }

        mBlobStreamId = -1;
    }

    if (mOutputSurface != nullptr) {
        mOutputSurface->disconnect(NATIVE_WINDOW_API_CAMERA);
        mOutputSurface.clear();
    }

    return ret;
}
661
662void JpegRCompositeStream::onFrameAvailable(const BufferItem& item) {
663 if (item.mDataSpace == kJpegDataSpace) {
664 ALOGV("%s: Jpeg buffer with ts: %" PRIu64 " ms. arrived!",
665 __func__, ns2ms(item.mTimestamp));
666
667 Mutex::Autolock l(mMutex);
668 if (!mErrorState) {
669 mInputJpegBuffers.push_back(item.mTimestamp);
670 mInputReadyCondition.signal();
671 }
672 } else if (item.mDataSpace == static_cast<android_dataspace_t>(mP010DataSpace)) {
673 ALOGV("%s: P010 buffer with ts: %" PRIu64 " ms. arrived!", __func__,
674 ns2ms(item.mTimestamp));
675
676 Mutex::Autolock l(mMutex);
677 if (!mErrorState) {
678 mInputP010Buffers.push_back(item.mTimestamp);
679 mInputReadyCondition.signal();
680 }
681 } else {
682 ALOGE("%s: Unexpected data space: 0x%x", __FUNCTION__, item.mDataSpace);
683 }
684}
685
686status_t JpegRCompositeStream::insertGbp(SurfaceMap* /*out*/outSurfaceMap,
687 Vector<int32_t> * /*out*/outputStreamIds, int32_t* /*out*/currentStreamId) {
688 if (outputStreamIds == nullptr) {
689 return BAD_VALUE;
690 }
691
692 if (outSurfaceMap->find(mP010StreamId) == outSurfaceMap->end()) {
693 outputStreamIds->push_back(mP010StreamId);
694 }
695 (*outSurfaceMap)[mP010StreamId].push_back(mP010SurfaceId);
696
697 if (mSupportInternalJpeg) {
698 if (outSurfaceMap->find(mBlobStreamId) == outSurfaceMap->end()) {
699 outputStreamIds->push_back(mBlobStreamId);
700 }
701 (*outSurfaceMap)[mBlobStreamId].push_back(mBlobSurfaceId);
702 }
703
704 if (currentStreamId != nullptr) {
705 *currentStreamId = mP010StreamId;
706 }
707
708 return NO_ERROR;
709}
710
711status_t JpegRCompositeStream::insertCompositeStreamIds(
712 std::vector<int32_t>* compositeStreamIds /*out*/) {
713 if (compositeStreamIds == nullptr) {
714 return BAD_VALUE;
715 }
716
717 compositeStreamIds->push_back(mP010StreamId);
718 if (mSupportInternalJpeg) {
719 compositeStreamIds->push_back(mBlobStreamId);
720 }
721
722 return OK;
723}
724
725void JpegRCompositeStream::onResultError(const CaptureResultExtras& resultExtras) {
726 // Processing can continue even in case of result errors.
727 // At the moment Jpeg/R composite stream processing relies mainly on static camera
728 // characteristics data. The actual result data can be used for the jpeg quality but
729 // in case it is absent we can default to maximum.
730 eraseResult(resultExtras.frameNumber);
731}
732
733bool JpegRCompositeStream::onStreamBufferError(const CaptureResultExtras& resultExtras) {
734 bool ret = false;
735 // Buffer errors concerning internal composite streams should not be directly visible to
736 // camera clients. They must only receive a single buffer error with the public composite
737 // stream id.
738 if ((resultExtras.errorStreamId == mP010StreamId) ||
739 (resultExtras.errorStreamId == mBlobStreamId)) {
740 flagAnErrorFrameNumber(resultExtras.frameNumber);
741 ret = true;
742 }
743
744 return ret;
745}
746
747status_t JpegRCompositeStream::getCompositeStreamInfo(const OutputStreamInfo &streamInfo,
748 const CameraMetadata& staticInfo,
749 std::vector<OutputStreamInfo>* compositeOutput /*out*/) {
750 if (compositeOutput == nullptr) {
751 return BAD_VALUE;
752 }
753
754 int64_t dynamicRange, dataSpace;
755 deriveDynamicRangeAndDataspace(streamInfo.dynamicRangeProfile, &dynamicRange, &dataSpace);
756
757 compositeOutput->clear();
758 compositeOutput->push_back({});
759 (*compositeOutput)[0].width = streamInfo.width;
760 (*compositeOutput)[0].height = streamInfo.height;
761 (*compositeOutput)[0].format = kP010PixelFormat;
762 (*compositeOutput)[0].dataSpace = static_cast<android_dataspace_t>(dataSpace);
763 (*compositeOutput)[0].consumerUsage = GRALLOC_USAGE_SW_READ_OFTEN;
764 (*compositeOutput)[0].dynamicRangeProfile = dynamicRange;
765 (*compositeOutput)[0].colorSpace =
766 ANDROID_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP_UNSPECIFIED;
767
768 if (CameraProviderManager::isConcurrentDynamicRangeCaptureSupported(staticInfo,
769 streamInfo.dynamicRangeProfile,
770 ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD)) {
771 compositeOutput->push_back({});
772 (*compositeOutput)[1].width = streamInfo.width;
773 (*compositeOutput)[1].height = streamInfo.height;
774 (*compositeOutput)[1].format = HAL_PIXEL_FORMAT_BLOB;
775 (*compositeOutput)[1].dataSpace = kJpegDataSpace;
776 (*compositeOutput)[1].consumerUsage = GRALLOC_USAGE_SW_READ_OFTEN;
777 (*compositeOutput)[1].dynamicRangeProfile =
778 ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD;
779 (*compositeOutput)[1].colorSpace = streamInfo.colorSpace;
780 }
781
782 return NO_ERROR;
783}
784
785}; // namespace camera3
786}; // namespace android