Emilian Peev | 434248e | 2022-10-06 14:58:54 -0700 | [diff] [blame] | 1 | /* |
| 2 | * Copyright (C) 2022 The Android Open Source Project |
| 3 | * |
| 4 | * Licensed under the Apache License, Version 2.0 (the "License"); |
| 5 | * you may not use this file except in compliance with the License. |
| 6 | * You may obtain a copy of the License at |
| 7 | * |
| 8 | * http://www.apache.org/licenses/LICENSE-2.0 |
| 9 | * |
| 10 | * Unless required by applicable law or agreed to in writing, software |
| 11 | * distributed under the License is distributed on an "AS IS" BASIS, |
| 12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. |
| 13 | * See the License for the specific language governing permissions and |
| 14 | * limitations under the License. |
| 15 | */ |
| 16 | |
| 17 | #include "hardware/gralloc.h" |
| 18 | #include "system/graphics-base-v1.0.h" |
| 19 | #include "system/graphics-base-v1.1.h" |
| 20 | #define LOG_TAG "Camera3-JpegRCompositeStream" |
| 21 | #define ATRACE_TAG ATRACE_TAG_CAMERA |
| 22 | //#define LOG_NDEBUG 0 |
| 23 | |
| 24 | #include <aidl/android/hardware/camera/device/CameraBlob.h> |
| 25 | #include <aidl/android/hardware/camera/device/CameraBlobId.h> |
| 26 | |
| 27 | #include "common/CameraProviderManager.h" |
| 28 | #include <gui/Surface.h> |
Dichen Zhang | 809ed08 | 2023-02-10 22:42:30 +0000 | [diff] [blame] | 29 | #include <jpegrecoverymap/jpegr.h> |
Emilian Peev | 434248e | 2022-10-06 14:58:54 -0700 | [diff] [blame] | 30 | #include <utils/ExifUtils.h> |
| 31 | #include <utils/Log.h> |
| 32 | #include "utils/SessionConfigurationUtils.h" |
| 33 | #include <utils/Trace.h> |
| 34 | |
| 35 | #include "JpegRCompositeStream.h" |
| 36 | |
| 37 | namespace android { |
| 38 | namespace camera3 { |
| 39 | |
| 40 | using aidl::android::hardware::camera::device::CameraBlob; |
| 41 | using aidl::android::hardware::camera::device::CameraBlobId; |
| 42 | |
/**
 * Constructs the JPEG/R composite stream.
 *
 * Caches the device's static metadata and pre-computes the jpeg buffer sizing
 * information (default and ultra-high-resolution maximum jpeg dimensions plus
 * the maximum jpeg buffer size) that processInputFrame() later uses to size the
 * output blob buffers.
 *
 * @param device Camera device this composite stream belongs to.
 * @param cb     Client callbacks used for error notifications.
 */
JpegRCompositeStream::JpegRCompositeStream(sp<CameraDeviceBase> device,
        wp<hardware::camera2::ICameraDeviceCallbacks> cb) :
        CompositeStream(device, cb),
        mBlobStreamId(-1),
        mBlobSurfaceId(-1),
        mP010StreamId(-1),
        mP010SurfaceId(-1),
        mBlobWidth(0),
        mBlobHeight(0),
        mP010BufferAcquired(false),
        mBlobBufferAcquired(false),
        mOutputColorSpace(ANDROID_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP_UNSPECIFIED),
        mOutputStreamUseCase(0),
        mFirstRequestLatency(-1),
        mProducerListener(new ProducerListener()),
        mMaxJpegBufferSize(-1),
        mUHRMaxJpegBufferSize(-1),
        mStaticInfo(device->info()) {
    // ANDROID_JPEG_MAX_SIZE is optional; a missing entry leaves
    // mMaxJpegBufferSize at -1 and processInputFrame() falls back to sizing
    // buffers from the P010 input dimensions.
    auto entry = mStaticInfo.find(ANDROID_JPEG_MAX_SIZE);
    if (entry.count > 0) {
        mMaxJpegBufferSize = entry.data.i32[0];
    } else {
        ALOGW("%s: Maximum jpeg size absent from camera characteristics", __FUNCTION__);
    }

    mUHRMaxJpegSize =
            SessionConfigurationUtils::getMaxJpegResolution(mStaticInfo,
                    /*ultraHighResolution*/true);
    mDefaultMaxJpegSize =
            SessionConfigurationUtils::getMaxJpegResolution(mStaticInfo,
                    /*isUltraHighResolution*/false);

    mUHRMaxJpegBufferSize =
        SessionConfigurationUtils::getUHRMaxJpegBufferSize(mUHRMaxJpegSize, mDefaultMaxJpegSize,
                mMaxJpegBufferSize);
}
| 79 | |
| 80 | JpegRCompositeStream::~JpegRCompositeStream() { |
| 81 | mBlobConsumer.clear(), |
| 82 | mBlobSurface.clear(), |
| 83 | mBlobStreamId = -1; |
| 84 | mBlobSurfaceId = -1; |
| 85 | mP010Consumer.clear(); |
| 86 | mP010Surface.clear(); |
| 87 | mP010Consumer = nullptr; |
| 88 | mP010Surface = nullptr; |
| 89 | } |
| 90 | |
/**
 * Transfers buffers and metadata that arrived on consumer/callback paths into
 * 'mPendingInputFrames', keyed by buffer timestamp. Must be called with
 * 'mMutex' held (enforced by callers; see threadLoop()).
 *
 * Drains, in order:
 *  1. Pending jpeg blob buffers (only when an internal jpeg stream exists).
 *  2. Pending P010 buffers.
 *  3. Collected capture results (frame number + result metadata).
 *  4. Frame number -> timestamp mappings, including request start times.
 *  5. Reported buffer errors, which flag the matching pending frame.
 */
void JpegRCompositeStream::compilePendingInputLocked() {
    CpuConsumer::LockedBuffer imgBuffer;

    // At most one blob buffer is kept locked at a time ('mBlobBufferAcquired');
    // the consumer is created with maxLockedBuffers == 1.
    while (mSupportInternalJpeg && !mInputJpegBuffers.empty() && !mBlobBufferAcquired) {
        auto it = mInputJpegBuffers.begin();
        auto res = mBlobConsumer->lockNextBuffer(&imgBuffer);
        if (res == NOT_ENOUGH_DATA) {
            // Can not lock any more buffers.
            break;
        } else if (res != OK) {
            ALOGE("%s: Error locking blob image buffer: %s (%d)", __FUNCTION__,
                    strerror(-res), res);
            mPendingInputFrames[*it].error = true;
            mInputJpegBuffers.erase(it);
            continue;
        }

        if (*it != imgBuffer.timestamp) {
            ALOGW("%s: Expecting jpeg buffer with time stamp: %" PRId64 " received buffer with "
                    "time stamp: %" PRId64, __FUNCTION__, *it, imgBuffer.timestamp);
        }

        // Frames already marked as failed give their buffer back immediately;
        // otherwise the locked buffer is attached to the pending frame.
        if ((mPendingInputFrames.find(imgBuffer.timestamp) != mPendingInputFrames.end()) &&
                (mPendingInputFrames[imgBuffer.timestamp].error)) {
            mBlobConsumer->unlockBuffer(imgBuffer);
        } else {
            mPendingInputFrames[imgBuffer.timestamp].jpegBuffer = imgBuffer;
            mBlobBufferAcquired = true;
        }
        mInputJpegBuffers.erase(it);
    }

    // Same single-locked-buffer pattern for the P010 input stream.
    while (!mInputP010Buffers.empty() && !mP010BufferAcquired) {
        auto it = mInputP010Buffers.begin();
        auto res = mP010Consumer->lockNextBuffer(&imgBuffer);
        if (res == NOT_ENOUGH_DATA) {
            // Can not lock any more buffers.
            break;
        } else if (res != OK) {
            ALOGE("%s: Error receiving P010 image buffer: %s (%d)", __FUNCTION__,
                    strerror(-res), res);
            mPendingInputFrames[*it].error = true;
            mInputP010Buffers.erase(it);
            continue;
        }

        if (*it != imgBuffer.timestamp) {
            ALOGW("%s: Expecting P010 buffer with time stamp: %" PRId64 " received buffer with "
                    "time stamp: %" PRId64, __FUNCTION__, *it, imgBuffer.timestamp);
        }

        if ((mPendingInputFrames.find(imgBuffer.timestamp) != mPendingInputFrames.end()) &&
                (mPendingInputFrames[imgBuffer.timestamp].error)) {
            mP010Consumer->unlockBuffer(imgBuffer);
        } else {
            mPendingInputFrames[imgBuffer.timestamp].p010Buffer = imgBuffer;
            mP010BufferAcquired = true;
        }
        mInputP010Buffers.erase(it);
    }

    while (!mCaptureResults.empty()) {
        auto it = mCaptureResults.begin();
        // Negative timestamp indicates that something went wrong during the capture result
        // collection process.
        if (it->first >= 0) {
            auto frameNumber = std::get<0>(it->second);
            mPendingInputFrames[it->first].frameNumber = frameNumber;
            mPendingInputFrames[it->first].result = std::get<1>(it->second);
            mSessionStatsBuilder.incResultCounter(false /*dropped*/);
        }
        mCaptureResults.erase(it);
    }

    while (!mFrameNumberMap.empty()) {
        auto it = mFrameNumberMap.begin();
        auto frameNumber = it->first;
        mPendingInputFrames[it->second].frameNumber = frameNumber;
        // Propagate the request start time so capture latency can be computed
        // once the frame is processed (see processInputFrame()).
        auto requestTimeIt = mRequestTimeMap.find(frameNumber);
        if (requestTimeIt != mRequestTimeMap.end()) {
            mPendingInputFrames[it->second].requestTimeNs = requestTimeIt->second;
            mRequestTimeMap.erase(requestTimeIt);
        }
        mFrameNumberMap.erase(it);
    }

    auto it = mErrorFrameNumbers.begin();
    while (it != mErrorFrameNumbers.end()) {
        bool frameFound = false;
        for (auto &inputFrame : mPendingInputFrames) {
            if (inputFrame.second.frameNumber == *it) {
                inputFrame.second.error = true;
                frameFound = true;
                break;
            }
        }

        if (frameFound) {
            mSessionStatsBuilder.incCounter(mP010StreamId, true /*dropped*/,
                    0 /*captureLatencyMs*/);
            it = mErrorFrameNumbers.erase(it);
        } else {
            // The matching frame may not have arrived yet; keep the error entry
            // so it is retried on the next pass.
            ALOGW("%s: Not able to find failing input with frame number: %" PRId64, __FUNCTION__,
                    *it);
            it++;
        }
    }
}
| 199 | |
| 200 | bool JpegRCompositeStream::getNextReadyInputLocked(int64_t *currentTs /*inout*/) { |
| 201 | if (currentTs == nullptr) { |
| 202 | return false; |
| 203 | } |
| 204 | |
| 205 | bool newInputAvailable = false; |
| 206 | for (const auto& it : mPendingInputFrames) { |
| 207 | if ((!it.second.error) && (it.second.p010Buffer.data != nullptr) && |
Emilian Peev | 567c31c | 2023-03-06 15:02:37 -0800 | [diff] [blame] | 208 | (it.second.requestTimeNs != -1) && |
Emilian Peev | 434248e | 2022-10-06 14:58:54 -0700 | [diff] [blame] | 209 | ((it.second.jpegBuffer.data != nullptr) || !mSupportInternalJpeg) && |
| 210 | (it.first < *currentTs)) { |
| 211 | *currentTs = it.first; |
| 212 | newInputAvailable = true; |
| 213 | } |
| 214 | } |
| 215 | |
| 216 | return newInputAvailable; |
| 217 | } |
| 218 | |
| 219 | int64_t JpegRCompositeStream::getNextFailingInputLocked(int64_t *currentTs /*inout*/) { |
| 220 | int64_t ret = -1; |
| 221 | if (currentTs == nullptr) { |
| 222 | return ret; |
| 223 | } |
| 224 | |
| 225 | for (const auto& it : mPendingInputFrames) { |
| 226 | if (it.second.error && !it.second.errorNotified && (it.first < *currentTs)) { |
| 227 | *currentTs = it.first; |
| 228 | ret = it.second.frameNumber; |
| 229 | } |
| 230 | } |
| 231 | |
| 232 | return ret; |
| 233 | } |
| 234 | |
| 235 | status_t JpegRCompositeStream::processInputFrame(nsecs_t ts, const InputFrame &inputFrame) { |
| 236 | status_t res; |
| 237 | sp<ANativeWindow> outputANW = mOutputSurface; |
| 238 | ANativeWindowBuffer *anb; |
| 239 | int fenceFd; |
| 240 | void *dstBuffer; |
| 241 | |
Dichen Zhang | 8a533b7 | 2022-11-15 23:03:02 +0000 | [diff] [blame] | 242 | size_t maxJpegRBufferSize = 0; |
Emilian Peev | 434248e | 2022-10-06 14:58:54 -0700 | [diff] [blame] | 243 | if (mMaxJpegBufferSize > 0) { |
| 244 | // If this is an ultra high resolution sensor and the input frames size |
| 245 | // is > default res jpeg. |
| 246 | if (mUHRMaxJpegSize.width != 0 && |
| 247 | inputFrame.jpegBuffer.width * inputFrame.jpegBuffer.height > |
| 248 | mDefaultMaxJpegSize.width * mDefaultMaxJpegSize.height) { |
Dichen Zhang | 8a533b7 | 2022-11-15 23:03:02 +0000 | [diff] [blame] | 249 | maxJpegRBufferSize = mUHRMaxJpegBufferSize; |
Emilian Peev | 434248e | 2022-10-06 14:58:54 -0700 | [diff] [blame] | 250 | } else { |
Dichen Zhang | 8a533b7 | 2022-11-15 23:03:02 +0000 | [diff] [blame] | 251 | maxJpegRBufferSize = mMaxJpegBufferSize; |
Emilian Peev | 434248e | 2022-10-06 14:58:54 -0700 | [diff] [blame] | 252 | } |
| 253 | } else { |
Dichen Zhang | 8a533b7 | 2022-11-15 23:03:02 +0000 | [diff] [blame] | 254 | maxJpegRBufferSize = inputFrame.p010Buffer.width * inputFrame.p010Buffer.height; |
Emilian Peev | 434248e | 2022-10-06 14:58:54 -0700 | [diff] [blame] | 255 | } |
| 256 | |
| 257 | uint8_t jpegQuality = 100; |
| 258 | auto entry = inputFrame.result.find(ANDROID_JPEG_QUALITY); |
| 259 | if (entry.count > 0) { |
| 260 | jpegQuality = entry.data.u8[0]; |
| 261 | } |
| 262 | |
Dichen Zhang | 8a533b7 | 2022-11-15 23:03:02 +0000 | [diff] [blame] | 263 | if ((res = native_window_set_buffers_dimensions(mOutputSurface.get(), maxJpegRBufferSize, 1)) |
Emilian Peev | 434248e | 2022-10-06 14:58:54 -0700 | [diff] [blame] | 264 | != OK) { |
| 265 | ALOGE("%s: Unable to configure stream buffer dimensions" |
Dichen Zhang | 8a533b7 | 2022-11-15 23:03:02 +0000 | [diff] [blame] | 266 | " %zux%u for stream %d", __FUNCTION__, maxJpegRBufferSize, 1U, mP010StreamId); |
Emilian Peev | 434248e | 2022-10-06 14:58:54 -0700 | [diff] [blame] | 267 | return res; |
| 268 | } |
| 269 | |
| 270 | res = outputANW->dequeueBuffer(mOutputSurface.get(), &anb, &fenceFd); |
| 271 | if (res != OK) { |
| 272 | ALOGE("%s: Error retrieving output buffer: %s (%d)", __FUNCTION__, strerror(-res), |
| 273 | res); |
| 274 | return res; |
| 275 | } |
| 276 | |
| 277 | sp<GraphicBuffer> gb = GraphicBuffer::from(anb); |
| 278 | GraphicBufferLocker gbLocker(gb); |
| 279 | res = gbLocker.lockAsync(&dstBuffer, fenceFd); |
| 280 | if (res != OK) { |
| 281 | ALOGE("%s: Error trying to lock output buffer fence: %s (%d)", __FUNCTION__, |
| 282 | strerror(-res), res); |
| 283 | outputANW->cancelBuffer(mOutputSurface.get(), anb, /*fence*/ -1); |
| 284 | return res; |
| 285 | } |
| 286 | |
Dichen Zhang | 8a533b7 | 2022-11-15 23:03:02 +0000 | [diff] [blame] | 287 | if ((gb->getWidth() < maxJpegRBufferSize) || (gb->getHeight() != 1)) { |
Emilian Peev | 434248e | 2022-10-06 14:58:54 -0700 | [diff] [blame] | 288 | ALOGE("%s: Blob buffer size mismatch, expected %zux%u received %dx%d", __FUNCTION__, |
Dichen Zhang | 8a533b7 | 2022-11-15 23:03:02 +0000 | [diff] [blame] | 289 | maxJpegRBufferSize, 1, gb->getWidth(), gb->getHeight()); |
Emilian Peev | 434248e | 2022-10-06 14:58:54 -0700 | [diff] [blame] | 290 | outputANW->cancelBuffer(mOutputSurface.get(), anb, /*fence*/ -1); |
| 291 | return BAD_VALUE; |
| 292 | } |
| 293 | |
Dichen Zhang | 8a533b7 | 2022-11-15 23:03:02 +0000 | [diff] [blame] | 294 | size_t actualJpegRSize = 0; |
Dichen Zhang | 06edce0 | 2023-02-10 23:07:15 +0000 | [diff] [blame] | 295 | jpegrecoverymap::jpegr_uncompressed_struct p010; |
| 296 | jpegrecoverymap::jpegr_compressed_struct jpegR; |
| 297 | jpegrecoverymap::JpegR jpegREncoder; |
Dichen Zhang | 8a533b7 | 2022-11-15 23:03:02 +0000 | [diff] [blame] | 298 | |
Dichen Zhang | d317647 | 2023-01-05 19:39:23 +0000 | [diff] [blame] | 299 | p010.height = inputFrame.p010Buffer.height; |
| 300 | p010.width = inputFrame.p010Buffer.width; |
Dichen Zhang | 06edce0 | 2023-02-10 23:07:15 +0000 | [diff] [blame] | 301 | p010.colorGamut = jpegrecoverymap::jpegr_color_gamut::JPEGR_COLORGAMUT_BT2100; |
Dichen Zhang | 9bbeca9 | 2023-04-05 12:24:28 -0700 | [diff] [blame] | 302 | p010.data = inputFrame.p010Buffer.data; |
| 303 | p010.chroma_data = inputFrame.p010Buffer.dataCb; |
Emilian Peev | 24523ca | 2023-04-06 11:14:46 -0700 | [diff] [blame^] | 304 | // Strides are expected to be in pixels not bytes |
| 305 | p010.luma_stride = inputFrame.p010Buffer.stride / 2; |
| 306 | p010.chroma_stride = inputFrame.p010Buffer.chromaStride / 2; |
Dichen Zhang | d317647 | 2023-01-05 19:39:23 +0000 | [diff] [blame] | 307 | |
| 308 | jpegR.data = dstBuffer; |
| 309 | jpegR.maxLength = maxJpegRBufferSize; |
| 310 | |
Dichen Zhang | 06edce0 | 2023-02-10 23:07:15 +0000 | [diff] [blame] | 311 | jpegrecoverymap::jpegr_transfer_function transferFunction; |
Dichen Zhang | d317647 | 2023-01-05 19:39:23 +0000 | [diff] [blame] | 312 | switch (mP010DynamicRange) { |
| 313 | case ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_HDR10: |
| 314 | case ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_HDR10_PLUS: |
Dichen Zhang | 06edce0 | 2023-02-10 23:07:15 +0000 | [diff] [blame] | 315 | transferFunction = jpegrecoverymap::jpegr_transfer_function::JPEGR_TF_PQ; |
Dichen Zhang | d317647 | 2023-01-05 19:39:23 +0000 | [diff] [blame] | 316 | break; |
| 317 | default: |
Dichen Zhang | 06edce0 | 2023-02-10 23:07:15 +0000 | [diff] [blame] | 318 | transferFunction = jpegrecoverymap::jpegr_transfer_function::JPEGR_TF_HLG; |
Dichen Zhang | d317647 | 2023-01-05 19:39:23 +0000 | [diff] [blame] | 319 | } |
| 320 | |
| 321 | if (mSupportInternalJpeg) { |
Dichen Zhang | 06edce0 | 2023-02-10 23:07:15 +0000 | [diff] [blame] | 322 | jpegrecoverymap::jpegr_compressed_struct jpeg; |
Dichen Zhang | 8a533b7 | 2022-11-15 23:03:02 +0000 | [diff] [blame] | 323 | |
| 324 | jpeg.data = inputFrame.jpegBuffer.data; |
| 325 | jpeg.length = android::camera2::JpegProcessor::findJpegSize(inputFrame.jpegBuffer.data, |
Emilian Peev | 434248e | 2022-10-06 14:58:54 -0700 | [diff] [blame] | 326 | inputFrame.jpegBuffer.width); |
Dichen Zhang | 8a533b7 | 2022-11-15 23:03:02 +0000 | [diff] [blame] | 327 | if (jpeg.length == 0) { |
Emilian Peev | 434248e | 2022-10-06 14:58:54 -0700 | [diff] [blame] | 328 | ALOGW("%s: Failed to find input jpeg size, default to using entire buffer!", |
| 329 | __FUNCTION__); |
Dichen Zhang | 8a533b7 | 2022-11-15 23:03:02 +0000 | [diff] [blame] | 330 | jpeg.length = inputFrame.jpegBuffer.width; |
Emilian Peev | 434248e | 2022-10-06 14:58:54 -0700 | [diff] [blame] | 331 | } |
Dichen Zhang | 8a533b7 | 2022-11-15 23:03:02 +0000 | [diff] [blame] | 332 | |
| 333 | if (mOutputColorSpace == ANDROID_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP_DISPLAY_P3) { |
Dichen Zhang | 06edce0 | 2023-02-10 23:07:15 +0000 | [diff] [blame] | 334 | jpeg.colorGamut = jpegrecoverymap::jpegr_color_gamut::JPEGR_COLORGAMUT_P3; |
Dichen Zhang | 8a533b7 | 2022-11-15 23:03:02 +0000 | [diff] [blame] | 335 | } else { |
Dichen Zhang | 06edce0 | 2023-02-10 23:07:15 +0000 | [diff] [blame] | 336 | jpeg.colorGamut = jpegrecoverymap::jpegr_color_gamut::JPEGR_COLORGAMUT_BT709; |
Emilian Peev | 434248e | 2022-10-06 14:58:54 -0700 | [diff] [blame] | 337 | } |
Dichen Zhang | 8a533b7 | 2022-11-15 23:03:02 +0000 | [diff] [blame] | 338 | |
Dichen Zhang | 809ed08 | 2023-02-10 22:42:30 +0000 | [diff] [blame] | 339 | res = jpegREncoder.encodeJPEGR(&p010, &jpeg, transferFunction, &jpegR); |
Emilian Peev | 434248e | 2022-10-06 14:58:54 -0700 | [diff] [blame] | 340 | } else { |
| 341 | const uint8_t* exifBuffer = nullptr; |
| 342 | size_t exifBufferSize = 0; |
| 343 | std::unique_ptr<ExifUtils> utils(ExifUtils::create()); |
| 344 | utils->initializeEmpty(); |
| 345 | utils->setFromMetadata(inputFrame.result, mStaticInfo, inputFrame.p010Buffer.width, |
| 346 | inputFrame.p010Buffer.height); |
| 347 | if (utils->generateApp1()) { |
| 348 | exifBuffer = utils->getApp1Buffer(); |
| 349 | exifBufferSize = utils->getApp1Length(); |
| 350 | } else { |
| 351 | ALOGE("%s: Unable to generate App1 buffer", __FUNCTION__); |
| 352 | } |
Dichen Zhang | d317647 | 2023-01-05 19:39:23 +0000 | [diff] [blame] | 353 | |
Dichen Zhang | 06edce0 | 2023-02-10 23:07:15 +0000 | [diff] [blame] | 354 | jpegrecoverymap::jpegr_exif_struct exif; |
Dichen Zhang | d317647 | 2023-01-05 19:39:23 +0000 | [diff] [blame] | 355 | exif.data = reinterpret_cast<void*>(const_cast<uint8_t*>(exifBuffer)); |
| 356 | exif.length = exifBufferSize; |
| 357 | |
Dichen Zhang | 809ed08 | 2023-02-10 22:42:30 +0000 | [diff] [blame] | 358 | res = jpegREncoder.encodeJPEGR(&p010, transferFunction, &jpegR, jpegQuality, &exif); |
Emilian Peev | 434248e | 2022-10-06 14:58:54 -0700 | [diff] [blame] | 359 | } |
| 360 | |
Dichen Zhang | d317647 | 2023-01-05 19:39:23 +0000 | [diff] [blame] | 361 | if (res != OK) { |
| 362 | ALOGE("%s: Error trying to encode JPEG/R: %s (%d)", __FUNCTION__, strerror(-res), res); |
| 363 | return res; |
| 364 | } |
| 365 | |
| 366 | actualJpegRSize = jpegR.length; |
Dichen Zhang | d317647 | 2023-01-05 19:39:23 +0000 | [diff] [blame] | 367 | |
Dichen Zhang | 8a533b7 | 2022-11-15 23:03:02 +0000 | [diff] [blame] | 368 | size_t finalJpegRSize = actualJpegRSize + sizeof(CameraBlob); |
| 369 | if (finalJpegRSize > maxJpegRBufferSize) { |
Emilian Peev | 434248e | 2022-10-06 14:58:54 -0700 | [diff] [blame] | 370 | ALOGE("%s: Final jpeg buffer not large enough for the jpeg blob header", __FUNCTION__); |
| 371 | outputANW->cancelBuffer(mOutputSurface.get(), anb, /*fence*/ -1); |
| 372 | return NO_MEMORY; |
| 373 | } |
| 374 | |
| 375 | res = native_window_set_buffers_timestamp(mOutputSurface.get(), ts); |
| 376 | if (res != OK) { |
| 377 | ALOGE("%s: Stream %d: Error setting timestamp: %s (%d)", __FUNCTION__, |
| 378 | getStreamId(), strerror(-res), res); |
| 379 | return res; |
| 380 | } |
| 381 | |
Dichen Zhang | 8a533b7 | 2022-11-15 23:03:02 +0000 | [diff] [blame] | 382 | ALOGV("%s: Final jpeg size: %zu", __func__, finalJpegRSize); |
Emilian Peev | 434248e | 2022-10-06 14:58:54 -0700 | [diff] [blame] | 383 | uint8_t* header = static_cast<uint8_t *> (dstBuffer) + |
| 384 | (gb->getWidth() - sizeof(CameraBlob)); |
| 385 | CameraBlob blobHeader = { |
| 386 | .blobId = CameraBlobId::JPEG, |
Dichen Zhang | 8a533b7 | 2022-11-15 23:03:02 +0000 | [diff] [blame] | 387 | .blobSizeBytes = static_cast<int32_t>(actualJpegRSize) |
Emilian Peev | 434248e | 2022-10-06 14:58:54 -0700 | [diff] [blame] | 388 | }; |
| 389 | memcpy(header, &blobHeader, sizeof(CameraBlob)); |
Emilian Peev | 567c31c | 2023-03-06 15:02:37 -0800 | [diff] [blame] | 390 | |
| 391 | if (inputFrame.requestTimeNs != -1) { |
| 392 | auto captureLatency = ns2ms(systemTime() - inputFrame.requestTimeNs); |
| 393 | mSessionStatsBuilder.incCounter(mP010StreamId, false /*dropped*/, captureLatency); |
| 394 | if (mFirstRequestLatency == -1) { |
| 395 | mFirstRequestLatency = captureLatency; |
| 396 | } |
| 397 | } |
Emilian Peev | 434248e | 2022-10-06 14:58:54 -0700 | [diff] [blame] | 398 | outputANW->queueBuffer(mOutputSurface.get(), anb, /*fence*/ -1); |
| 399 | |
| 400 | return res; |
| 401 | } |
| 402 | |
| 403 | void JpegRCompositeStream::releaseInputFrameLocked(InputFrame *inputFrame /*out*/) { |
| 404 | if (inputFrame == nullptr) { |
| 405 | return; |
| 406 | } |
| 407 | |
| 408 | if (inputFrame->p010Buffer.data != nullptr) { |
| 409 | mP010Consumer->unlockBuffer(inputFrame->p010Buffer); |
| 410 | inputFrame->p010Buffer.data = nullptr; |
| 411 | mP010BufferAcquired = false; |
| 412 | } |
| 413 | |
| 414 | if (inputFrame->jpegBuffer.data != nullptr) { |
| 415 | mBlobConsumer->unlockBuffer(inputFrame->jpegBuffer); |
| 416 | inputFrame->jpegBuffer.data = nullptr; |
| 417 | mBlobBufferAcquired = false; |
| 418 | } |
| 419 | |
| 420 | if ((inputFrame->error || mErrorState) && !inputFrame->errorNotified) { |
| 421 | //TODO: Figure out correct requestId |
| 422 | notifyError(inputFrame->frameNumber, -1 /*requestId*/); |
| 423 | inputFrame->errorNotified = true; |
Emilian Peev | 567c31c | 2023-03-06 15:02:37 -0800 | [diff] [blame] | 424 | mSessionStatsBuilder.incCounter(mP010StreamId, true /*dropped*/, 0 /*captureLatencyMs*/); |
Emilian Peev | 434248e | 2022-10-06 14:58:54 -0700 | [diff] [blame] | 425 | } |
| 426 | } |
| 427 | |
| 428 | void JpegRCompositeStream::releaseInputFramesLocked(int64_t currentTs) { |
| 429 | auto it = mPendingInputFrames.begin(); |
| 430 | while (it != mPendingInputFrames.end()) { |
| 431 | if (it->first <= currentTs) { |
| 432 | releaseInputFrameLocked(&it->second); |
| 433 | it = mPendingInputFrames.erase(it); |
| 434 | } else { |
| 435 | it++; |
| 436 | } |
| 437 | } |
| 438 | } |
| 439 | |
| 440 | bool JpegRCompositeStream::threadLoop() { |
| 441 | int64_t currentTs = INT64_MAX; |
| 442 | bool newInputAvailable = false; |
| 443 | |
| 444 | { |
| 445 | Mutex::Autolock l(mMutex); |
| 446 | |
| 447 | if (mErrorState) { |
| 448 | // In case we landed in error state, return any pending buffers and |
| 449 | // halt all further processing. |
| 450 | compilePendingInputLocked(); |
| 451 | releaseInputFramesLocked(currentTs); |
| 452 | return false; |
| 453 | } |
| 454 | |
| 455 | while (!newInputAvailable) { |
| 456 | compilePendingInputLocked(); |
| 457 | newInputAvailable = getNextReadyInputLocked(¤tTs); |
| 458 | if (!newInputAvailable) { |
| 459 | auto failingFrameNumber = getNextFailingInputLocked(¤tTs); |
| 460 | if (failingFrameNumber >= 0) { |
| 461 | // We cannot erase 'mPendingInputFrames[currentTs]' at this point because it is |
| 462 | // possible for two internal stream buffers to fail. In such scenario the |
| 463 | // composite stream should notify the client about a stream buffer error only |
| 464 | // once and this information is kept within 'errorNotified'. |
| 465 | // Any present failed input frames will be removed on a subsequent call to |
| 466 | // 'releaseInputFramesLocked()'. |
| 467 | releaseInputFrameLocked(&mPendingInputFrames[currentTs]); |
| 468 | currentTs = INT64_MAX; |
| 469 | } |
| 470 | |
| 471 | auto ret = mInputReadyCondition.waitRelative(mMutex, kWaitDuration); |
| 472 | if (ret == TIMED_OUT) { |
| 473 | return true; |
| 474 | } else if (ret != OK) { |
| 475 | ALOGE("%s: Timed wait on condition failed: %s (%d)", __FUNCTION__, |
| 476 | strerror(-ret), ret); |
| 477 | return false; |
| 478 | } |
| 479 | } |
| 480 | } |
| 481 | } |
| 482 | |
| 483 | auto res = processInputFrame(currentTs, mPendingInputFrames[currentTs]); |
| 484 | Mutex::Autolock l(mMutex); |
| 485 | if (res != OK) { |
| 486 | ALOGE("%s: Failed processing frame with timestamp: %" PRIu64 ": %s (%d)", __FUNCTION__, |
| 487 | currentTs, strerror(-res), res); |
| 488 | mPendingInputFrames[currentTs].error = true; |
| 489 | } |
| 490 | |
| 491 | releaseInputFramesLocked(currentTs); |
| 492 | |
| 493 | return true; |
| 494 | } |
| 495 | |
| 496 | bool JpegRCompositeStream::isJpegRCompositeStream(const sp<Surface> &surface) { |
| 497 | if (CameraProviderManager::kFrameworkJpegRDisabled) { |
| 498 | return false; |
| 499 | } |
| 500 | ANativeWindow *anw = surface.get(); |
| 501 | status_t err; |
| 502 | int format; |
| 503 | if ((err = anw->query(anw, NATIVE_WINDOW_FORMAT, &format)) != OK) { |
| 504 | ALOGE("%s: Failed to query Surface format: %s (%d)", __FUNCTION__, strerror(-err), |
| 505 | err); |
| 506 | return false; |
| 507 | } |
| 508 | |
| 509 | int dataspace; |
| 510 | if ((err = anw->query(anw, NATIVE_WINDOW_DEFAULT_DATASPACE, &dataspace)) != OK) { |
| 511 | ALOGE("%s: Failed to query Surface dataspace: %s (%d)", __FUNCTION__, strerror(-err), |
| 512 | err); |
| 513 | return false; |
| 514 | } |
| 515 | |
| 516 | if ((format == HAL_PIXEL_FORMAT_BLOB) && (dataspace == static_cast<int>(kJpegRDataSpace))) { |
| 517 | return true; |
| 518 | } |
| 519 | |
| 520 | return false; |
| 521 | } |
| 522 | |
| 523 | void JpegRCompositeStream::deriveDynamicRangeAndDataspace(int64_t dynamicProfile, |
| 524 | int64_t* /*out*/dynamicRange, int64_t* /*out*/dataSpace) { |
| 525 | if ((dynamicRange == nullptr) || (dataSpace == nullptr)) { |
| 526 | return; |
| 527 | } |
| 528 | |
| 529 | switch (dynamicProfile) { |
| 530 | case ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_HDR10: |
| 531 | case ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_HDR10_PLUS: |
| 532 | *dynamicRange = dynamicProfile; |
| 533 | *dataSpace = HAL_DATASPACE_BT2020_ITU_PQ; |
| 534 | break; |
| 535 | case ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_DOLBY_VISION_10B_HDR_REF: |
| 536 | case ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_DOLBY_VISION_10B_HDR_REF_PO: |
| 537 | case ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_DOLBY_VISION_10B_HDR_OEM: |
| 538 | case ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_DOLBY_VISION_10B_HDR_OEM_PO: |
| 539 | *dynamicRange = dynamicProfile; |
| 540 | *dataSpace = HAL_DATASPACE_BT2020_ITU_HLG; |
| 541 | break; |
| 542 | default: |
| 543 | *dynamicRange = kP010DefaultDynamicRange; |
| 544 | *dataSpace = kP010DefaultDataSpace; |
| 545 | } |
| 546 | |
| 547 | } |
| 548 | |
/**
 * Creates the internal device streams backing this composite stream.
 *
 * Always creates a P010 stream whose dynamic range/dataspace is derived from
 * 'dynamicProfile'. When the device supports concurrent standard-dynamic-range
 * capture alongside that profile, an additional internal jpeg blob stream is
 * created so processInputFrame() can reuse the device-produced jpeg. Listeners
 * are registered for every internal stream and the caller-provided surface
 * becomes the composite output.
 *
 * @param consumers            Client output surfaces; consumers[0] becomes the
 *                             composite JPEG/R output surface.
 * @param width/height         Output dimensions used for both internal streams.
 * @param format               Pixel format for the internal blob stream.
 * @param rotation             Stream rotation.
 * @param id                   Out: stream id of the internal P010 stream.
 * @param physicalCameraId     Physical camera the streams belong to.
 * @param sensorPixelModesUsed Sensor pixel modes for stream configuration.
 * @param surfaceIds           Out: surface ids of the created P010 stream.
 * @param colorSpace           Requested output color space.
 * @param dynamicProfile       Requested dynamic range profile.
 * @param streamUseCase        Stream use case forwarded to both streams.
 * @param useReadoutTimestamp  Whether streams should use readout timestamps.
 * @return OK on success, NO_INIT when the device is gone, or the error from
 *         the first failing createStream()/listener registration.
 */
status_t JpegRCompositeStream::createInternalStreams(const std::vector<sp<Surface>>& consumers,
        bool /*hasDeferredConsumer*/, uint32_t width, uint32_t height, int format,
        camera_stream_rotation_t rotation, int *id, const String8& physicalCameraId,
        const std::unordered_set<int32_t> &sensorPixelModesUsed,
        std::vector<int> *surfaceIds,
        int /*streamSetId*/, bool /*isShared*/, int32_t colorSpace,
        int64_t dynamicProfile, int64_t streamUseCase, bool useReadoutTimestamp) {
    sp<CameraDeviceBase> device = mDevice.promote();
    if (!device.get()) {
        ALOGE("%s: Invalid camera device!", __FUNCTION__);
        return NO_INIT;
    }

    deriveDynamicRangeAndDataspace(dynamicProfile, &mP010DynamicRange, &mP010DataSpace);
    // Internal jpeg stream is only viable when the device can capture the
    // derived HDR profile and STANDARD dynamic range concurrently.
    mSupportInternalJpeg = CameraProviderManager::isConcurrentDynamicRangeCaptureSupported(
            mStaticInfo, mP010DynamicRange,
            ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD);

    // CPU consumer + surface pair feeding the internal P010 stream.
    sp<IGraphicBufferProducer> producer;
    sp<IGraphicBufferConsumer> consumer;
    BufferQueue::createBufferQueue(&producer, &consumer);
    mP010Consumer = new CpuConsumer(consumer, /*maxLockedBuffers*/1, /*controlledByApp*/ true);
    mP010Consumer->setFrameAvailableListener(this);
    mP010Consumer->setName(String8("Camera3-P010CompositeStream"));
    mP010Surface = new Surface(producer);

    auto ret = device->createStream(mP010Surface, width, height, kP010PixelFormat,
            static_cast<android_dataspace>(mP010DataSpace), rotation,
            id, physicalCameraId, sensorPixelModesUsed, surfaceIds,
            camera3::CAMERA3_STREAM_SET_ID_INVALID, false /*isShared*/, false /*isMultiResolution*/,
            GRALLOC_USAGE_SW_READ_OFTEN, mP010DynamicRange, streamUseCase,
            OutputConfiguration::TIMESTAMP_BASE_DEFAULT, OutputConfiguration::MIRROR_MODE_AUTO,
            ANDROID_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP_UNSPECIFIED, useReadoutTimestamp);
    if (ret == OK) {
        mP010StreamId = *id;
        mP010SurfaceId = (*surfaceIds)[0];
        mOutputSurface = consumers[0];
    } else {
        return ret;
    }

    if (mSupportInternalJpeg) {
        // Second CPU consumer + surface pair for the internal jpeg blob stream.
        BufferQueue::createBufferQueue(&producer, &consumer);
        mBlobConsumer = new CpuConsumer(consumer, /*maxLockedBuffers*/ 1, /*controlledByApp*/ true);
        mBlobConsumer->setFrameAvailableListener(this);
        mBlobConsumer->setName(String8("Camera3-JpegRCompositeStream"));
        mBlobSurface = new Surface(producer);
        std::vector<int> blobSurfaceId;
        ret = device->createStream(mBlobSurface, width, height, format,
                kJpegDataSpace, rotation, &mBlobStreamId, physicalCameraId, sensorPixelModesUsed,
                &blobSurfaceId,
                /*streamSetI*/ camera3::CAMERA3_STREAM_SET_ID_INVALID,
                /*isShared*/ false,
                /*isMultiResolution*/ false,
                /*consumerUsage*/ GRALLOC_USAGE_SW_READ_OFTEN,
                /*dynamicProfile*/ ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD,
                streamUseCase,
                /*timestampBase*/ OutputConfiguration::TIMESTAMP_BASE_DEFAULT,
                /*mirrorMode*/ OutputConfiguration::MIRROR_MODE_AUTO,
                /*colorSpace*/ colorSpace, useReadoutTimestamp);
        if (ret == OK) {
            mBlobSurfaceId = blobSurfaceId[0];
        } else {
            return ret;
        }

        ret = registerCompositeStreamListener(mBlobStreamId);
        if (ret != OK) {
            ALOGE("%s: Failed to register jpeg stream listener!", __FUNCTION__);
            return ret;
        }
    }

    ret = registerCompositeStreamListener(getStreamId());
    if (ret != OK) {
        ALOGE("%s: Failed to register P010 stream listener!", __FUNCTION__);
        return ret;
    }

    mOutputColorSpace = colorSpace;
    mOutputStreamUseCase = streamUseCase;
    mBlobWidth = width;
    mBlobHeight = height;

    return ret;
}
| 635 | |
| 636 | status_t JpegRCompositeStream::configureStream() { |
| 637 | if (isRunning()) { |
| 638 | // Processing thread is already running, nothing more to do. |
| 639 | return NO_ERROR; |
| 640 | } |
| 641 | |
| 642 | if (mOutputSurface.get() == nullptr) { |
| 643 | ALOGE("%s: No valid output surface set!", __FUNCTION__); |
| 644 | return NO_INIT; |
| 645 | } |
| 646 | |
| 647 | auto res = mOutputSurface->connect(NATIVE_WINDOW_API_CAMERA, mProducerListener); |
| 648 | if (res != OK) { |
| 649 | ALOGE("%s: Unable to connect to native window for stream %d", |
| 650 | __FUNCTION__, mP010StreamId); |
| 651 | return res; |
| 652 | } |
| 653 | |
| 654 | if ((res = native_window_set_buffers_format(mOutputSurface.get(), HAL_PIXEL_FORMAT_BLOB)) |
| 655 | != OK) { |
| 656 | ALOGE("%s: Unable to configure stream buffer format for stream %d", __FUNCTION__, |
| 657 | mP010StreamId); |
| 658 | return res; |
| 659 | } |
| 660 | |
| 661 | int maxProducerBuffers; |
| 662 | ANativeWindow *anw = mP010Surface.get(); |
| 663 | if ((res = anw->query(anw, NATIVE_WINDOW_MIN_UNDEQUEUED_BUFFERS, &maxProducerBuffers)) != OK) { |
| 664 | ALOGE("%s: Unable to query consumer undequeued" |
| 665 | " buffer count for stream %d", __FUNCTION__, mP010StreamId); |
| 666 | return res; |
| 667 | } |
| 668 | |
| 669 | ANativeWindow *anwConsumer = mOutputSurface.get(); |
| 670 | int maxConsumerBuffers; |
| 671 | if ((res = anwConsumer->query(anwConsumer, NATIVE_WINDOW_MIN_UNDEQUEUED_BUFFERS, |
| 672 | &maxConsumerBuffers)) != OK) { |
| 673 | ALOGE("%s: Unable to query consumer undequeued" |
| 674 | " buffer count for stream %d", __FUNCTION__, mP010StreamId); |
| 675 | return res; |
| 676 | } |
| 677 | |
| 678 | if ((res = native_window_set_buffer_count( |
| 679 | anwConsumer, maxProducerBuffers + maxConsumerBuffers)) != OK) { |
| 680 | ALOGE("%s: Unable to set buffer count for stream %d", __FUNCTION__, mP010StreamId); |
| 681 | return res; |
| 682 | } |
| 683 | |
Emilian Peev | 567c31c | 2023-03-06 15:02:37 -0800 | [diff] [blame] | 684 | mSessionStatsBuilder.addStream(mP010StreamId); |
| 685 | |
Emilian Peev | 434248e | 2022-10-06 14:58:54 -0700 | [diff] [blame] | 686 | run("JpegRCompositeStreamProc"); |
| 687 | |
| 688 | return NO_ERROR; |
| 689 | } |
| 690 | |
| 691 | status_t JpegRCompositeStream::deleteInternalStreams() { |
| 692 | // The 'CameraDeviceClient' parent will delete the P010 stream |
| 693 | requestExit(); |
| 694 | |
| 695 | auto ret = join(); |
| 696 | if (ret != OK) { |
| 697 | ALOGE("%s: Failed to join with the main processing thread: %s (%d)", __FUNCTION__, |
| 698 | strerror(-ret), ret); |
| 699 | } |
| 700 | |
| 701 | if (mBlobStreamId >= 0) { |
| 702 | // Camera devices may not be valid after switching to offline mode. |
| 703 | // In this case, all offline streams including internal composite streams |
| 704 | // are managed and released by the offline session. |
| 705 | sp<CameraDeviceBase> device = mDevice.promote(); |
| 706 | if (device.get() != nullptr) { |
| 707 | ret = device->deleteStream(mBlobStreamId); |
| 708 | } |
| 709 | |
| 710 | mBlobStreamId = -1; |
| 711 | } |
| 712 | |
| 713 | if (mOutputSurface != nullptr) { |
| 714 | mOutputSurface->disconnect(NATIVE_WINDOW_API_CAMERA); |
| 715 | mOutputSurface.clear(); |
| 716 | } |
| 717 | |
| 718 | return ret; |
| 719 | } |
| 720 | |
| 721 | void JpegRCompositeStream::onFrameAvailable(const BufferItem& item) { |
| 722 | if (item.mDataSpace == kJpegDataSpace) { |
| 723 | ALOGV("%s: Jpeg buffer with ts: %" PRIu64 " ms. arrived!", |
| 724 | __func__, ns2ms(item.mTimestamp)); |
| 725 | |
| 726 | Mutex::Autolock l(mMutex); |
| 727 | if (!mErrorState) { |
| 728 | mInputJpegBuffers.push_back(item.mTimestamp); |
| 729 | mInputReadyCondition.signal(); |
| 730 | } |
| 731 | } else if (item.mDataSpace == static_cast<android_dataspace_t>(mP010DataSpace)) { |
| 732 | ALOGV("%s: P010 buffer with ts: %" PRIu64 " ms. arrived!", __func__, |
| 733 | ns2ms(item.mTimestamp)); |
| 734 | |
| 735 | Mutex::Autolock l(mMutex); |
| 736 | if (!mErrorState) { |
| 737 | mInputP010Buffers.push_back(item.mTimestamp); |
| 738 | mInputReadyCondition.signal(); |
| 739 | } |
| 740 | } else { |
| 741 | ALOGE("%s: Unexpected data space: 0x%x", __FUNCTION__, item.mDataSpace); |
| 742 | } |
| 743 | } |
| 744 | |
| 745 | status_t JpegRCompositeStream::insertGbp(SurfaceMap* /*out*/outSurfaceMap, |
| 746 | Vector<int32_t> * /*out*/outputStreamIds, int32_t* /*out*/currentStreamId) { |
| 747 | if (outputStreamIds == nullptr) { |
| 748 | return BAD_VALUE; |
| 749 | } |
| 750 | |
| 751 | if (outSurfaceMap->find(mP010StreamId) == outSurfaceMap->end()) { |
| 752 | outputStreamIds->push_back(mP010StreamId); |
| 753 | } |
| 754 | (*outSurfaceMap)[mP010StreamId].push_back(mP010SurfaceId); |
| 755 | |
| 756 | if (mSupportInternalJpeg) { |
| 757 | if (outSurfaceMap->find(mBlobStreamId) == outSurfaceMap->end()) { |
| 758 | outputStreamIds->push_back(mBlobStreamId); |
| 759 | } |
| 760 | (*outSurfaceMap)[mBlobStreamId].push_back(mBlobSurfaceId); |
| 761 | } |
| 762 | |
| 763 | if (currentStreamId != nullptr) { |
| 764 | *currentStreamId = mP010StreamId; |
| 765 | } |
| 766 | |
| 767 | return NO_ERROR; |
| 768 | } |
| 769 | |
| 770 | status_t JpegRCompositeStream::insertCompositeStreamIds( |
| 771 | std::vector<int32_t>* compositeStreamIds /*out*/) { |
| 772 | if (compositeStreamIds == nullptr) { |
| 773 | return BAD_VALUE; |
| 774 | } |
| 775 | |
| 776 | compositeStreamIds->push_back(mP010StreamId); |
| 777 | if (mSupportInternalJpeg) { |
| 778 | compositeStreamIds->push_back(mBlobStreamId); |
| 779 | } |
| 780 | |
| 781 | return OK; |
| 782 | } |
| 783 | |
| 784 | void JpegRCompositeStream::onResultError(const CaptureResultExtras& resultExtras) { |
| 785 | // Processing can continue even in case of result errors. |
| 786 | // At the moment Jpeg/R composite stream processing relies mainly on static camera |
| 787 | // characteristics data. The actual result data can be used for the jpeg quality but |
| 788 | // in case it is absent we can default to maximum. |
| 789 | eraseResult(resultExtras.frameNumber); |
Emilian Peev | 567c31c | 2023-03-06 15:02:37 -0800 | [diff] [blame] | 790 | mSessionStatsBuilder.incResultCounter(true /*dropped*/); |
Emilian Peev | 434248e | 2022-10-06 14:58:54 -0700 | [diff] [blame] | 791 | } |
| 792 | |
| 793 | bool JpegRCompositeStream::onStreamBufferError(const CaptureResultExtras& resultExtras) { |
| 794 | bool ret = false; |
| 795 | // Buffer errors concerning internal composite streams should not be directly visible to |
| 796 | // camera clients. They must only receive a single buffer error with the public composite |
| 797 | // stream id. |
| 798 | if ((resultExtras.errorStreamId == mP010StreamId) || |
| 799 | (resultExtras.errorStreamId == mBlobStreamId)) { |
| 800 | flagAnErrorFrameNumber(resultExtras.frameNumber); |
| 801 | ret = true; |
| 802 | } |
| 803 | |
| 804 | return ret; |
| 805 | } |
| 806 | |
| 807 | status_t JpegRCompositeStream::getCompositeStreamInfo(const OutputStreamInfo &streamInfo, |
| 808 | const CameraMetadata& staticInfo, |
| 809 | std::vector<OutputStreamInfo>* compositeOutput /*out*/) { |
| 810 | if (compositeOutput == nullptr) { |
| 811 | return BAD_VALUE; |
| 812 | } |
| 813 | |
| 814 | int64_t dynamicRange, dataSpace; |
| 815 | deriveDynamicRangeAndDataspace(streamInfo.dynamicRangeProfile, &dynamicRange, &dataSpace); |
| 816 | |
| 817 | compositeOutput->clear(); |
| 818 | compositeOutput->push_back({}); |
| 819 | (*compositeOutput)[0].width = streamInfo.width; |
| 820 | (*compositeOutput)[0].height = streamInfo.height; |
| 821 | (*compositeOutput)[0].format = kP010PixelFormat; |
| 822 | (*compositeOutput)[0].dataSpace = static_cast<android_dataspace_t>(dataSpace); |
| 823 | (*compositeOutput)[0].consumerUsage = GRALLOC_USAGE_SW_READ_OFTEN; |
| 824 | (*compositeOutput)[0].dynamicRangeProfile = dynamicRange; |
| 825 | (*compositeOutput)[0].colorSpace = |
| 826 | ANDROID_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP_UNSPECIFIED; |
| 827 | |
| 828 | if (CameraProviderManager::isConcurrentDynamicRangeCaptureSupported(staticInfo, |
| 829 | streamInfo.dynamicRangeProfile, |
| 830 | ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD)) { |
| 831 | compositeOutput->push_back({}); |
| 832 | (*compositeOutput)[1].width = streamInfo.width; |
| 833 | (*compositeOutput)[1].height = streamInfo.height; |
| 834 | (*compositeOutput)[1].format = HAL_PIXEL_FORMAT_BLOB; |
| 835 | (*compositeOutput)[1].dataSpace = kJpegDataSpace; |
| 836 | (*compositeOutput)[1].consumerUsage = GRALLOC_USAGE_SW_READ_OFTEN; |
| 837 | (*compositeOutput)[1].dynamicRangeProfile = |
| 838 | ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD; |
| 839 | (*compositeOutput)[1].colorSpace = streamInfo.colorSpace; |
| 840 | } |
| 841 | |
| 842 | return NO_ERROR; |
| 843 | } |
| 844 | |
Emilian Peev | 567c31c | 2023-03-06 15:02:37 -0800 | [diff] [blame] | 845 | void JpegRCompositeStream::getStreamStats(hardware::CameraStreamStats* streamStats) { |
| 846 | if ((streamStats == nullptr) || (mFirstRequestLatency != -1)) { |
| 847 | return; |
| 848 | } |
| 849 | |
| 850 | bool deviceError; |
| 851 | std::map<int, StreamStats> stats; |
| 852 | mSessionStatsBuilder.buildAndReset(&streamStats->mRequestCount, &streamStats->mErrorCount, |
| 853 | &deviceError, &stats); |
| 854 | if (stats.find(mP010StreamId) != stats.end()) { |
| 855 | streamStats->mWidth = mBlobWidth; |
| 856 | streamStats->mHeight = mBlobHeight; |
| 857 | streamStats->mFormat = HAL_PIXEL_FORMAT_BLOB; |
| 858 | streamStats->mDataSpace = static_cast<int>(kJpegRDataSpace); |
| 859 | streamStats->mDynamicRangeProfile = mP010DynamicRange; |
| 860 | streamStats->mColorSpace = mOutputColorSpace; |
| 861 | streamStats->mStreamUseCase = mOutputStreamUseCase; |
| 862 | streamStats->mStartLatencyMs = mFirstRequestLatency; |
| 863 | streamStats->mHistogramType = hardware::CameraStreamStats::HISTOGRAM_TYPE_CAPTURE_LATENCY; |
| 864 | streamStats->mHistogramBins.assign(stats[mP010StreamId].mCaptureLatencyBins.begin(), |
| 865 | stats[mP010StreamId].mCaptureLatencyBins.end()); |
| 866 | streamStats->mHistogramCounts.assign(stats[mP010StreamId].mCaptureLatencyHistogram.begin(), |
| 867 | stats[mP010StreamId].mCaptureLatencyHistogram.end()); |
| 868 | } |
| 869 | } |
| 870 | |
Emilian Peev | 434248e | 2022-10-06 14:58:54 -0700 | [diff] [blame] | 871 | }; // namespace camera3 |
| 872 | }; // namespace android |