/*
 * Copyright (C) 2022 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#define LOG_TAG "ExtCamDevSsn"
// #define LOG_NDEBUG 0
#include <log/log.h>

#include "ExternalCameraDeviceSession.h"

#include <Exif.h>
#include <ExternalCameraOfflineSession.h>
#include <aidl/android/hardware/camera/device/CameraBlob.h>
#include <aidl/android/hardware/camera/device/CameraBlobId.h>
#include <aidl/android/hardware/camera/device/ErrorMsg.h>
#include <aidl/android/hardware/camera/device/ShutterMsg.h>
#include <aidl/android/hardware/camera/device/StreamBufferRet.h>
#include <aidl/android/hardware/camera/device/StreamBuffersVal.h>
#include <aidl/android/hardware/camera/device/StreamConfigurationMode.h>
#include <aidl/android/hardware/camera/device/StreamRotation.h>
#include <aidl/android/hardware/camera/device/StreamType.h>
#include <aidl/android/hardware/graphics/common/Dataspace.h>
#include <aidlcommonsupport/NativeHandle.h>
#include <convert.h>
#include <linux/videodev2.h>
#include <sync/sync.h>
#include <utils/Trace.h>
#include <deque>

#define HAVE_JPEG  // required for libyuv.h to export MJPEG decode APIs
#include <libyuv.h>
#include <libyuv/convert.h>

namespace android {
namespace hardware {
namespace camera {
namespace device {
namespace implementation {

namespace {

// Size of request/result metadata fast message queue. Change to 0 to always use hwbinder buffer.
static constexpr size_t kMetadataMsgQueueSize = 1 << 18 /* 256kB */;

const int kBadFramesAfterStreamOn = 1;  // Drop the first x frames after streamOn to get rid of
                                        // some initial bad frames. TODO: develop a better bad
                                        // frame detection method.
constexpr int MAX_RETRY = 15;  // Retry failing ioctls a few times, to tolerate webcams that
                               // report transient ioctl failures.
constexpr int IOCTL_RETRY_SLEEP_US = 33000;  // 33ms * MAX_RETRY = ~0.5 seconds

// Constants for tryLock during dumpstate
static constexpr int kDumpLockRetries = 50;
static constexpr int kDumpLockSleep = 60000;
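// Worst case, each tryLock() helper below retries for kDumpLockRetries * kDumpLockSleep us
// (50 * 60ms = 3s) before giving up, so a wedged mutex stalls dumpstate by about 3 seconds
// per lock.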

bool tryLock(Mutex& mutex) {
    bool locked = false;
    for (int i = 0; i < kDumpLockRetries; ++i) {
        if (mutex.tryLock() == NO_ERROR) {
            locked = true;
            break;
        }
        usleep(kDumpLockSleep);
    }
    return locked;
}

bool tryLock(std::mutex& mutex) {
    bool locked = false;
    for (int i = 0; i < kDumpLockRetries; ++i) {
        if (mutex.try_lock()) {
            locked = true;
            break;
        }
        usleep(kDumpLockSleep);
    }
    return locked;
}

}  // anonymous namespace

using ::aidl::android::hardware::camera::device::BufferRequestStatus;
using ::aidl::android::hardware::camera::device::CameraBlob;
using ::aidl::android::hardware::camera::device::CameraBlobId;
using ::aidl::android::hardware::camera::device::ErrorMsg;
using ::aidl::android::hardware::camera::device::ShutterMsg;
using ::aidl::android::hardware::camera::device::StreamBuffer;
using ::aidl::android::hardware::camera::device::StreamBufferRet;
using ::aidl::android::hardware::camera::device::StreamBuffersVal;
using ::aidl::android::hardware::camera::device::StreamConfigurationMode;
using ::aidl::android::hardware::camera::device::StreamRotation;
using ::aidl::android::hardware::camera::device::StreamType;
using ::aidl::android::hardware::graphics::common::Dataspace;
using ::android::hardware::camera::common::V1_0::helper::ExifUtils;

// Static instances
const int ExternalCameraDeviceSession::kMaxProcessedStream;
const int ExternalCameraDeviceSession::kMaxStallStream;
HandleImporter ExternalCameraDeviceSession::sHandleImporter;

ExternalCameraDeviceSession::ExternalCameraDeviceSession(
        const std::shared_ptr<ICameraDeviceCallback>& callback, const ExternalCameraConfig& cfg,
        const std::vector<SupportedV4L2Format>& sortedFormats, const CroppingType& croppingType,
        const common::V1_0::helper::CameraMetadata& chars, const std::string& cameraId,
        unique_fd v4l2Fd)
    : mCallback(callback),
      mCfg(cfg),
      mCameraCharacteristics(chars),
      mSupportedFormats(sortedFormats),
      mCroppingType(croppingType),
      mCameraId(cameraId),
      mV4l2Fd(std::move(v4l2Fd)),
      mMaxThumbResolution(getMaxThumbResolution()),
      mMaxJpegResolution(getMaxJpegResolution()) {}

Size ExternalCameraDeviceSession::getMaxThumbResolution() const {
    return getMaxThumbnailResolution(mCameraCharacteristics);
}

Size ExternalCameraDeviceSession::getMaxJpegResolution() const {
    Size ret{0, 0};
    for (auto& fmt : mSupportedFormats) {
        if (fmt.width * fmt.height > ret.width * ret.height) {
            ret = Size{fmt.width, fmt.height};
        }
    }
    return ret;
}

bool ExternalCameraDeviceSession::initialize() {
    if (mV4l2Fd.get() < 0) {
        ALOGE("%s: invalid v4l2 device fd %d!", __FUNCTION__, mV4l2Fd.get());
        return true;
    }

    struct v4l2_capability capability;
    int ret = ioctl(mV4l2Fd.get(), VIDIOC_QUERYCAP, &capability);
    std::string make, model;
    if (ret < 0) {
        ALOGW("%s v4l2 QUERYCAP failed", __FUNCTION__);
        mExifMake = "Generic UVC webcam";
        mExifModel = "Generic UVC webcam";
    } else {
        // capability.card is UTF-8 encoded
        char card[32];
        int j = 0;
        for (int i = 0; i < 32; i++) {
            if (capability.card[i] < 128) {
                card[j++] = capability.card[i];
            }
            if (capability.card[i] == '\0') {
                break;
            }
        }
        if (j == 0 || card[j - 1] != '\0') {
            mExifMake = "Generic UVC webcam";
            mExifModel = "Generic UVC webcam";
        } else {
            mExifMake = card;
            mExifModel = card;
        }
    }

    initOutputThread();
    if (mOutputThread == nullptr) {
        ALOGE("%s: init OutputThread failed!", __FUNCTION__);
        return true;
    }
    mOutputThread->setExifMakeModel(mExifMake, mExifModel);

    status_t status = initDefaultRequests();
    if (status != OK) {
        ALOGE("%s: init default requests failed!", __FUNCTION__);
        return true;
    }

    mRequestMetadataQueue =
            std::make_unique<RequestMetadataQueue>(kMetadataMsgQueueSize, false /* non blocking */);
    if (!mRequestMetadataQueue->isValid()) {
        ALOGE("%s: invalid request fmq", __FUNCTION__);
        return true;
    }

    mResultMetadataQueue =
            std::make_shared<ResultMetadataQueue>(kMetadataMsgQueueSize, false /* non blocking */);
    if (!mResultMetadataQueue->isValid()) {
        ALOGE("%s: invalid result fmq", __FUNCTION__);
        return true;
    }

    mOutputThread->run();
    return false;
}

bool ExternalCameraDeviceSession::isInitFailed() {
    Mutex::Autolock _l(mLock);
    if (!mInitialized) {
        mInitFail = initialize();
        mInitialized = true;
    }
    return mInitFail;
}

void ExternalCameraDeviceSession::initOutputThread() {
    // Grab a shared_ptr to 'this' from ndk::SharedRefBase::ref()
    std::shared_ptr<ExternalCameraDeviceSession> thiz = ref<ExternalCameraDeviceSession>();

    mBufferRequestThread = std::make_shared<BufferRequestThread>(/*parent=*/thiz, mCallback);
    mBufferRequestThread->run();
    mOutputThread = std::make_shared<OutputThread>(/*parent=*/thiz, mCroppingType,
                                                   mCameraCharacteristics, mBufferRequestThread);
}

void ExternalCameraDeviceSession::closeOutputThread() {
    if (mOutputThread != nullptr) {
        mOutputThread->flush();
        mOutputThread->requestExitAndWait();
        mOutputThread.reset();
    }
}

void ExternalCameraDeviceSession::closeBufferRequestThread() {
    if (mBufferRequestThread != nullptr) {
        mBufferRequestThread->requestExitAndWait();
        mBufferRequestThread.reset();
    }
}

Status ExternalCameraDeviceSession::initStatus() const {
    Mutex::Autolock _l(mLock);
    Status status = Status::OK;
    if (mInitFail || mClosed) {
        ALOGI("%s: session initFailed %d closed %d", __FUNCTION__, mInitFail, mClosed);
        status = Status::INTERNAL_ERROR;
    }
    return status;
}

ExternalCameraDeviceSession::~ExternalCameraDeviceSession() {
    if (!isClosed()) {
        ALOGE("ExternalCameraDeviceSession deleted before close!");
        closeImpl();
    }
}

ScopedAStatus ExternalCameraDeviceSession::constructDefaultRequestSettings(
        RequestTemplate in_type, CameraMetadata* _aidl_return) {
    CameraMetadata emptyMetadata;
    Status status = initStatus();
    if (status != Status::OK) {
        return fromStatus(status);
    }
    switch (in_type) {
        case RequestTemplate::PREVIEW:
        case RequestTemplate::STILL_CAPTURE:
        case RequestTemplate::VIDEO_RECORD:
        case RequestTemplate::VIDEO_SNAPSHOT: {
            *_aidl_return = mDefaultRequests[in_type];
            break;
        }
        case RequestTemplate::MANUAL:
        case RequestTemplate::ZERO_SHUTTER_LAG:
            // Don't support MANUAL, ZSL templates
            status = Status::ILLEGAL_ARGUMENT;
            break;
        default:
            ALOGE("%s: unknown request template type %d", __FUNCTION__, static_cast<int>(in_type));
            status = Status::ILLEGAL_ARGUMENT;
            break;
    }
    return fromStatus(status);
}

ScopedAStatus ExternalCameraDeviceSession::configureStreams(
        const StreamConfiguration& in_requestedConfiguration,
        std::vector<HalStream>* _aidl_return) {
    uint32_t blobBufferSize = 0;
    _aidl_return->clear();
    Mutex::Autolock _il(mInterfaceLock);

    Status status =
            isStreamCombinationSupported(in_requestedConfiguration, mSupportedFormats, mCfg);
    if (status != Status::OK) {
        return fromStatus(status);
    }

    status = initStatus();
    if (status != Status::OK) {
        return fromStatus(status);
    }

    {
        std::lock_guard<std::mutex> lk(mInflightFramesLock);
        if (!mInflightFrames.empty()) {
            ALOGE("%s: trying to configureStreams while there are still %zu inflight frames!",
                  __FUNCTION__, mInflightFrames.size());
            return fromStatus(Status::INTERNAL_ERROR);
        }
    }

    Mutex::Autolock _l(mLock);
    {
        Mutex::Autolock _cl(mCbsLock);
        // Add new streams
        for (const auto& stream : in_requestedConfiguration.streams) {
            if (mStreamMap.count(stream.id) == 0) {
                mStreamMap[stream.id] = stream;
                mCirculatingBuffers.emplace(stream.id, CirculatingBuffers{});
            }
        }

        // Cleanup removed streams
        for (auto it = mStreamMap.begin(); it != mStreamMap.end();) {
            int id = it->first;
            bool found = false;
            for (const auto& stream : in_requestedConfiguration.streams) {
                if (id == stream.id) {
                    found = true;
                    break;
                }
            }
            if (!found) {
                // Unmap all buffers of deleted stream
                cleanupBuffersLocked(id);
                it = mStreamMap.erase(it);
            } else {
                ++it;
            }
        }
    }

    // Now select a V4L2 format to produce all output streams
    float desiredAr = (mCroppingType == VERTICAL) ? kMaxAspectRatio : kMinAspectRatio;
    uint32_t maxDim = 0;
    for (const auto& stream : in_requestedConfiguration.streams) {
        float aspectRatio = ASPECT_RATIO(stream);
        ALOGI("%s: request stream %dx%d", __FUNCTION__, stream.width, stream.height);
        if ((mCroppingType == VERTICAL && aspectRatio < desiredAr) ||
            (mCroppingType == HORIZONTAL && aspectRatio > desiredAr)) {
            desiredAr = aspectRatio;
        }

        // The dimension that's not cropped
        uint32_t dim = (mCroppingType == VERTICAL) ? stream.width : stream.height;
        if (dim > maxDim) {
            maxDim = dim;
        }
    }
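
    // Worked example for the selection above (hypothetical streams): with HORIZONTAL cropping
    // and requested streams 1920x1080 (16:9) and 640x480 (4:3), desiredAr becomes 16/9 (the
    // widest requested ratio) and maxDim becomes 1080 (the largest uncropped height), so the
    // search below wants the smallest supported format with height >= 1080 and an aspect ratio
    // close to 16:9.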

    // Find the smallest format that matches the desired aspect ratio and is wide/high enough
    SupportedV4L2Format v4l2Fmt{.width = 0, .height = 0};
    for (const auto& fmt : mSupportedFormats) {
        uint32_t dim = (mCroppingType == VERTICAL) ? fmt.width : fmt.height;
        if (dim >= maxDim) {
            float aspectRatio = ASPECT_RATIO(fmt);
            if (isAspectRatioClose(aspectRatio, desiredAr)) {
                v4l2Fmt = fmt;
                // since mSupportedFormats is sorted by width then height, the first matching fmt
                // will be the smallest one with matching aspect ratio
                break;
            }
        }
    }

    if (v4l2Fmt.width == 0) {
        // Cannot find an exact aspect ratio candidate, try to find a close one
        for (const auto& fmt : mSupportedFormats) {
            uint32_t dim = (mCroppingType == VERTICAL) ? fmt.width : fmt.height;
            if (dim >= maxDim) {
                float aspectRatio = ASPECT_RATIO(fmt);
                if ((mCroppingType == VERTICAL && aspectRatio < desiredAr) ||
                    (mCroppingType == HORIZONTAL && aspectRatio > desiredAr)) {
                    v4l2Fmt = fmt;
                    break;
                }
            }
        }
    }

    if (v4l2Fmt.width == 0) {
        ALOGE("%s: unable to find a resolution matching (%s at least %d, aspect ratio %f)",
              __FUNCTION__, (mCroppingType == VERTICAL) ? "width" : "height", maxDim, desiredAr);
        return fromStatus(Status::ILLEGAL_ARGUMENT);
    }

    if (configureV4l2StreamLocked(v4l2Fmt) != 0) {
        ALOGE("V4L configuration failed!, format:%c%c%c%c, w %d, h %d", v4l2Fmt.fourcc & 0xFF,
              (v4l2Fmt.fourcc >> 8) & 0xFF, (v4l2Fmt.fourcc >> 16) & 0xFF,
              (v4l2Fmt.fourcc >> 24) & 0xFF, v4l2Fmt.width, v4l2Fmt.height);
        return fromStatus(Status::INTERNAL_ERROR);
    }

    Size v4lSize = {v4l2Fmt.width, v4l2Fmt.height};
    Size thumbSize{0, 0};
    camera_metadata_ro_entry entry =
            mCameraCharacteristics.find(ANDROID_JPEG_AVAILABLE_THUMBNAIL_SIZES);
    for (uint32_t i = 0; i < entry.count; i += 2) {
        Size sz{entry.data.i32[i], entry.data.i32[i + 1]};
        if (sz.width * sz.height > thumbSize.width * thumbSize.height) {
            thumbSize = sz;
        }
    }

    if (thumbSize.width * thumbSize.height == 0) {
        ALOGE("%s: non-zero thumbnail size not available", __FUNCTION__);
        return fromStatus(Status::INTERNAL_ERROR);
    }

    mBlobBufferSize = blobBufferSize;
    status = mOutputThread->allocateIntermediateBuffers(
            v4lSize, mMaxThumbResolution, in_requestedConfiguration.streams, blobBufferSize);
    if (status != Status::OK) {
        ALOGE("%s: allocating intermediate buffers failed!", __FUNCTION__);
        return fromStatus(status);
    }

    std::vector<HalStream>& out = *_aidl_return;
    out.resize(in_requestedConfiguration.streams.size());
    for (size_t i = 0; i < in_requestedConfiguration.streams.size(); i++) {
        out[i].overrideDataSpace = in_requestedConfiguration.streams[i].dataSpace;
        out[i].id = in_requestedConfiguration.streams[i].id;
        // TODO: double check whether we should add these CAMERA usage flags
        mStreamMap[in_requestedConfiguration.streams[i].id].usage = out[i].producerUsage =
                static_cast<BufferUsage>(((int64_t)in_requestedConfiguration.streams[i].usage) |
                                         ((int64_t)BufferUsage::CPU_WRITE_OFTEN) |
                                         ((int64_t)BufferUsage::CAMERA_OUTPUT));
        out[i].consumerUsage = static_cast<BufferUsage>(0);
        out[i].maxBuffers = static_cast<int32_t>(mV4L2BufferCount);

        switch (in_requestedConfiguration.streams[i].format) {
            case PixelFormat::BLOB:
            case PixelFormat::YCBCR_420_888:
            case PixelFormat::YV12:  // Used by SurfaceTexture
            case PixelFormat::Y16:
                // No override
                out[i].overrideFormat = in_requestedConfiguration.streams[i].format;
                break;
            case PixelFormat::IMPLEMENTATION_DEFINED:
                // Implementation Defined
                // This should look at the Stream's dataspace flag to determine the format or
                // leave it as is if the rest of the system knows how to handle a private format.
                // To keep this HAL generic, this is being overridden to YUV420.
                out[i].overrideFormat = PixelFormat::YCBCR_420_888;
                // Save overridden format in mStreamMap
                mStreamMap[in_requestedConfiguration.streams[i].id].format = out[i].overrideFormat;
                break;
            default:
                ALOGE("%s: unsupported format 0x%x", __FUNCTION__,
                      in_requestedConfiguration.streams[i].format);
                return fromStatus(Status::ILLEGAL_ARGUMENT);
        }
    }

    mFirstRequest = true;
    mLastStreamConfigCounter = in_requestedConfiguration.streamConfigCounter;
    return fromStatus(Status::OK);
}

ScopedAStatus ExternalCameraDeviceSession::flush() {
    ATRACE_CALL();
    Mutex::Autolock _il(mInterfaceLock);
    Status status = initStatus();
    if (status != Status::OK) {
        return fromStatus(status);
    }
    mOutputThread->flush();
    return fromStatus(Status::OK);
}

ScopedAStatus ExternalCameraDeviceSession::getCaptureRequestMetadataQueue(
        MQDescriptor<int8_t, SynchronizedReadWrite>* _aidl_return) {
    Mutex::Autolock _il(mInterfaceLock);
    *_aidl_return = mRequestMetadataQueue->dupeDesc();
    return fromStatus(Status::OK);
}

ScopedAStatus ExternalCameraDeviceSession::getCaptureResultMetadataQueue(
        MQDescriptor<int8_t, SynchronizedReadWrite>* _aidl_return) {
    Mutex::Autolock _il(mInterfaceLock);
    *_aidl_return = mResultMetadataQueue->dupeDesc();
    return fromStatus(Status::OK);
}

ScopedAStatus ExternalCameraDeviceSession::isReconfigurationRequired(
        const CameraMetadata& in_oldSessionParams, const CameraMetadata& in_newSessionParams,
        bool* _aidl_return) {
    // reconfiguration required if there is any change in the session params
    *_aidl_return = in_oldSessionParams != in_newSessionParams;
    return fromStatus(Status::OK);
}

ScopedAStatus ExternalCameraDeviceSession::processCaptureRequest(
        const std::vector<CaptureRequest>& in_requests,
        const std::vector<BufferCache>& in_cachesToRemove, int32_t* _aidl_return) {
    Mutex::Autolock _il(mInterfaceLock);
    updateBufferCaches(in_cachesToRemove);

    int32_t& numRequestProcessed = *_aidl_return;
    numRequestProcessed = 0;
    Status s = Status::OK;
    for (size_t i = 0; i < in_requests.size(); i++, numRequestProcessed++) {
        s = processOneCaptureRequest(in_requests[i]);
        if (s != Status::OK) {
            break;
        }
    }

    return fromStatus(s);
}

Status ExternalCameraDeviceSession::processOneCaptureRequest(const CaptureRequest& request) {
    ATRACE_CALL();
    Status status = initStatus();
    if (status != Status::OK) {
        return status;
    }

    if (request.inputBuffer.streamId != -1) {
        ALOGE("%s: external camera does not support reprocessing!", __FUNCTION__);
        return Status::ILLEGAL_ARGUMENT;
    }

    Mutex::Autolock _l(mLock);
    if (!mV4l2Streaming) {
        ALOGE("%s: cannot process request in streamOff state!", __FUNCTION__);
        return Status::INTERNAL_ERROR;
    }

    const camera_metadata_t* rawSettings = nullptr;
    bool converted;
    CameraMetadata settingsFmq;  // settings from FMQ

    if (request.fmqSettingsSize > 0) {
        // non-blocking read; client must write metadata before calling
        // processOneCaptureRequest
        settingsFmq.metadata.resize(request.fmqSettingsSize);
        bool read = mRequestMetadataQueue->read(
                reinterpret_cast<int8_t*>(settingsFmq.metadata.data()), request.fmqSettingsSize);
        if (read) {
            converted = convertFromAidl(settingsFmq, &rawSettings);
        } else {
            ALOGE("%s: capture request settings metadata couldn't be read from fmq!",
                  __FUNCTION__);
            converted = false;
        }
    } else {
        converted = convertFromAidl(request.settings, &rawSettings);
    }

    if (converted && rawSettings != nullptr) {
        mLatestReqSetting = rawSettings;
    }

    if (!converted) {
        ALOGE("%s: capture request settings metadata is corrupt!", __FUNCTION__);
        return Status::ILLEGAL_ARGUMENT;
    }

    if (mFirstRequest && rawSettings == nullptr) {
        ALOGE("%s: capture request settings must not be null for first request!", __FUNCTION__);
        return Status::ILLEGAL_ARGUMENT;
    }

    std::vector<buffer_handle_t*> allBufPtrs;
    std::vector<int> allFences;
    size_t numOutputBufs = request.outputBuffers.size();

    if (numOutputBufs == 0) {
        ALOGE("%s: capture request must have at least one output buffer!", __FUNCTION__);
        return Status::ILLEGAL_ARGUMENT;
    }

    camera_metadata_entry fpsRange = mLatestReqSetting.find(ANDROID_CONTROL_AE_TARGET_FPS_RANGE);
    if (fpsRange.count == 2) {
        double requestFpsMax = fpsRange.data.i32[1];
        double closestFps = 0.0;
        double fpsError = 1000.0;
        bool fpsSupported = false;
        for (const auto& fr : mV4l2StreamingFmt.frameRates) {
            double f = fr.getFramesPerSecond();
            if (std::fabs(requestFpsMax - f) < 1.0) {
                fpsSupported = true;
                break;
            }
            if (std::fabs(requestFpsMax - f) < fpsError) {
                fpsError = std::fabs(requestFpsMax - f);
                closestFps = f;
            }
        }
        if (!fpsSupported) {
            /* This can happen in a few scenarios:
             * 1. The application is sending an FPS range not supported by the configured outputs.
             * 2. The application is sending a valid FPS range for all configured outputs, but
             *    the selected V4L2 size can only run at a slower speed. This should be very rare
             *    though: for this to happen a sensor needs to support at least 3 different aspect
             *    ratio outputs, and when (at least) two outputs are both not the main aspect
             *    ratio of the webcam, a third size that's larger might be picked and runs into
             *    this issue.
             */
            ALOGW("%s: cannot reach fps %d! Will do %f instead", __FUNCTION__,
                  fpsRange.data.i32[1], closestFps);
            requestFpsMax = closestFps;
        }

        if (requestFpsMax != mV4l2StreamingFps) {
            {
                std::unique_lock<std::mutex> lk(mV4l2BufferLock);
                while (mNumDequeuedV4l2Buffers != 0) {
                    // Wait until pipeline is idle before reconfiguring the stream
                    int waitRet = waitForV4L2BufferReturnLocked(lk);
                    if (waitRet != 0) {
                        ALOGE("%s: wait for pipeline idle failed!", __FUNCTION__);
                        return Status::INTERNAL_ERROR;
                    }
                }
            }
            configureV4l2StreamLocked(mV4l2StreamingFmt, requestFpsMax);
        }
    }
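
    // Worked example for the FPS matching above (hypothetical rates): with a requested AE
    // target FPS range of [30, 60] and mV4l2StreamingFmt supporting only {15, 30}, no rate is
    // within 1.0 of 60, so fpsSupported stays false, closestFps resolves to 30, and the V4L2
    // stream is reconfigured to run at 30fps.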

    status = importRequestLocked(request, allBufPtrs, allFences);
    if (status != Status::OK) {
        return status;
    }

    nsecs_t shutterTs = 0;
    std::unique_ptr<V4L2Frame> frameIn = dequeueV4l2FrameLocked(&shutterTs);
    if (frameIn == nullptr) {
        ALOGE("%s: V4L2 deque frame failed!", __FUNCTION__);
        return Status::INTERNAL_ERROR;
    }

    std::shared_ptr<HalRequest> halReq = std::make_shared<HalRequest>();
    halReq->frameNumber = request.frameNumber;
    halReq->setting = mLatestReqSetting;
    halReq->frameIn = std::move(frameIn);
    halReq->shutterTs = shutterTs;
    halReq->buffers.resize(numOutputBufs);
    for (size_t i = 0; i < numOutputBufs; i++) {
        HalStreamBuffer& halBuf = halReq->buffers[i];
        int streamId = halBuf.streamId = request.outputBuffers[i].streamId;
        halBuf.bufferId = request.outputBuffers[i].bufferId;
        const Stream& stream = mStreamMap[streamId];
        halBuf.width = stream.width;
        halBuf.height = stream.height;
        halBuf.format = stream.format;
        halBuf.usage = stream.usage;
        halBuf.bufPtr = allBufPtrs[i];
        halBuf.acquireFence = allFences[i];
        halBuf.fenceTimeout = false;
    }
    {
        std::lock_guard<std::mutex> lk(mInflightFramesLock);
        mInflightFrames.insert(halReq->frameNumber);
    }
    // Send request to OutputThread for the rest of processing
    mOutputThread->submitRequest(halReq);
    mFirstRequest = false;
    return Status::OK;
}

ScopedAStatus ExternalCameraDeviceSession::signalStreamFlush(
        const std::vector<int32_t>& /*in_streamIds*/, int32_t in_streamConfigCounter) {
    {
        Mutex::Autolock _l(mLock);
        if (in_streamConfigCounter < mLastStreamConfigCounter) {
            // stale call. new streams have been configured since this call was issued.
            // Do nothing.
            return fromStatus(Status::OK);
        }
    }

    // TODO: implement if needed.
    return fromStatus(Status::OK);
}

ScopedAStatus ExternalCameraDeviceSession::switchToOffline(
        const std::vector<int32_t>& in_streamsToKeep,
        CameraOfflineSessionInfo* out_offlineSessionInfo,
        std::shared_ptr<ICameraOfflineSession>* _aidl_return) {
    std::vector<NotifyMsg> msgs;
    std::vector<CaptureResult> results;
    CameraOfflineSessionInfo info;
    std::shared_ptr<ICameraOfflineSession> session;
    Status st = switchToOffline(in_streamsToKeep, &msgs, &results, &info, &session);

    mCallback->notify(msgs);
    invokeProcessCaptureResultCallback(results, /* tryWriteFmq= */ true);
    freeReleaseFences(results);

    // setup return values
    *out_offlineSessionInfo = info;
    *_aidl_return = session;
    return fromStatus(st);
}

Status ExternalCameraDeviceSession::switchToOffline(
        const std::vector<int32_t>& offlineStreams, std::vector<NotifyMsg>* msgs,
        std::vector<CaptureResult>* results, CameraOfflineSessionInfo* info,
        std::shared_ptr<ICameraOfflineSession>* session) {
    ATRACE_CALL();
    if (offlineStreams.size() > 1) {
        ALOGE("%s: more than one offline stream is not supported", __FUNCTION__);
        return Status::ILLEGAL_ARGUMENT;
    }

    if (msgs == nullptr || results == nullptr || info == nullptr || session == nullptr) {
        ALOGE("%s, output arguments (%p, %p, %p, %p) must not be null", __FUNCTION__, msgs,
              results, info, session);
        return Status::ILLEGAL_ARGUMENT;
    }

    Mutex::Autolock _il(mInterfaceLock);
    Status status = initStatus();
    if (status != Status::OK) {
        return status;
    }

    Mutex::Autolock _l(mLock);
    for (auto streamId : offlineStreams) {
        if (!supportOfflineLocked(streamId)) {
            return Status::ILLEGAL_ARGUMENT;
        }
    }

    // pause output thread and get all remaining inflight requests
    auto remainingReqs = mOutputThread->switchToOffline();
    std::vector<std::shared_ptr<HalRequest>> halReqs;

    // Send out buffer/request error for remaining requests and filter requests
    // to be handled in offline mode
    for (auto& halReq : remainingReqs) {
        bool dropReq = canDropRequest(offlineStreams, halReq);
        if (dropReq) {
            // Request is dropped completely. Just send request error and
            // there is no need to send the request to offline session
            processCaptureRequestError(halReq, msgs, results);
            continue;
        }

        // All requests that reach here must have at least one offline stream output
        NotifyMsg shutter;
        aidl::android::hardware::camera::device::ShutterMsg shutterMsg = {
                .frameNumber = static_cast<int32_t>(halReq->frameNumber),
                .timestamp = halReq->shutterTs};
        shutter.set<NotifyMsg::Tag::shutter>(shutterMsg);
        msgs->push_back(shutter);

        std::vector<HalStreamBuffer> offlineBuffers;
        for (const auto& buffer : halReq->buffers) {
            bool dropBuffer = true;
            for (auto offlineStreamId : offlineStreams) {
                if (buffer.streamId == offlineStreamId) {
                    dropBuffer = false;
                    break;
                }
            }
            if (dropBuffer) {
                aidl::android::hardware::camera::device::ErrorMsg errorMsg = {
                        .frameNumber = static_cast<int32_t>(halReq->frameNumber),
                        .errorStreamId = buffer.streamId,
                        .errorCode = ErrorCode::ERROR_BUFFER};

                NotifyMsg error;
                error.set<NotifyMsg::Tag::error>(errorMsg);
                msgs->push_back(error);

                results->push_back({
                        .frameNumber = static_cast<int32_t>(halReq->frameNumber),
                        .outputBuffers = {},
                        .inputBuffer = {.streamId = -1},
                        .partialResult = 0,  // buffer only result
                });

                CaptureResult& result = results->back();
                result.outputBuffers.resize(1);
                StreamBuffer& outputBuffer = result.outputBuffers[0];
                outputBuffer.streamId = buffer.streamId;
                outputBuffer.bufferId = buffer.bufferId;
                outputBuffer.status = BufferStatus::ERROR;
                if (buffer.acquireFence >= 0) {
                    native_handle_t* handle = native_handle_create(/*numFds*/ 1, /*numInts*/ 0);
                    handle->data[0] = buffer.acquireFence;
                    outputBuffer.releaseFence = android::dupToAidl(handle);
                    native_handle_delete(handle);
                }
            } else {
                offlineBuffers.push_back(buffer);
            }
        }
        halReq->buffers = offlineBuffers;
        halReqs.push_back(halReq);
    }

    // convert hal requests to offline requests
    std::deque<std::shared_ptr<HalRequest>> offlineReqs(halReqs.size());
    size_t i = 0;
    for (auto& v4lReq : halReqs) {
        offlineReqs[i] = std::make_shared<HalRequest>();
        offlineReqs[i]->frameNumber = v4lReq->frameNumber;
        offlineReqs[i]->setting = v4lReq->setting;
        offlineReqs[i]->shutterTs = v4lReq->shutterTs;
        offlineReqs[i]->buffers = v4lReq->buffers;
        std::shared_ptr<V4L2Frame> v4l2Frame(static_cast<V4L2Frame*>(v4lReq->frameIn.get()));
        offlineReqs[i]->frameIn = std::make_shared<AllocatedV4L2Frame>(v4l2Frame);
        i++;
        // enqueue V4L2 frame
        enqueueV4l2Frame(v4l2Frame);
    }

    // Collect buffer caches/streams
    std::vector<Stream> streamInfos;
    streamInfos.reserve(offlineStreams.size());
    std::map<int, CirculatingBuffers> circulatingBuffers;
    {
        Mutex::Autolock _cbsl(mCbsLock);
        for (auto streamId : offlineStreams) {
            circulatingBuffers[streamId] = mCirculatingBuffers.at(streamId);
            mCirculatingBuffers.erase(streamId);
            streamInfos.push_back(mStreamMap.at(streamId));
            mStreamMap.erase(streamId);
        }
    }

    fillOfflineSessionInfo(offlineStreams, offlineReqs, circulatingBuffers, info);
    // create the offline session object
    bool afTrigger;
    {
        std::lock_guard<std::mutex> _lk(mAfTriggerLock);
        afTrigger = mAfTrigger;
    }

    std::shared_ptr<ExternalCameraOfflineSession> sessionImpl =
            ndk::SharedRefBase::make<ExternalCameraOfflineSession>(
                    mCroppingType, mCameraCharacteristics, mCameraId, mExifMake, mExifModel,
                    mBlobBufferSize, afTrigger, streamInfos, offlineReqs, circulatingBuffers);

    bool initFailed = sessionImpl->initialize();
    if (initFailed) {
        ALOGE("%s: offline session initialize failed!", __FUNCTION__);
        return Status::INTERNAL_ERROR;
    }

    // cleanup stream and buffer caches
    {
        Mutex::Autolock _cbsl(mCbsLock);
        for (auto pair : mStreamMap) {
            cleanupBuffersLocked(/*Stream ID*/ pair.first);
        }
        mCirculatingBuffers.clear();
    }
    mStreamMap.clear();

    // update inflight records
    {
        std::lock_guard<std::mutex> _lk(mInflightFramesLock);
        mInflightFrames.clear();
    }

    // stop v4l2 streaming
    if (v4l2StreamOffLocked() != 0) {
        ALOGE("%s: stop V4L2 streaming failed!", __FUNCTION__);
        return Status::INTERNAL_ERROR;
    }

    // No need to return a session if there are no offline requests left
    if (!offlineReqs.empty()) {
        *session = sessionImpl;
    } else {
        *session = nullptr;
    }

    return Status::OK;
}

#define ARRAY_SIZE(a) (sizeof(a) / sizeof(a[0]))
#define UPDATE(md, tag, data, size)               \
    do {                                          \
        if ((md).update((tag), (data), (size))) { \
            ALOGE("Update " #tag " failed!");     \
            return BAD_VALUE;                     \
        }                                         \
    } while (0)
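
// Example usage (as in initDefaultRequests() below): inside a function returning status_t,
//     const uint8_t aeMode = ANDROID_CONTROL_AE_MODE_ON;
//     UPDATE(md, ANDROID_CONTROL_AE_MODE, &aeMode, 1);
// expands to md.update(...) plus an early `return BAD_VALUE` on failure, so the macro is only
// usable where returning BAD_VALUE is well-formed.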

status_t ExternalCameraDeviceSession::initDefaultRequests() {
    common::V1_0::helper::CameraMetadata md;

    const uint8_t aberrationMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF;
    UPDATE(md, ANDROID_COLOR_CORRECTION_ABERRATION_MODE, &aberrationMode, 1);

    const int32_t exposureCompensation = 0;
    UPDATE(md, ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION, &exposureCompensation, 1);

    const uint8_t videoStabilizationMode = ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_OFF;
    UPDATE(md, ANDROID_CONTROL_VIDEO_STABILIZATION_MODE, &videoStabilizationMode, 1);

    const uint8_t awbMode = ANDROID_CONTROL_AWB_MODE_AUTO;
    UPDATE(md, ANDROID_CONTROL_AWB_MODE, &awbMode, 1);

    const uint8_t aeMode = ANDROID_CONTROL_AE_MODE_ON;
    UPDATE(md, ANDROID_CONTROL_AE_MODE, &aeMode, 1);

    const uint8_t aePrecaptureTrigger = ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER_IDLE;
    UPDATE(md, ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER, &aePrecaptureTrigger, 1);

    const uint8_t afMode = ANDROID_CONTROL_AF_MODE_AUTO;
    UPDATE(md, ANDROID_CONTROL_AF_MODE, &afMode, 1);

    const uint8_t afTrigger = ANDROID_CONTROL_AF_TRIGGER_IDLE;
    UPDATE(md, ANDROID_CONTROL_AF_TRIGGER, &afTrigger, 1);

    const uint8_t sceneMode = ANDROID_CONTROL_SCENE_MODE_DISABLED;
    UPDATE(md, ANDROID_CONTROL_SCENE_MODE, &sceneMode, 1);

    const uint8_t effectMode = ANDROID_CONTROL_EFFECT_MODE_OFF;
    UPDATE(md, ANDROID_CONTROL_EFFECT_MODE, &effectMode, 1);

    const uint8_t flashMode = ANDROID_FLASH_MODE_OFF;
    UPDATE(md, ANDROID_FLASH_MODE, &flashMode, 1);

    const int32_t thumbnailSize[] = {240, 180};
    UPDATE(md, ANDROID_JPEG_THUMBNAIL_SIZE, thumbnailSize, 2);

    const uint8_t jpegQuality = 90;
    UPDATE(md, ANDROID_JPEG_QUALITY, &jpegQuality, 1);
    UPDATE(md, ANDROID_JPEG_THUMBNAIL_QUALITY, &jpegQuality, 1);

    const int32_t jpegOrientation = 0;
    UPDATE(md, ANDROID_JPEG_ORIENTATION, &jpegOrientation, 1);

    const uint8_t oisMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
    UPDATE(md, ANDROID_LENS_OPTICAL_STABILIZATION_MODE, &oisMode, 1);

    const uint8_t nrMode = ANDROID_NOISE_REDUCTION_MODE_OFF;
    UPDATE(md, ANDROID_NOISE_REDUCTION_MODE, &nrMode, 1);

    const int32_t testPatternModes = ANDROID_SENSOR_TEST_PATTERN_MODE_OFF;
    UPDATE(md, ANDROID_SENSOR_TEST_PATTERN_MODE, &testPatternModes, 1);

    const uint8_t fdMode = ANDROID_STATISTICS_FACE_DETECT_MODE_OFF;
    UPDATE(md, ANDROID_STATISTICS_FACE_DETECT_MODE, &fdMode, 1);

    const uint8_t hotpixelMode = ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_OFF;
    UPDATE(md, ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE, &hotpixelMode, 1);

    bool support30Fps = false;
    int32_t maxFps = std::numeric_limits<int32_t>::min();
    for (const auto& supportedFormat : mSupportedFormats) {
        for (const auto& fr : supportedFormat.frameRates) {
            int32_t framerateInt = static_cast<int32_t>(fr.getFramesPerSecond());
            if (maxFps < framerateInt) {
                maxFps = framerateInt;
            }
            if (framerateInt == 30) {
                support30Fps = true;
                break;
            }
        }
        if (support30Fps) {
            break;
        }
    }

    int32_t defaultFramerate = support30Fps ? 30 : maxFps;
    int32_t defaultFpsRange[] = {defaultFramerate / 2, defaultFramerate};
    UPDATE(md, ANDROID_CONTROL_AE_TARGET_FPS_RANGE, defaultFpsRange, ARRAY_SIZE(defaultFpsRange));
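
    // e.g. a webcam that supports 30fps gets a default AE target FPS range of [15, 30]; one
    // that tops out at 15fps gets [7, 15] (integer division of defaultFramerate / 2).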

    uint8_t antibandingMode = ANDROID_CONTROL_AE_ANTIBANDING_MODE_AUTO;
    UPDATE(md, ANDROID_CONTROL_AE_ANTIBANDING_MODE, &antibandingMode, 1);

    const uint8_t controlMode = ANDROID_CONTROL_MODE_AUTO;
    UPDATE(md, ANDROID_CONTROL_MODE, &controlMode, 1);

    for (const auto& type : ndk::enum_range<RequestTemplate>()) {
        common::V1_0::helper::CameraMetadata mdCopy = md;
        uint8_t intent = ANDROID_CONTROL_CAPTURE_INTENT_PREVIEW;
        switch (type) {
            case RequestTemplate::PREVIEW:
                intent = ANDROID_CONTROL_CAPTURE_INTENT_PREVIEW;
                break;
            case RequestTemplate::STILL_CAPTURE:
                intent = ANDROID_CONTROL_CAPTURE_INTENT_STILL_CAPTURE;
                break;
            case RequestTemplate::VIDEO_RECORD:
                intent = ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_RECORD;
                break;
            case RequestTemplate::VIDEO_SNAPSHOT:
                intent = ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_SNAPSHOT;
                break;
            default:
                ALOGV("%s: unsupported RequestTemplate type %d", __FUNCTION__, type);
                continue;
        }
        UPDATE(mdCopy, ANDROID_CONTROL_CAPTURE_INTENT, &intent, 1);
        camera_metadata_t* mdPtr = mdCopy.release();
        uint8_t* rawMd = reinterpret_cast<uint8_t*>(mdPtr);
        CameraMetadata aidlMd;
        aidlMd.metadata.assign(rawMd, rawMd + get_camera_metadata_size(mdPtr));
        mDefaultRequests[type] = aidlMd;
        free_camera_metadata(mdPtr);
    }
    return OK;
}

status_t ExternalCameraDeviceSession::fillCaptureResult(common::V1_0::helper::CameraMetadata& md,
                                                        nsecs_t timestamp) {
    bool afTrigger = false;
    {
        std::lock_guard<std::mutex> lk(mAfTriggerLock);
        afTrigger = mAfTrigger;
        if (md.exists(ANDROID_CONTROL_AF_TRIGGER)) {
            camera_metadata_entry entry = md.find(ANDROID_CONTROL_AF_TRIGGER);
            if (entry.data.u8[0] == ANDROID_CONTROL_AF_TRIGGER_START) {
                mAfTrigger = afTrigger = true;
            } else if (entry.data.u8[0] == ANDROID_CONTROL_AF_TRIGGER_CANCEL) {
                mAfTrigger = afTrigger = false;
            }
        }
    }

    // For USB cameras, the camera handles everything and we have no control over AF.
    // We simply fake the AF metadata based on the request received here.
    uint8_t afState;
    if (afTrigger) {
        afState = ANDROID_CONTROL_AF_STATE_FOCUSED_LOCKED;
    } else {
        afState = ANDROID_CONTROL_AF_STATE_INACTIVE;
    }
    UPDATE(md, ANDROID_CONTROL_AF_STATE, &afState, 1);

    camera_metadata_ro_entry activeArraySize =
            mCameraCharacteristics.find(ANDROID_SENSOR_INFO_ACTIVE_ARRAY_SIZE);

    return fillCaptureResultCommon(md, timestamp, activeArraySize);
}

int ExternalCameraDeviceSession::configureV4l2StreamLocked(const SupportedV4L2Format& v4l2Fmt,
                                                           double requestFps) {
    ATRACE_CALL();
    int ret = v4l2StreamOffLocked();
    if (ret != OK) {
        ALOGE("%s: stop v4l2 streaming failed: ret %d", __FUNCTION__, ret);
        return ret;
    }

    // VIDIOC_S_FMT w/h/fmt
    v4l2_format fmt;
    fmt.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    fmt.fmt.pix.width = v4l2Fmt.width;
    fmt.fmt.pix.height = v4l2Fmt.height;
    fmt.fmt.pix.pixelformat = v4l2Fmt.fourcc;

    {
        int numAttempt = 0;
        do {
            ret = TEMP_FAILURE_RETRY(ioctl(mV4l2Fd.get(), VIDIOC_S_FMT, &fmt));
            if (numAttempt == MAX_RETRY) {
                break;
            }
            numAttempt++;
            if (ret < 0) {
                ALOGW("%s: VIDIOC_S_FMT failed, wait 33ms and try again", __FUNCTION__);
                usleep(IOCTL_RETRY_SLEEP_US);  // sleep and try again
            }
        } while (ret < 0);
        if (ret < 0) {
            ALOGE("%s: S_FMT ioctl failed: %s", __FUNCTION__, strerror(errno));
            return -errno;
        }
    }

    if (v4l2Fmt.width != fmt.fmt.pix.width || v4l2Fmt.height != fmt.fmt.pix.height ||
        v4l2Fmt.fourcc != fmt.fmt.pix.pixelformat) {
        ALOGE("%s: S_FMT expect %c%c%c%c %dx%d, got %c%c%c%c %dx%d instead!", __FUNCTION__,
              v4l2Fmt.fourcc & 0xFF, (v4l2Fmt.fourcc >> 8) & 0xFF, (v4l2Fmt.fourcc >> 16) & 0xFF,
              (v4l2Fmt.fourcc >> 24) & 0xFF, v4l2Fmt.width, v4l2Fmt.height,
              fmt.fmt.pix.pixelformat & 0xFF, (fmt.fmt.pix.pixelformat >> 8) & 0xFF,
              (fmt.fmt.pix.pixelformat >> 16) & 0xFF, (fmt.fmt.pix.pixelformat >> 24) & 0xFF,
              fmt.fmt.pix.width, fmt.fmt.pix.height);
        return -EINVAL;
    }

    uint32_t bufferSize = fmt.fmt.pix.sizeimage;
    ALOGI("%s: V4L2 buffer size is %d", __FUNCTION__, bufferSize);
    uint32_t expectedMaxBufferSize = kMaxBytesPerPixel * fmt.fmt.pix.width * fmt.fmt.pix.height;
    if ((bufferSize == 0) || (bufferSize > expectedMaxBufferSize)) {
        ALOGE("%s: V4L2 buffer size: %u looks invalid. Expected maximum size: %u", __FUNCTION__,
              bufferSize, expectedMaxBufferSize);
        return -EINVAL;
    }
    mMaxV4L2BufferSize = bufferSize;

    const double kDefaultFps = 30.0;
    double fps = std::numeric_limits<double>::max();
    if (requestFps != 0.0) {
        fps = requestFps;
    } else {
        double maxFps = -1.0;
        // Try to pick the slowest fps that is at least 30
        for (const auto& fr : v4l2Fmt.frameRates) {
            double f = fr.getFramesPerSecond();
            if (maxFps < f) {
                maxFps = f;
            }
            if (f >= kDefaultFps && f < fps) {
                fps = f;
            }
        }
        // No fps >= 30 found, use the highest fps available within supported formats.
        if (fps == std::numeric_limits<double>::max()) {
            fps = maxFps;
        }
    }
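
    // Worked example (hypothetical rates): for frame rates {24, 30, 60} the loop above picks
    // fps = 30 (the slowest rate that is at least kDefaultFps); for {7.5, 15} nothing reaches
    // 30, so fps falls back to maxFps = 15.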

    int fpsRet = setV4l2FpsLocked(fps);
    if (fpsRet != 0 && fpsRet != -EINVAL) {
        ALOGE("%s: set fps failed: %s", __FUNCTION__, strerror(fpsRet));
        return fpsRet;
    }

    uint32_t v4lBufferCount = (fps >= kDefaultFps) ? mCfg.numVideoBuffers : mCfg.numStillBuffers;

    // VIDIOC_REQBUFS: create buffers
    v4l2_requestbuffers req_buffers{};
    req_buffers.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    req_buffers.memory = V4L2_MEMORY_MMAP;
    req_buffers.count = v4lBufferCount;
    if (TEMP_FAILURE_RETRY(ioctl(mV4l2Fd.get(), VIDIOC_REQBUFS, &req_buffers)) < 0) {
        ALOGE("%s: VIDIOC_REQBUFS failed: %s", __FUNCTION__, strerror(errno));
        return -errno;
    }

    // The driver may return more buffers than requested if it needs more to operate
    if (req_buffers.count < v4lBufferCount) {
        ALOGE("%s: VIDIOC_REQBUFS expected %d buffers, got %d instead", __FUNCTION__,
              v4lBufferCount, req_buffers.count);
        return NO_MEMORY;
    }

    // VIDIOC_QUERYBUF: get buffer offset in the V4L2 fd
    // VIDIOC_QBUF: send buffer to driver
    mV4L2BufferCount = req_buffers.count;
    for (uint32_t i = 0; i < req_buffers.count; i++) {
        v4l2_buffer buffer = {
                .index = i, .type = V4L2_BUF_TYPE_VIDEO_CAPTURE, .memory = V4L2_MEMORY_MMAP};

        if (TEMP_FAILURE_RETRY(ioctl(mV4l2Fd.get(), VIDIOC_QUERYBUF, &buffer)) < 0) {
            ALOGE("%s: QUERYBUF %d failed: %s", __FUNCTION__, i, strerror(errno));
            return -errno;
        }

        if (TEMP_FAILURE_RETRY(ioctl(mV4l2Fd.get(), VIDIOC_QBUF, &buffer)) < 0) {
            ALOGE("%s: QBUF %d failed: %s", __FUNCTION__, i, strerror(errno));
            return -errno;
        }
    }

    {
        // VIDIOC_STREAMON: start streaming
        v4l2_buf_type capture_type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
        int numAttempt = 0;
        do {
            ret = TEMP_FAILURE_RETRY(ioctl(mV4l2Fd.get(), VIDIOC_STREAMON, &capture_type));
            if (numAttempt == MAX_RETRY) {
                break;
            }
            numAttempt++;
            if (ret < 0) {
                ALOGW("%s: VIDIOC_STREAMON failed, wait 33ms and try again", __FUNCTION__);
                usleep(IOCTL_RETRY_SLEEP_US);  // sleep 33ms and try again
            }
        } while (ret < 0);

        if (ret < 0) {
            ALOGE("%s: VIDIOC_STREAMON ioctl failed: %s", __FUNCTION__, strerror(errno));
            return -errno;
        }
    }

    // Swallow the first few frames after streamOn to account for bad frames from some devices
    for (int i = 0; i < kBadFramesAfterStreamOn; i++) {
        v4l2_buffer buffer{};
        buffer.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
        buffer.memory = V4L2_MEMORY_MMAP;
        if (TEMP_FAILURE_RETRY(ioctl(mV4l2Fd.get(), VIDIOC_DQBUF, &buffer)) < 0) {
            ALOGE("%s: DQBUF fails: %s", __FUNCTION__, strerror(errno));
            return -errno;
        }

        if (TEMP_FAILURE_RETRY(ioctl(mV4l2Fd.get(), VIDIOC_QBUF, &buffer)) < 0) {
            ALOGE("%s: QBUF index %d fails: %s", __FUNCTION__, buffer.index, strerror(errno));
            return -errno;
        }
    }

    ALOGI("%s: start V4L2 streaming %dx%d@%ffps", __FUNCTION__, v4l2Fmt.width, v4l2Fmt.height,
          fps);
    mV4l2StreamingFmt = v4l2Fmt;
    mV4l2Streaming = true;
    return OK;
}

std::unique_ptr<V4L2Frame> ExternalCameraDeviceSession::dequeueV4l2FrameLocked(nsecs_t* shutterTs) {
    ATRACE_CALL();
    std::unique_ptr<V4L2Frame> ret = nullptr;
    if (shutterTs == nullptr) {
        ALOGE("%s: shutterTs must not be null!", __FUNCTION__);
        return ret;
    }

    {
        std::unique_lock<std::mutex> lk(mV4l2BufferLock);
        if (mNumDequeuedV4l2Buffers == mV4L2BufferCount) {
            int waitRet = waitForV4L2BufferReturnLocked(lk);
            if (waitRet != 0) {
                return ret;
            }
        }
    }

    ATRACE_BEGIN("VIDIOC_DQBUF");
    v4l2_buffer buffer{};
    buffer.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    buffer.memory = V4L2_MEMORY_MMAP;
    if (TEMP_FAILURE_RETRY(ioctl(mV4l2Fd.get(), VIDIOC_DQBUF, &buffer)) < 0) {
        ALOGE("%s: DQBUF fails: %s", __FUNCTION__, strerror(errno));
        return ret;
    }
    ATRACE_END();

    if (buffer.index >= mV4L2BufferCount) {
        ALOGE("%s: Invalid buffer id: %d", __FUNCTION__, buffer.index);
        return ret;
    }

    if (buffer.flags & V4L2_BUF_FLAG_ERROR) {
        ALOGE("%s: v4l2 buf error! buf flag 0x%x", __FUNCTION__, buffer.flags);
        // TODO: try to dequeue again
    }

    if (buffer.bytesused > mMaxV4L2BufferSize) {
        ALOGE("%s: v4l2 buffer bytes used: %u maximum %u", __FUNCTION__, buffer.bytesused,
              mMaxV4L2BufferSize);
        return ret;
    }

    if (buffer.flags & V4L2_BUF_FLAG_TIMESTAMP_MONOTONIC) {
        // Ideally we should also check for V4L2_BUF_FLAG_TSTAMP_SRC_SOE, but even
        // V4L2_BUF_FLAG_TSTAMP_SRC_EOF is better than capturing a timestamp now
        *shutterTs = static_cast<nsecs_t>(buffer.timestamp.tv_sec) * 1000000000LL +
                     buffer.timestamp.tv_usec * 1000LL;
    } else {
        *shutterTs = systemTime(SYSTEM_TIME_MONOTONIC);
    }
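
    // e.g. a V4L2 timestamp of tv_sec = 2, tv_usec = 500000 maps to
    // 2 * 1000000000 + 500000 * 1000 = 2,500,000,000ns on the monotonic clock.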

    {
        std::lock_guard<std::mutex> lk(mV4l2BufferLock);
        mNumDequeuedV4l2Buffers++;
    }

    return std::make_unique<V4L2Frame>(mV4l2StreamingFmt.width, mV4l2StreamingFmt.height,
                                       mV4l2StreamingFmt.fourcc, buffer.index, mV4l2Fd.get(),
                                       buffer.bytesused, buffer.m.offset);
}

void ExternalCameraDeviceSession::enqueueV4l2Frame(const std::shared_ptr<V4L2Frame>& frame) {
    ATRACE_CALL();
    frame->unmap();
    ATRACE_BEGIN("VIDIOC_QBUF");
    v4l2_buffer buffer{};
    buffer.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    buffer.memory = V4L2_MEMORY_MMAP;
    buffer.index = frame->mBufferIndex;
    if (TEMP_FAILURE_RETRY(ioctl(mV4l2Fd.get(), VIDIOC_QBUF, &buffer)) < 0) {
        ALOGE("%s: QBUF index %d fails: %s", __FUNCTION__, frame->mBufferIndex, strerror(errno));
        return;
    }
    ATRACE_END();

    {
        std::lock_guard<std::mutex> lk(mV4l2BufferLock);
        mNumDequeuedV4l2Buffers--;
    }
    mV4L2BufferReturned.notify_one();
}
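
// Note: the mNumDequeuedV4l2Buffers decrement plus the notify_one() above is what unblocks
// waitForV4L2BufferReturnLocked(), which dequeueV4l2FrameLocked() uses to wait when every V4L2
// buffer is currently dequeued from the driver.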

bool ExternalCameraDeviceSession::isSupported(
        const Stream& stream, const std::vector<SupportedV4L2Format>& supportedFormats,
        const ExternalCameraConfig& devCfg) {
    Dataspace ds = stream.dataSpace;
    PixelFormat fmt = stream.format;
    uint32_t width = stream.width;
    uint32_t height = stream.height;
    // TODO: check usage flags

    if (stream.streamType != StreamType::OUTPUT) {
        ALOGE("%s: does not support non-output stream type", __FUNCTION__);
        return false;
    }

    if (stream.rotation != StreamRotation::ROTATION_0) {
        ALOGE("%s: does not support stream rotation", __FUNCTION__);
        return false;
    }

    switch (fmt) {
        case PixelFormat::BLOB:
            if (ds != Dataspace::JFIF) {
                ALOGI("%s: BLOB format does not support dataSpace %x", __FUNCTION__, ds);
                return false;
            }
            break;
        case PixelFormat::IMPLEMENTATION_DEFINED:
        case PixelFormat::YCBCR_420_888:
        case PixelFormat::YV12:
            // TODO: check what dataspace we can support here.
            // intentional no-ops.
            break;
        case PixelFormat::Y16:
            if (!devCfg.depthEnabled) {
                ALOGI("%s: Depth is not enabled", __FUNCTION__);
                return false;
            }
            if (!(static_cast<int32_t>(ds) & static_cast<int32_t>(Dataspace::DEPTH))) {
                ALOGI("%s: Y16 supports only dataSpace DEPTH", __FUNCTION__);
                return false;
            }
            break;
        default:
            ALOGI("%s: does not support format %x", __FUNCTION__, fmt);
            return false;
    }

    // Assume we can convert any V4L2 format to any supported output format for now, i.e.
    // ignore v4l2Fmt.fourcc for now. A more subtle check may be needed if we support more
    // V4L2 formats in the future.
    for (const auto& v4l2Fmt : supportedFormats) {
        if (width == v4l2Fmt.width && height == v4l2Fmt.height) {
            return true;
        }
    }
    ALOGI("%s: resolution %dx%d is not supported", __FUNCTION__, width, height);
    return false;
}
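
// Note the exact-match requirement above: a 1280x720 stream is rejected on a (hypothetical)
// webcam that only enumerates 640x480 and 1920x1080, even though the HAL could scale a larger
// capture down; advertised stream sizes must match an enumerated V4L2 frame size exactly.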

Status ExternalCameraDeviceSession::importRequestLocked(const CaptureRequest& request,
                                                        std::vector<buffer_handle_t*>& allBufPtrs,
                                                        std::vector<int>& allFences) {
    return importRequestLockedImpl(request, allBufPtrs, allFences);
}

Status ExternalCameraDeviceSession::importRequestLockedImpl(
        const CaptureRequest& request, std::vector<buffer_handle_t*>& allBufPtrs,
        std::vector<int>& allFences) {
    size_t numOutputBufs = request.outputBuffers.size();
    size_t numBufs = numOutputBufs;
    // Validate all I/O buffers
    std::vector<buffer_handle_t> allBufs;
    std::vector<uint64_t> allBufIds;
    allBufs.resize(numBufs);
    allBufIds.resize(numBufs);
    allBufPtrs.resize(numBufs);
    allFences.resize(numBufs);
    std::vector<int32_t> streamIds(numBufs);

    for (size_t i = 0; i < numOutputBufs; i++) {
        allBufs[i] = ::android::makeFromAidl(request.outputBuffers[i].buffer);
        allBufIds[i] = request.outputBuffers[i].bufferId;
        allBufPtrs[i] = &allBufs[i];
        streamIds[i] = request.outputBuffers[i].streamId;
    }

    {
        Mutex::Autolock _l(mCbsLock);
        for (size_t i = 0; i < numBufs; i++) {
            Status st = importBufferLocked(streamIds[i], allBufIds[i], allBufs[i], &allBufPtrs[i]);
            if (st != Status::OK) {
                // Detailed error logs printed in importBuffer
                return st;
            }
        }
    }

    // All buffers are imported. Now validate output buffer acquire fences
    for (size_t i = 0; i < numOutputBufs; i++) {
        if (!sHandleImporter.importFence(
                    ::android::makeFromAidl(request.outputBuffers[i].acquireFence),
                    allFences[i])) {
            ALOGE("%s: output buffer %zu acquire fence is invalid", __FUNCTION__, i);
            cleanupInflightFences(allFences, i);
            return Status::INTERNAL_ERROR;
        }
    }
    return Status::OK;
}

Status ExternalCameraDeviceSession::importBuffer(int32_t streamId, uint64_t bufId,
                                                 buffer_handle_t buf,
                                                 /*out*/ buffer_handle_t** outBufPtr) {
    Mutex::Autolock _l(mCbsLock);
    return importBufferLocked(streamId, bufId, buf, outBufPtr);
}

Status ExternalCameraDeviceSession::importBufferLocked(int32_t streamId, uint64_t bufId,
                                                       buffer_handle_t buf,
                                                       buffer_handle_t** outBufPtr) {
    return importBufferImpl(mCirculatingBuffers, sHandleImporter, streamId, bufId, buf, outBufPtr);
}

ScopedAStatus ExternalCameraDeviceSession::close() {
    closeImpl();
    return fromStatus(Status::OK);
}

void ExternalCameraDeviceSession::closeImpl() {
    Mutex::Autolock _il(mInterfaceLock);
    bool closed = isClosed();
    if (!closed) {
        closeOutputThread();
        closeBufferRequestThread();

        Mutex::Autolock _l(mLock);
        // free all buffers
        {
            Mutex::Autolock _cbsl(mCbsLock);
            for (auto pair : mStreamMap) {
                cleanupBuffersLocked(/*Stream ID*/ pair.first);
            }
        }
        v4l2StreamOffLocked();
        ALOGV("%s: closing V4L2 camera FD %d", __FUNCTION__, mV4l2Fd.get());
        mV4l2Fd.reset();
        mClosed = true;
    }
}

bool ExternalCameraDeviceSession::isClosed() {
    Mutex::Autolock _l(mLock);
    return mClosed;
}

ScopedAStatus ExternalCameraDeviceSession::repeatingRequestEnd(
        int32_t /*in_frameNumber*/, const std::vector<int32_t>& /*in_streamIds*/) {
    // TODO: Figure this one out.
    return fromStatus(Status::OK);
}

int ExternalCameraDeviceSession::v4l2StreamOffLocked() {
    if (!mV4l2Streaming) {
        return OK;
    }

    {
        std::lock_guard<std::mutex> lk(mV4l2BufferLock);
        if (mNumDequeuedV4l2Buffers != 0) {
            ALOGE("%s: there are %zu inflight V4L buffers", __FUNCTION__,
                  mNumDequeuedV4l2Buffers);
            return -1;
        }
    }
    mV4L2BufferCount = 0;

    // VIDIOC_STREAMOFF
    v4l2_buf_type capture_type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    if (TEMP_FAILURE_RETRY(ioctl(mV4l2Fd.get(), VIDIOC_STREAMOFF, &capture_type)) < 0) {
        ALOGE("%s: STREAMOFF failed: %s", __FUNCTION__, strerror(errno));
        return -errno;
    }

    // VIDIOC_REQBUFS: clear buffers
    v4l2_requestbuffers req_buffers{};
    req_buffers.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    req_buffers.memory = V4L2_MEMORY_MMAP;
    req_buffers.count = 0;
    if (TEMP_FAILURE_RETRY(ioctl(mV4l2Fd.get(), VIDIOC_REQBUFS, &req_buffers)) < 0) {
        ALOGE("%s: REQBUFS failed: %s", __FUNCTION__, strerror(errno));
        return -errno;
    }

    mV4l2Streaming = false;
    return OK;
}

int ExternalCameraDeviceSession::setV4l2FpsLocked(double fps) {
    // VIDIOC_G_PARM/VIDIOC_S_PARM: set fps
    v4l2_streamparm streamparm = {.type = V4L2_BUF_TYPE_VIDEO_CAPTURE};
    // The following line checks that the driver knows about framerate get/set.
    int ret = TEMP_FAILURE_RETRY(ioctl(mV4l2Fd.get(), VIDIOC_G_PARM, &streamparm));
    if (ret != 0) {
        if (errno == EINVAL) {
            ALOGW("%s: device does not support VIDIOC_G_PARM", __FUNCTION__);
        }
        return -errno;
    }
    // Now check if the device is able to accept a capture framerate set.
    if (!(streamparm.parm.capture.capability & V4L2_CAP_TIMEPERFRAME)) {
        ALOGW("%s: device does not support V4L2_CAP_TIMEPERFRAME", __FUNCTION__);
        return -EINVAL;
    }

    // fps is a float, approximate it by a fraction.
    const int kFrameRatePrecision = 10000;
    streamparm.parm.capture.timeperframe.numerator = kFrameRatePrecision;
    streamparm.parm.capture.timeperframe.denominator = (fps * kFrameRatePrecision);
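
    // e.g. (hypothetical) fps = 29.97 yields timeperframe = 10000/299700 of a second, about
    // 33.37ms per frame; the readback below then verifies the driver landed within 1.0fps of
    // the request.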
1511
1512 if (TEMP_FAILURE_RETRY(ioctl(mV4l2Fd.get(), VIDIOC_S_PARM, &streamparm)) < 0) {
1513 ALOGE("%s: failed to set framerate to %f: %s", __FUNCTION__, fps, strerror(errno));
1514 return -1;
1515 }
1516
1517 double retFps = streamparm.parm.capture.timeperframe.denominator /
1518 static_cast<double>(streamparm.parm.capture.timeperframe.numerator);
1519 if (std::fabs(fps - retFps) > 1.0) {
1520 ALOGE("%s: expect fps %f, got %f instead", __FUNCTION__, fps, retFps);
1521 return -1;
1522 }
1523 mV4l2StreamingFps = fps;
1524 return 0;
1525}
1526
1527void ExternalCameraDeviceSession::cleanupInflightFences(std::vector<int>& allFences,
1528 size_t numFences) {
1529 for (size_t j = 0; j < numFences; j++) {
1530 sHandleImporter.closeFence(allFences[j]);
1531 }
1532}
1533
1534void ExternalCameraDeviceSession::cleanupBuffersLocked(int id) {
1535 for (auto& pair : mCirculatingBuffers.at(id)) {
1536 sHandleImporter.freeBuffer(pair.second);
1537 }
1538 mCirculatingBuffers[id].clear();
1539 mCirculatingBuffers.erase(id);
1540}
1541
1542void ExternalCameraDeviceSession::notifyShutter(int32_t frameNumber, nsecs_t shutterTs) {
1543 NotifyMsg msg;
1544 msg.set<NotifyMsg::Tag::shutter>(ShutterMsg{
1545 .frameNumber = frameNumber,
1546 .timestamp = shutterTs,
1547 });
1548 mCallback->notify({msg});
1549}
1550void ExternalCameraDeviceSession::notifyError(int32_t frameNumber, int32_t streamId, ErrorCode ec) {
1551 NotifyMsg msg;
1552 msg.set<NotifyMsg::Tag::error>(ErrorMsg{
1553 .frameNumber = frameNumber,
1554 .errorStreamId = streamId,
1555 .errorCode = ec,
1556 });
1557 mCallback->notify({msg});
1558}
1559
1560void ExternalCameraDeviceSession::invokeProcessCaptureResultCallback(
1561 std::vector<CaptureResult>& results, bool tryWriteFmq) {
1562 if (mProcessCaptureResultLock.tryLock() != OK) {
1563 const nsecs_t NS_TO_SECOND = 1000000000;
1564 ALOGV("%s: previous call is not finished! waiting 1s...", __FUNCTION__);
1565 if (mProcessCaptureResultLock.timedLock(/* 1s */ NS_TO_SECOND) != OK) {
1566 ALOGE("%s: cannot acquire lock in 1s, cannot proceed", __FUNCTION__);
1567 return;
1568 }
1569 }
1570 if (tryWriteFmq && mResultMetadataQueue->availableToWrite() > 0) {
1571 for (CaptureResult& result : results) {
1572 CameraMetadata& md = result.result;
1573 if (!md.metadata.empty()) {
1574 if (mResultMetadataQueue->write(reinterpret_cast<int8_t*>(md.metadata.data()),
1575 md.metadata.size())) {
1576 result.fmqResultSize = md.metadata.size();
1577 md.metadata.resize(0);
1578 } else {
1579 ALOGW("%s: couldn't utilize fmq, fall back to hwbinder", __FUNCTION__);
1580 result.fmqResultSize = 0;
1581 }
1582 } else {
1583 result.fmqResultSize = 0;
1584 }
1585 }
1586 }
1587 auto status = mCallback->processCaptureResult(results);
1588 if (!status.isOk()) {
1589 ALOGE("%s: processCaptureResult ERROR : %d:%d", __FUNCTION__, status.getExceptionCode(),
1590 status.getServiceSpecificError());
1591 }
1592
1593 mProcessCaptureResultLock.unlock();
1594}
1595
1596int ExternalCameraDeviceSession::waitForV4L2BufferReturnLocked(std::unique_lock<std::mutex>& lk) {
1597 ATRACE_CALL();
1598 auto timeout = std::chrono::seconds(kBufferWaitTimeoutSec);
1599 mLock.unlock();
1600 auto st = mV4L2BufferReturned.wait_for(lk, timeout);
1601 // Here we temporarily acquire mV4l2BufferLock before mLock, reversing the
1602 // normal lock acquisition order. This is fine because in most cases we are
1603 // protected by mInterfaceLock. The only thread that could deadlock is the
1604 // OutputThread, so we just need to make sure it never acquires mLock and
1605 // then mV4l2BufferLock.
1606 mLock.lock();
1607 if (st == std::cv_status::timeout) {
1608 ALOGE("%s: wait for V4L2 buffer return timeout!", __FUNCTION__);
1609 return -1;
1610 }
1611 return 0;
1612}
1613
1614bool ExternalCameraDeviceSession::supportOfflineLocked(int32_t streamId) {
1615 const Stream& stream = mStreamMap[streamId];
1616 if (stream.format == PixelFormat::BLOB &&
1617 static_cast<int32_t>(stream.dataSpace) == static_cast<int32_t>(Dataspace::JFIF)) {
1618 return true;
1619 }
1620 // TODO: support YUV output stream?
1621 return false;
1622}
1623
1624bool ExternalCameraDeviceSession::canDropRequest(const std::vector<int32_t>& offlineStreams,
1625 std::shared_ptr<HalRequest> halReq) {
1626 for (const auto& buffer : halReq->buffers) {
1627 for (auto offlineStreamId : offlineStreams) {
1628 if (buffer.streamId == offlineStreamId) {
1629 return false;
1630 }
1631 }
1632 }
1633 // Only drop a request completely if it has no offline output
1634 return true;
1635}
1636
1637void ExternalCameraDeviceSession::fillOfflineSessionInfo(
1638 const std::vector<int32_t>& offlineStreams,
1639 std::deque<std::shared_ptr<HalRequest>>& offlineReqs,
1640 const std::map<int, CirculatingBuffers>& circulatingBuffers,
1641 CameraOfflineSessionInfo* info) {
1642 if (info == nullptr) {
1643 ALOGE("%s: output info must not be null!", __FUNCTION__);
1644 return;
1645 }
1646
1647 info->offlineStreams.resize(offlineStreams.size());
1648 info->offlineRequests.resize(offlineReqs.size());
1649
1650 // Fill in offline requests and their pending output streams
1651 for (size_t i = 0; i < offlineReqs.size(); i++) {
1652 info->offlineRequests[i].frameNumber = offlineReqs[i]->frameNumber;
1653 info->offlineRequests[i].pendingStreams.resize(offlineReqs[i]->buffers.size());
1654 for (size_t bIdx = 0; bIdx < offlineReqs[i]->buffers.size(); bIdx++) {
1655 int32_t streamId = offlineReqs[i]->buffers[bIdx].streamId;
1656 info->offlineRequests[i].pendingStreams[bIdx] = streamId;
1657 }
1658 }
1659
1660 for (size_t i = 0; i < offlineStreams.size(); i++) {
1661 int32_t streamId = offlineStreams[i];
1662 info->offlineStreams[i].id = streamId;
1663 // Outstanding buffers are 0 since we are doing HAL buffer management;
1664 // the offline session will request those buffers later
1665 info->offlineStreams[i].numOutstandingBuffers = 0;
1666 const CirculatingBuffers& bufIdMap = circulatingBuffers.at(streamId);
1667 info->offlineStreams[i].circulatingBufferIds.resize(bufIdMap.size());
1668 size_t bIdx = 0;
1669 for (const auto& pair : bufIdMap) {
1670 // Fill in bufferId
1671 info->offlineStreams[i].circulatingBufferIds[bIdx++] = pair.first;
1672 }
1673 }
1674}
1675
1676Status ExternalCameraDeviceSession::isStreamCombinationSupported(
1677 const StreamConfiguration& config, const std::vector<SupportedV4L2Format>& supportedFormats,
1678 const ExternalCameraConfig& devCfg) {
1679 if (config.operationMode != StreamConfigurationMode::NORMAL_MODE) {
1680 ALOGE("%s: unsupported operation mode: %d", __FUNCTION__, config.operationMode);
1681 return Status::ILLEGAL_ARGUMENT;
1682 }
1683
1684 if (config.streams.size() == 0) {
1685 ALOGE("%s: cannot configure zero stream", __FUNCTION__);
1686 return Status::ILLEGAL_ARGUMENT;
1687 }
1688
1689 int numProcessedStream = 0;
1690 int numStallStream = 0;
1691 for (const auto& stream : config.streams) {
1692 // Check if the format/width/height combo is supported
1693 if (!isSupported(stream, supportedFormats, devCfg)) {
1694 return Status::ILLEGAL_ARGUMENT;
1695 }
1696 if (stream.format == PixelFormat::BLOB) {
1697 numStallStream++;
1698 } else {
1699 numProcessedStream++;
1700 }
1701 }
1702
1703 if (numProcessedStream > kMaxProcessedStream) {
1704 ALOGE("%s: too many processed streams (expect <= %d, got %d)", __FUNCTION__,
1705 kMaxProcessedStream, numProcessedStream);
1706 return Status::ILLEGAL_ARGUMENT;
1707 }
1708
1709 if (numStallStream > kMaxStallStream) {
1710 ALOGE("%s: too many stall streams (expect <= %d, got %d)", __FUNCTION__, kMaxStallStream,
1711 numStallStream);
1712 return Status::ILLEGAL_ARGUMENT;
1713 }
1714
1715 return Status::OK;
1716}
1717void ExternalCameraDeviceSession::updateBufferCaches(
1718 const std::vector<BufferCache>& cachesToRemove) {
1719 Mutex::Autolock _l(mCbsLock);
1720 for (auto& cache : cachesToRemove) {
1721 auto cbsIt = mCirculatingBuffers.find(cache.streamId);
1722 if (cbsIt == mCirculatingBuffers.end()) {
1723 // The stream could have been removed
1724 continue;
1725 }
1726 CirculatingBuffers& cbs = cbsIt->second;
1727 auto it = cbs.find(cache.bufferId);
1728 if (it != cbs.end()) {
1729 sHandleImporter.freeBuffer(it->second);
1730 cbs.erase(it);
1731 } else {
1732 ALOGE("%s: stream %d buffer %" PRIu64 " is not cached", __FUNCTION__, cache.streamId,
1733 cache.bufferId);
1734 }
1735 }
1736}
1737
1738Status ExternalCameraDeviceSession::processCaptureRequestError(
1739 const std::shared_ptr<HalRequest>& req, std::vector<NotifyMsg>* outMsgs,
1740 std::vector<CaptureResult>* outResults) {
1741 ATRACE_CALL();
1742 // Return V4L2 buffer to V4L2 buffer queue
1743 std::shared_ptr<V4L2Frame> v4l2Frame = std::static_pointer_cast<V4L2Frame>(req->frameIn);
1744 enqueueV4l2Frame(v4l2Frame);
1745
1746 if (outMsgs == nullptr) {
1747 notifyShutter(req->frameNumber, req->shutterTs);
1748 notifyError(/*frameNum*/ req->frameNumber, /*stream*/ -1, ErrorCode::ERROR_REQUEST);
1749 } else {
1750 NotifyMsg shutter;
1751 shutter.set<NotifyMsg::Tag::shutter>(
1752 ShutterMsg{.frameNumber = req->frameNumber, .timestamp = req->shutterTs});
1753
1754 NotifyMsg error;
1755 error.set<NotifyMsg::Tag::error>(ErrorMsg{.frameNumber = req->frameNumber,
1756 .errorStreamId = -1,
1757 .errorCode = ErrorCode::ERROR_REQUEST});
1758 outMsgs->push_back(shutter);
1759 outMsgs->push_back(error);
1760 }
1761
1762 // Fill output buffers
1763 CaptureResult result;
1764 result.frameNumber = req->frameNumber;
1765 result.partialResult = 1;
1766 result.inputBuffer.streamId = -1;
1767 result.outputBuffers.resize(req->buffers.size());
1768 for (size_t i = 0; i < req->buffers.size(); i++) {
1769 result.outputBuffers[i].streamId = req->buffers[i].streamId;
1770 result.outputBuffers[i].bufferId = req->buffers[i].bufferId;
1771 result.outputBuffers[i].status = BufferStatus::ERROR;
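// The acquire fence was never consumed; wrap it in a native handle and return
// it as the release fence so the framework can still wait on and close it.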
1772 if (req->buffers[i].acquireFence >= 0) {
1773 native_handle_t* handle = native_handle_create(/*numFds*/ 1, /*numInts*/ 0);
1774 handle->data[0] = req->buffers[i].acquireFence;
1775 result.outputBuffers[i].releaseFence = android::dupToAidl(handle);
1776 native_handle_delete(handle);
1777 }
1778 }
1779
1780 // update inflight records
1781 {
1782 std::lock_guard<std::mutex> lk(mInflightFramesLock);
1783 mInflightFrames.erase(req->frameNumber);
1784 }
1785
1786 if (outResults == nullptr) {
1787 // Callback into framework
1788 std::vector<CaptureResult> results(1);
1789 results[0] = std::move(result);
1790 invokeProcessCaptureResultCallback(results, /* tryWriteFmq */ true);
1791 freeReleaseFences(results);
1792 } else {
1793 outResults->push_back(std::move(result));
1794 }
1795 return Status::OK;
1796}
1797
1798Status ExternalCameraDeviceSession::processCaptureResult(std::shared_ptr<HalRequest>& req) {
1799 ATRACE_CALL();
1800 // Return V4L2 buffer to V4L2 buffer queue
1801 std::shared_ptr<V4L2Frame> v4l2Frame = std::static_pointer_cast<V4L2Frame>(req->frameIn);
1802 enqueueV4l2Frame(v4l2Frame);
1803
1804 // NotifyShutter
1805 notifyShutter(req->frameNumber, req->shutterTs);
1806
1807 // Fill output buffers
1808 std::vector<CaptureResult> results(1);
1809 CaptureResult& result = results[0];
1810 result.frameNumber = req->frameNumber;
1811 result.partialResult = 1;
1812 result.inputBuffer.streamId = -1;
1813 result.outputBuffers.resize(req->buffers.size());
1814 for (size_t i = 0; i < req->buffers.size(); i++) {
1815 result.outputBuffers[i].streamId = req->buffers[i].streamId;
1816 result.outputBuffers[i].bufferId = req->buffers[i].bufferId;
1817 if (req->buffers[i].fenceTimeout) {
1818 result.outputBuffers[i].status = BufferStatus::ERROR;
1819 if (req->buffers[i].acquireFence >= 0) {
1820 native_handle_t* handle = native_handle_create(/*numFds*/ 1, /*numInts*/ 0);
1821 handle->data[0] = req->buffers[i].acquireFence;
1822 result.outputBuffers[i].releaseFence = android::dupToAidl(handle);
1823 native_handle_delete(handle);
1824 }
1825 notifyError(req->frameNumber, req->buffers[i].streamId, ErrorCode::ERROR_BUFFER);
1826 } else {
1827 result.outputBuffers[i].status = BufferStatus::OK;
1828 // TODO: refactor
1829 if (req->buffers[i].acquireFence >= 0) {
1830 native_handle_t* handle = native_handle_create(/*numFds*/ 1, /*numInts*/ 0);
1831 handle->data[0] = req->buffers[i].acquireFence;
1832 result.outputBuffers[i].releaseFence = android::dupToAidl(handle);
1833 native_handle_delete(handle);
1834 }
1835 }
1836 }
1837
1838 // Fill capture result metadata
1839 fillCaptureResult(req->setting, req->shutterTs);
1840 const camera_metadata_t* rawResult = req->setting.getAndLock();
1841 convertToAidl(rawResult, &result.result);
1842 req->setting.unlock(rawResult);
1843
1844 // update inflight records
1845 {
1846 std::lock_guard<std::mutex> lk(mInflightFramesLock);
1847 mInflightFrames.erase(req->frameNumber);
1848 }
1849
1850 // Callback into framework
1851 invokeProcessCaptureResultCallback(results, /* tryWriteFmq */ true);
1852 freeReleaseFences(results);
1853 return Status::OK;
1854}
1855
1856ssize_t ExternalCameraDeviceSession::getJpegBufferSize(int32_t width, int32_t height) const {
1857 // Constant from camera3.h
1858 const ssize_t kMinJpegBufferSize = 256 * 1024 + sizeof(CameraBlob);
1859 // Get max jpeg size (area-wise).
1860 if (mMaxJpegResolution.width == 0) {
1861 ALOGE("%s: No supported JPEG stream", __FUNCTION__);
1862 return BAD_VALUE;
1863 }
1864
1865 // Get max jpeg buffer size
1866 ssize_t maxJpegBufferSize = 0;
1867 camera_metadata_ro_entry jpegBufMaxSize = mCameraCharacteristics.find(ANDROID_JPEG_MAX_SIZE);
1868 if (jpegBufMaxSize.count == 0) {
1869 ALOGE("%s: Can't find maximum JPEG size in static metadata!", __FUNCTION__);
1870 return BAD_VALUE;
1871 }
1872 maxJpegBufferSize = jpegBufMaxSize.data.i32[0];
1873
1874 if (maxJpegBufferSize <= kMinJpegBufferSize) {
1875 ALOGE("%s: ANDROID_JPEG_MAX_SIZE (%zd) <= kMinJpegBufferSize (%zd)", __FUNCTION__,
1876 maxJpegBufferSize, kMinJpegBufferSize);
1877 return BAD_VALUE;
1878 }
1879
1880 // Calculate final jpeg buffer size for the given resolution.
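// The size interpolates linearly with pixel area between kMinJpegBufferSize and
// maxJpegBufferSize; e.g. a resolution with half the max JPEG area is budgeted
// 0.5 * (maxJpegBufferSize - kMinJpegBufferSize) + kMinJpegBufferSize bytes.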
1881 float scaleFactor =
1882 ((float)(width * height)) / (mMaxJpegResolution.width * mMaxJpegResolution.height);
1883 ssize_t jpegBufferSize =
1884 scaleFactor * (maxJpegBufferSize - kMinJpegBufferSize) + kMinJpegBufferSize;
1885 if (jpegBufferSize > maxJpegBufferSize) {
1886 jpegBufferSize = maxJpegBufferSize;
1887 }
1888
1889 return jpegBufferSize;
1890}
1891binder_status_t ExternalCameraDeviceSession::dump(int fd, const char** /*args*/,
1892 uint32_t /*numArgs*/) {
1893 bool intfLocked = tryLock(mInterfaceLock);
1894 if (!intfLocked) {
1895 dprintf(fd, "!! ExternalCameraDeviceSession interface may be deadlocked !!\n");
1896 }
1897
1898 if (isClosed()) {
1899 dprintf(fd, "External camera %s is closed\n", mCameraId.c_str());
1900 return STATUS_OK;
1901 }
1902
1903 bool streaming = false;
1904 size_t v4L2BufferCount = 0;
1905 SupportedV4L2Format streamingFmt;
1906 {
1907 bool sessionLocked = tryLock(mLock);
1908 if (!sessionLocked) {
1909 dprintf(fd, "!! ExternalCameraDeviceSession mLock may be deadlocked !!\n");
1910 }
1911 streaming = mV4l2Streaming;
1912 streamingFmt = mV4l2StreamingFmt;
1913 v4L2BufferCount = mV4L2BufferCount;
1914
1915 if (sessionLocked) {
1916 mLock.unlock();
1917 }
1918 }
1919
1920 std::unordered_set<uint32_t> inflightFrames;
1921 {
1922 bool iffLocked = tryLock(mInflightFramesLock);
1923 if (!iffLocked) {
1924 dprintf(fd,
1925 "!! ExternalCameraDeviceSession mInflightFramesLock may be deadlocked !!\n");
1926 }
1927 inflightFrames = mInflightFrames;
1928 if (iffLocked) {
1929 mInflightFramesLock.unlock();
1930 }
1931 }
1932
1933 dprintf(fd, "External camera %s V4L2 FD %d, cropping type %s, %s\n", mCameraId.c_str(),
1934 mV4l2Fd.get(), (mCroppingType == VERTICAL) ? "vertical" : "horizontal",
1935 streaming ? "streaming" : "not streaming");
1936
1937 if (streaming) {
1938 // TODO: dump fps later
1939 dprintf(fd, "Current V4L2 format %c%c%c%c %dx%d @ %ffps\n", streamingFmt.fourcc & 0xFF,
1940 (streamingFmt.fourcc >> 8) & 0xFF, (streamingFmt.fourcc >> 16) & 0xFF,
1941 (streamingFmt.fourcc >> 24) & 0xFF, streamingFmt.width, streamingFmt.height,
1942 mV4l2StreamingFps);
1943
1944 size_t numDequeuedV4l2Buffers = 0;
1945 {
1946 std::lock_guard<std::mutex> lk(mV4l2BufferLock);
1947 numDequeuedV4l2Buffers = mNumDequeuedV4l2Buffers;
1948 }
1949 dprintf(fd, "V4L2 buffer queue size %zu, dequeued %zu\n", v4L2BufferCount,
1950 numDequeuedV4l2Buffers);
1951 }
1952
1953 dprintf(fd, "In-flight frames (not sorted):");
1954 for (const auto& frameNumber : inflightFrames) {
1955 dprintf(fd, "%d, ", frameNumber);
1956 }
1957 dprintf(fd, "\n");
1958 mOutputThread->dump(fd);
1959 dprintf(fd, "\n");
1960
1961 if (intfLocked) {
1962 mInterfaceLock.unlock();
1963 }
1964
1965 return STATUS_OK;
1966}
1967
1968// Start ExternalCameraDeviceSession::BufferRequestThread functions
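// Flow: a caller hands buffers to requestBufferStart(); threadLoop() forwards
// them to the framework via requestStreamBuffers() and imports the returned
// buffers and fences; waitForBufferRequestDone() then returns the filled-in
// HalStreamBuffers to the caller. Only one request may be in flight at a time.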
1969ExternalCameraDeviceSession::BufferRequestThread::BufferRequestThread(
1970 std::weak_ptr<OutputThreadInterface> parent,
1971 std::shared_ptr<ICameraDeviceCallback> callbacks)
1972 : mParent(parent), mCallbacks(callbacks) {}
1973
1974int ExternalCameraDeviceSession::BufferRequestThread::requestBufferStart(
1975 const std::vector<HalStreamBuffer>& bufReqs) {
1976 if (bufReqs.empty()) {
1977 ALOGE("%s: bufReqs is empty!", __FUNCTION__);
1978 return -1;
1979 }
1980
1981 {
1982 std::lock_guard<std::mutex> lk(mLock);
1983 if (mRequestingBuffer) {
1984 ALOGE("%s: BufferRequestThread does not support more than one concurrent request!",
1985 __FUNCTION__);
1986 return -1;
1987 }
1988
1989 mBufferReqs = bufReqs;
1990 mRequestingBuffer = true;
1991 }
1992 mRequestCond.notify_one();
1993 return 0;
1994}
1995
1996int ExternalCameraDeviceSession::BufferRequestThread::waitForBufferRequestDone(
1997 std::vector<HalStreamBuffer>* outBufReqs) {
1998 std::unique_lock<std::mutex> lk(mLock);
1999 if (!mRequestingBuffer) {
2000 ALOGE("%s: no pending buffer request!", __FUNCTION__);
2001 return -1;
2002 }
2003
2004 if (mPendingReturnBufferReqs.empty()) {
2005 std::chrono::milliseconds timeout = std::chrono::milliseconds(kReqProcTimeoutMs);
2006 auto st = mRequestDoneCond.wait_for(lk, timeout);
2007 if (st == std::cv_status::timeout) {
2008 ALOGE("%s: wait for buffer request finish timeout!", __FUNCTION__);
2009 return -1;
2010 }
2011 }
2012 mRequestingBuffer = false;
2013 *outBufReqs = std::move(mPendingReturnBufferReqs);
2014 mPendingReturnBufferReqs.clear();
2015 return 0;
2016}
2017
2018void ExternalCameraDeviceSession::BufferRequestThread::waitForNextRequest() {
2019 ATRACE_CALL();
2020 std::unique_lock<std::mutex> lk(mLock);
2021 int waitTimes = 0;
2022 while (mBufferReqs.empty()) {
2023 if (exitPending()) {
2024 return;
2025 }
2026 auto timeout = std::chrono::milliseconds(kReqWaitTimeoutMs);
2027 auto st = mRequestCond.wait_for(lk, timeout);
2028 if (st == std::cv_status::timeout) {
2029 waitTimes++;
2030 if (waitTimes == kReqWaitTimesWarn) {
2031 // BufferRequestThread waits forever for a new buffer request,
2032 // but prints a periodic warning to indicate that it is still waiting
2033 ALOGV("%s: still waiting for new buffer request", __FUNCTION__);
2034 waitTimes = 0;
2035 }
2036 }
2037 }
2038
2039 // Fill in BufferRequest
2040 mHalBufferReqs.resize(mBufferReqs.size());
2041 for (size_t i = 0; i < mHalBufferReqs.size(); i++) {
2042 mHalBufferReqs[i].streamId = mBufferReqs[i].streamId;
2043 mHalBufferReqs[i].numBuffersRequested = 1;
2044 }
2045}
2046
2047bool ExternalCameraDeviceSession::BufferRequestThread::threadLoop() {
2048 waitForNextRequest();
2049 if (exitPending()) {
2050 return false;
2051 }
2052
2053 ATRACE_BEGIN("AIDL requestStreamBuffers");
2054 BufferRequestStatus status;
2055 std::vector<StreamBufferRet> bufRets;
2056 ScopedAStatus ret = mCallbacks->requestStreamBuffers(mHalBufferReqs, &bufRets, &status);
2057 if (!ret.isOk()) {
2058 ALOGE("%s: Transaction error: %d:%d", __FUNCTION__, ret.getExceptionCode(),
2059 ret.getServiceSpecificError());
2060 return false;
2061 }
2062
2063 std::unique_lock<std::mutex> lk(mLock);
2064 if (status == BufferRequestStatus::OK || status == BufferRequestStatus::FAILED_PARTIAL) {
2065 if (bufRets.size() != mHalBufferReqs.size()) {
2066 ALOGE("%s: expect %zu buffer requests returned, only got %zu", __FUNCTION__,
2067 mHalBufferReqs.size(), bufRets.size());
2068 return false;
2069 }
2070
2071 auto parent = mParent.lock();
2072 if (parent == nullptr) {
2073 ALOGE("%s: session has been disconnected!", __FUNCTION__);
2074 return false;
2075 }
2076
2077 // -1 marks slots that never receive a fence (e.g. error returns) so cleanup skips them
2078 std::vector<int> importedFences(bufRets.size(), -1);
2079 for (size_t i = 0; i < bufRets.size(); i++) {
2080 int streamId = bufRets[i].streamId;
2081 switch (bufRets[i].val.getTag()) {
2082 case StreamBuffersVal::Tag::error:
2083 continue;
2084 case StreamBuffersVal::Tag::buffers: {
2085 const std::vector<StreamBuffer>& hBufs =
2086 bufRets[i].val.get<StreamBuffersVal::Tag::buffers>();
2087 if (hBufs.size() != 1) {
2088 ALOGE("%s: expect 1 buffer returned, got %zu!", __FUNCTION__, hBufs.size());
2089 return false;
2090 }
2091 const StreamBuffer& hBuf = hBufs[0];
2092
2093 mBufferReqs[i].bufferId = hBuf.bufferId;
2094 // TODO: create a batch import API so we don't need to lock/unlock mCbsLock
2095 // repeatedly?
2096 lk.unlock();
2097 Status s =
2098 parent->importBuffer(streamId, hBuf.bufferId, makeFromAidl(hBuf.buffer),
2099 /*out*/ &mBufferReqs[i].bufPtr);
2100 lk.lock();
2101
2102 if (s != Status::OK) {
2103 ALOGE("%s: stream %d import buffer failed!", __FUNCTION__, streamId);
2104 cleanupInflightFences(importedFences, /*numFences*/ i);
2105 return false;
2106 }
2107 if (!sHandleImporter.importFence(makeFromAidl(hBuf.acquireFence),
2108 mBufferReqs[i].acquireFence)) {
2109 ALOGE("%s: stream %d import fence failed!", __FUNCTION__, streamId);
2110 cleanupInflightFences(importedFences, /*numFences*/ i);
2111 return false;
2112 }
2113 importedFences[i] = mBufferReqs[i].acquireFence;
2114 } break;
2115 default:
2116 ALOGE("%s: Unknown StreamBuffersVal!", __FUNCTION__);
2117 return false;
2118 }
2119 }
2120 } else {
2121 ALOGE("%s: requestStreamBuffers call failed!", __FUNCTION__);
2122 }
2123
2124 mPendingReturnBufferReqs = std::move(mBufferReqs);
2125 mBufferReqs.clear();
2126
2127 lk.unlock();
2128 mRequestDoneCond.notify_one();
2129 return true;
2130}
2131
2132// End ExternalCameraDeviceSession::BufferRequestThread functions
2133
2134// Start ExternalCameraDeviceSession::OutputThread functions
2135
2136ExternalCameraDeviceSession::OutputThread::OutputThread(
2137 std::weak_ptr<OutputThreadInterface> parent, CroppingType ct,
2138 const common::V1_0::helper::CameraMetadata& chars,
2139 std::shared_ptr<BufferRequestThread> bufReqThread)
2140 : mParent(parent),
2141 mCroppingType(ct),
2142 mCameraCharacteristics(chars),
2143 mBufferRequestThread(bufReqThread) {}
2144
2145ExternalCameraDeviceSession::OutputThread::~OutputThread() {}
2146
2147Status ExternalCameraDeviceSession::OutputThread::allocateIntermediateBuffers(
2148 const Size& v4lSize, const Size& thumbSize, const std::vector<Stream>& streams,
2149 uint32_t blobBufferSize) {
2150 std::lock_guard<std::mutex> lk(mBufferLock);
2151 if (!mScaledYu12Frames.empty()) {
2152 ALOGE("%s: intermediate buffer pool has %zu inflight buffers! (expect 0)", __FUNCTION__,
2153 mScaledYu12Frames.size());
2154 return Status::INTERNAL_ERROR;
2155 }
2156
2157 // Allocating intermediate YU12 frame
2158 if (mYu12Frame == nullptr || mYu12Frame->mWidth != v4lSize.width ||
2159 mYu12Frame->mHeight != v4lSize.height) {
2160 mYu12Frame.reset();
2161 mYu12Frame = std::make_shared<AllocatedFrame>(v4lSize.width, v4lSize.height);
2162 int ret = mYu12Frame->allocate(&mYu12FrameLayout);
2163 if (ret != 0) {
2164 ALOGE("%s: allocating YU12 frame failed!", __FUNCTION__);
2165 return Status::INTERNAL_ERROR;
2166 }
2167 }
2168
2169 // Allocating intermediate YU12 thumbnail frame
2170 if (mYu12ThumbFrame == nullptr || mYu12ThumbFrame->mWidth != thumbSize.width ||
2171 mYu12ThumbFrame->mHeight != thumbSize.height) {
2172 mYu12ThumbFrame.reset();
2173 mYu12ThumbFrame = std::make_shared<AllocatedFrame>(thumbSize.width, thumbSize.height);
2174 int ret = mYu12ThumbFrame->allocate(&mYu12ThumbFrameLayout);
2175 if (ret != 0) {
2176 ALOGE("%s: allocating YU12 thumb frame failed!", __FUNCTION__);
2177 return Status::INTERNAL_ERROR;
2178 }
2179 }
2180
2181 // Allocating scaled buffers
2182 for (const auto& stream : streams) {
2183 Size sz = {stream.width, stream.height};
2184 if (sz == v4lSize) {
2185 continue; // Don't need an intermediate buffer same size as v4lBuffer
2186 }
2187 if (mIntermediateBuffers.count(sz) == 0) {
2188 // Create new intermediate buffer
2189 std::shared_ptr<AllocatedFrame> buf =
2190 std::make_shared<AllocatedFrame>(stream.width, stream.height);
2191 int ret = buf->allocate();
2192 if (ret != 0) {
2193 ALOGE("%s: allocating intermediate YU12 frame %dx%d failed!", __FUNCTION__,
2194 stream.width, stream.height);
2195 return Status::INTERNAL_ERROR;
2196 }
2197 mIntermediateBuffers[sz] = buf;
2198 }
2199 }
2200
2201 // Remove unconfigured buffers
2202 auto it = mIntermediateBuffers.begin();
2203 while (it != mIntermediateBuffers.end()) {
2204 bool configured = false;
2205 auto sz = it->first;
2206 for (const auto& stream : streams) {
2207 if (stream.width == sz.width && stream.height == sz.height) {
2208 configured = true;
2209 break;
2210 }
2211 }
2212 if (configured) {
2213 it++;
2214 } else {
2215 it = mIntermediateBuffers.erase(it);
2216 }
2217 }
2218
2219 // Allocate mute test pattern frame
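// (one packed RGB triple per pixel, matching the libyuv::FOURCC_RAW conversion
// in OutputThread::threadLoop)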
2220 mMuteTestPatternFrame.resize(mYu12Frame->mWidth * mYu12Frame->mHeight * 3);
2221
2222 mBlobBufferSize = blobBufferSize;
2223 return Status::OK;
2224}
2225
2226Status ExternalCameraDeviceSession::OutputThread::submitRequest(
2227 const std::shared_ptr<HalRequest>& req) {
2228 std::unique_lock<std::mutex> lk(mRequestListLock);
2229 mRequestList.push_back(req);
2230 lk.unlock();
2231 mRequestCond.notify_one();
2232 return Status::OK;
2233}
2234
2235void ExternalCameraDeviceSession::OutputThread::flush() {
2236 ATRACE_CALL();
2237 auto parent = mParent.lock();
2238 if (parent == nullptr) {
2239 ALOGE("%s: session has been disconnected!", __FUNCTION__);
2240 return;
2241 }
2242
2243 std::unique_lock<std::mutex> lk(mRequestListLock);
2244 std::list<std::shared_ptr<HalRequest>> reqs = std::move(mRequestList);
2245 mRequestList.clear();
2246 if (mProcessingRequest) {
2247 auto timeout = std::chrono::seconds(kFlushWaitTimeoutSec);
2248 auto st = mRequestDoneCond.wait_for(lk, timeout);
2249 if (st == std::cv_status::timeout) {
2250 ALOGE("%s: wait for inflight request finish timeout!", __FUNCTION__);
2251 }
2252 }
2253
2254 ALOGV("%s: flushing inflight requests", __FUNCTION__);
2255 lk.unlock();
2256 for (const auto& req : reqs) {
2257 parent->processCaptureRequestError(req);
2258 }
2259}
2260
2261void ExternalCameraDeviceSession::OutputThread::dump(int fd) {
2262 std::lock_guard<std::mutex> lk(mRequestListLock);
2263 if (mProcessingRequest) {
2264 dprintf(fd, "OutputThread processing frame %d\n", mProcessingFrameNumber);
2265 } else {
2266 dprintf(fd, "OutputThread not processing any frames\n");
2267 }
2268 dprintf(fd, "OutputThread request list contains frame: ");
2269 for (const auto& req : mRequestList) {
2270 dprintf(fd, "%d, ", req->frameNumber);
2271 }
2272 dprintf(fd, "\n");
2273}
2274
2275void ExternalCameraDeviceSession::OutputThread::setExifMakeModel(const std::string& make,
2276 const std::string& model) {
2277 mExifMake = make;
2278 mExifModel = model;
2279}
2280
2281std::list<std::shared_ptr<HalRequest>>
2282ExternalCameraDeviceSession::OutputThread::switchToOffline() {
2283 ATRACE_CALL();
2284 auto parent = mParent.lock();
2285 if (parent == nullptr) {
2286 ALOGE("%s: session has been disconnected!", __FUNCTION__);
2287 return {};
2288 }
2289
2290 std::unique_lock<std::mutex> lk(mRequestListLock);
2291 std::list<std::shared_ptr<HalRequest>> reqs = std::move(mRequestList);
2292 mRequestList.clear();
2293 if (mProcessingRequest) {
2294 auto timeout = std::chrono::seconds(kFlushWaitTimeoutSec);
2295 auto st = mRequestDoneCond.wait_for(lk, timeout);
2296 if (st == std::cv_status::timeout) {
2297 ALOGE("%s: wait for inflight request finish timeout!", __FUNCTION__);
2298 }
2299 }
2300 lk.unlock();
2301 clearIntermediateBuffers();
2302 ALOGV("%s: returning %zu requests for offline processing", __FUNCTION__, reqs.size());
2303 return reqs;
2304}
2305
2306int ExternalCameraDeviceSession::OutputThread::requestBufferStart(
2307 const std::vector<HalStreamBuffer>& bufs) {
2308 if (mBufferRequestThread == nullptr) {
2309 return 0;
2310 }
2311 return mBufferRequestThread->requestBufferStart(bufs);
2312}
2313
2314int ExternalCameraDeviceSession::OutputThread::waitForBufferRequestDone(
2315 std::vector<HalStreamBuffer>* outBufs) {
2316 if (mBufferRequestThread == nullptr) {
2317 return 0;
2318 }
2319 return mBufferRequestThread->waitForBufferRequestDone(outBufs);
2320}
2321
2322void ExternalCameraDeviceSession::OutputThread::waitForNextRequest(
2323 std::shared_ptr<HalRequest>* out) {
2324 ATRACE_CALL();
2325 if (out == nullptr) {
2326 ALOGE("%s: out is null", __FUNCTION__);
2327 return;
2328 }
2329
2330 std::unique_lock<std::mutex> lk(mRequestListLock);
2331 int waitTimes = 0;
2332 while (mRequestList.empty()) {
2333 if (exitPending()) {
2334 return;
2335 }
2336 auto timeout = std::chrono::milliseconds(kReqWaitTimeoutMs);
2337 auto st = mRequestCond.wait_for(lk, timeout);
2338 if (st == std::cv_status::timeout) {
2339 waitTimes++;
2340 if (waitTimes == kReqWaitTimesMax) {
2341 // no new request, return
2342 return;
2343 }
2344 }
2345 }
2346 *out = mRequestList.front();
2347 mRequestList.pop_front();
2348 mProcessingRequest = true;
2349 mProcessingFrameNumber = (*out)->frameNumber;
2350}
2351
2352void ExternalCameraDeviceSession::OutputThread::signalRequestDone() {
2353 std::unique_lock<std::mutex> lk(mRequestListLock);
2354 mProcessingRequest = false;
2355 mProcessingFrameNumber = 0;
2356 lk.unlock();
2357 mRequestDoneCond.notify_one();
2358}
2359
2360int ExternalCameraDeviceSession::OutputThread::cropAndScaleLocked(
2361 std::shared_ptr<AllocatedFrame>& in, const Size& outSz, YCbCrLayout* out) {
2362 Size inSz = {in->mWidth, in->mHeight};
2363
2364 int ret;
2365 if (inSz == outSz) {
2366 ret = in->getLayout(out);
2367 if (ret != 0) {
2368 ALOGE("%s: failed to get input image layout", __FUNCTION__);
2369 return ret;
2370 }
2371 return ret;
2372 }
2373
2374 // Cropping to output aspect ratio
2375 IMapper::Rect inputCrop;
2376 ret = getCropRect(mCroppingType, inSz, outSz, &inputCrop);
2377 if (ret != 0) {
2378 ALOGE("%s: failed to compute crop rect for output size %dx%d", __FUNCTION__, outSz.width,
2379 outSz.height);
2380 return ret;
2381 }
2382
2383 YCbCrLayout croppedLayout;
2384 ret = in->getCroppedLayout(inputCrop, &croppedLayout);
2385 if (ret != 0) {
2386 ALOGE("%s: failed to crop input image %dx%d to output size %dx%d", __FUNCTION__, inSz.width,
2387 inSz.height, outSz.width, outSz.height);
2388 return ret;
2389 }
2390
2391 if ((mCroppingType == VERTICAL && inSz.width == outSz.width) ||
2392 (mCroppingType == HORIZONTAL && inSz.height == outSz.height)) {
2393 // No scale is needed
2394 *out = croppedLayout;
2395 return 0;
2396 }
2397
2398 auto it = mScaledYu12Frames.find(outSz);
2399 std::shared_ptr<AllocatedFrame> scaledYu12Buf;
2400 if (it != mScaledYu12Frames.end()) {
2401 scaledYu12Buf = it->second;
2402 } else {
2403 it = mIntermediateBuffers.find(outSz);
2404 if (it == mIntermediateBuffers.end()) {
2405 ALOGE("%s: failed to find intermediate buffer size %dx%d", __FUNCTION__, outSz.width,
2406 outSz.height);
2407 return -1;
2408 }
2409 scaledYu12Buf = it->second;
2410 }
2411 // Scale
2412 YCbCrLayout outLayout;
2413 ret = scaledYu12Buf->getLayout(&outLayout);
2414 if (ret != 0) {
2415 ALOGE("%s: failed to get output buffer layout", __FUNCTION__);
2416 return ret;
2417 }
2418
2419 ret = libyuv::I420Scale(
2420 static_cast<uint8_t*>(croppedLayout.y), croppedLayout.yStride,
2421 static_cast<uint8_t*>(croppedLayout.cb), croppedLayout.cStride,
2422 static_cast<uint8_t*>(croppedLayout.cr), croppedLayout.cStride, inputCrop.width,
2423 inputCrop.height, static_cast<uint8_t*>(outLayout.y), outLayout.yStride,
2424 static_cast<uint8_t*>(outLayout.cb), outLayout.cStride,
2425 static_cast<uint8_t*>(outLayout.cr), outLayout.cStride, outSz.width, outSz.height,
2426 // TODO: b/72261744 see if we can use better filter without losing too much perf
2427 libyuv::FilterMode::kFilterNone);
2428
2429 if (ret != 0) {
2430 ALOGE("%s: failed to scale buffer from %dx%d to %dx%d. Ret %d", __FUNCTION__,
2431 inputCrop.width, inputCrop.height, outSz.width, outSz.height, ret);
2432 return ret;
2433 }
2434
2435 *out = outLayout;
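// Remember the scaled frame so other output streams of the same size in this
// request can reuse it; the cache is cleared after each threadLoop() iteration.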
2436 mScaledYu12Frames.insert({outSz, scaledYu12Buf});
2437 return 0;
2438}
2439
2440int ExternalCameraDeviceSession::OutputThread::cropAndScaleThumbLocked(
2441 std::shared_ptr<AllocatedFrame>& in, const Size& outSz, YCbCrLayout* out) {
2442 Size inSz{in->mWidth, in->mHeight};
2443
2444 if ((outSz.width * outSz.height) > (mYu12ThumbFrame->mWidth * mYu12ThumbFrame->mHeight)) {
2445 ALOGE("%s: Requested thumbnail size too big (%d,%d) > (%d,%d)", __FUNCTION__, outSz.width,
2446 outSz.height, mYu12ThumbFrame->mWidth, mYu12ThumbFrame->mHeight);
2447 return -1;
2448 }
2449
2450 int ret;
2451
2452 /* This will crop-and-zoom the input YUV frame to the thumbnail size
2453 * Based on the following logic:
2454 * 1) Square pixels come in, square pixels come out, therefore single
2455 * scale factor is computed to either make input bigger or smaller
2456 * depending on if we are upscaling or downscaling
2457 * 2) That single scale factor would either make height too tall or width
2458 * too wide so we need to crop the input either horizontally or vertically
2459 * but not both
2460 */
2461
2462 /* Convert the input and output dimensions into floats for ease of math */
2463 float fWin = static_cast<float>(inSz.width);
2464 float fHin = static_cast<float>(inSz.height);
2465 float fWout = static_cast<float>(outSz.width);
2466 float fHout = static_cast<float>(outSz.height);
2467
2468 /* Compute the one scale factor from (1) above, it will be the smaller of
2469 * the two possibilities. */
2470 float scaleFactor = std::min(fHin / fHout, fWin / fWout);
2471
2472 /* Since we are crop-and-zooming (as opposed to letter/pillar boxing) we can
2473 * simply multiply the output by our scaleFactor to get the cropped input
2474 * size. Note that at least one of {fWcrop, fHcrop} is going to wind up
2475 * being {fWin, fHin} respectively because fHout or fWout cancels out the
2476 * scaleFactor calculation above.
2477 *
2478 * Specifically:
2479 * if ( fHin / fHout ) < ( fWin / fWout ) we crop the sides off
2480 * input, in which case
2481 * scaleFactor = fHin / fHout
2482 * fWcrop = fHin / fHout * fWout
2483 * fHcrop = fHin
2484 *
2485 * Note that fWcrop <= fWin ( because ( fHin / fHout ) * fWout < fWin, which
2486 * is just the inequality above with both sides multiplied by fWout )
2487 *
2488 * on the other hand if ( fWin / fWout ) < ( fHin / fHout) we crop the top
2489 * and the bottom off of input, and
2490 * scaleFactor = fWin / fWout
2491 * fWcrop = fWin
2492 * fHCrop = fWin / fWout * fHout
2493 */
2494 float fWcrop = scaleFactor * fWout;
2495 float fHcrop = scaleFactor * fHout;
2496
2497 /* Convert to integer and truncate to an even number */
2498 Size cropSz = {.width = 2 * static_cast<int32_t>(fWcrop / 2.0f),
2499 .height = 2 * static_cast<int32_t>(fHcrop / 2.0f)};
2500
2501 /* Convert to a centered rectange with even top/left */
2502 IMapper::Rect inputCrop{.left = 2 * static_cast<int32_t>((inSz.width - cropSz.width) / 4),
2503 .top = 2 * static_cast<int32_t>((inSz.height - cropSz.height) / 4),
2504 .width = static_cast<int32_t>(cropSz.width),
2505 .height = static_cast<int32_t>(cropSz.height)};
2506
2507 if ((inputCrop.top < 0) || (inputCrop.top >= static_cast<int32_t>(inSz.height)) ||
2508 (inputCrop.left < 0) || (inputCrop.left >= static_cast<int32_t>(inSz.width)) ||
2509 (inputCrop.width <= 0) ||
2510 (inputCrop.width + inputCrop.left > static_cast<int32_t>(inSz.width)) ||
2511 (inputCrop.height <= 0) ||
2512 (inputCrop.height + inputCrop.top > static_cast<int32_t>(inSz.height))) {
2513 ALOGE("%s: came up with really wrong crop rectangle", __FUNCTION__);
2514 ALOGE("%s: input layout %dx%d for output size %dx%d", __FUNCTION__, inSz.width,
2515 inSz.height, outSz.width, outSz.height);
2516 ALOGE("%s: computed input crop +%d,+%d %dx%d", __FUNCTION__, inputCrop.left, inputCrop.top,
2517 inputCrop.width, inputCrop.height);
2518 return -1;
2519 }
2520
2521 YCbCrLayout inputLayout;
2522 ret = in->getCroppedLayout(inputCrop, &inputLayout);
2523 if (ret != 0) {
2524 ALOGE("%s: failed to crop input layout %dx%d for output size %dx%d", __FUNCTION__,
2525 inSz.width, inSz.height, outSz.width, outSz.height);
2526 ALOGE("%s: computed input crop +%d,+%d %dx%d", __FUNCTION__, inputCrop.left, inputCrop.top,
2527 inputCrop.width, inputCrop.height);
2528 return ret;
2529 }
2530 ALOGV("%s: cropped input layout %dx%d for output size %dx%d", __FUNCTION__, inSz.width,
2531 inSz.height, outSz.width, outSz.height);
2532 ALOGV("%s: computed input crop +%d,+%d %dx%d", __FUNCTION__, inputCrop.left, inputCrop.top,
2533 inputCrop.width, inputCrop.height);
2534
2535 // Scale
2536 YCbCrLayout outFullLayout;
2537
2538 ret = mYu12ThumbFrame->getLayout(&outFullLayout);
2539 if (ret != 0) {
2540 ALOGE("%s: failed to get output buffer layout", __FUNCTION__);
2541 return ret;
2542 }
2543
2544 ret = libyuv::I420Scale(static_cast<uint8_t*>(inputLayout.y), inputLayout.yStride,
2545 static_cast<uint8_t*>(inputLayout.cb), inputLayout.cStride,
2546 static_cast<uint8_t*>(inputLayout.cr), inputLayout.cStride,
2547 inputCrop.width, inputCrop.height,
2548 static_cast<uint8_t*>(outFullLayout.y), outFullLayout.yStride,
2549 static_cast<uint8_t*>(outFullLayout.cb), outFullLayout.cStride,
2550 static_cast<uint8_t*>(outFullLayout.cr), outFullLayout.cStride,
2551 outSz.width, outSz.height, libyuv::FilterMode::kFilterNone);
2552
2553 if (ret != 0) {
2554 ALOGE("%s: failed to scale buffer from %dx%d to %dx%d. Ret %d", __FUNCTION__,
2555 inputCrop.width, inputCrop.height, outSz.width, outSz.height, ret);
2556 return ret;
2557 }
2558
2559 *out = outFullLayout;
2560 return 0;
2561}
2562
2563int ExternalCameraDeviceSession::OutputThread::createJpegLocked(
2564 HalStreamBuffer& halBuf, const common::V1_0::helper::CameraMetadata& setting) {
2565 ATRACE_CALL();
2566 int ret;
2567 auto lfail = [&](auto... args) {
2568 ALOGE(args...);
2569
2570 return 1;
2571 };
2572 auto parent = mParent.lock();
2573 if (parent == nullptr) {
2574 ALOGE("%s: session has been disconnected!", __FUNCTION__);
2575 return 1;
2576 }
2577
2578 ALOGV("%s: HAL buffer sid: %d bid: %" PRIu64 " w: %u h: %u", __FUNCTION__, halBuf.streamId,
2579 static_cast<uint64_t>(halBuf.bufferId), halBuf.width, halBuf.height);
2580 ALOGV("%s: HAL buffer fmt: %x usage: %" PRIx64 " ptr: %p", __FUNCTION__, halBuf.format,
2581 static_cast<uint64_t>(halBuf.usage), halBuf.bufPtr);
2582 ALOGV("%s: YV12 buffer %d x %d", __FUNCTION__, mYu12Frame->mWidth, mYu12Frame->mHeight);
2583
2584 int jpegQuality, thumbQuality;
2585 Size thumbSize;
2586 bool outputThumbnail = true;
2587
2588 if (setting.exists(ANDROID_JPEG_QUALITY)) {
2589 camera_metadata_ro_entry entry = setting.find(ANDROID_JPEG_QUALITY);
2590 jpegQuality = entry.data.u8[0];
2591 } else {
2592 return lfail("%s: ANDROID_JPEG_QUALITY not set", __FUNCTION__);
2593 }
2594
2595 if (setting.exists(ANDROID_JPEG_THUMBNAIL_QUALITY)) {
2596 camera_metadata_ro_entry entry = setting.find(ANDROID_JPEG_THUMBNAIL_QUALITY);
2597 thumbQuality = entry.data.u8[0];
2598 } else {
2599 return lfail("%s: ANDROID_JPEG_THUMBNAIL_QUALITY not set", __FUNCTION__);
2600 }
2601
2602 if (setting.exists(ANDROID_JPEG_THUMBNAIL_SIZE)) {
2603 camera_metadata_ro_entry entry = setting.find(ANDROID_JPEG_THUMBNAIL_SIZE);
2604 thumbSize = Size{.width = entry.data.i32[0], .height = entry.data.i32[1]};
2605 if (thumbSize.width == 0 && thumbSize.height == 0) {
2606 outputThumbnail = false;
2607 }
2608 } else {
2609 return lfail("%s: ANDROID_JPEG_THUMBNAIL_SIZE not set", __FUNCTION__);
2610 }
2611
2612 /* Cropped and scaled YU12 buffer for main and thumbnail */
2613 YCbCrLayout yu12Main;
2614 Size jpegSize{halBuf.width, halBuf.height};
2615
2616 /* Compute temporary buffer sizes accounting for the following:
2617 * thumbnail can't exceed APP1 size of 64K
2618 * main image needs to hold APP1, headers, and at most a poorly
2619 * compressed image */
2620 const ssize_t maxThumbCodeSize = 64 * 1024;
2621 const ssize_t maxJpegCodeSize =
2622 mBlobBufferSize == 0 ? parent->getJpegBufferSize(jpegSize.width, jpegSize.height)
2623 : mBlobBufferSize;
2624
2625 /* Check that getJpegBufferSize did not return an error */
2626 if (maxJpegCodeSize < 0) {
2627 return lfail("%s: getJpegBufferSize returned %zd", __FUNCTION__, maxJpegCodeSize);
2628 }
2629
2630 /* Hold actual thumbnail and main image code sizes */
2631 size_t thumbCodeSize = 0, jpegCodeSize = 0;
2632 /* Temporary thumbnail code buffer */
2633 std::vector<uint8_t> thumbCode(outputThumbnail ? maxThumbCodeSize : 0);
2634
2635 YCbCrLayout yu12Thumb;
2636 if (outputThumbnail) {
2637 ret = cropAndScaleThumbLocked(mYu12Frame, thumbSize, &yu12Thumb);
2638
2639 if (ret != 0) {
2640 return lfail("%s: crop and scale thumbnail failed!", __FUNCTION__);
2641 }
2642 }
2643
2644 /* Scale and crop main jpeg */
2645 ret = cropAndScaleLocked(mYu12Frame, jpegSize, &yu12Main);
2646
2647 if (ret != 0) {
2648 return lfail("%s: crop and scale main failed!", __FUNCTION__);
2649 }
2650
2651 /* Encode the thumbnail image */
2652 if (outputThumbnail) {
2653 ret = encodeJpegYU12(thumbSize, yu12Thumb, thumbQuality, 0, 0, &thumbCode[0],
2654 maxThumbCodeSize, thumbCodeSize);
2655
2656 if (ret != 0) {
2657 return lfail("%s: thumbnail encodeJpegYU12 failed with %d", __FUNCTION__, ret);
2658 }
2659 }
2660
2661 /* Combine camera characteristics with request settings to form EXIF
2662 * metadata */
2663 common::V1_0::helper::CameraMetadata meta(mCameraCharacteristics);
2664 meta.append(setting);
2665
2666 /* Generate EXIF object */
2667 std::unique_ptr<ExifUtils> utils(ExifUtils::create());
2668 /* Make sure it's initialized */
2669 utils->initialize();
2670
2671 utils->setFromMetadata(meta, jpegSize.width, jpegSize.height);
2672 utils->setMake(mExifMake);
2673 utils->setModel(mExifModel);
2674
2675 ret = utils->generateApp1(outputThumbnail ? &thumbCode[0] : nullptr, thumbCodeSize);
2676
2677 if (!ret) {
2678 return lfail("%s: generating APP1 failed", __FUNCTION__);
2679 }
2680
2681 /* Get internal buffer */
2682 size_t exifDataSize = utils->getApp1Length();
2683 const uint8_t* exifData = utils->getApp1Buffer();
2684
2685 /* Lock the HAL jpeg code buffer */
2686 void* bufPtr = sHandleImporter.lock(*(halBuf.bufPtr), static_cast<uint64_t>(halBuf.usage),
2687 maxJpegCodeSize);
2688
2689 if (!bufPtr) {
2690 return lfail("%s: could not lock %zu bytes", __FUNCTION__, maxJpegCodeSize);
2691 }
2692
2693 /* Encode the main jpeg image */
2694 ret = encodeJpegYU12(jpegSize, yu12Main, jpegQuality, exifData, exifDataSize, bufPtr,
2695 maxJpegCodeSize, jpegCodeSize);
2696
2697 /* TODO: Not sure this belongs here, maybe better to pass jpegCodeSize out
2698 * and do this when returning buffer to parent */
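/* BLOB output convention: the JPEG bytes sit at the start of the buffer and a
 * CameraBlob trailer is written at the very end, letting the consumer recover
 * the actual code size from a fixed location. */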
2699 CameraBlob blob{CameraBlobId::JPEG, static_cast<int32_t>(jpegCodeSize)};
2700 void* blobDst = reinterpret_cast<void*>(reinterpret_cast<uintptr_t>(bufPtr) + maxJpegCodeSize -
2701 sizeof(CameraBlob));
2702 memcpy(blobDst, &blob, sizeof(CameraBlob));
2703
2704 /* Unlock the HAL jpeg code buffer */
2705 int relFence = sHandleImporter.unlock(*(halBuf.bufPtr));
2706 if (relFence >= 0) {
2707 halBuf.acquireFence = relFence;
2708 }
2709
2710 /* Check if our JPEG actually succeeded */
2711 if (ret != 0) {
2712 return lfail("%s: encodeJpegYU12 failed with %d", __FUNCTION__, ret);
2713 }
2714
2715 ALOGV("%s: encoded JPEG (ret:%d) with Q:%d max size: %zu", __FUNCTION__, ret, jpegQuality,
2716 maxJpegCodeSize);
2717
2718 return 0;
2719}
2720
2721void ExternalCameraDeviceSession::OutputThread::clearIntermediateBuffers() {
2722 std::lock_guard<std::mutex> lk(mBufferLock);
2723 mYu12Frame.reset();
2724 mYu12ThumbFrame.reset();
2725 mIntermediateBuffers.clear();
2726 mMuteTestPatternFrame.clear();
2727 mBlobBufferSize = 0;
2728}
2729
2730bool ExternalCameraDeviceSession::OutputThread::threadLoop() {
2731 std::shared_ptr<HalRequest> req;
2732 auto parent = mParent.lock();
2733 if (parent == nullptr) {
2734 ALOGE("%s: session has been disconnected!", __FUNCTION__);
2735 return false;
2736 }
2737
2738 // TODO: maybe we need to set up a sensor thread to dq/enq v4l frames
2739 // regularly to prevent the v4l buffer queue from filling with stale
2740 // buffers when the app doesn't program a preview request
2741 waitForNextRequest(&req);
2742 if (req == nullptr) {
2743 // No new request, wait again
2744 return true;
2745 }
2746
2747 auto onDeviceError = [&](auto... args) {
2748 ALOGE(args...);
2749 parent->notifyError(req->frameNumber, /*stream*/ -1, ErrorCode::ERROR_DEVICE);
2750 signalRequestDone();
2751 return false;
2752 };
2753
2754 if (req->frameIn->mFourcc != V4L2_PIX_FMT_MJPEG && req->frameIn->mFourcc != V4L2_PIX_FMT_Z16) {
2755 return onDeviceError("%s: do not support V4L2 format %c%c%c%c", __FUNCTION__,
2756 req->frameIn->mFourcc & 0xFF, (req->frameIn->mFourcc >> 8) & 0xFF,
2757 (req->frameIn->mFourcc >> 16) & 0xFF,
2758 (req->frameIn->mFourcc >> 24) & 0xFF);
2759 }
2760
2761 int res = requestBufferStart(req->buffers);
2762 if (res != 0) {
2763 ALOGE("%s: send BufferRequest failed! res %d", __FUNCTION__, res);
2764 return onDeviceError("%s: failed to send buffer request!", __FUNCTION__);
2765 }
2766
2767 std::unique_lock<std::mutex> lk(mBufferLock);
2768 // Convert input V4L2 frame to YU12 of the same size
2769 // TODO: see if we can save some computation by converting to YV12 here
2770 uint8_t* inData;
2771 size_t inDataSize;
2772 if (req->frameIn->getData(&inData, &inDataSize) != 0) {
2773 lk.unlock();
2774 return onDeviceError("%s: V4L2 buffer map failed", __FUNCTION__);
2775 }
2776
2777 // Process camera mute state
2778 auto testPatternMode = req->setting.find(ANDROID_SENSOR_TEST_PATTERN_MODE);
2779 if (testPatternMode.count == 1) {
2780 if (mCameraMuted != (testPatternMode.data.u8[0] != ANDROID_SENSOR_TEST_PATTERN_MODE_OFF)) {
2781 mCameraMuted = !mCameraMuted;
2782 // Get solid color for test pattern, if any was set
2783 if (testPatternMode.data.u8[0] == ANDROID_SENSOR_TEST_PATTERN_MODE_SOLID_COLOR) {
2784 auto entry = req->setting.find(ANDROID_SENSOR_TEST_PATTERN_DATA);
2785 if (entry.count == 4) {
2786 // Update the mute frame if the pattern color has changed
2787 if (memcmp(entry.data.i32, mTestPatternData, sizeof(mTestPatternData)) != 0) {
2788 memcpy(mTestPatternData, entry.data.i32, sizeof(mTestPatternData));
2789 // Fill the mute frame with the solid color, use only 8 MSB of RGGB as RGB
2790 for (int i = 0; i < mMuteTestPatternFrame.size(); i += 3) {
2791 mMuteTestPatternFrame[i] = entry.data.i32[0] >> 24;
2792 mMuteTestPatternFrame[i + 1] = entry.data.i32[1] >> 24;
2793 mMuteTestPatternFrame[i + 2] = entry.data.i32[3] >> 24;
2794 }
2795 }
2796 }
2797 }
2798 }
2799 }
2800
2801 // TODO: in some special case maybe we can decode jpg directly to gralloc output?
2802 if (req->frameIn->mFourcc == V4L2_PIX_FMT_MJPEG) {
2803 ATRACE_BEGIN("MJPGtoI420");
2804 res = 0;
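// When muted, synthesize the frame from the solid-color test pattern buffer
// (packed RGB, hence libyuv::FOURCC_RAW) instead of decoding the MJPEG input.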
2805 if (mCameraMuted) {
2806 res = libyuv::ConvertToI420(
2807 mMuteTestPatternFrame.data(), mMuteTestPatternFrame.size(),
2808 static_cast<uint8_t*>(mYu12FrameLayout.y), mYu12FrameLayout.yStride,
2809 static_cast<uint8_t*>(mYu12FrameLayout.cb), mYu12FrameLayout.cStride,
2810 static_cast<uint8_t*>(mYu12FrameLayout.cr), mYu12FrameLayout.cStride, 0, 0,
2811 mYu12Frame->mWidth, mYu12Frame->mHeight, mYu12Frame->mWidth,
2812 mYu12Frame->mHeight, libyuv::kRotate0, libyuv::FOURCC_RAW);
2813 } else {
2814 res = libyuv::MJPGToI420(
2815 inData, inDataSize, static_cast<uint8_t*>(mYu12FrameLayout.y),
2816 mYu12FrameLayout.yStride, static_cast<uint8_t*>(mYu12FrameLayout.cb),
2817 mYu12FrameLayout.cStride, static_cast<uint8_t*>(mYu12FrameLayout.cr),
2818 mYu12FrameLayout.cStride, mYu12Frame->mWidth, mYu12Frame->mHeight,
2819 mYu12Frame->mWidth, mYu12Frame->mHeight);
2820 }
2821 ATRACE_END();
2822
2823 if (res != 0) {
2824 // For some webcam, the first few V4L2 frames might be malformed...
2825 ALOGE("%s: Convert V4L2 frame to YU12 failed! res %d", __FUNCTION__, res);
2826 lk.unlock();
2827 Status st = parent->processCaptureRequestError(req);
2828 if (st != Status::OK) {
2829 return onDeviceError("%s: failed to process capture request error!", __FUNCTION__);
2830 }
2831 signalRequestDone();
2832 return true;
2833 }
2834 }
2835
2836 ATRACE_BEGIN("Wait for BufferRequest done");
2837 res = waitForBufferRequestDone(&req->buffers);
2838 ATRACE_END();
2839
2840 if (res != 0) {
2841 ALOGE("%s: wait for BufferRequest done failed! res %d", __FUNCTION__, res);
2842 lk.unlock();
2843 return onDeviceError("%s: failed to process buffer request error!", __FUNCTION__);
2844 }
2845
2846 ALOGV("%s processing new request", __FUNCTION__);
2847 const int kSyncWaitTimeoutMs = 500;
2848 for (auto& halBuf : req->buffers) {
2849 if (*(halBuf.bufPtr) == nullptr) {
2850 ALOGW("%s: buffer for stream %d missing", __FUNCTION__, halBuf.streamId);
2851 halBuf.fenceTimeout = true;
2852 } else if (halBuf.acquireFence >= 0) {
2853 int ret = sync_wait(halBuf.acquireFence, kSyncWaitTimeoutMs);
2854 if (ret) {
2855 halBuf.fenceTimeout = true;
2856 } else {
2857 ::close(halBuf.acquireFence);
2858 halBuf.acquireFence = -1;
2859 }
2860 }
2861
2862 if (halBuf.fenceTimeout) {
2863 continue;
2864 }
2865
2866 // Gralloc lockYCbCr the buffer
2867 switch (halBuf.format) {
2868 case PixelFormat::BLOB: {
2869 int ret = createJpegLocked(halBuf, req->setting);
2870
2871 if (ret != 0) {
2872 lk.unlock();
2873 return onDeviceError("%s: createJpegLocked failed with %d", __FUNCTION__, ret);
2874 }
2875 } break;
2876 case PixelFormat::Y16: {
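// Depth (V4L2 Z16) payloads share Y16's 16-bit-per-pixel layout, so the
// frame is copied through without conversion.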
2877 void* outLayout = sHandleImporter.lock(
2878 *(halBuf.bufPtr), static_cast<uint64_t>(halBuf.usage), inDataSize);
2879
2880 std::memcpy(outLayout, inData, inDataSize);
2881
2882 int relFence = sHandleImporter.unlock(*(halBuf.bufPtr));
2883 if (relFence >= 0) {
2884 halBuf.acquireFence = relFence;
2885 }
2886 } break;
2887 case PixelFormat::YCBCR_420_888:
2888 case PixelFormat::YV12: {
2889 android::Rect outRect{0, 0, static_cast<int32_t>(halBuf.width),
2890 static_cast<int32_t>(halBuf.height)};
2891 android_ycbcr result = sHandleImporter.lockYCbCr(
2892 *(halBuf.bufPtr), static_cast<uint64_t>(halBuf.usage), outRect);
2893 ALOGV("%s: outLayout y %p cb %p cr %p y_str %zu c_str %zu c_step %zu", __FUNCTION__,
2894 result.y, result.cb, result.cr, result.ystride, result.cstride,
2895 result.chroma_step);
2896 if (result.ystride > UINT32_MAX || result.cstride > UINT32_MAX ||
2897 result.chroma_step > UINT32_MAX) {
2898 return onDeviceError("%s: lockYCbCr failed. Unexpected values!", __FUNCTION__);
2899 }
2900 YCbCrLayout outLayout = {.y = result.y,
2901 .cb = result.cb,
2902 .cr = result.cr,
2903 .yStride = static_cast<uint32_t>(result.ystride),
2904 .cStride = static_cast<uint32_t>(result.cstride),
2905 .chromaStep = static_cast<uint32_t>(result.chroma_step)};
2906
2907 // Convert to output buffer size/format
2908 uint32_t outputFourcc = getFourCcFromLayout(outLayout);
2909 ALOGV("%s: converting to format %c%c%c%c", __FUNCTION__, outputFourcc & 0xFF,
2910 (outputFourcc >> 8) & 0xFF, (outputFourcc >> 16) & 0xFF,
2911 (outputFourcc >> 24) & 0xFF);
2912
2913 YCbCrLayout cropAndScaled;
2914 ATRACE_BEGIN("cropAndScaleLocked");
2915 int ret = cropAndScaleLocked(mYu12Frame, Size{halBuf.width, halBuf.height},
2916 &cropAndScaled);
2917 ATRACE_END();
2918 if (ret != 0) {
2919 lk.unlock();
2920 return onDeviceError("%s: crop and scale failed!", __FUNCTION__);
2921 }
2922
2923 Size sz{halBuf.width, halBuf.height};
2924 ATRACE_BEGIN("formatConvert");
2925 ret = formatConvert(cropAndScaled, outLayout, sz, outputFourcc);
2926 ATRACE_END();
2927 if (ret != 0) {
2928 lk.unlock();
2929 return onDeviceError("%s: format conversion failed!", __FUNCTION__);
2930 }
2931 int relFence = sHandleImporter.unlock(*(halBuf.bufPtr));
2932 if (relFence >= 0) {
2933 halBuf.acquireFence = relFence;
2934 }
2935 } break;
2936 default:
2937 lk.unlock();
2938 return onDeviceError("%s: unknown output format %x", __FUNCTION__, halBuf.format);
2939 }
2940 } // for each buffer
2941 mScaledYu12Frames.clear();
2942
2943 // Don't hold the lock while calling back to parent
2944 lk.unlock();
2945 Status st = parent->processCaptureResult(req);
2946 if (st != Status::OK) {
2947 return onDeviceError("%s: failed to process capture result!", __FUNCTION__);
2948 }
2949 signalRequestDone();
2950 return true;
2951}
2952
2953// End ExternalCameraDeviceSession::OutputThread functions
2954
2955} // namespace implementation
2956} // namespace device
2957} // namespace camera
2958} // namespace hardware
2959} // namespace android