blob: 9a5bd1e5f0b23fe236acd5341bf0e11e043b86b1 [file] [log] [blame]
Jan Sebechlebsky5cb39962023-11-22 17:33:07 +01001/*
2 * Copyright (C) 2023 The Android Open Source Project
3 *
4 * Licensed under the Apache License, Version 2.0 (the "License");
5 * you may not use this file except in compliance with the License.
6 * You may obtain a copy of the License at
7 *
8 * http://www.apache.org/licenses/LICENSE-2.0
9 *
10 * Unless required by applicable law or agreed to in writing, software
11 * distributed under the License is distributed on an "AS IS" BASIS,
12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 * See the License for the specific language governing permissions and
14 * limitations under the License.
15 */
16
17#define LOG_TAG "VirtualCameraRenderThread"
18#include "VirtualCameraRenderThread.h"
19
#include <chrono>
#include <cstdint>
#include <cstring>
#include <future>
#include <memory>
#include <mutex>
#include <thread>
#include <utility>
#include <vector>
Jan Sebechlebsky5cb39962023-11-22 17:33:07 +010028
Jan Sebechlebsky4ce32082024-02-14 16:02:11 +010029#include "Exif.h"
Jan Sebechlebsky9ae496f2023-12-05 15:56:28 +010030#include "GLES/gl.h"
Biswarup Pal8ad8bc52024-02-08 13:41:44 +000031#include "VirtualCameraDevice.h"
Jan Sebechlebsky5cb39962023-11-22 17:33:07 +010032#include "VirtualCameraSessionContext.h"
33#include "aidl/android/hardware/camera/common/Status.h"
34#include "aidl/android/hardware/camera/device/BufferStatus.h"
Jan Sebechlebsky4ce32082024-02-14 16:02:11 +010035#include "aidl/android/hardware/camera/device/CameraBlob.h"
36#include "aidl/android/hardware/camera/device/CameraBlobId.h"
Jan Sebechlebsky5cb39962023-11-22 17:33:07 +010037#include "aidl/android/hardware/camera/device/CameraMetadata.h"
38#include "aidl/android/hardware/camera/device/CaptureResult.h"
39#include "aidl/android/hardware/camera/device/ErrorCode.h"
40#include "aidl/android/hardware/camera/device/ICameraDeviceCallback.h"
41#include "aidl/android/hardware/camera/device/NotifyMsg.h"
42#include "aidl/android/hardware/camera/device/ShutterMsg.h"
43#include "aidl/android/hardware/camera/device/StreamBuffer.h"
44#include "android-base/thread_annotations.h"
45#include "android/binder_auto_utils.h"
46#include "android/hardware_buffer.h"
Jan Sebechlebsky2f4478e2024-05-08 17:26:42 +020047#include "hardware/gralloc.h"
Vadim Caenc0aff132024-03-12 17:20:07 +010048#include "system/camera_metadata.h"
Jan Sebechlebsky042d1fb2023-12-12 16:37:00 +010049#include "ui/GraphicBuffer.h"
Jan Sebechlebskyb3771312024-03-15 10:38:02 +010050#include "ui/Rect.h"
Jan Sebechlebsky5cb39962023-11-22 17:33:07 +010051#include "util/EglFramebuffer.h"
52#include "util/JpegUtil.h"
Jan Sebechlebsky4ce32082024-02-14 16:02:11 +010053#include "util/MetadataUtil.h"
Jan Sebechlebsky5cb39962023-11-22 17:33:07 +010054#include "util/Util.h"
55#include "utils/Errors.h"
56
57namespace android {
58namespace companion {
59namespace virtualcamera {
60
61using ::aidl::android::hardware::camera::common::Status;
62using ::aidl::android::hardware::camera::device::BufferStatus;
Jan Sebechlebsky4ce32082024-02-14 16:02:11 +010063using ::aidl::android::hardware::camera::device::CameraBlob;
64using ::aidl::android::hardware::camera::device::CameraBlobId;
Jan Sebechlebsky5cb39962023-11-22 17:33:07 +010065using ::aidl::android::hardware::camera::device::CameraMetadata;
66using ::aidl::android::hardware::camera::device::CaptureResult;
67using ::aidl::android::hardware::camera::device::ErrorCode;
68using ::aidl::android::hardware::camera::device::ErrorMsg;
69using ::aidl::android::hardware::camera::device::ICameraDeviceCallback;
70using ::aidl::android::hardware::camera::device::NotifyMsg;
71using ::aidl::android::hardware::camera::device::ShutterMsg;
72using ::aidl::android::hardware::camera::device::Stream;
73using ::aidl::android::hardware::camera::device::StreamBuffer;
74using ::aidl::android::hardware::graphics::common::PixelFormat;
75using ::android::base::ScopedLockAssertion;
76
Jan Sebechlebsky4ce32082024-02-14 16:02:11 +010077using ::android::hardware::camera::common::helper::ExifUtils;
78
Jan Sebechlebsky5cb39962023-11-22 17:33:07 +010079namespace {
80
81using namespace std::chrono_literals;
82
83static constexpr std::chrono::milliseconds kAcquireFenceTimeout = 500ms;
84
Jan Sebechlebskybb01c1d2024-02-12 11:41:37 +010085// See REQUEST_PIPELINE_DEPTH in CaptureResult.java.
86// This roughly corresponds to frame latency, we set to
87// documented minimum of 2.
88static constexpr uint8_t kPipelineDepth = 2;
89
Jan Sebechlebsky4ce32082024-02-14 16:02:11 +010090static constexpr size_t kJpegThumbnailBufferSize = 32 * 1024; // 32 KiB
91
// Builds the CameraMetadata attached to every CaptureResult.
//
// All of the keys used in the response need to be referenced in
// availableResultKeys in CameraCharacteristics (see initCameraCharacteristics
// in VirtualCameraDevice.cc).
//
// timestamp - sensor timestamp of the captured frame (nanoseconds).
// requestSettings - per-request settings echoed back into the result.
// reportedSensorSize - sensor size advertised to the client, used to report
//     the (full-sensor) crop region.
//
// Returns empty CameraMetadata if building the metadata fails.
CameraMetadata createCaptureResultMetadata(
    const std::chrono::nanoseconds timestamp,
    const RequestSettings& requestSettings,
    const Resolution reportedSensorSize) {
  MetadataBuilder builder =
      MetadataBuilder()
          .setAberrationCorrectionMode(
              ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF)
          .setControlAeAvailableAntibandingModes(
              {ANDROID_CONTROL_AE_ANTIBANDING_MODE_OFF})
          .setControlAeAntibandingMode(ANDROID_CONTROL_AE_ANTIBANDING_MODE_OFF)
          .setControlAeExposureCompensation(0)
          .setControlAeLockAvailable(false)
          .setControlAeLock(ANDROID_CONTROL_AE_LOCK_OFF)
          .setControlAeMode(ANDROID_CONTROL_AE_MODE_ON)
          .setControlAePrecaptureTrigger(
              // Limited devices are expected to have precapture ae enabled
              // and respond to cancellation request. Since we don't actually
              // support AE at all, let's just respect the cancellation
              // expectation in case it's requested.
              requestSettings.aePrecaptureTrigger ==
                      ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER_CANCEL
                  ? ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER_CANCEL
                  : ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER_IDLE)
          .setControlAeState(ANDROID_CONTROL_AE_STATE_INACTIVE)
          .setControlAfMode(ANDROID_CONTROL_AF_MODE_OFF)
          .setControlAfTrigger(ANDROID_CONTROL_AF_TRIGGER_IDLE)
          .setControlAfState(ANDROID_CONTROL_AF_STATE_INACTIVE)
          .setControlAwbMode(ANDROID_CONTROL_AWB_MODE_AUTO)
          .setControlAwbLock(ANDROID_CONTROL_AWB_LOCK_OFF)
          .setControlAwbState(ANDROID_CONTROL_AWB_STATE_INACTIVE)
          .setControlCaptureIntent(requestSettings.captureIntent)
          .setControlEffectMode(ANDROID_CONTROL_EFFECT_MODE_OFF)
          .setControlMode(ANDROID_CONTROL_MODE_AUTO)
          .setControlSceneMode(ANDROID_CONTROL_SCENE_MODE_DISABLED)
          .setControlVideoStabilizationMode(
              ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_OFF)
          .setCropRegion(0, 0, reportedSensorSize.width,
                         reportedSensorSize.height)
          .setFaceDetectMode(ANDROID_STATISTICS_FACE_DETECT_MODE_OFF)
          .setFlashState(ANDROID_FLASH_STATE_UNAVAILABLE)
          .setFlashMode(ANDROID_FLASH_MODE_OFF)
          .setFocalLength(VirtualCameraDevice::kFocalLength)
          .setJpegQuality(requestSettings.jpegQuality)
          .setJpegOrientation(requestSettings.jpegOrientation)
          .setJpegThumbnailSize(requestSettings.thumbnailResolution.width,
                                requestSettings.thumbnailResolution.height)
          .setJpegThumbnailQuality(requestSettings.thumbnailJpegQuality)
          .setLensOpticalStabilizationMode(
              ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF)
          .setNoiseReductionMode(ANDROID_NOISE_REDUCTION_MODE_OFF)
          .setPipelineDepth(kPipelineDepth)
          .setSensorTimestamp(timestamp)
          .setStatisticsHotPixelMapMode(
              ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_OFF)
          .setStatisticsLensShadingMapMode(
              ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_OFF)
          .setStatisticsSceneFlicker(ANDROID_STATISTICS_SCENE_FLICKER_NONE);

  // Optional keys are only reported when present in the request.
  if (requestSettings.fpsRange.has_value()) {
    builder.setControlAeTargetFpsRange(requestSettings.fpsRange.value());
  }

  if (requestSettings.gpsCoordinates.has_value()) {
    const GpsCoordinates& coordinates = requestSettings.gpsCoordinates.value();
    builder.setJpegGpsCoordinates(coordinates);
  }

  std::unique_ptr<CameraMetadata> metadata = builder.build();

  if (metadata == nullptr) {
    ALOGE("%s: Failed to build capture result metadata", __func__);
    return CameraMetadata();
  }
  return std::move(*metadata);
}
171
172NotifyMsg createShutterNotifyMsg(int frameNumber,
173 std::chrono::nanoseconds timestamp) {
174 NotifyMsg msg;
175 msg.set<NotifyMsg::Tag::shutter>(ShutterMsg{
176 .frameNumber = frameNumber,
177 .timestamp = timestamp.count(),
178 });
179 return msg;
180}
181
182NotifyMsg createBufferErrorNotifyMsg(int frameNumber, int streamId) {
183 NotifyMsg msg;
184 msg.set<NotifyMsg::Tag::error>(ErrorMsg{.frameNumber = frameNumber,
185 .errorStreamId = streamId,
186 .errorCode = ErrorCode::ERROR_BUFFER});
187 return msg;
188}
189
190NotifyMsg createRequestErrorNotifyMsg(int frameNumber) {
191 NotifyMsg msg;
192 msg.set<NotifyMsg::Tag::error>(ErrorMsg{
Jan Sebechlebskyb0d8cab2023-11-28 10:55:04 +0100193 .frameNumber = frameNumber,
194 // errorStreamId needs to be set to -1 for ERROR_REQUEST
195 // (not tied to specific stream).
196 .errorStreamId = -1,
197 .errorCode = ErrorCode::ERROR_REQUEST});
Jan Sebechlebsky5cb39962023-11-22 17:33:07 +0100198 return msg;
199}
200
Jan Sebechlebsky042d1fb2023-12-12 16:37:00 +0100201std::shared_ptr<EglFrameBuffer> allocateTemporaryFramebuffer(
202 EGLDisplay eglDisplay, const uint width, const int height) {
203 const AHardwareBuffer_Desc desc{
204 .width = static_cast<uint32_t>(width),
205 .height = static_cast<uint32_t>(height),
206 .layers = 1,
207 .format = AHARDWAREBUFFER_FORMAT_Y8Cb8Cr8_420,
208 .usage = AHARDWAREBUFFER_USAGE_GPU_FRAMEBUFFER |
209 AHARDWAREBUFFER_USAGE_CPU_READ_OFTEN,
210 .rfu0 = 0,
211 .rfu1 = 0};
212
213 AHardwareBuffer* hwBufferPtr;
214 int status = AHardwareBuffer_allocate(&desc, &hwBufferPtr);
215 if (status != NO_ERROR) {
216 ALOGE(
217 "%s: Failed to allocate hardware buffer for temporary framebuffer: %d",
218 __func__, status);
219 return nullptr;
220 }
221
222 return std::make_shared<EglFrameBuffer>(
223 eglDisplay,
224 std::shared_ptr<AHardwareBuffer>(hwBufferPtr, AHardwareBuffer_release));
225}
226
227bool isYuvFormat(const PixelFormat pixelFormat) {
228 switch (static_cast<android_pixel_format_t>(pixelFormat)) {
229 case HAL_PIXEL_FORMAT_YCBCR_422_I:
230 case HAL_PIXEL_FORMAT_YCBCR_422_SP:
231 case HAL_PIXEL_FORMAT_Y16:
232 case HAL_PIXEL_FORMAT_YV12:
233 case HAL_PIXEL_FORMAT_YCBCR_420_888:
234 return true;
235 default:
236 return false;
237 }
238}
239
Jan Sebechlebsky4ce32082024-02-14 16:02:11 +0100240std::vector<uint8_t> createExif(
Vadim Caenc0aff132024-03-12 17:20:07 +0100241 Resolution imageSize, const CameraMetadata resultMetadata,
242 const std::vector<uint8_t>& compressedThumbnail = {}) {
Jan Sebechlebsky4ce32082024-02-14 16:02:11 +0100243 std::unique_ptr<ExifUtils> exifUtils(ExifUtils::create());
244 exifUtils->initialize();
Vadim Caenc0aff132024-03-12 17:20:07 +0100245
246 // Make a copy of the metadata in order to converting it the HAL metadata
247 // format (as opposed to the AIDL class) and use the setFromMetadata method
248 // from ExifUtil
249 camera_metadata_t* rawSettings =
250 clone_camera_metadata((camera_metadata_t*)resultMetadata.metadata.data());
251 if (rawSettings != nullptr) {
252 android::hardware::camera::common::helper::CameraMetadata halMetadata(
253 rawSettings);
254 exifUtils->setFromMetadata(halMetadata, imageSize.width, imageSize.height);
255 }
256 exifUtils->setMake(VirtualCameraDevice::kDefaultMakeAndModel);
257 exifUtils->setModel(VirtualCameraDevice::kDefaultMakeAndModel);
258 exifUtils->setFlash(0);
Jan Sebechlebsky4ce32082024-02-14 16:02:11 +0100259
260 std::vector<uint8_t> app1Data;
261
262 size_t thumbnailDataSize = compressedThumbnail.size();
263 const void* thumbnailData =
264 thumbnailDataSize > 0
265 ? reinterpret_cast<const void*>(compressedThumbnail.data())
266 : nullptr;
267
268 if (!exifUtils->generateApp1(thumbnailData, thumbnailDataSize)) {
269 ALOGE("%s: Failed to generate APP1 segment for EXIF metadata", __func__);
270 return app1Data;
271 }
272
273 const uint8_t* data = exifUtils->getApp1Buffer();
274 const size_t size = exifUtils->getApp1Length();
275
276 app1Data.insert(app1Data.end(), data, data + size);
277 return app1Data;
278}
279
Jan Sebechlebskyb8282672024-05-22 10:43:37 +0200280std::chrono::nanoseconds getMaxFrameDuration(
281 const RequestSettings& requestSettings) {
282 if (requestSettings.fpsRange.has_value()) {
283 return std::chrono::nanoseconds(static_cast<uint64_t>(
284 1e9 / std::max(1, requestSettings.fpsRange->minFps)));
285 }
286 return std::chrono::nanoseconds(
287 static_cast<uint64_t>(1e9 / VirtualCameraDevice::kMinFps));
288}
289
Jan Sebechlebsky5cb39962023-11-22 17:33:07 +0100290} // namespace
291
292CaptureRequestBuffer::CaptureRequestBuffer(int streamId, int bufferId,
293 sp<Fence> fence)
294 : mStreamId(streamId), mBufferId(bufferId), mFence(fence) {
295}
296
297int CaptureRequestBuffer::getStreamId() const {
298 return mStreamId;
299}
300
301int CaptureRequestBuffer::getBufferId() const {
302 return mBufferId;
303}
304
305sp<Fence> CaptureRequestBuffer::getFence() const {
306 return mFence;
307}
308
309VirtualCameraRenderThread::VirtualCameraRenderThread(
Jan Sebechlebskybb01c1d2024-02-12 11:41:37 +0100310 VirtualCameraSessionContext& sessionContext,
311 const Resolution inputSurfaceSize, const Resolution reportedSensorSize,
Jan Sebechlebsky288900f2024-05-24 14:47:54 +0200312 std::shared_ptr<ICameraDeviceCallback> cameraDeviceCallback)
Jan Sebechlebsky5cb39962023-11-22 17:33:07 +0100313 : mCameraDeviceCallback(cameraDeviceCallback),
Jan Sebechlebskybb01c1d2024-02-12 11:41:37 +0100314 mInputSurfaceSize(inputSurfaceSize),
315 mReportedSensorSize(reportedSensorSize),
Jan Sebechlebsky9fcd0262024-05-31 15:20:09 +0200316 mSessionContext(sessionContext),
317 mInputSurfaceFuture(mInputSurfacePromise.get_future()) {
Jan Sebechlebsky5cb39962023-11-22 17:33:07 +0100318}
319
// Requests the render thread to exit and blocks until it has joined.
VirtualCameraRenderThread::~VirtualCameraRenderThread() {
  stop();
  // mThread is only joinable if start() was called.
  if (mThread.joinable()) {
    mThread.join();
  }
}
326
// Task representing a single capture request to be processed by the render
// thread: the frame number, the output buffers to render into, and the
// request settings to apply.
ProcessCaptureRequestTask::ProcessCaptureRequestTask(
    int frameNumber, const std::vector<CaptureRequestBuffer>& requestBuffers,
    const RequestSettings& requestSettings)
    : mFrameNumber(frameNumber),
      mBuffers(requestBuffers),
      mRequestSettings(requestSettings) {
}

// Returns the frame number of the capture request.
int ProcessCaptureRequestTask::getFrameNumber() const {
  return mFrameNumber;
}

// Returns the output buffers this request should render into.
const std::vector<CaptureRequestBuffer>& ProcessCaptureRequestTask::getBuffers()
    const {
  return mBuffers;
}

// Returns the settings associated with this capture request.
const RequestSettings& ProcessCaptureRequestTask::getRequestSettings() const {
  return mRequestSettings;
}
347
Jan Sebechlebsky5cb39962023-11-22 17:33:07 +0100348void VirtualCameraRenderThread::enqueueTask(
349 std::unique_ptr<ProcessCaptureRequestTask> task) {
350 std::lock_guard<std::mutex> lock(mLock);
351 mQueue.emplace_back(std::move(task));
352 mCondVar.notify_one();
353}
354
355void VirtualCameraRenderThread::flush() {
356 std::lock_guard<std::mutex> lock(mLock);
Jan Sebechlebskyb0d8cab2023-11-28 10:55:04 +0100357 while (!mQueue.empty()) {
358 std::unique_ptr<ProcessCaptureRequestTask> task = std::move(mQueue.front());
359 mQueue.pop_front();
Jan Sebechlebsky5cb39962023-11-22 17:33:07 +0100360 flushCaptureRequest(*task);
361 }
362}
363
364void VirtualCameraRenderThread::start() {
365 mThread = std::thread(&VirtualCameraRenderThread::threadLoop, this);
366}
367
368void VirtualCameraRenderThread::stop() {
369 {
370 std::lock_guard<std::mutex> lock(mLock);
371 mPendingExit = true;
372 mCondVar.notify_one();
373 }
374}
375
// Returns the input Surface clients should render into. Blocks until the
// render thread has initialized EGL and published the surface.
//
// NOTE(review): std::future::get() may only be called once — a second call
// on an invalidated future throws. Confirm callers invoke this at most
// once, or consider std::shared_future in the header.
sp<Surface> VirtualCameraRenderThread::getInputSurface() {
  return mInputSurfaceFuture.get();
}
379
// Blocks until a task is available or exit has been requested.
// Returns the next task to process, or nullptr when the thread should exit.
std::unique_ptr<ProcessCaptureRequestTask>
VirtualCameraRenderThread::dequeueTask() {
  std::unique_lock<std::mutex> lock(mLock);
  // Clang's thread safety analysis doesn't perform alias analysis,
  // so it doesn't support moveable std::unique_lock.
  //
  // Lock assertion below is basically explicit declaration that
  // the lock is held in this scope, which is true, since it's only
  // released during waiting inside mCondVar.wait calls.
  ScopedLockAssertion lockAssertion(mLock);

  mCondVar.wait(lock, [this]() REQUIRES(mLock) {
    return mPendingExit || !mQueue.empty();
  });
  // Exit takes precedence over any tasks still left in the queue.
  if (mPendingExit) {
    return nullptr;
  }
  std::unique_ptr<ProcessCaptureRequestTask> task = std::move(mQueue.front());
  mQueue.pop_front();
  return task;
}
401
// Main loop of the render thread.
//
// Initializes the EGL state (display context, YUV & RGB texture programs,
// input surface texture), publishes the input surface to unblock
// getInputSurface(), then processes capture request tasks until stop() is
// requested. All EGL objects are created and destroyed on this thread.
void VirtualCameraRenderThread::threadLoop() {
  ALOGV("Render thread starting");

  mEglDisplayContext = std::make_unique<EglDisplayContext>();
  mEglTextureYuvProgram =
      std::make_unique<EglTextureProgram>(EglTextureProgram::TextureFormat::YUV);
  mEglTextureRgbProgram = std::make_unique<EglTextureProgram>(
      EglTextureProgram::TextureFormat::RGBA);
  mEglSurfaceTexture = std::make_unique<EglSurfaceTexture>(
      mInputSurfaceSize.width, mInputSurfaceSize.height);

  // Unblocks any getInputSurface() callers waiting on the future.
  mInputSurfacePromise.set_value(mEglSurfaceTexture->getSurface());

  // dequeueTask() returns nullptr once stop() has been requested.
  while (std::unique_ptr<ProcessCaptureRequestTask> task = dequeueTask()) {
    processCaptureRequest(*task);
  }

  // Destroy EGL utilities still on the render thread.
  mEglSurfaceTexture.reset();
  mEglTextureRgbProgram.reset();
  mEglTextureYuvProgram.reset();
  mEglDisplayContext.reset();

  ALOGV("Render thread exiting");
}
427
// Processes one capture request: throttles to the requested fps range,
// acquires the most recent frame from the input Surface, renders it into
// every output buffer of the request, and reports the shutter
// notification, per-buffer errors and the capture result back through the
// camera device callback.
void VirtualCameraRenderThread::processCaptureRequest(
    const ProcessCaptureRequestTask& request) {
  std::chrono::nanoseconds timestamp =
      std::chrono::duration_cast<std::chrono::nanoseconds>(
          std::chrono::steady_clock::now().time_since_epoch());
  // Atomically publish this acquisition time and fetch the previous one;
  // the delta drives the fps throttling below.
  const std::chrono::nanoseconds lastAcquisitionTimestamp(
      mLastAcquisitionTimestampNanoseconds.exchange(timestamp.count(),
                                                    std::memory_order_relaxed));

  if (request.getRequestSettings().fpsRange) {
    const int maxFps =
        std::max(1, request.getRequestSettings().fpsRange->maxFps);
    const std::chrono::nanoseconds minFrameDuration(
        static_cast<uint64_t>(1e9 / maxFps));
    const std::chrono::nanoseconds frameDuration =
        timestamp - lastAcquisitionTimestamp;
    if (frameDuration < minFrameDuration) {
      // We're too fast for the configured maxFps, let's wait a bit.
      const std::chrono::nanoseconds sleepTime =
          minFrameDuration - frameDuration;
      ALOGV("Current frame duration would be %" PRIu64
            " ns corresponding to, "
            "sleeping for %" PRIu64
            " ns before updating texture to match maxFps %d",
            static_cast<uint64_t>(frameDuration.count()),
            static_cast<uint64_t>(sleepTime.count()), maxFps);

      std::this_thread::sleep_for(sleepTime);
      // Refresh the timestamp after sleeping so the result metadata and the
      // stored acquisition time reflect the actual acquisition moment.
      timestamp = std::chrono::duration_cast<std::chrono::nanoseconds>(
          std::chrono::steady_clock::now().time_since_epoch());
      mLastAcquisitionTimestampNanoseconds.store(timestamp.count(),
                                                 std::memory_order_relaxed);
    }
  }

  // Calculate the maximal amount of time we can afford to wait for next frame.
  const std::chrono::nanoseconds maxFrameDuration =
      getMaxFrameDuration(request.getRequestSettings());
  const std::chrono::nanoseconds elapsedDuration =
      timestamp - lastAcquisitionTimestamp;
  if (elapsedDuration < maxFrameDuration) {
    // We can afford to wait for next frame.
    // Note that if there's already new frame in the input Surface, the call
    // below returns immediately.
    bool gotNewFrame = mEglSurfaceTexture->waitForNextFrame(maxFrameDuration -
                                                            elapsedDuration);
    timestamp = std::chrono::duration_cast<std::chrono::nanoseconds>(
        std::chrono::steady_clock::now().time_since_epoch());
    if (!gotNewFrame) {
      ALOGV(
          "%s: No new frame received on input surface after waiting for "
          "%" PRIu64 "ns, repeating last frame.",
          __func__,
          static_cast<uint64_t>((timestamp - lastAcquisitionTimestamp).count()));
    }
    mLastAcquisitionTimestampNanoseconds.store(timestamp.count(),
                                               std::memory_order_relaxed);
  }
  // Acquire new (most recent) image from the Surface.
  mEglSurfaceTexture->updateTexture();

  CaptureResult captureResult;
  captureResult.fmqResultSize = 0;
  captureResult.frameNumber = request.getFrameNumber();
  // Partial result needs to be set to 1 when metadata are present.
  captureResult.partialResult = 1;
  captureResult.inputBuffer.streamId = -1;
  captureResult.physicalCameraMetadata.resize(0);
  captureResult.result = createCaptureResultMetadata(
      timestamp, request.getRequestSettings(), mReportedSensorSize);

  const std::vector<CaptureRequestBuffer>& buffers = request.getBuffers();
  captureResult.outputBuffers.resize(buffers.size());

  for (int i = 0; i < buffers.size(); ++i) {
    const CaptureRequestBuffer& reqBuffer = buffers[i];
    StreamBuffer& resBuffer = captureResult.outputBuffers[i];
    resBuffer.streamId = reqBuffer.getStreamId();
    resBuffer.bufferId = reqBuffer.getBufferId();
    resBuffer.status = BufferStatus::OK;

    const std::optional<Stream> streamConfig =
        mSessionContext.getStreamConfig(reqBuffer.getStreamId());

    if (!streamConfig.has_value()) {
      resBuffer.status = BufferStatus::ERROR;
      continue;
    }

    // BLOB (JPEG) streams go through the compression path; all other
    // formats are rendered directly into the stream buffer.
    auto status = streamConfig->format == PixelFormat::BLOB
                      ? renderIntoBlobStreamBuffer(
                            reqBuffer.getStreamId(), reqBuffer.getBufferId(),
                            captureResult.result, request.getRequestSettings(),
                            reqBuffer.getFence())
                      : renderIntoImageStreamBuffer(reqBuffer.getStreamId(),
                                                    reqBuffer.getBufferId(),
                                                    reqBuffer.getFence());
    if (!status.isOk()) {
      resBuffer.status = BufferStatus::ERROR;
    }
  }

  // Send the shutter notification plus a buffer-error message for every
  // output buffer that failed to render.
  std::vector<NotifyMsg> notifyMsg{
      createShutterNotifyMsg(request.getFrameNumber(), timestamp)};
  for (const StreamBuffer& resBuffer : captureResult.outputBuffers) {
    if (resBuffer.status != BufferStatus::OK) {
      notifyMsg.push_back(createBufferErrorNotifyMsg(request.getFrameNumber(),
                                                     resBuffer.streamId));
    }
  }

  auto status = mCameraDeviceCallback->notify(notifyMsg);
  if (!status.isOk()) {
    ALOGE("%s: notify call failed: %s", __func__,
          status.getDescription().c_str());
    return;
  }

  std::vector<::aidl::android::hardware::camera::device::CaptureResult>
      captureResults(1);
  captureResults[0] = std::move(captureResult);

  status = mCameraDeviceCallback->processCaptureResult(captureResults);
  if (!status.isOk()) {
    ALOGE("%s: processCaptureResult call failed: %s", __func__,
          status.getDescription().c_str());
    return;
  }

  ALOGV("%s: Successfully called processCaptureResult", __func__);
}
559
// Reports the given capture request as flushed without rendering anything:
// every output buffer is returned with BufferStatus::ERROR (its acquisition
// fence duplicated into the release fence), an ERROR_REQUEST notification
// is sent, and the (empty) capture result is delivered to the callback.
void VirtualCameraRenderThread::flushCaptureRequest(
    const ProcessCaptureRequestTask& request) {
  CaptureResult captureResult;
  captureResult.fmqResultSize = 0;
  captureResult.frameNumber = request.getFrameNumber();
  captureResult.inputBuffer.streamId = -1;

  const std::vector<CaptureRequestBuffer>& buffers = request.getBuffers();
  captureResult.outputBuffers.resize(buffers.size());

  for (int i = 0; i < buffers.size(); ++i) {
    const CaptureRequestBuffer& reqBuffer = buffers[i];
    StreamBuffer& resBuffer = captureResult.outputBuffers[i];
    resBuffer.streamId = reqBuffer.getStreamId();
    resBuffer.bufferId = reqBuffer.getBufferId();
    resBuffer.status = BufferStatus::ERROR;
    sp<Fence> fence = reqBuffer.getFence();
    if (fence != nullptr && fence->isValid()) {
      // Propagate the request's (never-waited-on) acquire fence back as the
      // release fence for this buffer.
      resBuffer.releaseFence.fds.emplace_back(fence->dup());
    }
  }

  auto status = mCameraDeviceCallback->notify(
      {createRequestErrorNotifyMsg(request.getFrameNumber())});
  if (!status.isOk()) {
    ALOGE("%s: notify call failed: %s", __func__,
          status.getDescription().c_str());
    return;
  }

  std::vector<::aidl::android::hardware::camera::device::CaptureResult>
      captureResults(1);
  captureResults[0] = std::move(captureResult);

  status = mCameraDeviceCallback->processCaptureResult(captureResults);
  if (!status.isOk()) {
    ALOGE("%s: processCaptureResult call failed: %s", __func__,
          status.getDescription().c_str());
  }
}
600
Jan Sebechlebsky4ce32082024-02-14 16:02:11 +0100601std::vector<uint8_t> VirtualCameraRenderThread::createThumbnail(
602 const Resolution resolution, const int quality) {
603 if (resolution.width == 0 || resolution.height == 0) {
604 ALOGV("%s: Skipping thumbnail creation, zero size requested", __func__);
605 return {};
606 }
607
608 ALOGV("%s: Creating thumbnail with size %d x %d, quality %d", __func__,
609 resolution.width, resolution.height, quality);
Jan Sebechlebskyb3771312024-03-15 10:38:02 +0100610 Resolution bufferSize = roundTo2DctSize(resolution);
Jan Sebechlebsky4ce32082024-02-14 16:02:11 +0100611 std::shared_ptr<EglFrameBuffer> framebuffer = allocateTemporaryFramebuffer(
Jan Sebechlebskyb3771312024-03-15 10:38:02 +0100612 mEglDisplayContext->getEglDisplay(), bufferSize.width, bufferSize.height);
Jan Sebechlebsky4ce32082024-02-14 16:02:11 +0100613 if (framebuffer == nullptr) {
614 ALOGE(
615 "Failed to allocate temporary framebuffer for JPEG thumbnail "
616 "compression");
617 return {};
618 }
619
620 // TODO(b/324383963) Add support for letterboxing if the thumbnail size
621 // doesn't correspond
622 // to input texture aspect ratio.
Jan Sebechlebskyb3771312024-03-15 10:38:02 +0100623 if (!renderIntoEglFramebuffer(*framebuffer, /*fence=*/nullptr,
624 Rect(resolution.width, resolution.height))
625 .isOk()) {
Jan Sebechlebsky4ce32082024-02-14 16:02:11 +0100626 ALOGE(
627 "Failed to render input texture into temporary framebuffer for JPEG "
628 "thumbnail");
629 return {};
630 }
631
Jan Sebechlebsky4ce32082024-02-14 16:02:11 +0100632 std::vector<uint8_t> compressedThumbnail;
633 compressedThumbnail.resize(kJpegThumbnailBufferSize);
Jan Sebechlebskyb3771312024-03-15 10:38:02 +0100634 ALOGE("%s: Compressing thumbnail %d x %d", __func__, resolution.width,
635 resolution.height);
636 std::optional<size_t> compressedSize =
637 compressJpeg(resolution.width, resolution.height, quality,
638 framebuffer->getHardwareBuffer(), {},
639 compressedThumbnail.size(), compressedThumbnail.data());
Jan Sebechlebsky4ce32082024-02-14 16:02:11 +0100640 if (!compressedSize.has_value()) {
641 ALOGE("%s: Failed to compress jpeg thumbnail", __func__);
642 return {};
643 }
644 compressedThumbnail.resize(compressedSize.value());
645 return compressedThumbnail;
646}
647
Jan Sebechlebsky5cb39962023-11-22 17:33:07 +0100648ndk::ScopedAStatus VirtualCameraRenderThread::renderIntoBlobStreamBuffer(
Vadim Caenc0aff132024-03-12 17:20:07 +0100649 const int streamId, const int bufferId, const CameraMetadata& resultMetadata,
Jan Sebechlebsky4ce32082024-02-14 16:02:11 +0100650 const RequestSettings& requestSettings, sp<Fence> fence) {
Jan Sebechlebsky5cb39962023-11-22 17:33:07 +0100651 std::shared_ptr<AHardwareBuffer> hwBuffer =
652 mSessionContext.fetchHardwareBuffer(streamId, bufferId);
Jan Sebechlebsky9ae496f2023-12-05 15:56:28 +0100653 if (hwBuffer == nullptr) {
654 ALOGE("%s: Failed to fetch hardware buffer %d for streamId %d", __func__,
655 bufferId, streamId);
656 return cameraStatus(Status::INTERNAL_ERROR);
657 }
658
659 std::optional<Stream> stream = mSessionContext.getStreamConfig(streamId);
660 if (!stream.has_value()) {
661 ALOGE("%s, failed to fetch information about stream %d", __func__, streamId);
662 return cameraStatus(Status::INTERNAL_ERROR);
663 }
Jan Sebechlebsky5cb39962023-11-22 17:33:07 +0100664
Jan Sebechlebsky4ce32082024-02-14 16:02:11 +0100665 ALOGV("%s: Rendering JPEG with size %d x %d, quality %d", __func__,
666 stream->width, stream->height, requestSettings.jpegQuality);
667
Jan Sebechlebsky042d1fb2023-12-12 16:37:00 +0100668 // Let's create YUV framebuffer and render the surface into this.
669 // This will take care about rescaling as well as potential format conversion.
Jan Sebechlebskyb3771312024-03-15 10:38:02 +0100670 // The buffer dimensions need to be rounded to nearest multiple of JPEG DCT
671 // size, however we pass the viewport corresponding to size of the stream so
672 // the image will be only rendered to the area corresponding to the stream
673 // size.
674 Resolution bufferSize =
675 roundTo2DctSize(Resolution(stream->width, stream->height));
Jan Sebechlebsky042d1fb2023-12-12 16:37:00 +0100676 std::shared_ptr<EglFrameBuffer> framebuffer = allocateTemporaryFramebuffer(
Jan Sebechlebskyb3771312024-03-15 10:38:02 +0100677 mEglDisplayContext->getEglDisplay(), bufferSize.width, bufferSize.height);
Jan Sebechlebsky042d1fb2023-12-12 16:37:00 +0100678 if (framebuffer == nullptr) {
679 ALOGE("Failed to allocate temporary framebuffer for JPEG compression");
680 return cameraStatus(Status::INTERNAL_ERROR);
681 }
682
683 // Render into temporary framebuffer.
Jan Sebechlebskyb3771312024-03-15 10:38:02 +0100684 ndk::ScopedAStatus status = renderIntoEglFramebuffer(
685 *framebuffer, /*fence=*/nullptr, Rect(stream->width, stream->height));
Jan Sebechlebsky042d1fb2023-12-12 16:37:00 +0100686 if (!status.isOk()) {
687 ALOGE("Failed to render input texture into temporary framebuffer");
688 return status;
689 }
690
Jan Sebechlebsky43543222024-02-16 12:50:32 +0100691 PlanesLockGuard planesLock(hwBuffer, AHARDWAREBUFFER_USAGE_CPU_READ_RARELY,
692 fence);
693 if (planesLock.getStatus() != OK) {
Jan Sebechlebsky5cb39962023-11-22 17:33:07 +0100694 return cameraStatus(Status::INTERNAL_ERROR);
695 }
696
Jan Sebechlebsky5c789e42024-02-29 16:32:17 +0100697 std::vector<uint8_t> app1ExifData =
Vadim Caenc0aff132024-03-12 17:20:07 +0100698 createExif(Resolution(stream->width, stream->height), resultMetadata,
Jan Sebechlebsky5c789e42024-02-29 16:32:17 +0100699 createThumbnail(requestSettings.thumbnailResolution,
700 requestSettings.thumbnailJpegQuality));
701 std::optional<size_t> compressedSize = compressJpeg(
Jan Sebechlebskyb3771312024-03-15 10:38:02 +0100702 stream->width, stream->height, requestSettings.jpegQuality,
703 framebuffer->getHardwareBuffer(), app1ExifData,
704 stream->bufferSize - sizeof(CameraBlob), (*planesLock).planes[0].data);
Jan Sebechlebsky5c789e42024-02-29 16:32:17 +0100705
Jan Sebechlebsky4ce32082024-02-14 16:02:11 +0100706 if (!compressedSize.has_value()) {
707 ALOGE("%s: Failed to compress JPEG image", __func__);
Jan Sebechlebsky4ce32082024-02-14 16:02:11 +0100708 return cameraStatus(Status::INTERNAL_ERROR);
709 }
710
711 CameraBlob cameraBlob{
712 .blobId = CameraBlobId::JPEG,
713 .blobSizeBytes = static_cast<int32_t>(compressedSize.value())};
714
Jan Sebechlebsky43543222024-02-16 12:50:32 +0100715 memcpy(reinterpret_cast<uint8_t*>((*planesLock).planes[0].data) +
Jan Sebechlebsky4ce32082024-02-14 16:02:11 +0100716 (stream->bufferSize - sizeof(cameraBlob)),
717 &cameraBlob, sizeof(cameraBlob));
718
Jan Sebechlebsky4ce32082024-02-14 16:02:11 +0100719 ALOGV("%s: Successfully compressed JPEG image, resulting size %zu B",
720 __func__, compressedSize.value());
721
722 return ndk::ScopedAStatus::ok();
Jan Sebechlebsky5cb39962023-11-22 17:33:07 +0100723}
724
725ndk::ScopedAStatus VirtualCameraRenderThread::renderIntoImageStreamBuffer(
726 int streamId, int bufferId, sp<Fence> fence) {
727 ALOGV("%s", __func__);
728
729 const std::chrono::nanoseconds before =
730 std::chrono::duration_cast<std::chrono::nanoseconds>(
731 std::chrono::steady_clock::now().time_since_epoch());
732
733 // Render test pattern using EGL.
734 std::shared_ptr<EglFrameBuffer> framebuffer =
735 mSessionContext.fetchOrCreateEglFramebuffer(
736 mEglDisplayContext->getEglDisplay(), streamId, bufferId);
737 if (framebuffer == nullptr) {
738 ALOGE(
739 "%s: Failed to get EGL framebuffer corresponding to buffer id "
740 "%d for streamId %d",
741 __func__, bufferId, streamId);
742 return cameraStatus(Status::ILLEGAL_ARGUMENT);
743 }
744
Jan Sebechlebsky042d1fb2023-12-12 16:37:00 +0100745 ndk::ScopedAStatus status = renderIntoEglFramebuffer(*framebuffer, fence);
Jan Sebechlebsky5cb39962023-11-22 17:33:07 +0100746
747 const std::chrono::nanoseconds after =
748 std::chrono::duration_cast<std::chrono::nanoseconds>(
749 std::chrono::steady_clock::now().time_since_epoch());
750
751 ALOGV("Rendering to buffer %d, stream %d took %lld ns", bufferId, streamId,
752 after.count() - before.count());
753
754 return ndk::ScopedAStatus::ok();
755}
756
// Renders the current surface texture into the provided EGL framebuffer,
// optionally restricted to `viewport` (defaults to the full framebuffer).
// Waits on `fence` (if valid) before touching the buffer; returns
// INTERNAL_ERROR on fence timeout or draw failure, ok() otherwise.
ndk::ScopedAStatus VirtualCameraRenderThread::renderIntoEglFramebuffer(
    EglFrameBuffer& framebuffer, sp<Fence> fence, std::optional<Rect> viewport) {
  ALOGV("%s", __func__);
  // Wait for fence to clear.
  if (fence != nullptr && fence->isValid()) {
    status_t ret = fence->wait(kAcquireFenceTimeout.count());
    if (ret != 0) {
      ALOGE("Timeout while waiting for the acquire fence for buffer");
      return cameraStatus(Status::INTERNAL_ERROR);
    }
  }

  mEglDisplayContext->makeCurrent();
  framebuffer.beforeDraw();

  // When no explicit viewport was requested, draw over the whole framebuffer.
  // A smaller viewport is used e.g. when rendering into a DCT-size-rounded
  // buffer where only the stream-sized area should contain the image.
  Rect viewportRect =
      viewport.value_or(Rect(framebuffer.getWidth(), framebuffer.getHeight()));
  glViewport(viewportRect.leftTop().x, viewportRect.leftTop().y,
             viewportRect.getWidth(), viewportRect.getHeight());

  sp<GraphicBuffer> textureBuffer = mEglSurfaceTexture->getCurrentBuffer();
  if (textureBuffer == nullptr) {
    // If there's no current buffer, nothing was written to the surface and
    // texture is not initialized yet. Clear the framebuffer to a solid fill
    // (r=0, g=0.5, b=0.5, a=0 — teal, not black) instead of rendering the
    // texture.
    glClearColor(0.0f, 0.5f, 0.5f, 0.0f);
    glClear(GL_COLOR_BUFFER_BIT);
  } else {
    // Pick the shader program matching the input buffer's pixel format:
    // YUV formats need the YUV sampling program, everything else uses RGB.
    const bool renderSuccess =
        isYuvFormat(static_cast<PixelFormat>(textureBuffer->getPixelFormat()))
            ? mEglTextureYuvProgram->draw(
                  mEglSurfaceTexture->getTextureId(),
                  mEglSurfaceTexture->getTransformMatrix())
            : mEglTextureRgbProgram->draw(
                  mEglSurfaceTexture->getTextureId(),
                  mEglSurfaceTexture->getTransformMatrix());
    if (!renderSuccess) {
      ALOGE("%s: Failed to render texture", __func__);
      return cameraStatus(Status::INTERNAL_ERROR);
    }
  }
  framebuffer.afterDraw();

  return ndk::ScopedAStatus::ok();
}
802
Jan Sebechlebsky5cb39962023-11-22 17:33:07 +0100803} // namespace virtualcamera
804} // namespace companion
805} // namespace android