blob: f5cf092c95d1afd0f33b4f9ca2768b2219e46814 [file] [log] [blame]
Jan Sebechlebsky5cb39962023-11-22 17:33:07 +01001/*
2 * Copyright (C) 2023 The Android Open Source Project
3 *
4 * Licensed under the Apache License, Version 2.0 (the "License");
5 * you may not use this file except in compliance with the License.
6 * You may obtain a copy of the License at
7 *
8 * http://www.apache.org/licenses/LICENSE-2.0
9 *
10 * Unless required by applicable law or agreed to in writing, software
11 * distributed under the License is distributed on an "AS IS" BASIS,
12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 * See the License for the specific language governing permissions and
14 * limitations under the License.
15 */
16
17#define LOG_TAG "VirtualCameraRenderThread"
18#include "VirtualCameraRenderThread.h"
19
20#include <chrono>
Jan Sebechlebsky042d1fb2023-12-12 16:37:00 +010021#include <cstdint>
Jan Sebechlebsky4ce32082024-02-14 16:02:11 +010022#include <cstring>
Jan Sebechlebsky5cb39962023-11-22 17:33:07 +010023#include <future>
24#include <memory>
25#include <mutex>
26#include <thread>
Jan Sebechlebsky4ce32082024-02-14 16:02:11 +010027#include <vector>
Jan Sebechlebsky5cb39962023-11-22 17:33:07 +010028
Jan Sebechlebsky4ce32082024-02-14 16:02:11 +010029#include "Exif.h"
Jan Sebechlebsky9ae496f2023-12-05 15:56:28 +010030#include "GLES/gl.h"
Biswarup Pal8ad8bc52024-02-08 13:41:44 +000031#include "VirtualCameraDevice.h"
Jan Sebechlebsky5cb39962023-11-22 17:33:07 +010032#include "VirtualCameraSessionContext.h"
33#include "aidl/android/hardware/camera/common/Status.h"
34#include "aidl/android/hardware/camera/device/BufferStatus.h"
Jan Sebechlebsky4ce32082024-02-14 16:02:11 +010035#include "aidl/android/hardware/camera/device/CameraBlob.h"
36#include "aidl/android/hardware/camera/device/CameraBlobId.h"
Jan Sebechlebsky5cb39962023-11-22 17:33:07 +010037#include "aidl/android/hardware/camera/device/CameraMetadata.h"
38#include "aidl/android/hardware/camera/device/CaptureResult.h"
39#include "aidl/android/hardware/camera/device/ErrorCode.h"
40#include "aidl/android/hardware/camera/device/ICameraDeviceCallback.h"
41#include "aidl/android/hardware/camera/device/NotifyMsg.h"
42#include "aidl/android/hardware/camera/device/ShutterMsg.h"
43#include "aidl/android/hardware/camera/device/StreamBuffer.h"
44#include "android-base/thread_annotations.h"
45#include "android/binder_auto_utils.h"
46#include "android/hardware_buffer.h"
Jan Sebechlebsky2f4478e2024-05-08 17:26:42 +020047#include "hardware/gralloc.h"
Vadim Caenc0aff132024-03-12 17:20:07 +010048#include "system/camera_metadata.h"
Jan Sebechlebsky042d1fb2023-12-12 16:37:00 +010049#include "ui/GraphicBuffer.h"
Jan Sebechlebskyb3771312024-03-15 10:38:02 +010050#include "ui/Rect.h"
Jan Sebechlebsky5cb39962023-11-22 17:33:07 +010051#include "util/EglFramebuffer.h"
52#include "util/JpegUtil.h"
Jan Sebechlebsky4ce32082024-02-14 16:02:11 +010053#include "util/MetadataUtil.h"
Jan Sebechlebsky5cb39962023-11-22 17:33:07 +010054#include "util/TestPatternHelper.h"
55#include "util/Util.h"
56#include "utils/Errors.h"
57
58namespace android {
59namespace companion {
60namespace virtualcamera {
61
62using ::aidl::android::hardware::camera::common::Status;
63using ::aidl::android::hardware::camera::device::BufferStatus;
Jan Sebechlebsky4ce32082024-02-14 16:02:11 +010064using ::aidl::android::hardware::camera::device::CameraBlob;
65using ::aidl::android::hardware::camera::device::CameraBlobId;
Jan Sebechlebsky5cb39962023-11-22 17:33:07 +010066using ::aidl::android::hardware::camera::device::CameraMetadata;
67using ::aidl::android::hardware::camera::device::CaptureResult;
68using ::aidl::android::hardware::camera::device::ErrorCode;
69using ::aidl::android::hardware::camera::device::ErrorMsg;
70using ::aidl::android::hardware::camera::device::ICameraDeviceCallback;
71using ::aidl::android::hardware::camera::device::NotifyMsg;
72using ::aidl::android::hardware::camera::device::ShutterMsg;
73using ::aidl::android::hardware::camera::device::Stream;
74using ::aidl::android::hardware::camera::device::StreamBuffer;
75using ::aidl::android::hardware::graphics::common::PixelFormat;
76using ::android::base::ScopedLockAssertion;
77
Jan Sebechlebsky4ce32082024-02-14 16:02:11 +010078using ::android::hardware::camera::common::helper::ExifUtils;
79
namespace {

using namespace std::chrono_literals;

// Upper bound on waiting for a buffer's acquire fence to signal.
// NOTE(review): the use site is outside this chunk - confirm against the
// fence-waiting code.
static constexpr std::chrono::milliseconds kAcquireFenceTimeout = 500ms;

// See REQUEST_PIPELINE_DEPTH in CaptureResult.java.
// This roughly corresponds to frame latency, we set to
// documented minimum of 2.
static constexpr uint8_t kPipelineDepth = 2;

// Capacity of the scratch buffer the JPEG thumbnail is compressed into
// (see createThumbnail in this file).
static constexpr size_t kJpegThumbnailBufferSize = 32 * 1024;  // 32 KiB
92
// Builds the per-frame capture result metadata reported back to the camera
// framework for a single capture request.
//
// All of the keys used in the response need to be referenced in
// availableResultKeys in CameraCharacteristics (see initCameraCharacteristics
// in VirtualCameraDevice.cc).
CameraMetadata createCaptureResultMetadata(
    const std::chrono::nanoseconds timestamp,
    const RequestSettings& requestSettings,
    const Resolution reportedSensorSize) {
  MetadataBuilder builder =
      MetadataBuilder()
          .setAberrationCorrectionMode(
              ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF)
          .setControlAeAvailableAntibandingModes(
              {ANDROID_CONTROL_AE_ANTIBANDING_MODE_OFF})
          .setControlAeAntibandingMode(ANDROID_CONTROL_AE_ANTIBANDING_MODE_OFF)
          .setControlAeExposureCompensation(0)
          .setControlAeLockAvailable(false)
          .setControlAeLock(ANDROID_CONTROL_AE_LOCK_OFF)
          .setControlAeMode(ANDROID_CONTROL_AE_MODE_ON)
          .setControlAePrecaptureTrigger(
              // Limited devices are expected to have precapture ae enabled and
              // respond to cancellation request. Since we don't actually
              // support AE at all, let's just respect the cancellation
              // expectation in case it's requested.
              requestSettings.aePrecaptureTrigger ==
                      ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER_CANCEL
                  ? ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER_CANCEL
                  : ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER_IDLE)
          .setControlAeState(ANDROID_CONTROL_AE_STATE_INACTIVE)
          .setControlAfMode(ANDROID_CONTROL_AF_MODE_OFF)
          .setControlAfTrigger(ANDROID_CONTROL_AF_TRIGGER_IDLE)
          .setControlAfState(ANDROID_CONTROL_AF_STATE_INACTIVE)
          .setControlAwbMode(ANDROID_CONTROL_AWB_MODE_AUTO)
          .setControlAwbLock(ANDROID_CONTROL_AWB_LOCK_OFF)
          .setControlAwbState(ANDROID_CONTROL_AWB_STATE_INACTIVE)
          .setControlCaptureIntent(requestSettings.captureIntent)
          .setControlEffectMode(ANDROID_CONTROL_EFFECT_MODE_OFF)
          .setControlMode(ANDROID_CONTROL_MODE_AUTO)
          .setControlSceneMode(ANDROID_CONTROL_SCENE_MODE_DISABLED)
          .setControlVideoStabilizationMode(
              ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_OFF)
          // Crop region always covers the full reported sensor area.
          .setCropRegion(0, 0, reportedSensorSize.width,
                         reportedSensorSize.height)
          .setFaceDetectMode(ANDROID_STATISTICS_FACE_DETECT_MODE_OFF)
          .setFlashState(ANDROID_FLASH_STATE_UNAVAILABLE)
          .setFlashMode(ANDROID_FLASH_MODE_OFF)
          .setFocalLength(VirtualCameraDevice::kFocalLength)
          // JPEG-related keys are echoed back from the request settings.
          .setJpegQuality(requestSettings.jpegQuality)
          .setJpegOrientation(requestSettings.jpegOrientation)
          .setJpegThumbnailSize(requestSettings.thumbnailResolution.width,
                                requestSettings.thumbnailResolution.height)
          .setJpegThumbnailQuality(requestSettings.thumbnailJpegQuality)
          .setLensOpticalStabilizationMode(
              ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF)
          .setNoiseReductionMode(ANDROID_NOISE_REDUCTION_MODE_OFF)
          .setPipelineDepth(kPipelineDepth)
          .setSensorTimestamp(timestamp)
          .setStatisticsHotPixelMapMode(
              ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_OFF)
          .setStatisticsLensShadingMapMode(
              ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_OFF)
          .setStatisticsSceneFlicker(ANDROID_STATISTICS_SCENE_FLICKER_NONE);

  if (requestSettings.fpsRange.has_value()) {
    builder.setControlAeTargetFpsRange(requestSettings.fpsRange.value());
  }

  if (requestSettings.gpsCoordinates.has_value()) {
    const GpsCoordinates& coordinates = requestSettings.gpsCoordinates.value();
    builder.setJpegGpsCoordinates(coordinates);
  }

  std::unique_ptr<CameraMetadata> metadata = builder.build();

  if (metadata == nullptr) {
    ALOGE("%s: Failed to build capture result metadata", __func__);
    return CameraMetadata();
  }
  return std::move(*metadata);
}
172
173NotifyMsg createShutterNotifyMsg(int frameNumber,
174 std::chrono::nanoseconds timestamp) {
175 NotifyMsg msg;
176 msg.set<NotifyMsg::Tag::shutter>(ShutterMsg{
177 .frameNumber = frameNumber,
178 .timestamp = timestamp.count(),
179 });
180 return msg;
181}
182
183NotifyMsg createBufferErrorNotifyMsg(int frameNumber, int streamId) {
184 NotifyMsg msg;
185 msg.set<NotifyMsg::Tag::error>(ErrorMsg{.frameNumber = frameNumber,
186 .errorStreamId = streamId,
187 .errorCode = ErrorCode::ERROR_BUFFER});
188 return msg;
189}
190
191NotifyMsg createRequestErrorNotifyMsg(int frameNumber) {
192 NotifyMsg msg;
193 msg.set<NotifyMsg::Tag::error>(ErrorMsg{
Jan Sebechlebskyb0d8cab2023-11-28 10:55:04 +0100194 .frameNumber = frameNumber,
195 // errorStreamId needs to be set to -1 for ERROR_REQUEST
196 // (not tied to specific stream).
197 .errorStreamId = -1,
198 .errorCode = ErrorCode::ERROR_REQUEST});
Jan Sebechlebsky5cb39962023-11-22 17:33:07 +0100199 return msg;
200}
201
Jan Sebechlebsky042d1fb2023-12-12 16:37:00 +0100202std::shared_ptr<EglFrameBuffer> allocateTemporaryFramebuffer(
203 EGLDisplay eglDisplay, const uint width, const int height) {
204 const AHardwareBuffer_Desc desc{
205 .width = static_cast<uint32_t>(width),
206 .height = static_cast<uint32_t>(height),
207 .layers = 1,
208 .format = AHARDWAREBUFFER_FORMAT_Y8Cb8Cr8_420,
209 .usage = AHARDWAREBUFFER_USAGE_GPU_FRAMEBUFFER |
210 AHARDWAREBUFFER_USAGE_CPU_READ_OFTEN,
211 .rfu0 = 0,
212 .rfu1 = 0};
213
214 AHardwareBuffer* hwBufferPtr;
215 int status = AHardwareBuffer_allocate(&desc, &hwBufferPtr);
216 if (status != NO_ERROR) {
217 ALOGE(
218 "%s: Failed to allocate hardware buffer for temporary framebuffer: %d",
219 __func__, status);
220 return nullptr;
221 }
222
223 return std::make_shared<EglFrameBuffer>(
224 eglDisplay,
225 std::shared_ptr<AHardwareBuffer>(hwBufferPtr, AHardwareBuffer_release));
226}
227
228bool isYuvFormat(const PixelFormat pixelFormat) {
229 switch (static_cast<android_pixel_format_t>(pixelFormat)) {
230 case HAL_PIXEL_FORMAT_YCBCR_422_I:
231 case HAL_PIXEL_FORMAT_YCBCR_422_SP:
232 case HAL_PIXEL_FORMAT_Y16:
233 case HAL_PIXEL_FORMAT_YV12:
234 case HAL_PIXEL_FORMAT_YCBCR_420_888:
235 return true;
236 default:
237 return false;
238 }
239}
240
Jan Sebechlebsky4ce32082024-02-14 16:02:11 +0100241std::vector<uint8_t> createExif(
Vadim Caenc0aff132024-03-12 17:20:07 +0100242 Resolution imageSize, const CameraMetadata resultMetadata,
243 const std::vector<uint8_t>& compressedThumbnail = {}) {
Jan Sebechlebsky4ce32082024-02-14 16:02:11 +0100244 std::unique_ptr<ExifUtils> exifUtils(ExifUtils::create());
245 exifUtils->initialize();
Vadim Caenc0aff132024-03-12 17:20:07 +0100246
247 // Make a copy of the metadata in order to converting it the HAL metadata
248 // format (as opposed to the AIDL class) and use the setFromMetadata method
249 // from ExifUtil
250 camera_metadata_t* rawSettings =
251 clone_camera_metadata((camera_metadata_t*)resultMetadata.metadata.data());
252 if (rawSettings != nullptr) {
253 android::hardware::camera::common::helper::CameraMetadata halMetadata(
254 rawSettings);
255 exifUtils->setFromMetadata(halMetadata, imageSize.width, imageSize.height);
256 }
257 exifUtils->setMake(VirtualCameraDevice::kDefaultMakeAndModel);
258 exifUtils->setModel(VirtualCameraDevice::kDefaultMakeAndModel);
259 exifUtils->setFlash(0);
Jan Sebechlebsky4ce32082024-02-14 16:02:11 +0100260
261 std::vector<uint8_t> app1Data;
262
263 size_t thumbnailDataSize = compressedThumbnail.size();
264 const void* thumbnailData =
265 thumbnailDataSize > 0
266 ? reinterpret_cast<const void*>(compressedThumbnail.data())
267 : nullptr;
268
269 if (!exifUtils->generateApp1(thumbnailData, thumbnailDataSize)) {
270 ALOGE("%s: Failed to generate APP1 segment for EXIF metadata", __func__);
271 return app1Data;
272 }
273
274 const uint8_t* data = exifUtils->getApp1Buffer();
275 const size_t size = exifUtils->getApp1Length();
276
277 app1Data.insert(app1Data.end(), data, data + size);
278 return app1Data;
279}
280
Jan Sebechlebsky5cb39962023-11-22 17:33:07 +0100281} // namespace
282
// Bundles the stream id and buffer id of one output buffer of a capture
// request together with its acquire fence.
CaptureRequestBuffer::CaptureRequestBuffer(int streamId, int bufferId,
                                           sp<Fence> fence)
    : mStreamId(streamId), mBufferId(bufferId), mFence(fence) {
}
287
// Returns the id of the stream this buffer belongs to.
int CaptureRequestBuffer::getStreamId() const {
  return mStreamId;
}
291
// Returns the id of the buffer within its stream.
int CaptureRequestBuffer::getBufferId() const {
  return mBufferId;
}
295
// Returns the acquire fence associated with this buffer (may be null).
sp<Fence> CaptureRequestBuffer::getFence() const {
  return mFence;
}
299
// Constructs the render thread state. The thread itself is not spawned here;
// callers must invoke start() separately.
// Note: members are initialized in their declaration order in the header,
// not in the order of this init list.
VirtualCameraRenderThread::VirtualCameraRenderThread(
    VirtualCameraSessionContext& sessionContext,
    const Resolution inputSurfaceSize, const Resolution reportedSensorSize,
    std::shared_ptr<ICameraDeviceCallback> cameraDeviceCallback, bool testMode)
    : mCameraDeviceCallback(cameraDeviceCallback),
      mInputSurfaceSize(inputSurfaceSize),
      mReportedSensorSize(reportedSensorSize),
      mTestMode(testMode),
      mSessionContext(sessionContext) {
}
310
// Signals the render thread to exit and joins it before destruction.
VirtualCameraRenderThread::~VirtualCameraRenderThread() {
  stop();
  if (mThread.joinable()) {
    mThread.join();
  }
}
317
// Captures everything needed to process one capture request: the frame
// number, the set of output buffers, and the per-request settings.
ProcessCaptureRequestTask::ProcessCaptureRequestTask(
    int frameNumber, const std::vector<CaptureRequestBuffer>& requestBuffers,
    const RequestSettings& requestSettings)
    : mFrameNumber(frameNumber),
      mBuffers(requestBuffers),
      mRequestSettings(requestSettings) {
}
325
// Returns the frame number of this capture request.
int ProcessCaptureRequestTask::getFrameNumber() const {
  return mFrameNumber;
}
329
// Returns the output buffers to be filled for this capture request.
const std::vector<CaptureRequestBuffer>& ProcessCaptureRequestTask::getBuffers()
    const {
  return mBuffers;
}
334
// Returns the settings (jpeg quality, fps range, ...) of this request.
const RequestSettings& ProcessCaptureRequestTask::getRequestSettings() const {
  return mRequestSettings;
}
338
Jan Sebechlebsky5cb39962023-11-22 17:33:07 +0100339void VirtualCameraRenderThread::enqueueTask(
340 std::unique_ptr<ProcessCaptureRequestTask> task) {
341 std::lock_guard<std::mutex> lock(mLock);
342 mQueue.emplace_back(std::move(task));
343 mCondVar.notify_one();
344}
345
// Drains the task queue, failing every pending request with ERROR_REQUEST
// (via flushCaptureRequest) instead of rendering it.
// NOTE(review): the camera device callback is invoked while mLock is held -
// confirm this cannot deadlock against callers of enqueueTask.
void VirtualCameraRenderThread::flush() {
  std::lock_guard<std::mutex> lock(mLock);
  while (!mQueue.empty()) {
    std::unique_ptr<ProcessCaptureRequestTask> task = std::move(mQueue.front());
    mQueue.pop_front();
    flushCaptureRequest(*task);
  }
}
354
// Spawns the render thread running threadLoop.
// Note: must be called at most once - assigning over a joinable std::thread
// would terminate the process.
void VirtualCameraRenderThread::start() {
  mThread = std::thread(&VirtualCameraRenderThread::threadLoop, this);
}
358
// Asks the render thread to exit after the task it is currently processing.
// Does not join the thread - the destructor does that.
void VirtualCameraRenderThread::stop() {
  {
    std::lock_guard<std::mutex> lock(mLock);
    mPendingExit = true;
    mCondVar.notify_one();
  }
}
366
// Blocks until the render thread has created its input surface (published
// from threadLoop) and returns it.
// NOTE(review): std::promise::get_future may only be called once, so a
// second call to this method would throw - confirm callers call it once.
sp<Surface> VirtualCameraRenderThread::getInputSurface() {
  return mInputSurfacePromise.get_future().get();
}
370
// Blocks until a task is available or stop() was requested.
// Returns nullptr when the thread should exit, otherwise the oldest queued
// task. Exit takes precedence over tasks still sitting in the queue.
std::unique_ptr<ProcessCaptureRequestTask>
VirtualCameraRenderThread::dequeueTask() {
  std::unique_lock<std::mutex> lock(mLock);
  // Clang's thread safety analysis doesn't perform alias analysis,
  // so it doesn't support moveable std::unique_lock.
  //
  // Lock assertion below is basically explicit declaration that
  // the lock is held in this scope, which is true, since it's only
  // released during waiting inside mCondVar.wait calls.
  ScopedLockAssertion lockAssertion(mLock);

  mCondVar.wait(lock, [this]() REQUIRES(mLock) {
    return mPendingExit || !mQueue.empty();
  });
  if (mPendingExit) {
    return nullptr;
  }
  std::unique_ptr<ProcessCaptureRequestTask> task = std::move(mQueue.front());
  mQueue.pop_front();
  return task;
}
392
393void VirtualCameraRenderThread::threadLoop() {
394 ALOGV("Render thread starting");
395
396 mEglDisplayContext = std::make_unique<EglDisplayContext>();
Jan Sebechlebsky042d1fb2023-12-12 16:37:00 +0100397 mEglTextureYuvProgram =
398 std::make_unique<EglTextureProgram>(EglTextureProgram::TextureFormat::YUV);
399 mEglTextureRgbProgram = std::make_unique<EglTextureProgram>(
400 EglTextureProgram::TextureFormat::RGBA);
Jan Sebechlebskybb01c1d2024-02-12 11:41:37 +0100401 mEglSurfaceTexture = std::make_unique<EglSurfaceTexture>(
402 mInputSurfaceSize.width, mInputSurfaceSize.height);
Jan Sebechlebsky6402fef2024-03-25 16:30:26 +0100403
404 sp<Surface> inputSurface = mEglSurfaceTexture->getSurface();
405 if (mTestMode) {
406 inputSurface->connect(NATIVE_WINDOW_API_CPU, false, nullptr);
407 }
Jan Sebechlebsky5cb39962023-11-22 17:33:07 +0100408 mInputSurfacePromise.set_value(mEglSurfaceTexture->getSurface());
409
410 while (std::unique_ptr<ProcessCaptureRequestTask> task = dequeueTask()) {
411 processCaptureRequest(*task);
412 }
413
Jan Sebechlebsky06b36672024-03-18 11:52:35 +0100414 // Destroy EGL utilities still on the render thread.
415 mEglSurfaceTexture.reset();
416 mEglTextureRgbProgram.reset();
417 mEglTextureYuvProgram.reset();
418 mEglDisplayContext.reset();
419
Jan Sebechlebsky5cb39962023-11-22 17:33:07 +0100420 ALOGV("Render thread exiting");
421}
422
// Processes one capture request end-to-end:
//  1. optionally throttles to the requested max fps before acquiring a frame,
//  2. acquires the most recent image from the input Surface,
//  3. renders it into every output buffer (JPEG for BLOB streams, direct
//     render otherwise),
//  4. reports shutter (plus per-buffer errors) and the capture result back
//     through the camera device callback.
void VirtualCameraRenderThread::processCaptureRequest(
    const ProcessCaptureRequestTask& request) {
  if (mTestMode) {
    // In test mode let's just render something to the Surface ourselves.
    renderTestPatternYCbCr420(mEglSurfaceTexture->getSurface(),
                              request.getFrameNumber());
  }

  std::chrono::nanoseconds timestamp =
      std::chrono::duration_cast<std::chrono::nanoseconds>(
          std::chrono::steady_clock::now().time_since_epoch());
  // Atomically record this acquisition time and fetch the previous one.
  std::chrono::nanoseconds lastAcquisitionTimestamp(
      mLastAcquisitionTimestampNanoseconds.exchange(timestamp.count(),
                                                    std::memory_order_relaxed));

  if (request.getRequestSettings().fpsRange) {
    // Guard against division by zero below if maxFps is unset/invalid.
    const int maxFps =
        std::max(1, request.getRequestSettings().fpsRange->maxFps);
    const std::chrono::nanoseconds minFrameDuration(
        static_cast<uint64_t>(1e9 / maxFps));
    const std::chrono::nanoseconds frameDuration =
        timestamp - lastAcquisitionTimestamp;
    if (frameDuration < minFrameDuration) {
      // We're too fast for the configured maxFps, let's wait a bit.
      const std::chrono::nanoseconds sleepTime =
          minFrameDuration - frameDuration;
      ALOGV("Current frame duration would be %" PRIu64
            " ns corresponding to, "
            "sleeping for %" PRIu64
            " ns before updating texture to match maxFps %d",
            static_cast<uint64_t>(frameDuration.count()),
            static_cast<uint64_t>(sleepTime.count()), maxFps);

      std::this_thread::sleep_for(sleepTime);
      // Re-stamp with the post-sleep time so the next frame's throttling
      // is computed from the actual acquisition moment.
      timestamp = std::chrono::duration_cast<std::chrono::nanoseconds>(
          std::chrono::steady_clock::now().time_since_epoch());
      mLastAcquisitionTimestampNanoseconds.store(timestamp.count(),
                                                 std::memory_order_relaxed);
    }
  }

  // Acquire new (most recent) image from the Surface.
  mEglSurfaceTexture->updateTexture();

  CaptureResult captureResult;
  captureResult.fmqResultSize = 0;
  captureResult.frameNumber = request.getFrameNumber();
  // Partial result needs to be set to 1 when metadata are present.
  captureResult.partialResult = 1;
  captureResult.inputBuffer.streamId = -1;
  captureResult.physicalCameraMetadata.resize(0);
  captureResult.result = createCaptureResultMetadata(
      timestamp, request.getRequestSettings(), mReportedSensorSize);

  const std::vector<CaptureRequestBuffer>& buffers = request.getBuffers();
  captureResult.outputBuffers.resize(buffers.size());

  for (int i = 0; i < buffers.size(); ++i) {
    const CaptureRequestBuffer& reqBuffer = buffers[i];
    StreamBuffer& resBuffer = captureResult.outputBuffers[i];
    resBuffer.streamId = reqBuffer.getStreamId();
    resBuffer.bufferId = reqBuffer.getBufferId();
    resBuffer.status = BufferStatus::OK;

    const std::optional<Stream> streamConfig =
        mSessionContext.getStreamConfig(reqBuffer.getStreamId());

    if (!streamConfig.has_value()) {
      resBuffer.status = BufferStatus::ERROR;
      continue;
    }

    // BLOB streams get JPEG-compressed output; everything else is rendered
    // directly into the stream buffer.
    auto status = streamConfig->format == PixelFormat::BLOB
                      ? renderIntoBlobStreamBuffer(
                            reqBuffer.getStreamId(), reqBuffer.getBufferId(),
                            captureResult.result, request.getRequestSettings(),
                            reqBuffer.getFence())
                      : renderIntoImageStreamBuffer(reqBuffer.getStreamId(),
                                                    reqBuffer.getBufferId(),
                                                    reqBuffer.getFence());
    if (!status.isOk()) {
      resBuffer.status = BufferStatus::ERROR;
    }
  }

  // Shutter message first, followed by ERROR_BUFFER for any failed buffer.
  std::vector<NotifyMsg> notifyMsg{
      createShutterNotifyMsg(request.getFrameNumber(), timestamp)};
  for (const StreamBuffer& resBuffer : captureResult.outputBuffers) {
    if (resBuffer.status != BufferStatus::OK) {
      notifyMsg.push_back(createBufferErrorNotifyMsg(request.getFrameNumber(),
                                                     resBuffer.streamId));
    }
  }

  auto status = mCameraDeviceCallback->notify(notifyMsg);
  if (!status.isOk()) {
    ALOGE("%s: notify call failed: %s", __func__,
          status.getDescription().c_str());
    return;
  }

  std::vector<::aidl::android::hardware::camera::device::CaptureResult>
      captureResults(1);
  captureResults[0] = std::move(captureResult);

  status = mCameraDeviceCallback->processCaptureResult(captureResults);
  if (!status.isOk()) {
    ALOGE("%s: processCaptureResult call failed: %s", __func__,
          status.getDescription().c_str());
    return;
  }

  ALOGV("%s: Successfully called processCaptureResult", __func__);
}
537
// Fails a capture request without rendering it: notifies the framework with
// ERROR_REQUEST and returns every output buffer with ERROR status, handing
// the (unsignaled) acquire fence back as the release fence so the framework
// can safely reuse the buffer.
void VirtualCameraRenderThread::flushCaptureRequest(
    const ProcessCaptureRequestTask& request) {
  CaptureResult captureResult;
  captureResult.fmqResultSize = 0;
  captureResult.frameNumber = request.getFrameNumber();
  captureResult.inputBuffer.streamId = -1;

  const std::vector<CaptureRequestBuffer>& buffers = request.getBuffers();
  captureResult.outputBuffers.resize(buffers.size());

  for (int i = 0; i < buffers.size(); ++i) {
    const CaptureRequestBuffer& reqBuffer = buffers[i];
    StreamBuffer& resBuffer = captureResult.outputBuffers[i];
    resBuffer.streamId = reqBuffer.getStreamId();
    resBuffer.bufferId = reqBuffer.getBufferId();
    resBuffer.status = BufferStatus::ERROR;
    sp<Fence> fence = reqBuffer.getFence();
    if (fence != nullptr && fence->isValid()) {
      // Propagate the acquire fence as release fence for the errored buffer.
      resBuffer.releaseFence.fds.emplace_back(fence->dup());
    }
  }

  auto status = mCameraDeviceCallback->notify(
      {createRequestErrorNotifyMsg(request.getFrameNumber())});
  if (!status.isOk()) {
    ALOGE("%s: notify call failed: %s", __func__,
          status.getDescription().c_str());
    return;
  }

  std::vector<::aidl::android::hardware::camera::device::CaptureResult>
      captureResults(1);
  captureResults[0] = std::move(captureResult);

  status = mCameraDeviceCallback->processCaptureResult(captureResults);
  if (!status.isOk()) {
    ALOGE("%s: processCaptureResult call failed: %s", __func__,
          status.getDescription().c_str());
  }
}
578
Jan Sebechlebsky4ce32082024-02-14 16:02:11 +0100579std::vector<uint8_t> VirtualCameraRenderThread::createThumbnail(
580 const Resolution resolution, const int quality) {
581 if (resolution.width == 0 || resolution.height == 0) {
582 ALOGV("%s: Skipping thumbnail creation, zero size requested", __func__);
583 return {};
584 }
585
586 ALOGV("%s: Creating thumbnail with size %d x %d, quality %d", __func__,
587 resolution.width, resolution.height, quality);
Jan Sebechlebskyb3771312024-03-15 10:38:02 +0100588 Resolution bufferSize = roundTo2DctSize(resolution);
Jan Sebechlebsky4ce32082024-02-14 16:02:11 +0100589 std::shared_ptr<EglFrameBuffer> framebuffer = allocateTemporaryFramebuffer(
Jan Sebechlebskyb3771312024-03-15 10:38:02 +0100590 mEglDisplayContext->getEglDisplay(), bufferSize.width, bufferSize.height);
Jan Sebechlebsky4ce32082024-02-14 16:02:11 +0100591 if (framebuffer == nullptr) {
592 ALOGE(
593 "Failed to allocate temporary framebuffer for JPEG thumbnail "
594 "compression");
595 return {};
596 }
597
598 // TODO(b/324383963) Add support for letterboxing if the thumbnail size
599 // doesn't correspond
600 // to input texture aspect ratio.
Jan Sebechlebskyb3771312024-03-15 10:38:02 +0100601 if (!renderIntoEglFramebuffer(*framebuffer, /*fence=*/nullptr,
602 Rect(resolution.width, resolution.height))
603 .isOk()) {
Jan Sebechlebsky4ce32082024-02-14 16:02:11 +0100604 ALOGE(
605 "Failed to render input texture into temporary framebuffer for JPEG "
606 "thumbnail");
607 return {};
608 }
609
Jan Sebechlebsky4ce32082024-02-14 16:02:11 +0100610 std::vector<uint8_t> compressedThumbnail;
611 compressedThumbnail.resize(kJpegThumbnailBufferSize);
Jan Sebechlebskyb3771312024-03-15 10:38:02 +0100612 ALOGE("%s: Compressing thumbnail %d x %d", __func__, resolution.width,
613 resolution.height);
614 std::optional<size_t> compressedSize =
615 compressJpeg(resolution.width, resolution.height, quality,
616 framebuffer->getHardwareBuffer(), {},
617 compressedThumbnail.size(), compressedThumbnail.data());
Jan Sebechlebsky4ce32082024-02-14 16:02:11 +0100618 if (!compressedSize.has_value()) {
619 ALOGE("%s: Failed to compress jpeg thumbnail", __func__);
620 return {};
621 }
622 compressedThumbnail.resize(compressedSize.value());
623 return compressedThumbnail;
624}
625
Jan Sebechlebsky5cb39962023-11-22 17:33:07 +0100626ndk::ScopedAStatus VirtualCameraRenderThread::renderIntoBlobStreamBuffer(
Vadim Caenc0aff132024-03-12 17:20:07 +0100627 const int streamId, const int bufferId, const CameraMetadata& resultMetadata,
Jan Sebechlebsky4ce32082024-02-14 16:02:11 +0100628 const RequestSettings& requestSettings, sp<Fence> fence) {
Jan Sebechlebsky5cb39962023-11-22 17:33:07 +0100629 std::shared_ptr<AHardwareBuffer> hwBuffer =
630 mSessionContext.fetchHardwareBuffer(streamId, bufferId);
Jan Sebechlebsky9ae496f2023-12-05 15:56:28 +0100631 if (hwBuffer == nullptr) {
632 ALOGE("%s: Failed to fetch hardware buffer %d for streamId %d", __func__,
633 bufferId, streamId);
634 return cameraStatus(Status::INTERNAL_ERROR);
635 }
636
637 std::optional<Stream> stream = mSessionContext.getStreamConfig(streamId);
638 if (!stream.has_value()) {
639 ALOGE("%s, failed to fetch information about stream %d", __func__, streamId);
640 return cameraStatus(Status::INTERNAL_ERROR);
641 }
Jan Sebechlebsky5cb39962023-11-22 17:33:07 +0100642
Jan Sebechlebsky4ce32082024-02-14 16:02:11 +0100643 ALOGV("%s: Rendering JPEG with size %d x %d, quality %d", __func__,
644 stream->width, stream->height, requestSettings.jpegQuality);
645
Jan Sebechlebsky042d1fb2023-12-12 16:37:00 +0100646 // Let's create YUV framebuffer and render the surface into this.
647 // This will take care about rescaling as well as potential format conversion.
Jan Sebechlebskyb3771312024-03-15 10:38:02 +0100648 // The buffer dimensions need to be rounded to nearest multiple of JPEG DCT
649 // size, however we pass the viewport corresponding to size of the stream so
650 // the image will be only rendered to the area corresponding to the stream
651 // size.
652 Resolution bufferSize =
653 roundTo2DctSize(Resolution(stream->width, stream->height));
Jan Sebechlebsky042d1fb2023-12-12 16:37:00 +0100654 std::shared_ptr<EglFrameBuffer> framebuffer = allocateTemporaryFramebuffer(
Jan Sebechlebskyb3771312024-03-15 10:38:02 +0100655 mEglDisplayContext->getEglDisplay(), bufferSize.width, bufferSize.height);
Jan Sebechlebsky042d1fb2023-12-12 16:37:00 +0100656 if (framebuffer == nullptr) {
657 ALOGE("Failed to allocate temporary framebuffer for JPEG compression");
658 return cameraStatus(Status::INTERNAL_ERROR);
659 }
660
661 // Render into temporary framebuffer.
Jan Sebechlebskyb3771312024-03-15 10:38:02 +0100662 ndk::ScopedAStatus status = renderIntoEglFramebuffer(
663 *framebuffer, /*fence=*/nullptr, Rect(stream->width, stream->height));
Jan Sebechlebsky042d1fb2023-12-12 16:37:00 +0100664 if (!status.isOk()) {
665 ALOGE("Failed to render input texture into temporary framebuffer");
666 return status;
667 }
668
Jan Sebechlebsky43543222024-02-16 12:50:32 +0100669 PlanesLockGuard planesLock(hwBuffer, AHARDWAREBUFFER_USAGE_CPU_READ_RARELY,
670 fence);
671 if (planesLock.getStatus() != OK) {
Jan Sebechlebsky5cb39962023-11-22 17:33:07 +0100672 return cameraStatus(Status::INTERNAL_ERROR);
673 }
674
Jan Sebechlebsky5c789e42024-02-29 16:32:17 +0100675 std::vector<uint8_t> app1ExifData =
Vadim Caenc0aff132024-03-12 17:20:07 +0100676 createExif(Resolution(stream->width, stream->height), resultMetadata,
Jan Sebechlebsky5c789e42024-02-29 16:32:17 +0100677 createThumbnail(requestSettings.thumbnailResolution,
678 requestSettings.thumbnailJpegQuality));
679 std::optional<size_t> compressedSize = compressJpeg(
Jan Sebechlebskyb3771312024-03-15 10:38:02 +0100680 stream->width, stream->height, requestSettings.jpegQuality,
681 framebuffer->getHardwareBuffer(), app1ExifData,
682 stream->bufferSize - sizeof(CameraBlob), (*planesLock).planes[0].data);
Jan Sebechlebsky5c789e42024-02-29 16:32:17 +0100683
Jan Sebechlebsky4ce32082024-02-14 16:02:11 +0100684 if (!compressedSize.has_value()) {
685 ALOGE("%s: Failed to compress JPEG image", __func__);
Jan Sebechlebsky4ce32082024-02-14 16:02:11 +0100686 return cameraStatus(Status::INTERNAL_ERROR);
687 }
688
689 CameraBlob cameraBlob{
690 .blobId = CameraBlobId::JPEG,
691 .blobSizeBytes = static_cast<int32_t>(compressedSize.value())};
692
Jan Sebechlebsky43543222024-02-16 12:50:32 +0100693 memcpy(reinterpret_cast<uint8_t*>((*planesLock).planes[0].data) +
Jan Sebechlebsky4ce32082024-02-14 16:02:11 +0100694 (stream->bufferSize - sizeof(cameraBlob)),
695 &cameraBlob, sizeof(cameraBlob));
696
Jan Sebechlebsky4ce32082024-02-14 16:02:11 +0100697 ALOGV("%s: Successfully compressed JPEG image, resulting size %zu B",
698 __func__, compressedSize.value());
699
700 return ndk::ScopedAStatus::ok();
Jan Sebechlebsky5cb39962023-11-22 17:33:07 +0100701}
702
703ndk::ScopedAStatus VirtualCameraRenderThread::renderIntoImageStreamBuffer(
704 int streamId, int bufferId, sp<Fence> fence) {
705 ALOGV("%s", __func__);
706
707 const std::chrono::nanoseconds before =
708 std::chrono::duration_cast<std::chrono::nanoseconds>(
709 std::chrono::steady_clock::now().time_since_epoch());
710
711 // Render test pattern using EGL.
712 std::shared_ptr<EglFrameBuffer> framebuffer =
713 mSessionContext.fetchOrCreateEglFramebuffer(
714 mEglDisplayContext->getEglDisplay(), streamId, bufferId);
715 if (framebuffer == nullptr) {
716 ALOGE(
717 "%s: Failed to get EGL framebuffer corresponding to buffer id "
718 "%d for streamId %d",
719 __func__, bufferId, streamId);
720 return cameraStatus(Status::ILLEGAL_ARGUMENT);
721 }
722
Jan Sebechlebsky042d1fb2023-12-12 16:37:00 +0100723 ndk::ScopedAStatus status = renderIntoEglFramebuffer(*framebuffer, fence);
Jan Sebechlebsky5cb39962023-11-22 17:33:07 +0100724
725 const std::chrono::nanoseconds after =
726 std::chrono::duration_cast<std::chrono::nanoseconds>(
727 std::chrono::steady_clock::now().time_since_epoch());
728
729 ALOGV("Rendering to buffer %d, stream %d took %lld ns", bufferId, streamId,
730 after.count() - before.count());
731
732 return ndk::ScopedAStatus::ok();
733}
734
// Renders the current content of the input surface texture into `framebuffer`.
//
// Waits on `fence` (when valid) before touching the buffer, makes the EGL
// context current, and draws with the YUV or RGB texture program depending on
// the pixel format of the texture's current buffer. When the surface has not
// produced a buffer yet, the framebuffer is cleared to a solid color instead.
// `viewport`, when set, restricts rendering to that rectangle; otherwise the
// whole framebuffer is used.
//
// Returns INTERNAL_ERROR on fence timeout or draw failure, otherwise OK.
//
// NOTE(review): no EGL/GL error checking after makeCurrent/glViewport —
// presumably failures surface via the draw calls; confirm.
ndk::ScopedAStatus VirtualCameraRenderThread::renderIntoEglFramebuffer(
    EglFrameBuffer& framebuffer, sp<Fence> fence, std::optional<Rect> viewport) {
  ALOGV("%s", __func__);
  // Wait for the acquire fence to clear before writing into the buffer.
  if (fence != nullptr && fence->isValid()) {
    status_t ret = fence->wait(kAcquireFenceTimeout.count());
    if (ret != 0) {
      ALOGE("Timeout while waiting for the acquire fence for buffer");
      return cameraStatus(Status::INTERNAL_ERROR);
    }
  }

  // GL state setup: context must be current before any GL call below, and
  // beforeDraw()/afterDraw() must bracket the draw on this framebuffer.
  mEglDisplayContext->makeCurrent();
  framebuffer.beforeDraw();

  // Default to the full framebuffer when no explicit viewport was requested.
  Rect viewportRect =
      viewport.value_or(Rect(framebuffer.getWidth(), framebuffer.getHeight()));
  glViewport(viewportRect.leftTop().x, viewportRect.leftTop().y,
             viewportRect.getWidth(), viewportRect.getHeight());

  sp<GraphicBuffer> textureBuffer = mEglSurfaceTexture->getCurrentBuffer();
  if (textureBuffer == nullptr) {
    // If there's no current buffer, nothing was written to the surface and
    // the texture is not initialized yet. Clear the framebuffer to a solid
    // fill (RGBA 0.0/0.5/0.5/0.0 — teal with zero alpha) instead of
    // rendering the texture.
    glClearColor(0.0f, 0.5f, 0.5f, 0.0f);
    glClear(GL_COLOR_BUFFER_BIT);
  } else {
    // Pick the shader program matching the buffer's pixel format: YUV
    // formats need the YUV sampling program, everything else uses RGB.
    const bool renderSuccess =
        isYuvFormat(static_cast<PixelFormat>(textureBuffer->getPixelFormat()))
            ? mEglTextureYuvProgram->draw(
                  mEglSurfaceTexture->getTextureId(),
                  mEglSurfaceTexture->getTransformMatrix())
            : mEglTextureRgbProgram->draw(
                  mEglSurfaceTexture->getTextureId(),
                  mEglSurfaceTexture->getTransformMatrix());
    if (!renderSuccess) {
      ALOGE("%s: Failed to render texture", __func__);
      return cameraStatus(Status::INTERNAL_ERROR);
    }
  }
  framebuffer.afterDraw();

  return ndk::ScopedAStatus::ok();
}
780
Jan Sebechlebsky5cb39962023-11-22 17:33:07 +0100781} // namespace virtualcamera
782} // namespace companion
783} // namespace android