blob: ad2844359e9a1e4b39cc838cc538680be9aa46be [file] [log] [blame]
Jan Sebechlebsky5cb39962023-11-22 17:33:07 +01001/*
2 * Copyright (C) 2023 The Android Open Source Project
3 *
4 * Licensed under the Apache License, Version 2.0 (the "License");
5 * you may not use this file except in compliance with the License.
6 * You may obtain a copy of the License at
7 *
8 * http://www.apache.org/licenses/LICENSE-2.0
9 *
10 * Unless required by applicable law or agreed to in writing, software
11 * distributed under the License is distributed on an "AS IS" BASIS,
12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 * See the License for the specific language governing permissions and
14 * limitations under the License.
15 */
16
17#define LOG_TAG "VirtualCameraRenderThread"
18#include "VirtualCameraRenderThread.h"
19
20#include <chrono>
Jan Sebechlebsky042d1fb2023-12-12 16:37:00 +010021#include <cstdint>
Jan Sebechlebsky4ce32082024-02-14 16:02:11 +010022#include <cstring>
Jan Sebechlebsky5cb39962023-11-22 17:33:07 +010023#include <future>
24#include <memory>
25#include <mutex>
26#include <thread>
Jan Sebechlebsky18ac32c2024-06-07 09:53:53 +020027#include <utility>
Jan Sebechlebsky4ce32082024-02-14 16:02:11 +010028#include <vector>
Jan Sebechlebsky5cb39962023-11-22 17:33:07 +010029
Jan Sebechlebsky4ce32082024-02-14 16:02:11 +010030#include "Exif.h"
Jan Sebechlebsky9ae496f2023-12-05 15:56:28 +010031#include "GLES/gl.h"
Biswarup Pal8ad8bc52024-02-08 13:41:44 +000032#include "VirtualCameraDevice.h"
Jan Sebechlebsky5cb39962023-11-22 17:33:07 +010033#include "VirtualCameraSessionContext.h"
34#include "aidl/android/hardware/camera/common/Status.h"
35#include "aidl/android/hardware/camera/device/BufferStatus.h"
Jan Sebechlebsky4ce32082024-02-14 16:02:11 +010036#include "aidl/android/hardware/camera/device/CameraBlob.h"
37#include "aidl/android/hardware/camera/device/CameraBlobId.h"
Jan Sebechlebsky5cb39962023-11-22 17:33:07 +010038#include "aidl/android/hardware/camera/device/CameraMetadata.h"
39#include "aidl/android/hardware/camera/device/CaptureResult.h"
40#include "aidl/android/hardware/camera/device/ErrorCode.h"
41#include "aidl/android/hardware/camera/device/ICameraDeviceCallback.h"
42#include "aidl/android/hardware/camera/device/NotifyMsg.h"
43#include "aidl/android/hardware/camera/device/ShutterMsg.h"
44#include "aidl/android/hardware/camera/device/StreamBuffer.h"
45#include "android-base/thread_annotations.h"
46#include "android/binder_auto_utils.h"
47#include "android/hardware_buffer.h"
Jan Sebechlebsky2f4478e2024-05-08 17:26:42 +020048#include "hardware/gralloc.h"
Vadim Caenc0aff132024-03-12 17:20:07 +010049#include "system/camera_metadata.h"
Jan Sebechlebsky042d1fb2023-12-12 16:37:00 +010050#include "ui/GraphicBuffer.h"
Jan Sebechlebskyb3771312024-03-15 10:38:02 +010051#include "ui/Rect.h"
Jan Sebechlebsky5cb39962023-11-22 17:33:07 +010052#include "util/EglFramebuffer.h"
53#include "util/JpegUtil.h"
Jan Sebechlebsky4ce32082024-02-14 16:02:11 +010054#include "util/MetadataUtil.h"
Jan Sebechlebsky5cb39962023-11-22 17:33:07 +010055#include "util/Util.h"
56#include "utils/Errors.h"
57
58namespace android {
59namespace companion {
60namespace virtualcamera {
61
62using ::aidl::android::hardware::camera::common::Status;
63using ::aidl::android::hardware::camera::device::BufferStatus;
Jan Sebechlebsky4ce32082024-02-14 16:02:11 +010064using ::aidl::android::hardware::camera::device::CameraBlob;
65using ::aidl::android::hardware::camera::device::CameraBlobId;
Jan Sebechlebsky5cb39962023-11-22 17:33:07 +010066using ::aidl::android::hardware::camera::device::CameraMetadata;
67using ::aidl::android::hardware::camera::device::CaptureResult;
68using ::aidl::android::hardware::camera::device::ErrorCode;
69using ::aidl::android::hardware::camera::device::ErrorMsg;
70using ::aidl::android::hardware::camera::device::ICameraDeviceCallback;
71using ::aidl::android::hardware::camera::device::NotifyMsg;
72using ::aidl::android::hardware::camera::device::ShutterMsg;
73using ::aidl::android::hardware::camera::device::Stream;
74using ::aidl::android::hardware::camera::device::StreamBuffer;
75using ::aidl::android::hardware::graphics::common::PixelFormat;
76using ::android::base::ScopedLockAssertion;
77
Jan Sebechlebsky4ce32082024-02-14 16:02:11 +010078using ::android::hardware::camera::common::helper::ExifUtils;
79
Jan Sebechlebsky5cb39962023-11-22 17:33:07 +010080namespace {
81
// Helper type for std::visit: combines several lambdas into one callable
// with an overload set formed by each lambda's operator().
template <class... Ts>
struct overloaded : Ts... {
  using Ts::operator()...;
};
// Explicit deduction guide (not needed as of C++20).
template <class... Ts>
overloaded(Ts...) -> overloaded<Ts...>;
90
using namespace std::chrono_literals;

// Timeout used when waiting on a buffer's acquire fence before rendering
// into it (used by the render paths later in this file).
static constexpr std::chrono::milliseconds kAcquireFenceTimeout = 500ms;

// See REQUEST_PIPELINE_DEPTH in CaptureResult.java.
// This roughly corresponds to frame latency, we set to
// documented minimum of 2.
static constexpr uint8_t kPipelineDepth = 2;

// Upper bound for the compressed JPEG thumbnail embedded in EXIF data.
static constexpr size_t kJpegThumbnailBufferSize = 32 * 1024;  // 32 KiB

// Sentinel task instance used to ask the render thread to update the input
// texture without processing a full capture request.
static constexpr UpdateTextureTask kUpdateTextureTask;
103
// Builds the static capture-result metadata reported for every frame.
// Since the virtual camera has no real 3A, most control states are reported
// as OFF/INACTIVE; request-dependent values (JPEG quality, orientation,
// thumbnail size, capture intent, fps range, GPS) are echoed back from
// |requestSettings|. Returns an empty CameraMetadata on build failure.
CameraMetadata createCaptureResultMetadata(
    const std::chrono::nanoseconds timestamp,
    const RequestSettings& requestSettings,
    const Resolution reportedSensorSize) {
  // All of the keys used in the response need to be referenced in
  // availableResultKeys in CameraCharacteristics (see initCameraCharacteristics
  // in VirtualCameraDevice.cc).
  MetadataBuilder builder =
      MetadataBuilder()
          .setAberrationCorrectionMode(
              ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF)
          .setControlAeAvailableAntibandingModes(
              {ANDROID_CONTROL_AE_ANTIBANDING_MODE_OFF})
          .setControlAeAntibandingMode(ANDROID_CONTROL_AE_ANTIBANDING_MODE_OFF)
          .setControlAeExposureCompensation(0)
          .setControlAeLockAvailable(false)
          .setControlAeLock(ANDROID_CONTROL_AE_LOCK_OFF)
          .setControlAeMode(ANDROID_CONTROL_AE_MODE_ON)
          .setControlAePrecaptureTrigger(
              // Limited devices are expected to have precapture ae enabled and
              // respond to cancellation request. Since we don't actually
              // support AE at all, let's just respect the cancellation
              // expectation in case it's requested.
              requestSettings.aePrecaptureTrigger ==
                      ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER_CANCEL
                  ? ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER_CANCEL
                  : ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER_IDLE)
          .setControlAeState(ANDROID_CONTROL_AE_STATE_INACTIVE)
          .setControlAfMode(ANDROID_CONTROL_AF_MODE_OFF)
          .setControlAfTrigger(ANDROID_CONTROL_AF_TRIGGER_IDLE)
          .setControlAfState(ANDROID_CONTROL_AF_STATE_INACTIVE)
          .setControlAwbMode(ANDROID_CONTROL_AWB_MODE_AUTO)
          .setControlAwbLock(ANDROID_CONTROL_AWB_LOCK_OFF)
          .setControlAwbState(ANDROID_CONTROL_AWB_STATE_INACTIVE)
          .setControlCaptureIntent(requestSettings.captureIntent)
          .setControlEffectMode(ANDROID_CONTROL_EFFECT_MODE_OFF)
          .setControlMode(ANDROID_CONTROL_MODE_AUTO)
          .setControlSceneMode(ANDROID_CONTROL_SCENE_MODE_DISABLED)
          .setControlVideoStabilizationMode(
              ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_OFF)
          .setCropRegion(0, 0, reportedSensorSize.width,
                         reportedSensorSize.height)
          .setFaceDetectMode(ANDROID_STATISTICS_FACE_DETECT_MODE_OFF)
          .setFlashState(ANDROID_FLASH_STATE_UNAVAILABLE)
          .setFlashMode(ANDROID_FLASH_MODE_OFF)
          .setFocalLength(VirtualCameraDevice::kFocalLength)
          .setJpegQuality(requestSettings.jpegQuality)
          .setJpegOrientation(requestSettings.jpegOrientation)
          .setJpegThumbnailSize(requestSettings.thumbnailResolution.width,
                                requestSettings.thumbnailResolution.height)
          .setJpegThumbnailQuality(requestSettings.thumbnailJpegQuality)
          .setLensOpticalStabilizationMode(
              ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF)
          .setNoiseReductionMode(ANDROID_NOISE_REDUCTION_MODE_OFF)
          .setPipelineDepth(kPipelineDepth)
          .setSensorTimestamp(timestamp)
          .setStatisticsHotPixelMapMode(
              ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_OFF)
          .setStatisticsLensShadingMapMode(
              ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_OFF)
          .setStatisticsSceneFlicker(ANDROID_STATISTICS_SCENE_FLICKER_NONE);

  if (requestSettings.fpsRange.has_value()) {
    builder.setControlAeTargetFpsRange(requestSettings.fpsRange.value());
  }

  if (requestSettings.gpsCoordinates.has_value()) {
    const GpsCoordinates& coordinates = requestSettings.gpsCoordinates.value();
    builder.setJpegGpsCoordinates(coordinates);
  }

  std::unique_ptr<CameraMetadata> metadata = builder.build();

  if (metadata == nullptr) {
    ALOGE("%s: Failed to build capture result metadata", __func__);
    return CameraMetadata();
  }
  return std::move(*metadata);
}
183
184NotifyMsg createShutterNotifyMsg(int frameNumber,
185 std::chrono::nanoseconds timestamp) {
186 NotifyMsg msg;
187 msg.set<NotifyMsg::Tag::shutter>(ShutterMsg{
188 .frameNumber = frameNumber,
189 .timestamp = timestamp.count(),
190 });
191 return msg;
192}
193
194NotifyMsg createBufferErrorNotifyMsg(int frameNumber, int streamId) {
195 NotifyMsg msg;
196 msg.set<NotifyMsg::Tag::error>(ErrorMsg{.frameNumber = frameNumber,
197 .errorStreamId = streamId,
198 .errorCode = ErrorCode::ERROR_BUFFER});
199 return msg;
200}
201
202NotifyMsg createRequestErrorNotifyMsg(int frameNumber) {
203 NotifyMsg msg;
204 msg.set<NotifyMsg::Tag::error>(ErrorMsg{
Jan Sebechlebskyb0d8cab2023-11-28 10:55:04 +0100205 .frameNumber = frameNumber,
206 // errorStreamId needs to be set to -1 for ERROR_REQUEST
207 // (not tied to specific stream).
208 .errorStreamId = -1,
209 .errorCode = ErrorCode::ERROR_REQUEST});
Jan Sebechlebsky5cb39962023-11-22 17:33:07 +0100210 return msg;
211}
212
Jan Sebechlebsky042d1fb2023-12-12 16:37:00 +0100213std::shared_ptr<EglFrameBuffer> allocateTemporaryFramebuffer(
214 EGLDisplay eglDisplay, const uint width, const int height) {
215 const AHardwareBuffer_Desc desc{
216 .width = static_cast<uint32_t>(width),
217 .height = static_cast<uint32_t>(height),
218 .layers = 1,
219 .format = AHARDWAREBUFFER_FORMAT_Y8Cb8Cr8_420,
220 .usage = AHARDWAREBUFFER_USAGE_GPU_FRAMEBUFFER |
221 AHARDWAREBUFFER_USAGE_CPU_READ_OFTEN,
222 .rfu0 = 0,
223 .rfu1 = 0};
224
225 AHardwareBuffer* hwBufferPtr;
226 int status = AHardwareBuffer_allocate(&desc, &hwBufferPtr);
227 if (status != NO_ERROR) {
228 ALOGE(
229 "%s: Failed to allocate hardware buffer for temporary framebuffer: %d",
230 __func__, status);
231 return nullptr;
232 }
233
234 return std::make_shared<EglFrameBuffer>(
235 eglDisplay,
236 std::shared_ptr<AHardwareBuffer>(hwBufferPtr, AHardwareBuffer_release));
237}
238
239bool isYuvFormat(const PixelFormat pixelFormat) {
240 switch (static_cast<android_pixel_format_t>(pixelFormat)) {
241 case HAL_PIXEL_FORMAT_YCBCR_422_I:
242 case HAL_PIXEL_FORMAT_YCBCR_422_SP:
243 case HAL_PIXEL_FORMAT_Y16:
244 case HAL_PIXEL_FORMAT_YV12:
245 case HAL_PIXEL_FORMAT_YCBCR_420_888:
246 return true;
247 default:
248 return false;
249 }
250}
251
Jan Sebechlebsky4ce32082024-02-14 16:02:11 +0100252std::vector<uint8_t> createExif(
Vadim Caenc0aff132024-03-12 17:20:07 +0100253 Resolution imageSize, const CameraMetadata resultMetadata,
254 const std::vector<uint8_t>& compressedThumbnail = {}) {
Jan Sebechlebsky4ce32082024-02-14 16:02:11 +0100255 std::unique_ptr<ExifUtils> exifUtils(ExifUtils::create());
256 exifUtils->initialize();
Vadim Caenc0aff132024-03-12 17:20:07 +0100257
258 // Make a copy of the metadata in order to converting it the HAL metadata
259 // format (as opposed to the AIDL class) and use the setFromMetadata method
260 // from ExifUtil
261 camera_metadata_t* rawSettings =
262 clone_camera_metadata((camera_metadata_t*)resultMetadata.metadata.data());
263 if (rawSettings != nullptr) {
264 android::hardware::camera::common::helper::CameraMetadata halMetadata(
265 rawSettings);
266 exifUtils->setFromMetadata(halMetadata, imageSize.width, imageSize.height);
267 }
268 exifUtils->setMake(VirtualCameraDevice::kDefaultMakeAndModel);
269 exifUtils->setModel(VirtualCameraDevice::kDefaultMakeAndModel);
270 exifUtils->setFlash(0);
Jan Sebechlebsky4ce32082024-02-14 16:02:11 +0100271
272 std::vector<uint8_t> app1Data;
273
274 size_t thumbnailDataSize = compressedThumbnail.size();
275 const void* thumbnailData =
276 thumbnailDataSize > 0
277 ? reinterpret_cast<const void*>(compressedThumbnail.data())
278 : nullptr;
279
280 if (!exifUtils->generateApp1(thumbnailData, thumbnailDataSize)) {
281 ALOGE("%s: Failed to generate APP1 segment for EXIF metadata", __func__);
282 return app1Data;
283 }
284
285 const uint8_t* data = exifUtils->getApp1Buffer();
286 const size_t size = exifUtils->getApp1Length();
287
288 app1Data.insert(app1Data.end(), data, data + size);
289 return app1Data;
290}
291
Jan Sebechlebskyb8282672024-05-22 10:43:37 +0200292std::chrono::nanoseconds getMaxFrameDuration(
293 const RequestSettings& requestSettings) {
294 if (requestSettings.fpsRange.has_value()) {
295 return std::chrono::nanoseconds(static_cast<uint64_t>(
296 1e9 / std::max(1, requestSettings.fpsRange->minFps)));
297 }
298 return std::chrono::nanoseconds(
299 static_cast<uint64_t>(1e9 / VirtualCameraDevice::kMinFps));
300}
301
Jan Sebechlebsky18ac32c2024-06-07 09:53:53 +0200302class FrameAvailableListenerProxy : public ConsumerBase::FrameAvailableListener {
303 public:
304 FrameAvailableListenerProxy(std::function<void()> callback)
305 : mOnFrameAvailableCallback(callback) {
306 }
307
308 virtual void onFrameAvailable(const BufferItem&) override {
309 ALOGV("%s: onFrameAvailable", __func__);
310 mOnFrameAvailableCallback();
311 }
312
313 private:
314 std::function<void()> mOnFrameAvailableCallback;
315};
316
Jan Sebechlebsky5cb39962023-11-22 17:33:07 +0100317} // namespace
318
319CaptureRequestBuffer::CaptureRequestBuffer(int streamId, int bufferId,
320 sp<Fence> fence)
321 : mStreamId(streamId), mBufferId(bufferId), mFence(fence) {
322}
323
324int CaptureRequestBuffer::getStreamId() const {
325 return mStreamId;
326}
327
328int CaptureRequestBuffer::getBufferId() const {
329 return mBufferId;
330}
331
332sp<Fence> CaptureRequestBuffer::getFence() const {
333 return mFence;
334}
335
336VirtualCameraRenderThread::VirtualCameraRenderThread(
Jan Sebechlebskybb01c1d2024-02-12 11:41:37 +0100337 VirtualCameraSessionContext& sessionContext,
338 const Resolution inputSurfaceSize, const Resolution reportedSensorSize,
Jan Sebechlebsky288900f2024-05-24 14:47:54 +0200339 std::shared_ptr<ICameraDeviceCallback> cameraDeviceCallback)
Jan Sebechlebsky5cb39962023-11-22 17:33:07 +0100340 : mCameraDeviceCallback(cameraDeviceCallback),
Jan Sebechlebskybb01c1d2024-02-12 11:41:37 +0100341 mInputSurfaceSize(inputSurfaceSize),
342 mReportedSensorSize(reportedSensorSize),
Jan Sebechlebsky9fcd0262024-05-31 15:20:09 +0200343 mSessionContext(sessionContext),
344 mInputSurfaceFuture(mInputSurfacePromise.get_future()) {
Jan Sebechlebsky5cb39962023-11-22 17:33:07 +0100345}
346
// Signals the render thread to exit and joins it, so the thread never
// outlives the object it captures by `this`.
VirtualCameraRenderThread::~VirtualCameraRenderThread() {
  stop();
  // join() is only valid on a started (joinable) thread - start() may never
  // have been called.
  if (mThread.joinable()) {
    mThread.join();
  }
}
353
// Captures everything needed to serve one capture request: the frame
// number, the output buffers to fill, and the request settings. The buffer
// list and settings are copied so the task owns them independently of the
// caller.
ProcessCaptureRequestTask::ProcessCaptureRequestTask(
    int frameNumber, const std::vector<CaptureRequestBuffer>& requestBuffers,
    const RequestSettings& requestSettings)
    : mFrameNumber(frameNumber),
      mBuffers(requestBuffers),
      mRequestSettings(requestSettings) {
}
361
// Frame number this capture request corresponds to.
int ProcessCaptureRequestTask::getFrameNumber() const {
  return mFrameNumber;
}

// Output buffers to be filled for this request.
const std::vector<CaptureRequestBuffer>& ProcessCaptureRequestTask::getBuffers()
    const {
  return mBuffers;
}

// Settings attached to this capture request.
const RequestSettings& ProcessCaptureRequestTask::getRequestSettings() const {
  return mRequestSettings;
}
374
// Asks the render thread to consume the newest frame from the input surface
// even when no capture request is pending (invoked from the frame-available
// listener).
void VirtualCameraRenderThread::requestTextureUpdate() {
  std::lock_guard<std::mutex> lock(mLock);
  // If queue is not empty, we don't need to set the mTextureUpdateRequested
  // flag, since the texture will be updated during ProcessCaptureRequestTask
  // processing anyway.
  if (mQueue.empty()) {
    mTextureUpdateRequested = true;
    mCondVar.notify_one();
  }
}
385
Jan Sebechlebsky5cb39962023-11-22 17:33:07 +0100386void VirtualCameraRenderThread::enqueueTask(
387 std::unique_ptr<ProcessCaptureRequestTask> task) {
388 std::lock_guard<std::mutex> lock(mLock);
Jan Sebechlebsky18ac32c2024-06-07 09:53:53 +0200389 // When enqueving process capture request task, clear the
390 // mTextureUpdateRequested flag. If this flag is set, the texture was not yet
391 // updated and it will be updated when processing ProcessCaptureRequestTask
392 // anyway.
393 mTextureUpdateRequested = false;
Jan Sebechlebsky5cb39962023-11-22 17:33:07 +0100394 mQueue.emplace_back(std::move(task));
395 mCondVar.notify_one();
396}
397
// Drains the pending task queue, failing every queued capture request
// (each is reported back to the client via flushCaptureRequest).
void VirtualCameraRenderThread::flush() {
  std::lock_guard<std::mutex> lock(mLock);
  while (!mQueue.empty()) {
    std::unique_ptr<ProcessCaptureRequestTask> task = std::move(mQueue.front());
    mQueue.pop_front();
    flushCaptureRequest(*task);
  }
}
406
// Spawns the render thread running threadLoop().
void VirtualCameraRenderThread::start() {
  mThread = std::thread(&VirtualCameraRenderThread::threadLoop, this);
}

// Asks the render thread to exit; the thread is joined in the destructor.
void VirtualCameraRenderThread::stop() {
  {
    std::lock_guard<std::mutex> lock(mLock);
    mPendingExit = true;
    mCondVar.notify_one();
  }
}
418
// Returns the input Surface, blocking until the render thread has created
// the surface texture and fulfilled the promise in threadLoop().
sp<Surface> VirtualCameraRenderThread::getInputSurface() {
  return mInputSurfaceFuture.get();
}
422
// Blocks until there is something for the render thread to do and returns
// it: a null task on pending exit, the texture-update sentinel when an idle
// texture refresh was requested, or the next queued capture request task.
RenderThreadTask VirtualCameraRenderThread::dequeueTask() {
  std::unique_lock<std::mutex> lock(mLock);
  // Clang's thread safety analysis doesn't perform alias analysis,
  // so it doesn't support moveable std::unique_lock.
  //
  // Lock assertion below is basically explicit declaration that
  // the lock is held in this scope, which is true, since it's only
  // released during waiting inside mCondVar.wait calls.
  ScopedLockAssertion lockAssertion(mLock);

  mCondVar.wait(lock, [this]() REQUIRES(mLock) {
    return mPendingExit || mTextureUpdateRequested || !mQueue.empty();
  });
  if (mPendingExit) {
    // Render thread task with null task signals render thread to terminate.
    return RenderThreadTask(nullptr);
  }
  if (mTextureUpdateRequested) {
    // If mTextureUpdateRequested, it's guaranteed the queue is empty, return
    // kUpdateTextureTask to signal we want render thread to update the texture
    // (consume buffer from the queue).
    mTextureUpdateRequested = false;
    return RenderThreadTask(kUpdateTextureTask);
  }
  RenderThreadTask task(std::move(mQueue.front()));
  mQueue.pop_front();
  return task;
}
451
// Render thread main loop: sets up EGL state, publishes the input surface,
// then serves tasks until dequeueTask() returns a null (exit) task. All EGL
// objects are created AND destroyed on this thread.
void VirtualCameraRenderThread::threadLoop() {
  ALOGV("Render thread starting");

  mEglDisplayContext = std::make_unique<EglDisplayContext>();
  mEglTextureYuvProgram =
      std::make_unique<EglTextureProgram>(EglTextureProgram::TextureFormat::YUV);
  mEglTextureRgbProgram = std::make_unique<EglTextureProgram>(
      EglTextureProgram::TextureFormat::RGBA);
  mEglSurfaceTexture = std::make_unique<EglSurfaceTexture>(
      mInputSurfaceSize.width, mInputSurfaceSize.height);
  // Request an idle texture update whenever a new input frame arrives.
  sp<FrameAvailableListenerProxy> frameAvailableListener =
      sp<FrameAvailableListenerProxy>::make(
          [this]() { requestTextureUpdate(); });
  mEglSurfaceTexture->setFrameAvailableListener(frameAvailableListener);

  // Unblocks getInputSurface() callers.
  mInputSurfacePromise.set_value(mEglSurfaceTexture->getSurface());

  // Loop ends when dequeueTask() returns the null (exit) task.
  while (RenderThreadTask task = dequeueTask()) {
    std::visit(
        overloaded{[this](const std::unique_ptr<ProcessCaptureRequestTask>& t) {
                     processTask(*t);
                   },
                   [this](const UpdateTextureTask&) {
                     ALOGV("Idle update of the texture");
                     mEglSurfaceTexture->updateTexture();
                   }},
        task);
  }

  // Destroy EGL utilities still on the render thread.
  mEglSurfaceTexture.reset();
  mEglTextureRgbProgram.reset();
  mEglTextureYuvProgram.reset();
  mEglDisplayContext.reset();

  ALOGV("Render thread exiting");
}
489
// Serves one capture request: paces frame acquisition against the requested
// fps range, pulls the latest frame from the input surface, renders it into
// every output buffer (BLOB or image path per stream format), and reports
// shutter + capture result (or per-buffer errors) through the camera device
// callback.
void VirtualCameraRenderThread::processTask(
    const ProcessCaptureRequestTask& request) {
  std::chrono::nanoseconds timestamp =
      std::chrono::duration_cast<std::chrono::nanoseconds>(
          std::chrono::steady_clock::now().time_since_epoch());
  // Atomically publish the new acquisition timestamp and fetch the previous
  // one; relaxed ordering is used (value is only read by this pacing logic).
  const std::chrono::nanoseconds lastAcquisitionTimestamp(
      mLastAcquisitionTimestampNanoseconds.exchange(timestamp.count(),
                                                    std::memory_order_relaxed));

  if (request.getRequestSettings().fpsRange) {
    // Throttle if we are producing frames faster than the requested maxFps.
    const int maxFps =
        std::max(1, request.getRequestSettings().fpsRange->maxFps);
    const std::chrono::nanoseconds minFrameDuration(
        static_cast<uint64_t>(1e9 / maxFps));
    const std::chrono::nanoseconds frameDuration =
        timestamp - lastAcquisitionTimestamp;
    if (frameDuration < minFrameDuration) {
      // We're too fast for the configured maxFps, let's wait a bit.
      const std::chrono::nanoseconds sleepTime =
          minFrameDuration - frameDuration;
      ALOGV("Current frame duration would be %" PRIu64
            " ns corresponding to, "
            "sleeping for %" PRIu64
            " ns before updating texture to match maxFps %d",
            static_cast<uint64_t>(frameDuration.count()),
            static_cast<uint64_t>(sleepTime.count()), maxFps);

      std::this_thread::sleep_for(sleepTime);
      // Re-stamp after sleeping so the reported timestamp matches the actual
      // acquisition time.
      timestamp = std::chrono::duration_cast<std::chrono::nanoseconds>(
          std::chrono::steady_clock::now().time_since_epoch());
      mLastAcquisitionTimestampNanoseconds.store(timestamp.count(),
                                                 std::memory_order_relaxed);
    }
  }

  // Calculate the maximal amount of time we can afford to wait for next frame.
  const std::chrono::nanoseconds maxFrameDuration =
      getMaxFrameDuration(request.getRequestSettings());
  const std::chrono::nanoseconds elapsedDuration =
      timestamp - lastAcquisitionTimestamp;
  if (elapsedDuration < maxFrameDuration) {
    // We can afford to wait for next frame.
    // Note that if there's already new frame in the input Surface, the call
    // below returns immediately.
    bool gotNewFrame = mEglSurfaceTexture->waitForNextFrame(maxFrameDuration -
                                                            elapsedDuration);
    timestamp = std::chrono::duration_cast<std::chrono::nanoseconds>(
        std::chrono::steady_clock::now().time_since_epoch());
    if (!gotNewFrame) {
      // Not an error: the previous frame is simply repeated.
      ALOGV(
          "%s: No new frame received on input surface after waiting for "
          "%" PRIu64 "ns, repeating last frame.",
          __func__,
          static_cast<uint64_t>((timestamp - lastAcquisitionTimestamp).count()));
    }
    mLastAcquisitionTimestampNanoseconds.store(timestamp.count(),
                                               std::memory_order_relaxed);
  }
  // Acquire new (most recent) image from the Surface.
  mEglSurfaceTexture->updateTexture();

  CaptureResult captureResult;
  captureResult.fmqResultSize = 0;
  captureResult.frameNumber = request.getFrameNumber();
  // Partial result needs to be set to 1 when metadata are present.
  captureResult.partialResult = 1;
  captureResult.inputBuffer.streamId = -1;
  captureResult.physicalCameraMetadata.resize(0);
  captureResult.result = createCaptureResultMetadata(
      timestamp, request.getRequestSettings(), mReportedSensorSize);

  const std::vector<CaptureRequestBuffer>& buffers = request.getBuffers();
  captureResult.outputBuffers.resize(buffers.size());

  // Render into each requested output buffer; failures are reported
  // per-buffer rather than failing the whole request.
  for (int i = 0; i < buffers.size(); ++i) {
    const CaptureRequestBuffer& reqBuffer = buffers[i];
    StreamBuffer& resBuffer = captureResult.outputBuffers[i];
    resBuffer.streamId = reqBuffer.getStreamId();
    resBuffer.bufferId = reqBuffer.getBufferId();
    resBuffer.status = BufferStatus::OK;

    const std::optional<Stream> streamConfig =
        mSessionContext.getStreamConfig(reqBuffer.getStreamId());

    if (!streamConfig.has_value()) {
      resBuffer.status = BufferStatus::ERROR;
      continue;
    }

    // BLOB streams produce JPEG output; everything else goes through the
    // image (texture) render path.
    auto status = streamConfig->format == PixelFormat::BLOB
                      ? renderIntoBlobStreamBuffer(
                            reqBuffer.getStreamId(), reqBuffer.getBufferId(),
                            captureResult.result, request.getRequestSettings(),
                            reqBuffer.getFence())
                      : renderIntoImageStreamBuffer(reqBuffer.getStreamId(),
                                                    reqBuffer.getBufferId(),
                                                    reqBuffer.getFence());
    if (!status.isOk()) {
      resBuffer.status = BufferStatus::ERROR;
    }
  }

  // Shutter notification first, followed by ERROR_BUFFER messages for any
  // buffer that failed to render.
  std::vector<NotifyMsg> notifyMsg{
      createShutterNotifyMsg(request.getFrameNumber(), timestamp)};
  for (const StreamBuffer& resBuffer : captureResult.outputBuffers) {
    if (resBuffer.status != BufferStatus::OK) {
      notifyMsg.push_back(createBufferErrorNotifyMsg(request.getFrameNumber(),
                                                     resBuffer.streamId));
    }
  }

  auto status = mCameraDeviceCallback->notify(notifyMsg);
  if (!status.isOk()) {
    ALOGE("%s: notify call failed: %s", __func__,
          status.getDescription().c_str());
    return;
  }

  std::vector<::aidl::android::hardware::camera::device::CaptureResult>
      captureResults(1);
  captureResults[0] = std::move(captureResult);

  status = mCameraDeviceCallback->processCaptureResult(captureResults);
  if (!status.isOk()) {
    ALOGE("%s: processCaptureResult call failed: %s", __func__,
          status.getDescription().c_str());
    return;
  }

  ALOGV("%s: Successfully called processCaptureResult", __func__);
}
621
// Fails a capture request without rendering: every buffer is returned with
// BufferStatus::ERROR (its acquire fence handed back as release fence) and
// an ERROR_REQUEST notification is sent to the client.
void VirtualCameraRenderThread::flushCaptureRequest(
    const ProcessCaptureRequestTask& request) {
  CaptureResult captureResult;
  captureResult.fmqResultSize = 0;
  captureResult.frameNumber = request.getFrameNumber();
  captureResult.inputBuffer.streamId = -1;

  const std::vector<CaptureRequestBuffer>& buffers = request.getBuffers();
  captureResult.outputBuffers.resize(buffers.size());

  for (int i = 0; i < buffers.size(); ++i) {
    const CaptureRequestBuffer& reqBuffer = buffers[i];
    StreamBuffer& resBuffer = captureResult.outputBuffers[i];
    resBuffer.streamId = reqBuffer.getStreamId();
    resBuffer.bufferId = reqBuffer.getBufferId();
    resBuffer.status = BufferStatus::ERROR;
    // Since the buffer was never written, pass the (duplicated) acquire
    // fence back as the release fence.
    sp<Fence> fence = reqBuffer.getFence();
    if (fence != nullptr && fence->isValid()) {
      resBuffer.releaseFence.fds.emplace_back(fence->dup());
    }
  }

  auto status = mCameraDeviceCallback->notify(
      {createRequestErrorNotifyMsg(request.getFrameNumber())});
  if (!status.isOk()) {
    ALOGE("%s: notify call failed: %s", __func__,
          status.getDescription().c_str());
    return;
  }

  std::vector<::aidl::android::hardware::camera::device::CaptureResult>
      captureResults(1);
  captureResults[0] = std::move(captureResult);

  status = mCameraDeviceCallback->processCaptureResult(captureResults);
  if (!status.isOk()) {
    ALOGE("%s: processCaptureResult call failed: %s", __func__,
          status.getDescription().c_str());
  }
}
662
Jan Sebechlebsky4ce32082024-02-14 16:02:11 +0100663std::vector<uint8_t> VirtualCameraRenderThread::createThumbnail(
664 const Resolution resolution, const int quality) {
665 if (resolution.width == 0 || resolution.height == 0) {
666 ALOGV("%s: Skipping thumbnail creation, zero size requested", __func__);
667 return {};
668 }
669
670 ALOGV("%s: Creating thumbnail with size %d x %d, quality %d", __func__,
671 resolution.width, resolution.height, quality);
Jan Sebechlebskyb3771312024-03-15 10:38:02 +0100672 Resolution bufferSize = roundTo2DctSize(resolution);
Jan Sebechlebsky4ce32082024-02-14 16:02:11 +0100673 std::shared_ptr<EglFrameBuffer> framebuffer = allocateTemporaryFramebuffer(
Jan Sebechlebskyb3771312024-03-15 10:38:02 +0100674 mEglDisplayContext->getEglDisplay(), bufferSize.width, bufferSize.height);
Jan Sebechlebsky4ce32082024-02-14 16:02:11 +0100675 if (framebuffer == nullptr) {
676 ALOGE(
677 "Failed to allocate temporary framebuffer for JPEG thumbnail "
678 "compression");
679 return {};
680 }
681
682 // TODO(b/324383963) Add support for letterboxing if the thumbnail size
683 // doesn't correspond
684 // to input texture aspect ratio.
Jan Sebechlebskyb3771312024-03-15 10:38:02 +0100685 if (!renderIntoEglFramebuffer(*framebuffer, /*fence=*/nullptr,
686 Rect(resolution.width, resolution.height))
687 .isOk()) {
Jan Sebechlebsky4ce32082024-02-14 16:02:11 +0100688 ALOGE(
689 "Failed to render input texture into temporary framebuffer for JPEG "
690 "thumbnail");
691 return {};
692 }
693
Jan Sebechlebsky4ce32082024-02-14 16:02:11 +0100694 std::vector<uint8_t> compressedThumbnail;
695 compressedThumbnail.resize(kJpegThumbnailBufferSize);
Jan Sebechlebskyb3771312024-03-15 10:38:02 +0100696 ALOGE("%s: Compressing thumbnail %d x %d", __func__, resolution.width,
697 resolution.height);
698 std::optional<size_t> compressedSize =
699 compressJpeg(resolution.width, resolution.height, quality,
700 framebuffer->getHardwareBuffer(), {},
701 compressedThumbnail.size(), compressedThumbnail.data());
Jan Sebechlebsky4ce32082024-02-14 16:02:11 +0100702 if (!compressedSize.has_value()) {
703 ALOGE("%s: Failed to compress jpeg thumbnail", __func__);
704 return {};
705 }
706 compressedThumbnail.resize(compressedSize.value());
707 return compressedThumbnail;
708}
709
// Compresses the current input texture into the JPEG (BLOB) output buffer of
// the given stream.
//
// The output buffer follows the camera HAL BLOB layout: the JPEG payload is
// written at the start of the buffer and a CameraBlob transfer header carrying
// the actual payload size is placed at the very end of the buffer, at offset
// stream->bufferSize - sizeof(CameraBlob).
//
// streamId / bufferId identify the output hardware buffer in the session
// context; resultMetadata is embedded into the EXIF APP1 segment;
// requestSettings supplies JPEG quality and thumbnail parameters; fence is
// forwarded to the plane lock guard (presumably waited on before CPU writes —
// see PlanesLockGuard).
//
// Returns INTERNAL_ERROR if any step (buffer lookup, stream lookup, temporary
// framebuffer allocation, plane lock, or JPEG compression) fails; propagates
// the render status if rendering into the temporary framebuffer fails.
ndk::ScopedAStatus VirtualCameraRenderThread::renderIntoBlobStreamBuffer(
    const int streamId, const int bufferId, const CameraMetadata& resultMetadata,
    const RequestSettings& requestSettings, sp<Fence> fence) {
  std::shared_ptr<AHardwareBuffer> hwBuffer =
      mSessionContext.fetchHardwareBuffer(streamId, bufferId);
  if (hwBuffer == nullptr) {
    ALOGE("%s: Failed to fetch hardware buffer %d for streamId %d", __func__,
          bufferId, streamId);
    return cameraStatus(Status::INTERNAL_ERROR);
  }

  // Stream config provides the output dimensions and total BLOB buffer size.
  std::optional<Stream> stream = mSessionContext.getStreamConfig(streamId);
  if (!stream.has_value()) {
    ALOGE("%s, failed to fetch information about stream %d", __func__, streamId);
    return cameraStatus(Status::INTERNAL_ERROR);
  }

  ALOGV("%s: Rendering JPEG with size %d x %d, quality %d", __func__,
        stream->width, stream->height, requestSettings.jpegQuality);

  // Let's create YUV framebuffer and render the surface into this.
  // This will take care about rescaling as well as potential format conversion.
  // The buffer dimensions need to be rounded to nearest multiple of JPEG DCT
  // size, however we pass the viewport corresponding to size of the stream so
  // the image will be only rendered to the area corresponding to the stream
  // size.
  Resolution bufferSize =
      roundTo2DctSize(Resolution(stream->width, stream->height));
  std::shared_ptr<EglFrameBuffer> framebuffer = allocateTemporaryFramebuffer(
      mEglDisplayContext->getEglDisplay(), bufferSize.width, bufferSize.height);
  if (framebuffer == nullptr) {
    ALOGE("Failed to allocate temporary framebuffer for JPEG compression");
    return cameraStatus(Status::INTERNAL_ERROR);
  }

  // Render into temporary framebuffer.
  ndk::ScopedAStatus status = renderIntoEglFramebuffer(
      *framebuffer, /*fence=*/nullptr, Rect(stream->width, stream->height));
  if (!status.isOk()) {
    ALOGE("Failed to render input texture into temporary framebuffer");
    return status;
  }

  // Lock the destination buffer planes for CPU writing; the guard unlocks on
  // scope exit. The acquire fence is handed to the guard.
  PlanesLockGuard planesLock(hwBuffer, AHARDWAREBUFFER_USAGE_CPU_WRITE_OFTEN,
                             fence);
  if (planesLock.getStatus() != OK) {
    return cameraStatus(Status::INTERNAL_ERROR);
  }

  // Build the EXIF APP1 segment (including the optional compressed thumbnail)
  // and compress the rendered frame directly into the locked output plane,
  // leaving sizeof(CameraBlob) bytes at the end for the transfer header.
  std::vector<uint8_t> app1ExifData =
      createExif(Resolution(stream->width, stream->height), resultMetadata,
                 createThumbnail(requestSettings.thumbnailResolution,
                                 requestSettings.thumbnailJpegQuality));
  std::optional<size_t> compressedSize = compressJpeg(
      stream->width, stream->height, requestSettings.jpegQuality,
      framebuffer->getHardwareBuffer(), app1ExifData,
      stream->bufferSize - sizeof(CameraBlob), (*planesLock).planes[0].data);

  if (!compressedSize.has_value()) {
    ALOGE("%s: Failed to compress JPEG image", __func__);
    return cameraStatus(Status::INTERNAL_ERROR);
  }

  // Transfer header announcing the real JPEG payload size to the consumer.
  CameraBlob cameraBlob{
      .blobId = CameraBlobId::JPEG,
      .blobSizeBytes = static_cast<int32_t>(compressedSize.value())};

  // Per the BLOB contract, the header lives at the very end of the buffer.
  memcpy(reinterpret_cast<uint8_t*>((*planesLock).planes[0].data) +
             (stream->bufferSize - sizeof(cameraBlob)),
         &cameraBlob, sizeof(cameraBlob));

  ALOGV("%s: Successfully compressed JPEG image, resulting size %zu B",
        __func__, compressedSize.value());

  return ndk::ScopedAStatus::ok();
}
786
787ndk::ScopedAStatus VirtualCameraRenderThread::renderIntoImageStreamBuffer(
788 int streamId, int bufferId, sp<Fence> fence) {
789 ALOGV("%s", __func__);
790
791 const std::chrono::nanoseconds before =
792 std::chrono::duration_cast<std::chrono::nanoseconds>(
793 std::chrono::steady_clock::now().time_since_epoch());
794
795 // Render test pattern using EGL.
796 std::shared_ptr<EglFrameBuffer> framebuffer =
797 mSessionContext.fetchOrCreateEglFramebuffer(
798 mEglDisplayContext->getEglDisplay(), streamId, bufferId);
799 if (framebuffer == nullptr) {
800 ALOGE(
801 "%s: Failed to get EGL framebuffer corresponding to buffer id "
802 "%d for streamId %d",
803 __func__, bufferId, streamId);
804 return cameraStatus(Status::ILLEGAL_ARGUMENT);
805 }
806
Jan Sebechlebsky042d1fb2023-12-12 16:37:00 +0100807 ndk::ScopedAStatus status = renderIntoEglFramebuffer(*framebuffer, fence);
Jan Sebechlebsky5cb39962023-11-22 17:33:07 +0100808
809 const std::chrono::nanoseconds after =
810 std::chrono::duration_cast<std::chrono::nanoseconds>(
811 std::chrono::steady_clock::now().time_since_epoch());
812
813 ALOGV("Rendering to buffer %d, stream %d took %lld ns", bufferId, streamId,
814 after.count() - before.count());
815
816 return ndk::ScopedAStatus::ok();
817}
818
Jan Sebechlebsky042d1fb2023-12-12 16:37:00 +0100819ndk::ScopedAStatus VirtualCameraRenderThread::renderIntoEglFramebuffer(
Jan Sebechlebskyb3771312024-03-15 10:38:02 +0100820 EglFrameBuffer& framebuffer, sp<Fence> fence, std::optional<Rect> viewport) {
Jan Sebechlebsky042d1fb2023-12-12 16:37:00 +0100821 ALOGV("%s", __func__);
822 // Wait for fence to clear.
823 if (fence != nullptr && fence->isValid()) {
824 status_t ret = fence->wait(kAcquireFenceTimeout.count());
825 if (ret != 0) {
826 ALOGE("Timeout while waiting for the acquire fence for buffer");
827 return cameraStatus(Status::INTERNAL_ERROR);
828 }
829 }
830
831 mEglDisplayContext->makeCurrent();
832 framebuffer.beforeDraw();
833
Jan Sebechlebskyb3771312024-03-15 10:38:02 +0100834 Rect viewportRect =
835 viewport.value_or(Rect(framebuffer.getWidth(), framebuffer.getHeight()));
836 glViewport(viewportRect.leftTop().x, viewportRect.leftTop().y,
837 viewportRect.getWidth(), viewportRect.getHeight());
838
Jan Sebechlebsky042d1fb2023-12-12 16:37:00 +0100839 sp<GraphicBuffer> textureBuffer = mEglSurfaceTexture->getCurrentBuffer();
840 if (textureBuffer == nullptr) {
841 // If there's no current buffer, nothing was written to the surface and
842 // texture is not initialized yet. Let's render the framebuffer black
843 // instead of rendering the texture.
844 glClearColor(0.0f, 0.5f, 0.5f, 0.0f);
845 glClear(GL_COLOR_BUFFER_BIT);
846 } else {
847 const bool renderSuccess =
848 isYuvFormat(static_cast<PixelFormat>(textureBuffer->getPixelFormat()))
Jan Sebechlebsky99492e32023-12-20 09:49:45 +0100849 ? mEglTextureYuvProgram->draw(
850 mEglSurfaceTexture->getTextureId(),
851 mEglSurfaceTexture->getTransformMatrix())
852 : mEglTextureRgbProgram->draw(
853 mEglSurfaceTexture->getTextureId(),
854 mEglSurfaceTexture->getTransformMatrix());
Jan Sebechlebsky042d1fb2023-12-12 16:37:00 +0100855 if (!renderSuccess) {
856 ALOGE("%s: Failed to render texture", __func__);
857 return cameraStatus(Status::INTERNAL_ERROR);
858 }
859 }
860 framebuffer.afterDraw();
861
862 return ndk::ScopedAStatus::ok();
863}
864
Jan Sebechlebsky5cb39962023-11-22 17:33:07 +0100865} // namespace virtualcamera
866} // namespace companion
867} // namespace android