blob: 3be3e92726294c0591fbeeaa8c6e855dbc44d6e9 [file] [log] [blame]
Jan Sebechlebsky5cb39962023-11-22 17:33:07 +01001/*
2 * Copyright (C) 2023 The Android Open Source Project
3 *
4 * Licensed under the Apache License, Version 2.0 (the "License");
5 * you may not use this file except in compliance with the License.
6 * You may obtain a copy of the License at
7 *
8 * http://www.apache.org/licenses/LICENSE-2.0
9 *
10 * Unless required by applicable law or agreed to in writing, software
11 * distributed under the License is distributed on an "AS IS" BASIS,
12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 * See the License for the specific language governing permissions and
14 * limitations under the License.
15 */
16
17#define LOG_TAG "VirtualCameraRenderThread"
18#include "VirtualCameraRenderThread.h"
19
20#include <chrono>
21#include <cstddef>
Jan Sebechlebsky042d1fb2023-12-12 16:37:00 +010022#include <cstdint>
Jan Sebechlebsky5cb39962023-11-22 17:33:07 +010023#include <future>
24#include <memory>
25#include <mutex>
26#include <thread>
27
Jan Sebechlebsky9ae496f2023-12-05 15:56:28 +010028#include "GLES/gl.h"
Jan Sebechlebsky5cb39962023-11-22 17:33:07 +010029#include "VirtualCameraSessionContext.h"
30#include "aidl/android/hardware/camera/common/Status.h"
31#include "aidl/android/hardware/camera/device/BufferStatus.h"
32#include "aidl/android/hardware/camera/device/CameraMetadata.h"
33#include "aidl/android/hardware/camera/device/CaptureResult.h"
34#include "aidl/android/hardware/camera/device/ErrorCode.h"
35#include "aidl/android/hardware/camera/device/ICameraDeviceCallback.h"
36#include "aidl/android/hardware/camera/device/NotifyMsg.h"
37#include "aidl/android/hardware/camera/device/ShutterMsg.h"
38#include "aidl/android/hardware/camera/device/StreamBuffer.h"
39#include "android-base/thread_annotations.h"
40#include "android/binder_auto_utils.h"
41#include "android/hardware_buffer.h"
Jan Sebechlebsky042d1fb2023-12-12 16:37:00 +010042#include "ui/GraphicBuffer.h"
Jan Sebechlebsky5cb39962023-11-22 17:33:07 +010043#include "util/EglFramebuffer.h"
44#include "util/JpegUtil.h"
45#include "util/MetadataBuilder.h"
46#include "util/TestPatternHelper.h"
47#include "util/Util.h"
48#include "utils/Errors.h"
49
50namespace android {
51namespace companion {
52namespace virtualcamera {
53
54using ::aidl::android::hardware::camera::common::Status;
55using ::aidl::android::hardware::camera::device::BufferStatus;
56using ::aidl::android::hardware::camera::device::CameraMetadata;
57using ::aidl::android::hardware::camera::device::CaptureResult;
58using ::aidl::android::hardware::camera::device::ErrorCode;
59using ::aidl::android::hardware::camera::device::ErrorMsg;
60using ::aidl::android::hardware::camera::device::ICameraDeviceCallback;
61using ::aidl::android::hardware::camera::device::NotifyMsg;
62using ::aidl::android::hardware::camera::device::ShutterMsg;
63using ::aidl::android::hardware::camera::device::Stream;
64using ::aidl::android::hardware::camera::device::StreamBuffer;
65using ::aidl::android::hardware::graphics::common::PixelFormat;
66using ::android::base::ScopedLockAssertion;
67
68namespace {
69
70using namespace std::chrono_literals;
71
72static constexpr std::chrono::milliseconds kAcquireFenceTimeout = 500ms;
73
74CameraMetadata createCaptureResultMetadata(
75 const std::chrono::nanoseconds timestamp) {
76 std::unique_ptr<CameraMetadata> metadata =
Jan Sebechlebskyc3e1a632024-02-06 14:19:05 +010077 MetadataBuilder()
78 .setControlAeMode(ANDROID_CONTROL_AE_MODE_ON)
79 .setControlAePrecaptureTrigger(
80 ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER_IDLE)
81 .setControlAfMode(ANDROID_CONTROL_AF_MODE_AUTO)
82 .setControlAwbMode(ANDROID_CONTROL_AWB_MODE_AUTO)
83 .setFlashState(ANDROID_FLASH_STATE_UNAVAILABLE)
84 .setSensorTimestamp(timestamp)
85 .build();
Jan Sebechlebsky5cb39962023-11-22 17:33:07 +010086 if (metadata == nullptr) {
87 ALOGE("%s: Failed to build capture result metadata", __func__);
88 return CameraMetadata();
89 }
90 return std::move(*metadata);
91}
92
93NotifyMsg createShutterNotifyMsg(int frameNumber,
94 std::chrono::nanoseconds timestamp) {
95 NotifyMsg msg;
96 msg.set<NotifyMsg::Tag::shutter>(ShutterMsg{
97 .frameNumber = frameNumber,
98 .timestamp = timestamp.count(),
99 });
100 return msg;
101}
102
103NotifyMsg createBufferErrorNotifyMsg(int frameNumber, int streamId) {
104 NotifyMsg msg;
105 msg.set<NotifyMsg::Tag::error>(ErrorMsg{.frameNumber = frameNumber,
106 .errorStreamId = streamId,
107 .errorCode = ErrorCode::ERROR_BUFFER});
108 return msg;
109}
110
111NotifyMsg createRequestErrorNotifyMsg(int frameNumber) {
112 NotifyMsg msg;
113 msg.set<NotifyMsg::Tag::error>(ErrorMsg{
Jan Sebechlebskyb0d8cab2023-11-28 10:55:04 +0100114 .frameNumber = frameNumber,
115 // errorStreamId needs to be set to -1 for ERROR_REQUEST
116 // (not tied to specific stream).
117 .errorStreamId = -1,
118 .errorCode = ErrorCode::ERROR_REQUEST});
Jan Sebechlebsky5cb39962023-11-22 17:33:07 +0100119 return msg;
120}
121
Jan Sebechlebsky042d1fb2023-12-12 16:37:00 +0100122std::shared_ptr<EglFrameBuffer> allocateTemporaryFramebuffer(
123 EGLDisplay eglDisplay, const uint width, const int height) {
124 const AHardwareBuffer_Desc desc{
125 .width = static_cast<uint32_t>(width),
126 .height = static_cast<uint32_t>(height),
127 .layers = 1,
128 .format = AHARDWAREBUFFER_FORMAT_Y8Cb8Cr8_420,
129 .usage = AHARDWAREBUFFER_USAGE_GPU_FRAMEBUFFER |
130 AHARDWAREBUFFER_USAGE_CPU_READ_OFTEN,
131 .rfu0 = 0,
132 .rfu1 = 0};
133
134 AHardwareBuffer* hwBufferPtr;
135 int status = AHardwareBuffer_allocate(&desc, &hwBufferPtr);
136 if (status != NO_ERROR) {
137 ALOGE(
138 "%s: Failed to allocate hardware buffer for temporary framebuffer: %d",
139 __func__, status);
140 return nullptr;
141 }
142
143 return std::make_shared<EglFrameBuffer>(
144 eglDisplay,
145 std::shared_ptr<AHardwareBuffer>(hwBufferPtr, AHardwareBuffer_release));
146}
147
148bool isYuvFormat(const PixelFormat pixelFormat) {
149 switch (static_cast<android_pixel_format_t>(pixelFormat)) {
150 case HAL_PIXEL_FORMAT_YCBCR_422_I:
151 case HAL_PIXEL_FORMAT_YCBCR_422_SP:
152 case HAL_PIXEL_FORMAT_Y16:
153 case HAL_PIXEL_FORMAT_YV12:
154 case HAL_PIXEL_FORMAT_YCBCR_420_888:
155 return true;
156 default:
157 return false;
158 }
159}
160
Jan Sebechlebsky5cb39962023-11-22 17:33:07 +0100161} // namespace
162
163CaptureRequestBuffer::CaptureRequestBuffer(int streamId, int bufferId,
164 sp<Fence> fence)
165 : mStreamId(streamId), mBufferId(bufferId), mFence(fence) {
166}
167
// Returns the id of the stream this buffer belongs to.
int CaptureRequestBuffer::getStreamId() const {
  return mStreamId;
}
171
// Returns the id of the buffer within its stream.
int CaptureRequestBuffer::getBufferId() const {
  return mBufferId;
}
175
// Returns the acquire fence associated with this buffer (may be null).
sp<Fence> CaptureRequestBuffer::getFence() const {
  return mFence;
}
179
180VirtualCameraRenderThread::VirtualCameraRenderThread(
181 VirtualCameraSessionContext& sessionContext, const int mWidth,
182 const int mHeight,
183 std::shared_ptr<ICameraDeviceCallback> cameraDeviceCallback, bool testMode)
184 : mCameraDeviceCallback(cameraDeviceCallback),
185 mInputSurfaceWidth(mWidth),
186 mInputSurfaceHeight(mHeight),
187 mTestMode(testMode),
188 mSessionContext(sessionContext) {
189}
190
VirtualCameraRenderThread::~VirtualCameraRenderThread() {
  // Signal the render loop to exit, then wait for the thread to finish
  // before members it uses are destroyed. Join is guarded in case start()
  // was never called.
  stop();
  if (mThread.joinable()) {
    mThread.join();
  }
}
197
// A single capture request: the frame number plus the set of output buffers
// to fill. The buffer list is copied so the task owns its data.
ProcessCaptureRequestTask::ProcessCaptureRequestTask(
    int frameNumber, const std::vector<CaptureRequestBuffer>& requestBuffers)
    : mFrameNumber(frameNumber), mBuffers(requestBuffers) {
}
202
// Returns the frame number of this capture request.
int ProcessCaptureRequestTask::getFrameNumber() const {
  return mFrameNumber;
}
206
// Returns the output buffers to be filled for this request. The reference
// is valid for the lifetime of the task.
const std::vector<CaptureRequestBuffer>& ProcessCaptureRequestTask::getBuffers()
    const {
  return mBuffers;
}
211
212void VirtualCameraRenderThread::enqueueTask(
213 std::unique_ptr<ProcessCaptureRequestTask> task) {
214 std::lock_guard<std::mutex> lock(mLock);
215 mQueue.emplace_back(std::move(task));
216 mCondVar.notify_one();
217}
218
219void VirtualCameraRenderThread::flush() {
220 std::lock_guard<std::mutex> lock(mLock);
Jan Sebechlebskyb0d8cab2023-11-28 10:55:04 +0100221 while (!mQueue.empty()) {
222 std::unique_ptr<ProcessCaptureRequestTask> task = std::move(mQueue.front());
223 mQueue.pop_front();
Jan Sebechlebsky5cb39962023-11-22 17:33:07 +0100224 flushCaptureRequest(*task);
225 }
226}
227
228void VirtualCameraRenderThread::start() {
229 mThread = std::thread(&VirtualCameraRenderThread::threadLoop, this);
230}
231
232void VirtualCameraRenderThread::stop() {
233 {
234 std::lock_guard<std::mutex> lock(mLock);
235 mPendingExit = true;
236 mCondVar.notify_one();
237 }
238}
239
240sp<Surface> VirtualCameraRenderThread::getInputSurface() {
241 return mInputSurfacePromise.get_future().get();
242}
243
// Blocks until there is either a queued task or a pending exit request.
// Returns the next task, or nullptr when the thread should shut down
// (exit takes precedence over remaining queued work).
std::unique_ptr<ProcessCaptureRequestTask>
VirtualCameraRenderThread::dequeueTask() {
  std::unique_lock<std::mutex> lock(mLock);
  // Clang's thread safety analysis doesn't perform alias analysis,
  // so it doesn't support moveable std::unique_lock.
  //
  // Lock assertion below is basically explicit declaration that
  // the lock is held in this scope, which is true, since it's only
  // released during waiting inside mCondVar.wait calls.
  ScopedLockAssertion lockAssertion(mLock);

  // Wait for work or shutdown; the predicate guards against spurious wakeups.
  mCondVar.wait(lock, [this]() REQUIRES(mLock) {
    return mPendingExit || !mQueue.empty();
  });
  if (mPendingExit) {
    // nullptr tells threadLoop() to exit its processing loop.
    return nullptr;
  }
  std::unique_ptr<ProcessCaptureRequestTask> task = std::move(mQueue.front());
  mQueue.pop_front();
  return task;
}
265
// Render thread entry point: sets up the EGL state, publishes the input
// surface, then processes capture tasks until stop() is requested.
void VirtualCameraRenderThread::threadLoop() {
  ALOGV("Render thread starting");

  // EGL state is created here, on the render thread itself (EGL contexts are
  // made current per-thread), rather than in the constructor.
  mEglDisplayContext = std::make_unique<EglDisplayContext>();
  mEglTextureYuvProgram =
      std::make_unique<EglTextureProgram>(EglTextureProgram::TextureFormat::YUV);
  mEglTextureRgbProgram = std::make_unique<EglTextureProgram>(
      EglTextureProgram::TextureFormat::RGBA);
  mEglSurfaceTexture = std::make_unique<EglSurfaceTexture>(mInputSurfaceWidth,
                                                           mInputSurfaceHeight);
  // Unblocks any getInputSurface() callers waiting on the promise.
  mInputSurfacePromise.set_value(mEglSurfaceTexture->getSurface());

  // dequeueTask() blocks until work arrives and returns nullptr on stop().
  while (std::unique_ptr<ProcessCaptureRequestTask> task = dequeueTask()) {
    processCaptureRequest(*task);
  }

  ALOGV("Render thread exiting");
}
284
285void VirtualCameraRenderThread::processCaptureRequest(
286 const ProcessCaptureRequestTask& request) {
287 const std::chrono::nanoseconds timestamp =
288 std::chrono::duration_cast<std::chrono::nanoseconds>(
289 std::chrono::steady_clock::now().time_since_epoch());
290
291 CaptureResult captureResult;
292 captureResult.fmqResultSize = 0;
293 captureResult.frameNumber = request.getFrameNumber();
Jan Sebechlebskyb0d8cab2023-11-28 10:55:04 +0100294 // Partial result needs to be set to 1 when metadata are present.
Jan Sebechlebsky5cb39962023-11-22 17:33:07 +0100295 captureResult.partialResult = 1;
296 captureResult.inputBuffer.streamId = -1;
297 captureResult.physicalCameraMetadata.resize(0);
298 captureResult.result = createCaptureResultMetadata(timestamp);
299
300 const std::vector<CaptureRequestBuffer>& buffers = request.getBuffers();
301 captureResult.outputBuffers.resize(buffers.size());
302
303 if (mTestMode) {
304 // In test mode let's just render something to the Surface ourselves.
305 renderTestPatternYCbCr420(mEglSurfaceTexture->getSurface(),
306 request.getFrameNumber());
307 }
308
309 mEglSurfaceTexture->updateTexture();
310
311 for (int i = 0; i < buffers.size(); ++i) {
312 const CaptureRequestBuffer& reqBuffer = buffers[i];
313 StreamBuffer& resBuffer = captureResult.outputBuffers[i];
314 resBuffer.streamId = reqBuffer.getStreamId();
315 resBuffer.bufferId = reqBuffer.getBufferId();
316 resBuffer.status = BufferStatus::OK;
317
318 const std::optional<Stream> streamConfig =
319 mSessionContext.getStreamConfig(reqBuffer.getStreamId());
320
321 if (!streamConfig.has_value()) {
322 resBuffer.status = BufferStatus::ERROR;
323 continue;
324 }
325
326 auto status = streamConfig->format == PixelFormat::BLOB
Jan Sebechlebsky9ae496f2023-12-05 15:56:28 +0100327 ? renderIntoBlobStreamBuffer(reqBuffer.getStreamId(),
328 reqBuffer.getBufferId(),
329 reqBuffer.getFence())
Jan Sebechlebsky5cb39962023-11-22 17:33:07 +0100330 : renderIntoImageStreamBuffer(reqBuffer.getStreamId(),
331 reqBuffer.getBufferId(),
332 reqBuffer.getFence());
333 if (!status.isOk()) {
334 resBuffer.status = BufferStatus::ERROR;
335 }
336 }
337
338 std::vector<NotifyMsg> notifyMsg{
339 createShutterNotifyMsg(request.getFrameNumber(), timestamp)};
340 for (const StreamBuffer& resBuffer : captureResult.outputBuffers) {
341 if (resBuffer.status != BufferStatus::OK) {
342 notifyMsg.push_back(createBufferErrorNotifyMsg(request.getFrameNumber(),
343 resBuffer.streamId));
344 }
345 }
346
347 auto status = mCameraDeviceCallback->notify(notifyMsg);
348 if (!status.isOk()) {
349 ALOGE("%s: notify call failed: %s", __func__,
350 status.getDescription().c_str());
351 return;
352 }
353
354 std::vector<::aidl::android::hardware::camera::device::CaptureResult>
355 captureResults(1);
356 captureResults[0] = std::move(captureResult);
357
358 status = mCameraDeviceCallback->processCaptureResult(captureResults);
359 if (!status.isOk()) {
360 ALOGE("%s: processCaptureResult call failed: %s", __func__,
361 status.getDescription().c_str());
362 return;
363 }
364
365 ALOGD("%s: Successfully called processCaptureResult", __func__);
366}
367
368void VirtualCameraRenderThread::flushCaptureRequest(
369 const ProcessCaptureRequestTask& request) {
Jan Sebechlebsky5cb39962023-11-22 17:33:07 +0100370 CaptureResult captureResult;
371 captureResult.fmqResultSize = 0;
372 captureResult.frameNumber = request.getFrameNumber();
373 captureResult.inputBuffer.streamId = -1;
Jan Sebechlebsky5cb39962023-11-22 17:33:07 +0100374
375 const std::vector<CaptureRequestBuffer>& buffers = request.getBuffers();
376 captureResult.outputBuffers.resize(buffers.size());
377
378 for (int i = 0; i < buffers.size(); ++i) {
379 const CaptureRequestBuffer& reqBuffer = buffers[i];
380 StreamBuffer& resBuffer = captureResult.outputBuffers[i];
381 resBuffer.streamId = reqBuffer.getStreamId();
382 resBuffer.bufferId = reqBuffer.getBufferId();
383 resBuffer.status = BufferStatus::ERROR;
384 sp<Fence> fence = reqBuffer.getFence();
385 if (fence != nullptr && fence->isValid()) {
386 resBuffer.releaseFence.fds.emplace_back(fence->dup());
387 }
388 }
389
390 auto status = mCameraDeviceCallback->notify(
391 {createRequestErrorNotifyMsg(request.getFrameNumber())});
392 if (!status.isOk()) {
393 ALOGE("%s: notify call failed: %s", __func__,
394 status.getDescription().c_str());
395 return;
396 }
397
398 std::vector<::aidl::android::hardware::camera::device::CaptureResult>
399 captureResults(1);
400 captureResults[0] = std::move(captureResult);
401
402 status = mCameraDeviceCallback->processCaptureResult(captureResults);
403 if (!status.isOk()) {
404 ALOGE("%s: processCaptureResult call failed: %s", __func__,
405 status.getDescription().c_str());
406 }
407}
408
// Renders the current frame into a BLOB (JPEG) stream buffer: the input is
// first rendered into a temporary YUV420 framebuffer (handling rescaling and
// format conversion), which is then JPEG-compressed into the client buffer.
ndk::ScopedAStatus VirtualCameraRenderThread::renderIntoBlobStreamBuffer(
    const int streamId, const int bufferId, sp<Fence> fence) {
  ALOGV("%s", __func__);
  std::shared_ptr<AHardwareBuffer> hwBuffer =
      mSessionContext.fetchHardwareBuffer(streamId, bufferId);
  if (hwBuffer == nullptr) {
    ALOGE("%s: Failed to fetch hardware buffer %d for streamId %d", __func__,
          bufferId, streamId);
    return cameraStatus(Status::INTERNAL_ERROR);
  }

  // Stream config provides the target dimensions and maximum buffer size.
  std::optional<Stream> stream = mSessionContext.getStreamConfig(streamId);
  if (!stream.has_value()) {
    ALOGE("%s, failed to fetch information about stream %d", __func__, streamId);
    return cameraStatus(Status::INTERNAL_ERROR);
  }

  // Let's create YUV framebuffer and render the surface into this.
  // This will take care about rescaling as well as potential format conversion.
  std::shared_ptr<EglFrameBuffer> framebuffer = allocateTemporaryFramebuffer(
      mEglDisplayContext->getEglDisplay(), stream->width, stream->height);
  if (framebuffer == nullptr) {
    ALOGE("Failed to allocate temporary framebuffer for JPEG compression");
    return cameraStatus(Status::INTERNAL_ERROR);
  }

  // Render into temporary framebuffer.
  ndk::ScopedAStatus status = renderIntoEglFramebuffer(*framebuffer);
  if (!status.isOk()) {
    ALOGE("Failed to render input texture into temporary framebuffer");
    return status;
  }

  AHardwareBuffer_Planes planes_info;

  // Lock the output BLOB buffer for CPU access; lockPlanes waits on the
  // acquire fence fd (or -1 for "no fence") before granting access.
  int32_t rawFence = fence != nullptr ? fence->get() : -1;
  int result = AHardwareBuffer_lockPlanes(hwBuffer.get(),
                                          AHARDWAREBUFFER_USAGE_CPU_READ_RARELY,
                                          rawFence, nullptr, &planes_info);
  if (result != OK) {
    ALOGE("%s: Failed to lock planes for BLOB buffer: %d", __func__, result);
    return cameraStatus(Status::INTERNAL_ERROR);
  }

  std::shared_ptr<AHardwareBuffer> inHwBuffer = framebuffer->getHardwareBuffer();
  GraphicBuffer* gBuffer = GraphicBuffer::fromAHardwareBuffer(inHwBuffer.get());

  bool compressionSuccess = true;
  if (gBuffer != nullptr) {
    android_ycbcr ycbcr;
    if (gBuffer->getPixelFormat() != HAL_PIXEL_FORMAT_YCbCr_420_888) {
      // This should never happen since we're allocating the temporary buffer
      // with YUV420 layout above.
      ALOGE("%s: Cannot compress non-YUV buffer (pixelFormat %d)", __func__,
            gBuffer->getPixelFormat());
      // NB: every early return below this point must unlock hwBuffer first.
      AHardwareBuffer_unlock(hwBuffer.get(), nullptr);
      return cameraStatus(Status::INTERNAL_ERROR);
    }

    status_t status =
        gBuffer->lockYCbCr(AHARDWAREBUFFER_USAGE_CPU_READ_OFTEN, &ycbcr);
    ALOGV("Locked buffers");
    if (status != NO_ERROR) {
      AHardwareBuffer_unlock(hwBuffer.get(), nullptr);
      ALOGE("%s: Failed to lock graphic buffer: %d", __func__, status);
      return cameraStatus(Status::INTERNAL_ERROR);
    }

    // Compress the rendered YUV frame into the first plane of the BLOB.
    compressionSuccess =
        compressJpeg(gBuffer->getWidth(), gBuffer->getHeight(), ycbcr,
                     stream->bufferSize, planes_info.planes[0].data);

    status_t res = gBuffer->unlock();
    if (res != NO_ERROR) {
      ALOGE("Failed to unlock graphic buffer: %d", res);
    }
  } else {
    // No GraphicBuffer view of the temporary framebuffer; fall back to
    // emitting an all-black JPEG of the stream's dimensions.
    compressionSuccess =
        compressBlackJpeg(stream->width, stream->height, stream->bufferSize,
                          planes_info.planes[0].data);
  }
  AHardwareBuffer_unlock(hwBuffer.get(), nullptr);
  ALOGV("Unlocked buffers");
  return compressionSuccess ? ndk::ScopedAStatus::ok()
                            : cameraStatus(Status::INTERNAL_ERROR);
}
495
496ndk::ScopedAStatus VirtualCameraRenderThread::renderIntoImageStreamBuffer(
497 int streamId, int bufferId, sp<Fence> fence) {
498 ALOGV("%s", __func__);
499
500 const std::chrono::nanoseconds before =
501 std::chrono::duration_cast<std::chrono::nanoseconds>(
502 std::chrono::steady_clock::now().time_since_epoch());
503
504 // Render test pattern using EGL.
505 std::shared_ptr<EglFrameBuffer> framebuffer =
506 mSessionContext.fetchOrCreateEglFramebuffer(
507 mEglDisplayContext->getEglDisplay(), streamId, bufferId);
508 if (framebuffer == nullptr) {
509 ALOGE(
510 "%s: Failed to get EGL framebuffer corresponding to buffer id "
511 "%d for streamId %d",
512 __func__, bufferId, streamId);
513 return cameraStatus(Status::ILLEGAL_ARGUMENT);
514 }
515
Jan Sebechlebsky042d1fb2023-12-12 16:37:00 +0100516 ndk::ScopedAStatus status = renderIntoEglFramebuffer(*framebuffer, fence);
Jan Sebechlebsky5cb39962023-11-22 17:33:07 +0100517
518 const std::chrono::nanoseconds after =
519 std::chrono::duration_cast<std::chrono::nanoseconds>(
520 std::chrono::steady_clock::now().time_since_epoch());
521
522 ALOGV("Rendering to buffer %d, stream %d took %lld ns", bufferId, streamId,
523 after.count() - before.count());
524
525 return ndk::ScopedAStatus::ok();
526}
527
// Renders the most recent input-surface frame into the given framebuffer,
// optionally waiting on an acquire fence first. Selects the YUV or RGB
// texture program based on the input buffer's pixel format.
ndk::ScopedAStatus VirtualCameraRenderThread::renderIntoEglFramebuffer(
    EglFrameBuffer& framebuffer, sp<Fence> fence) {
  ALOGV("%s", __func__);
  // Wait for fence to clear.
  if (fence != nullptr && fence->isValid()) {
    status_t ret = fence->wait(kAcquireFenceTimeout.count());
    if (ret != 0) {
      ALOGE("Timeout while waiting for the acquire fence for buffer");
      return cameraStatus(Status::INTERNAL_ERROR);
    }
  }

  mEglDisplayContext->makeCurrent();
  framebuffer.beforeDraw();

  sp<GraphicBuffer> textureBuffer = mEglSurfaceTexture->getCurrentBuffer();
  if (textureBuffer == nullptr) {
    // If there's no current buffer, nothing was written to the surface and
    // texture is not initialized yet. Let's render the framebuffer black
    // instead of rendering the texture.
    // NOTE(review): (0.0, 0.5, 0.5) reads as black only when the target is
    // interpreted as YUV (zero luma, neutral chroma); for an RGBA target this
    // clear color is not black — confirm intended.
    glClearColor(0.0f, 0.5f, 0.5f, 0.0f);
    glClear(GL_COLOR_BUFFER_BIT);
  } else {
    // Pick the texture program matching the input buffer's pixel format.
    const bool renderSuccess =
        isYuvFormat(static_cast<PixelFormat>(textureBuffer->getPixelFormat()))
            ? mEglTextureYuvProgram->draw(
                  mEglSurfaceTexture->getTextureId(),
                  mEglSurfaceTexture->getTransformMatrix())
            : mEglTextureRgbProgram->draw(
                  mEglSurfaceTexture->getTextureId(),
                  mEglSurfaceTexture->getTransformMatrix());
    if (!renderSuccess) {
      ALOGE("%s: Failed to render texture", __func__);
      return cameraStatus(Status::INTERNAL_ERROR);
    }
  }
  framebuffer.afterDraw();

  return ndk::ScopedAStatus::ok();
}
568
Jan Sebechlebsky5cb39962023-11-22 17:33:07 +0100569} // namespace virtualcamera
570} // namespace companion
571} // namespace android