blob: cd36c6daed2cf1ed6c3e94d6fb4b85b8811f7046 [file] [log] [blame]
Jan Sebechlebsky5cb39962023-11-22 17:33:07 +01001/*
2 * Copyright (C) 2023 The Android Open Source Project
3 *
4 * Licensed under the Apache License, Version 2.0 (the "License");
5 * you may not use this file except in compliance with the License.
6 * You may obtain a copy of the License at
7 *
8 * http://www.apache.org/licenses/LICENSE-2.0
9 *
10 * Unless required by applicable law or agreed to in writing, software
11 * distributed under the License is distributed on an "AS IS" BASIS,
12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 * See the License for the specific language governing permissions and
14 * limitations under the License.
15 */
16
17#define LOG_TAG "VirtualCameraRenderThread"
18#include "VirtualCameraRenderThread.h"
19
20#include <chrono>
21#include <cstddef>
Jan Sebechlebsky042d1fb2023-12-12 16:37:00 +010022#include <cstdint>
Jan Sebechlebsky5cb39962023-11-22 17:33:07 +010023#include <future>
24#include <memory>
25#include <mutex>
26#include <thread>
27
Jan Sebechlebsky9ae496f2023-12-05 15:56:28 +010028#include "GLES/gl.h"
Biswarup Pal8ad8bc52024-02-08 13:41:44 +000029#include "VirtualCameraDevice.h"
Jan Sebechlebsky5cb39962023-11-22 17:33:07 +010030#include "VirtualCameraSessionContext.h"
31#include "aidl/android/hardware/camera/common/Status.h"
32#include "aidl/android/hardware/camera/device/BufferStatus.h"
33#include "aidl/android/hardware/camera/device/CameraMetadata.h"
34#include "aidl/android/hardware/camera/device/CaptureResult.h"
35#include "aidl/android/hardware/camera/device/ErrorCode.h"
36#include "aidl/android/hardware/camera/device/ICameraDeviceCallback.h"
37#include "aidl/android/hardware/camera/device/NotifyMsg.h"
38#include "aidl/android/hardware/camera/device/ShutterMsg.h"
39#include "aidl/android/hardware/camera/device/StreamBuffer.h"
40#include "android-base/thread_annotations.h"
41#include "android/binder_auto_utils.h"
42#include "android/hardware_buffer.h"
Jan Sebechlebsky042d1fb2023-12-12 16:37:00 +010043#include "ui/GraphicBuffer.h"
Jan Sebechlebsky5cb39962023-11-22 17:33:07 +010044#include "util/EglFramebuffer.h"
45#include "util/JpegUtil.h"
46#include "util/MetadataBuilder.h"
47#include "util/TestPatternHelper.h"
48#include "util/Util.h"
49#include "utils/Errors.h"
50
51namespace android {
52namespace companion {
53namespace virtualcamera {
54
55using ::aidl::android::hardware::camera::common::Status;
56using ::aidl::android::hardware::camera::device::BufferStatus;
57using ::aidl::android::hardware::camera::device::CameraMetadata;
58using ::aidl::android::hardware::camera::device::CaptureResult;
59using ::aidl::android::hardware::camera::device::ErrorCode;
60using ::aidl::android::hardware::camera::device::ErrorMsg;
61using ::aidl::android::hardware::camera::device::ICameraDeviceCallback;
62using ::aidl::android::hardware::camera::device::NotifyMsg;
63using ::aidl::android::hardware::camera::device::ShutterMsg;
64using ::aidl::android::hardware::camera::device::Stream;
65using ::aidl::android::hardware::camera::device::StreamBuffer;
66using ::aidl::android::hardware::graphics::common::PixelFormat;
67using ::android::base::ScopedLockAssertion;
68
namespace {

using namespace std::chrono_literals;

// Maximum time to wait for a stream buffer's acquire fence to signal before
// rendering into that buffer is abandoned (see renderIntoEglFramebuffer).
static constexpr std::chrono::milliseconds kAcquireFenceTimeout = 500ms;
74
75CameraMetadata createCaptureResultMetadata(
76 const std::chrono::nanoseconds timestamp) {
77 std::unique_ptr<CameraMetadata> metadata =
Jan Sebechlebskyc3e1a632024-02-06 14:19:05 +010078 MetadataBuilder()
79 .setControlAeMode(ANDROID_CONTROL_AE_MODE_ON)
80 .setControlAePrecaptureTrigger(
81 ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER_IDLE)
82 .setControlAfMode(ANDROID_CONTROL_AF_MODE_AUTO)
83 .setControlAwbMode(ANDROID_CONTROL_AWB_MODE_AUTO)
84 .setFlashState(ANDROID_FLASH_STATE_UNAVAILABLE)
Biswarup Pal8ad8bc52024-02-08 13:41:44 +000085 .setFocalLength(VirtualCameraDevice::kFocalLength)
Jan Sebechlebskyc3e1a632024-02-06 14:19:05 +010086 .setSensorTimestamp(timestamp)
87 .build();
Jan Sebechlebsky5cb39962023-11-22 17:33:07 +010088 if (metadata == nullptr) {
89 ALOGE("%s: Failed to build capture result metadata", __func__);
90 return CameraMetadata();
91 }
92 return std::move(*metadata);
93}
94
95NotifyMsg createShutterNotifyMsg(int frameNumber,
96 std::chrono::nanoseconds timestamp) {
97 NotifyMsg msg;
98 msg.set<NotifyMsg::Tag::shutter>(ShutterMsg{
99 .frameNumber = frameNumber,
100 .timestamp = timestamp.count(),
101 });
102 return msg;
103}
104
105NotifyMsg createBufferErrorNotifyMsg(int frameNumber, int streamId) {
106 NotifyMsg msg;
107 msg.set<NotifyMsg::Tag::error>(ErrorMsg{.frameNumber = frameNumber,
108 .errorStreamId = streamId,
109 .errorCode = ErrorCode::ERROR_BUFFER});
110 return msg;
111}
112
113NotifyMsg createRequestErrorNotifyMsg(int frameNumber) {
114 NotifyMsg msg;
115 msg.set<NotifyMsg::Tag::error>(ErrorMsg{
Jan Sebechlebskyb0d8cab2023-11-28 10:55:04 +0100116 .frameNumber = frameNumber,
117 // errorStreamId needs to be set to -1 for ERROR_REQUEST
118 // (not tied to specific stream).
119 .errorStreamId = -1,
120 .errorCode = ErrorCode::ERROR_REQUEST});
Jan Sebechlebsky5cb39962023-11-22 17:33:07 +0100121 return msg;
122}
123
Jan Sebechlebsky042d1fb2023-12-12 16:37:00 +0100124std::shared_ptr<EglFrameBuffer> allocateTemporaryFramebuffer(
125 EGLDisplay eglDisplay, const uint width, const int height) {
126 const AHardwareBuffer_Desc desc{
127 .width = static_cast<uint32_t>(width),
128 .height = static_cast<uint32_t>(height),
129 .layers = 1,
130 .format = AHARDWAREBUFFER_FORMAT_Y8Cb8Cr8_420,
131 .usage = AHARDWAREBUFFER_USAGE_GPU_FRAMEBUFFER |
132 AHARDWAREBUFFER_USAGE_CPU_READ_OFTEN,
133 .rfu0 = 0,
134 .rfu1 = 0};
135
136 AHardwareBuffer* hwBufferPtr;
137 int status = AHardwareBuffer_allocate(&desc, &hwBufferPtr);
138 if (status != NO_ERROR) {
139 ALOGE(
140 "%s: Failed to allocate hardware buffer for temporary framebuffer: %d",
141 __func__, status);
142 return nullptr;
143 }
144
145 return std::make_shared<EglFrameBuffer>(
146 eglDisplay,
147 std::shared_ptr<AHardwareBuffer>(hwBufferPtr, AHardwareBuffer_release));
148}
149
150bool isYuvFormat(const PixelFormat pixelFormat) {
151 switch (static_cast<android_pixel_format_t>(pixelFormat)) {
152 case HAL_PIXEL_FORMAT_YCBCR_422_I:
153 case HAL_PIXEL_FORMAT_YCBCR_422_SP:
154 case HAL_PIXEL_FORMAT_Y16:
155 case HAL_PIXEL_FORMAT_YV12:
156 case HAL_PIXEL_FORMAT_YCBCR_420_888:
157 return true;
158 default:
159 return false;
160 }
161}
162
Jan Sebechlebsky5cb39962023-11-22 17:33:07 +0100163} // namespace
164
165CaptureRequestBuffer::CaptureRequestBuffer(int streamId, int bufferId,
166 sp<Fence> fence)
167 : mStreamId(streamId), mBufferId(bufferId), mFence(fence) {
168}
169
170int CaptureRequestBuffer::getStreamId() const {
171 return mStreamId;
172}
173
174int CaptureRequestBuffer::getBufferId() const {
175 return mBufferId;
176}
177
178sp<Fence> CaptureRequestBuffer::getFence() const {
179 return mFence;
180}
181
182VirtualCameraRenderThread::VirtualCameraRenderThread(
183 VirtualCameraSessionContext& sessionContext, const int mWidth,
184 const int mHeight,
185 std::shared_ptr<ICameraDeviceCallback> cameraDeviceCallback, bool testMode)
186 : mCameraDeviceCallback(cameraDeviceCallback),
187 mInputSurfaceWidth(mWidth),
188 mInputSurfaceHeight(mHeight),
189 mTestMode(testMode),
190 mSessionContext(sessionContext) {
191}
192
193VirtualCameraRenderThread::~VirtualCameraRenderThread() {
194 stop();
195 if (mThread.joinable()) {
196 mThread.join();
197 }
198}
199
200ProcessCaptureRequestTask::ProcessCaptureRequestTask(
201 int frameNumber, const std::vector<CaptureRequestBuffer>& requestBuffers)
202 : mFrameNumber(frameNumber), mBuffers(requestBuffers) {
203}
204
205int ProcessCaptureRequestTask::getFrameNumber() const {
206 return mFrameNumber;
207}
208
209const std::vector<CaptureRequestBuffer>& ProcessCaptureRequestTask::getBuffers()
210 const {
211 return mBuffers;
212}
213
214void VirtualCameraRenderThread::enqueueTask(
215 std::unique_ptr<ProcessCaptureRequestTask> task) {
216 std::lock_guard<std::mutex> lock(mLock);
217 mQueue.emplace_back(std::move(task));
218 mCondVar.notify_one();
219}
220
221void VirtualCameraRenderThread::flush() {
222 std::lock_guard<std::mutex> lock(mLock);
Jan Sebechlebskyb0d8cab2023-11-28 10:55:04 +0100223 while (!mQueue.empty()) {
224 std::unique_ptr<ProcessCaptureRequestTask> task = std::move(mQueue.front());
225 mQueue.pop_front();
Jan Sebechlebsky5cb39962023-11-22 17:33:07 +0100226 flushCaptureRequest(*task);
227 }
228}
229
230void VirtualCameraRenderThread::start() {
231 mThread = std::thread(&VirtualCameraRenderThread::threadLoop, this);
232}
233
234void VirtualCameraRenderThread::stop() {
235 {
236 std::lock_guard<std::mutex> lock(mLock);
237 mPendingExit = true;
238 mCondVar.notify_one();
239 }
240}
241
242sp<Surface> VirtualCameraRenderThread::getInputSurface() {
243 return mInputSurfacePromise.get_future().get();
244}
245
// Blocks until a capture task is available or stop() was requested.
//
// Returns the oldest queued task, or nullptr when the thread should exit.
// Any tasks still queued when exit is requested are left in mQueue.
std::unique_ptr<ProcessCaptureRequestTask>
VirtualCameraRenderThread::dequeueTask() {
  std::unique_lock<std::mutex> lock(mLock);
  // Clang's thread safety analysis doesn't perform alias analysis,
  // so it doesn't support moveable std::unique_lock.
  //
  // Lock assertion below is basically explicit declaration that
  // the lock is held in this scope, which is true, since it's only
  // released during waiting inside mCondVar.wait calls.
  ScopedLockAssertion lockAssertion(mLock);

  mCondVar.wait(lock, [this]() REQUIRES(mLock) {
    return mPendingExit || !mQueue.empty();
  });
  // Exit takes precedence over draining the remaining queue entries.
  if (mPendingExit) {
    return nullptr;
  }
  std::unique_ptr<ProcessCaptureRequestTask> task = std::move(mQueue.front());
  mQueue.pop_front();
  return task;
}
267
// Render thread entry point.
//
// Creates all EGL state on this thread (so the EGL context is current here
// for all subsequent draws), publishes the input surface to unblock
// getInputSurface() callers, then processes capture tasks until stop() is
// requested.
void VirtualCameraRenderThread::threadLoop() {
  ALOGV("Render thread starting");

  mEglDisplayContext = std::make_unique<EglDisplayContext>();
  mEglTextureYuvProgram =
      std::make_unique<EglTextureProgram>(EglTextureProgram::TextureFormat::YUV);
  mEglTextureRgbProgram = std::make_unique<EglTextureProgram>(
      EglTextureProgram::TextureFormat::RGBA);
  mEglSurfaceTexture = std::make_unique<EglSurfaceTexture>(mInputSurfaceWidth,
                                                           mInputSurfaceHeight);
  // Fulfilling the promise unblocks getInputSurface().
  mInputSurfacePromise.set_value(mEglSurfaceTexture->getSurface());

  // dequeueTask() blocks for work and returns nullptr once exit is pending.
  while (std::unique_ptr<ProcessCaptureRequestTask> task = dequeueTask()) {
    processCaptureRequest(*task);
  }

  ALOGV("Render thread exiting");
}
286
287void VirtualCameraRenderThread::processCaptureRequest(
288 const ProcessCaptureRequestTask& request) {
289 const std::chrono::nanoseconds timestamp =
290 std::chrono::duration_cast<std::chrono::nanoseconds>(
291 std::chrono::steady_clock::now().time_since_epoch());
292
293 CaptureResult captureResult;
294 captureResult.fmqResultSize = 0;
295 captureResult.frameNumber = request.getFrameNumber();
Jan Sebechlebskyb0d8cab2023-11-28 10:55:04 +0100296 // Partial result needs to be set to 1 when metadata are present.
Jan Sebechlebsky5cb39962023-11-22 17:33:07 +0100297 captureResult.partialResult = 1;
298 captureResult.inputBuffer.streamId = -1;
299 captureResult.physicalCameraMetadata.resize(0);
300 captureResult.result = createCaptureResultMetadata(timestamp);
301
302 const std::vector<CaptureRequestBuffer>& buffers = request.getBuffers();
303 captureResult.outputBuffers.resize(buffers.size());
304
305 if (mTestMode) {
306 // In test mode let's just render something to the Surface ourselves.
307 renderTestPatternYCbCr420(mEglSurfaceTexture->getSurface(),
308 request.getFrameNumber());
309 }
310
311 mEglSurfaceTexture->updateTexture();
312
313 for (int i = 0; i < buffers.size(); ++i) {
314 const CaptureRequestBuffer& reqBuffer = buffers[i];
315 StreamBuffer& resBuffer = captureResult.outputBuffers[i];
316 resBuffer.streamId = reqBuffer.getStreamId();
317 resBuffer.bufferId = reqBuffer.getBufferId();
318 resBuffer.status = BufferStatus::OK;
319
320 const std::optional<Stream> streamConfig =
321 mSessionContext.getStreamConfig(reqBuffer.getStreamId());
322
323 if (!streamConfig.has_value()) {
324 resBuffer.status = BufferStatus::ERROR;
325 continue;
326 }
327
328 auto status = streamConfig->format == PixelFormat::BLOB
Jan Sebechlebsky9ae496f2023-12-05 15:56:28 +0100329 ? renderIntoBlobStreamBuffer(reqBuffer.getStreamId(),
330 reqBuffer.getBufferId(),
331 reqBuffer.getFence())
Jan Sebechlebsky5cb39962023-11-22 17:33:07 +0100332 : renderIntoImageStreamBuffer(reqBuffer.getStreamId(),
333 reqBuffer.getBufferId(),
334 reqBuffer.getFence());
335 if (!status.isOk()) {
336 resBuffer.status = BufferStatus::ERROR;
337 }
338 }
339
340 std::vector<NotifyMsg> notifyMsg{
341 createShutterNotifyMsg(request.getFrameNumber(), timestamp)};
342 for (const StreamBuffer& resBuffer : captureResult.outputBuffers) {
343 if (resBuffer.status != BufferStatus::OK) {
344 notifyMsg.push_back(createBufferErrorNotifyMsg(request.getFrameNumber(),
345 resBuffer.streamId));
346 }
347 }
348
349 auto status = mCameraDeviceCallback->notify(notifyMsg);
350 if (!status.isOk()) {
351 ALOGE("%s: notify call failed: %s", __func__,
352 status.getDescription().c_str());
353 return;
354 }
355
356 std::vector<::aidl::android::hardware::camera::device::CaptureResult>
357 captureResults(1);
358 captureResults[0] = std::move(captureResult);
359
360 status = mCameraDeviceCallback->processCaptureResult(captureResults);
361 if (!status.isOk()) {
362 ALOGE("%s: processCaptureResult call failed: %s", __func__,
363 status.getDescription().c_str());
364 return;
365 }
366
367 ALOGD("%s: Successfully called processCaptureResult", __func__);
368}
369
370void VirtualCameraRenderThread::flushCaptureRequest(
371 const ProcessCaptureRequestTask& request) {
Jan Sebechlebsky5cb39962023-11-22 17:33:07 +0100372 CaptureResult captureResult;
373 captureResult.fmqResultSize = 0;
374 captureResult.frameNumber = request.getFrameNumber();
375 captureResult.inputBuffer.streamId = -1;
Jan Sebechlebsky5cb39962023-11-22 17:33:07 +0100376
377 const std::vector<CaptureRequestBuffer>& buffers = request.getBuffers();
378 captureResult.outputBuffers.resize(buffers.size());
379
380 for (int i = 0; i < buffers.size(); ++i) {
381 const CaptureRequestBuffer& reqBuffer = buffers[i];
382 StreamBuffer& resBuffer = captureResult.outputBuffers[i];
383 resBuffer.streamId = reqBuffer.getStreamId();
384 resBuffer.bufferId = reqBuffer.getBufferId();
385 resBuffer.status = BufferStatus::ERROR;
386 sp<Fence> fence = reqBuffer.getFence();
387 if (fence != nullptr && fence->isValid()) {
388 resBuffer.releaseFence.fds.emplace_back(fence->dup());
389 }
390 }
391
392 auto status = mCameraDeviceCallback->notify(
393 {createRequestErrorNotifyMsg(request.getFrameNumber())});
394 if (!status.isOk()) {
395 ALOGE("%s: notify call failed: %s", __func__,
396 status.getDescription().c_str());
397 return;
398 }
399
400 std::vector<::aidl::android::hardware::camera::device::CaptureResult>
401 captureResults(1);
402 captureResults[0] = std::move(captureResult);
403
404 status = mCameraDeviceCallback->processCaptureResult(captureResults);
405 if (!status.isOk()) {
406 ALOGE("%s: processCaptureResult call failed: %s", __func__,
407 status.getDescription().c_str());
408 }
409}
410
// Compresses the current input frame to JPEG and writes it into the BLOB
// stream buffer identified by (streamId, bufferId).
//
// The input texture is first rendered into a temporary YUV420 framebuffer
// (which takes care of rescaling and format conversion); that framebuffer is
// then locked for CPU access and JPEG-compressed directly into plane 0 of
// the output BLOB buffer. If the temporary buffer cannot be resolved to a
// GraphicBuffer, an all-black JPEG is emitted instead.
// Returns INTERNAL_ERROR on any allocation, locking, rendering or
// compression failure.
ndk::ScopedAStatus VirtualCameraRenderThread::renderIntoBlobStreamBuffer(
    const int streamId, const int bufferId, sp<Fence> fence) {
  ALOGV("%s", __func__);
  std::shared_ptr<AHardwareBuffer> hwBuffer =
      mSessionContext.fetchHardwareBuffer(streamId, bufferId);
  if (hwBuffer == nullptr) {
    ALOGE("%s: Failed to fetch hardware buffer %d for streamId %d", __func__,
          bufferId, streamId);
    return cameraStatus(Status::INTERNAL_ERROR);
  }

  // The stream config provides the output dimensions and bufferSize (the
  // JPEG capacity of the BLOB buffer).
  std::optional<Stream> stream = mSessionContext.getStreamConfig(streamId);
  if (!stream.has_value()) {
    ALOGE("%s, failed to fetch information about stream %d", __func__, streamId);
    return cameraStatus(Status::INTERNAL_ERROR);
  }

  // Let's create YUV framebuffer and render the surface into this.
  // This will take care about rescaling as well as potential format conversion.
  std::shared_ptr<EglFrameBuffer> framebuffer = allocateTemporaryFramebuffer(
      mEglDisplayContext->getEglDisplay(), stream->width, stream->height);
  if (framebuffer == nullptr) {
    ALOGE("Failed to allocate temporary framebuffer for JPEG compression");
    return cameraStatus(Status::INTERNAL_ERROR);
  }

  // Render into temporary framebuffer.
  ndk::ScopedAStatus status = renderIntoEglFramebuffer(*framebuffer);
  if (!status.isOk()) {
    ALOGE("Failed to render input texture into temporary framebuffer");
    return status;
  }

  AHardwareBuffer_Planes planes_info;

  // NOTE(review): fence->get() does not duplicate the fd before handing it
  // to AHardwareBuffer_lockPlanes — confirm against the NDK contract whether
  // the lock call consumes/closes the fence fd, otherwise this risks a
  // double-close when the Fence object is destroyed.
  int32_t rawFence = fence != nullptr ? fence->get() : -1;
  int result = AHardwareBuffer_lockPlanes(hwBuffer.get(),
                                          AHARDWAREBUFFER_USAGE_CPU_READ_RARELY,
                                          rawFence, nullptr, &planes_info);
  if (result != OK) {
    ALOGE("%s: Failed to lock planes for BLOB buffer: %d", __func__, result);
    return cameraStatus(Status::INTERNAL_ERROR);
  }

  std::shared_ptr<AHardwareBuffer> inHwBuffer = framebuffer->getHardwareBuffer();
  GraphicBuffer* gBuffer = GraphicBuffer::fromAHardwareBuffer(inHwBuffer.get());

  bool compressionSuccess = true;
  if (gBuffer != nullptr) {
    android_ycbcr ycbcr;
    if (gBuffer->getPixelFormat() != HAL_PIXEL_FORMAT_YCbCr_420_888) {
      // This should never happen since we're allocating the temporary buffer
      // with YUV420 layout above.
      ALOGE("%s: Cannot compress non-YUV buffer (pixelFormat %d)", __func__,
            gBuffer->getPixelFormat());
      // Output buffer must be unlocked before bailing out.
      AHardwareBuffer_unlock(hwBuffer.get(), nullptr);
      return cameraStatus(Status::INTERNAL_ERROR);
    }

    status_t status =
        gBuffer->lockYCbCr(AHARDWAREBUFFER_USAGE_CPU_READ_OFTEN, &ycbcr);
    ALOGV("Locked buffers");
    if (status != NO_ERROR) {
      AHardwareBuffer_unlock(hwBuffer.get(), nullptr);
      ALOGE("%s: Failed to lock graphic buffer: %d", __func__, status);
      return cameraStatus(Status::INTERNAL_ERROR);
    }

    // Compress the rendered YUV planes straight into plane 0 of the BLOB
    // buffer; stream->bufferSize caps the JPEG output size.
    compressionSuccess =
        compressJpeg(gBuffer->getWidth(), gBuffer->getHeight(), ycbcr,
                     stream->bufferSize, planes_info.planes[0].data);

    status_t res = gBuffer->unlock();
    if (res != NO_ERROR) {
      ALOGE("Failed to unlock graphic buffer: %d", res);
    }
  } else {
    // No readable input buffer — emit an all-black JPEG so the client still
    // receives a valid image.
    compressionSuccess =
        compressBlackJpeg(stream->width, stream->height, stream->bufferSize,
                          planes_info.planes[0].data);
  }
  AHardwareBuffer_unlock(hwBuffer.get(), nullptr);
  ALOGV("Unlocked buffers");
  return compressionSuccess ? ndk::ScopedAStatus::ok()
                            : cameraStatus(Status::INTERNAL_ERROR);
}
497
498ndk::ScopedAStatus VirtualCameraRenderThread::renderIntoImageStreamBuffer(
499 int streamId, int bufferId, sp<Fence> fence) {
500 ALOGV("%s", __func__);
501
502 const std::chrono::nanoseconds before =
503 std::chrono::duration_cast<std::chrono::nanoseconds>(
504 std::chrono::steady_clock::now().time_since_epoch());
505
506 // Render test pattern using EGL.
507 std::shared_ptr<EglFrameBuffer> framebuffer =
508 mSessionContext.fetchOrCreateEglFramebuffer(
509 mEglDisplayContext->getEglDisplay(), streamId, bufferId);
510 if (framebuffer == nullptr) {
511 ALOGE(
512 "%s: Failed to get EGL framebuffer corresponding to buffer id "
513 "%d for streamId %d",
514 __func__, bufferId, streamId);
515 return cameraStatus(Status::ILLEGAL_ARGUMENT);
516 }
517
Jan Sebechlebsky042d1fb2023-12-12 16:37:00 +0100518 ndk::ScopedAStatus status = renderIntoEglFramebuffer(*framebuffer, fence);
Jan Sebechlebsky5cb39962023-11-22 17:33:07 +0100519
520 const std::chrono::nanoseconds after =
521 std::chrono::duration_cast<std::chrono::nanoseconds>(
522 std::chrono::steady_clock::now().time_since_epoch());
523
524 ALOGV("Rendering to buffer %d, stream %d took %lld ns", bufferId, streamId,
525 after.count() - before.count());
526
527 return ndk::ScopedAStatus::ok();
528}
529
// Renders the latched input texture (or a solid fallback color when no frame
// has been produced yet) into the given EGL framebuffer.
//
// Waits up to kAcquireFenceTimeout for the optional acquire fence before
// drawing, and selects the YUV or RGB texture program based on the pixel
// format of the current input buffer.
ndk::ScopedAStatus VirtualCameraRenderThread::renderIntoEglFramebuffer(
    EglFrameBuffer& framebuffer, sp<Fence> fence) {
  ALOGV("%s", __func__);
  // Wait for fence to clear.
  if (fence != nullptr && fence->isValid()) {
    status_t ret = fence->wait(kAcquireFenceTimeout.count());
    if (ret != 0) {
      ALOGE("Timeout while waiting for the acquire fence for buffer");
      return cameraStatus(Status::INTERNAL_ERROR);
    }
  }

  mEglDisplayContext->makeCurrent();
  framebuffer.beforeDraw();

  sp<GraphicBuffer> textureBuffer = mEglSurfaceTexture->getCurrentBuffer();
  if (textureBuffer == nullptr) {
    // If there's no current buffer, nothing was written to the surface and
    // the texture is not initialized yet. Clear the framebuffer to a solid
    // fill color instead of rendering the texture. (Note: the clear color is
    // RGBA 0.0/0.5/0.5 — a teal tone, not black.)
    glClearColor(0.0f, 0.5f, 0.5f, 0.0f);
    glClear(GL_COLOR_BUFFER_BIT);
  } else {
    // Dispatch to the sampler program matching the buffer's pixel format.
    const bool renderSuccess =
        isYuvFormat(static_cast<PixelFormat>(textureBuffer->getPixelFormat()))
            ? mEglTextureYuvProgram->draw(
                  mEglSurfaceTexture->getTextureId(),
                  mEglSurfaceTexture->getTransformMatrix())
            : mEglTextureRgbProgram->draw(
                  mEglSurfaceTexture->getTextureId(),
                  mEglSurfaceTexture->getTransformMatrix());
    if (!renderSuccess) {
      ALOGE("%s: Failed to render texture", __func__);
      return cameraStatus(Status::INTERNAL_ERROR);
    }
  }
  framebuffer.afterDraw();

  return ndk::ScopedAStatus::ok();
}
570
Jan Sebechlebsky5cb39962023-11-22 17:33:07 +0100571} // namespace virtualcamera
572} // namespace companion
573} // namespace android