/*
 * Copyright (C) 2023 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#define LOG_TAG "VirtualCameraRenderThread"
#include "VirtualCameraRenderThread.h"

#include <chrono>
#include <cstddef>
#include <cstdint>
#include <future>
#include <memory>
#include <mutex>
#include <thread>

#include "GLES/gl.h"
#include "VirtualCameraSessionContext.h"
#include "aidl/android/hardware/camera/common/Status.h"
#include "aidl/android/hardware/camera/device/BufferStatus.h"
#include "aidl/android/hardware/camera/device/CameraMetadata.h"
#include "aidl/android/hardware/camera/device/CaptureResult.h"
#include "aidl/android/hardware/camera/device/ErrorCode.h"
#include "aidl/android/hardware/camera/device/ICameraDeviceCallback.h"
#include "aidl/android/hardware/camera/device/NotifyMsg.h"
#include "aidl/android/hardware/camera/device/ShutterMsg.h"
#include "aidl/android/hardware/camera/device/StreamBuffer.h"
#include "android-base/thread_annotations.h"
#include "android/binder_auto_utils.h"
#include "android/hardware_buffer.h"
#include "ui/GraphicBuffer.h"
#include "util/EglFramebuffer.h"
#include "util/JpegUtil.h"
#include "util/MetadataBuilder.h"
#include "util/TestPatternHelper.h"
#include "util/Util.h"
#include "utils/Errors.h"

namespace android {
namespace companion {
namespace virtualcamera {

using ::aidl::android::hardware::camera::common::Status;
using ::aidl::android::hardware::camera::device::BufferStatus;
using ::aidl::android::hardware::camera::device::CameraMetadata;
using ::aidl::android::hardware::camera::device::CaptureResult;
using ::aidl::android::hardware::camera::device::ErrorCode;
using ::aidl::android::hardware::camera::device::ErrorMsg;
using ::aidl::android::hardware::camera::device::ICameraDeviceCallback;
using ::aidl::android::hardware::camera::device::NotifyMsg;
using ::aidl::android::hardware::camera::device::ShutterMsg;
using ::aidl::android::hardware::camera::device::Stream;
using ::aidl::android::hardware::camera::device::StreamBuffer;
using ::aidl::android::hardware::graphics::common::PixelFormat;
using ::android::base::ScopedLockAssertion;

namespace {

using namespace std::chrono_literals;

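// How long to wait for an output buffer's acquire fence to signal before
// giving up on rendering into that buffer.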
static constexpr std::chrono::milliseconds kAcquireFenceTimeout = 500ms;

CameraMetadata createCaptureResultMetadata(
    const std::chrono::nanoseconds timestamp) {
  std::unique_ptr<CameraMetadata> metadata =
      MetadataBuilder().setSensorTimestamp(timestamp).build();
  if (metadata == nullptr) {
    ALOGE("%s: Failed to build capture result metadata", __func__);
    return CameraMetadata();
  }
  return std::move(*metadata);
}

NotifyMsg createShutterNotifyMsg(int frameNumber,
                                 std::chrono::nanoseconds timestamp) {
  NotifyMsg msg;
  msg.set<NotifyMsg::Tag::shutter>(ShutterMsg{
      .frameNumber = frameNumber,
      .timestamp = timestamp.count(),
  });
  return msg;
}

NotifyMsg createBufferErrorNotifyMsg(int frameNumber, int streamId) {
  NotifyMsg msg;
  msg.set<NotifyMsg::Tag::error>(ErrorMsg{.frameNumber = frameNumber,
                                          .errorStreamId = streamId,
                                          .errorCode = ErrorCode::ERROR_BUFFER});
  return msg;
}

NotifyMsg createRequestErrorNotifyMsg(int frameNumber) {
  NotifyMsg msg;
  msg.set<NotifyMsg::Tag::error>(ErrorMsg{
      .frameNumber = frameNumber,
      // errorStreamId needs to be set to -1 for ERROR_REQUEST
      // (not tied to a specific stream).
      .errorStreamId = -1,
      .errorCode = ErrorCode::ERROR_REQUEST});
  return msg;
}

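// Allocates a YUV420 AHardwareBuffer-backed EGL framebuffer of the given
// size. It serves as an intermediate render target when the output needs
// CPU post-processing, such as JPEG compression for BLOB streams.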
std::shared_ptr<EglFrameBuffer> allocateTemporaryFramebuffer(
    EGLDisplay eglDisplay, const int width, const int height) {
  const AHardwareBuffer_Desc desc{
      .width = static_cast<uint32_t>(width),
      .height = static_cast<uint32_t>(height),
      .layers = 1,
      .format = AHARDWAREBUFFER_FORMAT_Y8Cb8Cr8_420,
      .usage = AHARDWAREBUFFER_USAGE_GPU_FRAMEBUFFER |
               AHARDWAREBUFFER_USAGE_CPU_READ_OFTEN,
      .rfu0 = 0,
      .rfu1 = 0};

  AHardwareBuffer* hwBufferPtr;
  int status = AHardwareBuffer_allocate(&desc, &hwBufferPtr);
  if (status != NO_ERROR) {
    ALOGE(
        "%s: Failed to allocate hardware buffer for temporary framebuffer: %d",
        __func__, status);
    return nullptr;
  }

  return std::make_shared<EglFrameBuffer>(
      eglDisplay,
      std::shared_ptr<AHardwareBuffer>(hwBufferPtr, AHardwareBuffer_release));
}

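// Returns true for pixel formats with a YUV/YCbCr layout, which must be
// sampled with the YUV texture program rather than the RGBA one.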
bool isYuvFormat(const PixelFormat pixelFormat) {
  switch (static_cast<android_pixel_format_t>(pixelFormat)) {
    case HAL_PIXEL_FORMAT_YCBCR_422_I:
    case HAL_PIXEL_FORMAT_YCBCR_422_SP:
    case HAL_PIXEL_FORMAT_Y16:
    case HAL_PIXEL_FORMAT_YV12:
    case HAL_PIXEL_FORMAT_YCBCR_420_888:
      return true;
    default:
      return false;
  }
}

}  // namespace

CaptureRequestBuffer::CaptureRequestBuffer(int streamId, int bufferId,
                                           sp<Fence> fence)
    : mStreamId(streamId), mBufferId(bufferId), mFence(fence) {
}

int CaptureRequestBuffer::getStreamId() const {
  return mStreamId;
}

int CaptureRequestBuffer::getBufferId() const {
  return mBufferId;
}

sp<Fence> CaptureRequestBuffer::getFence() const {
  return mFence;
}

VirtualCameraRenderThread::VirtualCameraRenderThread(
    VirtualCameraSessionContext& sessionContext, const int width,
    const int height,
    std::shared_ptr<ICameraDeviceCallback> cameraDeviceCallback, bool testMode)
    : mCameraDeviceCallback(cameraDeviceCallback),
      mInputSurfaceWidth(width),
      mInputSurfaceHeight(height),
      mTestMode(testMode),
      mSessionContext(sessionContext) {
}

VirtualCameraRenderThread::~VirtualCameraRenderThread() {
  stop();
  if (mThread.joinable()) {
    mThread.join();
  }
}
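
// Illustrative usage sketch (not part of this file; `sessionContext`,
// `callback` and `requestBuffers` are hypothetical instances):
//
//   VirtualCameraRenderThread renderThread(
//       sessionContext, /*width=*/640, /*height=*/480, callback);
//   renderThread.start();
//   sp<Surface> inputSurface = renderThread.getInputSurface();  // Blocks.
//   renderThread.enqueueTask(std::make_unique<ProcessCaptureRequestTask>(
//       /*frameNumber=*/1, requestBuffers));
//   // The destructor calls stop() and joins the thread.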

ProcessCaptureRequestTask::ProcessCaptureRequestTask(
    int frameNumber, const std::vector<CaptureRequestBuffer>& requestBuffers)
    : mFrameNumber(frameNumber), mBuffers(requestBuffers) {
}

int ProcessCaptureRequestTask::getFrameNumber() const {
  return mFrameNumber;
}

const std::vector<CaptureRequestBuffer>& ProcessCaptureRequestTask::getBuffers()
    const {
  return mBuffers;
}

void VirtualCameraRenderThread::enqueueTask(
    std::unique_ptr<ProcessCaptureRequestTask> task) {
  std::lock_guard<std::mutex> lock(mLock);
  mQueue.emplace_back(std::move(task));
  mCondVar.notify_one();
}

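// Fails all currently queued tasks: every pending request is answered with
// ERROR_REQUEST and its buffers are returned to the client unfilled.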
void VirtualCameraRenderThread::flush() {
  std::lock_guard<std::mutex> lock(mLock);
  while (!mQueue.empty()) {
    std::unique_ptr<ProcessCaptureRequestTask> task = std::move(mQueue.front());
    mQueue.pop_front();
    flushCaptureRequest(*task);
  }
}

void VirtualCameraRenderThread::start() {
  mThread = std::thread(&VirtualCameraRenderThread::threadLoop, this);
}

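// Signals the render thread to exit after it finishes the current task; the
// thread itself is joined in the destructor.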
void VirtualCameraRenderThread::stop() {
  {
    std::lock_guard<std::mutex> lock(mLock);
    mPendingExit = true;
    mCondVar.notify_one();
  }
}

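// Blocks until the render thread has initialized EGL and created the input
// surface texture in threadLoop().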
sp<Surface> VirtualCameraRenderThread::getInputSurface() {
  return mInputSurfacePromise.get_future().get();
}

std::unique_ptr<ProcessCaptureRequestTask>
VirtualCameraRenderThread::dequeueTask() {
  std::unique_lock<std::mutex> lock(mLock);
  // Clang's thread safety analysis doesn't perform alias analysis,
  // so it doesn't support movable std::unique_lock.
  //
  // The lock assertion below is an explicit declaration that the lock
  // is held in this scope, which is true, since it's only released
  // while waiting inside the mCondVar.wait calls.
  ScopedLockAssertion lockAssertion(mLock);

  mCondVar.wait(lock, [this]() REQUIRES(mLock) {
    return mPendingExit || !mQueue.empty();
  });
  if (mPendingExit) {
    return nullptr;
  }
  std::unique_ptr<ProcessCaptureRequestTask> task = std::move(mQueue.front());
  mQueue.pop_front();
  return task;
}

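// All EGL objects are created on the render thread itself, since an EGL
// context can only be current on a single thread; the input surface promise
// is fulfilled once the surface texture exists.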
void VirtualCameraRenderThread::threadLoop() {
  ALOGV("Render thread starting");

  mEglDisplayContext = std::make_unique<EglDisplayContext>();
  mEglTextureYuvProgram =
      std::make_unique<EglTextureProgram>(EglTextureProgram::TextureFormat::YUV);
  mEglTextureRgbProgram = std::make_unique<EglTextureProgram>(
      EglTextureProgram::TextureFormat::RGBA);
  mEglSurfaceTexture = std::make_unique<EglSurfaceTexture>(mInputSurfaceWidth,
                                                           mInputSurfaceHeight);
  mInputSurfacePromise.set_value(mEglSurfaceTexture->getSurface());

  while (std::unique_ptr<ProcessCaptureRequestTask> task = dequeueTask()) {
    processCaptureRequest(*task);
  }

  ALOGV("Render thread exiting");
}

void VirtualCameraRenderThread::processCaptureRequest(
    const ProcessCaptureRequestTask& request) {
  const std::chrono::nanoseconds timestamp =
      std::chrono::duration_cast<std::chrono::nanoseconds>(
          std::chrono::steady_clock::now().time_since_epoch());

  CaptureResult captureResult;
  captureResult.fmqResultSize = 0;
  captureResult.frameNumber = request.getFrameNumber();
  // Partial result needs to be set to 1 when metadata is present.
  captureResult.partialResult = 1;
  captureResult.inputBuffer.streamId = -1;
  captureResult.physicalCameraMetadata.resize(0);
  captureResult.result = createCaptureResultMetadata(timestamp);

  const std::vector<CaptureRequestBuffer>& buffers = request.getBuffers();
  captureResult.outputBuffers.resize(buffers.size());

  if (mTestMode) {
    // In test mode let's just render something to the Surface ourselves.
    renderTestPatternYCbCr420(mEglSurfaceTexture->getSurface(),
                              request.getFrameNumber());
  }

  mEglSurfaceTexture->updateTexture();

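  // Render the most recent input frame into every output buffer of this
  // request, routing BLOB (JPEG) streams through the compression path.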
  for (size_t i = 0; i < buffers.size(); ++i) {
    const CaptureRequestBuffer& reqBuffer = buffers[i];
    StreamBuffer& resBuffer = captureResult.outputBuffers[i];
    resBuffer.streamId = reqBuffer.getStreamId();
    resBuffer.bufferId = reqBuffer.getBufferId();
    resBuffer.status = BufferStatus::OK;

    const std::optional<Stream> streamConfig =
        mSessionContext.getStreamConfig(reqBuffer.getStreamId());

    if (!streamConfig.has_value()) {
      resBuffer.status = BufferStatus::ERROR;
      continue;
    }

    auto status = streamConfig->format == PixelFormat::BLOB
                      ? renderIntoBlobStreamBuffer(reqBuffer.getStreamId(),
                                                   reqBuffer.getBufferId(),
                                                   reqBuffer.getFence())
                      : renderIntoImageStreamBuffer(reqBuffer.getStreamId(),
                                                    reqBuffer.getBufferId(),
                                                    reqBuffer.getFence());
    if (!status.isOk()) {
      resBuffer.status = BufferStatus::ERROR;
    }
  }

  std::vector<NotifyMsg> notifyMsg{
      createShutterNotifyMsg(request.getFrameNumber(), timestamp)};
  for (const StreamBuffer& resBuffer : captureResult.outputBuffers) {
    if (resBuffer.status != BufferStatus::OK) {
      notifyMsg.push_back(createBufferErrorNotifyMsg(request.getFrameNumber(),
                                                     resBuffer.streamId));
    }
  }

  auto status = mCameraDeviceCallback->notify(notifyMsg);
  if (!status.isOk()) {
    ALOGE("%s: notify call failed: %s", __func__,
          status.getDescription().c_str());
    return;
  }

  std::vector<::aidl::android::hardware::camera::device::CaptureResult>
      captureResults(1);
  captureResults[0] = std::move(captureResult);

  status = mCameraDeviceCallback->processCaptureResult(captureResults);
  if (!status.isOk()) {
    ALOGE("%s: processCaptureResult call failed: %s", __func__,
          status.getDescription().c_str());
    return;
  }

  ALOGD("%s: Successfully called processCaptureResult", __func__);
}

void VirtualCameraRenderThread::flushCaptureRequest(
    const ProcessCaptureRequestTask& request) {
  CaptureResult captureResult;
  captureResult.fmqResultSize = 0;
  captureResult.frameNumber = request.getFrameNumber();
  captureResult.inputBuffer.streamId = -1;

  const std::vector<CaptureRequestBuffer>& buffers = request.getBuffers();
  captureResult.outputBuffers.resize(buffers.size());

  for (size_t i = 0; i < buffers.size(); ++i) {
    const CaptureRequestBuffer& reqBuffer = buffers[i];
    StreamBuffer& resBuffer = captureResult.outputBuffers[i];
    resBuffer.streamId = reqBuffer.getStreamId();
    resBuffer.bufferId = reqBuffer.getBufferId();
    resBuffer.status = BufferStatus::ERROR;
    sp<Fence> fence = reqBuffer.getFence();
    if (fence != nullptr && fence->isValid()) {
      resBuffer.releaseFence.fds.emplace_back(fence->dup());
    }
  }

  auto status = mCameraDeviceCallback->notify(
      {createRequestErrorNotifyMsg(request.getFrameNumber())});
  if (!status.isOk()) {
    ALOGE("%s: notify call failed: %s", __func__,
          status.getDescription().c_str());
    return;
  }

  std::vector<::aidl::android::hardware::camera::device::CaptureResult>
      captureResults(1);
  captureResults[0] = std::move(captureResult);

  status = mCameraDeviceCallback->processCaptureResult(captureResults);
  if (!status.isOk()) {
    ALOGE("%s: processCaptureResult call failed: %s", __func__,
          status.getDescription().c_str());
  }
}

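// Produces a JPEG for a BLOB stream buffer: renders the input texture into a
// temporary YUV framebuffer, then compresses it into the CPU-mapped BLOB
// plane (falling back to an all-black JPEG if the framebuffer cannot be
// mapped as a GraphicBuffer).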
ndk::ScopedAStatus VirtualCameraRenderThread::renderIntoBlobStreamBuffer(
    const int streamId, const int bufferId, sp<Fence> fence) {
  ALOGV("%s", __func__);
  std::shared_ptr<AHardwareBuffer> hwBuffer =
      mSessionContext.fetchHardwareBuffer(streamId, bufferId);
  if (hwBuffer == nullptr) {
    ALOGE("%s: Failed to fetch hardware buffer %d for streamId %d", __func__,
          bufferId, streamId);
    return cameraStatus(Status::INTERNAL_ERROR);
  }

  std::optional<Stream> stream = mSessionContext.getStreamConfig(streamId);
  if (!stream.has_value()) {
    ALOGE("%s: Failed to fetch information about stream %d", __func__, streamId);
    return cameraStatus(Status::INTERNAL_ERROR);
  }

  // Create a YUV framebuffer and render the surface into it.
  // This takes care of rescaling as well as potential format conversion.
  std::shared_ptr<EglFrameBuffer> framebuffer = allocateTemporaryFramebuffer(
      mEglDisplayContext->getEglDisplay(), stream->width, stream->height);
  if (framebuffer == nullptr) {
    ALOGE("Failed to allocate temporary framebuffer for JPEG compression");
    return cameraStatus(Status::INTERNAL_ERROR);
  }

  // Render into the temporary framebuffer.
  ndk::ScopedAStatus status = renderIntoEglFramebuffer(*framebuffer);
  if (!status.isOk()) {
    ALOGE("Failed to render input texture into temporary framebuffer");
    return status;
  }

  AHardwareBuffer_Planes planes_info;

  int32_t rawFence = fence != nullptr ? fence->get() : -1;
  int result = AHardwareBuffer_lockPlanes(hwBuffer.get(),
                                          AHARDWAREBUFFER_USAGE_CPU_READ_RARELY,
                                          rawFence, nullptr, &planes_info);
  if (result != OK) {
    ALOGE("%s: Failed to lock planes for BLOB buffer: %d", __func__, result);
    return cameraStatus(Status::INTERNAL_ERROR);
  }

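  // planes_info.planes[0].data now points at the single CPU-visible BLOB
  // plane that will receive the JPEG bitstream.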
  std::shared_ptr<AHardwareBuffer> inHwBuffer = framebuffer->getHardwareBuffer();
  GraphicBuffer* gBuffer = GraphicBuffer::fromAHardwareBuffer(inHwBuffer.get());

  bool compressionSuccess = true;
  if (gBuffer != nullptr) {
    android_ycbcr ycbcr;
    if (gBuffer->getPixelFormat() != HAL_PIXEL_FORMAT_YCbCr_420_888) {
      // This should never happen since we're allocating the temporary buffer
      // with YUV420 layout above.
      ALOGE("%s: Cannot compress non-YUV buffer (pixelFormat %d)", __func__,
            gBuffer->getPixelFormat());
      AHardwareBuffer_unlock(hwBuffer.get(), nullptr);
      return cameraStatus(Status::INTERNAL_ERROR);
    }

    status_t status =
        gBuffer->lockYCbCr(AHARDWAREBUFFER_USAGE_CPU_READ_OFTEN, &ycbcr);
    ALOGV("Locked buffers");
    if (status != NO_ERROR) {
      AHardwareBuffer_unlock(hwBuffer.get(), nullptr);
      ALOGE("%s: Failed to lock graphic buffer: %d", __func__, status);
      return cameraStatus(Status::INTERNAL_ERROR);
    }

    compressionSuccess =
        compressJpeg(gBuffer->getWidth(), gBuffer->getHeight(), ycbcr,
                     stream->bufferSize, planes_info.planes[0].data);

    status_t res = gBuffer->unlock();
    if (res != NO_ERROR) {
      ALOGE("Failed to unlock graphic buffer: %d", res);
    }
  } else {
    compressionSuccess =
        compressBlackJpeg(stream->width, stream->height, stream->bufferSize,
                          planes_info.planes[0].data);
  }
  AHardwareBuffer_unlock(hwBuffer.get(), nullptr);
  ALOGV("Unlocked buffers");
  return compressionSuccess ? ndk::ScopedAStatus::ok()
                            : cameraStatus(Status::INTERNAL_ERROR);
}

ndk::ScopedAStatus VirtualCameraRenderThread::renderIntoImageStreamBuffer(
    int streamId, int bufferId, sp<Fence> fence) {
  ALOGV("%s", __func__);

  const std::chrono::nanoseconds before =
      std::chrono::duration_cast<std::chrono::nanoseconds>(
          std::chrono::steady_clock::now().time_since_epoch());

  // Fetch the EGL framebuffer backed by the output buffer and render the
  // input texture into it.
  std::shared_ptr<EglFrameBuffer> framebuffer =
      mSessionContext.fetchOrCreateEglFramebuffer(
          mEglDisplayContext->getEglDisplay(), streamId, bufferId);
  if (framebuffer == nullptr) {
    ALOGE(
        "%s: Failed to get EGL framebuffer corresponding to buffer id "
        "%d for streamId %d",
        __func__, bufferId, streamId);
    return cameraStatus(Status::ILLEGAL_ARGUMENT);
  }

  ndk::ScopedAStatus status = renderIntoEglFramebuffer(*framebuffer, fence);

  const std::chrono::nanoseconds after =
      std::chrono::duration_cast<std::chrono::nanoseconds>(
          std::chrono::steady_clock::now().time_since_epoch());

  ALOGV("Rendering to buffer %d, stream %d took %lld ns", bufferId, streamId,
        static_cast<long long>((after - before).count()));

  return status;
}

ndk::ScopedAStatus VirtualCameraRenderThread::renderIntoEglFramebuffer(
    EglFrameBuffer& framebuffer, sp<Fence> fence) {
  ALOGV("%s", __func__);
  // Wait for the acquire fence to clear.
  if (fence != nullptr && fence->isValid()) {
    status_t ret = fence->wait(kAcquireFenceTimeout.count());
    if (ret != 0) {
      ALOGE("Timeout while waiting for the acquire fence for buffer");
      return cameraStatus(Status::INTERNAL_ERROR);
    }
  }

  mEglDisplayContext->makeCurrent();
  framebuffer.beforeDraw();

  sp<GraphicBuffer> textureBuffer = mEglSurfaceTexture->getCurrentBuffer();
  if (textureBuffer == nullptr) {
    // If there's no current buffer, nothing was written to the surface and
    // the texture is not initialized yet. Let's render the framebuffer black
    // instead of rendering the texture.
    glClearColor(0.0f, 0.0f, 0.0f, 1.0f);
    glClear(GL_COLOR_BUFFER_BIT);
  } else {
    const bool renderSuccess =
        isYuvFormat(static_cast<PixelFormat>(textureBuffer->getPixelFormat()))
            ? mEglTextureYuvProgram->draw(mEglSurfaceTexture->updateTexture())
            : mEglTextureRgbProgram->draw(mEglSurfaceTexture->updateTexture());
    if (!renderSuccess) {
      ALOGE("%s: Failed to render texture", __func__);
      return cameraStatus(Status::INTERNAL_ERROR);
    }
  }
  framebuffer.afterDraw();

  return ndk::ScopedAStatus::ok();
}

}  // namespace virtualcamera
}  // namespace companion
}  // namespace android