/*
 * Copyright (C) 2023 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#define LOG_TAG "VirtualCameraRenderThread"
#include "VirtualCameraRenderThread.h"

#include <chrono>
#include <cstdint>
#include <future>
#include <memory>
#include <mutex>
#include <thread>

#include "GLES/gl.h"
#include "VirtualCameraDevice.h"
#include "VirtualCameraSessionContext.h"
#include "aidl/android/hardware/camera/common/Status.h"
#include "aidl/android/hardware/camera/device/BufferStatus.h"
#include "aidl/android/hardware/camera/device/CameraMetadata.h"
#include "aidl/android/hardware/camera/device/CaptureResult.h"
#include "aidl/android/hardware/camera/device/ErrorCode.h"
#include "aidl/android/hardware/camera/device/ICameraDeviceCallback.h"
#include "aidl/android/hardware/camera/device/NotifyMsg.h"
#include "aidl/android/hardware/camera/device/ShutterMsg.h"
#include "aidl/android/hardware/camera/device/StreamBuffer.h"
#include "android-base/thread_annotations.h"
#include "android/binder_auto_utils.h"
#include "android/hardware_buffer.h"
#include "ui/GraphicBuffer.h"
#include "util/EglFramebuffer.h"
#include "util/JpegUtil.h"
#include "util/MetadataBuilder.h"
#include "util/TestPatternHelper.h"
#include "util/Util.h"
#include "utils/Errors.h"

namespace android {
namespace companion {
namespace virtualcamera {

using ::aidl::android::hardware::camera::common::Status;
using ::aidl::android::hardware::camera::device::BufferStatus;
using ::aidl::android::hardware::camera::device::CameraMetadata;
using ::aidl::android::hardware::camera::device::CaptureResult;
using ::aidl::android::hardware::camera::device::ErrorCode;
using ::aidl::android::hardware::camera::device::ErrorMsg;
using ::aidl::android::hardware::camera::device::ICameraDeviceCallback;
using ::aidl::android::hardware::camera::device::NotifyMsg;
using ::aidl::android::hardware::camera::device::ShutterMsg;
using ::aidl::android::hardware::camera::device::Stream;
using ::aidl::android::hardware::camera::device::StreamBuffer;
using ::aidl::android::hardware::graphics::common::PixelFormat;
using ::android::base::ScopedLockAssertion;

namespace {

using namespace std::chrono_literals;

static constexpr std::chrono::milliseconds kAcquireFenceTimeout = 500ms;

// See REQUEST_PIPELINE_DEPTH in CaptureResult.java.
// This roughly corresponds to frame latency; we set it to
// the documented minimum of 2.
static constexpr uint8_t kPipelineDepth = 2;

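// Builds the capture result metadata attached to every frame, reporting
// fixed 3A modes, the full-sensor crop region, and the given sensor
// timestamp.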
CameraMetadata createCaptureResultMetadata(
    const std::chrono::nanoseconds timestamp,
    const Resolution reportedSensorSize) {
  std::unique_ptr<CameraMetadata> metadata =
      MetadataBuilder()
          .setControlAeMode(ANDROID_CONTROL_AE_MODE_ON)
          .setControlAePrecaptureTrigger(
              ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER_IDLE)
          .setControlAfMode(ANDROID_CONTROL_AF_MODE_OFF)
          .setControlAwbMode(ANDROID_CONTROL_AWB_MODE_AUTO)
          .setControlEffectMode(ANDROID_CONTROL_EFFECT_MODE_OFF)
          .setControlMode(ANDROID_CONTROL_MODE_AUTO)
          .setCropRegion(0, 0, reportedSensorSize.width,
                         reportedSensorSize.height)
          .setFaceDetectMode(ANDROID_STATISTICS_FACE_DETECT_MODE_OFF)
          .setFlashState(ANDROID_FLASH_STATE_UNAVAILABLE)
          .setFocalLength(VirtualCameraDevice::kFocalLength)
          .setPipelineDepth(kPipelineDepth)
          .setSensorTimestamp(timestamp)
          .build();
  if (metadata == nullptr) {
    ALOGE("%s: Failed to build capture result metadata", __func__);
    return CameraMetadata();
  }
  return std::move(*metadata);
}

NotifyMsg createShutterNotifyMsg(int frameNumber,
                                 std::chrono::nanoseconds timestamp) {
  NotifyMsg msg;
  msg.set<NotifyMsg::Tag::shutter>(ShutterMsg{
      .frameNumber = frameNumber,
      .timestamp = timestamp.count(),
  });
  return msg;
}

NotifyMsg createBufferErrorNotifyMsg(int frameNumber, int streamId) {
  NotifyMsg msg;
  msg.set<NotifyMsg::Tag::error>(ErrorMsg{.frameNumber = frameNumber,
                                          .errorStreamId = streamId,
                                          .errorCode = ErrorCode::ERROR_BUFFER});
  return msg;
}

NotifyMsg createRequestErrorNotifyMsg(int frameNumber) {
  NotifyMsg msg;
  msg.set<NotifyMsg::Tag::error>(ErrorMsg{
      .frameNumber = frameNumber,
      // errorStreamId needs to be set to -1 for ERROR_REQUEST
      // (not tied to a specific stream).
      .errorStreamId = -1,
      .errorCode = ErrorCode::ERROR_REQUEST});
  return msg;
}

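// Allocates a temporary YUV420 hardware buffer (GPU-renderable and
// CPU-readable) wrapped in an EglFrameBuffer; returns nullptr if the
// allocation fails.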
std::shared_ptr<EglFrameBuffer> allocateTemporaryFramebuffer(
    EGLDisplay eglDisplay, const int width, const int height) {
  const AHardwareBuffer_Desc desc{
      .width = static_cast<uint32_t>(width),
      .height = static_cast<uint32_t>(height),
      .layers = 1,
      .format = AHARDWAREBUFFER_FORMAT_Y8Cb8Cr8_420,
      .usage = AHARDWAREBUFFER_USAGE_GPU_FRAMEBUFFER |
               AHARDWAREBUFFER_USAGE_CPU_READ_OFTEN,
      .rfu0 = 0,
      .rfu1 = 0};

  AHardwareBuffer* hwBufferPtr;
  int status = AHardwareBuffer_allocate(&desc, &hwBufferPtr);
  if (status != NO_ERROR) {
    ALOGE(
        "%s: Failed to allocate hardware buffer for temporary framebuffer: %d",
        __func__, status);
    return nullptr;
  }

  return std::make_shared<EglFrameBuffer>(
      eglDisplay,
      std::shared_ptr<AHardwareBuffer>(hwBufferPtr, AHardwareBuffer_release));
}

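// Returns true for HAL pixel formats with a YCbCr layout; used to choose
// between the YUV and RGBA EGL texture programs when rendering.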
bool isYuvFormat(const PixelFormat pixelFormat) {
  switch (static_cast<android_pixel_format_t>(pixelFormat)) {
    case HAL_PIXEL_FORMAT_YCBCR_422_I:
    case HAL_PIXEL_FORMAT_YCBCR_422_SP:
    case HAL_PIXEL_FORMAT_Y16:
    case HAL_PIXEL_FORMAT_YV12:
    case HAL_PIXEL_FORMAT_YCBCR_420_888:
      return true;
    default:
      return false;
  }
}

}  // namespace

CaptureRequestBuffer::CaptureRequestBuffer(int streamId, int bufferId,
                                           sp<Fence> fence)
    : mStreamId(streamId), mBufferId(bufferId), mFence(fence) {
}

int CaptureRequestBuffer::getStreamId() const {
  return mStreamId;
}

int CaptureRequestBuffer::getBufferId() const {
  return mBufferId;
}

sp<Fence> CaptureRequestBuffer::getFence() const {
  return mFence;
}

VirtualCameraRenderThread::VirtualCameraRenderThread(
    VirtualCameraSessionContext& sessionContext,
    const Resolution inputSurfaceSize, const Resolution reportedSensorSize,
    std::shared_ptr<ICameraDeviceCallback> cameraDeviceCallback, bool testMode)
    : mCameraDeviceCallback(cameraDeviceCallback),
      mInputSurfaceSize(inputSurfaceSize),
      mReportedSensorSize(reportedSensorSize),
      mTestMode(testMode),
      mSessionContext(sessionContext) {
}

VirtualCameraRenderThread::~VirtualCameraRenderThread() {
  stop();
  if (mThread.joinable()) {
    mThread.join();
  }
}

ProcessCaptureRequestTask::ProcessCaptureRequestTask(
    int frameNumber, const std::vector<CaptureRequestBuffer>& requestBuffers)
    : mFrameNumber(frameNumber), mBuffers(requestBuffers) {
}

int ProcessCaptureRequestTask::getFrameNumber() const {
  return mFrameNumber;
}

const std::vector<CaptureRequestBuffer>& ProcessCaptureRequestTask::getBuffers()
    const {
  return mBuffers;
}

void VirtualCameraRenderThread::enqueueTask(
    std::unique_ptr<ProcessCaptureRequestTask> task) {
  std::lock_guard<std::mutex> lock(mLock);
  mQueue.emplace_back(std::move(task));
  mCondVar.notify_one();
}

void VirtualCameraRenderThread::flush() {
  std::lock_guard<std::mutex> lock(mLock);
  while (!mQueue.empty()) {
    std::unique_ptr<ProcessCaptureRequestTask> task = std::move(mQueue.front());
    mQueue.pop_front();
    flushCaptureRequest(*task);
  }
}

void VirtualCameraRenderThread::start() {
  mThread = std::thread(&VirtualCameraRenderThread::threadLoop, this);
}

void VirtualCameraRenderThread::stop() {
  {
    std::lock_guard<std::mutex> lock(mLock);
    mPendingExit = true;
    mCondVar.notify_one();
  }
}

sp<Surface> VirtualCameraRenderThread::getInputSurface() {
  return mInputSurfacePromise.get_future().get();
}

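// A minimal usage sketch of the lifecycle implied by the methods above; the
// variable names are illustrative, not part of this file:
//
//   VirtualCameraRenderThread thread(sessionContext, inputSurfaceSize,
//                                    reportedSensorSize, callback,
//                                    /*testMode=*/false);
//   thread.start();
//   // Blocks until the render thread has created its EGL surface texture.
//   sp<Surface> surface = thread.getInputSurface();
//   thread.enqueueTask(std::make_unique<ProcessCaptureRequestTask>(
//       frameNumber, requestBuffers));
//   thread.flush();  // Fails any queued requests with ERROR_REQUEST.
//   thread.stop();   // The destructor also stops and joins the thread.
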
std::unique_ptr<ProcessCaptureRequestTask>
VirtualCameraRenderThread::dequeueTask() {
  std::unique_lock<std::mutex> lock(mLock);
  // Clang's thread safety analysis doesn't perform alias analysis,
  // so it doesn't support movable std::unique_lock.
  //
  // The lock assertion below is an explicit declaration that
  // the lock is held in this scope, which is true, since it's only
  // released while waiting inside the mCondVar.wait calls.
  ScopedLockAssertion lockAssertion(mLock);

  mCondVar.wait(lock, [this]() REQUIRES(mLock) {
    return mPendingExit || !mQueue.empty();
  });
  if (mPendingExit) {
    return nullptr;
  }
  std::unique_ptr<ProcessCaptureRequestTask> task = std::move(mQueue.front());
  mQueue.pop_front();
  return task;
}

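// Entry point of the render thread. All EGL objects are created and used on
// this thread, since an EGL context can only be current on one thread at a
// time; the input surface promise is fulfilled once that setup is done.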
void VirtualCameraRenderThread::threadLoop() {
  ALOGV("Render thread starting");

  mEglDisplayContext = std::make_unique<EglDisplayContext>();
  mEglTextureYuvProgram =
      std::make_unique<EglTextureProgram>(EglTextureProgram::TextureFormat::YUV);
  mEglTextureRgbProgram = std::make_unique<EglTextureProgram>(
      EglTextureProgram::TextureFormat::RGBA);
  mEglSurfaceTexture = std::make_unique<EglSurfaceTexture>(
      mInputSurfaceSize.width, mInputSurfaceSize.height);
  mInputSurfacePromise.set_value(mEglSurfaceTexture->getSurface());

  while (std::unique_ptr<ProcessCaptureRequestTask> task = dequeueTask()) {
    processCaptureRequest(*task);
  }

  ALOGV("Render thread exiting");
}

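// Renders one capture request into all of its output buffers, then reports
// the shutter event (plus any per-buffer errors) and the capture result
// through the camera device callback.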
void VirtualCameraRenderThread::processCaptureRequest(
    const ProcessCaptureRequestTask& request) {
  const std::chrono::nanoseconds timestamp =
      std::chrono::duration_cast<std::chrono::nanoseconds>(
          std::chrono::steady_clock::now().time_since_epoch());

  CaptureResult captureResult;
  captureResult.fmqResultSize = 0;
  captureResult.frameNumber = request.getFrameNumber();
  // Partial result needs to be set to 1 when metadata is present.
  captureResult.partialResult = 1;
  captureResult.inputBuffer.streamId = -1;
  captureResult.physicalCameraMetadata.resize(0);
  captureResult.result =
      createCaptureResultMetadata(timestamp, mReportedSensorSize);

  const std::vector<CaptureRequestBuffer>& buffers = request.getBuffers();
  captureResult.outputBuffers.resize(buffers.size());

  if (mTestMode) {
    // In test mode let's just render something to the Surface ourselves.
    renderTestPatternYCbCr420(mEglSurfaceTexture->getSurface(),
                              request.getFrameNumber());
  }

  mEglSurfaceTexture->updateTexture();

  for (size_t i = 0; i < buffers.size(); ++i) {
    const CaptureRequestBuffer& reqBuffer = buffers[i];
    StreamBuffer& resBuffer = captureResult.outputBuffers[i];
    resBuffer.streamId = reqBuffer.getStreamId();
    resBuffer.bufferId = reqBuffer.getBufferId();
    resBuffer.status = BufferStatus::OK;

    const std::optional<Stream> streamConfig =
        mSessionContext.getStreamConfig(reqBuffer.getStreamId());

    if (!streamConfig.has_value()) {
      resBuffer.status = BufferStatus::ERROR;
      continue;
    }

    auto status = streamConfig->format == PixelFormat::BLOB
                      ? renderIntoBlobStreamBuffer(reqBuffer.getStreamId(),
                                                   reqBuffer.getBufferId(),
                                                   reqBuffer.getFence())
                      : renderIntoImageStreamBuffer(reqBuffer.getStreamId(),
                                                    reqBuffer.getBufferId(),
                                                    reqBuffer.getFence());
    if (!status.isOk()) {
      resBuffer.status = BufferStatus::ERROR;
    }
  }

  std::vector<NotifyMsg> notifyMsg{
      createShutterNotifyMsg(request.getFrameNumber(), timestamp)};
  for (const StreamBuffer& resBuffer : captureResult.outputBuffers) {
    if (resBuffer.status != BufferStatus::OK) {
      notifyMsg.push_back(createBufferErrorNotifyMsg(request.getFrameNumber(),
                                                     resBuffer.streamId));
    }
  }

  auto status = mCameraDeviceCallback->notify(notifyMsg);
  if (!status.isOk()) {
    ALOGE("%s: notify call failed: %s", __func__,
          status.getDescription().c_str());
    return;
  }

  std::vector<::aidl::android::hardware::camera::device::CaptureResult>
      captureResults(1);
  captureResults[0] = std::move(captureResult);

  status = mCameraDeviceCallback->processCaptureResult(captureResults);
  if (!status.isOk()) {
    ALOGE("%s: processCaptureResult call failed: %s", __func__,
          status.getDescription().c_str());
    return;
  }

  ALOGD("%s: Successfully called processCaptureResult", __func__);
}

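// Completes a capture request without rendering: notifies the client with
// ERROR_REQUEST and returns all of its buffers with BufferStatus::ERROR.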
void VirtualCameraRenderThread::flushCaptureRequest(
    const ProcessCaptureRequestTask& request) {
  CaptureResult captureResult;
  captureResult.fmqResultSize = 0;
  captureResult.frameNumber = request.getFrameNumber();
  captureResult.inputBuffer.streamId = -1;

  const std::vector<CaptureRequestBuffer>& buffers = request.getBuffers();
  captureResult.outputBuffers.resize(buffers.size());

  for (size_t i = 0; i < buffers.size(); ++i) {
    const CaptureRequestBuffer& reqBuffer = buffers[i];
    StreamBuffer& resBuffer = captureResult.outputBuffers[i];
    resBuffer.streamId = reqBuffer.getStreamId();
    resBuffer.bufferId = reqBuffer.getBufferId();
    resBuffer.status = BufferStatus::ERROR;
    sp<Fence> fence = reqBuffer.getFence();
    if (fence != nullptr && fence->isValid()) {
      resBuffer.releaseFence.fds.emplace_back(fence->dup());
    }
  }

  auto status = mCameraDeviceCallback->notify(
      {createRequestErrorNotifyMsg(request.getFrameNumber())});
  if (!status.isOk()) {
    ALOGE("%s: notify call failed: %s", __func__,
          status.getDescription().c_str());
    return;
  }

  std::vector<::aidl::android::hardware::camera::device::CaptureResult>
      captureResults(1);
  captureResults[0] = std::move(captureResult);

  status = mCameraDeviceCallback->processCaptureResult(captureResults);
  if (!status.isOk()) {
    ALOGE("%s: processCaptureResult call failed: %s", __func__,
          status.getDescription().c_str());
  }
}

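// Renders the current input texture into a BLOB (JPEG) stream buffer:
// the texture is first drawn into a temporary YUV framebuffer sized to the
// stream, and the result is then JPEG-compressed into the output buffer.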
ndk::ScopedAStatus VirtualCameraRenderThread::renderIntoBlobStreamBuffer(
    const int streamId, const int bufferId, sp<Fence> fence) {
  ALOGV("%s", __func__);
  std::shared_ptr<AHardwareBuffer> hwBuffer =
      mSessionContext.fetchHardwareBuffer(streamId, bufferId);
  if (hwBuffer == nullptr) {
    ALOGE("%s: Failed to fetch hardware buffer %d for streamId %d", __func__,
          bufferId, streamId);
    return cameraStatus(Status::INTERNAL_ERROR);
  }

  std::optional<Stream> stream = mSessionContext.getStreamConfig(streamId);
  if (!stream.has_value()) {
    ALOGE("%s: Failed to fetch information about stream %d", __func__, streamId);
    return cameraStatus(Status::INTERNAL_ERROR);
  }

  // Create a YUV framebuffer and render the surface into it. This takes care
  // of rescaling as well as any potential format conversion.
  std::shared_ptr<EglFrameBuffer> framebuffer = allocateTemporaryFramebuffer(
      mEglDisplayContext->getEglDisplay(), stream->width, stream->height);
  if (framebuffer == nullptr) {
    ALOGE("Failed to allocate temporary framebuffer for JPEG compression");
    return cameraStatus(Status::INTERNAL_ERROR);
  }

  // Render into the temporary framebuffer.
  ndk::ScopedAStatus status = renderIntoEglFramebuffer(*framebuffer);
  if (!status.isOk()) {
    ALOGE("Failed to render input texture into temporary framebuffer");
    return status;
  }

  AHardwareBuffer_Planes planes_info;

  int32_t rawFence = fence != nullptr ? fence->get() : -1;
  int result = AHardwareBuffer_lockPlanes(hwBuffer.get(),
                                          AHARDWAREBUFFER_USAGE_CPU_READ_RARELY,
                                          rawFence, nullptr, &planes_info);
  if (result != OK) {
    ALOGE("%s: Failed to lock planes for BLOB buffer: %d", __func__, result);
    return cameraStatus(Status::INTERNAL_ERROR);
  }

  std::shared_ptr<AHardwareBuffer> inHwBuffer = framebuffer->getHardwareBuffer();
  GraphicBuffer* gBuffer = GraphicBuffer::fromAHardwareBuffer(inHwBuffer.get());

  bool compressionSuccess = true;
  if (gBuffer != nullptr) {
    android_ycbcr ycbcr;
    if (gBuffer->getPixelFormat() != HAL_PIXEL_FORMAT_YCbCr_420_888) {
      // This should never happen since we're allocating the temporary buffer
      // with YUV420 layout above.
      ALOGE("%s: Cannot compress non-YUV buffer (pixelFormat %d)", __func__,
            gBuffer->getPixelFormat());
      AHardwareBuffer_unlock(hwBuffer.get(), nullptr);
      return cameraStatus(Status::INTERNAL_ERROR);
    }

    status_t status =
        gBuffer->lockYCbCr(AHARDWAREBUFFER_USAGE_CPU_READ_OFTEN, &ycbcr);
    ALOGV("Locked buffers");
    if (status != NO_ERROR) {
      AHardwareBuffer_unlock(hwBuffer.get(), nullptr);
      ALOGE("%s: Failed to lock graphic buffer: %d", __func__, status);
      return cameraStatus(Status::INTERNAL_ERROR);
    }

    compressionSuccess =
        compressJpeg(gBuffer->getWidth(), gBuffer->getHeight(), ycbcr,
                     stream->bufferSize, planes_info.planes[0].data);

    status_t res = gBuffer->unlock();
    if (res != NO_ERROR) {
      ALOGE("Failed to unlock graphic buffer: %d", res);
    }
  } else {
    compressionSuccess =
        compressBlackJpeg(stream->width, stream->height, stream->bufferSize,
                          planes_info.planes[0].data);
  }
  AHardwareBuffer_unlock(hwBuffer.get(), nullptr);
  ALOGV("Unlocked buffers");
  return compressionSuccess ? ndk::ScopedAStatus::ok()
                            : cameraStatus(Status::INTERNAL_ERROR);
}

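// Renders the current input texture into a YUV/RGBA image stream buffer via
// the EGL framebuffer wrapping that buffer.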
ndk::ScopedAStatus VirtualCameraRenderThread::renderIntoImageStreamBuffer(
    int streamId, int bufferId, sp<Fence> fence) {
  ALOGV("%s", __func__);

  const std::chrono::nanoseconds before =
      std::chrono::duration_cast<std::chrono::nanoseconds>(
          std::chrono::steady_clock::now().time_since_epoch());

  // Render the input texture into the EGL framebuffer backed by this buffer.
  std::shared_ptr<EglFrameBuffer> framebuffer =
      mSessionContext.fetchOrCreateEglFramebuffer(
          mEglDisplayContext->getEglDisplay(), streamId, bufferId);
  if (framebuffer == nullptr) {
    ALOGE(
        "%s: Failed to get EGL framebuffer corresponding to buffer id "
        "%d for streamId %d",
        __func__, bufferId, streamId);
    return cameraStatus(Status::ILLEGAL_ARGUMENT);
  }

  ndk::ScopedAStatus status = renderIntoEglFramebuffer(*framebuffer, fence);

  const std::chrono::nanoseconds after =
      std::chrono::duration_cast<std::chrono::nanoseconds>(
          std::chrono::steady_clock::now().time_since_epoch());

  ALOGV("Rendering to buffer %d, stream %d took %lld ns", bufferId, streamId,
        after.count() - before.count());

  return status;
}

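// Draws the current input texture into the given framebuffer, optionally
// waiting for the buffer's acquire fence first; picks the YUV or RGBA
// shader program based on the texture buffer's pixel format.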
ndk::ScopedAStatus VirtualCameraRenderThread::renderIntoEglFramebuffer(
    EglFrameBuffer& framebuffer, sp<Fence> fence) {
  ALOGV("%s", __func__);
  // Wait for the acquire fence to clear before rendering.
  if (fence != nullptr && fence->isValid()) {
    status_t ret = fence->wait(kAcquireFenceTimeout.count());
    if (ret != 0) {
      ALOGE("Timeout while waiting for the acquire fence for buffer");
      return cameraStatus(Status::INTERNAL_ERROR);
    }
  }

  mEglDisplayContext->makeCurrent();
  framebuffer.beforeDraw();

  sp<GraphicBuffer> textureBuffer = mEglSurfaceTexture->getCurrentBuffer();
  if (textureBuffer == nullptr) {
    // If there's no current buffer, nothing was written to the surface and
    // the texture is not initialized yet. Clear the framebuffer to a fixed
    // fallback color instead of rendering the texture.
    glClearColor(0.0f, 0.5f, 0.5f, 0.0f);
    glClear(GL_COLOR_BUFFER_BIT);
  } else {
    const bool renderSuccess =
        isYuvFormat(static_cast<PixelFormat>(textureBuffer->getPixelFormat()))
            ? mEglTextureYuvProgram->draw(
                  mEglSurfaceTexture->getTextureId(),
                  mEglSurfaceTexture->getTransformMatrix())
            : mEglTextureRgbProgram->draw(
                  mEglSurfaceTexture->getTextureId(),
                  mEglSurfaceTexture->getTransformMatrix());
    if (!renderSuccess) {
      ALOGE("%s: Failed to render texture", __func__);
      return cameraStatus(Status::INTERNAL_ERROR);
    }
  }
  framebuffer.afterDraw();

  return ndk::ScopedAStatus::ok();
}

}  // namespace virtualcamera
}  // namespace companion
}  // namespace android