blob: 8bd8c9b33c15e4e3ffef6247ac40b5bdd830e0fe [file] [log] [blame]
Jan Sebechlebsky5cb39962023-11-22 17:33:07 +01001/*
2 * Copyright (C) 2023 The Android Open Source Project
3 *
4 * Licensed under the Apache License, Version 2.0 (the "License");
5 * you may not use this file except in compliance with the License.
6 * You may obtain a copy of the License at
7 *
8 * http://www.apache.org/licenses/LICENSE-2.0
9 *
10 * Unless required by applicable law or agreed to in writing, software
11 * distributed under the License is distributed on an "AS IS" BASIS,
12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 * See the License for the specific language governing permissions and
14 * limitations under the License.
15 */
16
Jan Sebechlebsky4be2bd02024-02-26 18:35:18 +010017#include "system/camera_metadata.h"
Jan Sebechlebsky5cb39962023-11-22 17:33:07 +010018#define LOG_TAG "VirtualCameraRenderThread"
19#include "VirtualCameraRenderThread.h"
20
21#include <chrono>
Jan Sebechlebsky042d1fb2023-12-12 16:37:00 +010022#include <cstdint>
Jan Sebechlebsky4ce32082024-02-14 16:02:11 +010023#include <cstring>
Jan Sebechlebsky5cb39962023-11-22 17:33:07 +010024#include <future>
25#include <memory>
26#include <mutex>
27#include <thread>
Jan Sebechlebsky4ce32082024-02-14 16:02:11 +010028#include <vector>
Jan Sebechlebsky5cb39962023-11-22 17:33:07 +010029
Jan Sebechlebsky4ce32082024-02-14 16:02:11 +010030#include "Exif.h"
Jan Sebechlebsky9ae496f2023-12-05 15:56:28 +010031#include "GLES/gl.h"
Biswarup Pal8ad8bc52024-02-08 13:41:44 +000032#include "VirtualCameraDevice.h"
Jan Sebechlebsky5cb39962023-11-22 17:33:07 +010033#include "VirtualCameraSessionContext.h"
34#include "aidl/android/hardware/camera/common/Status.h"
35#include "aidl/android/hardware/camera/device/BufferStatus.h"
Jan Sebechlebsky4ce32082024-02-14 16:02:11 +010036#include "aidl/android/hardware/camera/device/CameraBlob.h"
37#include "aidl/android/hardware/camera/device/CameraBlobId.h"
Jan Sebechlebsky5cb39962023-11-22 17:33:07 +010038#include "aidl/android/hardware/camera/device/CameraMetadata.h"
39#include "aidl/android/hardware/camera/device/CaptureResult.h"
40#include "aidl/android/hardware/camera/device/ErrorCode.h"
41#include "aidl/android/hardware/camera/device/ICameraDeviceCallback.h"
42#include "aidl/android/hardware/camera/device/NotifyMsg.h"
43#include "aidl/android/hardware/camera/device/ShutterMsg.h"
44#include "aidl/android/hardware/camera/device/StreamBuffer.h"
45#include "android-base/thread_annotations.h"
46#include "android/binder_auto_utils.h"
47#include "android/hardware_buffer.h"
Jan Sebechlebsky042d1fb2023-12-12 16:37:00 +010048#include "ui/GraphicBuffer.h"
Jan Sebechlebsky5cb39962023-11-22 17:33:07 +010049#include "util/EglFramebuffer.h"
50#include "util/JpegUtil.h"
Jan Sebechlebsky4ce32082024-02-14 16:02:11 +010051#include "util/MetadataUtil.h"
Jan Sebechlebsky5cb39962023-11-22 17:33:07 +010052#include "util/TestPatternHelper.h"
53#include "util/Util.h"
54#include "utils/Errors.h"
55
56namespace android {
57namespace companion {
58namespace virtualcamera {
59
60using ::aidl::android::hardware::camera::common::Status;
61using ::aidl::android::hardware::camera::device::BufferStatus;
Jan Sebechlebsky4ce32082024-02-14 16:02:11 +010062using ::aidl::android::hardware::camera::device::CameraBlob;
63using ::aidl::android::hardware::camera::device::CameraBlobId;
Jan Sebechlebsky5cb39962023-11-22 17:33:07 +010064using ::aidl::android::hardware::camera::device::CameraMetadata;
65using ::aidl::android::hardware::camera::device::CaptureResult;
66using ::aidl::android::hardware::camera::device::ErrorCode;
67using ::aidl::android::hardware::camera::device::ErrorMsg;
68using ::aidl::android::hardware::camera::device::ICameraDeviceCallback;
69using ::aidl::android::hardware::camera::device::NotifyMsg;
70using ::aidl::android::hardware::camera::device::ShutterMsg;
71using ::aidl::android::hardware::camera::device::Stream;
72using ::aidl::android::hardware::camera::device::StreamBuffer;
73using ::aidl::android::hardware::graphics::common::PixelFormat;
74using ::android::base::ScopedLockAssertion;
75
Jan Sebechlebsky4ce32082024-02-14 16:02:11 +010076using ::android::hardware::camera::common::helper::ExifUtils;
77
Jan Sebechlebsky5cb39962023-11-22 17:33:07 +010078namespace {
79
using namespace std::chrono_literals;

// Maximum time to wait for a buffer's acquire fence to signal before the
// render attempt is failed (see renderIntoEglFramebuffer).
static constexpr std::chrono::milliseconds kAcquireFenceTimeout = 500ms;

// See REQUEST_PIPELINE_DEPTH in CaptureResult.java.
// This roughly corresponds to frame latency, we set to
// documented minimum of 2.
static constexpr uint8_t kPipelineDepth = 2;

// Fixed-size scratch buffer used when compressing the EXIF JPEG thumbnail
// (see VirtualCameraRenderThread::createThumbnail).
static constexpr size_t kJpegThumbnailBufferSize = 32 * 1024;  // 32 KiB
90
// Builds the per-frame capture result metadata reported back to the camera
// client: mostly fixed "everything is off / auto" 3A state, plus the
// sensor timestamp, crop region derived from the reported sensor size and the
// JPEG (thumbnail) settings taken from the request.
// Returns empty CameraMetadata when the builder fails.
CameraMetadata createCaptureResultMetadata(
    const std::chrono::nanoseconds timestamp,
    const RequestSettings& requestSettings,
    const Resolution reportedSensorSize) {
  std::unique_ptr<CameraMetadata> metadata =
      MetadataBuilder()
          .setAberrationCorrectionMode(
              ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF)
          .setControlAeMode(ANDROID_CONTROL_AE_MODE_ON)
          .setControlAePrecaptureTrigger(
              ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER_IDLE)
          .setControlAfMode(ANDROID_CONTROL_AF_MODE_OFF)
          .setControlAwbMode(ANDROID_CONTROL_AWB_MODE_AUTO)
          .setControlEffectMode(ANDROID_CONTROL_EFFECT_MODE_OFF)
          .setControlMode(ANDROID_CONTROL_MODE_AUTO)
          .setCropRegion(0, 0, reportedSensorSize.width,
                         reportedSensorSize.height)
          .setFaceDetectMode(ANDROID_STATISTICS_FACE_DETECT_MODE_OFF)
          .setFlashState(ANDROID_FLASH_STATE_UNAVAILABLE)
          .setFocalLength(VirtualCameraDevice::kFocalLength)
          .setJpegQuality(requestSettings.jpegQuality)
          .setJpegThumbnailSize(requestSettings.thumbnailResolution.width,
                                requestSettings.thumbnailResolution.height)
          .setJpegThumbnailQuality(requestSettings.thumbnailJpegQuality)
          .setNoiseReductionMode(ANDROID_NOISE_REDUCTION_MODE_OFF)
          .setPipelineDepth(kPipelineDepth)
          .setSensorTimestamp(timestamp)
          .build();
  if (metadata == nullptr) {
    ALOGE("%s: Failed to build capture result metadata", __func__);
    return CameraMetadata();
  }
  return std::move(*metadata);
}
125
126NotifyMsg createShutterNotifyMsg(int frameNumber,
127 std::chrono::nanoseconds timestamp) {
128 NotifyMsg msg;
129 msg.set<NotifyMsg::Tag::shutter>(ShutterMsg{
130 .frameNumber = frameNumber,
131 .timestamp = timestamp.count(),
132 });
133 return msg;
134}
135
136NotifyMsg createBufferErrorNotifyMsg(int frameNumber, int streamId) {
137 NotifyMsg msg;
138 msg.set<NotifyMsg::Tag::error>(ErrorMsg{.frameNumber = frameNumber,
139 .errorStreamId = streamId,
140 .errorCode = ErrorCode::ERROR_BUFFER});
141 return msg;
142}
143
144NotifyMsg createRequestErrorNotifyMsg(int frameNumber) {
145 NotifyMsg msg;
146 msg.set<NotifyMsg::Tag::error>(ErrorMsg{
Jan Sebechlebskyb0d8cab2023-11-28 10:55:04 +0100147 .frameNumber = frameNumber,
148 // errorStreamId needs to be set to -1 for ERROR_REQUEST
149 // (not tied to specific stream).
150 .errorStreamId = -1,
151 .errorCode = ErrorCode::ERROR_REQUEST});
Jan Sebechlebsky5cb39962023-11-22 17:33:07 +0100152 return msg;
153}
154
Jan Sebechlebsky042d1fb2023-12-12 16:37:00 +0100155std::shared_ptr<EglFrameBuffer> allocateTemporaryFramebuffer(
156 EGLDisplay eglDisplay, const uint width, const int height) {
157 const AHardwareBuffer_Desc desc{
158 .width = static_cast<uint32_t>(width),
159 .height = static_cast<uint32_t>(height),
160 .layers = 1,
161 .format = AHARDWAREBUFFER_FORMAT_Y8Cb8Cr8_420,
162 .usage = AHARDWAREBUFFER_USAGE_GPU_FRAMEBUFFER |
163 AHARDWAREBUFFER_USAGE_CPU_READ_OFTEN,
164 .rfu0 = 0,
165 .rfu1 = 0};
166
167 AHardwareBuffer* hwBufferPtr;
168 int status = AHardwareBuffer_allocate(&desc, &hwBufferPtr);
169 if (status != NO_ERROR) {
170 ALOGE(
171 "%s: Failed to allocate hardware buffer for temporary framebuffer: %d",
172 __func__, status);
173 return nullptr;
174 }
175
176 return std::make_shared<EglFrameBuffer>(
177 eglDisplay,
178 std::shared_ptr<AHardwareBuffer>(hwBufferPtr, AHardwareBuffer_release));
179}
180
181bool isYuvFormat(const PixelFormat pixelFormat) {
182 switch (static_cast<android_pixel_format_t>(pixelFormat)) {
183 case HAL_PIXEL_FORMAT_YCBCR_422_I:
184 case HAL_PIXEL_FORMAT_YCBCR_422_SP:
185 case HAL_PIXEL_FORMAT_Y16:
186 case HAL_PIXEL_FORMAT_YV12:
187 case HAL_PIXEL_FORMAT_YCBCR_420_888:
188 return true;
189 default:
190 return false;
191 }
192}
193
Jan Sebechlebsky4ce32082024-02-14 16:02:11 +0100194std::vector<uint8_t> createExif(
195 Resolution imageSize, const std::vector<uint8_t>& compressedThumbnail = {}) {
196 std::unique_ptr<ExifUtils> exifUtils(ExifUtils::create());
197 exifUtils->initialize();
198 exifUtils->setImageWidth(imageSize.width);
199 exifUtils->setImageHeight(imageSize.height);
200 // TODO(b/324383963) Set Make/Model and orientation.
201
202 std::vector<uint8_t> app1Data;
203
204 size_t thumbnailDataSize = compressedThumbnail.size();
205 const void* thumbnailData =
206 thumbnailDataSize > 0
207 ? reinterpret_cast<const void*>(compressedThumbnail.data())
208 : nullptr;
209
210 if (!exifUtils->generateApp1(thumbnailData, thumbnailDataSize)) {
211 ALOGE("%s: Failed to generate APP1 segment for EXIF metadata", __func__);
212 return app1Data;
213 }
214
215 const uint8_t* data = exifUtils->getApp1Buffer();
216 const size_t size = exifUtils->getApp1Length();
217
218 app1Data.insert(app1Data.end(), data, data + size);
219 return app1Data;
220}
221
Jan Sebechlebsky5cb39962023-11-22 17:33:07 +0100222} // namespace
223
// Captures the identity (stream id, buffer id) and acquire fence of a single
// output buffer belonging to a capture request.
CaptureRequestBuffer::CaptureRequestBuffer(int streamId, int bufferId,
                                           sp<Fence> fence)
    : mStreamId(streamId), mBufferId(bufferId), mFence(fence) {
}

// Returns the id of the stream this buffer belongs to.
int CaptureRequestBuffer::getStreamId() const {
  return mStreamId;
}

// Returns the id of the buffer within its stream.
int CaptureRequestBuffer::getBufferId() const {
  return mBufferId;
}

// Returns the acquire fence to wait on before writing into the buffer.
// May be null or invalid when no waiting is required.
sp<Fence> CaptureRequestBuffer::getFence() const {
  return mFence;
}
240
// Constructs the render thread (does not start it - see start()).
// |sessionContext| is held by reference and must outlive this instance.
// When |testMode| is set, the thread renders a test pattern into the input
// surface itself instead of relying on an external producer.
VirtualCameraRenderThread::VirtualCameraRenderThread(
    VirtualCameraSessionContext& sessionContext,
    const Resolution inputSurfaceSize, const Resolution reportedSensorSize,
    std::shared_ptr<ICameraDeviceCallback> cameraDeviceCallback, bool testMode)
    : mCameraDeviceCallback(cameraDeviceCallback),
      mInputSurfaceSize(inputSurfaceSize),
      mReportedSensorSize(reportedSensorSize),
      mTestMode(testMode),
      mSessionContext(sessionContext) {
}

// Requests thread exit and joins the thread before destruction completes.
VirtualCameraRenderThread::~VirtualCameraRenderThread() {
  stop();
  if (mThread.joinable()) {
    mThread.join();
  }
}
258
// Bundles everything needed to process one capture request: frame number,
// the set of output buffers to fill, and the per-request settings.
ProcessCaptureRequestTask::ProcessCaptureRequestTask(
    int frameNumber, const std::vector<CaptureRequestBuffer>& requestBuffers,
    const RequestSettings& requestSettings)
    : mFrameNumber(frameNumber),
      mBuffers(requestBuffers),
      mRequestSettings(requestSettings) {
}

// Returns the frame number of this capture request.
int ProcessCaptureRequestTask::getFrameNumber() const {
  return mFrameNumber;
}

// Returns the output buffers to be filled for this request.
const std::vector<CaptureRequestBuffer>& ProcessCaptureRequestTask::getBuffers()
    const {
  return mBuffers;
}

// Returns the request settings (JPEG quality, thumbnail size & quality, ...)
// associated with this capture.
const RequestSettings& ProcessCaptureRequestTask::getRequestSettings() const {
  return mRequestSettings;
}
279
Jan Sebechlebsky5cb39962023-11-22 17:33:07 +0100280void VirtualCameraRenderThread::enqueueTask(
281 std::unique_ptr<ProcessCaptureRequestTask> task) {
282 std::lock_guard<std::mutex> lock(mLock);
283 mQueue.emplace_back(std::move(task));
284 mCondVar.notify_one();
285}
286
// Drains every task still waiting in the queue, reporting each one back to
// the camera client as a flushed request (ERROR_REQUEST + errored buffers).
// NOTE(review): mLock is held while flushCaptureRequest invokes the client
// callbacks - confirm the callback implementation never re-enters this class.
void VirtualCameraRenderThread::flush() {
  std::lock_guard<std::mutex> lock(mLock);
  while (!mQueue.empty()) {
    std::unique_ptr<ProcessCaptureRequestTask> task = std::move(mQueue.front());
    mQueue.pop_front();
    flushCaptureRequest(*task);
  }
}
295
// Spawns the render thread; threadLoop() runs until stop() is called.
void VirtualCameraRenderThread::start() {
  mThread = std::thread(&VirtualCameraRenderThread::threadLoop, this);
}
299
300void VirtualCameraRenderThread::stop() {
301 {
302 std::lock_guard<std::mutex> lock(mLock);
303 mPendingExit = true;
304 mCondVar.notify_one();
305 }
306}
307
// Blocks until the render thread publishes its input surface (done early in
// threadLoop()) and returns it.
// NOTE(review): std::promise::get_future may only be called once, so this
// getter must not be invoked more than once per thread lifetime - confirm
// callers respect that.
sp<Surface> VirtualCameraRenderThread::getInputSurface() {
  return mInputSurfacePromise.get_future().get();
}
311
// Blocks until a task is queued or an exit is requested. Returns the next
// task to process, or nullptr when the thread should shut down.
std::unique_ptr<ProcessCaptureRequestTask>
VirtualCameraRenderThread::dequeueTask() {
  std::unique_lock<std::mutex> lock(mLock);
  // Clang's thread safety analysis doesn't perform alias analysis,
  // so it doesn't support moveable std::unique_lock.
  //
  // Lock assertion below is basically explicit declaration that
  // the lock is held in this scope, which is true, since it's only
  // released during waiting inside mCondVar.wait calls.
  ScopedLockAssertion lockAssertion(mLock);

  mCondVar.wait(lock, [this]() REQUIRES(mLock) {
    return mPendingExit || !mQueue.empty();
  });
  // A pending exit wins over any remaining queued work.
  if (mPendingExit) {
    return nullptr;
  }
  std::unique_ptr<ProcessCaptureRequestTask> task = std::move(mQueue.front());
  mQueue.pop_front();
  return task;
}
333
// Render thread entry point: creates the EGL display context, the YUV and
// RGBA texture programs and the input surface texture on this thread,
// publishes the input surface via the promise, then processes capture
// request tasks until dequeueTask() signals exit.
void VirtualCameraRenderThread::threadLoop() {
  ALOGV("Render thread starting");

  mEglDisplayContext = std::make_unique<EglDisplayContext>();
  mEglTextureYuvProgram =
      std::make_unique<EglTextureProgram>(EglTextureProgram::TextureFormat::YUV);
  mEglTextureRgbProgram = std::make_unique<EglTextureProgram>(
      EglTextureProgram::TextureFormat::RGBA);
  mEglSurfaceTexture = std::make_unique<EglSurfaceTexture>(
      mInputSurfaceSize.width, mInputSurfaceSize.height);
  // Unblocks getInputSurface() callers.
  mInputSurfacePromise.set_value(mEglSurfaceTexture->getSurface());

  while (std::unique_ptr<ProcessCaptureRequestTask> task = dequeueTask()) {
    processCaptureRequest(*task);
  }

  ALOGV("Render thread exiting");
}
352
353void VirtualCameraRenderThread::processCaptureRequest(
354 const ProcessCaptureRequestTask& request) {
355 const std::chrono::nanoseconds timestamp =
356 std::chrono::duration_cast<std::chrono::nanoseconds>(
357 std::chrono::steady_clock::now().time_since_epoch());
358
359 CaptureResult captureResult;
360 captureResult.fmqResultSize = 0;
361 captureResult.frameNumber = request.getFrameNumber();
Jan Sebechlebskyb0d8cab2023-11-28 10:55:04 +0100362 // Partial result needs to be set to 1 when metadata are present.
Jan Sebechlebsky5cb39962023-11-22 17:33:07 +0100363 captureResult.partialResult = 1;
364 captureResult.inputBuffer.streamId = -1;
365 captureResult.physicalCameraMetadata.resize(0);
Jan Sebechlebsky4ce32082024-02-14 16:02:11 +0100366 captureResult.result = createCaptureResultMetadata(
367 timestamp, request.getRequestSettings(), mReportedSensorSize);
Jan Sebechlebsky5cb39962023-11-22 17:33:07 +0100368
369 const std::vector<CaptureRequestBuffer>& buffers = request.getBuffers();
370 captureResult.outputBuffers.resize(buffers.size());
371
372 if (mTestMode) {
373 // In test mode let's just render something to the Surface ourselves.
374 renderTestPatternYCbCr420(mEglSurfaceTexture->getSurface(),
375 request.getFrameNumber());
376 }
377
378 mEglSurfaceTexture->updateTexture();
379
380 for (int i = 0; i < buffers.size(); ++i) {
381 const CaptureRequestBuffer& reqBuffer = buffers[i];
382 StreamBuffer& resBuffer = captureResult.outputBuffers[i];
383 resBuffer.streamId = reqBuffer.getStreamId();
384 resBuffer.bufferId = reqBuffer.getBufferId();
385 resBuffer.status = BufferStatus::OK;
386
387 const std::optional<Stream> streamConfig =
388 mSessionContext.getStreamConfig(reqBuffer.getStreamId());
389
390 if (!streamConfig.has_value()) {
391 resBuffer.status = BufferStatus::ERROR;
392 continue;
393 }
394
395 auto status = streamConfig->format == PixelFormat::BLOB
Jan Sebechlebsky4ce32082024-02-14 16:02:11 +0100396 ? renderIntoBlobStreamBuffer(
397 reqBuffer.getStreamId(), reqBuffer.getBufferId(),
398 request.getRequestSettings(), reqBuffer.getFence())
Jan Sebechlebsky5cb39962023-11-22 17:33:07 +0100399 : renderIntoImageStreamBuffer(reqBuffer.getStreamId(),
400 reqBuffer.getBufferId(),
401 reqBuffer.getFence());
402 if (!status.isOk()) {
403 resBuffer.status = BufferStatus::ERROR;
404 }
405 }
406
407 std::vector<NotifyMsg> notifyMsg{
408 createShutterNotifyMsg(request.getFrameNumber(), timestamp)};
409 for (const StreamBuffer& resBuffer : captureResult.outputBuffers) {
410 if (resBuffer.status != BufferStatus::OK) {
411 notifyMsg.push_back(createBufferErrorNotifyMsg(request.getFrameNumber(),
412 resBuffer.streamId));
413 }
414 }
415
416 auto status = mCameraDeviceCallback->notify(notifyMsg);
417 if (!status.isOk()) {
418 ALOGE("%s: notify call failed: %s", __func__,
419 status.getDescription().c_str());
420 return;
421 }
422
423 std::vector<::aidl::android::hardware::camera::device::CaptureResult>
424 captureResults(1);
425 captureResults[0] = std::move(captureResult);
426
427 status = mCameraDeviceCallback->processCaptureResult(captureResults);
428 if (!status.isOk()) {
429 ALOGE("%s: processCaptureResult call failed: %s", __func__,
430 status.getDescription().c_str());
431 return;
432 }
433
434 ALOGD("%s: Successfully called processCaptureResult", __func__);
435}
436
437void VirtualCameraRenderThread::flushCaptureRequest(
438 const ProcessCaptureRequestTask& request) {
Jan Sebechlebsky5cb39962023-11-22 17:33:07 +0100439 CaptureResult captureResult;
440 captureResult.fmqResultSize = 0;
441 captureResult.frameNumber = request.getFrameNumber();
442 captureResult.inputBuffer.streamId = -1;
Jan Sebechlebsky5cb39962023-11-22 17:33:07 +0100443
444 const std::vector<CaptureRequestBuffer>& buffers = request.getBuffers();
445 captureResult.outputBuffers.resize(buffers.size());
446
447 for (int i = 0; i < buffers.size(); ++i) {
448 const CaptureRequestBuffer& reqBuffer = buffers[i];
449 StreamBuffer& resBuffer = captureResult.outputBuffers[i];
450 resBuffer.streamId = reqBuffer.getStreamId();
451 resBuffer.bufferId = reqBuffer.getBufferId();
452 resBuffer.status = BufferStatus::ERROR;
453 sp<Fence> fence = reqBuffer.getFence();
454 if (fence != nullptr && fence->isValid()) {
455 resBuffer.releaseFence.fds.emplace_back(fence->dup());
456 }
457 }
458
459 auto status = mCameraDeviceCallback->notify(
460 {createRequestErrorNotifyMsg(request.getFrameNumber())});
461 if (!status.isOk()) {
462 ALOGE("%s: notify call failed: %s", __func__,
463 status.getDescription().c_str());
464 return;
465 }
466
467 std::vector<::aidl::android::hardware::camera::device::CaptureResult>
468 captureResults(1);
469 captureResults[0] = std::move(captureResult);
470
471 status = mCameraDeviceCallback->processCaptureResult(captureResults);
472 if (!status.isOk()) {
473 ALOGE("%s: processCaptureResult call failed: %s", __func__,
474 status.getDescription().c_str());
475 }
476}
477
Jan Sebechlebsky4ce32082024-02-14 16:02:11 +0100478std::vector<uint8_t> VirtualCameraRenderThread::createThumbnail(
479 const Resolution resolution, const int quality) {
480 if (resolution.width == 0 || resolution.height == 0) {
481 ALOGV("%s: Skipping thumbnail creation, zero size requested", __func__);
482 return {};
483 }
484
485 ALOGV("%s: Creating thumbnail with size %d x %d, quality %d", __func__,
486 resolution.width, resolution.height, quality);
487 std::shared_ptr<EglFrameBuffer> framebuffer = allocateTemporaryFramebuffer(
488 mEglDisplayContext->getEglDisplay(), resolution.width, resolution.height);
489 if (framebuffer == nullptr) {
490 ALOGE(
491 "Failed to allocate temporary framebuffer for JPEG thumbnail "
492 "compression");
493 return {};
494 }
495
496 // TODO(b/324383963) Add support for letterboxing if the thumbnail size
497 // doesn't correspond
498 // to input texture aspect ratio.
499 if (!renderIntoEglFramebuffer(*framebuffer).isOk()) {
500 ALOGE(
501 "Failed to render input texture into temporary framebuffer for JPEG "
502 "thumbnail");
503 return {};
504 }
505
506 std::shared_ptr<AHardwareBuffer> inHwBuffer = framebuffer->getHardwareBuffer();
507 GraphicBuffer* gBuffer = GraphicBuffer::fromAHardwareBuffer(inHwBuffer.get());
508
509 if (gBuffer->getPixelFormat() != HAL_PIXEL_FORMAT_YCbCr_420_888) {
510 // This should never happen since we're allocating the temporary buffer
511 // with YUV420 layout above.
512 ALOGE("%s: Cannot compress non-YUV buffer (pixelFormat %d)", __func__,
513 gBuffer->getPixelFormat());
514 return {};
515 }
516
Jan Sebechlebsky43543222024-02-16 12:50:32 +0100517 YCbCrLockGuard yCbCrLock(inHwBuffer, AHARDWAREBUFFER_USAGE_CPU_READ_OFTEN);
518 if (yCbCrLock.getStatus() != NO_ERROR) {
Jan Sebechlebsky4ce32082024-02-14 16:02:11 +0100519 ALOGE("%s: Failed to lock graphic buffer while generating thumbnail: %d",
Jan Sebechlebsky43543222024-02-16 12:50:32 +0100520 __func__, yCbCrLock.getStatus());
Jan Sebechlebsky4ce32082024-02-14 16:02:11 +0100521 return {};
522 }
523
524 std::vector<uint8_t> compressedThumbnail;
525 compressedThumbnail.resize(kJpegThumbnailBufferSize);
526 ALOGE("%s: Compressing thumbnail %d x %d", __func__, gBuffer->getWidth(),
527 gBuffer->getHeight());
Jan Sebechlebsky43543222024-02-16 12:50:32 +0100528 std::optional<size_t> compressedSize = compressJpeg(
529 gBuffer->getWidth(), gBuffer->getHeight(), quality, *yCbCrLock, {},
530 compressedThumbnail.size(), compressedThumbnail.data());
Jan Sebechlebsky4ce32082024-02-14 16:02:11 +0100531 if (!compressedSize.has_value()) {
532 ALOGE("%s: Failed to compress jpeg thumbnail", __func__);
533 return {};
534 }
535 compressedThumbnail.resize(compressedSize.value());
536 return compressedThumbnail;
537}
538
// Renders the current input frame into the BLOB (JPEG) stream buffer
// identified by |streamId|/|bufferId|: the frame is rendered into a
// temporary YUV framebuffer, JPEG-compressed together with EXIF data (and
// optional thumbnail) into the output buffer, and terminated with the
// mandatory CameraBlob footer at the end of the buffer. |fence| is the
// buffer's acquire fence. Returns INTERNAL_ERROR on any failure.
ndk::ScopedAStatus VirtualCameraRenderThread::renderIntoBlobStreamBuffer(
    const int streamId, const int bufferId,
    const RequestSettings& requestSettings, sp<Fence> fence) {
  std::shared_ptr<AHardwareBuffer> hwBuffer =
      mSessionContext.fetchHardwareBuffer(streamId, bufferId);
  if (hwBuffer == nullptr) {
    ALOGE("%s: Failed to fetch hardware buffer %d for streamId %d", __func__,
          bufferId, streamId);
    return cameraStatus(Status::INTERNAL_ERROR);
  }

  std::optional<Stream> stream = mSessionContext.getStreamConfig(streamId);
  if (!stream.has_value()) {
    ALOGE("%s, failed to fetch information about stream %d", __func__, streamId);
    return cameraStatus(Status::INTERNAL_ERROR);
  }

  ALOGV("%s: Rendering JPEG with size %d x %d, quality %d", __func__,
        stream->width, stream->height, requestSettings.jpegQuality);

  // Let's create YUV framebuffer and render the surface into this.
  // This will take care about rescaling as well as potential format conversion.
  std::shared_ptr<EglFrameBuffer> framebuffer = allocateTemporaryFramebuffer(
      mEglDisplayContext->getEglDisplay(), stream->width, stream->height);
  if (framebuffer == nullptr) {
    ALOGE("Failed to allocate temporary framebuffer for JPEG compression");
    return cameraStatus(Status::INTERNAL_ERROR);
  }

  // Render into temporary framebuffer.
  ndk::ScopedAStatus status = renderIntoEglFramebuffer(*framebuffer);
  if (!status.isOk()) {
    ALOGE("Failed to render input texture into temporary framebuffer");
    return status;
  }

  // Lock the output BLOB buffer for CPU access; waits on the acquire fence.
  PlanesLockGuard planesLock(hwBuffer, AHARDWAREBUFFER_USAGE_CPU_READ_RARELY,
                             fence);
  if (planesLock.getStatus() != OK) {
    return cameraStatus(Status::INTERNAL_ERROR);
  }

  std::shared_ptr<AHardwareBuffer> inHwBuffer = framebuffer->getHardwareBuffer();
  GraphicBuffer* gBuffer = GraphicBuffer::fromAHardwareBuffer(inHwBuffer.get());

  std::optional<size_t> compressedSize;
  if (gBuffer != nullptr) {
    if (gBuffer->getPixelFormat() != HAL_PIXEL_FORMAT_YCbCr_420_888) {
      // This should never happen since we're allocating the temporary buffer
      // with YUV420 layout above.
      ALOGE("%s: Cannot compress non-YUV buffer (pixelFormat %d)", __func__,
            gBuffer->getPixelFormat());
      return cameraStatus(Status::INTERNAL_ERROR);
    }

    YCbCrLockGuard yCbCrLock(inHwBuffer, AHARDWAREBUFFER_USAGE_CPU_READ_OFTEN);
    if (yCbCrLock.getStatus() != OK) {
      return cameraStatus(Status::INTERNAL_ERROR);
    }

    // Compress the rendered frame, reserving sizeof(CameraBlob) at the end
    // of the buffer for the blob footer written below.
    std::vector<uint8_t> app1ExifData =
        createExif(Resolution(stream->width, stream->height),
                   createThumbnail(requestSettings.thumbnailResolution,
                                   requestSettings.thumbnailJpegQuality));
    compressedSize = compressJpeg(
        gBuffer->getWidth(), gBuffer->getHeight(), requestSettings.jpegQuality,
        *yCbCrLock, app1ExifData, stream->bufferSize - sizeof(CameraBlob),
        (*planesLock).planes[0].data);
  } else {
    // No rendered frame available - emit an all-black JPEG (with EXIF but
    // without thumbnail) so the client still receives a valid image.
    std::vector<uint8_t> app1ExifData =
        createExif(Resolution(stream->width, stream->height));
    compressedSize = compressBlackJpeg(
        stream->width, stream->height, requestSettings.jpegQuality, app1ExifData,
        stream->bufferSize - sizeof(CameraBlob), (*planesLock).planes[0].data);
  }

  if (!compressedSize.has_value()) {
    ALOGE("%s: Failed to compress JPEG image", __func__);
    return cameraStatus(Status::INTERNAL_ERROR);
  }

  // The BLOB stream contract requires a CameraBlob footer (id + actual JPEG
  // size) placed at the very end of the output buffer.
  CameraBlob cameraBlob{
      .blobId = CameraBlobId::JPEG,
      .blobSizeBytes = static_cast<int32_t>(compressedSize.value())};

  memcpy(reinterpret_cast<uint8_t*>((*planesLock).planes[0].data) +
             (stream->bufferSize - sizeof(cameraBlob)),
         &cameraBlob, sizeof(cameraBlob));

  ALOGV("%s: Successfully compressed JPEG image, resulting size %zu B",
        __func__, compressedSize.value());

  return ndk::ScopedAStatus::ok();
}
633
// Renders the current input texture directly into the output buffer of the
// given stream via EGL, waiting on |fence| (the buffer's acquire fence)
// before drawing. Used for all non-BLOB stream formats.
ndk::ScopedAStatus VirtualCameraRenderThread::renderIntoImageStreamBuffer(
    int streamId, int bufferId, sp<Fence> fence) {
  ALOGV("%s", __func__);

  const std::chrono::nanoseconds before =
      std::chrono::duration_cast<std::chrono::nanoseconds>(
          std::chrono::steady_clock::now().time_since_epoch());

  // Fetch (or lazily create) the EGL framebuffer wrapping this stream buffer.
  std::shared_ptr<EglFrameBuffer> framebuffer =
      mSessionContext.fetchOrCreateEglFramebuffer(
          mEglDisplayContext->getEglDisplay(), streamId, bufferId);
  if (framebuffer == nullptr) {
    ALOGE(
        "%s: Failed to get EGL framebuffer corresponding to buffer id "
        "%d for streamId %d",
        __func__, bufferId, streamId);
    return cameraStatus(Status::ILLEGAL_ARGUMENT);
  }

  ndk::ScopedAStatus status = renderIntoEglFramebuffer(*framebuffer, fence);

  const std::chrono::nanoseconds after =
      std::chrono::duration_cast<std::chrono::nanoseconds>(
          std::chrono::steady_clock::now().time_since_epoch());

  // Verbose render-duration logging for performance debugging.
  ALOGV("Rendering to buffer %d, stream %d took %lld ns", bufferId, streamId,
        after.count() - before.count());

  return ndk::ScopedAStatus::ok();
}
665
// Renders the current input texture into |framebuffer| on this thread's EGL
// context, optionally waiting for |fence| first. When no frame was ever
// submitted to the input surface, the framebuffer is cleared with a solid
// color instead of sampling the (uninitialized) texture.
ndk::ScopedAStatus VirtualCameraRenderThread::renderIntoEglFramebuffer(
    EglFrameBuffer& framebuffer, sp<Fence> fence) {
  ALOGV("%s", __func__);
  // Wait for fence to clear.
  if (fence != nullptr && fence->isValid()) {
    status_t ret = fence->wait(kAcquireFenceTimeout.count());
    if (ret != 0) {
      ALOGE("Timeout while waiting for the acquire fence for buffer");
      return cameraStatus(Status::INTERNAL_ERROR);
    }
  }

  mEglDisplayContext->makeCurrent();
  framebuffer.beforeDraw();

  sp<GraphicBuffer> textureBuffer = mEglSurfaceTexture->getCurrentBuffer();
  if (textureBuffer == nullptr) {
    // If there's no current buffer, nothing was written to the surface and
    // texture is not initialized yet. Clear the framebuffer with a solid
    // fill instead of rendering the texture.
    // NOTE(review): the clear color is (0.0, 0.5, 0.5) - teal, not black;
    // confirm whether a black fill was intended here.
    glClearColor(0.0f, 0.5f, 0.5f, 0.0f);
    glClear(GL_COLOR_BUFFER_BIT);
  } else {
    // Pick the YUV or RGBA texture program based on the latched buffer's
    // pixel format.
    const bool renderSuccess =
        isYuvFormat(static_cast<PixelFormat>(textureBuffer->getPixelFormat()))
            ? mEglTextureYuvProgram->draw(
                  mEglSurfaceTexture->getTextureId(),
                  mEglSurfaceTexture->getTransformMatrix())
            : mEglTextureRgbProgram->draw(
                  mEglSurfaceTexture->getTextureId(),
                  mEglSurfaceTexture->getTransformMatrix());
    if (!renderSuccess) {
      ALOGE("%s: Failed to render texture", __func__);
      return cameraStatus(Status::INTERNAL_ERROR);
    }
  }
  framebuffer.afterDraw();

  return ndk::ScopedAStatus::ok();
}
706
Jan Sebechlebsky5cb39962023-11-22 17:33:07 +0100707} // namespace virtualcamera
708} // namespace companion
709} // namespace android