/*
 * Copyright (C) 2023 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#define LOG_TAG "VirtualCameraRenderThread"
#include "VirtualCameraRenderThread.h"

#include <chrono>
#include <cstdint>
#include <cstring>
#include <future>
#include <memory>
#include <mutex>
#include <thread>
#include <vector>

#include "Exif.h"
#include "GLES/gl.h"
#include "VirtualCameraDevice.h"
#include "VirtualCameraSessionContext.h"
#include "aidl/android/hardware/camera/common/Status.h"
#include "aidl/android/hardware/camera/device/BufferStatus.h"
#include "aidl/android/hardware/camera/device/CameraBlob.h"
#include "aidl/android/hardware/camera/device/CameraBlobId.h"
#include "aidl/android/hardware/camera/device/CameraMetadata.h"
#include "aidl/android/hardware/camera/device/CaptureResult.h"
#include "aidl/android/hardware/camera/device/ErrorCode.h"
#include "aidl/android/hardware/camera/device/ICameraDeviceCallback.h"
#include "aidl/android/hardware/camera/device/NotifyMsg.h"
#include "aidl/android/hardware/camera/device/ShutterMsg.h"
#include "aidl/android/hardware/camera/device/StreamBuffer.h"
#include "android-base/thread_annotations.h"
#include "android/binder_auto_utils.h"
#include "android/hardware_buffer.h"
#include "ui/GraphicBuffer.h"
#include "util/EglFramebuffer.h"
#include "util/JpegUtil.h"
#include "util/MetadataUtil.h"
#include "util/TestPatternHelper.h"
#include "util/Util.h"
#include "utils/Errors.h"

namespace android {
namespace companion {
namespace virtualcamera {

using ::aidl::android::hardware::camera::common::Status;
using ::aidl::android::hardware::camera::device::BufferStatus;
using ::aidl::android::hardware::camera::device::CameraBlob;
using ::aidl::android::hardware::camera::device::CameraBlobId;
using ::aidl::android::hardware::camera::device::CameraMetadata;
using ::aidl::android::hardware::camera::device::CaptureResult;
using ::aidl::android::hardware::camera::device::ErrorCode;
using ::aidl::android::hardware::camera::device::ErrorMsg;
using ::aidl::android::hardware::camera::device::ICameraDeviceCallback;
using ::aidl::android::hardware::camera::device::NotifyMsg;
using ::aidl::android::hardware::camera::device::ShutterMsg;
using ::aidl::android::hardware::camera::device::Stream;
using ::aidl::android::hardware::camera::device::StreamBuffer;
using ::aidl::android::hardware::graphics::common::PixelFormat;
using ::android::base::ScopedLockAssertion;

using ::android::hardware::camera::common::helper::ExifUtils;

namespace {

using namespace std::chrono_literals;

static constexpr std::chrono::milliseconds kAcquireFenceTimeout = 500ms;

// See REQUEST_PIPELINE_DEPTH in CaptureResult.java.
// This roughly corresponds to the frame latency; we report the documented
// minimum of 2.
static constexpr uint8_t kPipelineDepth = 2;

static constexpr size_t kJpegThumbnailBufferSize = 32 * 1024;  // 32 KiB

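// Builds the metadata attached to every capture result. The JPEG-related keys
// mirror the request settings; the remaining control keys report fixed values
// (AE on, AF off, AWB auto, ...) since no real 3A runs behind the virtual
// camera.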
CameraMetadata createCaptureResultMetadata(
    const std::chrono::nanoseconds timestamp,
    const RequestSettings& requestSettings,
    const Resolution reportedSensorSize) {
  std::unique_ptr<CameraMetadata> metadata =
      MetadataBuilder()
          .setControlAeMode(ANDROID_CONTROL_AE_MODE_ON)
          .setControlAePrecaptureTrigger(
              ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER_IDLE)
          .setControlAfMode(ANDROID_CONTROL_AF_MODE_OFF)
          .setControlAwbMode(ANDROID_CONTROL_AWB_MODE_AUTO)
          .setControlEffectMode(ANDROID_CONTROL_EFFECT_MODE_OFF)
          .setControlMode(ANDROID_CONTROL_MODE_AUTO)
          .setCropRegion(0, 0, reportedSensorSize.width,
                         reportedSensorSize.height)
          .setFaceDetectMode(ANDROID_STATISTICS_FACE_DETECT_MODE_OFF)
          .setFlashState(ANDROID_FLASH_STATE_UNAVAILABLE)
          .setFocalLength(VirtualCameraDevice::kFocalLength)
          .setJpegQuality(requestSettings.jpegQuality)
          .setJpegThumbnailSize(requestSettings.thumbnailResolution.width,
                                requestSettings.thumbnailResolution.height)
          .setJpegThumbnailQuality(requestSettings.thumbnailJpegQuality)
          .setPipelineDepth(kPipelineDepth)
          .setSensorTimestamp(timestamp)
          .build();
  if (metadata == nullptr) {
    ALOGE("%s: Failed to build capture result metadata", __func__);
    return CameraMetadata();
  }
  return std::move(*metadata);
}

NotifyMsg createShutterNotifyMsg(int frameNumber,
                                 std::chrono::nanoseconds timestamp) {
  NotifyMsg msg;
  msg.set<NotifyMsg::Tag::shutter>(ShutterMsg{
      .frameNumber = frameNumber,
      .timestamp = timestamp.count(),
  });
  return msg;
}

NotifyMsg createBufferErrorNotifyMsg(int frameNumber, int streamId) {
  NotifyMsg msg;
  msg.set<NotifyMsg::Tag::error>(ErrorMsg{.frameNumber = frameNumber,
                                          .errorStreamId = streamId,
                                          .errorCode = ErrorCode::ERROR_BUFFER});
  return msg;
}

NotifyMsg createRequestErrorNotifyMsg(int frameNumber) {
  NotifyMsg msg;
  msg.set<NotifyMsg::Tag::error>(ErrorMsg{
      .frameNumber = frameNumber,
      // errorStreamId needs to be set to -1 for ERROR_REQUEST
      // (not tied to specific stream).
      .errorStreamId = -1,
      .errorCode = ErrorCode::ERROR_REQUEST});
  return msg;
}

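// Allocates a width x height YUV420 hardware buffer that is both
// GPU-renderable and CPU-readable, and wraps it in an EglFrameBuffer so the
// input texture can be rendered into it and read back for JPEG compression.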
std::shared_ptr<EglFrameBuffer> allocateTemporaryFramebuffer(
    EGLDisplay eglDisplay, const uint width, const int height) {
  const AHardwareBuffer_Desc desc{
      .width = static_cast<uint32_t>(width),
      .height = static_cast<uint32_t>(height),
      .layers = 1,
      .format = AHARDWAREBUFFER_FORMAT_Y8Cb8Cr8_420,
      .usage = AHARDWAREBUFFER_USAGE_GPU_FRAMEBUFFER |
               AHARDWAREBUFFER_USAGE_CPU_READ_OFTEN,
      .rfu0 = 0,
      .rfu1 = 0};

  AHardwareBuffer* hwBufferPtr;
  int status = AHardwareBuffer_allocate(&desc, &hwBufferPtr);
  if (status != NO_ERROR) {
    ALOGE(
        "%s: Failed to allocate hardware buffer for temporary framebuffer: %d",
        __func__, status);
    return nullptr;
  }

  return std::make_shared<EglFrameBuffer>(
      eglDisplay,
      std::shared_ptr<AHardwareBuffer>(hwBufferPtr, AHardwareBuffer_release));
}

bool isYuvFormat(const PixelFormat pixelFormat) {
  switch (static_cast<android_pixel_format_t>(pixelFormat)) {
    case HAL_PIXEL_FORMAT_YCBCR_422_I:
    case HAL_PIXEL_FORMAT_YCBCR_422_SP:
    case HAL_PIXEL_FORMAT_Y16:
    case HAL_PIXEL_FORMAT_YV12:
    case HAL_PIXEL_FORMAT_YCBCR_420_888:
      return true;
    default:
      return false;
  }
}

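// Builds the EXIF APP1 segment for a JPEG of the given size, optionally
// embedding an already-compressed thumbnail. Returns an empty vector if the
// segment cannot be generated.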
std::vector<uint8_t> createExif(
    Resolution imageSize, const std::vector<uint8_t>& compressedThumbnail = {}) {
  std::unique_ptr<ExifUtils> exifUtils(ExifUtils::create());
  exifUtils->initialize();
  exifUtils->setImageWidth(imageSize.width);
  exifUtils->setImageHeight(imageSize.height);
  // TODO(b/324383963) Set Make/Model and orientation.

  std::vector<uint8_t> app1Data;

  size_t thumbnailDataSize = compressedThumbnail.size();
  const void* thumbnailData =
      thumbnailDataSize > 0
          ? reinterpret_cast<const void*>(compressedThumbnail.data())
          : nullptr;

  if (!exifUtils->generateApp1(thumbnailData, thumbnailDataSize)) {
    ALOGE("%s: Failed to generate APP1 segment for EXIF metadata", __func__);
    return app1Data;
  }

  const uint8_t* data = exifUtils->getApp1Buffer();
  const size_t size = exifUtils->getApp1Length();

  app1Data.insert(app1Data.end(), data, data + size);
  return app1Data;
}

}  // namespace

CaptureRequestBuffer::CaptureRequestBuffer(int streamId, int bufferId,
                                           sp<Fence> fence)
    : mStreamId(streamId), mBufferId(bufferId), mFence(fence) {
}

int CaptureRequestBuffer::getStreamId() const {
  return mStreamId;
}

int CaptureRequestBuffer::getBufferId() const {
  return mBufferId;
}

sp<Fence> CaptureRequestBuffer::getFence() const {
  return mFence;
}

VirtualCameraRenderThread::VirtualCameraRenderThread(
    VirtualCameraSessionContext& sessionContext,
    const Resolution inputSurfaceSize, const Resolution reportedSensorSize,
    std::shared_ptr<ICameraDeviceCallback> cameraDeviceCallback, bool testMode)
    : mCameraDeviceCallback(cameraDeviceCallback),
      mInputSurfaceSize(inputSurfaceSize),
      mReportedSensorSize(reportedSensorSize),
      mTestMode(testMode),
      mSessionContext(sessionContext) {
}

VirtualCameraRenderThread::~VirtualCameraRenderThread() {
  stop();
  if (mThread.joinable()) {
    mThread.join();
  }
}

ProcessCaptureRequestTask::ProcessCaptureRequestTask(
    int frameNumber, const std::vector<CaptureRequestBuffer>& requestBuffers,
    const RequestSettings& requestSettings)
    : mFrameNumber(frameNumber),
      mBuffers(requestBuffers),
      mRequestSettings(requestSettings) {
}

int ProcessCaptureRequestTask::getFrameNumber() const {
  return mFrameNumber;
}

const std::vector<CaptureRequestBuffer>& ProcessCaptureRequestTask::getBuffers()
    const {
  return mBuffers;
}

const RequestSettings& ProcessCaptureRequestTask::getRequestSettings() const {
  return mRequestSettings;
}

void VirtualCameraRenderThread::enqueueTask(
    std::unique_ptr<ProcessCaptureRequestTask> task) {
  std::lock_guard<std::mutex> lock(mLock);
  mQueue.emplace_back(std::move(task));
  mCondVar.notify_one();
}

void VirtualCameraRenderThread::flush() {
  std::lock_guard<std::mutex> lock(mLock);
  while (!mQueue.empty()) {
    std::unique_ptr<ProcessCaptureRequestTask> task = std::move(mQueue.front());
    mQueue.pop_front();
    flushCaptureRequest(*task);
  }
}

void VirtualCameraRenderThread::start() {
  mThread = std::thread(&VirtualCameraRenderThread::threadLoop, this);
}

void VirtualCameraRenderThread::stop() {
  {
    std::lock_guard<std::mutex> lock(mLock);
    mPendingExit = true;
    mCondVar.notify_one();
  }
}

sp<Surface> VirtualCameraRenderThread::getInputSurface() {
  return mInputSurfacePromise.get_future().get();
}

std::unique_ptr<ProcessCaptureRequestTask>
VirtualCameraRenderThread::dequeueTask() {
  std::unique_lock<std::mutex> lock(mLock);
  // Clang's thread safety analysis doesn't perform alias analysis,
  // so it doesn't support movable std::unique_lock.
  //
  // The lock assertion below is an explicit declaration that the lock is
  // held in this scope, which is true, since it's only released while
  // waiting inside the mCondVar.wait calls.
  ScopedLockAssertion lockAssertion(mLock);

  mCondVar.wait(lock, [this]() REQUIRES(mLock) {
    return mPendingExit || !mQueue.empty();
  });
  if (mPendingExit) {
    return nullptr;
  }
  std::unique_ptr<ProcessCaptureRequestTask> task = std::move(mQueue.front());
  mQueue.pop_front();
  return task;
}

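// Main loop of the render thread: sets up the EGL display context, the YUV
// and RGBA texture programs and the input surface texture, publishes the
// input Surface, then keeps processing queued capture requests until stop()
// is requested.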
void VirtualCameraRenderThread::threadLoop() {
  ALOGV("Render thread starting");

  mEglDisplayContext = std::make_unique<EglDisplayContext>();
  mEglTextureYuvProgram =
      std::make_unique<EglTextureProgram>(EglTextureProgram::TextureFormat::YUV);
  mEglTextureRgbProgram = std::make_unique<EglTextureProgram>(
      EglTextureProgram::TextureFormat::RGBA);
  mEglSurfaceTexture = std::make_unique<EglSurfaceTexture>(
      mInputSurfaceSize.width, mInputSurfaceSize.height);
  mInputSurfacePromise.set_value(mEglSurfaceTexture->getSurface());

  while (std::unique_ptr<ProcessCaptureRequestTask> task = dequeueTask()) {
    processCaptureRequest(*task);
  }

  ALOGV("Render thread exiting");
}

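// Renders the latest input frame into every output buffer of the request
// (JPEG for BLOB streams, direct EGL rendering otherwise), then sends the
// shutter notification, any per-buffer errors and the capture result back to
// the camera framework.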
void VirtualCameraRenderThread::processCaptureRequest(
    const ProcessCaptureRequestTask& request) {
  const std::chrono::nanoseconds timestamp =
      std::chrono::duration_cast<std::chrono::nanoseconds>(
          std::chrono::steady_clock::now().time_since_epoch());

  CaptureResult captureResult;
  captureResult.fmqResultSize = 0;
  captureResult.frameNumber = request.getFrameNumber();
  // Partial result needs to be set to 1 when metadata are present.
  captureResult.partialResult = 1;
  captureResult.inputBuffer.streamId = -1;
  captureResult.physicalCameraMetadata.resize(0);
  captureResult.result = createCaptureResultMetadata(
      timestamp, request.getRequestSettings(), mReportedSensorSize);

  const std::vector<CaptureRequestBuffer>& buffers = request.getBuffers();
  captureResult.outputBuffers.resize(buffers.size());

  if (mTestMode) {
    // In test mode let's just render something to the Surface ourselves.
    renderTestPatternYCbCr420(mEglSurfaceTexture->getSurface(),
                              request.getFrameNumber());
  }

  mEglSurfaceTexture->updateTexture();

  for (int i = 0; i < buffers.size(); ++i) {
    const CaptureRequestBuffer& reqBuffer = buffers[i];
    StreamBuffer& resBuffer = captureResult.outputBuffers[i];
    resBuffer.streamId = reqBuffer.getStreamId();
    resBuffer.bufferId = reqBuffer.getBufferId();
    resBuffer.status = BufferStatus::OK;

    const std::optional<Stream> streamConfig =
        mSessionContext.getStreamConfig(reqBuffer.getStreamId());

    if (!streamConfig.has_value()) {
      resBuffer.status = BufferStatus::ERROR;
      continue;
    }

    auto status = streamConfig->format == PixelFormat::BLOB
                      ? renderIntoBlobStreamBuffer(
                            reqBuffer.getStreamId(), reqBuffer.getBufferId(),
                            request.getRequestSettings(), reqBuffer.getFence())
                      : renderIntoImageStreamBuffer(reqBuffer.getStreamId(),
                                                    reqBuffer.getBufferId(),
                                                    reqBuffer.getFence());
    if (!status.isOk()) {
      resBuffer.status = BufferStatus::ERROR;
    }
  }

  std::vector<NotifyMsg> notifyMsg{
      createShutterNotifyMsg(request.getFrameNumber(), timestamp)};
  for (const StreamBuffer& resBuffer : captureResult.outputBuffers) {
    if (resBuffer.status != BufferStatus::OK) {
      notifyMsg.push_back(createBufferErrorNotifyMsg(request.getFrameNumber(),
                                                     resBuffer.streamId));
    }
  }

  auto status = mCameraDeviceCallback->notify(notifyMsg);
  if (!status.isOk()) {
    ALOGE("%s: notify call failed: %s", __func__,
          status.getDescription().c_str());
    return;
  }

  std::vector<::aidl::android::hardware::camera::device::CaptureResult>
      captureResults(1);
  captureResults[0] = std::move(captureResult);

  status = mCameraDeviceCallback->processCaptureResult(captureResults);
  if (!status.isOk()) {
    ALOGE("%s: processCaptureResult call failed: %s", __func__,
          status.getDescription().c_str());
    return;
  }

  ALOGD("%s: Successfully called processCaptureResult", __func__);
}

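// Returns all buffers of a flushed (not rendered) request to the framework
// with ERROR status and notifies it with ERROR_REQUEST.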
void VirtualCameraRenderThread::flushCaptureRequest(
    const ProcessCaptureRequestTask& request) {
  CaptureResult captureResult;
  captureResult.fmqResultSize = 0;
  captureResult.frameNumber = request.getFrameNumber();
  captureResult.inputBuffer.streamId = -1;

  const std::vector<CaptureRequestBuffer>& buffers = request.getBuffers();
  captureResult.outputBuffers.resize(buffers.size());

  for (int i = 0; i < buffers.size(); ++i) {
    const CaptureRequestBuffer& reqBuffer = buffers[i];
    StreamBuffer& resBuffer = captureResult.outputBuffers[i];
    resBuffer.streamId = reqBuffer.getStreamId();
    resBuffer.bufferId = reqBuffer.getBufferId();
    resBuffer.status = BufferStatus::ERROR;
    sp<Fence> fence = reqBuffer.getFence();
    if (fence != nullptr && fence->isValid()) {
      resBuffer.releaseFence.fds.emplace_back(fence->dup());
    }
  }

  auto status = mCameraDeviceCallback->notify(
      {createRequestErrorNotifyMsg(request.getFrameNumber())});
  if (!status.isOk()) {
    ALOGE("%s: notify call failed: %s", __func__,
          status.getDescription().c_str());
    return;
  }

  std::vector<::aidl::android::hardware::camera::device::CaptureResult>
      captureResults(1);
  captureResults[0] = std::move(captureResult);

  status = mCameraDeviceCallback->processCaptureResult(captureResults);
  if (!status.isOk()) {
    ALOGE("%s: processCaptureResult call failed: %s", __func__,
          status.getDescription().c_str());
  }
}

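// Renders the current input frame into a temporary YUV framebuffer of the
// requested resolution and JPEG-compresses it into a buffer of at most
// kJpegThumbnailBufferSize bytes. Returns an empty vector on failure.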
std::vector<uint8_t> VirtualCameraRenderThread::createThumbnail(
    const Resolution resolution, const int quality) {
  if (resolution.width == 0 || resolution.height == 0) {
    ALOGV("%s: Skipping thumbnail creation, zero size requested", __func__);
    return {};
  }

  ALOGV("%s: Creating thumbnail with size %d x %d, quality %d", __func__,
        resolution.width, resolution.height, quality);
  std::shared_ptr<EglFrameBuffer> framebuffer = allocateTemporaryFramebuffer(
      mEglDisplayContext->getEglDisplay(), resolution.width, resolution.height);
  if (framebuffer == nullptr) {
    ALOGE(
        "Failed to allocate temporary framebuffer for JPEG thumbnail "
        "compression");
    return {};
  }

  // TODO(b/324383963) Add support for letterboxing if the thumbnail size
  // doesn't correspond to the input texture aspect ratio.
  if (!renderIntoEglFramebuffer(*framebuffer).isOk()) {
    ALOGE(
        "Failed to render input texture into temporary framebuffer for JPEG "
        "thumbnail");
    return {};
  }

  std::shared_ptr<AHardwareBuffer> inHwBuffer = framebuffer->getHardwareBuffer();
  GraphicBuffer* gBuffer = GraphicBuffer::fromAHardwareBuffer(inHwBuffer.get());

  if (gBuffer->getPixelFormat() != HAL_PIXEL_FORMAT_YCbCr_420_888) {
    // This should never happen since we're allocating the temporary buffer
    // with YUV420 layout above.
    ALOGE("%s: Cannot compress non-YUV buffer (pixelFormat %d)", __func__,
          gBuffer->getPixelFormat());
    return {};
  }

  YCbCrLockGuard yCbCrLock(inHwBuffer, AHARDWAREBUFFER_USAGE_CPU_READ_OFTEN);
  if (yCbCrLock.getStatus() != NO_ERROR) {
    ALOGE("%s: Failed to lock graphic buffer while generating thumbnail: %d",
          __func__, yCbCrLock.getStatus());
    return {};
  }

  std::vector<uint8_t> compressedThumbnail;
  compressedThumbnail.resize(kJpegThumbnailBufferSize);
  ALOGV("%s: Compressing thumbnail %d x %d", __func__, gBuffer->getWidth(),
        gBuffer->getHeight());
  std::optional<size_t> compressedSize = compressJpeg(
      gBuffer->getWidth(), gBuffer->getHeight(), quality, *yCbCrLock, {},
      compressedThumbnail.size(), compressedThumbnail.data());
  if (!compressedSize.has_value()) {
    ALOGE("%s: Failed to compress jpeg thumbnail", __func__);
    return {};
  }
  compressedThumbnail.resize(compressedSize.value());
  return compressedThumbnail;
}

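// Produces the JPEG for a BLOB stream buffer: renders the input frame into a
// temporary YUV framebuffer, compresses it together with the EXIF APP1
// segment (and optional thumbnail), and appends the CameraBlob transport
// header at the end of the output buffer. Falls back to a black JPEG when the
// temporary framebuffer cannot be mapped as a GraphicBuffer.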
ndk::ScopedAStatus VirtualCameraRenderThread::renderIntoBlobStreamBuffer(
    const int streamId, const int bufferId,
    const RequestSettings& requestSettings, sp<Fence> fence) {
  std::shared_ptr<AHardwareBuffer> hwBuffer =
      mSessionContext.fetchHardwareBuffer(streamId, bufferId);
  if (hwBuffer == nullptr) {
    ALOGE("%s: Failed to fetch hardware buffer %d for streamId %d", __func__,
          bufferId, streamId);
    return cameraStatus(Status::INTERNAL_ERROR);
  }

  std::optional<Stream> stream = mSessionContext.getStreamConfig(streamId);
  if (!stream.has_value()) {
    ALOGE("%s: Failed to fetch information about stream %d", __func__, streamId);
    return cameraStatus(Status::INTERNAL_ERROR);
  }

  ALOGV("%s: Rendering JPEG with size %d x %d, quality %d", __func__,
        stream->width, stream->height, requestSettings.jpegQuality);

  // Create a YUV framebuffer and render the surface into it.
  // This takes care of rescaling as well as potential format conversion.
  std::shared_ptr<EglFrameBuffer> framebuffer = allocateTemporaryFramebuffer(
      mEglDisplayContext->getEglDisplay(), stream->width, stream->height);
  if (framebuffer == nullptr) {
    ALOGE("Failed to allocate temporary framebuffer for JPEG compression");
    return cameraStatus(Status::INTERNAL_ERROR);
  }

  // Render into temporary framebuffer.
  ndk::ScopedAStatus status = renderIntoEglFramebuffer(*framebuffer);
  if (!status.isOk()) {
    ALOGE("Failed to render input texture into temporary framebuffer");
    return status;
  }

  PlanesLockGuard planesLock(hwBuffer, AHARDWAREBUFFER_USAGE_CPU_READ_RARELY,
                             fence);
  if (planesLock.getStatus() != OK) {
    return cameraStatus(Status::INTERNAL_ERROR);
  }

  std::shared_ptr<AHardwareBuffer> inHwBuffer = framebuffer->getHardwareBuffer();
  GraphicBuffer* gBuffer = GraphicBuffer::fromAHardwareBuffer(inHwBuffer.get());

  std::optional<size_t> compressedSize;
  if (gBuffer != nullptr) {
    if (gBuffer->getPixelFormat() != HAL_PIXEL_FORMAT_YCbCr_420_888) {
      // This should never happen since we're allocating the temporary buffer
      // with YUV420 layout above.
      ALOGE("%s: Cannot compress non-YUV buffer (pixelFormat %d)", __func__,
            gBuffer->getPixelFormat());
      return cameraStatus(Status::INTERNAL_ERROR);
    }

    YCbCrLockGuard yCbCrLock(inHwBuffer, AHARDWAREBUFFER_USAGE_CPU_READ_OFTEN);
    if (yCbCrLock.getStatus() != OK) {
      return cameraStatus(Status::INTERNAL_ERROR);
    }

    std::vector<uint8_t> app1ExifData =
        createExif(Resolution(stream->width, stream->height),
                   createThumbnail(requestSettings.thumbnailResolution,
                                   requestSettings.thumbnailJpegQuality));
    compressedSize = compressJpeg(
        gBuffer->getWidth(), gBuffer->getHeight(), requestSettings.jpegQuality,
        *yCbCrLock, app1ExifData, stream->bufferSize - sizeof(CameraBlob),
        (*planesLock).planes[0].data);
  } else {
    std::vector<uint8_t> app1ExifData =
        createExif(Resolution(stream->width, stream->height));
    compressedSize = compressBlackJpeg(
        stream->width, stream->height, requestSettings.jpegQuality, app1ExifData,
        stream->bufferSize - sizeof(CameraBlob), (*planesLock).planes[0].data);
  }

  if (!compressedSize.has_value()) {
    ALOGE("%s: Failed to compress JPEG image", __func__);
    return cameraStatus(Status::INTERNAL_ERROR);
  }

  CameraBlob cameraBlob{
      .blobId = CameraBlobId::JPEG,
      .blobSizeBytes = static_cast<int32_t>(compressedSize.value())};

  memcpy(reinterpret_cast<uint8_t*>((*planesLock).planes[0].data) +
             (stream->bufferSize - sizeof(cameraBlob)),
         &cameraBlob, sizeof(cameraBlob));

  ALOGV("%s: Successfully compressed JPEG image, resulting size %zu B",
        __func__, compressedSize.value());

  return ndk::ScopedAStatus::ok();
}

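// Renders the current input frame into the EGL framebuffer wrapping the
// output stream buffer and logs how long the rendering took.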
ndk::ScopedAStatus VirtualCameraRenderThread::renderIntoImageStreamBuffer(
    int streamId, int bufferId, sp<Fence> fence) {
  ALOGV("%s", __func__);

  const std::chrono::nanoseconds before =
      std::chrono::duration_cast<std::chrono::nanoseconds>(
          std::chrono::steady_clock::now().time_since_epoch());

  // Fetch (or lazily create) the EGL framebuffer wrapping the output buffer.
  std::shared_ptr<EglFrameBuffer> framebuffer =
      mSessionContext.fetchOrCreateEglFramebuffer(
          mEglDisplayContext->getEglDisplay(), streamId, bufferId);
  if (framebuffer == nullptr) {
    ALOGE(
        "%s: Failed to get EGL framebuffer corresponding to buffer id "
        "%d for streamId %d",
        __func__, bufferId, streamId);
    return cameraStatus(Status::ILLEGAL_ARGUMENT);
  }

  ndk::ScopedAStatus status = renderIntoEglFramebuffer(*framebuffer, fence);

  const std::chrono::nanoseconds after =
      std::chrono::duration_cast<std::chrono::nanoseconds>(
          std::chrono::steady_clock::now().time_since_epoch());

  ALOGV("Rendering to buffer %d, stream %d took %lld ns", bufferId, streamId,
        after.count() - before.count());

  return status;
}

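// Waits for the buffer's acquire fence, then draws the most recent input
// frame into the framebuffer, choosing the YUV or RGBA texture program based
// on the input buffer's pixel format. If no input frame has been produced
// yet, the framebuffer is cleared to a solid color instead.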
ndk::ScopedAStatus VirtualCameraRenderThread::renderIntoEglFramebuffer(
    EglFrameBuffer& framebuffer, sp<Fence> fence) {
  ALOGV("%s", __func__);
  // Wait for fence to clear.
  if (fence != nullptr && fence->isValid()) {
    status_t ret = fence->wait(kAcquireFenceTimeout.count());
    if (ret != 0) {
      ALOGE("Timeout while waiting for the acquire fence for buffer");
      return cameraStatus(Status::INTERNAL_ERROR);
    }
  }

  mEglDisplayContext->makeCurrent();
  framebuffer.beforeDraw();

  sp<GraphicBuffer> textureBuffer = mEglSurfaceTexture->getCurrentBuffer();
  if (textureBuffer == nullptr) {
    // If there's no current buffer, nothing was written to the surface and
    // the texture is not initialized yet. Clear the framebuffer to a solid
    // color instead of rendering the texture.
    glClearColor(0.0f, 0.5f, 0.5f, 0.0f);
    glClear(GL_COLOR_BUFFER_BIT);
  } else {
    const bool renderSuccess =
        isYuvFormat(static_cast<PixelFormat>(textureBuffer->getPixelFormat()))
            ? mEglTextureYuvProgram->draw(
                  mEglSurfaceTexture->getTextureId(),
                  mEglSurfaceTexture->getTransformMatrix())
            : mEglTextureRgbProgram->draw(
                  mEglSurfaceTexture->getTextureId(),
                  mEglSurfaceTexture->getTransformMatrix());
    if (!renderSuccess) {
      ALOGE("%s: Failed to render texture", __func__);
      return cameraStatus(Status::INTERNAL_ERROR);
    }
  }
  framebuffer.afterDraw();

  return ndk::ScopedAStatus::ok();
}

}  // namespace virtualcamera
}  // namespace companion
}  // namespace android