blob: 164580f8d6aa5987c344c0a2951602146dd2592c [file] [log] [blame]
Jan Sebechlebsky5cb39962023-11-22 17:33:07 +01001/*
2 * Copyright (C) 2023 The Android Open Source Project
3 *
4 * Licensed under the Apache License, Version 2.0 (the "License");
5 * you may not use this file except in compliance with the License.
6 * You may obtain a copy of the License at
7 *
8 * http://www.apache.org/licenses/LICENSE-2.0
9 *
10 * Unless required by applicable law or agreed to in writing, software
11 * distributed under the License is distributed on an "AS IS" BASIS,
12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 * See the License for the specific language governing permissions and
14 * limitations under the License.
15 */
16
17#define LOG_TAG "VirtualCameraRenderThread"
18#include "VirtualCameraRenderThread.h"
19
20#include <chrono>
Jan Sebechlebsky042d1fb2023-12-12 16:37:00 +010021#include <cstdint>
Jan Sebechlebsky4ce32082024-02-14 16:02:11 +010022#include <cstring>
Jan Sebechlebsky5cb39962023-11-22 17:33:07 +010023#include <future>
24#include <memory>
25#include <mutex>
26#include <thread>
Jan Sebechlebsky4ce32082024-02-14 16:02:11 +010027#include <vector>
Jan Sebechlebsky5cb39962023-11-22 17:33:07 +010028
Jan Sebechlebsky4ce32082024-02-14 16:02:11 +010029#include "Exif.h"
Jan Sebechlebsky9ae496f2023-12-05 15:56:28 +010030#include "GLES/gl.h"
Biswarup Pal8ad8bc52024-02-08 13:41:44 +000031#include "VirtualCameraDevice.h"
Jan Sebechlebsky5cb39962023-11-22 17:33:07 +010032#include "VirtualCameraSessionContext.h"
33#include "aidl/android/hardware/camera/common/Status.h"
34#include "aidl/android/hardware/camera/device/BufferStatus.h"
Jan Sebechlebsky4ce32082024-02-14 16:02:11 +010035#include "aidl/android/hardware/camera/device/CameraBlob.h"
36#include "aidl/android/hardware/camera/device/CameraBlobId.h"
Jan Sebechlebsky5cb39962023-11-22 17:33:07 +010037#include "aidl/android/hardware/camera/device/CameraMetadata.h"
38#include "aidl/android/hardware/camera/device/CaptureResult.h"
39#include "aidl/android/hardware/camera/device/ErrorCode.h"
40#include "aidl/android/hardware/camera/device/ICameraDeviceCallback.h"
41#include "aidl/android/hardware/camera/device/NotifyMsg.h"
42#include "aidl/android/hardware/camera/device/ShutterMsg.h"
43#include "aidl/android/hardware/camera/device/StreamBuffer.h"
44#include "android-base/thread_annotations.h"
45#include "android/binder_auto_utils.h"
46#include "android/hardware_buffer.h"
Vadim Caenc0aff132024-03-12 17:20:07 +010047#include "system/camera_metadata.h"
Jan Sebechlebsky042d1fb2023-12-12 16:37:00 +010048#include "ui/GraphicBuffer.h"
Jan Sebechlebsky5cb39962023-11-22 17:33:07 +010049#include "util/EglFramebuffer.h"
50#include "util/JpegUtil.h"
Jan Sebechlebsky4ce32082024-02-14 16:02:11 +010051#include "util/MetadataUtil.h"
Jan Sebechlebsky5cb39962023-11-22 17:33:07 +010052#include "util/TestPatternHelper.h"
53#include "util/Util.h"
54#include "utils/Errors.h"
55
56namespace android {
57namespace companion {
58namespace virtualcamera {
59
60using ::aidl::android::hardware::camera::common::Status;
61using ::aidl::android::hardware::camera::device::BufferStatus;
Jan Sebechlebsky4ce32082024-02-14 16:02:11 +010062using ::aidl::android::hardware::camera::device::CameraBlob;
63using ::aidl::android::hardware::camera::device::CameraBlobId;
Jan Sebechlebsky5cb39962023-11-22 17:33:07 +010064using ::aidl::android::hardware::camera::device::CameraMetadata;
65using ::aidl::android::hardware::camera::device::CaptureResult;
66using ::aidl::android::hardware::camera::device::ErrorCode;
67using ::aidl::android::hardware::camera::device::ErrorMsg;
68using ::aidl::android::hardware::camera::device::ICameraDeviceCallback;
69using ::aidl::android::hardware::camera::device::NotifyMsg;
70using ::aidl::android::hardware::camera::device::ShutterMsg;
71using ::aidl::android::hardware::camera::device::Stream;
72using ::aidl::android::hardware::camera::device::StreamBuffer;
73using ::aidl::android::hardware::graphics::common::PixelFormat;
74using ::android::base::ScopedLockAssertion;
75
Jan Sebechlebsky4ce32082024-02-14 16:02:11 +010076using ::android::hardware::camera::common::helper::ExifUtils;
77
Jan Sebechlebsky5cb39962023-11-22 17:33:07 +010078namespace {
79
using namespace std::chrono_literals;

// Maximum time to wait for a buffer's acquire fence to signal before the
// render attempt is failed (see renderIntoEglFramebuffer).
static constexpr std::chrono::milliseconds kAcquireFenceTimeout = 500ms;

// See REQUEST_PIPELINE_DEPTH in CaptureResult.java.
// This roughly corresponds to frame latency, we set to
// documented minimum of 2.
static constexpr uint8_t kPipelineDepth = 2;

// Upper bound for the compressed JPEG thumbnail embedded into the EXIF
// APP1 segment (see createThumbnail / createExif).
static constexpr size_t kJpegThumbnailBufferSize = 32 * 1024;  // 32 KiB
90
// Builds the per-frame result metadata attached to every CaptureResult.
// The virtual camera has no real 3A pipeline, so the 3A / statistics keys
// are reported as fixed "disabled / converged" values; only the
// request-dependent fields (JPEG settings, FPS range, GPS coordinates,
// capture intent) vary between frames.
CameraMetadata createCaptureResultMetadata(
    const std::chrono::nanoseconds timestamp,
    const RequestSettings& requestSettings,
    const Resolution reportedSensorSize) {
  // All of the keys used in the response needs to be referenced in
  // availableResultKeys in CameraCharacteristics (see initCameraCharacteristics
  // in VirtualCameraDevice.cc).
  MetadataBuilder builder =
      MetadataBuilder()
          .setAberrationCorrectionMode(
              ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF)
          .setControlAeAvailableAntibandingModes(
              {ANDROID_CONTROL_AE_ANTIBANDING_MODE_OFF})
          .setControlAeAntibandingMode(ANDROID_CONTROL_AE_ANTIBANDING_MODE_OFF)
          .setControlAeExposureCompensation(0)
          .setControlAeLockAvailable(false)
          .setControlAeLock(ANDROID_CONTROL_AE_LOCK_OFF)
          .setControlAeMode(ANDROID_CONTROL_AE_MODE_ON)
          .setControlAePrecaptureTrigger(
              ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER_IDLE)
          .setControlAeState(ANDROID_CONTROL_AE_STATE_INACTIVE)
          .setControlAfMode(ANDROID_CONTROL_AF_MODE_OFF)
          .setControlAfTrigger(ANDROID_CONTROL_AF_TRIGGER_IDLE)
          .setControlAfState(ANDROID_CONTROL_AF_STATE_INACTIVE)
          .setControlAwbMode(ANDROID_CONTROL_AWB_MODE_AUTO)
          .setControlAwbLock(ANDROID_CONTROL_AWB_LOCK_OFF)
          .setControlAwbState(ANDROID_CONTROL_AWB_STATE_INACTIVE)
          .setControlCaptureIntent(requestSettings.captureIntent)
          .setControlEffectMode(ANDROID_CONTROL_EFFECT_MODE_OFF)
          .setControlMode(ANDROID_CONTROL_MODE_AUTO)
          .setControlSceneMode(ANDROID_CONTROL_SCENE_MODE_DISABLED)
          .setControlVideoStabilizationMode(
              ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_OFF)
          // Crop region always covers the full reported sensor area.
          .setCropRegion(0, 0, reportedSensorSize.width,
                         reportedSensorSize.height)
          .setFaceDetectMode(ANDROID_STATISTICS_FACE_DETECT_MODE_OFF)
          .setFlashState(ANDROID_FLASH_STATE_UNAVAILABLE)
          .setFlashMode(ANDROID_FLASH_MODE_OFF)
          .setFocalLength(VirtualCameraDevice::kFocalLength)
          .setJpegQuality(requestSettings.jpegQuality)
          .setJpegOrientation(requestSettings.jpegOrientation)
          .setJpegThumbnailSize(requestSettings.thumbnailResolution.width,
                                requestSettings.thumbnailResolution.height)
          .setJpegThumbnailQuality(requestSettings.thumbnailJpegQuality)
          .setLensOpticalStabilizationMode(
              ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF)
          .setNoiseReductionMode(ANDROID_NOISE_REDUCTION_MODE_OFF)
          .setPipelineDepth(kPipelineDepth)
          .setSensorTimestamp(timestamp)
          .setStatisticsHotPixelMapMode(
              ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_OFF)
          .setStatisticsLensShadingMapMode(
              ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_OFF)
          .setStatisticsSceneFlicker(ANDROID_STATISTICS_SCENE_FLICKER_NONE);

  // Optional keys are only attached when the request supplied them.
  if (requestSettings.fpsRange.has_value()) {
    builder.setControlAeTargetFpsRange(requestSettings.fpsRange.value());
  }

  if (requestSettings.gpsCoordinates.has_value()) {
    const GpsCoordinates& coordinates = requestSettings.gpsCoordinates.value();
    builder.setJpegGpsCoordinates(coordinates);
  }

  std::unique_ptr<CameraMetadata> metadata = builder.build();

  if (metadata == nullptr) {
    ALOGE("%s: Failed to build capture result metadata", __func__);
    return CameraMetadata();
  }
  return std::move(*metadata);
}
163
164NotifyMsg createShutterNotifyMsg(int frameNumber,
165 std::chrono::nanoseconds timestamp) {
166 NotifyMsg msg;
167 msg.set<NotifyMsg::Tag::shutter>(ShutterMsg{
168 .frameNumber = frameNumber,
169 .timestamp = timestamp.count(),
170 });
171 return msg;
172}
173
174NotifyMsg createBufferErrorNotifyMsg(int frameNumber, int streamId) {
175 NotifyMsg msg;
176 msg.set<NotifyMsg::Tag::error>(ErrorMsg{.frameNumber = frameNumber,
177 .errorStreamId = streamId,
178 .errorCode = ErrorCode::ERROR_BUFFER});
179 return msg;
180}
181
182NotifyMsg createRequestErrorNotifyMsg(int frameNumber) {
183 NotifyMsg msg;
184 msg.set<NotifyMsg::Tag::error>(ErrorMsg{
Jan Sebechlebskyb0d8cab2023-11-28 10:55:04 +0100185 .frameNumber = frameNumber,
186 // errorStreamId needs to be set to -1 for ERROR_REQUEST
187 // (not tied to specific stream).
188 .errorStreamId = -1,
189 .errorCode = ErrorCode::ERROR_REQUEST});
Jan Sebechlebsky5cb39962023-11-22 17:33:07 +0100190 return msg;
191}
192
Jan Sebechlebsky042d1fb2023-12-12 16:37:00 +0100193std::shared_ptr<EglFrameBuffer> allocateTemporaryFramebuffer(
194 EGLDisplay eglDisplay, const uint width, const int height) {
195 const AHardwareBuffer_Desc desc{
196 .width = static_cast<uint32_t>(width),
197 .height = static_cast<uint32_t>(height),
198 .layers = 1,
199 .format = AHARDWAREBUFFER_FORMAT_Y8Cb8Cr8_420,
200 .usage = AHARDWAREBUFFER_USAGE_GPU_FRAMEBUFFER |
201 AHARDWAREBUFFER_USAGE_CPU_READ_OFTEN,
202 .rfu0 = 0,
203 .rfu1 = 0};
204
205 AHardwareBuffer* hwBufferPtr;
206 int status = AHardwareBuffer_allocate(&desc, &hwBufferPtr);
207 if (status != NO_ERROR) {
208 ALOGE(
209 "%s: Failed to allocate hardware buffer for temporary framebuffer: %d",
210 __func__, status);
211 return nullptr;
212 }
213
214 return std::make_shared<EglFrameBuffer>(
215 eglDisplay,
216 std::shared_ptr<AHardwareBuffer>(hwBufferPtr, AHardwareBuffer_release));
217}
218
219bool isYuvFormat(const PixelFormat pixelFormat) {
220 switch (static_cast<android_pixel_format_t>(pixelFormat)) {
221 case HAL_PIXEL_FORMAT_YCBCR_422_I:
222 case HAL_PIXEL_FORMAT_YCBCR_422_SP:
223 case HAL_PIXEL_FORMAT_Y16:
224 case HAL_PIXEL_FORMAT_YV12:
225 case HAL_PIXEL_FORMAT_YCBCR_420_888:
226 return true;
227 default:
228 return false;
229 }
230}
231
Jan Sebechlebsky4ce32082024-02-14 16:02:11 +0100232std::vector<uint8_t> createExif(
Vadim Caenc0aff132024-03-12 17:20:07 +0100233 Resolution imageSize, const CameraMetadata resultMetadata,
234 const std::vector<uint8_t>& compressedThumbnail = {}) {
Jan Sebechlebsky4ce32082024-02-14 16:02:11 +0100235 std::unique_ptr<ExifUtils> exifUtils(ExifUtils::create());
236 exifUtils->initialize();
Vadim Caenc0aff132024-03-12 17:20:07 +0100237
238 // Make a copy of the metadata in order to converting it the HAL metadata
239 // format (as opposed to the AIDL class) and use the setFromMetadata method
240 // from ExifUtil
241 camera_metadata_t* rawSettings =
242 clone_camera_metadata((camera_metadata_t*)resultMetadata.metadata.data());
243 if (rawSettings != nullptr) {
244 android::hardware::camera::common::helper::CameraMetadata halMetadata(
245 rawSettings);
246 exifUtils->setFromMetadata(halMetadata, imageSize.width, imageSize.height);
247 }
248 exifUtils->setMake(VirtualCameraDevice::kDefaultMakeAndModel);
249 exifUtils->setModel(VirtualCameraDevice::kDefaultMakeAndModel);
250 exifUtils->setFlash(0);
Jan Sebechlebsky4ce32082024-02-14 16:02:11 +0100251
252 std::vector<uint8_t> app1Data;
253
254 size_t thumbnailDataSize = compressedThumbnail.size();
255 const void* thumbnailData =
256 thumbnailDataSize > 0
257 ? reinterpret_cast<const void*>(compressedThumbnail.data())
258 : nullptr;
259
260 if (!exifUtils->generateApp1(thumbnailData, thumbnailDataSize)) {
261 ALOGE("%s: Failed to generate APP1 segment for EXIF metadata", __func__);
262 return app1Data;
263 }
264
265 const uint8_t* data = exifUtils->getApp1Buffer();
266 const size_t size = exifUtils->getApp1Length();
267
268 app1Data.insert(app1Data.end(), data, data + size);
269 return app1Data;
270}
271
Jan Sebechlebsky5cb39962023-11-22 17:33:07 +0100272} // namespace
273
// Identifies a single output buffer of a capture request: the stream it
// belongs to, the buffer id within that stream, and the acquire fence to
// wait on before writing into it.
CaptureRequestBuffer::CaptureRequestBuffer(int streamId, int bufferId,
                                           sp<Fence> fence)
    : mStreamId(streamId), mBufferId(bufferId), mFence(fence) {
}

// Returns the id of the stream this buffer belongs to.
int CaptureRequestBuffer::getStreamId() const {
  return mStreamId;
}

// Returns the id of this buffer within its stream.
int CaptureRequestBuffer::getBufferId() const {
  return mBufferId;
}

// Returns the acquire fence (may be null or invalid when none was attached).
sp<Fence> CaptureRequestBuffer::getFence() const {
  return mFence;
}
290
// Stores the session-wide rendering parameters. The EGL objects themselves
// are created lazily inside threadLoop(), on the render thread, not here.
VirtualCameraRenderThread::VirtualCameraRenderThread(
    VirtualCameraSessionContext& sessionContext,
    const Resolution inputSurfaceSize, const Resolution reportedSensorSize,
    std::shared_ptr<ICameraDeviceCallback> cameraDeviceCallback, bool testMode)
    : mCameraDeviceCallback(cameraDeviceCallback),
      mInputSurfaceSize(inputSurfaceSize),
      mReportedSensorSize(reportedSensorSize),
      mTestMode(testMode),
      mSessionContext(sessionContext) {
}

// Requests the render loop to exit and joins the thread before destruction.
VirtualCameraRenderThread::~VirtualCameraRenderThread() {
  stop();
  if (mThread.joinable()) {
    mThread.join();
  }
}
308
// A single capture request queued to the render thread: frame number, the
// set of output buffers to fill, and the capture settings to apply.
ProcessCaptureRequestTask::ProcessCaptureRequestTask(
    int frameNumber, const std::vector<CaptureRequestBuffer>& requestBuffers,
    const RequestSettings& requestSettings)
    : mFrameNumber(frameNumber),
      mBuffers(requestBuffers),
      mRequestSettings(requestSettings) {
}

// Returns the frame number of this capture request.
int ProcessCaptureRequestTask::getFrameNumber() const {
  return mFrameNumber;
}

// Returns the output buffers this request should render into.
const std::vector<CaptureRequestBuffer>& ProcessCaptureRequestTask::getBuffers()
    const {
  return mBuffers;
}

// Returns the capture settings associated with this request.
const RequestSettings& ProcessCaptureRequestTask::getRequestSettings() const {
  return mRequestSettings;
}
329
Jan Sebechlebsky5cb39962023-11-22 17:33:07 +0100330void VirtualCameraRenderThread::enqueueTask(
331 std::unique_ptr<ProcessCaptureRequestTask> task) {
332 std::lock_guard<std::mutex> lock(mLock);
333 mQueue.emplace_back(std::move(task));
334 mCondVar.notify_one();
335}
336
337void VirtualCameraRenderThread::flush() {
338 std::lock_guard<std::mutex> lock(mLock);
Jan Sebechlebskyb0d8cab2023-11-28 10:55:04 +0100339 while (!mQueue.empty()) {
340 std::unique_ptr<ProcessCaptureRequestTask> task = std::move(mQueue.front());
341 mQueue.pop_front();
Jan Sebechlebsky5cb39962023-11-22 17:33:07 +0100342 flushCaptureRequest(*task);
343 }
344}
345
346void VirtualCameraRenderThread::start() {
347 mThread = std::thread(&VirtualCameraRenderThread::threadLoop, this);
348}
349
350void VirtualCameraRenderThread::stop() {
351 {
352 std::lock_guard<std::mutex> lock(mLock);
353 mPendingExit = true;
354 mCondVar.notify_one();
355 }
356}
357
358sp<Surface> VirtualCameraRenderThread::getInputSurface() {
359 return mInputSurfacePromise.get_future().get();
360}
361
// Blocks until a task is available or stop() was requested.
// Returns the next task, or nullptr when the thread should exit.
std::unique_ptr<ProcessCaptureRequestTask>
VirtualCameraRenderThread::dequeueTask() {
  std::unique_lock<std::mutex> lock(mLock);
  // Clang's thread safety analysis doesn't perform alias analysis,
  // so it doesn't support moveable std::unique_lock.
  //
  // Lock assertion below is basically explicit declaration that
  // the lock is held in this scope, which is true, since it's only
  // released during waiting inside mCondVar.wait calls.
  ScopedLockAssertion lockAssertion(mLock);

  mCondVar.wait(lock, [this]() REQUIRES(mLock) {
    return mPendingExit || !mQueue.empty();
  });
  // Exit takes precedence over any tasks still sitting in the queue.
  if (mPendingExit) {
    return nullptr;
  }
  std::unique_ptr<ProcessCaptureRequestTask> task = std::move(mQueue.front());
  mQueue.pop_front();
  return task;
}
383
// Render thread entry point. All EGL setup happens here on the render
// thread (not in the constructor); once the input surface exists it is
// published through mInputSurfacePromise, unblocking getInputSurface().
// Then loops on dequeueTask() until stop() makes it return nullptr.
void VirtualCameraRenderThread::threadLoop() {
  ALOGV("Render thread starting");

  mEglDisplayContext = std::make_unique<EglDisplayContext>();
  // Separate shader programs for sampling YUV vs RGBA input textures
  // (selected per-frame in renderIntoEglFramebuffer).
  mEglTextureYuvProgram =
      std::make_unique<EglTextureProgram>(EglTextureProgram::TextureFormat::YUV);
  mEglTextureRgbProgram = std::make_unique<EglTextureProgram>(
      EglTextureProgram::TextureFormat::RGBA);
  mEglSurfaceTexture = std::make_unique<EglSurfaceTexture>(
      mInputSurfaceSize.width, mInputSurfaceSize.height);
  mInputSurfacePromise.set_value(mEglSurfaceTexture->getSurface());

  while (std::unique_ptr<ProcessCaptureRequestTask> task = dequeueTask()) {
    processCaptureRequest(*task);
  }

  ALOGV("Render thread exiting");
}
402
403void VirtualCameraRenderThread::processCaptureRequest(
404 const ProcessCaptureRequestTask& request) {
405 const std::chrono::nanoseconds timestamp =
406 std::chrono::duration_cast<std::chrono::nanoseconds>(
407 std::chrono::steady_clock::now().time_since_epoch());
408
409 CaptureResult captureResult;
410 captureResult.fmqResultSize = 0;
411 captureResult.frameNumber = request.getFrameNumber();
Jan Sebechlebskyb0d8cab2023-11-28 10:55:04 +0100412 // Partial result needs to be set to 1 when metadata are present.
Jan Sebechlebsky5cb39962023-11-22 17:33:07 +0100413 captureResult.partialResult = 1;
414 captureResult.inputBuffer.streamId = -1;
415 captureResult.physicalCameraMetadata.resize(0);
Jan Sebechlebsky4ce32082024-02-14 16:02:11 +0100416 captureResult.result = createCaptureResultMetadata(
417 timestamp, request.getRequestSettings(), mReportedSensorSize);
Jan Sebechlebsky5cb39962023-11-22 17:33:07 +0100418
419 const std::vector<CaptureRequestBuffer>& buffers = request.getBuffers();
420 captureResult.outputBuffers.resize(buffers.size());
421
422 if (mTestMode) {
423 // In test mode let's just render something to the Surface ourselves.
424 renderTestPatternYCbCr420(mEglSurfaceTexture->getSurface(),
425 request.getFrameNumber());
426 }
427
428 mEglSurfaceTexture->updateTexture();
429
430 for (int i = 0; i < buffers.size(); ++i) {
431 const CaptureRequestBuffer& reqBuffer = buffers[i];
432 StreamBuffer& resBuffer = captureResult.outputBuffers[i];
433 resBuffer.streamId = reqBuffer.getStreamId();
434 resBuffer.bufferId = reqBuffer.getBufferId();
435 resBuffer.status = BufferStatus::OK;
436
437 const std::optional<Stream> streamConfig =
438 mSessionContext.getStreamConfig(reqBuffer.getStreamId());
439
440 if (!streamConfig.has_value()) {
441 resBuffer.status = BufferStatus::ERROR;
442 continue;
443 }
444
445 auto status = streamConfig->format == PixelFormat::BLOB
Jan Sebechlebsky4ce32082024-02-14 16:02:11 +0100446 ? renderIntoBlobStreamBuffer(
447 reqBuffer.getStreamId(), reqBuffer.getBufferId(),
Vadim Caenc0aff132024-03-12 17:20:07 +0100448 captureResult.result, request.getRequestSettings(),
449 reqBuffer.getFence())
Jan Sebechlebsky5cb39962023-11-22 17:33:07 +0100450 : renderIntoImageStreamBuffer(reqBuffer.getStreamId(),
451 reqBuffer.getBufferId(),
452 reqBuffer.getFence());
453 if (!status.isOk()) {
454 resBuffer.status = BufferStatus::ERROR;
455 }
456 }
457
458 std::vector<NotifyMsg> notifyMsg{
459 createShutterNotifyMsg(request.getFrameNumber(), timestamp)};
460 for (const StreamBuffer& resBuffer : captureResult.outputBuffers) {
461 if (resBuffer.status != BufferStatus::OK) {
462 notifyMsg.push_back(createBufferErrorNotifyMsg(request.getFrameNumber(),
463 resBuffer.streamId));
464 }
465 }
466
467 auto status = mCameraDeviceCallback->notify(notifyMsg);
468 if (!status.isOk()) {
469 ALOGE("%s: notify call failed: %s", __func__,
470 status.getDescription().c_str());
471 return;
472 }
473
474 std::vector<::aidl::android::hardware::camera::device::CaptureResult>
475 captureResults(1);
476 captureResults[0] = std::move(captureResult);
477
478 status = mCameraDeviceCallback->processCaptureResult(captureResults);
479 if (!status.isOk()) {
480 ALOGE("%s: processCaptureResult call failed: %s", __func__,
481 status.getDescription().c_str());
482 return;
483 }
484
485 ALOGD("%s: Successfully called processCaptureResult", __func__);
486}
487
// Fails a capture request without rendering it: sends ERROR_REQUEST and
// returns every buffer with ERROR status, handing a dup of each buffer's
// acquire fence back as its release fence (the buffer was never written).
void VirtualCameraRenderThread::flushCaptureRequest(
    const ProcessCaptureRequestTask& request) {
  CaptureResult captureResult;
  captureResult.fmqResultSize = 0;
  captureResult.frameNumber = request.getFrameNumber();
  captureResult.inputBuffer.streamId = -1;

  const std::vector<CaptureRequestBuffer>& buffers = request.getBuffers();
  captureResult.outputBuffers.resize(buffers.size());

  for (int i = 0; i < buffers.size(); ++i) {
    const CaptureRequestBuffer& reqBuffer = buffers[i];
    StreamBuffer& resBuffer = captureResult.outputBuffers[i];
    resBuffer.streamId = reqBuffer.getStreamId();
    resBuffer.bufferId = reqBuffer.getBufferId();
    resBuffer.status = BufferStatus::ERROR;
    // Pass the (unwaited) acquire fence back as the release fence.
    sp<Fence> fence = reqBuffer.getFence();
    if (fence != nullptr && fence->isValid()) {
      resBuffer.releaseFence.fds.emplace_back(fence->dup());
    }
  }

  // ERROR_REQUEST (errorStreamId == -1) covers the whole request, so no
  // per-buffer error notifications are needed.
  auto status = mCameraDeviceCallback->notify(
      {createRequestErrorNotifyMsg(request.getFrameNumber())});
  if (!status.isOk()) {
    ALOGE("%s: notify call failed: %s", __func__,
          status.getDescription().c_str());
    return;
  }

  std::vector<::aidl::android::hardware::camera::device::CaptureResult>
      captureResults(1);
  captureResults[0] = std::move(captureResult);

  status = mCameraDeviceCallback->processCaptureResult(captureResults);
  if (!status.isOk()) {
    ALOGE("%s: processCaptureResult call failed: %s", __func__,
          status.getDescription().c_str());
  }
}
528
Jan Sebechlebsky4ce32082024-02-14 16:02:11 +0100529std::vector<uint8_t> VirtualCameraRenderThread::createThumbnail(
530 const Resolution resolution, const int quality) {
531 if (resolution.width == 0 || resolution.height == 0) {
532 ALOGV("%s: Skipping thumbnail creation, zero size requested", __func__);
533 return {};
534 }
535
536 ALOGV("%s: Creating thumbnail with size %d x %d, quality %d", __func__,
537 resolution.width, resolution.height, quality);
538 std::shared_ptr<EglFrameBuffer> framebuffer = allocateTemporaryFramebuffer(
539 mEglDisplayContext->getEglDisplay(), resolution.width, resolution.height);
540 if (framebuffer == nullptr) {
541 ALOGE(
542 "Failed to allocate temporary framebuffer for JPEG thumbnail "
543 "compression");
544 return {};
545 }
546
547 // TODO(b/324383963) Add support for letterboxing if the thumbnail size
548 // doesn't correspond
549 // to input texture aspect ratio.
550 if (!renderIntoEglFramebuffer(*framebuffer).isOk()) {
551 ALOGE(
552 "Failed to render input texture into temporary framebuffer for JPEG "
553 "thumbnail");
554 return {};
555 }
556
557 std::shared_ptr<AHardwareBuffer> inHwBuffer = framebuffer->getHardwareBuffer();
558 GraphicBuffer* gBuffer = GraphicBuffer::fromAHardwareBuffer(inHwBuffer.get());
559
560 if (gBuffer->getPixelFormat() != HAL_PIXEL_FORMAT_YCbCr_420_888) {
561 // This should never happen since we're allocating the temporary buffer
562 // with YUV420 layout above.
563 ALOGE("%s: Cannot compress non-YUV buffer (pixelFormat %d)", __func__,
564 gBuffer->getPixelFormat());
565 return {};
566 }
567
Jan Sebechlebsky43543222024-02-16 12:50:32 +0100568 YCbCrLockGuard yCbCrLock(inHwBuffer, AHARDWAREBUFFER_USAGE_CPU_READ_OFTEN);
569 if (yCbCrLock.getStatus() != NO_ERROR) {
Jan Sebechlebsky4ce32082024-02-14 16:02:11 +0100570 ALOGE("%s: Failed to lock graphic buffer while generating thumbnail: %d",
Jan Sebechlebsky43543222024-02-16 12:50:32 +0100571 __func__, yCbCrLock.getStatus());
Jan Sebechlebsky4ce32082024-02-14 16:02:11 +0100572 return {};
573 }
574
575 std::vector<uint8_t> compressedThumbnail;
576 compressedThumbnail.resize(kJpegThumbnailBufferSize);
577 ALOGE("%s: Compressing thumbnail %d x %d", __func__, gBuffer->getWidth(),
578 gBuffer->getHeight());
Jan Sebechlebsky43543222024-02-16 12:50:32 +0100579 std::optional<size_t> compressedSize = compressJpeg(
580 gBuffer->getWidth(), gBuffer->getHeight(), quality, *yCbCrLock, {},
581 compressedThumbnail.size(), compressedThumbnail.data());
Jan Sebechlebsky4ce32082024-02-14 16:02:11 +0100582 if (!compressedSize.has_value()) {
583 ALOGE("%s: Failed to compress jpeg thumbnail", __func__);
584 return {};
585 }
586 compressedThumbnail.resize(compressedSize.value());
587 return compressedThumbnail;
588}
589
// Compresses the current input frame to JPEG and writes it into the BLOB
// stream buffer identified by (streamId, bufferId): renders into a
// temporary YUV framebuffer, prepends the EXIF APP1 segment (with optional
// thumbnail) and writes the CameraBlob trailer at the end of the buffer.
ndk::ScopedAStatus VirtualCameraRenderThread::renderIntoBlobStreamBuffer(
    const int streamId, const int bufferId, const CameraMetadata& resultMetadata,
    const RequestSettings& requestSettings, sp<Fence> fence) {
  std::shared_ptr<AHardwareBuffer> hwBuffer =
      mSessionContext.fetchHardwareBuffer(streamId, bufferId);
  if (hwBuffer == nullptr) {
    ALOGE("%s: Failed to fetch hardware buffer %d for streamId %d", __func__,
          bufferId, streamId);
    return cameraStatus(Status::INTERNAL_ERROR);
  }

  // Stream config is needed for the target dimensions and buffer size.
  std::optional<Stream> stream = mSessionContext.getStreamConfig(streamId);
  if (!stream.has_value()) {
    ALOGE("%s, failed to fetch information about stream %d", __func__, streamId);
    return cameraStatus(Status::INTERNAL_ERROR);
  }

  ALOGV("%s: Rendering JPEG with size %d x %d, quality %d", __func__,
        stream->width, stream->height, requestSettings.jpegQuality);

  // Let's create YUV framebuffer and render the surface into this.
  // This will take care about rescaling as well as potential format conversion.
  std::shared_ptr<EglFrameBuffer> framebuffer = allocateTemporaryFramebuffer(
      mEglDisplayContext->getEglDisplay(), stream->width, stream->height);
  if (framebuffer == nullptr) {
    ALOGE("Failed to allocate temporary framebuffer for JPEG compression");
    return cameraStatus(Status::INTERNAL_ERROR);
  }

  // Render into temporary framebuffer.
  ndk::ScopedAStatus status = renderIntoEglFramebuffer(*framebuffer);
  if (!status.isOk()) {
    ALOGE("Failed to render input texture into temporary framebuffer");
    return status;
  }

  // Lock the destination BLOB buffer for CPU access; the acquire fence is
  // handed to the guard, which unlocks in its destructor.
  PlanesLockGuard planesLock(hwBuffer, AHARDWAREBUFFER_USAGE_CPU_READ_RARELY,
                             fence);
  if (planesLock.getStatus() != OK) {
    return cameraStatus(Status::INTERNAL_ERROR);
  }

  std::shared_ptr<AHardwareBuffer> inHwBuffer = framebuffer->getHardwareBuffer();
  GraphicBuffer* gBuffer = GraphicBuffer::fromAHardwareBuffer(inHwBuffer.get());

  if (gBuffer == nullptr) {
    ALOGE(
        "%s: Encountered invalid temporary buffer while rendering JPEG "
        "into BLOB stream",
        __func__);
    return cameraStatus(Status::INTERNAL_ERROR);
  }

  if (gBuffer->getPixelFormat() != HAL_PIXEL_FORMAT_YCbCr_420_888) {
    // This should never happen since we're allocating the temporary buffer
    // with YUV420 layout above.
    ALOGE("%s: Cannot compress non-YUV buffer (pixelFormat %d)", __func__,
          gBuffer->getPixelFormat());
    return cameraStatus(Status::INTERNAL_ERROR);
  }

  // Lock the source (temporary) YUV buffer for CPU read.
  YCbCrLockGuard yCbCrLock(inHwBuffer, AHARDWAREBUFFER_USAGE_CPU_READ_OFTEN);
  if (yCbCrLock.getStatus() != OK) {
    return cameraStatus(Status::INTERNAL_ERROR);
  }

  // EXIF APP1 segment, embedding a compressed thumbnail when one was
  // requested in the capture settings.
  std::vector<uint8_t> app1ExifData =
      createExif(Resolution(stream->width, stream->height), resultMetadata,
                 createThumbnail(requestSettings.thumbnailResolution,
                                 requestSettings.thumbnailJpegQuality));
  // Compress into the output buffer, reserving space at the very end for
  // the CameraBlob trailer written below.
  std::optional<size_t> compressedSize = compressJpeg(
      gBuffer->getWidth(), gBuffer->getHeight(), requestSettings.jpegQuality,
      *yCbCrLock, app1ExifData, stream->bufferSize - sizeof(CameraBlob),
      (*planesLock).planes[0].data);

  if (!compressedSize.has_value()) {
    ALOGE("%s: Failed to compress JPEG image", __func__);
    return cameraStatus(Status::INTERNAL_ERROR);
  }

  // CameraBlob transport header carrying the actual JPEG size, placed at
  // the end of the BLOB buffer (camera HAL JPEG convention).
  CameraBlob cameraBlob{
      .blobId = CameraBlobId::JPEG,
      .blobSizeBytes = static_cast<int32_t>(compressedSize.value())};

  memcpy(reinterpret_cast<uint8_t*>((*planesLock).planes[0].data) +
             (stream->bufferSize - sizeof(cameraBlob)),
         &cameraBlob, sizeof(cameraBlob));

  ALOGV("%s: Successfully compressed JPEG image, resulting size %zu B",
        __func__, compressedSize.value());

  return ndk::ScopedAStatus::ok();
}
683
684ndk::ScopedAStatus VirtualCameraRenderThread::renderIntoImageStreamBuffer(
685 int streamId, int bufferId, sp<Fence> fence) {
686 ALOGV("%s", __func__);
687
688 const std::chrono::nanoseconds before =
689 std::chrono::duration_cast<std::chrono::nanoseconds>(
690 std::chrono::steady_clock::now().time_since_epoch());
691
692 // Render test pattern using EGL.
693 std::shared_ptr<EglFrameBuffer> framebuffer =
694 mSessionContext.fetchOrCreateEglFramebuffer(
695 mEglDisplayContext->getEglDisplay(), streamId, bufferId);
696 if (framebuffer == nullptr) {
697 ALOGE(
698 "%s: Failed to get EGL framebuffer corresponding to buffer id "
699 "%d for streamId %d",
700 __func__, bufferId, streamId);
701 return cameraStatus(Status::ILLEGAL_ARGUMENT);
702 }
703
Jan Sebechlebsky042d1fb2023-12-12 16:37:00 +0100704 ndk::ScopedAStatus status = renderIntoEglFramebuffer(*framebuffer, fence);
Jan Sebechlebsky5cb39962023-11-22 17:33:07 +0100705
706 const std::chrono::nanoseconds after =
707 std::chrono::duration_cast<std::chrono::nanoseconds>(
708 std::chrono::steady_clock::now().time_since_epoch());
709
710 ALOGV("Rendering to buffer %d, stream %d took %lld ns", bufferId, streamId,
711 after.count() - before.count());
712
713 return ndk::ScopedAStatus::ok();
714}
715
// Renders the current input texture (or a solid fill when no frame has
// arrived yet) into the given EGL framebuffer, after waiting for the
// optional acquire fence. Returns INTERNAL_ERROR on fence timeout or
// draw failure.
ndk::ScopedAStatus VirtualCameraRenderThread::renderIntoEglFramebuffer(
    EglFrameBuffer& framebuffer, sp<Fence> fence) {
  ALOGV("%s", __func__);
  // Wait for fence to clear.
  if (fence != nullptr && fence->isValid()) {
    status_t ret = fence->wait(kAcquireFenceTimeout.count());
    if (ret != 0) {
      ALOGE("Timeout while waiting for the acquire fence for buffer");
      return cameraStatus(Status::INTERNAL_ERROR);
    }
  }

  mEglDisplayContext->makeCurrent();
  framebuffer.beforeDraw();

  sp<GraphicBuffer> textureBuffer = mEglSurfaceTexture->getCurrentBuffer();
  if (textureBuffer == nullptr) {
    // If there's no current buffer, nothing was written to the surface and
    // texture is not initialized yet. Let's render the framebuffer black
    // instead of rendering the texture.
    // NOTE(review): (0.0, 0.5, 0.5) is not black in RGB; it presumably maps
    // to black for YUV render targets (Y=0, chroma at midpoint) - confirm.
    glClearColor(0.0f, 0.5f, 0.5f, 0.0f);
    glClear(GL_COLOR_BUFFER_BIT);
  } else {
    // Pick the sampler program matching the input buffer's pixel format.
    const bool renderSuccess =
        isYuvFormat(static_cast<PixelFormat>(textureBuffer->getPixelFormat()))
            ? mEglTextureYuvProgram->draw(
                  mEglSurfaceTexture->getTextureId(),
                  mEglSurfaceTexture->getTransformMatrix())
            : mEglTextureRgbProgram->draw(
                  mEglSurfaceTexture->getTextureId(),
                  mEglSurfaceTexture->getTransformMatrix());
    if (!renderSuccess) {
      ALOGE("%s: Failed to render texture", __func__);
      return cameraStatus(Status::INTERNAL_ERROR);
    }
  }
  framebuffer.afterDraw();

  return ndk::ScopedAStatus::ok();
}
756
Jan Sebechlebsky5cb39962023-11-22 17:33:07 +0100757} // namespace virtualcamera
758} // namespace companion
759} // namespace android