blob: 50b5709d57ea040e82a4ac4c2c3f5affc7ee109f [file] [log] [blame]
Jan Sebechlebsky5cb39962023-11-22 17:33:07 +01001/*
2 * Copyright (C) 2023 The Android Open Source Project
3 *
4 * Licensed under the Apache License, Version 2.0 (the "License");
5 * you may not use this file except in compliance with the License.
6 * You may obtain a copy of the License at
7 *
8 * http://www.apache.org/licenses/LICENSE-2.0
9 *
10 * Unless required by applicable law or agreed to in writing, software
11 * distributed under the License is distributed on an "AS IS" BASIS,
12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 * See the License for the specific language governing permissions and
14 * limitations under the License.
15 */
16
Jan Sebechlebsky6402fef2024-03-25 16:30:26 +010017#include "hardware/gralloc.h"
Jan Sebechlebsky5cb39962023-11-22 17:33:07 +010018#define LOG_TAG "VirtualCameraRenderThread"
19#include "VirtualCameraRenderThread.h"
20
21#include <chrono>
Jan Sebechlebsky042d1fb2023-12-12 16:37:00 +010022#include <cstdint>
Jan Sebechlebsky4ce32082024-02-14 16:02:11 +010023#include <cstring>
Jan Sebechlebsky5cb39962023-11-22 17:33:07 +010024#include <future>
25#include <memory>
26#include <mutex>
27#include <thread>
Jan Sebechlebsky4ce32082024-02-14 16:02:11 +010028#include <vector>
Jan Sebechlebsky5cb39962023-11-22 17:33:07 +010029
Jan Sebechlebsky4ce32082024-02-14 16:02:11 +010030#include "Exif.h"
Jan Sebechlebsky9ae496f2023-12-05 15:56:28 +010031#include "GLES/gl.h"
Biswarup Pal8ad8bc52024-02-08 13:41:44 +000032#include "VirtualCameraDevice.h"
Jan Sebechlebsky5cb39962023-11-22 17:33:07 +010033#include "VirtualCameraSessionContext.h"
34#include "aidl/android/hardware/camera/common/Status.h"
35#include "aidl/android/hardware/camera/device/BufferStatus.h"
Jan Sebechlebsky4ce32082024-02-14 16:02:11 +010036#include "aidl/android/hardware/camera/device/CameraBlob.h"
37#include "aidl/android/hardware/camera/device/CameraBlobId.h"
Jan Sebechlebsky5cb39962023-11-22 17:33:07 +010038#include "aidl/android/hardware/camera/device/CameraMetadata.h"
39#include "aidl/android/hardware/camera/device/CaptureResult.h"
40#include "aidl/android/hardware/camera/device/ErrorCode.h"
41#include "aidl/android/hardware/camera/device/ICameraDeviceCallback.h"
42#include "aidl/android/hardware/camera/device/NotifyMsg.h"
43#include "aidl/android/hardware/camera/device/ShutterMsg.h"
44#include "aidl/android/hardware/camera/device/StreamBuffer.h"
45#include "android-base/thread_annotations.h"
46#include "android/binder_auto_utils.h"
47#include "android/hardware_buffer.h"
Vadim Caenc0aff132024-03-12 17:20:07 +010048#include "system/camera_metadata.h"
Jan Sebechlebsky042d1fb2023-12-12 16:37:00 +010049#include "ui/GraphicBuffer.h"
Jan Sebechlebskyb3771312024-03-15 10:38:02 +010050#include "ui/Rect.h"
Jan Sebechlebsky5cb39962023-11-22 17:33:07 +010051#include "util/EglFramebuffer.h"
52#include "util/JpegUtil.h"
Jan Sebechlebsky4ce32082024-02-14 16:02:11 +010053#include "util/MetadataUtil.h"
Jan Sebechlebsky5cb39962023-11-22 17:33:07 +010054#include "util/TestPatternHelper.h"
55#include "util/Util.h"
56#include "utils/Errors.h"
57
58namespace android {
59namespace companion {
60namespace virtualcamera {
61
62using ::aidl::android::hardware::camera::common::Status;
63using ::aidl::android::hardware::camera::device::BufferStatus;
Jan Sebechlebsky4ce32082024-02-14 16:02:11 +010064using ::aidl::android::hardware::camera::device::CameraBlob;
65using ::aidl::android::hardware::camera::device::CameraBlobId;
Jan Sebechlebsky5cb39962023-11-22 17:33:07 +010066using ::aidl::android::hardware::camera::device::CameraMetadata;
67using ::aidl::android::hardware::camera::device::CaptureResult;
68using ::aidl::android::hardware::camera::device::ErrorCode;
69using ::aidl::android::hardware::camera::device::ErrorMsg;
70using ::aidl::android::hardware::camera::device::ICameraDeviceCallback;
71using ::aidl::android::hardware::camera::device::NotifyMsg;
72using ::aidl::android::hardware::camera::device::ShutterMsg;
73using ::aidl::android::hardware::camera::device::Stream;
74using ::aidl::android::hardware::camera::device::StreamBuffer;
75using ::aidl::android::hardware::graphics::common::PixelFormat;
76using ::android::base::ScopedLockAssertion;
77
Jan Sebechlebsky4ce32082024-02-14 16:02:11 +010078using ::android::hardware::camera::common::helper::ExifUtils;
79
Jan Sebechlebsky5cb39962023-11-22 17:33:07 +010080namespace {
81
82using namespace std::chrono_literals;
83
84static constexpr std::chrono::milliseconds kAcquireFenceTimeout = 500ms;
85
Jan Sebechlebskybb01c1d2024-02-12 11:41:37 +010086// See REQUEST_PIPELINE_DEPTH in CaptureResult.java.
87// This roughly corresponds to frame latency, we set to
88// documented minimum of 2.
89static constexpr uint8_t kPipelineDepth = 2;
90
Jan Sebechlebsky4ce32082024-02-14 16:02:11 +010091static constexpr size_t kJpegThumbnailBufferSize = 32 * 1024; // 32 KiB
92
// Builds the per-frame capture result metadata returned to the camera
// framework for a completed capture request.
//
// timestamp - sensor timestamp reported via ANDROID_SENSOR_TIMESTAMP.
// requestSettings - per-request settings echoed back in the result (JPEG
//     quality/orientation, thumbnail size, capture intent, optional FPS range
//     and GPS coordinates).
// reportedSensorSize - sensor resolution used for the reported crop region.
//
// Returns the populated metadata, or an empty CameraMetadata if building fails.
CameraMetadata createCaptureResultMetadata(
    const std::chrono::nanoseconds timestamp,
    const RequestSettings& requestSettings,
    const Resolution reportedSensorSize) {
  // All of the keys used in the response needs to be referenced in
  // availableResultKeys in CameraCharacteristics (see initCameraCharacteristics
  // in VirtualCameraDevice.cc).
  MetadataBuilder builder =
      MetadataBuilder()
          .setAberrationCorrectionMode(
              ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF)
          .setControlAeAvailableAntibandingModes(
              {ANDROID_CONTROL_AE_ANTIBANDING_MODE_OFF})
          .setControlAeAntibandingMode(ANDROID_CONTROL_AE_ANTIBANDING_MODE_OFF)
          .setControlAeExposureCompensation(0)
          .setControlAeLockAvailable(false)
          .setControlAeLock(ANDROID_CONTROL_AE_LOCK_OFF)
          .setControlAeMode(ANDROID_CONTROL_AE_MODE_ON)
          .setControlAePrecaptureTrigger(
              ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER_IDLE)
          .setControlAeState(ANDROID_CONTROL_AE_STATE_INACTIVE)
          .setControlAfMode(ANDROID_CONTROL_AF_MODE_OFF)
          .setControlAfTrigger(ANDROID_CONTROL_AF_TRIGGER_IDLE)
          .setControlAfState(ANDROID_CONTROL_AF_STATE_INACTIVE)
          .setControlAwbMode(ANDROID_CONTROL_AWB_MODE_AUTO)
          .setControlAwbLock(ANDROID_CONTROL_AWB_LOCK_OFF)
          .setControlAwbState(ANDROID_CONTROL_AWB_STATE_INACTIVE)
          .setControlCaptureIntent(requestSettings.captureIntent)
          .setControlEffectMode(ANDROID_CONTROL_EFFECT_MODE_OFF)
          .setControlMode(ANDROID_CONTROL_MODE_AUTO)
          .setControlSceneMode(ANDROID_CONTROL_SCENE_MODE_DISABLED)
          .setControlVideoStabilizationMode(
              ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_OFF)
          .setCropRegion(0, 0, reportedSensorSize.width,
                         reportedSensorSize.height)
          .setFaceDetectMode(ANDROID_STATISTICS_FACE_DETECT_MODE_OFF)
          .setFlashState(ANDROID_FLASH_STATE_UNAVAILABLE)
          .setFlashMode(ANDROID_FLASH_MODE_OFF)
          .setFocalLength(VirtualCameraDevice::kFocalLength)
          .setJpegQuality(requestSettings.jpegQuality)
          .setJpegOrientation(requestSettings.jpegOrientation)
          .setJpegThumbnailSize(requestSettings.thumbnailResolution.width,
                                requestSettings.thumbnailResolution.height)
          .setJpegThumbnailQuality(requestSettings.thumbnailJpegQuality)
          .setLensOpticalStabilizationMode(
              ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF)
          .setNoiseReductionMode(ANDROID_NOISE_REDUCTION_MODE_OFF)
          .setPipelineDepth(kPipelineDepth)
          .setSensorTimestamp(timestamp)
          .setStatisticsHotPixelMapMode(
              ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_OFF)
          .setStatisticsLensShadingMapMode(
              ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_OFF)
          .setStatisticsSceneFlicker(ANDROID_STATISTICS_SCENE_FLICKER_NONE);

  // Optional keys: only reported when the request carried a value for them.
  if (requestSettings.fpsRange.has_value()) {
    builder.setControlAeTargetFpsRange(requestSettings.fpsRange.value());
  }

  if (requestSettings.gpsCoordinates.has_value()) {
    const GpsCoordinates& coordinates = requestSettings.gpsCoordinates.value();
    builder.setJpegGpsCoordinates(coordinates);
  }

  std::unique_ptr<CameraMetadata> metadata = builder.build();

  // Fall back to empty metadata rather than crashing if the build failed.
  if (metadata == nullptr) {
    ALOGE("%s: Failed to build capture result metadata", __func__);
    return CameraMetadata();
  }
  return std::move(*metadata);
}
165
166NotifyMsg createShutterNotifyMsg(int frameNumber,
167 std::chrono::nanoseconds timestamp) {
168 NotifyMsg msg;
169 msg.set<NotifyMsg::Tag::shutter>(ShutterMsg{
170 .frameNumber = frameNumber,
171 .timestamp = timestamp.count(),
172 });
173 return msg;
174}
175
176NotifyMsg createBufferErrorNotifyMsg(int frameNumber, int streamId) {
177 NotifyMsg msg;
178 msg.set<NotifyMsg::Tag::error>(ErrorMsg{.frameNumber = frameNumber,
179 .errorStreamId = streamId,
180 .errorCode = ErrorCode::ERROR_BUFFER});
181 return msg;
182}
183
184NotifyMsg createRequestErrorNotifyMsg(int frameNumber) {
185 NotifyMsg msg;
186 msg.set<NotifyMsg::Tag::error>(ErrorMsg{
Jan Sebechlebskyb0d8cab2023-11-28 10:55:04 +0100187 .frameNumber = frameNumber,
188 // errorStreamId needs to be set to -1 for ERROR_REQUEST
189 // (not tied to specific stream).
190 .errorStreamId = -1,
191 .errorCode = ErrorCode::ERROR_REQUEST});
Jan Sebechlebsky5cb39962023-11-22 17:33:07 +0100192 return msg;
193}
194
Jan Sebechlebsky042d1fb2023-12-12 16:37:00 +0100195std::shared_ptr<EglFrameBuffer> allocateTemporaryFramebuffer(
196 EGLDisplay eglDisplay, const uint width, const int height) {
197 const AHardwareBuffer_Desc desc{
198 .width = static_cast<uint32_t>(width),
199 .height = static_cast<uint32_t>(height),
200 .layers = 1,
201 .format = AHARDWAREBUFFER_FORMAT_Y8Cb8Cr8_420,
202 .usage = AHARDWAREBUFFER_USAGE_GPU_FRAMEBUFFER |
203 AHARDWAREBUFFER_USAGE_CPU_READ_OFTEN,
204 .rfu0 = 0,
205 .rfu1 = 0};
206
207 AHardwareBuffer* hwBufferPtr;
208 int status = AHardwareBuffer_allocate(&desc, &hwBufferPtr);
209 if (status != NO_ERROR) {
210 ALOGE(
211 "%s: Failed to allocate hardware buffer for temporary framebuffer: %d",
212 __func__, status);
213 return nullptr;
214 }
215
216 return std::make_shared<EglFrameBuffer>(
217 eglDisplay,
218 std::shared_ptr<AHardwareBuffer>(hwBufferPtr, AHardwareBuffer_release));
219}
220
221bool isYuvFormat(const PixelFormat pixelFormat) {
222 switch (static_cast<android_pixel_format_t>(pixelFormat)) {
223 case HAL_PIXEL_FORMAT_YCBCR_422_I:
224 case HAL_PIXEL_FORMAT_YCBCR_422_SP:
225 case HAL_PIXEL_FORMAT_Y16:
226 case HAL_PIXEL_FORMAT_YV12:
227 case HAL_PIXEL_FORMAT_YCBCR_420_888:
228 return true;
229 default:
230 return false;
231 }
232}
233
Jan Sebechlebsky4ce32082024-02-14 16:02:11 +0100234std::vector<uint8_t> createExif(
Vadim Caenc0aff132024-03-12 17:20:07 +0100235 Resolution imageSize, const CameraMetadata resultMetadata,
236 const std::vector<uint8_t>& compressedThumbnail = {}) {
Jan Sebechlebsky4ce32082024-02-14 16:02:11 +0100237 std::unique_ptr<ExifUtils> exifUtils(ExifUtils::create());
238 exifUtils->initialize();
Vadim Caenc0aff132024-03-12 17:20:07 +0100239
240 // Make a copy of the metadata in order to converting it the HAL metadata
241 // format (as opposed to the AIDL class) and use the setFromMetadata method
242 // from ExifUtil
243 camera_metadata_t* rawSettings =
244 clone_camera_metadata((camera_metadata_t*)resultMetadata.metadata.data());
245 if (rawSettings != nullptr) {
246 android::hardware::camera::common::helper::CameraMetadata halMetadata(
247 rawSettings);
248 exifUtils->setFromMetadata(halMetadata, imageSize.width, imageSize.height);
249 }
250 exifUtils->setMake(VirtualCameraDevice::kDefaultMakeAndModel);
251 exifUtils->setModel(VirtualCameraDevice::kDefaultMakeAndModel);
252 exifUtils->setFlash(0);
Jan Sebechlebsky4ce32082024-02-14 16:02:11 +0100253
254 std::vector<uint8_t> app1Data;
255
256 size_t thumbnailDataSize = compressedThumbnail.size();
257 const void* thumbnailData =
258 thumbnailDataSize > 0
259 ? reinterpret_cast<const void*>(compressedThumbnail.data())
260 : nullptr;
261
262 if (!exifUtils->generateApp1(thumbnailData, thumbnailDataSize)) {
263 ALOGE("%s: Failed to generate APP1 segment for EXIF metadata", __func__);
264 return app1Data;
265 }
266
267 const uint8_t* data = exifUtils->getApp1Buffer();
268 const size_t size = exifUtils->getApp1Length();
269
270 app1Data.insert(app1Data.end(), data, data + size);
271 return app1Data;
272}
273
Jan Sebechlebsky5cb39962023-11-22 17:33:07 +0100274} // namespace
275
// Bundles the stream id, buffer id and acquire fence of a single output
// buffer belonging to a capture request.
CaptureRequestBuffer::CaptureRequestBuffer(int streamId, int bufferId,
                                           sp<Fence> fence)
    : mStreamId(streamId), mBufferId(bufferId), mFence(fence) {
}
280
// Returns the id of the stream this buffer belongs to.
int CaptureRequestBuffer::getStreamId() const {
  return mStreamId;
}
284
// Returns the id of the buffer within its stream.
int CaptureRequestBuffer::getBufferId() const {
  return mBufferId;
}
288
// Returns the acquire fence associated with this buffer (may be null or
// invalid when no fence was supplied).
sp<Fence> CaptureRequestBuffer::getFence() const {
  return mFence;
}
292
// Constructs the render thread (does not start it - see start()).
//
// sessionContext - session-scoped state (stream configs, buffers); the
//     reference is stored, so it must outlive this instance.
// inputSurfaceSize - resolution of the input Surface consumed by this thread.
// reportedSensorSize - sensor resolution used in capture result metadata.
// cameraDeviceCallback - framework callback for notify/processCaptureResult.
// testMode - when true, the thread renders a test pattern into the input
//     surface itself instead of relying on an external producer.
VirtualCameraRenderThread::VirtualCameraRenderThread(
    VirtualCameraSessionContext& sessionContext,
    const Resolution inputSurfaceSize, const Resolution reportedSensorSize,
    std::shared_ptr<ICameraDeviceCallback> cameraDeviceCallback, bool testMode)
    : mCameraDeviceCallback(cameraDeviceCallback),
      mInputSurfaceSize(inputSurfaceSize),
      mReportedSensorSize(reportedSensorSize),
      mTestMode(testMode),
      mSessionContext(sessionContext) {
}
303
// Signals the render thread to exit and joins it, so the thread never
// outlives this object.
VirtualCameraRenderThread::~VirtualCameraRenderThread() {
  stop();
  if (mThread.joinable()) {
    mThread.join();
  }
}
310
// Captures everything needed to process one capture request on the render
// thread: frame number, the output buffers to fill, and the request settings.
ProcessCaptureRequestTask::ProcessCaptureRequestTask(
    int frameNumber, const std::vector<CaptureRequestBuffer>& requestBuffers,
    const RequestSettings& requestSettings)
    : mFrameNumber(frameNumber),
      mBuffers(requestBuffers),
      mRequestSettings(requestSettings) {
}
318
// Returns the frame number of the capture request this task represents.
int ProcessCaptureRequestTask::getFrameNumber() const {
  return mFrameNumber;
}
322
// Returns the output buffers to be filled for this request.
const std::vector<CaptureRequestBuffer>& ProcessCaptureRequestTask::getBuffers()
    const {
  return mBuffers;
}
327
// Returns the settings (JPEG quality, thumbnail size, etc.) of this request.
const RequestSettings& ProcessCaptureRequestTask::getRequestSettings() const {
  return mRequestSettings;
}
331
// Enqueues a capture request task for processing on the render thread.
// Thread-safe; wakes the render thread if it is waiting for work.
void VirtualCameraRenderThread::enqueueTask(
    std::unique_ptr<ProcessCaptureRequestTask> task) {
  std::lock_guard<std::mutex> lock(mLock);
  mQueue.emplace_back(std::move(task));
  mCondVar.notify_one();
}
338
// Drains the task queue, failing every pending capture request (each gets
// ERROR_REQUEST and its buffers returned with ERROR status). Thread-safe;
// holds the queue lock for the whole drain so no new task interleaves.
void VirtualCameraRenderThread::flush() {
  std::lock_guard<std::mutex> lock(mLock);
  while (!mQueue.empty()) {
    std::unique_ptr<ProcessCaptureRequestTask> task = std::move(mQueue.front());
    mQueue.pop_front();
    flushCaptureRequest(*task);
  }
}
347
// Starts the render thread running threadLoop().
void VirtualCameraRenderThread::start() {
  mThread = std::thread(&VirtualCameraRenderThread::threadLoop, this);
}
351
// Asks the render thread to exit after its current task. Only signals;
// joining happens in the destructor.
void VirtualCameraRenderThread::stop() {
  {
    std::lock_guard<std::mutex> lock(mLock);
    mPendingExit = true;
    mCondVar.notify_one();
  }
}
359
// Returns the input Surface frames are read from. Blocks until the render
// thread has created the surface in threadLoop().
sp<Surface> VirtualCameraRenderThread::getInputSurface() {
  return mInputSurfacePromise.get_future().get();
}
363
// Blocks until a task is available or an exit was requested. Returns the
// oldest queued task, or nullptr when the thread should exit.
std::unique_ptr<ProcessCaptureRequestTask>
VirtualCameraRenderThread::dequeueTask() {
  std::unique_lock<std::mutex> lock(mLock);
  // Clang's thread safety analysis doesn't perform alias analysis,
  // so it doesn't support moveable std::unique_lock.
  //
  // Lock assertion below is basically explicit declaration that
  // the lock is held in this scope, which is true, since it's only
  // released during waiting inside mCondVar.wait calls.
  ScopedLockAssertion lockAssertion(mLock);

  mCondVar.wait(lock, [this]() REQUIRES(mLock) {
    return mPendingExit || !mQueue.empty();
  });
  // Exit takes priority over any tasks still in the queue.
  if (mPendingExit) {
    return nullptr;
  }
  std::unique_ptr<ProcessCaptureRequestTask> task = std::move(mQueue.front());
  mQueue.pop_front();
  return task;
}
385
386void VirtualCameraRenderThread::threadLoop() {
387 ALOGV("Render thread starting");
388
389 mEglDisplayContext = std::make_unique<EglDisplayContext>();
Jan Sebechlebsky042d1fb2023-12-12 16:37:00 +0100390 mEglTextureYuvProgram =
391 std::make_unique<EglTextureProgram>(EglTextureProgram::TextureFormat::YUV);
392 mEglTextureRgbProgram = std::make_unique<EglTextureProgram>(
393 EglTextureProgram::TextureFormat::RGBA);
Jan Sebechlebskybb01c1d2024-02-12 11:41:37 +0100394 mEglSurfaceTexture = std::make_unique<EglSurfaceTexture>(
395 mInputSurfaceSize.width, mInputSurfaceSize.height);
Jan Sebechlebsky6402fef2024-03-25 16:30:26 +0100396
397 sp<Surface> inputSurface = mEglSurfaceTexture->getSurface();
398 if (mTestMode) {
399 inputSurface->connect(NATIVE_WINDOW_API_CPU, false, nullptr);
400 }
Jan Sebechlebsky5cb39962023-11-22 17:33:07 +0100401 mInputSurfacePromise.set_value(mEglSurfaceTexture->getSurface());
402
403 while (std::unique_ptr<ProcessCaptureRequestTask> task = dequeueTask()) {
404 processCaptureRequest(*task);
405 }
406
Jan Sebechlebsky06b36672024-03-18 11:52:35 +0100407 // Destroy EGL utilities still on the render thread.
408 mEglSurfaceTexture.reset();
409 mEglTextureRgbProgram.reset();
410 mEglTextureYuvProgram.reset();
411 mEglDisplayContext.reset();
412
Jan Sebechlebsky5cb39962023-11-22 17:33:07 +0100413 ALOGV("Render thread exiting");
414}
415
416void VirtualCameraRenderThread::processCaptureRequest(
417 const ProcessCaptureRequestTask& request) {
418 const std::chrono::nanoseconds timestamp =
419 std::chrono::duration_cast<std::chrono::nanoseconds>(
420 std::chrono::steady_clock::now().time_since_epoch());
421
422 CaptureResult captureResult;
423 captureResult.fmqResultSize = 0;
424 captureResult.frameNumber = request.getFrameNumber();
Jan Sebechlebskyb0d8cab2023-11-28 10:55:04 +0100425 // Partial result needs to be set to 1 when metadata are present.
Jan Sebechlebsky5cb39962023-11-22 17:33:07 +0100426 captureResult.partialResult = 1;
427 captureResult.inputBuffer.streamId = -1;
428 captureResult.physicalCameraMetadata.resize(0);
Jan Sebechlebsky4ce32082024-02-14 16:02:11 +0100429 captureResult.result = createCaptureResultMetadata(
430 timestamp, request.getRequestSettings(), mReportedSensorSize);
Jan Sebechlebsky5cb39962023-11-22 17:33:07 +0100431
432 const std::vector<CaptureRequestBuffer>& buffers = request.getBuffers();
433 captureResult.outputBuffers.resize(buffers.size());
434
435 if (mTestMode) {
436 // In test mode let's just render something to the Surface ourselves.
437 renderTestPatternYCbCr420(mEglSurfaceTexture->getSurface(),
438 request.getFrameNumber());
439 }
440
441 mEglSurfaceTexture->updateTexture();
442
443 for (int i = 0; i < buffers.size(); ++i) {
444 const CaptureRequestBuffer& reqBuffer = buffers[i];
445 StreamBuffer& resBuffer = captureResult.outputBuffers[i];
446 resBuffer.streamId = reqBuffer.getStreamId();
447 resBuffer.bufferId = reqBuffer.getBufferId();
448 resBuffer.status = BufferStatus::OK;
449
450 const std::optional<Stream> streamConfig =
451 mSessionContext.getStreamConfig(reqBuffer.getStreamId());
452
453 if (!streamConfig.has_value()) {
454 resBuffer.status = BufferStatus::ERROR;
455 continue;
456 }
457
458 auto status = streamConfig->format == PixelFormat::BLOB
Jan Sebechlebsky4ce32082024-02-14 16:02:11 +0100459 ? renderIntoBlobStreamBuffer(
460 reqBuffer.getStreamId(), reqBuffer.getBufferId(),
Vadim Caenc0aff132024-03-12 17:20:07 +0100461 captureResult.result, request.getRequestSettings(),
462 reqBuffer.getFence())
Jan Sebechlebsky5cb39962023-11-22 17:33:07 +0100463 : renderIntoImageStreamBuffer(reqBuffer.getStreamId(),
464 reqBuffer.getBufferId(),
465 reqBuffer.getFence());
466 if (!status.isOk()) {
467 resBuffer.status = BufferStatus::ERROR;
468 }
469 }
470
471 std::vector<NotifyMsg> notifyMsg{
472 createShutterNotifyMsg(request.getFrameNumber(), timestamp)};
473 for (const StreamBuffer& resBuffer : captureResult.outputBuffers) {
474 if (resBuffer.status != BufferStatus::OK) {
475 notifyMsg.push_back(createBufferErrorNotifyMsg(request.getFrameNumber(),
476 resBuffer.streamId));
477 }
478 }
479
480 auto status = mCameraDeviceCallback->notify(notifyMsg);
481 if (!status.isOk()) {
482 ALOGE("%s: notify call failed: %s", __func__,
483 status.getDescription().c_str());
484 return;
485 }
486
487 std::vector<::aidl::android::hardware::camera::device::CaptureResult>
488 captureResults(1);
489 captureResults[0] = std::move(captureResult);
490
491 status = mCameraDeviceCallback->processCaptureResult(captureResults);
492 if (!status.isOk()) {
493 ALOGE("%s: processCaptureResult call failed: %s", __func__,
494 status.getDescription().c_str());
495 return;
496 }
497
Vadim Caen324fcfb2024-03-21 16:49:08 +0100498 ALOGV("%s: Successfully called processCaptureResult", __func__);
Jan Sebechlebsky5cb39962023-11-22 17:33:07 +0100499}
500
501void VirtualCameraRenderThread::flushCaptureRequest(
502 const ProcessCaptureRequestTask& request) {
Jan Sebechlebsky5cb39962023-11-22 17:33:07 +0100503 CaptureResult captureResult;
504 captureResult.fmqResultSize = 0;
505 captureResult.frameNumber = request.getFrameNumber();
506 captureResult.inputBuffer.streamId = -1;
Jan Sebechlebsky5cb39962023-11-22 17:33:07 +0100507
508 const std::vector<CaptureRequestBuffer>& buffers = request.getBuffers();
509 captureResult.outputBuffers.resize(buffers.size());
510
511 for (int i = 0; i < buffers.size(); ++i) {
512 const CaptureRequestBuffer& reqBuffer = buffers[i];
513 StreamBuffer& resBuffer = captureResult.outputBuffers[i];
514 resBuffer.streamId = reqBuffer.getStreamId();
515 resBuffer.bufferId = reqBuffer.getBufferId();
516 resBuffer.status = BufferStatus::ERROR;
517 sp<Fence> fence = reqBuffer.getFence();
518 if (fence != nullptr && fence->isValid()) {
519 resBuffer.releaseFence.fds.emplace_back(fence->dup());
520 }
521 }
522
523 auto status = mCameraDeviceCallback->notify(
524 {createRequestErrorNotifyMsg(request.getFrameNumber())});
525 if (!status.isOk()) {
526 ALOGE("%s: notify call failed: %s", __func__,
527 status.getDescription().c_str());
528 return;
529 }
530
531 std::vector<::aidl::android::hardware::camera::device::CaptureResult>
532 captureResults(1);
533 captureResults[0] = std::move(captureResult);
534
535 status = mCameraDeviceCallback->processCaptureResult(captureResults);
536 if (!status.isOk()) {
537 ALOGE("%s: processCaptureResult call failed: %s", __func__,
538 status.getDescription().c_str());
539 }
540}
541
Jan Sebechlebsky4ce32082024-02-14 16:02:11 +0100542std::vector<uint8_t> VirtualCameraRenderThread::createThumbnail(
543 const Resolution resolution, const int quality) {
544 if (resolution.width == 0 || resolution.height == 0) {
545 ALOGV("%s: Skipping thumbnail creation, zero size requested", __func__);
546 return {};
547 }
548
549 ALOGV("%s: Creating thumbnail with size %d x %d, quality %d", __func__,
550 resolution.width, resolution.height, quality);
Jan Sebechlebskyb3771312024-03-15 10:38:02 +0100551 Resolution bufferSize = roundTo2DctSize(resolution);
Jan Sebechlebsky4ce32082024-02-14 16:02:11 +0100552 std::shared_ptr<EglFrameBuffer> framebuffer = allocateTemporaryFramebuffer(
Jan Sebechlebskyb3771312024-03-15 10:38:02 +0100553 mEglDisplayContext->getEglDisplay(), bufferSize.width, bufferSize.height);
Jan Sebechlebsky4ce32082024-02-14 16:02:11 +0100554 if (framebuffer == nullptr) {
555 ALOGE(
556 "Failed to allocate temporary framebuffer for JPEG thumbnail "
557 "compression");
558 return {};
559 }
560
561 // TODO(b/324383963) Add support for letterboxing if the thumbnail size
562 // doesn't correspond
563 // to input texture aspect ratio.
Jan Sebechlebskyb3771312024-03-15 10:38:02 +0100564 if (!renderIntoEglFramebuffer(*framebuffer, /*fence=*/nullptr,
565 Rect(resolution.width, resolution.height))
566 .isOk()) {
Jan Sebechlebsky4ce32082024-02-14 16:02:11 +0100567 ALOGE(
568 "Failed to render input texture into temporary framebuffer for JPEG "
569 "thumbnail");
570 return {};
571 }
572
Jan Sebechlebsky4ce32082024-02-14 16:02:11 +0100573 std::vector<uint8_t> compressedThumbnail;
574 compressedThumbnail.resize(kJpegThumbnailBufferSize);
Jan Sebechlebskyb3771312024-03-15 10:38:02 +0100575 ALOGE("%s: Compressing thumbnail %d x %d", __func__, resolution.width,
576 resolution.height);
577 std::optional<size_t> compressedSize =
578 compressJpeg(resolution.width, resolution.height, quality,
579 framebuffer->getHardwareBuffer(), {},
580 compressedThumbnail.size(), compressedThumbnail.data());
Jan Sebechlebsky4ce32082024-02-14 16:02:11 +0100581 if (!compressedSize.has_value()) {
582 ALOGE("%s: Failed to compress jpeg thumbnail", __func__);
583 return {};
584 }
585 compressedThumbnail.resize(compressedSize.value());
586 return compressedThumbnail;
587}
588
// Renders the current input texture into a BLOB (JPEG) stream buffer:
// the texture is rendered into a temporary YUV framebuffer, JPEG-compressed
// together with an EXIF APP1 segment (including optional thumbnail), and the
// result is written into the locked output buffer with the trailing
// CameraBlob transfer header.
ndk::ScopedAStatus VirtualCameraRenderThread::renderIntoBlobStreamBuffer(
    const int streamId, const int bufferId, const CameraMetadata& resultMetadata,
    const RequestSettings& requestSettings, sp<Fence> fence) {
  std::shared_ptr<AHardwareBuffer> hwBuffer =
      mSessionContext.fetchHardwareBuffer(streamId, bufferId);
  if (hwBuffer == nullptr) {
    ALOGE("%s: Failed to fetch hardware buffer %d for streamId %d", __func__,
          bufferId, streamId);
    return cameraStatus(Status::INTERNAL_ERROR);
  }

  std::optional<Stream> stream = mSessionContext.getStreamConfig(streamId);
  if (!stream.has_value()) {
    ALOGE("%s, failed to fetch information about stream %d", __func__, streamId);
    return cameraStatus(Status::INTERNAL_ERROR);
  }

  ALOGV("%s: Rendering JPEG with size %d x %d, quality %d", __func__,
        stream->width, stream->height, requestSettings.jpegQuality);

  // Let's create YUV framebuffer and render the surface into this.
  // This will take care about rescaling as well as potential format conversion.
  // The buffer dimensions need to be rounded to nearest multiple of JPEG DCT
  // size, however we pass the viewport corresponding to size of the stream so
  // the image will be only rendered to the area corresponding to the stream
  // size.
  Resolution bufferSize =
      roundTo2DctSize(Resolution(stream->width, stream->height));
  std::shared_ptr<EglFrameBuffer> framebuffer = allocateTemporaryFramebuffer(
      mEglDisplayContext->getEglDisplay(), bufferSize.width, bufferSize.height);
  if (framebuffer == nullptr) {
    ALOGE("Failed to allocate temporary framebuffer for JPEG compression");
    return cameraStatus(Status::INTERNAL_ERROR);
  }

  // Render into temporary framebuffer.
  ndk::ScopedAStatus status = renderIntoEglFramebuffer(
      *framebuffer, /*fence=*/nullptr, Rect(stream->width, stream->height));
  if (!status.isOk()) {
    ALOGE("Failed to render input texture into temporary framebuffer");
    return status;
  }

  // Lock the output buffer planes for CPU access; the guard unlocks on scope
  // exit. The supplied fence is passed to the lock.
  PlanesLockGuard planesLock(hwBuffer, AHARDWAREBUFFER_USAGE_CPU_READ_RARELY,
                             fence);
  if (planesLock.getStatus() != OK) {
    return cameraStatus(Status::INTERNAL_ERROR);
  }

  // Compress directly into the output plane, reserving the last
  // sizeof(CameraBlob) bytes for the transfer header written below.
  std::vector<uint8_t> app1ExifData =
      createExif(Resolution(stream->width, stream->height), resultMetadata,
                 createThumbnail(requestSettings.thumbnailResolution,
                                 requestSettings.thumbnailJpegQuality));
  std::optional<size_t> compressedSize = compressJpeg(
      stream->width, stream->height, requestSettings.jpegQuality,
      framebuffer->getHardwareBuffer(), app1ExifData,
      stream->bufferSize - sizeof(CameraBlob), (*planesLock).planes[0].data);

  if (!compressedSize.has_value()) {
    ALOGE("%s: Failed to compress JPEG image", __func__);
    return cameraStatus(Status::INTERNAL_ERROR);
  }

  // The CameraBlob header (blob id + actual JPEG size) goes at the very end
  // of the buffer.
  CameraBlob cameraBlob{
      .blobId = CameraBlobId::JPEG,
      .blobSizeBytes = static_cast<int32_t>(compressedSize.value())};

  memcpy(reinterpret_cast<uint8_t*>((*planesLock).planes[0].data) +
             (stream->bufferSize - sizeof(cameraBlob)),
         &cameraBlob, sizeof(cameraBlob));

  ALOGV("%s: Successfully compressed JPEG image, resulting size %zu B",
        __func__, compressedSize.value());

  return ndk::ScopedAStatus::ok();
}
665
666ndk::ScopedAStatus VirtualCameraRenderThread::renderIntoImageStreamBuffer(
667 int streamId, int bufferId, sp<Fence> fence) {
668 ALOGV("%s", __func__);
669
670 const std::chrono::nanoseconds before =
671 std::chrono::duration_cast<std::chrono::nanoseconds>(
672 std::chrono::steady_clock::now().time_since_epoch());
673
674 // Render test pattern using EGL.
675 std::shared_ptr<EglFrameBuffer> framebuffer =
676 mSessionContext.fetchOrCreateEglFramebuffer(
677 mEglDisplayContext->getEglDisplay(), streamId, bufferId);
678 if (framebuffer == nullptr) {
679 ALOGE(
680 "%s: Failed to get EGL framebuffer corresponding to buffer id "
681 "%d for streamId %d",
682 __func__, bufferId, streamId);
683 return cameraStatus(Status::ILLEGAL_ARGUMENT);
684 }
685
Jan Sebechlebsky042d1fb2023-12-12 16:37:00 +0100686 ndk::ScopedAStatus status = renderIntoEglFramebuffer(*framebuffer, fence);
Jan Sebechlebsky5cb39962023-11-22 17:33:07 +0100687
688 const std::chrono::nanoseconds after =
689 std::chrono::duration_cast<std::chrono::nanoseconds>(
690 std::chrono::steady_clock::now().time_since_epoch());
691
692 ALOGV("Rendering to buffer %d, stream %d took %lld ns", bufferId, streamId,
693 after.count() - before.count());
694
695 return ndk::ScopedAStatus::ok();
696}
697
// Renders the current input texture into the given EGL framebuffer.
//
// framebuffer - target framebuffer; drawn to between beforeDraw/afterDraw.
// fence - optional acquire fence to wait on (bounded by kAcquireFenceTimeout)
//     before touching the buffer.
// viewport - optional sub-rectangle to render into; defaults to the full
//     framebuffer size.
ndk::ScopedAStatus VirtualCameraRenderThread::renderIntoEglFramebuffer(
    EglFrameBuffer& framebuffer, sp<Fence> fence, std::optional<Rect> viewport) {
  ALOGV("%s", __func__);
  // Wait for fence to clear.
  if (fence != nullptr && fence->isValid()) {
    status_t ret = fence->wait(kAcquireFenceTimeout.count());
    if (ret != 0) {
      ALOGE("Timeout while waiting for the acquire fence for buffer");
      return cameraStatus(Status::INTERNAL_ERROR);
    }
  }

  mEglDisplayContext->makeCurrent();
  framebuffer.beforeDraw();

  // Restrict rendering to the requested viewport (or the whole framebuffer).
  Rect viewportRect =
      viewport.value_or(Rect(framebuffer.getWidth(), framebuffer.getHeight()));
  glViewport(viewportRect.leftTop().x, viewportRect.leftTop().y,
             viewportRect.getWidth(), viewportRect.getHeight());

  sp<GraphicBuffer> textureBuffer = mEglSurfaceTexture->getCurrentBuffer();
  if (textureBuffer == nullptr) {
    // If there's no current buffer, nothing was written to the surface and
    // texture is not initialized yet. Let's clear the framebuffer to a solid
    // color instead of rendering the texture.
    // NOTE(review): clear color (0, 0.5, 0.5) is teal, not black - confirm
    // this is intentional.
    glClearColor(0.0f, 0.5f, 0.5f, 0.0f);
    glClear(GL_COLOR_BUFFER_BIT);
  } else {
    // Pick the texture program matching the source buffer's pixel format.
    const bool renderSuccess =
        isYuvFormat(static_cast<PixelFormat>(textureBuffer->getPixelFormat()))
            ? mEglTextureYuvProgram->draw(
                  mEglSurfaceTexture->getTextureId(),
                  mEglSurfaceTexture->getTransformMatrix())
            : mEglTextureRgbProgram->draw(
                  mEglSurfaceTexture->getTextureId(),
                  mEglSurfaceTexture->getTransformMatrix());
    if (!renderSuccess) {
      ALOGE("%s: Failed to render texture", __func__);
      return cameraStatus(Status::INTERNAL_ERROR);
    }
  }
  framebuffer.afterDraw();

  return ndk::ScopedAStatus::ok();
}
743
Jan Sebechlebsky5cb39962023-11-22 17:33:07 +0100744} // namespace virtualcamera
745} // namespace companion
746} // namespace android