blob: f68efe2c1529d229f1803bab8e234d7a1ceba1c5 [file] [log] [blame]
Jan Sebechlebsky5cb39962023-11-22 17:33:07 +01001/*
2 * Copyright (C) 2023 The Android Open Source Project
3 *
4 * Licensed under the Apache License, Version 2.0 (the "License");
5 * you may not use this file except in compliance with the License.
6 * You may obtain a copy of the License at
7 *
8 * http://www.apache.org/licenses/LICENSE-2.0
9 *
10 * Unless required by applicable law or agreed to in writing, software
11 * distributed under the License is distributed on an "AS IS" BASIS,
12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 * See the License for the specific language governing permissions and
14 * limitations under the License.
15 */
16
17#define LOG_TAG "VirtualCameraRenderThread"
18#include "VirtualCameraRenderThread.h"
19
20#include <chrono>
Jan Sebechlebsky042d1fb2023-12-12 16:37:00 +010021#include <cstdint>
Jan Sebechlebsky4ce32082024-02-14 16:02:11 +010022#include <cstring>
Jan Sebechlebsky5cb39962023-11-22 17:33:07 +010023#include <future>
24#include <memory>
25#include <mutex>
26#include <thread>
Jan Sebechlebsky4ce32082024-02-14 16:02:11 +010027#include <vector>
Jan Sebechlebsky5cb39962023-11-22 17:33:07 +010028
Jan Sebechlebsky4ce32082024-02-14 16:02:11 +010029#include "Exif.h"
Jan Sebechlebsky9ae496f2023-12-05 15:56:28 +010030#include "GLES/gl.h"
Biswarup Pal8ad8bc52024-02-08 13:41:44 +000031#include "VirtualCameraDevice.h"
Jan Sebechlebsky5cb39962023-11-22 17:33:07 +010032#include "VirtualCameraSessionContext.h"
33#include "aidl/android/hardware/camera/common/Status.h"
34#include "aidl/android/hardware/camera/device/BufferStatus.h"
Jan Sebechlebsky4ce32082024-02-14 16:02:11 +010035#include "aidl/android/hardware/camera/device/CameraBlob.h"
36#include "aidl/android/hardware/camera/device/CameraBlobId.h"
Jan Sebechlebsky5cb39962023-11-22 17:33:07 +010037#include "aidl/android/hardware/camera/device/CameraMetadata.h"
38#include "aidl/android/hardware/camera/device/CaptureResult.h"
39#include "aidl/android/hardware/camera/device/ErrorCode.h"
40#include "aidl/android/hardware/camera/device/ICameraDeviceCallback.h"
41#include "aidl/android/hardware/camera/device/NotifyMsg.h"
42#include "aidl/android/hardware/camera/device/ShutterMsg.h"
43#include "aidl/android/hardware/camera/device/StreamBuffer.h"
44#include "android-base/thread_annotations.h"
45#include "android/binder_auto_utils.h"
46#include "android/hardware_buffer.h"
Vadim Caenc0aff132024-03-12 17:20:07 +010047#include "system/camera_metadata.h"
Jan Sebechlebsky042d1fb2023-12-12 16:37:00 +010048#include "ui/GraphicBuffer.h"
Jan Sebechlebskyb3771312024-03-15 10:38:02 +010049#include "ui/Rect.h"
Jan Sebechlebsky5cb39962023-11-22 17:33:07 +010050#include "util/EglFramebuffer.h"
51#include "util/JpegUtil.h"
Jan Sebechlebsky4ce32082024-02-14 16:02:11 +010052#include "util/MetadataUtil.h"
Jan Sebechlebsky5cb39962023-11-22 17:33:07 +010053#include "util/TestPatternHelper.h"
54#include "util/Util.h"
55#include "utils/Errors.h"
56
57namespace android {
58namespace companion {
59namespace virtualcamera {
60
61using ::aidl::android::hardware::camera::common::Status;
62using ::aidl::android::hardware::camera::device::BufferStatus;
Jan Sebechlebsky4ce32082024-02-14 16:02:11 +010063using ::aidl::android::hardware::camera::device::CameraBlob;
64using ::aidl::android::hardware::camera::device::CameraBlobId;
Jan Sebechlebsky5cb39962023-11-22 17:33:07 +010065using ::aidl::android::hardware::camera::device::CameraMetadata;
66using ::aidl::android::hardware::camera::device::CaptureResult;
67using ::aidl::android::hardware::camera::device::ErrorCode;
68using ::aidl::android::hardware::camera::device::ErrorMsg;
69using ::aidl::android::hardware::camera::device::ICameraDeviceCallback;
70using ::aidl::android::hardware::camera::device::NotifyMsg;
71using ::aidl::android::hardware::camera::device::ShutterMsg;
72using ::aidl::android::hardware::camera::device::Stream;
73using ::aidl::android::hardware::camera::device::StreamBuffer;
74using ::aidl::android::hardware::graphics::common::PixelFormat;
75using ::android::base::ScopedLockAssertion;
76
Jan Sebechlebsky4ce32082024-02-14 16:02:11 +010077using ::android::hardware::camera::common::helper::ExifUtils;
78
Jan Sebechlebsky5cb39962023-11-22 17:33:07 +010079namespace {
80
81using namespace std::chrono_literals;
82
// Maximum time to wait for a buffer's acquire fence to signal before giving
// up on rendering into that buffer.
static constexpr std::chrono::milliseconds kAcquireFenceTimeout = 500ms;

// See REQUEST_PIPELINE_DEPTH in CaptureResult.java.
// This roughly corresponds to frame latency, we set to
// documented minimum of 2.
static constexpr uint8_t kPipelineDepth = 2;

// Scratch buffer size for JPEG thumbnail compression; the compressed
// thumbnail is shrunk to its actual size afterwards.
static constexpr size_t kJpegThumbnailBufferSize = 32 * 1024;  // 32 KiB
91
// Builds the per-frame result metadata attached to every CaptureResult.
//
// timestamp is reported as the sensor timestamp, requestSettings echoes back
// the request-controlled keys (capture intent, JPEG quality/orientation,
// thumbnail size/quality, optional FPS range and GPS coordinates) and
// reportedSensorSize determines the reported crop region.
CameraMetadata createCaptureResultMetadata(
    const std::chrono::nanoseconds timestamp,
    const RequestSettings& requestSettings,
    const Resolution reportedSensorSize) {
  // All of the keys used in the response needs to be referenced in
  // availableResultKeys in CameraCharacteristics (see initCameraCharacteristics
  // in VirtualCameraDevice.cc).
  MetadataBuilder builder =
      MetadataBuilder()
          .setAberrationCorrectionMode(
              ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF)
          .setControlAeAvailableAntibandingModes(
              {ANDROID_CONTROL_AE_ANTIBANDING_MODE_OFF})
          .setControlAeAntibandingMode(ANDROID_CONTROL_AE_ANTIBANDING_MODE_OFF)
          .setControlAeExposureCompensation(0)
          .setControlAeLockAvailable(false)
          .setControlAeLock(ANDROID_CONTROL_AE_LOCK_OFF)
          .setControlAeMode(ANDROID_CONTROL_AE_MODE_ON)
          .setControlAePrecaptureTrigger(
              ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER_IDLE)
          .setControlAeState(ANDROID_CONTROL_AE_STATE_INACTIVE)
          .setControlAfMode(ANDROID_CONTROL_AF_MODE_OFF)
          .setControlAfTrigger(ANDROID_CONTROL_AF_TRIGGER_IDLE)
          .setControlAfState(ANDROID_CONTROL_AF_STATE_INACTIVE)
          .setControlAwbMode(ANDROID_CONTROL_AWB_MODE_AUTO)
          .setControlAwbLock(ANDROID_CONTROL_AWB_LOCK_OFF)
          .setControlAwbState(ANDROID_CONTROL_AWB_STATE_INACTIVE)
          .setControlCaptureIntent(requestSettings.captureIntent)
          .setControlEffectMode(ANDROID_CONTROL_EFFECT_MODE_OFF)
          .setControlMode(ANDROID_CONTROL_MODE_AUTO)
          .setControlSceneMode(ANDROID_CONTROL_SCENE_MODE_DISABLED)
          .setControlVideoStabilizationMode(
              ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_OFF)
          .setCropRegion(0, 0, reportedSensorSize.width,
                         reportedSensorSize.height)
          .setFaceDetectMode(ANDROID_STATISTICS_FACE_DETECT_MODE_OFF)
          .setFlashState(ANDROID_FLASH_STATE_UNAVAILABLE)
          .setFlashMode(ANDROID_FLASH_MODE_OFF)
          .setFocalLength(VirtualCameraDevice::kFocalLength)
          .setJpegQuality(requestSettings.jpegQuality)
          .setJpegOrientation(requestSettings.jpegOrientation)
          .setJpegThumbnailSize(requestSettings.thumbnailResolution.width,
                                requestSettings.thumbnailResolution.height)
          .setJpegThumbnailQuality(requestSettings.thumbnailJpegQuality)
          .setLensOpticalStabilizationMode(
              ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF)
          .setNoiseReductionMode(ANDROID_NOISE_REDUCTION_MODE_OFF)
          .setPipelineDepth(kPipelineDepth)
          .setSensorTimestamp(timestamp)
          .setStatisticsHotPixelMapMode(
              ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_OFF)
          .setStatisticsLensShadingMapMode(
              ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_OFF)
          .setStatisticsSceneFlicker(ANDROID_STATISTICS_SCENE_FLICKER_NONE);

  // FPS range and GPS coordinates are optional in the request and are only
  // echoed back when present.
  if (requestSettings.fpsRange.has_value()) {
    builder.setControlAeTargetFpsRange(requestSettings.fpsRange.value());
  }

  if (requestSettings.gpsCoordinates.has_value()) {
    const GpsCoordinates& coordinates = requestSettings.gpsCoordinates.value();
    builder.setJpegGpsCoordinates(coordinates);
  }

  std::unique_ptr<CameraMetadata> metadata = builder.build();

  if (metadata == nullptr) {
    ALOGE("%s: Failed to build capture result metadata", __func__);
    return CameraMetadata();
  }
  return std::move(*metadata);
}
164
165NotifyMsg createShutterNotifyMsg(int frameNumber,
166 std::chrono::nanoseconds timestamp) {
167 NotifyMsg msg;
168 msg.set<NotifyMsg::Tag::shutter>(ShutterMsg{
169 .frameNumber = frameNumber,
170 .timestamp = timestamp.count(),
171 });
172 return msg;
173}
174
175NotifyMsg createBufferErrorNotifyMsg(int frameNumber, int streamId) {
176 NotifyMsg msg;
177 msg.set<NotifyMsg::Tag::error>(ErrorMsg{.frameNumber = frameNumber,
178 .errorStreamId = streamId,
179 .errorCode = ErrorCode::ERROR_BUFFER});
180 return msg;
181}
182
183NotifyMsg createRequestErrorNotifyMsg(int frameNumber) {
184 NotifyMsg msg;
185 msg.set<NotifyMsg::Tag::error>(ErrorMsg{
Jan Sebechlebskyb0d8cab2023-11-28 10:55:04 +0100186 .frameNumber = frameNumber,
187 // errorStreamId needs to be set to -1 for ERROR_REQUEST
188 // (not tied to specific stream).
189 .errorStreamId = -1,
190 .errorCode = ErrorCode::ERROR_REQUEST});
Jan Sebechlebsky5cb39962023-11-22 17:33:07 +0100191 return msg;
192}
193
Jan Sebechlebsky042d1fb2023-12-12 16:37:00 +0100194std::shared_ptr<EglFrameBuffer> allocateTemporaryFramebuffer(
195 EGLDisplay eglDisplay, const uint width, const int height) {
196 const AHardwareBuffer_Desc desc{
197 .width = static_cast<uint32_t>(width),
198 .height = static_cast<uint32_t>(height),
199 .layers = 1,
200 .format = AHARDWAREBUFFER_FORMAT_Y8Cb8Cr8_420,
201 .usage = AHARDWAREBUFFER_USAGE_GPU_FRAMEBUFFER |
202 AHARDWAREBUFFER_USAGE_CPU_READ_OFTEN,
203 .rfu0 = 0,
204 .rfu1 = 0};
205
206 AHardwareBuffer* hwBufferPtr;
207 int status = AHardwareBuffer_allocate(&desc, &hwBufferPtr);
208 if (status != NO_ERROR) {
209 ALOGE(
210 "%s: Failed to allocate hardware buffer for temporary framebuffer: %d",
211 __func__, status);
212 return nullptr;
213 }
214
215 return std::make_shared<EglFrameBuffer>(
216 eglDisplay,
217 std::shared_ptr<AHardwareBuffer>(hwBufferPtr, AHardwareBuffer_release));
218}
219
220bool isYuvFormat(const PixelFormat pixelFormat) {
221 switch (static_cast<android_pixel_format_t>(pixelFormat)) {
222 case HAL_PIXEL_FORMAT_YCBCR_422_I:
223 case HAL_PIXEL_FORMAT_YCBCR_422_SP:
224 case HAL_PIXEL_FORMAT_Y16:
225 case HAL_PIXEL_FORMAT_YV12:
226 case HAL_PIXEL_FORMAT_YCBCR_420_888:
227 return true;
228 default:
229 return false;
230 }
231}
232
Jan Sebechlebsky4ce32082024-02-14 16:02:11 +0100233std::vector<uint8_t> createExif(
Vadim Caenc0aff132024-03-12 17:20:07 +0100234 Resolution imageSize, const CameraMetadata resultMetadata,
235 const std::vector<uint8_t>& compressedThumbnail = {}) {
Jan Sebechlebsky4ce32082024-02-14 16:02:11 +0100236 std::unique_ptr<ExifUtils> exifUtils(ExifUtils::create());
237 exifUtils->initialize();
Vadim Caenc0aff132024-03-12 17:20:07 +0100238
239 // Make a copy of the metadata in order to converting it the HAL metadata
240 // format (as opposed to the AIDL class) and use the setFromMetadata method
241 // from ExifUtil
242 camera_metadata_t* rawSettings =
243 clone_camera_metadata((camera_metadata_t*)resultMetadata.metadata.data());
244 if (rawSettings != nullptr) {
245 android::hardware::camera::common::helper::CameraMetadata halMetadata(
246 rawSettings);
247 exifUtils->setFromMetadata(halMetadata, imageSize.width, imageSize.height);
248 }
249 exifUtils->setMake(VirtualCameraDevice::kDefaultMakeAndModel);
250 exifUtils->setModel(VirtualCameraDevice::kDefaultMakeAndModel);
251 exifUtils->setFlash(0);
Jan Sebechlebsky4ce32082024-02-14 16:02:11 +0100252
253 std::vector<uint8_t> app1Data;
254
255 size_t thumbnailDataSize = compressedThumbnail.size();
256 const void* thumbnailData =
257 thumbnailDataSize > 0
258 ? reinterpret_cast<const void*>(compressedThumbnail.data())
259 : nullptr;
260
261 if (!exifUtils->generateApp1(thumbnailData, thumbnailDataSize)) {
262 ALOGE("%s: Failed to generate APP1 segment for EXIF metadata", __func__);
263 return app1Data;
264 }
265
266 const uint8_t* data = exifUtils->getApp1Buffer();
267 const size_t size = exifUtils->getApp1Length();
268
269 app1Data.insert(app1Data.end(), data, data + size);
270 return app1Data;
271}
272
Jan Sebechlebsky5cb39962023-11-22 17:33:07 +0100273} // namespace
274
// Wraps a single output buffer of a capture request: the stream it belongs
// to, its buffer id within that stream, and the acquire fence to wait on
// before touching the buffer.
CaptureRequestBuffer::CaptureRequestBuffer(int streamId, int bufferId,
                                           sp<Fence> fence)
    : mStreamId(streamId), mBufferId(bufferId), mFence(fence) {
}

// Returns the id of the stream this buffer belongs to.
int CaptureRequestBuffer::getStreamId() const {
  return mStreamId;
}

// Returns the id of this buffer within its stream.
int CaptureRequestBuffer::getBufferId() const {
  return mBufferId;
}

// Returns the acquire fence associated with the buffer (may be null).
sp<Fence> CaptureRequestBuffer::getFence() const {
  return mFence;
}
291
// Constructs the render thread state; the thread itself is not launched
// here — call start() to run it.
//
// NOTE(review): sessionContext is taken by reference and stored in
// mSessionContext — the session context is presumed to outlive this
// instance; confirm against the owning session.
VirtualCameraRenderThread::VirtualCameraRenderThread(
    VirtualCameraSessionContext& sessionContext,
    const Resolution inputSurfaceSize, const Resolution reportedSensorSize,
    std::shared_ptr<ICameraDeviceCallback> cameraDeviceCallback, bool testMode)
    : mCameraDeviceCallback(cameraDeviceCallback),
      mInputSurfaceSize(inputSurfaceSize),
      mReportedSensorSize(reportedSensorSize),
      mTestMode(testMode),
      mSessionContext(sessionContext) {
}
302
// Signals the render thread to exit and joins it. Once the exit flag is
// observed, any tasks still queued are not processed (see dequeueTask).
VirtualCameraRenderThread::~VirtualCameraRenderThread() {
  stop();
  if (mThread.joinable()) {
    mThread.join();
  }
}
309
// Bundles everything needed to process one capture request: the frame
// number, the output buffers to fill, and the request settings.
ProcessCaptureRequestTask::ProcessCaptureRequestTask(
    int frameNumber, const std::vector<CaptureRequestBuffer>& requestBuffers,
    const RequestSettings& requestSettings)
    : mFrameNumber(frameNumber),
      mBuffers(requestBuffers),
      mRequestSettings(requestSettings) {
}

// Returns the frame number of this capture request.
int ProcessCaptureRequestTask::getFrameNumber() const {
  return mFrameNumber;
}

// Returns the output buffers to render into.
const std::vector<CaptureRequestBuffer>& ProcessCaptureRequestTask::getBuffers()
    const {
  return mBuffers;
}

// Returns the settings attached to this capture request.
const RequestSettings& ProcessCaptureRequestTask::getRequestSettings() const {
  return mRequestSettings;
}
330
Jan Sebechlebsky5cb39962023-11-22 17:33:07 +0100331void VirtualCameraRenderThread::enqueueTask(
332 std::unique_ptr<ProcessCaptureRequestTask> task) {
333 std::lock_guard<std::mutex> lock(mLock);
334 mQueue.emplace_back(std::move(task));
335 mCondVar.notify_one();
336}
337
// Drains the task queue, completing every queued capture request with an
// ERROR_REQUEST result instead of rendering it.
void VirtualCameraRenderThread::flush() {
  std::lock_guard<std::mutex> lock(mLock);
  while (!mQueue.empty()) {
    std::unique_ptr<ProcessCaptureRequestTask> task = std::move(mQueue.front());
    mQueue.pop_front();
    flushCaptureRequest(*task);
  }
}
346
// Launches the render thread running threadLoop. Must be called at most once
// (assigning to an already-running std::thread would terminate).
void VirtualCameraRenderThread::start() {
  mThread = std::thread(&VirtualCameraRenderThread::threadLoop, this);
}
350
// Requests the render thread to exit; does not join it (the destructor
// does). Safe to call multiple times.
void VirtualCameraRenderThread::stop() {
  {
    std::lock_guard<std::mutex> lock(mLock);
    mPendingExit = true;
    mCondVar.notify_one();
  }
}
358
// Returns the input surface backed by the render thread's surface texture.
// Blocks until the render thread has initialized its EGL state and fulfilled
// the promise (see threadLoop). Note std::promise::get_future may only be
// called once, so this accessor must be called at most once.
sp<Surface> VirtualCameraRenderThread::getInputSurface() {
  return mInputSurfacePromise.get_future().get();
}
362
// Blocks until a task is available or exit has been requested.
// Returns nullptr when the thread should exit (stop() was called); exit
// takes priority over queued work, so pending tasks are abandoned.
std::unique_ptr<ProcessCaptureRequestTask>
VirtualCameraRenderThread::dequeueTask() {
  std::unique_lock<std::mutex> lock(mLock);
  // Clang's thread safety analysis doesn't perform alias analysis,
  // so it doesn't support moveable std::unique_lock.
  //
  // Lock assertion below is basically explicit declaration that
  // the lock is held in this scope, which is true, since it's only
  // released during waiting inside mCondVar.wait calls.
  ScopedLockAssertion lockAssertion(mLock);

  mCondVar.wait(lock, [this]() REQUIRES(mLock) {
    return mPendingExit || !mQueue.empty();
  });
  if (mPendingExit) {
    return nullptr;
  }
  std::unique_ptr<ProcessCaptureRequestTask> task = std::move(mQueue.front());
  mQueue.pop_front();
  return task;
}
384
// Main loop of the render thread.
//
// All EGL objects are created — and, per the comment below, destroyed — on
// this thread. Once the input surface texture exists, the surface promise is
// fulfilled (unblocking getInputSurface) and tasks are processed until
// dequeueTask reports a pending exit.
void VirtualCameraRenderThread::threadLoop() {
  ALOGV("Render thread starting");

  mEglDisplayContext = std::make_unique<EglDisplayContext>();
  mEglTextureYuvProgram =
      std::make_unique<EglTextureProgram>(EglTextureProgram::TextureFormat::YUV);
  mEglTextureRgbProgram = std::make_unique<EglTextureProgram>(
      EglTextureProgram::TextureFormat::RGBA);
  mEglSurfaceTexture = std::make_unique<EglSurfaceTexture>(
      mInputSurfaceSize.width, mInputSurfaceSize.height);
  mInputSurfacePromise.set_value(mEglSurfaceTexture->getSurface());

  while (std::unique_ptr<ProcessCaptureRequestTask> task = dequeueTask()) {
    processCaptureRequest(*task);
  }

  // Destroy EGL utilities still on the render thread.
  mEglSurfaceTexture.reset();
  mEglTextureRgbProgram.reset();
  mEglTextureYuvProgram.reset();
  mEglDisplayContext.reset();

  ALOGV("Render thread exiting");
}
409
// Processes a single capture request: updates the input texture, renders it
// into every output buffer of the request, then reports the outcome to the
// camera service callback (shutter + per-buffer error notifications,
// followed by the CaptureResult).
void VirtualCameraRenderThread::processCaptureRequest(
    const ProcessCaptureRequestTask& request) {
  // Capture timestamp, reported both in the shutter message and as the
  // sensor timestamp in the result metadata.
  const std::chrono::nanoseconds timestamp =
      std::chrono::duration_cast<std::chrono::nanoseconds>(
          std::chrono::steady_clock::now().time_since_epoch());

  CaptureResult captureResult;
  captureResult.fmqResultSize = 0;
  captureResult.frameNumber = request.getFrameNumber();
  // Partial result needs to be set to 1 when metadata are present.
  captureResult.partialResult = 1;
  captureResult.inputBuffer.streamId = -1;
  captureResult.physicalCameraMetadata.resize(0);
  captureResult.result = createCaptureResultMetadata(
      timestamp, request.getRequestSettings(), mReportedSensorSize);

  const std::vector<CaptureRequestBuffer>& buffers = request.getBuffers();
  captureResult.outputBuffers.resize(buffers.size());

  if (mTestMode) {
    // In test mode let's just render something to the Surface ourselves.
    renderTestPatternYCbCr420(mEglSurfaceTexture->getSurface(),
                              request.getFrameNumber());
  }

  // Latch the most recent frame submitted to the input surface.
  mEglSurfaceTexture->updateTexture();

  for (int i = 0; i < buffers.size(); ++i) {
    const CaptureRequestBuffer& reqBuffer = buffers[i];
    StreamBuffer& resBuffer = captureResult.outputBuffers[i];
    resBuffer.streamId = reqBuffer.getStreamId();
    resBuffer.bufferId = reqBuffer.getBufferId();
    resBuffer.status = BufferStatus::OK;

    const std::optional<Stream> streamConfig =
        mSessionContext.getStreamConfig(reqBuffer.getStreamId());

    if (!streamConfig.has_value()) {
      resBuffer.status = BufferStatus::ERROR;
      continue;
    }

    // BLOB streams are JPEG-compressed; all other formats are rendered
    // directly into the stream buffer via EGL.
    auto status = streamConfig->format == PixelFormat::BLOB
                      ? renderIntoBlobStreamBuffer(
                            reqBuffer.getStreamId(), reqBuffer.getBufferId(),
                            captureResult.result, request.getRequestSettings(),
                            reqBuffer.getFence())
                      : renderIntoImageStreamBuffer(reqBuffer.getStreamId(),
                                                    reqBuffer.getBufferId(),
                                                    reqBuffer.getFence());
    if (!status.isOk()) {
      resBuffer.status = BufferStatus::ERROR;
    }
  }

  // Always send the shutter notification; add an ERROR_BUFFER notification
  // for every buffer that failed to render.
  std::vector<NotifyMsg> notifyMsg{
      createShutterNotifyMsg(request.getFrameNumber(), timestamp)};
  for (const StreamBuffer& resBuffer : captureResult.outputBuffers) {
    if (resBuffer.status != BufferStatus::OK) {
      notifyMsg.push_back(createBufferErrorNotifyMsg(request.getFrameNumber(),
                                                     resBuffer.streamId));
    }
  }

  auto status = mCameraDeviceCallback->notify(notifyMsg);
  if (!status.isOk()) {
    ALOGE("%s: notify call failed: %s", __func__,
          status.getDescription().c_str());
    return;
  }

  std::vector<::aidl::android::hardware::camera::device::CaptureResult>
      captureResults(1);
  captureResults[0] = std::move(captureResult);

  status = mCameraDeviceCallback->processCaptureResult(captureResults);
  if (!status.isOk()) {
    ALOGE("%s: processCaptureResult call failed: %s", __func__,
          status.getDescription().c_str());
    return;
  }

  ALOGD("%s: Successfully called processCaptureResult", __func__);
}
494
// Completes a capture request without rendering it: every output buffer is
// returned with ERROR status and the camera service is notified with
// ERROR_REQUEST.
void VirtualCameraRenderThread::flushCaptureRequest(
    const ProcessCaptureRequestTask& request) {
  CaptureResult captureResult;
  captureResult.fmqResultSize = 0;
  captureResult.frameNumber = request.getFrameNumber();
  captureResult.inputBuffer.streamId = -1;

  const std::vector<CaptureRequestBuffer>& buffers = request.getBuffers();
  captureResult.outputBuffers.resize(buffers.size());

  for (int i = 0; i < buffers.size(); ++i) {
    const CaptureRequestBuffer& reqBuffer = buffers[i];
    StreamBuffer& resBuffer = captureResult.outputBuffers[i];
    resBuffer.streamId = reqBuffer.getStreamId();
    resBuffer.bufferId = reqBuffer.getBufferId();
    resBuffer.status = BufferStatus::ERROR;
    sp<Fence> fence = reqBuffer.getFence();
    if (fence != nullptr && fence->isValid()) {
      // Forward the (unwaited) acquire fence back to the framework as the
      // release fence, since the buffer was never rendered into here.
      resBuffer.releaseFence.fds.emplace_back(fence->dup());
    }
  }

  auto status = mCameraDeviceCallback->notify(
      {createRequestErrorNotifyMsg(request.getFrameNumber())});
  if (!status.isOk()) {
    ALOGE("%s: notify call failed: %s", __func__,
          status.getDescription().c_str());
    return;
  }

  std::vector<::aidl::android::hardware::camera::device::CaptureResult>
      captureResults(1);
  captureResults[0] = std::move(captureResult);

  status = mCameraDeviceCallback->processCaptureResult(captureResults);
  if (!status.isOk()) {
    ALOGE("%s: processCaptureResult call failed: %s", __func__,
          status.getDescription().c_str());
  }
}
535
Jan Sebechlebsky4ce32082024-02-14 16:02:11 +0100536std::vector<uint8_t> VirtualCameraRenderThread::createThumbnail(
537 const Resolution resolution, const int quality) {
538 if (resolution.width == 0 || resolution.height == 0) {
539 ALOGV("%s: Skipping thumbnail creation, zero size requested", __func__);
540 return {};
541 }
542
543 ALOGV("%s: Creating thumbnail with size %d x %d, quality %d", __func__,
544 resolution.width, resolution.height, quality);
Jan Sebechlebskyb3771312024-03-15 10:38:02 +0100545 Resolution bufferSize = roundTo2DctSize(resolution);
Jan Sebechlebsky4ce32082024-02-14 16:02:11 +0100546 std::shared_ptr<EglFrameBuffer> framebuffer = allocateTemporaryFramebuffer(
Jan Sebechlebskyb3771312024-03-15 10:38:02 +0100547 mEglDisplayContext->getEglDisplay(), bufferSize.width, bufferSize.height);
Jan Sebechlebsky4ce32082024-02-14 16:02:11 +0100548 if (framebuffer == nullptr) {
549 ALOGE(
550 "Failed to allocate temporary framebuffer for JPEG thumbnail "
551 "compression");
552 return {};
553 }
554
555 // TODO(b/324383963) Add support for letterboxing if the thumbnail size
556 // doesn't correspond
557 // to input texture aspect ratio.
Jan Sebechlebskyb3771312024-03-15 10:38:02 +0100558 if (!renderIntoEglFramebuffer(*framebuffer, /*fence=*/nullptr,
559 Rect(resolution.width, resolution.height))
560 .isOk()) {
Jan Sebechlebsky4ce32082024-02-14 16:02:11 +0100561 ALOGE(
562 "Failed to render input texture into temporary framebuffer for JPEG "
563 "thumbnail");
564 return {};
565 }
566
Jan Sebechlebsky4ce32082024-02-14 16:02:11 +0100567 std::vector<uint8_t> compressedThumbnail;
568 compressedThumbnail.resize(kJpegThumbnailBufferSize);
Jan Sebechlebskyb3771312024-03-15 10:38:02 +0100569 ALOGE("%s: Compressing thumbnail %d x %d", __func__, resolution.width,
570 resolution.height);
571 std::optional<size_t> compressedSize =
572 compressJpeg(resolution.width, resolution.height, quality,
573 framebuffer->getHardwareBuffer(), {},
574 compressedThumbnail.size(), compressedThumbnail.data());
Jan Sebechlebsky4ce32082024-02-14 16:02:11 +0100575 if (!compressedSize.has_value()) {
576 ALOGE("%s: Failed to compress jpeg thumbnail", __func__);
577 return {};
578 }
579 compressedThumbnail.resize(compressedSize.value());
580 return compressedThumbnail;
581}
582
// Renders the current input texture into a BLOB (JPEG) stream buffer.
//
// The input is first rendered into a temporary YUV framebuffer (taking care
// of rescaling and format conversion), then JPEG-compressed together with
// the EXIF APP1 segment (and optional thumbnail) directly into the locked
// output buffer. A CameraBlob transport header is written at the very end of
// the buffer so the framework can find the actual payload size.
ndk::ScopedAStatus VirtualCameraRenderThread::renderIntoBlobStreamBuffer(
    const int streamId, const int bufferId, const CameraMetadata& resultMetadata,
    const RequestSettings& requestSettings, sp<Fence> fence) {
  std::shared_ptr<AHardwareBuffer> hwBuffer =
      mSessionContext.fetchHardwareBuffer(streamId, bufferId);
  if (hwBuffer == nullptr) {
    ALOGE("%s: Failed to fetch hardware buffer %d for streamId %d", __func__,
          bufferId, streamId);
    return cameraStatus(Status::INTERNAL_ERROR);
  }

  std::optional<Stream> stream = mSessionContext.getStreamConfig(streamId);
  if (!stream.has_value()) {
    ALOGE("%s, failed to fetch information about stream %d", __func__, streamId);
    return cameraStatus(Status::INTERNAL_ERROR);
  }

  ALOGV("%s: Rendering JPEG with size %d x %d, quality %d", __func__,
        stream->width, stream->height, requestSettings.jpegQuality);

  // Let's create YUV framebuffer and render the surface into this.
  // This will take care about rescaling as well as potential format conversion.
  // The buffer dimensions need to be rounded to nearest multiple of JPEG DCT
  // size, however we pass the viewport corresponding to size of the stream so
  // the image will be only rendered to the area corresponding to the stream
  // size.
  Resolution bufferSize =
      roundTo2DctSize(Resolution(stream->width, stream->height));
  std::shared_ptr<EglFrameBuffer> framebuffer = allocateTemporaryFramebuffer(
      mEglDisplayContext->getEglDisplay(), bufferSize.width, bufferSize.height);
  if (framebuffer == nullptr) {
    ALOGE("Failed to allocate temporary framebuffer for JPEG compression");
    return cameraStatus(Status::INTERNAL_ERROR);
  }

  // Render into temporary framebuffer.
  ndk::ScopedAStatus status = renderIntoEglFramebuffer(
      *framebuffer, /*fence=*/nullptr, Rect(stream->width, stream->height));
  if (!status.isOk()) {
    ALOGE("Failed to render input texture into temporary framebuffer");
    return status;
  }

  // Lock the output buffer planes for CPU access; the lock is released when
  // planesLock goes out of scope. The acquire fence is passed to the guard —
  // presumably it waits on it before locking (confirm in PlanesLockGuard).
  PlanesLockGuard planesLock(hwBuffer, AHARDWAREBUFFER_USAGE_CPU_READ_RARELY,
                             fence);
  if (planesLock.getStatus() != OK) {
    return cameraStatus(Status::INTERNAL_ERROR);
  }

  std::vector<uint8_t> app1ExifData =
      createExif(Resolution(stream->width, stream->height), resultMetadata,
                 createThumbnail(requestSettings.thumbnailResolution,
                                 requestSettings.thumbnailJpegQuality));
  // Compress into the output buffer, leaving room at the end for the
  // CameraBlob header.
  std::optional<size_t> compressedSize = compressJpeg(
      stream->width, stream->height, requestSettings.jpegQuality,
      framebuffer->getHardwareBuffer(), app1ExifData,
      stream->bufferSize - sizeof(CameraBlob), (*planesLock).planes[0].data);

  if (!compressedSize.has_value()) {
    ALOGE("%s: Failed to compress JPEG image", __func__);
    return cameraStatus(Status::INTERNAL_ERROR);
  }

  // Write the CameraBlob header at the end of the buffer, announcing the
  // actual size of the JPEG payload.
  CameraBlob cameraBlob{
      .blobId = CameraBlobId::JPEG,
      .blobSizeBytes = static_cast<int32_t>(compressedSize.value())};

  memcpy(reinterpret_cast<uint8_t*>((*planesLock).planes[0].data) +
             (stream->bufferSize - sizeof(cameraBlob)),
         &cameraBlob, sizeof(cameraBlob));

  ALOGV("%s: Successfully compressed JPEG image, resulting size %zu B",
        __func__, compressedSize.value());

  return ndk::ScopedAStatus::ok();
}
659
660ndk::ScopedAStatus VirtualCameraRenderThread::renderIntoImageStreamBuffer(
661 int streamId, int bufferId, sp<Fence> fence) {
662 ALOGV("%s", __func__);
663
664 const std::chrono::nanoseconds before =
665 std::chrono::duration_cast<std::chrono::nanoseconds>(
666 std::chrono::steady_clock::now().time_since_epoch());
667
668 // Render test pattern using EGL.
669 std::shared_ptr<EglFrameBuffer> framebuffer =
670 mSessionContext.fetchOrCreateEglFramebuffer(
671 mEglDisplayContext->getEglDisplay(), streamId, bufferId);
672 if (framebuffer == nullptr) {
673 ALOGE(
674 "%s: Failed to get EGL framebuffer corresponding to buffer id "
675 "%d for streamId %d",
676 __func__, bufferId, streamId);
677 return cameraStatus(Status::ILLEGAL_ARGUMENT);
678 }
679
Jan Sebechlebsky042d1fb2023-12-12 16:37:00 +0100680 ndk::ScopedAStatus status = renderIntoEglFramebuffer(*framebuffer, fence);
Jan Sebechlebsky5cb39962023-11-22 17:33:07 +0100681
682 const std::chrono::nanoseconds after =
683 std::chrono::duration_cast<std::chrono::nanoseconds>(
684 std::chrono::steady_clock::now().time_since_epoch());
685
686 ALOGV("Rendering to buffer %d, stream %d took %lld ns", bufferId, streamId,
687 after.count() - before.count());
688
689 return ndk::ScopedAStatus::ok();
690}
691
// Renders the current content of the input surface texture into the given
// EGL framebuffer.
//
// Waits up to kAcquireFenceTimeout for the buffer's acquire fence before
// drawing. When viewport is unset, the whole framebuffer is used. If the
// input surface has not yet received any frame, the framebuffer is cleared
// to a solid color instead of drawing the (uninitialized) texture.
ndk::ScopedAStatus VirtualCameraRenderThread::renderIntoEglFramebuffer(
    EglFrameBuffer& framebuffer, sp<Fence> fence, std::optional<Rect> viewport) {
  ALOGV("%s", __func__);
  // Wait for fence to clear.
  if (fence != nullptr && fence->isValid()) {
    status_t ret = fence->wait(kAcquireFenceTimeout.count());
    if (ret != 0) {
      ALOGE("Timeout while waiting for the acquire fence for buffer");
      return cameraStatus(Status::INTERNAL_ERROR);
    }
  }

  mEglDisplayContext->makeCurrent();
  framebuffer.beforeDraw();

  // Restrict rendering to the requested viewport (defaults to the whole
  // framebuffer).
  Rect viewportRect =
      viewport.value_or(Rect(framebuffer.getWidth(), framebuffer.getHeight()));
  glViewport(viewportRect.leftTop().x, viewportRect.leftTop().y,
             viewportRect.getWidth(), viewportRect.getHeight());

  sp<GraphicBuffer> textureBuffer = mEglSurfaceTexture->getCurrentBuffer();
  if (textureBuffer == nullptr) {
    // If there's no current buffer, nothing was written to the surface and
    // the texture is not initialized yet. Clear the framebuffer to a solid
    // color (teal) instead of rendering the texture.
    glClearColor(0.0f, 0.5f, 0.5f, 0.0f);
    glClear(GL_COLOR_BUFFER_BIT);
  } else {
    // Pick the shader program matching the input buffer's pixel format.
    const bool renderSuccess =
        isYuvFormat(static_cast<PixelFormat>(textureBuffer->getPixelFormat()))
            ? mEglTextureYuvProgram->draw(
                  mEglSurfaceTexture->getTextureId(),
                  mEglSurfaceTexture->getTransformMatrix())
            : mEglTextureRgbProgram->draw(
                  mEglSurfaceTexture->getTextureId(),
                  mEglSurfaceTexture->getTransformMatrix());
    if (!renderSuccess) {
      ALOGE("%s: Failed to render texture", __func__);
      return cameraStatus(Status::INTERNAL_ERROR);
    }
  }
  framebuffer.afterDraw();

  return ndk::ScopedAStatus::ok();
}
737
Jan Sebechlebsky5cb39962023-11-22 17:33:07 +0100738} // namespace virtualcamera
739} // namespace companion
740} // namespace android