blob: a8d2455711f6e28eebea649c34bb2f7f2c2e253d [file] [log] [blame]
Jan Sebechlebsky5cb39962023-11-22 17:33:07 +01001/*
2 * Copyright (C) 2023 The Android Open Source Project
3 *
4 * Licensed under the Apache License, Version 2.0 (the "License");
5 * you may not use this file except in compliance with the License.
6 * You may obtain a copy of the License at
7 *
8 * http://www.apache.org/licenses/LICENSE-2.0
9 *
10 * Unless required by applicable law or agreed to in writing, software
11 * distributed under the License is distributed on an "AS IS" BASIS,
12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 * See the License for the specific language governing permissions and
14 * limitations under the License.
15 */
16
17#define LOG_TAG "VirtualCameraRenderThread"
18#include "VirtualCameraRenderThread.h"
19
20#include <chrono>
Jan Sebechlebsky042d1fb2023-12-12 16:37:00 +010021#include <cstdint>
Jan Sebechlebsky4ce32082024-02-14 16:02:11 +010022#include <cstring>
Jan Sebechlebsky5cb39962023-11-22 17:33:07 +010023#include <future>
24#include <memory>
25#include <mutex>
26#include <thread>
Jan Sebechlebsky4ce32082024-02-14 16:02:11 +010027#include <vector>
Jan Sebechlebsky5cb39962023-11-22 17:33:07 +010028
Jan Sebechlebsky4ce32082024-02-14 16:02:11 +010029#include "Exif.h"
Jan Sebechlebsky9ae496f2023-12-05 15:56:28 +010030#include "GLES/gl.h"
Biswarup Pal8ad8bc52024-02-08 13:41:44 +000031#include "VirtualCameraDevice.h"
Jan Sebechlebsky5cb39962023-11-22 17:33:07 +010032#include "VirtualCameraSessionContext.h"
33#include "aidl/android/hardware/camera/common/Status.h"
34#include "aidl/android/hardware/camera/device/BufferStatus.h"
Jan Sebechlebsky4ce32082024-02-14 16:02:11 +010035#include "aidl/android/hardware/camera/device/CameraBlob.h"
36#include "aidl/android/hardware/camera/device/CameraBlobId.h"
Jan Sebechlebsky5cb39962023-11-22 17:33:07 +010037#include "aidl/android/hardware/camera/device/CameraMetadata.h"
38#include "aidl/android/hardware/camera/device/CaptureResult.h"
39#include "aidl/android/hardware/camera/device/ErrorCode.h"
40#include "aidl/android/hardware/camera/device/ICameraDeviceCallback.h"
41#include "aidl/android/hardware/camera/device/NotifyMsg.h"
42#include "aidl/android/hardware/camera/device/ShutterMsg.h"
43#include "aidl/android/hardware/camera/device/StreamBuffer.h"
44#include "android-base/thread_annotations.h"
45#include "android/binder_auto_utils.h"
46#include "android/hardware_buffer.h"
Vadim Caenc0aff132024-03-12 17:20:07 +010047#include "system/camera_metadata.h"
Jan Sebechlebsky042d1fb2023-12-12 16:37:00 +010048#include "ui/GraphicBuffer.h"
Jan Sebechlebskyb3771312024-03-15 10:38:02 +010049#include "ui/Rect.h"
Jan Sebechlebsky5cb39962023-11-22 17:33:07 +010050#include "util/EglFramebuffer.h"
51#include "util/JpegUtil.h"
Jan Sebechlebsky4ce32082024-02-14 16:02:11 +010052#include "util/MetadataUtil.h"
Jan Sebechlebsky5cb39962023-11-22 17:33:07 +010053#include "util/TestPatternHelper.h"
54#include "util/Util.h"
55#include "utils/Errors.h"
56
57namespace android {
58namespace companion {
59namespace virtualcamera {
60
61using ::aidl::android::hardware::camera::common::Status;
62using ::aidl::android::hardware::camera::device::BufferStatus;
Jan Sebechlebsky4ce32082024-02-14 16:02:11 +010063using ::aidl::android::hardware::camera::device::CameraBlob;
64using ::aidl::android::hardware::camera::device::CameraBlobId;
Jan Sebechlebsky5cb39962023-11-22 17:33:07 +010065using ::aidl::android::hardware::camera::device::CameraMetadata;
66using ::aidl::android::hardware::camera::device::CaptureResult;
67using ::aidl::android::hardware::camera::device::ErrorCode;
68using ::aidl::android::hardware::camera::device::ErrorMsg;
69using ::aidl::android::hardware::camera::device::ICameraDeviceCallback;
70using ::aidl::android::hardware::camera::device::NotifyMsg;
71using ::aidl::android::hardware::camera::device::ShutterMsg;
72using ::aidl::android::hardware::camera::device::Stream;
73using ::aidl::android::hardware::camera::device::StreamBuffer;
74using ::aidl::android::hardware::graphics::common::PixelFormat;
75using ::android::base::ScopedLockAssertion;
76
Jan Sebechlebsky4ce32082024-02-14 16:02:11 +010077using ::android::hardware::camera::common::helper::ExifUtils;
78
Jan Sebechlebsky5cb39962023-11-22 17:33:07 +010079namespace {
80
81using namespace std::chrono_literals;
82
83static constexpr std::chrono::milliseconds kAcquireFenceTimeout = 500ms;
84
Jan Sebechlebskybb01c1d2024-02-12 11:41:37 +010085// See REQUEST_PIPELINE_DEPTH in CaptureResult.java.
86// This roughly corresponds to frame latency, we set to
87// documented minimum of 2.
88static constexpr uint8_t kPipelineDepth = 2;
89
Jan Sebechlebsky4ce32082024-02-14 16:02:11 +010090static constexpr size_t kJpegThumbnailBufferSize = 32 * 1024; // 32 KiB
91
// Builds the capture-result metadata attached to every completed frame.
//
// Most 3A-related keys are hard-coded to OFF/INACTIVE values; per-request
// values (capture intent, JPEG quality/orientation, thumbnail settings,
// optional FPS range and GPS coordinates) are taken from |requestSettings|.
// |timestamp| becomes the sensor timestamp, and the crop region always
// covers the full |reportedSensorSize|.
//
// Returns an empty CameraMetadata if the builder fails.
CameraMetadata createCaptureResultMetadata(
    const std::chrono::nanoseconds timestamp,
    const RequestSettings& requestSettings,
    const Resolution reportedSensorSize) {
  // All of the keys used in the response needs to be referenced in
  // availableResultKeys in CameraCharacteristics (see initCameraCharacteristics
  // in VirtualCameraDevice.cc).
  MetadataBuilder builder =
      MetadataBuilder()
          .setAberrationCorrectionMode(
              ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF)
          .setControlAeAvailableAntibandingModes(
              {ANDROID_CONTROL_AE_ANTIBANDING_MODE_OFF})
          .setControlAeAntibandingMode(ANDROID_CONTROL_AE_ANTIBANDING_MODE_OFF)
          .setControlAeExposureCompensation(0)
          .setControlAeLockAvailable(false)
          .setControlAeLock(ANDROID_CONTROL_AE_LOCK_OFF)
          .setControlAeMode(ANDROID_CONTROL_AE_MODE_ON)
          .setControlAePrecaptureTrigger(
              ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER_IDLE)
          .setControlAeState(ANDROID_CONTROL_AE_STATE_INACTIVE)
          .setControlAfMode(ANDROID_CONTROL_AF_MODE_OFF)
          .setControlAfTrigger(ANDROID_CONTROL_AF_TRIGGER_IDLE)
          .setControlAfState(ANDROID_CONTROL_AF_STATE_INACTIVE)
          .setControlAwbMode(ANDROID_CONTROL_AWB_MODE_AUTO)
          .setControlAwbLock(ANDROID_CONTROL_AWB_LOCK_OFF)
          .setControlAwbState(ANDROID_CONTROL_AWB_STATE_INACTIVE)
          .setControlCaptureIntent(requestSettings.captureIntent)
          .setControlEffectMode(ANDROID_CONTROL_EFFECT_MODE_OFF)
          .setControlMode(ANDROID_CONTROL_MODE_AUTO)
          .setControlSceneMode(ANDROID_CONTROL_SCENE_MODE_DISABLED)
          .setControlVideoStabilizationMode(
              ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_OFF)
          // Crop region spans the whole reported sensor area (no zoom).
          .setCropRegion(0, 0, reportedSensorSize.width,
                         reportedSensorSize.height)
          .setFaceDetectMode(ANDROID_STATISTICS_FACE_DETECT_MODE_OFF)
          .setFlashState(ANDROID_FLASH_STATE_UNAVAILABLE)
          .setFlashMode(ANDROID_FLASH_MODE_OFF)
          .setFocalLength(VirtualCameraDevice::kFocalLength)
          .setJpegQuality(requestSettings.jpegQuality)
          .setJpegOrientation(requestSettings.jpegOrientation)
          .setJpegThumbnailSize(requestSettings.thumbnailResolution.width,
                                requestSettings.thumbnailResolution.height)
          .setJpegThumbnailQuality(requestSettings.thumbnailJpegQuality)
          .setLensOpticalStabilizationMode(
              ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF)
          .setNoiseReductionMode(ANDROID_NOISE_REDUCTION_MODE_OFF)
          .setPipelineDepth(kPipelineDepth)
          .setSensorTimestamp(timestamp)
          .setStatisticsHotPixelMapMode(
              ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_OFF)
          .setStatisticsLensShadingMapMode(
              ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_OFF)
          .setStatisticsSceneFlicker(ANDROID_STATISTICS_SCENE_FLICKER_NONE);

  // Optional keys: only attached when carried by the request.
  if (requestSettings.fpsRange.has_value()) {
    builder.setControlAeTargetFpsRange(requestSettings.fpsRange.value());
  }

  if (requestSettings.gpsCoordinates.has_value()) {
    const GpsCoordinates& coordinates = requestSettings.gpsCoordinates.value();
    builder.setJpegGpsCoordinates(coordinates);
  }

  std::unique_ptr<CameraMetadata> metadata = builder.build();

  if (metadata == nullptr) {
    ALOGE("%s: Failed to build capture result metadata", __func__);
    return CameraMetadata();
  }
  return std::move(*metadata);
}
164
165NotifyMsg createShutterNotifyMsg(int frameNumber,
166 std::chrono::nanoseconds timestamp) {
167 NotifyMsg msg;
168 msg.set<NotifyMsg::Tag::shutter>(ShutterMsg{
169 .frameNumber = frameNumber,
170 .timestamp = timestamp.count(),
171 });
172 return msg;
173}
174
175NotifyMsg createBufferErrorNotifyMsg(int frameNumber, int streamId) {
176 NotifyMsg msg;
177 msg.set<NotifyMsg::Tag::error>(ErrorMsg{.frameNumber = frameNumber,
178 .errorStreamId = streamId,
179 .errorCode = ErrorCode::ERROR_BUFFER});
180 return msg;
181}
182
183NotifyMsg createRequestErrorNotifyMsg(int frameNumber) {
184 NotifyMsg msg;
185 msg.set<NotifyMsg::Tag::error>(ErrorMsg{
Jan Sebechlebskyb0d8cab2023-11-28 10:55:04 +0100186 .frameNumber = frameNumber,
187 // errorStreamId needs to be set to -1 for ERROR_REQUEST
188 // (not tied to specific stream).
189 .errorStreamId = -1,
190 .errorCode = ErrorCode::ERROR_REQUEST});
Jan Sebechlebsky5cb39962023-11-22 17:33:07 +0100191 return msg;
192}
193
Jan Sebechlebsky042d1fb2023-12-12 16:37:00 +0100194std::shared_ptr<EglFrameBuffer> allocateTemporaryFramebuffer(
195 EGLDisplay eglDisplay, const uint width, const int height) {
196 const AHardwareBuffer_Desc desc{
197 .width = static_cast<uint32_t>(width),
198 .height = static_cast<uint32_t>(height),
199 .layers = 1,
200 .format = AHARDWAREBUFFER_FORMAT_Y8Cb8Cr8_420,
201 .usage = AHARDWAREBUFFER_USAGE_GPU_FRAMEBUFFER |
202 AHARDWAREBUFFER_USAGE_CPU_READ_OFTEN,
203 .rfu0 = 0,
204 .rfu1 = 0};
205
206 AHardwareBuffer* hwBufferPtr;
207 int status = AHardwareBuffer_allocate(&desc, &hwBufferPtr);
208 if (status != NO_ERROR) {
209 ALOGE(
210 "%s: Failed to allocate hardware buffer for temporary framebuffer: %d",
211 __func__, status);
212 return nullptr;
213 }
214
215 return std::make_shared<EglFrameBuffer>(
216 eglDisplay,
217 std::shared_ptr<AHardwareBuffer>(hwBufferPtr, AHardwareBuffer_release));
218}
219
220bool isYuvFormat(const PixelFormat pixelFormat) {
221 switch (static_cast<android_pixel_format_t>(pixelFormat)) {
222 case HAL_PIXEL_FORMAT_YCBCR_422_I:
223 case HAL_PIXEL_FORMAT_YCBCR_422_SP:
224 case HAL_PIXEL_FORMAT_Y16:
225 case HAL_PIXEL_FORMAT_YV12:
226 case HAL_PIXEL_FORMAT_YCBCR_420_888:
227 return true;
228 default:
229 return false;
230 }
231}
232
Jan Sebechlebsky4ce32082024-02-14 16:02:11 +0100233std::vector<uint8_t> createExif(
Vadim Caenc0aff132024-03-12 17:20:07 +0100234 Resolution imageSize, const CameraMetadata resultMetadata,
235 const std::vector<uint8_t>& compressedThumbnail = {}) {
Jan Sebechlebsky4ce32082024-02-14 16:02:11 +0100236 std::unique_ptr<ExifUtils> exifUtils(ExifUtils::create());
237 exifUtils->initialize();
Vadim Caenc0aff132024-03-12 17:20:07 +0100238
239 // Make a copy of the metadata in order to converting it the HAL metadata
240 // format (as opposed to the AIDL class) and use the setFromMetadata method
241 // from ExifUtil
242 camera_metadata_t* rawSettings =
243 clone_camera_metadata((camera_metadata_t*)resultMetadata.metadata.data());
244 if (rawSettings != nullptr) {
245 android::hardware::camera::common::helper::CameraMetadata halMetadata(
246 rawSettings);
247 exifUtils->setFromMetadata(halMetadata, imageSize.width, imageSize.height);
248 }
249 exifUtils->setMake(VirtualCameraDevice::kDefaultMakeAndModel);
250 exifUtils->setModel(VirtualCameraDevice::kDefaultMakeAndModel);
251 exifUtils->setFlash(0);
Jan Sebechlebsky4ce32082024-02-14 16:02:11 +0100252
253 std::vector<uint8_t> app1Data;
254
255 size_t thumbnailDataSize = compressedThumbnail.size();
256 const void* thumbnailData =
257 thumbnailDataSize > 0
258 ? reinterpret_cast<const void*>(compressedThumbnail.data())
259 : nullptr;
260
261 if (!exifUtils->generateApp1(thumbnailData, thumbnailDataSize)) {
262 ALOGE("%s: Failed to generate APP1 segment for EXIF metadata", __func__);
263 return app1Data;
264 }
265
266 const uint8_t* data = exifUtils->getApp1Buffer();
267 const size_t size = exifUtils->getApp1Length();
268
269 app1Data.insert(app1Data.end(), data, data + size);
270 return app1Data;
271}
272
Jan Sebechlebsky5cb39962023-11-22 17:33:07 +0100273} // namespace
274
// Immutable handle identifying one output buffer of a capture request:
// the stream it belongs to, the buffer id within that stream, and the
// acquire fence to wait on before writing into the buffer.
CaptureRequestBuffer::CaptureRequestBuffer(int streamId, int bufferId,
                                           sp<Fence> fence)
    : mStreamId(streamId), mBufferId(bufferId), mFence(fence) {
}
279
// Returns the id of the stream this buffer belongs to.
int CaptureRequestBuffer::getStreamId() const {
  return mStreamId;
}
283
// Returns the buffer id within its stream.
int CaptureRequestBuffer::getBufferId() const {
  return mBufferId;
}
287
// Returns the acquire fence for this buffer (may be null or invalid when
// no fence was supplied by the framework).
sp<Fence> CaptureRequestBuffer::getFence() const {
  return mFence;
}
291
// Constructs the render-thread state; the worker thread itself is not
// spawned until start() is called.
//
// |sessionContext| is held by reference and must outlive this instance.
// |inputSurfaceSize| is the resolution of the input Surface texture;
// |reportedSensorSize| is the sensor resolution reported in capture results.
// When |testMode| is true, a test pattern is rendered into the input Surface
// instead of relying on an external producer.
VirtualCameraRenderThread::VirtualCameraRenderThread(
    VirtualCameraSessionContext& sessionContext,
    const Resolution inputSurfaceSize, const Resolution reportedSensorSize,
    std::shared_ptr<ICameraDeviceCallback> cameraDeviceCallback, bool testMode)
    : mCameraDeviceCallback(cameraDeviceCallback),
      mInputSurfaceSize(inputSurfaceSize),
      mReportedSensorSize(reportedSensorSize),
      mTestMode(testMode),
      mSessionContext(sessionContext) {
}
302
// Signals the render thread to exit and blocks until it terminates.
// Safe to call whether or not start() was ever invoked (join is guarded
// by joinable()).
VirtualCameraRenderThread::~VirtualCameraRenderThread() {
  stop();
  if (mThread.joinable()) {
    mThread.join();
  }
}
309
// Bundles everything the render thread needs to process one capture request:
// the frame number, the output buffers to fill, and the per-request settings
// (copied into the task so it owns its data).
ProcessCaptureRequestTask::ProcessCaptureRequestTask(
    int frameNumber, const std::vector<CaptureRequestBuffer>& requestBuffers,
    const RequestSettings& requestSettings)
    : mFrameNumber(frameNumber),
      mBuffers(requestBuffers),
      mRequestSettings(requestSettings) {
}
317
// Returns the frame number of the capture request this task represents.
int ProcessCaptureRequestTask::getFrameNumber() const {
  return mFrameNumber;
}
321
// Returns the output buffers to be filled for this request.
const std::vector<CaptureRequestBuffer>& ProcessCaptureRequestTask::getBuffers()
    const {
  return mBuffers;
}
326
// Returns the per-request capture settings (JPEG quality, thumbnail, etc.).
const RequestSettings& ProcessCaptureRequestTask::getRequestSettings() const {
  return mRequestSettings;
}
330
Jan Sebechlebsky5cb39962023-11-22 17:33:07 +0100331void VirtualCameraRenderThread::enqueueTask(
332 std::unique_ptr<ProcessCaptureRequestTask> task) {
333 std::lock_guard<std::mutex> lock(mLock);
334 mQueue.emplace_back(std::move(task));
335 mCondVar.notify_one();
336}
337
// Drains the task queue, reporting every pending request as failed
// (ERROR_REQUEST) to the framework without rendering it.
//
// NOTE(review): mLock is held for the whole drain, including the binder
// callbacks made by flushCaptureRequest(); enqueueTask()/dequeueTask()
// block for that duration — confirm this is acceptable for flush latency.
void VirtualCameraRenderThread::flush() {
  std::lock_guard<std::mutex> lock(mLock);
  while (!mQueue.empty()) {
    std::unique_ptr<ProcessCaptureRequestTask> task = std::move(mQueue.front());
    mQueue.pop_front();
    flushCaptureRequest(*task);
  }
}
346
// Spawns the worker thread running threadLoop().
void VirtualCameraRenderThread::start() {
  mThread = std::thread(&VirtualCameraRenderThread::threadLoop, this);
}
350
// Asks the render thread to exit after its current task.
// Does not join the thread; the destructor performs the join.
void VirtualCameraRenderThread::stop() {
  {
    std::lock_guard<std::mutex> lock(mLock);
    mPendingExit = true;
    mCondVar.notify_one();
  }
}
358
// Returns the input Surface backing the render thread's texture.
// Blocks until threadLoop() has created the surface, so start() must have
// been called for this to ever return.
sp<Surface> VirtualCameraRenderThread::getInputSurface() {
  return mInputSurfacePromise.get_future().get();
}
362
// Blocks until a task is available or an exit is pending.
// Returns the next task, or nullptr when exit was requested (exit takes
// precedence; any tasks still queued are left in the queue).
std::unique_ptr<ProcessCaptureRequestTask>
VirtualCameraRenderThread::dequeueTask() {
  std::unique_lock<std::mutex> lock(mLock);
  // Clang's thread safety analysis doesn't perform alias analysis,
  // so it doesn't support moveable std::unique_lock.
  //
  // Lock assertion below is basically explicit declaration that
  // the lock is held in this scope, which is true, since it's only
  // released during waiting inside mCondVar.wait calls.
  ScopedLockAssertion lockAssertion(mLock);

  mCondVar.wait(lock, [this]() REQUIRES(mLock) {
    return mPendingExit || !mQueue.empty();
  });
  if (mPendingExit) {
    return nullptr;
  }
  std::unique_ptr<ProcessCaptureRequestTask> task = std::move(mQueue.front());
  mQueue.pop_front();
  return task;
}
384
// Render thread entry point.
//
// Creates the EGL display context, the YUV and RGBA texture programs and the
// input surface texture on this thread, publishes the input Surface through
// mInputSurfacePromise (unblocking getInputSurface()), then processes tasks
// until dequeueTask() returns nullptr after stop().
void VirtualCameraRenderThread::threadLoop() {
  ALOGV("Render thread starting");

  mEglDisplayContext = std::make_unique<EglDisplayContext>();
  mEglTextureYuvProgram =
      std::make_unique<EglTextureProgram>(EglTextureProgram::TextureFormat::YUV);
  mEglTextureRgbProgram = std::make_unique<EglTextureProgram>(
      EglTextureProgram::TextureFormat::RGBA);
  mEglSurfaceTexture = std::make_unique<EglSurfaceTexture>(
      mInputSurfaceSize.width, mInputSurfaceSize.height);
  mInputSurfacePromise.set_value(mEglSurfaceTexture->getSurface());

  while (std::unique_ptr<ProcessCaptureRequestTask> task = dequeueTask()) {
    processCaptureRequest(*task);
  }

  ALOGV("Render thread exiting");
}
403
// Processes one capture request end to end:
//  1. timestamps the frame and builds the capture-result metadata,
//  2. (test mode only) renders a test pattern into the input Surface,
//  3. latches the latest input frame into the GL texture,
//  4. renders into every requested output buffer (JPEG path for BLOB
//     streams, direct EGL render otherwise),
//  5. notifies shutter plus any per-buffer errors, then delivers the
//     CaptureResult via processCaptureResult.
// Errors from the callback interface are logged and abort the delivery.
void VirtualCameraRenderThread::processCaptureRequest(
    const ProcessCaptureRequestTask& request) {
  const std::chrono::nanoseconds timestamp =
      std::chrono::duration_cast<std::chrono::nanoseconds>(
          std::chrono::steady_clock::now().time_since_epoch());

  CaptureResult captureResult;
  captureResult.fmqResultSize = 0;
  captureResult.frameNumber = request.getFrameNumber();
  // Partial result needs to be set to 1 when metadata are present.
  captureResult.partialResult = 1;
  // No input buffer (this is not a reprocessing request).
  captureResult.inputBuffer.streamId = -1;
  captureResult.physicalCameraMetadata.resize(0);
  captureResult.result = createCaptureResultMetadata(
      timestamp, request.getRequestSettings(), mReportedSensorSize);

  const std::vector<CaptureRequestBuffer>& buffers = request.getBuffers();
  captureResult.outputBuffers.resize(buffers.size());

  if (mTestMode) {
    // In test mode let's just render something to the Surface ourselves.
    renderTestPatternYCbCr420(mEglSurfaceTexture->getSurface(),
                              request.getFrameNumber());
  }

  // Latch the most recent frame from the input Surface into the GL texture.
  mEglSurfaceTexture->updateTexture();

  for (int i = 0; i < buffers.size(); ++i) {
    const CaptureRequestBuffer& reqBuffer = buffers[i];
    StreamBuffer& resBuffer = captureResult.outputBuffers[i];
    resBuffer.streamId = reqBuffer.getStreamId();
    resBuffer.bufferId = reqBuffer.getBufferId();
    resBuffer.status = BufferStatus::OK;

    const std::optional<Stream> streamConfig =
        mSessionContext.getStreamConfig(reqBuffer.getStreamId());

    if (!streamConfig.has_value()) {
      resBuffer.status = BufferStatus::ERROR;
      continue;
    }

    // BLOB streams receive a JPEG-compressed frame; all other formats are
    // rendered directly into the stream's EGL framebuffer.
    auto status = streamConfig->format == PixelFormat::BLOB
                      ? renderIntoBlobStreamBuffer(
                            reqBuffer.getStreamId(), reqBuffer.getBufferId(),
                            captureResult.result, request.getRequestSettings(),
                            reqBuffer.getFence())
                      : renderIntoImageStreamBuffer(reqBuffer.getStreamId(),
                                                    reqBuffer.getBufferId(),
                                                    reqBuffer.getFence());
    if (!status.isOk()) {
      resBuffer.status = BufferStatus::ERROR;
    }
  }

  // Shutter notification first, followed by an ERROR_BUFFER message for
  // every buffer that failed to render.
  std::vector<NotifyMsg> notifyMsg{
      createShutterNotifyMsg(request.getFrameNumber(), timestamp)};
  for (const StreamBuffer& resBuffer : captureResult.outputBuffers) {
    if (resBuffer.status != BufferStatus::OK) {
      notifyMsg.push_back(createBufferErrorNotifyMsg(request.getFrameNumber(),
                                                     resBuffer.streamId));
    }
  }

  auto status = mCameraDeviceCallback->notify(notifyMsg);
  if (!status.isOk()) {
    ALOGE("%s: notify call failed: %s", __func__,
          status.getDescription().c_str());
    return;
  }

  std::vector<::aidl::android::hardware::camera::device::CaptureResult>
      captureResults(1);
  captureResults[0] = std::move(captureResult);

  status = mCameraDeviceCallback->processCaptureResult(captureResults);
  if (!status.isOk()) {
    ALOGE("%s: processCaptureResult call failed: %s", __func__,
          status.getDescription().c_str());
    return;
  }

  ALOGD("%s: Successfully called processCaptureResult", __func__);
}
488
// Fails a capture request without rendering it: every output buffer is
// returned with ERROR status (propagating its acquire fence as the release
// fence, since the buffer was never written), an ERROR_REQUEST notification
// is sent, and an empty CaptureResult is delivered.
void VirtualCameraRenderThread::flushCaptureRequest(
    const ProcessCaptureRequestTask& request) {
  CaptureResult captureResult;
  captureResult.fmqResultSize = 0;
  captureResult.frameNumber = request.getFrameNumber();
  captureResult.inputBuffer.streamId = -1;

  const std::vector<CaptureRequestBuffer>& buffers = request.getBuffers();
  captureResult.outputBuffers.resize(buffers.size());

  for (int i = 0; i < buffers.size(); ++i) {
    const CaptureRequestBuffer& reqBuffer = buffers[i];
    StreamBuffer& resBuffer = captureResult.outputBuffers[i];
    resBuffer.streamId = reqBuffer.getStreamId();
    resBuffer.bufferId = reqBuffer.getBufferId();
    resBuffer.status = BufferStatus::ERROR;
    sp<Fence> fence = reqBuffer.getFence();
    if (fence != nullptr && fence->isValid()) {
      // Hand the unsignaled acquire fence back as the release fence.
      resBuffer.releaseFence.fds.emplace_back(fence->dup());
    }
  }

  auto status = mCameraDeviceCallback->notify(
      {createRequestErrorNotifyMsg(request.getFrameNumber())});
  if (!status.isOk()) {
    ALOGE("%s: notify call failed: %s", __func__,
          status.getDescription().c_str());
    return;
  }

  std::vector<::aidl::android::hardware::camera::device::CaptureResult>
      captureResults(1);
  captureResults[0] = std::move(captureResult);

  status = mCameraDeviceCallback->processCaptureResult(captureResults);
  if (!status.isOk()) {
    ALOGE("%s: processCaptureResult call failed: %s", __func__,
          status.getDescription().c_str());
  }
}
529
Jan Sebechlebsky4ce32082024-02-14 16:02:11 +0100530std::vector<uint8_t> VirtualCameraRenderThread::createThumbnail(
531 const Resolution resolution, const int quality) {
532 if (resolution.width == 0 || resolution.height == 0) {
533 ALOGV("%s: Skipping thumbnail creation, zero size requested", __func__);
534 return {};
535 }
536
537 ALOGV("%s: Creating thumbnail with size %d x %d, quality %d", __func__,
538 resolution.width, resolution.height, quality);
Jan Sebechlebskyb3771312024-03-15 10:38:02 +0100539 Resolution bufferSize = roundTo2DctSize(resolution);
Jan Sebechlebsky4ce32082024-02-14 16:02:11 +0100540 std::shared_ptr<EglFrameBuffer> framebuffer = allocateTemporaryFramebuffer(
Jan Sebechlebskyb3771312024-03-15 10:38:02 +0100541 mEglDisplayContext->getEglDisplay(), bufferSize.width, bufferSize.height);
Jan Sebechlebsky4ce32082024-02-14 16:02:11 +0100542 if (framebuffer == nullptr) {
543 ALOGE(
544 "Failed to allocate temporary framebuffer for JPEG thumbnail "
545 "compression");
546 return {};
547 }
548
549 // TODO(b/324383963) Add support for letterboxing if the thumbnail size
550 // doesn't correspond
551 // to input texture aspect ratio.
Jan Sebechlebskyb3771312024-03-15 10:38:02 +0100552 if (!renderIntoEglFramebuffer(*framebuffer, /*fence=*/nullptr,
553 Rect(resolution.width, resolution.height))
554 .isOk()) {
Jan Sebechlebsky4ce32082024-02-14 16:02:11 +0100555 ALOGE(
556 "Failed to render input texture into temporary framebuffer for JPEG "
557 "thumbnail");
558 return {};
559 }
560
Jan Sebechlebsky4ce32082024-02-14 16:02:11 +0100561 std::vector<uint8_t> compressedThumbnail;
562 compressedThumbnail.resize(kJpegThumbnailBufferSize);
Jan Sebechlebskyb3771312024-03-15 10:38:02 +0100563 ALOGE("%s: Compressing thumbnail %d x %d", __func__, resolution.width,
564 resolution.height);
565 std::optional<size_t> compressedSize =
566 compressJpeg(resolution.width, resolution.height, quality,
567 framebuffer->getHardwareBuffer(), {},
568 compressedThumbnail.size(), compressedThumbnail.data());
Jan Sebechlebsky4ce32082024-02-14 16:02:11 +0100569 if (!compressedSize.has_value()) {
570 ALOGE("%s: Failed to compress jpeg thumbnail", __func__);
571 return {};
572 }
573 compressedThumbnail.resize(compressedSize.value());
574 return compressedThumbnail;
575}
576
// Fills a BLOB-stream output buffer with a JPEG-compressed frame.
//
// Steps: fetch the destination hardware buffer and the stream config, render
// the input texture into a temporary DCT-aligned YUV framebuffer, lock the
// destination planes for CPU writes (waiting on |fence| via PlanesLockGuard),
// build the EXIF APP1 segment (with optional thumbnail), compress into the
// destination, and append the CameraBlob footer at the end of the buffer as
// required by the camera HAL BLOB convention.
//
// Returns INTERNAL_ERROR on any failure; the caller marks the buffer ERROR.
ndk::ScopedAStatus VirtualCameraRenderThread::renderIntoBlobStreamBuffer(
    const int streamId, const int bufferId, const CameraMetadata& resultMetadata,
    const RequestSettings& requestSettings, sp<Fence> fence) {
  std::shared_ptr<AHardwareBuffer> hwBuffer =
      mSessionContext.fetchHardwareBuffer(streamId, bufferId);
  if (hwBuffer == nullptr) {
    ALOGE("%s: Failed to fetch hardware buffer %d for streamId %d", __func__,
          bufferId, streamId);
    return cameraStatus(Status::INTERNAL_ERROR);
  }

  std::optional<Stream> stream = mSessionContext.getStreamConfig(streamId);
  if (!stream.has_value()) {
    ALOGE("%s, failed to fetch information about stream %d", __func__, streamId);
    return cameraStatus(Status::INTERNAL_ERROR);
  }

  ALOGV("%s: Rendering JPEG with size %d x %d, quality %d", __func__,
        stream->width, stream->height, requestSettings.jpegQuality);

  // Let's create YUV framebuffer and render the surface into this.
  // This will take care about rescaling as well as potential format conversion.
  // The buffer dimensions need to be rounded to nearest multiple of JPEG DCT
  // size, however we pass the viewport corresponding to size of the stream so
  // the image will be only rendered to the area corresponding to the stream
  // size.
  Resolution bufferSize =
      roundTo2DctSize(Resolution(stream->width, stream->height));
  std::shared_ptr<EglFrameBuffer> framebuffer = allocateTemporaryFramebuffer(
      mEglDisplayContext->getEglDisplay(), bufferSize.width, bufferSize.height);
  if (framebuffer == nullptr) {
    ALOGE("Failed to allocate temporary framebuffer for JPEG compression");
    return cameraStatus(Status::INTERNAL_ERROR);
  }

  // Render into temporary framebuffer.
  ndk::ScopedAStatus status = renderIntoEglFramebuffer(
      *framebuffer, /*fence=*/nullptr, Rect(stream->width, stream->height));
  if (!status.isOk()) {
    ALOGE("Failed to render input texture into temporary framebuffer");
    return status;
  }

  // Lock the destination planes for CPU access; the guard waits on |fence|
  // and unlocks on scope exit.
  PlanesLockGuard planesLock(hwBuffer, AHARDWAREBUFFER_USAGE_CPU_READ_RARELY,
                             fence);
  if (planesLock.getStatus() != OK) {
    return cameraStatus(Status::INTERNAL_ERROR);
  }

  std::vector<uint8_t> app1ExifData =
      createExif(Resolution(stream->width, stream->height), resultMetadata,
                 createThumbnail(requestSettings.thumbnailResolution,
                                 requestSettings.thumbnailJpegQuality));
  // Reserve room at the tail of the buffer for the CameraBlob footer.
  std::optional<size_t> compressedSize = compressJpeg(
      stream->width, stream->height, requestSettings.jpegQuality,
      framebuffer->getHardwareBuffer(), app1ExifData,
      stream->bufferSize - sizeof(CameraBlob), (*planesLock).planes[0].data);

  if (!compressedSize.has_value()) {
    ALOGE("%s: Failed to compress JPEG image", __func__);
    return cameraStatus(Status::INTERNAL_ERROR);
  }

  // CameraBlob footer describing the JPEG payload; written at the very end
  // of the buffer where the framework expects to find it.
  CameraBlob cameraBlob{
      .blobId = CameraBlobId::JPEG,
      .blobSizeBytes = static_cast<int32_t>(compressedSize.value())};

  memcpy(reinterpret_cast<uint8_t*>((*planesLock).planes[0].data) +
             (stream->bufferSize - sizeof(cameraBlob)),
         &cameraBlob, sizeof(cameraBlob));

  ALOGV("%s: Successfully compressed JPEG image, resulting size %zu B",
        __func__, compressedSize.value());

  return ndk::ScopedAStatus::ok();
}
653
// Renders the current input texture directly into the EGL framebuffer
// backing buffer |bufferId| of stream |streamId|, waiting on |fence| first.
// Render duration is logged at verbose level.
ndk::ScopedAStatus VirtualCameraRenderThread::renderIntoImageStreamBuffer(
    int streamId, int bufferId, sp<Fence> fence) {
  ALOGV("%s", __func__);

  const std::chrono::nanoseconds before =
      std::chrono::duration_cast<std::chrono::nanoseconds>(
          std::chrono::steady_clock::now().time_since_epoch());

  // Fetch (or lazily create) the EGL framebuffer wrapping the output buffer.
  std::shared_ptr<EglFrameBuffer> framebuffer =
      mSessionContext.fetchOrCreateEglFramebuffer(
          mEglDisplayContext->getEglDisplay(), streamId, bufferId);
  if (framebuffer == nullptr) {
    ALOGE(
        "%s: Failed to get EGL framebuffer corresponding to buffer id "
        "%d for streamId %d",
        __func__, bufferId, streamId);
    return cameraStatus(Status::ILLEGAL_ARGUMENT);
  }

  ndk::ScopedAStatus status = renderIntoEglFramebuffer(*framebuffer, fence);

  const std::chrono::nanoseconds after =
      std::chrono::duration_cast<std::chrono::nanoseconds>(
          std::chrono::steady_clock::now().time_since_epoch());

  ALOGV("Rendering to buffer %d, stream %d took %lld ns", bufferId, streamId,
        after.count() - before.count());

  return ndk::ScopedAStatus::ok();
}
685
// Renders the current input texture into |framebuffer|.
//
// Waits up to kAcquireFenceTimeout on |fence| (if valid) before drawing.
// |viewport| restricts the render area; when absent the full framebuffer is
// used. The YUV or RGBA texture program is chosen based on the input
// buffer's pixel format.
ndk::ScopedAStatus VirtualCameraRenderThread::renderIntoEglFramebuffer(
    EglFrameBuffer& framebuffer, sp<Fence> fence, std::optional<Rect> viewport) {
  ALOGV("%s", __func__);
  // Wait for fence to clear.
  if (fence != nullptr && fence->isValid()) {
    status_t ret = fence->wait(kAcquireFenceTimeout.count());
    if (ret != 0) {
      ALOGE("Timeout while waiting for the acquire fence for buffer");
      return cameraStatus(Status::INTERNAL_ERROR);
    }
  }

  mEglDisplayContext->makeCurrent();
  framebuffer.beforeDraw();

  Rect viewportRect =
      viewport.value_or(Rect(framebuffer.getWidth(), framebuffer.getHeight()));
  glViewport(viewportRect.leftTop().x, viewportRect.leftTop().y,
             viewportRect.getWidth(), viewportRect.getHeight());

  sp<GraphicBuffer> textureBuffer = mEglSurfaceTexture->getCurrentBuffer();
  if (textureBuffer == nullptr) {
    // If there's no current buffer, nothing was written to the surface and
    // texture is not initialized yet. Let's render the framebuffer black
    // instead of rendering the texture.
    // NOTE(review): (0, 0.5, 0.5) is black for YUV targets (Y=0, neutral
    // chroma); for an RGBA framebuffer this clear would be teal — presumably
    // only the YUV path can hit this before the first frame; verify.
    glClearColor(0.0f, 0.5f, 0.5f, 0.0f);
    glClear(GL_COLOR_BUFFER_BIT);
  } else {
    const bool renderSuccess =
        isYuvFormat(static_cast<PixelFormat>(textureBuffer->getPixelFormat()))
            ? mEglTextureYuvProgram->draw(
                  mEglSurfaceTexture->getTextureId(),
                  mEglSurfaceTexture->getTransformMatrix())
            : mEglTextureRgbProgram->draw(
                  mEglSurfaceTexture->getTextureId(),
                  mEglSurfaceTexture->getTransformMatrix());
    if (!renderSuccess) {
      ALOGE("%s: Failed to render texture", __func__);
      return cameraStatus(Status::INTERNAL_ERROR);
    }
  }
  framebuffer.afterDraw();

  return ndk::ScopedAStatus::ok();
}
731
Jan Sebechlebsky5cb39962023-11-22 17:33:07 +0100732} // namespace virtualcamera
733} // namespace companion
734} // namespace android