blob: a5ee922b0ab2badd2423b00b929057c4340cf37d [file] [log] [blame]
Jan Sebechlebsky5cb39962023-11-22 17:33:07 +01001/*
2 * Copyright (C) 2023 The Android Open Source Project
3 *
4 * Licensed under the Apache License, Version 2.0 (the "License");
5 * you may not use this file except in compliance with the License.
6 * You may obtain a copy of the License at
7 *
8 * http://www.apache.org/licenses/LICENSE-2.0
9 *
10 * Unless required by applicable law or agreed to in writing, software
11 * distributed under the License is distributed on an "AS IS" BASIS,
12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 * See the License for the specific language governing permissions and
14 * limitations under the License.
15 */
16
17#define LOG_TAG "VirtualCameraRenderThread"
18#include "VirtualCameraRenderThread.h"
19
20#include <chrono>
Jan Sebechlebsky042d1fb2023-12-12 16:37:00 +010021#include <cstdint>
Jan Sebechlebsky4ce32082024-02-14 16:02:11 +010022#include <cstring>
Jan Sebechlebsky5cb39962023-11-22 17:33:07 +010023#include <future>
24#include <memory>
25#include <mutex>
26#include <thread>
Jan Sebechlebsky4ce32082024-02-14 16:02:11 +010027#include <vector>
Jan Sebechlebsky5cb39962023-11-22 17:33:07 +010028
Jan Sebechlebsky4ce32082024-02-14 16:02:11 +010029#include "Exif.h"
Jan Sebechlebsky9ae496f2023-12-05 15:56:28 +010030#include "GLES/gl.h"
Biswarup Pal8ad8bc52024-02-08 13:41:44 +000031#include "VirtualCameraDevice.h"
Jan Sebechlebsky5cb39962023-11-22 17:33:07 +010032#include "VirtualCameraSessionContext.h"
33#include "aidl/android/hardware/camera/common/Status.h"
34#include "aidl/android/hardware/camera/device/BufferStatus.h"
Jan Sebechlebsky4ce32082024-02-14 16:02:11 +010035#include "aidl/android/hardware/camera/device/CameraBlob.h"
36#include "aidl/android/hardware/camera/device/CameraBlobId.h"
Jan Sebechlebsky5cb39962023-11-22 17:33:07 +010037#include "aidl/android/hardware/camera/device/CameraMetadata.h"
38#include "aidl/android/hardware/camera/device/CaptureResult.h"
39#include "aidl/android/hardware/camera/device/ErrorCode.h"
40#include "aidl/android/hardware/camera/device/ICameraDeviceCallback.h"
41#include "aidl/android/hardware/camera/device/NotifyMsg.h"
42#include "aidl/android/hardware/camera/device/ShutterMsg.h"
43#include "aidl/android/hardware/camera/device/StreamBuffer.h"
44#include "android-base/thread_annotations.h"
45#include "android/binder_auto_utils.h"
46#include "android/hardware_buffer.h"
Jan Sebechlebsky2f4478e2024-05-08 17:26:42 +020047#include "hardware/gralloc.h"
Vadim Caenc0aff132024-03-12 17:20:07 +010048#include "system/camera_metadata.h"
Jan Sebechlebsky042d1fb2023-12-12 16:37:00 +010049#include "ui/GraphicBuffer.h"
Jan Sebechlebskyb3771312024-03-15 10:38:02 +010050#include "ui/Rect.h"
Jan Sebechlebsky5cb39962023-11-22 17:33:07 +010051#include "util/EglFramebuffer.h"
52#include "util/JpegUtil.h"
Jan Sebechlebsky4ce32082024-02-14 16:02:11 +010053#include "util/MetadataUtil.h"
Jan Sebechlebsky5cb39962023-11-22 17:33:07 +010054#include "util/Util.h"
55#include "utils/Errors.h"
56
57namespace android {
58namespace companion {
59namespace virtualcamera {
60
61using ::aidl::android::hardware::camera::common::Status;
62using ::aidl::android::hardware::camera::device::BufferStatus;
Jan Sebechlebsky4ce32082024-02-14 16:02:11 +010063using ::aidl::android::hardware::camera::device::CameraBlob;
64using ::aidl::android::hardware::camera::device::CameraBlobId;
Jan Sebechlebsky5cb39962023-11-22 17:33:07 +010065using ::aidl::android::hardware::camera::device::CameraMetadata;
66using ::aidl::android::hardware::camera::device::CaptureResult;
67using ::aidl::android::hardware::camera::device::ErrorCode;
68using ::aidl::android::hardware::camera::device::ErrorMsg;
69using ::aidl::android::hardware::camera::device::ICameraDeviceCallback;
70using ::aidl::android::hardware::camera::device::NotifyMsg;
71using ::aidl::android::hardware::camera::device::ShutterMsg;
72using ::aidl::android::hardware::camera::device::Stream;
73using ::aidl::android::hardware::camera::device::StreamBuffer;
74using ::aidl::android::hardware::graphics::common::PixelFormat;
75using ::android::base::ScopedLockAssertion;
76
Jan Sebechlebsky4ce32082024-02-14 16:02:11 +010077using ::android::hardware::camera::common::helper::ExifUtils;
78
Jan Sebechlebsky5cb39962023-11-22 17:33:07 +010079namespace {
80
81using namespace std::chrono_literals;
82
// Maximum time to wait for a buffer's acquire fence to signal before giving
// up on rendering into that buffer.
static constexpr std::chrono::milliseconds kAcquireFenceTimeout = 500ms;

// See REQUEST_PIPELINE_DEPTH in CaptureResult.java.
// This roughly corresponds to frame latency, we set to
// documented minimum of 2.
static constexpr uint8_t kPipelineDepth = 2;

// Upper bound for the compressed JPEG thumbnail; the working buffer is
// allocated at this size and shrunk to the actual compressed size afterwards
// (see VirtualCameraRenderThread::createThumbnail).
static constexpr size_t kJpegThumbnailBufferSize = 32 * 1024;  // 32 KiB
91
// Builds the result metadata attached to every CaptureResult sent back to the
// camera framework. Most 3A / statistics keys are hard-coded to "off" /
// "inactive" values since the virtual camera performs no real 3A processing;
// the per-request values (JPEG quality, orientation, thumbnail size, capture
// intent, fps range, GPS) are copied from the request settings.
//
// timestamp           - sensor timestamp reported for this frame.
// requestSettings     - settings carried by the originating capture request.
// reportedSensorSize  - sensor dimensions advertised to clients; used for the
//                       crop region.
// Returns the built metadata, or an empty CameraMetadata if building fails.
CameraMetadata createCaptureResultMetadata(
    const std::chrono::nanoseconds timestamp,
    const RequestSettings& requestSettings,
    const Resolution reportedSensorSize) {
  // All of the keys used in the response need to be referenced in
  // availableResultKeys in CameraCharacteristics (see initCameraCharacteristics
  // in VirtualCameraDevice.cc).
  MetadataBuilder builder =
      MetadataBuilder()
          .setAberrationCorrectionMode(
              ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF)
          .setControlAeAvailableAntibandingModes(
              {ANDROID_CONTROL_AE_ANTIBANDING_MODE_OFF})
          .setControlAeAntibandingMode(ANDROID_CONTROL_AE_ANTIBANDING_MODE_OFF)
          .setControlAeExposureCompensation(0)
          .setControlAeLockAvailable(false)
          .setControlAeLock(ANDROID_CONTROL_AE_LOCK_OFF)
          .setControlAeMode(ANDROID_CONTROL_AE_MODE_ON)
          .setControlAePrecaptureTrigger(
              // Limited devices are expected to have precapture ae enabled and
              // respond to cancellation request. Since we don't actually
              // support AE at all, let's just respect the cancellation
              // expectation in case it's requested.
              requestSettings.aePrecaptureTrigger ==
                      ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER_CANCEL
                  ? ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER_CANCEL
                  : ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER_IDLE)
          .setControlAeState(ANDROID_CONTROL_AE_STATE_INACTIVE)
          .setControlAfMode(ANDROID_CONTROL_AF_MODE_OFF)
          .setControlAfTrigger(ANDROID_CONTROL_AF_TRIGGER_IDLE)
          .setControlAfState(ANDROID_CONTROL_AF_STATE_INACTIVE)
          .setControlAwbMode(ANDROID_CONTROL_AWB_MODE_AUTO)
          .setControlAwbLock(ANDROID_CONTROL_AWB_LOCK_OFF)
          .setControlAwbState(ANDROID_CONTROL_AWB_STATE_INACTIVE)
          .setControlCaptureIntent(requestSettings.captureIntent)
          .setControlEffectMode(ANDROID_CONTROL_EFFECT_MODE_OFF)
          .setControlMode(ANDROID_CONTROL_MODE_AUTO)
          .setControlSceneMode(ANDROID_CONTROL_SCENE_MODE_DISABLED)
          .setControlVideoStabilizationMode(
              ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_OFF)
          .setCropRegion(0, 0, reportedSensorSize.width,
                         reportedSensorSize.height)
          .setFaceDetectMode(ANDROID_STATISTICS_FACE_DETECT_MODE_OFF)
          .setFlashState(ANDROID_FLASH_STATE_UNAVAILABLE)
          .setFlashMode(ANDROID_FLASH_MODE_OFF)
          .setFocalLength(VirtualCameraDevice::kFocalLength)
          .setJpegQuality(requestSettings.jpegQuality)
          .setJpegOrientation(requestSettings.jpegOrientation)
          .setJpegThumbnailSize(requestSettings.thumbnailResolution.width,
                                requestSettings.thumbnailResolution.height)
          .setJpegThumbnailQuality(requestSettings.thumbnailJpegQuality)
          .setLensOpticalStabilizationMode(
              ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF)
          .setNoiseReductionMode(ANDROID_NOISE_REDUCTION_MODE_OFF)
          .setPipelineDepth(kPipelineDepth)
          .setSensorTimestamp(timestamp)
          .setStatisticsHotPixelMapMode(
              ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_OFF)
          .setStatisticsLensShadingMapMode(
              ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_OFF)
          .setStatisticsSceneFlicker(ANDROID_STATISTICS_SCENE_FLICKER_NONE);

  // Optional per-request keys: only set when the request carried them.
  if (requestSettings.fpsRange.has_value()) {
    builder.setControlAeTargetFpsRange(requestSettings.fpsRange.value());
  }

  if (requestSettings.gpsCoordinates.has_value()) {
    const GpsCoordinates& coordinates = requestSettings.gpsCoordinates.value();
    builder.setJpegGpsCoordinates(coordinates);
  }

  std::unique_ptr<CameraMetadata> metadata = builder.build();

  if (metadata == nullptr) {
    ALOGE("%s: Failed to build capture result metadata", __func__);
    return CameraMetadata();
  }
  return std::move(*metadata);
}
171
172NotifyMsg createShutterNotifyMsg(int frameNumber,
173 std::chrono::nanoseconds timestamp) {
174 NotifyMsg msg;
175 msg.set<NotifyMsg::Tag::shutter>(ShutterMsg{
176 .frameNumber = frameNumber,
177 .timestamp = timestamp.count(),
178 });
179 return msg;
180}
181
182NotifyMsg createBufferErrorNotifyMsg(int frameNumber, int streamId) {
183 NotifyMsg msg;
184 msg.set<NotifyMsg::Tag::error>(ErrorMsg{.frameNumber = frameNumber,
185 .errorStreamId = streamId,
186 .errorCode = ErrorCode::ERROR_BUFFER});
187 return msg;
188}
189
190NotifyMsg createRequestErrorNotifyMsg(int frameNumber) {
191 NotifyMsg msg;
192 msg.set<NotifyMsg::Tag::error>(ErrorMsg{
Jan Sebechlebskyb0d8cab2023-11-28 10:55:04 +0100193 .frameNumber = frameNumber,
194 // errorStreamId needs to be set to -1 for ERROR_REQUEST
195 // (not tied to specific stream).
196 .errorStreamId = -1,
197 .errorCode = ErrorCode::ERROR_REQUEST});
Jan Sebechlebsky5cb39962023-11-22 17:33:07 +0100198 return msg;
199}
200
Jan Sebechlebsky042d1fb2023-12-12 16:37:00 +0100201std::shared_ptr<EglFrameBuffer> allocateTemporaryFramebuffer(
202 EGLDisplay eglDisplay, const uint width, const int height) {
203 const AHardwareBuffer_Desc desc{
204 .width = static_cast<uint32_t>(width),
205 .height = static_cast<uint32_t>(height),
206 .layers = 1,
207 .format = AHARDWAREBUFFER_FORMAT_Y8Cb8Cr8_420,
208 .usage = AHARDWAREBUFFER_USAGE_GPU_FRAMEBUFFER |
209 AHARDWAREBUFFER_USAGE_CPU_READ_OFTEN,
210 .rfu0 = 0,
211 .rfu1 = 0};
212
213 AHardwareBuffer* hwBufferPtr;
214 int status = AHardwareBuffer_allocate(&desc, &hwBufferPtr);
215 if (status != NO_ERROR) {
216 ALOGE(
217 "%s: Failed to allocate hardware buffer for temporary framebuffer: %d",
218 __func__, status);
219 return nullptr;
220 }
221
222 return std::make_shared<EglFrameBuffer>(
223 eglDisplay,
224 std::shared_ptr<AHardwareBuffer>(hwBufferPtr, AHardwareBuffer_release));
225}
226
227bool isYuvFormat(const PixelFormat pixelFormat) {
228 switch (static_cast<android_pixel_format_t>(pixelFormat)) {
229 case HAL_PIXEL_FORMAT_YCBCR_422_I:
230 case HAL_PIXEL_FORMAT_YCBCR_422_SP:
231 case HAL_PIXEL_FORMAT_Y16:
232 case HAL_PIXEL_FORMAT_YV12:
233 case HAL_PIXEL_FORMAT_YCBCR_420_888:
234 return true;
235 default:
236 return false;
237 }
238}
239
Jan Sebechlebsky4ce32082024-02-14 16:02:11 +0100240std::vector<uint8_t> createExif(
Vadim Caenc0aff132024-03-12 17:20:07 +0100241 Resolution imageSize, const CameraMetadata resultMetadata,
242 const std::vector<uint8_t>& compressedThumbnail = {}) {
Jan Sebechlebsky4ce32082024-02-14 16:02:11 +0100243 std::unique_ptr<ExifUtils> exifUtils(ExifUtils::create());
244 exifUtils->initialize();
Vadim Caenc0aff132024-03-12 17:20:07 +0100245
246 // Make a copy of the metadata in order to converting it the HAL metadata
247 // format (as opposed to the AIDL class) and use the setFromMetadata method
248 // from ExifUtil
249 camera_metadata_t* rawSettings =
250 clone_camera_metadata((camera_metadata_t*)resultMetadata.metadata.data());
251 if (rawSettings != nullptr) {
252 android::hardware::camera::common::helper::CameraMetadata halMetadata(
253 rawSettings);
254 exifUtils->setFromMetadata(halMetadata, imageSize.width, imageSize.height);
255 }
256 exifUtils->setMake(VirtualCameraDevice::kDefaultMakeAndModel);
257 exifUtils->setModel(VirtualCameraDevice::kDefaultMakeAndModel);
258 exifUtils->setFlash(0);
Jan Sebechlebsky4ce32082024-02-14 16:02:11 +0100259
260 std::vector<uint8_t> app1Data;
261
262 size_t thumbnailDataSize = compressedThumbnail.size();
263 const void* thumbnailData =
264 thumbnailDataSize > 0
265 ? reinterpret_cast<const void*>(compressedThumbnail.data())
266 : nullptr;
267
268 if (!exifUtils->generateApp1(thumbnailData, thumbnailDataSize)) {
269 ALOGE("%s: Failed to generate APP1 segment for EXIF metadata", __func__);
270 return app1Data;
271 }
272
273 const uint8_t* data = exifUtils->getApp1Buffer();
274 const size_t size = exifUtils->getApp1Length();
275
276 app1Data.insert(app1Data.end(), data, data + size);
277 return app1Data;
278}
279
Jan Sebechlebsky5cb39962023-11-22 17:33:07 +0100280} // namespace
281
// Bundles one output buffer reference from a capture request: the stream it
// belongs to, the buffer id within that stream, and the (possibly null or
// invalid) acquire fence that must signal before the buffer may be written.
CaptureRequestBuffer::CaptureRequestBuffer(int streamId, int bufferId,
                                           sp<Fence> fence)
    : mStreamId(streamId), mBufferId(bufferId), mFence(fence) {
}

// Returns the id of the stream this buffer belongs to.
int CaptureRequestBuffer::getStreamId() const {
  return mStreamId;
}

// Returns the id of this buffer within its stream.
int CaptureRequestBuffer::getBufferId() const {
  return mBufferId;
}

// Returns the acquire fence (may be null or invalid when no fence was given).
sp<Fence> CaptureRequestBuffer::getFence() const {
  return mFence;
}
298
// Constructs the render thread for a capture session. Does not start the
// thread — call start() to spawn it and getInputSurface() to obtain the
// surface it renders from.
//
// sessionContext      - session-scoped state (stream configs, buffers);
//                       stored by reference, must outlive this object.
// inputSurfaceSize    - dimensions of the input surface texture.
// reportedSensorSize  - sensor size advertised to clients, used when building
//                       result metadata.
// cameraDeviceCallback - callback used to report capture results back to the
//                        camera framework.
VirtualCameraRenderThread::VirtualCameraRenderThread(
    VirtualCameraSessionContext& sessionContext,
    const Resolution inputSurfaceSize, const Resolution reportedSensorSize,
    std::shared_ptr<ICameraDeviceCallback> cameraDeviceCallback)
    : mCameraDeviceCallback(cameraDeviceCallback),
      mInputSurfaceSize(inputSurfaceSize),
      mReportedSensorSize(reportedSensorSize),
      mSessionContext(sessionContext) {
}
308
// Requests the render loop to exit and joins the thread, so no in-flight task
// can touch a partially destroyed instance.
VirtualCameraRenderThread::~VirtualCameraRenderThread() {
  stop();
  if (mThread.joinable()) {
    mThread.join();
  }
}
315
// Immutable snapshot of one capture request: frame number, the output buffers
// to fill, and the request settings to apply.
ProcessCaptureRequestTask::ProcessCaptureRequestTask(
    int frameNumber, const std::vector<CaptureRequestBuffer>& requestBuffers,
    const RequestSettings& requestSettings)
    : mFrameNumber(frameNumber),
      mBuffers(requestBuffers),
      mRequestSettings(requestSettings) {
}

// Returns the frame number of the originating capture request.
int ProcessCaptureRequestTask::getFrameNumber() const {
  return mFrameNumber;
}

// Returns the output buffers to be filled for this request.
const std::vector<CaptureRequestBuffer>& ProcessCaptureRequestTask::getBuffers()
    const {
  return mBuffers;
}

// Returns the settings carried by this capture request.
const RequestSettings& ProcessCaptureRequestTask::getRequestSettings() const {
  return mRequestSettings;
}
336
Jan Sebechlebsky5cb39962023-11-22 17:33:07 +0100337void VirtualCameraRenderThread::enqueueTask(
338 std::unique_ptr<ProcessCaptureRequestTask> task) {
339 std::lock_guard<std::mutex> lock(mLock);
340 mQueue.emplace_back(std::move(task));
341 mCondVar.notify_one();
342}
343
344void VirtualCameraRenderThread::flush() {
345 std::lock_guard<std::mutex> lock(mLock);
Jan Sebechlebskyb0d8cab2023-11-28 10:55:04 +0100346 while (!mQueue.empty()) {
347 std::unique_ptr<ProcessCaptureRequestTask> task = std::move(mQueue.front());
348 mQueue.pop_front();
Jan Sebechlebsky5cb39962023-11-22 17:33:07 +0100349 flushCaptureRequest(*task);
350 }
351}
352
// Spawns the render thread; threadLoop() runs until stop() is requested.
void VirtualCameraRenderThread::start() {
  mThread = std::thread(&VirtualCameraRenderThread::threadLoop, this);
}
356
// Asks the render loop to exit after its current task. Does not join the
// thread — the destructor does that.
void VirtualCameraRenderThread::stop() {
  {
    std::lock_guard<std::mutex> lock(mLock);
    mPendingExit = true;
    // Wake up dequeueTask() so it can observe mPendingExit.
    mCondVar.notify_one();
  }
}
364
// Blocks until the render thread has created the input surface (set in
// threadLoop) and returns it.
// NOTE(review): std::promise::get_future may only be called once, so this
// appears to support a single caller — confirm with call sites.
sp<Surface> VirtualCameraRenderThread::getInputSurface() {
  return mInputSurfacePromise.get_future().get();
}
368
// Blocks until a task is queued or an exit is pending. Returns the oldest
// queued task, or nullptr when the thread should exit (exit takes precedence
// even if tasks are still queued).
std::unique_ptr<ProcessCaptureRequestTask>
VirtualCameraRenderThread::dequeueTask() {
  std::unique_lock<std::mutex> lock(mLock);
  // Clang's thread safety analysis doesn't perform alias analysis,
  // so it doesn't support moveable std::unique_lock.
  //
  // Lock assertion below is basically explicit declaration that
  // the lock is held in this scope, which is true, since it's only
  // released during waiting inside mCondVar.wait calls.
  ScopedLockAssertion lockAssertion(mLock);

  mCondVar.wait(lock, [this]() REQUIRES(mLock) {
    return mPendingExit || !mQueue.empty();
  });
  if (mPendingExit) {
    return nullptr;
  }
  std::unique_ptr<ProcessCaptureRequestTask> task = std::move(mQueue.front());
  mQueue.pop_front();
  return task;
}
390
// Render thread main loop: sets up the EGL context, shader programs and the
// input surface texture on this thread, then processes queued capture
// requests until stop() is requested. Teardown of the EGL objects also
// happens here, on the same thread that created them.
void VirtualCameraRenderThread::threadLoop() {
  ALOGV("Render thread starting");

  mEglDisplayContext = std::make_unique<EglDisplayContext>();
  mEglTextureYuvProgram =
      std::make_unique<EglTextureProgram>(EglTextureProgram::TextureFormat::YUV);
  mEglTextureRgbProgram = std::make_unique<EglTextureProgram>(
      EglTextureProgram::TextureFormat::RGBA);
  mEglSurfaceTexture = std::make_unique<EglSurfaceTexture>(
      mInputSurfaceSize.width, mInputSurfaceSize.height);

  // Unblocks getInputSurface() callers now that the surface exists.
  mInputSurfacePromise.set_value(mEglSurfaceTexture->getSurface());

  // dequeueTask() returns nullptr once an exit is pending, ending the loop.
  while (std::unique_ptr<ProcessCaptureRequestTask> task = dequeueTask()) {
    processCaptureRequest(*task);
  }

  // Destroy EGL utilities still on the render thread.
  mEglSurfaceTexture.reset();
  mEglTextureRgbProgram.reset();
  mEglTextureYuvProgram.reset();
  mEglDisplayContext.reset();

  ALOGV("Render thread exiting");
}
416
// Processes one capture request end-to-end:
//  1. Optionally throttles frame acquisition to honor the request's max fps.
//  2. Acquires the newest frame from the input surface texture.
//  3. Renders it into every output buffer (JPEG path for BLOB streams,
//     direct render otherwise).
//  4. Sends a shutter notification (plus per-buffer error notifications for
//     failed buffers) and the final CaptureResult through the device
//     callback.
void VirtualCameraRenderThread::processCaptureRequest(
    const ProcessCaptureRequestTask& request) {
  std::chrono::nanoseconds timestamp =
      std::chrono::duration_cast<std::chrono::nanoseconds>(
          std::chrono::steady_clock::now().time_since_epoch());
  // Atomically publish this acquisition time and fetch the previous one to
  // compute the elapsed frame duration below.
  std::chrono::nanoseconds lastAcquisitionTimestamp(
      mLastAcquisitionTimestampNanoseconds.exchange(timestamp.count(),
                                                    std::memory_order_relaxed));

  if (request.getRequestSettings().fpsRange) {
    // Clamp to at least 1 fps to avoid division by zero.
    const int maxFps =
        std::max(1, request.getRequestSettings().fpsRange->maxFps);
    const std::chrono::nanoseconds minFrameDuration(
        static_cast<uint64_t>(1e9 / maxFps));
    const std::chrono::nanoseconds frameDuration =
        timestamp - lastAcquisitionTimestamp;
    if (frameDuration < minFrameDuration) {
      // We're too fast for the configured maxFps, let's wait a bit.
      const std::chrono::nanoseconds sleepTime =
          minFrameDuration - frameDuration;
      ALOGV("Current frame duration would be %" PRIu64
            " ns corresponding to, "
            "sleeping for %" PRIu64
            " ns before updating texture to match maxFps %d",
            static_cast<uint64_t>(frameDuration.count()),
            static_cast<uint64_t>(sleepTime.count()), maxFps);

      std::this_thread::sleep_for(sleepTime);
      // Re-stamp after the sleep so the reported timestamp matches the
      // actual acquisition time.
      timestamp = std::chrono::duration_cast<std::chrono::nanoseconds>(
          std::chrono::steady_clock::now().time_since_epoch());
      mLastAcquisitionTimestampNanoseconds.store(timestamp.count(),
                                                 std::memory_order_relaxed);
    }
  }

  // Acquire new (most recent) image from the Surface.
  mEglSurfaceTexture->updateTexture();

  CaptureResult captureResult;
  captureResult.fmqResultSize = 0;
  captureResult.frameNumber = request.getFrameNumber();
  // Partial result needs to be set to 1 when metadata are present.
  captureResult.partialResult = 1;
  captureResult.inputBuffer.streamId = -1;
  captureResult.physicalCameraMetadata.resize(0);
  captureResult.result = createCaptureResultMetadata(
      timestamp, request.getRequestSettings(), mReportedSensorSize);

  const std::vector<CaptureRequestBuffer>& buffers = request.getBuffers();
  captureResult.outputBuffers.resize(buffers.size());

  // Render into each requested output buffer; mark failures per-buffer so
  // the remaining buffers of the request can still succeed.
  for (int i = 0; i < buffers.size(); ++i) {
    const CaptureRequestBuffer& reqBuffer = buffers[i];
    StreamBuffer& resBuffer = captureResult.outputBuffers[i];
    resBuffer.streamId = reqBuffer.getStreamId();
    resBuffer.bufferId = reqBuffer.getBufferId();
    resBuffer.status = BufferStatus::OK;

    const std::optional<Stream> streamConfig =
        mSessionContext.getStreamConfig(reqBuffer.getStreamId());

    if (!streamConfig.has_value()) {
      resBuffer.status = BufferStatus::ERROR;
      continue;
    }

    // BLOB streams get a JPEG-compressed frame; other formats are rendered
    // directly into the stream buffer.
    auto status = streamConfig->format == PixelFormat::BLOB
                      ? renderIntoBlobStreamBuffer(
                            reqBuffer.getStreamId(), reqBuffer.getBufferId(),
                            captureResult.result, request.getRequestSettings(),
                            reqBuffer.getFence())
                      : renderIntoImageStreamBuffer(reqBuffer.getStreamId(),
                                                    reqBuffer.getBufferId(),
                                                    reqBuffer.getFence());
    if (!status.isOk()) {
      resBuffer.status = BufferStatus::ERROR;
    }
  }

  // Always send the shutter message; add an ERROR_BUFFER message for every
  // buffer that failed above.
  std::vector<NotifyMsg> notifyMsg{
      createShutterNotifyMsg(request.getFrameNumber(), timestamp)};
  for (const StreamBuffer& resBuffer : captureResult.outputBuffers) {
    if (resBuffer.status != BufferStatus::OK) {
      notifyMsg.push_back(createBufferErrorNotifyMsg(request.getFrameNumber(),
                                                     resBuffer.streamId));
    }
  }

  auto status = mCameraDeviceCallback->notify(notifyMsg);
  if (!status.isOk()) {
    ALOGE("%s: notify call failed: %s", __func__,
          status.getDescription().c_str());
    return;
  }

  std::vector<::aidl::android::hardware::camera::device::CaptureResult>
      captureResults(1);
  captureResults[0] = std::move(captureResult);

  status = mCameraDeviceCallback->processCaptureResult(captureResults);
  if (!status.isOk()) {
    ALOGE("%s: processCaptureResult call failed: %s", __func__,
          status.getDescription().c_str());
    return;
  }

  ALOGV("%s: Successfully called processCaptureResult", __func__);
}
525
// Fails a capture request without rendering it: sends an ERROR_REQUEST
// notification and returns every buffer with ERROR status.
void VirtualCameraRenderThread::flushCaptureRequest(
    const ProcessCaptureRequestTask& request) {
  CaptureResult captureResult;
  captureResult.fmqResultSize = 0;
  captureResult.frameNumber = request.getFrameNumber();
  captureResult.inputBuffer.streamId = -1;

  const std::vector<CaptureRequestBuffer>& buffers = request.getBuffers();
  captureResult.outputBuffers.resize(buffers.size());

  for (int i = 0; i < buffers.size(); ++i) {
    const CaptureRequestBuffer& reqBuffer = buffers[i];
    StreamBuffer& resBuffer = captureResult.outputBuffers[i];
    resBuffer.streamId = reqBuffer.getStreamId();
    resBuffer.bufferId = reqBuffer.getBufferId();
    resBuffer.status = BufferStatus::ERROR;
    sp<Fence> fence = reqBuffer.getFence();
    if (fence != nullptr && fence->isValid()) {
      // The buffer was never written; the acquire fence is duplicated into
      // the release fence — presumably so the consumer still waits for it
      // before reusing the buffer. Confirm against the HAL contract.
      resBuffer.releaseFence.fds.emplace_back(fence->dup());
    }
  }

  auto status = mCameraDeviceCallback->notify(
      {createRequestErrorNotifyMsg(request.getFrameNumber())});
  if (!status.isOk()) {
    ALOGE("%s: notify call failed: %s", __func__,
          status.getDescription().c_str());
    return;
  }

  std::vector<::aidl::android::hardware::camera::device::CaptureResult>
      captureResults(1);
  captureResults[0] = std::move(captureResult);

  status = mCameraDeviceCallback->processCaptureResult(captureResults);
  if (!status.isOk()) {
    ALOGE("%s: processCaptureResult call failed: %s", __func__,
          status.getDescription().c_str());
  }
}
566
Jan Sebechlebsky4ce32082024-02-14 16:02:11 +0100567std::vector<uint8_t> VirtualCameraRenderThread::createThumbnail(
568 const Resolution resolution, const int quality) {
569 if (resolution.width == 0 || resolution.height == 0) {
570 ALOGV("%s: Skipping thumbnail creation, zero size requested", __func__);
571 return {};
572 }
573
574 ALOGV("%s: Creating thumbnail with size %d x %d, quality %d", __func__,
575 resolution.width, resolution.height, quality);
Jan Sebechlebskyb3771312024-03-15 10:38:02 +0100576 Resolution bufferSize = roundTo2DctSize(resolution);
Jan Sebechlebsky4ce32082024-02-14 16:02:11 +0100577 std::shared_ptr<EglFrameBuffer> framebuffer = allocateTemporaryFramebuffer(
Jan Sebechlebskyb3771312024-03-15 10:38:02 +0100578 mEglDisplayContext->getEglDisplay(), bufferSize.width, bufferSize.height);
Jan Sebechlebsky4ce32082024-02-14 16:02:11 +0100579 if (framebuffer == nullptr) {
580 ALOGE(
581 "Failed to allocate temporary framebuffer for JPEG thumbnail "
582 "compression");
583 return {};
584 }
585
586 // TODO(b/324383963) Add support for letterboxing if the thumbnail size
587 // doesn't correspond
588 // to input texture aspect ratio.
Jan Sebechlebskyb3771312024-03-15 10:38:02 +0100589 if (!renderIntoEglFramebuffer(*framebuffer, /*fence=*/nullptr,
590 Rect(resolution.width, resolution.height))
591 .isOk()) {
Jan Sebechlebsky4ce32082024-02-14 16:02:11 +0100592 ALOGE(
593 "Failed to render input texture into temporary framebuffer for JPEG "
594 "thumbnail");
595 return {};
596 }
597
Jan Sebechlebsky4ce32082024-02-14 16:02:11 +0100598 std::vector<uint8_t> compressedThumbnail;
599 compressedThumbnail.resize(kJpegThumbnailBufferSize);
Jan Sebechlebskyb3771312024-03-15 10:38:02 +0100600 ALOGE("%s: Compressing thumbnail %d x %d", __func__, resolution.width,
601 resolution.height);
602 std::optional<size_t> compressedSize =
603 compressJpeg(resolution.width, resolution.height, quality,
604 framebuffer->getHardwareBuffer(), {},
605 compressedThumbnail.size(), compressedThumbnail.data());
Jan Sebechlebsky4ce32082024-02-14 16:02:11 +0100606 if (!compressedSize.has_value()) {
607 ALOGE("%s: Failed to compress jpeg thumbnail", __func__);
608 return {};
609 }
610 compressedThumbnail.resize(compressedSize.value());
611 return compressedThumbnail;
612}
613
// Renders the current input frame into a BLOB (JPEG) stream buffer:
// renders into a temporary DCT-aligned YUV framebuffer, builds the EXIF APP1
// segment (with optional thumbnail), compresses to JPEG directly into the
// locked output buffer, and appends the CameraBlob footer describing the
// actual JPEG size (per the camera HAL BLOB convention).
// Returns INTERNAL_ERROR when any step fails.
ndk::ScopedAStatus VirtualCameraRenderThread::renderIntoBlobStreamBuffer(
    const int streamId, const int bufferId, const CameraMetadata& resultMetadata,
    const RequestSettings& requestSettings, sp<Fence> fence) {
  std::shared_ptr<AHardwareBuffer> hwBuffer =
      mSessionContext.fetchHardwareBuffer(streamId, bufferId);
  if (hwBuffer == nullptr) {
    ALOGE("%s: Failed to fetch hardware buffer %d for streamId %d", __func__,
          bufferId, streamId);
    return cameraStatus(Status::INTERNAL_ERROR);
  }

  std::optional<Stream> stream = mSessionContext.getStreamConfig(streamId);
  if (!stream.has_value()) {
    ALOGE("%s, failed to fetch information about stream %d", __func__, streamId);
    return cameraStatus(Status::INTERNAL_ERROR);
  }

  ALOGV("%s: Rendering JPEG with size %d x %d, quality %d", __func__,
        stream->width, stream->height, requestSettings.jpegQuality);

  // Let's create YUV framebuffer and render the surface into this.
  // This will take care about rescaling as well as potential format conversion.
  // The buffer dimensions need to be rounded to nearest multiple of JPEG DCT
  // size, however we pass the viewport corresponding to size of the stream so
  // the image will be only rendered to the area corresponding to the stream
  // size.
  Resolution bufferSize =
      roundTo2DctSize(Resolution(stream->width, stream->height));
  std::shared_ptr<EglFrameBuffer> framebuffer = allocateTemporaryFramebuffer(
      mEglDisplayContext->getEglDisplay(), bufferSize.width, bufferSize.height);
  if (framebuffer == nullptr) {
    ALOGE("Failed to allocate temporary framebuffer for JPEG compression");
    return cameraStatus(Status::INTERNAL_ERROR);
  }

  // Render into temporary framebuffer.
  ndk::ScopedAStatus status = renderIntoEglFramebuffer(
      *framebuffer, /*fence=*/nullptr, Rect(stream->width, stream->height));
  if (!status.isOk()) {
    ALOGE("Failed to render input texture into temporary framebuffer");
    return status;
  }

  // Lock the output buffer planes for CPU writing; the acquire fence is
  // honored by the lock guard.
  PlanesLockGuard planesLock(hwBuffer, AHARDWAREBUFFER_USAGE_CPU_READ_RARELY,
                             fence);
  if (planesLock.getStatus() != OK) {
    return cameraStatus(Status::INTERNAL_ERROR);
  }

  std::vector<uint8_t> app1ExifData =
      createExif(Resolution(stream->width, stream->height), resultMetadata,
                 createThumbnail(requestSettings.thumbnailResolution,
                                 requestSettings.thumbnailJpegQuality));
  // Leave room at the end of the buffer for the CameraBlob footer.
  std::optional<size_t> compressedSize = compressJpeg(
      stream->width, stream->height, requestSettings.jpegQuality,
      framebuffer->getHardwareBuffer(), app1ExifData,
      stream->bufferSize - sizeof(CameraBlob), (*planesLock).planes[0].data);

  if (!compressedSize.has_value()) {
    ALOGE("%s: Failed to compress JPEG image", __func__);
    return cameraStatus(Status::INTERNAL_ERROR);
  }

  // CameraBlob footer at the very end of the buffer tells the consumer the
  // actual size of the compressed JPEG payload.
  CameraBlob cameraBlob{
      .blobId = CameraBlobId::JPEG,
      .blobSizeBytes = static_cast<int32_t>(compressedSize.value())};

  memcpy(reinterpret_cast<uint8_t*>((*planesLock).planes[0].data) +
             (stream->bufferSize - sizeof(cameraBlob)),
         &cameraBlob, sizeof(cameraBlob));

  ALOGV("%s: Successfully compressed JPEG image, resulting size %zu B",
        __func__, compressedSize.value());

  return ndk::ScopedAStatus::ok();
}
690
691ndk::ScopedAStatus VirtualCameraRenderThread::renderIntoImageStreamBuffer(
692 int streamId, int bufferId, sp<Fence> fence) {
693 ALOGV("%s", __func__);
694
695 const std::chrono::nanoseconds before =
696 std::chrono::duration_cast<std::chrono::nanoseconds>(
697 std::chrono::steady_clock::now().time_since_epoch());
698
699 // Render test pattern using EGL.
700 std::shared_ptr<EglFrameBuffer> framebuffer =
701 mSessionContext.fetchOrCreateEglFramebuffer(
702 mEglDisplayContext->getEglDisplay(), streamId, bufferId);
703 if (framebuffer == nullptr) {
704 ALOGE(
705 "%s: Failed to get EGL framebuffer corresponding to buffer id "
706 "%d for streamId %d",
707 __func__, bufferId, streamId);
708 return cameraStatus(Status::ILLEGAL_ARGUMENT);
709 }
710
Jan Sebechlebsky042d1fb2023-12-12 16:37:00 +0100711 ndk::ScopedAStatus status = renderIntoEglFramebuffer(*framebuffer, fence);
Jan Sebechlebsky5cb39962023-11-22 17:33:07 +0100712
713 const std::chrono::nanoseconds after =
714 std::chrono::duration_cast<std::chrono::nanoseconds>(
715 std::chrono::steady_clock::now().time_since_epoch());
716
717 ALOGV("Rendering to buffer %d, stream %d took %lld ns", bufferId, streamId,
718 after.count() - before.count());
719
720 return ndk::ScopedAStatus::ok();
721}
722
// Draws the input surface texture into the given EGL framebuffer.
//
// framebuffer - render target; drawn with the YUV or RGBA texture program
//               depending on the source buffer's pixel format.
// fence       - acquire fence to wait on (up to kAcquireFenceTimeout) before
//               touching the framebuffer; may be null/invalid.
// viewport    - optional sub-rectangle to render into; defaults to the whole
//               framebuffer.
// Returns INTERNAL_ERROR on fence timeout or draw failure, ok() otherwise.
ndk::ScopedAStatus VirtualCameraRenderThread::renderIntoEglFramebuffer(
    EglFrameBuffer& framebuffer, sp<Fence> fence, std::optional<Rect> viewport) {
  ALOGV("%s", __func__);
  // Wait for fence to clear.
  if (fence != nullptr && fence->isValid()) {
    status_t ret = fence->wait(kAcquireFenceTimeout.count());
    if (ret != 0) {
      ALOGE("Timeout while waiting for the acquire fence for buffer");
      return cameraStatus(Status::INTERNAL_ERROR);
    }
  }

  mEglDisplayContext->makeCurrent();
  framebuffer.beforeDraw();

  // Restrict rendering to the requested viewport (e.g. when the framebuffer
  // is DCT-aligned and larger than the actual image).
  Rect viewportRect =
      viewport.value_or(Rect(framebuffer.getWidth(), framebuffer.getHeight()));
  glViewport(viewportRect.leftTop().x, viewportRect.leftTop().y,
             viewportRect.getWidth(), viewportRect.getHeight());

  sp<GraphicBuffer> textureBuffer = mEglSurfaceTexture->getCurrentBuffer();
  if (textureBuffer == nullptr) {
    // If there's no current buffer, nothing was written to the surface and
    // the texture is not initialized yet. Clear the framebuffer to a solid
    // color instead of rendering the texture.
    // NOTE(review): the clear color (0, 0.5, 0.5) is teal, not black as a
    // previous comment claimed — confirm which is intended.
    glClearColor(0.0f, 0.5f, 0.5f, 0.0f);
    glClear(GL_COLOR_BUFFER_BIT);
  } else {
    // Pick the shader program matching the source buffer's pixel format.
    const bool renderSuccess =
        isYuvFormat(static_cast<PixelFormat>(textureBuffer->getPixelFormat()))
            ? mEglTextureYuvProgram->draw(
                  mEglSurfaceTexture->getTextureId(),
                  mEglSurfaceTexture->getTransformMatrix())
            : mEglTextureRgbProgram->draw(
                  mEglSurfaceTexture->getTextureId(),
                  mEglSurfaceTexture->getTransformMatrix());
    if (!renderSuccess) {
      ALOGE("%s: Failed to render texture", __func__);
      return cameraStatus(Status::INTERNAL_ERROR);
    }
  }
  framebuffer.afterDraw();

  return ndk::ScopedAStatus::ok();
}
768
Jan Sebechlebsky5cb39962023-11-22 17:33:07 +0100769} // namespace virtualcamera
770} // namespace companion
771} // namespace android