/*
 * Copyright (C) 2023 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#define LOG_TAG "VirtualCameraRenderThread"
#include "VirtualCameraRenderThread.h"

#include <chrono>
#include <cstdint>
#include <cstring>
#include <future>
#include <memory>
#include <mutex>
#include <thread>
#include <utility>
#include <vector>

#include "Exif.h"
#include "GLES/gl.h"
#include "VirtualCameraCaptureResult.h"
#include "VirtualCameraDevice.h"
#include "VirtualCameraSessionContext.h"
#include "aidl/android/hardware/camera/common/Status.h"
#include "aidl/android/hardware/camera/device/BufferStatus.h"
#include "aidl/android/hardware/camera/device/CameraBlob.h"
#include "aidl/android/hardware/camera/device/CameraBlobId.h"
#include "aidl/android/hardware/camera/device/CameraMetadata.h"
#include "aidl/android/hardware/camera/device/CaptureResult.h"
#include "aidl/android/hardware/camera/device/ErrorCode.h"
#include "aidl/android/hardware/camera/device/ICameraDeviceCallback.h"
#include "aidl/android/hardware/camera/device/NotifyMsg.h"
#include "aidl/android/hardware/camera/device/ShutterMsg.h"
#include "aidl/android/hardware/camera/device/StreamBuffer.h"
#include "android-base/thread_annotations.h"
#include "android/binder_auto_utils.h"
#include "android/hardware_buffer.h"
#include "hardware/gralloc.h"
#include "system/camera_metadata.h"
#include "ui/GraphicBuffer.h"
#include "ui/Rect.h"
#include "util/EglFramebuffer.h"
#include "util/JpegUtil.h"
#include "util/MetadataUtil.h"
#include "util/Util.h"
#include "utils/Errors.h"

namespace android {
namespace companion {
namespace virtualcamera {

using ::aidl::android::hardware::camera::common::Status;
using ::aidl::android::hardware::camera::device::BufferStatus;
using ::aidl::android::hardware::camera::device::CameraBlob;
using ::aidl::android::hardware::camera::device::CameraBlobId;
using ::aidl::android::hardware::camera::device::CameraMetadata;
using ::aidl::android::hardware::camera::device::CaptureResult;
using ::aidl::android::hardware::camera::device::ErrorCode;
using ::aidl::android::hardware::camera::device::ErrorMsg;
using ::aidl::android::hardware::camera::device::ICameraDeviceCallback;
using ::aidl::android::hardware::camera::device::NotifyMsg;
using ::aidl::android::hardware::camera::device::ShutterMsg;
using ::aidl::android::hardware::camera::device::Stream;
using ::aidl::android::hardware::camera::device::StreamBuffer;
using ::aidl::android::hardware::graphics::common::PixelFormat;
using ::android::base::ScopedLockAssertion;

using ::android::hardware::camera::common::helper::ExifUtils;

Jan Sebechlebsky5cb39962023-11-22 17:33:07 +010081namespace {
82
Jan Sebechlebsky18ac32c2024-06-07 09:53:53 +020083// helper type for the visitor
84template <class... Ts>
85struct overloaded : Ts... {
86 using Ts::operator()...;
87};
88// explicit deduction guide (not needed as of C++20)
89template <class... Ts>
90overloaded(Ts...) -> overloaded<Ts...>;
91
using namespace std::chrono_literals;

static constexpr std::chrono::milliseconds kAcquireFenceTimeout = 500ms;

static constexpr size_t kJpegThumbnailBufferSize = 32 * 1024;  // 32 KiB

static constexpr UpdateTextureTask kUpdateTextureTask;

NotifyMsg createShutterNotifyMsg(int frameNumber,
                                 std::chrono::nanoseconds timestamp) {
  NotifyMsg msg;
  msg.set<NotifyMsg::Tag::shutter>(ShutterMsg{
      .frameNumber = frameNumber,
      .timestamp = timestamp.count(),
  });
  return msg;
}

NotifyMsg createBufferErrorNotifyMsg(int frameNumber, int streamId) {
  NotifyMsg msg;
  msg.set<NotifyMsg::Tag::error>(ErrorMsg{.frameNumber = frameNumber,
                                          .errorStreamId = streamId,
                                          .errorCode = ErrorCode::ERROR_BUFFER});
  return msg;
}

NotifyMsg createRequestErrorNotifyMsg(int frameNumber) {
  NotifyMsg msg;
  msg.set<NotifyMsg::Tag::error>(ErrorMsg{
      .frameNumber = frameNumber,
      // errorStreamId needs to be set to -1 for ERROR_REQUEST
      // (not tied to a specific stream).
      .errorStreamId = -1,
      .errorCode = ErrorCode::ERROR_REQUEST});
  return msg;
}

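// Allocates a scratch YUV AHardwareBuffer of the given size (GPU-renderable
// and CPU-readable) and wraps it in an EglFrameBuffer so it can serve as a
// temporary render target for JPEG / thumbnail compression. Returns nullptr
// if the buffer allocation fails.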
std::shared_ptr<EglFrameBuffer> allocateTemporaryFramebuffer(
    EGLDisplay eglDisplay, const int width, const int height) {
  const AHardwareBuffer_Desc desc{
      .width = static_cast<uint32_t>(width),
      .height = static_cast<uint32_t>(height),
      .layers = 1,
      .format = AHARDWAREBUFFER_FORMAT_Y8Cb8Cr8_420,
      .usage = AHARDWAREBUFFER_USAGE_GPU_FRAMEBUFFER |
               AHARDWAREBUFFER_USAGE_CPU_READ_OFTEN,
      .rfu0 = 0,
      .rfu1 = 0};

  AHardwareBuffer* hwBufferPtr;
  int status = AHardwareBuffer_allocate(&desc, &hwBufferPtr);
  if (status != NO_ERROR) {
    ALOGE(
        "%s: Failed to allocate hardware buffer for temporary framebuffer: %d",
        __func__, status);
    return nullptr;
  }

  return std::make_shared<EglFrameBuffer>(
      eglDisplay,
      std::shared_ptr<AHardwareBuffer>(hwBufferPtr, AHardwareBuffer_release));
}

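// Returns true when the pixel format carries YUV data, so callers can pick
// the YUV texture program instead of the RGBA one when sampling the buffer.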
bool isYuvFormat(const PixelFormat pixelFormat) {
  switch (static_cast<android_pixel_format_t>(pixelFormat)) {
    case HAL_PIXEL_FORMAT_YCBCR_422_I:
    case HAL_PIXEL_FORMAT_YCBCR_422_SP:
    case HAL_PIXEL_FORMAT_Y16:
    case HAL_PIXEL_FORMAT_YV12:
    case HAL_PIXEL_FORMAT_YCBCR_420_888:
      return true;
    default:
      return false;
  }
}

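// Builds the EXIF APP1 segment for a compressed JPEG: copies the capture
// result metadata into it (converted to the HAL metadata format), stamps the
// virtual camera make/model, and optionally embeds an already compressed
// thumbnail. Returns an empty vector when APP1 generation fails.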
std::vector<uint8_t> createExif(
    Resolution imageSize, const CameraMetadata resultMetadata,
    const std::vector<uint8_t>& compressedThumbnail = {}) {
  std::unique_ptr<ExifUtils> exifUtils(ExifUtils::create());
  exifUtils->initialize();

  // Make a copy of the metadata in order to convert it to the HAL metadata
  // format (as opposed to the AIDL class) and use the setFromMetadata method
  // from ExifUtils.
  camera_metadata_t* rawSettings =
      clone_camera_metadata((camera_metadata_t*)resultMetadata.metadata.data());
  if (rawSettings != nullptr) {
    android::hardware::camera::common::helper::CameraMetadata halMetadata(
        rawSettings);
    exifUtils->setFromMetadata(halMetadata, imageSize.width, imageSize.height);
  }
  exifUtils->setMake(VirtualCameraDevice::kDefaultMakeAndModel);
  exifUtils->setModel(VirtualCameraDevice::kDefaultMakeAndModel);
  exifUtils->setFlash(0);

  std::vector<uint8_t> app1Data;

  size_t thumbnailDataSize = compressedThumbnail.size();
  const void* thumbnailData =
      thumbnailDataSize > 0
          ? reinterpret_cast<const void*>(compressedThumbnail.data())
          : nullptr;

  if (!exifUtils->generateApp1(thumbnailData, thumbnailDataSize)) {
    ALOGE("%s: Failed to generate APP1 segment for EXIF metadata", __func__);
    return app1Data;
  }

  const uint8_t* data = exifUtils->getApp1Buffer();
  const size_t size = exifUtils->getApp1Length();

  app1Data.insert(app1Data.end(), data, data + size);
  return app1Data;
}

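// Returns the longest frame duration we are willing to wait for a new input
// frame, derived from the minimum fps of the requested fps range (or from
// VirtualCameraDevice::kMinFps when no range was requested). For example, a
// (15, 30) fps range yields 1e9 / 15 ns, i.e. roughly 66.7 ms per frame.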
std::chrono::nanoseconds getMaxFrameDuration(
    const RequestSettings& requestSettings) {
  if (requestSettings.fpsRange.has_value()) {
    return std::chrono::nanoseconds(static_cast<uint64_t>(
        1e9 / std::max(1, requestSettings.fpsRange->minFps)));
  }
  return std::chrono::nanoseconds(
      static_cast<uint64_t>(1e9 / VirtualCameraDevice::kMinFps));
}

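// Adapts the ConsumerBase frame-available callback to an arbitrary
// std::function, so the surface texture can wake up the render thread
// (via requestTextureUpdate) whenever the producer queues a new frame.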
class FrameAvailableListenerProxy : public ConsumerBase::FrameAvailableListener {
 public:
  FrameAvailableListenerProxy(std::function<void()> callback)
      : mOnFrameAvailableCallback(callback) {
  }

  virtual void onFrameAvailable(const BufferItem&) override {
    ALOGV("%s: onFrameAvailable", __func__);
    mOnFrameAvailableCallback();
  }

 private:
  std::function<void()> mOnFrameAvailableCallback;
};

}  // namespace

CaptureRequestBuffer::CaptureRequestBuffer(int streamId, int bufferId,
                                           sp<Fence> fence)
    : mStreamId(streamId), mBufferId(bufferId), mFence(fence) {
}

int CaptureRequestBuffer::getStreamId() const {
  return mStreamId;
}

int CaptureRequestBuffer::getBufferId() const {
  return mBufferId;
}

sp<Fence> CaptureRequestBuffer::getFence() const {
  return mFence;
}

VirtualCameraRenderThread::VirtualCameraRenderThread(
    VirtualCameraSessionContext& sessionContext,
    const Resolution inputSurfaceSize, const Resolution reportedSensorSize,
    std::shared_ptr<ICameraDeviceCallback> cameraDeviceCallback)
    : mCameraDeviceCallback(cameraDeviceCallback),
      mInputSurfaceSize(inputSurfaceSize),
      mReportedSensorSize(reportedSensorSize),
      mSessionContext(sessionContext),
      mInputSurfaceFuture(mInputSurfacePromise.get_future()) {
}

VirtualCameraRenderThread::~VirtualCameraRenderThread() {
  stop();
  if (mThread.joinable()) {
    mThread.join();
  }
}

ProcessCaptureRequestTask::ProcessCaptureRequestTask(
    int frameNumber, const std::vector<CaptureRequestBuffer>& requestBuffers,
    const RequestSettings& requestSettings)
    : mFrameNumber(frameNumber),
      mBuffers(requestBuffers),
      mRequestSettings(requestSettings) {
}

int ProcessCaptureRequestTask::getFrameNumber() const {
  return mFrameNumber;
}

const std::vector<CaptureRequestBuffer>& ProcessCaptureRequestTask::getBuffers()
    const {
  return mBuffers;
}

const RequestSettings& ProcessCaptureRequestTask::getRequestSettings() const {
  return mRequestSettings;
}

void VirtualCameraRenderThread::requestTextureUpdate() {
  std::lock_guard<std::mutex> lock(mLock);
  // If the queue is not empty, we don't need to set the
  // mTextureUpdateRequested flag, since the texture will be updated during
  // ProcessCaptureRequestTask processing anyway.
  if (mQueue.empty()) {
    mTextureUpdateRequested = true;
    mCondVar.notify_one();
  }
}

void VirtualCameraRenderThread::enqueueTask(
    std::unique_ptr<ProcessCaptureRequestTask> task) {
  std::lock_guard<std::mutex> lock(mLock);
  // When enqueuing a process capture request task, clear the
  // mTextureUpdateRequested flag. If this flag is set, the texture was not yet
  // updated and it will be updated when processing the
  // ProcessCaptureRequestTask anyway.
  mTextureUpdateRequested = false;
  mQueue.emplace_back(std::move(task));
  mCondVar.notify_one();
}

void VirtualCameraRenderThread::flush() {
  std::lock_guard<std::mutex> lock(mLock);
  while (!mQueue.empty()) {
    std::unique_ptr<ProcessCaptureRequestTask> task = std::move(mQueue.front());
    mQueue.pop_front();
    flushCaptureRequest(*task);
  }
}

void VirtualCameraRenderThread::start() {
  mThread = std::thread(&VirtualCameraRenderThread::threadLoop, this);
}

void VirtualCameraRenderThread::stop() {
  {
    std::lock_guard<std::mutex> lock(mLock);
    mPendingExit = true;
    mCondVar.notify_one();
  }
}

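// Returns the input Surface backed by the render thread's surface texture.
// Blocks until threadLoop() has created the surface texture and fulfilled
// mInputSurfacePromise.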
sp<Surface> VirtualCameraRenderThread::getInputSurface() {
  return mInputSurfaceFuture.get();
}

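// Blocks until there is something for the render thread to do and returns it:
// a null task when the thread should exit, kUpdateTextureTask when only a
// texture refresh was requested, or the next ProcessCaptureRequestTask from
// the queue.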
RenderThreadTask VirtualCameraRenderThread::dequeueTask() {
  std::unique_lock<std::mutex> lock(mLock);
  // Clang's thread safety analysis doesn't perform alias analysis,
  // so it doesn't support movable std::unique_lock.
  //
  // The lock assertion below is basically an explicit declaration that
  // the lock is held in this scope, which is true, since it's only
  // released while waiting inside the mCondVar.wait calls.
  ScopedLockAssertion lockAssertion(mLock);

  mCondVar.wait(lock, [this]() REQUIRES(mLock) {
    return mPendingExit || mTextureUpdateRequested || !mQueue.empty();
  });
  if (mPendingExit) {
    // A RenderThreadTask holding a null task signals the render thread to
    // terminate.
    return RenderThreadTask(nullptr);
  }
  if (mTextureUpdateRequested) {
    // If mTextureUpdateRequested is set, the queue is guaranteed to be empty;
    // return kUpdateTextureTask to signal that the render thread should update
    // the texture (consume a buffer from the input Surface).
    mTextureUpdateRequested = false;
    return RenderThreadTask(kUpdateTextureTask);
  }
  RenderThreadTask task(std::move(mQueue.front()));
  mQueue.pop_front();
  return task;
}

void VirtualCameraRenderThread::threadLoop() {
  ALOGV("Render thread starting");

  mEglDisplayContext = std::make_unique<EglDisplayContext>();
  mEglTextureYuvProgram =
      std::make_unique<EglTextureProgram>(EglTextureProgram::TextureFormat::YUV);
  mEglTextureRgbProgram = std::make_unique<EglTextureProgram>(
      EglTextureProgram::TextureFormat::RGBA);
  mEglSurfaceTexture = std::make_unique<EglSurfaceTexture>(
      mInputSurfaceSize.width, mInputSurfaceSize.height);
  sp<FrameAvailableListenerProxy> frameAvailableListener =
      sp<FrameAvailableListenerProxy>::make(
          [this]() { requestTextureUpdate(); });
  mEglSurfaceTexture->setFrameAvailableListener(frameAvailableListener);

  mInputSurfacePromise.set_value(mEglSurfaceTexture->getSurface());

  while (RenderThreadTask task = dequeueTask()) {
    std::visit(
        overloaded{[this](const std::unique_ptr<ProcessCaptureRequestTask>& t) {
                     processTask(*t);
                   },
                   [this](const UpdateTextureTask&) {
                     ALOGV("Idle update of the texture");
                     mEglSurfaceTexture->updateTexture();
                   }},
        task);
  }

  // Destroy EGL utilities still on the render thread.
  mEglSurfaceTexture.reset();
  mEglTextureRgbProgram.reset();
  mEglTextureYuvProgram.reset();
  mEglDisplayContext.reset();

  ALOGV("Render thread exiting");
}

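// Processes a single capture request: paces the capture to honor the requested
// fps range, acquires the most recent frame from the input Surface, renders it
// into every requested output buffer (BLOB/JPEG or image streams), and then
// delivers the shutter/error notifications and the capture result to the
// camera device callback.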
void VirtualCameraRenderThread::processTask(
    const ProcessCaptureRequestTask& request) {
  std::chrono::nanoseconds timestamp =
      std::chrono::duration_cast<std::chrono::nanoseconds>(
          std::chrono::steady_clock::now().time_since_epoch());
  const std::chrono::nanoseconds lastAcquisitionTimestamp(
      mLastAcquisitionTimestampNanoseconds.exchange(timestamp.count(),
                                                    std::memory_order_relaxed));

  if (request.getRequestSettings().fpsRange) {
    const int maxFps =
        std::max(1, request.getRequestSettings().fpsRange->maxFps);
    const std::chrono::nanoseconds minFrameDuration(
        static_cast<uint64_t>(1e9 / maxFps));
    const std::chrono::nanoseconds frameDuration =
        timestamp - lastAcquisitionTimestamp;
    if (frameDuration < minFrameDuration) {
      // We're too fast for the configured maxFps, let's wait a bit.
      const std::chrono::nanoseconds sleepTime =
          minFrameDuration - frameDuration;
      ALOGV("Current frame duration would be %" PRIu64
            " ns, sleeping for %" PRIu64
            " ns before updating texture to match maxFps %d",
            static_cast<uint64_t>(frameDuration.count()),
            static_cast<uint64_t>(sleepTime.count()), maxFps);

      std::this_thread::sleep_for(sleepTime);
      timestamp = std::chrono::duration_cast<std::chrono::nanoseconds>(
          std::chrono::steady_clock::now().time_since_epoch());
      mLastAcquisitionTimestampNanoseconds.store(timestamp.count(),
                                                 std::memory_order_relaxed);
    }
  }

  // Calculate the maximal amount of time we can afford to wait for the next
  // frame.
  const std::chrono::nanoseconds maxFrameDuration =
      getMaxFrameDuration(request.getRequestSettings());
  const std::chrono::nanoseconds elapsedDuration =
      timestamp - lastAcquisitionTimestamp;
  if (elapsedDuration < maxFrameDuration) {
    // We can afford to wait for the next frame.
    // Note that if there's already a new frame in the input Surface, the call
    // below returns immediately.
    bool gotNewFrame = mEglSurfaceTexture->waitForNextFrame(maxFrameDuration -
                                                            elapsedDuration);
    timestamp = std::chrono::duration_cast<std::chrono::nanoseconds>(
        std::chrono::steady_clock::now().time_since_epoch());
    if (!gotNewFrame) {
      ALOGV(
          "%s: No new frame received on input surface after waiting for "
          "%" PRIu64 "ns, repeating last frame.",
          __func__,
          static_cast<uint64_t>((timestamp - lastAcquisitionTimestamp).count()));
    }
    mLastAcquisitionTimestampNanoseconds.store(timestamp.count(),
                                               std::memory_order_relaxed);
  }
  // Acquire new (most recent) image from the Surface.
  mEglSurfaceTexture->updateTexture();

  CaptureResult captureResult;
  captureResult.fmqResultSize = 0;
  captureResult.frameNumber = request.getFrameNumber();
  // Partial result needs to be set to 1 when metadata are present.
  captureResult.partialResult = 1;
  captureResult.inputBuffer.streamId = -1;
  captureResult.physicalCameraMetadata.resize(0);
  captureResult.result = createCaptureResultMetadata(
      timestamp, request.getRequestSettings(), mReportedSensorSize);

  const std::vector<CaptureRequestBuffer>& buffers = request.getBuffers();
  captureResult.outputBuffers.resize(buffers.size());

  for (int i = 0; i < buffers.size(); ++i) {
    const CaptureRequestBuffer& reqBuffer = buffers[i];
    StreamBuffer& resBuffer = captureResult.outputBuffers[i];
    resBuffer.streamId = reqBuffer.getStreamId();
    resBuffer.bufferId = reqBuffer.getBufferId();
    resBuffer.status = BufferStatus::OK;

    const std::optional<Stream> streamConfig =
        mSessionContext.getStreamConfig(reqBuffer.getStreamId());

    if (!streamConfig.has_value()) {
      resBuffer.status = BufferStatus::ERROR;
      continue;
    }

    auto status = streamConfig->format == PixelFormat::BLOB
                      ? renderIntoBlobStreamBuffer(
                            reqBuffer.getStreamId(), reqBuffer.getBufferId(),
                            captureResult.result, request.getRequestSettings(),
                            reqBuffer.getFence())
                      : renderIntoImageStreamBuffer(reqBuffer.getStreamId(),
                                                    reqBuffer.getBufferId(),
                                                    reqBuffer.getFence());
    if (!status.isOk()) {
      resBuffer.status = BufferStatus::ERROR;
    }
  }

  std::vector<NotifyMsg> notifyMsg{
      createShutterNotifyMsg(request.getFrameNumber(), timestamp)};
  for (const StreamBuffer& resBuffer : captureResult.outputBuffers) {
    if (resBuffer.status != BufferStatus::OK) {
      notifyMsg.push_back(createBufferErrorNotifyMsg(request.getFrameNumber(),
                                                     resBuffer.streamId));
    }
  }

  auto status = mCameraDeviceCallback->notify(notifyMsg);
  if (!status.isOk()) {
    ALOGE("%s: notify call failed: %s", __func__,
          status.getDescription().c_str());
    return;
  }

  std::vector<::aidl::android::hardware::camera::device::CaptureResult>
      captureResults(1);
  captureResults[0] = std::move(captureResult);

  status = mCameraDeviceCallback->processCaptureResult(captureResults);
  if (!status.isOk()) {
    ALOGE("%s: processCaptureResult call failed: %s", __func__,
          status.getDescription().c_str());
    return;
  }

  ALOGV("%s: Successfully called processCaptureResult", __func__);
}

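// Completes an aborted capture request without rendering: notifies the client
// with ERROR_REQUEST and returns every buffer with ERROR status, propagating
// the request buffer's fence as the release fence.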
void VirtualCameraRenderThread::flushCaptureRequest(
    const ProcessCaptureRequestTask& request) {
  CaptureResult captureResult;
  captureResult.fmqResultSize = 0;
  captureResult.frameNumber = request.getFrameNumber();
  captureResult.inputBuffer.streamId = -1;

  const std::vector<CaptureRequestBuffer>& buffers = request.getBuffers();
  captureResult.outputBuffers.resize(buffers.size());

  for (int i = 0; i < buffers.size(); ++i) {
    const CaptureRequestBuffer& reqBuffer = buffers[i];
    StreamBuffer& resBuffer = captureResult.outputBuffers[i];
    resBuffer.streamId = reqBuffer.getStreamId();
    resBuffer.bufferId = reqBuffer.getBufferId();
    resBuffer.status = BufferStatus::ERROR;
    sp<Fence> fence = reqBuffer.getFence();
    if (fence != nullptr && fence->isValid()) {
      resBuffer.releaseFence.fds.emplace_back(fence->dup());
    }
  }

  auto status = mCameraDeviceCallback->notify(
      {createRequestErrorNotifyMsg(request.getFrameNumber())});
  if (!status.isOk()) {
    ALOGE("%s: notify call failed: %s", __func__,
          status.getDescription().c_str());
    return;
  }

  std::vector<::aidl::android::hardware::camera::device::CaptureResult>
      captureResults(1);
  captureResults[0] = std::move(captureResult);

  status = mCameraDeviceCallback->processCaptureResult(captureResults);
  if (!status.isOk()) {
    ALOGE("%s: processCaptureResult call failed: %s", __func__,
          status.getDescription().c_str());
  }
}

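// Renders the current input frame into a scratch framebuffer of the requested
// thumbnail resolution and JPEG-compresses it with the given quality. Returns
// the compressed thumbnail bytes, or an empty vector if the requested size is
// zero or any step fails.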
std::vector<uint8_t> VirtualCameraRenderThread::createThumbnail(
    const Resolution resolution, const int quality) {
  if (resolution.width == 0 || resolution.height == 0) {
    ALOGV("%s: Skipping thumbnail creation, zero size requested", __func__);
    return {};
  }

  ALOGV("%s: Creating thumbnail with size %d x %d, quality %d", __func__,
        resolution.width, resolution.height, quality);
  Resolution bufferSize = roundTo2DctSize(resolution);
  std::shared_ptr<EglFrameBuffer> framebuffer = allocateTemporaryFramebuffer(
      mEglDisplayContext->getEglDisplay(), bufferSize.width, bufferSize.height);
  if (framebuffer == nullptr) {
    ALOGE(
        "Failed to allocate temporary framebuffer for JPEG thumbnail "
        "compression");
    return {};
  }

  // TODO(b/324383963) Add support for letterboxing if the thumbnail size
  // doesn't correspond to the input texture aspect ratio.
  if (!renderIntoEglFramebuffer(*framebuffer, /*fence=*/nullptr,
                                Rect(resolution.width, resolution.height))
           .isOk()) {
    ALOGE(
        "Failed to render input texture into temporary framebuffer for JPEG "
        "thumbnail");
    return {};
  }

  std::vector<uint8_t> compressedThumbnail;
  compressedThumbnail.resize(kJpegThumbnailBufferSize);
  ALOGV("%s: Compressing thumbnail %d x %d", __func__, resolution.width,
        resolution.height);
  std::optional<size_t> compressedSize =
      compressJpeg(resolution.width, resolution.height, quality,
                   framebuffer->getHardwareBuffer(), {},
                   compressedThumbnail.size(), compressedThumbnail.data());
  if (!compressedSize.has_value()) {
    ALOGE("%s: Failed to compress jpeg thumbnail", __func__);
    return {};
  }
  compressedThumbnail.resize(compressedSize.value());
  return compressedThumbnail;
}

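// Renders the current frame into a BLOB (JPEG) stream buffer: draws into a
// scratch YUV framebuffer, compresses it together with the EXIF APP1 segment
// (and optional thumbnail) into the locked gralloc buffer, and appends the
// CameraBlob transport header at the end of the buffer.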
ndk::ScopedAStatus VirtualCameraRenderThread::renderIntoBlobStreamBuffer(
    const int streamId, const int bufferId, const CameraMetadata& resultMetadata,
    const RequestSettings& requestSettings, sp<Fence> fence) {
  std::shared_ptr<AHardwareBuffer> hwBuffer =
      mSessionContext.fetchHardwareBuffer(streamId, bufferId);
  if (hwBuffer == nullptr) {
    ALOGE("%s: Failed to fetch hardware buffer %d for streamId %d", __func__,
          bufferId, streamId);
    return cameraStatus(Status::INTERNAL_ERROR);
  }

  std::optional<Stream> stream = mSessionContext.getStreamConfig(streamId);
  if (!stream.has_value()) {
    ALOGE("%s: Failed to fetch information about stream %d", __func__, streamId);
    return cameraStatus(Status::INTERNAL_ERROR);
  }

  ALOGV("%s: Rendering JPEG with size %d x %d, quality %d", __func__,
        stream->width, stream->height, requestSettings.jpegQuality);

  // Let's create a YUV framebuffer and render the surface into it.
  // This will take care of rescaling as well as potential format conversion.
  // The buffer dimensions need to be rounded to the nearest multiple of the
  // JPEG DCT size; however, we pass a viewport corresponding to the size of
  // the stream, so the image will only be rendered into the area corresponding
  // to the stream size.
  Resolution bufferSize =
      roundTo2DctSize(Resolution(stream->width, stream->height));
  std::shared_ptr<EglFrameBuffer> framebuffer = allocateTemporaryFramebuffer(
      mEglDisplayContext->getEglDisplay(), bufferSize.width, bufferSize.height);
  if (framebuffer == nullptr) {
    ALOGE("Failed to allocate temporary framebuffer for JPEG compression");
    return cameraStatus(Status::INTERNAL_ERROR);
  }

  // Render into temporary framebuffer.
  ndk::ScopedAStatus status = renderIntoEglFramebuffer(
      *framebuffer, /*fence=*/nullptr, Rect(stream->width, stream->height));
  if (!status.isOk()) {
    ALOGE("Failed to render input texture into temporary framebuffer");
    return status;
  }

  PlanesLockGuard planesLock(hwBuffer, AHARDWAREBUFFER_USAGE_CPU_WRITE_OFTEN,
                             fence);
  if (planesLock.getStatus() != OK) {
    ALOGE("Failed to lock hwBuffer planes");
    return cameraStatus(Status::INTERNAL_ERROR);
  }

  std::vector<uint8_t> app1ExifData =
      createExif(Resolution(stream->width, stream->height), resultMetadata,
                 createThumbnail(requestSettings.thumbnailResolution,
                                 requestSettings.thumbnailJpegQuality));

  unsigned long outBufferSize = stream->bufferSize - sizeof(CameraBlob);
  void* outBuffer = (*planesLock).planes[0].data;
  std::optional<size_t> compressedSize = compressJpeg(
      stream->width, stream->height, requestSettings.jpegQuality,
      framebuffer->getHardwareBuffer(), app1ExifData, outBufferSize, outBuffer);

  if (!compressedSize.has_value()) {
    ALOGE("%s: Failed to compress JPEG image", __func__);
    return cameraStatus(Status::INTERNAL_ERROR);
  }

  // Add the transport header at the end of the JPEG output buffer.
  //
  // jpegBlobId must start at byte[buffer_size - sizeof(CameraBlob)],
  // where buffer_size is the size of the gralloc buffer.
  //
  // See
  // hardware/interfaces/camera/device/aidl/android/hardware/camera/device/CameraBlobId.aidl
  // for the full explanation of the following code.
  CameraBlob cameraBlob{
      .blobId = CameraBlobId::JPEG,
      .blobSizeBytes = static_cast<int32_t>(compressedSize.value())};

  // Copy the cameraBlob to the end of the JPEG buffer.
  uint8_t* jpegStreamEndAddress =
      reinterpret_cast<uint8_t*>((*planesLock).planes[0].data) +
      (stream->bufferSize - sizeof(cameraBlob));
  memcpy(jpegStreamEndAddress, &cameraBlob, sizeof(cameraBlob));

  ALOGV("%s: Successfully compressed JPEG image, resulting size %zu B",
        __func__, compressedSize.value());

  return ndk::ScopedAStatus::ok();
}

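// Renders the current frame directly into the EGL framebuffer wrapping the
// output image stream buffer identified by streamId/bufferId, waiting on the
// provided acquire fence first.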
ndk::ScopedAStatus VirtualCameraRenderThread::renderIntoImageStreamBuffer(
    int streamId, int bufferId, sp<Fence> fence) {
  ALOGV("%s", __func__);

  const std::chrono::nanoseconds before =
      std::chrono::duration_cast<std::chrono::nanoseconds>(
          std::chrono::steady_clock::now().time_since_epoch());

  // Render the current input texture into the output buffer using EGL.
  std::shared_ptr<EglFrameBuffer> framebuffer =
      mSessionContext.fetchOrCreateEglFramebuffer(
          mEglDisplayContext->getEglDisplay(), streamId, bufferId);
  if (framebuffer == nullptr) {
    ALOGE(
        "%s: Failed to get EGL framebuffer corresponding to buffer id "
        "%d for streamId %d",
        __func__, bufferId, streamId);
    return cameraStatus(Status::ILLEGAL_ARGUMENT);
  }

  ndk::ScopedAStatus status = renderIntoEglFramebuffer(*framebuffer, fence);

  const std::chrono::nanoseconds after =
      std::chrono::duration_cast<std::chrono::nanoseconds>(
          std::chrono::steady_clock::now().time_since_epoch());

  ALOGV("Rendering to buffer %d, stream %d took %lld ns", bufferId, streamId,
        after.count() - before.count());

  return ndk::ScopedAStatus::ok();
}

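// Waits for the buffer's acquire fence, then draws the most recently acquired
// surface texture (or a solid clear color when no frame has been produced yet)
// into the given EGL framebuffer, optionally restricted to a viewport.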
ndk::ScopedAStatus VirtualCameraRenderThread::renderIntoEglFramebuffer(
    EglFrameBuffer& framebuffer, sp<Fence> fence, std::optional<Rect> viewport) {
  ALOGV("%s", __func__);
  // Wait for fence to clear.
  if (fence != nullptr && fence->isValid()) {
    status_t ret = fence->wait(kAcquireFenceTimeout.count());
    if (ret != 0) {
      ALOGE("Timeout while waiting for the acquire fence for buffer");
      return cameraStatus(Status::INTERNAL_ERROR);
    }
  }

  mEglDisplayContext->makeCurrent();
  framebuffer.beforeDraw();

  Rect viewportRect =
      viewport.value_or(Rect(framebuffer.getWidth(), framebuffer.getHeight()));
  glViewport(viewportRect.left, viewportRect.top, viewportRect.getWidth(),
             viewportRect.getHeight());

  sp<GraphicBuffer> textureBuffer = mEglSurfaceTexture->getCurrentBuffer();
  if (textureBuffer == nullptr) {
    // If there's no current buffer, nothing was written to the surface and
    // the texture is not initialized yet. Let's clear the framebuffer with a
    // solid color instead of rendering the texture.
    glClearColor(0.0f, 0.5f, 0.5f, 0.0f);
    glClear(GL_COLOR_BUFFER_BIT);
  } else {
    const bool renderSuccess =
        isYuvFormat(static_cast<PixelFormat>(textureBuffer->getPixelFormat()))
            ? mEglTextureYuvProgram->draw(
                  mEglSurfaceTexture->getTextureId(),
                  mEglSurfaceTexture->getTransformMatrix())
            : mEglTextureRgbProgram->draw(
                  mEglSurfaceTexture->getTextureId(),
                  mEglSurfaceTexture->getTransformMatrix());
    if (!renderSuccess) {
      ALOGE("%s: Failed to render texture", __func__);
      return cameraStatus(Status::INTERNAL_ERROR);
    }
  }
  framebuffer.afterDraw();

  return ndk::ScopedAStatus::ok();
}

}  // namespace virtualcamera
}  // namespace companion
}  // namespace android