/*
 * Copyright (C) 2023 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

// #define LOG_NDEBUG 0
#define LOG_TAG "VirtualCameraRenderThread"
#include "VirtualCameraRenderThread.h"

#include <android_companion_virtualdevice_flags.h>

#include <chrono>
#include <cstdint>
#include <cstring>
#include <future>
#include <memory>
#include <mutex>
#include <thread>
#include <utility>
#include <vector>

#include "Exif.h"
#include "GLES/gl.h"
#include "VirtualCameraCaptureResult.h"
#include "VirtualCameraDevice.h"
#include "VirtualCameraSessionContext.h"
#include "aidl/android/hardware/camera/common/Status.h"
#include "aidl/android/hardware/camera/device/BufferStatus.h"
#include "aidl/android/hardware/camera/device/CameraBlob.h"
#include "aidl/android/hardware/camera/device/CameraBlobId.h"
#include "aidl/android/hardware/camera/device/CameraMetadata.h"
#include "aidl/android/hardware/camera/device/CaptureResult.h"
#include "aidl/android/hardware/camera/device/ErrorCode.h"
#include "aidl/android/hardware/camera/device/ICameraDeviceCallback.h"
#include "aidl/android/hardware/camera/device/NotifyMsg.h"
#include "aidl/android/hardware/camera/device/ShutterMsg.h"
#include "aidl/android/hardware/camera/device/StreamBuffer.h"
#include "android-base/thread_annotations.h"
#include "android/binder_auto_utils.h"
#include "android/hardware_buffer.h"
#include "system/camera_metadata.h"
#include "ui/GraphicBuffer.h"
#include "ui/Rect.h"
#include "util/EglFramebuffer.h"
#include "util/JpegUtil.h"
#include "util/Util.h"
#include "utils/Errors.h"

namespace android {
namespace companion {
namespace virtualcamera {

using ::aidl::android::hardware::camera::common::Status;
using ::aidl::android::hardware::camera::device::BufferStatus;
using ::aidl::android::hardware::camera::device::CameraBlob;
using ::aidl::android::hardware::camera::device::CameraBlobId;
using ::aidl::android::hardware::camera::device::CameraMetadata;
using ::aidl::android::hardware::camera::device::CaptureResult;
using ::aidl::android::hardware::camera::device::ErrorCode;
using ::aidl::android::hardware::camera::device::ErrorMsg;
using ::aidl::android::hardware::camera::device::ICameraDeviceCallback;
using ::aidl::android::hardware::camera::device::NotifyMsg;
using ::aidl::android::hardware::camera::device::ShutterMsg;
using ::aidl::android::hardware::camera::device::Stream;
using ::aidl::android::hardware::camera::device::StreamBuffer;
using ::aidl::android::hardware::graphics::common::PixelFormat;
using ::android::base::ScopedLockAssertion;

using ::android::hardware::camera::common::helper::ExifUtils;

namespace {

// helper type for the visitor
template <class... Ts>
struct overloaded : Ts... {
  using Ts::operator()...;
};
// explicit deduction guide (not needed as of C++20)
template <class... Ts>
overloaded(Ts...) -> overloaded<Ts...>;
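// The overloaded{} helper above is used with std::visit in threadLoop() to
// dispatch on the alternative currently held by a RenderThreadTask.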

using namespace std::chrono_literals;

namespace flags = ::android::companion::virtualdevice::flags;

static constexpr std::chrono::milliseconds kAcquireFenceTimeout = 500ms;

static constexpr size_t kJpegThumbnailBufferSize = 32 * 1024;  // 32 KiB

static constexpr UpdateTextureTask kUpdateTextureTask;

NotifyMsg createShutterNotifyMsg(int frameNumber,
                                 std::chrono::nanoseconds timestamp) {
  NotifyMsg msg;
  msg.set<NotifyMsg::Tag::shutter>(ShutterMsg{
      .frameNumber = frameNumber,
      .timestamp = timestamp.count(),
  });
  return msg;
}

NotifyMsg createBufferErrorNotifyMsg(int frameNumber, int streamId) {
  NotifyMsg msg;
  msg.set<NotifyMsg::Tag::error>(ErrorMsg{.frameNumber = frameNumber,
                                          .errorStreamId = streamId,
                                          .errorCode = ErrorCode::ERROR_BUFFER});
  return msg;
}

NotifyMsg createRequestErrorNotifyMsg(int frameNumber) {
  NotifyMsg msg;
  msg.set<NotifyMsg::Tag::error>(
      ErrorMsg{.frameNumber = frameNumber,
               // errorStreamId needs to be set to -1 for ERROR_REQUEST
               // (not tied to specific stream).
               .errorStreamId = -1,
               .errorCode = ErrorCode::ERROR_REQUEST});
  return msg;
}

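// Allocates a GPU-renderable, CPU-readable YUV 420 AHardwareBuffer of the
// requested size and wraps it in an EglFrameBuffer bound to eglDisplay.
// Returns nullptr if the buffer allocation fails.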
std::shared_ptr<EglFrameBuffer> allocateTemporaryFramebuffer(
    EGLDisplay eglDisplay, const uint width, const int height) {
  const AHardwareBuffer_Desc desc{
      .width = static_cast<uint32_t>(width),
      .height = static_cast<uint32_t>(height),
      .layers = 1,
      .format = AHARDWAREBUFFER_FORMAT_Y8Cb8Cr8_420,
      .usage = AHARDWAREBUFFER_USAGE_GPU_FRAMEBUFFER |
               AHARDWAREBUFFER_USAGE_CPU_READ_OFTEN,
      .rfu0 = 0,
      .rfu1 = 0};

  AHardwareBuffer* hwBufferPtr;
  int status = AHardwareBuffer_allocate(&desc, &hwBufferPtr);
  if (status != NO_ERROR) {
    ALOGE(
        "%s: Failed to allocate hardware buffer for temporary framebuffer: %d",
        __func__, status);
    return nullptr;
  }

  return std::make_shared<EglFrameBuffer>(
      eglDisplay,
      std::shared_ptr<AHardwareBuffer>(hwBufferPtr, AHardwareBuffer_release));
}

bool isYuvFormat(const PixelFormat pixelFormat) {
  switch (static_cast<android_pixel_format_t>(pixelFormat)) {
    case HAL_PIXEL_FORMAT_YCBCR_422_I:
    case HAL_PIXEL_FORMAT_YCBCR_422_SP:
    case HAL_PIXEL_FORMAT_Y16:
    case HAL_PIXEL_FORMAT_YV12:
    case HAL_PIXEL_FORMAT_YCBCR_420_888:
      return true;
    default:
      return false;
  }
}

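// Builds the EXIF APP1 segment for a JPEG of the given size, populated from
// the capture result metadata and optionally embedding an already compressed
// JPEG thumbnail. Returns an empty vector if APP1 generation fails.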
std::vector<uint8_t> createExif(
    Resolution imageSize, const CameraMetadata resultMetadata,
    const std::vector<uint8_t>& compressedThumbnail = {}) {
  std::unique_ptr<ExifUtils> exifUtils(ExifUtils::create());
  exifUtils->initialize();

  // Make a copy of the metadata in order to convert it to the HAL metadata
  // format (as opposed to the AIDL class) and use the setFromMetadata method
  // from ExifUtils.
  camera_metadata_t* rawSettings =
      clone_camera_metadata((camera_metadata_t*)resultMetadata.metadata.data());
  if (rawSettings != nullptr) {
    android::hardware::camera::common::helper::CameraMetadata halMetadata(
        rawSettings);
    exifUtils->setFromMetadata(halMetadata, imageSize.width, imageSize.height);
  }
  exifUtils->setMake(VirtualCameraDevice::kDefaultMakeAndModel);
  exifUtils->setModel(VirtualCameraDevice::kDefaultMakeAndModel);
  exifUtils->setFlash(0);

  std::vector<uint8_t> app1Data;

  size_t thumbnailDataSize = compressedThumbnail.size();
  const void* thumbnailData =
      thumbnailDataSize > 0
          ? reinterpret_cast<const void*>(compressedThumbnail.data())
          : nullptr;

  if (!exifUtils->generateApp1(thumbnailData, thumbnailDataSize)) {
    ALOGE("%s: Failed to generate APP1 segment for EXIF metadata", __func__);
    return app1Data;
  }

  const uint8_t* data = exifUtils->getApp1Buffer();
  const size_t size = exifUtils->getApp1Length();

  app1Data.insert(app1Data.end(), data, data + size);
  return app1Data;
}

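// Returns the longest frame duration we can afford to wait for the next input
// frame: the inverse of the minimum fps of the requested fps range when one is
// present, otherwise the inverse of the device-wide minimum fps.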
std::chrono::nanoseconds getMaxFrameDuration(
    const RequestSettings& requestSettings) {
  if (requestSettings.fpsRange.has_value()) {
    return std::chrono::nanoseconds(static_cast<uint64_t>(
        1e9 / std::max(1, requestSettings.fpsRange->minFps)));
  }
  return std::chrono::nanoseconds(
      static_cast<uint64_t>(1e9 / VirtualCameraDevice::kMinFps));
}

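// Forwards onFrameAvailable callbacks from the input Surface's consumer to an
// arbitrary std::function, so the render thread can be notified about new
// frames without implementing ConsumerBase::FrameAvailableListener itself.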
class FrameAvailableListenerProxy : public ConsumerBase::FrameAvailableListener {
 public:
  FrameAvailableListenerProxy(std::function<void()> callback)
      : mOnFrameAvailableCallback(callback) {
  }

  virtual void onFrameAvailable(const BufferItem&) override {
    ALOGV("%s: onFrameAvailable", __func__);
    mOnFrameAvailableCallback();
  }

 private:
  std::function<void()> mOnFrameAvailableCallback;
};

}  // namespace

CaptureRequestBuffer::CaptureRequestBuffer(int streamId, int bufferId,
                                           sp<Fence> fence)
    : mStreamId(streamId), mBufferId(bufferId), mFence(fence) {
}

int CaptureRequestBuffer::getStreamId() const {
  return mStreamId;
}

int CaptureRequestBuffer::getBufferId() const {
  return mBufferId;
}

sp<Fence> CaptureRequestBuffer::getFence() const {
  return mFence;
}

VirtualCameraRenderThread::VirtualCameraRenderThread(
    VirtualCameraSessionContext& sessionContext,
    const Resolution inputSurfaceSize, const Resolution reportedSensorSize,
    std::shared_ptr<ICameraDeviceCallback> cameraDeviceCallback)
    : mCameraDeviceCallback(cameraDeviceCallback),
      mInputSurfaceSize(inputSurfaceSize),
      mReportedSensorSize(reportedSensorSize),
      mSessionContext(sessionContext),
      mInputSurfaceFuture(mInputSurfacePromise.get_future()) {
}

VirtualCameraRenderThread::~VirtualCameraRenderThread() {
  stop();
  if (mThread.joinable()) {
    mThread.join();
  }
}

ProcessCaptureRequestTask::ProcessCaptureRequestTask(
    int frameNumber, const std::vector<CaptureRequestBuffer>& requestBuffers,
    const RequestSettings& requestSettings)
    : mFrameNumber(frameNumber),
      mBuffers(requestBuffers),
      mRequestSettings(requestSettings) {
}

int ProcessCaptureRequestTask::getFrameNumber() const {
  return mFrameNumber;
}

const std::vector<CaptureRequestBuffer>& ProcessCaptureRequestTask::getBuffers()
    const {
  return mBuffers;
}

const RequestSettings& ProcessCaptureRequestTask::getRequestSettings() const {
  return mRequestSettings;
}

void VirtualCameraRenderThread::requestTextureUpdate() {
  std::lock_guard<std::mutex> lock(mLock);
  // If the queue is not empty, we don't need to set the
  // mTextureUpdateRequested flag, since the texture will be updated during
  // ProcessCaptureRequestTask processing anyway.
  if (mQueue.empty()) {
    mTextureUpdateRequested = true;
    mCondVar.notify_one();
  }
}

void VirtualCameraRenderThread::enqueueTask(
    std::unique_ptr<ProcessCaptureRequestTask> task) {
  std::lock_guard<std::mutex> lock(mLock);
  // When enqueuing a process capture request task, clear the
  // mTextureUpdateRequested flag. If this flag is set, the texture was not yet
  // updated and it will be updated when processing the
  // ProcessCaptureRequestTask anyway.
  mTextureUpdateRequested = false;
  mQueue.emplace_back(std::move(task));
  mCondVar.notify_one();
}

void VirtualCameraRenderThread::flush() {
  std::lock_guard<std::mutex> lock(mLock);
  while (!mQueue.empty()) {
    std::unique_ptr<ProcessCaptureRequestTask> task = std::move(mQueue.front());
    mQueue.pop_front();
    flushCaptureRequest(*task);
  }
}

void VirtualCameraRenderThread::start() {
  mThread = std::thread(&VirtualCameraRenderThread::threadLoop, this);
}

void VirtualCameraRenderThread::stop() {
  {
    std::lock_guard<std::mutex> lock(mLock);
    mPendingExit = true;
    mCondVar.notify_one();
  }
}

sp<Surface> VirtualCameraRenderThread::getInputSurface() {
  return mInputSurfaceFuture.get();
}

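// Blocks until there is a pending exit request, an idle texture update request
// or a queued capture request, and returns the corresponding RenderThreadTask.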
RenderThreadTask VirtualCameraRenderThread::dequeueTask() {
  std::unique_lock<std::mutex> lock(mLock);
  // Clang's thread safety analysis doesn't perform alias analysis,
  // so it doesn't support movable std::unique_lock.
  //
  // The lock assertion below is basically an explicit declaration that
  // the lock is held in this scope, which is true, since it's only
  // released while waiting inside the mCondVar.wait calls.
  ScopedLockAssertion lockAssertion(mLock);

  mCondVar.wait(lock, [this]() REQUIRES(mLock) {
    return mPendingExit || mTextureUpdateRequested || !mQueue.empty();
  });
  if (mPendingExit) {
    // A task holding nullptr signals the render thread to terminate.
    return RenderThreadTask(nullptr);
  }
  if (mTextureUpdateRequested) {
    // If mTextureUpdateRequested is set, the queue is guaranteed to be empty;
    // return kUpdateTextureTask to signal that the render thread should update
    // the texture (consume a buffer from the queue).
    mTextureUpdateRequested = false;
    return RenderThreadTask(kUpdateTextureTask);
  }
  RenderThreadTask task(std::move(mQueue.front()));
  mQueue.pop_front();
  return task;
}

void VirtualCameraRenderThread::threadLoop() {
  ALOGV("Render thread starting");

  mEglDisplayContext = std::make_unique<EglDisplayContext>();
  mEglTextureYuvProgram =
      std::make_unique<EglTextureProgram>(EglTextureProgram::TextureFormat::YUV);
  mEglTextureRgbProgram = std::make_unique<EglTextureProgram>(
      EglTextureProgram::TextureFormat::RGBA);
  mEglSurfaceTexture = std::make_unique<EglSurfaceTexture>(
      mInputSurfaceSize.width, mInputSurfaceSize.height);
  sp<FrameAvailableListenerProxy> frameAvailableListener =
      sp<FrameAvailableListenerProxy>::make(
          [this]() { requestTextureUpdate(); });
  mEglSurfaceTexture->setFrameAvailableListener(frameAvailableListener);

  mInputSurfacePromise.set_value(mEglSurfaceTexture->getSurface());

  while (RenderThreadTask task = dequeueTask()) {
    std::visit(
        overloaded{[this](const std::unique_ptr<ProcessCaptureRequestTask>& t) {
                     processTask(*t);
                   },
                   [this](const UpdateTextureTask&) {
                     ALOGV("Idle update of the texture");
                     mEglSurfaceTexture->updateTexture();
                   }},
        task);
  }

  // Destroy EGL utilities still on the render thread.
  mEglSurfaceTexture.reset();
  mEglTextureRgbProgram.reset();
  mEglTextureYuvProgram.reset();
  mEglDisplayContext.reset();

  ALOGV("Render thread exiting");
}

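// Renders a single capture request: throttles rendering to the requested fps
// range, waits for (or repeats) an input frame, renders it into every output
// buffer of the request, and reports shutter/buffer status and the capture
// result back to the camera framework via ICameraDeviceCallback.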
void VirtualCameraRenderThread::processTask(
    const ProcessCaptureRequestTask& request) {
  std::chrono::nanoseconds timestamp =
      std::chrono::duration_cast<std::chrono::nanoseconds>(
          std::chrono::steady_clock::now().time_since_epoch());
  const std::chrono::nanoseconds lastAcquisitionTimestamp(
      mLastAcquisitionTimestampNanoseconds.exchange(timestamp.count(),
                                                    std::memory_order_relaxed));

  if (request.getRequestSettings().fpsRange) {
    int maxFps = std::max(1, request.getRequestSettings().fpsRange->maxFps);
    timestamp = throttleRendering(maxFps, lastAcquisitionTimestamp, timestamp);
  }

  // Calculate the maximal amount of time we can afford to wait for the next
  // frame.
  const std::chrono::nanoseconds maxFrameDuration =
      getMaxFrameDuration(request.getRequestSettings());
  const std::chrono::nanoseconds elapsedDuration =
      timestamp - lastAcquisitionTimestamp;
  if (elapsedDuration < maxFrameDuration) {
    // We can afford to wait for the next frame.
    // Note that if there's already a new frame in the input Surface, the call
    // below returns immediately.
    bool gotNewFrame = mEglSurfaceTexture->waitForNextFrame(maxFrameDuration -
                                                            elapsedDuration);
    timestamp = std::chrono::duration_cast<std::chrono::nanoseconds>(
        std::chrono::steady_clock::now().time_since_epoch());
    if (!gotNewFrame) {
      ALOGV(
          "%s: No new frame received on input surface after waiting for "
          "%" PRIu64 "ns, repeating last frame.",
          __func__,
          static_cast<uint64_t>((timestamp - lastAcquisitionTimestamp).count()));
    }
    mLastAcquisitionTimestampNanoseconds.store(timestamp.count(),
                                               std::memory_order_relaxed);
  }
  // Acquire new (most recent) image from the Surface.
  mEglSurfaceTexture->updateTexture();
  std::chrono::nanoseconds captureTimestamp = timestamp;

  if (flags::camera_timestamp_from_surface()) {
    std::chrono::nanoseconds surfaceTimestamp =
        getSurfaceTimestamp(elapsedDuration);
    if (surfaceTimestamp.count() > 0) {
      captureTimestamp = surfaceTimestamp;
    }
    ALOGV("%s captureTimestamp:%lld timestamp:%lld", __func__,
          captureTimestamp.count(), timestamp.count());
  }

  CaptureResult captureResult;
  captureResult.fmqResultSize = 0;
  captureResult.frameNumber = request.getFrameNumber();
  // Partial result needs to be set to 1 when metadata is present.
  captureResult.partialResult = 1;
  captureResult.inputBuffer.streamId = -1;
  captureResult.physicalCameraMetadata.resize(0);
  captureResult.result = createCaptureResultMetadata(
      captureTimestamp, request.getRequestSettings(), mReportedSensorSize);

  const std::vector<CaptureRequestBuffer>& buffers = request.getBuffers();
  captureResult.outputBuffers.resize(buffers.size());

  for (int i = 0; i < buffers.size(); ++i) {
    const CaptureRequestBuffer& reqBuffer = buffers[i];
    StreamBuffer& resBuffer = captureResult.outputBuffers[i];
    resBuffer.streamId = reqBuffer.getStreamId();
    resBuffer.bufferId = reqBuffer.getBufferId();
    resBuffer.status = BufferStatus::OK;

    const std::optional<Stream> streamConfig =
        mSessionContext.getStreamConfig(reqBuffer.getStreamId());

    if (!streamConfig.has_value()) {
      resBuffer.status = BufferStatus::ERROR;
      continue;
    }

    auto status = streamConfig->format == PixelFormat::BLOB
                      ? renderIntoBlobStreamBuffer(
                            reqBuffer.getStreamId(), reqBuffer.getBufferId(),
                            captureResult.result, request.getRequestSettings(),
                            reqBuffer.getFence())
                      : renderIntoImageStreamBuffer(reqBuffer.getStreamId(),
                                                    reqBuffer.getBufferId(),
                                                    reqBuffer.getFence());
    if (!status.isOk()) {
      resBuffer.status = BufferStatus::ERROR;
    }
  }

  std::vector<NotifyMsg> notifyMsg{
      createShutterNotifyMsg(request.getFrameNumber(), captureTimestamp)};
  for (const StreamBuffer& resBuffer : captureResult.outputBuffers) {
    if (resBuffer.status != BufferStatus::OK) {
      notifyMsg.push_back(createBufferErrorNotifyMsg(request.getFrameNumber(),
                                                     resBuffer.streamId));
    }
  }

  auto status = mCameraDeviceCallback->notify(notifyMsg);
  if (!status.isOk()) {
    ALOGE("%s: notify call failed: %s", __func__,
          status.getDescription().c_str());
    return;
  }

  std::vector<::aidl::android::hardware::camera::device::CaptureResult>
      captureResults(1);
  captureResults[0] = std::move(captureResult);

  status = mCameraDeviceCallback->processCaptureResult(captureResults);
  if (!status.isOk()) {
    ALOGE("%s: processCaptureResult call failed: %s", __func__,
          status.getDescription().c_str());
    return;
  }

  ALOGV("%s: Successfully called processCaptureResult", __func__);
}

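// If less than 1/maxFps has elapsed since the previous frame acquisition,
// sleeps for the remainder of the frame interval and returns a refreshed
// timestamp; otherwise returns the timestamp unchanged.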
std::chrono::nanoseconds VirtualCameraRenderThread::throttleRendering(
    int maxFps, std::chrono::nanoseconds lastAcquisitionTimestamp,
    std::chrono::nanoseconds timestamp) {
  const std::chrono::nanoseconds minFrameDuration(
      static_cast<uint64_t>(1e9 / maxFps));
  const std::chrono::nanoseconds frameDuration =
      timestamp - lastAcquisitionTimestamp;
  if (frameDuration < minFrameDuration) {
    // We're too fast for the configured maxFps, let's wait a bit.
    const std::chrono::nanoseconds sleepTime = minFrameDuration - frameDuration;
    ALOGV("Current frame duration would be %" PRIu64
          " ns, sleeping for %" PRIu64
          " ns before updating texture to match maxFps %d",
          static_cast<uint64_t>(frameDuration.count()),
          static_cast<uint64_t>(sleepTime.count()), maxFps);

    std::this_thread::sleep_for(sleepTime);
    timestamp = std::chrono::duration_cast<std::chrono::nanoseconds>(
        std::chrono::steady_clock::now().time_since_epoch());
    mLastAcquisitionTimestampNanoseconds.store(timestamp.count(),
                                               std::memory_order_relaxed);
  }
  return timestamp;
}

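// Returns the timestamp of the frame currently attached to the surface
// texture. If the producer didn't attach a new timestamp (the last frame is
// being repeated), the previously reported timestamp is advanced by
// timeSinceLastFrame so the camera framework doesn't discard the frame.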
std::chrono::nanoseconds VirtualCameraRenderThread::getSurfaceTimestamp(
    std::chrono::nanoseconds timeSinceLastFrame) {
  std::chrono::nanoseconds surfaceTimestamp = mEglSurfaceTexture->getTimestamp();
  if (surfaceTimestamp.count() < 0) {
    uint64_t lastSurfaceTimestamp = mLastSurfaceTimestampNanoseconds.load();
    if (lastSurfaceTimestamp > 0) {
      // The timestamps were provided by the producer but we are
      // repeating the last frame, so we increase the previous timestamp by
      // the elapsed time since its capture, otherwise the camera framework
      // will discard the frame.
      surfaceTimestamp = std::chrono::nanoseconds(lastSurfaceTimestamp +
                                                  timeSinceLastFrame.count());
    }
  }
  mLastSurfaceTimestampNanoseconds.store(surfaceTimestamp.count(),
                                         std::memory_order_relaxed);
  return surfaceTimestamp;
}

void VirtualCameraRenderThread::flushCaptureRequest(
    const ProcessCaptureRequestTask& request) {
  CaptureResult captureResult;
  captureResult.fmqResultSize = 0;
  captureResult.frameNumber = request.getFrameNumber();
  captureResult.inputBuffer.streamId = -1;

  const std::vector<CaptureRequestBuffer>& buffers = request.getBuffers();
  captureResult.outputBuffers.resize(buffers.size());

  for (int i = 0; i < buffers.size(); ++i) {
    const CaptureRequestBuffer& reqBuffer = buffers[i];
    StreamBuffer& resBuffer = captureResult.outputBuffers[i];
    resBuffer.streamId = reqBuffer.getStreamId();
    resBuffer.bufferId = reqBuffer.getBufferId();
    resBuffer.status = BufferStatus::ERROR;
    sp<Fence> fence = reqBuffer.getFence();
    if (fence != nullptr && fence->isValid()) {
      resBuffer.releaseFence.fds.emplace_back(fence->dup());
    }
  }

  auto status = mCameraDeviceCallback->notify(
      {createRequestErrorNotifyMsg(request.getFrameNumber())});
  if (!status.isOk()) {
    ALOGE("%s: notify call failed: %s", __func__,
          status.getDescription().c_str());
    return;
  }

  std::vector<::aidl::android::hardware::camera::device::CaptureResult>
      captureResults(1);
  captureResults[0] = std::move(captureResult);

  status = mCameraDeviceCallback->processCaptureResult(captureResults);
  if (!status.isOk()) {
    ALOGE("%s: processCaptureResult call failed: %s", __func__,
          status.getDescription().c_str());
  }
}

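// Renders the current input texture into a temporary DCT-aligned framebuffer
// and compresses it into a JPEG thumbnail of the requested resolution and
// quality. Returns an empty vector if the requested size is zero or if any
// step fails.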
std::vector<uint8_t> VirtualCameraRenderThread::createThumbnail(
    const Resolution resolution, const int quality) {
  if (resolution.width == 0 || resolution.height == 0) {
    ALOGV("%s: Skipping thumbnail creation, zero size requested", __func__);
    return {};
  }

  ALOGV("%s: Creating thumbnail with size %d x %d, quality %d", __func__,
        resolution.width, resolution.height, quality);
  Resolution bufferSize = roundTo2DctSize(resolution);
  std::shared_ptr<EglFrameBuffer> framebuffer = allocateTemporaryFramebuffer(
      mEglDisplayContext->getEglDisplay(), bufferSize.width, bufferSize.height);
  if (framebuffer == nullptr) {
    ALOGE(
        "Failed to allocate temporary framebuffer for JPEG thumbnail "
        "compression");
    return {};
  }

  // TODO(b/324383963) Add support for letterboxing if the thumbnail size
  // doesn't correspond to the input texture aspect ratio.
  if (!renderIntoEglFramebuffer(*framebuffer, /*fence=*/nullptr,
                                Rect(resolution.width, resolution.height))
           .isOk()) {
    ALOGE(
        "Failed to render input texture into temporary framebuffer for JPEG "
        "thumbnail");
    return {};
  }

  std::vector<uint8_t> compressedThumbnail;
  compressedThumbnail.resize(kJpegThumbnailBufferSize);
  ALOGV("%s: Compressing thumbnail %d x %d", __func__, resolution.width,
        resolution.height);
  std::optional<size_t> compressedSize =
      compressJpeg(resolution.width, resolution.height, quality,
                   framebuffer->getHardwareBuffer(), {},
                   compressedThumbnail.size(), compressedThumbnail.data());
  if (!compressedSize.has_value()) {
    ALOGE("%s: Failed to compress jpeg thumbnail", __func__);
    return {};
  }
  compressedThumbnail.resize(compressedSize.value());
  return compressedThumbnail;
}

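// Renders the current input texture into the BLOB (JPEG) stream buffer: the
// frame is first rendered into a temporary YUV framebuffer, then compressed
// together with the EXIF APP1 data, and finally the CameraBlob transport
// header expected by the camera framework is appended at the end of the
// gralloc buffer.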
ndk::ScopedAStatus VirtualCameraRenderThread::renderIntoBlobStreamBuffer(
    const int streamId, const int bufferId, const CameraMetadata& resultMetadata,
    const RequestSettings& requestSettings, sp<Fence> fence) {
  std::shared_ptr<AHardwareBuffer> hwBuffer =
      mSessionContext.fetchHardwareBuffer(streamId, bufferId);
  if (hwBuffer == nullptr) {
    ALOGE("%s: Failed to fetch hardware buffer %d for streamId %d", __func__,
          bufferId, streamId);
    return cameraStatus(Status::INTERNAL_ERROR);
  }

  std::optional<Stream> stream = mSessionContext.getStreamConfig(streamId);
  if (!stream.has_value()) {
    ALOGE("%s: Failed to fetch information about stream %d", __func__, streamId);
    return cameraStatus(Status::INTERNAL_ERROR);
  }

  ALOGV("%s: Rendering JPEG with size %d x %d, quality %d", __func__,
        stream->width, stream->height, requestSettings.jpegQuality);

  // Let's create a YUV framebuffer and render the surface into it.
  // This takes care of rescaling as well as potential format conversion.
  // The buffer dimensions need to be rounded to the nearest multiple of the
  // JPEG DCT size; however, we pass a viewport corresponding to the size of
  // the stream, so the image is only rendered into the area matching the
  // stream size.
  Resolution bufferSize =
      roundTo2DctSize(Resolution(stream->width, stream->height));
  std::shared_ptr<EglFrameBuffer> framebuffer = allocateTemporaryFramebuffer(
      mEglDisplayContext->getEglDisplay(), bufferSize.width, bufferSize.height);
  if (framebuffer == nullptr) {
    ALOGE("Failed to allocate temporary framebuffer for JPEG compression");
    return cameraStatus(Status::INTERNAL_ERROR);
  }

  // Render into temporary framebuffer.
  ndk::ScopedAStatus status = renderIntoEglFramebuffer(
      *framebuffer, /*fence=*/nullptr, Rect(stream->width, stream->height));
  if (!status.isOk()) {
    ALOGE("Failed to render input texture into temporary framebuffer");
    return status;
  }

  PlanesLockGuard planesLock(hwBuffer, AHARDWAREBUFFER_USAGE_CPU_WRITE_OFTEN,
                             fence);
  if (planesLock.getStatus() != OK) {
    ALOGE("Failed to lock hwBuffer planes");
    return cameraStatus(Status::INTERNAL_ERROR);
  }

  std::vector<uint8_t> app1ExifData =
      createExif(Resolution(stream->width, stream->height), resultMetadata,
                 createThumbnail(requestSettings.thumbnailResolution,
                                 requestSettings.thumbnailJpegQuality));

  unsigned long outBufferSize = stream->bufferSize - sizeof(CameraBlob);
  void* outBuffer = (*planesLock).planes[0].data;
  std::optional<size_t> compressedSize = compressJpeg(
      stream->width, stream->height, requestSettings.jpegQuality,
      framebuffer->getHardwareBuffer(), app1ExifData, outBufferSize, outBuffer);

  if (!compressedSize.has_value()) {
    ALOGE("%s: Failed to compress JPEG image", __func__);
    return cameraStatus(Status::INTERNAL_ERROR);
  }

  // Add the transport header at the end of the JPEG output buffer.
  //
  // jpegBlobId must start at byte[buffer_size - sizeof(CameraBlob)],
  // where the buffer_size is the size of gralloc buffer.
  //
  // See
  // hardware/interfaces/camera/device/aidl/android/hardware/camera/device/CameraBlobId.aidl
  // for the full explanation of the following code.
  CameraBlob cameraBlob{
      .blobId = CameraBlobId::JPEG,
      .blobSizeBytes = static_cast<int32_t>(compressedSize.value())};

  // Copy the cameraBlob to the end of the JPEG buffer.
  uint8_t* jpegStreamEndAddress =
      reinterpret_cast<uint8_t*>((*planesLock).planes[0].data) +
      (stream->bufferSize - sizeof(cameraBlob));
  memcpy(jpegStreamEndAddress, &cameraBlob, sizeof(cameraBlob));

  ALOGV("%s: Successfully compressed JPEG image, resulting size %zu B",
        __func__, compressedSize.value());

  return ndk::ScopedAStatus::ok();
}

ndk::ScopedAStatus VirtualCameraRenderThread::renderIntoImageStreamBuffer(
    int streamId, int bufferId, sp<Fence> fence) {
  ALOGV("%s", __func__);

  const std::chrono::nanoseconds before =
      std::chrono::duration_cast<std::chrono::nanoseconds>(
          std::chrono::steady_clock::now().time_since_epoch());

  // Render the current input frame into the stream buffer using EGL.
  std::shared_ptr<EglFrameBuffer> framebuffer =
      mSessionContext.fetchOrCreateEglFramebuffer(
          mEglDisplayContext->getEglDisplay(), streamId, bufferId);
  if (framebuffer == nullptr) {
    ALOGE(
        "%s: Failed to get EGL framebuffer corresponding to buffer id "
        "%d for streamId %d",
        __func__, bufferId, streamId);
    return cameraStatus(Status::ILLEGAL_ARGUMENT);
  }

  ndk::ScopedAStatus status = renderIntoEglFramebuffer(*framebuffer, fence);

  const std::chrono::nanoseconds after =
      std::chrono::duration_cast<std::chrono::nanoseconds>(
          std::chrono::steady_clock::now().time_since_epoch());

  ALOGV("Rendering to buffer %d, stream %d took %lld ns", bufferId, streamId,
        after.count() - before.count());

  return ndk::ScopedAStatus::ok();
}

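// Waits for the buffer's acquire fence, then renders the most recent input
// texture (or a solid clear color when no frame has been produced yet) into
// the given EGL framebuffer, optionally restricted to the supplied viewport.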
ndk::ScopedAStatus VirtualCameraRenderThread::renderIntoEglFramebuffer(
    EglFrameBuffer& framebuffer, sp<Fence> fence, std::optional<Rect> viewport) {
  ALOGV("%s", __func__);
  // Wait for fence to clear.
  if (fence != nullptr && fence->isValid()) {
    status_t ret = fence->wait(kAcquireFenceTimeout.count());
    if (ret != 0) {
      ALOGE("Timeout while waiting for the acquire fence for buffer");
      return cameraStatus(Status::INTERNAL_ERROR);
    }
  }

  mEglDisplayContext->makeCurrent();
  framebuffer.beforeDraw();

  Rect viewportRect =
      viewport.value_or(Rect(framebuffer.getWidth(), framebuffer.getHeight()));
  glViewport(viewportRect.left, viewportRect.top, viewportRect.getWidth(),
             viewportRect.getHeight());

  sp<GraphicBuffer> textureBuffer = mEglSurfaceTexture->getCurrentBuffer();
  if (textureBuffer == nullptr) {
    // If there's no current buffer, nothing was written to the surface and
    // texture is not initialized yet. Let's render the framebuffer black
    // instead of rendering the texture.
    glClearColor(0.0f, 0.5f, 0.5f, 0.0f);
    glClear(GL_COLOR_BUFFER_BIT);
  } else {
    const bool renderSuccess =
        isYuvFormat(static_cast<PixelFormat>(textureBuffer->getPixelFormat()))
            ? mEglTextureYuvProgram->draw(
                  mEglSurfaceTexture->getTextureId(),
                  mEglSurfaceTexture->getTransformMatrix())
            : mEglTextureRgbProgram->draw(
                  mEglSurfaceTexture->getTextureId(),
                  mEglSurfaceTexture->getTransformMatrix());
    if (!renderSuccess) {
      ALOGE("%s: Failed to render texture", __func__);
      return cameraStatus(Status::INTERNAL_ERROR);
    }
  }
  framebuffer.afterDraw();

  return ndk::ScopedAStatus::ok();
}

}  // namespace virtualcamera
}  // namespace companion
}  // namespace android