blob: 86211609470d52f53b73c027c216c75b5b692182 [file] [log] [blame]
Jan Sebechlebsky5cb39962023-11-22 17:33:07 +01001/*
2 * Copyright (C) 2023 The Android Open Source Project
3 *
4 * Licensed under the Apache License, Version 2.0 (the "License");
5 * you may not use this file except in compliance with the License.
6 * You may obtain a copy of the License at
7 *
8 * http://www.apache.org/licenses/LICENSE-2.0
9 *
10 * Unless required by applicable law or agreed to in writing, software
11 * distributed under the License is distributed on an "AS IS" BASIS,
12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 * See the License for the specific language governing permissions and
14 * limitations under the License.
15 */
16
17#define LOG_TAG "VirtualCameraRenderThread"
18#include "VirtualCameraRenderThread.h"
19
20#include <chrono>
21#include <cstddef>
22#include <future>
23#include <memory>
24#include <mutex>
25#include <thread>
26
Jan Sebechlebsky9ae496f2023-12-05 15:56:28 +010027#include "GLES/gl.h"
Jan Sebechlebsky5cb39962023-11-22 17:33:07 +010028#include "VirtualCameraSessionContext.h"
29#include "aidl/android/hardware/camera/common/Status.h"
30#include "aidl/android/hardware/camera/device/BufferStatus.h"
31#include "aidl/android/hardware/camera/device/CameraMetadata.h"
32#include "aidl/android/hardware/camera/device/CaptureResult.h"
33#include "aidl/android/hardware/camera/device/ErrorCode.h"
34#include "aidl/android/hardware/camera/device/ICameraDeviceCallback.h"
35#include "aidl/android/hardware/camera/device/NotifyMsg.h"
36#include "aidl/android/hardware/camera/device/ShutterMsg.h"
37#include "aidl/android/hardware/camera/device/StreamBuffer.h"
38#include "android-base/thread_annotations.h"
39#include "android/binder_auto_utils.h"
40#include "android/hardware_buffer.h"
41#include "util/EglFramebuffer.h"
42#include "util/JpegUtil.h"
43#include "util/MetadataBuilder.h"
44#include "util/TestPatternHelper.h"
45#include "util/Util.h"
46#include "utils/Errors.h"
47
48namespace android {
49namespace companion {
50namespace virtualcamera {
51
52using ::aidl::android::hardware::camera::common::Status;
53using ::aidl::android::hardware::camera::device::BufferStatus;
54using ::aidl::android::hardware::camera::device::CameraMetadata;
55using ::aidl::android::hardware::camera::device::CaptureResult;
56using ::aidl::android::hardware::camera::device::ErrorCode;
57using ::aidl::android::hardware::camera::device::ErrorMsg;
58using ::aidl::android::hardware::camera::device::ICameraDeviceCallback;
59using ::aidl::android::hardware::camera::device::NotifyMsg;
60using ::aidl::android::hardware::camera::device::ShutterMsg;
61using ::aidl::android::hardware::camera::device::Stream;
62using ::aidl::android::hardware::camera::device::StreamBuffer;
63using ::aidl::android::hardware::graphics::common::PixelFormat;
64using ::android::base::ScopedLockAssertion;
65
66namespace {
67
68using namespace std::chrono_literals;
69
70static constexpr std::chrono::milliseconds kAcquireFenceTimeout = 500ms;
71
72CameraMetadata createCaptureResultMetadata(
73 const std::chrono::nanoseconds timestamp) {
74 std::unique_ptr<CameraMetadata> metadata =
75 MetadataBuilder().setSensorTimestamp(timestamp).build();
76 if (metadata == nullptr) {
77 ALOGE("%s: Failed to build capture result metadata", __func__);
78 return CameraMetadata();
79 }
80 return std::move(*metadata);
81}
82
83NotifyMsg createShutterNotifyMsg(int frameNumber,
84 std::chrono::nanoseconds timestamp) {
85 NotifyMsg msg;
86 msg.set<NotifyMsg::Tag::shutter>(ShutterMsg{
87 .frameNumber = frameNumber,
88 .timestamp = timestamp.count(),
89 });
90 return msg;
91}
92
93NotifyMsg createBufferErrorNotifyMsg(int frameNumber, int streamId) {
94 NotifyMsg msg;
95 msg.set<NotifyMsg::Tag::error>(ErrorMsg{.frameNumber = frameNumber,
96 .errorStreamId = streamId,
97 .errorCode = ErrorCode::ERROR_BUFFER});
98 return msg;
99}
100
101NotifyMsg createRequestErrorNotifyMsg(int frameNumber) {
102 NotifyMsg msg;
103 msg.set<NotifyMsg::Tag::error>(ErrorMsg{
Jan Sebechlebskyb0d8cab2023-11-28 10:55:04 +0100104 .frameNumber = frameNumber,
105 // errorStreamId needs to be set to -1 for ERROR_REQUEST
106 // (not tied to specific stream).
107 .errorStreamId = -1,
108 .errorCode = ErrorCode::ERROR_REQUEST});
Jan Sebechlebsky5cb39962023-11-22 17:33:07 +0100109 return msg;
110}
111
112} // namespace
113
114CaptureRequestBuffer::CaptureRequestBuffer(int streamId, int bufferId,
115 sp<Fence> fence)
116 : mStreamId(streamId), mBufferId(bufferId), mFence(fence) {
117}
118
119int CaptureRequestBuffer::getStreamId() const {
120 return mStreamId;
121}
122
123int CaptureRequestBuffer::getBufferId() const {
124 return mBufferId;
125}
126
127sp<Fence> CaptureRequestBuffer::getFence() const {
128 return mFence;
129}
130
131VirtualCameraRenderThread::VirtualCameraRenderThread(
132 VirtualCameraSessionContext& sessionContext, const int mWidth,
133 const int mHeight,
134 std::shared_ptr<ICameraDeviceCallback> cameraDeviceCallback, bool testMode)
135 : mCameraDeviceCallback(cameraDeviceCallback),
136 mInputSurfaceWidth(mWidth),
137 mInputSurfaceHeight(mHeight),
138 mTestMode(testMode),
139 mSessionContext(sessionContext) {
140}
141
// Requests the render thread to exit and blocks until it has terminated.
// stop() must run before join(): it sets mPendingExit so the blocking
// dequeueTask() call inside threadLoop() wakes up and returns nullptr,
// allowing the loop (and thus the thread) to finish.
VirtualCameraRenderThread::~VirtualCameraRenderThread() {
  stop();
  if (mThread.joinable()) {
    mThread.join();
  }
}
148
149ProcessCaptureRequestTask::ProcessCaptureRequestTask(
150 int frameNumber, const std::vector<CaptureRequestBuffer>& requestBuffers)
151 : mFrameNumber(frameNumber), mBuffers(requestBuffers) {
152}
153
154int ProcessCaptureRequestTask::getFrameNumber() const {
155 return mFrameNumber;
156}
157
158const std::vector<CaptureRequestBuffer>& ProcessCaptureRequestTask::getBuffers()
159 const {
160 return mBuffers;
161}
162
163void VirtualCameraRenderThread::enqueueTask(
164 std::unique_ptr<ProcessCaptureRequestTask> task) {
165 std::lock_guard<std::mutex> lock(mLock);
166 mQueue.emplace_back(std::move(task));
167 mCondVar.notify_one();
168}
169
170void VirtualCameraRenderThread::flush() {
171 std::lock_guard<std::mutex> lock(mLock);
Jan Sebechlebskyb0d8cab2023-11-28 10:55:04 +0100172 while (!mQueue.empty()) {
173 std::unique_ptr<ProcessCaptureRequestTask> task = std::move(mQueue.front());
174 mQueue.pop_front();
Jan Sebechlebsky5cb39962023-11-22 17:33:07 +0100175 flushCaptureRequest(*task);
176 }
177}
178
179void VirtualCameraRenderThread::start() {
180 mThread = std::thread(&VirtualCameraRenderThread::threadLoop, this);
181}
182
183void VirtualCameraRenderThread::stop() {
184 {
185 std::lock_guard<std::mutex> lock(mLock);
186 mPendingExit = true;
187 mCondVar.notify_one();
188 }
189}
190
191sp<Surface> VirtualCameraRenderThread::getInputSurface() {
192 return mInputSurfacePromise.get_future().get();
193}
194
// Blocks until there is either a task to process or a pending exit request.
// Returns the next queued task, or nullptr when the thread was asked to stop.
std::unique_ptr<ProcessCaptureRequestTask>
VirtualCameraRenderThread::dequeueTask() {
  std::unique_lock<std::mutex> lock(mLock);
  // Clang's thread safety analysis doesn't perform alias analysis,
  // so it doesn't support moveable std::unique_lock.
  //
  // Lock assertion below is basically explicit declaration that
  // the lock is held in this scope, which is true, since it's only
  // released during waiting inside mCondVar.wait calls.
  ScopedLockAssertion lockAssertion(mLock);

  mCondVar.wait(lock, [this]() REQUIRES(mLock) {
    return mPendingExit || !mQueue.empty();
  });
  // Exit takes precedence over draining the queue: any tasks still queued at
  // shutdown are simply dropped here (flush() is the path that answers
  // pending requests with error results).
  if (mPendingExit) {
    return nullptr;
  }
  std::unique_ptr<ProcessCaptureRequestTask> task = std::move(mQueue.front());
  mQueue.pop_front();
  return task;
}
216
// Main loop of the render thread. The EGL display context, texture program
// and surface texture are created on this thread before any task is
// processed — presumably because the EGL context must be current on the
// rendering thread (TODO confirm). mInputSurfacePromise is fulfilled only
// after the surface texture exists, so getInputSurface() callers never see a
// half-initialized surface. The loop runs until dequeueTask() returns
// nullptr (i.e. stop() was called).
void VirtualCameraRenderThread::threadLoop() {
  ALOGV("Render thread starting");

  mEglDisplayContext = std::make_unique<EglDisplayContext>();
  mEglTextureProgram = std::make_unique<EglTextureProgram>();
  mEglSurfaceTexture = std::make_unique<EglSurfaceTexture>(mInputSurfaceWidth,
                                                           mInputSurfaceHeight);
  mInputSurfacePromise.set_value(mEglSurfaceTexture->getSurface());

  while (std::unique_ptr<ProcessCaptureRequestTask> task = dequeueTask()) {
    processCaptureRequest(*task);
  }

  ALOGV("Render thread exiting");
}
232
233void VirtualCameraRenderThread::processCaptureRequest(
234 const ProcessCaptureRequestTask& request) {
235 const std::chrono::nanoseconds timestamp =
236 std::chrono::duration_cast<std::chrono::nanoseconds>(
237 std::chrono::steady_clock::now().time_since_epoch());
238
239 CaptureResult captureResult;
240 captureResult.fmqResultSize = 0;
241 captureResult.frameNumber = request.getFrameNumber();
Jan Sebechlebskyb0d8cab2023-11-28 10:55:04 +0100242 // Partial result needs to be set to 1 when metadata are present.
Jan Sebechlebsky5cb39962023-11-22 17:33:07 +0100243 captureResult.partialResult = 1;
244 captureResult.inputBuffer.streamId = -1;
245 captureResult.physicalCameraMetadata.resize(0);
246 captureResult.result = createCaptureResultMetadata(timestamp);
247
248 const std::vector<CaptureRequestBuffer>& buffers = request.getBuffers();
249 captureResult.outputBuffers.resize(buffers.size());
250
251 if (mTestMode) {
252 // In test mode let's just render something to the Surface ourselves.
253 renderTestPatternYCbCr420(mEglSurfaceTexture->getSurface(),
254 request.getFrameNumber());
255 }
256
257 mEglSurfaceTexture->updateTexture();
258
259 for (int i = 0; i < buffers.size(); ++i) {
260 const CaptureRequestBuffer& reqBuffer = buffers[i];
261 StreamBuffer& resBuffer = captureResult.outputBuffers[i];
262 resBuffer.streamId = reqBuffer.getStreamId();
263 resBuffer.bufferId = reqBuffer.getBufferId();
264 resBuffer.status = BufferStatus::OK;
265
266 const std::optional<Stream> streamConfig =
267 mSessionContext.getStreamConfig(reqBuffer.getStreamId());
268
269 if (!streamConfig.has_value()) {
270 resBuffer.status = BufferStatus::ERROR;
271 continue;
272 }
273
274 auto status = streamConfig->format == PixelFormat::BLOB
Jan Sebechlebsky9ae496f2023-12-05 15:56:28 +0100275 ? renderIntoBlobStreamBuffer(reqBuffer.getStreamId(),
276 reqBuffer.getBufferId(),
277 reqBuffer.getFence())
Jan Sebechlebsky5cb39962023-11-22 17:33:07 +0100278 : renderIntoImageStreamBuffer(reqBuffer.getStreamId(),
279 reqBuffer.getBufferId(),
280 reqBuffer.getFence());
281 if (!status.isOk()) {
282 resBuffer.status = BufferStatus::ERROR;
283 }
284 }
285
286 std::vector<NotifyMsg> notifyMsg{
287 createShutterNotifyMsg(request.getFrameNumber(), timestamp)};
288 for (const StreamBuffer& resBuffer : captureResult.outputBuffers) {
289 if (resBuffer.status != BufferStatus::OK) {
290 notifyMsg.push_back(createBufferErrorNotifyMsg(request.getFrameNumber(),
291 resBuffer.streamId));
292 }
293 }
294
295 auto status = mCameraDeviceCallback->notify(notifyMsg);
296 if (!status.isOk()) {
297 ALOGE("%s: notify call failed: %s", __func__,
298 status.getDescription().c_str());
299 return;
300 }
301
302 std::vector<::aidl::android::hardware::camera::device::CaptureResult>
303 captureResults(1);
304 captureResults[0] = std::move(captureResult);
305
306 status = mCameraDeviceCallback->processCaptureResult(captureResults);
307 if (!status.isOk()) {
308 ALOGE("%s: processCaptureResult call failed: %s", __func__,
309 status.getDescription().c_str());
310 return;
311 }
312
313 ALOGD("%s: Successfully called processCaptureResult", __func__);
314}
315
316void VirtualCameraRenderThread::flushCaptureRequest(
317 const ProcessCaptureRequestTask& request) {
Jan Sebechlebsky5cb39962023-11-22 17:33:07 +0100318 CaptureResult captureResult;
319 captureResult.fmqResultSize = 0;
320 captureResult.frameNumber = request.getFrameNumber();
321 captureResult.inputBuffer.streamId = -1;
Jan Sebechlebsky5cb39962023-11-22 17:33:07 +0100322
323 const std::vector<CaptureRequestBuffer>& buffers = request.getBuffers();
324 captureResult.outputBuffers.resize(buffers.size());
325
326 for (int i = 0; i < buffers.size(); ++i) {
327 const CaptureRequestBuffer& reqBuffer = buffers[i];
328 StreamBuffer& resBuffer = captureResult.outputBuffers[i];
329 resBuffer.streamId = reqBuffer.getStreamId();
330 resBuffer.bufferId = reqBuffer.getBufferId();
331 resBuffer.status = BufferStatus::ERROR;
332 sp<Fence> fence = reqBuffer.getFence();
333 if (fence != nullptr && fence->isValid()) {
334 resBuffer.releaseFence.fds.emplace_back(fence->dup());
335 }
336 }
337
338 auto status = mCameraDeviceCallback->notify(
339 {createRequestErrorNotifyMsg(request.getFrameNumber())});
340 if (!status.isOk()) {
341 ALOGE("%s: notify call failed: %s", __func__,
342 status.getDescription().c_str());
343 return;
344 }
345
346 std::vector<::aidl::android::hardware::camera::device::CaptureResult>
347 captureResults(1);
348 captureResults[0] = std::move(captureResult);
349
350 status = mCameraDeviceCallback->processCaptureResult(captureResults);
351 if (!status.isOk()) {
352 ALOGE("%s: processCaptureResult call failed: %s", __func__,
353 status.getDescription().c_str());
354 }
355}
356
// Fills a BLOB-format stream buffer with a JPEG: the current input-surface
// frame if one exists, otherwise a solid black JPEG of the stream's
// dimensions. Returns INTERNAL_ERROR on any lock/fetch/compress failure.
//
// NOTE(review): the raw fence fd is passed to AHardwareBuffer_lockPlanes via
// fence->get() (not dup()) — verify the NDK call does not take ownership of
// the fd, otherwise the Fence object would double-close it.
ndk::ScopedAStatus VirtualCameraRenderThread::renderIntoBlobStreamBuffer(
    const int streamId, const int bufferId, sp<Fence> fence) {
  ALOGV("%s", __func__);
  std::shared_ptr<AHardwareBuffer> hwBuffer =
      mSessionContext.fetchHardwareBuffer(streamId, bufferId);
  if (hwBuffer == nullptr) {
    ALOGE("%s: Failed to fetch hardware buffer %d for streamId %d", __func__,
          bufferId, streamId);
    return cameraStatus(Status::INTERNAL_ERROR);
  }

  // Stream config is needed for the destination buffer size (and dimensions
  // in the no-frame fallback path).
  std::optional<Stream> stream = mSessionContext.getStreamConfig(streamId);
  if (!stream.has_value()) {
    ALOGE("%s, failed to fetch information about stream %d", __func__, streamId);
    return cameraStatus(Status::INTERNAL_ERROR);
  }

  AHardwareBuffer_Planes planes_info;

  // Lock the destination buffer for CPU access; the lock itself waits on the
  // acquire fence (if any).
  // NOTE(review): the usage flag is CPU_READ_RARELY but the plane is written
  // to by compressJpeg below — confirm whether a CPU_WRITE usage is intended.
  int32_t rawFence = fence != nullptr ? fence->get() : -1;
  int result = AHardwareBuffer_lockPlanes(hwBuffer.get(),
                                          AHARDWAREBUFFER_USAGE_CPU_READ_RARELY,
                                          rawFence, nullptr, &planes_info);
  if (result != OK) {
    ALOGE("%s: Failed to lock planes for BLOB buffer: %d", __func__, result);
    return cameraStatus(Status::INTERNAL_ERROR);
  }

  sp<GraphicBuffer> gBuffer = mEglSurfaceTexture->getCurrentBuffer();
  bool compressionSuccess = true;
  if (gBuffer != nullptr) {
    // A frame has been produced on the input surface: lock it as YCbCr and
    // compress it into the BLOB buffer's first plane.
    android_ycbcr ycbcr;
    status_t status =
        gBuffer->lockYCbCr(AHARDWAREBUFFER_USAGE_CPU_READ_OFTEN, &ycbcr);
    ALOGV("Locked buffers");
    if (status != NO_ERROR) {
      // Unlock the destination buffer before bailing out — it was already
      // locked above.
      AHardwareBuffer_unlock(hwBuffer.get(), nullptr);
      ALOGE("%s: Failed to lock graphic buffer: %d", __func__, status);
      return cameraStatus(Status::INTERNAL_ERROR);
    }

    compressionSuccess =
        compressJpeg(gBuffer->getWidth(), gBuffer->getHeight(), ycbcr,
                     stream->bufferSize, planes_info.planes[0].data);

    status_t res = gBuffer->unlock();
    if (res != NO_ERROR) {
      // Unlock failure is logged but not treated as a capture error.
      ALOGE("Failed to unlock graphic buffer: %d", res);
    }
  } else {
    // No frame written to the input surface yet: emit a black JPEG so the
    // buffer still contains a valid image.
    compressionSuccess =
        compressBlackJpeg(stream->width, stream->height, stream->bufferSize,
                          planes_info.planes[0].data);
  }
  AHardwareBuffer_unlock(hwBuffer.get(), nullptr);
  ALOGV("Unlocked buffers");
  return compressionSuccess ? ndk::ScopedAStatus::ok()
                            : cameraStatus(Status::INTERNAL_ERROR);
}
416
// Renders the current input-surface frame into the EGL framebuffer backing
// the given stream buffer. Waits up to kAcquireFenceTimeout on the buffer's
// acquire fence before drawing. Returns ILLEGAL_ARGUMENT if no framebuffer
// exists for the buffer, INTERNAL_ERROR on fence timeout.
ndk::ScopedAStatus VirtualCameraRenderThread::renderIntoImageStreamBuffer(
    int streamId, int bufferId, sp<Fence> fence) {
  ALOGV("%s", __func__);

  const std::chrono::nanoseconds before =
      std::chrono::duration_cast<std::chrono::nanoseconds>(
          std::chrono::steady_clock::now().time_since_epoch());

  // Fetch (or lazily create) the EGL framebuffer wrapping this stream buffer.
  std::shared_ptr<EglFrameBuffer> framebuffer =
      mSessionContext.fetchOrCreateEglFramebuffer(
          mEglDisplayContext->getEglDisplay(), streamId, bufferId);
  if (framebuffer == nullptr) {
    ALOGE(
        "%s: Failed to get EGL framebuffer corresponding to buffer id "
        "%d for streamId %d",
        __func__, bufferId, streamId);
    return cameraStatus(Status::ILLEGAL_ARGUMENT);
  }

  // Wait for fence to clear.
  if (fence != nullptr && fence->isValid()) {
    status_t ret = fence->wait(kAcquireFenceTimeout.count());
    if (ret != 0) {
      ALOGE(
          "Timeout while waiting for the acquire fence for buffer %d"
          " for streamId %d",
          bufferId, streamId);
      return cameraStatus(Status::INTERNAL_ERROR);
    }
  }

  mEglDisplayContext->makeCurrent();
  framebuffer->beforeDraw();

  if (mEglSurfaceTexture->getCurrentBuffer() == nullptr) {
    // If there's no current buffer, nothing was written to the surface and
    // texture is not initialized yet. Fill the framebuffer with a solid
    // clear color instead of rendering the texture.
    // NOTE(review): the original comment said "black", but the clear color
    // below is (0, 0.5, 0.5) — a teal — with zero alpha. Confirm which is
    // intended.
    glClearColor(0.0f, 0.5f, 0.5f, 0.0f);
    glClear(GL_COLOR_BUFFER_BIT);
  } else {
    // Latch the newest frame and draw the textured quad into the framebuffer.
    mEglTextureProgram->draw(mEglSurfaceTexture->updateTexture());
  }
  framebuffer->afterDraw();

  const std::chrono::nanoseconds after =
      std::chrono::duration_cast<std::chrono::nanoseconds>(
          std::chrono::steady_clock::now().time_since_epoch());

  ALOGV("Rendering to buffer %d, stream %d took %lld ns", bufferId, streamId,
        after.count() - before.count());

  return ndk::ScopedAStatus::ok();
}
472
473} // namespace virtualcamera
474} // namespace companion
475} // namespace android