blob: 8a2db1cc0b35fb1f4bd95cea63ede5849b42ee33 [file] [log] [blame]
Jan Sebechlebsky5cb39962023-11-22 17:33:07 +01001/*
2 * Copyright (C) 2023 The Android Open Source Project
3 *
4 * Licensed under the Apache License, Version 2.0 (the "License");
5 * you may not use this file except in compliance with the License.
6 * You may obtain a copy of the License at
7 *
8 * http://www.apache.org/licenses/LICENSE-2.0
9 *
10 * Unless required by applicable law or agreed to in writing, software
11 * distributed under the License is distributed on an "AS IS" BASIS,
12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 * See the License for the specific language governing permissions and
14 * limitations under the License.
15 */
16
17#define LOG_TAG "VirtualCameraRenderThread"
18#include "VirtualCameraRenderThread.h"
19
#include <chrono>
#include <cstddef>
#include <future>
#include <memory>
#include <mutex>
#include <thread>
#include <utility>
27#include "VirtualCameraSessionContext.h"
28#include "aidl/android/hardware/camera/common/Status.h"
29#include "aidl/android/hardware/camera/device/BufferStatus.h"
30#include "aidl/android/hardware/camera/device/CameraMetadata.h"
31#include "aidl/android/hardware/camera/device/CaptureResult.h"
32#include "aidl/android/hardware/camera/device/ErrorCode.h"
33#include "aidl/android/hardware/camera/device/ICameraDeviceCallback.h"
34#include "aidl/android/hardware/camera/device/NotifyMsg.h"
35#include "aidl/android/hardware/camera/device/ShutterMsg.h"
36#include "aidl/android/hardware/camera/device/StreamBuffer.h"
37#include "android-base/thread_annotations.h"
38#include "android/binder_auto_utils.h"
39#include "android/hardware_buffer.h"
40#include "util/EglFramebuffer.h"
41#include "util/JpegUtil.h"
42#include "util/MetadataBuilder.h"
43#include "util/TestPatternHelper.h"
44#include "util/Util.h"
45#include "utils/Errors.h"
46
47namespace android {
48namespace companion {
49namespace virtualcamera {
50
51using ::aidl::android::hardware::camera::common::Status;
52using ::aidl::android::hardware::camera::device::BufferStatus;
53using ::aidl::android::hardware::camera::device::CameraMetadata;
54using ::aidl::android::hardware::camera::device::CaptureResult;
55using ::aidl::android::hardware::camera::device::ErrorCode;
56using ::aidl::android::hardware::camera::device::ErrorMsg;
57using ::aidl::android::hardware::camera::device::ICameraDeviceCallback;
58using ::aidl::android::hardware::camera::device::NotifyMsg;
59using ::aidl::android::hardware::camera::device::ShutterMsg;
60using ::aidl::android::hardware::camera::device::Stream;
61using ::aidl::android::hardware::camera::device::StreamBuffer;
62using ::aidl::android::hardware::graphics::common::PixelFormat;
63using ::android::base::ScopedLockAssertion;
64
65namespace {
66
67using namespace std::chrono_literals;
68
// Maximum time to wait for a buffer's acquire fence to signal before the
// render attempt for that buffer is abandoned with an error.
static constexpr std::chrono::milliseconds kAcquireFenceTimeout = 500ms;
70
71CameraMetadata createCaptureResultMetadata(
72 const std::chrono::nanoseconds timestamp) {
73 std::unique_ptr<CameraMetadata> metadata =
74 MetadataBuilder().setSensorTimestamp(timestamp).build();
75 if (metadata == nullptr) {
76 ALOGE("%s: Failed to build capture result metadata", __func__);
77 return CameraMetadata();
78 }
79 return std::move(*metadata);
80}
81
82NotifyMsg createShutterNotifyMsg(int frameNumber,
83 std::chrono::nanoseconds timestamp) {
84 NotifyMsg msg;
85 msg.set<NotifyMsg::Tag::shutter>(ShutterMsg{
86 .frameNumber = frameNumber,
87 .timestamp = timestamp.count(),
88 });
89 return msg;
90}
91
92NotifyMsg createBufferErrorNotifyMsg(int frameNumber, int streamId) {
93 NotifyMsg msg;
94 msg.set<NotifyMsg::Tag::error>(ErrorMsg{.frameNumber = frameNumber,
95 .errorStreamId = streamId,
96 .errorCode = ErrorCode::ERROR_BUFFER});
97 return msg;
98}
99
100NotifyMsg createRequestErrorNotifyMsg(int frameNumber) {
101 NotifyMsg msg;
102 msg.set<NotifyMsg::Tag::error>(ErrorMsg{
Jan Sebechlebskyb0d8cab2023-11-28 10:55:04 +0100103 .frameNumber = frameNumber,
104 // errorStreamId needs to be set to -1 for ERROR_REQUEST
105 // (not tied to specific stream).
106 .errorStreamId = -1,
107 .errorCode = ErrorCode::ERROR_REQUEST});
Jan Sebechlebsky5cb39962023-11-22 17:33:07 +0100108 return msg;
109}
110
111} // namespace
112
113CaptureRequestBuffer::CaptureRequestBuffer(int streamId, int bufferId,
114 sp<Fence> fence)
115 : mStreamId(streamId), mBufferId(bufferId), mFence(fence) {
116}
117
// Returns the id of the stream this buffer belongs to.
int CaptureRequestBuffer::getStreamId() const {
  return mStreamId;
}
121
// Returns the id of this buffer within its stream.
int CaptureRequestBuffer::getBufferId() const {
  return mBufferId;
}
125
// Returns the acquire fence for this buffer; may be null (callers in this
// file check for nullptr / isValid() before using it).
sp<Fence> CaptureRequestBuffer::getFence() const {
  return mFence;
}
129
130VirtualCameraRenderThread::VirtualCameraRenderThread(
131 VirtualCameraSessionContext& sessionContext, const int mWidth,
132 const int mHeight,
133 std::shared_ptr<ICameraDeviceCallback> cameraDeviceCallback, bool testMode)
134 : mCameraDeviceCallback(cameraDeviceCallback),
135 mInputSurfaceWidth(mWidth),
136 mInputSurfaceHeight(mHeight),
137 mTestMode(testMode),
138 mSessionContext(sessionContext) {
139}
140
// Signals the render thread to exit (stop() sets mPendingExit and wakes
// it), then blocks until the thread has finished. joinable() guards
// against destruction before start() was ever called.
VirtualCameraRenderThread::~VirtualCameraRenderThread() {
  stop();
  if (mThread.joinable()) {
    mThread.join();
  }
}
147
// Captures one camera capture request: the frame number and the set of
// output buffers that need to be filled for it. The buffer list is copied.
ProcessCaptureRequestTask::ProcessCaptureRequestTask(
    int frameNumber, const std::vector<CaptureRequestBuffer>& requestBuffers)
    : mFrameNumber(frameNumber), mBuffers(requestBuffers) {
}
152
// Returns the frame number of the capture request this task represents.
int ProcessCaptureRequestTask::getFrameNumber() const {
  return mFrameNumber;
}
156
// Returns the output buffers to be filled for this request. The reference
// is valid for the lifetime of the task.
const std::vector<CaptureRequestBuffer>& ProcessCaptureRequestTask::getBuffers()
    const {
  return mBuffers;
}
161
162void VirtualCameraRenderThread::enqueueTask(
163 std::unique_ptr<ProcessCaptureRequestTask> task) {
164 std::lock_guard<std::mutex> lock(mLock);
165 mQueue.emplace_back(std::move(task));
166 mCondVar.notify_one();
167}
168
// Drains every queued capture task, reporting each back to the camera
// framework as a failed request (ERROR_REQUEST) without rendering.
// NOTE(review): mLock is held across the binder callbacks made inside
// flushCaptureRequest — presumably acceptable since producers only enqueue,
// but verify nothing on the callback path can re-enter this object.
void VirtualCameraRenderThread::flush() {
  std::lock_guard<std::mutex> lock(mLock);
  while (!mQueue.empty()) {
    std::unique_ptr<ProcessCaptureRequestTask> task = std::move(mQueue.front());
    mQueue.pop_front();
    flushCaptureRequest(*task);
  }
}
177
// Launches the render thread; its entry point is threadLoop(). Call at
// most once — the previous std::thread would be overwritten unjoined.
void VirtualCameraRenderThread::start() {
  mThread = std::thread(&VirtualCameraRenderThread::threadLoop, this);
}
181
182void VirtualCameraRenderThread::stop() {
183 {
184 std::lock_guard<std::mutex> lock(mLock);
185 mPendingExit = true;
186 mCondVar.notify_one();
187 }
188}
189
190sp<Surface> VirtualCameraRenderThread::getInputSurface() {
191 return mInputSurfacePromise.get_future().get();
192}
193
// Blocks until a task is queued or an exit was requested. Returns nullptr
// when the thread should terminate, otherwise the next task to process.
std::unique_ptr<ProcessCaptureRequestTask>
VirtualCameraRenderThread::dequeueTask() {
  std::unique_lock<std::mutex> lock(mLock);
  // Clang's thread safety analysis doesn't perform alias analysis,
  // so it doesn't support moveable std::unique_lock.
  //
  // Lock assertion below is basically explicit declaration that
  // the lock is held in this scope, which is true, since it's only
  // released during waiting inside mCondVar.wait calls.
  ScopedLockAssertion lockAssertion(mLock);

  // Predicate-form wait: immune to spurious wakeups and to a notify that
  // fired before this thread started waiting.
  mCondVar.wait(lock, [this]() REQUIRES(mLock) {
    return mPendingExit || !mQueue.empty();
  });
  // Exit takes priority over any tasks still queued.
  // NOTE(review): those leftover tasks are not reported back here —
  // presumably flush() covers that path; confirm with the session teardown.
  if (mPendingExit) {
    return nullptr;
  }
  std::unique_ptr<ProcessCaptureRequestTask> task = std::move(mQueue.front());
  mQueue.pop_front();
  return task;
}
215
// Render thread entry point: creates the EGL objects, publishes the input
// surface to waiters of getInputSurface(), then processes capture tasks
// until stop() is requested.
void VirtualCameraRenderThread::threadLoop() {
  ALOGV("Render thread starting");

  // EGL setup happens here rather than in the constructor — presumably so
  // the EGL context is created on (and current for) the thread that does
  // all the rendering; confirm against EglDisplayContext's contract.
  mEglDisplayContext = std::make_unique<EglDisplayContext>();
  mEglTextureProgram = std::make_unique<EglTextureProgram>();
  mEglSurfaceTexture = std::make_unique<EglSurfaceTexture>(mInputSurfaceWidth,
                                                           mInputSurfaceHeight);
  // Unblocks getInputSurface() callers.
  mInputSurfacePromise.set_value(mEglSurfaceTexture->getSurface());

  // dequeueTask() returns nullptr once mPendingExit is set, ending the loop.
  while (std::unique_ptr<ProcessCaptureRequestTask> task = dequeueTask()) {
    processCaptureRequest(*task);
  }

  ALOGV("Render thread exiting");
}
231
// Processes one capture request end-to-end: latches the newest input
// frame, renders it into every requested output buffer, then reports a
// shutter notification (plus per-buffer errors) and finally the capture
// result to the camera framework callback.
void VirtualCameraRenderThread::processCaptureRequest(
    const ProcessCaptureRequestTask& request) {
  // Single timestamp reused for both the shutter message and the sensor
  // timestamp in the result metadata, so the two agree.
  const std::chrono::nanoseconds timestamp =
      std::chrono::duration_cast<std::chrono::nanoseconds>(
          std::chrono::steady_clock::now().time_since_epoch());

  CaptureResult captureResult;
  captureResult.fmqResultSize = 0;
  captureResult.frameNumber = request.getFrameNumber();
  // Partial result needs to be set to 1 when metadata are present.
  captureResult.partialResult = 1;
  // streamId -1 marks the input buffer as unused for this request.
  captureResult.inputBuffer.streamId = -1;
  captureResult.physicalCameraMetadata.resize(0);
  captureResult.result = createCaptureResultMetadata(timestamp);

  const std::vector<CaptureRequestBuffer>& buffers = request.getBuffers();
  captureResult.outputBuffers.resize(buffers.size());

  if (mTestMode) {
    // In test mode let's just render something to the Surface ourselves.
    renderTestPatternYCbCr420(mEglSurfaceTexture->getSurface(),
                              request.getFrameNumber());
  }

  // Latch the most recently queued input frame into the GL texture.
  mEglSurfaceTexture->updateTexture();

  // Fill each output buffer; any failure marks just that buffer as ERROR
  // while the rest of the request still completes.
  for (int i = 0; i < buffers.size(); ++i) {
    const CaptureRequestBuffer& reqBuffer = buffers[i];
    StreamBuffer& resBuffer = captureResult.outputBuffers[i];
    resBuffer.streamId = reqBuffer.getStreamId();
    resBuffer.bufferId = reqBuffer.getBufferId();
    resBuffer.status = BufferStatus::OK;

    const std::optional<Stream> streamConfig =
        mSessionContext.getStreamConfig(reqBuffer.getStreamId());

    if (!streamConfig.has_value()) {
      resBuffer.status = BufferStatus::ERROR;
      continue;
    }

    // BLOB streams receive a JPEG-compressed frame; all other formats are
    // rendered directly into the stream buffer via EGL.
    auto status = streamConfig->format == PixelFormat::BLOB
                      ? renderIntoBlobStreamBuffer(
                            reqBuffer.getStreamId(), reqBuffer.getBufferId(),
                            streamConfig->bufferSize, reqBuffer.getFence())
                      : renderIntoImageStreamBuffer(reqBuffer.getStreamId(),
                                                    reqBuffer.getBufferId(),
                                                    reqBuffer.getFence());
    if (!status.isOk()) {
      resBuffer.status = BufferStatus::ERROR;
    }
  }

  // Shutter first, then one ERROR_BUFFER notification per failed buffer.
  std::vector<NotifyMsg> notifyMsg{
      createShutterNotifyMsg(request.getFrameNumber(), timestamp)};
  for (const StreamBuffer& resBuffer : captureResult.outputBuffers) {
    if (resBuffer.status != BufferStatus::OK) {
      notifyMsg.push_back(createBufferErrorNotifyMsg(request.getFrameNumber(),
                                                     resBuffer.streamId));
    }
  }

  auto status = mCameraDeviceCallback->notify(notifyMsg);
  if (!status.isOk()) {
    ALOGE("%s: notify call failed: %s", __func__,
          status.getDescription().c_str());
    return;
  }

  std::vector<::aidl::android::hardware::camera::device::CaptureResult>
      captureResults(1);
  captureResults[0] = std::move(captureResult);

  status = mCameraDeviceCallback->processCaptureResult(captureResults);
  if (!status.isOk()) {
    ALOGE("%s: processCaptureResult call failed: %s", __func__,
          status.getDescription().c_str());
    return;
  }

  ALOGD("%s: Successfully called processCaptureResult", __func__);
}
314
// Reports a capture request back to the framework as failed without
// rendering anything: sends an ERROR_REQUEST notification and a result in
// which every output buffer has BufferStatus::ERROR.
void VirtualCameraRenderThread::flushCaptureRequest(
    const ProcessCaptureRequestTask& request) {
  CaptureResult captureResult;
  captureResult.fmqResultSize = 0;
  captureResult.frameNumber = request.getFrameNumber();
  // streamId -1 marks the input buffer as unused for this request.
  captureResult.inputBuffer.streamId = -1;

  const std::vector<CaptureRequestBuffer>& buffers = request.getBuffers();
  captureResult.outputBuffers.resize(buffers.size());

  for (int i = 0; i < buffers.size(); ++i) {
    const CaptureRequestBuffer& reqBuffer = buffers[i];
    StreamBuffer& resBuffer = captureResult.outputBuffers[i];
    resBuffer.streamId = reqBuffer.getStreamId();
    resBuffer.bufferId = reqBuffer.getBufferId();
    resBuffer.status = BufferStatus::ERROR;
    // The buffer was never rendered to, so hand a duplicate of its
    // (still unsignaled) acquire fence back as the release fence.
    sp<Fence> fence = reqBuffer.getFence();
    if (fence != nullptr && fence->isValid()) {
      resBuffer.releaseFence.fds.emplace_back(fence->dup());
    }
  }

  auto status = mCameraDeviceCallback->notify(
      {createRequestErrorNotifyMsg(request.getFrameNumber())});
  if (!status.isOk()) {
    ALOGE("%s: notify call failed: %s", __func__,
          status.getDescription().c_str());
    return;
  }

  std::vector<::aidl::android::hardware::camera::device::CaptureResult>
      captureResults(1);
  captureResults[0] = std::move(captureResult);

  status = mCameraDeviceCallback->processCaptureResult(captureResults);
  if (!status.isOk()) {
    ALOGE("%s: processCaptureResult call failed: %s", __func__,
          status.getDescription().c_str());
  }
}
355
356ndk::ScopedAStatus VirtualCameraRenderThread::renderIntoBlobStreamBuffer(
357 const int streamId, const int bufferId, const size_t bufferSize,
358 sp<Fence> fence) {
359 ALOGV("%s", __func__);
360 sp<GraphicBuffer> gBuffer = mEglSurfaceTexture->getCurrentBuffer();
Jan Sebechlebsky96772402023-11-23 15:56:58 +0100361 if (gBuffer == nullptr) {
362 // Most probably nothing was yet written to input surface if we reached this.
363 ALOGE("%s: Cannot fetch most recent buffer from SurfaceTexture", __func__);
364 return cameraStatus(Status::INTERNAL_ERROR);
365 }
Jan Sebechlebsky5cb39962023-11-22 17:33:07 +0100366 std::shared_ptr<AHardwareBuffer> hwBuffer =
367 mSessionContext.fetchHardwareBuffer(streamId, bufferId);
368
369 AHardwareBuffer_Planes planes_info;
370
371 int32_t rawFence = fence != nullptr ? fence->get() : -1;
372 int result = AHardwareBuffer_lockPlanes(hwBuffer.get(),
373 AHARDWAREBUFFER_USAGE_CPU_READ_RARELY,
374 rawFence, nullptr, &planes_info);
375 if (result != OK) {
376 ALOGE("%s: Failed to lock planes for BLOB buffer: %d", __func__, result);
377 return cameraStatus(Status::INTERNAL_ERROR);
378 }
379
380 android_ycbcr ycbcr;
381 status_t status =
382 gBuffer->lockYCbCr(AHARDWAREBUFFER_USAGE_CPU_READ_OFTEN, &ycbcr);
383 ALOGV("Locked buffers");
384 if (status != NO_ERROR) {
385 AHardwareBuffer_unlock(hwBuffer.get(), nullptr);
386 ALOGE("%s: Failed to lock graphic buffer: %d", __func__, status);
387 return cameraStatus(Status::INTERNAL_ERROR);
388 }
389
390 bool success = compressJpeg(gBuffer->getWidth(), gBuffer->getHeight(), ycbcr,
391 bufferSize, planes_info.planes[0].data);
392
393 status_t res = gBuffer->unlock();
394 if (res != NO_ERROR) {
395 ALOGE("Failed to unlock graphic buffer: %d", res);
396 }
397 AHardwareBuffer_unlock(hwBuffer.get(), nullptr);
398 ALOGV("Unlocked buffers");
399 return success ? ndk::ScopedAStatus::ok()
400 : cameraStatus(Status::INTERNAL_ERROR);
401}
402
// Renders the most recently latched input frame into the (non-BLOB) image
// stream buffer identified by (streamId, bufferId) using EGL. Waits up to
// kAcquireFenceTimeout for the buffer's acquire fence before drawing.
// Returns ILLEGAL_ARGUMENT if no EGL framebuffer exists for the buffer,
// INTERNAL_ERROR on fence timeout, ok() otherwise.
ndk::ScopedAStatus VirtualCameraRenderThread::renderIntoImageStreamBuffer(
    int streamId, int bufferId, sp<Fence> fence) {
  ALOGV("%s", __func__);

  // Start of the render-duration measurement logged at the bottom.
  const std::chrono::nanoseconds before =
      std::chrono::duration_cast<std::chrono::nanoseconds>(
          std::chrono::steady_clock::now().time_since_epoch());

  // Render test pattern using EGL.
  std::shared_ptr<EglFrameBuffer> framebuffer =
      mSessionContext.fetchOrCreateEglFramebuffer(
          mEglDisplayContext->getEglDisplay(), streamId, bufferId);
  if (framebuffer == nullptr) {
    ALOGE(
        "%s: Failed to get EGL framebuffer corresponding to buffer id "
        "%d for streamId %d",
        __func__, bufferId, streamId);
    return cameraStatus(Status::ILLEGAL_ARGUMENT);
  }

  // Wait for fence to clear.
  if (fence != nullptr && fence->isValid()) {
    status_t ret = fence->wait(kAcquireFenceTimeout.count());
    if (ret != 0) {
      ALOGE(
          "Timeout while waiting for the acquire fence for buffer %d"
          " for streamId %d",
          bufferId, streamId);
      return cameraStatus(Status::INTERNAL_ERROR);
    }
  }

  // Bind the EGL context and target framebuffer, then draw the latched
  // texture into it.
  mEglDisplayContext->makeCurrent();
  framebuffer->beforeDraw();

  mEglTextureProgram->draw(mEglSurfaceTexture->updateTexture());
  framebuffer->afterDraw();

  const std::chrono::nanoseconds after =
      std::chrono::duration_cast<std::chrono::nanoseconds>(
          std::chrono::steady_clock::now().time_since_epoch());

  ALOGV("Rendering to buffer %d, stream %d took %lld ns", bufferId, streamId,
        after.count() - before.count());

  return ndk::ScopedAStatus::ok();
}
450
451} // namespace virtualcamera
452} // namespace companion
453} // namespace android