blob: 575679745f12264d55b043961edd51f71d8540f6 [file] [log] [blame]
Jan Sebechlebsky5cb39962023-11-22 17:33:07 +01001/*
2 * Copyright (C) 2023 The Android Open Source Project
3 *
4 * Licensed under the Apache License, Version 2.0 (the "License");
5 * you may not use this file except in compliance with the License.
6 * You may obtain a copy of the License at
7 *
8 * http://www.apache.org/licenses/LICENSE-2.0
9 *
10 * Unless required by applicable law or agreed to in writing, software
11 * distributed under the License is distributed on an "AS IS" BASIS,
12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 * See the License for the specific language governing permissions and
14 * limitations under the License.
15 */
16
17#define LOG_TAG "VirtualCameraRenderThread"
18#include "VirtualCameraRenderThread.h"
19
20#include <chrono>
21#include <cstddef>
22#include <future>
23#include <memory>
24#include <mutex>
25#include <thread>
26
27#include "VirtualCameraSessionContext.h"
28#include "aidl/android/hardware/camera/common/Status.h"
29#include "aidl/android/hardware/camera/device/BufferStatus.h"
30#include "aidl/android/hardware/camera/device/CameraMetadata.h"
31#include "aidl/android/hardware/camera/device/CaptureResult.h"
32#include "aidl/android/hardware/camera/device/ErrorCode.h"
33#include "aidl/android/hardware/camera/device/ICameraDeviceCallback.h"
34#include "aidl/android/hardware/camera/device/NotifyMsg.h"
35#include "aidl/android/hardware/camera/device/ShutterMsg.h"
36#include "aidl/android/hardware/camera/device/StreamBuffer.h"
37#include "android-base/thread_annotations.h"
38#include "android/binder_auto_utils.h"
39#include "android/hardware_buffer.h"
40#include "util/EglFramebuffer.h"
41#include "util/JpegUtil.h"
42#include "util/MetadataBuilder.h"
43#include "util/TestPatternHelper.h"
44#include "util/Util.h"
45#include "utils/Errors.h"
46
47namespace android {
48namespace companion {
49namespace virtualcamera {
50
51using ::aidl::android::hardware::camera::common::Status;
52using ::aidl::android::hardware::camera::device::BufferStatus;
53using ::aidl::android::hardware::camera::device::CameraMetadata;
54using ::aidl::android::hardware::camera::device::CaptureResult;
55using ::aidl::android::hardware::camera::device::ErrorCode;
56using ::aidl::android::hardware::camera::device::ErrorMsg;
57using ::aidl::android::hardware::camera::device::ICameraDeviceCallback;
58using ::aidl::android::hardware::camera::device::NotifyMsg;
59using ::aidl::android::hardware::camera::device::ShutterMsg;
60using ::aidl::android::hardware::camera::device::Stream;
61using ::aidl::android::hardware::camera::device::StreamBuffer;
62using ::aidl::android::hardware::graphics::common::PixelFormat;
63using ::android::base::ScopedLockAssertion;
64
65namespace {
66
67using namespace std::chrono_literals;
68
69static constexpr std::chrono::milliseconds kAcquireFenceTimeout = 500ms;
70
71CameraMetadata createCaptureResultMetadata(
72 const std::chrono::nanoseconds timestamp) {
73 std::unique_ptr<CameraMetadata> metadata =
74 MetadataBuilder().setSensorTimestamp(timestamp).build();
75 if (metadata == nullptr) {
76 ALOGE("%s: Failed to build capture result metadata", __func__);
77 return CameraMetadata();
78 }
79 return std::move(*metadata);
80}
81
82NotifyMsg createShutterNotifyMsg(int frameNumber,
83 std::chrono::nanoseconds timestamp) {
84 NotifyMsg msg;
85 msg.set<NotifyMsg::Tag::shutter>(ShutterMsg{
86 .frameNumber = frameNumber,
87 .timestamp = timestamp.count(),
88 });
89 return msg;
90}
91
92NotifyMsg createBufferErrorNotifyMsg(int frameNumber, int streamId) {
93 NotifyMsg msg;
94 msg.set<NotifyMsg::Tag::error>(ErrorMsg{.frameNumber = frameNumber,
95 .errorStreamId = streamId,
96 .errorCode = ErrorCode::ERROR_BUFFER});
97 return msg;
98}
99
100NotifyMsg createRequestErrorNotifyMsg(int frameNumber) {
101 NotifyMsg msg;
102 msg.set<NotifyMsg::Tag::error>(ErrorMsg{
103 .frameNumber = frameNumber, .errorCode = ErrorCode::ERROR_REQUEST});
104 return msg;
105}
106
107} // namespace
108
// Bundles the (streamId, bufferId) pair identifying one output buffer of a
// capture request together with its acquire fence.
CaptureRequestBuffer::CaptureRequestBuffer(int streamId, int bufferId,
                                           sp<Fence> fence)
    : mStreamId(streamId), mBufferId(bufferId), mFence(fence) {
}
113
// Returns the id of the stream this buffer belongs to.
int CaptureRequestBuffer::getStreamId() const {
  return mStreamId;
}
117
// Returns the id of this buffer within its stream.
int CaptureRequestBuffer::getBufferId() const {
  return mBufferId;
}
121
122sp<Fence> CaptureRequestBuffer::getFence() const {
123 return mFence;
124}
125
126VirtualCameraRenderThread::VirtualCameraRenderThread(
127 VirtualCameraSessionContext& sessionContext, const int mWidth,
128 const int mHeight,
129 std::shared_ptr<ICameraDeviceCallback> cameraDeviceCallback, bool testMode)
130 : mCameraDeviceCallback(cameraDeviceCallback),
131 mInputSurfaceWidth(mWidth),
132 mInputSurfaceHeight(mHeight),
133 mTestMode(testMode),
134 mSessionContext(sessionContext) {
135}
136
// Requests the render thread to exit and joins it, so the thread never
// outlives the session state it references.
VirtualCameraRenderThread::~VirtualCameraRenderThread() {
  stop();
  if (mThread.joinable()) {
    mThread.join();
  }
}
143
// Captures the frame number and the output buffers of a single capture
// request; |requestBuffers| is copied so the task owns its buffer list.
ProcessCaptureRequestTask::ProcessCaptureRequestTask(
    int frameNumber, const std::vector<CaptureRequestBuffer>& requestBuffers)
    : mFrameNumber(frameNumber), mBuffers(requestBuffers) {
}
148
// Returns the frame number of the capture request this task represents.
int ProcessCaptureRequestTask::getFrameNumber() const {
  return mFrameNumber;
}
152
// Returns the output buffers to be filled for this capture request.
const std::vector<CaptureRequestBuffer>& ProcessCaptureRequestTask::getBuffers()
    const {
  return mBuffers;
}
157
158void VirtualCameraRenderThread::enqueueTask(
159 std::unique_ptr<ProcessCaptureRequestTask> task) {
160 std::lock_guard<std::mutex> lock(mLock);
161 mQueue.emplace_back(std::move(task));
162 mCondVar.notify_one();
163}
164
165void VirtualCameraRenderThread::flush() {
166 std::lock_guard<std::mutex> lock(mLock);
167 for (auto task = std::move(mQueue.front()); !mQueue.empty();
168 mQueue.pop_front()) {
169 flushCaptureRequest(*task);
170 }
171}
172
// Launches threadLoop() on a dedicated thread.
// NOTE(review): assigning over a joinable std::thread calls std::terminate -
// confirm start() is invoked at most once per instance.
void VirtualCameraRenderThread::start() {
  mThread = std::thread(&VirtualCameraRenderThread::threadLoop, this);
}
176
177void VirtualCameraRenderThread::stop() {
178 {
179 std::lock_guard<std::mutex> lock(mLock);
180 mPendingExit = true;
181 mCondVar.notify_one();
182 }
183}
184
// Blocks until threadLoop() has created the input surface, then returns it.
// NOTE(review): std::promise::get_future may only be called once; a second
// call to this method throws std::future_error - confirm callers invoke it
// at most once per render thread.
sp<Surface> VirtualCameraRenderThread::getInputSurface() {
  return mInputSurfacePromise.get_future().get();
}
188
// Blocks until a task is available or stop() was requested.
// Returns the oldest queued task, or nullptr when the thread should exit.
std::unique_ptr<ProcessCaptureRequestTask>
VirtualCameraRenderThread::dequeueTask() {
  std::unique_lock<std::mutex> lock(mLock);
  // Clang's thread safety analysis doesn't perform alias analysis,
  // so it doesn't support moveable std::unique_lock.
  //
  // Lock assertion below is basically explicit declaration that
  // the lock is held in this scope, which is true, since it's only
  // released during waiting inside mCondVar.wait calls.
  ScopedLockAssertion lockAssertion(mLock);

  mCondVar.wait(lock, [this]() REQUIRES(mLock) {
    return mPendingExit || !mQueue.empty();
  });
  // NOTE(review): when exiting, any tasks still queued are left un-flushed
  // here - confirm callers flush() before stop() if that matters.
  if (mPendingExit) {
    return nullptr;
  }
  std::unique_ptr<ProcessCaptureRequestTask> task = std::move(mQueue.front());
  mQueue.pop_front();
  return task;
}
210
// Thread entry point: creates the EGL state on this thread, publishes the
// input surface, then processes capture tasks until dequeueTask() returns
// nullptr (i.e. stop() was requested).
void VirtualCameraRenderThread::threadLoop() {
  ALOGV("Render thread starting");

  // EGL objects are created here, on the render thread, so all subsequent
  // drawing happens on the thread that owns the EGL context.
  mEglDisplayContext = std::make_unique<EglDisplayContext>();
  mEglTextureProgram = std::make_unique<EglTextureProgram>();
  mEglSurfaceTexture = std::make_unique<EglSurfaceTexture>(mInputSurfaceWidth,
                                                           mInputSurfaceHeight);
  // Unblocks any getInputSurface() callers waiting on the promise.
  mInputSurfacePromise.set_value(mEglSurfaceTexture->getSurface());

  while (std::unique_ptr<ProcessCaptureRequestTask> task = dequeueTask()) {
    processCaptureRequest(*task);
  }

  ALOGV("Render thread exiting");
}
226
// Processes one capture request: renders the current input-surface frame
// into every requested output buffer, then delivers the shutter/error
// notifications and the capture result through the camera device callback.
void VirtualCameraRenderThread::processCaptureRequest(
    const ProcessCaptureRequestTask& request) {
  // Sensor timestamp for this frame, taken from steady_clock so it is
  // monotonic.
  const std::chrono::nanoseconds timestamp =
      std::chrono::duration_cast<std::chrono::nanoseconds>(
          std::chrono::steady_clock::now().time_since_epoch());

  CaptureResult captureResult;
  captureResult.fmqResultSize = 0;
  captureResult.frameNumber = request.getFrameNumber();
  // The full result is delivered in a single partial result.
  captureResult.partialResult = 1;
  // streamId -1 marks "no input buffer" for this request.
  captureResult.inputBuffer.streamId = -1;
  captureResult.physicalCameraMetadata.resize(0);
  captureResult.result = createCaptureResultMetadata(timestamp);

  const std::vector<CaptureRequestBuffer>& buffers = request.getBuffers();
  captureResult.outputBuffers.resize(buffers.size());

  if (mTestMode) {
    // In test mode let's just render something to the Surface ourselves.
    renderTestPatternYCbCr420(mEglSurfaceTexture->getSurface(),
                              request.getFrameNumber());
  }

  // Latch the most recent frame submitted to the input surface.
  mEglSurfaceTexture->updateTexture();

  for (int i = 0; i < buffers.size(); ++i) {
    const CaptureRequestBuffer& reqBuffer = buffers[i];
    StreamBuffer& resBuffer = captureResult.outputBuffers[i];
    resBuffer.streamId = reqBuffer.getStreamId();
    resBuffer.bufferId = reqBuffer.getBufferId();
    resBuffer.status = BufferStatus::OK;

    const std::optional<Stream> streamConfig =
        mSessionContext.getStreamConfig(reqBuffer.getStreamId());

    if (!streamConfig.has_value()) {
      // Unknown stream: mark this buffer failed but keep processing the
      // remaining buffers of the request.
      resBuffer.status = BufferStatus::ERROR;
      continue;
    }

    // BLOB streams receive a JPEG-compressed frame; every other format is
    // rendered directly into the stream buffer via EGL.
    auto status = streamConfig->format == PixelFormat::BLOB
                      ? renderIntoBlobStreamBuffer(
                            reqBuffer.getStreamId(), reqBuffer.getBufferId(),
                            streamConfig->bufferSize, reqBuffer.getFence())
                      : renderIntoImageStreamBuffer(reqBuffer.getStreamId(),
                                                    reqBuffer.getBufferId(),
                                                    reqBuffer.getFence());
    if (!status.isOk()) {
      resBuffer.status = BufferStatus::ERROR;
    }
  }

  // The shutter notification always goes out; each failed buffer additionally
  // gets an ERROR_BUFFER notification.
  std::vector<NotifyMsg> notifyMsg{
      createShutterNotifyMsg(request.getFrameNumber(), timestamp)};
  for (const StreamBuffer& resBuffer : captureResult.outputBuffers) {
    if (resBuffer.status != BufferStatus::OK) {
      notifyMsg.push_back(createBufferErrorNotifyMsg(request.getFrameNumber(),
                                                     resBuffer.streamId));
    }
  }

  auto status = mCameraDeviceCallback->notify(notifyMsg);
  if (!status.isOk()) {
    ALOGE("%s: notify call failed: %s", __func__,
          status.getDescription().c_str());
    return;
  }

  std::vector<::aidl::android::hardware::camera::device::CaptureResult>
      captureResults(1);
  captureResults[0] = std::move(captureResult);

  status = mCameraDeviceCallback->processCaptureResult(captureResults);
  if (!status.isOk()) {
    ALOGE("%s: processCaptureResult call failed: %s", __func__,
          status.getDescription().c_str());
    return;
  }

  ALOGD("%s: Successfully called processCaptureResult", __func__);
}
308
// Completes a capture request without rendering: sends an ERROR_REQUEST
// notification and returns every buffer of the request in ERROR state so the
// client can reclaim them.
void VirtualCameraRenderThread::flushCaptureRequest(
    const ProcessCaptureRequestTask& request) {
  const std::chrono::nanoseconds timestamp =
      std::chrono::duration_cast<std::chrono::nanoseconds>(
          std::chrono::steady_clock::now().time_since_epoch());

  CaptureResult captureResult;
  captureResult.fmqResultSize = 0;
  captureResult.frameNumber = request.getFrameNumber();
  // streamId -1 marks "no input buffer" for this request.
  captureResult.inputBuffer.streamId = -1;
  captureResult.result = createCaptureResultMetadata(timestamp);

  const std::vector<CaptureRequestBuffer>& buffers = request.getBuffers();
  captureResult.outputBuffers.resize(buffers.size());

  for (int i = 0; i < buffers.size(); ++i) {
    const CaptureRequestBuffer& reqBuffer = buffers[i];
    StreamBuffer& resBuffer = captureResult.outputBuffers[i];
    resBuffer.streamId = reqBuffer.getStreamId();
    resBuffer.bufferId = reqBuffer.getBufferId();
    // Every buffer of a flushed request is returned as ERROR.
    resBuffer.status = BufferStatus::ERROR;
    sp<Fence> fence = reqBuffer.getFence();
    if (fence != nullptr && fence->isValid()) {
      // NOTE(review): the acquire fence is duped back as the release fence -
      // presumably so the consumer still waits for the original producer
      // before reusing the buffer; confirm against the HAL buffer contract.
      resBuffer.releaseFence.fds.emplace_back(fence->dup());
    }
  }

  auto status = mCameraDeviceCallback->notify(
      {createRequestErrorNotifyMsg(request.getFrameNumber())});
  if (!status.isOk()) {
    ALOGE("%s: notify call failed: %s", __func__,
          status.getDescription().c_str());
    return;
  }

  std::vector<::aidl::android::hardware::camera::device::CaptureResult>
      captureResults(1);
  captureResults[0] = std::move(captureResult);

  status = mCameraDeviceCallback->processCaptureResult(captureResults);
  if (!status.isOk()) {
    ALOGE("%s: processCaptureResult call failed: %s", __func__,
          status.getDescription().c_str());
  }
}
354
355ndk::ScopedAStatus VirtualCameraRenderThread::renderIntoBlobStreamBuffer(
356 const int streamId, const int bufferId, const size_t bufferSize,
357 sp<Fence> fence) {
358 ALOGV("%s", __func__);
359 sp<GraphicBuffer> gBuffer = mEglSurfaceTexture->getCurrentBuffer();
Jan Sebechlebsky96772402023-11-23 15:56:58 +0100360 if (gBuffer == nullptr) {
361 // Most probably nothing was yet written to input surface if we reached this.
362 ALOGE("%s: Cannot fetch most recent buffer from SurfaceTexture", __func__);
363 return cameraStatus(Status::INTERNAL_ERROR);
364 }
Jan Sebechlebsky5cb39962023-11-22 17:33:07 +0100365 std::shared_ptr<AHardwareBuffer> hwBuffer =
366 mSessionContext.fetchHardwareBuffer(streamId, bufferId);
367
368 AHardwareBuffer_Planes planes_info;
369
370 int32_t rawFence = fence != nullptr ? fence->get() : -1;
371 int result = AHardwareBuffer_lockPlanes(hwBuffer.get(),
372 AHARDWAREBUFFER_USAGE_CPU_READ_RARELY,
373 rawFence, nullptr, &planes_info);
374 if (result != OK) {
375 ALOGE("%s: Failed to lock planes for BLOB buffer: %d", __func__, result);
376 return cameraStatus(Status::INTERNAL_ERROR);
377 }
378
379 android_ycbcr ycbcr;
380 status_t status =
381 gBuffer->lockYCbCr(AHARDWAREBUFFER_USAGE_CPU_READ_OFTEN, &ycbcr);
382 ALOGV("Locked buffers");
383 if (status != NO_ERROR) {
384 AHardwareBuffer_unlock(hwBuffer.get(), nullptr);
385 ALOGE("%s: Failed to lock graphic buffer: %d", __func__, status);
386 return cameraStatus(Status::INTERNAL_ERROR);
387 }
388
389 bool success = compressJpeg(gBuffer->getWidth(), gBuffer->getHeight(), ycbcr,
390 bufferSize, planes_info.planes[0].data);
391
392 status_t res = gBuffer->unlock();
393 if (res != NO_ERROR) {
394 ALOGE("Failed to unlock graphic buffer: %d", res);
395 }
396 AHardwareBuffer_unlock(hwBuffer.get(), nullptr);
397 ALOGV("Unlocked buffers");
398 return success ? ndk::ScopedAStatus::ok()
399 : cameraStatus(Status::INTERNAL_ERROR);
400}
401
// Renders the input-surface frame into the (non-BLOB) image stream buffer
// identified by (streamId, bufferId) using EGL.
//
// Waits up to kAcquireFenceTimeout on the buffer's acquire fence before
// drawing. Returns ILLEGAL_ARGUMENT when no EGL framebuffer exists for the
// buffer, INTERNAL_ERROR on fence-wait failure, OK otherwise.
ndk::ScopedAStatus VirtualCameraRenderThread::renderIntoImageStreamBuffer(
    int streamId, int bufferId, sp<Fence> fence) {
  ALOGV("%s", __func__);

  const std::chrono::nanoseconds before =
      std::chrono::duration_cast<std::chrono::nanoseconds>(
          std::chrono::steady_clock::now().time_since_epoch());

  // Render test pattern using EGL.
  std::shared_ptr<EglFrameBuffer> framebuffer =
      mSessionContext.fetchOrCreateEglFramebuffer(
          mEglDisplayContext->getEglDisplay(), streamId, bufferId);
  if (framebuffer == nullptr) {
    ALOGE(
        "%s: Failed to get EGL framebuffer corresponding to buffer id "
        "%d for streamId %d",
        __func__, bufferId, streamId);
    return cameraStatus(Status::ILLEGAL_ARGUMENT);
  }

  // Wait for fence to clear.
  if (fence != nullptr && fence->isValid()) {
    status_t ret = fence->wait(kAcquireFenceTimeout.count());
    if (ret != 0) {
      // NOTE(review): any non-zero status is reported as a timeout here,
      // although Fence::wait can also fail for other reasons.
      ALOGE(
          "Timeout while waiting for the acquire fence for buffer %d"
          " for streamId %d",
          bufferId, streamId);
      return cameraStatus(Status::INTERNAL_ERROR);
    }
  }

  mEglDisplayContext->makeCurrent();
  framebuffer->beforeDraw();

  // updateTexture() latches the newest submitted frame; its return value is
  // fed to the texture program for drawing (presumably the GL texture -
  // confirm against EglSurfaceTexture).
  mEglTextureProgram->draw(mEglSurfaceTexture->updateTexture());
  framebuffer->afterDraw();

  const std::chrono::nanoseconds after =
      std::chrono::duration_cast<std::chrono::nanoseconds>(
          std::chrono::steady_clock::now().time_since_epoch());

  ALOGV("Rendering to buffer %d, stream %d took %lld ns", bufferId, streamId,
        after.count() - before.count());

  return ndk::ScopedAStatus::ok();
}
449
450} // namespace virtualcamera
451} // namespace companion
452} // namespace android