/*
 * Copyright (C) 2023 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

// #define LOG_NDEBUG 0
#define LOG_TAG "VirtualCameraSession"
#include "VirtualCameraSession.h"

#include <atomic>
#include <chrono>
#include <cstddef>
#include <cstdint>
#include <cstring>
#include <map>
#include <memory>
#include <mutex>
#include <optional>
#include <tuple>
#include <unordered_set>
#include <utility>
#include <vector>

#include "CameraMetadata.h"
#include "EGL/egl.h"
#include "VirtualCameraRenderThread.h"
#include "VirtualCameraStream.h"
#include "aidl/android/hardware/camera/common/Status.h"
#include "aidl/android/hardware/camera/device/BufferCache.h"
#include "aidl/android/hardware/camera/device/BufferStatus.h"
#include "aidl/android/hardware/camera/device/CaptureRequest.h"
#include "aidl/android/hardware/camera/device/HalStream.h"
#include "aidl/android/hardware/camera/device/NotifyMsg.h"
#include "aidl/android/hardware/camera/device/ShutterMsg.h"
#include "aidl/android/hardware/camera/device/StreamBuffer.h"
#include "aidl/android/hardware/camera/device/StreamConfiguration.h"
#include "aidl/android/hardware/camera/device/StreamRotation.h"
#include "aidl/android/hardware/graphics/common/BufferUsage.h"
#include "aidl/android/hardware/graphics/common/PixelFormat.h"
#include "android/hardware_buffer.h"
#include "android/native_window_aidl.h"
#include "fmq/AidlMessageQueue.h"
#include "system/camera_metadata.h"
#include "ui/GraphicBuffer.h"
#include "util/EglDisplayContext.h"
#include "util/EglFramebuffer.h"
#include "util/EglProgram.h"
#include "util/JpegUtil.h"
#include "util/MetadataBuilder.h"
#include "util/TestPatternHelper.h"
#include "util/Util.h"

namespace android {
namespace companion {
namespace virtualcamera {

using ::aidl::android::companion::virtualcamera::Format;
using ::aidl::android::companion::virtualcamera::IVirtualCameraCallback;
using ::aidl::android::hardware::camera::common::Status;
using ::aidl::android::hardware::camera::device::BufferCache;
using ::aidl::android::hardware::camera::device::CameraMetadata;
using ::aidl::android::hardware::camera::device::CameraOfflineSessionInfo;
using ::aidl::android::hardware::camera::device::CaptureRequest;
using ::aidl::android::hardware::camera::device::HalStream;
using ::aidl::android::hardware::camera::device::ICameraDeviceCallback;
using ::aidl::android::hardware::camera::device::ICameraOfflineSession;
using ::aidl::android::hardware::camera::device::RequestTemplate;
using ::aidl::android::hardware::camera::device::Stream;
using ::aidl::android::hardware::camera::device::StreamBuffer;
using ::aidl::android::hardware::camera::device::StreamConfiguration;
using ::aidl::android::hardware::camera::device::StreamRotation;
using ::aidl::android::hardware::common::fmq::MQDescriptor;
using ::aidl::android::hardware::common::fmq::SynchronizedReadWrite;
using ::aidl::android::hardware::graphics::common::BufferUsage;
using ::aidl::android::hardware::graphics::common::PixelFormat;
using ::android::base::unique_fd;

namespace {

using metadata_ptr =
    std::unique_ptr<camera_metadata_t, void (*)(camera_metadata_t*)>;

using namespace std::chrono_literals;

// Size of the request/result metadata fast message queue.
// Setting this to 0 always disables the FMQ.
static constexpr size_t kMetadataMsgQueueSize = 0;

// Maximum number of buffers to use per single stream.
static constexpr size_t kMaxStreamBuffers = 2;

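// Builds the default request settings for the given request template by
// mapping the template to the matching ANDROID_CONTROL_CAPTURE_INTENT value.
// Falls back to an empty CameraMetadata if the MetadataBuilder fails.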
CameraMetadata createDefaultRequestSettings(RequestTemplate type) {
  hardware::camera::common::V1_0::helper::CameraMetadata metadataHelper;

  camera_metadata_enum_android_control_capture_intent_t intent =
      ANDROID_CONTROL_CAPTURE_INTENT_PREVIEW;
  switch (type) {
    case RequestTemplate::PREVIEW:
      intent = ANDROID_CONTROL_CAPTURE_INTENT_PREVIEW;
      break;
    case RequestTemplate::STILL_CAPTURE:
      intent = ANDROID_CONTROL_CAPTURE_INTENT_STILL_CAPTURE;
      break;
    case RequestTemplate::VIDEO_RECORD:
      intent = ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_RECORD;
      break;
    case RequestTemplate::VIDEO_SNAPSHOT:
      intent = ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_SNAPSHOT;
      break;
    default:
      // Leave default.
      break;
  }

  auto metadata = MetadataBuilder().setControlCaptureIntent(intent).build();
  return (metadata != nullptr) ? std::move(*metadata) : CameraMetadata();
}

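// Converts a requested Stream into the HalStream reported back to the camera
// framework: caps the number of buffers, overrides IMPLEMENTATION_DEFINED
// formats and requests GPU_RENDER_TARGET producer usage.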
HalStream getHalStream(const Stream& stream) {
  HalStream halStream;
  halStream.id = stream.id;
  halStream.physicalCameraId = stream.physicalCameraId;
  halStream.maxBuffers = kMaxStreamBuffers;

  if (stream.format == PixelFormat::IMPLEMENTATION_DEFINED) {
    // If the format is implementation-defined, we need to override it
    // with the actual format.
    // TODO(b/301023410) Override with the format based on the
    // camera configuration, once we support more formats.
    halStream.overrideFormat = PixelFormat::YCBCR_420_888;
  } else {
    halStream.overrideFormat = stream.format;
  }
  halStream.overrideDataSpace = stream.dataSpace;

  halStream.producerUsage = BufferUsage::GPU_RENDER_TARGET;
  halStream.supportOffline = false;
  return halStream;
}

}  // namespace

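// A session is created for every opened virtual camera device. The constructor
// only prepares the request/result metadata queues; with kMetadataMsgQueueSize
// set to 0 the FMQ path stays disabled and metadata is exchanged through the
// regular AIDL calls instead.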
VirtualCameraSession::VirtualCameraSession(
    const std::string& cameraId,
    std::shared_ptr<ICameraDeviceCallback> cameraDeviceCallback,
    std::shared_ptr<IVirtualCameraCallback> virtualCameraClientCallback)
    : mCameraId(cameraId),
      mCameraDeviceCallback(cameraDeviceCallback),
      mVirtualCameraClientCallback(virtualCameraClientCallback) {
  mRequestMetadataQueue = std::make_unique<RequestMetadataQueue>(
      kMetadataMsgQueueSize, false /* non blocking */);
  if (!mRequestMetadataQueue->isValid()) {
    ALOGE("%s: invalid request fmq", __func__);
  }

  mResultMetadataQueue = std::make_shared<ResultMetadataQueue>(
      kMetadataMsgQueueSize, false /* non blocking */);
  if (!mResultMetadataQueue->isValid()) {
    ALOGE("%s: invalid result fmq", __func__);
  }
}

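// Tears down the session: lets the virtual camera client know its stream is
// closed and releases all streams tracked by the session context.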
ndk::ScopedAStatus VirtualCameraSession::close() {
  ALOGV("%s", __func__);

  if (mVirtualCameraClientCallback != nullptr) {
    mVirtualCameraClientCallback->onStreamClosed(/*streamId=*/0);
  }

  mSessionContext.closeAllStreams();
  return ndk::ScopedAStatus::ok();
}

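// Validates the requested stream configuration, initializes any new streams in
// the session context and lazily starts the render thread that feeds frames
// from the input surface into the configured output streams.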
ndk::ScopedAStatus VirtualCameraSession::configureStreams(
    const StreamConfiguration& in_requestedConfiguration,
    std::vector<HalStream>* _aidl_return) {
  ALOGV("%s: requestedConfiguration: %s", __func__,
        in_requestedConfiguration.toString().c_str());

  if (_aidl_return == nullptr) {
    return cameraStatus(Status::ILLEGAL_ARGUMENT);
  }

  if (in_requestedConfiguration.streams.empty()) {
    // Guard the unconditional streams[0] access below.
    ALOGE("%s: empty stream configuration", __func__);
    return cameraStatus(Status::ILLEGAL_ARGUMENT);
  }

  mSessionContext.removeStreamsNotInStreamConfiguration(
      in_requestedConfiguration);

  auto& streams = in_requestedConfiguration.streams;
  auto& halStreams = *_aidl_return;
  halStreams.clear();
  halStreams.resize(in_requestedConfiguration.streams.size());

  sp<Surface> inputSurface = nullptr;
  int inputWidth;
  int inputHeight;

  {
    std::lock_guard<std::mutex> lock(mLock);
    for (int i = 0; i < in_requestedConfiguration.streams.size(); ++i) {
      // TODO(b/301023410) Remove the hardcoded format checks and verify
      // against the camera configuration.
      if (streams[i].width != 640 || streams[i].height != 480 ||
          streams[i].rotation != StreamRotation::ROTATION_0 ||
          (streams[i].format != PixelFormat::IMPLEMENTATION_DEFINED &&
           streams[i].format != PixelFormat::YCBCR_420_888 &&
           streams[i].format != PixelFormat::BLOB)) {
        halStreams.clear();
        return cameraStatus(Status::ILLEGAL_ARGUMENT);
      }
      halStreams[i] = getHalStream(streams[i]);
      if (mSessionContext.initializeStream(streams[i])) {
        ALOGV("Configured new stream: %s", streams[i].toString().c_str());
      }
    }

    inputWidth = streams[0].width;
    inputHeight = streams[0].height;
    if (mRenderThread == nullptr) {
      // If there's no client callback, start the camera in test mode.
      const bool testMode = mVirtualCameraClientCallback == nullptr;
      mRenderThread = std::make_unique<VirtualCameraRenderThread>(
          mSessionContext, inputWidth, inputHeight, mCameraDeviceCallback,
          testMode);
      mRenderThread->start();
      inputSurface = mRenderThread->getInputSurface();
    }
  }

  if (mVirtualCameraClientCallback != nullptr && inputSurface != nullptr) {
    // TODO(b/301023410) Pass streamId based on the client input stream id once
    // support for multiple input streams is implemented. For now we always
    // create a single texture.
    mVirtualCameraClientCallback->onStreamConfigured(
        /*streamId=*/0, aidl::android::view::Surface(inputSurface.get()),
        inputWidth, inputHeight, Format::YUV_420_888);
  }

  mFirstRequest.store(true);
  return ndk::ScopedAStatus::ok();
}

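// Produces default request settings for the supported templates (PREVIEW,
// STILL_CAPTURE, VIDEO_RECORD); the remaining templates are rejected with
// ILLEGAL_ARGUMENT.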
ndk::ScopedAStatus VirtualCameraSession::constructDefaultRequestSettings(
    RequestTemplate in_type, CameraMetadata* _aidl_return) {
  ALOGV("%s: type %d", __func__, static_cast<int32_t>(in_type));

  switch (in_type) {
    case RequestTemplate::PREVIEW:
    case RequestTemplate::STILL_CAPTURE:
    case RequestTemplate::VIDEO_RECORD: {
      *_aidl_return = createDefaultRequestSettings(in_type);
      return ndk::ScopedAStatus::ok();
    }
    case RequestTemplate::VIDEO_SNAPSHOT:
    case RequestTemplate::MANUAL:
    case RequestTemplate::ZERO_SHUTTER_LAG:
      // VIDEO_SNAPSHOT, MANUAL and ZERO_SHUTTER_LAG templates are not
      // supported.
      return ndk::ScopedAStatus::fromServiceSpecificError(
          static_cast<int32_t>(Status::ILLEGAL_ARGUMENT));
    default:
      ALOGE("%s: unknown request template type %d", __FUNCTION__,
            static_cast<int>(in_type));
      return ndk::ScopedAStatus::fromServiceSpecificError(
          static_cast<int32_t>(Status::ILLEGAL_ARGUMENT));
  }
}

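// Flushes in-flight capture requests queued on the render thread.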
ndk::ScopedAStatus VirtualCameraSession::flush() {
  ALOGV("%s", __func__);
  std::lock_guard<std::mutex> lock(mLock);
  if (mRenderThread != nullptr) {
    mRenderThread->flush();
  }
  return ndk::ScopedAStatus::ok();
}

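// Descriptor getters for the request/result metadata fast message queues.
// Since the queues are created with size 0, the client is expected to fall
// back to sending metadata through the AIDL calls.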
ndk::ScopedAStatus VirtualCameraSession::getCaptureRequestMetadataQueue(
    MQDescriptor<int8_t, SynchronizedReadWrite>* _aidl_return) {
  ALOGV("%s", __func__);
  *_aidl_return = mRequestMetadataQueue->dupeDesc();
  return ndk::ScopedAStatus::ok();
}

ndk::ScopedAStatus VirtualCameraSession::getCaptureResultMetadataQueue(
    MQDescriptor<int8_t, SynchronizedReadWrite>* _aidl_return) {
  ALOGV("%s", __func__);
  *_aidl_return = mResultMetadataQueue->dupeDesc();
  return ndk::ScopedAStatus::ok();
}

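// Conservatively reports that any change in session parameters requires a
// stream reconfiguration.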
ndk::ScopedAStatus VirtualCameraSession::isReconfigurationRequired(
    const CameraMetadata& in_oldSessionParams,
    const CameraMetadata& in_newSessionParams, bool* _aidl_return) {
  ALOGV("%s: oldSessionParams: %s newSessionParams: %s", __func__,
        in_oldSessionParams.toString().c_str(),
        in_newSessionParams.toString().c_str());

  if (_aidl_return == nullptr) {
    return ndk::ScopedAStatus::fromServiceSpecificError(
        static_cast<int32_t>(Status::ILLEGAL_ARGUMENT));
  }

  *_aidl_return = true;
  return ndk::ScopedAStatus::ok();
}

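// Batch entry point for capture requests: drops released buffer caches first,
// then forwards every request to the single-request overload below.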
ndk::ScopedAStatus VirtualCameraSession::processCaptureRequest(
    const std::vector<CaptureRequest>& in_requests,
    const std::vector<BufferCache>& in_cachesToRemove, int32_t* _aidl_return) {
  ALOGV("%s", __func__);

  if (!in_cachesToRemove.empty()) {
    mSessionContext.removeBufferCaches(in_cachesToRemove);
  }

  for (const auto& captureRequest : in_requests) {
    auto status = processCaptureRequest(captureRequest);
    if (!status.isOk()) {
      return status;
    }
  }
  *_aidl_return = in_requests.size();
  return ndk::ScopedAStatus::ok();
}

ndk::ScopedAStatus VirtualCameraSession::signalStreamFlush(
    const std::vector<int32_t>& in_streamIds, int32_t in_streamConfigCounter) {
  ALOGV("%s", __func__);

  (void)in_streamIds;
  (void)in_streamConfigCounter;
  return ndk::ScopedAStatus::ok();
}

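// Offline processing is not supported by the virtual camera, so switching to
// an offline session always fails with OPERATION_NOT_SUPPORTED.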
ndk::ScopedAStatus VirtualCameraSession::switchToOffline(
    const std::vector<int32_t>& in_streamsToKeep,
    CameraOfflineSessionInfo* out_offlineSessionInfo,
    std::shared_ptr<ICameraOfflineSession>* _aidl_return) {
  ALOGV("%s", __func__);

  (void)in_streamsToKeep;
  (void)out_offlineSessionInfo;

  if (_aidl_return == nullptr) {
    return ndk::ScopedAStatus::fromServiceSpecificError(
        static_cast<int32_t>(Status::ILLEGAL_ARGUMENT));
  }

  *_aidl_return = nullptr;
  return cameraStatus(Status::OPERATION_NOT_SUPPORTED);
}

ndk::ScopedAStatus VirtualCameraSession::repeatingRequestEnd(
    int32_t in_frameNumber, const std::vector<int32_t>& in_streamIds) {
  ALOGV("%s", __func__);
  (void)in_frameNumber;
  (void)in_streamIds;
  return ndk::ScopedAStatus::ok();
}

std::set<int> VirtualCameraSession::getStreamIds() const {
  return mSessionContext.getStreamIds();
}

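// Handles a single capture request: imports the output buffers and their
// acquire fences into the session context, enqueues the frame on the render
// thread and notifies the virtual camera client that a frame was requested.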
ndk::ScopedAStatus VirtualCameraSession::processCaptureRequest(
    const CaptureRequest& request) {
  ALOGD("%s: request: %s", __func__, request.toString().c_str());

  if (mFirstRequest.exchange(false) && request.settings.metadata.empty()) {
    return cameraStatus(Status::ILLEGAL_ARGUMENT);
  }

  std::shared_ptr<ICameraDeviceCallback> cameraCallback = nullptr;
  {
    std::lock_guard<std::mutex> lock(mLock);
    cameraCallback = mCameraDeviceCallback;
  }

  if (cameraCallback == nullptr) {
    ALOGE(
        "%s: processCaptureRequest called, but there's no camera callback "
        "configured",
        __func__);
    return cameraStatus(Status::INTERNAL_ERROR);
  }

  if (!mSessionContext.importBuffersFromCaptureRequest(request)) {
    ALOGE("Failed to import buffers from capture request.");
    return cameraStatus(Status::INTERNAL_ERROR);
  }

  std::vector<CaptureRequestBuffer> taskBuffers;
  taskBuffers.reserve(request.outputBuffers.size());
  for (const StreamBuffer& streamBuffer : request.outputBuffers) {
    taskBuffers.emplace_back(streamBuffer.streamId, streamBuffer.bufferId,
                             importFence(streamBuffer.acquireFence));
  }

  {
    std::lock_guard<std::mutex> lock(mLock);
    if (mRenderThread == nullptr) {
      ALOGE(
          "%s: processCaptureRequest (frameNumber %d) called before configure "
          "(render thread not initialized)",
          __func__, request.frameNumber);
      return cameraStatus(Status::INTERNAL_ERROR);
    }
    mRenderThread->enqueueTask(std::make_unique<ProcessCaptureRequestTask>(
        request.frameNumber, taskBuffers));
  }

  if (mVirtualCameraClientCallback != nullptr) {
    auto status = mVirtualCameraClientCallback->onProcessCaptureRequest(
        /*streamId=*/0, request.frameNumber);
    if (!status.isOk()) {
      ALOGE(
          "Failed to invoke onProcessCaptureRequest client callback for frame "
          "%d",
          request.frameNumber);
    }
  }

  return ndk::ScopedAStatus::ok();
}

}  // namespace virtualcamera
}  // namespace companion
}  // namespace android