/*
 * Copyright (C) 2023 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

// #define LOG_NDEBUG 0
#define LOG_TAG "VirtualCameraSession"
#include "VirtualCameraSession.h"

#include <algorithm>
#include <atomic>
#include <chrono>
#include <cmath>
#include <cstddef>
#include <cstdint>
#include <cstring>
#include <map>
#include <memory>
#include <mutex>
#include <numeric>
#include <optional>
#include <tuple>
#include <unordered_set>
#include <utility>
#include <vector>

#include "CameraMetadata.h"
#include "EGL/egl.h"
#include "VirtualCameraDevice.h"
#include "VirtualCameraRenderThread.h"
#include "VirtualCameraStream.h"
#include "aidl/android/companion/virtualcamera/SupportedStreamConfiguration.h"
#include "aidl/android/hardware/camera/common/Status.h"
#include "aidl/android/hardware/camera/device/BufferCache.h"
#include "aidl/android/hardware/camera/device/BufferStatus.h"
#include "aidl/android/hardware/camera/device/CameraMetadata.h"
#include "aidl/android/hardware/camera/device/CaptureRequest.h"
#include "aidl/android/hardware/camera/device/HalStream.h"
#include "aidl/android/hardware/camera/device/NotifyMsg.h"
#include "aidl/android/hardware/camera/device/RequestTemplate.h"
#include "aidl/android/hardware/camera/device/ShutterMsg.h"
#include "aidl/android/hardware/camera/device/Stream.h"
#include "aidl/android/hardware/camera/device/StreamBuffer.h"
#include "aidl/android/hardware/camera/device/StreamConfiguration.h"
#include "aidl/android/hardware/camera/device/StreamRotation.h"
#include "aidl/android/hardware/graphics/common/BufferUsage.h"
#include "aidl/android/hardware/graphics/common/PixelFormat.h"
#include "android/hardware_buffer.h"
#include "android/native_window_aidl.h"
#include "fmq/AidlMessageQueue.h"
#include "system/camera_metadata.h"
#include "ui/GraphicBuffer.h"
#include "util/EglDisplayContext.h"
#include "util/EglFramebuffer.h"
#include "util/EglProgram.h"
#include "util/JpegUtil.h"
#include "util/MetadataUtil.h"
#include "util/Util.h"

namespace android {
namespace companion {
namespace virtualcamera {

using ::aidl::android::companion::virtualcamera::Format;
using ::aidl::android::companion::virtualcamera::IVirtualCameraCallback;
using ::aidl::android::companion::virtualcamera::SupportedStreamConfiguration;
using ::aidl::android::hardware::camera::common::Status;
using ::aidl::android::hardware::camera::device::BufferCache;
using ::aidl::android::hardware::camera::device::CameraMetadata;
using ::aidl::android::hardware::camera::device::CameraOfflineSessionInfo;
using ::aidl::android::hardware::camera::device::CaptureRequest;
using ::aidl::android::hardware::camera::device::HalStream;
using ::aidl::android::hardware::camera::device::ICameraDeviceCallback;
using ::aidl::android::hardware::camera::device::ICameraOfflineSession;
using ::aidl::android::hardware::camera::device::RequestTemplate;
using ::aidl::android::hardware::camera::device::Stream;
using ::aidl::android::hardware::camera::device::StreamBuffer;
using ::aidl::android::hardware::camera::device::StreamConfiguration;
using ::aidl::android::hardware::camera::device::StreamRotation;
using ::aidl::android::hardware::common::fmq::MQDescriptor;
using ::aidl::android::hardware::common::fmq::SynchronizedReadWrite;
using ::aidl::android::hardware::graphics::common::BufferUsage;
using ::aidl::android::hardware::graphics::common::PixelFormat;
using ::android::base::unique_fd;

namespace {

using metadata_ptr =
    std::unique_ptr<camera_metadata_t, void (*)(camera_metadata_t*)>;

using namespace std::chrono_literals;

// Size of request/result metadata fast message queue.
// Setting this to 0 always disables the FMQ.
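// With the FMQ disabled, request metadata always arrives inline in each
// CaptureRequest (processCaptureRequest below only ever reads
// request.settings).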
constexpr size_t kMetadataMsgQueueSize = 0;

// Maximum number of buffers to use per single stream.
constexpr size_t kMaxStreamBuffers = 2;

// Thumbnail size (0,0) corresponds to disabling the thumbnail.
const Resolution kDefaultJpegThumbnailSize(0, 0);

camera_metadata_enum_android_control_capture_intent_t requestTemplateToIntent(
    const RequestTemplate type) {
  switch (type) {
    case RequestTemplate::PREVIEW:
      return ANDROID_CONTROL_CAPTURE_INTENT_PREVIEW;
    case RequestTemplate::STILL_CAPTURE:
      return ANDROID_CONTROL_CAPTURE_INTENT_STILL_CAPTURE;
    case RequestTemplate::VIDEO_RECORD:
      return ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_RECORD;
    case RequestTemplate::VIDEO_SNAPSHOT:
      return ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_SNAPSHOT;
    default:
      // Return PREVIEW by default
      return ANDROID_CONTROL_CAPTURE_INTENT_PREVIEW;
  }
}

int getMaxFps(const std::vector<SupportedStreamConfiguration>& configs) {
  return std::transform_reduce(
      configs.begin(), configs.end(), 0,
      [](const int a, const int b) { return std::max(a, b); },
      [](const SupportedStreamConfiguration& config) { return config.maxFps; });
}
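// Example (hypothetical values): for input configs with maxFps of 30, 60 and
// 24, the transform step maps each config to its maxFps and the reduce step
// folds them with std::max, so getMaxFps() returns 60.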

CameraMetadata createDefaultRequestSettings(
    const RequestTemplate type,
    const std::vector<SupportedStreamConfiguration>& inputConfigs) {
  int maxFps = getMaxFps(inputConfigs);
  auto metadata =
      MetadataBuilder()
          .setAberrationCorrectionMode(
              ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF)
          .setControlCaptureIntent(requestTemplateToIntent(type))
          .setControlMode(ANDROID_CONTROL_MODE_AUTO)
          .setControlAeMode(ANDROID_CONTROL_AE_MODE_ON)
          .setControlAeExposureCompensation(0)
          .setControlAeTargetFpsRange(FpsRange{maxFps, maxFps})
          .setControlAeAntibandingMode(ANDROID_CONTROL_AE_ANTIBANDING_MODE_AUTO)
          .setControlAePrecaptureTrigger(
              ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER_IDLE)
          .setControlAfTrigger(ANDROID_CONTROL_AF_TRIGGER_IDLE)
          .setControlAfMode(ANDROID_CONTROL_AF_MODE_OFF)
          .setControlAwbMode(ANDROID_CONTROL_AWB_MODE_AUTO)
          .setControlEffectMode(ANDROID_CONTROL_EFFECT_MODE_OFF)
          .setFaceDetectMode(ANDROID_STATISTICS_FACE_DETECT_MODE_OFF)
          .setFlashMode(ANDROID_FLASH_MODE_OFF)
          .setFlashState(ANDROID_FLASH_STATE_UNAVAILABLE)
          .setJpegQuality(VirtualCameraDevice::kDefaultJpegQuality)
          .setJpegThumbnailQuality(VirtualCameraDevice::kDefaultJpegQuality)
          .setJpegThumbnailSize(0, 0)
          .setNoiseReductionMode(ANDROID_NOISE_REDUCTION_MODE_OFF)
          .build();
  if (metadata == nullptr) {
    ALOGE("%s: Failed to construct metadata for default request type %s",
          __func__, toString(type).c_str());
    return CameraMetadata();
  } else {
    ALOGV("%s: Successfully created metadata for request type %s", __func__,
          toString(type).c_str());
  }
  return *metadata;
}

HalStream getHalStream(const Stream& stream) {
  HalStream halStream;
  halStream.id = stream.id;
  halStream.physicalCameraId = stream.physicalCameraId;
  halStream.maxBuffers = kMaxStreamBuffers;

  if (stream.format == PixelFormat::IMPLEMENTATION_DEFINED) {
    // If the format is implementation-defined, we need to override
    // it with the actual format.
    // TODO(b/301023410) Override with the format based on the
    // camera configuration, once we support more formats.
    halStream.overrideFormat = PixelFormat::YCBCR_420_888;
  } else {
    halStream.overrideFormat = stream.format;
  }
  halStream.overrideDataSpace = stream.dataSpace;

  halStream.producerUsage = BufferUsage::GPU_RENDER_TARGET;
  halStream.supportOffline = false;
  return halStream;
}
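// Note: apart from the format override above, the requested stream is
// mirrored as-is; producerUsage is always GPU_RENDER_TARGET since the render
// thread renders into the output buffers, and offline processing is not
// supported.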

Stream getHighestResolutionStream(const std::vector<Stream>& streams) {
  return *(std::max_element(streams.begin(), streams.end(),
                            [](const Stream& a, const Stream& b) {
                              return a.width * a.height < b.width * b.height;
                            }));
}
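// Example (hypothetical values): for requested streams of 1920x1080 and
// 640x480 this returns the 1920x1080 stream, since streams are compared by
// pixel count.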

Resolution resolutionFromStream(const Stream& stream) {
  return Resolution(stream.width, stream.height);
}

Resolution resolutionFromInputConfig(
    const SupportedStreamConfiguration& inputConfig) {
  return Resolution(inputConfig.width, inputConfig.height);
}

std::optional<Resolution> resolutionFromSurface(const sp<Surface> surface) {
  Resolution res{0, 0};
  if (surface == nullptr) {
    ALOGE("%s: Cannot get resolution from null surface", __func__);
    return std::nullopt;
  }

  int status = surface->query(NATIVE_WINDOW_WIDTH, &res.width);
  if (status != NO_ERROR) {
    ALOGE("%s: Failed to get width from surface", __func__);
    return std::nullopt;
  }

  status = surface->query(NATIVE_WINDOW_HEIGHT, &res.height);
  if (status != NO_ERROR) {
    ALOGE("%s: Failed to get height from surface", __func__);
    return std::nullopt;
  }
  return res;
}
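// Used by configureStreams() to compare the current render thread's input
// surface against a newly picked input configuration; std::nullopt means the
// resolution could not be queried and the render thread gets recreated.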

std::optional<SupportedStreamConfiguration> pickInputConfigurationForStreams(
    const std::vector<Stream>& requestedStreams,
    const std::vector<SupportedStreamConfiguration>& supportedInputConfigs) {
  Stream maxResolutionStream = getHighestResolutionStream(requestedStreams);
  Resolution maxResolution = resolutionFromStream(maxResolutionStream);

  // Find the best fitting input configuration to satisfy all requested streams:
  // best fitting => same or higher resolution than the largest requested
  // stream, with the lowest pixel count difference and the same aspect ratio.
  auto isBetterInputConfig = [maxResolution](
                                 const SupportedStreamConfiguration& configA,
                                 const SupportedStreamConfiguration& configB) {
    int maxResPixelCount = maxResolution.width * maxResolution.height;
    int pixelCountDiffA =
        std::abs((configA.width * configA.height) - maxResPixelCount);
    int pixelCountDiffB =
        std::abs((configB.width * configB.height) - maxResPixelCount);

    return pixelCountDiffA < pixelCountDiffB;
  };

  std::optional<SupportedStreamConfiguration> bestConfig;
  for (const SupportedStreamConfiguration& inputConfig : supportedInputConfigs) {
    Resolution inputConfigResolution = resolutionFromInputConfig(inputConfig);
    if (inputConfigResolution < maxResolution ||
        !isApproximatellySameAspectRatio(inputConfigResolution, maxResolution)) {
      // We don't want to upscale from a lower resolution or use a different
      // aspect ratio, so skip this config.
      continue;
    }

    if (!bestConfig.has_value() ||
        isBetterInputConfig(inputConfig, bestConfig.value())) {
      bestConfig = inputConfig;
    }
  }

  return bestConfig;
}
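// Example (hypothetical values): with requested streams of 640x360 and
// 1280x720 and supported input configs of 640x360, 1280x720 and 1920x1080,
// the 1280x720 config is picked: 640x360 would require upscaling, and
// 1920x1080 has a larger pixel-count difference from the largest request.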

RequestSettings createSettingsFromMetadata(const CameraMetadata& metadata) {
  return RequestSettings{
      .jpegQuality = getJpegQuality(metadata).value_or(
          VirtualCameraDevice::kDefaultJpegQuality),
      .jpegOrientation = getJpegOrientation(metadata),
      .thumbnailResolution =
          getJpegThumbnailSize(metadata).value_or(Resolution(0, 0)),
      .thumbnailJpegQuality = getJpegThumbnailQuality(metadata).value_or(
          VirtualCameraDevice::kDefaultJpegQuality),
      .fpsRange = getFpsRange(metadata),
      .captureIntent = getCaptureIntent(metadata).value_or(
          ANDROID_CONTROL_CAPTURE_INTENT_PREVIEW),
      .gpsCoordinates = getGpsCoordinates(metadata),
      .aePrecaptureTrigger = getPrecaptureTrigger(metadata)};
}
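// Keys missing from the request metadata fall back to defaults here (e.g.
// kDefaultJpegQuality, a disabled thumbnail, the PREVIEW capture intent),
// while fields such as jpegOrientation, fpsRange and gpsCoordinates are
// passed through as returned by the metadata getters.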

}  // namespace

VirtualCameraSession::VirtualCameraSession(
    std::shared_ptr<VirtualCameraDevice> cameraDevice,
    std::shared_ptr<ICameraDeviceCallback> cameraDeviceCallback,
    std::shared_ptr<IVirtualCameraCallback> virtualCameraClientCallback)
    : mCameraDevice(cameraDevice),
      mCameraDeviceCallback(cameraDeviceCallback),
      mVirtualCameraClientCallback(virtualCameraClientCallback) {
  mRequestMetadataQueue = std::make_unique<RequestMetadataQueue>(
      kMetadataMsgQueueSize, false /* non blocking */);
  if (!mRequestMetadataQueue->isValid()) {
    ALOGE("%s: invalid request fmq", __func__);
  }

  mResultMetadataQueue = std::make_shared<ResultMetadataQueue>(
      kMetadataMsgQueueSize, false /* non blocking */);
  if (!mResultMetadataQueue->isValid()) {
    ALOGE("%s: invalid result fmq", __func__);
  }
}

ndk::ScopedAStatus VirtualCameraSession::close() {
  ALOGV("%s", __func__);
  {
    std::lock_guard<std::mutex> lock(mLock);

    if (mVirtualCameraClientCallback != nullptr) {
      mVirtualCameraClientCallback->onStreamClosed(mCurrentInputStreamId);
    }

    if (mRenderThread != nullptr) {
      mRenderThread->stop();
      mRenderThread = nullptr;
    }
  }

  mSessionContext.closeAllStreams();
  return ndk::ScopedAStatus::ok();
}

ndk::ScopedAStatus VirtualCameraSession::configureStreams(
    const StreamConfiguration& in_requestedConfiguration,
    std::vector<HalStream>* _aidl_return) {
  ALOGV("%s: requestedConfiguration: %s", __func__,
        in_requestedConfiguration.toString().c_str());

  if (_aidl_return == nullptr) {
    return cameraStatus(Status::ILLEGAL_ARGUMENT);
  }

  std::shared_ptr<VirtualCameraDevice> virtualCamera = mCameraDevice.lock();
  if (virtualCamera == nullptr) {
    ALOGW("%s: configure called on already unregistered camera", __func__);
    return cameraStatus(Status::CAMERA_DISCONNECTED);
  }

  mSessionContext.removeStreamsNotInStreamConfiguration(
      in_requestedConfiguration);

  auto& streams = in_requestedConfiguration.streams;
  auto& halStreams = *_aidl_return;
  halStreams.clear();
  halStreams.resize(in_requestedConfiguration.streams.size());

  if (!virtualCamera->isStreamCombinationSupported(in_requestedConfiguration)) {
    ALOGE("%s: Requested stream configuration is not supported", __func__);
    return cameraStatus(Status::ILLEGAL_ARGUMENT);
  }

  sp<Surface> inputSurface = nullptr;
  int inputStreamId = -1;
  std::optional<SupportedStreamConfiguration> inputConfig;
  {
    std::lock_guard<std::mutex> lock(mLock);
    for (int i = 0; i < in_requestedConfiguration.streams.size(); ++i) {
      halStreams[i] = getHalStream(streams[i]);
      if (mSessionContext.initializeStream(streams[i])) {
        ALOGV("Configured new stream: %s", streams[i].toString().c_str());
      }
    }

    inputConfig = pickInputConfigurationForStreams(
        streams, virtualCamera->getInputConfigs());
    if (!inputConfig.has_value()) {
      ALOGE(
          "%s: Failed to pick any input configuration for stream configuration "
          "request: %s",
          __func__, in_requestedConfiguration.toString().c_str());
      return cameraStatus(Status::ILLEGAL_ARGUMENT);
    }

    if (mRenderThread != nullptr) {
      // If there's already a render thread, this is not the first
      // configuration call. If the surface has the same resolution and pixel
      // format as the picked config, we don't need to do anything; the current
      // render thread can serve the new set of configurations. However, if it
      // differs, we need to discard the current surface and reinitialize the
      // render thread.

      std::optional<Resolution> currentInputResolution =
          resolutionFromSurface(mRenderThread->getInputSurface());
      if (currentInputResolution.has_value() &&
          *currentInputResolution == resolutionFromInputConfig(*inputConfig)) {
        ALOGI(
            "%s: Newly configured set of streams matches existing client "
            "surface (%dx%d)",
            __func__, currentInputResolution->width,
            currentInputResolution->height);
        return ndk::ScopedAStatus::ok();
      }

      if (mVirtualCameraClientCallback != nullptr) {
        mVirtualCameraClientCallback->onStreamClosed(mCurrentInputStreamId);
      }

      if (currentInputResolution.has_value()) {
        ALOGV(
            "%s: Newly requested output streams are not suitable for "
            "pre-existing surface (%dx%d), creating new surface (%dx%d)",
            __func__, currentInputResolution->width,
            currentInputResolution->height, inputConfig->width,
            inputConfig->height);
      }

      mRenderThread->flush();
      mRenderThread->stop();
    }

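    // First configuration call, or the previous surface no longer fits:
    // (re)create the render thread for the picked input resolution. The input
    // surface it owns is reported to the client callback below.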
    mRenderThread = std::make_unique<VirtualCameraRenderThread>(
        mSessionContext, resolutionFromInputConfig(*inputConfig),
        virtualCamera->getMaxInputResolution(), mCameraDeviceCallback);
    mRenderThread->start();
    inputSurface = mRenderThread->getInputSurface();
    inputStreamId = mCurrentInputStreamId =
        virtualCamera->allocateInputStreamId();
  }

  if (mVirtualCameraClientCallback != nullptr && inputSurface != nullptr) {
    // TODO(b/301023410) Pass streamId based on client input stream id once
    // support for multiple input streams is implemented. For now we always
    // create a single texture.
    mVirtualCameraClientCallback->onStreamConfigured(
        inputStreamId, aidl::android::view::Surface(inputSurface.get()),
        inputConfig->width, inputConfig->height, inputConfig->pixelFormat);
  }

  return ndk::ScopedAStatus::ok();
}

ndk::ScopedAStatus VirtualCameraSession::constructDefaultRequestSettings(
    RequestTemplate in_type, CameraMetadata* _aidl_return) {
  ALOGV("%s: type %d", __func__, static_cast<int32_t>(in_type));

  std::shared_ptr<VirtualCameraDevice> camera = mCameraDevice.lock();
  if (camera == nullptr) {
    ALOGW(
        "%s: constructDefaultRequestSettings called on already unregistered "
        "camera",
        __func__);
    return cameraStatus(Status::CAMERA_DISCONNECTED);
  }

  switch (in_type) {
    case RequestTemplate::PREVIEW:
    case RequestTemplate::STILL_CAPTURE:
    case RequestTemplate::VIDEO_RECORD:
    case RequestTemplate::VIDEO_SNAPSHOT: {
      *_aidl_return =
          createDefaultRequestSettings(in_type, camera->getInputConfigs());
      return ndk::ScopedAStatus::ok();
    }
    case RequestTemplate::MANUAL:
    case RequestTemplate::ZERO_SHUTTER_LAG:
      // Don't support MANUAL, ZSL templates
      return ndk::ScopedAStatus::fromServiceSpecificError(
          static_cast<int32_t>(Status::ILLEGAL_ARGUMENT));
    default:
      ALOGE("%s: unknown request template type %d", __FUNCTION__,
            static_cast<int>(in_type));
      return ndk::ScopedAStatus::fromServiceSpecificError(
          static_cast<int32_t>(Status::ILLEGAL_ARGUMENT));
  }
}

ndk::ScopedAStatus VirtualCameraSession::flush() {
  ALOGV("%s", __func__);
  std::lock_guard<std::mutex> lock(mLock);
  if (mRenderThread != nullptr) {
    mRenderThread->flush();
  }
  return ndk::ScopedAStatus::ok();
}

ndk::ScopedAStatus VirtualCameraSession::getCaptureRequestMetadataQueue(
    MQDescriptor<int8_t, SynchronizedReadWrite>* _aidl_return) {
  ALOGV("%s", __func__);
  *_aidl_return = mRequestMetadataQueue->dupeDesc();
  return ndk::ScopedAStatus::ok();
}

ndk::ScopedAStatus VirtualCameraSession::getCaptureResultMetadataQueue(
    MQDescriptor<int8_t, SynchronizedReadWrite>* _aidl_return) {
  ALOGV("%s", __func__);
  *_aidl_return = mResultMetadataQueue->dupeDesc();
  return ndk::ScopedAStatus::ok();
}

ndk::ScopedAStatus VirtualCameraSession::isReconfigurationRequired(
    const CameraMetadata& in_oldSessionParams,
    const CameraMetadata& in_newSessionParams, bool* _aidl_return) {
  ALOGV("%s: oldSessionParams: %s newSessionParams: %s", __func__,
        in_oldSessionParams.toString().c_str(),
        in_newSessionParams.toString().c_str());

  if (_aidl_return == nullptr) {
    return ndk::ScopedAStatus::fromServiceSpecificError(
        static_cast<int32_t>(Status::ILLEGAL_ARGUMENT));
  }

  *_aidl_return = true;
  return ndk::ScopedAStatus::ok();
}

ndk::ScopedAStatus VirtualCameraSession::processCaptureRequest(
    const std::vector<CaptureRequest>& in_requests,
    const std::vector<BufferCache>& in_cachesToRemove, int32_t* _aidl_return) {
  ALOGV("%s", __func__);

  if (!in_cachesToRemove.empty()) {
    mSessionContext.removeBufferCaches(in_cachesToRemove);
  }

  for (const auto& captureRequest : in_requests) {
    auto status = processCaptureRequest(captureRequest);
    if (!status.isOk()) {
      return status;
    }
  }
  *_aidl_return = in_requests.size();
  return ndk::ScopedAStatus::ok();
}

ndk::ScopedAStatus VirtualCameraSession::signalStreamFlush(
    const std::vector<int32_t>& in_streamIds, int32_t in_streamConfigCounter) {
  ALOGV("%s", __func__);

  (void)in_streamIds;
  (void)in_streamConfigCounter;
  return ndk::ScopedAStatus::ok();
}

ndk::ScopedAStatus VirtualCameraSession::switchToOffline(
    const std::vector<int32_t>& in_streamsToKeep,
    CameraOfflineSessionInfo* out_offlineSessionInfo,
    std::shared_ptr<ICameraOfflineSession>* _aidl_return) {
  ALOGV("%s", __func__);

  (void)in_streamsToKeep;
  (void)out_offlineSessionInfo;

  if (_aidl_return == nullptr) {
    return ndk::ScopedAStatus::fromServiceSpecificError(
        static_cast<int32_t>(Status::ILLEGAL_ARGUMENT));
  }

  *_aidl_return = nullptr;
  return cameraStatus(Status::OPERATION_NOT_SUPPORTED);
}

ndk::ScopedAStatus VirtualCameraSession::repeatingRequestEnd(
    int32_t in_frameNumber, const std::vector<int32_t>& in_streamIds) {
  ALOGV("%s", __func__);
  (void)in_frameNumber;
  (void)in_streamIds;
  return ndk::ScopedAStatus::ok();
}

std::set<int> VirtualCameraSession::getStreamIds() const {
  return mSessionContext.getStreamIds();
}

ndk::ScopedAStatus VirtualCameraSession::processCaptureRequest(
    const CaptureRequest& request) {
  ALOGV("%s: request: %s", __func__, request.toString().c_str());

  std::shared_ptr<ICameraDeviceCallback> cameraCallback = nullptr;
  RequestSettings requestSettings;
  int currentInputStreamId;
  {
    std::lock_guard<std::mutex> lock(mLock);

    // If the metadata is empty, the last received metadata applies; if it's
    // non-empty, update it.
    if (!request.settings.metadata.empty()) {
      mCurrentRequestMetadata = request.settings;
    }

    // We don't have any metadata for this request - this means we received
    // none in the first request, which is an error state.
    if (mCurrentRequestMetadata.metadata.empty()) {
      return cameraStatus(Status::ILLEGAL_ARGUMENT);
    }

    requestSettings = createSettingsFromMetadata(mCurrentRequestMetadata);

    cameraCallback = mCameraDeviceCallback;
    currentInputStreamId = mCurrentInputStreamId;
  }

  if (cameraCallback == nullptr) {
    ALOGE(
        "%s: processCaptureRequest called, but there's no camera callback "
        "configured",
        __func__);
    return cameraStatus(Status::INTERNAL_ERROR);
  }

  if (!mSessionContext.importBuffersFromCaptureRequest(request)) {
    ALOGE("Failed to import buffers from capture request.");
    return cameraStatus(Status::INTERNAL_ERROR);
  }

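  // Wrap each requested output buffer into a task buffer referencing it by
  // (streamId, bufferId); the underlying buffers were imported into the
  // session context above and the acquire fences are duplicated here.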
  std::vector<CaptureRequestBuffer> taskBuffers;
  taskBuffers.reserve(request.outputBuffers.size());
  for (const StreamBuffer& streamBuffer : request.outputBuffers) {
    taskBuffers.emplace_back(streamBuffer.streamId, streamBuffer.bufferId,
                             importFence(streamBuffer.acquireFence));
  }

  {
    std::lock_guard<std::mutex> lock(mLock);
    if (mRenderThread == nullptr) {
      ALOGE(
          "%s: processCaptureRequest (frameNumber %d) called before configure "
          "(render thread not initialized)",
          __func__, request.frameNumber);
      return cameraStatus(Status::INTERNAL_ERROR);
    }
    mRenderThread->enqueueTask(std::make_unique<ProcessCaptureRequestTask>(
        request.frameNumber, taskBuffers, requestSettings));
  }

  if (mVirtualCameraClientCallback != nullptr) {
    auto status = mVirtualCameraClientCallback->onProcessCaptureRequest(
        currentInputStreamId, request.frameNumber);
    if (!status.isOk()) {
      ALOGE(
          "Failed to invoke onProcessCaptureRequest client callback for frame "
          "%d",
          request.frameNumber);
    }
  }

  return ndk::ScopedAStatus::ok();
}

}  // namespace virtualcamera
}  // namespace companion
}  // namespace android