/*
 * Copyright (C) 2023 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

// #define LOG_NDEBUG 0
#define LOG_TAG "VirtualCameraSession"
#include "VirtualCameraSession.h"

#include <algorithm>
#include <atomic>
#include <chrono>
#include <cmath>
#include <cstddef>
#include <cstdint>
#include <cstring>
#include <map>
#include <memory>
#include <mutex>
#include <numeric>
#include <optional>
#include <tuple>
#include <unordered_set>
#include <utility>
#include <vector>

#include "CameraMetadata.h"
#include "EGL/egl.h"
#include "VirtualCameraDevice.h"
#include "VirtualCameraRenderThread.h"
#include "VirtualCameraStream.h"
#include "aidl/android/companion/virtualcamera/SupportedStreamConfiguration.h"
#include "aidl/android/hardware/camera/common/Status.h"
#include "aidl/android/hardware/camera/device/BufferCache.h"
#include "aidl/android/hardware/camera/device/BufferStatus.h"
#include "aidl/android/hardware/camera/device/CameraMetadata.h"
#include "aidl/android/hardware/camera/device/CaptureRequest.h"
#include "aidl/android/hardware/camera/device/HalStream.h"
#include "aidl/android/hardware/camera/device/NotifyMsg.h"
#include "aidl/android/hardware/camera/device/RequestTemplate.h"
#include "aidl/android/hardware/camera/device/ShutterMsg.h"
#include "aidl/android/hardware/camera/device/Stream.h"
#include "aidl/android/hardware/camera/device/StreamBuffer.h"
#include "aidl/android/hardware/camera/device/StreamConfiguration.h"
#include "aidl/android/hardware/camera/device/StreamRotation.h"
#include "aidl/android/hardware/graphics/common/BufferUsage.h"
#include "aidl/android/hardware/graphics/common/PixelFormat.h"
#include "android/hardware_buffer.h"
#include "android/native_window_aidl.h"
#include "fmq/AidlMessageQueue.h"
#include "system/camera_metadata.h"
#include "ui/GraphicBuffer.h"
#include "util/EglDisplayContext.h"
#include "util/EglFramebuffer.h"
#include "util/EglProgram.h"
#include "util/JpegUtil.h"
#include "util/MetadataUtil.h"
#include "util/Util.h"

namespace android {
namespace companion {
namespace virtualcamera {

using ::aidl::android::companion::virtualcamera::IVirtualCameraCallback;
using ::aidl::android::companion::virtualcamera::SupportedStreamConfiguration;
using ::aidl::android::hardware::camera::common::Status;
using ::aidl::android::hardware::camera::device::BufferCache;
using ::aidl::android::hardware::camera::device::CameraMetadata;
using ::aidl::android::hardware::camera::device::CameraOfflineSessionInfo;
using ::aidl::android::hardware::camera::device::CaptureRequest;
using ::aidl::android::hardware::camera::device::HalStream;
using ::aidl::android::hardware::camera::device::ICameraDeviceCallback;
using ::aidl::android::hardware::camera::device::ICameraOfflineSession;
using ::aidl::android::hardware::camera::device::RequestTemplate;
using ::aidl::android::hardware::camera::device::Stream;
using ::aidl::android::hardware::camera::device::StreamBuffer;
using ::aidl::android::hardware::camera::device::StreamConfiguration;
using ::aidl::android::hardware::common::fmq::MQDescriptor;
using ::aidl::android::hardware::common::fmq::SynchronizedReadWrite;
using ::aidl::android::hardware::graphics::common::BufferUsage;
using ::aidl::android::hardware::graphics::common::PixelFormat;
using ::android::base::unique_fd;

namespace {

using metadata_ptr =
    std::unique_ptr<camera_metadata_t, void (*)(camera_metadata_t*)>;

using namespace std::chrono_literals;

// Size of the request/result metadata fast message queue.
// Setting this to 0 always disables the FMQ.
constexpr size_t kMetadataMsgQueueSize = 0;

// Maximum number of buffers to use per single stream.
constexpr size_t kMaxStreamBuffers = 2;

// Thumbnail size (0,0) corresponds to disabling the thumbnail.
const Resolution kDefaultJpegThumbnailSize(0, 0);

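// Maps a capture request template to the corresponding capture-intent
// metadata value, falling back to PREVIEW.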
camera_metadata_enum_android_control_capture_intent_t requestTemplateToIntent(
    const RequestTemplate type) {
  switch (type) {
    case RequestTemplate::PREVIEW:
      return ANDROID_CONTROL_CAPTURE_INTENT_PREVIEW;
    case RequestTemplate::STILL_CAPTURE:
      return ANDROID_CONTROL_CAPTURE_INTENT_STILL_CAPTURE;
    case RequestTemplate::VIDEO_RECORD:
      return ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_RECORD;
    case RequestTemplate::VIDEO_SNAPSHOT:
      return ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_SNAPSHOT;
    default:
      // Return PREVIEW by default.
      return ANDROID_CONTROL_CAPTURE_INTENT_PREVIEW;
  }
}

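// Returns the highest maxFps among the supported input configurations
// (0 if the list is empty).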
int getMaxFps(const std::vector<SupportedStreamConfiguration>& configs) {
  return std::transform_reduce(
      configs.begin(), configs.end(), 0,
      [](const int a, const int b) { return std::max(a, b); },
      [](const SupportedStreamConfiguration& config) { return config.maxFps; });
}

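// Builds default request metadata for the given template; the AE target FPS
// range is pinned to the highest maxFps among the input configurations.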
CameraMetadata createDefaultRequestSettings(
    const RequestTemplate type,
    const std::vector<SupportedStreamConfiguration>& inputConfigs) {
  int maxFps = getMaxFps(inputConfigs);
  auto metadata =
      MetadataBuilder()
          .setAberrationCorrectionMode(
              ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF)
          .setControlCaptureIntent(requestTemplateToIntent(type))
          .setControlMode(ANDROID_CONTROL_MODE_AUTO)
          .setControlAeMode(ANDROID_CONTROL_AE_MODE_ON)
          .setControlAeExposureCompensation(0)
          .setControlAeTargetFpsRange(FpsRange{maxFps, maxFps})
          .setControlAeAntibandingMode(ANDROID_CONTROL_AE_ANTIBANDING_MODE_AUTO)
          .setControlAePrecaptureTrigger(
              ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER_IDLE)
          .setControlAfTrigger(ANDROID_CONTROL_AF_TRIGGER_IDLE)
          .setControlAfMode(ANDROID_CONTROL_AF_MODE_OFF)
          .setControlAwbMode(ANDROID_CONTROL_AWB_MODE_AUTO)
          .setControlEffectMode(ANDROID_CONTROL_EFFECT_MODE_OFF)
          .setFaceDetectMode(ANDROID_STATISTICS_FACE_DETECT_MODE_OFF)
          .setFlashMode(ANDROID_FLASH_MODE_OFF)
          .setFlashState(ANDROID_FLASH_STATE_UNAVAILABLE)
          .setJpegQuality(VirtualCameraDevice::kDefaultJpegQuality)
          .setJpegThumbnailQuality(VirtualCameraDevice::kDefaultJpegQuality)
          .setJpegThumbnailSize(0, 0)
          .setNoiseReductionMode(ANDROID_NOISE_REDUCTION_MODE_OFF)
          .build();
  if (metadata == nullptr) {
    ALOGE("%s: Failed to construct metadata for default request type %s",
          __func__, toString(type).c_str());
    return CameraMetadata();
  } else {
    ALOGV("%s: Successfully created metadata for request type %s", __func__,
          toString(type).c_str());
  }
  return *metadata;
}

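// Builds the HAL stream descriptor for a requested stream; implementation
// defined pixel formats are overridden with YCBCR_420_888.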
HalStream getHalStream(const Stream& stream) {
  HalStream halStream;
  halStream.id = stream.id;
  halStream.physicalCameraId = stream.physicalCameraId;
  halStream.maxBuffers = kMaxStreamBuffers;

  if (stream.format == PixelFormat::IMPLEMENTATION_DEFINED) {
    // If the format is implementation-defined, we need to override it
    // with an actual format.
    // TODO(b/301023410) Override with the format based on the
    // camera configuration, once we support more formats.
    halStream.overrideFormat = PixelFormat::YCBCR_420_888;
  } else {
    halStream.overrideFormat = stream.format;
  }
  halStream.overrideDataSpace = stream.dataSpace;

  halStream.producerUsage = BufferUsage::GPU_RENDER_TARGET;
  halStream.supportOffline = false;
  return halStream;
}

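// Returns the requested stream with the largest pixel count.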
Stream getHighestResolutionStream(const std::vector<Stream>& streams) {
  return *(std::max_element(streams.begin(), streams.end(),
                            [](const Stream& a, const Stream& b) {
                              return a.width * a.height < b.width * b.height;
                            }));
}

Resolution resolutionFromStream(const Stream& stream) {
  return Resolution(stream.width, stream.height);
}

Resolution resolutionFromInputConfig(
    const SupportedStreamConfiguration& inputConfig) {
  return Resolution(inputConfig.width, inputConfig.height);
}

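// Queries the surface for its current width and height; returns std::nullopt
// if the surface is null or the query fails.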
std::optional<Resolution> resolutionFromSurface(const sp<Surface> surface) {
  Resolution res{0, 0};
  if (surface == nullptr) {
    ALOGE("%s: Cannot get resolution from null surface", __func__);
    return std::nullopt;
  }

  int status = surface->query(NATIVE_WINDOW_WIDTH, &res.width);
  if (status != NO_ERROR) {
    ALOGE("%s: Failed to get width from surface", __func__);
    return std::nullopt;
  }

  status = surface->query(NATIVE_WINDOW_HEIGHT, &res.height);
  if (status != NO_ERROR) {
    ALOGE("%s: Failed to get height from surface", __func__);
    return std::nullopt;
  }
  return res;
}

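// Picks the supported input configuration that best matches the
// highest-resolution requested stream, using the criteria described below.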
std::optional<SupportedStreamConfiguration> pickInputConfigurationForStreams(
    const std::vector<Stream>& requestedStreams,
    const std::vector<SupportedStreamConfiguration>& supportedInputConfigs) {
  Stream maxResolutionStream = getHighestResolutionStream(requestedStreams);
  Resolution maxResolution = resolutionFromStream(maxResolutionStream);

  // Find the best-fitting input configuration to satisfy all requested
  // streams: best fitting => same or higher resolution than the largest
  // requested stream, same aspect ratio, and the lowest pixel-count difference.
  auto isBetterInputConfig = [maxResolution](
                                 const SupportedStreamConfiguration& configA,
                                 const SupportedStreamConfiguration& configB) {
    int maxResPixelCount = maxResolution.width * maxResolution.height;
    int pixelCountDiffA =
        std::abs((configA.width * configA.height) - maxResPixelCount);
    int pixelCountDiffB =
        std::abs((configB.width * configB.height) - maxResPixelCount);

    return pixelCountDiffA < pixelCountDiffB;
  };

  std::optional<SupportedStreamConfiguration> bestConfig;
  for (const SupportedStreamConfiguration& inputConfig : supportedInputConfigs) {
    Resolution inputConfigResolution = resolutionFromInputConfig(inputConfig);
    if (inputConfigResolution < maxResolution ||
        !isApproximatellySameAspectRatio(inputConfigResolution, maxResolution)) {
      // We don't want to upscale from a lower resolution or use a different
      // aspect ratio, so skip this configuration.
      continue;
    }

    if (!bestConfig.has_value() ||
        isBetterInputConfig(inputConfig, bestConfig.value())) {
      bestConfig = inputConfig;
    }
  }

  return bestConfig;
}

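// Extracts the per-request settings consumed by the render thread from the
// request metadata, substituting defaults for absent JPEG quality, thumbnail
// and capture-intent tags.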
RequestSettings createSettingsFromMetadata(const CameraMetadata& metadata) {
  return RequestSettings{
      .jpegQuality = getJpegQuality(metadata).value_or(
          VirtualCameraDevice::kDefaultJpegQuality),
      .jpegOrientation = getJpegOrientation(metadata),
      .thumbnailResolution =
          getJpegThumbnailSize(metadata).value_or(Resolution(0, 0)),
      .thumbnailJpegQuality = getJpegThumbnailQuality(metadata).value_or(
          VirtualCameraDevice::kDefaultJpegQuality),
      .fpsRange = getFpsRange(metadata),
      .captureIntent = getCaptureIntent(metadata).value_or(
          ANDROID_CONTROL_CAPTURE_INTENT_PREVIEW),
      .gpsCoordinates = getGpsCoordinates(metadata),
      .aePrecaptureTrigger = getPrecaptureTrigger(metadata)};
}

}  // namespace

VirtualCameraSession::VirtualCameraSession(
    std::shared_ptr<VirtualCameraDevice> cameraDevice,
    std::shared_ptr<ICameraDeviceCallback> cameraDeviceCallback,
    std::shared_ptr<IVirtualCameraCallback> virtualCameraClientCallback)
    : mCameraDevice(cameraDevice),
      mCameraDeviceCallback(cameraDeviceCallback),
      mVirtualCameraClientCallback(virtualCameraClientCallback) {
  mRequestMetadataQueue = std::make_unique<RequestMetadataQueue>(
      kMetadataMsgQueueSize, false /* non blocking */);
  if (!mRequestMetadataQueue->isValid()) {
    ALOGE("%s: invalid request fmq", __func__);
  }

  mResultMetadataQueue = std::make_shared<ResultMetadataQueue>(
      kMetadataMsgQueueSize, false /* non blocking */);
  if (!mResultMetadataQueue->isValid()) {
    ALOGE("%s: invalid result fmq", __func__);
  }
}

ndk::ScopedAStatus VirtualCameraSession::close() {
  ALOGV("%s", __func__);
  {
    std::lock_guard<std::mutex> lock(mLock);

    if (mVirtualCameraClientCallback != nullptr) {
      mVirtualCameraClientCallback->onStreamClosed(mCurrentInputStreamId);
    }

    if (mRenderThread != nullptr) {
      mRenderThread->stop();
      mRenderThread = nullptr;
    }
  }

  mSessionContext.closeAllStreams();
  return ndk::ScopedAStatus::ok();
}

ndk::ScopedAStatus VirtualCameraSession::configureStreams(
    const StreamConfiguration& in_requestedConfiguration,
    std::vector<HalStream>* _aidl_return) {
  ALOGV("%s: requestedConfiguration: %s", __func__,
        in_requestedConfiguration.toString().c_str());

  if (_aidl_return == nullptr) {
    return cameraStatus(Status::ILLEGAL_ARGUMENT);
  }

  std::shared_ptr<VirtualCameraDevice> virtualCamera = mCameraDevice.lock();
  if (virtualCamera == nullptr) {
    ALOGW("%s: configure called on already unregistered camera", __func__);
    return cameraStatus(Status::CAMERA_DISCONNECTED);
  }

  mSessionContext.removeStreamsNotInStreamConfiguration(
      in_requestedConfiguration);

  auto& streams = in_requestedConfiguration.streams;
  auto& halStreams = *_aidl_return;
  halStreams.clear();
  halStreams.resize(in_requestedConfiguration.streams.size());

  if (!virtualCamera->isStreamCombinationSupported(in_requestedConfiguration)) {
    ALOGE("%s: Requested stream configuration is not supported", __func__);
    return cameraStatus(Status::ILLEGAL_ARGUMENT);
  }

  sp<Surface> inputSurface = nullptr;
  int inputStreamId = -1;
  std::optional<SupportedStreamConfiguration> inputConfig;
  {
    std::lock_guard<std::mutex> lock(mLock);
    for (int i = 0; i < in_requestedConfiguration.streams.size(); ++i) {
      halStreams[i] = getHalStream(streams[i]);
      if (mSessionContext.initializeStream(streams[i])) {
        ALOGV("Configured new stream: %s", streams[i].toString().c_str());
      }
    }

    inputConfig = pickInputConfigurationForStreams(
        streams, virtualCamera->getInputConfigs());
    if (!inputConfig.has_value()) {
      ALOGE(
          "%s: Failed to pick any input configuration for stream configuration "
          "request: %s",
          __func__, in_requestedConfiguration.toString().c_str());
      return cameraStatus(Status::ILLEGAL_ARGUMENT);
    }

    if (mRenderThread != nullptr) {
      // If there's already a render thread, this is not the first
      // configuration call. If the surface has the same resolution and pixel
      // format as the picked config, we don't need to do anything; the current
      // render thread can serve the new set of configurations. However, if it
      // differs, we need to discard the current surface and reinitialize the
      // render thread.

      std::optional<Resolution> currentInputResolution =
          resolutionFromSurface(mRenderThread->getInputSurface());
      if (currentInputResolution.has_value() &&
          *currentInputResolution == resolutionFromInputConfig(*inputConfig)) {
        ALOGI(
            "%s: Newly configured set of streams matches existing client "
            "surface (%dx%d)",
            __func__, currentInputResolution->width,
            currentInputResolution->height);
        return ndk::ScopedAStatus::ok();
      }

      if (mVirtualCameraClientCallback != nullptr) {
        mVirtualCameraClientCallback->onStreamClosed(mCurrentInputStreamId);
      }

      Resolution previousResolution =
          currentInputResolution.value_or(Resolution(0, 0));
      ALOGV(
          "%s: Newly requested output streams are not suitable for "
          "pre-existing surface (%dx%d), creating new surface (%dx%d)",
          __func__, previousResolution.width, previousResolution.height,
          inputConfig->width, inputConfig->height);

      mRenderThread->flush();
      mRenderThread->stop();
    }

    mRenderThread = std::make_unique<VirtualCameraRenderThread>(
        mSessionContext, resolutionFromInputConfig(*inputConfig),
        virtualCamera->getMaxInputResolution(), mCameraDeviceCallback);
    mRenderThread->start();
    inputSurface = mRenderThread->getInputSurface();
    inputStreamId = mCurrentInputStreamId =
        virtualCamera->allocateInputStreamId();
  }

  if (mVirtualCameraClientCallback != nullptr && inputSurface != nullptr) {
    // TODO(b/301023410) Pass streamId based on client input stream id once
    // support for multiple input streams is implemented. For now we always
    // create a single texture.
    mVirtualCameraClientCallback->onStreamConfigured(
        inputStreamId, aidl::android::view::Surface(inputSurface.get()),
        inputConfig->width, inputConfig->height, inputConfig->pixelFormat);
  }

  return ndk::ScopedAStatus::ok();
}

ndk::ScopedAStatus VirtualCameraSession::constructDefaultRequestSettings(
    RequestTemplate in_type, CameraMetadata* _aidl_return) {
  ALOGV("%s: type %d", __func__, static_cast<int32_t>(in_type));

  std::shared_ptr<VirtualCameraDevice> camera = mCameraDevice.lock();
  if (camera == nullptr) {
    ALOGW(
        "%s: constructDefaultRequestSettings called on already unregistered "
        "camera",
        __func__);
    return cameraStatus(Status::CAMERA_DISCONNECTED);
  }

  switch (in_type) {
    case RequestTemplate::PREVIEW:
    case RequestTemplate::STILL_CAPTURE:
    case RequestTemplate::VIDEO_RECORD:
    case RequestTemplate::VIDEO_SNAPSHOT: {
      *_aidl_return =
          createDefaultRequestSettings(in_type, camera->getInputConfigs());
      return ndk::ScopedAStatus::ok();
    }
    case RequestTemplate::MANUAL:
    case RequestTemplate::ZERO_SHUTTER_LAG:
      // MANUAL and ZERO_SHUTTER_LAG templates are not supported.
      return ndk::ScopedAStatus::fromServiceSpecificError(
          static_cast<int32_t>(Status::ILLEGAL_ARGUMENT));
    default:
      ALOGE("%s: unknown request template type %d", __FUNCTION__,
            static_cast<int>(in_type));
      return ndk::ScopedAStatus::fromServiceSpecificError(
          static_cast<int32_t>(Status::ILLEGAL_ARGUMENT));
  }
}

ndk::ScopedAStatus VirtualCameraSession::flush() {
  ALOGV("%s", __func__);
  std::lock_guard<std::mutex> lock(mLock);
  if (mRenderThread != nullptr) {
    mRenderThread->flush();
  }
  return ndk::ScopedAStatus::ok();
}

ndk::ScopedAStatus VirtualCameraSession::getCaptureRequestMetadataQueue(
    MQDescriptor<int8_t, SynchronizedReadWrite>* _aidl_return) {
  ALOGV("%s", __func__);
  *_aidl_return = mRequestMetadataQueue->dupeDesc();
  return ndk::ScopedAStatus::ok();
}

ndk::ScopedAStatus VirtualCameraSession::getCaptureResultMetadataQueue(
    MQDescriptor<int8_t, SynchronizedReadWrite>* _aidl_return) {
  ALOGV("%s", __func__);
  *_aidl_return = mResultMetadataQueue->dupeDesc();
  return ndk::ScopedAStatus::ok();
}

ndk::ScopedAStatus VirtualCameraSession::isReconfigurationRequired(
    const CameraMetadata& in_oldSessionParams,
    const CameraMetadata& in_newSessionParams, bool* _aidl_return) {
  ALOGV("%s: oldSessionParams: %s newSessionParams: %s", __func__,
        in_oldSessionParams.toString().c_str(),
        in_newSessionParams.toString().c_str());

  if (_aidl_return == nullptr) {
    return ndk::ScopedAStatus::fromServiceSpecificError(
        static_cast<int32_t>(Status::ILLEGAL_ARGUMENT));
  }

  *_aidl_return = true;
  return ndk::ScopedAStatus::ok();
}

ndk::ScopedAStatus VirtualCameraSession::processCaptureRequest(
    const std::vector<CaptureRequest>& in_requests,
    const std::vector<BufferCache>& in_cachesToRemove, int32_t* _aidl_return) {
  ALOGV("%s", __func__);

  if (!in_cachesToRemove.empty()) {
    mSessionContext.removeBufferCaches(in_cachesToRemove);
  }

  for (const auto& captureRequest : in_requests) {
    auto status = processCaptureRequest(captureRequest);
    if (!status.isOk()) {
      return status;
    }
  }
  *_aidl_return = in_requests.size();
  return ndk::ScopedAStatus::ok();
}

ndk::ScopedAStatus VirtualCameraSession::signalStreamFlush(
    const std::vector<int32_t>& in_streamIds, int32_t in_streamConfigCounter) {
  ALOGV("%s", __func__);

  (void)in_streamIds;
  (void)in_streamConfigCounter;
  return ndk::ScopedAStatus::ok();
}

ndk::ScopedAStatus VirtualCameraSession::switchToOffline(
    const std::vector<int32_t>& in_streamsToKeep,
    CameraOfflineSessionInfo* out_offlineSessionInfo,
    std::shared_ptr<ICameraOfflineSession>* _aidl_return) {
  ALOGV("%s", __func__);

  (void)in_streamsToKeep;
  (void)out_offlineSessionInfo;

  if (_aidl_return == nullptr) {
    return ndk::ScopedAStatus::fromServiceSpecificError(
        static_cast<int32_t>(Status::ILLEGAL_ARGUMENT));
  }

  *_aidl_return = nullptr;
  return cameraStatus(Status::OPERATION_NOT_SUPPORTED);
}

ndk::ScopedAStatus VirtualCameraSession::repeatingRequestEnd(
    int32_t in_frameNumber, const std::vector<int32_t>& in_streamIds) {
  ALOGV("%s", __func__);
  (void)in_frameNumber;
  (void)in_streamIds;
  return ndk::ScopedAStatus::ok();
}

std::set<int> VirtualCameraSession::getStreamIds() const {
  return mSessionContext.getStreamIds();
}

ndk::ScopedAStatus VirtualCameraSession::processCaptureRequest(
    const CaptureRequest& request) {
  ALOGV("%s: request: %s", __func__, request.toString().c_str());

  std::shared_ptr<ICameraDeviceCallback> cameraCallback = nullptr;
  RequestSettings requestSettings;
  int currentInputStreamId;
  {
    std::lock_guard<std::mutex> lock(mLock);

    // If the metadata is empty, the last received metadata applies; if it's
    // non-empty, update it.
    if (!request.settings.metadata.empty()) {
      mCurrentRequestMetadata = request.settings;
    }

    // We don't have any metadata for this request, which means none was
    // received in the first request either; this is an error state.
    if (mCurrentRequestMetadata.metadata.empty()) {
      return cameraStatus(Status::ILLEGAL_ARGUMENT);
    }

    requestSettings = createSettingsFromMetadata(mCurrentRequestMetadata);

    cameraCallback = mCameraDeviceCallback;
    currentInputStreamId = mCurrentInputStreamId;
  }

  if (cameraCallback == nullptr) {
    ALOGE(
        "%s: processCaptureRequest called, but there's no camera callback "
        "configured",
        __func__);
    return cameraStatus(Status::INTERNAL_ERROR);
  }

  if (!mSessionContext.importBuffersFromCaptureRequest(request)) {
    ALOGE("Failed to import buffers from capture request.");
    return cameraStatus(Status::INTERNAL_ERROR);
  }

  std::vector<CaptureRequestBuffer> taskBuffers;
  taskBuffers.reserve(request.outputBuffers.size());
  for (const StreamBuffer& streamBuffer : request.outputBuffers) {
    taskBuffers.emplace_back(streamBuffer.streamId, streamBuffer.bufferId,
                             importFence(streamBuffer.acquireFence));
  }

  {
    std::lock_guard<std::mutex> lock(mLock);
    if (mRenderThread == nullptr) {
      ALOGE(
          "%s: processCaptureRequest (frameNumber %d) called before configure "
          "(render thread not initialized)",
          __func__, request.frameNumber);
      return cameraStatus(Status::INTERNAL_ERROR);
    }
    mRenderThread->enqueueTask(std::make_unique<ProcessCaptureRequestTask>(
        request.frameNumber, taskBuffers, requestSettings));
  }

  if (mVirtualCameraClientCallback != nullptr) {
    auto status = mVirtualCameraClientCallback->onProcessCaptureRequest(
        currentInputStreamId, request.frameNumber);
    if (!status.isOk()) {
      ALOGE(
          "Failed to invoke onProcessCaptureRequest client callback for frame "
          "%d",
          request.frameNumber);
    }
  }

  return ndk::ScopedAStatus::ok();
}

}  // namespace virtualcamera
}  // namespace companion
}  // namespace android